From 686697e890ea4d492601ae809031d9d919c27bda Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 21 Feb 2024 11:30:22 +0000 Subject: [PATCH 001/136] Enforce using example.com as a domain for emails. --- .eslintrc.json | 3 +- eslint-local-rules/index.js | 37 +++++++++++++++++++ .../core/utilities/structures/accounts.ts | 2 +- .../tests/core/utilities/structures/scim.ts | 2 +- .../src/tests/utilities/TestConfiguration.ts | 8 ++-- .../api/routes/global/tests/groups.spec.ts | 2 +- .../tests/accounts/accounts.cloud.spec.ts | 2 +- qa-core/src/public-api/fixtures/users.ts | 2 +- 8 files changed, 48 insertions(+), 10 deletions(-) diff --git a/.eslintrc.json b/.eslintrc.json index 3de9d13046..ae9512152f 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -44,7 +44,8 @@ "no-undef": "off", "no-prototype-builtins": "off", "local-rules/no-budibase-imports": "error", - "local-rules/no-test-com": "error" + "local-rules/no-test-com": "error", + "local-rules/email-domain-example-com": "error" } }, { diff --git a/eslint-local-rules/index.js b/eslint-local-rules/index.js index 71bb5068da..177b0a129c 100644 --- a/eslint-local-rules/index.js +++ b/eslint-local-rules/index.js @@ -51,4 +51,41 @@ module.exports = { } }, }, + "email-domain-example-com": { + meta: { + type: "problem", + docs: { + description: + "enforce using the example.com domain for generator.email calls", + category: "Possible Errors", + recommended: false, + }, + fixable: "code", + schema: [], + }, + create: function (context) { + return { + CallExpression(node) { + if ( + node.callee.type === "MemberExpression" && + node.callee.object.name === "generator" && + node.callee.property.name === "email" && + node.arguments.length === 0 + ) { + context.report({ + node, + message: + "Prefer using generator.email with the domain \"{ domain: 'example.com' }\".", + fix: function (fixer) { + return fixer.replaceText( + node, + 'generator.email({ domain: "example.com" })' + ) + }, + }) + } + }, + } + }, + }, } diff --git 
a/packages/backend-core/tests/core/utilities/structures/accounts.ts b/packages/backend-core/tests/core/utilities/structures/accounts.ts index 515f94db1e..7dcc2de116 100644 --- a/packages/backend-core/tests/core/utilities/structures/accounts.ts +++ b/packages/backend-core/tests/core/utilities/structures/accounts.ts @@ -18,7 +18,7 @@ export const account = (partial: Partial = {}): Account => { return { accountId: uuid(), tenantId: generator.word(), - email: generator.email(), + email: generator.email({ domain: "example.com" }), tenantName: generator.word(), hosting: Hosting.SELF, createdAt: Date.now(), diff --git a/packages/backend-core/tests/core/utilities/structures/scim.ts b/packages/backend-core/tests/core/utilities/structures/scim.ts index 80f41c605d..f36ccd2374 100644 --- a/packages/backend-core/tests/core/utilities/structures/scim.ts +++ b/packages/backend-core/tests/core/utilities/structures/scim.ts @@ -13,7 +13,7 @@ interface CreateUserRequestFields { export function createUserRequest(userData?: Partial) { const defaultValues = { externalId: uuid(), - email: generator.email(), + email: generator.email({ domain: "example.com" }), firstName: generator.first(), lastName: generator.last(), username: generator.name(), diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 8e6ecdfeb1..2e6bbb6290 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -301,7 +301,7 @@ export default class TestConfiguration { lastName = generator.last(), builder = true, admin = false, - email = generator.email(), + email = generator.email({ domain: "example.com" }), roles, } = config @@ -357,7 +357,7 @@ export default class TestConfiguration { id, firstName = generator.first(), lastName = generator.last(), - email = generator.email(), + email = generator.email({ domain: "example.com" }), builder = true, admin, roles, @@ 
-485,7 +485,7 @@ export default class TestConfiguration { async basicRoleHeaders() { return await this.roleHeaders({ - email: generator.email(), + email: generator.email({ domain: "example.com" }), builder: false, prodApp: true, roleId: roles.BUILTIN_ROLE_IDS.BASIC, @@ -493,7 +493,7 @@ export default class TestConfiguration { } async roleHeaders({ - email = generator.email(), + email = generator.email({ domain: "example.com" }), roleId = roles.BUILTIN_ROLE_IDS.ADMIN, builder = false, prodApp = true, diff --git a/packages/worker/src/api/routes/global/tests/groups.spec.ts b/packages/worker/src/api/routes/global/tests/groups.spec.ts index 8f0739a812..b69c4781c4 100644 --- a/packages/worker/src/api/routes/global/tests/groups.spec.ts +++ b/packages/worker/src/api/routes/global/tests/groups.spec.ts @@ -147,7 +147,7 @@ describe("/api/global/groups", () => { await Promise.all( Array.from({ length: 30 }).map(async (_, i) => { - const email = `user${i}@${generator.domain()}` + const email = `user${i}@example.com` const user = await config.api.users.saveUser({ ...structures.users.user(), email, diff --git a/qa-core/src/account-api/tests/accounts/accounts.cloud.spec.ts b/qa-core/src/account-api/tests/accounts/accounts.cloud.spec.ts index 0969b72cf9..01338b609c 100644 --- a/qa-core/src/account-api/tests/accounts/accounts.cloud.spec.ts +++ b/qa-core/src/account-api/tests/accounts/accounts.cloud.spec.ts @@ -84,7 +84,7 @@ describe("Accounts", () => { }) it("searches by email", async () => { - const email = generator.email() + const email = generator.email({ domain: "example.com" }) // Empty result const [_, emptyBody] = await config.api.accounts.search(email, "email") diff --git a/qa-core/src/public-api/fixtures/users.ts b/qa-core/src/public-api/fixtures/users.ts index e20c464b34..418b565d2a 100644 --- a/qa-core/src/public-api/fixtures/users.ts +++ b/qa-core/src/public-api/fixtures/users.ts @@ -4,7 +4,7 @@ import { generator } from "../../shared" export const generateUser = ( 
overrides: Partial = {} ): CreateUserParams => ({ - email: generator.email(), + email: generator.email({ domain: "example.com" }), roles: { [generator.string({ length: 32, alpha: true, numeric: true })]: generator.word(), From 90dd267aaf5ad6a95583c0d6d888a9a19068d299 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 21 Feb 2024 14:10:03 +0000 Subject: [PATCH 002/136] Update account-portal submodule to latest master. --- packages/account-portal | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/account-portal b/packages/account-portal index 4384bc742c..4de0d98e2f 160000 --- a/packages/account-portal +++ b/packages/account-portal @@ -1 +1 @@ -Subproject commit 4384bc742ca22fb1e9bf91843e65ae929daf17e2 +Subproject commit 4de0d98e2f8d80ee7631dffe076063273812a441 From 2933571c62e9a8f58a24cdb73e5d7eed768b8812 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 28 Feb 2024 08:34:41 +0000 Subject: [PATCH 003/136] update runLuceneQuery in client to allow for all filter matching --- packages/shared-core/src/filters.ts | 59 +++++++++++++++++++++-------- 1 file changed, 44 insertions(+), 15 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 46d765a7b5..2c4861ed60 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -390,23 +390,52 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { } ) - // Match a document against all criteria const docMatch = (doc: any) => { - return ( - stringMatch(doc) && - fuzzyMatch(doc) && - rangeMatch(doc) && - equalMatch(doc) && - notEqualMatch(doc) && - emptyMatch(doc) && - notEmptyMatch(doc) && - oneOf(doc) && - contains(doc) && - containsAny(doc) && - notContains(doc) - ) - } + // Determine active filters based on query object + const activeFilterKeys = Object.entries(query || {}) + .filter( + ([key, value]) => + !["allOr", "onEmptyFilter"].includes(key) && + Object.keys(value).length > 0 + ) + 
.map(([key]) => key) + // Apply filters dynamically based on activeFilterKeys + const results = activeFilterKeys.map(filterKey => { + switch (filterKey) { + case "string": + return stringMatch(doc) + case "fuzzy": + return fuzzyMatch(doc) + case "range": + return rangeMatch(doc) + case "equal": + return equalMatch(doc) + case "notEqual": + return notEqualMatch(doc) + case "empty": + return emptyMatch(doc) + case "notEmpty": + return notEmptyMatch(doc) + case "oneOf": + return oneOf(doc) + case "contains": + return contains(doc) + case "containsAny": + return containsAny(doc) + case "notContains": + return notContains(doc) + default: + return true // If the filter type is not recognized, default to true (assuming pass) + } + }) + + if (query!.allOr) { + return results.some(result => result === true) + } else { + return results.every(result => result === true) + } + } // Process all docs return docs.filter(docMatch) } From 76b9cbcc5feb599017a43cacbd0f665c9bab7fd8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 11:22:27 +0100 Subject: [PATCH 004/136] Create docWriteThrough redis cache --- packages/backend-core/src/redis/init.ts | 13 ++++++++++++- packages/backend-core/src/redis/utils.ts | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..7920dfed2d 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = await new Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ export async function init() { utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + 
).init() } export async function shutdown() { @@ -104,3 +108,10 @@ export async function getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if (!docWritethroughClient) { + await init() + } + return docWritethroughClient +} diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** From ff7c8d3b9546fc60424fb8cf24f8ab4615416f27 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:44:52 +0100 Subject: [PATCH 005/136] DocWritethrough --- .../backend-core/src/cache/docWritethrough.ts | 102 ++++++++++++++++++ .../backend-core/src/db/couch/DatabaseImpl.ts | 9 ++ .../backend-core/src/db/instrumentation.ts | 7 ++ packages/types/src/sdk/db.ts | 1 + 4 files changed, 119 insertions(+) create mode 100644 packages/backend-core/src/cache/docWritethrough.ts diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts new file mode 100644 index 0000000000..9e1977f797 --- /dev/null +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -0,0 +1,102 @@ +import BaseCache from "./base" +import { getDocWritethroughClient } from "../redis/init" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" + +const DEFAULT_WRITE_RATE_MS = 10000 + +let CACHE: BaseCache | null = null +async function getCache() { + if (!CACHE) { + const client = await getDocWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} + +interface CacheItem { + lastWrite: number +} + +export class DocWritethrough { + db: Database + docId: string + writeRateMs: number + + constructor(
+ db: Database, + docId: string, + writeRateMs: number = DEFAULT_WRITE_RATE_MS + ) { + this.db = db + this.docId = docId + this.writeRateMs = writeRateMs + } + + private makeCacheItem(): CacheItem { + return { lastWrite: Date.now() } + } + + async patch(data: Record) { + const cache = await getCache() + + const key = `${this.docId}:info` + const cacheItem = await cache.withCache( + key, + null, + () => this.makeCacheItem(), + { + useTenancy: false, + } + ) + + await this.storeToCache(cache, data) + + const updateDb = + !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs + // let output = this.doc + if (updateDb) { + await this.persistToDb(cache) + } + } + + private async storeToCache(cache: BaseCache, data: Record) { + for (const [key, value] of Object.entries(data)) { + const cacheKey = this.docId + ":data:" + key + await cache.store(cacheKey, { key, value }, undefined) + } + } + + private async persistToDb(cache: BaseCache) { + const key = `${this.db.name}_${this.docId}` + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 15000, + }, + async () => { + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } + + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await this.db.put(doc) + } + ) + + if (!lockResponse.executed) { + throw `DocWriteThrough could not be persisted to db for ${key}` + } + } +} diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 0e2b4173b0..221399325d 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -134,6 +134,15 @@ export class DatabaseImpl implements Database { }) } + async 
docExists(id: string): Promise { + try { + await this.get(id) + return true + } catch { + return false + } + } + async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index aa2ac424ae..92bd55406f 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -38,6 +38,13 @@ export class DDInstrumentedDatabase implements Database { }) } + docExists(id: string): Promise { + return tracer.trace("db.docExists", span => { + span?.addTags({ db_name: this.name, doc_id: id }) + return this.db.docExists(id) + }) + } + getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index 9e44a4827f..4ae0869156 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -122,6 +122,7 @@ export interface Database { exists(): Promise get(id?: string): Promise + docExists(id: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 7d50a70d039c3e8308ac3a04f7a1ad32b4383b7e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:47:18 +0100 Subject: [PATCH 006/136] USe get for doc exists --- packages/backend-core/src/cache/base/index.ts | 2 +- packages/backend-core/src/db/couch/DatabaseImpl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 264984c6a5..23c952c7b2 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -60,7 +60,7 @@ export default class BaseCache { */ async withCache( key: string, - ttl: number, + ttl: number | null = null, fetchFn: any, opts = { useTenancy: true } ) { diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts 
b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 221399325d..6be53a9c54 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -136,7 +136,7 @@ export class DatabaseImpl implements Database { async docExists(id: string): Promise { try { - await this.get(id) + await this.performCall(db => () => db.head(id)) return true } catch { return false From 3af2da3b7df8f14cda67879f5942dfc7404ad0e8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:17:18 +0100 Subject: [PATCH 007/136] DatabaseImpl.docExists test --- .../src/db/tests/DatabaseImpl.spec.ts | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 packages/backend-core/src/db/tests/DatabaseImpl.spec.ts diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts new file mode 100644 index 0000000000..140ecf4f2c --- /dev/null +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -0,0 +1,55 @@ +import _ from "lodash" +import { AnyDocument } from "@budibase/types" +import { generator } from "../../../tests" +import { DatabaseImpl } from "../couch" +import { newid } from "../../utils" + +describe("DatabaseImpl", () => { + const database = new DatabaseImpl(generator.word()) + const documents: AnyDocument[] = [] + + beforeAll(async () => { + const docsToCreate = Array.from({ length: 10 }).map(() => ({ + _id: newid(), + })) + const createdDocs = await database.bulkDocs(docsToCreate) + + documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) + }) + + describe("docExists", () => { + it("can check existing docs by id", async () => { + const existingDoc = _.sample(documents) + const result = await database.docExists(existingDoc!._id!) 
+ + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.docExists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + + const results = [] + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.docExists(id)).toBe(true) + + await database.remove(existingDoc!) + expect(await database.docExists(id)).toBe(false) + }) + }) +}) From 40d7a0a7413325104a49dd7e3a880fe2462ed0b4 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:23:32 +0100 Subject: [PATCH 008/136] docWritethrough test --- .../src/cache/tests/docWritethrough.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 packages/backend-core/src/cache/tests/docWritethrough.spec.ts diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..bfb1da5f1c --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,47 @@ +import tk from "timekeeper" +import { env } from "../.." 
+import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" +import { DocWritethrough } from "../docWritethrough" +import _ from "lodash" + +env._set("MOCK_REDIS", null) + +const initialTime = Date.now() + +const WRITE_RATE_MS = 500 + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(() => { + tk.freeze(initialTime) + documentId = structures.db.id() + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) + + it("patching will not persist until timeout is hit", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + tk.travel(Date.now() + WRITE_RATE_MS - 1) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) + }) +}) From dc4d1fdbda5eb822b1c8a5a14dcc08076ec066df Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:28:35 +0100 Subject: [PATCH 009/136] Add persisting tests --- .../src/cache/tests/docWritethrough.spec.ts | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bfb1da5f1c..ab0de53bee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -33,7 +33,7 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) 
}) - it("patching will not persist until timeout is hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) @@ -43,5 +43,42 @@ describe("docWritethrough", () => { expect(await db.docExists(documentId)).toBe(false) }) }) + + it("patching will persist if timeout hits and next patch is called", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + _rev: expect.stringMatching(/1-.+/), + createdAt: new Date(initialTime + 500).toISOString(), + updatedAt: new Date(initialTime + 500).toISOString(), + }) + }) + }) + + it("patching will not persist even if timeout hits but next patch is not callec", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) }) }) From 3ec00524811a2734e1c2601cae728df141b024ff Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:41:26 +0100 Subject: [PATCH 010/136] Add extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 86 ++++++++++++++++--- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index ab0de53bee..16e47ce3c3 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -7,9 +7,17 @@ import _ from "lodash" env._set("MOCK_REDIS", null) +const WRITE_RATE_MS = 500 + const initialTime = Date.now() -const WRITE_RATE_MS = 500 +function resetTime() { + tk.travel(initialTime) +} +function travelForward(ms: number) { + const updatedTime = Date.now() + ms + tk.travel(updatedTime) +} describe("docWritethrough", () => { const config = new DBTestConfiguration() @@ -28,7 +36,7 @@ describe("docWritethrough", () => { } beforeEach(() => { - tk.freeze(initialTime) + resetTime() documentId = structures.db.id() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -37,7 +45,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS - 1) + travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) expect(await db.docExists(documentId)).toBe(false) @@ -51,7 +59,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -62,23 +70,79 @@ describe("docWritethrough", () => { ...patch2, ...patch3, _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + 500).toISOString(), - updatedAt: new Date(initialTime + 500).toISOString(), + createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), }) }) }) + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + 
travelForward(WRITE_RATE_MS) + const date1 = new Date() + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + const date2 = new Date() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: date2.toISOString(), + }) + ) + }) + }) + it("patching will not persist even if timeout hits but next patch is not callec", async () => { await config.doInTenant(async () => { - const patch1 = generatePatchObject(2) - const patch2 = generatePatchObject(2) - await docWritethrough.patch(patch1) - await docWritethrough.patch(patch2) + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) expect(await db.docExists(documentId)).toBe(false) }) }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const time1 = travelForward(WRITE_RATE_MS) + const patch2 = generatePatchObject(1) + await docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! 
+ expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + travelForward(WRITE_RATE_MS) + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) }) }) From 720d5a41052179da6c734b5edb10e63b6e6d8436 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:51:42 +0100 Subject: [PATCH 011/136] Test concurrency --- .../backend-core/src/cache/docWritethrough.ts | 12 ++++-- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++++++++++++- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 9e1977f797..13a85a0d84 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -19,9 +19,9 @@ interface CacheItem { } export class DocWritethrough { - db: Database - docId: string - writeRateMs: number + private db: Database + private _docId: string + private writeRateMs: number constructor( db: Database, @@ -29,10 +29,14 @@ export class DocWritethrough { writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { this.db = db - this.docId = docId + this._docId = docId this.writeRateMs = writeRateMs } + get docId() { + return this._docId + } + private makeCacheItem(): CacheItem { return { lastWrite: Date.now() } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 16e47ce3c3..aed87499ee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -41,8 +41,9 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, 
WRITE_RATE_MS) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist if timeout from the creation does not hit", async () => { await config.doInTenant(async () => { + travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) travelForward(WRITE_RATE_MS - 1) @@ -116,7 +117,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - const time1 = travelForward(WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -144,5 +145,41 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id(), + WRITE_RATE_MS + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + travelForward(WRITE_RATE_MS) + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) }) }) From 3068e58c31db762fd9abd77c2c3665f8be181645 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 16:48:16 +0100 Subject: [PATCH 012/136] Ensure keys are removed --- .../backend-core/src/cache/docWritethrough.ts | 4 +++ .../src/cache/tests/docWritethrough.spec.ts | 28 +++++++++++++++++++ 2 files changed, 
32 insertions(+) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 13a85a0d84..bde93182a9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -96,6 +96,10 @@ export class DocWritethrough { } await this.db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } ) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index aed87499ee..65e9450f62 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -181,5 +181,33 @@ describe("docWritethrough", () => { ) }) }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + travelForward(WRITE_RATE_MS) + + await docWritethrough.patch({}) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + travelForward(WRITE_RATE_MS) + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) }) }) From 6b8f67ed417fd0405ebc8d71bc3c62639beb67fb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 17:01:16 +0100 Subject: [PATCH 013/136] Extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 65e9450f62..974494d1c9 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -77,6 +77,35 @@ describe("docWritethrough", () => { }) }) + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + travelForward(WRITE_RATE_MS) + + const patch4 = generatePatchObject(3) + await docWritethrough.patch(patch4) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + ...patch4, + }) + ) + }) + }) + it("date audit fields are set correctly when persisting", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) From 66751728bbc7ba32ed98b4b53afe8fad909cf72e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 10:53:18 +0100 Subject: [PATCH 014/136] Fixes and tests --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++---------- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++- 2 files changed, 82 insertions(+), 47 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index bde93182a9..80063e4772 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,8 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private docInfoCacheKey: string + constructor( db: Database, docId: string, @@ -31,6 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs + this.docInfoCacheKey = `${this.docId}:info` } get docId() { @@ -44,26 +47,39 @@ export 
class DocWritethrough { async patch(data: Record) { const cache = await getCache() - const key = `${this.docId}:info` - const cacheItem = await cache.withCache( - key, - null, - () => this.makeCacheItem(), - { - useTenancy: false, - } - ) - await this.storeToCache(cache, data) - const updateDb = - !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs - // let output = this.doc + const updateDb = await this.shouldUpdateDb(cache) + if (updateDb) { - await this.persistToDb(cache) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: this.docInfoCacheKey, + ttl: 15000, + }, + async () => { + if (await this.shouldUpdateDb(cache)) { + await this.persistToDb(cache) + await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } } } + private async shouldUpdateDb(cache: BaseCache) { + const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => + this.makeCacheItem() + ) + return cacheItem.lastWrite <= Date.now() - this.writeRateMs + } + private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { const cacheKey = this.docId + ":data:" + key @@ -72,39 +88,23 @@ export class DocWritethrough { } private async persistToDb(cache: BaseCache) { - const key = `${this.db.name}_${this.docId}` + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: key, - ttl: 15000, - }, - async () => { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = 
await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } + await this.db.put(doc) - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } - ) - - if (!lockResponse.executed) { - throw `DocWriteThrough could not be persisted to db for ${key}` + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) } } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 974494d1c9..bca781e377 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,10 @@ import tk from "timekeeper" -import { env } from "../.." + import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -env._set("MOCK_REDIS", null) - const WRITE_RATE_MS = 500 const initialTime = Date.now() @@ -238,5 +236,42 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent calls will not cause multiple saves", async () => { + async function parallelPatch(count: number) { + await Promise.all( + Array.from({ length: count }).map(() => + docWritethrough.patch(generatePatchObject(1)) + ) + ) + } + + const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") + const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + + await config.doInTenant(async () => { + await parallelPatch(5) + expect(persistToDbSpy).not.toBeCalled() + expect(storeToCacheSpy).toBeCalledTimes(5) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(40) + + expect(persistToDbSpy).toBeCalledTimes(1) + 
expect(storeToCacheSpy).toBeCalledTimes(45) + + await parallelPatch(10) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(55) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(5) + expect(persistToDbSpy).toBeCalledTimes(2) + expect(storeToCacheSpy).toBeCalledTimes(60) + }) + }) }) }) From 2b7c988823384b60201a3122b1081a55200b2157 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:30 +0100 Subject: [PATCH 015/136] Making code more readable --- .../backend-core/src/cache/docWritethrough.ts | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 80063e4772..5148950c1d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -15,7 +15,7 @@ async function getCache() { } interface CacheItem { - lastWrite: number + nextWrite: number } export class DocWritethrough { @@ -40,8 +40,8 @@ export class DocWritethrough { return this._docId } - private makeCacheItem(): CacheItem { - return { lastWrite: Date.now() } + private makeNextWriteInfoItem(): CacheItem { + return { nextWrite: Date.now() + this.writeRateMs } } async patch(data: Record) { @@ -62,7 +62,10 @@ export class DocWritethrough { async () => { if (await this.shouldUpdateDb(cache)) { await this.persistToDb(cache) - await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + await cache.store( + this.docInfoCacheKey, + this.makeNextWriteInfoItem() + ) } } ) @@ -75,9 +78,9 @@ export class DocWritethrough { private async shouldUpdateDb(cache: BaseCache) { const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeCacheItem() + this.makeNextWriteInfoItem() ) - return cacheItem.lastWrite <= Date.now() - this.writeRateMs + return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { From 
ff7c784342ba79f994a15500984ab6668efef635 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:55 +0100 Subject: [PATCH 016/136] Type caches --- packages/backend-core/src/cache/base/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 23c952c7b2..911bd6a831 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -58,12 +58,12 @@ export default class BaseCache { /** * Read from the cache. Write to the cache if not exists. */ - async withCache( + async withCache( key: string, ttl: number | null = null, - fetchFn: any, + fetchFn: () => Promise | T, opts = { useTenancy: true } - ) { + ): Promise { const cachedValue = await this.get(key, opts) if (cachedValue) { return cachedValue From 1c171215680e99a07848168f9579557f3988bd15 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:12:31 +0100 Subject: [PATCH 017/136] Fix types --- packages/backend-core/src/cache/generic.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts index 3ac323a8d4..2d6d8b9472 100644 --- a/packages/backend-core/src/cache/generic.ts +++ b/packages/backend-core/src/cache/generic.ts @@ -26,7 +26,8 @@ export const store = (...args: Parameters) => GENERIC.store(...args) export const destroy = (...args: Parameters) => GENERIC.delete(...args) -export const withCache = (...args: Parameters) => - GENERIC.withCache(...args) +export const withCache = ( + ...args: Parameters> +) => GENERIC.withCache(...args) export const bustCache = (...args: Parameters) => GENERIC.bustCache(...args) From 3a341338a197a13f76993fbc372baf68566cefe9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:07:27 +0100 Subject: [PATCH 018/136] Log requests --- 
packages/backend-core/src/cache/index.ts | 1 + packages/pro | 2 +- packages/types/src/documents/document.ts | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/index.ts b/packages/backend-core/src/cache/index.ts index 4fa986e4e2..3b25108634 100644 --- a/packages/backend-core/src/cache/index.ts +++ b/packages/backend-core/src/cache/index.ts @@ -5,3 +5,4 @@ export * as writethrough from "./writethrough" export * as invite from "./invite" export * as passwordReset from "./passwordReset" export * from "./generic" +export * as docWritethrough from "./docWritethrough" diff --git a/packages/pro b/packages/pro index 183b35d3ac..c83fbd01f5 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec +Subproject commit c83fbd01f50872eedb772fba9a90d79650403126 diff --git a/packages/types/src/documents/document.ts b/packages/types/src/documents/document.ts index 18feb9b518..0de4337f4b 100644 --- a/packages/types/src/documents/document.ts +++ b/packages/types/src/documents/document.ts @@ -38,6 +38,7 @@ export enum DocumentType { AUTOMATION_METADATA = "meta_au", AUDIT_LOG = "al", APP_MIGRATION_METADATA = "_design/migrations", + SCIM_LOG = "scimlog", } // these are the core documents that make up the data, design From 4e53cb5143de0c7cc8a947889faf558dfa7c40c7 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:09:42 +0100 Subject: [PATCH 019/136] Flags --- packages/backend-core/src/environment.ts | 1 + packages/pro | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index b3179cbeea..2da2a77d67 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -186,6 +186,7 @@ const environment = { environment[key] = value }, ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M", + DISABLE_SCIM_CALLS: 
process.env.DISABLE_SCIM_CALLS, } // clean up any environment variable edge cases diff --git a/packages/pro b/packages/pro index c83fbd01f5..35c46cc6c5 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit c83fbd01f50872eedb772fba9a90d79650403126 +Subproject commit 35c46cc6c5f4a6d6f874ec1b51a042cb28d237da From 1c701fa81ed8f58d5f5e8db5c4cf5e08e420a899 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:30:43 +0100 Subject: [PATCH 020/136] Log responses --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 35c46cc6c5..4f8998c4be 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 35c46cc6c5f4a6d6f874ec1b51a042cb28d237da +Subproject commit 4f8998c4be4642a0fe55011514462235edbac7b8 From 93e462b8c769881150026b6c6e9e7048daa9a8e9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:38:48 +0100 Subject: [PATCH 021/136] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5148950c1d..e46c763906 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -33,7 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.docId}:info` + this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` } get docId() { From 2da5cb3ddbf0f4844bb259d83960243c1612a2eb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:41:40 +0100 Subject: [PATCH 022/136] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts 
b/packages/backend-core/src/cache/docWritethrough.ts index e46c763906..e367c9e060 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,7 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private cacheKeyPrefix: string private docInfoCacheKey: string constructor( @@ -33,7 +34,8 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` + this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } get docId() { @@ -85,7 +87,7 @@ export class DocWritethrough { private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { - const cacheKey = this.docId + ":data:" + key + const cacheKey = this.cacheKeyPrefix + ":data:" + key await cache.store(cacheKey, { key, value }, undefined) } } @@ -98,7 +100,7 @@ export class DocWritethrough { doc = { _id: this.docId } } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) + const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value From 4ff2b36553c8b76c9fed6b37989e91ca8618fb34 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:59:51 +0100 Subject: [PATCH 023/136] Use overloads --- .../src/cache/tests/docWritethrough.spec.ts | 6 ++-- .../backend-core/src/db/couch/DatabaseImpl.ts | 28 ++++++++++++------- .../backend-core/src/db/instrumentation.ts | 14 ++++------ .../src/db/tests/DatabaseImpl.spec.ts | 16 +++++------ packages/types/src/sdk/db.ts | 2 +- 5 files changed, 35 insertions(+), 31 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 
bca781e377..4c4a4b2b60 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -35,7 +35,7 @@ describe("docWritethrough", () => { beforeEach(() => { resetTime() - documentId = structures.db.id() + documentId = structures.uuid() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -47,7 +47,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) @@ -136,7 +136,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 9d198e4307..416313f520 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -70,7 +70,15 @@ export class DatabaseImpl implements Database { DatabaseImpl.nano = buildNano(couchInfo) } - async exists() { + exists(docId?: string) { + if (docId === undefined) { + return this.dbExists() + } + + return this.docExists(docId) + } + + private async dbExists() { const response = await directCouchUrlCall({ url: `${this.couchInfo.url}/${this.name}`, method: "HEAD", @@ -79,6 +87,15 @@ export class DatabaseImpl implements Database { return response.status === 200 } + private async docExists(id: string): Promise { + try { + await this.performCall(db => () => db.head(id)) + return true + } catch { + return false + } + } + private nano() { return this.instanceNano || DatabaseImpl.nano } @@ -135,15 +152,6 @@ export class DatabaseImpl implements Database { }) } - async docExists(id: string): Promise { - try { - await this.performCall(db => () => db.head(id)) - return true - } 
catch { - return false - } - } - async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 87af0e3127..795f30d7cd 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database { return this.db.name } - exists(): Promise { + exists(docId?: string): Promise { return tracer.trace("db.exists", span => { - span?.addTags({ db_name: this.name }) + span?.addTags({ db_name: this.name, doc_id: docId }) + if (docId) { + return this.db.exists(docId) + } return this.db.exists() }) } @@ -38,13 +41,6 @@ export class DDInstrumentedDatabase implements Database { }) } - docExists(id: string): Promise { - return tracer.trace("db.docExists", span => { - span?.addTags({ db_name: this.name, doc_id: id }) - return this.db.docExists(id) - }) - } - getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts index 140ecf4f2c..586f13f417 100644 --- a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -17,16 +17,16 @@ describe("DatabaseImpl", () => { documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) }) - describe("docExists", () => { + describe("document exists", () => { it("can check existing docs by id", async () => { const existingDoc = _.sample(documents) - const result = await database.docExists(existingDoc!._id!) + const result = await database.exists(existingDoc!._id!) 
expect(result).toBe(true) }) it("can check non existing docs by id", async () => { - const result = await database.docExists(newid()) + const result = await database.exists(newid()) expect(result).toBe(false) }) @@ -36,9 +36,9 @@ describe("DatabaseImpl", () => { const id = existingDoc!._id! const results = [] - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) expect(results).toEqual([true, true, true]) }) @@ -46,10 +46,10 @@ describe("DatabaseImpl", () => { it("returns false after the doc is deleted", async () => { const existingDoc = _.sample(documents) const id = existingDoc!._id! - expect(await database.docExists(id)).toBe(true) + expect(await database.exists(id)).toBe(true) await database.remove(existingDoc!) - expect(await database.docExists(id)).toBe(false) + expect(await database.exists(id)).toBe(false) }) }) }) diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index dafc9ced57..4d103d5be6 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,7 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise - docExists(id: string): Promise + exists(docId: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From e3c514e45aee8398c4b65b530c86536f7169dafd Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Mon, 4 Mar 2024 09:48:47 +0000 Subject: [PATCH 024/136] Update test lucene builder and add more tests --- packages/shared-core/src/filters.ts | 57 ++++---- .../shared-core/src/tests/filters.test.ts | 128 +++++++++++++----- 2 files changed, 116 insertions(+), 69 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 2c4861ed60..5f975ff541 100644 --- a/packages/shared-core/src/filters.ts +++ 
b/packages/shared-core/src/filters.ts @@ -391,43 +391,32 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { ) const docMatch = (doc: any) => { - // Determine active filters based on query object - const activeFilterKeys = Object.entries(query || {}) + const filterFunctions = { + string: stringMatch, + fuzzy: fuzzyMatch, + range: rangeMatch, + equal: equalMatch, + notEqual: notEqualMatch, + empty: emptyMatch, + notEmpty: notEmptyMatch, + oneOf: oneOf, + contains: contains, + containsAny: containsAny, + notContains: notContains, + } + const activeFilterKeys: (keyof typeof filterFunctions)[] = Object.entries( + query + ) .filter( - ([key, value]) => + ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && - Object.keys(value).length > 0 + Object.keys(value as Record).length > 0 ) - .map(([key]) => key) + .map(([key]) => key as keyof typeof filterFunctions) - // Apply filters dynamically based on activeFilterKeys - const results = activeFilterKeys.map(filterKey => { - switch (filterKey) { - case "string": - return stringMatch(doc) - case "fuzzy": - return fuzzyMatch(doc) - case "range": - return rangeMatch(doc) - case "equal": - return equalMatch(doc) - case "notEqual": - return notEqualMatch(doc) - case "empty": - return emptyMatch(doc) - case "notEmpty": - return notEmptyMatch(doc) - case "oneOf": - return oneOf(doc) - case "contains": - return contains(doc) - case "containsAny": - return containsAny(doc) - case "notContains": - return notContains(doc) - default: - return true // If the filter type is not recognized, default to true (assuming pass) - } + const results: boolean[] = activeFilterKeys.map(filterKey => { + const filterFunction = filterFunctions[filterKey] + return filterFunction ? 
filterFunction(doc) : true }) if (query!.allOr) { @@ -436,7 +425,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { return results.every(result => result === true) } } - // Process all docs + return docs.filter(docMatch) } diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index 8586d58777..1e0a68de89 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -47,10 +47,7 @@ describe("runLuceneQuery", () => { }, ] - function buildQuery( - filterKey: string, - value: { [key: string]: any } - ): SearchQuery { + function buildQuery(filters: { [filterKey: string]: any }): SearchQuery { const query: SearchQuery = { string: {}, fuzzy: {}, @@ -63,8 +60,13 @@ describe("runLuceneQuery", () => { notContains: {}, oneOf: {}, containsAny: {}, + allOr: false, } - query[filterKey as SearchQueryOperators] = value + + for (const filterKey in filters) { + query[filterKey as SearchQueryOperators] = filters[filterKey] + } + return query } @@ -73,16 +75,17 @@ describe("runLuceneQuery", () => { }) it("should return matching rows for equal filter", () => { - const query = buildQuery("equal", { - order_status: 4, + const query = buildQuery({ + equal: { order_status: 4 }, }) expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2]) }) it("should return matching row for notEqual filter", () => { - const query = buildQuery("notEqual", { - order_status: 4, + const query = buildQuery({ + notEqual: { order_status: 4 }, }) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) }) @@ -90,48 +93,56 @@ describe("runLuceneQuery", () => { expect( runLuceneQuery( docs, - buildQuery("fuzzy", { - description: "sm", + buildQuery({ + fuzzy: { description: "sm" }, }) ).map(row => row.description) ).toEqual(["Small box"]) expect( runLuceneQuery( docs, - buildQuery("string", { - description: "SM", + buildQuery({ + string: { 
description: "SM" }, }) ).map(row => row.description) ).toEqual(["Small box"]) }) it("should return rows within a range filter", () => { - const query = buildQuery("range", { - customer_id: { - low: 500, - high: 1000, + const query = buildQuery({ + range: { + customer_id: { + low: 500, + high: 1000, + }, }, }) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) }) it("should return rows with numeric strings within a range filter", () => { - const query = buildQuery("range", { - customer_id: { - low: "500", - high: "1000", + const query = buildQuery({ + range: { + customer_id: { + low: "500", + high: "1000", + }, }, }) expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) }) it("should return rows with ISO date strings within a range filter", () => { - const query = buildQuery("range", { - order_date: { - low: "2016-01-04T00:00:00.000Z", - high: "2016-01-11T00:00:00.000Z", + const query = buildQuery({ + range: { + order_date: { + low: "2016-01-04T00:00:00.000Z", + high: "2016-01-11T00:00:00.000Z", + }, }, }) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2]) }) @@ -150,40 +161,87 @@ describe("runLuceneQuery", () => { label: "", }, ] - const query = buildQuery("range", { - order_date: { - low: "2016-01-04T00:00:00.000Z", - high: "2016-01-11T00:00:00.000Z", + + const query = buildQuery({ + range: { + order_date: { + low: "2016-01-04T00:00:00.000Z", + high: "2016-01-11T00:00:00.000Z", + }, }, }) + expect(runLuceneQuery(docs, query)).toEqual(docs) }) it("should return rows with matches on empty filter", () => { - const query = buildQuery("empty", { - label: null, + const query = buildQuery({ + empty: { + label: null, + }, }) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1]) }) it("should return rows with matches on notEmpty filter", () => { - const query = buildQuery("notEmpty", { - label: null, + const query = buildQuery({ + notEmpty: { + label: null, + }, }) + 
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2, 3]) }) test.each([[523, 259], "523,259"])( "should return rows with matches on numeric oneOf filter", input => { - let query = buildQuery("oneOf", { - customer_id: input, + const query = buildQuery({ + oneOf: { + customer_id: input, + }, }) + expect(runLuceneQuery(docs, query).map(row => row.customer_id)).toEqual([ 259, 523, ]) } ) + + it("should return matching results if allOr is true and only one filter matches", () => { + const query = buildQuery({ + allOr: true, + oneOf: { staff_id: [10] }, + contains: { description: ["box"] }, + }) + + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([ + 1, 2, 3, + ]) + }) + + // what should the name of this test be if it's the same test as above but with different operands + + it("should return matching results if allOr is true and only one filter matches with different operands", () => { + const query = buildQuery({ + allOr: true, + equal: { order_status: 4 }, + oneOf: { label: ["FRAGILE"] }, + }) + + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2]) + }) + + it("should return nothing if allOr is false and only one filter matches", () => { + const query = buildQuery({ + allOr: false, + oneOf: { staff_id: [10] }, + contains: { description: ["box"] }, + }) + + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([]) + }) }) describe("buildLuceneQuery", () => { From 5679acb86811c290dd84faf1d81b19d615680f6b Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Mon, 4 Mar 2024 09:55:28 +0000 Subject: [PATCH 025/136] fix types --- packages/shared-core/src/filters.ts | 34 ++++++++++++++--------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 5f975ff541..6d81bbdc62 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -391,28 +391,28 @@ export const 
runLuceneQuery = (docs: any[], query?: SearchQuery) => { ) const docMatch = (doc: any) => { - const filterFunctions = { - string: stringMatch, - fuzzy: fuzzyMatch, - range: rangeMatch, - equal: equalMatch, - notEqual: notEqualMatch, - empty: emptyMatch, - notEmpty: notEmptyMatch, - oneOf: oneOf, - contains: contains, - containsAny: containsAny, - notContains: notContains, - } - const activeFilterKeys: (keyof typeof filterFunctions)[] = Object.entries( - query - ) + const filterFunctions: Record boolean> = + { + string: stringMatch, + fuzzy: fuzzyMatch, + range: rangeMatch, + equal: equalMatch, + notEqual: notEqualMatch, + empty: emptyMatch, + notEmpty: notEmptyMatch, + oneOf: oneOf, + contains: contains, + containsAny: containsAny, + notContains: notContains, + } + + const activeFilterKeys: SearchQueryOperators[] = Object.entries(query) .filter( ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && Object.keys(value as Record).length > 0 ) - .map(([key]) => key as keyof typeof filterFunctions) + .map(([key]) => key as any) const results: boolean[] = activeFilterKeys.map(filterKey => { const filterFunction = filterFunctions[filterKey] From 3d9a7e5ddf5f76236a304e65239c899e8e865cd7 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Mon, 4 Mar 2024 10:07:06 +0000 Subject: [PATCH 026/136] fix type --- packages/shared-core/src/filters.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 6d81bbdc62..0a1673e558 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -406,7 +406,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { notContains: notContains, } - const activeFilterKeys: SearchQueryOperators[] = Object.entries(query) + const activeFilterKeys: SearchQueryOperators[] = Object.entries(query || {}) .filter( ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && From 
824dd1c1fc601bf890e9390f63b4943303518d15 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:38:45 +0100 Subject: [PATCH 027/136] Type inMemoryQueue --- .../backend-core/src/queue/inMemoryQueue.ts | 36 ++++++++++--------- packages/backend-core/src/queue/queue.ts | 2 ++ 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..3205b6f383 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,6 @@ import events from "events" import { timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -24,9 +25,9 @@ function newJob(queue: string, message: any) { * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. */ -class InMemoryQueue { +class InMemoryQueue implements Partial { _name: string - _opts?: any + _opts?: QueueOptions _messages: any[] _emitter: EventEmitter _runCount: number @@ -37,7 +38,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] @@ -55,8 +56,12 @@ class InMemoryQueue { * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. 
*/ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { + const delay = this._opts?.defaultJobOptions?.delay + if (delay) { + await new Promise(r => setTimeout(() => r(), delay)) + } if (this._messages.length <= 0) { return } @@ -70,7 +75,7 @@ class InMemoryQueue { } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +88,26 @@ class InMemoryQueue { * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) + this._messages.push(newJob(this._name, data)) this._addCount++ this._emitter.emit("message") + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed. 
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +115,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + async removeJobs(pattern: string) { // no-op } @@ -128,12 +132,12 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds From ae85c832483d7533ee141803fc0336a730846dc1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:43:47 +0100 Subject: [PATCH 028/136] Clean --- packages/worker/src/initPro.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts index ddc8d2562a..b34d514992 100644 --- a/packages/worker/src/initPro.ts +++ b/packages/worker/src/initPro.ts @@ -1,5 +1,4 @@ import { sdk as proSdk } from "@budibase/pro" -import * as userSdk from "./sdk/users" export const initPro = async () => { await proSdk.init({}) From 91468d2569e8c1828c4e107750cab8bcc81f016f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:18:01 +0100 Subject: [PATCH 029/136] Add doc-writethrough queue --- packages/backend-core/src/queue/constants.ts | 1 + 
packages/backend-core/src/queue/listeners.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..14dce5fe8d 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { From 2d84bc5da2b5a3eada63eba3866b04324a519afb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:34:05 +0100 Subject: [PATCH 030/136] Use bull --- .../backend-core/src/cache/docWritethrough.ts | 123 +++++++++--------- 1 file changed, 64 insertions(+), 59 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e367c9e060..38a162435d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,6 +3,9 @@ import { getDocWritethroughClient } from 
"../redis/init" import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" +import { JobQueue, createQueue } from "../queue" +import { context, db as dbUtils } from ".." + const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -14,17 +17,63 @@ async function getCache() { return CACHE } -interface CacheItem { - nextWrite: number +interface ProcessDocMessage { + tenantId: string + dbName: string + docId: string + cacheKeyPrefix: string } +export const docWritethroughProcessorQueue = createQueue( + JobQueue.DOC_WRITETHROUGH_QUEUE +) + +docWritethroughProcessorQueue.process(async message => { + const { dbName, tenantId, docId, cacheKeyPrefix } = message.data + const cache = await getCache() + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } + }) +}) + export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number private cacheKeyPrefix: string - private docInfoCacheKey: string constructor( db: Database, @@ -35,54 +84,31 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } 
get docId() { return this._docId } - private makeNextWriteInfoItem(): CacheItem { - return { nextWrite: Date.now() + this.writeRateMs } - } - async patch(data: Record) { const cache = await getCache() await this.storeToCache(cache, data) - const updateDb = await this.shouldUpdateDb(cache) - - if (updateDb) { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: this.docInfoCacheKey, - ttl: 15000, - }, - async () => { - if (await this.shouldUpdateDb(cache)) { - await this.persistToDb(cache) - await cache.store( - this.docInfoCacheKey, - this.makeNextWriteInfoItem() - ) - } - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + docWritethroughProcessorQueue.add( + { + tenantId: context.getTenantId(), + dbName: this.db.name, + docId: this.docId, + cacheKeyPrefix: this.cacheKeyPrefix, + }, + { + delay: this.writeRateMs - 1, + jobId: this.cacheKeyPrefix, + removeOnFail: true, + removeOnComplete: true, } - } - } - - private async shouldUpdateDb(cache: BaseCache) { - const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeNextWriteInfoItem() ) - return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { @@ -91,25 +117,4 @@ export class DocWritethrough { await cache.store(cacheKey, { key, value }, undefined) } } - - private async persistToDb(cache: BaseCache) { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } - - const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } } From 13563d18dca87872dad9294c61c8018158fa191d Mon Sep 17 00:00:00 
2001 From: Sam Rose Date: Tue, 5 Mar 2024 09:20:20 +0000 Subject: [PATCH 031/136] Write a failing test. --- .../src/api/routes/tests/application.spec.ts | 46 ++++++++++++++++++- .../server/src/tests/utilities/api/index.ts | 3 ++ .../server/src/tests/utilities/api/role.ts | 41 +++++++++++++++++ 3 files changed, 89 insertions(+), 1 deletion(-) create mode 100644 packages/server/src/tests/utilities/api/role.ts diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 5a3be462e8..b452e8742f 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -16,7 +16,13 @@ import * as setup from "./utilities" import { AppStatus } from "../../../db/utils" import { events, utils, context } from "@budibase/backend-core" import env from "../../../environment" -import type { App } from "@budibase/types" +import { + PermissionLevel, + type App, + INTERNAL_TABLE_SOURCE_ID, + TableSourceType, + FieldType, +} from "@budibase/types" import tk from "timekeeper" describe("/applications", () => { @@ -256,10 +262,48 @@ describe("/applications", () => { admin: { global: false }, }) + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + }, + }) + await config.withUser(user, async () => { const apps = await config.api.application.fetch() expect(apps).toHaveLength(0) }) + + const role = await config.api.roles.save({ + name: "Test", + inherits: "PUBLIC", + permissionId: "read_only", + version: "name", + }) + + await config.api.user.update({ + ...user, + roles: { + [config.getAppId()]: role._id!, + }, + }) + + await config.api.permission.add({ + resourceId: table._id!, + roleId: role._id!, + level: PermissionLevel.READ, + }) + + await config.withUser(user, async () => { + 
const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) }) }) }) diff --git a/packages/server/src/tests/utilities/api/index.ts b/packages/server/src/tests/utilities/api/index.ts index fdcec3098d..d66acd86fd 100644 --- a/packages/server/src/tests/utilities/api/index.ts +++ b/packages/server/src/tests/utilities/api/index.ts @@ -11,6 +11,7 @@ import { BackupAPI } from "./backup" import { AttachmentAPI } from "./attachment" import { UserAPI } from "./user" import { QueryAPI } from "./query" +import { RoleAPI } from "./role" export default class API { table: TableAPI @@ -25,6 +26,7 @@ export default class API { attachment: AttachmentAPI user: UserAPI query: QueryAPI + roles: RoleAPI constructor(config: TestConfiguration) { this.table = new TableAPI(config) @@ -39,5 +41,6 @@ export default class API { this.attachment = new AttachmentAPI(config) this.user = new UserAPI(config) this.query = new QueryAPI(config) + this.roles = new RoleAPI(config) } } diff --git a/packages/server/src/tests/utilities/api/role.ts b/packages/server/src/tests/utilities/api/role.ts new file mode 100644 index 0000000000..4defbc1220 --- /dev/null +++ b/packages/server/src/tests/utilities/api/role.ts @@ -0,0 +1,41 @@ +import { + AccessibleRolesResponse, + FetchRolesResponse, + FindRoleResponse, + SaveRoleRequest, + SaveRoleResponse, +} from "@budibase/types" +import { Expectations, TestAPI } from "./base" + +export class RoleAPI extends TestAPI { + fetch = async (expectations?: Expectations) => { + return await this._get(`/api/roles`, { + expectations, + }) + } + + find = async (roleId: string, expectations?: Expectations) => { + return await this._get(`/api/roles/${roleId}`, { + expectations, + }) + } + + save = async (body: SaveRoleRequest, expectations?: Expectations) => { + return await this._post(`/api/roles`, { + body, + expectations, + }) + } + + destroy = async (roleId: string, expectations?: Expectations) => { + return await this._delete(`/api/roles/${roleId}`, 
{ + expectations, + }) + } + + accesssible = async (expectations?: Expectations) => { + return await this._get(`/api/roles/accessible`, { + expectations, + }) + } +} From aa124524d4bc93e228c5ada844fccb541cb55e6e Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 5 Mar 2024 10:05:05 +0000 Subject: [PATCH 032/136] Add a simpler test. --- packages/backend-core/src/cache/user.ts | 4 +- packages/server/src/api/controllers/user.ts | 3 +- .../src/api/routes/tests/application.spec.ts | 39 ++++++++++++++++++- .../src/tests/utilities/TestConfiguration.ts | 4 +- 4 files changed, 44 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/user.ts b/packages/backend-core/src/cache/user.ts index 313b9a4d4a..ecfa20f99e 100644 --- a/packages/backend-core/src/cache/user.ts +++ b/packages/backend-core/src/cache/user.ts @@ -6,7 +6,7 @@ import env from "../environment" import * as accounts from "../accounts" import { UserDB } from "../users" import { sdk } from "@budibase/shared-core" -import { User } from "@budibase/types" +import { User, UserMetadata } from "@budibase/types" const EXPIRY_SECONDS = 3600 @@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600 */ async function populateFromDB(userId: string, tenantId: string) { const db = tenancy.getTenantDB(tenantId) - const user = await db.get(userId) + const user = await db.get(userId) user.budibaseAccess = true if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { const account = await accounts.getAccount(user.email) diff --git a/packages/server/src/api/controllers/user.ts b/packages/server/src/api/controllers/user.ts index 108e29fd3d..d1658f9820 100644 --- a/packages/server/src/api/controllers/user.ts +++ b/packages/server/src/api/controllers/user.ts @@ -1,6 +1,6 @@ import { generateUserFlagID, InternalTables } from "../../db/utils" import { getFullUser } from "../../utilities/users" -import { context } from "@budibase/backend-core" +import { cache, context } from "@budibase/backend-core" import { ContextUserMetadata, 
Ctx, @@ -42,6 +42,7 @@ export async function updateMetadata( // this isn't applicable to the user delete metadata.roles ctx.body = await db.put(metadata) + await cache.user.invalidateUser(user._id!) } export async function destroyMetadata(ctx: UserCtx) { diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index b452e8742f..7424511200 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -256,7 +256,44 @@ describe("/applications", () => { }) describe("permissions", () => { - it("should only return apps a user has access to", async () => { + it.only("should only return apps a user has access to", async () => { + const user = await config.createUser({ + builder: { global: false }, + admin: { global: false }, + }) + + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(0) + }) + + await config.api.user.update({ + ...user, + builder: { + [config.getAppId()]: true, + }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) + }) + + it("should only return apps a user has access to through a custom role on a group", async () => { const user = await config.createUser({ builder: { global: false }, admin: { global: false }, diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 2127e9d1cd..32af88836e 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -299,11 +299,11 @@ 
export default class TestConfiguration { } } - withUser(user: User, f: () => Promise) { + async withUser(user: User, f: () => Promise) { const oldUser = this.user this.user = user try { - return f() + return await f() } finally { this.user = oldUser } From e648503e4f31045b0b68e4baed76003adb6d5496 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:50:58 +0100 Subject: [PATCH 033/136] Tests --- .../backend-core/src/cache/docWritethrough.ts | 99 +++++++++------ .../src/cache/tests/docWritethrough.spec.ts | 120 ++++++++++-------- .../backend-core/src/queue/inMemoryQueue.ts | 76 ++++++++--- 3 files changed, 186 insertions(+), 109 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 38a162435d..f53cfbfe5f 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -4,7 +4,8 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" import { JobQueue, createQueue } from "../queue" -import { context, db as dbUtils } from ".." 
+import * as context from "../context" +import * as dbUtils from "../db" const DEFAULT_WRITE_RATE_MS = 10000 @@ -28,50 +29,71 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -docWritethroughProcessorQueue.process(async message => { - const { dbName, tenantId, docId, cacheKeyPrefix } = message.data - const cache = await getCache() - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } +let _init = false +export const init = () => { + if (_init) { + return + } + docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) } + ) - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) - } + }) }) -}) + _init = true +} + +export async function persistToDb({ + dbName, + docId, + cacheKeyPrefix, +}: { + dbName: string + docId: string + cacheKeyPrefix: string +}) { + const cache = await getCache() + + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | 
undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } +} export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number + private tenantId: string private cacheKeyPrefix: string @@ -84,6 +106,7 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.tenantId = context.getTenantId() } get docId() { @@ -97,13 +120,13 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: context.getTenantId(), + tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, }, { - delay: this.writeRateMs - 1, + delay: this.writeRateMs, jobId: this.cacheKeyPrefix, removeOnFail: true, removeOnComplete: true, diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 4c4a4b2b60..83af66a9d2 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,20 +1,32 @@ -import tk from "timekeeper" - import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -const WRITE_RATE_MS = 500 +import { + DocWritethrough, + docWritethroughProcessorQueue, + init, +} from "../docWritethrough" +import InMemoryQueue from "../../queue/inMemoryQueue" + +const WRITE_RATE_MS = 1000 const initialTime = Date.now() +jest.useFakeTimers({ + now: initialTime, +}) + function resetTime() { - 
tk.travel(initialTime) + jest.setSystemTime(initialTime) } -function travelForward(ms: number) { - const updatedTime = Date.now() + ms - tk.travel(updatedTime) +async function travelForward(ms: number) { + await jest.advanceTimersByTimeAsync(ms) + + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + while (queue.hasRunningJobs()) { + await jest.runOnlyPendingTimersAsync() + } } describe("docWritethrough", () => { @@ -33,33 +45,37 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeEach(() => { + beforeAll(() => init()) + + beforeEach(async () => { resetTime() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + await config.doInTenant(async () => { + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) }) - it("patching will not persist if timeout from the creation does not hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - travelForward(WRITE_RATE_MS - 1) - await docWritethrough.patch(generatePatchObject(2)) + await travelForward(WRITE_RATE_MS - 1) expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits and next patch is called", async () => { + it("patching will persist if timeout hits", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) + // This will not be persisted const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -67,7 +83,6 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - ...patch3, _rev: 
expect.stringMatching(/1-.+/), createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), @@ -82,15 +97,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - travelForward(WRITE_RATE_MS) - - const patch4 = generatePatchObject(3) - await docWritethrough.patch(patch4) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -98,7 +110,6 @@ describe("docWritethrough", () => { ...patch1, ...patch2, ...patch3, - ...patch4, }) ) }) @@ -109,16 +120,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date1 = new Date() await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date2 = new Date() - const patch3 = generatePatchObject(3) - await docWritethrough.patch(patch3) - expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -129,22 +137,11 @@ describe("docWritethrough", () => { }) }) - it("patching will not persist even if timeout hits but next patch is not callec", async () => { - await config.doInTenant(async () => { - await docWritethrough.patch(generatePatchObject(2)) - await docWritethrough.patch(generatePatchObject(2)) - - travelForward(WRITE_RATE_MS) - - expect(await db.exists(documentId)).toBe(false) - }) - }) - it("concurrent patches will override keys", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch2 = 
generatePatchObject(1) await docWritethrough.patch(patch2) @@ -155,13 +152,14 @@ describe("docWritethrough", () => { }) ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -173,7 +171,7 @@ describe("docWritethrough", () => { }) }) - it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + it("concurrent patches to different docWritethrough will not pollute each other", async () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, @@ -186,12 +184,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) + await travelForward(WRITE_RATE_MS) expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -214,7 +213,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch({}) @@ -224,9 +223,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -246,30 +246,46 @@ describe("docWritethrough", () => { ) } - const persistToDbSpy = jest.spyOn(docWritethrough as any, 
"persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) - expect(persistToDbSpy).not.toBeCalled() expect(storeToCacheSpy).toBeCalledTimes(5) + expect(await db.exists(documentId)).toBe(false) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(40) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(45) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) + await parallelPatch(10) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(55) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(5) - expect(persistToDbSpy).toBeCalledTimes(2) + await travelForward(WRITE_RATE_MS) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/3-.+/), + }) + ) expect(storeToCacheSpy).toBeCalledTimes(60) }) }) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 3205b6f383..f201714903 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -2,6 +2,13 @@ import events from "events" import { timeout } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" +interface JobMessage { + timestamp: number + queue: string + data: any + opts?: JobOptions +} + /** * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs @@ -11,12 +18,12 @@ import { Queue, QueueOptions, JobOptions } from "./queue" * @returns A new 
job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -28,10 +35,12 @@ function newJob(queue: string, message: any) { class InMemoryQueue implements Partial { _name: string _opts?: QueueOptions - _messages: any[] + _messages: JobMessage[] + _queuedJobIds: Set _emitter: EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. @@ -45,6 +54,7 @@ class InMemoryQueue implements Partial { this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -58,19 +68,24 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - const delay = this._opts?.defaultJobOptions?.delay - if (delay) { - await new Promise(r => setTimeout(() => r(), delay)) + try { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } + } catch (e: any) { + throw e } - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ }) } @@ -89,12 +104,31 @@ class InMemoryQueue implements Partial { */ // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job 
${jobId}`) + return + } + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, data)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } return {} as any } @@ -143,7 +177,11 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } From caf142f1db37b4454c03427468d29a7c915de255 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:55:07 +0100 Subject: [PATCH 034/136] Clean --- .../backend-core/src/queue/inMemoryQueue.ts | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index f201714903..6c8107c7a4 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -68,23 +68,19 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - try { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ - const jobId = msg?.opts?.jobId?.toString() - if (jobId && msg?.opts?.removeOnComplete) { - this._queuedJobIds.delete(jobId) - } - } catch (e: any) { - throw e + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = 
msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) } }) } From 6b86633c650ea637c1387351c41b2ba52d84236e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:01:41 +0100 Subject: [PATCH 035/136] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 217e0a93a1..f5134a01fc 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 217e0a93a12f6ed56f122729366a3068c6bd957e +Subproject commit f5134a01fc122be2535c6b17e47d956c145fb186 From dfdbc7b22bbb70c3f694cd7f88ff0718b92878c8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:05:39 +0100 Subject: [PATCH 036/136] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index f5134a01fc..54411929db 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit f5134a01fc122be2535c6b17e47d956c145fb186 +Subproject commit 54411929db75f6bc4335491e86871c889fe3a98a From ca0f583399d9786ab25374a4ab42cc6a0861f27d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:19:05 +0100 Subject: [PATCH 037/136] Remove defaults and init --- .../backend-core/src/cache/docWritethrough.ts | 52 +++++++------------ .../src/cache/tests/docWritethrough.spec.ts | 3 -- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index f53cfbfe5f..1a16f60eb9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -7,8 +7,6 @@ import { JobQueue, createQueue } from "../queue" import * as context from "../context" import * as dbUtils from "../db" -const DEFAULT_WRITE_RATE_MS = 10000 - let CACHE: BaseCache | null = null async function getCache() { if (!CACHE) { @@ -29,33 +27,27 @@ export const 
docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -let _init = false -export const init = () => { - if (_init) { - return - } - docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) +docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) } - }) + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } }) - _init = true -} +}) export async function persistToDb({ dbName, @@ -97,11 +89,7 @@ export class DocWritethrough { private cacheKeyPrefix: string - constructor( - db: Database, - docId: string, - writeRateMs: number = DEFAULT_WRITE_RATE_MS - ) { + constructor(db: Database, docId: string, writeRateMs: number) { this.db = db this._docId = docId this.writeRateMs = writeRateMs diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 83af66a9d2..a5765171cb 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -5,7 +5,6 @@ import _ from "lodash" import { DocWritethrough, docWritethroughProcessorQueue, - init, } from 
"../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" @@ -45,8 +44,6 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeAll(() => init()) - beforeEach(async () => { resetTime() documentId = structures.uuid() From e8c3f20c3047bbd7cb909f1b3735eae05bf0ca4f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:32:18 +0100 Subject: [PATCH 038/136] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 54411929db..9daa77883c 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 54411929db75f6bc4335491e86871c889fe3a98a +Subproject commit 9daa77883cc0b395e5badffe48260324527b6924 From cb5f3e3bd3a8aaa0cce5c6530a0a949b080e3a71 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:38:36 +0100 Subject: [PATCH 039/136] Lint --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 9daa77883c..6079868997 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 9daa77883cc0b395e5badffe48260324527b6924 +Subproject commit 607986899781aa7c0b6ccfd9746497b6fc32b569 From f1decee0102c0bc6b4687fffa14f5b445e1c3689 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 5 Mar 2024 14:37:06 +0000 Subject: [PATCH 040/136] Get test passing. 
--- .../src/api/routes/tests/application.spec.ts | 8 +++++--- .../src/tests/utilities/TestConfiguration.ts | 18 +++++++++++------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 7424511200..6f948d9977 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -25,6 +25,8 @@ import { } from "@budibase/types" import tk from "timekeeper" +jest.setTimeout(99999999) + describe("/applications", () => { let config = setup.getConfig() let app: App @@ -257,7 +259,7 @@ describe("/applications", () => { describe("permissions", () => { it.only("should only return apps a user has access to", async () => { - const user = await config.createUser({ + let user = await config.createUser({ builder: { global: false }, admin: { global: false }, }) @@ -280,10 +282,10 @@ describe("/applications", () => { expect(apps).toHaveLength(0) }) - await config.api.user.update({ + user = await config.globalUser({ ...user, builder: { - [config.getAppId()]: true, + apps: [config.getProdAppId()], }, }) diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 32af88836e..cfe1bf4066 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -363,6 +363,7 @@ export default class TestConfiguration { _id, ...existing, ...config, + _rev: existing._rev, email, roles, tenantId, @@ -372,11 +373,12 @@ export default class TestConfiguration { admin, } await sessions.createASession(_id, { - sessionId: "sessionid", + sessionId: this.sessionIdForUser(_id), tenantId: this.getTenantId(), csrfToken: this.csrfToken, }) const resp = await db.put(user) + await cache.user.invalidateUser(_id) return { _rev: resp.rev, ...user, @@ -384,9 +386,7 @@ export 
default class TestConfiguration { } async createUser(user: Partial = {}): Promise { - const resp = await this.globalUser(user) - await cache.user.invalidateUser(resp._id!) - return resp + return await this.globalUser(user) } async createGroup(roleId: string = roles.BUILTIN_ROLE_IDS.BASIC) { @@ -416,6 +416,10 @@ export default class TestConfiguration { }) } + sessionIdForUser(userId: string): string { + return `sessionid-${userId}` + } + async login({ roleId, userId, @@ -442,13 +446,13 @@ export default class TestConfiguration { }) } await sessions.createASession(userId, { - sessionId: "sessionid", + sessionId: this.sessionIdForUser(userId), tenantId: this.getTenantId(), }) // have to fake this const authObj = { userId, - sessionId: "sessionid", + sessionId: this.sessionIdForUser(userId), tenantId: this.getTenantId(), } const authToken = jwt.sign(authObj, coreEnv.JWT_SECRET as Secret) @@ -470,7 +474,7 @@ export default class TestConfiguration { const user = this.getUser() const authObj: AuthToken = { userId: user._id!, - sessionId: "sessionid", + sessionId: this.sessionIdForUser(user._id!), tenantId, } const authToken = jwt.sign(authObj, coreEnv.JWT_SECRET as Secret) From 40cc383c0140fa3d960938162e7924aaacd079f5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 11:22:27 +0100 Subject: [PATCH 041/136] Create docWriteThrough redis cache --- packages/backend-core/src/redis/init.ts | 13 ++++++++++++- packages/backend-core/src/redis/utils.ts | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..7920dfed2d 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = 
await new Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ export async function init() { utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + ).init() } export async function shutdown() { @@ -104,3 +108,10 @@ export async function getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if (!writethroughClient) { + await init() + } + return writethroughClient +} diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** From 9f42ea6bbf2b8247e988b16ae8f3b84a9beb1f9e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:44:52 +0100 Subject: [PATCH 042/136] DocWritethrough --- .../backend-core/src/cache/docWritethrough.ts | 102 ++++++++++++++++++ .../backend-core/src/db/couch/DatabaseImpl.ts | 9 ++ .../backend-core/src/db/instrumentation.ts | 7 ++ packages/types/src/sdk/db.ts | 1 + 4 files changed, 119 insertions(+) create mode 100644 packages/backend-core/src/cache/docWritethrough.ts diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts new file mode 100644 index 0000000000..9e1977f797 --- /dev/null +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -0,0 +1,102 @@ +import BaseCache from "./base" +import { getDocWritethroughClient } from "../redis/init" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" + +const DEFAULT_WRITE_RATE_MS = 10000 + +let CACHE: BaseCache | null = null +async function getCache() { + if (!CACHE) { + 
const client = await getDocWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} + +interface CacheItem { + lastWrite: number +} + +export class DocWritethrough { + db: Database + docId: string + writeRateMs: number + + constructor( + db: Database, + docId: string, + writeRateMs: number = DEFAULT_WRITE_RATE_MS + ) { + this.db = db + this.docId = docId + this.writeRateMs = writeRateMs + } + + private makeCacheItem(): CacheItem { + return { lastWrite: Date.now() } + } + + async patch(data: Record) { + const cache = await getCache() + + const key = `${this.docId}:info` + const cacheItem = await cache.withCache( + key, + null, + () => this.makeCacheItem(), + { + useTenancy: false, + } + ) + + await this.storeToCache(cache, data) + + const updateDb = + !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs + // let output = this.doc + if (updateDb) { + await this.persistToDb(cache) + } + } + + private async storeToCache(cache: BaseCache, data: Record) { + for (const [key, value] of Object.entries(data)) { + const cacheKey = this.docId + ":data:" + key + await cache.store(cacheKey, { key, value }, undefined) + } + } + + private async persistToDb(cache: BaseCache) { + const key = `${this.db.name}_${this.docId}` + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 15000, + }, + async () => { + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } + + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await this.db.put(doc) + } + ) + + if (!lockResponse.executed) { + throw `DocWriteThrough could not be persisted to db for ${key}` + } + } +} diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts 
b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 7e7c997cbe..d4d17f6127 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -135,6 +135,15 @@ export class DatabaseImpl implements Database { }) } + async docExists(id: string): Promise { + try { + await this.get(id) + return true + } catch { + return false + } + } + async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 03010d4c92..87af0e3127 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -38,6 +38,13 @@ export class DDInstrumentedDatabase implements Database { }) } + docExists(id: string): Promise { + return tracer.trace("db.docExists", span => { + span?.addTags({ db_name: this.name, doc_id: id }) + return this.db.docExists(id) + }) + } + getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index c4e4a4f02f..dafc9ced57 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,6 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise + docExists(id: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 10568cccff8e4d342a03484f04be299fe4868917 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:47:18 +0100 Subject: [PATCH 043/136] USe get for doc exists --- packages/backend-core/src/cache/base/index.ts | 2 +- packages/backend-core/src/db/couch/DatabaseImpl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 264984c6a5..23c952c7b2 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ 
b/packages/backend-core/src/cache/base/index.ts @@ -60,7 +60,7 @@ export default class BaseCache { */ async withCache( key: string, - ttl: number, + ttl: number | null = null, fetchFn: any, opts = { useTenancy: true } ) { diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index d4d17f6127..9d198e4307 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -137,7 +137,7 @@ export class DatabaseImpl implements Database { async docExists(id: string): Promise { try { - await this.get(id) + await this.performCall(db => () => db.head(id)) return true } catch { return false From 82132d539d2c535be99a8aee58360fff288a1907 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:17:18 +0100 Subject: [PATCH 044/136] DatabaseImpl.docExists test --- .../src/db/tests/DatabaseImpl.spec.ts | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 packages/backend-core/src/db/tests/DatabaseImpl.spec.ts diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts new file mode 100644 index 0000000000..140ecf4f2c --- /dev/null +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -0,0 +1,55 @@ +import _ from "lodash" +import { AnyDocument } from "@budibase/types" +import { generator } from "../../../tests" +import { DatabaseImpl } from "../couch" +import { newid } from "../../utils" + +describe("DatabaseImpl", () => { + const database = new DatabaseImpl(generator.word()) + const documents: AnyDocument[] = [] + + beforeAll(async () => { + const docsToCreate = Array.from({ length: 10 }).map(() => ({ + _id: newid(), + })) + const createdDocs = await database.bulkDocs(docsToCreate) + + documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) + }) + + describe("docExists", () => { + it("can check existing docs by id", async () => 
{ + const existingDoc = _.sample(documents) + const result = await database.docExists(existingDoc!._id!) + + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.docExists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + + const results = [] + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.docExists(id)).toBe(true) + + await database.remove(existingDoc!) + expect(await database.docExists(id)).toBe(false) + }) + }) +}) From 74aae19a7ebdd9fcb040679c2aeca40e991a8456 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:23:32 +0100 Subject: [PATCH 045/136] docWritethrough test --- .../src/cache/tests/docWritethrough.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 packages/backend-core/src/cache/tests/docWritethrough.spec.ts diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..bfb1da5f1c --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,47 @@ +import tk from "timekeeper" +import { env } from "../.." 
+import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" +import { DocWritethrough } from "../docWritethrough" +import _ from "lodash" + +env._set("MOCK_REDIS", null) + +const initialTime = Date.now() + +const WRITE_RATE_MS = 500 + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(() => { + tk.freeze(initialTime) + documentId = structures.db.id() + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) + + it("patching will not persist until timeout is hit", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + tk.travel(Date.now() + WRITE_RATE_MS - 1) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) + }) +}) From bfde028e9b8dcae7ed81d34542acfcef32cf791c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:28:35 +0100 Subject: [PATCH 046/136] Add persisting tests --- .../src/cache/tests/docWritethrough.spec.ts | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bfb1da5f1c..ab0de53bee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -33,7 +33,7 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) 
}) - it("patching will not persist until timeout is hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) @@ -43,5 +43,42 @@ describe("docWritethrough", () => { expect(await db.docExists(documentId)).toBe(false) }) }) + + it("patching will persist if timeout hits and next patch is called", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + _rev: expect.stringMatching(/1-.+/), + createdAt: new Date(initialTime + 500).toISOString(), + updatedAt: new Date(initialTime + 500).toISOString(), + }) + }) + }) + + it("patching will not persist even if timeout hits but next patch is not callec", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) }) }) From 35536592e6558176e48960063ab71ddfebd2f8d1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:41:26 +0100 Subject: [PATCH 047/136] Add extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 86 ++++++++++++++++--- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index ab0de53bee..16e47ce3c3 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -7,9 +7,17 @@ import _ from "lodash" env._set("MOCK_REDIS", null) +const WRITE_RATE_MS = 500 + const initialTime = Date.now() -const WRITE_RATE_MS = 500 +function resetTime() { + tk.travel(initialTime) +} +function travelForward(ms: number) { + const updatedTime = Date.now() + ms + tk.travel(updatedTime) +} describe("docWritethrough", () => { const config = new DBTestConfiguration() @@ -28,7 +36,7 @@ describe("docWritethrough", () => { } beforeEach(() => { - tk.freeze(initialTime) + resetTime() documentId = structures.db.id() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -37,7 +45,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS - 1) + travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) expect(await db.docExists(documentId)).toBe(false) @@ -51,7 +59,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -62,23 +70,79 @@ describe("docWritethrough", () => { ...patch2, ...patch3, _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + 500).toISOString(), - updatedAt: new Date(initialTime + 500).toISOString(), + createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), }) }) }) + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + 
travelForward(WRITE_RATE_MS) + const date1 = new Date() + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + const date2 = new Date() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: date2.toISOString(), + }) + ) + }) + }) + it("patching will not persist even if timeout hits but next patch is not callec", async () => { await config.doInTenant(async () => { - const patch1 = generatePatchObject(2) - const patch2 = generatePatchObject(2) - await docWritethrough.patch(patch1) - await docWritethrough.patch(patch2) + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) expect(await db.docExists(documentId)).toBe(false) }) }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const time1 = travelForward(WRITE_RATE_MS) + const patch2 = generatePatchObject(1) + await docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! 
+ expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + travelForward(WRITE_RATE_MS) + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) }) }) From 41dde9722f57f12d03450c4bc98e929c7133086d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:51:42 +0100 Subject: [PATCH 048/136] Test concurrency --- .../backend-core/src/cache/docWritethrough.ts | 12 ++++-- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++++++++++++- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 9e1977f797..13a85a0d84 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -19,9 +19,9 @@ interface CacheItem { } export class DocWritethrough { - db: Database - docId: string - writeRateMs: number + private db: Database + private _docId: string + private writeRateMs: number constructor( db: Database, @@ -29,10 +29,14 @@ export class DocWritethrough { writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { this.db = db - this.docId = docId + this._docId = docId this.writeRateMs = writeRateMs } + get docId() { + return this._docId + } + private makeCacheItem(): CacheItem { return { lastWrite: Date.now() } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 16e47ce3c3..aed87499ee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -41,8 +41,9 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, 
WRITE_RATE_MS) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist if timeout from the creation does not hit", async () => { await config.doInTenant(async () => { + travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) travelForward(WRITE_RATE_MS - 1) @@ -116,7 +117,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - const time1 = travelForward(WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -144,5 +145,41 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id(), + WRITE_RATE_MS + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + travelForward(WRITE_RATE_MS) + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) }) }) From 223637999a4679536ca68ca0a0115376753abfa1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 16:48:16 +0100 Subject: [PATCH 049/136] Ensure keys are removed --- .../backend-core/src/cache/docWritethrough.ts | 4 +++ .../src/cache/tests/docWritethrough.spec.ts | 28 +++++++++++++++++++ 2 files changed, 
32 insertions(+) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 13a85a0d84..bde93182a9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -96,6 +96,10 @@ export class DocWritethrough { } await this.db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } ) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index aed87499ee..65e9450f62 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -181,5 +181,33 @@ describe("docWritethrough", () => { ) }) }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + travelForward(WRITE_RATE_MS) + + await docWritethrough.patch({}) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + travelForward(WRITE_RATE_MS) + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) }) }) From 04fb27962390d79fe2fe3b65fe7ee44a48d6dbd8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 17:01:16 +0100 Subject: [PATCH 050/136] Extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 65e9450f62..974494d1c9 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -77,6 +77,35 @@ describe("docWritethrough", () => { }) }) + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + travelForward(WRITE_RATE_MS) + + const patch4 = generatePatchObject(3) + await docWritethrough.patch(patch4) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + ...patch4, + }) + ) + }) + }) + it("date audit fields are set correctly when persisting", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) From fd93eb79d5b96c7cf0c71a9d8501dfe189771d56 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 10:53:18 +0100 Subject: [PATCH 051/136] Fixes and tests --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++---------- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++- 2 files changed, 82 insertions(+), 47 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index bde93182a9..80063e4772 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,8 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private docInfoCacheKey: string + constructor( db: Database, docId: string, @@ -31,6 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs + this.docInfoCacheKey = `${this.docId}:info` } get docId() { @@ -44,26 +47,39 @@ export 
class DocWritethrough { async patch(data: Record) { const cache = await getCache() - const key = `${this.docId}:info` - const cacheItem = await cache.withCache( - key, - null, - () => this.makeCacheItem(), - { - useTenancy: false, - } - ) - await this.storeToCache(cache, data) - const updateDb = - !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs - // let output = this.doc + const updateDb = await this.shouldUpdateDb(cache) + if (updateDb) { - await this.persistToDb(cache) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: this.docInfoCacheKey, + ttl: 15000, + }, + async () => { + if (await this.shouldUpdateDb(cache)) { + await this.persistToDb(cache) + await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } } } + private async shouldUpdateDb(cache: BaseCache) { + const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => + this.makeCacheItem() + ) + return cacheItem.lastWrite <= Date.now() - this.writeRateMs + } + private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { const cacheKey = this.docId + ":data:" + key @@ -72,39 +88,23 @@ export class DocWritethrough { } private async persistToDb(cache: BaseCache) { - const key = `${this.db.name}_${this.docId}` + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: key, - ttl: 15000, - }, - async () => { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = 
await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } + await this.db.put(doc) - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } - ) - - if (!lockResponse.executed) { - throw `DocWriteThrough could not be persisted to db for ${key}` + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) } } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 974494d1c9..bca781e377 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,10 @@ import tk from "timekeeper" -import { env } from "../.." + import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -env._set("MOCK_REDIS", null) - const WRITE_RATE_MS = 500 const initialTime = Date.now() @@ -238,5 +236,42 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent calls will not cause multiple saves", async () => { + async function parallelPatch(count: number) { + await Promise.all( + Array.from({ length: count }).map(() => + docWritethrough.patch(generatePatchObject(1)) + ) + ) + } + + const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") + const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + + await config.doInTenant(async () => { + await parallelPatch(5) + expect(persistToDbSpy).not.toBeCalled() + expect(storeToCacheSpy).toBeCalledTimes(5) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(40) + + expect(persistToDbSpy).toBeCalledTimes(1) + 
expect(storeToCacheSpy).toBeCalledTimes(45) + + await parallelPatch(10) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(55) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(5) + expect(persistToDbSpy).toBeCalledTimes(2) + expect(storeToCacheSpy).toBeCalledTimes(60) + }) + }) }) }) From eb9a1633944d84cbefa727b18a129feff27c9f56 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:30 +0100 Subject: [PATCH 052/136] Making code more readable --- .../backend-core/src/cache/docWritethrough.ts | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 80063e4772..5148950c1d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -15,7 +15,7 @@ async function getCache() { } interface CacheItem { - lastWrite: number + nextWrite: number } export class DocWritethrough { @@ -40,8 +40,8 @@ export class DocWritethrough { return this._docId } - private makeCacheItem(): CacheItem { - return { lastWrite: Date.now() } + private makeNextWriteInfoItem(): CacheItem { + return { nextWrite: Date.now() + this.writeRateMs } } async patch(data: Record) { @@ -62,7 +62,10 @@ export class DocWritethrough { async () => { if (await this.shouldUpdateDb(cache)) { await this.persistToDb(cache) - await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + await cache.store( + this.docInfoCacheKey, + this.makeNextWriteInfoItem() + ) } } ) @@ -75,9 +78,9 @@ export class DocWritethrough { private async shouldUpdateDb(cache: BaseCache) { const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeCacheItem() + this.makeNextWriteInfoItem() ) - return cacheItem.lastWrite <= Date.now() - this.writeRateMs + return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { From 
dc84eb4e806684c438ab18005bb14836720cc57b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:55 +0100 Subject: [PATCH 053/136] Type caches --- packages/backend-core/src/cache/base/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 23c952c7b2..911bd6a831 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -58,12 +58,12 @@ export default class BaseCache { /** * Read from the cache. Write to the cache if not exists. */ - async withCache( + async withCache( key: string, ttl: number | null = null, - fetchFn: any, + fetchFn: () => Promise | T, opts = { useTenancy: true } - ) { + ): Promise { const cachedValue = await this.get(key, opts) if (cachedValue) { return cachedValue From e986d34b8739258e81c6acc385afdd4cbe133a7b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:12:31 +0100 Subject: [PATCH 054/136] Fix types --- packages/backend-core/src/cache/generic.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts index 3ac323a8d4..2d6d8b9472 100644 --- a/packages/backend-core/src/cache/generic.ts +++ b/packages/backend-core/src/cache/generic.ts @@ -26,7 +26,8 @@ export const store = (...args: Parameters) => GENERIC.store(...args) export const destroy = (...args: Parameters) => GENERIC.delete(...args) -export const withCache = (...args: Parameters) => - GENERIC.withCache(...args) +export const withCache = ( + ...args: Parameters> +) => GENERIC.withCache(...args) export const bustCache = (...args: Parameters) => GENERIC.bustCache(...args) From da012c0f082d1bf44b6837e69da05d0a13db7fea Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:38:48 +0100 Subject: [PATCH 055/136] Namespace key in redis by db --- 
packages/backend-core/src/cache/docWritethrough.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5148950c1d..e46c763906 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -33,7 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.docId}:info` + this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` } get docId() { From 82a6f9027e5df55b113d550d5e26a8b958f87219 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:41:40 +0100 Subject: [PATCH 056/136] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e46c763906..e367c9e060 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,7 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private cacheKeyPrefix: string private docInfoCacheKey: string constructor( @@ -33,7 +34,8 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` + this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } get docId() { @@ -85,7 +87,7 @@ export class DocWritethrough { private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { - const cacheKey = this.docId + ":data:" + key + const cacheKey = this.cacheKeyPrefix + ":data:" + key await cache.store(cacheKey, { key, value }, undefined) } } @@ -98,7 +100,7 @@ 
export class DocWritethrough { doc = { _id: this.docId } } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) + const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value From 774ff42f0c926eb91c84d8a467a9047947274573 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:59:51 +0100 Subject: [PATCH 057/136] Use overloads --- .../src/cache/tests/docWritethrough.spec.ts | 6 ++-- .../backend-core/src/db/couch/DatabaseImpl.ts | 28 ++++++++++++------- .../backend-core/src/db/instrumentation.ts | 14 ++++------ .../src/db/tests/DatabaseImpl.spec.ts | 16 +++++------ packages/types/src/sdk/db.ts | 2 +- 5 files changed, 35 insertions(+), 31 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bca781e377..4c4a4b2b60 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -35,7 +35,7 @@ describe("docWritethrough", () => { beforeEach(() => { resetTime() - documentId = structures.db.id() + documentId = structures.uuid() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -47,7 +47,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) @@ -136,7 +136,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 9d198e4307..416313f520 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts 
+++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -70,7 +70,15 @@ export class DatabaseImpl implements Database { DatabaseImpl.nano = buildNano(couchInfo) } - async exists() { + exists(docId?: string) { + if (docId === undefined) { + return this.dbExists() + } + + return this.docExists(docId) + } + + private async dbExists() { const response = await directCouchUrlCall({ url: `${this.couchInfo.url}/${this.name}`, method: "HEAD", @@ -79,6 +87,15 @@ export class DatabaseImpl implements Database { return response.status === 200 } + private async docExists(id: string): Promise { + try { + await this.performCall(db => () => db.head(id)) + return true + } catch { + return false + } + } + private nano() { return this.instanceNano || DatabaseImpl.nano } @@ -135,15 +152,6 @@ export class DatabaseImpl implements Database { }) } - async docExists(id: string): Promise { - try { - await this.performCall(db => () => db.head(id)) - return true - } catch { - return false - } - } - async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 87af0e3127..795f30d7cd 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database { return this.db.name } - exists(): Promise { + exists(docId?: string): Promise { return tracer.trace("db.exists", span => { - span?.addTags({ db_name: this.name }) + span?.addTags({ db_name: this.name, doc_id: docId }) + if (docId) { + return this.db.exists(docId) + } return this.db.exists() }) } @@ -38,13 +41,6 @@ export class DDInstrumentedDatabase implements Database { }) } - docExists(id: string): Promise { - return tracer.trace("db.docExists", span => { - span?.addTags({ db_name: this.name, doc_id: id }) - return this.db.docExists(id) - }) - } - getMultiple( ids: string[], opts?: { allowMissing?: 
boolean | undefined } | undefined diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts index 140ecf4f2c..586f13f417 100644 --- a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -17,16 +17,16 @@ describe("DatabaseImpl", () => { documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) }) - describe("docExists", () => { + describe("document exists", () => { it("can check existing docs by id", async () => { const existingDoc = _.sample(documents) - const result = await database.docExists(existingDoc!._id!) + const result = await database.exists(existingDoc!._id!) expect(result).toBe(true) }) it("can check non existing docs by id", async () => { - const result = await database.docExists(newid()) + const result = await database.exists(newid()) expect(result).toBe(false) }) @@ -36,9 +36,9 @@ describe("DatabaseImpl", () => { const id = existingDoc!._id! const results = [] - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) expect(results).toEqual([true, true, true]) }) @@ -46,10 +46,10 @@ describe("DatabaseImpl", () => { it("returns false after the doc is deleted", async () => { const existingDoc = _.sample(documents) const id = existingDoc!._id! - expect(await database.docExists(id)).toBe(true) + expect(await database.exists(id)).toBe(true) await database.remove(existingDoc!) 
- expect(await database.docExists(id)).toBe(false) + expect(await database.exists(id)).toBe(false) }) }) }) diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index dafc9ced57..4d103d5be6 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,7 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise - docExists(id: string): Promise + exists(docId: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 2412d75cacbe36f27d0f8c4d02804eb371bb292d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:38:45 +0100 Subject: [PATCH 058/136] Type inMemoryQueue --- .../backend-core/src/queue/inMemoryQueue.ts | 36 ++++++++++--------- packages/backend-core/src/queue/queue.ts | 2 ++ 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..3205b6f383 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,6 @@ import events from "events" import { timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -24,9 +25,9 @@ function newJob(queue: string, message: any) { * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. 
*/ -class InMemoryQueue { +class InMemoryQueue implements Partial { _name: string - _opts?: any + _opts?: QueueOptions _messages: any[] _emitter: EventEmitter _runCount: number @@ -37,7 +38,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] @@ -55,8 +56,12 @@ class InMemoryQueue { * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. */ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { + const delay = this._opts?.defaultJobOptions?.delay + if (delay) { + await new Promise(r => setTimeout(() => r(), delay)) + } if (this._messages.length <= 0) { return } @@ -70,7 +75,7 @@ class InMemoryQueue { } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +88,26 @@ class InMemoryQueue { * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) + this._messages.push(newJob(this._name, data)) this._addCount++ this._emitter.emit("message") + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed. 
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +115,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + async removeJobs(pattern: string) { // no-op } @@ -128,12 +132,12 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds From b39400f08c5145a818aadd602f74c2a7a41e895c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:43:47 +0100 Subject: [PATCH 059/136] Clean --- packages/worker/src/initPro.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts index ddc8d2562a..b34d514992 100644 --- a/packages/worker/src/initPro.ts +++ b/packages/worker/src/initPro.ts @@ -1,5 +1,4 @@ import { sdk as proSdk } from "@budibase/pro" -import * as userSdk from "./sdk/users" export const initPro = async () => { await proSdk.init({}) From df325e21c30fae69940ed04bc3eb9f2d2f8b160d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:18:01 +0100 Subject: [PATCH 060/136] Add doc-writethrough queue --- packages/backend-core/src/queue/constants.ts | 1 + 
packages/backend-core/src/queue/listeners.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..14dce5fe8d 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { From 936ddafee7c21aa939c2842e793e6865741054a5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:34:05 +0100 Subject: [PATCH 061/136] Use bull --- .../backend-core/src/cache/docWritethrough.ts | 123 +++++++++--------- 1 file changed, 64 insertions(+), 59 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e367c9e060..38a162435d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,6 +3,9 @@ import { getDocWritethroughClient } from 
"../redis/init" import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" +import { JobQueue, createQueue } from "../queue" +import { context, db as dbUtils } from ".." + const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -14,17 +17,63 @@ async function getCache() { return CACHE } -interface CacheItem { - nextWrite: number +interface ProcessDocMessage { + tenantId: string + dbName: string + docId: string + cacheKeyPrefix: string } +export const docWritethroughProcessorQueue = createQueue( + JobQueue.DOC_WRITETHROUGH_QUEUE +) + +docWritethroughProcessorQueue.process(async message => { + const { dbName, tenantId, docId, cacheKeyPrefix } = message.data + const cache = await getCache() + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } + }) +}) + export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number private cacheKeyPrefix: string - private docInfoCacheKey: string constructor( db: Database, @@ -35,54 +84,31 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } 
get docId() { return this._docId } - private makeNextWriteInfoItem(): CacheItem { - return { nextWrite: Date.now() + this.writeRateMs } - } - async patch(data: Record) { const cache = await getCache() await this.storeToCache(cache, data) - const updateDb = await this.shouldUpdateDb(cache) - - if (updateDb) { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: this.docInfoCacheKey, - ttl: 15000, - }, - async () => { - if (await this.shouldUpdateDb(cache)) { - await this.persistToDb(cache) - await cache.store( - this.docInfoCacheKey, - this.makeNextWriteInfoItem() - ) - } - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + docWritethroughProcessorQueue.add( + { + tenantId: context.getTenantId(), + dbName: this.db.name, + docId: this.docId, + cacheKeyPrefix: this.cacheKeyPrefix, + }, + { + delay: this.writeRateMs - 1, + jobId: this.cacheKeyPrefix, + removeOnFail: true, + removeOnComplete: true, } - } - } - - private async shouldUpdateDb(cache: BaseCache) { - const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeNextWriteInfoItem() ) - return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { @@ -91,25 +117,4 @@ export class DocWritethrough { await cache.store(cacheKey, { key, value }, undefined) } } - - private async persistToDb(cache: BaseCache) { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } - - const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } } From 420b0ffc03386fdf896b11ff0cc5a0f01741ef9f Mon Sep 17 00:00:00 
2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:50:58 +0100 Subject: [PATCH 062/136] Tests --- .../backend-core/src/cache/docWritethrough.ts | 99 +++++++++------ .../src/cache/tests/docWritethrough.spec.ts | 120 ++++++++++-------- .../backend-core/src/queue/inMemoryQueue.ts | 76 ++++++++--- 3 files changed, 186 insertions(+), 109 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 38a162435d..f53cfbfe5f 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -4,7 +4,8 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" import { JobQueue, createQueue } from "../queue" -import { context, db as dbUtils } from ".." +import * as context from "../context" +import * as dbUtils from "../db" const DEFAULT_WRITE_RATE_MS = 10000 @@ -28,50 +29,71 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -docWritethroughProcessorQueue.process(async message => { - const { dbName, tenantId, docId, cacheKeyPrefix } = message.data - const cache = await getCache() - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } +let _init = false +export const init = () => { + if (_init) { + return + } + docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, 
+ async () => { + await persistToDb(message.data) } + ) - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) - } + }) }) -}) + _init = true +} + +export async function persistToDb({ + dbName, + docId, + cacheKeyPrefix, +}: { + dbName: string + docId: string + cacheKeyPrefix: string +}) { + const cache = await getCache() + + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } +} export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number + private tenantId: string private cacheKeyPrefix: string @@ -84,6 +106,7 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.tenantId = context.getTenantId() } get docId() { @@ -97,13 +120,13 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: context.getTenantId(), + tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, }, { - delay: this.writeRateMs - 1, + delay: this.writeRateMs, jobId: this.cacheKeyPrefix, removeOnFail: true, removeOnComplete: true, diff --git 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 4c4a4b2b60..83af66a9d2 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,20 +1,32 @@ -import tk from "timekeeper" - import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -const WRITE_RATE_MS = 500 +import { + DocWritethrough, + docWritethroughProcessorQueue, + init, +} from "../docWritethrough" +import InMemoryQueue from "../../queue/inMemoryQueue" + +const WRITE_RATE_MS = 1000 const initialTime = Date.now() +jest.useFakeTimers({ + now: initialTime, +}) + function resetTime() { - tk.travel(initialTime) + jest.setSystemTime(initialTime) } -function travelForward(ms: number) { - const updatedTime = Date.now() + ms - tk.travel(updatedTime) +async function travelForward(ms: number) { + await jest.advanceTimersByTimeAsync(ms) + + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + while (queue.hasRunningJobs()) { + await jest.runOnlyPendingTimersAsync() + } } describe("docWritethrough", () => { @@ -33,33 +45,37 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeEach(() => { + beforeAll(() => init()) + + beforeEach(async () => { resetTime() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + await config.doInTenant(async () => { + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) }) - it("patching will not persist if timeout from the creation does not hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await 
docWritethrough.patch(generatePatchObject(2)) - travelForward(WRITE_RATE_MS - 1) - await docWritethrough.patch(generatePatchObject(2)) + await travelForward(WRITE_RATE_MS - 1) expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits and next patch is called", async () => { + it("patching will persist if timeout hits", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) + // This will not be persisted const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -67,7 +83,6 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - ...patch3, _rev: expect.stringMatching(/1-.+/), createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), @@ -82,15 +97,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - travelForward(WRITE_RATE_MS) - - const patch4 = generatePatchObject(3) - await docWritethrough.patch(patch4) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -98,7 +110,6 @@ describe("docWritethrough", () => { ...patch1, ...patch2, ...patch3, - ...patch4, }) ) }) @@ -109,16 +120,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date1 = new Date() await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date2 = new Date() - 
const patch3 = generatePatchObject(3) - await docWritethrough.patch(patch3) - expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -129,22 +137,11 @@ describe("docWritethrough", () => { }) }) - it("patching will not persist even if timeout hits but next patch is not callec", async () => { - await config.doInTenant(async () => { - await docWritethrough.patch(generatePatchObject(2)) - await docWritethrough.patch(generatePatchObject(2)) - - travelForward(WRITE_RATE_MS) - - expect(await db.exists(documentId)).toBe(false) - }) - }) - it("concurrent patches will override keys", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -155,13 +152,14 @@ describe("docWritethrough", () => { }) ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -173,7 +171,7 @@ describe("docWritethrough", () => { }) }) - it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + it("concurrent patches to different docWritethrough will not pollute each other", async () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, @@ -186,12 +184,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) + await 
travelForward(WRITE_RATE_MS) expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -214,7 +213,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch({}) @@ -224,9 +223,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -246,30 +246,46 @@ describe("docWritethrough", () => { ) } - const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) - expect(persistToDbSpy).not.toBeCalled() expect(storeToCacheSpy).toBeCalledTimes(5) + expect(await db.exists(documentId)).toBe(false) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(40) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(45) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) + await parallelPatch(10) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(55) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(5) - expect(persistToDbSpy).toBeCalledTimes(2) + await travelForward(WRITE_RATE_MS) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/3-.+/), + 
}) + ) expect(storeToCacheSpy).toBeCalledTimes(60) }) }) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 3205b6f383..f201714903 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -2,6 +2,13 @@ import events from "events" import { timeout } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" +interface JobMessage { + timestamp: number + queue: string + data: any + opts?: JobOptions +} + /** * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs @@ -11,12 +18,12 @@ import { Queue, QueueOptions, JobOptions } from "./queue" * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -28,10 +35,12 @@ function newJob(queue: string, message: any) { class InMemoryQueue implements Partial { _name: string _opts?: QueueOptions - _messages: any[] + _messages: JobMessage[] + _queuedJobIds: Set _emitter: EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. 
@@ -45,6 +54,7 @@ class InMemoryQueue implements Partial { this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -58,19 +68,24 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - const delay = this._opts?.defaultJobOptions?.delay - if (delay) { - await new Promise(r => setTimeout(() => r(), delay)) + try { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } + } catch (e: any) { + throw e } - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ }) } @@ -89,12 +104,31 @@ class InMemoryQueue implements Partial { */ // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job ${jobId}`) + return + } + if (typeof data !== "object") { throw "Queue only supports carrying JSON." 
} - this._messages.push(newJob(this._name, data)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } return {} as any } @@ -143,7 +177,11 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } From b94d28b7d63caa6061ff55f623be1f76c9665578 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:55:07 +0100 Subject: [PATCH 063/136] Clean --- .../backend-core/src/queue/inMemoryQueue.ts | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index f201714903..6c8107c7a4 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -68,23 +68,19 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - try { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ - const jobId = msg?.opts?.jobId?.toString() - if (jobId && msg?.opts?.removeOnComplete) { - this._queuedJobIds.delete(jobId) - } - } catch (e: any) { - throw e + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) } }) } 
From 8d87850765efdea50d4127cc46743eed2c57a511 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:19:05 +0100 Subject: [PATCH 064/136] Remove defaults and init --- .../backend-core/src/cache/docWritethrough.ts | 52 +++++++------------ .../src/cache/tests/docWritethrough.spec.ts | 3 -- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index f53cfbfe5f..1a16f60eb9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -7,8 +7,6 @@ import { JobQueue, createQueue } from "../queue" import * as context from "../context" import * as dbUtils from "../db" -const DEFAULT_WRITE_RATE_MS = 10000 - let CACHE: BaseCache | null = null async function getCache() { if (!CACHE) { @@ -29,33 +27,27 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -let _init = false -export const init = () => { - if (_init) { - return - } - docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) +docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) } - }) + ) 
+ + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } }) - _init = true -} +}) export async function persistToDb({ dbName, @@ -97,11 +89,7 @@ export class DocWritethrough { private cacheKeyPrefix: string - constructor( - db: Database, - docId: string, - writeRateMs: number = DEFAULT_WRITE_RATE_MS - ) { + constructor(db: Database, docId: string, writeRateMs: number) { this.db = db this._docId = docId this.writeRateMs = writeRateMs diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 83af66a9d2..a5765171cb 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -5,7 +5,6 @@ import _ from "lodash" import { DocWritethrough, docWritethroughProcessorQueue, - init, } from "../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" @@ -45,8 +44,6 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeAll(() => init()) - beforeEach(async () => { resetTime() documentId = structures.uuid() From 0649497ab53a1d73bac39f3c4ec8ba2cb8e88c3c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:47:23 +0100 Subject: [PATCH 065/136] Add comment --- packages/backend-core/src/cache/tests/docWritethrough.spec.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index a5765171cb..3e638a4eec 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -256,6 +256,8 @@ describe("docWritethrough", () => { expect(storeToCacheSpy).toBeCalledTimes(45) + // Ideally we want to spy on persistToDb from ./docWritethrough, but due our barrel files configuration required quite of a complex setup. 
+ // We are relying on the document being stored only once (otherwise we would have _rev updated) expect(await db.get(documentId)).toEqual( expect.objectContaining({ _id: documentId, From 2b25f9f0cb75ae1925db074348dbdaab521747c6 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 15:02:02 +0100 Subject: [PATCH 066/136] Improve redlock non executed response --- packages/backend-core/src/cache/docWritethrough.ts | 9 +++++++++ packages/backend-core/src/redis/redlockImpl.ts | 10 +++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 1a16f60eb9..ebb64ee9e5 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -44,6 +44,15 @@ docWritethroughProcessorQueue.process(async message => { ) if (!lockResponse.executed) { + if ( + lockResponse.reason !== + locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce + ) { + console.error("Error persisting docWritethrough", { + data: message.data, + }) + throw "Error persisting docWritethrough" + } console.log(`Ignoring redlock conflict in write-through cache`) } }) diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts index adeb5b12ec..28babb9405 100644 --- a/packages/backend-core/src/redis/redlockImpl.ts +++ b/packages/backend-core/src/redis/redlockImpl.ts @@ -82,6 +82,11 @@ type SuccessfulRedlockExecution = { } type UnsuccessfulRedlockExecution = { executed: false + reason: UnsuccessfulRedlockExecutionReason +} + +export const enum UnsuccessfulRedlockExecutionReason { + LockTakenWithTryOnce = "LOCK_TAKEN_WITH_TRY_ONCE", } type RedlockExecution = @@ -141,7 +146,10 @@ export async function doWithLock( if (opts.type === LockType.TRY_ONCE) { // don't throw for try-once locks, they will always error // due to retry count (0) exceeded - return { executed: false } + 
return { + executed: false, + reason: UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce, + } } else { throw e } From 4fe7e67dd51617c36356ccc79343a8d12f261ea4 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:15:50 +0100 Subject: [PATCH 067/136] Do not use lock --- .../backend-core/src/cache/docWritethrough.ts | 37 ++----------------- .../src/cache/tests/docWritethrough.spec.ts | 4 +- 2 files changed, 4 insertions(+), 37 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index ebb64ee9e5..d4d651c688 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,7 +1,6 @@ import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database, LockName, LockType } from "@budibase/types" -import * as locks from "../redis/redlockImpl" +import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as context from "../context" @@ -17,7 +16,6 @@ async function getCache() { } interface ProcessDocMessage { - tenantId: string dbName: string docId: string cacheKeyPrefix: string @@ -28,34 +26,8 @@ export const docWritethroughProcessorQueue = createQueue( ) docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - console.log("DocWritethrough persisted", { data: message.data }) - } - ) - - if (!lockResponse.executed) { - if ( - lockResponse.reason !== - locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce - ) { - console.error("Error persisting docWritethrough", { - data: message.data, - }) - 
throw "Error persisting docWritethrough" - } - console.log(`Ignoring redlock conflict in write-through cache`) - } - }) + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) }) export async function persistToDb({ @@ -94,7 +66,6 @@ export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number - private tenantId: string private cacheKeyPrefix: string @@ -103,7 +74,6 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.tenantId = context.getTenantId() } get docId() { @@ -117,7 +87,6 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 3e638a4eec..9bbcd6af44 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -47,9 +47,7 @@ describe("docWritethrough", () => { beforeEach(async () => { resetTime() documentId = structures.uuid() - await config.doInTenant(async () => { - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) - }) + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) it("patching will not persist if timeout does not hit", async () => { From ebcb7718b8f6e60e88c1ca4bbcb7cf0f18857efa Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:06:14 +0100 Subject: [PATCH 068/136] Use bulk --- packages/backend-core/src/cache/base/index.ts | 19 +++++++++++++++++++ .../backend-core/src/cache/docWritethrough.ts | 10 +++++----- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 
911bd6a831..942d70ae72 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -46,6 +46,25 @@ export default class BaseCache { await client.store(key, value, ttl) } + /** + * Bulk write to the cache. + */ + async bulkStore( + data: Record, + ttl: number | null = null, + opts = { useTenancy: true } + ) { + if (opts.useTenancy) { + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[generateTenantKey(key)] = value + return acc + }, {} as Record) + } + + const client = await this.getClient() + await client.bulkStore(data, ttl) + } + /** * Remove from cache. */ diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index d4d651c688..a0bc14ec5c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,7 +3,6 @@ import { getDocWritethroughClient } from "../redis/init" import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" -import * as context from "../context" import * as dbUtils from "../db" let CACHE: BaseCache | null = null @@ -101,9 +100,10 @@ export class DocWritethrough { } private async storeToCache(cache: BaseCache, data: Record) { - for (const [key, value] of Object.entries(data)) { - const cacheKey = this.cacheKeyPrefix + ":data:" + key - await cache.store(cacheKey, { key, value }, undefined) - } + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[this.cacheKeyPrefix + ":data:" + key] = { key, value } + return acc + }, {} as Record) + await cache.bulkStore(data, null) } } From db75c0594290551fd0a23e1b0c70079eb2ea5656 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:25:23 +0100 Subject: [PATCH 069/136] Use scim-logs db --- packages/backend-core/src/constants/db.ts | 3 +++ packages/backend-core/src/context/mainContext.ts | 11 +++++++++++ packages/pro | 2 +- 3 files changed, 15 
insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index ac00483021..f4caac502e 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -57,6 +57,9 @@ export const StaticDatabases = { AUDIT_LOGS: { name: "audit-logs", }, + SCIM_LOGS: { + name: "scim-logs", + }, } export const APP_PREFIX = prefixed(DocumentType.APP) diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts index 36fd5dcb48..ae86695168 100644 --- a/packages/backend-core/src/context/mainContext.ts +++ b/packages/backend-core/src/context/mainContext.ts @@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) { } } +export function getScimDBName(tenantId?: string) { + if (!tenantId) { + tenantId = getTenantId() + } + if (tenantId === DEFAULT_TENANT_ID) { + return StaticDatabases.SCIM_LOGS.name + } else { + return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}` + } +} + export function baseGlobalDBName(tenantId: string | undefined | null) { if (!tenantId || tenantId === DEFAULT_TENANT_ID) { return StaticDatabases.GLOBAL.name diff --git a/packages/pro b/packages/pro index 6079868997..678c913246 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 607986899781aa7c0b6ccfd9746497b6fc32b569 +Subproject commit 678c913246bacb398fbda2ad73a8e1bb562983fd From 182a1df9606f98da9791cb50df8355fc54eb21c2 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 5 Mar 2024 17:35:04 +0000 Subject: [PATCH 070/136] Fix the bug, I think. 
--- packages/backend-core/src/db/Replication.ts | 36 ++++++--- packages/backend-core/src/security/roles.ts | 5 +- packages/server/src/api/controllers/role.ts | 10 +++ .../src/api/routes/tests/application.spec.ts | 81 +++++++++---------- packages/types/src/documents/app/role.ts | 1 + 5 files changed, 72 insertions(+), 61 deletions(-) diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts index f91a37ce8f..12c11eb9e2 100644 --- a/packages/backend-core/src/db/Replication.ts +++ b/packages/backend-core/src/db/Replication.ts @@ -1,17 +1,18 @@ +import PouchDB from "pouchdb" import { getPouchDB, closePouchDB } from "./couch" import { DocumentType } from "../constants" class Replication { - source: any - target: any - replication: any + source: PouchDB.Database + target: PouchDB.Database + replication?: Promise /** * * @param source - the DB you want to replicate or rollback to * @param target - the DB you want to replicate to, or rollback from */ - constructor({ source, target }: any) { + constructor({ source, target }: { source: string; target: string }) { this.source = getPouchDB(source) this.target = getPouchDB(target) } @@ -40,7 +41,7 @@ class Replication { * Two way replication operation, intended to be promise based. * @param opts - PouchDB replication options */ - sync(opts = {}) { + sync(opts: PouchDB.Replication.SyncOptions = {}) { this.replication = this.promisify(this.source.sync, opts) return this.replication } @@ -49,18 +50,31 @@ class Replication { * One way replication operation, intended to be promise based. 
* @param opts - PouchDB replication options */ - replicate(opts = {}) { + replicate(opts: PouchDB.Replication.ReplicateOptions = {}) { this.replication = this.promisify(this.source.replicate.to, opts) return this.replication } - appReplicateOpts() { + appReplicateOpts( + opts: PouchDB.Replication.ReplicateOptions = {} + ): PouchDB.Replication.ReplicateOptions { + if (typeof opts.filter === "string") { + return opts + } + + const filter = opts.filter + delete opts.filter + return { - filter: (doc: any) => { + ...opts, + filter: (doc: any, params: any) => { if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) { return false } - return doc._id !== DocumentType.APP_METADATA + if (doc._id === DocumentType.APP_METADATA) { + return false + } + return filter ? filter(doc, params) : true }, } } @@ -75,10 +89,6 @@ class Replication { // take the opportunity to remove deleted tombstones await this.replicate() } - - cancel() { - this.replication.cancel() - } } export default Replication diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts index 01473ad991..a64be6b319 100644 --- a/packages/backend-core/src/security/roles.ts +++ b/packages/backend-core/src/security/roles.ts @@ -101,10 +101,7 @@ export function getBuiltinRole(roleId: string): Role | undefined { /** * Works through the inheritance ranks to see how far up the builtin stack this ID is. 
*/ -export function builtinRoleToNumber(id?: string) { - if (!id) { - return 0 - } +export function builtinRoleToNumber(id: string) { const builtins = getBuiltinRoles() const MAX = Object.values(builtins).length + 1 if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) { diff --git a/packages/server/src/api/controllers/role.ts b/packages/server/src/api/controllers/role.ts index b3eb61a255..fff58da86e 100644 --- a/packages/server/src/api/controllers/role.ts +++ b/packages/server/src/api/controllers/role.ts @@ -106,6 +106,16 @@ export async function save(ctx: UserCtx) { ) role._rev = result.rev ctx.body = role + + const replication = new dbCore.Replication({ + source: context.getDevAppDB().name, + target: context.getProdAppDB().name, + }) + await replication.replicate({ + filter: (doc: any, params: any) => { + return doc._id === _id + }, + }) } export async function destroy(ctx: UserCtx) { diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 6f948d9977..63c9fe44b8 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -16,16 +16,9 @@ import * as setup from "./utilities" import { AppStatus } from "../../../db/utils" import { events, utils, context } from "@budibase/backend-core" import env from "../../../environment" -import { - PermissionLevel, - type App, - INTERNAL_TABLE_SOURCE_ID, - TableSourceType, - FieldType, -} from "@budibase/types" +import { type App } from "@budibase/types" import tk from "timekeeper" - -jest.setTimeout(99999999) +import * as uuid from "uuid" describe("/applications", () => { let config = setup.getConfig() @@ -258,25 +251,12 @@ describe("/applications", () => { }) describe("permissions", () => { - it.only("should only return apps a user has access to", async () => { + it("should only return apps a user has access to", async () => { let user = await config.createUser({ 
builder: { global: false }, admin: { global: false }, }) - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - }, - }) - await config.withUser(user, async () => { const apps = await config.api.application.fetch() expect(apps).toHaveLength(0) @@ -295,25 +275,12 @@ describe("/applications", () => { }) }) - it("should only return apps a user has access to through a custom role on a group", async () => { - const user = await config.createUser({ + it("should only return apps a user has access to through a custom role", async () => { + let user = await config.createUser({ builder: { global: false }, admin: { global: false }, }) - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - }, - }) - await config.withUser(user, async () => { const apps = await config.api.application.fetch() expect(apps).toHaveLength(0) @@ -326,17 +293,43 @@ describe("/applications", () => { version: "name", }) - await config.api.user.update({ + user = await config.globalUser({ ...user, roles: { - [config.getAppId()]: role._id!, + [config.getProdAppId()]: role.name, }, }) - await config.api.permission.add({ - resourceId: table._id!, - roleId: role._id!, - level: PermissionLevel.READ, + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) + }) + + it.only("should only return apps a user has access to through a custom role on a group", async () => { + let user = await config.createUser({ + builder: { global: false }, + admin: { global: false }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(0) + }) 
+ + const roleName = uuid.v4().replace(/-/g, "") + const role = await config.api.roles.save({ + name: roleName, + inherits: "PUBLIC", + permissionId: "read_only", + version: "name", + }) + + const group = await config.createGroup(role._id!) + + user = await config.globalUser({ + ...user, + userGroups: [group._id!], }) await config.withUser(user, async () => { diff --git a/packages/types/src/documents/app/role.ts b/packages/types/src/documents/app/role.ts index d126a67b16..f32ba810b0 100644 --- a/packages/types/src/documents/app/role.ts +++ b/packages/types/src/documents/app/role.ts @@ -5,4 +5,5 @@ export interface Role extends Document { inherits?: string permissions: { [key: string]: string[] } version?: string + name: string } From 11704ea983b5ec3d7426b6927afa41d1cdea81a7 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 5 Mar 2024 17:40:38 +0000 Subject: [PATCH 071/136] TODO. --- packages/server/src/api/controllers/role.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/server/src/api/controllers/role.ts b/packages/server/src/api/controllers/role.ts index fff58da86e..6b62c568e2 100644 --- a/packages/server/src/api/controllers/role.ts +++ b/packages/server/src/api/controllers/role.ts @@ -107,6 +107,8 @@ export async function save(ctx: UserCtx) { role._rev = result.rev ctx.body = role + // TODO: need to check that the prod DB actually exists, I think it won't + // if the app has never been published. 
const replication = new dbCore.Replication({ source: context.getDevAppDB().name, target: context.getProdAppDB().name, From 86b715d5ce1cddaee37e531e661509dc55663bb5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 11:22:27 +0100 Subject: [PATCH 072/136] Create docWriteThrough redis cache --- packages/backend-core/src/redis/init.ts | 13 ++++++++++++- packages/backend-core/src/redis/utils.ts | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..7920dfed2d 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = await new Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ export async function init() { utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + ).init() } export async function shutdown() { @@ -104,3 +108,10 @@ export async function getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if (!docWritethroughClient) { + await init() + } + return docWritethroughClient +} diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** From c96c57bd9e6176418712525866bc89a9cd83c735 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:44:52 +0100 Subject: [PATCH
073/136] DocWritethrough --- .../backend-core/src/cache/docWritethrough.ts | 102 ++++++++++++++++++ .../backend-core/src/db/couch/DatabaseImpl.ts | 9 ++ .../backend-core/src/db/instrumentation.ts | 7 ++ packages/types/src/sdk/db.ts | 1 + 4 files changed, 119 insertions(+) create mode 100644 packages/backend-core/src/cache/docWritethrough.ts diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts new file mode 100644 index 0000000000..9e1977f797 --- /dev/null +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -0,0 +1,102 @@ +import BaseCache from "./base" +import { getDocWritethroughClient } from "../redis/init" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" + +const DEFAULT_WRITE_RATE_MS = 10000 + +let CACHE: BaseCache | null = null +async function getCache() { + if (!CACHE) { + const client = await getDocWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} + +interface CacheItem { + lastWrite: number +} + +export class DocWritethrough { + db: Database + docId: string + writeRateMs: number + + constructor( + db: Database, + docId: string, + writeRateMs: number = DEFAULT_WRITE_RATE_MS + ) { + this.db = db + this.docId = docId + this.writeRateMs = writeRateMs + } + + private makeCacheItem(): CacheItem { + return { lastWrite: Date.now() } + } + + async patch(data: Record) { + const cache = await getCache() + + const key = `${this.docId}:info` + const cacheItem = await cache.withCache( + key, + null, + () => this.makeCacheItem(), + { + useTenancy: false, + } + ) + + await this.storeToCache(cache, data) + + const updateDb = + !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs + // let output = this.doc + if (updateDb) { + await this.persistToDb(cache) + } + } + + private async storeToCache(cache: BaseCache, data: Record) { + for (const [key, value] of Object.entries(data)) { + const 
cacheKey = this.docId + ":data:" + key + await cache.store(cacheKey, { key, value }, undefined) + } + } + + private async persistToDb(cache: BaseCache) { + const key = `${this.db.name}_${this.docId}` + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 15000, + }, + async () => { + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } + + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await this.db.put(doc) + } + ) + + if (!lockResponse.executed) { + throw `DocWriteThrough could not be persisted to db for ${key}` + } + } +} diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 7e7c997cbe..d4d17f6127 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -135,6 +135,15 @@ export class DatabaseImpl implements Database { }) } + async docExists(id: string): Promise { + try { + await this.get(id) + return true + } catch { + return false + } + } + async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 03010d4c92..87af0e3127 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -38,6 +38,13 @@ export class DDInstrumentedDatabase implements Database { }) } + docExists(id: string): Promise { + return tracer.trace("db.docExists", span => { + span?.addTags({ db_name: this.name, doc_id: id }) + return this.db.docExists(id) + }) + } + getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git 
a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index c4e4a4f02f..dafc9ced57 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,6 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise + docExists(id: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 48c92377ad93c1a41bf98910b9345f9949e5093e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:47:18 +0100 Subject: [PATCH 074/136] Use get for doc exists --- packages/backend-core/src/cache/base/index.ts | 2 +- packages/backend-core/src/db/couch/DatabaseImpl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 264984c6a5..23c952c7b2 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -60,7 +60,7 @@ export default class BaseCache { */ async withCache( key: string, - ttl: number, + ttl: number | null = null, fetchFn: any, opts = { useTenancy: true } ) { diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index d4d17f6127..9d198e4307 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -137,7 +137,7 @@ export class DatabaseImpl implements Database { async docExists(id: string): Promise { try { - await this.get(id) + await this.performCall(db => () => db.head(id)) return true } catch { return false From 3998faaf3a2d6e0f831693f2578e8de3b2149854 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:17:18 +0100 Subject: [PATCH 075/136] DatabaseImpl.docExists test --- .../src/db/tests/DatabaseImpl.spec.ts | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 packages/backend-core/src/db/tests/DatabaseImpl.spec.ts diff --git
a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts new file mode 100644 index 0000000000..140ecf4f2c --- /dev/null +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -0,0 +1,55 @@ +import _ from "lodash" +import { AnyDocument } from "@budibase/types" +import { generator } from "../../../tests" +import { DatabaseImpl } from "../couch" +import { newid } from "../../utils" + +describe("DatabaseImpl", () => { + const database = new DatabaseImpl(generator.word()) + const documents: AnyDocument[] = [] + + beforeAll(async () => { + const docsToCreate = Array.from({ length: 10 }).map(() => ({ + _id: newid(), + })) + const createdDocs = await database.bulkDocs(docsToCreate) + + documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) + }) + + describe("docExists", () => { + it("can check existing docs by id", async () => { + const existingDoc = _.sample(documents) + const result = await database.docExists(existingDoc!._id!) + + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.docExists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + + const results = [] + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.docExists(id)).toBe(true) + + await database.remove(existingDoc!) 
+ expect(await database.docExists(id)).toBe(false) + }) + }) +}) From 64ea969aafcd9477ebf1bb65b53a3266aaa72068 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:23:32 +0100 Subject: [PATCH 076/136] docWritethrough test --- .../src/cache/tests/docWritethrough.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 packages/backend-core/src/cache/tests/docWritethrough.spec.ts diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..bfb1da5f1c --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,47 @@ +import tk from "timekeeper" +import { env } from "../.." +import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" +import { DocWritethrough } from "../docWritethrough" +import _ from "lodash" + +env._set("MOCK_REDIS", null) + +const initialTime = Date.now() + +const WRITE_RATE_MS = 500 + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(() => { + tk.freeze(initialTime) + documentId = structures.db.id() + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) + + it("patching will not persist until timeout is hit", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + tk.travel(Date.now() + WRITE_RATE_MS - 1) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await 
db.docExists(documentId)).toBe(false) + }) + }) + }) +}) From b1027527b3e820de2e9061aec3fb8f35a66b13ac Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:28:35 +0100 Subject: [PATCH 077/136] Add persisting tests --- .../src/cache/tests/docWritethrough.spec.ts | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bfb1da5f1c..ab0de53bee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -33,7 +33,7 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) - it("patching will not persist until timeout is hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) @@ -43,5 +43,42 @@ describe("docWritethrough", () => { expect(await db.docExists(documentId)).toBe(false) }) }) + + it("patching will persist if timeout hits and next patch is called", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + _rev: expect.stringMatching(/1-.+/), + createdAt: new Date(initialTime + 500).toISOString(), + updatedAt: new Date(initialTime + 500).toISOString(), + }) + }) + }) + + it("patching will not persist even if timeout hits but next patch is not callec", async () => { + await config.doInTenant(async () => 
{ + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) }) }) From b1e1226de6058c4c119546a8751f1ca955f1f078 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:41:26 +0100 Subject: [PATCH 078/136] Add extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 86 ++++++++++++++++--- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index ab0de53bee..16e47ce3c3 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -7,9 +7,17 @@ import _ from "lodash" env._set("MOCK_REDIS", null) +const WRITE_RATE_MS = 500 + const initialTime = Date.now() -const WRITE_RATE_MS = 500 +function resetTime() { + tk.travel(initialTime) +} +function travelForward(ms: number) { + const updatedTime = Date.now() + ms + tk.travel(updatedTime) +} describe("docWritethrough", () => { const config = new DBTestConfiguration() @@ -28,7 +36,7 @@ describe("docWritethrough", () => { } beforeEach(() => { - tk.freeze(initialTime) + resetTime() documentId = structures.db.id() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -37,7 +45,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS - 1) + travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) expect(await db.docExists(documentId)).toBe(false) @@ -51,7 +59,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - 
tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -62,23 +70,79 @@ describe("docWritethrough", () => { ...patch2, ...patch3, _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + 500).toISOString(), - updatedAt: new Date(initialTime + 500).toISOString(), + createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), }) }) }) + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + travelForward(WRITE_RATE_MS) + const date1 = new Date() + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + const date2 = new Date() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: date2.toISOString(), + }) + ) + }) + }) + it("patching will not persist even if timeout hits but next patch is not callec", async () => { await config.doInTenant(async () => { - const patch1 = generatePatchObject(2) - const patch2 = generatePatchObject(2) - await docWritethrough.patch(patch1) - await docWritethrough.patch(patch2) + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) expect(await db.docExists(documentId)).toBe(false) }) }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const time1 = travelForward(WRITE_RATE_MS) + const patch2 = generatePatchObject(1) + await 
docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + travelForward(WRITE_RATE_MS) + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) }) }) From e228b9c207cca329a37305a7a7d0d6c932eeffa7 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:51:42 +0100 Subject: [PATCH 079/136] Test concurrency --- .../backend-core/src/cache/docWritethrough.ts | 12 ++++-- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++++++++++++- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 9e1977f797..13a85a0d84 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -19,9 +19,9 @@ interface CacheItem { } export class DocWritethrough { - db: Database - docId: string - writeRateMs: number + private db: Database + private _docId: string + private writeRateMs: number constructor( db: Database, @@ -29,10 +29,14 @@ export class DocWritethrough { writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { this.db = db - this.docId = docId + this._docId = docId this.writeRateMs = writeRateMs } + get docId() { + return this._docId + } + private makeCacheItem(): CacheItem { return { lastWrite: Date.now() } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 16e47ce3c3..aed87499ee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -41,8 +41,9 @@ 
describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist if timeout from the creation does not hit", async () => { await config.doInTenant(async () => { + travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) travelForward(WRITE_RATE_MS - 1) @@ -116,7 +117,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - const time1 = travelForward(WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -144,5 +145,41 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id(), + WRITE_RATE_MS + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + travelForward(WRITE_RATE_MS) + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) }) }) From 319b3afddbfc69afbc461326ee7374bed1115c30 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 16:48:16 +0100 Subject: [PATCH 080/136] Ensure keys are removed --- .../backend-core/src/cache/docWritethrough.ts | 4 
+++ .../src/cache/tests/docWritethrough.spec.ts | 28 +++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 13a85a0d84..bde93182a9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -96,6 +96,10 @@ export class DocWritethrough { } await this.db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } ) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index aed87499ee..65e9450f62 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -181,5 +181,33 @@ describe("docWritethrough", () => { ) }) }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + travelForward(WRITE_RATE_MS) + + await docWritethrough.patch({}) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + travelForward(WRITE_RATE_MS) + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) }) }) From 00bf88c5bf210908d6416f893ef3e4cee3dd730e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 17:01:16 +0100 Subject: [PATCH 081/136] Extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts 
b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 65e9450f62..974494d1c9 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -77,6 +77,35 @@ describe("docWritethrough", () => { }) }) + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + travelForward(WRITE_RATE_MS) + + const patch4 = generatePatchObject(3) + await docWritethrough.patch(patch4) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + ...patch4, + }) + ) + }) + }) + it("date audit fields are set correctly when persisting", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) From 6a81d21cb73be609e02e9b343bbef3d3264c6f9b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 10:53:18 +0100 Subject: [PATCH 082/136] Fixes and tests --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++---------- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++- 2 files changed, 82 insertions(+), 47 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index bde93182a9..80063e4772 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,8 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private docInfoCacheKey: string + constructor( db: Database, docId: string, @@ -31,6 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId 
this.writeRateMs = writeRateMs + this.docInfoCacheKey = `${this.docId}:info` } get docId() { @@ -44,26 +47,39 @@ export class DocWritethrough { async patch(data: Record) { const cache = await getCache() - const key = `${this.docId}:info` - const cacheItem = await cache.withCache( - key, - null, - () => this.makeCacheItem(), - { - useTenancy: false, - } - ) - await this.storeToCache(cache, data) - const updateDb = - !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs - // let output = this.doc + const updateDb = await this.shouldUpdateDb(cache) + if (updateDb) { - await this.persistToDb(cache) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: this.docInfoCacheKey, + ttl: 15000, + }, + async () => { + if (await this.shouldUpdateDb(cache)) { + await this.persistToDb(cache) + await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } } } + private async shouldUpdateDb(cache: BaseCache) { + const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => + this.makeCacheItem() + ) + return cacheItem.lastWrite <= Date.now() - this.writeRateMs + } + private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { const cacheKey = this.docId + ":data:" + key @@ -72,39 +88,23 @@ export class DocWritethrough { } private async persistToDb(cache: BaseCache) { - const key = `${this.db.name}_${this.docId}` + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: key, - ttl: 15000, - }, - async () => { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - 
} + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } + await this.db.put(doc) - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } - ) - - if (!lockResponse.executed) { - throw `DocWriteThrough could not be persisted to db for ${key}` + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) } } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 974494d1c9..bca781e377 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,10 @@ import tk from "timekeeper" -import { env } from "../.." 
+ import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -env._set("MOCK_REDIS", null) - const WRITE_RATE_MS = 500 const initialTime = Date.now() @@ -238,5 +236,42 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent calls will not cause multiple saves", async () => { + async function parallelPatch(count: number) { + await Promise.all( + Array.from({ length: count }).map(() => + docWritethrough.patch(generatePatchObject(1)) + ) + ) + } + + const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") + const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + + await config.doInTenant(async () => { + await parallelPatch(5) + expect(persistToDbSpy).not.toBeCalled() + expect(storeToCacheSpy).toBeCalledTimes(5) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(40) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(45) + + await parallelPatch(10) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(55) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(5) + expect(persistToDbSpy).toBeCalledTimes(2) + expect(storeToCacheSpy).toBeCalledTimes(60) + }) + }) }) }) From 8062c287227c55f22e952f2006d77aa768bf757b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:30 +0100 Subject: [PATCH 083/136] Making code more readable --- .../backend-core/src/cache/docWritethrough.ts | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 80063e4772..5148950c1d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -15,7 +15,7 @@ async function getCache() { } interface CacheItem { - lastWrite: number + nextWrite: 
number } export class DocWritethrough { @@ -40,8 +40,8 @@ export class DocWritethrough { return this._docId } - private makeCacheItem(): CacheItem { - return { lastWrite: Date.now() } + private makeNextWriteInfoItem(): CacheItem { + return { nextWrite: Date.now() + this.writeRateMs } } async patch(data: Record) { @@ -62,7 +62,10 @@ export class DocWritethrough { async () => { if (await this.shouldUpdateDb(cache)) { await this.persistToDb(cache) - await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + await cache.store( + this.docInfoCacheKey, + this.makeNextWriteInfoItem() + ) } } ) @@ -75,9 +78,9 @@ export class DocWritethrough { private async shouldUpdateDb(cache: BaseCache) { const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeCacheItem() + this.makeNextWriteInfoItem() ) - return cacheItem.lastWrite <= Date.now() - this.writeRateMs + return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { From cd56e1ffff58309ecec060b7f55770da0d49634e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:55 +0100 Subject: [PATCH 084/136] Type caches --- packages/backend-core/src/cache/base/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 23c952c7b2..911bd6a831 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -58,12 +58,12 @@ export default class BaseCache { /** * Read from the cache. Write to the cache if not exists. 
*/ - async withCache( + async withCache( key: string, ttl: number | null = null, - fetchFn: any, + fetchFn: () => Promise | T, opts = { useTenancy: true } - ) { + ): Promise { const cachedValue = await this.get(key, opts) if (cachedValue) { return cachedValue From 608865fe9d11dc0649e8a96969b8c1234b1f5308 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:12:31 +0100 Subject: [PATCH 085/136] Fix types --- packages/backend-core/src/cache/generic.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts index 3ac323a8d4..2d6d8b9472 100644 --- a/packages/backend-core/src/cache/generic.ts +++ b/packages/backend-core/src/cache/generic.ts @@ -26,7 +26,8 @@ export const store = (...args: Parameters) => GENERIC.store(...args) export const destroy = (...args: Parameters) => GENERIC.delete(...args) -export const withCache = (...args: Parameters) => - GENERIC.withCache(...args) +export const withCache = ( + ...args: Parameters> +) => GENERIC.withCache(...args) export const bustCache = (...args: Parameters) => GENERIC.bustCache(...args) From 6bd7e6b374c77330a1acea9a7ee5440baff7420e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:38:48 +0100 Subject: [PATCH 086/136] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5148950c1d..e46c763906 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -33,7 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.docId}:info` + this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` } get docId() { From 
eb781037640ac07d5e1250271644e9bac2c27aab Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:41:40 +0100 Subject: [PATCH 087/136] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e46c763906..e367c9e060 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,7 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private cacheKeyPrefix: string private docInfoCacheKey: string constructor( @@ -33,7 +34,8 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` + this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } get docId() { @@ -85,7 +87,7 @@ export class DocWritethrough { private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { - const cacheKey = this.docId + ":data:" + key + const cacheKey = this.cacheKeyPrefix + ":data:" + key await cache.store(cacheKey, { key, value }, undefined) } } @@ -98,7 +100,7 @@ export class DocWritethrough { doc = { _id: this.docId } } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) + const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value From 3c944073eb0467a75a979ee4c524093cef64d166 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:59:51 +0100 Subject: [PATCH 088/136] Use overloads --- .../src/cache/tests/docWritethrough.spec.ts | 6 ++-- .../backend-core/src/db/couch/DatabaseImpl.ts | 28 ++++++++++++------- 
.../backend-core/src/db/instrumentation.ts | 14 ++++------ .../src/db/tests/DatabaseImpl.spec.ts | 16 +++++------ packages/types/src/sdk/db.ts | 2 +- 5 files changed, 35 insertions(+), 31 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bca781e377..4c4a4b2b60 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -35,7 +35,7 @@ describe("docWritethrough", () => { beforeEach(() => { resetTime() - documentId = structures.db.id() + documentId = structures.uuid() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -47,7 +47,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) @@ -136,7 +136,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 9d198e4307..416313f520 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -70,7 +70,15 @@ export class DatabaseImpl implements Database { DatabaseImpl.nano = buildNano(couchInfo) } - async exists() { + exists(docId?: string) { + if (docId === undefined) { + return this.dbExists() + } + + return this.docExists(docId) + } + + private async dbExists() { const response = await directCouchUrlCall({ url: `${this.couchInfo.url}/${this.name}`, method: "HEAD", @@ -79,6 +87,15 @@ export class DatabaseImpl implements Database { return response.status === 200 } + private async docExists(id: string): Promise { + try { + await 
this.performCall(db => () => db.head(id)) + return true + } catch { + return false + } + } + private nano() { return this.instanceNano || DatabaseImpl.nano } @@ -135,15 +152,6 @@ export class DatabaseImpl implements Database { }) } - async docExists(id: string): Promise { - try { - await this.performCall(db => () => db.head(id)) - return true - } catch { - return false - } - } - async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 87af0e3127..795f30d7cd 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database { return this.db.name } - exists(): Promise { + exists(docId?: string): Promise { return tracer.trace("db.exists", span => { - span?.addTags({ db_name: this.name }) + span?.addTags({ db_name: this.name, doc_id: docId }) + if (docId) { + return this.db.exists(docId) + } return this.db.exists() }) } @@ -38,13 +41,6 @@ export class DDInstrumentedDatabase implements Database { }) } - docExists(id: string): Promise { - return tracer.trace("db.docExists", span => { - span?.addTags({ db_name: this.name, doc_id: id }) - return this.db.docExists(id) - }) - } - getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts index 140ecf4f2c..586f13f417 100644 --- a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -17,16 +17,16 @@ describe("DatabaseImpl", () => { documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) }) - describe("docExists", () => { + describe("document exists", () => { it("can check existing docs by id", async () => { const existingDoc = 
_.sample(documents) - const result = await database.docExists(existingDoc!._id!) + const result = await database.exists(existingDoc!._id!) expect(result).toBe(true) }) it("can check non existing docs by id", async () => { - const result = await database.docExists(newid()) + const result = await database.exists(newid()) expect(result).toBe(false) }) @@ -36,9 +36,9 @@ describe("DatabaseImpl", () => { const id = existingDoc!._id! const results = [] - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) expect(results).toEqual([true, true, true]) }) @@ -46,10 +46,10 @@ describe("DatabaseImpl", () => { it("returns false after the doc is deleted", async () => { const existingDoc = _.sample(documents) const id = existingDoc!._id! - expect(await database.docExists(id)).toBe(true) + expect(await database.exists(id)).toBe(true) await database.remove(existingDoc!) 
- expect(await database.docExists(id)).toBe(false) + expect(await database.exists(id)).toBe(false) }) }) }) diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index dafc9ced57..4d103d5be6 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,7 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise - docExists(id: string): Promise + exists(docId: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 770aff4f5e9f553443574d3feaba1ab6ddbc924d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:38:45 +0100 Subject: [PATCH 089/136] Type inMemoryQueue --- .../backend-core/src/queue/inMemoryQueue.ts | 36 ++++++++++--------- packages/backend-core/src/queue/queue.ts | 2 ++ 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..3205b6f383 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,6 @@ import events from "events" import { timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -24,9 +25,9 @@ function newJob(queue: string, message: any) { * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. 
*/ -class InMemoryQueue { +class InMemoryQueue implements Partial { _name: string - _opts?: any + _opts?: QueueOptions _messages: any[] _emitter: EventEmitter _runCount: number @@ -37,7 +38,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] @@ -55,8 +56,12 @@ class InMemoryQueue { * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. */ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { + const delay = this._opts?.defaultJobOptions?.delay + if (delay) { + await new Promise(r => setTimeout(() => r(), delay)) + } if (this._messages.length <= 0) { return } @@ -70,7 +75,7 @@ class InMemoryQueue { } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +88,26 @@ class InMemoryQueue { * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) + this._messages.push(newJob(this._name, data)) this._addCount++ this._emitter.emit("message") + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed. 
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +115,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + async removeJobs(pattern: string) { // no-op } @@ -128,12 +132,12 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds From ee3cb5ae689ac0cc103227c3efd867e2ba31abec Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:43:47 +0100 Subject: [PATCH 090/136] Clean --- packages/worker/src/initPro.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts index ddc8d2562a..b34d514992 100644 --- a/packages/worker/src/initPro.ts +++ b/packages/worker/src/initPro.ts @@ -1,5 +1,4 @@ import { sdk as proSdk } from "@budibase/pro" -import * as userSdk from "./sdk/users" export const initPro = async () => { await proSdk.init({}) From e4d8fe2f4758e55640f6733e81ce4c07526812c4 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:18:01 +0100 Subject: [PATCH 091/136] Add doc-writethrough queue --- packages/backend-core/src/queue/constants.ts | 1 + 
packages/backend-core/src/queue/listeners.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..14dce5fe8d 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { From 151bfd103b8f399d02654f403dba4738d11d82db Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:34:05 +0100 Subject: [PATCH 092/136] Use bull --- .../backend-core/src/cache/docWritethrough.ts | 123 +++++++++--------- 1 file changed, 64 insertions(+), 59 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e367c9e060..38a162435d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,6 +3,9 @@ import { getDocWritethroughClient } from 
"../redis/init" import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" +import { JobQueue, createQueue } from "../queue" +import { context, db as dbUtils } from ".." + const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -14,17 +17,63 @@ async function getCache() { return CACHE } -interface CacheItem { - nextWrite: number +interface ProcessDocMessage { + tenantId: string + dbName: string + docId: string + cacheKeyPrefix: string } +export const docWritethroughProcessorQueue = createQueue( + JobQueue.DOC_WRITETHROUGH_QUEUE +) + +docWritethroughProcessorQueue.process(async message => { + const { dbName, tenantId, docId, cacheKeyPrefix } = message.data + const cache = await getCache() + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } + }) +}) + export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number private cacheKeyPrefix: string - private docInfoCacheKey: string constructor( db: Database, @@ -35,54 +84,31 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } 
get docId() { return this._docId } - private makeNextWriteInfoItem(): CacheItem { - return { nextWrite: Date.now() + this.writeRateMs } - } - async patch(data: Record) { const cache = await getCache() await this.storeToCache(cache, data) - const updateDb = await this.shouldUpdateDb(cache) - - if (updateDb) { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: this.docInfoCacheKey, - ttl: 15000, - }, - async () => { - if (await this.shouldUpdateDb(cache)) { - await this.persistToDb(cache) - await cache.store( - this.docInfoCacheKey, - this.makeNextWriteInfoItem() - ) - } - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + docWritethroughProcessorQueue.add( + { + tenantId: context.getTenantId(), + dbName: this.db.name, + docId: this.docId, + cacheKeyPrefix: this.cacheKeyPrefix, + }, + { + delay: this.writeRateMs - 1, + jobId: this.cacheKeyPrefix, + removeOnFail: true, + removeOnComplete: true, } - } - } - - private async shouldUpdateDb(cache: BaseCache) { - const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeNextWriteInfoItem() ) - return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { @@ -91,25 +117,4 @@ export class DocWritethrough { await cache.store(cacheKey, { key, value }, undefined) } } - - private async persistToDb(cache: BaseCache) { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } - - const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } } From caff2876dd604a97e9370861927f52476860be11 Mon Sep 17 00:00:00 
2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:50:58 +0100 Subject: [PATCH 093/136] Tests --- .../backend-core/src/cache/docWritethrough.ts | 99 +++++++++------ .../src/cache/tests/docWritethrough.spec.ts | 120 ++++++++++-------- .../backend-core/src/queue/inMemoryQueue.ts | 76 ++++++++--- 3 files changed, 186 insertions(+), 109 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 38a162435d..f53cfbfe5f 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -4,7 +4,8 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" import { JobQueue, createQueue } from "../queue" -import { context, db as dbUtils } from ".." +import * as context from "../context" +import * as dbUtils from "../db" const DEFAULT_WRITE_RATE_MS = 10000 @@ -28,50 +29,71 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -docWritethroughProcessorQueue.process(async message => { - const { dbName, tenantId, docId, cacheKeyPrefix } = message.data - const cache = await getCache() - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } +let _init = false +export const init = () => { + if (_init) { + return + } + docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, 
+ async () => { + await persistToDb(message.data) } + ) - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) - } + }) }) -}) + _init = true +} + +export async function persistToDb({ + dbName, + docId, + cacheKeyPrefix, +}: { + dbName: string + docId: string + cacheKeyPrefix: string +}) { + const cache = await getCache() + + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } +} export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number + private tenantId: string private cacheKeyPrefix: string @@ -84,6 +106,7 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.tenantId = context.getTenantId() } get docId() { @@ -97,13 +120,13 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: context.getTenantId(), + tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, }, { - delay: this.writeRateMs - 1, + delay: this.writeRateMs, jobId: this.cacheKeyPrefix, removeOnFail: true, removeOnComplete: true, diff --git 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 4c4a4b2b60..83af66a9d2 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,20 +1,32 @@ -import tk from "timekeeper" - import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -const WRITE_RATE_MS = 500 +import { + DocWritethrough, + docWritethroughProcessorQueue, + init, +} from "../docWritethrough" +import InMemoryQueue from "../../queue/inMemoryQueue" + +const WRITE_RATE_MS = 1000 const initialTime = Date.now() +jest.useFakeTimers({ + now: initialTime, +}) + function resetTime() { - tk.travel(initialTime) + jest.setSystemTime(initialTime) } -function travelForward(ms: number) { - const updatedTime = Date.now() + ms - tk.travel(updatedTime) +async function travelForward(ms: number) { + await jest.advanceTimersByTimeAsync(ms) + + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + while (queue.hasRunningJobs()) { + await jest.runOnlyPendingTimersAsync() + } } describe("docWritethrough", () => { @@ -33,33 +45,37 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeEach(() => { + beforeAll(() => init()) + + beforeEach(async () => { resetTime() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + await config.doInTenant(async () => { + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) }) - it("patching will not persist if timeout from the creation does not hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await 
docWritethrough.patch(generatePatchObject(2)) - travelForward(WRITE_RATE_MS - 1) - await docWritethrough.patch(generatePatchObject(2)) + await travelForward(WRITE_RATE_MS - 1) expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits and next patch is called", async () => { + it("patching will persist if timeout hits", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) + // This will not be persisted const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -67,7 +83,6 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - ...patch3, _rev: expect.stringMatching(/1-.+/), createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), @@ -82,15 +97,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - travelForward(WRITE_RATE_MS) - - const patch4 = generatePatchObject(3) - await docWritethrough.patch(patch4) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -98,7 +110,6 @@ describe("docWritethrough", () => { ...patch1, ...patch2, ...patch3, - ...patch4, }) ) }) @@ -109,16 +120,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date1 = new Date() await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date2 = new Date() - 
const patch3 = generatePatchObject(3) - await docWritethrough.patch(patch3) - expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -129,22 +137,11 @@ describe("docWritethrough", () => { }) }) - it("patching will not persist even if timeout hits but next patch is not callec", async () => { - await config.doInTenant(async () => { - await docWritethrough.patch(generatePatchObject(2)) - await docWritethrough.patch(generatePatchObject(2)) - - travelForward(WRITE_RATE_MS) - - expect(await db.exists(documentId)).toBe(false) - }) - }) - it("concurrent patches will override keys", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -155,13 +152,14 @@ describe("docWritethrough", () => { }) ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -173,7 +171,7 @@ describe("docWritethrough", () => { }) }) - it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + it("concurrent patches to different docWritethrough will not pollute each other", async () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, @@ -186,12 +184,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) + await 
travelForward(WRITE_RATE_MS) expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -214,7 +213,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch({}) @@ -224,9 +223,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -246,30 +246,46 @@ describe("docWritethrough", () => { ) } - const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) - expect(persistToDbSpy).not.toBeCalled() expect(storeToCacheSpy).toBeCalledTimes(5) + expect(await db.exists(documentId)).toBe(false) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(40) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(45) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) + await parallelPatch(10) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(55) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(5) - expect(persistToDbSpy).toBeCalledTimes(2) + await travelForward(WRITE_RATE_MS) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/3-.+/), + 
}) + ) expect(storeToCacheSpy).toBeCalledTimes(60) }) }) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 3205b6f383..f201714903 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -2,6 +2,13 @@ import events from "events" import { timeout } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" +interface JobMessage { + timestamp: number + queue: string + data: any + opts?: JobOptions +} + /** * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs @@ -11,12 +18,12 @@ import { Queue, QueueOptions, JobOptions } from "./queue" * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -28,10 +35,12 @@ function newJob(queue: string, message: any) { class InMemoryQueue implements Partial { _name: string _opts?: QueueOptions - _messages: any[] + _messages: JobMessage[] + _queuedJobIds: Set _emitter: EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. 
@@ -45,6 +54,7 @@ class InMemoryQueue implements Partial { this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -58,19 +68,24 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - const delay = this._opts?.defaultJobOptions?.delay - if (delay) { - await new Promise(r => setTimeout(() => r(), delay)) + try { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } + } catch (e: any) { + throw e } - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ }) } @@ -89,12 +104,31 @@ class InMemoryQueue implements Partial { */ // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job ${jobId}`) + return + } + if (typeof data !== "object") { throw "Queue only supports carrying JSON." 
} - this._messages.push(newJob(this._name, data)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } return {} as any } @@ -143,7 +177,11 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } From fb2c7467b0aa29f99fdb56f541e0e1842d88b5de Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:55:07 +0100 Subject: [PATCH 094/136] Clean --- .../backend-core/src/queue/inMemoryQueue.ts | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index f201714903..6c8107c7a4 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -68,23 +68,19 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - try { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ - const jobId = msg?.opts?.jobId?.toString() - if (jobId && msg?.opts?.removeOnComplete) { - this._queuedJobIds.delete(jobId) - } - } catch (e: any) { - throw e + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) } }) } 
From c4fc94fb280c6409ebcbd97f168277dfd6ce8f9a Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:19:05 +0100 Subject: [PATCH 095/136] Remove defaults and init --- .../backend-core/src/cache/docWritethrough.ts | 52 +++++++------------ .../src/cache/tests/docWritethrough.spec.ts | 3 -- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index f53cfbfe5f..1a16f60eb9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -7,8 +7,6 @@ import { JobQueue, createQueue } from "../queue" import * as context from "../context" import * as dbUtils from "../db" -const DEFAULT_WRITE_RATE_MS = 10000 - let CACHE: BaseCache | null = null async function getCache() { if (!CACHE) { @@ -29,33 +27,27 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -let _init = false -export const init = () => { - if (_init) { - return - } - docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) +docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) } - }) + ) 
+ + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } }) - _init = true -} +}) export async function persistToDb({ dbName, @@ -97,11 +89,7 @@ export class DocWritethrough { private cacheKeyPrefix: string - constructor( - db: Database, - docId: string, - writeRateMs: number = DEFAULT_WRITE_RATE_MS - ) { + constructor(db: Database, docId: string, writeRateMs: number) { this.db = db this._docId = docId this.writeRateMs = writeRateMs diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 83af66a9d2..a5765171cb 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -5,7 +5,6 @@ import _ from "lodash" import { DocWritethrough, docWritethroughProcessorQueue, - init, } from "../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" @@ -45,8 +44,6 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeAll(() => init()) - beforeEach(async () => { resetTime() documentId = structures.uuid() From 37935112a478b064099552b223cbfb23e2ccf2fe Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:47:23 +0100 Subject: [PATCH 096/136] Add comment --- packages/backend-core/src/cache/tests/docWritethrough.spec.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index a5765171cb..3e638a4eec 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -256,6 +256,8 @@ describe("docWritethrough", () => { expect(storeToCacheSpy).toBeCalledTimes(45) + // Ideally we want to spy on persistToDb from ./docWritethrough, but due our barrel files configuration required quite of a complex setup. 
+ // We are relying on the document being stored only once (otherwise we would have _rev updated) expect(await db.get(documentId)).toEqual( expect.objectContaining({ _id: documentId, From e584d82e6f6dd052d828d45152aea29f95e7cb65 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 15:02:02 +0100 Subject: [PATCH 097/136] Improve redlock non executed response --- packages/backend-core/src/cache/docWritethrough.ts | 9 +++++++++ packages/backend-core/src/redis/redlockImpl.ts | 10 +++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 1a16f60eb9..ebb64ee9e5 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -44,6 +44,15 @@ docWritethroughProcessorQueue.process(async message => { ) if (!lockResponse.executed) { + if ( + lockResponse.reason !== + locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce + ) { + console.error("Error persisting docWritethrough", { + data: message.data, + }) + throw "Error persisting docWritethrough" + } console.log(`Ignoring redlock conflict in write-through cache`) } }) diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts index adeb5b12ec..28babb9405 100644 --- a/packages/backend-core/src/redis/redlockImpl.ts +++ b/packages/backend-core/src/redis/redlockImpl.ts @@ -82,6 +82,11 @@ type SuccessfulRedlockExecution = { } type UnsuccessfulRedlockExecution = { executed: false + reason: UnsuccessfulRedlockExecutionReason +} + +export const enum UnsuccessfulRedlockExecutionReason { + LockTakenWithTryOnce = "LOCK_TAKEN_WITH_TRY_ONCE", } type RedlockExecution = @@ -141,7 +146,10 @@ export async function doWithLock( if (opts.type === LockType.TRY_ONCE) { // don't throw for try-once locks, they will always error // due to retry count (0) exceeded - return { executed: false } + 
return { + executed: false, + reason: UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce, + } } else { throw e } From f5e2dc7a2709019752d358228575db130dd5aed0 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:15:50 +0100 Subject: [PATCH 098/136] Do not use lock --- .../backend-core/src/cache/docWritethrough.ts | 37 ++----------------- .../src/cache/tests/docWritethrough.spec.ts | 4 +- 2 files changed, 4 insertions(+), 37 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index ebb64ee9e5..d4d651c688 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,7 +1,6 @@ import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database, LockName, LockType } from "@budibase/types" -import * as locks from "../redis/redlockImpl" +import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as context from "../context" @@ -17,7 +16,6 @@ async function getCache() { } interface ProcessDocMessage { - tenantId: string dbName: string docId: string cacheKeyPrefix: string @@ -28,34 +26,8 @@ export const docWritethroughProcessorQueue = createQueue( ) docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - console.log("DocWritethrough persisted", { data: message.data }) - } - ) - - if (!lockResponse.executed) { - if ( - lockResponse.reason !== - locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce - ) { - console.error("Error persisting docWritethrough", { - data: message.data, - }) - 
throw "Error persisting docWritethrough" - } - console.log(`Ignoring redlock conflict in write-through cache`) - } - }) + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) }) export async function persistToDb({ @@ -94,7 +66,6 @@ export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number - private tenantId: string private cacheKeyPrefix: string @@ -103,7 +74,6 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.tenantId = context.getTenantId() } get docId() { @@ -117,7 +87,6 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 3e638a4eec..9bbcd6af44 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -47,9 +47,7 @@ describe("docWritethrough", () => { beforeEach(async () => { resetTime() documentId = structures.uuid() - await config.doInTenant(async () => { - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) - }) + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) it("patching will not persist if timeout does not hit", async () => { From 6707da4ac21db6eaf0fa1b0fbbac85d4f027e5c8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:06:14 +0100 Subject: [PATCH 099/136] Use bulk --- packages/backend-core/src/cache/base/index.ts | 19 +++++++++++++++++++ .../backend-core/src/cache/docWritethrough.ts | 10 +++++----- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 
911bd6a831..942d70ae72 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -46,6 +46,25 @@ export default class BaseCache { await client.store(key, value, ttl) } + /** + * Bulk write to the cache. + */ + async bulkStore( + data: Record, + ttl: number | null = null, + opts = { useTenancy: true } + ) { + if (opts.useTenancy) { + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[generateTenantKey(key)] = value + return acc + }, {} as Record) + } + + const client = await this.getClient() + await client.bulkStore(data, ttl) + } + /** * Remove from cache. */ diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index d4d651c688..a0bc14ec5c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,7 +3,6 @@ import { getDocWritethroughClient } from "../redis/init" import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" -import * as context from "../context" import * as dbUtils from "../db" let CACHE: BaseCache | null = null @@ -101,9 +100,10 @@ export class DocWritethrough { } private async storeToCache(cache: BaseCache, data: Record) { - for (const [key, value] of Object.entries(data)) { - const cacheKey = this.cacheKeyPrefix + ":data:" + key - await cache.store(cacheKey, { key, value }, undefined) - } + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[this.cacheKeyPrefix + ":data:" + key] = { key, value } + return acc + }, {} as Record) + await cache.bulkStore(data, null) } } From 0a2fb4a3a6b432ed8277738cbee054698911b07e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 23:32:02 +0100 Subject: [PATCH 100/136] Use message id to handle concurrency --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++++++++---- 1 file changed, 69 insertions(+), 19 deletions(-) diff --git 
a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index a0bc14ec5c..3f84f82bc4 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,9 +1,11 @@ import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database } from "@budibase/types" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as dbUtils from "../db" +import { Duration, newid } from "../utils" +import { context, locks } from ".." let CACHE: BaseCache | null = null async function getCache() { @@ -18,6 +20,7 @@ interface ProcessDocMessage { dbName: string docId: string cacheKeyPrefix: string + messageId: string } export const docWritethroughProcessorQueue = createQueue( @@ -25,21 +28,55 @@ export const docWritethroughProcessorQueue = createQueue( ) docWritethroughProcessorQueue.process(async message => { - await persistToDb(message.data) - console.log("DocWritethrough persisted", { data: message.data }) + const { cacheKeyPrefix, messageId } = message.data + + const cache = await getCache() + const latestMessageId = await cache.get( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID + ) + if (messageId !== latestMessageId) { + // Nothing to do, another message overrode it + return + } + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: Duration.fromSeconds(60).toMs(), + }, + async () => { + const latestMessageId = await cache.get( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID + ) + if (messageId !== latestMessageId) { + // Nothing to do, another message overrode it + return + } + + await persistToDb(cache, message.data) + console.log("DocWritethrough persisted", { data: message.data }) + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring 
redlock conflict in write-through cache`) + } }) -export async function persistToDb({ - dbName, - docId, - cacheKeyPrefix, -}: { - dbName: string - docId: string - cacheKeyPrefix: string -}) { - const cache = await getCache() - +export async function persistToDb( + cache: BaseCache, + { + dbName, + docId, + cacheKeyPrefix, + }: { + dbName: string + docId: string + cacheKeyPrefix: string + } +) { const db = dbUtils.getDB(dbName) let doc: AnyDocument | undefined try { @@ -48,7 +85,9 @@ export async function persistToDb({ doc = { _id: docId } } - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + const keysToPersist = await cache.keys( + REDIS_KEYS(cacheKeyPrefix).DATA.GET_ALL + ) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value @@ -83,27 +122,38 @@ export class DocWritethrough { const cache = await getCache() await this.storeToCache(cache, data) + const messageId = newid() + await cache.store( + REDIS_KEYS(this.cacheKeyPrefix).LATEST_MESSAGE_ID, + messageId + ) docWritethroughProcessorQueue.add( { dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, + messageId, }, { delay: this.writeRateMs, - jobId: this.cacheKeyPrefix, - removeOnFail: true, - removeOnComplete: true, } ) } private async storeToCache(cache: BaseCache, data: Record) { data = Object.entries(data).reduce((acc, [key, value]) => { - acc[this.cacheKeyPrefix + ":data:" + key] = { key, value } + acc[REDIS_KEYS(this.cacheKeyPrefix).DATA.VALUE(key)] = { key, value } return acc }, {} as Record) await cache.bulkStore(data, null) } } + +const REDIS_KEYS = (prefix: string) => ({ + DATA: { + VALUE: (key: string) => prefix + ":data:" + key, + GET_ALL: prefix + ":data:*", + }, + LATEST_MESSAGE_ID: prefix + ":info:latestMessageId", +}) From 3dbf0b3a64789022897a044295be5501487e81b9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 23:33:20 +0100 Subject: [PATCH 101/136] Throw errors on lock 
not executed --- .../backend-core/src/cache/docWritethrough.ts | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 3f84f82bc4..14b186271c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -24,7 +24,16 @@ interface ProcessDocMessage { } export const docWritethroughProcessorQueue = createQueue( - JobQueue.DOC_WRITETHROUGH_QUEUE + JobQueue.DOC_WRITETHROUGH_QUEUE, + { + jobOptions: { + attempts: 5, + backoff: { + type: "fixed", + delay: 1000, + }, + }, + } ) docWritethroughProcessorQueue.process(async message => { @@ -41,7 +50,7 @@ docWritethroughProcessorQueue.process(async message => { const lockResponse = await locks.doWithLock( { - type: LockType.TRY_ONCE, + type: LockType.TRY_TWICE, name: LockName.PERSIST_WRITETHROUGH, resource: cacheKeyPrefix, ttl: Duration.fromSeconds(60).toMs(), @@ -61,7 +70,7 @@ docWritethroughProcessorQueue.process(async message => { ) if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + throw new Error(`Ignoring redlock conflict in write-through cache`) } }) From a9046e9f26019904fb0158d142f8bf415214817c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 23:33:57 +0100 Subject: [PATCH 102/136] Use lock name --- packages/backend-core/src/cache/docWritethrough.ts | 2 +- packages/types/src/sdk/locks.ts | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 14b186271c..5454362343 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -51,7 +51,7 @@ docWritethroughProcessorQueue.process(async message => { const lockResponse = await locks.doWithLock( { type: LockType.TRY_TWICE, - name: 
LockName.PERSIST_WRITETHROUGH, + name: LockName.PERSIST_DOC_WRITETHROUGH, resource: cacheKeyPrefix, ttl: Duration.fromSeconds(60).toMs(), }, diff --git a/packages/types/src/sdk/locks.ts b/packages/types/src/sdk/locks.ts index c7c028a135..67de109657 100644 --- a/packages/types/src/sdk/locks.ts +++ b/packages/types/src/sdk/locks.ts @@ -23,6 +23,7 @@ export enum LockName { APP_MIGRATION = "app_migrations", PROCESS_AUTO_COLUMNS = "process_auto_columns", PROCESS_USER_INVITE = "process_user_invite", + PERSIST_DOC_WRITETHROUGH = "persist_doc_writethrough", } export type LockOptions = { From 8e8378d1bebf5d1ea777f9fee755529e04090bcc Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 00:56:30 +0100 Subject: [PATCH 103/136] Delete info on process --- packages/backend-core/src/cache/base/index.ts | 9 +++++++++ packages/backend-core/src/cache/docWritethrough.ts | 5 +++++ 2 files changed, 14 insertions(+) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 942d70ae72..a712ddc263 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -108,4 +108,13 @@ export default class BaseCache { throw err } } + + /** + * Delete the entry if the provided value matches the stored one. + */ + async deleteIfValue(key: string, value: any, opts = { useTenancy: true }) { + key = opts.useTenancy ? 
generateTenantKey(key) : key + const client = await this.getClient() + await client.deleteIfValue(key, value) + } } diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5454362343..e8e0e7beb3 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -66,6 +66,11 @@ docWritethroughProcessorQueue.process(async message => { await persistToDb(cache, message.data) console.log("DocWritethrough persisted", { data: message.data }) + + await cache.deleteIfValue( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID, + latestMessageId + ) } ) From 2b206f2105681140a1079ba49bcc434df1e7f489 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 6 Mar 2024 10:00:02 +0000 Subject: [PATCH 104/136] Fix the TODO I left myself last night. --- packages/backend-core/src/db/Replication.ts | 41 +++++---------------- packages/server/src/api/controllers/role.ts | 25 +++++++------ 2 files changed, 23 insertions(+), 43 deletions(-) diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts index 12c11eb9e2..9c960d76dd 100644 --- a/packages/backend-core/src/db/Replication.ts +++ b/packages/backend-core/src/db/Replication.ts @@ -5,56 +5,33 @@ import { DocumentType } from "../constants" class Replication { source: PouchDB.Database target: PouchDB.Database - replication?: Promise - /** - * - * @param source - the DB you want to replicate or rollback to - * @param target - the DB you want to replicate to, or rollback from - */ constructor({ source, target }: { source: string; target: string }) { this.source = getPouchDB(source) this.target = getPouchDB(target) } - close() { - return Promise.all([closePouchDB(this.source), closePouchDB(this.target)]) + async close() { + await Promise.all([closePouchDB(this.source), closePouchDB(this.target)]) } - promisify(operation: any, opts = {}) { - return new Promise(resolve => { - 
operation(this.target, opts) - .on("denied", function (err: any) { + replicate(opts: PouchDB.Replication.ReplicateOptions = {}) { + return new Promise>(resolve => { + this.source.replicate + .to(this.target, opts) + .on("denied", function (err) { // a document failed to replicate (e.g. due to permissions) throw new Error(`Denied: Document failed to replicate ${err}`) }) - .on("complete", function (info: any) { + .on("complete", function (info) { return resolve(info) }) - .on("error", function (err: any) { + .on("error", function (err) { throw new Error(`Replication Error: ${err}`) }) }) } - /** - * Two way replication operation, intended to be promise based. - * @param opts - PouchDB replication options - */ - sync(opts: PouchDB.Replication.SyncOptions = {}) { - this.replication = this.promisify(this.source.sync, opts) - return this.replication - } - - /** - * One way replication operation, intended to be promise based. - * @param opts - PouchDB replication options - */ - replicate(opts: PouchDB.Replication.ReplicateOptions = {}) { - this.replication = this.promisify(this.source.replicate.to, opts) - return this.replication - } - appReplicateOpts( opts: PouchDB.Replication.ReplicateOptions = {} ): PouchDB.Replication.ReplicateOptions { diff --git a/packages/server/src/api/controllers/role.ts b/packages/server/src/api/controllers/role.ts index 6b62c568e2..84179d8dbc 100644 --- a/packages/server/src/api/controllers/role.ts +++ b/packages/server/src/api/controllers/role.ts @@ -107,17 +107,20 @@ export async function save(ctx: UserCtx) { role._rev = result.rev ctx.body = role - // TODO: need to check that the prod DB actually exists, I think it won't - // if the app has never been published. 
- const replication = new dbCore.Replication({ - source: context.getDevAppDB().name, - target: context.getProdAppDB().name, - }) - await replication.replicate({ - filter: (doc: any, params: any) => { - return doc._id === _id - }, - }) + const devDb = context.getDevAppDB() + const prodDb = context.getProdAppDB() + + if (await prodDb.exists()) { + const replication = new dbCore.Replication({ + source: devDb.name, + target: prodDb.name, + }) + await replication.replicate({ + filter: (doc: any, params: any) => { + return doc._id === _id + }, + }) + } } export async function destroy(ctx: UserCtx) { From b232371efff95f7925c93960ba92862324cb1a46 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 6 Mar 2024 10:01:42 +0000 Subject: [PATCH 105/136] remove uneeded comment --- packages/shared-core/src/tests/filters.test.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index 1e0a68de89..0cf7e0e92a 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -221,8 +221,6 @@ describe("runLuceneQuery", () => { ]) }) - // what should the name of this test be if it's the same test as above but with different operands - it("should return matching results if allOr is true and only one filter matches with different operands", () => { const query = buildQuery({ allOr: true, From eb00ce401f9819406acde58c60018945bc95864e Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 6 Mar 2024 10:10:28 +0000 Subject: [PATCH 106/136] pr comments --- packages/shared-core/src/filters.ts | 7 ++++--- packages/shared-core/src/tests/filters.test.ts | 10 ++++++++++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 0a1673e558..84b6076d56 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -12,6 +12,7 @@ import { 
import dayjs from "dayjs" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" import { deepGet } from "./helpers" +import test from "node:test" const HBS_REGEX = /{{([^{].*?)}}/g @@ -359,6 +360,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { const oneOf = match( SearchQueryOperators.ONE_OF, (docValue: any, testValue: any) => { + console.log(testValue) if (typeof testValue === "string") { testValue = testValue.split(",") if (typeof docValue === "number") { @@ -410,13 +412,13 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { .filter( ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && + value && Object.keys(value as Record).length > 0 ) .map(([key]) => key as any) const results: boolean[] = activeFilterKeys.map(filterKey => { - const filterFunction = filterFunctions[filterKey] - return filterFunction ? filterFunction(doc) : true + return filterFunctions[filterKey]?.(doc) ?? false }) if (query!.allOr) { @@ -425,7 +427,6 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { return results.every(result => result === true) } } - return docs.filter(docMatch) } diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index 0cf7e0e92a..1f8f534f0d 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -240,6 +240,16 @@ describe("runLuceneQuery", () => { expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([]) }) + + it("should handle when a value is null or undefined", () => { + const query = buildQuery({ + allOr: true, + equal: { order_status: null }, + oneOf: { label: ["FRAGILE"] }, + }) + + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2]) + }) }) describe("buildLuceneQuery", () => { From 61c4b83650206626fcbe7ae3c2bf5bd1e76a1bdd Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 11:55:49 +0100 
Subject: [PATCH 107/136] Test with spies --- .../backend-core/src/cache/docWritethrough.ts | 131 +++++++++--------- .../src/cache/tests/docWritethrough.spec.ts | 14 +- 2 files changed, 76 insertions(+), 69 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e8e0e7beb3..af3df11a9c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,11 +1,11 @@ +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as dbUtils from "../db" import { Duration, newid } from "../utils" -import { context, locks } from ".." +import { locks } from ".." let CACHE: BaseCache | null = null async function getCache() { @@ -36,26 +36,12 @@ export const docWritethroughProcessorQueue = createQueue( } ) -docWritethroughProcessorQueue.process(async message => { - const { cacheKeyPrefix, messageId } = message.data +class DocWritethroughProcessor { + init() { + docWritethroughProcessorQueue.process(async message => { + const { cacheKeyPrefix, messageId } = message.data - const cache = await getCache() - const latestMessageId = await cache.get( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID - ) - if (messageId !== latestMessageId) { - // Nothing to do, another message overrode it - return - } - - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_TWICE, - name: LockName.PERSIST_DOC_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: Duration.fromSeconds(60).toMs(), - }, - async () => { + const cache = await getCache() const latestMessageId = await cache.get( REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID ) @@ -64,56 +50,77 @@ docWritethroughProcessorQueue.process(async message => { return } - 
await persistToDb(cache, message.data) - console.log("DocWritethrough persisted", { data: message.data }) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_TWICE, + name: LockName.PERSIST_DOC_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: Duration.fromSeconds(60).toMs(), + }, + async () => { + const latestMessageId = await cache.get( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID + ) + if (messageId !== latestMessageId) { + // Nothing to do, another message overrode it + return + } - await cache.deleteIfValue( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID, - latestMessageId + await this.persistToDb(cache, message.data) + console.log("DocWritethrough persisted", { data: message.data }) + + await cache.deleteIfValue( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID, + latestMessageId + ) + } ) + + if (!lockResponse.executed) { + throw new Error(`Ignoring redlock conflict in write-through cache`) + } + }) + return this + } + + private async persistToDb( + cache: BaseCache, + { + dbName, + docId, + cacheKeyPrefix, + }: { + dbName: string + docId: string + cacheKeyPrefix: string + } + ) { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } } - ) - if (!lockResponse.executed) { - throw new Error(`Ignoring redlock conflict in write-through cache`) - } -}) + const keysToPersist = await cache.keys( + REDIS_KEYS(cacheKeyPrefix).DATA.GET_ALL + ) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } -export async function persistToDb( - cache: BaseCache, - { - dbName, - docId, - cacheKeyPrefix, - }: { - dbName: string - docId: string - cacheKeyPrefix: string - } -) { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } - } + await db.put(doc) - const keysToPersist = await cache.keys( - 
REDIS_KEYS(cacheKeyPrefix).DATA.GET_ALL - ) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } } +export const processor = new DocWritethroughProcessor().init() + export class DocWritethrough { private db: Database private _docId: string diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 9bbcd6af44..b909f4624f 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,11 @@ +import _ from "lodash" import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import _ from "lodash" -import { - DocWritethrough, - docWritethroughProcessorQueue, -} from "../docWritethrough" +import { DocWritethrough, processor } from "../docWritethrough" + import InMemoryQueue from "../../queue/inMemoryQueue" +import { docWritethroughProcessorQueue } from "../docWritethrough" const WRITE_RATE_MS = 1000 @@ -240,12 +239,13 @@ describe("docWritethrough", () => { ) ) } - + const persistToDbSpy = jest.spyOn(processor as any, "persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) expect(storeToCacheSpy).toBeCalledTimes(5) + expect(persistToDbSpy).not.toBeCalled() expect(await db.exists(documentId)).toBe(false) await travelForward(WRITE_RATE_MS) @@ -253,7 +253,7 @@ describe("docWritethrough", () => { await parallelPatch(40) expect(storeToCacheSpy).toBeCalledTimes(45) - + expect(persistToDbSpy).toBeCalledTimes(1) // Ideally we want to spy on persistToDb from ./docWritethrough, but due our 
barrel files configuration required quite of a complex setup. // We are relying on the document being stored only once (otherwise we would have _rev updated) expect(await db.get(documentId)).toEqual( From 1f107041a108aeaf677da20659819bfe2d06ec03 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 6 Mar 2024 11:57:45 +0000 Subject: [PATCH 108/136] use vitest each --- .../shared-core/src/tests/filters.test.ts | 23 +++++++------------ 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index 1f8f534f0d..de969562af 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -209,16 +209,19 @@ describe("runLuceneQuery", () => { } ) - it("should return matching results if allOr is true and only one filter matches", () => { + test.each([ + [false, []], + [true, [1, 2, 3]], + ])("should return %s if allOr is %s ", (allOr, expectedResult) => { const query = buildQuery({ - allOr: true, + allOr, oneOf: { staff_id: [10] }, contains: { description: ["box"] }, }) - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([ - 1, 2, 3, - ]) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual( + expectedResult + ) }) it("should return matching results if allOr is true and only one filter matches with different operands", () => { @@ -231,16 +234,6 @@ describe("runLuceneQuery", () => { expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2]) }) - it("should return nothing if allOr is false and only one filter matches", () => { - const query = buildQuery({ - allOr: false, - oneOf: { staff_id: [10] }, - contains: { description: ["box"] }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([]) - }) - it("should handle when a value is null or undefined", () => { const query = buildQuery({ allOr: true, From 5109477e526139202cb37392f2bba3347e4a82a5 
Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 13:39:27 +0100 Subject: [PATCH 109/136] Fix checks --- packages/backend-core/src/queue/inMemoryQueue.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 6c8107c7a4..985501bcbe 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -173,7 +173,7 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this.hasRunningJobs) + } while (this.hasRunningJobs()) } hasRunningJobs() { From 4ce85cde1afd34872bfe7c401d73cbf77651a660 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 14:07:39 +0100 Subject: [PATCH 110/136] Simplify, use only queues --- .../backend-core/src/cache/docWritethrough.ts | 154 ++++-------------- .../src/cache/tests/docWritethrough.spec.ts | 126 ++++++-------- 2 files changed, 79 insertions(+), 201 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index af3df11a9c..cee272cef6 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,100 +1,55 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" -import BaseCache from "./base" -import { getDocWritethroughClient } from "../redis/init" import { JobQueue, createQueue } from "../queue" import * as dbUtils from "../db" -import { Duration, newid } from "../utils" -import { locks } from ".." - -let CACHE: BaseCache | null = null -async function getCache() { - if (!CACHE) { - const client = await getDocWritethroughClient() - CACHE = new BaseCache(client) - } - return CACHE -} +import { string } from "yargs" +import { db } from ".." 
+import { locks } from "../redis" +import { Duration } from "../utils" interface ProcessDocMessage { dbName: string docId: string - cacheKeyPrefix: string - messageId: string + + data: Record } export const docWritethroughProcessorQueue = createQueue( - JobQueue.DOC_WRITETHROUGH_QUEUE, - { - jobOptions: { - attempts: 5, - backoff: { - type: "fixed", - delay: 1000, - }, - }, - } + JobQueue.DOC_WRITETHROUGH_QUEUE ) class DocWritethroughProcessor { init() { docWritethroughProcessorQueue.process(async message => { - const { cacheKeyPrefix, messageId } = message.data - - const cache = await getCache() - const latestMessageId = await cache.get( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID - ) - if (messageId !== latestMessageId) { - // Nothing to do, another message overrode it - return - } - - const lockResponse = await locks.doWithLock( + const result = await locks.doWithLock( { - type: LockType.TRY_TWICE, + type: LockType.DEFAULT, name: LockName.PERSIST_DOC_WRITETHROUGH, - resource: cacheKeyPrefix, + resource: `${message.data.dbName}:${message.data.docId}`, ttl: Duration.fromSeconds(60).toMs(), }, async () => { - const latestMessageId = await cache.get( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID - ) - if (messageId !== latestMessageId) { - // Nothing to do, another message overrode it - return - } - - await this.persistToDb(cache, message.data) - console.log("DocWritethrough persisted", { data: message.data }) - - await cache.deleteIfValue( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID, - latestMessageId - ) + await this.persistToDb(message.data) } ) - - if (!lockResponse.executed) { - throw new Error(`Ignoring redlock conflict in write-through cache`) + if (!result.executed) { + throw new Error( + `Error persisting docWritethrough message: ${message.id}` + ) } }) return this } - private async persistToDb( - cache: BaseCache, - { - dbName, - docId, - cacheKeyPrefix, - }: { - dbName: string - docId: string - cacheKeyPrefix: string - } - ) { + private async 
persistToDb({ + dbName, + docId, + data, + }: { + dbName: string + docId: string + data: Record + }) { const db = dbUtils.getDB(dbName) let doc: AnyDocument | undefined try { @@ -103,19 +58,8 @@ class DocWritethroughProcessor { doc = { _id: docId } } - const keysToPersist = await cache.keys( - REDIS_KEYS(cacheKeyPrefix).DATA.GET_ALL - ) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - + doc = { ...doc, ...data } await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } } } @@ -124,15 +68,10 @@ export const processor = new DocWritethroughProcessor().init() export class DocWritethrough { private db: Database private _docId: string - private writeRateMs: number - private cacheKeyPrefix: string - - constructor(db: Database, docId: string, writeRateMs: number) { + constructor(db: Database, docId: string) { this.db = db this._docId = docId - this.writeRateMs = writeRateMs - this.cacheKeyPrefix = `${this.db.name}:${this.docId}` } get docId() { @@ -140,41 +79,10 @@ export class DocWritethrough { } async patch(data: Record) { - const cache = await getCache() - - await this.storeToCache(cache, data) - const messageId = newid() - await cache.store( - REDIS_KEYS(this.cacheKeyPrefix).LATEST_MESSAGE_ID, - messageId - ) - - docWritethroughProcessorQueue.add( - { - dbName: this.db.name, - docId: this.docId, - cacheKeyPrefix: this.cacheKeyPrefix, - messageId, - }, - { - delay: this.writeRateMs, - } - ) - } - - private async storeToCache(cache: BaseCache, data: Record) { - data = Object.entries(data).reduce((acc, [key, value]) => { - acc[REDIS_KEYS(this.cacheKeyPrefix).DATA.VALUE(key)] = { key, value } - return acc - }, {} as Record) - await cache.bulkStore(data, null) + await docWritethroughProcessorQueue.add({ + dbName: this.db.name, + docId: this.docId, + data, + }) } } - -const REDIS_KEYS = (prefix: string) => ({ - DATA: { - VALUE: (key: string) 
=> prefix + ":data:" + key, - GET_ALL: prefix + ":data:*", - }, - LATEST_MESSAGE_ID: prefix + ":info:latestMessageId", -}) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index b909f4624f..9beb25df93 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,3 +1,5 @@ +import tk from "timekeeper" + import _ from "lodash" import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" @@ -7,24 +9,11 @@ import { DocWritethrough, processor } from "../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" import { docWritethroughProcessorQueue } from "../docWritethrough" -const WRITE_RATE_MS = 1000 - const initialTime = Date.now() -jest.useFakeTimers({ - now: initialTime, -}) - -function resetTime() { - jest.setSystemTime(initialTime) -} -async function travelForward(ms: number) { - await jest.advanceTimersByTimeAsync(ms) - +async function waitForQueueCompletion() { const queue: InMemoryQueue = docWritethroughProcessorQueue as never - while (queue.hasRunningJobs()) { - await jest.runOnlyPendingTimersAsync() - } + await queue.waitForCompletion() } describe("docWritethrough", () => { @@ -44,30 +33,28 @@ describe("docWritethrough", () => { } beforeEach(async () => { - resetTime() + jest.clearAllMocks() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + docWritethrough = new DocWritethrough(db, documentId) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist until the messages are persisted", async () => { await config.doInTenant(async () => { - await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - await travelForward(WRITE_RATE_MS - 1) 
expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits", async () => { + it("patching will persist when the messages are persisted", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() // This will not be persisted const patch3 = generatePatchObject(3) @@ -77,9 +64,9 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), - updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + _rev: expect.stringMatching(/2-.+/), + createdAt: new Date(initialTime).toISOString(), + updatedAt: new Date(initialTime).toISOString(), }) }) }) @@ -91,12 +78,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -114,12 +101,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - await travelForward(WRITE_RATE_MS) const date1 = new Date() + await waitForQueueCompletion() await docWritethrough.patch(patch2) - await travelForward(WRITE_RATE_MS) + tk.travel(Date.now() + 100) const date2 = new Date() + await waitForQueueCompletion() expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( @@ -135,7 +123,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await 
docWritethrough.patch(patch1) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -146,14 +134,14 @@ describe("docWritethrough", () => { }) ) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -169,8 +157,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, - structures.db.id(), - WRITE_RATE_MS + structures.db.id() ) const doc1Patch = generatePatchObject(2) @@ -178,13 +165,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -207,9 +194,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - await travelForward(WRITE_RATE_MS) - - await docWritethrough.patch({}) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( expect.objectContaining(initialPatch) @@ -217,10 +202,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() 
expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -231,59 +216,44 @@ describe("docWritethrough", () => { }) }) - it("concurrent calls will not cause multiple saves", async () => { + it("concurrent calls will not cause conflicts", async () => { async function parallelPatch(count: number) { - await Promise.all( - Array.from({ length: count }).map(() => - docWritethrough.patch(generatePatchObject(1)) - ) + const patches = Array.from({ length: count }).map(() => + generatePatchObject(1) ) + await Promise.all(patches.map(p => docWritethrough.patch(p))) + + return patches.reduce((acc, c) => { + acc = { ...acc, ...c } + return acc + }, {}) } - const persistToDbSpy = jest.spyOn(processor as any, "persistToDb") - const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + const queueMessageSpy = jest.spyOn(docWritethroughProcessorQueue, "add") await config.doInTenant(async () => { - await parallelPatch(5) - expect(storeToCacheSpy).toBeCalledTimes(5) - expect(persistToDbSpy).not.toBeCalled() - expect(await db.exists(documentId)).toBe(false) + let patches = await parallelPatch(5) + expect(queueMessageSpy).toBeCalledTimes(5) - await travelForward(WRITE_RATE_MS) - - await parallelPatch(40) - - expect(storeToCacheSpy).toBeCalledTimes(45) - expect(persistToDbSpy).toBeCalledTimes(1) - // Ideally we want to spy on persistToDb from ./docWritethrough, but due our barrel files configuration required quite of a complex setup. 
- // We are relying on the document being stored only once (otherwise we would have _rev updated) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( - expect.objectContaining({ - _id: documentId, - _rev: expect.stringMatching(/1-.+/), - }) + expect.objectContaining(patches) ) - await parallelPatch(10) + patches = { ...patches, ...(await parallelPatch(40)) } + expect(queueMessageSpy).toBeCalledTimes(45) - expect(storeToCacheSpy).toBeCalledTimes(55) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( - expect.objectContaining({ - _id: documentId, - _rev: expect.stringMatching(/1-.+/), - }) + expect.objectContaining(patches) ) - await travelForward(WRITE_RATE_MS) + patches = { ...patches, ...(await parallelPatch(10)) } + expect(queueMessageSpy).toBeCalledTimes(55) - await parallelPatch(5) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( - expect.objectContaining({ - _id: documentId, - _rev: expect.stringMatching(/3-.+/), - }) + expect.objectContaining(patches) ) - expect(storeToCacheSpy).toBeCalledTimes(60) }) }) }) From a44faad046f17102fe68b9c4fffb72bace3318b6 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 15:37:38 +0100 Subject: [PATCH 111/136] Add same key tests --- .../src/cache/tests/docWritethrough.spec.ts | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 9beb25df93..2b66b6cc21 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -256,5 +256,30 @@ describe("docWritethrough", () => { ) }) }) + + it("patches will execute in order", async () => { + let incrementalValue = 0 + const keyToOverride = generator.word() + async function incrementalPatches(count: number) { + for (let i = 0; i < count; 
i++) { + await docWritethrough.patch({ [keyToOverride]: incrementalValue++ }) + } + } + + await config.doInTenant(async () => { + await incrementalPatches(5) + + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ [keyToOverride]: 5 }) + ) + + await incrementalPatches(40) + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ [keyToOverride]: 45 }) + ) + }) + }) }) }) From 632b9a26f4313216c28458db46dc9334aea7e909 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 6 Mar 2024 14:42:30 +0000 Subject: [PATCH 112/136] remove log --- packages/shared-core/src/filters.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 84b6076d56..d9fe533c88 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -12,7 +12,6 @@ import { import dayjs from "dayjs" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" import { deepGet } from "./helpers" -import test from "node:test" const HBS_REGEX = /{{([^{].*?)}}/g @@ -360,7 +359,6 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { const oneOf = match( SearchQueryOperators.ONE_OF, (docValue: any, testValue: any) => { - console.log(testValue) if (typeof testValue === "string") { testValue = testValue.split(",") if (typeof docValue === "number") { From 10ac21525ba205ce7dc138426b545befa3b030a0 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 6 Mar 2024 14:58:34 +0000 Subject: [PATCH 113/136] Update submodules. 
--- packages/account-portal | 2 +- packages/pro | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/account-portal b/packages/account-portal index 4de0d98e2f..0c050591c2 160000 --- a/packages/account-portal +++ b/packages/account-portal @@ -1 +1 @@ -Subproject commit 4de0d98e2f8d80ee7631dffe076063273812a441 +Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac diff --git a/packages/pro b/packages/pro index 60e47a8249..22a278da72 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 60e47a8249fd6291a6bc20fe3fe6776b11938fa1 +Subproject commit 22a278da720d92991dabdcd4cb6c96e7abe29781 From 1b387d359c669b9672ab1de10e2811bb72f26811 Mon Sep 17 00:00:00 2001 From: Conor Webb <126772285+ConorWebb96@users.noreply.github.com> Date: Wed, 6 Mar 2024 16:32:00 +0000 Subject: [PATCH 114/136] Added icon to button component, reworked icon display code. (#12624) * Added icons to buttons, removed svg code added icon component code. * Added icon functionality to button group component. * Added gap to button manifest * Added gap to button setitngs. * Added gap setting to ButtonGroup component * Added the ability to clear the selected icon. 
* Added enter search to icon select * Removed use:styleable as its for the button * Moved non internal props up * Fixed broken DynamicFilter component icon * Updated DynamicFilter icon to a better suited one --------- Co-authored-by: melohagan <101575380+melohagan@users.noreply.github.com> --- .../controls/IconSelect/IconSelect.svelte | 17 ++++++++-- packages/client/manifest.json | 32 +++++++++++++++++++ .../client/src/components/app/Button.svelte | 24 +++++++------- .../src/components/app/ButtonGroup.svelte | 4 ++- .../app/dynamic-filter/DynamicFilter.svelte | 4 +-- 5 files changed, 65 insertions(+), 16 deletions(-) diff --git a/packages/builder/src/components/design/settings/controls/IconSelect/IconSelect.svelte b/packages/builder/src/components/design/settings/controls/IconSelect/IconSelect.svelte index 0c68c3c3e6..a28f5cfb3b 100644 --- a/packages/builder/src/components/design/settings/controls/IconSelect/IconSelect.svelte +++ b/packages/builder/src/components/design/settings/controls/IconSelect/IconSelect.svelte @@ -139,10 +139,22 @@ {/each}
-
- +
+ { + if (event.key === "Enter") { + searchForIcon() + } + }} + thin + placeholder="Search Icon" + />
+ {#if value} + + {/if}
@@ -239,6 +251,7 @@ flex-flow: row nowrap; width: 100%; padding-right: 15px; + gap: 10px; } .input-wrapper { width: 510px; diff --git a/packages/client/manifest.json b/packages/client/manifest.json index 43b75ebe26..10f9c5f412 100644 --- a/packages/client/manifest.json +++ b/packages/client/manifest.json @@ -525,6 +525,38 @@ "barTitle": "Disable button", "key": "disabled" }, + { + "type": "icon", + "label": "Icon", + "key": "icon" + }, + { + "type": "select", + "label": "Gap", + "key": "gap", + "showInBar": true, + "barStyle": "picker", + "dependsOn": "icon", + "options": [ + { + "label": "None", + "value": "N" + }, + { + "label": "Small", + "value": "S" + }, + { + "label": "Medium", + "value": "M" + }, + { + "label": "Large", + "value": "L" + } + ], + "defaultValue": "M" + }, { "type": "event", "label": "On click", diff --git a/packages/client/src/components/app/Button.svelte b/packages/client/src/components/app/Button.svelte index 361e64a983..c43face1bb 100644 --- a/packages/client/src/components/app/Button.svelte +++ b/packages/client/src/components/app/Button.svelte @@ -13,9 +13,10 @@ export let size = "M" export let type = "cta" export let quiet = false + export let icon = null + export let gap = "M" // For internal use only for now - not defined in the manifest - export let icon = null export let active = false const handleOnClick = async () => { @@ -47,7 +48,7 @@ {#key $component.editing} @@ -92,4 +85,13 @@ .active { color: var(--spectrum-global-color-blue-600); } + .gap-S { + gap: 8px; + } + .gap-M { + gap: 16px; + } + .gap-L { + gap: 32px; + } diff --git a/packages/client/src/components/app/ButtonGroup.svelte b/packages/client/src/components/app/ButtonGroup.svelte index 3ee703e253..2cf6b3db7d 100644 --- a/packages/client/src/components/app/ButtonGroup.svelte +++ b/packages/client/src/components/app/ButtonGroup.svelte @@ -20,7 +20,7 @@ wrap: true, }} > - {#each buttons as { text, type, quiet, disabled, onClick, size }} + {#each buttons as { text, type, 
quiet, disabled, onClick, size, icon, gap }} diff --git a/packages/client/src/components/app/dynamic-filter/DynamicFilter.svelte b/packages/client/src/components/app/dynamic-filter/DynamicFilter.svelte index 199a6122ab..549574e89b 100644 --- a/packages/client/src/components/app/dynamic-filter/DynamicFilter.svelte +++ b/packages/client/src/components/app/dynamic-filter/DynamicFilter.svelte @@ -92,9 +92,9 @@ {#if schemaLoaded}