From 76b9cbcc5feb599017a43cacbd0f665c9bab7fd8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 11:22:27 +0100 Subject: [PATCH 01/62] Create docWriteThrough redis cache --- packages/backend-core/src/redis/init.ts | 13 ++++++++++++- packages/backend-core/src/redis/utils.ts | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..7920dfed2d 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = await new Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ export async function init() { utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + ).init() } export async function shutdown() { @@ -104,3 +108,10 @@ export async function getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if (!docWritethroughClient) { + await init() + } + return docWritethroughClient +} diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** From ff7c8d3b9546fc60424fb8cf24f8ab4615416f27 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:44:52 +0100 Subject: [PATCH 02/62] DocWritethrough --- .../backend-core/src/cache/docWritethrough.ts | 102 ++++++++++++++++++ 
.../backend-core/src/db/couch/DatabaseImpl.ts | 9 ++ .../backend-core/src/db/instrumentation.ts | 7 ++ packages/types/src/sdk/db.ts | 1 + 4 files changed, 119 insertions(+) create mode 100644 packages/backend-core/src/cache/docWritethrough.ts diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts new file mode 100644 index 0000000000..9e1977f797 --- /dev/null +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -0,0 +1,102 @@ +import BaseCache from "./base" +import { getDocWritethroughClient } from "../redis/init" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" + +const DEFAULT_WRITE_RATE_MS = 10000 + +let CACHE: BaseCache | null = null +async function getCache() { + if (!CACHE) { + const client = await getDocWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} + +interface CacheItem { + lastWrite: number +} + +export class DocWritethrough { + db: Database + docId: string + writeRateMs: number + + constructor( + db: Database, + docId: string, + writeRateMs: number = DEFAULT_WRITE_RATE_MS + ) { + this.db = db + this.docId = docId + this.writeRateMs = writeRateMs + } + + private makeCacheItem(): CacheItem { + return { lastWrite: Date.now() } + } + + async patch(data: Record) { + const cache = await getCache() + + const key = `${this.docId}:info` + const cacheItem = await cache.withCache( + key, + null, + () => this.makeCacheItem(), + { + useTenancy: false, + } + ) + + await this.storeToCache(cache, data) + + const updateDb = + !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs + // let output = this.doc + if (updateDb) { + await this.persistToDb(cache) + } + } + + private async storeToCache(cache: BaseCache, data: Record) { + for (const [key, value] of Object.entries(data)) { + const cacheKey = this.docId + ":data:" + key + await cache.store(cacheKey, { key, value }, undefined) + } + } + 
+ private async persistToDb(cache: BaseCache) { + const key = `${this.db.name}_${this.docId}` + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 15000, + }, + async () => { + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } + + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await this.db.put(doc) + } + ) + + if (!lockResponse.executed) { + throw `DocWriteThrough could not be persisted to db for ${key}` + } + } +} diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 0e2b4173b0..221399325d 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -134,6 +134,15 @@ export class DatabaseImpl implements Database { }) } + async docExists(id: string): Promise { + try { + await this.get(id) + return true + } catch { + return false + } + } + async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index aa2ac424ae..92bd55406f 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -38,6 +38,13 @@ export class DDInstrumentedDatabase implements Database { }) } + docExists(id: string): Promise { + return tracer.trace("db.docExists", span => { + span?.addTags({ db_name: this.name, doc_id: id }) + return this.db.docExists(id) + }) + } + getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index 9e44a4827f..4ae0869156 100644 --- 
a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -122,6 +122,7 @@ export interface Database { exists(): Promise get(id?: string): Promise + docExists(id: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 7d50a70d039c3e8308ac3a04f7a1ad32b4383b7e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:47:18 +0100 Subject: [PATCH 03/62] USe get for doc exists --- packages/backend-core/src/cache/base/index.ts | 2 +- packages/backend-core/src/db/couch/DatabaseImpl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 264984c6a5..23c952c7b2 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -60,7 +60,7 @@ export default class BaseCache { */ async withCache( key: string, - ttl: number, + ttl: number | null = null, fetchFn: any, opts = { useTenancy: true } ) { diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 221399325d..6be53a9c54 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -136,7 +136,7 @@ export class DatabaseImpl implements Database { async docExists(id: string): Promise { try { - await this.get(id) + await this.performCall(db => () => db.head(id)) return true } catch { return false From 3af2da3b7df8f14cda67879f5942dfc7404ad0e8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:17:18 +0100 Subject: [PATCH 04/62] DatabaseImpl.docExists test --- .../src/db/tests/DatabaseImpl.spec.ts | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 packages/backend-core/src/db/tests/DatabaseImpl.spec.ts diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts new file mode 
100644 index 0000000000..140ecf4f2c --- /dev/null +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -0,0 +1,55 @@ +import _ from "lodash" +import { AnyDocument } from "@budibase/types" +import { generator } from "../../../tests" +import { DatabaseImpl } from "../couch" +import { newid } from "../../utils" + +describe("DatabaseImpl", () => { + const database = new DatabaseImpl(generator.word()) + const documents: AnyDocument[] = [] + + beforeAll(async () => { + const docsToCreate = Array.from({ length: 10 }).map(() => ({ + _id: newid(), + })) + const createdDocs = await database.bulkDocs(docsToCreate) + + documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) + }) + + describe("docExists", () => { + it("can check existing docs by id", async () => { + const existingDoc = _.sample(documents) + const result = await database.docExists(existingDoc!._id!) + + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.docExists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + + const results = [] + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.docExists(id)).toBe(true) + + await database.remove(existingDoc!) 
+ expect(await database.docExists(id)).toBe(false) + }) + }) +}) From 40d7a0a7413325104a49dd7e3a880fe2462ed0b4 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:23:32 +0100 Subject: [PATCH 05/62] docWritethrough test --- .../src/cache/tests/docWritethrough.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 packages/backend-core/src/cache/tests/docWritethrough.spec.ts diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..bfb1da5f1c --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,47 @@ +import tk from "timekeeper" +import { env } from "../.." +import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" +import { DocWritethrough } from "../docWritethrough" +import _ from "lodash" + +env._set("MOCK_REDIS", null) + +const initialTime = Date.now() + +const WRITE_RATE_MS = 500 + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(() => { + tk.freeze(initialTime) + documentId = structures.db.id() + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) + + it("patching will not persist until timeout is hit", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + tk.travel(Date.now() + WRITE_RATE_MS - 1) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await 
db.docExists(documentId)).toBe(false) + }) + }) + }) +}) From dc4d1fdbda5eb822b1c8a5a14dcc08076ec066df Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:28:35 +0100 Subject: [PATCH 06/62] Add persisting tests --- .../src/cache/tests/docWritethrough.spec.ts | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bfb1da5f1c..ab0de53bee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -33,7 +33,7 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) - it("patching will not persist until timeout is hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) @@ -43,5 +43,42 @@ describe("docWritethrough", () => { expect(await db.docExists(documentId)).toBe(false) }) }) + + it("patching will persist if timeout hits and next patch is called", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + _rev: expect.stringMatching(/1-.+/), + createdAt: new Date(initialTime + 500).toISOString(), + updatedAt: new Date(initialTime + 500).toISOString(), + }) + }) + }) + + it("patching will not persist even if timeout hits but next patch is not callec", async () => { + await config.doInTenant(async () => { 
+ const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) }) }) From 3ec00524811a2734e1c2601cae728df141b024ff Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:41:26 +0100 Subject: [PATCH 07/62] Add extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 86 ++++++++++++++++--- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index ab0de53bee..16e47ce3c3 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -7,9 +7,17 @@ import _ from "lodash" env._set("MOCK_REDIS", null) +const WRITE_RATE_MS = 500 + const initialTime = Date.now() -const WRITE_RATE_MS = 500 +function resetTime() { + tk.travel(initialTime) +} +function travelForward(ms: number) { + const updatedTime = Date.now() + ms + tk.travel(updatedTime) +} describe("docWritethrough", () => { const config = new DBTestConfiguration() @@ -28,7 +36,7 @@ describe("docWritethrough", () => { } beforeEach(() => { - tk.freeze(initialTime) + resetTime() documentId = structures.db.id() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -37,7 +45,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS - 1) + travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) expect(await db.docExists(documentId)).toBe(false) @@ -51,7 +59,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - 
tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -62,23 +70,79 @@ describe("docWritethrough", () => { ...patch2, ...patch3, _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + 500).toISOString(), - updatedAt: new Date(initialTime + 500).toISOString(), + createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), }) }) }) + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + travelForward(WRITE_RATE_MS) + const date1 = new Date() + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + const date2 = new Date() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: date2.toISOString(), + }) + ) + }) + }) + it("patching will not persist even if timeout hits but next patch is not callec", async () => { await config.doInTenant(async () => { - const patch1 = generatePatchObject(2) - const patch2 = generatePatchObject(2) - await docWritethrough.patch(patch1) - await docWritethrough.patch(patch2) + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) expect(await db.docExists(documentId)).toBe(false) }) }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const time1 = travelForward(WRITE_RATE_MS) + const patch2 = generatePatchObject(1) + await 
docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + travelForward(WRITE_RATE_MS) + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) }) }) From 720d5a41052179da6c734b5edb10e63b6e6d8436 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:51:42 +0100 Subject: [PATCH 08/62] Test concurrency --- .../backend-core/src/cache/docWritethrough.ts | 12 ++++-- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++++++++++++- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 9e1977f797..13a85a0d84 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -19,9 +19,9 @@ interface CacheItem { } export class DocWritethrough { - db: Database - docId: string - writeRateMs: number + private db: Database + private _docId: string + private writeRateMs: number constructor( db: Database, @@ -29,10 +29,14 @@ export class DocWritethrough { writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { this.db = db - this.docId = docId + this._docId = docId this.writeRateMs = writeRateMs } + get docId() { + return this._docId + } + private makeCacheItem(): CacheItem { return { lastWrite: Date.now() } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 16e47ce3c3..aed87499ee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -41,8 +41,9 @@ 
describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist if timeout from the creation does not hit", async () => { await config.doInTenant(async () => { + travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) travelForward(WRITE_RATE_MS - 1) @@ -116,7 +117,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - const time1 = travelForward(WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -144,5 +145,41 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id(), + WRITE_RATE_MS + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + travelForward(WRITE_RATE_MS) + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) }) }) From 3068e58c31db762fd9abd77c2c3665f8be181645 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 16:48:16 +0100 Subject: [PATCH 09/62] Ensure keys are removed --- .../backend-core/src/cache/docWritethrough.ts | 4 
+++ .../src/cache/tests/docWritethrough.spec.ts | 28 +++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 13a85a0d84..bde93182a9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -96,6 +96,10 @@ export class DocWritethrough { } await this.db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } ) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index aed87499ee..65e9450f62 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -181,5 +181,33 @@ describe("docWritethrough", () => { ) }) }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + travelForward(WRITE_RATE_MS) + + await docWritethrough.patch({}) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + travelForward(WRITE_RATE_MS) + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) }) }) From 6b8f67ed417fd0405ebc8d71bc3c62639beb67fb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 17:01:16 +0100 Subject: [PATCH 10/62] Extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts 
b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 65e9450f62..974494d1c9 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -77,6 +77,35 @@ describe("docWritethrough", () => { }) }) + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + travelForward(WRITE_RATE_MS) + + const patch4 = generatePatchObject(3) + await docWritethrough.patch(patch4) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + ...patch4, + }) + ) + }) + }) + it("date audit fields are set correctly when persisting", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) From 66751728bbc7ba32ed98b4b53afe8fad909cf72e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 10:53:18 +0100 Subject: [PATCH 11/62] Fixes and tests --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++---------- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++- 2 files changed, 82 insertions(+), 47 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index bde93182a9..80063e4772 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,8 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private docInfoCacheKey: string + constructor( db: Database, docId: string, @@ -31,6 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs 
= writeRateMs + this.docInfoCacheKey = `${this.docId}:info` } get docId() { @@ -44,26 +47,39 @@ export class DocWritethrough { async patch(data: Record) { const cache = await getCache() - const key = `${this.docId}:info` - const cacheItem = await cache.withCache( - key, - null, - () => this.makeCacheItem(), - { - useTenancy: false, - } - ) - await this.storeToCache(cache, data) - const updateDb = - !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs - // let output = this.doc + const updateDb = await this.shouldUpdateDb(cache) + if (updateDb) { - await this.persistToDb(cache) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: this.docInfoCacheKey, + ttl: 15000, + }, + async () => { + if (await this.shouldUpdateDb(cache)) { + await this.persistToDb(cache) + await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } } } + private async shouldUpdateDb(cache: BaseCache) { + const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => + this.makeCacheItem() + ) + return cacheItem.lastWrite <= Date.now() - this.writeRateMs + } + private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { const cacheKey = this.docId + ":data:" + key @@ -72,39 +88,23 @@ export class DocWritethrough { } private async persistToDb(cache: BaseCache) { - const key = `${this.db.name}_${this.docId}` + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: key, - ttl: 15000, - }, - async () => { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } + const 
keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } + await this.db.put(doc) - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } - ) - - if (!lockResponse.executed) { - throw `DocWriteThrough could not be persisted to db for ${key}` + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) } } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 974494d1c9..bca781e377 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,10 @@ import tk from "timekeeper" -import { env } from "../.." 
+ import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -env._set("MOCK_REDIS", null) - const WRITE_RATE_MS = 500 const initialTime = Date.now() @@ -238,5 +236,42 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent calls will not cause multiple saves", async () => { + async function parallelPatch(count: number) { + await Promise.all( + Array.from({ length: count }).map(() => + docWritethrough.patch(generatePatchObject(1)) + ) + ) + } + + const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") + const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + + await config.doInTenant(async () => { + await parallelPatch(5) + expect(persistToDbSpy).not.toBeCalled() + expect(storeToCacheSpy).toBeCalledTimes(5) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(40) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(45) + + await parallelPatch(10) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(55) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(5) + expect(persistToDbSpy).toBeCalledTimes(2) + expect(storeToCacheSpy).toBeCalledTimes(60) + }) + }) }) }) From 2b7c988823384b60201a3122b1081a55200b2157 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:30 +0100 Subject: [PATCH 12/62] Making code more readable --- .../backend-core/src/cache/docWritethrough.ts | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 80063e4772..5148950c1d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -15,7 +15,7 @@ async function getCache() { } interface CacheItem { - lastWrite: number + nextWrite: 
number } export class DocWritethrough { @@ -40,8 +40,8 @@ export class DocWritethrough { return this._docId } - private makeCacheItem(): CacheItem { - return { lastWrite: Date.now() } + private makeNextWriteInfoItem(): CacheItem { + return { nextWrite: Date.now() + this.writeRateMs } } async patch(data: Record) { @@ -62,7 +62,10 @@ export class DocWritethrough { async () => { if (await this.shouldUpdateDb(cache)) { await this.persistToDb(cache) - await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + await cache.store( + this.docInfoCacheKey, + this.makeNextWriteInfoItem() + ) } } ) @@ -75,9 +78,9 @@ export class DocWritethrough { private async shouldUpdateDb(cache: BaseCache) { const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeCacheItem() + this.makeNextWriteInfoItem() ) - return cacheItem.lastWrite <= Date.now() - this.writeRateMs + return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { From ff7c784342ba79f994a15500984ab6668efef635 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:55 +0100 Subject: [PATCH 13/62] Type caches --- packages/backend-core/src/cache/base/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 23c952c7b2..911bd6a831 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -58,12 +58,12 @@ export default class BaseCache { /** * Read from the cache. Write to the cache if not exists. 
*/ - async withCache( + async withCache( key: string, ttl: number | null = null, - fetchFn: any, + fetchFn: () => Promise | T, opts = { useTenancy: true } - ) { + ): Promise { const cachedValue = await this.get(key, opts) if (cachedValue) { return cachedValue From 1c171215680e99a07848168f9579557f3988bd15 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:12:31 +0100 Subject: [PATCH 14/62] Fix types --- packages/backend-core/src/cache/generic.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts index 3ac323a8d4..2d6d8b9472 100644 --- a/packages/backend-core/src/cache/generic.ts +++ b/packages/backend-core/src/cache/generic.ts @@ -26,7 +26,8 @@ export const store = (...args: Parameters) => GENERIC.store(...args) export const destroy = (...args: Parameters) => GENERIC.delete(...args) -export const withCache = (...args: Parameters) => - GENERIC.withCache(...args) +export const withCache = ( + ...args: Parameters> +) => GENERIC.withCache(...args) export const bustCache = (...args: Parameters) => GENERIC.bustCache(...args) From 3a341338a197a13f76993fbc372baf68566cefe9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:07:27 +0100 Subject: [PATCH 15/62] Log requests --- packages/backend-core/src/cache/index.ts | 1 + packages/pro | 2 +- packages/types/src/documents/document.ts | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/index.ts b/packages/backend-core/src/cache/index.ts index 4fa986e4e2..3b25108634 100644 --- a/packages/backend-core/src/cache/index.ts +++ b/packages/backend-core/src/cache/index.ts @@ -5,3 +5,4 @@ export * as writethrough from "./writethrough" export * as invite from "./invite" export * as passwordReset from "./passwordReset" export * from "./generic" +export * as docWritethrough from "./docWritethrough" diff --git a/packages/pro b/packages/pro index 
183b35d3ac..c83fbd01f5 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec +Subproject commit c83fbd01f50872eedb772fba9a90d79650403126 diff --git a/packages/types/src/documents/document.ts b/packages/types/src/documents/document.ts index 18feb9b518..0de4337f4b 100644 --- a/packages/types/src/documents/document.ts +++ b/packages/types/src/documents/document.ts @@ -38,6 +38,7 @@ export enum DocumentType { AUTOMATION_METADATA = "meta_au", AUDIT_LOG = "al", APP_MIGRATION_METADATA = "_design/migrations", + SCIM_LOG = "scimlog", } // these are the core documents that make up the data, design From 4e53cb5143de0c7cc8a947889faf558dfa7c40c7 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:09:42 +0100 Subject: [PATCH 16/62] Flags --- packages/backend-core/src/environment.ts | 1 + packages/pro | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index b3179cbeea..2da2a77d67 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -186,6 +186,7 @@ const environment = { environment[key] = value }, ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M", + DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS, } // clean up any environment variable edge cases diff --git a/packages/pro b/packages/pro index c83fbd01f5..35c46cc6c5 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit c83fbd01f50872eedb772fba9a90d79650403126 +Subproject commit 35c46cc6c5f4a6d6f874ec1b51a042cb28d237da From 1c701fa81ed8f58d5f5e8db5c4cf5e08e420a899 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:30:43 +0100 Subject: [PATCH 17/62] Log responses --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 35c46cc6c5..4f8998c4be 160000 --- a/packages/pro +++ b/packages/pro @@ 
-1 +1 @@ -Subproject commit 35c46cc6c5f4a6d6f874ec1b51a042cb28d237da +Subproject commit 4f8998c4be4642a0fe55011514462235edbac7b8 From 93e462b8c769881150026b6c6e9e7048daa9a8e9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:38:48 +0100 Subject: [PATCH 18/62] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5148950c1d..e46c763906 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -33,7 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.docId}:info` + this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` } get docId() { From 2da5cb3ddbf0f4844bb259d83960243c1612a2eb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:41:40 +0100 Subject: [PATCH 19/62] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e46c763906..e367c9e060 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,7 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private cacheKeyPrefix: string private docInfoCacheKey: string constructor( @@ -33,7 +34,8 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` + this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } get docId() { @@ -85,7 +87,7 @@ export class 
DocWritethrough { private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { - const cacheKey = this.docId + ":data:" + key + const cacheKey = this.cacheKeyPrefix + ":data:" + key await cache.store(cacheKey, { key, value }, undefined) } } @@ -98,7 +100,7 @@ export class DocWritethrough { doc = { _id: this.docId } } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) + const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value From 4ff2b36553c8b76c9fed6b37989e91ca8618fb34 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:59:51 +0100 Subject: [PATCH 20/62] Use overloads --- .../src/cache/tests/docWritethrough.spec.ts | 6 ++-- .../backend-core/src/db/couch/DatabaseImpl.ts | 28 ++++++++++++------- .../backend-core/src/db/instrumentation.ts | 14 ++++------ .../src/db/tests/DatabaseImpl.spec.ts | 16 +++++------ packages/types/src/sdk/db.ts | 2 +- 5 files changed, 35 insertions(+), 31 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bca781e377..4c4a4b2b60 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -35,7 +35,7 @@ describe("docWritethrough", () => { beforeEach(() => { resetTime() - documentId = structures.db.id() + documentId = structures.uuid() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -47,7 +47,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) @@ -136,7 +136,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS) - 
expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 9d198e4307..416313f520 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -70,7 +70,15 @@ export class DatabaseImpl implements Database { DatabaseImpl.nano = buildNano(couchInfo) } - async exists() { + exists(docId?: string) { + if (docId === undefined) { + return this.dbExists() + } + + return this.docExists(docId) + } + + private async dbExists() { const response = await directCouchUrlCall({ url: `${this.couchInfo.url}/${this.name}`, method: "HEAD", @@ -79,6 +87,15 @@ export class DatabaseImpl implements Database { return response.status === 200 } + private async docExists(id: string): Promise { + try { + await this.performCall(db => () => db.head(id)) + return true + } catch { + return false + } + } + private nano() { return this.instanceNano || DatabaseImpl.nano } @@ -135,15 +152,6 @@ export class DatabaseImpl implements Database { }) } - async docExists(id: string): Promise { - try { - await this.performCall(db => () => db.head(id)) - return true - } catch { - return false - } - } - async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 87af0e3127..795f30d7cd 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database { return this.db.name } - exists(): Promise { + exists(docId?: string): Promise { return tracer.trace("db.exists", span => { - span?.addTags({ db_name: this.name }) + span?.addTags({ db_name: this.name, doc_id: docId }) + if (docId) { + return this.db.exists(docId) + } return this.db.exists() 
}) } @@ -38,13 +41,6 @@ export class DDInstrumentedDatabase implements Database { }) } - docExists(id: string): Promise { - return tracer.trace("db.docExists", span => { - span?.addTags({ db_name: this.name, doc_id: id }) - return this.db.docExists(id) - }) - } - getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts index 140ecf4f2c..586f13f417 100644 --- a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -17,16 +17,16 @@ describe("DatabaseImpl", () => { documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) }) - describe("docExists", () => { + describe("document exists", () => { it("can check existing docs by id", async () => { const existingDoc = _.sample(documents) - const result = await database.docExists(existingDoc!._id!) + const result = await database.exists(existingDoc!._id!) expect(result).toBe(true) }) it("can check non existing docs by id", async () => { - const result = await database.docExists(newid()) + const result = await database.exists(newid()) expect(result).toBe(false) }) @@ -36,9 +36,9 @@ describe("DatabaseImpl", () => { const id = existingDoc!._id! const results = [] - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) expect(results).toEqual([true, true, true]) }) @@ -46,10 +46,10 @@ describe("DatabaseImpl", () => { it("returns false after the doc is deleted", async () => { const existingDoc = _.sample(documents) const id = existingDoc!._id! - expect(await database.docExists(id)).toBe(true) + expect(await database.exists(id)).toBe(true) await database.remove(existingDoc!) 
- expect(await database.docExists(id)).toBe(false) + expect(await database.exists(id)).toBe(false) }) }) }) diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index dafc9ced57..4d103d5be6 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,7 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise - docExists(id: string): Promise + exists(docId: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 824dd1c1fc601bf890e9390f63b4943303518d15 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:38:45 +0100 Subject: [PATCH 21/62] Type inMemoryQueue --- .../backend-core/src/queue/inMemoryQueue.ts | 36 ++++++++++--------- packages/backend-core/src/queue/queue.ts | 2 ++ 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..3205b6f383 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,6 @@ import events from "events" import { timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -24,9 +25,9 @@ function newJob(queue: string, message: any) { * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. 
*/ -class InMemoryQueue { +class InMemoryQueue implements Partial { _name: string - _opts?: any + _opts?: QueueOptions _messages: any[] _emitter: EventEmitter _runCount: number @@ -37,7 +38,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] @@ -55,8 +56,12 @@ class InMemoryQueue { * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. */ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { + const delay = this._opts?.defaultJobOptions?.delay + if (delay) { + await new Promise(r => setTimeout(() => r(), delay)) + } if (this._messages.length <= 0) { return } @@ -70,7 +75,7 @@ class InMemoryQueue { } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +88,26 @@ class InMemoryQueue { * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) + this._messages.push(newJob(this._name, data)) this._addCount++ this._emitter.emit("message") + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed. 
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +115,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + async removeJobs(pattern: string) { // no-op } @@ -128,12 +132,12 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds From ae85c832483d7533ee141803fc0336a730846dc1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:43:47 +0100 Subject: [PATCH 22/62] Clean --- packages/worker/src/initPro.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts index ddc8d2562a..b34d514992 100644 --- a/packages/worker/src/initPro.ts +++ b/packages/worker/src/initPro.ts @@ -1,5 +1,4 @@ import { sdk as proSdk } from "@budibase/pro" -import * as userSdk from "./sdk/users" export const initPro = async () => { await proSdk.init({}) From 91468d2569e8c1828c4e107750cab8bcc81f016f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:18:01 +0100 Subject: [PATCH 23/62] Add doc-writethrough queue --- packages/backend-core/src/queue/constants.ts | 1 + 
packages/backend-core/src/queue/listeners.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..14dce5fe8d 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { From 2d84bc5da2b5a3eada63eba3866b04324a519afb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:34:05 +0100 Subject: [PATCH 24/62] Use bull --- .../backend-core/src/cache/docWritethrough.ts | 123 +++++++++--------- 1 file changed, 64 insertions(+), 59 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e367c9e060..38a162435d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,6 +3,9 @@ import { getDocWritethroughClient } from 
"../redis/init" import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" +import { JobQueue, createQueue } from "../queue" +import { context, db as dbUtils } from ".." + const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -14,17 +17,63 @@ async function getCache() { return CACHE } -interface CacheItem { - nextWrite: number +interface ProcessDocMessage { + tenantId: string + dbName: string + docId: string + cacheKeyPrefix: string } +export const docWritethroughProcessorQueue = createQueue( + JobQueue.DOC_WRITETHROUGH_QUEUE +) + +docWritethroughProcessorQueue.process(async message => { + const { dbName, tenantId, docId, cacheKeyPrefix } = message.data + const cache = await getCache() + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } + }) +}) + export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number private cacheKeyPrefix: string - private docInfoCacheKey: string constructor( db: Database, @@ -35,54 +84,31 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } 
get docId() { return this._docId } - private makeNextWriteInfoItem(): CacheItem { - return { nextWrite: Date.now() + this.writeRateMs } - } - async patch(data: Record) { const cache = await getCache() await this.storeToCache(cache, data) - const updateDb = await this.shouldUpdateDb(cache) - - if (updateDb) { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: this.docInfoCacheKey, - ttl: 15000, - }, - async () => { - if (await this.shouldUpdateDb(cache)) { - await this.persistToDb(cache) - await cache.store( - this.docInfoCacheKey, - this.makeNextWriteInfoItem() - ) - } - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + docWritethroughProcessorQueue.add( + { + tenantId: context.getTenantId(), + dbName: this.db.name, + docId: this.docId, + cacheKeyPrefix: this.cacheKeyPrefix, + }, + { + delay: this.writeRateMs - 1, + jobId: this.cacheKeyPrefix, + removeOnFail: true, + removeOnComplete: true, } - } - } - - private async shouldUpdateDb(cache: BaseCache) { - const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeNextWriteInfoItem() ) - return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { @@ -91,25 +117,4 @@ export class DocWritethrough { await cache.store(cacheKey, { key, value }, undefined) } } - - private async persistToDb(cache: BaseCache) { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } - - const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } } From e648503e4f31045b0b68e4baed76003adb6d5496 Mon Sep 17 00:00:00 
2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:50:58 +0100 Subject: [PATCH 25/62] Tests --- .../backend-core/src/cache/docWritethrough.ts | 99 +++++++++------ .../src/cache/tests/docWritethrough.spec.ts | 120 ++++++++++-------- .../backend-core/src/queue/inMemoryQueue.ts | 76 ++++++++--- 3 files changed, 186 insertions(+), 109 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 38a162435d..f53cfbfe5f 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -4,7 +4,8 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" import { JobQueue, createQueue } from "../queue" -import { context, db as dbUtils } from ".." +import * as context from "../context" +import * as dbUtils from "../db" const DEFAULT_WRITE_RATE_MS = 10000 @@ -28,50 +29,71 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -docWritethroughProcessorQueue.process(async message => { - const { dbName, tenantId, docId, cacheKeyPrefix } = message.data - const cache = await getCache() - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } +let _init = false +export const init = () => { + if (_init) { + return + } + docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + 
async () => { + await persistToDb(message.data) } + ) - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) - } + }) }) -}) + _init = true +} + +export async function persistToDb({ + dbName, + docId, + cacheKeyPrefix, +}: { + dbName: string + docId: string + cacheKeyPrefix: string +}) { + const cache = await getCache() + + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } +} export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number + private tenantId: string private cacheKeyPrefix: string @@ -84,6 +106,7 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.tenantId = context.getTenantId() } get docId() { @@ -97,13 +120,13 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: context.getTenantId(), + tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, }, { - delay: this.writeRateMs - 1, + delay: this.writeRateMs, jobId: this.cacheKeyPrefix, removeOnFail: true, removeOnComplete: true, diff --git 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 4c4a4b2b60..83af66a9d2 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,20 +1,32 @@ -import tk from "timekeeper" - import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -const WRITE_RATE_MS = 500 +import { + DocWritethrough, + docWritethroughProcessorQueue, + init, +} from "../docWritethrough" +import InMemoryQueue from "../../queue/inMemoryQueue" + +const WRITE_RATE_MS = 1000 const initialTime = Date.now() +jest.useFakeTimers({ + now: initialTime, +}) + function resetTime() { - tk.travel(initialTime) + jest.setSystemTime(initialTime) } -function travelForward(ms: number) { - const updatedTime = Date.now() + ms - tk.travel(updatedTime) +async function travelForward(ms: number) { + await jest.advanceTimersByTimeAsync(ms) + + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + while (queue.hasRunningJobs()) { + await jest.runOnlyPendingTimersAsync() + } } describe("docWritethrough", () => { @@ -33,33 +45,37 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeEach(() => { + beforeAll(() => init()) + + beforeEach(async () => { resetTime() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + await config.doInTenant(async () => { + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) }) - it("patching will not persist if timeout from the creation does not hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await 
docWritethrough.patch(generatePatchObject(2)) - travelForward(WRITE_RATE_MS - 1) - await docWritethrough.patch(generatePatchObject(2)) + await travelForward(WRITE_RATE_MS - 1) expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits and next patch is called", async () => { + it("patching will persist if timeout hits", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) + // This will not be persisted const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -67,7 +83,6 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - ...patch3, _rev: expect.stringMatching(/1-.+/), createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), @@ -82,15 +97,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - travelForward(WRITE_RATE_MS) - - const patch4 = generatePatchObject(3) - await docWritethrough.patch(patch4) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -98,7 +110,6 @@ describe("docWritethrough", () => { ...patch1, ...patch2, ...patch3, - ...patch4, }) ) }) @@ -109,16 +120,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date1 = new Date() await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date2 = new Date() - 
const patch3 = generatePatchObject(3) - await docWritethrough.patch(patch3) - expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -129,22 +137,11 @@ describe("docWritethrough", () => { }) }) - it("patching will not persist even if timeout hits but next patch is not callec", async () => { - await config.doInTenant(async () => { - await docWritethrough.patch(generatePatchObject(2)) - await docWritethrough.patch(generatePatchObject(2)) - - travelForward(WRITE_RATE_MS) - - expect(await db.exists(documentId)).toBe(false) - }) - }) - it("concurrent patches will override keys", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -155,13 +152,14 @@ describe("docWritethrough", () => { }) ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -173,7 +171,7 @@ describe("docWritethrough", () => { }) }) - it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + it("concurrent patches to different docWritethrough will not pollute each other", async () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, @@ -186,12 +184,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) + await 
travelForward(WRITE_RATE_MS) expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -214,7 +213,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch({}) @@ -224,9 +223,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -246,30 +246,46 @@ describe("docWritethrough", () => { ) } - const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) - expect(persistToDbSpy).not.toBeCalled() expect(storeToCacheSpy).toBeCalledTimes(5) + expect(await db.exists(documentId)).toBe(false) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(40) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(45) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) + await parallelPatch(10) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(55) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(5) - expect(persistToDbSpy).toBeCalledTimes(2) + await travelForward(WRITE_RATE_MS) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/3-.+/), + 
}) + ) expect(storeToCacheSpy).toBeCalledTimes(60) }) }) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 3205b6f383..f201714903 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -2,6 +2,13 @@ import events from "events" import { timeout } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" +interface JobMessage { + timestamp: number + queue: string + data: any + opts?: JobOptions +} + /** * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs @@ -11,12 +18,12 @@ import { Queue, QueueOptions, JobOptions } from "./queue" * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -28,10 +35,12 @@ function newJob(queue: string, message: any) { class InMemoryQueue implements Partial { _name: string _opts?: QueueOptions - _messages: any[] + _messages: JobMessage[] + _queuedJobIds: Set _emitter: EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. 
@@ -45,6 +54,7 @@ class InMemoryQueue implements Partial { this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -58,19 +68,24 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - const delay = this._opts?.defaultJobOptions?.delay - if (delay) { - await new Promise(r => setTimeout(() => r(), delay)) + try { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } + } catch (e: any) { + throw e } - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ }) } @@ -89,12 +104,31 @@ class InMemoryQueue implements Partial { */ // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job ${jobId}`) + return + } + if (typeof data !== "object") { throw "Queue only supports carrying JSON." 
} - this._messages.push(newJob(this._name, data)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } return {} as any } @@ -143,7 +177,11 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } From caf142f1db37b4454c03427468d29a7c915de255 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:55:07 +0100 Subject: [PATCH 26/62] Clean --- .../backend-core/src/queue/inMemoryQueue.ts | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index f201714903..6c8107c7a4 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -68,23 +68,19 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - try { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ - const jobId = msg?.opts?.jobId?.toString() - if (jobId && msg?.opts?.removeOnComplete) { - this._queuedJobIds.delete(jobId) - } - } catch (e: any) { - throw e + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) } }) } 
From 6b86633c650ea637c1387351c41b2ba52d84236e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:01:41 +0100 Subject: [PATCH 27/62] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 217e0a93a1..f5134a01fc 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 217e0a93a12f6ed56f122729366a3068c6bd957e +Subproject commit f5134a01fc122be2535c6b17e47d956c145fb186 From dfdbc7b22bbb70c3f694cd7f88ff0718b92878c8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:05:39 +0100 Subject: [PATCH 28/62] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index f5134a01fc..54411929db 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit f5134a01fc122be2535c6b17e47d956c145fb186 +Subproject commit 54411929db75f6bc4335491e86871c889fe3a98a From ca0f583399d9786ab25374a4ab42cc6a0861f27d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:19:05 +0100 Subject: [PATCH 29/62] Remove defaults and init --- .../backend-core/src/cache/docWritethrough.ts | 52 +++++++------------ .../src/cache/tests/docWritethrough.spec.ts | 3 -- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index f53cfbfe5f..1a16f60eb9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -7,8 +7,6 @@ import { JobQueue, createQueue } from "../queue" import * as context from "../context" import * as dbUtils from "../db" -const DEFAULT_WRITE_RATE_MS = 10000 - let CACHE: BaseCache | null = null async function getCache() { if (!CACHE) { @@ -29,33 +27,27 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -let _init = false -export const init = () => { - 
if (_init) { - return - } - docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) +docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) } - }) + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } }) - _init = true -} +}) export async function persistToDb({ dbName, @@ -97,11 +89,7 @@ export class DocWritethrough { private cacheKeyPrefix: string - constructor( - db: Database, - docId: string, - writeRateMs: number = DEFAULT_WRITE_RATE_MS - ) { + constructor(db: Database, docId: string, writeRateMs: number) { this.db = db this._docId = docId this.writeRateMs = writeRateMs diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 83af66a9d2..a5765171cb 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -5,7 +5,6 @@ import _ from "lodash" import { DocWritethrough, docWritethroughProcessorQueue, - init, } from "../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" @@ -45,8 +44,6 @@ describe("docWritethrough", () => { 
}, {} as Record) } - beforeAll(() => init()) - beforeEach(async () => { resetTime() documentId = structures.uuid() From e8c3f20c3047bbd7cb909f1b3735eae05bf0ca4f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:32:18 +0100 Subject: [PATCH 30/62] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 54411929db..9daa77883c 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 54411929db75f6bc4335491e86871c889fe3a98a +Subproject commit 9daa77883cc0b395e5badffe48260324527b6924 From cb5f3e3bd3a8aaa0cce5c6530a0a949b080e3a71 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:38:36 +0100 Subject: [PATCH 31/62] Lint --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 9daa77883c..6079868997 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 9daa77883cc0b395e5badffe48260324527b6924 +Subproject commit 607986899781aa7c0b6ccfd9746497b6fc32b569 From 40cc383c0140fa3d960938162e7924aaacd079f5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 11:22:27 +0100 Subject: [PATCH 32/62] Create docWriteThrough redis cache --- packages/backend-core/src/redis/init.ts | 13 ++++++++++++- packages/backend-core/src/redis/utils.ts | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..7920dfed2d 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = await new Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ export async function init() { 
utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + ).init() } export async function shutdown() { @@ -104,3 +108,10 @@ export async function getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if (!writethroughClient) { + await init() + } + return writethroughClient +} diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** From 9f42ea6bbf2b8247e988b16ae8f3b84a9beb1f9e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:44:52 +0100 Subject: [PATCH 33/62] DocWritethrough --- .../backend-core/src/cache/docWritethrough.ts | 102 ++++++++++++++++++ .../backend-core/src/db/couch/DatabaseImpl.ts | 9 ++ .../backend-core/src/db/instrumentation.ts | 7 ++ packages/types/src/sdk/db.ts | 1 + 4 files changed, 119 insertions(+) create mode 100644 packages/backend-core/src/cache/docWritethrough.ts diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts new file mode 100644 index 0000000000..9e1977f797 --- /dev/null +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -0,0 +1,102 @@ +import BaseCache from "./base" +import { getDocWritethroughClient } from "../redis/init" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" + +const DEFAULT_WRITE_RATE_MS = 10000 + +let CACHE: BaseCache | null = null +async function getCache() { + if (!CACHE) { + const client = await getDocWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} 
+ +interface CacheItem { + lastWrite: number +} + +export class DocWritethrough { + db: Database + docId: string + writeRateMs: number + + constructor( + db: Database, + docId: string, + writeRateMs: number = DEFAULT_WRITE_RATE_MS + ) { + this.db = db + this.docId = docId + this.writeRateMs = writeRateMs + } + + private makeCacheItem(): CacheItem { + return { lastWrite: Date.now() } + } + + async patch(data: Record) { + const cache = await getCache() + + const key = `${this.docId}:info` + const cacheItem = await cache.withCache( + key, + null, + () => this.makeCacheItem(), + { + useTenancy: false, + } + ) + + await this.storeToCache(cache, data) + + const updateDb = + !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs + // let output = this.doc + if (updateDb) { + await this.persistToDb(cache) + } + } + + private async storeToCache(cache: BaseCache, data: Record) { + for (const [key, value] of Object.entries(data)) { + const cacheKey = this.docId + ":data:" + key + await cache.store(cacheKey, { key, value }, undefined) + } + } + + private async persistToDb(cache: BaseCache) { + const key = `${this.db.name}_${this.docId}` + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 15000, + }, + async () => { + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } + + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await this.db.put(doc) + } + ) + + if (!lockResponse.executed) { + throw `DocWriteThrough could not be persisted to db for ${key}` + } + } +} diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 7e7c997cbe..d4d17f6127 100644 --- 
a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -135,6 +135,15 @@ export class DatabaseImpl implements Database { }) } + async docExists(id: string): Promise { + try { + await this.get(id) + return true + } catch { + return false + } + } + async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 03010d4c92..87af0e3127 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -38,6 +38,13 @@ export class DDInstrumentedDatabase implements Database { }) } + docExists(id: string): Promise { + return tracer.trace("db.docExists", span => { + span?.addTags({ db_name: this.name, doc_id: id }) + return this.db.docExists(id) + }) + } + getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index c4e4a4f02f..dafc9ced57 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,6 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise + docExists(id: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 10568cccff8e4d342a03484f04be299fe4868917 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:47:18 +0100 Subject: [PATCH 34/62] USe get for doc exists --- packages/backend-core/src/cache/base/index.ts | 2 +- packages/backend-core/src/db/couch/DatabaseImpl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 264984c6a5..23c952c7b2 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -60,7 +60,7 @@ export default class BaseCache { */ async withCache( 
key: string, - ttl: number, + ttl: number | null = null, fetchFn: any, opts = { useTenancy: true } ) { diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index d4d17f6127..9d198e4307 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -137,7 +137,7 @@ export class DatabaseImpl implements Database { async docExists(id: string): Promise { try { - await this.get(id) + await this.performCall(db => () => db.head(id)) return true } catch { return false From 82132d539d2c535be99a8aee58360fff288a1907 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:17:18 +0100 Subject: [PATCH 35/62] DatabaseImpl.docExists test --- .../src/db/tests/DatabaseImpl.spec.ts | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 packages/backend-core/src/db/tests/DatabaseImpl.spec.ts diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts new file mode 100644 index 0000000000..140ecf4f2c --- /dev/null +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -0,0 +1,55 @@ +import _ from "lodash" +import { AnyDocument } from "@budibase/types" +import { generator } from "../../../tests" +import { DatabaseImpl } from "../couch" +import { newid } from "../../utils" + +describe("DatabaseImpl", () => { + const database = new DatabaseImpl(generator.word()) + const documents: AnyDocument[] = [] + + beforeAll(async () => { + const docsToCreate = Array.from({ length: 10 }).map(() => ({ + _id: newid(), + })) + const createdDocs = await database.bulkDocs(docsToCreate) + + documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) + }) + + describe("docExists", () => { + it("can check existing docs by id", async () => { + const existingDoc = _.sample(documents) + const result = await database.docExists(existingDoc!._id!) 
+ + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.docExists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + + const results = [] + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.docExists(id)).toBe(true) + + await database.remove(existingDoc!) + expect(await database.docExists(id)).toBe(false) + }) + }) +}) From 74aae19a7ebdd9fcb040679c2aeca40e991a8456 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:23:32 +0100 Subject: [PATCH 36/62] docWritethrough test --- .../src/cache/tests/docWritethrough.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 packages/backend-core/src/cache/tests/docWritethrough.spec.ts diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..bfb1da5f1c --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,47 @@ +import tk from "timekeeper" +import { env } from "../.." 
+import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" +import { DocWritethrough } from "../docWritethrough" +import _ from "lodash" + +env._set("MOCK_REDIS", null) + +const initialTime = Date.now() + +const WRITE_RATE_MS = 500 + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(() => { + tk.freeze(initialTime) + documentId = structures.db.id() + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) + + it("patching will not persist until timeout is hit", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + tk.travel(Date.now() + WRITE_RATE_MS - 1) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) + }) +}) From bfde028e9b8dcae7ed81d34542acfcef32cf791c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:28:35 +0100 Subject: [PATCH 37/62] Add persisting tests --- .../src/cache/tests/docWritethrough.spec.ts | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bfb1da5f1c..ab0de53bee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -33,7 +33,7 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) 
}) - it("patching will not persist until timeout is hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) @@ -43,5 +43,42 @@ describe("docWritethrough", () => { expect(await db.docExists(documentId)).toBe(false) }) }) + + it("patching will persist if timeout hits and next patch is called", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + _rev: expect.stringMatching(/1-.+/), + createdAt: new Date(initialTime + 500).toISOString(), + updatedAt: new Date(initialTime + 500).toISOString(), + }) + }) + }) + + it("patching will not persist even if timeout hits but next patch is not callec", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) }) }) From 35536592e6558176e48960063ab71ddfebd2f8d1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:41:26 +0100 Subject: [PATCH 38/62] Add extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 86 ++++++++++++++++--- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index ab0de53bee..16e47ce3c3 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -7,9 +7,17 @@ import _ from "lodash" env._set("MOCK_REDIS", null) +const WRITE_RATE_MS = 500 + const initialTime = Date.now() -const WRITE_RATE_MS = 500 +function resetTime() { + tk.travel(initialTime) +} +function travelForward(ms: number) { + const updatedTime = Date.now() + ms + tk.travel(updatedTime) +} describe("docWritethrough", () => { const config = new DBTestConfiguration() @@ -28,7 +36,7 @@ describe("docWritethrough", () => { } beforeEach(() => { - tk.freeze(initialTime) + resetTime() documentId = structures.db.id() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -37,7 +45,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS - 1) + travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) expect(await db.docExists(documentId)).toBe(false) @@ -51,7 +59,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -62,23 +70,79 @@ describe("docWritethrough", () => { ...patch2, ...patch3, _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + 500).toISOString(), - updatedAt: new Date(initialTime + 500).toISOString(), + createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), }) }) }) + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + 
travelForward(WRITE_RATE_MS) + const date1 = new Date() + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + const date2 = new Date() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: date2.toISOString(), + }) + ) + }) + }) + it("patching will not persist even if timeout hits but next patch is not callec", async () => { await config.doInTenant(async () => { - const patch1 = generatePatchObject(2) - const patch2 = generatePatchObject(2) - await docWritethrough.patch(patch1) - await docWritethrough.patch(patch2) + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) expect(await db.docExists(documentId)).toBe(false) }) }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const time1 = travelForward(WRITE_RATE_MS) + const patch2 = generatePatchObject(1) + await docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! 
+ expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + travelForward(WRITE_RATE_MS) + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) }) }) From 41dde9722f57f12d03450c4bc98e929c7133086d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:51:42 +0100 Subject: [PATCH 39/62] Test concurrency --- .../backend-core/src/cache/docWritethrough.ts | 12 ++++-- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++++++++++++- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 9e1977f797..13a85a0d84 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -19,9 +19,9 @@ interface CacheItem { } export class DocWritethrough { - db: Database - docId: string - writeRateMs: number + private db: Database + private _docId: string + private writeRateMs: number constructor( db: Database, @@ -29,10 +29,14 @@ export class DocWritethrough { writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { this.db = db - this.docId = docId + this._docId = docId this.writeRateMs = writeRateMs } + get docId() { + return this._docId + } + private makeCacheItem(): CacheItem { return { lastWrite: Date.now() } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 16e47ce3c3..aed87499ee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -41,8 +41,9 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, 
WRITE_RATE_MS) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist if timeout from the creation does not hit", async () => { await config.doInTenant(async () => { + travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) travelForward(WRITE_RATE_MS - 1) @@ -116,7 +117,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - const time1 = travelForward(WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -144,5 +145,41 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id(), + WRITE_RATE_MS + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + travelForward(WRITE_RATE_MS) + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) }) }) From 223637999a4679536ca68ca0a0115376753abfa1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 16:48:16 +0100 Subject: [PATCH 40/62] Ensure keys are removed --- .../backend-core/src/cache/docWritethrough.ts | 4 +++ .../src/cache/tests/docWritethrough.spec.ts | 28 +++++++++++++++++++ 2 files changed, 
32 insertions(+) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 13a85a0d84..bde93182a9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -96,6 +96,10 @@ export class DocWritethrough { } await this.db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } ) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index aed87499ee..65e9450f62 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -181,5 +181,33 @@ describe("docWritethrough", () => { ) }) }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + travelForward(WRITE_RATE_MS) + + await docWritethrough.patch({}) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + travelForward(WRITE_RATE_MS) + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) }) }) From 04fb27962390d79fe2fe3b65fe7ee44a48d6dbd8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 17:01:16 +0100 Subject: [PATCH 41/62] Extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 65e9450f62..974494d1c9 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -77,6 +77,35 @@ describe("docWritethrough", () => { }) }) + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + travelForward(WRITE_RATE_MS) + + const patch4 = generatePatchObject(3) + await docWritethrough.patch(patch4) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + ...patch4, + }) + ) + }) + }) + it("date audit fields are set correctly when persisting", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) From fd93eb79d5b96c7cf0c71a9d8501dfe189771d56 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 10:53:18 +0100 Subject: [PATCH 42/62] Fixes and tests --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++---------- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++- 2 files changed, 82 insertions(+), 47 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index bde93182a9..80063e4772 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,8 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private docInfoCacheKey: string + constructor( db: Database, docId: string, @@ -31,6 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs + this.docInfoCacheKey = `${this.docId}:info` } get docId() { @@ -44,26 +47,39 @@ export 
class DocWritethrough { async patch(data: Record) { const cache = await getCache() - const key = `${this.docId}:info` - const cacheItem = await cache.withCache( - key, - null, - () => this.makeCacheItem(), - { - useTenancy: false, - } - ) - await this.storeToCache(cache, data) - const updateDb = - !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs - // let output = this.doc + const updateDb = await this.shouldUpdateDb(cache) + if (updateDb) { - await this.persistToDb(cache) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: this.docInfoCacheKey, + ttl: 15000, + }, + async () => { + if (await this.shouldUpdateDb(cache)) { + await this.persistToDb(cache) + await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } } } + private async shouldUpdateDb(cache: BaseCache) { + const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => + this.makeCacheItem() + ) + return cacheItem.lastWrite <= Date.now() - this.writeRateMs + } + private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { const cacheKey = this.docId + ":data:" + key @@ -72,39 +88,23 @@ export class DocWritethrough { } private async persistToDb(cache: BaseCache) { - const key = `${this.db.name}_${this.docId}` + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: key, - ttl: 15000, - }, - async () => { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = 
await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } + await this.db.put(doc) - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } - ) - - if (!lockResponse.executed) { - throw `DocWriteThrough could not be persisted to db for ${key}` + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) } } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 974494d1c9..bca781e377 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,10 @@ import tk from "timekeeper" -import { env } from "../.." + import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -env._set("MOCK_REDIS", null) - const WRITE_RATE_MS = 500 const initialTime = Date.now() @@ -238,5 +236,42 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent calls will not cause multiple saves", async () => { + async function parallelPatch(count: number) { + await Promise.all( + Array.from({ length: count }).map(() => + docWritethrough.patch(generatePatchObject(1)) + ) + ) + } + + const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") + const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + + await config.doInTenant(async () => { + await parallelPatch(5) + expect(persistToDbSpy).not.toBeCalled() + expect(storeToCacheSpy).toBeCalledTimes(5) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(40) + + expect(persistToDbSpy).toBeCalledTimes(1) + 
expect(storeToCacheSpy).toBeCalledTimes(45) + + await parallelPatch(10) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(55) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(5) + expect(persistToDbSpy).toBeCalledTimes(2) + expect(storeToCacheSpy).toBeCalledTimes(60) + }) + }) }) }) From eb9a1633944d84cbefa727b18a129feff27c9f56 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:30 +0100 Subject: [PATCH 43/62] Making code more readable --- .../backend-core/src/cache/docWritethrough.ts | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 80063e4772..5148950c1d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -15,7 +15,7 @@ async function getCache() { } interface CacheItem { - lastWrite: number + nextWrite: number } export class DocWritethrough { @@ -40,8 +40,8 @@ export class DocWritethrough { return this._docId } - private makeCacheItem(): CacheItem { - return { lastWrite: Date.now() } + private makeNextWriteInfoItem(): CacheItem { + return { nextWrite: Date.now() + this.writeRateMs } } async patch(data: Record) { @@ -62,7 +62,10 @@ export class DocWritethrough { async () => { if (await this.shouldUpdateDb(cache)) { await this.persistToDb(cache) - await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + await cache.store( + this.docInfoCacheKey, + this.makeNextWriteInfoItem() + ) } } ) @@ -75,9 +78,9 @@ export class DocWritethrough { private async shouldUpdateDb(cache: BaseCache) { const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeCacheItem() + this.makeNextWriteInfoItem() ) - return cacheItem.lastWrite <= Date.now() - this.writeRateMs + return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { From 
dc84eb4e806684c438ab18005bb14836720cc57b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:55 +0100 Subject: [PATCH 44/62] Type caches --- packages/backend-core/src/cache/base/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 23c952c7b2..911bd6a831 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -58,12 +58,12 @@ export default class BaseCache { /** * Read from the cache. Write to the cache if not exists. */ - async withCache( + async withCache( key: string, ttl: number | null = null, - fetchFn: any, + fetchFn: () => Promise | T, opts = { useTenancy: true } - ) { + ): Promise { const cachedValue = await this.get(key, opts) if (cachedValue) { return cachedValue From e986d34b8739258e81c6acc385afdd4cbe133a7b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:12:31 +0100 Subject: [PATCH 45/62] Fix types --- packages/backend-core/src/cache/generic.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts index 3ac323a8d4..2d6d8b9472 100644 --- a/packages/backend-core/src/cache/generic.ts +++ b/packages/backend-core/src/cache/generic.ts @@ -26,7 +26,8 @@ export const store = (...args: Parameters) => GENERIC.store(...args) export const destroy = (...args: Parameters) => GENERIC.delete(...args) -export const withCache = (...args: Parameters) => - GENERIC.withCache(...args) +export const withCache = ( + ...args: Parameters> +) => GENERIC.withCache(...args) export const bustCache = (...args: Parameters) => GENERIC.bustCache(...args) From da012c0f082d1bf44b6837e69da05d0a13db7fea Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:38:48 +0100 Subject: [PATCH 46/62] Namespace key in redis by db --- 
packages/backend-core/src/cache/docWritethrough.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5148950c1d..e46c763906 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -33,7 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.docId}:info` + this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` } get docId() { From 82a6f9027e5df55b113d550d5e26a8b958f87219 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:41:40 +0100 Subject: [PATCH 47/62] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e46c763906..e367c9e060 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,7 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private cacheKeyPrefix: string private docInfoCacheKey: string constructor( @@ -33,7 +34,8 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` + this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } get docId() { @@ -85,7 +87,7 @@ export class DocWritethrough { private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { - const cacheKey = this.docId + ":data:" + key + const cacheKey = this.cacheKeyPrefix + ":data:" + key await cache.store(cacheKey, { key, value }, undefined) } } @@ -98,7 +100,7 @@ export 
class DocWritethrough { doc = { _id: this.docId } } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) + const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value From 774ff42f0c926eb91c84d8a467a9047947274573 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:59:51 +0100 Subject: [PATCH 48/62] Use overloads --- .../src/cache/tests/docWritethrough.spec.ts | 6 ++-- .../backend-core/src/db/couch/DatabaseImpl.ts | 28 ++++++++++++------- .../backend-core/src/db/instrumentation.ts | 14 ++++------ .../src/db/tests/DatabaseImpl.spec.ts | 16 +++++------ packages/types/src/sdk/db.ts | 2 +- 5 files changed, 35 insertions(+), 31 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bca781e377..4c4a4b2b60 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -35,7 +35,7 @@ describe("docWritethrough", () => { beforeEach(() => { resetTime() - documentId = structures.db.id() + documentId = structures.uuid() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -47,7 +47,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) @@ -136,7 +136,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 9d198e4307..416313f520 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ 
b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -70,7 +70,15 @@ export class DatabaseImpl implements Database { DatabaseImpl.nano = buildNano(couchInfo) } - async exists() { + exists(docId?: string) { + if (docId === undefined) { + return this.dbExists() + } + + return this.docExists(docId) + } + + private async dbExists() { const response = await directCouchUrlCall({ url: `${this.couchInfo.url}/${this.name}`, method: "HEAD", @@ -79,6 +87,15 @@ export class DatabaseImpl implements Database { return response.status === 200 } + private async docExists(id: string): Promise { + try { + await this.performCall(db => () => db.head(id)) + return true + } catch { + return false + } + } + private nano() { return this.instanceNano || DatabaseImpl.nano } @@ -135,15 +152,6 @@ export class DatabaseImpl implements Database { }) } - async docExists(id: string): Promise { - try { - await this.performCall(db => () => db.head(id)) - return true - } catch { - return false - } - } - async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 87af0e3127..795f30d7cd 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database { return this.db.name } - exists(): Promise { + exists(docId?: string): Promise { return tracer.trace("db.exists", span => { - span?.addTags({ db_name: this.name }) + span?.addTags({ db_name: this.name, doc_id: docId }) + if (docId) { + return this.db.exists(docId) + } return this.db.exists() }) } @@ -38,13 +41,6 @@ export class DDInstrumentedDatabase implements Database { }) } - docExists(id: string): Promise { - return tracer.trace("db.docExists", span => { - span?.addTags({ db_name: this.name, doc_id: id }) - return this.db.docExists(id) - }) - } - getMultiple( ids: string[], opts?: { allowMissing?: 
boolean | undefined } | undefined diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts index 140ecf4f2c..586f13f417 100644 --- a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -17,16 +17,16 @@ describe("DatabaseImpl", () => { documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) }) - describe("docExists", () => { + describe("document exists", () => { it("can check existing docs by id", async () => { const existingDoc = _.sample(documents) - const result = await database.docExists(existingDoc!._id!) + const result = await database.exists(existingDoc!._id!) expect(result).toBe(true) }) it("can check non existing docs by id", async () => { - const result = await database.docExists(newid()) + const result = await database.exists(newid()) expect(result).toBe(false) }) @@ -36,9 +36,9 @@ describe("DatabaseImpl", () => { const id = existingDoc!._id! const results = [] - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) expect(results).toEqual([true, true, true]) }) @@ -46,10 +46,10 @@ describe("DatabaseImpl", () => { it("returns false after the doc is deleted", async () => { const existingDoc = _.sample(documents) const id = existingDoc!._id! - expect(await database.docExists(id)).toBe(true) + expect(await database.exists(id)).toBe(true) await database.remove(existingDoc!) 
- expect(await database.docExists(id)).toBe(false) + expect(await database.exists(id)).toBe(false) }) }) }) diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index dafc9ced57..4d103d5be6 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,7 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise - docExists(id: string): Promise + exists(docId: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 2412d75cacbe36f27d0f8c4d02804eb371bb292d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:38:45 +0100 Subject: [PATCH 49/62] Type inMemoryQueue --- .../backend-core/src/queue/inMemoryQueue.ts | 36 ++++++++++--------- packages/backend-core/src/queue/queue.ts | 2 ++ 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..3205b6f383 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,6 @@ import events from "events" import { timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -24,9 +25,9 @@ function newJob(queue: string, message: any) { * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. 
*/ -class InMemoryQueue { +class InMemoryQueue implements Partial { _name: string - _opts?: any + _opts?: QueueOptions _messages: any[] _emitter: EventEmitter _runCount: number @@ -37,7 +38,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] @@ -55,8 +56,12 @@ class InMemoryQueue { * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. */ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { + const delay = this._opts?.defaultJobOptions?.delay + if (delay) { + await new Promise(r => setTimeout(() => r(), delay)) + } if (this._messages.length <= 0) { return } @@ -70,7 +75,7 @@ class InMemoryQueue { } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +88,26 @@ class InMemoryQueue { * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) + this._messages.push(newJob(this._name, data)) this._addCount++ this._emitter.emit("message") + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed. 
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +115,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + async removeJobs(pattern: string) { // no-op } @@ -128,12 +132,12 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds From b39400f08c5145a818aadd602f74c2a7a41e895c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:43:47 +0100 Subject: [PATCH 50/62] Clean --- packages/worker/src/initPro.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts index ddc8d2562a..b34d514992 100644 --- a/packages/worker/src/initPro.ts +++ b/packages/worker/src/initPro.ts @@ -1,5 +1,4 @@ import { sdk as proSdk } from "@budibase/pro" -import * as userSdk from "./sdk/users" export const initPro = async () => { await proSdk.init({}) From df325e21c30fae69940ed04bc3eb9f2d2f8b160d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:18:01 +0100 Subject: [PATCH 51/62] Add doc-writethrough queue --- packages/backend-core/src/queue/constants.ts | 1 + 
packages/backend-core/src/queue/listeners.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..14dce5fe8d 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { From 936ddafee7c21aa939c2842e793e6865741054a5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:34:05 +0100 Subject: [PATCH 52/62] Use bull --- .../backend-core/src/cache/docWritethrough.ts | 123 +++++++++--------- 1 file changed, 64 insertions(+), 59 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e367c9e060..38a162435d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,6 +3,9 @@ import { getDocWritethroughClient } from 
"../redis/init" import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" +import { JobQueue, createQueue } from "../queue" +import { context, db as dbUtils } from ".." + const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -14,17 +17,63 @@ async function getCache() { return CACHE } -interface CacheItem { - nextWrite: number +interface ProcessDocMessage { + tenantId: string + dbName: string + docId: string + cacheKeyPrefix: string } +export const docWritethroughProcessorQueue = createQueue( + JobQueue.DOC_WRITETHROUGH_QUEUE +) + +docWritethroughProcessorQueue.process(async message => { + const { dbName, tenantId, docId, cacheKeyPrefix } = message.data + const cache = await getCache() + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } + }) +}) + export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number private cacheKeyPrefix: string - private docInfoCacheKey: string constructor( db: Database, @@ -35,54 +84,31 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } 
get docId() { return this._docId } - private makeNextWriteInfoItem(): CacheItem { - return { nextWrite: Date.now() + this.writeRateMs } - } - async patch(data: Record) { const cache = await getCache() await this.storeToCache(cache, data) - const updateDb = await this.shouldUpdateDb(cache) - - if (updateDb) { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: this.docInfoCacheKey, - ttl: 15000, - }, - async () => { - if (await this.shouldUpdateDb(cache)) { - await this.persistToDb(cache) - await cache.store( - this.docInfoCacheKey, - this.makeNextWriteInfoItem() - ) - } - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + docWritethroughProcessorQueue.add( + { + tenantId: context.getTenantId(), + dbName: this.db.name, + docId: this.docId, + cacheKeyPrefix: this.cacheKeyPrefix, + }, + { + delay: this.writeRateMs - 1, + jobId: this.cacheKeyPrefix, + removeOnFail: true, + removeOnComplete: true, } - } - } - - private async shouldUpdateDb(cache: BaseCache) { - const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeNextWriteInfoItem() ) - return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { @@ -91,25 +117,4 @@ export class DocWritethrough { await cache.store(cacheKey, { key, value }, undefined) } } - - private async persistToDb(cache: BaseCache) { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } - - const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } } From 420b0ffc03386fdf896b11ff0cc5a0f01741ef9f Mon Sep 17 00:00:00 
2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:50:58 +0100 Subject: [PATCH 53/62] Tests --- .../backend-core/src/cache/docWritethrough.ts | 99 +++++++++------ .../src/cache/tests/docWritethrough.spec.ts | 120 ++++++++++-------- .../backend-core/src/queue/inMemoryQueue.ts | 76 ++++++++--- 3 files changed, 186 insertions(+), 109 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 38a162435d..f53cfbfe5f 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -4,7 +4,8 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" import { JobQueue, createQueue } from "../queue" -import { context, db as dbUtils } from ".." +import * as context from "../context" +import * as dbUtils from "../db" const DEFAULT_WRITE_RATE_MS = 10000 @@ -28,50 +29,71 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -docWritethroughProcessorQueue.process(async message => { - const { dbName, tenantId, docId, cacheKeyPrefix } = message.data - const cache = await getCache() - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } +let _init = false +export const init = () => { + if (_init) { + return + } + docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + 
async () => { + await persistToDb(message.data) } + ) - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) - } + }) }) -}) + _init = true +} + +export async function persistToDb({ + dbName, + docId, + cacheKeyPrefix, +}: { + dbName: string + docId: string + cacheKeyPrefix: string +}) { + const cache = await getCache() + + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } +} export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number + private tenantId: string private cacheKeyPrefix: string @@ -84,6 +106,7 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.tenantId = context.getTenantId() } get docId() { @@ -97,13 +120,13 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: context.getTenantId(), + tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, }, { - delay: this.writeRateMs - 1, + delay: this.writeRateMs, jobId: this.cacheKeyPrefix, removeOnFail: true, removeOnComplete: true, diff --git 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 4c4a4b2b60..83af66a9d2 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,20 +1,32 @@ -import tk from "timekeeper" - import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -const WRITE_RATE_MS = 500 +import { + DocWritethrough, + docWritethroughProcessorQueue, + init, +} from "../docWritethrough" +import InMemoryQueue from "../../queue/inMemoryQueue" + +const WRITE_RATE_MS = 1000 const initialTime = Date.now() +jest.useFakeTimers({ + now: initialTime, +}) + function resetTime() { - tk.travel(initialTime) + jest.setSystemTime(initialTime) } -function travelForward(ms: number) { - const updatedTime = Date.now() + ms - tk.travel(updatedTime) +async function travelForward(ms: number) { + await jest.advanceTimersByTimeAsync(ms) + + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + while (queue.hasRunningJobs()) { + await jest.runOnlyPendingTimersAsync() + } } describe("docWritethrough", () => { @@ -33,33 +45,37 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeEach(() => { + beforeAll(() => init()) + + beforeEach(async () => { resetTime() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + await config.doInTenant(async () => { + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) }) - it("patching will not persist if timeout from the creation does not hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await 
docWritethrough.patch(generatePatchObject(2)) - travelForward(WRITE_RATE_MS - 1) - await docWritethrough.patch(generatePatchObject(2)) + await travelForward(WRITE_RATE_MS - 1) expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits and next patch is called", async () => { + it("patching will persist if timeout hits", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) + // This will not be persisted const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -67,7 +83,6 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - ...patch3, _rev: expect.stringMatching(/1-.+/), createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), @@ -82,15 +97,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - travelForward(WRITE_RATE_MS) - - const patch4 = generatePatchObject(3) - await docWritethrough.patch(patch4) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -98,7 +110,6 @@ describe("docWritethrough", () => { ...patch1, ...patch2, ...patch3, - ...patch4, }) ) }) @@ -109,16 +120,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date1 = new Date() await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date2 = new Date() - 
const patch3 = generatePatchObject(3) - await docWritethrough.patch(patch3) - expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -129,22 +137,11 @@ describe("docWritethrough", () => { }) }) - it("patching will not persist even if timeout hits but next patch is not callec", async () => { - await config.doInTenant(async () => { - await docWritethrough.patch(generatePatchObject(2)) - await docWritethrough.patch(generatePatchObject(2)) - - travelForward(WRITE_RATE_MS) - - expect(await db.exists(documentId)).toBe(false) - }) - }) - it("concurrent patches will override keys", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -155,13 +152,14 @@ describe("docWritethrough", () => { }) ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -173,7 +171,7 @@ describe("docWritethrough", () => { }) }) - it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + it("concurrent patches to different docWritethrough will not pollute each other", async () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, @@ -186,12 +184,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) + await 
travelForward(WRITE_RATE_MS) expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -214,7 +213,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch({}) @@ -224,9 +223,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -246,30 +246,46 @@ describe("docWritethrough", () => { ) } - const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) - expect(persistToDbSpy).not.toBeCalled() expect(storeToCacheSpy).toBeCalledTimes(5) + expect(await db.exists(documentId)).toBe(false) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(40) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(45) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) + await parallelPatch(10) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(55) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(5) - expect(persistToDbSpy).toBeCalledTimes(2) + await travelForward(WRITE_RATE_MS) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/3-.+/), + 
}) + ) expect(storeToCacheSpy).toBeCalledTimes(60) }) }) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 3205b6f383..f201714903 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -2,6 +2,13 @@ import events from "events" import { timeout } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" +interface JobMessage { + timestamp: number + queue: string + data: any + opts?: JobOptions +} + /** * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs @@ -11,12 +18,12 @@ import { Queue, QueueOptions, JobOptions } from "./queue" * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -28,10 +35,12 @@ function newJob(queue: string, message: any) { class InMemoryQueue implements Partial { _name: string _opts?: QueueOptions - _messages: any[] + _messages: JobMessage[] + _queuedJobIds: Set _emitter: EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. 
@@ -45,6 +54,7 @@ class InMemoryQueue implements Partial { this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -58,19 +68,24 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - const delay = this._opts?.defaultJobOptions?.delay - if (delay) { - await new Promise(r => setTimeout(() => r(), delay)) + try { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } + } catch (e: any) { + throw e } - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ }) } @@ -89,12 +104,31 @@ class InMemoryQueue implements Partial { */ // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job ${jobId}`) + return + } + if (typeof data !== "object") { throw "Queue only supports carrying JSON." 
} - this._messages.push(newJob(this._name, data)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } return {} as any } @@ -143,7 +177,11 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } From b94d28b7d63caa6061ff55f623be1f76c9665578 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:55:07 +0100 Subject: [PATCH 54/62] Clean --- .../backend-core/src/queue/inMemoryQueue.ts | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index f201714903..6c8107c7a4 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -68,23 +68,19 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - try { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ - const jobId = msg?.opts?.jobId?.toString() - if (jobId && msg?.opts?.removeOnComplete) { - this._queuedJobIds.delete(jobId) - } - } catch (e: any) { - throw e + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) } }) } 
From 8d87850765efdea50d4127cc46743eed2c57a511 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:19:05 +0100 Subject: [PATCH 55/62] Remove defaults and init --- .../backend-core/src/cache/docWritethrough.ts | 52 +++++++------------ .../src/cache/tests/docWritethrough.spec.ts | 3 -- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index f53cfbfe5f..1a16f60eb9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -7,8 +7,6 @@ import { JobQueue, createQueue } from "../queue" import * as context from "../context" import * as dbUtils from "../db" -const DEFAULT_WRITE_RATE_MS = 10000 - let CACHE: BaseCache | null = null async function getCache() { if (!CACHE) { @@ -29,33 +27,27 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -let _init = false -export const init = () => { - if (_init) { - return - } - docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) +docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) } - }) + ) + 
+ if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } }) - _init = true -} +}) export async function persistToDb({ dbName, @@ -97,11 +89,7 @@ export class DocWritethrough { private cacheKeyPrefix: string - constructor( - db: Database, - docId: string, - writeRateMs: number = DEFAULT_WRITE_RATE_MS - ) { + constructor(db: Database, docId: string, writeRateMs: number) { this.db = db this._docId = docId this.writeRateMs = writeRateMs diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 83af66a9d2..a5765171cb 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -5,7 +5,6 @@ import _ from "lodash" import { DocWritethrough, docWritethroughProcessorQueue, - init, } from "../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" @@ -45,8 +44,6 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeAll(() => init()) - beforeEach(async () => { resetTime() documentId = structures.uuid() From 0649497ab53a1d73bac39f3c4ec8ba2cb8e88c3c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:47:23 +0100 Subject: [PATCH 56/62] Add comment --- packages/backend-core/src/cache/tests/docWritethrough.spec.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index a5765171cb..3e638a4eec 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -256,6 +256,8 @@ describe("docWritethrough", () => { expect(storeToCacheSpy).toBeCalledTimes(45) + // Ideally we want to spy on persistToDb from ./docWritethrough, but due our barrel files configuration required quite of a complex setup. 
+ // We are relying on the document being stored only once (otherwise we would have _rev updated) expect(await db.get(documentId)).toEqual( expect.objectContaining({ _id: documentId, From 2b25f9f0cb75ae1925db074348dbdaab521747c6 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 15:02:02 +0100 Subject: [PATCH 57/62] Improve redlock non executed response --- packages/backend-core/src/cache/docWritethrough.ts | 9 +++++++++ packages/backend-core/src/redis/redlockImpl.ts | 10 +++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 1a16f60eb9..ebb64ee9e5 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -44,6 +44,15 @@ docWritethroughProcessorQueue.process(async message => { ) if (!lockResponse.executed) { + if ( + lockResponse.reason !== + locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce + ) { + console.error("Error persisting docWritethrough", { + data: message.data, + }) + throw "Error persisting docWritethrough" + } console.log(`Ignoring redlock conflict in write-through cache`) } }) diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts index adeb5b12ec..28babb9405 100644 --- a/packages/backend-core/src/redis/redlockImpl.ts +++ b/packages/backend-core/src/redis/redlockImpl.ts @@ -82,6 +82,11 @@ type SuccessfulRedlockExecution = { } type UnsuccessfulRedlockExecution = { executed: false + reason: UnsuccessfulRedlockExecutionReason +} + +export const enum UnsuccessfulRedlockExecutionReason { + LockTakenWithTryOnce = "LOCK_TAKEN_WITH_TRY_ONCE", } type RedlockExecution = @@ -141,7 +146,10 @@ export async function doWithLock( if (opts.type === LockType.TRY_ONCE) { // don't throw for try-once locks, they will always error // due to retry count (0) exceeded - return { executed: false } + return 
{ + executed: false, + reason: UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce, + } } else { throw e } From 4fe7e67dd51617c36356ccc79343a8d12f261ea4 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:15:50 +0100 Subject: [PATCH 58/62] Do not use lock --- .../backend-core/src/cache/docWritethrough.ts | 37 ++----------------- .../src/cache/tests/docWritethrough.spec.ts | 4 +- 2 files changed, 4 insertions(+), 37 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index ebb64ee9e5..d4d651c688 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,7 +1,6 @@ import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database, LockName, LockType } from "@budibase/types" -import * as locks from "../redis/redlockImpl" +import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as context from "../context" @@ -17,7 +16,6 @@ async function getCache() { } interface ProcessDocMessage { - tenantId: string dbName: string docId: string cacheKeyPrefix: string @@ -28,34 +26,8 @@ export const docWritethroughProcessorQueue = createQueue( ) docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - console.log("DocWritethrough persisted", { data: message.data }) - } - ) - - if (!lockResponse.executed) { - if ( - lockResponse.reason !== - locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce - ) { - console.error("Error persisting docWritethrough", { - data: message.data, - }) - throw 
"Error persisting docWritethrough" - } - console.log(`Ignoring redlock conflict in write-through cache`) - } - }) + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) }) export async function persistToDb({ @@ -94,7 +66,6 @@ export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number - private tenantId: string private cacheKeyPrefix: string @@ -103,7 +74,6 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.tenantId = context.getTenantId() } get docId() { @@ -117,7 +87,6 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 3e638a4eec..9bbcd6af44 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -47,9 +47,7 @@ describe("docWritethrough", () => { beforeEach(async () => { resetTime() documentId = structures.uuid() - await config.doInTenant(async () => { - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) - }) + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) it("patching will not persist if timeout does not hit", async () => { From ebcb7718b8f6e60e88c1ca4bbcb7cf0f18857efa Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:06:14 +0100 Subject: [PATCH 59/62] Use bulk --- packages/backend-core/src/cache/base/index.ts | 19 +++++++++++++++++++ .../backend-core/src/cache/docWritethrough.ts | 10 +++++----- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 
911bd6a831..942d70ae72 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -46,6 +46,25 @@ export default class BaseCache { await client.store(key, value, ttl) } + /** + * Bulk write to the cache. + */ + async bulkStore( + data: Record, + ttl: number | null = null, + opts = { useTenancy: true } + ) { + if (opts.useTenancy) { + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[generateTenantKey(key)] = value + return acc + }, {} as Record) + } + + const client = await this.getClient() + await client.bulkStore(data, ttl) + } + /** * Remove from cache. */ diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index d4d651c688..a0bc14ec5c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,7 +3,6 @@ import { getDocWritethroughClient } from "../redis/init" import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" -import * as context from "../context" import * as dbUtils from "../db" let CACHE: BaseCache | null = null @@ -101,9 +100,10 @@ export class DocWritethrough { } private async storeToCache(cache: BaseCache, data: Record) { - for (const [key, value] of Object.entries(data)) { - const cacheKey = this.cacheKeyPrefix + ":data:" + key - await cache.store(cacheKey, { key, value }, undefined) - } + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[this.cacheKeyPrefix + ":data:" + key] = { key, value } + return acc + }, {} as Record) + await cache.bulkStore(data, null) } } From db75c0594290551fd0a23e1b0c70079eb2ea5656 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:25:23 +0100 Subject: [PATCH 60/62] Use scim-logs db --- packages/backend-core/src/constants/db.ts | 3 +++ packages/backend-core/src/context/mainContext.ts | 11 +++++++++++ packages/pro | 2 +- 3 files changed, 15 
insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index ac00483021..f4caac502e 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -57,6 +57,9 @@ export const StaticDatabases = { AUDIT_LOGS: { name: "audit-logs", }, + SCIM_LOGS: { + name: "scim-logs", + }, } export const APP_PREFIX = prefixed(DocumentType.APP) diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts index 36fd5dcb48..ae86695168 100644 --- a/packages/backend-core/src/context/mainContext.ts +++ b/packages/backend-core/src/context/mainContext.ts @@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) { } } +export function getScimDBName(tenantId?: string) { + if (!tenantId) { + tenantId = getTenantId() + } + if (tenantId === DEFAULT_TENANT_ID) { + return StaticDatabases.SCIM_LOGS.name + } else { + return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}` + } +} + export function baseGlobalDBName(tenantId: string | undefined | null) { if (!tenantId || tenantId === DEFAULT_TENANT_ID) { return StaticDatabases.GLOBAL.name diff --git a/packages/pro b/packages/pro index 6079868997..678c913246 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 607986899781aa7c0b6ccfd9746497b6fc32b569 +Subproject commit 678c913246bacb398fbda2ad73a8e1bb562983fd From c8780177692445fe87a57d53ace7169abfcd7400 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 7 Mar 2024 13:20:48 +0100 Subject: [PATCH 61/62] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index cce3ba87b8..15b5f3ca8b 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit cce3ba87b8aaa00e6e3fa6df9888c35722a12c45 +Subproject commit 15b5f3ca8bc7c8a7a4d82f63595cfc42bcee105d From a7131cb5130f360b54982e977008f7ec1209cc42 Mon Sep 17 
00:00:00 2001 From: Adria Navarro Date: Thu, 7 Mar 2024 14:25:00 +0100 Subject: [PATCH 62/62] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 15b5f3ca8b..80a4d8ff99 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 15b5f3ca8bc7c8a7a4d82f63595cfc42bcee105d +Subproject commit 80a4d8ff998895fc298ee510158a82ce7daebc67