From b4bed6c0ce73ef2d3fe2345c8e56c06b23dbe6b2 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 23 Jun 2022 20:22:51 +0100 Subject: [PATCH 1/6] Updating redis to use typescript and adding the option of a writethrough cache which can be used, by passing a DB and a value to be written + a delay for writes. --- packages/backend-core/package.json | 1 + packages/backend-core/redis.js | 2 +- .../backend-core/src/cache/appMetadata.js | 2 +- packages/backend-core/src/cache/base/index.ts | 92 ++++++++++ packages/backend-core/src/cache/generic.js | 75 +------- .../src/cache/tests/writethrough.spec.js | 47 +++++ packages/backend-core/src/cache/user.js | 2 +- .../backend-core/src/cache/writethrough.ts | 58 +++++++ packages/backend-core/src/environment.ts | 3 +- packages/backend-core/src/pkg/redis.ts | 2 +- .../src/redis/{index.js => index.ts} | 132 ++++++++------ .../src/redis/{authRedis.js => init.js} | 13 +- packages/backend-core/src/redis/utils.js | 20 +++ .../backend-core/src/security/sessions.js | 2 +- packages/backend-core/src/utils.js | 4 + packages/backend-core/yarn.lock | 163 ++++++++++++++++++ 16 files changed, 491 insertions(+), 127 deletions(-) create mode 100644 packages/backend-core/src/cache/base/index.ts create mode 100644 packages/backend-core/src/cache/tests/writethrough.spec.js create mode 100644 packages/backend-core/src/cache/writethrough.ts rename packages/backend-core/src/redis/{index.js => index.ts} (55%) rename packages/backend-core/src/redis/{authRedis.js => init.js} (79%) diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index c1fa9953ca..b2b777958b 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -63,6 +63,7 @@ "@types/koa": "2.0.52", "@types/node": "14.18.20", "@types/node-fetch": "2.6.1", + "@types/pouchdb": "^6.4.0", "@types/redlock": "4.0.3", "@types/semver": "7.3.7", "@types/tar-fs": "2.0.1", diff --git a/packages/backend-core/redis.js b/packages/backend-core/redis.js index badce3be20..1f7a48540a 100644 --- a/packages/backend-core/redis.js +++ b/packages/backend-core/redis.js @@ -1,5 +1,5 @@ module.exports = { Client: require("./src/redis"), utils: require("./src/redis/utils"), - clients: require("./src/redis/authRedis"), + clients: require("./src/redis/init"), } diff --git a/packages/backend-core/src/cache/appMetadata.js b/packages/backend-core/src/cache/appMetadata.js index effdc886d7..b0d9481cbd 100644 --- a/packages/backend-core/src/cache/appMetadata.js +++ b/packages/backend-core/src/cache/appMetadata.js @@ -1,4 +1,4 @@ -const redis = require("../redis/authRedis") +const redis = require("../redis/init") const { doWithDB } = require("../db") const { DocumentTypes } = require("../db/constants") diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts new file mode 100644 index 0000000000..f3216531f4 --- /dev/null +++ b/packages/backend-core/src/cache/base/index.ts @@ -0,0 +1,92 @@ +import { getTenantId } from "../../context" +import redis from "../../redis/init" +import RedisWrapper from "../../redis" + +function generateTenantKey(key: string) { + const tenantId = getTenantId() + return `${key}:${tenantId}` +} + +export = class BaseCache { + client: RedisWrapper | undefined + + constructor(client: RedisWrapper | undefined = undefined) { + this.client = client + } + + async getClient() { + return !this.client ? 
await redis.getCacheClient() : this.client + } + + async keys(pattern: string) { + const client = await this.getClient() + return client.keys(pattern) + } + + /** + * Read only from the cache. + */ + async get(key: string, opts = { useTenancy: true }) { + key = opts.useTenancy ? generateTenantKey(key) : key + const client = await this.getClient() + return client.get(key) + } + + /** + * Write to the cache. + */ + async store( + key: string, + value: any, + ttl: number | null = null, + opts = { useTenancy: true } + ) { + key = opts.useTenancy ? generateTenantKey(key) : key + const client = await this.getClient() + await client.store(key, value, ttl) + } + + /** + * Remove from cache. + */ + async delete(key: string, opts = { useTenancy: true }) { + key = opts.useTenancy ? generateTenantKey(key) : key + const client = await this.getClient() + return client.delete(key) + } + + /** + * Read from the cache. Write to the cache if not exists. + */ + async withCache( + key: string, + ttl: number, + fetchFn: any, + opts = { useTenancy: true } + ) { + const cachedValue = await this.get(key, opts) + if (cachedValue) { + return cachedValue + } + + try { + const fetchedValue = await fetchFn() + + await this.store(key, fetchedValue, ttl, opts) + return fetchedValue + } catch (err) { + console.error("Error fetching before cache - ", err) + throw err + } + } + + async bustCache(key: string, opts = { client: null }) { + const client = await this.getClient() + try { + await client.delete(generateTenantKey(key)) + } catch (err) { + console.error("Error busting cache - ", err) + throw err + } + } +} diff --git a/packages/backend-core/src/cache/generic.js b/packages/backend-core/src/cache/generic.js index 8f0ef78954..cffcdebb9a 100644 --- a/packages/backend-core/src/cache/generic.js +++ b/packages/backend-core/src/cache/generic.js @@ -1,5 +1,6 @@ -const redis = require("../redis/authRedis") -const { getTenantId } = require("../context") +const BaseCache = require("./base") + +const GENERIC = new BaseCache() exports.CacheKeys = { CHECKLIST: "checklist", @@ -16,67 +17,9 @@ exports.TTL = { ONE_DAY: 86400, } -function generateTenantKey(key) { - const tenantId = getTenantId() - return `${key}:${tenantId}` -} - -exports.keys = async pattern => { - const client = await redis.getCacheClient() - return client.keys(pattern) -} - -/** - * Read only from the cache. - */ -exports.get = async (key, opts = { useTenancy: true }) => { - key = opts.useTenancy ? generateTenantKey(key) : key - const client = await redis.getCacheClient() - const value = await client.get(key) - return value -} - -/** - * Write to the cache. - */ -exports.store = async (key, value, ttl, opts = { useTenancy: true }) => { - key = opts.useTenancy ? generateTenantKey(key) : key - const client = await redis.getCacheClient() - await client.store(key, value, ttl) -} - -exports.delete = async (key, opts = { useTenancy: true }) => { - key = opts.useTenancy ? generateTenantKey(key) : key - const client = await redis.getCacheClient() - return client.delete(key) -} - -/** - * Read from the cache. Write to the cache if not exists. 
- */ -exports.withCache = async (key, ttl, fetchFn, opts = { useTenancy: true }) => { - const cachedValue = await exports.get(key, opts) - if (cachedValue) { - return cachedValue - } - - try { - const fetchedValue = await fetchFn() - - await exports.store(key, fetchedValue, ttl, opts) - return fetchedValue - } catch (err) { - console.error("Error fetching before cache - ", err) - throw err - } -} - -exports.bustCache = async key => { - const client = await redis.getCacheClient() - try { - await client.delete(generateTenantKey(key)) - } catch (err) { - console.error("Error busting cache - ", err) - throw err - } -} +exports.keys = GENERIC.keys +exports.get = GENERIC.get +exports.store = GENERIC.store +exports.delete = GENERIC.delete +exports.withCache = GENERIC.withCache +exports.bustCache = GENERIC.bustCache diff --git a/packages/backend-core/src/cache/tests/writethrough.spec.js b/packages/backend-core/src/cache/tests/writethrough.spec.js new file mode 100644 index 0000000000..6a949a7489 --- /dev/null +++ b/packages/backend-core/src/cache/tests/writethrough.spec.js @@ -0,0 +1,47 @@ +require("../../../tests/utilities/TestConfiguration") +const { get, put } = require("../writethrough") +const { dangerousGetDB } = require("../../db") +const tk = require("timekeeper") + +const START_DATE = Date.now() +tk.freeze(START_DATE) + +const DELAY = 5000 + +const db = dangerousGetDB("test") + +describe("writethrough", () => { + + describe("put", () => { + let first + it("should be able to store, will go to DB", async () => { + const response = await put(db,{ _id: "test", value: 1 }, DELAY) + const output = await db.get(response._id) + first = output + expect(output.value).toBe(1) + }) + + it("second put shouldn't update DB", async () => { + const response = await put(db, { ...first, value: 2 }, DELAY) + const output = await db.get(response._id) + expect(first._rev).toBe(output._rev) + expect(output.value).toBe(1) + }) + + it("should put it again after delay period", async () => { + tk.freeze(START_DATE + DELAY + 1) + const response = await put(db, { ...first, value: 3 }, DELAY) + const output = await db.get(response._id) + expect(response._rev).not.toBe(first._rev) + expect(output.value).toBe(3) + }) + }) + + describe("get", () => { + it("should be able to retrieve", async () => { + const response = await get(db, "test") + expect(response.value).toBe(3) + }) + }) +}) + diff --git a/packages/backend-core/src/cache/user.js b/packages/backend-core/src/cache/user.js index faac6de725..130da1915e 100644 --- a/packages/backend-core/src/cache/user.js +++ b/packages/backend-core/src/cache/user.js @@ -1,4 +1,4 @@ -const redis = require("../redis/authRedis") +const redis = require("../redis/init") const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy") const env = require("../environment") const accounts = require("../cloud/accounts") diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts new file mode 100644 index 0000000000..85e3c9e50e --- /dev/null +++ b/packages/backend-core/src/cache/writethrough.ts @@ -0,0 +1,58 @@ +import BaseCache from "./base" +import { getWritethroughClient } from "../redis/init" + +const DEFAULT_WRITE_RATE_MS = 10000 +let CACHE: BaseCache | null = null + +interface CacheItem { + value: any + lastWrite: number +} + +async function getCache() { + if (!CACHE) { + const client = await getWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} + +function makeCacheItem(value: any, lastWrite: 
number | null = null): CacheItem {
+  return { value, lastWrite: lastWrite || Date.now() }
+}
+
+exports.put = async (
+  db: PouchDB.Database,
+  value: any,
+  writeRateMs: number = DEFAULT_WRITE_RATE_MS
+) => {
+  const cache = await getCache()
+  const key = value._id
+  let cacheItem: CacheItem | undefined = await cache.get(key)
+  const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs
+  let output = value
+  if (updateDb) {
+    // value should contain the _id and _rev
+    const response = await db.put(value)
+    output = {
+      ...value,
+      _id: response.id,
+      _rev: response.rev,
+    }
+  }
+  // if we are updating the DB then need to set the lastWrite to now
+  cacheItem = makeCacheItem(value, updateDb ? null : cacheItem?.lastWrite)
+  await cache.store(key, cacheItem)
+  return output
+}
+
+exports.get = async (db: PouchDB.Database, id: string): Promise<any> => {
+  const cache = await getCache()
+  let cacheItem: CacheItem = await cache.get(id)
+  if (!cacheItem) {
+    const value = await db.get(id)
+    cacheItem = makeCacheItem(value)
+    await cache.store(id, cacheItem)
+  }
+  return cacheItem.value
+}
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts
index 1d9febc6f9..0a17c82873 100644
--- a/packages/backend-core/src/environment.ts
+++ b/packages/backend-core/src/environment.ts
@@ -16,7 +16,7 @@ if (!LOADED && isDev() && !isTest()) {
   LOADED = true
 }
 
-const env: any = {
+const env = {
   isTest,
   isDev,
   JWT_SECRET: process.env.JWT_SECRET,
@@ -66,6 +66,7 @@ const env: any = {
 for (let [key, value] of Object.entries(env)) {
   // handle the edge case of "0" to disable an environment variable
   if (value === "0") {
+    // @ts-ignore
     env[key] = 0
   }
 }
diff --git a/packages/backend-core/src/pkg/redis.ts b/packages/backend-core/src/pkg/redis.ts
index caa5526e55..65ab186d9a 100644
--- a/packages/backend-core/src/pkg/redis.ts
+++ b/packages/backend-core/src/pkg/redis.ts
@@ -2,7 +2,7 @@
 // The outer exports can't be used as they now reference dist directly
 import Client from "../redis"
 import utils from "../redis/utils"
-import clients from "../redis/authRedis"
+import clients from "../redis/init"
 
 export = {
   Client,
diff --git a/packages/backend-core/src/redis/index.js b/packages/backend-core/src/redis/index.ts
similarity index 55%
rename from packages/backend-core/src/redis/index.js
rename to packages/backend-core/src/redis/index.ts
index bd35f95916..206110366f 100644
--- a/packages/backend-core/src/redis/index.js
+++ b/packages/backend-core/src/redis/index.ts
@@ -1,3 +1,4 @@
+import RedisWrapper from "../redis"
 const env = require("../environment")
 // ioredis mock is all in memory
 const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
@@ -6,24 +7,34 @@ const {
   removeDbPrefix,
   getRedisOptions,
   SEPARATOR,
+  SelectableDatabases,
 } = require("./utils")
 
 const RETRY_PERIOD_MS = 2000
 const STARTUP_TIMEOUT_MS = 5000
 const CLUSTERED = false
+const DEFAULT_SELECT_DB = SelectableDatabases.DEFAULT
 
 // for testing just generate the client once
 let CLOSED = false
-let CLIENT = env.isTest() ? 
new Redis(getRedisOptions()) : null +let CLIENTS: { [key: number]: any } = {} // if in test always connected -let CONNECTED = !!env.isTest() +let CONNECTED = env.isTest() -function connectionError(timeout, err) { +function pickClient(selectDb: number): any { + return CLIENTS[selectDb] +} + +function connectionError( + selectDb: number, + timeout: NodeJS.Timeout, + err: Error | string +) { // manually shut down, ignore errors if (CLOSED) { return } - CLIENT.disconnect() + pickClient(selectDb).disconnect() CLOSED = true // always clear this on error clearTimeout(timeout) @@ -38,59 +49,69 @@ function connectionError(timeout, err) { * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise * will return the ioredis client which will be ready to use. */ -function init() { - let timeout +function init(selectDb = DEFAULT_SELECT_DB) { + let timeout: NodeJS.Timeout CLOSED = false - // testing uses a single in memory client - if (env.isTest() || (CLIENT && CONNECTED)) { + let client = pickClient(selectDb) + // already connected, ignore + if (client && CONNECTED) { return } + // testing uses a single in memory client + if (env.isTest()) { + CLIENTS[selectDb] = new Redis(getRedisOptions()) + } // start the timer - only allowed 5 seconds to connect timeout = setTimeout(() => { if (!CONNECTED) { - connectionError(timeout, "Did not successfully connect in timeout") + connectionError( + selectDb, + timeout, + "Did not successfully connect in timeout" + ) } }, STARTUP_TIMEOUT_MS) // disconnect any lingering client - if (CLIENT) { - CLIENT.disconnect() + if (client) { + client.disconnect() } const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED) if (CLUSTERED) { - CLIENT = new Redis.Cluster([{ host, port }], opts) + client = new Redis.Cluster([{ host, port }], opts) } else if (redisProtocolUrl) { - CLIENT = new Redis(redisProtocolUrl) + client = new Redis(redisProtocolUrl) } else { - CLIENT = new Redis(opts) + client = new Redis(opts) } // attach handlers - CLIENT.on("end", err => { - connectionError(timeout, err) + client.on("end", (err: Error) => { + connectionError(selectDb, timeout, err) }) - CLIENT.on("error", err => { - connectionError(timeout, err) + client.on("error", (err: Error) => { + connectionError(selectDb, timeout, err) }) - CLIENT.on("connect", () => { + client.on("connect", () => { clearTimeout(timeout) CONNECTED = true }) + CLIENTS[selectDb] = client } -function waitForConnection() { +function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) { return new Promise(resolve => { - if (CLIENT == null) { + if (pickClient(selectDb) == null) { init() } else if (CONNECTED) { - resolve() + resolve("") return } // check if the connection is ready const interval = setInterval(() => { if (CONNECTED) { clearInterval(interval) - resolve() + resolve("") } }, 500) }) @@ -100,25 +121,26 @@ function waitForConnection() { * Utility function, takes a redis stream and converts it to a promisified response - * this can only be done with redis streams because they will have an end. * @param stream A redis stream, specifically as this type of stream will have an end. + * @param client The client to use for further lookups. 
* @return {Promise} The final output of the stream */ -function promisifyStream(stream) { +function promisifyStream(stream: any, client: RedisWrapper) { return new Promise((resolve, reject) => { const outputKeys = new Set() - stream.on("data", keys => { + stream.on("data", (keys: string[]) => { keys.forEach(key => { outputKeys.add(key) }) }) - stream.on("error", err => { + stream.on("error", (err: Error) => { reject(err) }) stream.on("end", async () => { - const keysArray = Array.from(outputKeys) + const keysArray: string[] = Array.from(outputKeys) as string[] try { let getPromises = [] for (let key of keysArray) { - getPromises.push(CLIENT.get(key)) + getPromises.push(client.get(key)) } const jsonArray = await Promise.all(getPromises) resolve( @@ -134,48 +156,52 @@ function promisifyStream(stream) { }) } -class RedisWrapper { - constructor(db) { +export = class RedisWrapper { + _db: string + _select: number + + constructor(db: string, selectDb: number | null = null) { this._db = db + this._select = selectDb || DEFAULT_SELECT_DB } getClient() { - return CLIENT + return pickClient(this._select) } async init() { CLOSED = false - init() - await waitForConnection() + init(this._select) + await waitForConnection(this._select) return this } async finish() { CLOSED = true - CLIENT.disconnect() + this.getClient().disconnect() } - async scan(key = "") { + async scan(key = ""): Promise { const db = this._db key = `${db}${SEPARATOR}${key}` let stream if (CLUSTERED) { - let node = CLIENT.nodes("master") + let node = this.getClient().nodes("master") stream = node[0].scanStream({ match: key + "*", count: 100 }) } else { - stream = CLIENT.scanStream({ match: key + "*", count: 100 }) + stream = this.getClient().scanStream({ match: key + "*", count: 100 }) } - return promisifyStream(stream) + return promisifyStream(stream, this.getClient()) } - async keys(pattern) { + async keys(pattern: string) { const db = this._db - return CLIENT.keys(addDbPrefix(db, pattern)) + return this.getClient().keys(addDbPrefix(db, pattern)) } - async get(key) { + async get(key: string) { const db = this._db - let response = await CLIENT.get(addDbPrefix(db, key)) + let response = await this.getClient().get(addDbPrefix(db, key)) // overwrite the prefixed key if (response != null && response.key) { response.key = key @@ -188,39 +214,37 @@ class RedisWrapper { } } - async store(key, value, expirySeconds = null) { + async store(key: string, value: any, expirySeconds: number | null = null) { const db = this._db if (typeof value === "object") { value = JSON.stringify(value) } const prefixedKey = addDbPrefix(db, key) - await CLIENT.set(prefixedKey, value) + await this.getClient().set(prefixedKey, value) if (expirySeconds) { - await CLIENT.expire(prefixedKey, expirySeconds) + await this.getClient().expire(prefixedKey, expirySeconds) } } - async getTTL(key) { + async getTTL(key: string) { const db = this._db const prefixedKey = addDbPrefix(db, key) - return CLIENT.ttl(prefixedKey) + return this.getClient().ttl(prefixedKey) } - async setExpiry(key, expirySeconds) { + async setExpiry(key: string, expirySeconds: number | null) { const db = this._db const prefixedKey = addDbPrefix(db, key) - await CLIENT.expire(prefixedKey, expirySeconds) + await this.getClient().expire(prefixedKey, expirySeconds) } - async delete(key) { + async delete(key: string) { const db = this._db - await CLIENT.del(addDbPrefix(db, key)) + await this.getClient().del(addDbPrefix(db, key)) } async clear() { let items = await this.scan() - await Promise.all(items.map(obj 
=> this.delete(obj.key))) + await Promise.all(items.map((obj: any) => this.delete(obj.key))) } } - -module.exports = RedisWrapper diff --git a/packages/backend-core/src/redis/authRedis.js b/packages/backend-core/src/redis/init.js similarity index 79% rename from packages/backend-core/src/redis/authRedis.js rename to packages/backend-core/src/redis/init.js index b2f686b801..8e5d10f838 100644 --- a/packages/backend-core/src/redis/authRedis.js +++ b/packages/backend-core/src/redis/init.js @@ -2,7 +2,7 @@ const Client = require("./index") const utils = require("./utils") const { getRedlock } = require("./redlock") -let userClient, sessionClient, appClient, cacheClient +let userClient, sessionClient, appClient, cacheClient, writethroughClient let migrationsRedlock // turn retry off so that only one instance can ever hold the lock @@ -13,6 +13,10 @@ async function init() { sessionClient = await new Client(utils.Databases.SESSIONS).init() appClient = await new Client(utils.Databases.APP_METADATA).init() cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init() + writethroughClient = await new Client( + utils.Databases.WRITE_THROUGH, + utils.SelectableDatabases.WRITE_THROUGH + ).init() // pass the underlying ioredis client to redlock migrationsRedlock = getRedlock( cacheClient.getClient(), @@ -25,6 +29,7 @@ process.on("exit", async () => { if (sessionClient) await sessionClient.finish() if (appClient) await appClient.finish() if (cacheClient) await cacheClient.finish() + if (writethroughClient) await writethroughClient.finish() }) module.exports = { @@ -52,6 +57,12 @@ module.exports = { } return cacheClient }, + getWritethroughClient: async () => { + if (!writethroughClient) { + await init() + } + return writethroughClient + }, getMigrationsRedlock: async () => { if (!migrationsRedlock) { await init() diff --git a/packages/backend-core/src/redis/utils.js b/packages/backend-core/src/redis/utils.js index 90ea5c33f9..5e677c4f38 100644 --- a/packages/backend-core/src/redis/utils.js +++ b/packages/backend-core/src/redis/utils.js @@ -19,6 +19,26 @@ exports.Databases = { QUERY_VARS: "queryVars", LICENSES: "license", GENERIC_CACHE: "data_cache", + WRITE_THROUGH: "writeThrough", +} + +exports.SelectableDatabases = { + DEFAULT: 0, + WRITE_THROUGH: 1, + UNUSED_1: 2, + UNUSED_2: 3, + UNUSED_3: 4, + UNUSED_4: 5, + UNUSED_5: 6, + UNUSED_6: 7, + UNUSED_7: 8, + UNUSED_8: 9, + UNUSED_9: 10, + UNUSED_10: 11, + UNUSED_11: 12, + UNUSED_12: 13, + UNUSED_13: 14, + UNUSED_14: 15, } exports.SEPARATOR = SEPARATOR diff --git a/packages/backend-core/src/security/sessions.js b/packages/backend-core/src/security/sessions.js index 4e6899c248..8874b47469 100644 --- a/packages/backend-core/src/security/sessions.js +++ b/packages/backend-core/src/security/sessions.js @@ -1,4 +1,4 @@ -const redis = require("../redis/authRedis") +const redis = require("../redis/init") const { v4: uuidv4 } = require("uuid") // a week in seconds diff --git a/packages/backend-core/src/utils.js b/packages/backend-core/src/utils.js index 9d2524bbeb..cf32539c58 100644 --- a/packages/backend-core/src/utils.js +++ b/packages/backend-core/src/utils.js @@ -197,3 +197,7 @@ exports.platformLogout = async ({ ctx, userId, keepActiveSession }) => { await events.auth.logout() await userCache.invalidateUser(userId) } + +exports.timeout = timeMs => { + return new Promise(resolve => setTimeout(resolve, timeMs)) +} diff --git a/packages/backend-core/yarn.lock b/packages/backend-core/yarn.lock index 01d1a43b64..ebbceae60e 100644 --- 
a/packages/backend-core/yarn.lock +++ b/packages/backend-core/yarn.lock @@ -291,6 +291,11 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@budibase/types@^1.0.209": + version "1.0.210" + resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.0.210.tgz#b28f9d7b37d2cbc6b75ae8de201958ccc540cf06" + integrity sha512-16foNHGAlJxvh4IpPEPw9eZyIvA2n9nssrNu54Ag73E85A+mIiZ6NGHrw+mojrdC1yaSrj/xeQ461iYXdxT6/g== + "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" @@ -656,6 +661,13 @@ "@types/keygrip" "*" "@types/node" "*" +"@types/debug@*": + version "4.1.7" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82" + integrity sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg== + dependencies: + "@types/ms" "*" + "@types/express-serve-static-core@^4.17.18": version "4.17.28" resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz#c47def9f34ec81dc6328d0b1b5303d1ec98d86b8" @@ -762,6 +774,11 @@ resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== +"@types/ms@*": + version "0.7.31" + resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197" + integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA== + "@types/node-fetch@2.6.1": version "2.6.1" resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.1.tgz#8f127c50481db65886800ef496f20bbf15518975" @@ -780,6 +797,152 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.20.tgz#268f028b36eaf51181c3300252f605488c4f0650" integrity sha512-Q8KKwm9YqEmUBRsqJ2GWJDtXltBDxTdC4m5vTdXBolu2PeQh8LX+f6BTwU+OuXPu37fLxoN6gidqBmnky36FXA== +"@types/pouchdb-adapter-cordova-sqlite@*": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-cordova-sqlite/-/pouchdb-adapter-cordova-sqlite-1.0.1.tgz#49e5ee6df7cc0c23196fcb340f43a560e74eb1d6" + integrity sha512-nqlXpW1ho3KBg1mUQvZgH2755y3z/rw4UA7ZJCPMRTHofxGMY8izRVw5rHBL4/7P615or0J2udpRYxgkT3D02g== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-adapter-fruitdown@*": + version "6.1.3" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-fruitdown/-/pouchdb-adapter-fruitdown-6.1.3.tgz#9b140ad9645cc56068728acf08ec19ac0046658e" + integrity sha512-Wz1Z1JLOW1hgmFQjqnSkmyyfH7by/iWb4abKn684WMvQfmxx6BxKJpJ4+eulkVPQzzgMMSgU1MpnQOm9FgRkbw== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-adapter-http@*": + version "6.1.3" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-http/-/pouchdb-adapter-http-6.1.3.tgz#6e592d5f48deb6274a21ddac1498dd308096bcf3" + integrity sha512-9Z4TLbF/KJWy/D2sWRPBA+RNU0odQimfdvlDX+EY7rGcd3aVoH8qjD/X0Xcd/0dfBH5pKrNIMFFQgW/TylRCmA== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-adapter-idb@*": + version "6.1.4" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-idb/-/pouchdb-adapter-idb-6.1.4.tgz#cb9a18864585d600820cd325f007614c5c3989cd" + 
integrity sha512-KIAXbkF4uYUz0ZwfNEFLtEkK44mEWopAsD76UhucH92XnJloBysav+TjI4FFfYQyTjoW3S1s6V+Z14CUJZ0F6w== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-adapter-leveldb@*": + version "6.1.3" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-leveldb/-/pouchdb-adapter-leveldb-6.1.3.tgz#17c7e75d75b992050bca15991e97fba575c61bb3" + integrity sha512-ex8NFqQGFwEpFi7AaZ5YofmuemfZNsL3nTFZBUCAKYMBkazQij1pe2ILLStSvJr0XS0qxgXjCEW19T5Wqiiskg== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-adapter-localstorage@*": + version "6.1.3" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-localstorage/-/pouchdb-adapter-localstorage-6.1.3.tgz#0dde02ba6b9d6073a295a20196563942ba9a54bd" + integrity sha512-oor040tye1KKiGLWYtIy7rRT7C2yoyX3Tf6elEJRpjOA7Ja/H8lKc4LaSh9ATbptIcES6MRqZDxtp7ly9hsW3Q== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-adapter-memory@*": + version "6.1.3" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-memory/-/pouchdb-adapter-memory-6.1.3.tgz#9eabdbc890fcf58960ee8b68b8685f837e75c844" + integrity sha512-gVbsIMzDzgZYThFVT4eVNsmuZwVm/4jDxP1sjlgc3qtDIxbtBhGgyNfcskwwz9Zu5Lv1avkDsIWvcxQhnvRlHg== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-adapter-node-websql@*": + version "6.1.3" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-node-websql/-/pouchdb-adapter-node-websql-6.1.3.tgz#aa18bc68af8cf509acd12c400010dcd5fab2243d" + integrity sha512-F/P+os6Jsa7CgHtH64+Z0HfwIcj0hIRB5z8gNhF7L7dxPWoAfkopK5H2gydrP3sQrlGyN4WInF+UJW/Zu1+FKg== + dependencies: + "@types/pouchdb-adapter-websql" "*" + "@types/pouchdb-core" "*" + +"@types/pouchdb-adapter-websql@*": + version "6.1.4" + resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-websql/-/pouchdb-adapter-websql-6.1.4.tgz#359fbe42ccac0ac90b492ddb8c32fafd0aa96d79" + integrity sha512-zMJQCtXC40hBsIDRn0GhmpeGMK0f9l/OGWfLguvczROzxxcOD7REI+e6SEmX7gJKw5JuMvlfuHzkQwjmvSJbtg== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-browser@*": + version "6.1.3" + resolved "https://registry.yarnpkg.com/@types/pouchdb-browser/-/pouchdb-browser-6.1.3.tgz#8f33d6ef58d6817d1f6d36979148a1c7f63244d8" + integrity sha512-EdYowrWxW9SWBMX/rux2eq7dbHi5Zeyzz+FF/IAsgQKnUxgeCO5VO2j4zTzos0SDyJvAQU+EYRc11r7xGn5tvA== + dependencies: + "@types/pouchdb-adapter-http" "*" + "@types/pouchdb-adapter-idb" "*" + "@types/pouchdb-adapter-websql" "*" + "@types/pouchdb-core" "*" + "@types/pouchdb-mapreduce" "*" + "@types/pouchdb-replication" "*" + +"@types/pouchdb-core@*": + version "7.0.10" + resolved "https://registry.yarnpkg.com/@types/pouchdb-core/-/pouchdb-core-7.0.10.tgz#d1ea1549e7fad6cb579f71459b1bc27252e06a5a" + integrity sha512-mKhjLlWWXyV3PTTjDhzDV1kc2dolO7VYFa75IoKM/hr8Er9eo8RIbS7mJLfC8r/C3p6ihZu9yZs1PWC1LQ0SOA== + dependencies: + "@types/debug" "*" + "@types/pouchdb-find" "*" + +"@types/pouchdb-find@*": + version "6.3.7" + resolved "https://registry.yarnpkg.com/@types/pouchdb-find/-/pouchdb-find-6.3.7.tgz#f713534a53c1a7f3fd8fbbfb74131a1b04711ddc" + integrity sha512-b2dr9xoZRK5Mwl8UiRA9l5j9mmCxNfqXuu63H1KZHwJLILjoIIz7BntCvM0hnlnl7Q8P8wORq0IskuaMq5Nnnw== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-http@*": + version "6.1.3" + resolved "https://registry.yarnpkg.com/@types/pouchdb-http/-/pouchdb-http-6.1.3.tgz#09576c0d409da1f8dee34ec5b768415e2472ea52" + integrity sha512-0e9E5SqNOyPl/3FnEIbENssB4FlJsNYuOy131nxrZk36S+y1R/6qO7ZVRypWpGTqBWSuVd7gCsq2UDwO/285+w== + dependencies: + "@types/pouchdb-adapter-http" "*" + "@types/pouchdb-core" "*" + 
+"@types/pouchdb-mapreduce@*": + version "6.1.7" + resolved "https://registry.yarnpkg.com/@types/pouchdb-mapreduce/-/pouchdb-mapreduce-6.1.7.tgz#9ab32d1e0f234f1bf6d1e4c5d7e216e9e23ac0a3" + integrity sha512-WzBwm7tmO9QhfRzVaWT4v6JQSS/fG2OoUDrWrhX87rPe2Pn6laPvdK5li6myNRxCoI/l5e8Jd+oYBAFnaiFucA== + dependencies: + "@types/pouchdb-core" "*" + +"@types/pouchdb-node@*": + version "6.1.4" + resolved "https://registry.yarnpkg.com/@types/pouchdb-node/-/pouchdb-node-6.1.4.tgz#5214c0169fcfd2237d373380bbd65a934feb5dfb" + integrity sha512-wnTCH8X1JOPpNOfVhz8HW0AvmdHh6pt40MuRj0jQnK7QEHsHS79WujsKTKSOF8QXtPwpvCNSsI7ut7H7tfxxJQ== + dependencies: + "@types/pouchdb-adapter-http" "*" + "@types/pouchdb-adapter-leveldb" "*" + "@types/pouchdb-core" "*" + "@types/pouchdb-mapreduce" "*" + "@types/pouchdb-replication" "*" + +"@types/pouchdb-replication@*": + version "6.4.4" + resolved "https://registry.yarnpkg.com/@types/pouchdb-replication/-/pouchdb-replication-6.4.4.tgz#743406c90f13a988fa3e346ea74ce40acd170d00" + integrity sha512-BsE5LKpjJK4iAf6Fx5kyrMw+33V+Ip7uWldUnU2BYrrvtR+MLD22dcImm7DZN1st2wPPb91i0XEnQzvP0w1C/Q== + dependencies: + "@types/pouchdb-core" "*" + "@types/pouchdb-find" "*" + +"@types/pouchdb@^6.4.0": + version "6.4.0" + resolved "https://registry.yarnpkg.com/@types/pouchdb/-/pouchdb-6.4.0.tgz#f9c41ca64b23029f9bf2eb4bf6956e6431cb79f8" + integrity sha512-eGCpX+NXhd5VLJuJMzwe3L79fa9+IDTrAG3CPaf4s/31PD56hOrhDJTSmRELSXuiqXr6+OHzzP0PldSaWsFt7w== + dependencies: + "@types/pouchdb-adapter-cordova-sqlite" "*" + "@types/pouchdb-adapter-fruitdown" "*" + "@types/pouchdb-adapter-http" "*" + "@types/pouchdb-adapter-idb" "*" + "@types/pouchdb-adapter-leveldb" "*" + "@types/pouchdb-adapter-localstorage" "*" + "@types/pouchdb-adapter-memory" "*" + "@types/pouchdb-adapter-node-websql" "*" + "@types/pouchdb-adapter-websql" "*" + "@types/pouchdb-browser" "*" + "@types/pouchdb-core" "*" + "@types/pouchdb-http" "*" + "@types/pouchdb-mapreduce" "*" + "@types/pouchdb-node" "*" + "@types/pouchdb-replication" "*" + "@types/prettier@^2.1.5": version "2.6.3" resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.3.tgz#68ada76827b0010d0db071f739314fa429943d0a" From 56956dba4fd9c15217b367a99d70d4df97175a31 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 23 Jun 2022 20:27:20 +0100 Subject: [PATCH 2/6] Adding a class wrapper incase using get/put functions multiple times, functions like the PouchDB constructor. 
---
 .../src/cache/tests/writethrough.spec.js      | 12 +++++------
 .../backend-core/src/cache/writethrough.ts    | 21 ++++++++++++++++---
 2 files changed, 24 insertions(+), 9 deletions(-)

diff --git a/packages/backend-core/src/cache/tests/writethrough.spec.js b/packages/backend-core/src/cache/tests/writethrough.spec.js
index 6a949a7489..a0ada8b51b 100644
--- a/packages/backend-core/src/cache/tests/writethrough.spec.js
+++ b/packages/backend-core/src/cache/tests/writethrough.spec.js
@@ -1,5 +1,5 @@
 require("../../../tests/utilities/TestConfiguration")
-const { get, put } = require("../writethrough")
+const { Writethrough } = require("../writethrough")
 const { dangerousGetDB } = require("../../db")
 const tk = require("timekeeper")
 
@@ -9,20 +9,20 @@ tk.freeze(START_DATE)
 const DELAY = 5000
 
 const db = dangerousGetDB("test")
+const writethrough = new Writethrough(db)
 
 describe("writethrough", () => {
-
   describe("put", () => {
     let first
     it("should be able to store, will go to DB", async () => {
-      const response = await put(db,{ _id: "test", value: 1 }, DELAY)
+      const response = await writethrough.put({ _id: "test", value: 1 }, DELAY)
       const output = await db.get(response._id)
       first = output
       expect(output.value).toBe(1)
     })
 
     it("second put shouldn't update DB", async () => {
-      const response = await put(db, { ...first, value: 2 }, DELAY)
+      const response = await writethrough.put({ ...first, value: 2 }, DELAY)
       const output = await db.get(response._id)
       expect(first._rev).toBe(output._rev)
       expect(output.value).toBe(1)
@@ -30,7 +30,7 @@ describe("writethrough", () => {
 
     it("should put it again after delay period", async () => {
       tk.freeze(START_DATE + DELAY + 1)
-      const response = await put(db, { ...first, value: 3 }, DELAY)
+      const response = await writethrough.put({ ...first, value: 3 }, DELAY)
       const output = await db.get(response._id)
       expect(response._rev).not.toBe(first._rev)
       expect(output.value).toBe(3)
@@ -39,7 +39,7 @@ describe("writethrough", () => {
 
   describe("get", () => {
     it("should be able to retrieve", async () => {
-      const response = await get(db, "test")
+      const response = await writethrough.get("test")
       expect(response.value).toBe(3)
     })
   })
diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts
index 85e3c9e50e..02932d0c2c 100644
--- a/packages/backend-core/src/cache/writethrough.ts
+++ b/packages/backend-core/src/cache/writethrough.ts
@@ -21,11 +21,11 @@ function makeCacheItem(value: any, lastWrite: number | null = null): CacheItem {
   return { value, lastWrite: lastWrite || Date.now() }
 }
 
-exports.put = async (
+export async function put(
   db: PouchDB.Database,
   value: any,
   writeRateMs: number = DEFAULT_WRITE_RATE_MS
-) => {
+) {
   const cache = await getCache()
   const key = value._id
   let cacheItem: CacheItem | undefined = await cache.get(key)
@@ -46,7 +46,7 @@ exports.put = async (
   return output
 }
 
-exports.get = async (db: PouchDB.Database, id: string): Promise<any> => {
+export async function get(db: PouchDB.Database, id: string): Promise<any> {
   const cache = await getCache()
   let cacheItem: CacheItem = await cache.get(id)
   if (!cacheItem) {
@@ -56,3 +56,18 @@ exports.get = async (db: PouchDB.Database, id: string): Promise<any> => {
   return cacheItem.value
 }
+
+export class Writethrough {
+  db: PouchDB.Database
+  constructor(db: PouchDB.Database) {
+    this.db = db
+  }
+
+  async put(value: any, writeRateMs: number = DEFAULT_WRITE_RATE_MS) {
+    return put(this.db, value, writeRateMs)
+  }
+
+  async get(id: string) {
+    return get(this.db, id)
+  }
+}
From 
cd6a92994be71707aee25cd67962f320b09acd83 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 24 Jun 2022 12:08:29 +0100 Subject: [PATCH 3/6] Fixing test case and review comments. --- packages/backend-core/package.json | 2 +- packages/backend-core/src/cache/generic.js | 16 ++++++++++------ packages/backend-core/yarn.lock | 7 +------ 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index b2b777958b..440deae8ae 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -63,7 +63,7 @@ "@types/koa": "2.0.52", "@types/node": "14.18.20", "@types/node-fetch": "2.6.1", - "@types/pouchdb": "^6.4.0", + "@types/pouchdb": "6.4.0", "@types/redlock": "4.0.3", "@types/semver": "7.3.7", "@types/tar-fs": "2.0.1", diff --git a/packages/backend-core/src/cache/generic.js b/packages/backend-core/src/cache/generic.js index cffcdebb9a..e2f3915339 100644 --- a/packages/backend-core/src/cache/generic.js +++ b/packages/backend-core/src/cache/generic.js @@ -17,9 +17,13 @@ exports.TTL = { ONE_DAY: 86400, } -exports.keys = GENERIC.keys -exports.get = GENERIC.get -exports.store = GENERIC.store -exports.delete = GENERIC.delete -exports.withCache = GENERIC.withCache -exports.bustCache = GENERIC.bustCache +function performExport(funcName) { + return (...args) => GENERIC[funcName](...args) +} + +exports.keys = performExport("keys") +exports.get = performExport("get") +exports.store = performExport("store") +exports.delete = performExport("delete") +exports.withCache = performExport("withCache") +exports.bustCache = performExport("bustCache") diff --git a/packages/backend-core/yarn.lock b/packages/backend-core/yarn.lock index ebbceae60e..e40cddc468 100644 --- a/packages/backend-core/yarn.lock +++ b/packages/backend-core/yarn.lock @@ -291,11 +291,6 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/types@^1.0.209": - version "1.0.210" - resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.0.210.tgz#b28f9d7b37d2cbc6b75ae8de201958ccc540cf06" - integrity sha512-16foNHGAlJxvh4IpPEPw9eZyIvA2n9nssrNu54Ag73E85A+mIiZ6NGHrw+mojrdC1yaSrj/xeQ461iYXdxT6/g== - "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" @@ -922,7 +917,7 @@ "@types/pouchdb-core" "*" "@types/pouchdb-find" "*" -"@types/pouchdb@^6.4.0": +"@types/pouchdb@6.4.0": version "6.4.0" resolved "https://registry.yarnpkg.com/@types/pouchdb/-/pouchdb-6.4.0.tgz#f9c41ca64b23029f9bf2eb4bf6956e6431cb79f8" integrity sha512-eGCpX+NXhd5VLJuJMzwe3L79fa9+IDTrAG3CPaf4s/31PD56hOrhDJTSmRELSXuiqXr6+OHzzP0PldSaWsFt7w== From 389856795d7a1c5506d13c31ec91976a2c7454f3 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 24 Jun 2022 13:34:00 +0100 Subject: [PATCH 4/6] Updating writethrough cache a bit to make sure it implements the PouchDB API properly. 
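A sketch of the response parity this aims for (rev values are illustrative):

    const response = await writethrough.put({ _id: "doc1", value: 1 })
    // same shape as PouchDB's db.put(): { ok: true, id: "doc1", rev: "1-..." }

    // remove mirrors db.remove(), taking either a doc or an id/rev pair
    await writethrough.remove("doc1", response.rev)

Callers can then treat a Writethrough the same way they would the underlying
PouchDB database for put/get/remove.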
--- packages/backend-core/cache.js | 1 + .../src/cache/tests/writethrough.spec.js | 16 ++--- .../backend-core/src/cache/writethrough.ts | 62 ++++++++++++++----- 3 files changed, 54 insertions(+), 25 deletions(-) diff --git a/packages/backend-core/cache.js b/packages/backend-core/cache.js index 932fd7b901..6b319357c4 100644 --- a/packages/backend-core/cache.js +++ b/packages/backend-core/cache.js @@ -3,5 +3,6 @@ const generic = require("./src/cache/generic") module.exports = { user: require("./src/cache/user"), app: require("./src/cache/appMetadata"), + writethrough: require("./src/cache/writethrough"), ...generic, } diff --git a/packages/backend-core/src/cache/tests/writethrough.spec.js b/packages/backend-core/src/cache/tests/writethrough.spec.js index a0ada8b51b..eafa002b3e 100644 --- a/packages/backend-core/src/cache/tests/writethrough.spec.js +++ b/packages/backend-core/src/cache/tests/writethrough.spec.js @@ -9,30 +9,30 @@ tk.freeze(START_DATE) const DELAY = 5000 const db = dangerousGetDB("test") -const writethrough = new Writethrough(db) +const writethrough = new Writethrough(db, DELAY) describe("writethrough", () => { describe("put", () => { let first it("should be able to store, will go to DB", async () => { - const response = await writethrough.put({ _id: "test", value: 1 }, DELAY) - const output = await db.get(response._id) + const response = await writethrough.put({ _id: "test", value: 1 }) + const output = await db.get(response.id) first = output expect(output.value).toBe(1) }) it("second put shouldn't update DB", async () => { - const response = await writethrough.put({ ...first, value: 2 }, DELAY) - const output = await db.get(response._id) + const response = await writethrough.put({ ...first, value: 2 }) + const output = await db.get(response.id) expect(first._rev).toBe(output._rev) expect(output.value).toBe(1) }) it("should put it again after delay period", async () => { tk.freeze(START_DATE + DELAY + 1) - const response = await writethrough.put({ ...first, value: 3 }, DELAY) - const output = await db.get(response._id) - expect(response._rev).not.toBe(first._rev) + const response = await writethrough.put({ ...first, value: 3 }) + const output = await db.get(response.id) + expect(response.rev).not.toBe(first._rev) expect(output.value).toBe(3) }) }) diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts index 02932d0c2c..472c78a72d 100644 --- a/packages/backend-core/src/cache/writethrough.ts +++ b/packages/backend-core/src/cache/writethrough.ts @@ -5,7 +5,7 @@ const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null interface CacheItem { - value: any + doc: any lastWrite: number } @@ -17,57 +17,85 @@ async function getCache() { return CACHE } -function makeCacheItem(value: any, lastWrite: number | null = null): CacheItem { - return { value, lastWrite: lastWrite || Date.now() } +function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem { + return { doc, lastWrite: lastWrite || Date.now() } } export async function put( db: PouchDB.Database, - value: any, + doc: any, writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { const cache = await getCache() - const key = value._id + const key = doc._id let cacheItem: CacheItem | undefined = await cache.get(key) const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs - let output = value + let output = doc if (updateDb) { - // value should contain the _id and _rev - const response = await db.put(value) + // doc should contain the _id 
and _rev
+    const response = await db.put(doc)
     output = {
-      ...value,
+      ...doc,
       _id: response.id,
       _rev: response.rev,
     }
   }
   // if we are updating the DB then need to set the lastWrite to now
-  cacheItem = makeCacheItem(value, updateDb ? null : cacheItem?.lastWrite)
+  cacheItem = makeCacheItem(output, updateDb ? null : cacheItem?.lastWrite)
   await cache.store(key, cacheItem)
-  return output
+  return { ok: true, id: output._id, rev: output._rev }
 }
 
 export async function get(db: PouchDB.Database, id: string): Promise<any> {
   const cache = await getCache()
   let cacheItem: CacheItem = await cache.get(id)
   if (!cacheItem) {
-    const value = await db.get(id)
-    cacheItem = makeCacheItem(value)
+    const doc = await db.get(id)
+    cacheItem = makeCacheItem(doc)
     await cache.store(id, cacheItem)
   }
-  return cacheItem.value
+  return cacheItem.doc
+}
+
+export async function remove(
+  db: PouchDB.Database,
+  docOrId: any,
+  rev?: any
+): Promise<void> {
+  const cache = await getCache()
+  if (!docOrId) {
+    throw new Error("No ID/Rev provided.")
+  }
+  const id = typeof docOrId === "string" ? docOrId : docOrId._id
+  rev = typeof docOrId === "string" ? rev : docOrId._rev
+  try {
+    await cache.delete(id)
+  } finally {
+    await db.remove(id, rev)
+  }
 }
 
 export class Writethrough {
   db: PouchDB.Database
-  constructor(db: PouchDB.Database) {
+  writeRateMs: number
+
+  constructor(
+    db: PouchDB.Database,
+    writeRateMs: number = DEFAULT_WRITE_RATE_MS
+  ) {
     this.db = db
+    this.writeRateMs = writeRateMs
   }
 
-  async put(value: any, writeRateMs: number = DEFAULT_WRITE_RATE_MS) {
-    return put(this.db, value, writeRateMs)
+  async put(doc: any) {
+    return put(this.db, doc, this.writeRateMs)
   }
 
   async get(id: string) {
     return get(this.db, id)
   }
+
+  async remove(docOrId: any, rev?: any) {
+    return remove(this.db, docOrId, rev)
+  }
 }

From b5f0d7c4fda4028db0fe4d837a6592230ee2704f Mon Sep 17 00:00:00 2001
From: mike12345567 
Date: Fri, 24 Jun 2022 14:28:45 +0100
Subject: [PATCH 5/6] Adding a case to handle multi-DB setups, confirming that
 storing the same doc ID to different databases results in different cache
 keys.

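A sketch of the keying scheme, using the database names from the updated test:

    // makeCacheKey prefixes the doc ID with the database name, so the same
    // _id written via two different databases lands under two cache keys:
    makeCacheKey(db, "db1")   // "test" + "db1"  -> "testdb1"
    makeCacheKey(db2, "db1")  // "test2" + "db1" -> "test2db1"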
---
 .../src/cache/tests/writethrough.spec.js      | 14 +++++++-
 .../backend-core/src/cache/writethrough.ts    | 34 +++++++++++++------
 2 files changed, 36 insertions(+), 12 deletions(-)

diff --git a/packages/backend-core/src/cache/tests/writethrough.spec.js b/packages/backend-core/src/cache/tests/writethrough.spec.js
index eafa002b3e..68db24b325 100644
--- a/packages/backend-core/src/cache/tests/writethrough.spec.js
+++ b/packages/backend-core/src/cache/tests/writethrough.spec.js
@@ -9,7 +9,8 @@ tk.freeze(START_DATE)
 const DELAY = 5000
 
 const db = dangerousGetDB("test")
-const writethrough = new Writethrough(db, DELAY)
+const db2 = dangerousGetDB("test2")
+const writethrough = new Writethrough(db, DELAY), writethrough2 = new Writethrough(db2, DELAY)
 
 describe("writethrough", () => {
   describe("put", () => {
@@ -43,5 +44,16 @@ describe("writethrough", () => {
       expect(response.value).toBe(3)
     })
   })
+
+  describe("same doc, different databases (tenancy)", () => {
+    it("should be able to write to two different databases", async () => {
+      const resp1 = await writethrough.put({ _id: "db1", value: "first" })
+      const resp2 = await writethrough2.put({ _id: "db1", value: "second" })
+      expect(resp1.rev).toBeDefined()
+      expect(resp2.rev).toBeDefined()
+      expect((await db.get("db1")).value).toBe("first")
+      expect((await db2.get("db1")).value).toBe("second")
+    })
+  })
 })
 
diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts
index 472c78a72d..b9817d7d93 100644
--- a/packages/backend-core/src/cache/writethrough.ts
+++ b/packages/backend-core/src/cache/writethrough.ts
@@ -17,6 +17,10 @@ async function getCache() {
   return CACHE
 }
 
+function makeCacheKey(db: PouchDB.Database, key: string) {
+  return db.name + key
+}
+
 function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem {
   return { doc, lastWrite: lastWrite || Date.now() }
 }
@@ -28,31 +32,39 @@ export async function put(
 ) {
   const cache = await getCache()
   const key = doc._id
-  let cacheItem: CacheItem | undefined = await cache.get(key)
+  let cacheItem: CacheItem | undefined = await cache.get(makeCacheKey(db, key))
   const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs
   let output = doc
   if (updateDb) {
-    // doc should contain the _id and _rev
-    const response = await db.put(doc)
-    output = {
-      ...doc,
-      _id: response.id,
-      _rev: response.rev,
+    try {
+      // doc should contain the _id and _rev
+      const response = await db.put(doc)
+      output = {
+        ...doc,
+        _id: response.id,
+        _rev: response.rev,
+      }
+    } catch (err: any) {
+      // ignore 409s, some other high speed write has hit it first, just move straight to caching
+      if (err.status !== 409) {
+        throw err
+      }
     }
   }
   // if we are updating the DB then need to set the lastWrite to now
   cacheItem = makeCacheItem(output, updateDb ? null : cacheItem?.lastWrite)
-  await cache.store(key, cacheItem)
+  await cache.store(makeCacheKey(db, key), cacheItem)
   return { ok: true, id: output._id, rev: output._rev }
 }
 
 export async function get(db: PouchDB.Database, id: string): Promise<any> {
   const cache = await getCache()
-  let cacheItem: CacheItem = await cache.get(id)
+  const cacheKey = makeCacheKey(db, id)
+  let cacheItem: CacheItem = await cache.get(cacheKey)
   if (!cacheItem) {
     const doc = await db.get(id)
     cacheItem = makeCacheItem(doc)
-    await cache.store(id, cacheItem)
+    await cache.store(cacheKey, cacheItem)
   }
   return cacheItem.doc
 }
@@ -69,7 +81,7 @@ export async function remove(
   const id = typeof docOrId === "string" ? docOrId : docOrId._id
   rev = typeof docOrId === "string" ? rev : docOrId._rev
   try {
-    await cache.delete(id)
+    await cache.delete(makeCacheKey(db, id))
   } finally {
     await db.remove(id, rev)
   }

From 53e9f79c65fe9f0fa8bc51b1df6a0bf5c4284713 Mon Sep 17 00:00:00 2001
From: mike12345567 
Date: Fri, 24 Jun 2022 14:42:15 +0100
Subject: [PATCH 6/6] Adding an explanation of the difference between
 Databases and SelectableDatabases in Redis.

---
 packages/backend-core/src/redis/utils.js | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/packages/backend-core/src/redis/utils.js b/packages/backend-core/src/redis/utils.js
index 5e677c4f38..90b3561f31 100644
--- a/packages/backend-core/src/redis/utils.js
+++ b/packages/backend-core/src/redis/utils.js
@@ -6,6 +6,14 @@ const SEPARATOR = "-"
 const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL
 const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
 
+/**
+ * These Redis databases help us to segment a Redis keyspace by prepending the
+ * specified database name onto the cache key. This means that a single real Redis database
+ * can be split up a bit, allowing us to use scans on small databases to find some particular
+ * keys within.
+ * If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out
+ * of the default keyspace and use a separate one - the SelectableDatabases can be used for this.
+ */
 exports.Databases = {
   PW_RESETS: "pwReset",
   VERIFICATIONS: "verification",
@@ -22,6 +30,15 @@ exports.Databases = {
   WRITE_THROUGH: "writeThrough",
 }
 
+/**
+ * These define the numeric Redis databases that can be accessed with the SELECT command -
+ * (https://redis.io/commands/select/). By default a Redis server/cluster will have 16 selectable
+ * databases, increasing this count increases the amount of CPU/memory required to run the server.
+ * Ideally new Redis keyspaces should be used sparingly, only when absolutely necessary for performance
+ * to be maintained. Generally a keyspace can grow to be very large if scans are not needed or desired,
+ * but if you need to walk through all values in a database periodically then a separate selectable
+ * keyspace should be used.
+ */
 exports.SelectableDatabases = {
   DEFAULT: 0,
   WRITE_THROUGH: 1,
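For context, the two mechanisms compose in the writethrough client set up
earlier in this series (src/redis/init.js):

    writethroughClient = await new Client(
      utils.Databases.WRITE_THROUGH,          // name prefixed onto every key
      utils.SelectableDatabases.WRITE_THROUGH // numeric keyspace used via SELECT (1)
    ).init()

With SEPARATOR being "-", writethrough entries are stored under keys of the
form "writeThrough-<key>" inside selectable database 1, keeping this
high-volume data out of the default keyspace 0.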