From d90123e8de29a40ebfc15542ded58701de722da7 Mon Sep 17 00:00:00 2001 From: Dean Date: Thu, 8 Feb 2024 11:05:45 +0000 Subject: [PATCH 001/174] Wip --- .../NewScreen/CreateScreenModal.svelte | 83 +++++++++++++++---- 1 file changed, 69 insertions(+), 14 deletions(-) diff --git a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte index a9d64afd19..ab8ccecf6e 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte @@ -2,6 +2,7 @@ import ScreenDetailsModal from "components/design/ScreenDetailsModal.svelte" import DatasourceModal from "./DatasourceModal.svelte" import ScreenRoleModal from "./ScreenRoleModal.svelte" + import FormTypeModal from "./FormTypeModal.svelte" import sanitizeUrl from "builderStore/store/screenTemplates/utils/sanitizeUrl" import { Modal, notifications } from "@budibase/bbui" import { store } from "builderStore" @@ -19,12 +20,17 @@ let screenDetailsModal let datasourceModal let screenAccessRoleModal + let formTypeModal // Cache variables for workflow let screenAccessRole = Roles.BASIC + let selectedTemplates = null + + let selectedDatasources = null let blankScreenUrl = null let screenMode = null + let formType = null // Creates an array of screens, checking and sanitising their URLs const createScreens = async ({ screens, screenAccessRole }) => { @@ -56,20 +62,21 @@ screen.routing.roleId = screenAccessRole // Create the screen - const response = await store.actions.screens.save(screen) - screenId = response._id + // const response = await store.actions.screens.save(screen) + // screenId = response._id // Add link in layout. 
We only ever actually create 1 screen now, even // for autoscreens, so it's always safe to do this. - await store.actions.links.save( - screen.routing.route, - capitalise(screen.routing.route.split("/")[1]) - ) + // await store.actions.links.save( + // screen.routing.route, + // capitalise(screen.routing.route.split("/")[1]) + // ) + console.log(screen) } // Go to new screen - $goto(`./${screenId}`) - store.actions.screens.select(screenId) + //$goto(`./${screenId}`) + //store.actions.screens.select(screenId) } catch (error) { console.log(error) notifications.error("Error creating screens") @@ -103,13 +110,15 @@ // Handler for NewScreenModal export const show = newMode => { mode = newMode - selectedTemplates = null + // selectedTemplates = null + selectedDatasources = null blankScreenUrl = null screenMode = mode pendingScreen = null screenAccessRole = Roles.BASIC + formType = null - if (mode === "table" || mode === "grid") { + if (mode === "table" || mode === "grid" || mode === "form") { datasourceModal.show() } else if (mode === "blank") { let templates = getTemplates($tables.list) @@ -124,8 +133,9 @@ } // Handler for DatasourceModal confirmation, move to screen access select - const confirmScreenDatasources = async ({ templates }) => { - selectedTemplates = templates + const confirmScreenDatasources = async ({ datasources }) => { + selectedDatasources = datasources + console.log("confirmScreenDatasources ", datasources) screenAccessRoleModal.show() } @@ -136,6 +146,14 @@ screenTemplate.autoTableId = template.resourceId return screenTemplate }) + console.log("selectedTemplates ", selectedTemplates) + /* + + id : "ROW_LIST_TEMPLATE" + name : "Employees - List" + resourceId : "ta_bb_employee" + + */ await createScreens({ screens, screenAccessRole }) } @@ -175,8 +193,14 @@ datasourceModal.show() } } + window.test = () => { + formTypeModal.show() + } + { + if (screenMode === "form") { + formTypeModal.show() + } else { + confirmScreenCreation() + } + }} 
bind:screenAccessRole + onCancel={roleSelectBack} screenUrl={blankScreenUrl} + confirmText={screenMode === "form" ? "Confirm" : "Done"} /> @@ -200,3 +231,27 @@ initialUrl={blankScreenUrl} /> + + { + console.log("hide") + //formType = null + }} +> + { + console.log("test confirm") + }} + onCancel={() => { + console.log("cancel") + formTypeModal.hide() + screenAccessRoleModal.show() + }} + on:select={e => { + console.log("form type selection ", e.detail) + formType = e.detail + }} + type={formType} + /> + From e9e5281e820589f82b091db479b5b72dc2b94db8 Mon Sep 17 00:00:00 2001 From: Dean Date: Wed, 14 Feb 2024 12:11:24 +0000 Subject: [PATCH 002/174] Initial commit form screen flow and tour refactor --- .../builder/src/builderStore/dataBinding.js | 2 +- .../src/builderStore/store/frontend.js | 6 +- .../store/screenTemplates/formScreen.js | 43 ++++++ .../store/screenTemplates/index.js | 7 +- .../src/components/deploy/AppActions.svelte | 9 +- .../ButtonConfiguration/ButtonSetting.svelte | 2 +- .../EditComponentPopover.svelte | 22 +-- .../controls/EditComponentPopover/index.js | 18 +++ .../FieldConfiguration/FieldSetting.svelte | 2 +- .../controls/FormStepConfiguration.svelte | 4 +- .../FieldSetting.svelte | 2 +- .../PrimaryColumnFieldSetting.svelte | 2 +- .../settings/controls/PropertyControl.svelte | 25 +++- .../portal/onboarding/TourPopover.svelte | 3 +- .../portal/onboarding/TourWrap.svelte | 29 ++-- .../steps/NewViewUpdateFormRowId.svelte | 17 +++ .../portal/onboarding/steps/index.js | 1 + .../src/components/portal/onboarding/tours.js | 119 +++++++++++++-- .../builder/app/[application]/_layout.svelte | 2 +- .../Component/ComponentSettingsPanel.svelte | 35 ++++- .../Component/ComponentSettingsSection.svelte | 4 +- .../[screenId]/_components/AppPreview.svelte | 2 +- .../NewScreen/CreateScreenModal.svelte | 137 +++++++++++------- .../NewScreen/DatasourceModal.svelte | 30 ++-- .../NewScreen/FormTypeModal.svelte | 78 ++++++++++ .../NewScreen/ScreenRoleModal.svelte | 3 +- 
.../NewScreen/{ => images}/blank.png | Bin .../_components/NewScreen/images/form.png | Bin 0 -> 22892 bytes .../NewScreen/{ => images}/grid.png | Bin .../NewScreen/{ => images}/table.png | Bin .../design/_components/NewScreen/index.svelte | 17 ++- packages/types/src/api/web/auth.ts | 1 + packages/types/src/documents/global/user.ts | 1 + .../worker/src/api/routes/validation/users.ts | 1 + 34 files changed, 478 insertions(+), 146 deletions(-) create mode 100644 packages/builder/src/builderStore/store/screenTemplates/formScreen.js rename packages/builder/src/components/design/settings/controls/{ => EditComponentPopover}/EditComponentPopover.svelte (79%) create mode 100644 packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js create mode 100644 packages/builder/src/components/portal/onboarding/steps/NewViewUpdateFormRowId.svelte create mode 100644 packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/FormTypeModal.svelte rename packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/{ => images}/blank.png (100%) create mode 100644 packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/images/form.png rename packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/{ => images}/grid.png (100%) rename packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/{ => images}/table.png (100%) diff --git a/packages/builder/src/builderStore/dataBinding.js b/packages/builder/src/builderStore/dataBinding.js index edea3b9ec7..9cb7b3311b 100644 --- a/packages/builder/src/builderStore/dataBinding.js +++ b/packages/builder/src/builderStore/dataBinding.js @@ -1131,7 +1131,7 @@ export const getAllStateVariables = () => { "@budibase/standard-components/multistepformblockstep" ) - steps.forEach(step => { + steps?.forEach(step => { parseComponentSettings(stepDefinition, step) }) }) diff --git 
a/packages/builder/src/builderStore/store/frontend.js b/packages/builder/src/builderStore/store/frontend.js index 55208bb97e..456f0658fc 100644 --- a/packages/builder/src/builderStore/store/frontend.js +++ b/packages/builder/src/builderStore/store/frontend.js @@ -75,7 +75,7 @@ const INITIAL_FRONTEND_STATE = { theme: "", customTheme: {}, previewDevice: "desktop", - highlightedSettingKey: null, + highlightedSetting: null, propertyFocus: null, builderSidePanel: false, hasLock: true, @@ -1460,10 +1460,10 @@ export const getFrontendStore = () => { }, }, settings: { - highlight: key => { + highlight: (key, type) => { store.update(state => ({ ...state, - highlightedSettingKey: key, + highlightedSetting: { key, type: type || "info" }, })) }, propertyFocus: key => { diff --git a/packages/builder/src/builderStore/store/screenTemplates/formScreen.js b/packages/builder/src/builderStore/store/screenTemplates/formScreen.js new file mode 100644 index 0000000000..8ce46cd002 --- /dev/null +++ b/packages/builder/src/builderStore/store/screenTemplates/formScreen.js @@ -0,0 +1,43 @@ +import { Screen } from "./utils/Screen" +import { Component } from "./utils/Component" +import sanitizeUrl from "./utils/sanitizeUrl" + +export const FORM_TEMPLATE = "FORM_TEMPLATE" +export const formUrl = datasource => sanitizeUrl(`/${datasource.label}-form`) + +// Mode not really necessary +export default function (datasources, config) { + if (!Array.isArray(datasources)) { + return [] + } + return datasources.map(datasource => { + return { + name: `${datasource.label} - Form`, + create: () => createScreen(datasource, config), + id: FORM_TEMPLATE, + resourceId: datasource.resourceId, + } + }) +} + +const generateMultistepFormBlock = (dataSource, { actionType } = {}) => { + const multistepFormBlock = new Component( + "@budibase/standard-components/multistepformblock" + ) + multistepFormBlock + .customProps({ + actionType, + dataSource, + steps: [{}], + }) + .instanceName(`${dataSource.label} - Multistep 
Form block`) + return multistepFormBlock +} + +const createScreen = (datasource, config) => { + return new Screen() + .route(formUrl(datasource)) + .instanceName(`${datasource.label} - Form`) + .addChild(generateMultistepFormBlock(datasource, config)) + .json() +} diff --git a/packages/builder/src/builderStore/store/screenTemplates/index.js b/packages/builder/src/builderStore/store/screenTemplates/index.js index 3ff42fdec6..fff31cc070 100644 --- a/packages/builder/src/builderStore/store/screenTemplates/index.js +++ b/packages/builder/src/builderStore/store/screenTemplates/index.js @@ -1,7 +1,11 @@ import rowListScreen from "./rowListScreen" import createFromScratchScreen from "./createFromScratchScreen" +import formScreen from "./formScreen" -const allTemplates = datasources => [...rowListScreen(datasources)] +const allTemplates = datasources => [ + ...rowListScreen(datasources), + ...formScreen(datasources), +] // Allows us to apply common behaviour to all create() functions const createTemplateOverride = template => () => { @@ -19,6 +23,7 @@ export default datasources => { }) const fromScratch = enrichTemplate(createFromScratchScreen) const tableTemplates = allTemplates(datasources).map(enrichTemplate) + return [ fromScratch, ...tableTemplates.sort((templateA, templateB) => { diff --git a/packages/builder/src/components/deploy/AppActions.svelte b/packages/builder/src/components/deploy/AppActions.svelte index 7d14fd0e87..bf59c3a230 100644 --- a/packages/builder/src/components/deploy/AppActions.svelte +++ b/packages/builder/src/components/deploy/AppActions.svelte @@ -156,9 +156,10 @@ {/if}
@@ -204,7 +205,7 @@
- + Publish - import EditComponentPopover from "../EditComponentPopover.svelte" + import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte" import { Icon } from "@budibase/bbui" import { runtimeToReadableBinding } from "builderStore/dataBinding" import { isJSBinding } from "@budibase/string-templates" diff --git a/packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte b/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte similarity index 79% rename from packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte rename to packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte index 04bb925873..af535a00f0 100644 --- a/packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte +++ b/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte @@ -3,7 +3,8 @@ import { store } from "builderStore" import { cloneDeep } from "lodash/fp" import { createEventDispatcher, getContext } from "svelte" - import ComponentSettingsSection from "../../../../pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte" + import { customPositionHandler } from "." 
+ import ComponentSettingsSection from "../../../../../pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte" export let anchor export let componentInstance @@ -59,25 +60,6 @@ dispatch("change", nestedComponentInstance) } - - const customPositionHandler = (anchorBounds, eleBounds, cfg) => { - let { left, top } = cfg - let percentageOffset = 30 - // left-outside - left = anchorBounds.left - eleBounds.width - 18 - - // shift up from the anchor, if space allows - let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset - let defaultTop = anchorBounds.top - offsetPos - - if (window.innerHeight - defaultTop < eleBounds.height) { - top = window.innerHeight - eleBounds.height - 5 - } else { - top = anchorBounds.top - offsetPos - } - - return { ...cfg, left, top } - } { + let { left, top } = cfg + let percentageOffset = 30 + // left-outside + left = anchorBounds.left - eleBounds.width - 18 + + // shift up from the anchor, if space allows + let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset + let defaultTop = anchorBounds.top - offsetPos + + if (window.innerHeight - defaultTop < eleBounds.height) { + top = window.innerHeight - eleBounds.height - 5 + } else { + top = anchorBounds.top - offsetPos + } + + return { ...cfg, left, top } +} diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte index 8c40c455c8..94ce698ff1 100644 --- a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte +++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte @@ -1,5 +1,5 @@
{#if label && !labelHidden} @@ -115,6 +120,16 @@
diff --git a/packages/builder/src/components/portal/onboarding/steps/index.js b/packages/builder/src/components/portal/onboarding/steps/index.js index 8e27748f36..6694ce97a7 100644 --- a/packages/builder/src/components/portal/onboarding/steps/index.js +++ b/packages/builder/src/components/portal/onboarding/steps/index.js @@ -1,3 +1,4 @@ export { default as OnboardingData } from "./OnboardingData.svelte" export { default as OnboardingDesign } from "./OnboardingDesign.svelte" export { default as OnboardingPublish } from "./OnboardingPublish.svelte" +export { default as NewViewUpdateFormRowId } from "./NewViewUpdateFormRowId.svelte" diff --git a/packages/builder/src/components/portal/onboarding/tours.js b/packages/builder/src/components/portal/onboarding/tours.js index 55fd4c4a9b..fdc00bf32d 100644 --- a/packages/builder/src/components/portal/onboarding/tours.js +++ b/packages/builder/src/components/portal/onboarding/tours.js @@ -2,8 +2,14 @@ import { get } from "svelte/store" import { store } from "builderStore" import { auth } from "stores/portal" import analytics from "analytics" -import { OnboardingData, OnboardingDesign, OnboardingPublish } from "./steps" +import { + OnboardingData, + OnboardingDesign, + OnboardingPublish, + NewViewUpdateFormRowId, +} from "./steps" import { API } from "api" +import { customPositionHandler } from "components/design/settings/controls/EditComponentPopover" const ONBOARDING_EVENT_PREFIX = "onboarding" @@ -14,11 +20,26 @@ export const TOUR_STEP_KEYS = { BUILDER_USER_MANAGEMENT: "builder-user-management", BUILDER_AUTOMATION_SECTION: "builder-automation-section", FEATURE_USER_MANAGEMENT: "feature-user-management", + BUILDER_FORM_CREATE_STEPS: "builder-form-create-steps", + BUILDER_FORM_VIEW_UPDATE_STEPS: "builder-form-view-update-steps", + BUILDER_FORM_ROW_ID: "builder-form-row-id", } export const TOUR_KEYS = { TOUR_BUILDER_ONBOARDING: "builder-onboarding", FEATURE_ONBOARDING: "feature-onboarding", + BUILDER_FORM_CREATE: 
"builder-form-create", + BUILDER_FORM_VIEW_UPDATE: "builder-form-view-update", +} + +const resetTourState = () => { + store.update(state => ({ + ...state, + tourNodes: undefined, + tourKey: undefined, + tourKeyStep: undefined, + onboarding: false, + })) } const endUserOnboarding = async ({ skipped = false } = {}) => { @@ -37,13 +58,7 @@ const endUserOnboarding = async ({ skipped = false } = {}) => { // Update the cached user await auth.getSelf() - store.update(state => ({ - ...state, - tourNodes: undefined, - tourKey: undefined, - tourKeyStep: undefined, - onboarding: false, - })) + resetTourState() } catch (e) { console.error("Onboarding failed", e) return false @@ -52,9 +67,28 @@ const endUserOnboarding = async ({ skipped = false } = {}) => { } } -const tourEvent = eventKey => { +const endTour = async ({ key, skipped = false } = {}) => { + const { tours = {} } = get(auth).user + tours[key] = new Date().toISOString() + + await API.updateSelf({ + tours, + }) + + if (skipped) { + tourEvent(key, skipped) + } + + // Update the cached user + await auth.getSelf() + + resetTourState() +} + +const tourEvent = (eventKey, skipped) => { analytics.captureEvent(`${ONBOARDING_EVENT_PREFIX}:${eventKey}`, { eventSource: EventSource.PORTAL, + skipped, }) } @@ -135,7 +169,74 @@ const getTours = () => { }, ], }, + [TOUR_KEYS.BUILDER_FORM_CREATE]: { + steps: [ + { + id: TOUR_STEP_KEYS.BUILDER_FORM_CREATE_STEPS, + title: "Add multiple steps", + body: `When faced with a sizable form, consider implementing a multi-step + approach to enhance user experience. 
Breaking the form into multiple steps + can significantly improve usability by making the process more digestible for your users.`, + query: "#steps-prop-control-wrap", + onComplete: () => { + store.actions.settings.highlight() + endTour({ key: TOUR_KEYS.BUILDER_FORM_CREATE }) + }, + onLoad: () => { + tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_CREATE_STEPS) + store.actions.settings.highlight("steps", "info") + }, + positionHandler: customPositionHandler, + align: "left-outside", + }, + ], + }, + [TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE]: { + steps: [ + { + id: TOUR_STEP_KEYS.BUILDER_FORM_ROW_ID, + title: "Add row ID to update a row", + layout: NewViewUpdateFormRowId, + query: "#rowId-prop-control-wrap", + onLoad: () => { + tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_ROW_ID) + store.actions.settings.highlight("rowId", "info") + }, + positionHandler: customPositionHandler, + align: "left-outside", + }, + { + id: TOUR_STEP_KEYS.BUILDER_FORM_VIEW_UPDATE_STEPS, + title: "Add multiple steps", + body: `When faced with a sizable form, consider implementing a multi-step + approach to enhance user experience. 
Breaking the form into multiple steps + can significantly improve usability by making the process more digestible for your users.`, + query: "#steps-prop-control-wrap", + onComplete: () => { + store.actions.settings.highlight() + endTour({ key: TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE }) + }, + onLoad: () => { + tourEvent(TOUR_STEP_KEYS.BUILDER_FORM_VIEW_UPDATE_STEPS) + store.actions.settings.highlight("steps", "info") + }, + positionHandler: customPositionHandler, + align: "left-outside", + }, + ], + onSkip: async () => { + store.actions.settings.highlight() + endTour({ key: TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE, skipped: true }) + }, + }, } } export const TOURS = getTours() +export const TOURSBYSTEP = Object.keys(TOURS).reduce((acc, tour) => { + TOURS[tour].steps.forEach(element => { + acc[element.id] = element + acc[element.id]["tour"] = tour + }) + return acc +}, {}) diff --git a/packages/builder/src/pages/builder/app/[application]/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/_layout.svelte index 1df2a90250..5a6e9c941e 100644 --- a/packages/builder/src/pages/builder/app/[application]/_layout.svelte +++ b/packages/builder/src/pages/builder/app/[application]/_layout.svelte @@ -151,7 +151,7 @@
{#each $layout.children as { path, title }} - + import Panel from "components/design/Panel.svelte" import { store, selectedComponent, selectedScreen } from "builderStore" + import { auth } from "stores/portal" import { getComponentName } from "builderStore/componentUtils" import ComponentSettingsSection from "./ComponentSettingsSection.svelte" import DesignSection from "./DesignSection.svelte" import CustomStylesSection from "./CustomStylesSection.svelte" import ConditionalUISection from "./ConditionalUISection.svelte" import { notifications, ActionButton } from "@budibase/bbui" + import TourWrap from "components/portal/onboarding/TourWrap.svelte" + import { + TOUR_STEP_KEYS, + TOUR_KEYS, + } from "components/portal/onboarding/tours.js" import { getBindableProperties, @@ -14,6 +20,12 @@ } from "builderStore/dataBinding" import { capitalise } from "helpers" + const { + BUILDER_FORM_CREATE_STEPS, + BUILDER_FORM_VIEW_UPDATE_STEPS, + BUILDER_FORM_ROW_ID, + } = TOUR_STEP_KEYS + const onUpdateName = async value => { try { await store.actions.components.updateSetting("_instanceName", value) @@ -43,7 +55,6 @@ $: id = $selectedComponent?._id $: id, (section = tabs[0]) - $: componentName = getComponentName(componentInstance) @@ -89,13 +100,21 @@
{#if section == "settings"} - + + + {/if} {#if section == "styles"} updateSetting(setting, val)} - highlighted={$store.highlightedSettingKey === setting.key} + highlighted={$store.highlightedSetting?.key === setting.key + ? $store.highlightedSetting + : null} propertyFocus={$store.propertyFocus === setting.key} info={setting.info} disableBindings={setting.disableBindings} diff --git a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_components/AppPreview.svelte b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_components/AppPreview.svelte index 011980bbe2..c9dc4f8982 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_components/AppPreview.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/_components/AppPreview.svelte @@ -161,7 +161,7 @@ } else if (type === "request-add-component") { toggleAddComponent() } else if (type === "highlight-setting") { - store.actions.settings.highlight(data.setting) + store.actions.settings.highlight(data.setting, "error") // Also scroll setting into view const selector = `#${data.setting}-prop-control` diff --git a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte index 2a2459949d..a61e7551e7 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte @@ -4,14 +4,18 @@ import ScreenRoleModal from "./ScreenRoleModal.svelte" import FormTypeModal from "./FormTypeModal.svelte" import sanitizeUrl from "builderStore/store/screenTemplates/utils/sanitizeUrl" + import rowListScreen from "builderStore/store/screenTemplates/rowListScreen" + import formScreen from 
"builderStore/store/screenTemplates/formScreen" import { Modal, notifications } from "@budibase/bbui" import { store } from "builderStore" import { get } from "svelte/store" import getTemplates from "builderStore/store/screenTemplates" import { tables } from "stores/backend" + import { auth } from "stores/portal" import { Roles } from "constants/backend" import { capitalise } from "helpers" import { goto } from "@roxi/routify" + import { TOUR_KEYS } from "components/portal/onboarding/tours.js" let mode let pendingScreen @@ -25,7 +29,8 @@ // Cache variables for workflow let screenAccessRole = Roles.BASIC - let selectedTemplates = null + let templates = null + let screens = null let selectedDatasources = null let blankScreenUrl = null @@ -40,6 +45,7 @@ try { let screenId + let createdScreens = [] for (let screen of screens) { // Check we aren't clashing with an existing URL @@ -62,21 +68,19 @@ screen.routing.roleId = screenAccessRole // Create the screen - // const response = await store.actions.screens.save(screen) - // screenId = response._id + const response = await store.actions.screens.save(screen) + screenId = response._id + createdScreens.push(response) // Add link in layout. We only ever actually create 1 screen now, even // for autoscreens, so it's always safe to do this. 
- // await store.actions.links.save( - // screen.routing.route, - // capitalise(screen.routing.route.split("/")[1]) - // ) - console.log(screen) + await store.actions.links.save( + screen.routing.route, + capitalise(screen.routing.route.split("/")[1]) + ) } - // Go to new screen - //$goto(`./${screenId}`) - //store.actions.screens.select(screenId) + return createdScreens } catch (error) { console.error(error) notifications.error("Error creating screens") @@ -110,7 +114,8 @@ // Handler for NewScreenModal export const show = newMode => { mode = newMode - // selectedTemplates = null + templates = null + screens = null selectedDatasources = null blankScreenUrl = null screenMode = mode @@ -135,26 +140,24 @@ // Handler for DatasourceModal confirmation, move to screen access select const confirmScreenDatasources = async ({ datasources }) => { selectedDatasources = datasources - console.log("confirmScreenDatasources ", datasources) - screenAccessRoleModal.show() + if (screenMode === "form") { + formTypeModal.show() + } else { + screenAccessRoleModal.show() + } } // Handler for Datasource Screen Creation const completeDatasourceScreenCreation = async () => { - const screens = selectedTemplates.map(template => { + templates = rowListScreen(selectedDatasources) + + const screens = templates.map(template => { let screenTemplate = template.create() screenTemplate.autoTableId = template.resourceId return screenTemplate }) - console.log("selectedTemplates ", selectedTemplates) - /* - - id : "ROW_LIST_TEMPLATE" - name : "Employees - List" - resourceId : "ta_bb_employee" - - */ - await createScreens({ screens, screenAccessRole }) + const createdScreens = await createScreens({ screens, screenAccessRole }) + loadNewScreen(createdScreens) } const confirmScreenBlank = async ({ screenUrl }) => { @@ -171,7 +174,55 @@ return } pendingScreen.routing.route = screenUrl - await createScreens({ screens: [pendingScreen], screenAccessRole }) + const createdScreens = await createScreens({ + 
screens: [pendingScreen], + screenAccessRole, + }) + loadNewScreen(createdScreens) + } + + const onConfirmFormType = () => { + screenAccessRoleModal.show() + } + + const loadNewScreen = createdScreens => { + const lastScreen = createdScreens.slice(-1) + + // Go to new screen + $goto(`./${lastScreen._id}`) + store.actions.screens.select(lastScreen._id) + } + + const confirmFormScreenCreation = async () => { + templates = formScreen(selectedDatasources, { actionType: formType }) + screens = templates.map(template => { + let screenTemplate = template.create() + return screenTemplate + }) + const createdScreens = await createScreens({ screens, screenAccessRole }) + const lastScreen = createdScreens?.slice(-1)?.pop() + const mainComponent = lastScreen?.props?._children?.[0]._id + + if (formType === "Update" || formType === "Create") { + const associatedTour = + formType === "Update" + ? TOUR_KEYS.BUILDER_FORM_VIEW_UPDATE + : TOUR_KEYS.BUILDER_FORM_CREATE + + const tourRequired = !$auth?.user?.tours?.[associatedTour] + if (tourRequired) { + store.update(state => ({ + ...state, + tourStepKey: null, + tourNodes: null, + tourKey: associatedTour, + })) + } + } + + // Go to new screen + $goto(`./${lastScreen._id}/${mainComponent}`) + store.actions.screens.select(lastScreen._id) } // Submit screen config for creation. 
@@ -181,6 +232,8 @@ screenUrl: blankScreenUrl, screenAccessRole, }) + } else if (screenMode === "form") { + confirmFormScreenCreation() } else { completeDatasourceScreenCreation() } @@ -193,30 +246,16 @@ datasourceModal.show() } } - window.test = () => { - formTypeModal.show() - } - - + { - if (screenMode === "form") { - formTypeModal.show() - } else { - confirmScreenCreation() - } + confirmScreenCreation() }} bind:screenAccessRole onCancel={roleSelectBack} @@ -232,24 +271,14 @@ /> - { - console.log("hide") - //formType = null - }} -> + { - console.log("test confirm") - }} + onConfirm={onConfirmFormType} onCancel={() => { - console.log("cancel") formTypeModal.hide() - screenAccessRoleModal.show() + datasourceModal.show() }} on:select={e => { - console.log("form type selection ", e.detail) formType = e.detail }} type={formType} diff --git a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/DatasourceModal.svelte b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/DatasourceModal.svelte index 731c60a406..4348c17312 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/DatasourceModal.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/DatasourceModal.svelte @@ -4,37 +4,33 @@ import ICONS from "components/backend/DatasourceNavigator/icons" import { IntegrationNames } from "constants" import { onMount } from "svelte" - import rowListScreen from "builderStore/store/screenTemplates/rowListScreen" import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte" - export let mode export let onCancel export let onConfirm - export let initialScreens = [] - let selectedScreens = [...initialScreens] + let selectedSources = [] $: filteredSources = $datasources.list?.filter(datasource => { return datasource.source !== IntegrationNames.REST && datasource["entities"] }) const toggleSelection = datasource => { - const { resourceId } = 
datasource - if (selectedScreens.find(s => s.resourceId === resourceId)) { - selectedScreens = selectedScreens.filter( - screen => screen.resourceId !== resourceId + const exists = selectedSources.find( + d => d.resourceId === datasource.resourceId + ) + if (exists) { + selectedSources = selectedSources.filter( + d => d.resourceId === datasource.resourceId ) } else { - selectedScreens = [ - ...selectedScreens, - rowListScreen([datasource], mode)[0], - ] + selectedSources = [...selectedSources, datasource] } } const confirmDatasourceSelection = async () => { await onConfirm({ - templates: selectedScreens, + datasources: selectedSources, }) } @@ -54,7 +50,7 @@ cancelText="Back" onConfirm={confirmDatasourceSelection} {onCancel} - disabled={!selectedScreens.length} + disabled={!selectedSources.length} size="L" > @@ -85,8 +81,8 @@ resourceId: table._id, type: "table", }} - {@const selected = selectedScreens.find( - screen => screen.resourceId === tableDS.resourceId + {@const selected = selectedSources.find( + datasource => datasource.resourceId === tableDS.resourceId )} toggleSelection(tableDS)} @@ -103,7 +99,7 @@ tableId: view.tableId, type: "viewV2", }} - {@const selected = selectedScreens.find( + {@const selected = selectedSources.find( x => x.resourceId === viewDS.resourceId )} + import { ModalContent, Layout, Body, Label } from "@budibase/bbui" + import { createEventDispatcher } from "svelte" + + export let onCancel = () => {} + export let onConfirm = () => {} + export let type + + const dispatch = createEventDispatcher() + + + + + + +
{ + dispatch("select", "Create") + }} + > + Create a new row + For capturing and storing new data from your users +
+
{ + dispatch("select", "Update") + }} + > + Update an existing row + For viewing and updating existing data +
+
{ + dispatch("select", "View") + }} + > + View an existing row + For a read only view of your data +
+
+
+
+ + diff --git a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/ScreenRoleModal.svelte b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/ScreenRoleModal.svelte index 5d73b7961c..9363523a63 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/ScreenRoleModal.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/ScreenRoleModal.svelte @@ -10,6 +10,7 @@ export let onCancel export let screenUrl export let screenAccessRole + export let confirmText = "Done" let error @@ -41,7 +42,7 @@ lgxV!H3f7O5)=RcfUcw{s|5f6=U*RqB=9T8YjjKa^-qL@jEshoj0}~# zr<<*Vvkd^ilp35WrZ_A~HfpXdmr5Wd5-SK_t>&wVJ#G|5ql45gU5mkx6(W@$O*cBK z>CP^A+ic+D?yhi6(g@1%Z~mvfUSg5V`uw5 z_DnyziAg7)4d%xUU~~c6YKAGZqBavA^oHlYf{fE2^|Jn5Z~L{!J;1uEb^+bp9Vvm3 zIdF4tvp2u@DX?O38`BEQv&do_{`PfOz?N?nWC1V#{fc@k(_cBr?uv$9001iPe-99l zlSlN*MD$itl|$SIp`+0V3J&)(0RU70C0QvQ|Fx6g%_KG_PhHtU9{;hHaVR@H$qov- z^e)98@P^?ovp;=5AQ`&zL4@wj4(N|nm`$R~>Rf`@U;i<$-<1*Zo0}r6tTanT=6brz zr*SFrt?GV0wo6&r&6cBt%|oG256@OSfdO~h`W!ETC_H{VJUpFE%c0rXQBh*K>y4Ag zgtph6w%53R4+mZ=(^JRGwr-&wj(`4Ms4FBazl7WeMRUG9UQC5b^po?Lgh=i=PcEw~Hik>|)bVUmUK6No}f4{n|EPQgF{<7`dZ)^t)nTGXm6hEn(>vOkO zW)=3EJ>cFxUc=0Se$Q|F_VW>*06fedvi^G`P0keIwUaQbR{@u z^IhT1_-e@3$-Z#GEez@5{>#a_?X~gm>rJGGq?q8Z8-3h7$q|@+ql~hbzv<7-U;18H zx&)Im1vtcub9)m1(dL+N8(OP-tJCn@au3p%#q%rp>fneWtABL%zS&hZM`hpuy(fWT zamRNj$bys?y~mrz-uWLP=RLlQ(PgVRA^r`fW-SWpz9od=ovr*wE*?DNG!jqhV2yn7 zLB4@Ymr9=$@BaoJkNqZF5gr(C(Tjz4#PiIgz`*V5(Z?f!LjB7M~QD;uCsx(wZG z{*Sl-5>i1`#g!Ho)6Yh?={E#E2p#Hw@d*bGu?zWw_WF|kQ)H7wBUm3RH)ZgI|FCuj z+G~-ut{o(-7#Ylh5F)nmmD0y_%)Hfl;d|vHL$B!nw4;}~N>m>1IAl`uxc(wReV$#T z7Br`fBoWrB4n5-23!O5_JF#~?t|>Ds4Xc(QA$_GlH%T$8h->0Pvp+1p#wC3YIKds{ zRA(4@x?l+(Z^5Tb#6g|$vHxfbQ}c!dsTo0IJ4IT8F}&bc{IHSB3L{UTxYY4U6dmGC zmU$ofCH=DKzA*nQ`Rb$jd}T@RoB!*^LrhEMa%pYRpAcN_e*AnRpZMg$bA95rqPFsH zIhr{J)VM#9{-O;PufG&S(C+M4q7F{}nLD-7`S|zW+OH_UFA)B{U1PuK80=!VUc>sr 
zDDY714Kmk`A^J}X(gz&-1E)W;#-2;;h??8n^G%dK<5yFZ6a~lv{|UqIJO11C`uIx2 z_9h5#${|NUZP4psI`-mAos9B7?Zk<=R6S10(w2XypdWmh40-n#@xgH?i}vhBvGn%w ze=-nGsSQsbId;;n;A{j6Kb-dktpuVKN|*i9Az4cJObEf{gYy zwM}%1NJ?-^ln_Vb-~QCWtN;6&ePeLB+3c)p{Jxpx_k)%9h)VyH1dZ_|^kt3p#~!Xi zVS~re|E*A3zU`<2sU?XB+s0}3o7VYN4)T9`3%2pDWtZa*gn+coH+jfC9{#hC|DXw*3LFEFD28Er7# zMQ>!0z^+1+{8>6O@%r?r0$lY!M7IaKYf=``}Zx16<^PA zSI-h#orq`Puw=eCP_Eoy~dX%4$37GUaPWs7Y>*|S!suPnoae+G)}%u^{=-*N`$}9 z??}S9mpE@F5B#~wXFI;}VtW1Hjefm%^4xPzA)i;<6}^IhzPO$FO(OOIPtbquxt{Un zK-Sl$w9Vz%=q$w7cPcEbEbznH!;0uzw{eF7zJWl?{oYl?=tZ&C`z^!^wqO0p*TDk2 zy@!3B*-zg?xgG{z$O<2lp!ZkoKW1Xu{rLk!PV>F5)5;`{08w{!o}a!I8+@NsVkA#! zWyrH9zC-LcvsiJ^OPWGPTnTeHx}xQ161&YjF8mwc@jv-lp$2)CcY=kx3$MP$W%3ZPzx0nFSFan)c}IYccF!w zC{UD;U~>3(I+FOZV^m0zq7wQUg&a#=L5%56FZrR6YvWM2pks0>mM%MCrU0Q2Yg|G{ zhPgvbHnwemGL^L>8c7tG+xXmmRUrf~Y>vP9i9(#T4~R*alnx;1^Dj6UYU?Kq%Y_Ze z9(VWl0U-7pCIycm zyD%*rfHD9?>i#Y!0MCzY4ajIF>GV9C(I_gaV3lg}1#!N`M7_Ji;-dQIv>h(Un4ahW zT))F}@_WhV{j23M_q-O&d6hJffs_@U-2i9*Q#uKsvp^_i^R3Bq0fdNL8(D`76^2ZH zqQA{2`5k$9=H}?Is42$#(2yF-+6zErT=E&O0Din;U0;ZkY8F8OX(zHyId_Yopb;td z49_Y?PTrS*Dylj=WdM@fpq;?B*$X{LU-hCqpE5g;V{-T{`DhcY4UtQAa>uX?HB&dx z>SRn0qxeCk%TFwe#%L;|H8O9B{$+sNL}V0=5E|BXbJ(qL{gNA{u-L0frf8GSehXIY z3TM%0S#bcBQ^kS&wqGs^1rgJHF~+>9u&-&p@c6z=dY{hQuzfvtF-JLr<-#$%6;Ab3 z$KbRQ7&OEQG{D0WU>BV^Krp)9O6@zCnmdJx8xh!n z+07n{6l59(qMl(r{30xmgI0uyc5TZu4v;!f23tZek_Hz}x~`&HZSKvgSUHhJ)VC+9 zNB3_ww4zxqv8C;rZ+o>X#zM?gMBI8?=+NX)sH{VARWzdKzBx-UgT$y8V`C{kT^_j~uC)@^KV(LgY)Dt9KvJwImKf62W=0DcgIzbZZ!{m{ISbVsA-_%Xl``wbF&S%P#%K3LJYy(@Q6 zeA%-)Vjx;@dzVv$og&>RJA=X&mZntLWuL4RtM)Ocm%Ar1;};QT#cVGyqAbfQ+w>5n z+;&yjxWY6NQkFoH{;5+2h6s*m0U6YT4q?XQG#peMpD(qe4;K=?xbbQc&G&pFAsi-G+NycQ82P@_Z#2dQibP9g}HBq$p$6q%uvlHeZ%U&#JyZ z@ZA?GjrUL$r^cD4ApK}cMblxgB-~x|SJ#elT>S$XJX9$$FAj>KVw}ysEbz}+3FL6b-^eeLAduO0( z#^p)tgahCPKEqpXcE?;laz=?60yR6;k1hFss~jzRp36SGoRa<6GNm+sn1B3xyorc; zxM*9NWC~W<`({EZlmFhV{!~VKu{X(p2m(`yM-^ra_(BaKPCe8eOH7>sB1-=pM_1}m+ps6r0 zGn)1$Y{hi%le8X1me%6UtUtEkmNUnwp++oxMYVzY9f=0dOT?q7DN4cpHuEbXUu^&A 
z;pIlp@ABBHVPj_WVLEmtS8!wFuieBM{K~mvniik^B+O6vyMO$OWTr3^u$JACWpiE3sye2SCTn=&mo;-yrIx&Fli`ZKS zw=@S`Zng1Oe8Ktj&F*NrWlu1W;M;i3`$0P+l?p)6N)^(U2y9Gaz820AH5|RFy_&DM ziUAg)1&G!Z!UYXv${ofz=ZTT=bjf^V>&lvq%@CX)t`XIFbE(XK^Q2+Z((>EBvF~9* z_QOv)n7<8LQp{56*RlwzfeGSl4F-1-onBndE7hI_z(Hc_;%88C5_0$jf(N;4ur8Z! zRfK`=UXxNKMx_syRSVVmm`#v2lH|M84id{=y|KV{=k!|vzJ(x>rY)}4@_U|Hs0TxP z&&xbkd@=vzVkRW+^R7znBzd@lLD1SfFWvj-HTUCA1c-&nA;N3myWKP<71?KXFi&dAs7Gj>r_9Rh~ zrTQveQsyV$G%R_YU#bl(I72Tt!*qO7th@K>mR-WoZ2`agF4)mTxB&dBhd9E7T7RnY z%kXzS5kG=GgACc)&&rsHZcv+)k7QqwmNJz@mMY^ay;d z*+<~q18^QVcf_qF_nORVkS*TQ8A{h=k}RFdwOr8RgPZB_iC|pP@;UEajzbyoX4-%W zDbL!2(ZDDw`Vxh#GD~ zYQ_W|p-Nw3dl7hT=idHU=|0?)@eH8zvSWyxIW7byQyC~Axlp=#PydR;qbj|nyAr64 zV)xiF6ByimUQ_e-&kg6%hnG6Eqt)196dPAzCG(<0Pxwn37|NMA+uqM!B|QBT_S;K# zo<>%WAt}x&pEIrBN;}?qkRaG}pY{F2Dl@I;55@y5w0t4EEME-G{3^610uYbSbe`64 zBSG~CVg|RK+L>}<+rOuc@XC(LLC0edVP8@`N9Yu1LiM1{eYs`ZyKIUHjHa1mJ%;?u za+oH*U91bqAi<$`l6n4dGnPN+DpKQ8vPnF?CW4Vc->s;2n9InP`0xkKv#VTA8M-hm z+!@Z*aC`YF-Q?4uUQ$xdry0TSUGH_?6;S#*WwIv3VH*D(e=C_&)YuF#hNJ}7s#cD9 z31N-5<@<4C=5$lVmK?wl?!FN@KjlgN(Jl7AfpmsH5MHBqVTds$K1ZY5B=XhIEvWlG)8aSzWtp;s3 zkjXjckAqozPSFqz^dB^06M;%OI2jK+yMBKWIM|F4m-0!10pm3WcGWk_ZR zx~;Vleu}q~l#p@o1@2S$YrVnJd3@4@D+uB zNv!lO$|_ENC{68sZ0q+{In-(5sl;~@3u8UTgoIhqSfL%0u46l0$=q zIyCE-#DoGSEz_pC)UW=>(4K$H3MCIOtZv15y3DuL`H6s#J*Az$yaAH?9;7o}2mJ znDlq_!V{DEgyFP{;lNG?V=OQ`c2*OQ-%0f@eljNRA>>R{y4V8iXD>KkENNJ+_y$T( zes0j%`fZ;Pj>k~PS(|>RFRv@<+W6M;XGr(UFL@PHpwU250Ro8q>-*usMn+{q9>XXI z>wM~jrk;j87<)`MO9t+Y@fMDaSo1< zn7m7Dwjs&DAD;xM<Ukb4D+_L@m zmw$c0`=UF|UK)oFp8|#ndO;{NSsG;OjZ`@q#}usKDkxDE@gV^Z*+##r4%cZQHoyL4 z=#*+zfnY@54b(H*;01zp7abtH4w1al60zoH&1siH!)g#HVQW(DglHdlMI+=xiXI5g zF7DJLAZ$nuPRc<`43l&`CLiQIcEITsfY$k4j%Wxg0)*h&raSX7NG130A zjq{N8HKI?oDQUj>HZCzA!x?NGNlGdvY{i?>K{1c>Kld6rU=H7@N zm%*ml;_@S+>IR{Lmcd`dQ~AoECGni;3SP@V!5zTG#2eENFPQ0J0Czi>xQWutNB{Xp zWVacO2wMc!j6wq5dx))6#+{5-{AzoT$HPMtr`pN99)9D3l7zFaA0Zmz-D&bBh06FT zq6>KwwnMmbtHNrOIvTVSHZ)9yZ>ky-YkDygFIM7?Dd{Ez7+8rZ|o 
zsmR$?aQ%y`r(-K>EK;=`(&1Bwyffo=OSq1|4?kVRi?St>Un3Y>J!I;T&^h@IRB@az3{F}GKG+z>($z>RK8&V% zSj4j^?i0rt4M~&G2X1$a`_V=Z!!aC<2zLw~N-GBkU6Dsrug|j$v!~#FSxvoAo3{#V zIAJ~l>et2Hio4JF{erv~`z}Ux;uFOT3_Q=y6e?P{%&N*(ez@C-ZzlsBb9=1nq@5Yl zIhfl0dPf&OxX&jlCa)1ZJ5<2j?#6~8nqmt-?wq}a4@=riO66wGt{Q3RdFH19*}@Nt zK%C|?W69wmUmz|mL-87s?7k8&C)<31_sWp(wd|skTiJhfyVzF1)j#nD8>K3}7d9iz zpN5XdLp>;9%@-#4@8xS(Z8bF0NK%~P#8a-#RxzaHf=r#}SD**u^4cZ-gZWIhQgS>r zv^Fr)11#8+9Ah4GTDi8d9!cL-@=MgJU Yvyx9Zl9yZ6P*V{!Tpauu`1b++LgmxO zWa0D5ZvQhPmP(UkcM1*P+vs3{X*H~)q$MgOKzUnJ?y)j!gwVAaMA-1QHV00koIsZ; zj#Zwe1`8tF&X1(V0X>i4Q6q#R^iZ&;GziwHUC;i_{-BW5d#LgRs%)&t_pWEbDCp7s zez)l}GU2ss1482(6x`0T*^1H9(o9_@6L@}x2ut7S+m>^bpoYb<5RK9dfc7)Eg)9>h z@C;M!Jd9LOy`?yrBrp`eK2+ttK&d|HShRoLyZ6(2cvoBa_DIv=BA^jCnU{4ijzQak zsvA9aVj?-Z6gS!}W{gy(QvPS<0qURvoKrP$xaSu7Q$jmt2t@vZG;OvY>;#|IVP%ar z>qje&MTJ!yjTDQ4*d9&AuW=d}m%%l2N1!!w$@Mr8Tnl>N-a+v3rmjgzd5r79Rd;7M zK5*X=eN7eP%-ipoY?DPimA*@hnQn5k=BPd8)1DO0AObF~U6 zxyu%v_LM`5K#=0%b5W6(PGExcpN9L3(?iY>AXhg6-FteQu3^)7!&NtfhnBu{R|wjR zfgG2=u2TkBb*Kj2&lbtYIKhc<8$ZtbvGQ1TgRdun#>L=GL+6O{Yr1 zLM`;rdv3f{sTFga!XraW6`f}cd|yr08Jp9c%v88t6qEM7RJ+L%!94(bw|P}bfEt|B zyjMQy^~EIg0$Kz}vHw?4+3|U1a7(R1%dMY#`WBbp>b+I9Jafp9s=wqNs?~ zNEw7@=agzfLW)E!0obk75sbMXwZhFN|{0bYZFJTTu&Ywj&HjeR>l{5un+W6)spwaZS z=V-DVti`wW7;FXLHg~*{vPLaI{zg@rG*u`^?PD+Kh28w@Z;i%N6UYh`QL@dZv~c z%QxR{sqt%GCt_5P^oY$YSe&F%wz*0Aln7>0$_}U{mRf>sIl?i9B`rejTYn!u+Y&#T zq}ViaukUjsNvCRojY4XCB5Qw$?9f-zjSl5VEjDfTiar$ZOkX|^de40lkktnD&IocJ zo=b687{I5~<;b%rR4a;W&wiFY{G1A^)Qh8I>b!-JCXn}H)=uwE_y}CU(OOKo`eX9z z=+;#sZ&}PSRLsd##hM+c+q8;Xoncg1bq;7NpVnDAh6AL$)x?qLi=uW5sY-6x{YxB( z9-Q26X1ia5a@Fl0bJ){y;~>+U-?zREwG&&6RX=4N2Zto#xrp)n!*UZFZuTS==~=!E z6+j}iY;;JU8d8-_9x#PG)UzGlS`DjErf0Iu>u-7L6VY`q9L1djv>(GRBVW6`hMICW z@9@fT6TAW^B<{9H6}eJqFoy29T6|W6^sf6V2Ejm(XFxjHx_)qSM_O>G+a>i)^GJ85 zqz)R%&OP3^D5|SwhwY{R8L^7p^yP;g<*OLnlI_htJXZf?BPSi=er|Vnk$g7Oha{`a zPw1S-FJtJamz{HN9ok-&HkfcXcO~s9VLJH3OZws*Xl=->$hCv0IyD01h0?*XIyek?dH5X#uKs3kP8#hHkw#KAZ60NX)s=dx 
zvvLx^ad=z}s2f3^hARB#TcSSuVGARQ*Y1k9VnQz2FI#YX$afyauaN}POzDFtG$MT0 zeUQUey8u>rv6r)xk<7NkRDgV@MgG(J!}Dil`+l#<&p=I^XH?Jd$rJ$I$#C56?k8hS zU%?+O-PbNyO>lex@cAnj#tn$gHdN(0G9u~(*3kiO=n?fT^;j%K`eDDY4@L$~e67w# zRK{JQmDbc+O>D9*dW2>sozh!)_-4G@$Tjy(DhB7i3afOuZO)6n;hzvS;Z@f&gf2{G zY7DKZqJQ4N>|$z?^x$ACZH8$orz*^`#~x8fYR5gNi20S%y}1kK@oUjwjaSE42{`xw zy&sd#SNCcLZD5cAB4#nj+BUwGqJ^FgTx+Fs6P>K?7^j5`snbnMUS#8H#gTo=)f*XZ zvp2-%TF#+ab%N(#;r7S| zTR8xv@=oRN95(UyH-s6y$VzXso1Cc6j{tWb{!n~U?bd(G7-y5}K zGY=Gn6hrE1>Km#*xv-oxe_#QCr}(p^S>VD;co z$xvK(GCga*4&0=nt$vvG9yD87JDizA6F?v}SA8_~oj|l?F6*7Qc(wQYUA8T{w>XzQ z)PNKgX&i+XS=|7MdzyZ+CJM`Ct4@S@X31{xySlb_(`FcZ*<|Fs{Hda#y2-9j_)PRI zF?NzQdW zAjmtYu7;W2VG!_PYP@46DN5XYZcwE^%MUwPVF$Da6{B%BjD|?G=5vBH|9tt$1WGhD zR=g?^s-4W!2+*rNgle!nBGuB5pfQ0!@I+OTLw83hDV=~qA*Q&vH4K$HZ{6w6<{Q9+=y%a>cgV2X6IBoc-zIHl{W zFH^dq^!l-o5b4wmCxOY;+dBj$&K|Z+lqsNRu|KpWCi&enn|VBGKg!~6qf-$?&GcUp z0JeZ9*eMw5pfM5uu6kN0wob90IiP*>4p}no7f7E-9*i})p-cFbK{N0jmCFD9I7h>+ zAmQ|ql80_{J$C#+f5{HrTTYt6yYMH!Wiw>gfk9L`h`2$307N*jLI6>h&U>qOWU7ka ze-hlZ_WMD%yy6W~Q$>4)JMXs;{UxDR%jYcX7{?hYPPflY@pcGi$)tIIWy1XI1KZr0 zpzauUyQ;k2u$RG z2)Ou-IRLEwq5XGlIX?w>T+_ZP1XsJOdspBM$y$8HGGqK?a zwS)%y9_8~o-IIW}o-I>HVHJfY8e}ClzkrgLkil79DRdijlAX-Co`N?273msc8(wi! 
z<03OPWS-awzZDSv_?e-PTFrGB{6jNvUV*NnorBI0Pgw#%lRZ1G5^)e?F}BN#B7a^D z%C5SHUCp-=zB437o6kI~wQogs>=HG=3iV^d`=IGOeZ}I;J1RFpz$@oHR}^L7j7_uV zxbc4LoVo_CBKez!@p#!`Gd~97I%)Xw=gB$QLJK1>Nk!fXbMakv&2SCi__IGxCPGJo znT-q$lJrh5LCTu^uh{CJ@=tT}pEyQirF;lDyi{yI0|TPxka;yFZc42LAonoIwJ({~ z199Gd?=6N_&rdNFHtxl?I;-xlgh!bvy`}WP;r1lz29NvdQ!n=l!EMPXWomH-Ec_Hj zBnFCa!5YeIM_;4e`+Vb6yplB~t!@8IO*g49z-?3+Fg<-sf@6dcL)N@9sLxfV*&%%n z58pKAf%7pyt>67oZLHqb9vC(9pEf)>n3OA1C9?8JG)ETlcc-7v(>Vd2zV-wD&TyDu z>(8@x_G@Z@C;f^VQdRx;7_x*6XuVIo->mo;9;ES40jcnGf|%Q@f_BN|PCiEuJ@in0 zZqwJYL~?k=qu%|hD zJZ#>9$}|ekFm`Z3MfgZoc3ps3If}dnZT@dB&B&YyI!>%AeValI5iz`YCNi#*GCJ8d z&xE?FB-4`RZK=r{xp9$EdlJ zTLzGtdhH!S8kI9GSjW zzoWaqhiKV;u;lirPJI?dG2a^2hDb!cn6hi;bV|TR2JQUmnh7+TK zgy9|Mse-LTXEBKkh*PFEdJO&cxE+=%0RhU9-~Wxr^_y8HK`Q&#_i= z3xM$t2YyLZ9vz%%>iHCak7>UMbw1(ksnE z!_2zrqJC&P+2U1k*1WNVHHzweFK17AD3wG;5kFL5dc!~4Dtl0-n-y0p(3C| zKsMg)-Q(zX&ePosD=40c8_&l3GctqqbcV=h6ZjyV@;}fTcw5=ttO?QiS9``psKIx4 zwS&p0g=G9X$xt46LgF8@!20oX^0EZkmNGfpqs#4TWt{6}zcBk1QA`Dt)Ij8=-#9XC zA{f@V_myR{e%n;R96xi3j#}M*%v>#e*=9&NJZ-1%&4adgeUCG_RIxemj3O`4NfziG zNCM?Dfs$EvC|<+SInl)N$?cmt4A%ubmgk~%RIDT5z&_O|en|n-SI>26^y!EMK~GG> z^S{El#ouls!;s&hiFhWwMMxcO%?_664C-)buZUI6xi=xLZ1G>}JTghaT`f&}AHil_ zG6q(mMAXqFL2T}l8-#x+>6sGfOL~S&&PuBrh{ERFn2()l07gF#wzst#Wm+7S+TG*D z*v*NH;G?a_Bw24a%C?IKDRk5$1~Xv92^<$<+8QOKv#lS(&dcv?yj@3ivGH_2 zlw`HNJ9taJ@+Ls{Iv^aP&7ikNg0+3d>t>h=-&QEAW@8Z^tLq)-B<99e#3RWa}0O6o)pST{+ize+(h2#FYQ$Pyv&nX|$Xq z)(^W3jQA}F%Kkypy_}#y68X^N(k>_>KAZlwetRfE{ZNpVYNg2`;BN}IVJw0c03{`1K!8{`$~^VWM+vdfe@ebaE%~j=u&0rjE^SG zL4y6`wSTrFRtY(kk8-(VRTNA${ z&4W3jgo=nmP`!g7qW0CGkjpL$)V!GN#L8GSW91cm28dC66X3s=&|W-+N|4g4f94#- z*O<5-UC?`yKN0FgR9_cCqx)=2{{Ifoo(TUE2{pDRS;v zxG4Wsh2gc4+tW|WOBdgh3>YzcN*}&WxXka4S;Zcu0)=-jv^m{3YzEE6hZi9kvD8Kp zqcNmZEirmxu`kDG%&_vH$w{sx>F~IM-{eq4f)T9?R~t1Azf5u7x>|2l(V)YUuo0*t zFSD!J-^jMpWT?k4RUbMV>I49#<ZiW=QR4M7a_0AVh&Zyy7bNl^d6?t33{Rw{;Dlfm z!JGu1*Qz1IAE_cuDG`E>wZ!riZj4-{>J>_tq=5doktYw~n*4I)s=uQP&25d6Sn|}- 
z9qhKOW^%>(A@^9f@0k3pf5NYR&W40nb9QsHM_J^_3$n=zV>e5Ac=sOp8>WDFBU%iiN4zb}}#}`WEKYI#4sKoiyoZF;D z%YyhGMS6i@7^y#SUWO$5hL+~;NvQ}+IBw`mXz9?0H&ve-bS)iGZfm9yQJod3<4rLH z-aI*0;`cPo2^|*y@Wzsbx3k}uelV!*p4$;ESQKfJV(88?mP$~%+QNNpgM1Jo{X+xw zVBg&oXmgt$F4Hh}7*LGkMDfYdVd{tDt2m*a=RbKATvsj@)=*)gS5Qf8pv~i$p*M4F z*31*f?y;w&A?~fsM!(FnHR{=fW7HuhO`rHyabe45%7SjTl^eX3*X##y@$^v5^MVsP1Vfl?`2ozbgWD%qcsH$7;ep{Fd? zn{6dIYT{`wlnyL*`T-GHydQe1RUOAl{ePRH)%@p}<1@QwC+HtE-#Xa5DZQ6I{&JkZ z?CXiR;beSVo4%M0(6*kIh_`<5(+0YO4^5qSq*@0U&`Fu_q(W5W!t7qa*f{?8zdwOadjVE!A5c z*)7(YH8Ru2hV{LK#QMlmS`YAJ|M+>62l&e`m!p$l_v6eSDzkG{)AcrJ&lgh=n9Z|! zNVUsckr|Di@%cNtogq|CQj|G|M1!0&m`=l#*aVII7~$vaisM3(m=NyWnWT#x=EnyW z5mM6NPAVeCVX9QH3n5wkX?s`ZE(WV5{FiM6d&b`nLAjpm2KLDg+RIh*0=XtaYF#IkF1OWt5t!oVw_6edFH4=dcuY)0+Vp2X zg>t62vTJ^%(f^UYC_JUx_lgSq>H23%ed>9?e>vH&?i80BK-aeZc{%{`xAJw=6pd~O zWWZ$nviEt@`uaZt`>Pt*5Ni&vY}VR_)?69T5<`J04QF zlGtBg?>j(d^*ybUpd-rb0 zgPO0@jNthLU>+voL8@XSi<5HRD}XU3rOfSrzM-DX+#aGCpuCEz+}9Z>F$(7U@%6yq zFWDt&sh-YwRFYS5f2A=mCI)$Od3sJ>TohzeOa}2@*8)s~Av(@e{xjq?E`ulAfN1nu z`uV6o9%qDL!E*1Tn9)zr)?LY#=#8*;p0L8`7FgbNAR(X^gO896t3hR`KxH43fHR`} z^po}uaTz!XD~9ScuDKLLFpZ1(YtA1`C0c%?v#+CxP?ivmNnJ$|M}t{~W;WD5usGxb zOw#%(*LJC~q;sMigYvx<10-;r&|F0`V%AwwF}lC#TeOrn4y?u44aj!je9M+q1t_T5 z0?te6gxAl^Sw%gPXOtgVuSE8V;k~mvVZtbJ--njWlIekY#@Bk&CBGvLoXd%~%qM+< zF-#0mcK%8Xf`@2D0@Sn4~i>Z zKKRrt_Y_}}by3hm@Z_{Lj{Gb>&}Aw$jrN-ZFW;gQngflFez8ykQW5wkpR&rha(C)* z^Cg3uGh5Q!BGT+@k`Qd)CbL{dtQNc`&>XOY4=So+R6j(qBFe}#>}PrMGC(i_;R-)5 z3b=G8Fnmp6l!cw|Z1MX0h@?`L?q^e`Bq3>0H>~YOE1Mb`0^p9O7zg#`kls473boFh zYb);z6~WWz*SugDaAMhB@~PjTY!=3%Kq8nal$hJ^_kkUSU5FJS`|`PX-RK3STcN1` zkFyHp@I3+L+^=vw-Jn8ht7bi{icemb4?ayFU6hss0Nw+anqk`5jsi;_ML&pVL`|9b zsRKlqv1CV%kvQc`rvgT>-!yTzudwudgWRBzp1*NZH@O=`Bn$()4;Q_!E7=TTP_H6qO*s>lDGF9xFexab$Pex!=A-ru zRm;C@p)oK=0P*_P>9m=JLt|-=48R!kbr?%;k=(T`v1-F6Q@+)22=BSs z$9=_v)eSB!4r09z9XF_4bhaBs4m-q(2=5n-%*YMY!HHS`@;u=+z!7;^b?8znMGtq1 z?E%BsAQhMYPsn`zr}`S~SuGq#K2GXKRXpauJQl>hcZZPSdln1Y4q*|fho(UoS#fmu z8|<XCjr+d<-O7g6~0q(bBVbx7T+U9!BOgTU21i)E5PpYcUD~ 
z&26UXXKAL)&T*g_zPSA1-|`qO;WGhIJLdx9@e zC7edygp-tatQg^sXoMROp*85VjAJ-sDf*q6GO~@qe?}9LkMfwj_%Cumt5~4*2Aia1 zJSdm%rt`fz6pZxb-l(Bk+!L+RFxX@mmlJQGGb@0Ac1d^bCFvFon?BeFPw5s2&39KJ z!6pDLT&>i)dHtmfXJoH%)yB!)cB&5cD0U>49$EO5QlC&p2#FVK17<< z=3R_OJ#MDgw-VRip8!oAFYe)V!@aAe7+V6zwweZFy&Ps`%ZZTjpF*t4)uHAGUbdIsehzpODe}l>X~UKR#4ZNXkd=j9-zo2h~;>5`arn(-F*B=n&16 z1y!w@;^grB;wb#v(ZA^9!~`symg$rEqn|80qXOTWITcb70>*itvE(}?Ejxa64V|Vb zqmh+99jqCj9rQvWk?D;YLmG{275ZvvgGP_W8*qh{KH|a-H1oh*JI;y<4qMCP?F5HZ zLJ;nPLsh$CpA+i_MqWLsW|7UUe~WFCFsSb1Uh~_tIo)A2ETocSV0h2<>L0qQ=1-FK zQ_6I4=8SM1gbOwM(3I6mdEjN+Fd8h$;>p+tGXtpZ%a%#1x%&BVpCPHRMrYI1ZEg6_ z34=btE`$kUpP#d9asx>oOTwN8u;VnpUc3vOr!L?TD=!4);cjz`+?@SP&O-wYeL#in z1e)W&PQ3^{gdY4Ts=`_n$b=@8?JRK>o}}x6aF}!}Rk|#~^sBMzp(Kg1R;yBc)FW~2 zUd9AZF;eM!6I%9XLU(@NH{h+Trf%~OUCDG2rU4Y6C28K|v9Iu0fET`9o5CTs=xFRz z78VB2ivZ>mu_ji~p$pE8c1y>6(zB6{l+&uU9oGHd!d*GEl(c;jwPl#*xBTI#b**vS+@NTy;i_k-Iunz z;lh`NV~J;=*%4LtU&n2lbUs7kC&d1Qd#@6}37amQ!Kv}fp?uR|Z()ZwLq_to(tG~a z*BB5pJmVNrQYOM8|Il2cCaa8B0L62pL@M5q^(v;k?uY%{qd5R^rnUmlLtu=i*et0P#>S&(H zu)_VUCIS5p4DiTr+zD*j^E(DZgheXOptUN8?^m{02`4$Ho)$FzetU7$%Msvti#)RROW{bC}|5gMAm;wzb+ z1+yb}*~Tc?Lgxw2vH6IQqL;MsCAPz#x!MsFHClR1L?|=~S8Am&-1MdvHTYA?z*FMULl#vpt~sF7jpY6aX4oqCbMy zn&mCh*ya=9+Kr<2C%W+Q;2AW$3~@u!oo5ZBV+(_}Zw4)-=^f--<;3o>W}P=1E9LC}YpP9y`zt=$v|Y zZHchz)5Sz5S-IP%LWRy`8WV|^VOTSCF~n)?uU0~&jx(fx)jaDfb*^o^R*bwEE2m>B zF%X~*5k^q#lwNF#%D@abh{tLs{j#(2EpyGY7cT@m71d5p_wnU?p3!;Ys>I&Um%7;yp` z(fDFlYZM7uKu0xm+4e6Jpg9OP*VV1)VKDJ4WF%tc6YjM@1Ck|vrn$y|l&chCOQ)bz z!MwObA28T1H-ij5=^lZ-8;->bXsRpAbdbfY@1GeB-)D*+5*)9DAzJ957?3zswVX5u z!p%45ikOAlFiWG`3P#BR>*vymN^tUW;^cxmOwUeSKMM3TUdaKf@keDcKb7T(y$4?s zoo_s?=T|il7&t+EgpZURZ9iN3~U8yhWRBlqSZ*N;x=0l|ymCD+d-WGkvrD}hIha@Oa2n32~ zO&0g#_CW!vyv{NK6o82>Ce-8>X3abS*)A*8^tMvL4hTG^sd2#caRD{Niq=ITfp$=gA6XlZU&|^g6&p-st&h?AQ>EkqGyFN z^et?vdD#)g@GSXL?m7PB9@QvAd0wK?jE%U20N;icMF?DUdr6GzG!3y5}@&afu_<%F1&=tWT)o4GRIB z8t{w_n0{;^74{=jaqA!%Msmz0?N)2{NkMfXH3t%b1hakt_EGl^s1cE%KvMBus28|C 
z*XJcFuASK~SY}~Odea*astT$O1(|4nI=B`pN@Qd6ODLj13~LDDSy%z*=_?&D9TQSV zvN$$yF#DV)GjRnK6a~>Y>Mk_n6RA+ynGozC8%xJX+cNuNo&YHdgwlOdbV1RB6cv~; z%pNhy-8uaTs4>A82=-3EQO)qU(eqsQg>ohPbrA5ylFJwyFnv5$3KCE=BRz*>tg$M#UU^sM}SnD)(ODgtcEPhpTkpZkS5RF3pi%k)C z83?s9+3?zRsc5DJ#+bl%0Cm9hQP6a`%CB+^g`*3Cc~Jmw1e;!HCE2eBGay+mN>Wsg zKt*-CeqbzI*d?^F4v%N%LxGzYdiq@be-<>Sw%w!y>b}Z#N7ViZM zq=TaarVqy>#U;>OKD~{!cTZ<5=RtE<% zj$vRO_sCzydy2=}gHtq?02z(UVdSE5$^*LM5l zF2)It4Vd1->mHo2HI6*M=0KyE6Q$I4-l92*jy?6VbTLNHc z?dR%!apSibnD1Qeoe)xi<)66evZBO$JfJ#Y`dHX1GP~3n0oDVD69ab0PH3U4Gs!6? zOr>+HHbY)}o8Cl@krmb;_nuQ%ePEB5W;5*iko}Q|QlwV2(J<^+oZ^z57X_lhsxPVS zY^o?r&>tNeF#XsB6-L#SGu}<%=qeD=p4Ayis8YL(!`?>NS?diWU@x+*ZlARLypjJM zB(r7&+7s~F_uRfMWfV?cDB_|#>!R9@bwIk%7cB$)k$bmgmQ7H4(co-lmT7?O6PsBX z8!-L&NGe3CrJ}&;XP3mryQ~;1t%f516(@}ts20RyDL|rqq0JWLoU#S1#Hp59a5$tq z)+$lay`ap$5J`ES56Nu2I0UtfVBqJE*u!ZFhb1F9WUvrjLN71{~kEaXu4Z zN1#>?@fMtCTLCNV6GB#s*yN>KTq*{|G8@2Dq3}dBw+t0&siNiqf*+8zfB}heUkq2~ zO0#NLqACJl&9wHFlfS9hE)z5p-{&mI_F<)W4yH3=_PtRyWs8bxQq^3)KS!hQx(YB5 zu$WO&D3TYPCoO!z0@>N%)^*0sw85zIgZpE+ldVN!#zCq~MMDvo79S(3aJxR&>@&r4 zN`q%Zq17Sguc?1!Chqi@4w#M=;E49dnCe~zpfklC1bPnGZyZqJVVrcTwqtnG%bw*! zpoql56M+JiEjT}q=a6c&+7c>$p}?;C4$KHWqKJvT23AtA+9!AGGa(TK16TWE74m3oXkZB)u;%b@RC=*96coy6A{@q(S^i)8-L@$*ONuqC(tgHOG;a4aeIdQu zvcrbK677XhE)XvHjv>=}u8sPX6@gTA&lJlh71mjOx1e8-T4drva+OHNu$`RKNFgE? 
z9B7o^*u5~*v8$OQkB=P&bOyqHBo%oBmYcoo%w&)57*erJG?QMKsz*V5UuIa%wRpjl zL}sf8VHLXxA|N3$AY;j_q2RTR~{+)>=sDl>TXT2Mj zhGa6uX9}%N79gVP-4o(Spf20ft<)Hd4VZphJS9rksRs4U+sd>kAW&mc&P;zO(4Acm z=@GE*=Y}Fyteig^%FQsPnnAtorl=L2h;gwDie^X|P@10VrcWjY4XG~g!RL4-`+$HE z4j5+xLXwq_4VXRxF|B_dq@V%ofDuz5TL+YLhS2n$mHM}ilwe2uF?5m!v9k9}^pvI_ zdEF`P=E;goxZ9Uwrt4gyJ!w^UHb_a!AQ)--`-Q$|YGfSSqiTS)D)N6~W@&7|^kI+{ z%8>0S-^w_&6G8ZukxmUzvtyuwbifb}I`wVA3g6BO{n8ZJe60kW2)8ktJl6AT@# zIX${8__c5&r63V&6V-$cRjI228bxZ}#Wf#{y%R8vX^pu(OGU*bPB2SW{*Lqm6q|hc zw_7jmmtMZezU6(dOQfzDoz);y=)2!La`LQKRFC1->nRgV?r6$!FYPDmG}~H{Q~3;% z0toxyHWS5=w0?Lt_C zfdK9jQh*J>RDGS&m^8I|+KGTJ1c4D|Et&$fu3uZ0#S{Y{>RylC8cZk6dut%=CbLM9 zXz|iBW3(71g8&(|$uN%L&|JslJ3-(@BjlpbCKwZtg!n9Uz`)K?RmZ`J*@85)!c6p0 z1I*@HTK#D9J~Gcp8UTT;h?_PhjT%#j{ze7hvFn2Ar>gz2tAi-X8XN$hlrAJ&5K8AL zf=%ZbV>b>f)ek08%CXueQ5`eHy{_U~$~Xq3)B~}6H>nUNsN65XN>x#f28`Okry_w1 zmBvR$rbUQAy8WDM0PjufQLE|kplTFFF_plOVY*8A`Jo}os1LOI z?2B|gO9)p7nP>Nx0Y14Hi~w&cXjl-!r`A0CZ?`jH6qy}{@XT40T|vRwLZ$#R(gD_c z0n@Q}0nut6BHP@*`?s$Yu5?bl8Pn#Q6r=|h{zk*PgJg}VvvB}`ZnOvl1$yV_4n~xn za!p z0n?{hhHIOf$-1XPFz-Y}OciP%oz>)TY3G`?AOyM9tE7x8aE-_5XyH&FtGIB@UC z#|>o{F7-?;2XnRGyU+4^oU>qk>*^{-KCk2 z8@*>4gnE)(bxPzvLtMM;GY(jJE8ZYTH&XL6jlciY9F)wgAeCeUW6R~F7$4dX@4kE}T{dTwb1Sf;Q8 zvIELkzwI$Q$DF1p4~(H>`>D}0x7fNWZ9y;~sOmbe^xU$uRR>UqnLZ|xqB)>qO6&~y z9H*z?)(oRfi&)Ew?rv!Nq$`d2)%P#^{p$2@&}*tPds45rO}heRsvxr>l62%kw#Jxe z#)xO2drz3)#n(Wx&FBa~i2~RV*KTaU^n+7U!ghiN3_Gg8m`U~SkITrmg>Vf@n$bHZ z6u3ABw#2A=XMHc56Yd*=+uJxXR@D4i|35KP2D zfIG|59Z3=PK|o~puxd97c%i5Fj%~?+NacQ4pw{he50H!fR*shA5t*l=10z&1Nz?FTHn0i&cNe-HB&ttuFr6ctq z7d$Y6 z0HdV+qQLn`SUjU6TYR4tZR73j^A2D;Mrk0JGhq+EQ82^1IU^;eXR%$}}sYcLik%(+_DqAI+@40UYVxEOy0}qBxT3&4D6(AdV4{Txo7GwmfacGo~ zLhw_S-0=NyGAJB-2QZCE^WGFsQ%BmQRngQAe8Swpptcu=O`-G`3M!a%7Cp~ZVWQ5p zbx7+RWOF@Jp}Kd%*&NscU?R9jqMJZNKMTr`^olJLvqDXn6&ge!3WXK`%T<-QoXqkB zkag zs*JuQFr7PeD=dvwApxc;P}%cp*ukNu_o5lU-BT>SQ#8dkd>R1H+iYTrQ1o46&qS0p ztx}rKwaRo#RISV*C~l*kspP!O^MZn@mGz>4ay47S;#nA^quqNS@#(&!fZ^mAF(mk{0-Y{v^ 
zRzFwh$)wVv&F76}k8Dt=f?9Cdhm|T?)Sklx$V<$&=|^@S?7U+z9lIrnmJZN}7ag$* zR)I!14`P{=QWNW7jdV82?_CHgO4hIy3Pc0N&G>y)kcvMOo`{!y(*1e7V8876IyNh! z_M}#88)5+#!M3G}SC-i=fFuQ^Cj6cWJe2Wsy_mxQN0bwUs7w zFb=>r8irgtC2O3(<_-y60LL5*SXPR#e+wV7o~ zNlb=D0^_83Zu(8{223Z$xi*4Qb3Q>@OnG5K1f=d_wyZCe(+j;CwgguIwlWjz&Pkjn zx~BU$WeJ`7kUCqH*=s}<(Pfbq=nti27g^S}st*kI)%Txq!=fQl_Ca`#PyRc^dWQ=Y z5RC^PFnuU08f-)yM4L<(F;^2ulw+{zDc}#HO9@wk;34*tk__7HbPsMnpEUivZL;6Y zFm5z~$8c6OTW3nGijaj2VFX(HdFFXRDQgD2gPW1N(l*Anqfo(b{d~#Cpq0rkgTrH& z1=F!x0;{SEnnlO$K55|?_Ah#w9wiH*N(Zz*=MHF;wvwejSkm#QMbJztn$a?5SV&W` zjMC_6csyikCW>NI>kjtPJN;-%xeG8@BSjUKTULZQrvWk@Y%j8;zBHA*_Xeh4%`)5v zc`Y5r)_MBE`x69P@?Fj-fIt8}x~r3S1CE0k^sUJJRaCG&Mzep6f*R_^3vEuz#brz; z1O!S@sGCvh3yRtXczEk1{A; z%|-~BqYh);hne0bm_FNAQ-X90JdK30c?|;l7e=a9OkR8nBr2y*FcO7fFzo{=J7$YU zYR?qNku7N^6YIDSETTGwud#F#ocRKqmH@dpu_P8BQhTRXJ>DEFl!CV6Gf`<)gNaRT zg}H+@4IA->dt;_!S0khrA&y-OvOr=r+83kaV6}e*h5<=;006dy-cGb{EQ+9)EPnxP z_uq?=pghX#nL@5mo|hndl)L$)mJAE;K#RQR%aV>v;3;H|V35(wI!;xQ#>L(jn2y~Z zV7+?}9k8;By`VnDEIQu&%&jj1VWx>VnvE&$8KZ!`a&(2GkB;_8g-umZKmgVufd^Yl zGs(WUlHekD&-d|?Zi+h&kx(E5}_^vMrSU(@e{qmX5WRDrr9YS#sL*s;EmUbGB z&=ypF1`TE>ncW%K?AfK`NiZ%B>1J4d+G5?@#}MvaC;TC0eL~C%fWTug^?tzA-TpYj zzCTNGMf>(|n*E(`CxE&Tg;NDVD5V4#6ci~3*9sjDQ)EH_4S5bf;iN$_TDFoEfF=^c zp!W9y2->?X%hjp6ycG+lSzPt}2T+-EQmn%J%z%^ import { Body } from "@budibase/bbui" import CreationPage from "components/common/CreationPage.svelte" - import blankImage from "./blank.png" - import tableImage from "./table.png" - import gridImage from "./grid.png" + import blankImage from "./images/blank.png" + import tableImage from "./images/table.png" + import gridImage from "./images/grid.png" + import formImage from "./images/form.png" //optimized example import CreateScreenModal from "./CreateScreenModal.svelte" import { store } from "builderStore" @@ -54,6 +55,16 @@ View and manipulate rows on a grid
+ +
createScreenModal.show("form")}> +
+ +
+
+ Form + Capture data from your users +
+
diff --git a/packages/types/src/api/web/auth.ts b/packages/types/src/api/web/auth.ts index 46b1e8cec9..5ff0c3c1f5 100644 --- a/packages/types/src/api/web/auth.ts +++ b/packages/types/src/api/web/auth.ts @@ -18,6 +18,7 @@ export interface UpdateSelfRequest { password?: string forceResetPassword?: boolean onboardedAt?: string + tours?: Record } export interface UpdateSelfResponse { diff --git a/packages/types/src/documents/global/user.ts b/packages/types/src/documents/global/user.ts index 337855787f..ddb1e39c64 100644 --- a/packages/types/src/documents/global/user.ts +++ b/packages/types/src/documents/global/user.ts @@ -55,6 +55,7 @@ export interface User extends Document { dayPassRecordedAt?: string userGroups?: string[] onboardedAt?: string + tours?: Record scimInfo?: { isSync: true } & Record ssoId?: string } diff --git a/packages/worker/src/api/routes/validation/users.ts b/packages/worker/src/api/routes/validation/users.ts index dfc1e6fbbf..7b95de0f59 100644 --- a/packages/worker/src/api/routes/validation/users.ts +++ b/packages/worker/src/api/routes/validation/users.ts @@ -26,6 +26,7 @@ export const buildSelfSaveValidation = () => { firstName: OPTIONAL_STRING, lastName: OPTIONAL_STRING, onboardedAt: Joi.string().optional(), + tours: Joi.object().optional(), } return auth.joiValidator.body(Joi.object(schema).required().unknown(false)) } From b0cd3d4d03206c11d97821e2c860a2f22076a4c9 Mon Sep 17 00:00:00 2001 From: Dean Date: Thu, 15 Feb 2024 15:23:13 +0000 Subject: [PATCH 003/174] Refactored tours. Tours will end if a TourWrap is removed from screen. 
--- .../portal/onboarding/TourPopover.svelte | 19 ++++---- .../portal/onboarding/TourWrap.svelte | 44 +++++++++++++------ .../src/components/portal/onboarding/tours.js | 20 ++++++--- .../builder/app/[application]/_layout.svelte | 2 +- .../NewScreen/CreateScreenModal.svelte | 25 +++++------ .../design/_components/NewScreen/index.svelte | 2 +- .../builder/src/stores/builder/builder.js | 15 +++++-- .../src/stores/builder/tests/builder.test.js | 34 ++++++++++++-- 8 files changed, 109 insertions(+), 52 deletions(-) diff --git a/packages/builder/src/components/portal/onboarding/TourPopover.svelte b/packages/builder/src/components/portal/onboarding/TourPopover.svelte index 1140708593..e319d3bee4 100644 --- a/packages/builder/src/components/portal/onboarding/TourPopover.svelte +++ b/packages/builder/src/components/portal/onboarding/TourPopover.svelte @@ -1,6 +1,6 @@ {#if tourKey} @@ -100,6 +96,7 @@ dismissible={false} offset={15} handlePostionUpdate={tourStep?.positionHandler} + customZindex={3} >
diff --git a/packages/builder/src/components/portal/onboarding/TourWrap.svelte b/packages/builder/src/components/portal/onboarding/TourWrap.svelte index 9be6255f52..779a84f463 100644 --- a/packages/builder/src/components/portal/onboarding/TourWrap.svelte +++ b/packages/builder/src/components/portal/onboarding/TourWrap.svelte @@ -1,44 +1,62 @@ diff --git a/packages/builder/src/components/portal/onboarding/tours.js b/packages/builder/src/components/portal/onboarding/tours.js index 894f9c7894..f5e34518cb 100644 --- a/packages/builder/src/components/portal/onboarding/tours.js +++ b/packages/builder/src/components/portal/onboarding/tours.js @@ -32,14 +32,18 @@ export const TOUR_KEYS = { BUILDER_FORM_VIEW_UPDATE: "builder-form-view-update", } +export const getCurrentStepIdx = (steps, tourStepKey) => { + if (!steps?.length) { + return + } + if (steps?.length && !tourStepKey) { + return 0 + } + return steps.findIndex(step => step.id === tourStepKey) +} + const resetTourState = () => { - builderStore.update(state => ({ - ...state, - tourNodes: undefined, - tourKey: undefined, - tourKeyStep: undefined, - onboarding: false, - })) + builderStore.setTour() } const endUserOnboarding = async ({ skipped = false } = {}) => { @@ -58,6 +62,7 @@ const endUserOnboarding = async ({ skipped = false } = {}) => { // Update the cached user await auth.getSelf() + builderStore.endBuilderOnboarding() resetTourState() } catch (e) { console.error("Onboarding failed", e) @@ -222,6 +227,7 @@ const getTours = () => { }, positionHandler: customPositionHandler, align: "left-outside", + scrollIntoView: true, }, ], onSkip: async () => { diff --git a/packages/builder/src/pages/builder/app/[application]/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/_layout.svelte index 474c17ffb7..f786fad017 100644 --- a/packages/builder/src/pages/builder/app/[application]/_layout.svelte +++ b/packages/builder/src/pages/builder/app/[application]/_layout.svelte @@ -95,7 +95,7 @@ const release_date 
= new Date("2023-03-01T00:00:00.000Z") const onboarded = new Date($auth.user?.onboardedAt) if (onboarded < release_date) { - builderStore.startTour(TOUR_KEYS.FEATURE_ONBOARDING) + builderStore.setTour(TOUR_KEYS.FEATURE_ONBOARDING) } } } diff --git a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte index d263b6b983..c2a7a364e5 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte @@ -155,7 +155,7 @@ // Handler for Datasource Screen Creation const completeDatasourceScreenCreation = async () => { - templates = rowListScreen(selectedDatasources) + templates = rowListScreen(selectedDatasources, mode) const screens = templates.map(template => { let screenTemplate = template.create() @@ -192,10 +192,17 @@ } const loadNewScreen = createdScreens => { - const lastScreen = createdScreens.slice(-1) + const lastScreen = createdScreens.slice(-1)[0] // Go to new screen - $goto(`./${lastScreen._id}`) + if (lastScreen?.props?._children.length) { + // Focus on the main component for the streen type + const mainComponent = lastScreen?.props?._children?.[0]._id + $goto(`./${lastScreen._id}/${mainComponent}`) + } else { + $goto(`./${lastScreen._id}`) + } + screenStore.select(lastScreen._id) } @@ -206,8 +213,6 @@ return screenTemplate }) const createdScreens = await createScreens({ screens, screenAccessRole }) - const lastScreen = createdScreens?.slice(-1)?.pop() - const mainComponent = lastScreen?.props?._children?.[0]._id if (formType === "Update" || formType === "Create") { const associatedTour = @@ -217,18 +222,12 @@ const tourRequired = !$auth?.user?.tours?.[associatedTour] if (tourRequired) { - builderStore.update(state => ({ - ...state, - 
tourStepKey: null, - tourNodes: null, - tourKey: associatedTour, - })) + builderStore.setTour(associatedTour) } } // Go to new screen - $goto(`./${lastScreen._id}/${mainComponent}`) - screenStore.select(lastScreen._id) + loadNewScreen(createdScreens) } // Submit screen config for creation. diff --git a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/index.svelte b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/index.svelte index 6c3637a248..ff3b0beee9 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/index.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/index.svelte @@ -4,7 +4,7 @@ import blankImage from "./images/blank.png" import tableImage from "./images/table.png" import gridImage from "./images/grid.png" - import formImage from "./images/form.png" //optimized example + import formImage from "./images/form.png" import CreateScreenModal from "./CreateScreenModal.svelte" import { screenStore } from "stores/builder" diff --git a/packages/builder/src/stores/builder/builder.js b/packages/builder/src/stores/builder/builder.js index 22b663af35..19253d2688 100644 --- a/packages/builder/src/stores/builder/builder.js +++ b/packages/builder/src/stores/builder/builder.js @@ -7,7 +7,7 @@ import { TOUR_KEYS } from "components/portal/onboarding/tours.js" export const INITIAL_BUILDER_STATE = { previousTopNavPath: {}, - highlightedSettingKey: null, + highlightedSetting: null, propertyFocus: null, builderSidePanel: false, onboarding: false, @@ -61,7 +61,7 @@ export class BuilderStore extends BudiStore { highlightSetting(key, type) { this.update(state => ({ ...state, - highlightedSetting: { key, type: type || "info" }, + highlightedSetting: key ? 
{ key, type: type || "info" } : null, })) } @@ -135,9 +135,18 @@ export class BuilderStore extends BudiStore { })) } - startTour(tourKey) { + endBuilderOnboarding() { this.update(state => ({ ...state, + onboarding: false, + })) + } + + setTour(tourKey) { + this.update(state => ({ + ...state, + tourStepKey: null, + tourNodes: null, tourKey: tourKey, })) } diff --git a/packages/builder/src/stores/builder/tests/builder.test.js b/packages/builder/src/stores/builder/tests/builder.test.js index 7aac2489db..e6f52689aa 100644 --- a/packages/builder/src/stores/builder/tests/builder.test.js +++ b/packages/builder/src/stores/builder/tests/builder.test.js @@ -88,14 +88,42 @@ describe("Builder store", () => { ) }) - it("Sync a highlighted setting key to state", ctx => { - expect(ctx.test.store.highlightedSettingKey).toBeNull() + it("Sync a highlighted setting key to state. Default to info type", ctx => { + expect(ctx.test.store.highlightedSetting).toBeNull() ctx.test.builderStore.highlightSetting("testing") expect(ctx.test.store).toStrictEqual({ ...INITIAL_BUILDER_STATE, - highlightedSettingKey: "testing", + highlightedSetting: { + key: "testing", + type: "info", + }, + }) + }) + + it("Sync a highlighted setting key to state. Use provided type", ctx => { + expect(ctx.test.store.highlightedSetting).toBeNull() + + ctx.test.builderStore.highlightSetting("testing", "error") + + expect(ctx.test.store).toStrictEqual({ + ...INITIAL_BUILDER_STATE, + highlightedSetting: { + key: "testing", + type: "error", + }, + }) + }) + + it("Sync a highlighted setting key to state. 
Unset when no value is passed", ctx => { + expect(ctx.test.store.highlightedSetting).toBeNull() + + ctx.test.builderStore.highlightSetting("testing", "error") + ctx.test.builderStore.highlightSetting() + + expect(ctx.test.store).toStrictEqual({ + ...INITIAL_BUILDER_STATE, }) }) From 9da5467bfe5444e87a20022a9eb6a20d53de1a3f Mon Sep 17 00:00:00 2001 From: Dean Date: Thu, 15 Feb 2024 15:23:52 +0000 Subject: [PATCH 004/174] Remove unnecessary reset function --- .../builder/src/components/portal/onboarding/tours.js | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/builder/src/components/portal/onboarding/tours.js b/packages/builder/src/components/portal/onboarding/tours.js index f5e34518cb..5cf6735970 100644 --- a/packages/builder/src/components/portal/onboarding/tours.js +++ b/packages/builder/src/components/portal/onboarding/tours.js @@ -42,10 +42,6 @@ export const getCurrentStepIdx = (steps, tourStepKey) => { return steps.findIndex(step => step.id === tourStepKey) } -const resetTourState = () => { - builderStore.setTour() -} - const endUserOnboarding = async ({ skipped = false } = {}) => { // Mark the users onboarding as complete // Clear all tour related state @@ -63,7 +59,7 @@ const endUserOnboarding = async ({ skipped = false } = {}) => { await auth.getSelf() builderStore.endBuilderOnboarding() - resetTourState() + builderStore.setTour() } catch (e) { console.error("Onboarding failed", e) return false @@ -87,7 +83,8 @@ const endTour = async ({ key, skipped = false } = {}) => { // Update the cached user await auth.getSelf() - resetTourState() + // Reset tour state + builderStore.setTour() } const tourEvent = (eventKey, skipped) => { From 86c6922bf4f3cedaef16e61569000d40c1beb326 Mon Sep 17 00:00:00 2001 From: Dean Date: Thu, 15 Feb 2024 15:25:07 +0000 Subject: [PATCH 005/174] Added in init flag to ensure that analytic clients only init once --- packages/builder/src/analytics/index.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 
deletions(-) diff --git a/packages/builder/src/analytics/index.js b/packages/builder/src/analytics/index.js index 6bb10acdb5..3a80a05d7f 100644 --- a/packages/builder/src/analytics/index.js +++ b/packages/builder/src/analytics/index.js @@ -9,13 +9,17 @@ const intercom = new IntercomClient(process.env.INTERCOM_TOKEN) class AnalyticsHub { constructor() { this.clients = [posthog, intercom] + this.initialised = false } async activate() { // Check analytics are enabled const analyticsStatus = await API.getAnalyticsStatus() - if (analyticsStatus.enabled) { - this.clients.forEach(client => client.init()) + if (analyticsStatus.enabled && !this.initialised) { + this.clients.forEach(client => { + client.init() + }) + this.initialised = true } } From ddc51edee77793de33579e7d7446ce8cbdebc08c Mon Sep 17 00:00:00 2001 From: Dean Date: Thu, 15 Feb 2024 15:44:54 +0000 Subject: [PATCH 006/174] Fix to ensure the skip flag is also reset in the tour popover --- .../builder/src/components/portal/onboarding/TourPopover.svelte | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/builder/src/components/portal/onboarding/TourPopover.svelte b/packages/builder/src/components/portal/onboarding/TourPopover.svelte index e319d3bee4..6dd7fa96ee 100644 --- a/packages/builder/src/components/portal/onboarding/TourPopover.svelte +++ b/packages/builder/src/components/portal/onboarding/TourPopover.svelte @@ -26,6 +26,7 @@ tourStep = null popoverAnchor = null popover = null + skipping = false return } if (!tourSteps?.length) { From 7895292705fcb7197e7f846d070931fac761bc13 Mon Sep 17 00:00:00 2001 From: Dean Date: Thu, 15 Feb 2024 16:16:30 +0000 Subject: [PATCH 007/174] Add offset to custom positioning. 
Reusing existing prop --- packages/bbui/src/Actions/position_dropdown.js | 5 ++++- .../EditComponentPopover/EditComponentPopover.svelte | 1 + .../design/settings/controls/EditComponentPopover/index.js | 4 ++-- .../src/components/portal/onboarding/TourPopover.svelte | 2 +- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/bbui/src/Actions/position_dropdown.js b/packages/bbui/src/Actions/position_dropdown.js index cc169eac09..d259b9197a 100644 --- a/packages/bbui/src/Actions/position_dropdown.js +++ b/packages/bbui/src/Actions/position_dropdown.js @@ -35,7 +35,10 @@ export default function positionDropdown(element, opts) { } if (typeof customUpdate === "function") { - styles = customUpdate(anchorBounds, elementBounds, styles) + styles = customUpdate(anchorBounds, elementBounds, { + ...styles, + offset: opts.offset, + }) } else { // Determine vertical styles if (align === "right-outside") { diff --git a/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte b/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte index 5bbbfa283c..39e4bc2ada 100644 --- a/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte +++ b/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte @@ -86,6 +86,7 @@ showPopover={drawers.length === 0} clickOutsideOverride={drawers.length > 0} maxHeight={600} + offset={18} handlePostionUpdate={customPositionHandler} > diff --git a/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js b/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js index a32a1cd821..2dc3f60185 100644 --- a/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js +++ b/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js @@ -1,8 
+1,8 @@ export const customPositionHandler = (anchorBounds, eleBounds, cfg) => { - let { left, top } = cfg + let { left, top, offset } = cfg let percentageOffset = 30 // left-outside - left = anchorBounds.left - eleBounds.width - 18 + left = anchorBounds.left - eleBounds.width - (offset || 5) // shift up from the anchor, if space allows let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset diff --git a/packages/builder/src/components/portal/onboarding/TourPopover.svelte b/packages/builder/src/components/portal/onboarding/TourPopover.svelte index 6dd7fa96ee..732ee40e11 100644 --- a/packages/builder/src/components/portal/onboarding/TourPopover.svelte +++ b/packages/builder/src/components/portal/onboarding/TourPopover.svelte @@ -95,7 +95,7 @@ anchor={popoverAnchor} maxWidth={300} dismissible={false} - offset={15} + offset={12} handlePostionUpdate={tourStep?.positionHandler} customZindex={3} > From 08d5a6174a1aa0850d1bb75f14107c22c3adf330 Mon Sep 17 00:00:00 2001 From: Dean Date: Thu, 15 Feb 2024 16:24:34 +0000 Subject: [PATCH 008/174] Lint --- .../components/design/settings/controls/PropertyControl.svelte | 1 - .../design/_components/NewScreen/CreateScreenModal.svelte | 2 -- .../design/_components/NewScreen/FormTypeModal.svelte | 3 +-- 3 files changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/builder/src/components/design/settings/controls/PropertyControl.svelte b/packages/builder/src/components/design/settings/controls/PropertyControl.svelte index 3bfb7e9086..4cbf29e3ae 100644 --- a/packages/builder/src/components/design/settings/controls/PropertyControl.svelte +++ b/packages/builder/src/components/design/settings/controls/PropertyControl.svelte @@ -26,7 +26,6 @@ export let disableBindings = false export let wide - $: nullishValue = value == null || value === "" $: allBindings = getAllBindings(bindings, componentBindings, nested) $: safeValue = getSafeValue(value, defaultValue, allBindings) $: replaceBindings = val => 
readableToRuntimeBinding(allBindings, val) diff --git a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte index c2a7a364e5..8c1a11289d 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/CreateScreenModal.svelte @@ -48,7 +48,6 @@ } try { - let screenId let createdScreens = [] for (let screen of screens) { @@ -73,7 +72,6 @@ // Create the screen const response = await screenStore.save(screen) - screenId = response._id createdScreens.push(response) // Add link in layout. We only ever actually create 1 screen now, even diff --git a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/FormTypeModal.svelte b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/FormTypeModal.svelte index cc0ffaea49..856552dec2 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/FormTypeModal.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/_components/NewScreen/FormTypeModal.svelte @@ -1,5 +1,5 @@
{#if label && !labelHidden} diff --git a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte index b55be0728a..c7f8094084 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte @@ -177,9 +177,6 @@ defaultValue={setting.defaultValue} nested={setting.nested} onChange={val => updateSetting(setting, val)} - highlighted={$builderStore.highlightedSetting?.key === setting.key - ? $builderStore.highlightedSetting - : null} propertyFocus={$builderStore.propertyFocus === setting.key} info={setting.info} disableBindings={setting.disableBindings} From 2933571c62e9a8f58a24cdb73e5d7eed768b8812 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 28 Feb 2024 08:34:41 +0000 Subject: [PATCH 015/174] update runLuceneQuery in client to allow for all filter matching --- packages/shared-core/src/filters.ts | 59 +++++++++++++++++++++-------- 1 file changed, 44 insertions(+), 15 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 46d765a7b5..2c4861ed60 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -390,23 +390,52 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { } ) - // Match a document against all criteria const docMatch = (doc: any) => { - return ( - stringMatch(doc) && - fuzzyMatch(doc) && - rangeMatch(doc) && - equalMatch(doc) && - notEqualMatch(doc) && - emptyMatch(doc) && - notEmptyMatch(doc) && - oneOf(doc) && - contains(doc) && - containsAny(doc) && - notContains(doc) - ) - } + // Determine 
active filters based on query object + const activeFilterKeys = Object.entries(query || {}) + .filter( + ([key, value]) => + !["allOr", "onEmptyFilter"].includes(key) && + Object.keys(value).length > 0 + ) + .map(([key]) => key) + // Apply filters dynamically based on activeFilterKeys + const results = activeFilterKeys.map(filterKey => { + switch (filterKey) { + case "string": + return stringMatch(doc) + case "fuzzy": + return fuzzyMatch(doc) + case "range": + return rangeMatch(doc) + case "equal": + return equalMatch(doc) + case "notEqual": + return notEqualMatch(doc) + case "empty": + return emptyMatch(doc) + case "notEmpty": + return notEmptyMatch(doc) + case "oneOf": + return oneOf(doc) + case "contains": + return contains(doc) + case "containsAny": + return containsAny(doc) + case "notContains": + return notContains(doc) + default: + return true // If the filter type is not recognized, default to true (assuming pass) + } + }) + + if (query!.allOr) { + return results.some(result => result === true) + } else { + return results.every(result => result === true) + } + } // Process all docs return docs.filter(docMatch) } From 76b9cbcc5feb599017a43cacbd0f665c9bab7fd8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 11:22:27 +0100 Subject: [PATCH 016/174] Create docWriteThrough redis cache --- packages/backend-core/src/redis/init.ts | 13 ++++++++++++- packages/backend-core/src/redis/utils.ts | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..7920dfed2d 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = await new 
Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ export async function init() { utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + ).init() } export async function shutdown() { @@ -104,3 +108,10 @@ export async function getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if (!writethroughClient) { + await init() + } + return writethroughClient +} diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** From ff7c8d3b9546fc60424fb8cf24f8ab4615416f27 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:44:52 +0100 Subject: [PATCH 017/174] DocWritethrough --- .../backend-core/src/cache/docWritethrough.ts | 102 ++++++++++++++++++ .../backend-core/src/db/couch/DatabaseImpl.ts | 9 ++ .../backend-core/src/db/instrumentation.ts | 7 ++ packages/types/src/sdk/db.ts | 1 + 4 files changed, 119 insertions(+) create mode 100644 packages/backend-core/src/cache/docWritethrough.ts diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts new file mode 100644 index 0000000000..9e1977f797 --- /dev/null +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -0,0 +1,102 @@ +import BaseCache from "./base" +import { getDocWritethroughClient } from "../redis/init" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" + +const DEFAULT_WRITE_RATE_MS = 10000 + +let CACHE: BaseCache | null = null +async function getCache() { + if (!CACHE) { + const 
client = await getDocWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} + +interface CacheItem { + lastWrite: number +} + +export class DocWritethrough { + db: Database + docId: string + writeRateMs: number + + constructor( + db: Database, + docId: string, + writeRateMs: number = DEFAULT_WRITE_RATE_MS + ) { + this.db = db + this.docId = docId + this.writeRateMs = writeRateMs + } + + private makeCacheItem(): CacheItem { + return { lastWrite: Date.now() } + } + + async patch(data: Record) { + const cache = await getCache() + + const key = `${this.docId}:info` + const cacheItem = await cache.withCache( + key, + null, + () => this.makeCacheItem(), + { + useTenancy: false, + } + ) + + await this.storeToCache(cache, data) + + const updateDb = + !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs + // let output = this.doc + if (updateDb) { + await this.persistToDb(cache) + } + } + + private async storeToCache(cache: BaseCache, data: Record) { + for (const [key, value] of Object.entries(data)) { + const cacheKey = this.docId + ":data:" + key + await cache.store(cacheKey, { key, value }, undefined) + } + } + + private async persistToDb(cache: BaseCache) { + const key = `${this.db.name}_${this.docId}` + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 15000, + }, + async () => { + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } + + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await this.db.put(doc) + } + ) + + if (!lockResponse.executed) { + throw `DocWriteThrough could not be persisted to db for ${key}` + } + } +} diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts 
b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 0e2b4173b0..221399325d 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -134,6 +134,15 @@ export class DatabaseImpl implements Database { }) } + async docExists(id: string): Promise { + try { + await this.get(id) + return true + } catch { + return false + } + } + async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index aa2ac424ae..92bd55406f 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -38,6 +38,13 @@ export class DDInstrumentedDatabase implements Database { }) } + docExists(id: string): Promise { + return tracer.trace("db.docExists", span => { + span?.addTags({ db_name: this.name, doc_id: id }) + return this.db.docExists(id) + }) + } + getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index 9e44a4827f..4ae0869156 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -122,6 +122,7 @@ export interface Database { exists(): Promise get(id?: string): Promise + docExists(id: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 7d50a70d039c3e8308ac3a04f7a1ad32b4383b7e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:47:18 +0100 Subject: [PATCH 018/174] USe get for doc exists --- packages/backend-core/src/cache/base/index.ts | 2 +- packages/backend-core/src/db/couch/DatabaseImpl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 264984c6a5..23c952c7b2 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ 
b/packages/backend-core/src/cache/base/index.ts @@ -60,7 +60,7 @@ export default class BaseCache { */ async withCache( key: string, - ttl: number, + ttl: number | null = null, fetchFn: any, opts = { useTenancy: true } ) { diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 221399325d..6be53a9c54 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -136,7 +136,7 @@ export class DatabaseImpl implements Database { async docExists(id: string): Promise { try { - await this.get(id) + await this.performCall(db => () => db.head(id)) return true } catch { return false From 3af2da3b7df8f14cda67879f5942dfc7404ad0e8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:17:18 +0100 Subject: [PATCH 019/174] DatabaseImpl.docExists test --- .../src/db/tests/DatabaseImpl.spec.ts | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 packages/backend-core/src/db/tests/DatabaseImpl.spec.ts diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts new file mode 100644 index 0000000000..140ecf4f2c --- /dev/null +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -0,0 +1,55 @@ +import _ from "lodash" +import { AnyDocument } from "@budibase/types" +import { generator } from "../../../tests" +import { DatabaseImpl } from "../couch" +import { newid } from "../../utils" + +describe("DatabaseImpl", () => { + const database = new DatabaseImpl(generator.word()) + const documents: AnyDocument[] = [] + + beforeAll(async () => { + const docsToCreate = Array.from({ length: 10 }).map(() => ({ + _id: newid(), + })) + const createdDocs = await database.bulkDocs(docsToCreate) + + documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) + }) + + describe("docExists", () => { + it("can check existing docs by id", async () => 
{ + const existingDoc = _.sample(documents) + const result = await database.docExists(existingDoc!._id!) + + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.docExists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + + const results = [] + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.docExists(id)).toBe(true) + + await database.remove(existingDoc!) + expect(await database.docExists(id)).toBe(false) + }) + }) +}) From 40d7a0a7413325104a49dd7e3a880fe2462ed0b4 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:23:32 +0100 Subject: [PATCH 020/174] docWritethrough test --- .../src/cache/tests/docWritethrough.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 packages/backend-core/src/cache/tests/docWritethrough.spec.ts diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..bfb1da5f1c --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,47 @@ +import tk from "timekeeper" +import { env } from "../.." 
+import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" +import { DocWritethrough } from "../docWritethrough" +import _ from "lodash" + +env._set("MOCK_REDIS", null) + +const initialTime = Date.now() + +const WRITE_RATE_MS = 500 + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(() => { + tk.freeze(initialTime) + documentId = structures.db.id() + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) + + it("patching will not persist until timeout is hit", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + tk.travel(Date.now() + WRITE_RATE_MS - 1) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) + }) +}) From dc4d1fdbda5eb822b1c8a5a14dcc08076ec066df Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:28:35 +0100 Subject: [PATCH 021/174] Add persisting tests --- .../src/cache/tests/docWritethrough.spec.ts | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bfb1da5f1c..ab0de53bee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -33,7 +33,7 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) 
}) - it("patching will not persist until timeout is hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) @@ -43,5 +43,42 @@ describe("docWritethrough", () => { expect(await db.docExists(documentId)).toBe(false) }) }) + + it("patching will persist if timeout hits and next patch is called", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + _rev: expect.stringMatching(/1-.+/), + createdAt: new Date(initialTime + 500).toISOString(), + updatedAt: new Date(initialTime + 500).toISOString(), + }) + }) + }) + + it("patching will not persist even if timeout hits but next patch is not callec", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) }) }) From 3ec00524811a2734e1c2601cae728df141b024ff Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:41:26 +0100 Subject: [PATCH 022/174] Add extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 86 ++++++++++++++++--- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index ab0de53bee..16e47ce3c3 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -7,9 +7,17 @@ import _ from "lodash" env._set("MOCK_REDIS", null) +const WRITE_RATE_MS = 500 + const initialTime = Date.now() -const WRITE_RATE_MS = 500 +function resetTime() { + tk.travel(initialTime) +} +function travelForward(ms: number) { + const updatedTime = Date.now() + ms + tk.travel(updatedTime) +} describe("docWritethrough", () => { const config = new DBTestConfiguration() @@ -28,7 +36,7 @@ describe("docWritethrough", () => { } beforeEach(() => { - tk.freeze(initialTime) + resetTime() documentId = structures.db.id() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -37,7 +45,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS - 1) + travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) expect(await db.docExists(documentId)).toBe(false) @@ -51,7 +59,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -62,23 +70,79 @@ describe("docWritethrough", () => { ...patch2, ...patch3, _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + 500).toISOString(), - updatedAt: new Date(initialTime + 500).toISOString(), + createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), }) }) }) + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + 
travelForward(WRITE_RATE_MS) + const date1 = new Date() + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + const date2 = new Date() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: date2.toISOString(), + }) + ) + }) + }) + it("patching will not persist even if timeout hits but next patch is not callec", async () => { await config.doInTenant(async () => { - const patch1 = generatePatchObject(2) - const patch2 = generatePatchObject(2) - await docWritethrough.patch(patch1) - await docWritethrough.patch(patch2) + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) expect(await db.docExists(documentId)).toBe(false) }) }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const time1 = travelForward(WRITE_RATE_MS) + const patch2 = generatePatchObject(1) + await docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! 
+ expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + travelForward(WRITE_RATE_MS) + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) }) }) From 720d5a41052179da6c734b5edb10e63b6e6d8436 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:51:42 +0100 Subject: [PATCH 023/174] Test concurrency --- .../backend-core/src/cache/docWritethrough.ts | 12 ++++-- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++++++++++++- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 9e1977f797..13a85a0d84 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -19,9 +19,9 @@ interface CacheItem { } export class DocWritethrough { - db: Database - docId: string - writeRateMs: number + private db: Database + private _docId: string + private writeRateMs: number constructor( db: Database, @@ -29,10 +29,14 @@ export class DocWritethrough { writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { this.db = db - this.docId = docId + this._docId = docId this.writeRateMs = writeRateMs } + get docId() { + return this._docId + } + private makeCacheItem(): CacheItem { return { lastWrite: Date.now() } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 16e47ce3c3..aed87499ee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -41,8 +41,9 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, 
WRITE_RATE_MS) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist if timeout from the creation does not hit", async () => { await config.doInTenant(async () => { + travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) travelForward(WRITE_RATE_MS - 1) @@ -116,7 +117,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - const time1 = travelForward(WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -144,5 +145,41 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id(), + WRITE_RATE_MS + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + travelForward(WRITE_RATE_MS) + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) }) }) From 3068e58c31db762fd9abd77c2c3665f8be181645 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 16:48:16 +0100 Subject: [PATCH 024/174] Ensure keys are removed --- .../backend-core/src/cache/docWritethrough.ts | 4 +++ .../src/cache/tests/docWritethrough.spec.ts | 28 +++++++++++++++++++ 2 files changed, 
32 insertions(+) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 13a85a0d84..bde93182a9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -96,6 +96,10 @@ export class DocWritethrough { } await this.db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } ) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index aed87499ee..65e9450f62 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -181,5 +181,33 @@ describe("docWritethrough", () => { ) }) }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + travelForward(WRITE_RATE_MS) + + await docWritethrough.patch({}) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + travelForward(WRITE_RATE_MS) + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) }) }) From 6b8f67ed417fd0405ebc8d71bc3c62639beb67fb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 17:01:16 +0100 Subject: [PATCH 025/174] Extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 65e9450f62..974494d1c9 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -77,6 +77,35 @@ describe("docWritethrough", () => { }) }) + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + travelForward(WRITE_RATE_MS) + + const patch4 = generatePatchObject(3) + await docWritethrough.patch(patch4) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + ...patch4, + }) + ) + }) + }) + it("date audit fields are set correctly when persisting", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) From 66751728bbc7ba32ed98b4b53afe8fad909cf72e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 10:53:18 +0100 Subject: [PATCH 026/174] Fixes and tests --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++---------- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++- 2 files changed, 82 insertions(+), 47 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index bde93182a9..80063e4772 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,8 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private docInfoCacheKey: string + constructor( db: Database, docId: string, @@ -31,6 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs + this.docInfoCacheKey = `${this.docId}:info` } get docId() { @@ -44,26 +47,39 @@ export 
class DocWritethrough { async patch(data: Record) { const cache = await getCache() - const key = `${this.docId}:info` - const cacheItem = await cache.withCache( - key, - null, - () => this.makeCacheItem(), - { - useTenancy: false, - } - ) - await this.storeToCache(cache, data) - const updateDb = - !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs - // let output = this.doc + const updateDb = await this.shouldUpdateDb(cache) + if (updateDb) { - await this.persistToDb(cache) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: this.docInfoCacheKey, + ttl: 15000, + }, + async () => { + if (await this.shouldUpdateDb(cache)) { + await this.persistToDb(cache) + await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } } } + private async shouldUpdateDb(cache: BaseCache) { + const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => + this.makeCacheItem() + ) + return cacheItem.lastWrite <= Date.now() - this.writeRateMs + } + private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { const cacheKey = this.docId + ":data:" + key @@ -72,39 +88,23 @@ export class DocWritethrough { } private async persistToDb(cache: BaseCache) { - const key = `${this.db.name}_${this.docId}` + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: key, - ttl: 15000, - }, - async () => { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = 
await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } + await this.db.put(doc) - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } - ) - - if (!lockResponse.executed) { - throw `DocWriteThrough could not be persisted to db for ${key}` + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) } } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 974494d1c9..bca781e377 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,10 @@ import tk from "timekeeper" -import { env } from "../.." + import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -env._set("MOCK_REDIS", null) - const WRITE_RATE_MS = 500 const initialTime = Date.now() @@ -238,5 +236,42 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent calls will not cause multiple saves", async () => { + async function parallelPatch(count: number) { + await Promise.all( + Array.from({ length: count }).map(() => + docWritethrough.patch(generatePatchObject(1)) + ) + ) + } + + const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") + const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + + await config.doInTenant(async () => { + await parallelPatch(5) + expect(persistToDbSpy).not.toBeCalled() + expect(storeToCacheSpy).toBeCalledTimes(5) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(40) + + expect(persistToDbSpy).toBeCalledTimes(1) + 
expect(storeToCacheSpy).toBeCalledTimes(45) + + await parallelPatch(10) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(55) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(5) + expect(persistToDbSpy).toBeCalledTimes(2) + expect(storeToCacheSpy).toBeCalledTimes(60) + }) + }) }) }) From 2b7c988823384b60201a3122b1081a55200b2157 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:30 +0100 Subject: [PATCH 027/174] Making code more readable --- .../backend-core/src/cache/docWritethrough.ts | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 80063e4772..5148950c1d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -15,7 +15,7 @@ async function getCache() { } interface CacheItem { - lastWrite: number + nextWrite: number } export class DocWritethrough { @@ -40,8 +40,8 @@ export class DocWritethrough { return this._docId } - private makeCacheItem(): CacheItem { - return { lastWrite: Date.now() } + private makeNextWriteInfoItem(): CacheItem { + return { nextWrite: Date.now() + this.writeRateMs } } async patch(data: Record) { @@ -62,7 +62,10 @@ export class DocWritethrough { async () => { if (await this.shouldUpdateDb(cache)) { await this.persistToDb(cache) - await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + await cache.store( + this.docInfoCacheKey, + this.makeNextWriteInfoItem() + ) } } ) @@ -75,9 +78,9 @@ export class DocWritethrough { private async shouldUpdateDb(cache: BaseCache) { const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeCacheItem() + this.makeNextWriteInfoItem() ) - return cacheItem.lastWrite <= Date.now() - this.writeRateMs + return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { From 
ff7c784342ba79f994a15500984ab6668efef635 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:55 +0100 Subject: [PATCH 028/174] Type caches --- packages/backend-core/src/cache/base/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 23c952c7b2..911bd6a831 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -58,12 +58,12 @@ export default class BaseCache { /** * Read from the cache. Write to the cache if not exists. */ - async withCache( + async withCache( key: string, ttl: number | null = null, - fetchFn: any, + fetchFn: () => Promise | T, opts = { useTenancy: true } - ) { + ): Promise { const cachedValue = await this.get(key, opts) if (cachedValue) { return cachedValue From 1c171215680e99a07848168f9579557f3988bd15 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:12:31 +0100 Subject: [PATCH 029/174] Fix types --- packages/backend-core/src/cache/generic.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts index 3ac323a8d4..2d6d8b9472 100644 --- a/packages/backend-core/src/cache/generic.ts +++ b/packages/backend-core/src/cache/generic.ts @@ -26,7 +26,8 @@ export const store = (...args: Parameters) => GENERIC.store(...args) export const destroy = (...args: Parameters) => GENERIC.delete(...args) -export const withCache = (...args: Parameters) => - GENERIC.withCache(...args) +export const withCache = ( + ...args: Parameters> +) => GENERIC.withCache(...args) export const bustCache = (...args: Parameters) => GENERIC.bustCache(...args) From 3a341338a197a13f76993fbc372baf68566cefe9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:07:27 +0100 Subject: [PATCH 030/174] Log requests --- 
packages/backend-core/src/cache/index.ts | 1 + packages/pro | 2 +- packages/types/src/documents/document.ts | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/index.ts b/packages/backend-core/src/cache/index.ts index 4fa986e4e2..3b25108634 100644 --- a/packages/backend-core/src/cache/index.ts +++ b/packages/backend-core/src/cache/index.ts @@ -5,3 +5,4 @@ export * as writethrough from "./writethrough" export * as invite from "./invite" export * as passwordReset from "./passwordReset" export * from "./generic" +export * as docWritethrough from "./docWritethrough" diff --git a/packages/pro b/packages/pro index 183b35d3ac..c83fbd01f5 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec +Subproject commit c83fbd01f50872eedb772fba9a90d79650403126 diff --git a/packages/types/src/documents/document.ts b/packages/types/src/documents/document.ts index 18feb9b518..0de4337f4b 100644 --- a/packages/types/src/documents/document.ts +++ b/packages/types/src/documents/document.ts @@ -38,6 +38,7 @@ export enum DocumentType { AUTOMATION_METADATA = "meta_au", AUDIT_LOG = "al", APP_MIGRATION_METADATA = "_design/migrations", + SCIM_LOG = "scimlog", } // these are the core documents that make up the data, design From 4e53cb5143de0c7cc8a947889faf558dfa7c40c7 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:09:42 +0100 Subject: [PATCH 031/174] Flags --- packages/backend-core/src/environment.ts | 1 + packages/pro | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index b3179cbeea..2da2a77d67 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -186,6 +186,7 @@ const environment = { environment[key] = value }, ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M", + DISABLE_SCIM_CALLS: 
process.env.DISABLE_SCIM_CALLS, } // clean up any environment variable edge cases diff --git a/packages/pro b/packages/pro index c83fbd01f5..35c46cc6c5 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit c83fbd01f50872eedb772fba9a90d79650403126 +Subproject commit 35c46cc6c5f4a6d6f874ec1b51a042cb28d237da From 1c701fa81ed8f58d5f5e8db5c4cf5e08e420a899 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:30:43 +0100 Subject: [PATCH 032/174] Log responses --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 35c46cc6c5..4f8998c4be 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 35c46cc6c5f4a6d6f874ec1b51a042cb28d237da +Subproject commit 4f8998c4be4642a0fe55011514462235edbac7b8 From 93e462b8c769881150026b6c6e9e7048daa9a8e9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:38:48 +0100 Subject: [PATCH 033/174] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5148950c1d..e46c763906 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -33,7 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.docId}:info` + this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` } get docId() { From 2da5cb3ddbf0f4844bb259d83960243c1612a2eb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:41:40 +0100 Subject: [PATCH 034/174] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts 
b/packages/backend-core/src/cache/docWritethrough.ts index e46c763906..e367c9e060 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,7 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private cacheKeyPrefix: string private docInfoCacheKey: string constructor( @@ -33,7 +34,8 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` + this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } get docId() { @@ -85,7 +87,7 @@ export class DocWritethrough { private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { - const cacheKey = this.docId + ":data:" + key + const cacheKey = this.cacheKeyPrefix + ":data:" + key await cache.store(cacheKey, { key, value }, undefined) } } @@ -98,7 +100,7 @@ export class DocWritethrough { doc = { _id: this.docId } } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) + const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value From 4ff2b36553c8b76c9fed6b37989e91ca8618fb34 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:59:51 +0100 Subject: [PATCH 035/174] Use overloads --- .../src/cache/tests/docWritethrough.spec.ts | 6 ++-- .../backend-core/src/db/couch/DatabaseImpl.ts | 28 ++++++++++++------- .../backend-core/src/db/instrumentation.ts | 14 ++++------ .../src/db/tests/DatabaseImpl.spec.ts | 16 +++++------ packages/types/src/sdk/db.ts | 2 +- 5 files changed, 35 insertions(+), 31 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 
bca781e377..4c4a4b2b60 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -35,7 +35,7 @@ describe("docWritethrough", () => { beforeEach(() => { resetTime() - documentId = structures.db.id() + documentId = structures.uuid() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -47,7 +47,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) @@ -136,7 +136,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 9d198e4307..416313f520 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -70,7 +70,15 @@ export class DatabaseImpl implements Database { DatabaseImpl.nano = buildNano(couchInfo) } - async exists() { + exists(docId?: string) { + if (docId === undefined) { + return this.dbExists() + } + + return this.docExists(docId) + } + + private async dbExists() { const response = await directCouchUrlCall({ url: `${this.couchInfo.url}/${this.name}`, method: "HEAD", @@ -79,6 +87,15 @@ export class DatabaseImpl implements Database { return response.status === 200 } + private async docExists(id: string): Promise { + try { + await this.performCall(db => () => db.head(id)) + return true + } catch { + return false + } + } + private nano() { return this.instanceNano || DatabaseImpl.nano } @@ -135,15 +152,6 @@ export class DatabaseImpl implements Database { }) } - async docExists(id: string): Promise { - try { - await this.performCall(db => () => db.head(id)) - return true - } 
catch { - return false - } - } - async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 87af0e3127..795f30d7cd 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database { return this.db.name } - exists(): Promise { + exists(docId?: string): Promise { return tracer.trace("db.exists", span => { - span?.addTags({ db_name: this.name }) + span?.addTags({ db_name: this.name, doc_id: docId }) + if (docId) { + return this.db.exists(docId) + } return this.db.exists() }) } @@ -38,13 +41,6 @@ export class DDInstrumentedDatabase implements Database { }) } - docExists(id: string): Promise { - return tracer.trace("db.docExists", span => { - span?.addTags({ db_name: this.name, doc_id: id }) - return this.db.docExists(id) - }) - } - getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts index 140ecf4f2c..586f13f417 100644 --- a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -17,16 +17,16 @@ describe("DatabaseImpl", () => { documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) }) - describe("docExists", () => { + describe("document exists", () => { it("can check existing docs by id", async () => { const existingDoc = _.sample(documents) - const result = await database.docExists(existingDoc!._id!) + const result = await database.exists(existingDoc!._id!) 
expect(result).toBe(true) }) it("can check non existing docs by id", async () => { - const result = await database.docExists(newid()) + const result = await database.exists(newid()) expect(result).toBe(false) }) @@ -36,9 +36,9 @@ describe("DatabaseImpl", () => { const id = existingDoc!._id! const results = [] - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) expect(results).toEqual([true, true, true]) }) @@ -46,10 +46,10 @@ describe("DatabaseImpl", () => { it("returns false after the doc is deleted", async () => { const existingDoc = _.sample(documents) const id = existingDoc!._id! - expect(await database.docExists(id)).toBe(true) + expect(await database.exists(id)).toBe(true) await database.remove(existingDoc!) - expect(await database.docExists(id)).toBe(false) + expect(await database.exists(id)).toBe(false) }) }) }) diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index dafc9ced57..4d103d5be6 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,7 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise - docExists(id: string): Promise + exists(docId: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From e3c514e45aee8398c4b65b530c86536f7169dafd Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Mon, 4 Mar 2024 09:48:47 +0000 Subject: [PATCH 036/174] Update test lucene builder and add more tests --- packages/shared-core/src/filters.ts | 57 ++++---- .../shared-core/src/tests/filters.test.ts | 128 +++++++++++++----- 2 files changed, 116 insertions(+), 69 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 2c4861ed60..5f975ff541 100644 --- a/packages/shared-core/src/filters.ts +++ 
b/packages/shared-core/src/filters.ts @@ -391,43 +391,32 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { ) const docMatch = (doc: any) => { - // Determine active filters based on query object - const activeFilterKeys = Object.entries(query || {}) + const filterFunctions = { + string: stringMatch, + fuzzy: fuzzyMatch, + range: rangeMatch, + equal: equalMatch, + notEqual: notEqualMatch, + empty: emptyMatch, + notEmpty: notEmptyMatch, + oneOf: oneOf, + contains: contains, + containsAny: containsAny, + notContains: notContains, + } + const activeFilterKeys: (keyof typeof filterFunctions)[] = Object.entries( + query + ) .filter( - ([key, value]) => + ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && - Object.keys(value).length > 0 + Object.keys(value as Record).length > 0 ) - .map(([key]) => key) + .map(([key]) => key as keyof typeof filterFunctions) - // Apply filters dynamically based on activeFilterKeys - const results = activeFilterKeys.map(filterKey => { - switch (filterKey) { - case "string": - return stringMatch(doc) - case "fuzzy": - return fuzzyMatch(doc) - case "range": - return rangeMatch(doc) - case "equal": - return equalMatch(doc) - case "notEqual": - return notEqualMatch(doc) - case "empty": - return emptyMatch(doc) - case "notEmpty": - return notEmptyMatch(doc) - case "oneOf": - return oneOf(doc) - case "contains": - return contains(doc) - case "containsAny": - return containsAny(doc) - case "notContains": - return notContains(doc) - default: - return true // If the filter type is not recognized, default to true (assuming pass) - } + const results: boolean[] = activeFilterKeys.map(filterKey => { + const filterFunction = filterFunctions[filterKey] + return filterFunction ? 
filterFunction(doc) : true }) if (query!.allOr) { @@ -436,7 +425,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { return results.every(result => result === true) } } - // Process all docs + return docs.filter(docMatch) } diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index 8586d58777..1e0a68de89 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -47,10 +47,7 @@ describe("runLuceneQuery", () => { }, ] - function buildQuery( - filterKey: string, - value: { [key: string]: any } - ): SearchQuery { + function buildQuery(filters: { [filterKey: string]: any }): SearchQuery { const query: SearchQuery = { string: {}, fuzzy: {}, @@ -63,8 +60,13 @@ describe("runLuceneQuery", () => { notContains: {}, oneOf: {}, containsAny: {}, + allOr: false, } - query[filterKey as SearchQueryOperators] = value + + for (const filterKey in filters) { + query[filterKey as SearchQueryOperators] = filters[filterKey] + } + return query } @@ -73,16 +75,17 @@ describe("runLuceneQuery", () => { }) it("should return matching rows for equal filter", () => { - const query = buildQuery("equal", { - order_status: 4, + const query = buildQuery({ + equal: { order_status: 4 }, }) expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2]) }) it("should return matching row for notEqual filter", () => { - const query = buildQuery("notEqual", { - order_status: 4, + const query = buildQuery({ + notEqual: { order_status: 4 }, }) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) }) @@ -90,48 +93,56 @@ describe("runLuceneQuery", () => { expect( runLuceneQuery( docs, - buildQuery("fuzzy", { - description: "sm", + buildQuery({ + fuzzy: { description: "sm" }, }) ).map(row => row.description) ).toEqual(["Small box"]) expect( runLuceneQuery( docs, - buildQuery("string", { - description: "SM", + buildQuery({ + string: { 
description: "SM" }, }) ).map(row => row.description) ).toEqual(["Small box"]) }) it("should return rows within a range filter", () => { - const query = buildQuery("range", { - customer_id: { - low: 500, - high: 1000, + const query = buildQuery({ + range: { + customer_id: { + low: 500, + high: 1000, + }, }, }) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) }) it("should return rows with numeric strings within a range filter", () => { - const query = buildQuery("range", { - customer_id: { - low: "500", - high: "1000", + const query = buildQuery({ + range: { + customer_id: { + low: "500", + high: "1000", + }, }, }) expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) }) it("should return rows with ISO date strings within a range filter", () => { - const query = buildQuery("range", { - order_date: { - low: "2016-01-04T00:00:00.000Z", - high: "2016-01-11T00:00:00.000Z", + const query = buildQuery({ + range: { + order_date: { + low: "2016-01-04T00:00:00.000Z", + high: "2016-01-11T00:00:00.000Z", + }, }, }) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2]) }) @@ -150,40 +161,87 @@ describe("runLuceneQuery", () => { label: "", }, ] - const query = buildQuery("range", { - order_date: { - low: "2016-01-04T00:00:00.000Z", - high: "2016-01-11T00:00:00.000Z", + + const query = buildQuery({ + range: { + order_date: { + low: "2016-01-04T00:00:00.000Z", + high: "2016-01-11T00:00:00.000Z", + }, }, }) + expect(runLuceneQuery(docs, query)).toEqual(docs) }) it("should return rows with matches on empty filter", () => { - const query = buildQuery("empty", { - label: null, + const query = buildQuery({ + empty: { + label: null, + }, }) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1]) }) it("should return rows with matches on notEmpty filter", () => { - const query = buildQuery("notEmpty", { - label: null, + const query = buildQuery({ + notEmpty: { + label: null, + }, }) + 
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2, 3]) }) test.each([[523, 259], "523,259"])( "should return rows with matches on numeric oneOf filter", input => { - let query = buildQuery("oneOf", { - customer_id: input, + const query = buildQuery({ + oneOf: { + customer_id: input, + }, }) + expect(runLuceneQuery(docs, query).map(row => row.customer_id)).toEqual([ 259, 523, ]) } ) + + it("should return matching results if allOr is true and only one filter matches", () => { + const query = buildQuery({ + allOr: true, + oneOf: { staff_id: [10] }, + contains: { description: ["box"] }, + }) + + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([ + 1, 2, 3, + ]) + }) + + // what should the name of this test be if it's the same test as above but with different operands + + it("should return matching results if allOr is true and only one filter matches with different operands", () => { + const query = buildQuery({ + allOr: true, + equal: { order_status: 4 }, + oneOf: { label: ["FRAGILE"] }, + }) + + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2]) + }) + + it("should return nothing if allOr is false and only one filter matches", () => { + const query = buildQuery({ + allOr: false, + oneOf: { staff_id: [10] }, + contains: { description: ["box"] }, + }) + + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([]) + }) }) describe("buildLuceneQuery", () => { From 5679acb86811c290dd84faf1d81b19d615680f6b Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Mon, 4 Mar 2024 09:55:28 +0000 Subject: [PATCH 037/174] fix types --- packages/shared-core/src/filters.ts | 34 ++++++++++++++--------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 5f975ff541..6d81bbdc62 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -391,28 +391,28 @@ export const 
runLuceneQuery = (docs: any[], query?: SearchQuery) => { ) const docMatch = (doc: any) => { - const filterFunctions = { - string: stringMatch, - fuzzy: fuzzyMatch, - range: rangeMatch, - equal: equalMatch, - notEqual: notEqualMatch, - empty: emptyMatch, - notEmpty: notEmptyMatch, - oneOf: oneOf, - contains: contains, - containsAny: containsAny, - notContains: notContains, - } - const activeFilterKeys: (keyof typeof filterFunctions)[] = Object.entries( - query - ) + const filterFunctions: Record boolean> = + { + string: stringMatch, + fuzzy: fuzzyMatch, + range: rangeMatch, + equal: equalMatch, + notEqual: notEqualMatch, + empty: emptyMatch, + notEmpty: notEmptyMatch, + oneOf: oneOf, + contains: contains, + containsAny: containsAny, + notContains: notContains, + } + + const activeFilterKeys: SearchQueryOperators[] = Object.entries(query) .filter( ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && Object.keys(value as Record).length > 0 ) - .map(([key]) => key as keyof typeof filterFunctions) + .map(([key]) => key as any) const results: boolean[] = activeFilterKeys.map(filterKey => { const filterFunction = filterFunctions[filterKey] From 3d9a7e5ddf5f76236a304e65239c899e8e865cd7 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Mon, 4 Mar 2024 10:07:06 +0000 Subject: [PATCH 038/174] fix type --- packages/shared-core/src/filters.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 6d81bbdc62..0a1673e558 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -406,7 +406,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { notContains: notContains, } - const activeFilterKeys: SearchQueryOperators[] = Object.entries(query) + const activeFilterKeys: SearchQueryOperators[] = Object.entries(query || {}) .filter( ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && From 
824dd1c1fc601bf890e9390f63b4943303518d15 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:38:45 +0100 Subject: [PATCH 039/174] Type inMemoryQueue --- .../backend-core/src/queue/inMemoryQueue.ts | 36 ++++++++++--------- packages/backend-core/src/queue/queue.ts | 2 ++ 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..3205b6f383 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,6 @@ import events from "events" import { timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -24,9 +25,9 @@ function newJob(queue: string, message: any) { * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. */ -class InMemoryQueue { +class InMemoryQueue implements Partial { _name: string - _opts?: any + _opts?: QueueOptions _messages: any[] _emitter: EventEmitter _runCount: number @@ -37,7 +38,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] @@ -55,8 +56,12 @@ class InMemoryQueue { * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. 
*/ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { + const delay = this._opts?.defaultJobOptions?.delay + if (delay) { + await new Promise(r => setTimeout(() => r(), delay)) + } if (this._messages.length <= 0) { return } @@ -70,7 +75,7 @@ class InMemoryQueue { } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +88,26 @@ class InMemoryQueue { * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) + this._messages.push(newJob(this._name, data)) this._addCount++ this._emitter.emit("message") + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed. 
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +115,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + async removeJobs(pattern: string) { // no-op } @@ -128,12 +132,12 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds From ae85c832483d7533ee141803fc0336a730846dc1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:43:47 +0100 Subject: [PATCH 040/174] Clean --- packages/worker/src/initPro.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts index ddc8d2562a..b34d514992 100644 --- a/packages/worker/src/initPro.ts +++ b/packages/worker/src/initPro.ts @@ -1,5 +1,4 @@ import { sdk as proSdk } from "@budibase/pro" -import * as userSdk from "./sdk/users" export const initPro = async () => { await proSdk.init({}) From 91468d2569e8c1828c4e107750cab8bcc81f016f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:18:01 +0100 Subject: [PATCH 041/174] Add doc-writethrough queue --- packages/backend-core/src/queue/constants.ts | 1 + 
packages/backend-core/src/queue/listeners.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..14dce5fe8d 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { From 2d84bc5da2b5a3eada63eba3866b04324a519afb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:34:05 +0100 Subject: [PATCH 042/174] Use bull --- .../backend-core/src/cache/docWritethrough.ts | 123 +++++++++--------- 1 file changed, 64 insertions(+), 59 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e367c9e060..38a162435d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,6 +3,9 @@ import { getDocWritethroughClient } from 
"../redis/init" import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" +import { JobQueue, createQueue } from "../queue" +import { context, db as dbUtils } from ".." + const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -14,17 +17,63 @@ async function getCache() { return CACHE } -interface CacheItem { - nextWrite: number +interface ProcessDocMessage { + tenantId: string + dbName: string + docId: string + cacheKeyPrefix: string } +export const docWritethroughProcessorQueue = createQueue( + JobQueue.DOC_WRITETHROUGH_QUEUE +) + +docWritethroughProcessorQueue.process(async message => { + const { dbName, tenantId, docId, cacheKeyPrefix } = message.data + const cache = await getCache() + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } + }) +}) + export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number private cacheKeyPrefix: string - private docInfoCacheKey: string constructor( db: Database, @@ -35,54 +84,31 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } 
get docId() { return this._docId } - private makeNextWriteInfoItem(): CacheItem { - return { nextWrite: Date.now() + this.writeRateMs } - } - async patch(data: Record) { const cache = await getCache() await this.storeToCache(cache, data) - const updateDb = await this.shouldUpdateDb(cache) - - if (updateDb) { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: this.docInfoCacheKey, - ttl: 15000, - }, - async () => { - if (await this.shouldUpdateDb(cache)) { - await this.persistToDb(cache) - await cache.store( - this.docInfoCacheKey, - this.makeNextWriteInfoItem() - ) - } - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + docWritethroughProcessorQueue.add( + { + tenantId: context.getTenantId(), + dbName: this.db.name, + docId: this.docId, + cacheKeyPrefix: this.cacheKeyPrefix, + }, + { + delay: this.writeRateMs - 1, + jobId: this.cacheKeyPrefix, + removeOnFail: true, + removeOnComplete: true, } - } - } - - private async shouldUpdateDb(cache: BaseCache) { - const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeNextWriteInfoItem() ) - return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { @@ -91,25 +117,4 @@ export class DocWritethrough { await cache.store(cacheKey, { key, value }, undefined) } } - - private async persistToDb(cache: BaseCache) { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } - - const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } } From 13563d18dca87872dad9294c61c8018158fa191d Mon Sep 17 00:00:00 
2001 From: Sam Rose Date: Tue, 5 Mar 2024 09:20:20 +0000 Subject: [PATCH 043/174] Write a failing test. --- .../src/api/routes/tests/application.spec.ts | 46 ++++++++++++++++++- .../server/src/tests/utilities/api/index.ts | 3 ++ .../server/src/tests/utilities/api/role.ts | 41 +++++++++++++++++ 3 files changed, 89 insertions(+), 1 deletion(-) create mode 100644 packages/server/src/tests/utilities/api/role.ts diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 5a3be462e8..b452e8742f 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -16,7 +16,13 @@ import * as setup from "./utilities" import { AppStatus } from "../../../db/utils" import { events, utils, context } from "@budibase/backend-core" import env from "../../../environment" -import type { App } from "@budibase/types" +import { + PermissionLevel, + type App, + INTERNAL_TABLE_SOURCE_ID, + TableSourceType, + FieldType, +} from "@budibase/types" import tk from "timekeeper" describe("/applications", () => { @@ -256,10 +262,48 @@ describe("/applications", () => { admin: { global: false }, }) + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + }, + }) + await config.withUser(user, async () => { const apps = await config.api.application.fetch() expect(apps).toHaveLength(0) }) + + const role = await config.api.roles.save({ + name: "Test", + inherits: "PUBLIC", + permissionId: "read_only", + version: "name", + }) + + await config.api.user.update({ + ...user, + roles: { + [config.getAppId()]: role._id!, + }, + }) + + await config.api.permission.add({ + resourceId: table._id!, + roleId: role._id!, + level: PermissionLevel.READ, + }) + + await config.withUser(user, async () => { + 
const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) }) }) }) diff --git a/packages/server/src/tests/utilities/api/index.ts b/packages/server/src/tests/utilities/api/index.ts index fdcec3098d..d66acd86fd 100644 --- a/packages/server/src/tests/utilities/api/index.ts +++ b/packages/server/src/tests/utilities/api/index.ts @@ -11,6 +11,7 @@ import { BackupAPI } from "./backup" import { AttachmentAPI } from "./attachment" import { UserAPI } from "./user" import { QueryAPI } from "./query" +import { RoleAPI } from "./role" export default class API { table: TableAPI @@ -25,6 +26,7 @@ export default class API { attachment: AttachmentAPI user: UserAPI query: QueryAPI + roles: RoleAPI constructor(config: TestConfiguration) { this.table = new TableAPI(config) @@ -39,5 +41,6 @@ export default class API { this.attachment = new AttachmentAPI(config) this.user = new UserAPI(config) this.query = new QueryAPI(config) + this.roles = new RoleAPI(config) } } diff --git a/packages/server/src/tests/utilities/api/role.ts b/packages/server/src/tests/utilities/api/role.ts new file mode 100644 index 0000000000..4defbc1220 --- /dev/null +++ b/packages/server/src/tests/utilities/api/role.ts @@ -0,0 +1,41 @@ +import { + AccessibleRolesResponse, + FetchRolesResponse, + FindRoleResponse, + SaveRoleRequest, + SaveRoleResponse, +} from "@budibase/types" +import { Expectations, TestAPI } from "./base" + +export class RoleAPI extends TestAPI { + fetch = async (expectations?: Expectations) => { + return await this._get(`/api/roles`, { + expectations, + }) + } + + find = async (roleId: string, expectations?: Expectations) => { + return await this._get(`/api/roles/${roleId}`, { + expectations, + }) + } + + save = async (body: SaveRoleRequest, expectations?: Expectations) => { + return await this._post(`/api/roles`, { + body, + expectations, + }) + } + + destroy = async (roleId: string, expectations?: Expectations) => { + return await this._delete(`/api/roles/${roleId}`, 
{ + expectations, + }) + } + + accesssible = async (expectations?: Expectations) => { + return await this._get(`/api/roles/accessible`, { + expectations, + }) + } +} From aa124524d4bc93e228c5ada844fccb541cb55e6e Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 5 Mar 2024 10:05:05 +0000 Subject: [PATCH 044/174] Add a simpler test. --- packages/backend-core/src/cache/user.ts | 4 +- packages/server/src/api/controllers/user.ts | 3 +- .../src/api/routes/tests/application.spec.ts | 39 ++++++++++++++++++- .../src/tests/utilities/TestConfiguration.ts | 4 +- 4 files changed, 44 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/user.ts b/packages/backend-core/src/cache/user.ts index 313b9a4d4a..ecfa20f99e 100644 --- a/packages/backend-core/src/cache/user.ts +++ b/packages/backend-core/src/cache/user.ts @@ -6,7 +6,7 @@ import env from "../environment" import * as accounts from "../accounts" import { UserDB } from "../users" import { sdk } from "@budibase/shared-core" -import { User } from "@budibase/types" +import { User, UserMetadata } from "@budibase/types" const EXPIRY_SECONDS = 3600 @@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600 */ async function populateFromDB(userId: string, tenantId: string) { const db = tenancy.getTenantDB(tenantId) - const user = await db.get(userId) + const user = await db.get(userId) user.budibaseAccess = true if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { const account = await accounts.getAccount(user.email) diff --git a/packages/server/src/api/controllers/user.ts b/packages/server/src/api/controllers/user.ts index 108e29fd3d..d1658f9820 100644 --- a/packages/server/src/api/controllers/user.ts +++ b/packages/server/src/api/controllers/user.ts @@ -1,6 +1,6 @@ import { generateUserFlagID, InternalTables } from "../../db/utils" import { getFullUser } from "../../utilities/users" -import { context } from "@budibase/backend-core" +import { cache, context } from "@budibase/backend-core" import { ContextUserMetadata, 
Ctx, @@ -42,6 +42,7 @@ export async function updateMetadata( // this isn't applicable to the user delete metadata.roles ctx.body = await db.put(metadata) + await cache.user.invalidateUser(user._id!) } export async function destroyMetadata(ctx: UserCtx) { diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index b452e8742f..7424511200 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -256,7 +256,44 @@ describe("/applications", () => { }) describe("permissions", () => { - it("should only return apps a user has access to", async () => { + it.only("should only return apps a user has access to", async () => { + const user = await config.createUser({ + builder: { global: false }, + admin: { global: false }, + }) + + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(0) + }) + + await config.api.user.update({ + ...user, + builder: { + [config.getAppId()]: true, + }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) + }) + + it("should only return apps a user has access to through a custom role on a group", async () => { const user = await config.createUser({ builder: { global: false }, admin: { global: false }, diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 2127e9d1cd..32af88836e 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -299,11 +299,11 @@ 
export default class TestConfiguration { } } - withUser(user: User, f: () => Promise) { + async withUser(user: User, f: () => Promise) { const oldUser = this.user this.user = user try { - return f() + return await f() } finally { this.user = oldUser } From e648503e4f31045b0b68e4baed76003adb6d5496 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:50:58 +0100 Subject: [PATCH 045/174] Tests --- .../backend-core/src/cache/docWritethrough.ts | 99 +++++++++------ .../src/cache/tests/docWritethrough.spec.ts | 120 ++++++++++-------- .../backend-core/src/queue/inMemoryQueue.ts | 76 ++++++++--- 3 files changed, 186 insertions(+), 109 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 38a162435d..f53cfbfe5f 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -4,7 +4,8 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" import { JobQueue, createQueue } from "../queue" -import { context, db as dbUtils } from ".." 
+import * as context from "../context" +import * as dbUtils from "../db" const DEFAULT_WRITE_RATE_MS = 10000 @@ -28,50 +29,71 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -docWritethroughProcessorQueue.process(async message => { - const { dbName, tenantId, docId, cacheKeyPrefix } = message.data - const cache = await getCache() - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } +let _init = false +export const init = () => { + if (_init) { + return + } + docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) } + ) - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) - } + }) }) -}) + _init = true +} + +export async function persistToDb({ + dbName, + docId, + cacheKeyPrefix, +}: { + dbName: string + docId: string + cacheKeyPrefix: string +}) { + const cache = await getCache() + + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | 
undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } +} export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number + private tenantId: string private cacheKeyPrefix: string @@ -84,6 +106,7 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.tenantId = context.getTenantId() } get docId() { @@ -97,13 +120,13 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: context.getTenantId(), + tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, }, { - delay: this.writeRateMs - 1, + delay: this.writeRateMs, jobId: this.cacheKeyPrefix, removeOnFail: true, removeOnComplete: true, diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 4c4a4b2b60..83af66a9d2 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,20 +1,32 @@ -import tk from "timekeeper" - import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -const WRITE_RATE_MS = 500 +import { + DocWritethrough, + docWritethroughProcessorQueue, + init, +} from "../docWritethrough" +import InMemoryQueue from "../../queue/inMemoryQueue" + +const WRITE_RATE_MS = 1000 const initialTime = Date.now() +jest.useFakeTimers({ + now: initialTime, +}) + function resetTime() { - 
tk.travel(initialTime) + jest.setSystemTime(initialTime) } -function travelForward(ms: number) { - const updatedTime = Date.now() + ms - tk.travel(updatedTime) +async function travelForward(ms: number) { + await jest.advanceTimersByTimeAsync(ms) + + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + while (queue.hasRunningJobs()) { + await jest.runOnlyPendingTimersAsync() + } } describe("docWritethrough", () => { @@ -33,33 +45,37 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeEach(() => { + beforeAll(() => init()) + + beforeEach(async () => { resetTime() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + await config.doInTenant(async () => { + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) }) - it("patching will not persist if timeout from the creation does not hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - travelForward(WRITE_RATE_MS - 1) - await docWritethrough.patch(generatePatchObject(2)) + await travelForward(WRITE_RATE_MS - 1) expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits and next patch is called", async () => { + it("patching will persist if timeout hits", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) + // This will not be persisted const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -67,7 +83,6 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - ...patch3, _rev: 
expect.stringMatching(/1-.+/), createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), @@ -82,15 +97,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - travelForward(WRITE_RATE_MS) - - const patch4 = generatePatchObject(3) - await docWritethrough.patch(patch4) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -98,7 +110,6 @@ describe("docWritethrough", () => { ...patch1, ...patch2, ...patch3, - ...patch4, }) ) }) @@ -109,16 +120,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date1 = new Date() await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date2 = new Date() - const patch3 = generatePatchObject(3) - await docWritethrough.patch(patch3) - expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -129,22 +137,11 @@ describe("docWritethrough", () => { }) }) - it("patching will not persist even if timeout hits but next patch is not callec", async () => { - await config.doInTenant(async () => { - await docWritethrough.patch(generatePatchObject(2)) - await docWritethrough.patch(generatePatchObject(2)) - - travelForward(WRITE_RATE_MS) - - expect(await db.exists(documentId)).toBe(false) - }) - }) - it("concurrent patches will override keys", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch2 = 
generatePatchObject(1) await docWritethrough.patch(patch2) @@ -155,13 +152,14 @@ describe("docWritethrough", () => { }) ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -173,7 +171,7 @@ describe("docWritethrough", () => { }) }) - it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + it("concurrent patches to different docWritethrough will not pollute each other", async () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, @@ -186,12 +184,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) + await travelForward(WRITE_RATE_MS) expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -214,7 +213,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch({}) @@ -224,9 +223,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -246,30 +246,46 @@ describe("docWritethrough", () => { ) } - const persistToDbSpy = jest.spyOn(docWritethrough as any, 
"persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) - expect(persistToDbSpy).not.toBeCalled() expect(storeToCacheSpy).toBeCalledTimes(5) + expect(await db.exists(documentId)).toBe(false) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(40) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(45) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) + await parallelPatch(10) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(55) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(5) - expect(persistToDbSpy).toBeCalledTimes(2) + await travelForward(WRITE_RATE_MS) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/3-.+/), + }) + ) expect(storeToCacheSpy).toBeCalledTimes(60) }) }) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 3205b6f383..f201714903 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -2,6 +2,13 @@ import events from "events" import { timeout } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" +interface JobMessage { + timestamp: number + queue: string + data: any + opts?: JobOptions +} + /** * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs @@ -11,12 +18,12 @@ import { Queue, QueueOptions, JobOptions } from "./queue" * @returns A new 
job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -28,10 +35,12 @@ function newJob(queue: string, message: any) { class InMemoryQueue implements Partial { _name: string _opts?: QueueOptions - _messages: any[] + _messages: JobMessage[] + _queuedJobIds: Set _emitter: EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. @@ -45,6 +54,7 @@ class InMemoryQueue implements Partial { this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -58,19 +68,24 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - const delay = this._opts?.defaultJobOptions?.delay - if (delay) { - await new Promise(r => setTimeout(() => r(), delay)) + try { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } + } catch (e: any) { + throw e } - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ }) } @@ -89,12 +104,31 @@ class InMemoryQueue implements Partial { */ // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job 
${jobId}`) + return + } + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, data)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } return {} as any } @@ -143,7 +177,11 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } From caf142f1db37b4454c03427468d29a7c915de255 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:55:07 +0100 Subject: [PATCH 046/174] Clean --- .../backend-core/src/queue/inMemoryQueue.ts | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index f201714903..6c8107c7a4 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -68,23 +68,19 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - try { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ - const jobId = msg?.opts?.jobId?.toString() - if (jobId && msg?.opts?.removeOnComplete) { - this._queuedJobIds.delete(jobId) - } - } catch (e: any) { - throw e + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = 
msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) } }) } From 6b86633c650ea637c1387351c41b2ba52d84236e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:01:41 +0100 Subject: [PATCH 047/174] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 217e0a93a1..f5134a01fc 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 217e0a93a12f6ed56f122729366a3068c6bd957e +Subproject commit f5134a01fc122be2535c6b17e47d956c145fb186 From dfdbc7b22bbb70c3f694cd7f88ff0718b92878c8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:05:39 +0100 Subject: [PATCH 048/174] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index f5134a01fc..54411929db 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit f5134a01fc122be2535c6b17e47d956c145fb186 +Subproject commit 54411929db75f6bc4335491e86871c889fe3a98a From ca0f583399d9786ab25374a4ab42cc6a0861f27d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:19:05 +0100 Subject: [PATCH 049/174] Remove defaults and init --- .../backend-core/src/cache/docWritethrough.ts | 52 +++++++------------ .../src/cache/tests/docWritethrough.spec.ts | 3 -- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index f53cfbfe5f..1a16f60eb9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -7,8 +7,6 @@ import { JobQueue, createQueue } from "../queue" import * as context from "../context" import * as dbUtils from "../db" -const DEFAULT_WRITE_RATE_MS = 10000 - let CACHE: BaseCache | null = null async function getCache() { if (!CACHE) { @@ -29,33 +27,27 @@ export const 
docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -let _init = false -export const init = () => { - if (_init) { - return - } - docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) +docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) } - }) + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } }) - _init = true -} +}) export async function persistToDb({ dbName, @@ -97,11 +89,7 @@ export class DocWritethrough { private cacheKeyPrefix: string - constructor( - db: Database, - docId: string, - writeRateMs: number = DEFAULT_WRITE_RATE_MS - ) { + constructor(db: Database, docId: string, writeRateMs: number) { this.db = db this._docId = docId this.writeRateMs = writeRateMs diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 83af66a9d2..a5765171cb 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -5,7 +5,6 @@ import _ from "lodash" import { DocWritethrough, docWritethroughProcessorQueue, - init, } from 
"../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" @@ -45,8 +44,6 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeAll(() => init()) - beforeEach(async () => { resetTime() documentId = structures.uuid() From e8c3f20c3047bbd7cb909f1b3735eae05bf0ca4f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:32:18 +0100 Subject: [PATCH 050/174] Update pro ref --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 54411929db..9daa77883c 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 54411929db75f6bc4335491e86871c889fe3a98a +Subproject commit 9daa77883cc0b395e5badffe48260324527b6924 From cb5f3e3bd3a8aaa0cce5c6530a0a949b080e3a71 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:38:36 +0100 Subject: [PATCH 051/174] Lint --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 9daa77883c..6079868997 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 9daa77883cc0b395e5badffe48260324527b6924 +Subproject commit 607986899781aa7c0b6ccfd9746497b6fc32b569 From e076c0e5f53d8696d65287fca328427b52de928a Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 15:10:51 +0100 Subject: [PATCH 052/174] Use typed redis clients --- packages/backend-core/src/redis/redis.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index d15453ba62..2280c3f6df 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT // for testing just generate the client once let CLOSED = false -let CLIENTS: { [key: number]: any } = {} +const CLIENTS: Record = {} let CONNECTED = false // mock redis always connected @@ -36,7 +36,7 @@ if 
(env.MOCK_REDIS) { CONNECTED = true } -function pickClient(selectDb: number): any { +function pickClient(selectDb: number) { return CLIENTS[selectDb] } From f1decee0102c0bc6b4687fffa14f5b445e1c3689 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 5 Mar 2024 14:37:06 +0000 Subject: [PATCH 053/174] Get test passing. --- .../src/api/routes/tests/application.spec.ts | 8 +++++--- .../src/tests/utilities/TestConfiguration.ts | 18 +++++++++++------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 7424511200..6f948d9977 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -25,6 +25,8 @@ import { } from "@budibase/types" import tk from "timekeeper" +jest.setTimeout(99999999) + describe("/applications", () => { let config = setup.getConfig() let app: App @@ -257,7 +259,7 @@ describe("/applications", () => { describe("permissions", () => { it.only("should only return apps a user has access to", async () => { - const user = await config.createUser({ + let user = await config.createUser({ builder: { global: false }, admin: { global: false }, }) @@ -280,10 +282,10 @@ describe("/applications", () => { expect(apps).toHaveLength(0) }) - await config.api.user.update({ + user = await config.globalUser({ ...user, builder: { - [config.getAppId()]: true, + apps: [config.getProdAppId()], }, }) diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 32af88836e..cfe1bf4066 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -363,6 +363,7 @@ export default class TestConfiguration { _id, ...existing, ...config, + _rev: existing._rev, email, roles, tenantId, @@ -372,11 +373,12 @@ export default class 
TestConfiguration { admin, } await sessions.createASession(_id, { - sessionId: "sessionid", + sessionId: this.sessionIdForUser(_id), tenantId: this.getTenantId(), csrfToken: this.csrfToken, }) const resp = await db.put(user) + await cache.user.invalidateUser(_id) return { _rev: resp.rev, ...user, @@ -384,9 +386,7 @@ export default class TestConfiguration { } async createUser(user: Partial = {}): Promise { - const resp = await this.globalUser(user) - await cache.user.invalidateUser(resp._id!) - return resp + return await this.globalUser(user) } async createGroup(roleId: string = roles.BUILTIN_ROLE_IDS.BASIC) { @@ -416,6 +416,10 @@ export default class TestConfiguration { }) } + sessionIdForUser(userId: string): string { + return `sessionid-${userId}` + } + async login({ roleId, userId, @@ -442,13 +446,13 @@ export default class TestConfiguration { }) } await sessions.createASession(userId, { - sessionId: "sessionid", + sessionId: this.sessionIdForUser(userId), tenantId: this.getTenantId(), }) // have to fake this const authObj = { userId, - sessionId: "sessionid", + sessionId: this.sessionIdForUser(userId), tenantId: this.getTenantId(), } const authToken = jwt.sign(authObj, coreEnv.JWT_SECRET as Secret) @@ -470,7 +474,7 @@ export default class TestConfiguration { const user = this.getUser() const authObj: AuthToken = { userId: user._id!, - sessionId: "sessionid", + sessionId: this.sessionIdForUser(user._id!), tenantId, } const authToken = jwt.sign(authObj, coreEnv.JWT_SECRET as Secret) From a5d6d094e63def29122ce5d3b73470681523688f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 16:28:31 +0100 Subject: [PATCH 054/174] Update types --- packages/backend-core/package.json | 2 +- packages/types/package.json | 2 +- yarn.lock | 11 ++++++----- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index 3f8c34f823..90b3316c3f 100644 --- a/packages/backend-core/package.json +++ 
b/packages/backend-core/package.json @@ -67,7 +67,7 @@ "@types/lodash": "4.14.200", "@types/node-fetch": "2.6.4", "@types/pouchdb": "6.4.0", - "@types/redlock": "4.0.3", + "@types/redlock": "4.0.7", "@types/semver": "7.3.7", "@types/tar-fs": "2.0.1", "@types/uuid": "8.3.4", diff --git a/packages/types/package.json b/packages/types/package.json index ce4fce95fb..558e55a632 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -18,7 +18,7 @@ "@budibase/nano": "10.1.5", "@types/koa": "2.13.4", "@types/pouchdb": "6.4.0", - "@types/redlock": "4.0.3", + "@types/redlock": "4.0.7", "rimraf": "3.0.2", "typescript": "5.2.2" }, diff --git a/yarn.lock b/yarn.lock index 260ae3870a..2f9f558e2c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5408,7 +5408,7 @@ resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== -"@types/ioredis@4.28.10": +"@types/ioredis@4.28.10", "@types/ioredis@^4.28.10": version "4.28.10" resolved "https://registry.yarnpkg.com/@types/ioredis/-/ioredis-4.28.10.tgz#40ceb157a4141088d1394bb87c98ed09a75a06ff" integrity sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ== @@ -5896,12 +5896,13 @@ dependencies: "@types/node" "*" -"@types/redlock@4.0.3": - version "4.0.3" - resolved "https://registry.yarnpkg.com/@types/redlock/-/redlock-4.0.3.tgz#aeab5fe5f0d433a125f6dcf9a884372ac0cddd4b" - integrity sha512-mcvvrquwREbAqyZALNBIlf49AL9Aa324BG+J/Dv4TAP8g+nxQMBI4/APNqqS99QEY7VTNT9XvsaczCVGK8uNnQ== +"@types/redlock@4.0.7": + version "4.0.7" + resolved "https://registry.yarnpkg.com/@types/redlock/-/redlock-4.0.7.tgz#33ed56f22a38d6b2f2e6ae5ed1b3fc1875a08e6b" + integrity sha512-5D6egBv0fCfdbmnCETjEynVuiwFMEFFc3YFjh9EwhaaVTAi0YmB6UI1swq1S1rjIu+n27ppmlTFDK3D3cadJqg== dependencies: "@types/bluebird" "*" + "@types/ioredis" "^4.28.10" 
"@types/redis" "^2.8.0" "@types/request@^2.48.7": From 56870bed5b0bc2c4479c4107e78193e36195a795 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 16:30:45 +0100 Subject: [PATCH 055/174] Typings --- packages/backend-core/src/redis/redis.ts | 21 ++++++++++++------- .../backend-core/src/redis/redlockImpl.ts | 2 +- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 2280c3f6df..f8f0c9f3d7 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -1,5 +1,5 @@ import env from "../environment" -import Redis from "ioredis" +import Redis, { Cluster } from "ioredis" // mock-redis doesn't have any typing let MockRedis: any | undefined if (env.MOCK_REDIS) { @@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT // for testing just generate the client once let CLOSED = false -const CLIENTS: Record = {} +const CLIENTS: Record = {} let CONNECTED = false // mock redis always connected @@ -201,12 +201,15 @@ class RedisWrapper { key = `${db}${SEPARATOR}${key}` let stream if (CLUSTERED) { - let node = this.getClient().nodes("master") + let node = (this.getClient() as Cluster).nodes("master") stream = node[0].scanStream({ match: key + "*", count: 100 }) } else { - stream = this.getClient().scanStream({ match: key + "*", count: 100 }) + stream = (this.getClient() as Redis).scanStream({ + match: key + "*", + count: 100, + }) } - return promisifyStream(stream, this.getClient()) + return promisifyStream(stream, this.getClient() as any) } async keys(pattern: string) { @@ -221,14 +224,16 @@ class RedisWrapper { async get(key: string) { const db = this._db - let response = await this.getClient().get(addDbPrefix(db, key)) + const response = await this.getClient().get(addDbPrefix(db, key)) // overwrite the prefixed key + // @ts-ignore if (response != null && response.key) { + // @ts-ignore response.key = key } // if 
its not an object just return the response try { - return JSON.parse(response) + return JSON.parse(response!) } catch (err) { return response } @@ -280,7 +285,7 @@ class RedisWrapper { return this.getClient().ttl(prefixedKey) } - async setExpiry(key: string, expirySeconds: number | null) { + async setExpiry(key: string, expirySeconds: number) { const db = this._db const prefixedKey = addDbPrefix(db, key) await this.getClient().expire(prefixedKey, expirySeconds) diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts index 7009dc6f55..adeb5b12ec 100644 --- a/packages/backend-core/src/redis/redlockImpl.ts +++ b/packages/backend-core/src/redis/redlockImpl.ts @@ -72,7 +72,7 @@ const OPTIONS: Record = { export async function newRedlock(opts: Redlock.Options = {}) { const options = { ...OPTIONS.DEFAULT, ...opts } const redisWrapper = await getLockClient() - const client = redisWrapper.getClient() + const client = redisWrapper.getClient() as any return new Redlock([client], options) } From a4288a9dd3e21dd99c3ac825effa7b1a40dadb63 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 16:41:49 +0100 Subject: [PATCH 056/174] Basic test --- .../src/redis/tests/redis.spec.ts | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 packages/backend-core/src/redis/tests/redis.spec.ts diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts new file mode 100644 index 0000000000..d082b6b617 --- /dev/null +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -0,0 +1,21 @@ +import { generator, structures } from "../../../tests" +import RedisWrapper from "../redis" + +describe("redis", () => { + const redis = new RedisWrapper(structures.db.id()) + + beforeAll(async () => { + await redis.init() + }) + + describe("store", () => { + it("a basic value can be persisted", async () => { + const key = structures.uuid() + const value = 
generator.word() + + await redis.store(key, value) + + expect(await redis.get(key)).toEqual(value) + }) + }) +}) From 49db47e1fd75b63b7bf06794e5418a54c266db3d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 16:46:33 +0100 Subject: [PATCH 057/174] Add bulk store --- packages/backend-core/src/redis/redis.ts | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index f8f0c9f3d7..076f64b1ea 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -279,6 +279,19 @@ class RedisWrapper { } } + async bulkStore( + data: Record, + expirySeconds: number | null = null + ) { + const client = this.getClient() + + const dataToStore = Object.entries(data).reduce((acc, [key, value]) => { + acc[addDbPrefix(this._db, key)] = value + return acc + }, {} as Record) + await client.mset(dataToStore) + } + async getTTL(key: string) { const db = this._db const prefixedKey = addDbPrefix(db, key) From d9a5899b2770e7140c360f0eaf24b9e3ccb84d07 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 16:49:45 +0100 Subject: [PATCH 058/174] Bulk store test --- .../src/redis/tests/redis.spec.ts | 24 +++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index d082b6b617..6333573e6e 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -2,9 +2,10 @@ import { generator, structures } from "../../../tests" import RedisWrapper from "../redis" describe("redis", () => { - const redis = new RedisWrapper(structures.db.id()) + let redis: RedisWrapper - beforeAll(async () => { + beforeEach(async () => { + redis = new RedisWrapper(structures.db.id()) await redis.init() }) @@ -18,4 +19,23 @@ describe("redis", () => { expect(await 
redis.get(key)).toEqual(value) }) }) + + describe("bulkStore", () => { + it("a basic object can be persisted", async () => { + const data = generator + .unique(() => generator.word(), 10) + .reduce((acc, key) => { + acc[key] = generator.word() + return acc + }, {} as Record) + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + } + + expect(await redis.keys("*")).toHaveLength(10) + }) + }) }) From 1b0a943e13fcb46cc91be18f794c3ff2a4c95684 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:04:46 +0100 Subject: [PATCH 059/174] Atomic expires --- packages/backend-core/src/redis/redis.ts | 19 ++++++++++++++++++- .../src/redis/tests/redis.spec.ts | 19 +++++++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 076f64b1ea..18152aac72 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -289,7 +289,24 @@ class RedisWrapper { acc[addDbPrefix(this._db, key)] = value return acc }, {} as Record) - await client.mset(dataToStore) + + const luaScript = ` + for i, key in ipairs(KEYS) do + redis.call('MSET', key, ARGV[i]) + ${ + expirySeconds !== null + ? 
`redis.call('EXPIRE', key, ARGV[#ARGV])` + : "" + } + end + ` + const keys = Object.keys(dataToStore) + let values = Object.values(dataToStore) + if (expirySeconds !== null) { + values.push(expirySeconds) + } + + await client.eval(luaScript, keys.length, ...keys, ...values) } async getTTL(key: string) { diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index 6333573e6e..e3e4ae7247 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -37,5 +37,24 @@ describe("redis", () => { expect(await redis.keys("*")).toHaveLength(10) }) + + it("a bulk store can be persisted with TTL", async () => { + const ttl = 500 + const data = generator + .unique(() => generator.word(), 10) + .reduce((acc, key) => { + acc[key] = generator.word() + return acc + }, {} as Record) + + await redis.bulkStore(data, ttl) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + expect(await redis.getTTL(key)).toEqual(ttl) + } + + expect(await redis.keys("*")).toHaveLength(10) + }) }) }) From 3baf981d4826c68360261a1c57c9a97c1a4ed267 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:08:08 +0100 Subject: [PATCH 060/174] Add TTL tests --- .../src/redis/tests/redis.spec.ts | 43 ++++++++++++++----- 1 file changed, 33 insertions(+), 10 deletions(-) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index e3e4ae7247..13f2c676c9 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -21,13 +21,17 @@ describe("redis", () => { }) describe("bulkStore", () => { - it("a basic object can be persisted", async () => { - const data = generator - .unique(() => generator.word(), 10) + function createRandomObject(keyLength: number) { + return generator + .unique(() => 
generator.word(), keyLength) .reduce((acc, key) => { acc[key] = generator.word() return acc }, {} as Record) + } + + it("a basic object can be persisted", async () => { + const data = createRandomObject(10) await redis.bulkStore(data) @@ -38,14 +42,20 @@ describe("redis", () => { expect(await redis.keys("*")).toHaveLength(10) }) + it("no TTL is set by default", async () => { + const data = createRandomObject(10) + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + expect(await redis.getTTL(key)).toEqual(-1) + } + }) + it("a bulk store can be persisted with TTL", async () => { const ttl = 500 - const data = generator - .unique(() => generator.word(), 10) - .reduce((acc, key) => { - acc[key] = generator.word() - return acc - }, {} as Record) + const data = createRandomObject(8) await redis.bulkStore(data, ttl) @@ -54,7 +64,20 @@ describe("redis", () => { expect(await redis.getTTL(key)).toEqual(ttl) } - expect(await redis.keys("*")).toHaveLength(10) + expect(await redis.keys("*")).toHaveLength(8) + }) + + it("setting a TTL of -1 will not persist the key", async () => { + const ttl = -1 + const data = createRandomObject(5) + + await redis.bulkStore(data, ttl) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toBe(null) + } + + expect(await redis.keys("*")).toHaveLength(0) }) }) }) From 8f9e8b60c328af4f635c98759b475f08668d7eed Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:19:26 +0100 Subject: [PATCH 061/174] Fix types --- packages/backend-core/src/redis/redis.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 18152aac72..99613e7c32 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT // for testing just 
generate the client once let CLOSED = false -const CLIENTS: Record = {} +const CLIENTS: Record = {} let CONNECTED = false // mock redis always connected @@ -201,7 +201,7 @@ class RedisWrapper { key = `${db}${SEPARATOR}${key}` let stream if (CLUSTERED) { - let node = (this.getClient() as Cluster).nodes("master") + let node = (this.getClient() as never as Cluster).nodes("master") stream = node[0].scanStream({ match: key + "*", count: 100 }) } else { stream = (this.getClient() as Redis).scanStream({ From f2330144de5bfdddc3d9b4021540e0cc9976c3ca Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:57:28 +0100 Subject: [PATCH 062/174] Clean --- packages/backend-core/src/redis/redis.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 99613e7c32..59583da366 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -301,7 +301,7 @@ class RedisWrapper { end ` const keys = Object.keys(dataToStore) - let values = Object.values(dataToStore) + const values = Object.values(dataToStore) if (expirySeconds !== null) { values.push(expirySeconds) } From 82ff748fd950e79e60daad95268d5c34490d25f7 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:10:33 +0100 Subject: [PATCH 063/174] Add complex object tests --- .../src/redis/tests/redis.spec.ts | 31 +++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index 13f2c676c9..eb32172edd 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -18,14 +18,26 @@ describe("redis", () => { expect(await redis.get(key)).toEqual(value) }) + + it("objects can be persisted", async () => { + const key = structures.uuid() + const value = { [generator.word()]: 
generator.word() } + + await redis.store(key, value) + + expect(await redis.get(key)).toEqual(value) + }) }) describe("bulkStore", () => { - function createRandomObject(keyLength: number) { + function createRandomObject( + keyLength: number, + valueGenerator: () => any = () => generator.word() + ) { return generator .unique(() => generator.word(), keyLength) .reduce((acc, key) => { - acc[key] = generator.word() + acc[key] = valueGenerator() return acc }, {} as Record) } @@ -42,6 +54,21 @@ describe("redis", () => { expect(await redis.keys("*")).toHaveLength(10) }) + it("a complex object can be persisted", async () => { + const data = { + ...createRandomObject(10, () => createRandomObject(5)), + ...createRandomObject(5), + } + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + } + + expect(await redis.keys("*")).toHaveLength(10) + }) + it("no TTL is set by default", async () => { const data = createRandomObject(10) From de0527384aebd2703fd7eec58818366627da8b10 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:10:45 +0100 Subject: [PATCH 064/174] Support complex objects --- packages/backend-core/src/redis/redis.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 59583da366..8cfa3db5c1 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -286,7 +286,8 @@ class RedisWrapper { const client = this.getClient() const dataToStore = Object.entries(data).reduce((acc, [key, value]) => { - acc[addDbPrefix(this._db, key)] = value + acc[addDbPrefix(this._db, key)] = + typeof value === "object" ? 
JSON.stringify(value) : value return acc }, {} as Record) From a093cfca993d585ae5a83e933dcacccee35c4bf5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:11:12 +0100 Subject: [PATCH 065/174] Fix test --- packages/backend-core/src/redis/tests/redis.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index eb32172edd..1fd40acc37 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -66,7 +66,7 @@ describe("redis", () => { expect(await redis.get(key)).toEqual(value) } - expect(await redis.keys("*")).toHaveLength(10) + expect(await redis.keys("*")).toHaveLength(15) }) it("no TTL is set by default", async () => { From 40cc383c0140fa3d960938162e7924aaacd079f5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 11:22:27 +0100 Subject: [PATCH 066/174] Create docWriteThrough redis cache --- packages/backend-core/src/redis/init.ts | 13 ++++++++++++- packages/backend-core/src/redis/utils.ts | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..7920dfed2d 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = await new Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ export async function init() { utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + ).init() } export async function shutdown() { @@ -104,3 +108,10 @@ export async function 
getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if (!writethroughClient) { + await init() + } + return writethroughClient +} diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** From 9f42ea6bbf2b8247e988b16ae8f3b84a9beb1f9e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:44:52 +0100 Subject: [PATCH 067/174] DocWritethrough --- .../backend-core/src/cache/docWritethrough.ts | 102 ++++++++++++++++++ .../backend-core/src/db/couch/DatabaseImpl.ts | 9 ++ .../backend-core/src/db/instrumentation.ts | 7 ++ packages/types/src/sdk/db.ts | 1 + 4 files changed, 119 insertions(+) create mode 100644 packages/backend-core/src/cache/docWritethrough.ts diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts new file mode 100644 index 0000000000..9e1977f797 --- /dev/null +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -0,0 +1,102 @@ +import BaseCache from "./base" +import { getDocWritethroughClient } from "../redis/init" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" + +const DEFAULT_WRITE_RATE_MS = 10000 + +let CACHE: BaseCache | null = null +async function getCache() { + if (!CACHE) { + const client = await getDocWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} + +interface CacheItem { + lastWrite: number +} + +export class DocWritethrough { + db: Database + docId: string + writeRateMs: number + + constructor( + db: Database, + docId: string, + writeRateMs: number = DEFAULT_WRITE_RATE_MS + ) { + 
this.db = db + this.docId = docId + this.writeRateMs = writeRateMs + } + + private makeCacheItem(): CacheItem { + return { lastWrite: Date.now() } + } + + async patch(data: Record) { + const cache = await getCache() + + const key = `${this.docId}:info` + const cacheItem = await cache.withCache( + key, + null, + () => this.makeCacheItem(), + { + useTenancy: false, + } + ) + + await this.storeToCache(cache, data) + + const updateDb = + !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs + // let output = this.doc + if (updateDb) { + await this.persistToDb(cache) + } + } + + private async storeToCache(cache: BaseCache, data: Record) { + for (const [key, value] of Object.entries(data)) { + const cacheKey = this.docId + ":data:" + key + await cache.store(cacheKey, { key, value }, undefined) + } + } + + private async persistToDb(cache: BaseCache) { + const key = `${this.db.name}_${this.docId}` + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 15000, + }, + async () => { + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } + + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await this.db.put(doc) + } + ) + + if (!lockResponse.executed) { + throw `DocWriteThrough could not be persisted to db for ${key}` + } + } +} diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 7e7c997cbe..d4d17f6127 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -135,6 +135,15 @@ export class DatabaseImpl implements Database { }) } + async docExists(id: string): Promise { + try { + await this.get(id) + return true + } catch { + return 
false + } + } + async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 03010d4c92..87af0e3127 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -38,6 +38,13 @@ export class DDInstrumentedDatabase implements Database { }) } + docExists(id: string): Promise { + return tracer.trace("db.docExists", span => { + span?.addTags({ db_name: this.name, doc_id: id }) + return this.db.docExists(id) + }) + } + getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index c4e4a4f02f..dafc9ced57 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,6 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise + docExists(id: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 10568cccff8e4d342a03484f04be299fe4868917 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:47:18 +0100 Subject: [PATCH 068/174] USe get for doc exists --- packages/backend-core/src/cache/base/index.ts | 2 +- packages/backend-core/src/db/couch/DatabaseImpl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 264984c6a5..23c952c7b2 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -60,7 +60,7 @@ export default class BaseCache { */ async withCache( key: string, - ttl: number, + ttl: number | null = null, fetchFn: any, opts = { useTenancy: true } ) { diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index d4d17f6127..9d198e4307 100644 --- 
a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -137,7 +137,7 @@ export class DatabaseImpl implements Database { async docExists(id: string): Promise { try { - await this.get(id) + await this.performCall(db => () => db.head(id)) return true } catch { return false From 82132d539d2c535be99a8aee58360fff288a1907 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:17:18 +0100 Subject: [PATCH 069/174] DatabaseImpl.docExists test --- .../src/db/tests/DatabaseImpl.spec.ts | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 packages/backend-core/src/db/tests/DatabaseImpl.spec.ts diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts new file mode 100644 index 0000000000..140ecf4f2c --- /dev/null +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -0,0 +1,55 @@ +import _ from "lodash" +import { AnyDocument } from "@budibase/types" +import { generator } from "../../../tests" +import { DatabaseImpl } from "../couch" +import { newid } from "../../utils" + +describe("DatabaseImpl", () => { + const database = new DatabaseImpl(generator.word()) + const documents: AnyDocument[] = [] + + beforeAll(async () => { + const docsToCreate = Array.from({ length: 10 }).map(() => ({ + _id: newid(), + })) + const createdDocs = await database.bulkDocs(docsToCreate) + + documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) + }) + + describe("docExists", () => { + it("can check existing docs by id", async () => { + const existingDoc = _.sample(documents) + const result = await database.docExists(existingDoc!._id!) 
+ + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.docExists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + + const results = [] + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.docExists(id)).toBe(true) + + await database.remove(existingDoc!) + expect(await database.docExists(id)).toBe(false) + }) + }) +}) From 74aae19a7ebdd9fcb040679c2aeca40e991a8456 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:23:32 +0100 Subject: [PATCH 070/174] docWritethrough test --- .../src/cache/tests/docWritethrough.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 packages/backend-core/src/cache/tests/docWritethrough.spec.ts diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..bfb1da5f1c --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,47 @@ +import tk from "timekeeper" +import { env } from "../.." 
+import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" +import { DocWritethrough } from "../docWritethrough" +import _ from "lodash" + +env._set("MOCK_REDIS", null) + +const initialTime = Date.now() + +const WRITE_RATE_MS = 500 + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(() => { + tk.freeze(initialTime) + documentId = structures.db.id() + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) + + it("patching will not persist until timeout is hit", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + tk.travel(Date.now() + WRITE_RATE_MS - 1) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) + }) +}) From bfde028e9b8dcae7ed81d34542acfcef32cf791c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:28:35 +0100 Subject: [PATCH 071/174] Add persisting tests --- .../src/cache/tests/docWritethrough.spec.ts | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bfb1da5f1c..ab0de53bee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -33,7 +33,7 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) 
}) - it("patching will not persist until timeout is hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) @@ -43,5 +43,42 @@ describe("docWritethrough", () => { expect(await db.docExists(documentId)).toBe(false) }) }) + + it("patching will persist if timeout hits and next patch is called", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + _rev: expect.stringMatching(/1-.+/), + createdAt: new Date(initialTime + 500).toISOString(), + updatedAt: new Date(initialTime + 500).toISOString(), + }) + }) + }) + + it("patching will not persist even if timeout hits but next patch is not callec", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) }) }) From 35536592e6558176e48960063ab71ddfebd2f8d1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:41:26 +0100 Subject: [PATCH 072/174] Add extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 86 ++++++++++++++++--- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index ab0de53bee..16e47ce3c3 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -7,9 +7,17 @@ import _ from "lodash" env._set("MOCK_REDIS", null) +const WRITE_RATE_MS = 500 + const initialTime = Date.now() -const WRITE_RATE_MS = 500 +function resetTime() { + tk.travel(initialTime) +} +function travelForward(ms: number) { + const updatedTime = Date.now() + ms + tk.travel(updatedTime) +} describe("docWritethrough", () => { const config = new DBTestConfiguration() @@ -28,7 +36,7 @@ describe("docWritethrough", () => { } beforeEach(() => { - tk.freeze(initialTime) + resetTime() documentId = structures.db.id() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -37,7 +45,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS - 1) + travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) expect(await db.docExists(documentId)).toBe(false) @@ -51,7 +59,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -62,23 +70,79 @@ describe("docWritethrough", () => { ...patch2, ...patch3, _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + 500).toISOString(), - updatedAt: new Date(initialTime + 500).toISOString(), + createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), }) }) }) + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + 
travelForward(WRITE_RATE_MS) + const date1 = new Date() + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + const date2 = new Date() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: date2.toISOString(), + }) + ) + }) + }) + it("patching will not persist even if timeout hits but next patch is not callec", async () => { await config.doInTenant(async () => { - const patch1 = generatePatchObject(2) - const patch2 = generatePatchObject(2) - await docWritethrough.patch(patch1) - await docWritethrough.patch(patch2) + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) expect(await db.docExists(documentId)).toBe(false) }) }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const time1 = travelForward(WRITE_RATE_MS) + const patch2 = generatePatchObject(1) + await docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! 
+ expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + travelForward(WRITE_RATE_MS) + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) }) }) From 41dde9722f57f12d03450c4bc98e929c7133086d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:51:42 +0100 Subject: [PATCH 073/174] Test concurrency --- .../backend-core/src/cache/docWritethrough.ts | 12 ++++-- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++++++++++++- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 9e1977f797..13a85a0d84 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -19,9 +19,9 @@ interface CacheItem { } export class DocWritethrough { - db: Database - docId: string - writeRateMs: number + private db: Database + private _docId: string + private writeRateMs: number constructor( db: Database, @@ -29,10 +29,14 @@ export class DocWritethrough { writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { this.db = db - this.docId = docId + this._docId = docId this.writeRateMs = writeRateMs } + get docId() { + return this._docId + } + private makeCacheItem(): CacheItem { return { lastWrite: Date.now() } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 16e47ce3c3..aed87499ee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -41,8 +41,9 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, 
WRITE_RATE_MS) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist if timeout from the creation does not hit", async () => { await config.doInTenant(async () => { + travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) travelForward(WRITE_RATE_MS - 1) @@ -116,7 +117,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - const time1 = travelForward(WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -144,5 +145,41 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id(), + WRITE_RATE_MS + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + travelForward(WRITE_RATE_MS) + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) }) }) From 223637999a4679536ca68ca0a0115376753abfa1 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 16:48:16 +0100 Subject: [PATCH 074/174] Ensure keys are removed --- .../backend-core/src/cache/docWritethrough.ts | 4 +++ .../src/cache/tests/docWritethrough.spec.ts | 28 +++++++++++++++++++ 2 files changed, 
32 insertions(+) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 13a85a0d84..bde93182a9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -96,6 +96,10 @@ export class DocWritethrough { } await this.db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } ) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index aed87499ee..65e9450f62 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -181,5 +181,33 @@ describe("docWritethrough", () => { ) }) }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + travelForward(WRITE_RATE_MS) + + await docWritethrough.patch({}) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + travelForward(WRITE_RATE_MS) + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) }) }) From 04fb27962390d79fe2fe3b65fe7ee44a48d6dbd8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 17:01:16 +0100 Subject: [PATCH 075/174] Extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 65e9450f62..974494d1c9 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -77,6 +77,35 @@ describe("docWritethrough", () => { }) }) + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + travelForward(WRITE_RATE_MS) + + const patch4 = generatePatchObject(3) + await docWritethrough.patch(patch4) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + ...patch4, + }) + ) + }) + }) + it("date audit fields are set correctly when persisting", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) From fd93eb79d5b96c7cf0c71a9d8501dfe189771d56 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 10:53:18 +0100 Subject: [PATCH 076/174] Fixes and tests --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++---------- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++- 2 files changed, 82 insertions(+), 47 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index bde93182a9..80063e4772 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,8 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private docInfoCacheKey: string + constructor( db: Database, docId: string, @@ -31,6 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs + this.docInfoCacheKey = `${this.docId}:info` } get docId() { @@ -44,26 +47,39 @@ export 
class DocWritethrough { async patch(data: Record) { const cache = await getCache() - const key = `${this.docId}:info` - const cacheItem = await cache.withCache( - key, - null, - () => this.makeCacheItem(), - { - useTenancy: false, - } - ) - await this.storeToCache(cache, data) - const updateDb = - !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs - // let output = this.doc + const updateDb = await this.shouldUpdateDb(cache) + if (updateDb) { - await this.persistToDb(cache) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: this.docInfoCacheKey, + ttl: 15000, + }, + async () => { + if (await this.shouldUpdateDb(cache)) { + await this.persistToDb(cache) + await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } } } + private async shouldUpdateDb(cache: BaseCache) { + const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => + this.makeCacheItem() + ) + return cacheItem.lastWrite <= Date.now() - this.writeRateMs + } + private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { const cacheKey = this.docId + ":data:" + key @@ -72,39 +88,23 @@ export class DocWritethrough { } private async persistToDb(cache: BaseCache) { - const key = `${this.db.name}_${this.docId}` + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: key, - ttl: 15000, - }, - async () => { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = 
await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } + await this.db.put(doc) - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } - ) - - if (!lockResponse.executed) { - throw `DocWriteThrough could not be persisted to db for ${key}` + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) } } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 974494d1c9..bca781e377 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,10 @@ import tk from "timekeeper" -import { env } from "../.." + import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -env._set("MOCK_REDIS", null) - const WRITE_RATE_MS = 500 const initialTime = Date.now() @@ -238,5 +236,42 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent calls will not cause multiple saves", async () => { + async function parallelPatch(count: number) { + await Promise.all( + Array.from({ length: count }).map(() => + docWritethrough.patch(generatePatchObject(1)) + ) + ) + } + + const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") + const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + + await config.doInTenant(async () => { + await parallelPatch(5) + expect(persistToDbSpy).not.toBeCalled() + expect(storeToCacheSpy).toBeCalledTimes(5) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(40) + + expect(persistToDbSpy).toBeCalledTimes(1) + 
expect(storeToCacheSpy).toBeCalledTimes(45) + + await parallelPatch(10) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(55) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(5) + expect(persistToDbSpy).toBeCalledTimes(2) + expect(storeToCacheSpy).toBeCalledTimes(60) + }) + }) }) }) From eb9a1633944d84cbefa727b18a129feff27c9f56 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:30 +0100 Subject: [PATCH 077/174] Making code more readable --- .../backend-core/src/cache/docWritethrough.ts | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 80063e4772..5148950c1d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -15,7 +15,7 @@ async function getCache() { } interface CacheItem { - lastWrite: number + nextWrite: number } export class DocWritethrough { @@ -40,8 +40,8 @@ export class DocWritethrough { return this._docId } - private makeCacheItem(): CacheItem { - return { lastWrite: Date.now() } + private makeNextWriteInfoItem(): CacheItem { + return { nextWrite: Date.now() + this.writeRateMs } } async patch(data: Record) { @@ -62,7 +62,10 @@ export class DocWritethrough { async () => { if (await this.shouldUpdateDb(cache)) { await this.persistToDb(cache) - await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + await cache.store( + this.docInfoCacheKey, + this.makeNextWriteInfoItem() + ) } } ) @@ -75,9 +78,9 @@ export class DocWritethrough { private async shouldUpdateDb(cache: BaseCache) { const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeCacheItem() + this.makeNextWriteInfoItem() ) - return cacheItem.lastWrite <= Date.now() - this.writeRateMs + return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { From 
dc84eb4e806684c438ab18005bb14836720cc57b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:55 +0100 Subject: [PATCH 078/174] Type caches --- packages/backend-core/src/cache/base/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 23c952c7b2..911bd6a831 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -58,12 +58,12 @@ export default class BaseCache { /** * Read from the cache. Write to the cache if not exists. */ - async withCache( + async withCache( key: string, ttl: number | null = null, - fetchFn: any, + fetchFn: () => Promise | T, opts = { useTenancy: true } - ) { + ): Promise { const cachedValue = await this.get(key, opts) if (cachedValue) { return cachedValue From e986d34b8739258e81c6acc385afdd4cbe133a7b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:12:31 +0100 Subject: [PATCH 079/174] Fix types --- packages/backend-core/src/cache/generic.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts index 3ac323a8d4..2d6d8b9472 100644 --- a/packages/backend-core/src/cache/generic.ts +++ b/packages/backend-core/src/cache/generic.ts @@ -26,7 +26,8 @@ export const store = (...args: Parameters) => GENERIC.store(...args) export const destroy = (...args: Parameters) => GENERIC.delete(...args) -export const withCache = (...args: Parameters) => - GENERIC.withCache(...args) +export const withCache = ( + ...args: Parameters> +) => GENERIC.withCache(...args) export const bustCache = (...args: Parameters) => GENERIC.bustCache(...args) From da012c0f082d1bf44b6837e69da05d0a13db7fea Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:38:48 +0100 Subject: [PATCH 080/174] Namespace key in redis by db --- 
packages/backend-core/src/cache/docWritethrough.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5148950c1d..e46c763906 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -33,7 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.docId}:info` + this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` } get docId() { From 82a6f9027e5df55b113d550d5e26a8b958f87219 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:41:40 +0100 Subject: [PATCH 081/174] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e46c763906..e367c9e060 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,7 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private cacheKeyPrefix: string private docInfoCacheKey: string constructor( @@ -33,7 +34,8 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` + this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } get docId() { @@ -85,7 +87,7 @@ export class DocWritethrough { private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { - const cacheKey = this.docId + ":data:" + key + const cacheKey = this.cacheKeyPrefix + ":data:" + key await cache.store(cacheKey, { key, value }, undefined) } } @@ -98,7 +100,7 @@ 
export class DocWritethrough { doc = { _id: this.docId } } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) + const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value From 774ff42f0c926eb91c84d8a467a9047947274573 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:59:51 +0100 Subject: [PATCH 082/174] Use overloads --- .../src/cache/tests/docWritethrough.spec.ts | 6 ++-- .../backend-core/src/db/couch/DatabaseImpl.ts | 28 ++++++++++++------- .../backend-core/src/db/instrumentation.ts | 14 ++++------ .../src/db/tests/DatabaseImpl.spec.ts | 16 +++++------ packages/types/src/sdk/db.ts | 2 +- 5 files changed, 35 insertions(+), 31 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bca781e377..4c4a4b2b60 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -35,7 +35,7 @@ describe("docWritethrough", () => { beforeEach(() => { resetTime() - documentId = structures.db.id() + documentId = structures.uuid() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -47,7 +47,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) @@ -136,7 +136,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 9d198e4307..416313f520 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts 
+++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -70,7 +70,15 @@ export class DatabaseImpl implements Database { DatabaseImpl.nano = buildNano(couchInfo) } - async exists() { + exists(docId?: string) { + if (docId === undefined) { + return this.dbExists() + } + + return this.docExists(docId) + } + + private async dbExists() { const response = await directCouchUrlCall({ url: `${this.couchInfo.url}/${this.name}`, method: "HEAD", @@ -79,6 +87,15 @@ export class DatabaseImpl implements Database { return response.status === 200 } + private async docExists(id: string): Promise { + try { + await this.performCall(db => () => db.head(id)) + return true + } catch { + return false + } + } + private nano() { return this.instanceNano || DatabaseImpl.nano } @@ -135,15 +152,6 @@ export class DatabaseImpl implements Database { }) } - async docExists(id: string): Promise { - try { - await this.performCall(db => () => db.head(id)) - return true - } catch { - return false - } - } - async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 87af0e3127..795f30d7cd 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database { return this.db.name } - exists(): Promise { + exists(docId?: string): Promise { return tracer.trace("db.exists", span => { - span?.addTags({ db_name: this.name }) + span?.addTags({ db_name: this.name, doc_id: docId }) + if (docId) { + return this.db.exists(docId) + } return this.db.exists() }) } @@ -38,13 +41,6 @@ export class DDInstrumentedDatabase implements Database { }) } - docExists(id: string): Promise { - return tracer.trace("db.docExists", span => { - span?.addTags({ db_name: this.name, doc_id: id }) - return this.db.docExists(id) - }) - } - getMultiple( ids: string[], opts?: { allowMissing?: 
boolean | undefined } | undefined diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts index 140ecf4f2c..586f13f417 100644 --- a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -17,16 +17,16 @@ describe("DatabaseImpl", () => { documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) }) - describe("docExists", () => { + describe("document exists", () => { it("can check existing docs by id", async () => { const existingDoc = _.sample(documents) - const result = await database.docExists(existingDoc!._id!) + const result = await database.exists(existingDoc!._id!) expect(result).toBe(true) }) it("can check non existing docs by id", async () => { - const result = await database.docExists(newid()) + const result = await database.exists(newid()) expect(result).toBe(false) }) @@ -36,9 +36,9 @@ describe("DatabaseImpl", () => { const id = existingDoc!._id! const results = [] - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) expect(results).toEqual([true, true, true]) }) @@ -46,10 +46,10 @@ describe("DatabaseImpl", () => { it("returns false after the doc is deleted", async () => { const existingDoc = _.sample(documents) const id = existingDoc!._id! - expect(await database.docExists(id)).toBe(true) + expect(await database.exists(id)).toBe(true) await database.remove(existingDoc!) 
- expect(await database.docExists(id)).toBe(false) + expect(await database.exists(id)).toBe(false) }) }) }) diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index dafc9ced57..4d103d5be6 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,7 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise - docExists(id: string): Promise + exists(docId: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 2412d75cacbe36f27d0f8c4d02804eb371bb292d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:38:45 +0100 Subject: [PATCH 083/174] Type inMemoryQueue --- .../backend-core/src/queue/inMemoryQueue.ts | 36 ++++++++++--------- packages/backend-core/src/queue/queue.ts | 2 ++ 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..3205b6f383 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,6 @@ import events from "events" import { timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -24,9 +25,9 @@ function newJob(queue: string, message: any) { * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. 
*/ -class InMemoryQueue { +class InMemoryQueue implements Partial { _name: string - _opts?: any + _opts?: QueueOptions _messages: any[] _emitter: EventEmitter _runCount: number @@ -37,7 +38,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] @@ -55,8 +56,12 @@ class InMemoryQueue { * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. */ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { + const delay = this._opts?.defaultJobOptions?.delay + if (delay) { + await new Promise(r => setTimeout(() => r(), delay)) + } if (this._messages.length <= 0) { return } @@ -70,7 +75,7 @@ class InMemoryQueue { } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +88,26 @@ class InMemoryQueue { * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) + this._messages.push(newJob(this._name, data)) this._addCount++ this._emitter.emit("message") + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed. 
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +115,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + async removeJobs(pattern: string) { // no-op } @@ -128,12 +132,12 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds From b39400f08c5145a818aadd602f74c2a7a41e895c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:43:47 +0100 Subject: [PATCH 084/174] Clean --- packages/worker/src/initPro.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts index ddc8d2562a..b34d514992 100644 --- a/packages/worker/src/initPro.ts +++ b/packages/worker/src/initPro.ts @@ -1,5 +1,4 @@ import { sdk as proSdk } from "@budibase/pro" -import * as userSdk from "./sdk/users" export const initPro = async () => { await proSdk.init({}) From df325e21c30fae69940ed04bc3eb9f2d2f8b160d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:18:01 +0100 Subject: [PATCH 085/174] Add doc-writethrough queue --- packages/backend-core/src/queue/constants.ts | 1 + 
packages/backend-core/src/queue/listeners.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..14dce5fe8d 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { From 936ddafee7c21aa939c2842e793e6865741054a5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:34:05 +0100 Subject: [PATCH 086/174] Use bull --- .../backend-core/src/cache/docWritethrough.ts | 123 +++++++++--------- 1 file changed, 64 insertions(+), 59 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e367c9e060..38a162435d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,6 +3,9 @@ import { getDocWritethroughClient } from 
"../redis/init" import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" +import { JobQueue, createQueue } from "../queue" +import { context, db as dbUtils } from ".." + const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -14,17 +17,63 @@ async function getCache() { return CACHE } -interface CacheItem { - nextWrite: number +interface ProcessDocMessage { + tenantId: string + dbName: string + docId: string + cacheKeyPrefix: string } +export const docWritethroughProcessorQueue = createQueue( + JobQueue.DOC_WRITETHROUGH_QUEUE +) + +docWritethroughProcessorQueue.process(async message => { + const { dbName, tenantId, docId, cacheKeyPrefix } = message.data + const cache = await getCache() + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } + }) +}) + export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number private cacheKeyPrefix: string - private docInfoCacheKey: string constructor( db: Database, @@ -35,54 +84,31 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } 
get docId() { return this._docId } - private makeNextWriteInfoItem(): CacheItem { - return { nextWrite: Date.now() + this.writeRateMs } - } - async patch(data: Record) { const cache = await getCache() await this.storeToCache(cache, data) - const updateDb = await this.shouldUpdateDb(cache) - - if (updateDb) { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: this.docInfoCacheKey, - ttl: 15000, - }, - async () => { - if (await this.shouldUpdateDb(cache)) { - await this.persistToDb(cache) - await cache.store( - this.docInfoCacheKey, - this.makeNextWriteInfoItem() - ) - } - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + docWritethroughProcessorQueue.add( + { + tenantId: context.getTenantId(), + dbName: this.db.name, + docId: this.docId, + cacheKeyPrefix: this.cacheKeyPrefix, + }, + { + delay: this.writeRateMs - 1, + jobId: this.cacheKeyPrefix, + removeOnFail: true, + removeOnComplete: true, } - } - } - - private async shouldUpdateDb(cache: BaseCache) { - const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeNextWriteInfoItem() ) - return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { @@ -91,25 +117,4 @@ export class DocWritethrough { await cache.store(cacheKey, { key, value }, undefined) } } - - private async persistToDb(cache: BaseCache) { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } - - const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } } From 420b0ffc03386fdf896b11ff0cc5a0f01741ef9f Mon Sep 17 00:00:00 
2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:50:58 +0100 Subject: [PATCH 087/174] Tests --- .../backend-core/src/cache/docWritethrough.ts | 99 +++++++++------ .../src/cache/tests/docWritethrough.spec.ts | 120 ++++++++++-------- .../backend-core/src/queue/inMemoryQueue.ts | 76 ++++++++--- 3 files changed, 186 insertions(+), 109 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 38a162435d..f53cfbfe5f 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -4,7 +4,8 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" import { JobQueue, createQueue } from "../queue" -import { context, db as dbUtils } from ".." +import * as context from "../context" +import * as dbUtils from "../db" const DEFAULT_WRITE_RATE_MS = 10000 @@ -28,50 +29,71 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -docWritethroughProcessorQueue.process(async message => { - const { dbName, tenantId, docId, cacheKeyPrefix } = message.data - const cache = await getCache() - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } +let _init = false +export const init = () => { + if (_init) { + return + } + docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, 
+ async () => { + await persistToDb(message.data) } + ) - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) - } + }) }) -}) + _init = true +} + +export async function persistToDb({ + dbName, + docId, + cacheKeyPrefix, +}: { + dbName: string + docId: string + cacheKeyPrefix: string +}) { + const cache = await getCache() + + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } +} export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number + private tenantId: string private cacheKeyPrefix: string @@ -84,6 +106,7 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.tenantId = context.getTenantId() } get docId() { @@ -97,13 +120,13 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: context.getTenantId(), + tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, }, { - delay: this.writeRateMs - 1, + delay: this.writeRateMs, jobId: this.cacheKeyPrefix, removeOnFail: true, removeOnComplete: true, diff --git 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 4c4a4b2b60..83af66a9d2 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,20 +1,32 @@ -import tk from "timekeeper" - import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -const WRITE_RATE_MS = 500 +import { + DocWritethrough, + docWritethroughProcessorQueue, + init, +} from "../docWritethrough" +import InMemoryQueue from "../../queue/inMemoryQueue" + +const WRITE_RATE_MS = 1000 const initialTime = Date.now() +jest.useFakeTimers({ + now: initialTime, +}) + function resetTime() { - tk.travel(initialTime) + jest.setSystemTime(initialTime) } -function travelForward(ms: number) { - const updatedTime = Date.now() + ms - tk.travel(updatedTime) +async function travelForward(ms: number) { + await jest.advanceTimersByTimeAsync(ms) + + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + while (queue.hasRunningJobs()) { + await jest.runOnlyPendingTimersAsync() + } } describe("docWritethrough", () => { @@ -33,33 +45,37 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeEach(() => { + beforeAll(() => init()) + + beforeEach(async () => { resetTime() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + await config.doInTenant(async () => { + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) }) - it("patching will not persist if timeout from the creation does not hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await 
docWritethrough.patch(generatePatchObject(2)) - travelForward(WRITE_RATE_MS - 1) - await docWritethrough.patch(generatePatchObject(2)) + await travelForward(WRITE_RATE_MS - 1) expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits and next patch is called", async () => { + it("patching will persist if timeout hits", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) + // This will not be persisted const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -67,7 +83,6 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - ...patch3, _rev: expect.stringMatching(/1-.+/), createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), @@ -82,15 +97,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - travelForward(WRITE_RATE_MS) - - const patch4 = generatePatchObject(3) - await docWritethrough.patch(patch4) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -98,7 +110,6 @@ describe("docWritethrough", () => { ...patch1, ...patch2, ...patch3, - ...patch4, }) ) }) @@ -109,16 +120,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date1 = new Date() await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date2 = new Date() - 
const patch3 = generatePatchObject(3) - await docWritethrough.patch(patch3) - expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -129,22 +137,11 @@ describe("docWritethrough", () => { }) }) - it("patching will not persist even if timeout hits but next patch is not callec", async () => { - await config.doInTenant(async () => { - await docWritethrough.patch(generatePatchObject(2)) - await docWritethrough.patch(generatePatchObject(2)) - - travelForward(WRITE_RATE_MS) - - expect(await db.exists(documentId)).toBe(false) - }) - }) - it("concurrent patches will override keys", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -155,13 +152,14 @@ describe("docWritethrough", () => { }) ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -173,7 +171,7 @@ describe("docWritethrough", () => { }) }) - it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + it("concurrent patches to different docWritethrough will not pollute each other", async () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, @@ -186,12 +184,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) + await 
travelForward(WRITE_RATE_MS) expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -214,7 +213,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch({}) @@ -224,9 +223,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -246,30 +246,46 @@ describe("docWritethrough", () => { ) } - const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) - expect(persistToDbSpy).not.toBeCalled() expect(storeToCacheSpy).toBeCalledTimes(5) + expect(await db.exists(documentId)).toBe(false) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(40) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(45) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) + await parallelPatch(10) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(55) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(5) - expect(persistToDbSpy).toBeCalledTimes(2) + await travelForward(WRITE_RATE_MS) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/3-.+/), + 
}) + ) expect(storeToCacheSpy).toBeCalledTimes(60) }) }) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 3205b6f383..f201714903 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -2,6 +2,13 @@ import events from "events" import { timeout } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" +interface JobMessage { + timestamp: number + queue: string + data: any + opts?: JobOptions +} + /** * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs @@ -11,12 +18,12 @@ import { Queue, QueueOptions, JobOptions } from "./queue" * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -28,10 +35,12 @@ function newJob(queue: string, message: any) { class InMemoryQueue implements Partial { _name: string _opts?: QueueOptions - _messages: any[] + _messages: JobMessage[] + _queuedJobIds: Set _emitter: EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. 
@@ -45,6 +54,7 @@ class InMemoryQueue implements Partial { this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -58,19 +68,24 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - const delay = this._opts?.defaultJobOptions?.delay - if (delay) { - await new Promise(r => setTimeout(() => r(), delay)) + try { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } + } catch (e: any) { + throw e } - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ }) } @@ -89,12 +104,31 @@ class InMemoryQueue implements Partial { */ // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job ${jobId}`) + return + } + if (typeof data !== "object") { throw "Queue only supports carrying JSON." 
} - this._messages.push(newJob(this._name, data)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } return {} as any } @@ -143,7 +177,11 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } From b94d28b7d63caa6061ff55f623be1f76c9665578 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:55:07 +0100 Subject: [PATCH 088/174] Clean --- .../backend-core/src/queue/inMemoryQueue.ts | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index f201714903..6c8107c7a4 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -68,23 +68,19 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - try { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ - const jobId = msg?.opts?.jobId?.toString() - if (jobId && msg?.opts?.removeOnComplete) { - this._queuedJobIds.delete(jobId) - } - } catch (e: any) { - throw e + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) } }) } 
From 8d87850765efdea50d4127cc46743eed2c57a511 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:19:05 +0100 Subject: [PATCH 089/174] Remove defaults and init --- .../backend-core/src/cache/docWritethrough.ts | 52 +++++++------------ .../src/cache/tests/docWritethrough.spec.ts | 3 -- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index f53cfbfe5f..1a16f60eb9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -7,8 +7,6 @@ import { JobQueue, createQueue } from "../queue" import * as context from "../context" import * as dbUtils from "../db" -const DEFAULT_WRITE_RATE_MS = 10000 - let CACHE: BaseCache | null = null async function getCache() { if (!CACHE) { @@ -29,33 +27,27 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -let _init = false -export const init = () => { - if (_init) { - return - } - docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) +docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) } - }) + ) 
+ + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } }) - _init = true -} +}) export async function persistToDb({ dbName, @@ -97,11 +89,7 @@ export class DocWritethrough { private cacheKeyPrefix: string - constructor( - db: Database, - docId: string, - writeRateMs: number = DEFAULT_WRITE_RATE_MS - ) { + constructor(db: Database, docId: string, writeRateMs: number) { this.db = db this._docId = docId this.writeRateMs = writeRateMs diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 83af66a9d2..a5765171cb 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -5,7 +5,6 @@ import _ from "lodash" import { DocWritethrough, docWritethroughProcessorQueue, - init, } from "../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" @@ -45,8 +44,6 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeAll(() => init()) - beforeEach(async () => { resetTime() documentId = structures.uuid() From 0649497ab53a1d73bac39f3c4ec8ba2cb8e88c3c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:47:23 +0100 Subject: [PATCH 090/174] Add comment --- packages/backend-core/src/cache/tests/docWritethrough.spec.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index a5765171cb..3e638a4eec 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -256,6 +256,8 @@ describe("docWritethrough", () => { expect(storeToCacheSpy).toBeCalledTimes(45) + // Ideally we want to spy on persistToDb from ./docWritethrough, but due our barrel files configuration required quite of a complex setup. 
+ // We are relying on the document being stored only once (otherwise we would have _rev updated) expect(await db.get(documentId)).toEqual( expect.objectContaining({ _id: documentId, From 2b25f9f0cb75ae1925db074348dbdaab521747c6 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 15:02:02 +0100 Subject: [PATCH 091/174] Improve redlock non executed response --- packages/backend-core/src/cache/docWritethrough.ts | 9 +++++++++ packages/backend-core/src/redis/redlockImpl.ts | 10 +++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 1a16f60eb9..ebb64ee9e5 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -44,6 +44,15 @@ docWritethroughProcessorQueue.process(async message => { ) if (!lockResponse.executed) { + if ( + lockResponse.reason !== + locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce + ) { + console.error("Error persisting docWritethrough", { + data: message.data, + }) + throw "Error persisting docWritethrough" + } console.log(`Ignoring redlock conflict in write-through cache`) } }) diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts index adeb5b12ec..28babb9405 100644 --- a/packages/backend-core/src/redis/redlockImpl.ts +++ b/packages/backend-core/src/redis/redlockImpl.ts @@ -82,6 +82,11 @@ type SuccessfulRedlockExecution = { } type UnsuccessfulRedlockExecution = { executed: false + reason: UnsuccessfulRedlockExecutionReason +} + +export const enum UnsuccessfulRedlockExecutionReason { + LockTakenWithTryOnce = "LOCK_TAKEN_WITH_TRY_ONCE", } type RedlockExecution = @@ -141,7 +146,10 @@ export async function doWithLock( if (opts.type === LockType.TRY_ONCE) { // don't throw for try-once locks, they will always error // due to retry count (0) exceeded - return { executed: false } + 
return { + executed: false, + reason: UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce, + } } else { throw e } From 4fe7e67dd51617c36356ccc79343a8d12f261ea4 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:15:50 +0100 Subject: [PATCH 092/174] Do not use lock --- .../backend-core/src/cache/docWritethrough.ts | 37 ++----------------- .../src/cache/tests/docWritethrough.spec.ts | 4 +- 2 files changed, 4 insertions(+), 37 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index ebb64ee9e5..d4d651c688 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,7 +1,6 @@ import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database, LockName, LockType } from "@budibase/types" -import * as locks from "../redis/redlockImpl" +import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as context from "../context" @@ -17,7 +16,6 @@ async function getCache() { } interface ProcessDocMessage { - tenantId: string dbName: string docId: string cacheKeyPrefix: string @@ -28,34 +26,8 @@ export const docWritethroughProcessorQueue = createQueue( ) docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - console.log("DocWritethrough persisted", { data: message.data }) - } - ) - - if (!lockResponse.executed) { - if ( - lockResponse.reason !== - locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce - ) { - console.error("Error persisting docWritethrough", { - data: message.data, - }) - 
throw "Error persisting docWritethrough" - } - console.log(`Ignoring redlock conflict in write-through cache`) - } - }) + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) }) export async function persistToDb({ @@ -94,7 +66,6 @@ export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number - private tenantId: string private cacheKeyPrefix: string @@ -103,7 +74,6 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.tenantId = context.getTenantId() } get docId() { @@ -117,7 +87,6 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 3e638a4eec..9bbcd6af44 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -47,9 +47,7 @@ describe("docWritethrough", () => { beforeEach(async () => { resetTime() documentId = structures.uuid() - await config.doInTenant(async () => { - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) - }) + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) it("patching will not persist if timeout does not hit", async () => { From ebcb7718b8f6e60e88c1ca4bbcb7cf0f18857efa Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:06:14 +0100 Subject: [PATCH 093/174] Use bulk --- packages/backend-core/src/cache/base/index.ts | 19 +++++++++++++++++++ .../backend-core/src/cache/docWritethrough.ts | 10 +++++----- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 
911bd6a831..942d70ae72 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -46,6 +46,25 @@ export default class BaseCache { await client.store(key, value, ttl) } + /** + * Bulk write to the cache. + */ + async bulkStore( + data: Record, + ttl: number | null = null, + opts = { useTenancy: true } + ) { + if (opts.useTenancy) { + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[generateTenantKey(key)] = value + return acc + }, {} as Record) + } + + const client = await this.getClient() + await client.bulkStore(data, ttl) + } + /** * Remove from cache. */ diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index d4d651c688..a0bc14ec5c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,7 +3,6 @@ import { getDocWritethroughClient } from "../redis/init" import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" -import * as context from "../context" import * as dbUtils from "../db" let CACHE: BaseCache | null = null @@ -101,9 +100,10 @@ export class DocWritethrough { } private async storeToCache(cache: BaseCache, data: Record) { - for (const [key, value] of Object.entries(data)) { - const cacheKey = this.cacheKeyPrefix + ":data:" + key - await cache.store(cacheKey, { key, value }, undefined) - } + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[this.cacheKeyPrefix + ":data:" + key] = { key, value } + return acc + }, {} as Record) + await cache.bulkStore(data, null) } } From db75c0594290551fd0a23e1b0c70079eb2ea5656 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:25:23 +0100 Subject: [PATCH 094/174] Use scim-logs db --- packages/backend-core/src/constants/db.ts | 3 +++ packages/backend-core/src/context/mainContext.ts | 11 +++++++++++ packages/pro | 2 +- 3 files changed, 15 
insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index ac00483021..f4caac502e 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -57,6 +57,9 @@ export const StaticDatabases = { AUDIT_LOGS: { name: "audit-logs", }, + SCIM_LOGS: { + name: "scim-logs", + }, } export const APP_PREFIX = prefixed(DocumentType.APP) diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts index 36fd5dcb48..ae86695168 100644 --- a/packages/backend-core/src/context/mainContext.ts +++ b/packages/backend-core/src/context/mainContext.ts @@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) { } } +export function getScimDBName(tenantId?: string) { + if (!tenantId) { + tenantId = getTenantId() + } + if (tenantId === DEFAULT_TENANT_ID) { + return StaticDatabases.SCIM_LOGS.name + } else { + return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}` + } +} + export function baseGlobalDBName(tenantId: string | undefined | null) { if (!tenantId || tenantId === DEFAULT_TENANT_ID) { return StaticDatabases.GLOBAL.name diff --git a/packages/pro b/packages/pro index 6079868997..678c913246 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 607986899781aa7c0b6ccfd9746497b6fc32b569 +Subproject commit 678c913246bacb398fbda2ad73a8e1bb562983fd From 182a1df9606f98da9791cb50df8355fc54eb21c2 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 5 Mar 2024 17:35:04 +0000 Subject: [PATCH 095/174] Fix the bug, I think. 
--- packages/backend-core/src/db/Replication.ts | 36 ++++++--- packages/backend-core/src/security/roles.ts | 5 +- packages/server/src/api/controllers/role.ts | 10 +++ .../src/api/routes/tests/application.spec.ts | 81 +++++++++---------- packages/types/src/documents/app/role.ts | 1 + 5 files changed, 72 insertions(+), 61 deletions(-) diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts index f91a37ce8f..12c11eb9e2 100644 --- a/packages/backend-core/src/db/Replication.ts +++ b/packages/backend-core/src/db/Replication.ts @@ -1,17 +1,18 @@ +import PouchDB from "pouchdb" import { getPouchDB, closePouchDB } from "./couch" import { DocumentType } from "../constants" class Replication { - source: any - target: any - replication: any + source: PouchDB.Database + target: PouchDB.Database + replication?: Promise /** * * @param source - the DB you want to replicate or rollback to * @param target - the DB you want to replicate to, or rollback from */ - constructor({ source, target }: any) { + constructor({ source, target }: { source: string; target: string }) { this.source = getPouchDB(source) this.target = getPouchDB(target) } @@ -40,7 +41,7 @@ class Replication { * Two way replication operation, intended to be promise based. * @param opts - PouchDB replication options */ - sync(opts = {}) { + sync(opts: PouchDB.Replication.SyncOptions = {}) { this.replication = this.promisify(this.source.sync, opts) return this.replication } @@ -49,18 +50,31 @@ class Replication { * One way replication operation, intended to be promise based. 
* @param opts - PouchDB replication options */ - replicate(opts = {}) { + replicate(opts: PouchDB.Replication.ReplicateOptions = {}) { this.replication = this.promisify(this.source.replicate.to, opts) return this.replication } - appReplicateOpts() { + appReplicateOpts( + opts: PouchDB.Replication.ReplicateOptions = {} + ): PouchDB.Replication.ReplicateOptions { + if (typeof opts.filter === "string") { + return opts + } + + const filter = opts.filter + delete opts.filter + return { - filter: (doc: any) => { + ...opts, + filter: (doc: any, params: any) => { if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) { return false } - return doc._id !== DocumentType.APP_METADATA + if (doc._id === DocumentType.APP_METADATA) { + return false + } + return filter ? filter(doc, params) : true }, } } @@ -75,10 +89,6 @@ class Replication { // take the opportunity to remove deleted tombstones await this.replicate() } - - cancel() { - this.replication.cancel() - } } export default Replication diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts index 01473ad991..a64be6b319 100644 --- a/packages/backend-core/src/security/roles.ts +++ b/packages/backend-core/src/security/roles.ts @@ -101,10 +101,7 @@ export function getBuiltinRole(roleId: string): Role | undefined { /** * Works through the inheritance ranks to see how far up the builtin stack this ID is. 
*/ -export function builtinRoleToNumber(id?: string) { - if (!id) { - return 0 - } +export function builtinRoleToNumber(id: string) { const builtins = getBuiltinRoles() const MAX = Object.values(builtins).length + 1 if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) { diff --git a/packages/server/src/api/controllers/role.ts b/packages/server/src/api/controllers/role.ts index b3eb61a255..fff58da86e 100644 --- a/packages/server/src/api/controllers/role.ts +++ b/packages/server/src/api/controllers/role.ts @@ -106,6 +106,16 @@ export async function save(ctx: UserCtx) { ) role._rev = result.rev ctx.body = role + + const replication = new dbCore.Replication({ + source: context.getDevAppDB().name, + target: context.getProdAppDB().name, + }) + await replication.replicate({ + filter: (doc: any, params: any) => { + return doc._id === _id + }, + }) } export async function destroy(ctx: UserCtx) { diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 6f948d9977..63c9fe44b8 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -16,16 +16,9 @@ import * as setup from "./utilities" import { AppStatus } from "../../../db/utils" import { events, utils, context } from "@budibase/backend-core" import env from "../../../environment" -import { - PermissionLevel, - type App, - INTERNAL_TABLE_SOURCE_ID, - TableSourceType, - FieldType, -} from "@budibase/types" +import { type App } from "@budibase/types" import tk from "timekeeper" - -jest.setTimeout(99999999) +import * as uuid from "uuid" describe("/applications", () => { let config = setup.getConfig() @@ -258,25 +251,12 @@ describe("/applications", () => { }) describe("permissions", () => { - it.only("should only return apps a user has access to", async () => { + it("should only return apps a user has access to", async () => { let user = await config.createUser({ 
builder: { global: false }, admin: { global: false }, }) - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - }, - }) - await config.withUser(user, async () => { const apps = await config.api.application.fetch() expect(apps).toHaveLength(0) @@ -295,25 +275,12 @@ describe("/applications", () => { }) }) - it("should only return apps a user has access to through a custom role on a group", async () => { - const user = await config.createUser({ + it("should only return apps a user has access to through a custom role", async () => { + let user = await config.createUser({ builder: { global: false }, admin: { global: false }, }) - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - }, - }) - await config.withUser(user, async () => { const apps = await config.api.application.fetch() expect(apps).toHaveLength(0) @@ -326,17 +293,43 @@ describe("/applications", () => { version: "name", }) - await config.api.user.update({ + user = await config.globalUser({ ...user, roles: { - [config.getAppId()]: role._id!, + [config.getProdAppId()]: role.name, }, }) - await config.api.permission.add({ - resourceId: table._id!, - roleId: role._id!, - level: PermissionLevel.READ, + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) + }) + + it.only("should only return apps a user has access to through a custom role on a group", async () => { + let user = await config.createUser({ + builder: { global: false }, + admin: { global: false }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(0) + }) 
+ + const roleName = uuid.v4().replace(/-/g, "") + const role = await config.api.roles.save({ + name: roleName, + inherits: "PUBLIC", + permissionId: "read_only", + version: "name", + }) + + const group = await config.createGroup(role._id!) + + user = await config.globalUser({ + ...user, + userGroups: [group._id!], }) await config.withUser(user, async () => { diff --git a/packages/types/src/documents/app/role.ts b/packages/types/src/documents/app/role.ts index d126a67b16..f32ba810b0 100644 --- a/packages/types/src/documents/app/role.ts +++ b/packages/types/src/documents/app/role.ts @@ -5,4 +5,5 @@ export interface Role extends Document { inherits?: string permissions: { [key: string]: string[] } version?: string + name: string } From 11704ea983b5ec3d7426b6927afa41d1cdea81a7 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 5 Mar 2024 17:40:38 +0000 Subject: [PATCH 096/174] TODO. --- packages/server/src/api/controllers/role.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/server/src/api/controllers/role.ts b/packages/server/src/api/controllers/role.ts index fff58da86e..6b62c568e2 100644 --- a/packages/server/src/api/controllers/role.ts +++ b/packages/server/src/api/controllers/role.ts @@ -107,6 +107,8 @@ export async function save(ctx: UserCtx) { role._rev = result.rev ctx.body = role + // TODO: need to check that the prod DB actually exists, I think it won't + // if the app has never been published. 
const replication = new dbCore.Replication({ source: context.getDevAppDB().name, target: context.getProdAppDB().name, From 339ca403bfbf2a58fa1d5a61371823cd216c35e6 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 22:43:28 +0100 Subject: [PATCH 097/174] Add redis.increment --- packages/backend-core/src/redis/redis.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 8cfa3db5c1..7bd524b18c 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -331,6 +331,11 @@ class RedisWrapper { let items = await this.scan() await Promise.all(items.map((obj: any) => this.delete(obj.key))) } + + async increment(key: string) { + const result = await this.getClient().incr(addDbPrefix(this._db, key)) + return result + } } export default RedisWrapper From 837395e5e02309e3117b72e4a4316e6c16f22b1a Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 22:43:34 +0100 Subject: [PATCH 098/174] Add tests --- .../src/redis/tests/redis.spec.ts | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index 1fd40acc37..31106e1765 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -107,4 +107,44 @@ describe("redis", () => { expect(await redis.keys("*")).toHaveLength(0) }) }) + + describe("increment", () => { + it("can increment on a new key", async () => { + const key = structures.uuid() + const result = await redis.increment(key) + expect(result).toBe(1) + }) + + it("can increment multiple times", async () => { + const key = structures.uuid() + const results = [ + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + ] + 
expect(results).toEqual([1, 2, 3, 4, 5]) + }) + + it("can increment on a new key", async () => { + const key1 = structures.uuid() + const key2 = structures.uuid() + + const result1 = await redis.increment(key1) + expect(result1).toBe(1) + + const result2 = await redis.increment(key2) + expect(result2).toBe(1) + }) + + it("can increment multiple times in parallel", async () => { + const key = structures.uuid() + const results = await Promise.all( + Array.from({ length: 100 }).map(() => redis.increment(key)) + ) + expect(results).toHaveLength(100) + expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1)) + }) + }) }) From 192d7deb2adc7c8bcbe7f9e08fedae1f2939695d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 22:52:44 +0100 Subject: [PATCH 099/174] Handle errors --- packages/backend-core/src/redis/redis.ts | 3 +++ packages/backend-core/src/redis/tests/redis.spec.ts | 11 +++++++++++ 2 files changed, 14 insertions(+) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 7bd524b18c..55d43e4f60 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -334,6 +334,9 @@ class RedisWrapper { async increment(key: string) { const result = await this.getClient().incr(addDbPrefix(this._db, key)) + if (isNaN(result)) { + throw new Error(`Redis ${key} does not contains a number`) + } return result } } diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index 31106e1765..f7038d6d5e 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -146,5 +146,16 @@ describe("redis", () => { expect(results).toHaveLength(100) expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1)) }) + + it.each([ + generator.word(), + generator.bool(), + { [generator.word()]: generator.word() }, + ])("cannot increment if the 
store value is not a number", async value => { + const key = structures.uuid() + await redis.store(key, value) + + await expect(redis.increment(key)).rejects.toThrowError("") + }) }) }) From d4ffc3d5d73ec8c5df3d3b87acb3228d957715fc Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 22:54:39 +0100 Subject: [PATCH 100/174] More tests --- packages/backend-core/src/redis/tests/redis.spec.ts | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index f7038d6d5e..8cdaf3a740 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -147,6 +147,15 @@ describe("redis", () => { expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1)) }) + it("can increment existing set keys", async () => { + const key = structures.uuid() + await redis.store(key, 70) + await redis.increment(key) + + const result = await redis.increment(key) + expect(result).toBe(72) + }) + it.each([ generator.word(), generator.bool(), @@ -155,7 +164,9 @@ describe("redis", () => { const key = structures.uuid() await redis.store(key, value) - await expect(redis.increment(key)).rejects.toThrowError("") + await expect(redis.increment(key)).rejects.toThrowError( + `Redis ${key} does not contains a number` + ) }) }) }) From b774987e615435bf68eb33d1bda477dadddb8914 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 00:33:04 +0100 Subject: [PATCH 101/174] Install testcontainers --- packages/backend-core/package.json | 1 + packages/server/package.json | 2 +- yarn.lock | 83 ++++++++++++++++++++++-------- 3 files changed, 63 insertions(+), 23 deletions(-) diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index 90b3316c3f..fe56780982 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -78,6 +78,7 @@ 
"jest-serial-runner": "1.2.1", "pino-pretty": "10.0.0", "pouchdb-adapter-memory": "7.2.2", + "testcontainers": "^10.7.2", "timekeeper": "2.2.0", "typescript": "5.2.2" }, diff --git a/packages/server/package.json b/packages/server/package.json index 45980a4be6..97de17eb58 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -149,7 +149,7 @@ "rimraf": "3.0.2", "supertest": "6.3.3", "swagger-jsdoc": "6.1.0", - "testcontainers": "10.6.0", + "testcontainers": "10.7.2", "timekeeper": "2.2.0", "ts-node": "10.8.1", "tsconfig-paths": "4.0.0", diff --git a/yarn.lock b/yarn.lock index 2f9f558e2c..4025a537a3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5303,10 +5303,10 @@ dependencies: "@types/node" "*" -"@types/dockerode@^3.3.21": - version "3.3.23" - resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-3.3.23.tgz#07b2084013d01e14d5d97856446f4d9c9f27c223" - integrity sha512-Lz5J+NFgZS4cEVhquwjIGH4oQwlVn2h7LXD3boitujBnzOE5o7s9H8hchEjoDK2SlRsJTogdKnQeiJgPPKLIEw== +"@types/dockerode@^3.3.24": + version "3.3.24" + resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-3.3.24.tgz#bea354a4fcd0824a80fd5ea5ede3e8cda71137a7" + integrity sha512-679y69OYusf7Fr2HtdjXPUF6hnHxSA9K4EsuagsMuPno/XpJHjXxCOy2I5YL8POnWbzjsQAi0pyKIYM9HSpQog== dependencies: "@types/docker-modem" "*" "@types/node" "*" @@ -7027,7 +7027,7 @@ ast-types@0.9.6: resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.9.6.tgz#102c9e9e9005d3e7e3829bf0c4fa24ee862ee9b9" integrity sha512-qEdtR2UH78yyHX/AUNfXmJTlM48XoFZKBdwi1nzkI1mJL21cmbu0cvjxjpkXJ5NENMq42H+hNs8VLJcqXLerBQ== -async-lock@^1.4.0: +async-lock@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/async-lock/-/async-lock-1.4.1.tgz#56b8718915a9b68b10fce2f2a9a3dddf765ef53f" integrity sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ== @@ -7263,6 +7263,33 @@ balanced-match@^1.0.0: resolved 
"https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== +bare-events@^2.0.0, bare-events@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/bare-events/-/bare-events-2.2.1.tgz#7b6d421f26a7a755e20bf580b727c84b807964c1" + integrity sha512-9GYPpsPFvrWBkelIhOhTWtkeZxVxZOdb3VnFTCzlOo3OjvmTvzLoZFUT8kNFACx0vJej6QPney1Cf9BvzCNE/A== + +bare-fs@^2.1.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/bare-fs/-/bare-fs-2.2.1.tgz#c1985d8d3e07a178956b072d3af67cb8c1fa9391" + integrity sha512-+CjmZANQDFZWy4PGbVdmALIwmt33aJg8qTkVjClU6X4WmZkTPBDxRHiBn7fpqEWEfF3AC2io++erpViAIQbSjg== + dependencies: + bare-events "^2.0.0" + bare-os "^2.0.0" + bare-path "^2.0.0" + streamx "^2.13.0" + +bare-os@^2.0.0, bare-os@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/bare-os/-/bare-os-2.2.0.tgz#24364692984d0bd507621754781b31d7872736b2" + integrity sha512-hD0rOPfYWOMpVirTACt4/nK8mC55La12K5fY1ij8HAdfQakD62M+H4o4tpfKzVGLgRDTuk3vjA4GqGXXCeFbag== + +bare-path@^2.0.0, bare-path@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/bare-path/-/bare-path-2.1.0.tgz#830f17fd39842813ca77d211ebbabe238a88cb4c" + integrity sha512-DIIg7ts8bdRKwJRJrUMy/PICEaQZaPGZ26lsSx9MJSwIhSrcdHn7/C8W+XmnG/rKi6BaRcz+JO00CjZteybDtw== + dependencies: + bare-os "^2.1.0" + base62@^1.1.0: version "1.2.8" resolved "https://registry.yarnpkg.com/base62/-/base62-1.2.8.tgz#1264cb0fb848d875792877479dbe8bae6bae3428" @@ -9615,10 +9642,10 @@ docker-compose@^0.23.5, docker-compose@^0.23.6: dependencies: yaml "^1.10.2" -docker-compose@^0.24.2: - version "0.24.3" - resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.24.3.tgz#298d7bb4aaf37b3b45d0e4ef55c7f58ccc39cca9" - integrity sha512-x3/QN3AIOMe7j2c8f/jcycizMft7dl8MluoB9OGPAYCyKHHiPUFqI9GjCcsU0kYy24vYKMCcfR6+5ZaEyQlrxg== +docker-compose@^0.24.6: + 
version "0.24.6" + resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.24.6.tgz#d1f490a641bdb7ccc07c4d446b264f026f9a1f15" + integrity sha512-VidlUyNzXMaVsuM79sjSvwC4nfojkP2VneL+Zfs538M2XFnffZDhx6veqnz/evCNIYGyz5O+1fgL6+g0NLWTBA== dependencies: yaml "^2.2.2" @@ -20179,6 +20206,16 @@ stream-to-array@^2.3.0: dependencies: any-promise "^1.1.0" +streamx@^2.13.0: + version "2.16.1" + resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.16.1.tgz#2b311bd34832f08aa6bb4d6a80297c9caef89614" + integrity sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ== + dependencies: + fast-fifo "^1.1.0" + queue-tick "^1.0.1" + optionalDependencies: + bare-events "^2.2.0" + streamx@^2.15.0: version "2.15.6" resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.15.6.tgz#28bf36997ebc7bf6c08f9eba958735231b833887" @@ -20730,14 +20767,16 @@ tar-fs@2.1.1, tar-fs@^2.0.0, tar-fs@^2.1.0: pump "^3.0.0" tar-stream "^2.1.4" -tar-fs@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.4.tgz#a21dc60a2d5d9f55e0089ccd78124f1d3771dbbf" - integrity sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w== +tar-fs@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.5.tgz#f954d77767e4e6edf973384e1eb95f8f81d64ed9" + integrity sha512-JOgGAmZyMgbqpLwct7ZV8VzkEB6pxXFBVErLtb+XCOqzc6w1xiWKI9GVd6bwk68EX7eJ4DWmfXVmq8K2ziZTGg== dependencies: - mkdirp-classic "^0.5.2" pump "^3.0.0" tar-stream "^3.1.5" + optionalDependencies: + bare-fs "^2.1.1" + bare-path "^2.1.0" tar-fs@~2.0.1: version "2.0.1" @@ -20875,25 +20914,25 @@ test-exclude@^6.0.0: glob "^7.1.4" minimatch "^3.0.4" -testcontainers@10.6.0: - version "10.6.0" - resolved "https://registry.yarnpkg.com/testcontainers/-/testcontainers-10.6.0.tgz#205ad9148e68ff5c43a6209a30b12965acbe89d8" - integrity 
sha512-FDJ3o3J8IMu1V7Uc6lNZ2MAD8+BV4HdpR/Vf5mHtgYHKdn6k1EbGFwtnvVNOxanJ99FCjf/EU8eA5ZQ4yjlsGA== +testcontainers@10.7.2, testcontainers@^10.7.2: + version "10.7.2" + resolved "https://registry.yarnpkg.com/testcontainers/-/testcontainers-10.7.2.tgz#619e93200dd47f174b307b40fa830cf023b74c25" + integrity sha512-7d+LVd/4YKp/cutiVMLL5cnj/8p8oYELAVRRyNUM4FyUDz1OLQuwW868nDl7Vd1ZAQxzGeCR+F86FlR9Yw9fMA== dependencies: "@balena/dockerignore" "^1.0.2" - "@types/dockerode" "^3.3.21" + "@types/dockerode" "^3.3.24" archiver "^5.3.2" - async-lock "^1.4.0" + async-lock "^1.4.1" byline "^5.0.0" debug "^4.3.4" - docker-compose "^0.24.2" + docker-compose "^0.24.6" dockerode "^3.3.5" get-port "^5.1.1" node-fetch "^2.7.0" proper-lockfile "^4.1.2" properties-reader "^2.3.0" ssh-remote-port-forward "^1.0.4" - tar-fs "^3.0.4" + tar-fs "^3.0.5" tmp "^0.2.1" testcontainers@4.7.0: From 60f82610f6ea11d56b406452b590e6a943125140 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 00:33:24 +0100 Subject: [PATCH 102/174] Use redis testcontainer --- .../src/redis/tests/redis.spec.ts | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index 8cdaf3a740..a7c0384d78 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -1,8 +1,26 @@ +import { GenericContainer, StartedTestContainer } from "testcontainers" import { generator, structures } from "../../../tests" import RedisWrapper from "../redis" +import { env } from "../.." 
describe("redis", () => { let redis: RedisWrapper + let container: StartedTestContainer + + beforeAll(async () => { + const container = await new GenericContainer("redis") + .withExposedPorts(6379) + .start() + + env._set( + "REDIS_URL", + `${container.getHost()}:${container.getMappedPort(6379)}` + ) + env._set("MOCK_REDIS", 0) + env._set("REDIS_PASSWORD", 0) + }) + + afterAll(() => container?.stop()) beforeEach(async () => { redis = new RedisWrapper(structures.db.id()) @@ -165,7 +183,7 @@ describe("redis", () => { await redis.store(key, value) await expect(redis.increment(key)).rejects.toThrowError( - `Redis ${key} does not contains a number` + "ERR value is not an integer or out of range" ) }) }) From 355dea5b05659efa5da48ed12062852aa980a4b2 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 00:13:11 +0100 Subject: [PATCH 103/174] Add deleteIfValue --- packages/backend-core/src/redis/redis.ts | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 55d43e4f60..fd40467b6a 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -339,6 +339,18 @@ class RedisWrapper { } return result } + + async deleteIfValue(key: string, value: any) { + const client = this.getClient() + + const luaScript = ` + if redis.call('GET', KEYS[1]) == ARGV[1] then + redis.call('DEL', KEYS[1]) + end + ` + + await client.eval(luaScript, 1, addDbPrefix(this._db, key), value) + } } export default RedisWrapper From 8f486e7bfc31bdb1b1f3a476f600a76996cccd9e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 00:20:42 +0100 Subject: [PATCH 104/174] Add tests --- .../src/redis/tests/redis.spec.ts | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index a7c0384d78..9ff6828cee 100644 --- 
a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -187,4 +187,26 @@ describe("redis", () => { ) }) }) + + describe("deleteIfValue", () => { + it("can delete if the value matches", async () => { + const key = structures.uuid() + const value = generator.word() + await redis.store(key, value) + + await redis.deleteIfValue(key, value) + + expect(await redis.get(key)).toBeNull() + }) + + it("will not delete if the value does not matches", async () => { + const key = structures.uuid() + const value = generator.word() + await redis.store(key, value) + + await redis.deleteIfValue(key, generator.word()) + + expect(await redis.get(key)).toEqual(value) + }) + }) }) From 86b715d5ce1cddaee37e531e661509dc55663bb5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 11:22:27 +0100 Subject: [PATCH 105/174] Create docWriteThrough redis cache --- packages/backend-core/src/redis/init.ts | 13 ++++++++++++- packages/backend-core/src/redis/utils.ts | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..7920dfed2d 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = await new Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ export async function init() { utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + ).init() } export async function shutdown() { @@ -104,3 +108,10 @@ export async function getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if 
(!writethroughClient) { + await init() + } + return writethroughClient +} diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** From c96c57bd9e6176418712525866bc89a9cd83c735 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:44:52 +0100 Subject: [PATCH 106/174] DocWritethrough --- .../backend-core/src/cache/docWritethrough.ts | 102 ++++++++++++++++++ .../backend-core/src/db/couch/DatabaseImpl.ts | 9 ++ .../backend-core/src/db/instrumentation.ts | 7 ++ packages/types/src/sdk/db.ts | 1 + 4 files changed, 119 insertions(+) create mode 100644 packages/backend-core/src/cache/docWritethrough.ts diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts new file mode 100644 index 0000000000..9e1977f797 --- /dev/null +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -0,0 +1,102 @@ +import BaseCache from "./base" +import { getDocWritethroughClient } from "../redis/init" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" + +const DEFAULT_WRITE_RATE_MS = 10000 + +let CACHE: BaseCache | null = null +async function getCache() { + if (!CACHE) { + const client = await getDocWritethroughClient() + CACHE = new BaseCache(client) + } + return CACHE +} + +interface CacheItem { + lastWrite: number +} + +export class DocWritethrough { + db: Database + docId: string + writeRateMs: number + + constructor( + db: Database, + docId: string, + writeRateMs: number = DEFAULT_WRITE_RATE_MS + ) { + this.db = db + this.docId = docId + this.writeRateMs = writeRateMs + } + + private makeCacheItem(): CacheItem { + 
return { lastWrite: Date.now() } + } + + async patch(data: Record) { + const cache = await getCache() + + const key = `${this.docId}:info` + const cacheItem = await cache.withCache( + key, + null, + () => this.makeCacheItem(), + { + useTenancy: false, + } + ) + + await this.storeToCache(cache, data) + + const updateDb = + !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs + // let output = this.doc + if (updateDb) { + await this.persistToDb(cache) + } + } + + private async storeToCache(cache: BaseCache, data: Record) { + for (const [key, value] of Object.entries(data)) { + const cacheKey = this.docId + ":data:" + key + await cache.store(cacheKey, { key, value }, undefined) + } + } + + private async persistToDb(cache: BaseCache) { + const key = `${this.db.name}_${this.docId}` + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 15000, + }, + async () => { + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } + + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await this.db.put(doc) + } + ) + + if (!lockResponse.executed) { + throw `DocWriteThrough could not be persisted to db for ${key}` + } + } +} diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 7e7c997cbe..d4d17f6127 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -135,6 +135,15 @@ export class DatabaseImpl implements Database { }) } + async docExists(id: string): Promise { + try { + await this.get(id) + return true + } catch { + return false + } + } + async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git 
a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 03010d4c92..87af0e3127 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -38,6 +38,13 @@ export class DDInstrumentedDatabase implements Database { }) } + docExists(id: string): Promise { + return tracer.trace("db.docExists", span => { + span?.addTags({ db_name: this.name, doc_id: id }) + return this.db.docExists(id) + }) + } + getMultiple( ids: string[], opts?: { allowMissing?: boolean | undefined } | undefined diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index c4e4a4f02f..dafc9ced57 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,6 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise + docExists(id: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 48c92377ad93c1a41bf98910b9345f9949e5093e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 13:47:18 +0100 Subject: [PATCH 107/174] USe get for doc exists --- packages/backend-core/src/cache/base/index.ts | 2 +- packages/backend-core/src/db/couch/DatabaseImpl.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 264984c6a5..23c952c7b2 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -60,7 +60,7 @@ export default class BaseCache { */ async withCache( key: string, - ttl: number, + ttl: number | null = null, fetchFn: any, opts = { useTenancy: true } ) { diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index d4d17f6127..9d198e4307 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -137,7 
+137,7 @@ export class DatabaseImpl implements Database { async docExists(id: string): Promise { try { - await this.get(id) + await this.performCall(db => () => db.head(id)) return true } catch { return false From 3998faaf3a2d6e0f831693f2578e8de3b2149854 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:17:18 +0100 Subject: [PATCH 108/174] DatabaseImpl.docExists test --- .../src/db/tests/DatabaseImpl.spec.ts | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 packages/backend-core/src/db/tests/DatabaseImpl.spec.ts diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts new file mode 100644 index 0000000000..140ecf4f2c --- /dev/null +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -0,0 +1,55 @@ +import _ from "lodash" +import { AnyDocument } from "@budibase/types" +import { generator } from "../../../tests" +import { DatabaseImpl } from "../couch" +import { newid } from "../../utils" + +describe("DatabaseImpl", () => { + const database = new DatabaseImpl(generator.word()) + const documents: AnyDocument[] = [] + + beforeAll(async () => { + const docsToCreate = Array.from({ length: 10 }).map(() => ({ + _id: newid(), + })) + const createdDocs = await database.bulkDocs(docsToCreate) + + documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) + }) + + describe("docExists", () => { + it("can check existing docs by id", async () => { + const existingDoc = _.sample(documents) + const result = await database.docExists(existingDoc!._id!) + + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.docExists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! 
+ + const results = [] + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + results.push(await database.docExists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.docExists(id)).toBe(true) + + await database.remove(existingDoc!) + expect(await database.docExists(id)).toBe(false) + }) + }) +}) From 64ea969aafcd9477ebf1bb65b53a3266aaa72068 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:23:32 +0100 Subject: [PATCH 109/174] docWritethrough test --- .../src/cache/tests/docWritethrough.spec.ts | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 packages/backend-core/src/cache/tests/docWritethrough.spec.ts diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..bfb1da5f1c --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,47 @@ +import tk from "timekeeper" +import { env } from "../.." 
+import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" +import { DocWritethrough } from "../docWritethrough" +import _ from "lodash" + +env._set("MOCK_REDIS", null) + +const initialTime = Date.now() + +const WRITE_RATE_MS = 500 + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(() => { + tk.freeze(initialTime) + documentId = structures.db.id() + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) + + it("patching will not persist until timeout is hit", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + tk.travel(Date.now() + WRITE_RATE_MS - 1) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) + }) +}) From b1027527b3e820de2e9061aec3fb8f35a66b13ac Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:28:35 +0100 Subject: [PATCH 110/174] Add persisting tests --- .../src/cache/tests/docWritethrough.spec.ts | 39 ++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bfb1da5f1c..ab0de53bee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -33,7 +33,7 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) 
}) - it("patching will not persist until timeout is hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) @@ -43,5 +43,42 @@ describe("docWritethrough", () => { expect(await db.docExists(documentId)).toBe(false) }) }) + + it("patching will persist if timeout hits and next patch is called", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + _rev: expect.stringMatching(/1-.+/), + createdAt: new Date(initialTime + 500).toISOString(), + updatedAt: new Date(initialTime + 500).toISOString(), + }) + }) + }) + + it("patching will not persist even if timeout hits but next patch is not callec", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + WRITE_RATE_MS) + + expect(await db.docExists(documentId)).toBe(false) + }) + }) }) }) From b1e1226de6058c4c119546a8751f1ca955f1f078 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:41:26 +0100 Subject: [PATCH 111/174] Add extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 86 ++++++++++++++++--- 1 file changed, 75 insertions(+), 11 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index ab0de53bee..16e47ce3c3 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -7,9 +7,17 @@ import _ from "lodash" env._set("MOCK_REDIS", null) +const WRITE_RATE_MS = 500 + const initialTime = Date.now() -const WRITE_RATE_MS = 500 +function resetTime() { + tk.travel(initialTime) +} +function travelForward(ms: number) { + const updatedTime = Date.now() + ms + tk.travel(updatedTime) +} describe("docWritethrough", () => { const config = new DBTestConfiguration() @@ -28,7 +36,7 @@ describe("docWritethrough", () => { } beforeEach(() => { - tk.freeze(initialTime) + resetTime() documentId = structures.db.id() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -37,7 +45,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS - 1) + travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) expect(await db.docExists(documentId)).toBe(false) @@ -51,7 +59,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -62,23 +70,79 @@ describe("docWritethrough", () => { ...patch2, ...patch3, _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + 500).toISOString(), - updatedAt: new Date(initialTime + 500).toISOString(), + createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), }) }) }) + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + 
travelForward(WRITE_RATE_MS) + const date1 = new Date() + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + const date2 = new Date() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: date2.toISOString(), + }) + ) + }) + }) + it("patching will not persist even if timeout hits but next patch is not callec", async () => { await config.doInTenant(async () => { - const patch1 = generatePatchObject(2) - const patch2 = generatePatchObject(2) - await docWritethrough.patch(patch1) - await docWritethrough.patch(patch2) + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) - tk.travel(Date.now() + WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) expect(await db.docExists(documentId)).toBe(false) }) }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const time1 = travelForward(WRITE_RATE_MS) + const patch2 = generatePatchObject(1) + await docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! 
+ expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + travelForward(WRITE_RATE_MS) + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) }) }) From e228b9c207cca329a37305a7a7d0d6c932eeffa7 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 15:51:42 +0100 Subject: [PATCH 112/174] Test concurrency --- .../backend-core/src/cache/docWritethrough.ts | 12 ++++-- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++++++++++++- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 9e1977f797..13a85a0d84 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -19,9 +19,9 @@ interface CacheItem { } export class DocWritethrough { - db: Database - docId: string - writeRateMs: number + private db: Database + private _docId: string + private writeRateMs: number constructor( db: Database, @@ -29,10 +29,14 @@ export class DocWritethrough { writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { this.db = db - this.docId = docId + this._docId = docId this.writeRateMs = writeRateMs } + get docId() { + return this._docId + } + private makeCacheItem(): CacheItem { return { lastWrite: Date.now() } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 16e47ce3c3..aed87499ee 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -41,8 +41,9 @@ describe("docWritethrough", () => { docWritethrough = new DocWritethrough(db, documentId, 
WRITE_RATE_MS) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist if timeout from the creation does not hit", async () => { await config.doInTenant(async () => { + travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) travelForward(WRITE_RATE_MS - 1) @@ -116,7 +117,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - const time1 = travelForward(WRITE_RATE_MS) + travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -144,5 +145,41 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id(), + WRITE_RATE_MS + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + travelForward(WRITE_RATE_MS) + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) }) }) From 319b3afddbfc69afbc461326ee7374bed1115c30 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 16:48:16 +0100 Subject: [PATCH 113/174] Ensure keys are removed --- .../backend-core/src/cache/docWritethrough.ts | 4 +++ .../src/cache/tests/docWritethrough.spec.ts | 28 +++++++++++++++++++ 2 files changed, 
32 insertions(+) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 13a85a0d84..bde93182a9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -96,6 +96,10 @@ export class DocWritethrough { } await this.db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } ) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index aed87499ee..65e9450f62 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -181,5 +181,33 @@ describe("docWritethrough", () => { ) }) }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + travelForward(WRITE_RATE_MS) + + await docWritethrough.patch({}) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + travelForward(WRITE_RATE_MS) + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) }) }) From 00bf88c5bf210908d6416f893ef3e4cee3dd730e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Thu, 29 Feb 2024 17:01:16 +0100 Subject: [PATCH 114/174] Extra tests --- .../src/cache/tests/docWritethrough.spec.ts | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 65e9450f62..974494d1c9 100644 --- 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -77,6 +77,35 @@ describe("docWritethrough", () => { }) }) + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + travelForward(WRITE_RATE_MS) + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + travelForward(WRITE_RATE_MS) + + const patch4 = generatePatchObject(3) + await docWritethrough.patch(patch4) + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + ...patch4, + }) + ) + }) + }) + it("date audit fields are set correctly when persisting", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) From 6a81d21cb73be609e02e9b343bbef3d3264c6f9b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 10:53:18 +0100 Subject: [PATCH 115/174] Fixes and tests --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++---------- .../src/cache/tests/docWritethrough.spec.ts | 41 ++++++++- 2 files changed, 82 insertions(+), 47 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index bde93182a9..80063e4772 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,8 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private docInfoCacheKey: string + constructor( db: Database, docId: string, @@ -31,6 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs + this.docInfoCacheKey = `${this.docId}:info` } get docId() { @@ -44,26 +47,39 @@ export 
class DocWritethrough { async patch(data: Record) { const cache = await getCache() - const key = `${this.docId}:info` - const cacheItem = await cache.withCache( - key, - null, - () => this.makeCacheItem(), - { - useTenancy: false, - } - ) - await this.storeToCache(cache, data) - const updateDb = - !cacheItem || cacheItem.lastWrite <= Date.now() - this.writeRateMs - // let output = this.doc + const updateDb = await this.shouldUpdateDb(cache) + if (updateDb) { - await this.persistToDb(cache) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: this.docInfoCacheKey, + ttl: 15000, + }, + async () => { + if (await this.shouldUpdateDb(cache)) { + await this.persistToDb(cache) + await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } } } + private async shouldUpdateDb(cache: BaseCache) { + const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => + this.makeCacheItem() + ) + return cacheItem.lastWrite <= Date.now() - this.writeRateMs + } + private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { const cacheKey = this.docId + ":data:" + key @@ -72,39 +88,23 @@ export class DocWritethrough { } private async persistToDb(cache: BaseCache) { - const key = `${this.db.name}_${this.docId}` + let doc: AnyDocument | undefined + try { + doc = await this.db.get(this.docId) + } catch { + doc = { _id: this.docId } + } - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: key, - ttl: 15000, - }, - async () => { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } + const keysToPersist = await cache.keys(`${this.docId}:data:*`) + for (const key of keysToPersist) { + const data = 
await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } + await this.db.put(doc) - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } - ) - - if (!lockResponse.executed) { - throw `DocWriteThrough could not be persisted to db for ${key}` + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) } } } diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 974494d1c9..bca781e377 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,10 @@ import tk from "timekeeper" -import { env } from "../.." + import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -env._set("MOCK_REDIS", null) - const WRITE_RATE_MS = 500 const initialTime = Date.now() @@ -238,5 +236,42 @@ describe("docWritethrough", () => { ) }) }) + + it("concurrent calls will not cause multiple saves", async () => { + async function parallelPatch(count: number) { + await Promise.all( + Array.from({ length: count }).map(() => + docWritethrough.patch(generatePatchObject(1)) + ) + ) + } + + const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") + const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + + await config.doInTenant(async () => { + await parallelPatch(5) + expect(persistToDbSpy).not.toBeCalled() + expect(storeToCacheSpy).toBeCalledTimes(5) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(40) + + expect(persistToDbSpy).toBeCalledTimes(1) + 
expect(storeToCacheSpy).toBeCalledTimes(45) + + await parallelPatch(10) + + expect(persistToDbSpy).toBeCalledTimes(1) + expect(storeToCacheSpy).toBeCalledTimes(55) + + travelForward(WRITE_RATE_MS) + + await parallelPatch(5) + expect(persistToDbSpy).toBeCalledTimes(2) + expect(storeToCacheSpy).toBeCalledTimes(60) + }) + }) }) }) From 8062c287227c55f22e952f2006d77aa768bf757b Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:30 +0100 Subject: [PATCH 116/174] Making code more readable --- .../backend-core/src/cache/docWritethrough.ts | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 80063e4772..5148950c1d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -15,7 +15,7 @@ async function getCache() { } interface CacheItem { - lastWrite: number + nextWrite: number } export class DocWritethrough { @@ -40,8 +40,8 @@ export class DocWritethrough { return this._docId } - private makeCacheItem(): CacheItem { - return { lastWrite: Date.now() } + private makeNextWriteInfoItem(): CacheItem { + return { nextWrite: Date.now() + this.writeRateMs } } async patch(data: Record) { @@ -62,7 +62,10 @@ export class DocWritethrough { async () => { if (await this.shouldUpdateDb(cache)) { await this.persistToDb(cache) - await cache.store(this.docInfoCacheKey, this.makeCacheItem()) + await cache.store( + this.docInfoCacheKey, + this.makeNextWriteInfoItem() + ) } } ) @@ -75,9 +78,9 @@ export class DocWritethrough { private async shouldUpdateDb(cache: BaseCache) { const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeCacheItem() + this.makeNextWriteInfoItem() ) - return cacheItem.lastWrite <= Date.now() - this.writeRateMs + return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { From 
cd56e1ffff58309ecec060b7f55770da0d49634e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:04:55 +0100 Subject: [PATCH 117/174] Type caches --- packages/backend-core/src/cache/base/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 23c952c7b2..911bd6a831 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -58,12 +58,12 @@ export default class BaseCache { /** * Read from the cache. Write to the cache if not exists. */ - async withCache( + async withCache( key: string, ttl: number | null = null, - fetchFn: any, + fetchFn: () => Promise | T, opts = { useTenancy: true } - ) { + ): Promise { const cachedValue = await this.get(key, opts) if (cachedValue) { return cachedValue From 608865fe9d11dc0649e8a96969b8c1234b1f5308 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 11:12:31 +0100 Subject: [PATCH 118/174] Fix types --- packages/backend-core/src/cache/generic.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts index 3ac323a8d4..2d6d8b9472 100644 --- a/packages/backend-core/src/cache/generic.ts +++ b/packages/backend-core/src/cache/generic.ts @@ -26,7 +26,8 @@ export const store = (...args: Parameters) => GENERIC.store(...args) export const destroy = (...args: Parameters) => GENERIC.delete(...args) -export const withCache = (...args: Parameters) => - GENERIC.withCache(...args) +export const withCache = ( + ...args: Parameters> +) => GENERIC.withCache(...args) export const bustCache = (...args: Parameters) => GENERIC.bustCache(...args) From 6bd7e6b374c77330a1acea9a7ee5440baff7420e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:38:48 +0100 Subject: [PATCH 119/174] Namespace key in redis by db --- 
packages/backend-core/src/cache/docWritethrough.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5148950c1d..e46c763906 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -33,7 +33,7 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.docId}:info` + this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` } get docId() { From eb781037640ac07d5e1250271644e9bac2c27aab Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:41:40 +0100 Subject: [PATCH 120/174] Namespace key in redis by db --- packages/backend-core/src/cache/docWritethrough.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e46c763906..e367c9e060 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -23,6 +23,7 @@ export class DocWritethrough { private _docId: string private writeRateMs: number + private cacheKeyPrefix: string private docInfoCacheKey: string constructor( @@ -33,7 +34,8 @@ export class DocWritethrough { this.db = db this._docId = docId this.writeRateMs = writeRateMs - this.docInfoCacheKey = `${this.db.name}:${this.docId}:info` + this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } get docId() { @@ -85,7 +87,7 @@ export class DocWritethrough { private async storeToCache(cache: BaseCache, data: Record) { for (const [key, value] of Object.entries(data)) { - const cacheKey = this.docId + ":data:" + key + const cacheKey = this.cacheKeyPrefix + ":data:" + key await cache.store(cacheKey, { key, value }, undefined) } } @@ -98,7 +100,7 @@ 
export class DocWritethrough { doc = { _id: this.docId } } - const keysToPersist = await cache.keys(`${this.docId}:data:*`) + const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value From 3c944073eb0467a75a979ee4c524093cef64d166 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 1 Mar 2024 13:59:51 +0100 Subject: [PATCH 121/174] Use overloads --- .../src/cache/tests/docWritethrough.spec.ts | 6 ++-- .../backend-core/src/db/couch/DatabaseImpl.ts | 28 ++++++++++++------- .../backend-core/src/db/instrumentation.ts | 14 ++++------ .../src/db/tests/DatabaseImpl.spec.ts | 16 +++++------ packages/types/src/sdk/db.ts | 2 +- 5 files changed, 35 insertions(+), 31 deletions(-) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index bca781e377..4c4a4b2b60 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -35,7 +35,7 @@ describe("docWritethrough", () => { beforeEach(() => { resetTime() - documentId = structures.db.id() + documentId = structures.uuid() docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) @@ -47,7 +47,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS - 1) await docWritethrough.patch(generatePatchObject(2)) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) @@ -136,7 +136,7 @@ describe("docWritethrough", () => { travelForward(WRITE_RATE_MS) - expect(await db.docExists(documentId)).toBe(false) + expect(await db.exists(documentId)).toBe(false) }) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 9d198e4307..416313f520 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts 
+++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -70,7 +70,15 @@ export class DatabaseImpl implements Database { DatabaseImpl.nano = buildNano(couchInfo) } - async exists() { + exists(docId?: string) { + if (docId === undefined) { + return this.dbExists() + } + + return this.docExists(docId) + } + + private async dbExists() { const response = await directCouchUrlCall({ url: `${this.couchInfo.url}/${this.name}`, method: "HEAD", @@ -79,6 +87,15 @@ export class DatabaseImpl implements Database { return response.status === 200 } + private async docExists(id: string): Promise { + try { + await this.performCall(db => () => db.head(id)) + return true + } catch { + return false + } + } + private nano() { return this.instanceNano || DatabaseImpl.nano } @@ -135,15 +152,6 @@ export class DatabaseImpl implements Database { }) } - async docExists(id: string): Promise { - try { - await this.performCall(db => () => db.head(id)) - return true - } catch { - return false - } - } - async getMultiple( ids: string[], opts?: { allowMissing?: boolean } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 87af0e3127..795f30d7cd 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database { return this.db.name } - exists(): Promise { + exists(docId?: string): Promise { return tracer.trace("db.exists", span => { - span?.addTags({ db_name: this.name }) + span?.addTags({ db_name: this.name, doc_id: docId }) + if (docId) { + return this.db.exists(docId) + } return this.db.exists() }) } @@ -38,13 +41,6 @@ export class DDInstrumentedDatabase implements Database { }) } - docExists(id: string): Promise { - return tracer.trace("db.docExists", span => { - span?.addTags({ db_name: this.name, doc_id: id }) - return this.db.docExists(id) - }) - } - getMultiple( ids: string[], opts?: { allowMissing?: 
boolean | undefined } | undefined diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts index 140ecf4f2c..586f13f417 100644 --- a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts +++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts @@ -17,16 +17,16 @@ describe("DatabaseImpl", () => { documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev }))) }) - describe("docExists", () => { + describe("document exists", () => { it("can check existing docs by id", async () => { const existingDoc = _.sample(documents) - const result = await database.docExists(existingDoc!._id!) + const result = await database.exists(existingDoc!._id!) expect(result).toBe(true) }) it("can check non existing docs by id", async () => { - const result = await database.docExists(newid()) + const result = await database.exists(newid()) expect(result).toBe(false) }) @@ -36,9 +36,9 @@ describe("DatabaseImpl", () => { const id = existingDoc!._id! const results = [] - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) - results.push(await database.docExists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) expect(results).toEqual([true, true, true]) }) @@ -46,10 +46,10 @@ describe("DatabaseImpl", () => { it("returns false after the doc is deleted", async () => { const existingDoc = _.sample(documents) const id = existingDoc!._id! - expect(await database.docExists(id)).toBe(true) + expect(await database.exists(id)).toBe(true) await database.remove(existingDoc!) 
- expect(await database.docExists(id)).toBe(false) + expect(await database.exists(id)).toBe(false) }) }) }) diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index dafc9ced57..4d103d5be6 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -128,7 +128,7 @@ export interface Database { exists(): Promise get(id?: string): Promise - docExists(id: string): Promise + exists(docId: string): Promise getMultiple( ids: string[], opts?: { allowMissing?: boolean } From 770aff4f5e9f553443574d3feaba1ab6ddbc924d Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:38:45 +0100 Subject: [PATCH 122/174] Type inMemoryQueue --- .../backend-core/src/queue/inMemoryQueue.ts | 36 ++++++++++--------- packages/backend-core/src/queue/queue.ts | 2 ++ 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..3205b6f383 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,6 @@ import events from "events" import { timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -24,9 +25,9 @@ function newJob(queue: string, message: any) { * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. 
*/ -class InMemoryQueue { +class InMemoryQueue implements Partial { _name: string - _opts?: any + _opts?: QueueOptions _messages: any[] _emitter: EventEmitter _runCount: number @@ -37,7 +38,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] @@ -55,8 +56,12 @@ class InMemoryQueue { * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. */ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { + const delay = this._opts?.defaultJobOptions?.delay + if (delay) { + await new Promise(r => setTimeout(() => r(), delay)) + } if (this._messages.length <= 0) { return } @@ -70,7 +75,7 @@ class InMemoryQueue { } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +88,26 @@ class InMemoryQueue { * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) + this._messages.push(newJob(this._name, data)) this._addCount++ this._emitter.emit("message") + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed. 
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +115,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + async removeJobs(pattern: string) { // no-op } @@ -128,12 +132,12 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds From ee3cb5ae689ac0cc103227c3efd867e2ba31abec Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 15:43:47 +0100 Subject: [PATCH 123/174] Clean --- packages/worker/src/initPro.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts index ddc8d2562a..b34d514992 100644 --- a/packages/worker/src/initPro.ts +++ b/packages/worker/src/initPro.ts @@ -1,5 +1,4 @@ import { sdk as proSdk } from "@budibase/pro" -import * as userSdk from "./sdk/users" export const initPro = async () => { await proSdk.init({}) From e4d8fe2f4758e55640f6733e81ce4c07526812c4 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:18:01 +0100 Subject: [PATCH 124/174] Add doc-writethrough queue --- packages/backend-core/src/queue/constants.ts | 1 + 
packages/backend-core/src/queue/listeners.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..14dce5fe8d 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { From 151bfd103b8f399d02654f403dba4738d11d82db Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 4 Mar 2024 16:34:05 +0100 Subject: [PATCH 125/174] Use bull --- .../backend-core/src/cache/docWritethrough.ts | 123 +++++++++--------- 1 file changed, 64 insertions(+), 59 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e367c9e060..38a162435d 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,6 +3,9 @@ import { getDocWritethroughClient } from 
"../redis/init" import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" +import { JobQueue, createQueue } from "../queue" +import { context, db as dbUtils } from ".." + const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -14,17 +17,63 @@ async function getCache() { return CACHE } -interface CacheItem { - nextWrite: number +interface ProcessDocMessage { + tenantId: string + dbName: string + docId: string + cacheKeyPrefix: string } +export const docWritethroughProcessorQueue = createQueue( + JobQueue.DOC_WRITETHROUGH_QUEUE +) + +docWritethroughProcessorQueue.process(async message => { + const { dbName, tenantId, docId, cacheKeyPrefix } = message.data + const cache = await getCache() + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } + }) +}) + export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number private cacheKeyPrefix: string - private docInfoCacheKey: string constructor( db: Database, @@ -35,54 +84,31 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.docInfoCacheKey = `${this.cacheKeyPrefix}:info` } 
get docId() { return this._docId } - private makeNextWriteInfoItem(): CacheItem { - return { nextWrite: Date.now() + this.writeRateMs } - } - async patch(data: Record) { const cache = await getCache() await this.storeToCache(cache, data) - const updateDb = await this.shouldUpdateDb(cache) - - if (updateDb) { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: this.docInfoCacheKey, - ttl: 15000, - }, - async () => { - if (await this.shouldUpdateDb(cache)) { - await this.persistToDb(cache) - await cache.store( - this.docInfoCacheKey, - this.makeNextWriteInfoItem() - ) - } - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + docWritethroughProcessorQueue.add( + { + tenantId: context.getTenantId(), + dbName: this.db.name, + docId: this.docId, + cacheKeyPrefix: this.cacheKeyPrefix, + }, + { + delay: this.writeRateMs - 1, + jobId: this.cacheKeyPrefix, + removeOnFail: true, + removeOnComplete: true, } - } - } - - private async shouldUpdateDb(cache: BaseCache) { - const cacheItem = await cache.withCache(this.docInfoCacheKey, null, () => - this.makeNextWriteInfoItem() ) - return Date.now() >= cacheItem.nextWrite } private async storeToCache(cache: BaseCache, data: Record) { @@ -91,25 +117,4 @@ export class DocWritethrough { await cache.store(cacheKey, { key, value }, undefined) } } - - private async persistToDb(cache: BaseCache) { - let doc: AnyDocument | undefined - try { - doc = await this.db.get(this.docId) - } catch { - doc = { _id: this.docId } - } - - const keysToPersist = await cache.keys(`${this.cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await this.db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } - } } From caff2876dd604a97e9370861927f52476860be11 Mon Sep 17 00:00:00 
2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:50:58 +0100 Subject: [PATCH 126/174] Tests --- .../backend-core/src/cache/docWritethrough.ts | 99 +++++++++------ .../src/cache/tests/docWritethrough.spec.ts | 120 ++++++++++-------- .../backend-core/src/queue/inMemoryQueue.ts | 76 ++++++++--- 3 files changed, 186 insertions(+), 109 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 38a162435d..f53cfbfe5f 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -4,7 +4,8 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import * as locks from "../redis/redlockImpl" import { JobQueue, createQueue } from "../queue" -import { context, db as dbUtils } from ".." +import * as context from "../context" +import * as dbUtils from "../db" const DEFAULT_WRITE_RATE_MS = 10000 @@ -28,50 +29,71 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -docWritethroughProcessorQueue.process(async message => { - const { dbName, tenantId, docId, cacheKeyPrefix } = message.data - const cache = await getCache() - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } +let _init = false +export const init = () => { + if (_init) { + return + } + docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, 
+ async () => { + await persistToDb(message.data) } + ) - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) - } + }) }) -}) + _init = true +} + +export async function persistToDb({ + dbName, + docId, + cacheKeyPrefix, +}: { + dbName: string + docId: string + cacheKeyPrefix: string +}) { + const cache = await getCache() + + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } + } + + const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } + + await db.put(doc) + + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } +} export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number + private tenantId: string private cacheKeyPrefix: string @@ -84,6 +106,7 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` + this.tenantId = context.getTenantId() } get docId() { @@ -97,13 +120,13 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: context.getTenantId(), + tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, }, { - delay: this.writeRateMs - 1, + delay: this.writeRateMs, jobId: this.cacheKeyPrefix, removeOnFail: true, removeOnComplete: true, diff --git 
a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 4c4a4b2b60..83af66a9d2 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,20 +1,32 @@ -import tk from "timekeeper" - import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" -import { DocWritethrough } from "../docWritethrough" import _ from "lodash" -const WRITE_RATE_MS = 500 +import { + DocWritethrough, + docWritethroughProcessorQueue, + init, +} from "../docWritethrough" +import InMemoryQueue from "../../queue/inMemoryQueue" + +const WRITE_RATE_MS = 1000 const initialTime = Date.now() +jest.useFakeTimers({ + now: initialTime, +}) + function resetTime() { - tk.travel(initialTime) + jest.setSystemTime(initialTime) } -function travelForward(ms: number) { - const updatedTime = Date.now() + ms - tk.travel(updatedTime) +async function travelForward(ms: number) { + await jest.advanceTimersByTimeAsync(ms) + + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + while (queue.hasRunningJobs()) { + await jest.runOnlyPendingTimersAsync() + } } describe("docWritethrough", () => { @@ -33,33 +45,37 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeEach(() => { + beforeAll(() => init()) + + beforeEach(async () => { resetTime() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + await config.doInTenant(async () => { + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + }) }) - it("patching will not persist if timeout from the creation does not hit", async () => { + it("patching will not persist if timeout does not hit", async () => { await config.doInTenant(async () => { - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await 
docWritethrough.patch(generatePatchObject(2)) - travelForward(WRITE_RATE_MS - 1) - await docWritethrough.patch(generatePatchObject(2)) + await travelForward(WRITE_RATE_MS - 1) expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits and next patch is called", async () => { + it("patching will persist if timeout hits", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) + // This will not be persisted const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) @@ -67,7 +83,6 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - ...patch3, _rev: expect.stringMatching(/1-.+/), createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), @@ -82,15 +97,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - travelForward(WRITE_RATE_MS) - - const patch4 = generatePatchObject(3) - await docWritethrough.patch(patch4) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -98,7 +110,6 @@ describe("docWritethrough", () => { ...patch1, ...patch2, ...patch3, - ...patch4, }) ) }) @@ -109,16 +120,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date1 = new Date() await docWritethrough.patch(patch2) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const date2 = new Date() - 
const patch3 = generatePatchObject(3) - await docWritethrough.patch(patch3) - expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -129,22 +137,11 @@ describe("docWritethrough", () => { }) }) - it("patching will not persist even if timeout hits but next patch is not callec", async () => { - await config.doInTenant(async () => { - await docWritethrough.patch(generatePatchObject(2)) - await docWritethrough.patch(generatePatchObject(2)) - - travelForward(WRITE_RATE_MS) - - expect(await db.exists(documentId)).toBe(false) - }) - }) - it("concurrent patches will override keys", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await docWritethrough.patch(patch1) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -155,13 +152,14 @@ describe("docWritethrough", () => { }) ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -173,7 +171,7 @@ describe("docWritethrough", () => { }) }) - it("concurrent patches to multiple DocWritethrough will not contaminate each other", async () => { + it("concurrent patches to different docWritethrough will not pollute each other", async () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, @@ -186,12 +184,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) + await 
travelForward(WRITE_RATE_MS) expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -214,7 +213,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await docWritethrough.patch({}) @@ -224,9 +223,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) + await travelForward(WRITE_RATE_MS) expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -246,30 +246,46 @@ describe("docWritethrough", () => { ) } - const persistToDbSpy = jest.spyOn(docWritethrough as any, "persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) - expect(persistToDbSpy).not.toBeCalled() expect(storeToCacheSpy).toBeCalledTimes(5) + expect(await db.exists(documentId)).toBe(false) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(40) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(45) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) + await parallelPatch(10) - expect(persistToDbSpy).toBeCalledTimes(1) expect(storeToCacheSpy).toBeCalledTimes(55) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/1-.+/), + }) + ) - travelForward(WRITE_RATE_MS) + await travelForward(WRITE_RATE_MS) await parallelPatch(5) - expect(persistToDbSpy).toBeCalledTimes(2) + await travelForward(WRITE_RATE_MS) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + _rev: expect.stringMatching(/3-.+/), + 
}) + ) expect(storeToCacheSpy).toBeCalledTimes(60) }) }) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 3205b6f383..f201714903 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -2,6 +2,13 @@ import events from "events" import { timeout } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" +interface JobMessage { + timestamp: number + queue: string + data: any + opts?: JobOptions +} + /** * Bull works with a Job wrapper around all messages that contains a lot more information about * the state of the message, this object constructor implements the same schema of Bull jobs @@ -11,12 +18,12 @@ import { Queue, QueueOptions, JobOptions } from "./queue" * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -28,10 +35,12 @@ function newJob(queue: string, message: any) { class InMemoryQueue implements Partial { _name: string _opts?: QueueOptions - _messages: any[] + _messages: JobMessage[] + _queuedJobIds: Set _emitter: EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. 
@@ -45,6 +54,7 @@ class InMemoryQueue implements Partial { this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -58,19 +68,24 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - const delay = this._opts?.defaultJobOptions?.delay - if (delay) { - await new Promise(r => setTimeout(() => r(), delay)) + try { + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() + + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } + } catch (e: any) { + throw e } - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ }) } @@ -89,12 +104,31 @@ class InMemoryQueue implements Partial { */ // eslint-disable-next-line no-unused-vars async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job ${jobId}`) + return + } + if (typeof data !== "object") { throw "Queue only supports carrying JSON." 
} - this._messages.push(newJob(this._name, data)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } return {} as any } @@ -143,7 +177,11 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } From fb2c7467b0aa29f99fdb56f541e0e1842d88b5de Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 13:55:07 +0100 Subject: [PATCH 127/174] Clean --- .../backend-core/src/queue/inMemoryQueue.ts | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index f201714903..6c8107c7a4 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -68,23 +68,19 @@ class InMemoryQueue implements Partial { */ async process(func: any) { this._emitter.on("message", async () => { - try { - if (this._messages.length <= 0) { - return - } - let msg = this._messages.shift() + if (this._messages.length <= 0) { + return + } + let msg = this._messages.shift() - let resp = func(msg) - if (resp.then != null) { - await resp - } - this._runCount++ - const jobId = msg?.opts?.jobId?.toString() - if (jobId && msg?.opts?.removeOnComplete) { - this._queuedJobIds.delete(jobId) - } - } catch (e: any) { - throw e + let resp = func(msg) + if (resp.then != null) { + await resp + } + this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) } }) } 
From c4fc94fb280c6409ebcbd97f168277dfd6ce8f9a Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:19:05 +0100 Subject: [PATCH 128/174] Remove defaults and init --- .../backend-core/src/cache/docWritethrough.ts | 52 +++++++------------ .../src/cache/tests/docWritethrough.spec.ts | 3 -- 2 files changed, 20 insertions(+), 35 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index f53cfbfe5f..1a16f60eb9 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -7,8 +7,6 @@ import { JobQueue, createQueue } from "../queue" import * as context from "../context" import * as dbUtils from "../db" -const DEFAULT_WRITE_RATE_MS = 10000 - let CACHE: BaseCache | null = null async function getCache() { if (!CACHE) { @@ -29,33 +27,27 @@ export const docWritethroughProcessorQueue = createQueue( JobQueue.DOC_WRITETHROUGH_QUEUE ) -let _init = false -export const init = () => { - if (_init) { - return - } - docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - } - ) - - if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) +docWritethroughProcessorQueue.process(async message => { + const { tenantId, cacheKeyPrefix } = message.data + await context.doInTenant(tenantId, async () => { + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: 15000, + }, + async () => { + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) } - }) + ) 
+ + if (!lockResponse.executed) { + console.log(`Ignoring redlock conflict in write-through cache`) + } }) - _init = true -} +}) export async function persistToDb({ dbName, @@ -97,11 +89,7 @@ export class DocWritethrough { private cacheKeyPrefix: string - constructor( - db: Database, - docId: string, - writeRateMs: number = DEFAULT_WRITE_RATE_MS - ) { + constructor(db: Database, docId: string, writeRateMs: number) { this.db = db this._docId = docId this.writeRateMs = writeRateMs diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 83af66a9d2..a5765171cb 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -5,7 +5,6 @@ import _ from "lodash" import { DocWritethrough, docWritethroughProcessorQueue, - init, } from "../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" @@ -45,8 +44,6 @@ describe("docWritethrough", () => { }, {} as Record) } - beforeAll(() => init()) - beforeEach(async () => { resetTime() documentId = structures.uuid() From 37935112a478b064099552b223cbfb23e2ccf2fe Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 14:47:23 +0100 Subject: [PATCH 129/174] Add comment --- packages/backend-core/src/cache/tests/docWritethrough.spec.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index a5765171cb..3e638a4eec 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -256,6 +256,8 @@ describe("docWritethrough", () => { expect(storeToCacheSpy).toBeCalledTimes(45) + // Ideally we want to spy on persistToDb from ./docWritethrough, but due our barrel files configuration required quite of a complex setup. 
+ // We are relying on the document being stored only once (otherwise we would have _rev updated) expect(await db.get(documentId)).toEqual( expect.objectContaining({ _id: documentId, From e584d82e6f6dd052d828d45152aea29f95e7cb65 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 15:02:02 +0100 Subject: [PATCH 130/174] Improve redlock non executed response --- packages/backend-core/src/cache/docWritethrough.ts | 9 +++++++++ packages/backend-core/src/redis/redlockImpl.ts | 10 +++++++++- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 1a16f60eb9..ebb64ee9e5 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -44,6 +44,15 @@ docWritethroughProcessorQueue.process(async message => { ) if (!lockResponse.executed) { + if ( + lockResponse.reason !== + locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce + ) { + console.error("Error persisting docWritethrough", { + data: message.data, + }) + throw "Error persisting docWritethrough" + } console.log(`Ignoring redlock conflict in write-through cache`) } }) diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts index adeb5b12ec..28babb9405 100644 --- a/packages/backend-core/src/redis/redlockImpl.ts +++ b/packages/backend-core/src/redis/redlockImpl.ts @@ -82,6 +82,11 @@ type SuccessfulRedlockExecution = { } type UnsuccessfulRedlockExecution = { executed: false + reason: UnsuccessfulRedlockExecutionReason +} + +export const enum UnsuccessfulRedlockExecutionReason { + LockTakenWithTryOnce = "LOCK_TAKEN_WITH_TRY_ONCE", } type RedlockExecution = @@ -141,7 +146,10 @@ export async function doWithLock( if (opts.type === LockType.TRY_ONCE) { // don't throw for try-once locks, they will always error // due to retry count (0) exceeded - return { executed: false } + 
return { + executed: false, + reason: UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce, + } } else { throw e } From f5e2dc7a2709019752d358228575db130dd5aed0 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 17:15:50 +0100 Subject: [PATCH 131/174] Do not use lock --- .../backend-core/src/cache/docWritethrough.ts | 37 ++----------------- .../src/cache/tests/docWritethrough.spec.ts | 4 +- 2 files changed, 4 insertions(+), 37 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index ebb64ee9e5..d4d651c688 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,7 +1,6 @@ import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database, LockName, LockType } from "@budibase/types" -import * as locks from "../redis/redlockImpl" +import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as context from "../context" @@ -17,7 +16,6 @@ async function getCache() { } interface ProcessDocMessage { - tenantId: string dbName: string docId: string cacheKeyPrefix: string @@ -28,34 +26,8 @@ export const docWritethroughProcessorQueue = createQueue( ) docWritethroughProcessorQueue.process(async message => { - const { tenantId, cacheKeyPrefix } = message.data - await context.doInTenant(tenantId, async () => { - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_ONCE, - name: LockName.PERSIST_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: 15000, - }, - async () => { - await persistToDb(message.data) - console.log("DocWritethrough persisted", { data: message.data }) - } - ) - - if (!lockResponse.executed) { - if ( - lockResponse.reason !== - locks.UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce - ) { - console.error("Error persisting docWritethrough", { - data: message.data, - }) - 
throw "Error persisting docWritethrough" - } - console.log(`Ignoring redlock conflict in write-through cache`) - } - }) + await persistToDb(message.data) + console.log("DocWritethrough persisted", { data: message.data }) }) export async function persistToDb({ @@ -94,7 +66,6 @@ export class DocWritethrough { private db: Database private _docId: string private writeRateMs: number - private tenantId: string private cacheKeyPrefix: string @@ -103,7 +74,6 @@ export class DocWritethrough { this._docId = docId this.writeRateMs = writeRateMs this.cacheKeyPrefix = `${this.db.name}:${this.docId}` - this.tenantId = context.getTenantId() } get docId() { @@ -117,7 +87,6 @@ export class DocWritethrough { docWritethroughProcessorQueue.add( { - tenantId: this.tenantId, dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 3e638a4eec..9bbcd6af44 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -47,9 +47,7 @@ describe("docWritethrough", () => { beforeEach(async () => { resetTime() documentId = structures.uuid() - await config.doInTenant(async () => { - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) - }) + docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) }) it("patching will not persist if timeout does not hit", async () => { From 6707da4ac21db6eaf0fa1b0fbbac85d4f027e5c8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 18:06:14 +0100 Subject: [PATCH 132/174] Use bulk --- packages/backend-core/src/cache/base/index.ts | 19 +++++++++++++++++++ .../backend-core/src/cache/docWritethrough.ts | 10 +++++----- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 
911bd6a831..942d70ae72 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -46,6 +46,25 @@ export default class BaseCache { await client.store(key, value, ttl) } + /** + * Bulk write to the cache. + */ + async bulkStore( + data: Record, + ttl: number | null = null, + opts = { useTenancy: true } + ) { + if (opts.useTenancy) { + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[generateTenantKey(key)] = value + return acc + }, {} as Record) + } + + const client = await this.getClient() + await client.bulkStore(data, ttl) + } + /** * Remove from cache. */ diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index d4d651c688..a0bc14ec5c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -3,7 +3,6 @@ import { getDocWritethroughClient } from "../redis/init" import { AnyDocument, Database } from "@budibase/types" import { JobQueue, createQueue } from "../queue" -import * as context from "../context" import * as dbUtils from "../db" let CACHE: BaseCache | null = null @@ -101,9 +100,10 @@ export class DocWritethrough { } private async storeToCache(cache: BaseCache, data: Record) { - for (const [key, value] of Object.entries(data)) { - const cacheKey = this.cacheKeyPrefix + ":data:" + key - await cache.store(cacheKey, { key, value }, undefined) - } + data = Object.entries(data).reduce((acc, [key, value]) => { + acc[this.cacheKeyPrefix + ":data:" + key] = { key, value } + return acc + }, {} as Record) + await cache.bulkStore(data, null) } } From 0a2fb4a3a6b432ed8277738cbee054698911b07e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 23:32:02 +0100 Subject: [PATCH 133/174] Use message id to handle concurrency --- .../backend-core/src/cache/docWritethrough.ts | 88 +++++++++++++++---- 1 file changed, 69 insertions(+), 19 deletions(-) diff --git 
a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index a0bc14ec5c..3f84f82bc4 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,9 +1,11 @@ import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database } from "@budibase/types" +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as dbUtils from "../db" +import { Duration, newid } from "../utils" +import { context, locks } from ".." let CACHE: BaseCache | null = null async function getCache() { @@ -18,6 +20,7 @@ interface ProcessDocMessage { dbName: string docId: string cacheKeyPrefix: string + messageId: string } export const docWritethroughProcessorQueue = createQueue( @@ -25,21 +28,55 @@ export const docWritethroughProcessorQueue = createQueue( ) docWritethroughProcessorQueue.process(async message => { - await persistToDb(message.data) - console.log("DocWritethrough persisted", { data: message.data }) + const { cacheKeyPrefix, messageId } = message.data + + const cache = await getCache() + const latestMessageId = await cache.get( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID + ) + if (messageId !== latestMessageId) { + // Nothing to do, another message overrode it + return + } + + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: Duration.fromSeconds(60).toMs(), + }, + async () => { + const latestMessageId = await cache.get( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID + ) + if (messageId !== latestMessageId) { + // Nothing to do, another message overrode it + return + } + + await persistToDb(cache, message.data) + console.log("DocWritethrough persisted", { data: message.data }) + } + ) + + if (!lockResponse.executed) { + console.log(`Ignoring 
redlock conflict in write-through cache`) + } }) -export async function persistToDb({ - dbName, - docId, - cacheKeyPrefix, -}: { - dbName: string - docId: string - cacheKeyPrefix: string -}) { - const cache = await getCache() - +export async function persistToDb( + cache: BaseCache, + { + dbName, + docId, + cacheKeyPrefix, + }: { + dbName: string + docId: string + cacheKeyPrefix: string + } +) { const db = dbUtils.getDB(dbName) let doc: AnyDocument | undefined try { @@ -48,7 +85,9 @@ export async function persistToDb({ doc = { _id: docId } } - const keysToPersist = await cache.keys(`${cacheKeyPrefix}:data:*`) + const keysToPersist = await cache.keys( + REDIS_KEYS(cacheKeyPrefix).DATA.GET_ALL + ) for (const key of keysToPersist) { const data = await cache.get(key, { useTenancy: false }) doc[data.key] = data.value @@ -83,27 +122,38 @@ export class DocWritethrough { const cache = await getCache() await this.storeToCache(cache, data) + const messageId = newid() + await cache.store( + REDIS_KEYS(this.cacheKeyPrefix).LATEST_MESSAGE_ID, + messageId + ) docWritethroughProcessorQueue.add( { dbName: this.db.name, docId: this.docId, cacheKeyPrefix: this.cacheKeyPrefix, + messageId, }, { delay: this.writeRateMs, - jobId: this.cacheKeyPrefix, - removeOnFail: true, - removeOnComplete: true, } ) } private async storeToCache(cache: BaseCache, data: Record) { data = Object.entries(data).reduce((acc, [key, value]) => { - acc[this.cacheKeyPrefix + ":data:" + key] = { key, value } + acc[REDIS_KEYS(this.cacheKeyPrefix).DATA.VALUE(key)] = { key, value } return acc }, {} as Record) await cache.bulkStore(data, null) } } + +const REDIS_KEYS = (prefix: string) => ({ + DATA: { + VALUE: (key: string) => prefix + ":data:" + key, + GET_ALL: prefix + ":data:*", + }, + LATEST_MESSAGE_ID: prefix + ":info:latestMessageId", +}) From 3dbf0b3a64789022897a044295be5501487e81b9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 23:33:20 +0100 Subject: [PATCH 134/174] Throw errors on lock 
not executed --- .../backend-core/src/cache/docWritethrough.ts | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 3f84f82bc4..14b186271c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -24,7 +24,16 @@ interface ProcessDocMessage { } export const docWritethroughProcessorQueue = createQueue( - JobQueue.DOC_WRITETHROUGH_QUEUE + JobQueue.DOC_WRITETHROUGH_QUEUE, + { + jobOptions: { + attempts: 5, + backoff: { + type: "fixed", + delay: 1000, + }, + }, + } ) docWritethroughProcessorQueue.process(async message => { @@ -41,7 +50,7 @@ docWritethroughProcessorQueue.process(async message => { const lockResponse = await locks.doWithLock( { - type: LockType.TRY_ONCE, + type: LockType.TRY_TWICE, name: LockName.PERSIST_WRITETHROUGH, resource: cacheKeyPrefix, ttl: Duration.fromSeconds(60).toMs(), @@ -61,7 +70,7 @@ docWritethroughProcessorQueue.process(async message => { ) if (!lockResponse.executed) { - console.log(`Ignoring redlock conflict in write-through cache`) + throw new Error(`Ignoring redlock conflict in write-through cache`) } }) From a9046e9f26019904fb0158d142f8bf415214817c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Tue, 5 Mar 2024 23:33:57 +0100 Subject: [PATCH 135/174] Use lock name --- packages/backend-core/src/cache/docWritethrough.ts | 2 +- packages/types/src/sdk/locks.ts | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 14b186271c..5454362343 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -51,7 +51,7 @@ docWritethroughProcessorQueue.process(async message => { const lockResponse = await locks.doWithLock( { type: LockType.TRY_TWICE, - name: 
LockName.PERSIST_WRITETHROUGH, + name: LockName.PERSIST_DOC_WRITETHROUGH, resource: cacheKeyPrefix, ttl: Duration.fromSeconds(60).toMs(), }, diff --git a/packages/types/src/sdk/locks.ts b/packages/types/src/sdk/locks.ts index c7c028a135..67de109657 100644 --- a/packages/types/src/sdk/locks.ts +++ b/packages/types/src/sdk/locks.ts @@ -23,6 +23,7 @@ export enum LockName { APP_MIGRATION = "app_migrations", PROCESS_AUTO_COLUMNS = "process_auto_columns", PROCESS_USER_INVITE = "process_user_invite", + PERSIST_DOC_WRITETHROUGH = "persist_doc_writethrough", } export type LockOptions = { From 8e8378d1bebf5d1ea777f9fee755529e04090bcc Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 00:56:30 +0100 Subject: [PATCH 136/174] Delete info on process --- packages/backend-core/src/cache/base/index.ts | 9 +++++++++ packages/backend-core/src/cache/docWritethrough.ts | 5 +++++ 2 files changed, 14 insertions(+) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index 942d70ae72..a712ddc263 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -108,4 +108,13 @@ export default class BaseCache { throw err } } + + /** + * Delete the entry if the provided value matches the stored one. + */ + async deleteIfValue(key: string, value: any, opts = { useTenancy: true }) { + key = opts.useTenancy ? 
generateTenantKey(key) : key + const client = await this.getClient() + await client.deleteIfValue(key, value) + } } diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index 5454362343..e8e0e7beb3 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -66,6 +66,11 @@ docWritethroughProcessorQueue.process(async message => { await persistToDb(cache, message.data) console.log("DocWritethrough persisted", { data: message.data }) + + await cache.deleteIfValue( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID, + latestMessageId + ) } ) From d35d382ddb54c24ceb828e691c5f96714d0eeaaf Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 00:58:05 +0100 Subject: [PATCH 137/174] Increase timeouts --- packages/backend-core/src/redis/tests/redis.spec.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts index a7c0384d78..41bbc7453a 100644 --- a/packages/backend-core/src/redis/tests/redis.spec.ts +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -3,6 +3,8 @@ import { generator, structures } from "../../../tests" import RedisWrapper from "../redis" import { env } from "../.." +jest.setTimeout(30000) + describe("redis", () => { let redis: RedisWrapper let container: StartedTestContainer From 2b206f2105681140a1079ba49bcc434df1e7f489 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 6 Mar 2024 10:00:02 +0000 Subject: [PATCH 138/174] Fix the TODO I left myself last night. 
--- packages/backend-core/src/db/Replication.ts | 41 +++++---------------- packages/server/src/api/controllers/role.ts | 25 +++++++------ 2 files changed, 23 insertions(+), 43 deletions(-) diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts index 12c11eb9e2..9c960d76dd 100644 --- a/packages/backend-core/src/db/Replication.ts +++ b/packages/backend-core/src/db/Replication.ts @@ -5,56 +5,33 @@ import { DocumentType } from "../constants" class Replication { source: PouchDB.Database target: PouchDB.Database - replication?: Promise - /** - * - * @param source - the DB you want to replicate or rollback to - * @param target - the DB you want to replicate to, or rollback from - */ constructor({ source, target }: { source: string; target: string }) { this.source = getPouchDB(source) this.target = getPouchDB(target) } - close() { - return Promise.all([closePouchDB(this.source), closePouchDB(this.target)]) + async close() { + await Promise.all([closePouchDB(this.source), closePouchDB(this.target)]) } - promisify(operation: any, opts = {}) { - return new Promise(resolve => { - operation(this.target, opts) - .on("denied", function (err: any) { + replicate(opts: PouchDB.Replication.ReplicateOptions = {}) { + return new Promise>(resolve => { + this.source.replicate + .to(this.target, opts) + .on("denied", function (err) { // a document failed to replicate (e.g. due to permissions) throw new Error(`Denied: Document failed to replicate ${err}`) }) - .on("complete", function (info: any) { + .on("complete", function (info) { return resolve(info) }) - .on("error", function (err: any) { + .on("error", function (err) { throw new Error(`Replication Error: ${err}`) }) }) } - /** - * Two way replication operation, intended to be promise based. 
- * @param opts - PouchDB replication options - */ - sync(opts: PouchDB.Replication.SyncOptions = {}) { - this.replication = this.promisify(this.source.sync, opts) - return this.replication - } - - /** - * One way replication operation, intended to be promise based. - * @param opts - PouchDB replication options - */ - replicate(opts: PouchDB.Replication.ReplicateOptions = {}) { - this.replication = this.promisify(this.source.replicate.to, opts) - return this.replication - } - appReplicateOpts( opts: PouchDB.Replication.ReplicateOptions = {} ): PouchDB.Replication.ReplicateOptions { diff --git a/packages/server/src/api/controllers/role.ts b/packages/server/src/api/controllers/role.ts index 6b62c568e2..84179d8dbc 100644 --- a/packages/server/src/api/controllers/role.ts +++ b/packages/server/src/api/controllers/role.ts @@ -107,17 +107,20 @@ export async function save(ctx: UserCtx) { role._rev = result.rev ctx.body = role - // TODO: need to check that the prod DB actually exists, I think it won't - // if the app has never been published. 
- const replication = new dbCore.Replication({ - source: context.getDevAppDB().name, - target: context.getProdAppDB().name, - }) - await replication.replicate({ - filter: (doc: any, params: any) => { - return doc._id === _id - }, - }) + const devDb = context.getDevAppDB() + const prodDb = context.getProdAppDB() + + if (await prodDb.exists()) { + const replication = new dbCore.Replication({ + source: devDb.name, + target: prodDb.name, + }) + await replication.replicate({ + filter: (doc: any, params: any) => { + return doc._id === _id + }, + }) + } } export async function destroy(ctx: UserCtx) { From b232371efff95f7925c93960ba92862324cb1a46 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 6 Mar 2024 10:01:42 +0000 Subject: [PATCH 139/174] remove uneeded comment --- packages/shared-core/src/tests/filters.test.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index 1e0a68de89..0cf7e0e92a 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -221,8 +221,6 @@ describe("runLuceneQuery", () => { ]) }) - // what should the name of this test be if it's the same test as above but with different operands - it("should return matching results if allOr is true and only one filter matches with different operands", () => { const query = buildQuery({ allOr: true, From eb00ce401f9819406acde58c60018945bc95864e Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 6 Mar 2024 10:10:28 +0000 Subject: [PATCH 140/174] pr comments --- packages/shared-core/src/filters.ts | 7 ++++--- packages/shared-core/src/tests/filters.test.ts | 10 ++++++++++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 0a1673e558..84b6076d56 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -12,6 +12,7 @@ import { 
import dayjs from "dayjs" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" import { deepGet } from "./helpers" +import test from "node:test" const HBS_REGEX = /{{([^{].*?)}}/g @@ -359,6 +360,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { const oneOf = match( SearchQueryOperators.ONE_OF, (docValue: any, testValue: any) => { + console.log(testValue) if (typeof testValue === "string") { testValue = testValue.split(",") if (typeof docValue === "number") { @@ -410,13 +412,13 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { .filter( ([key, value]: [string, any]) => !["allOr", "onEmptyFilter"].includes(key) && + value && Object.keys(value as Record).length > 0 ) .map(([key]) => key as any) const results: boolean[] = activeFilterKeys.map(filterKey => { - const filterFunction = filterFunctions[filterKey] - return filterFunction ? filterFunction(doc) : true + return filterFunctions[filterKey]?.(doc) ?? false }) if (query!.allOr) { @@ -425,7 +427,6 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { return results.every(result => result === true) } } - return docs.filter(docMatch) } diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index 0cf7e0e92a..1f8f534f0d 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -240,6 +240,16 @@ describe("runLuceneQuery", () => { expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([]) }) + + it("should handle when a value is null or undefined", () => { + const query = buildQuery({ + allOr: true, + equal: { order_status: null }, + oneOf: { label: ["FRAGILE"] }, + }) + + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2]) + }) }) describe("buildLuceneQuery", () => { From 30f0e3459f1ec83af56f992a14c3507813d1dfa7 Mon Sep 17 00:00:00 2001 From: Dean Date: Wed, 6 Mar 2024 10:29:43 +0000 Subject: 
[PATCH 141/174] PR feedback --- .../portal/onboarding/steps/NewFormSteps.svelte | 9 +++++++++ .../src/components/portal/onboarding/steps/index.js | 1 + .../builder/src/components/portal/onboarding/tours.js | 9 +++------ 3 files changed, 13 insertions(+), 6 deletions(-) create mode 100644 packages/builder/src/components/portal/onboarding/steps/NewFormSteps.svelte diff --git a/packages/builder/src/components/portal/onboarding/steps/NewFormSteps.svelte b/packages/builder/src/components/portal/onboarding/steps/NewFormSteps.svelte new file mode 100644 index 0000000000..7c3679f9e8 --- /dev/null +++ b/packages/builder/src/components/portal/onboarding/steps/NewFormSteps.svelte @@ -0,0 +1,9 @@ +
+ When faced with a sizable form, consider implementing a multi-step approach to + enhance user experience. + +

+ Breaking the form into multiple steps can significantly improve usability by + making the process more digestible for your users. +

+
diff --git a/packages/builder/src/components/portal/onboarding/steps/index.js b/packages/builder/src/components/portal/onboarding/steps/index.js index 6694ce97a7..e15d191652 100644 --- a/packages/builder/src/components/portal/onboarding/steps/index.js +++ b/packages/builder/src/components/portal/onboarding/steps/index.js @@ -2,3 +2,4 @@ export { default as OnboardingData } from "./OnboardingData.svelte" export { default as OnboardingDesign } from "./OnboardingDesign.svelte" export { default as OnboardingPublish } from "./OnboardingPublish.svelte" export { default as NewViewUpdateFormRowId } from "./NewViewUpdateFormRowId.svelte" +export { default as NewFormSteps } from "./NewFormSteps.svelte" diff --git a/packages/builder/src/components/portal/onboarding/tours.js b/packages/builder/src/components/portal/onboarding/tours.js index 5cf6735970..fab97cdd03 100644 --- a/packages/builder/src/components/portal/onboarding/tours.js +++ b/packages/builder/src/components/portal/onboarding/tours.js @@ -7,6 +7,7 @@ import { OnboardingDesign, OnboardingPublish, NewViewUpdateFormRowId, + NewFormSteps, } from "./steps" import { API } from "api" import { customPositionHandler } from "components/design/settings/controls/EditComponentPopover" @@ -176,9 +177,7 @@ const getTours = () => { { id: TOUR_STEP_KEYS.BUILDER_FORM_CREATE_STEPS, title: "Add multiple steps", - body: `When faced with a sizable form, consider implementing a multi-step - approach to enhance user experience. Breaking the form into multiple steps - can significantly improve usability by making the process more digestible for your users.`, + layout: NewFormSteps, query: "#steps-prop-control-wrap", onComplete: () => { builderStore.highlightSetting() @@ -210,9 +209,7 @@ const getTours = () => { { id: TOUR_STEP_KEYS.BUILDER_FORM_VIEW_UPDATE_STEPS, title: "Add multiple steps", - body: `When faced with a sizable form, consider implementing a multi-step - approach to enhance user experience. 
Breaking the form into multiple steps - can significantly improve usability by making the process more digestible for your users.`, + layout: NewFormSteps, query: "#steps-prop-control-wrap", onComplete: () => { builderStore.highlightSetting() From 61c4b83650206626fcbe7ae3c2bf5bd1e76a1bdd Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 11:55:49 +0100 Subject: [PATCH 142/174] Test with spies --- .../backend-core/src/cache/docWritethrough.ts | 131 +++++++++--------- .../src/cache/tests/docWritethrough.spec.ts | 14 +- 2 files changed, 76 insertions(+), 69 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index e8e0e7beb3..af3df11a9c 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,11 +1,11 @@ +import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import BaseCache from "./base" import { getDocWritethroughClient } from "../redis/init" -import { AnyDocument, Database, LockName, LockType } from "@budibase/types" import { JobQueue, createQueue } from "../queue" import * as dbUtils from "../db" import { Duration, newid } from "../utils" -import { context, locks } from ".." +import { locks } from ".." 
let CACHE: BaseCache | null = null async function getCache() { @@ -36,26 +36,12 @@ export const docWritethroughProcessorQueue = createQueue( } ) -docWritethroughProcessorQueue.process(async message => { - const { cacheKeyPrefix, messageId } = message.data +class DocWritethroughProcessor { + init() { + docWritethroughProcessorQueue.process(async message => { + const { cacheKeyPrefix, messageId } = message.data - const cache = await getCache() - const latestMessageId = await cache.get( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID - ) - if (messageId !== latestMessageId) { - // Nothing to do, another message overrode it - return - } - - const lockResponse = await locks.doWithLock( - { - type: LockType.TRY_TWICE, - name: LockName.PERSIST_DOC_WRITETHROUGH, - resource: cacheKeyPrefix, - ttl: Duration.fromSeconds(60).toMs(), - }, - async () => { + const cache = await getCache() const latestMessageId = await cache.get( REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID ) @@ -64,56 +50,77 @@ docWritethroughProcessorQueue.process(async message => { return } - await persistToDb(cache, message.data) - console.log("DocWritethrough persisted", { data: message.data }) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_TWICE, + name: LockName.PERSIST_DOC_WRITETHROUGH, + resource: cacheKeyPrefix, + ttl: Duration.fromSeconds(60).toMs(), + }, + async () => { + const latestMessageId = await cache.get( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID + ) + if (messageId !== latestMessageId) { + // Nothing to do, another message overrode it + return + } - await cache.deleteIfValue( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID, - latestMessageId + await this.persistToDb(cache, message.data) + console.log("DocWritethrough persisted", { data: message.data }) + + await cache.deleteIfValue( + REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID, + latestMessageId + ) + } ) + + if (!lockResponse.executed) { + throw new Error(`Ignoring redlock conflict in write-through cache`) + } + 
}) + return this + } + + private async persistToDb( + cache: BaseCache, + { + dbName, + docId, + cacheKeyPrefix, + }: { + dbName: string + docId: string + cacheKeyPrefix: string + } + ) { + const db = dbUtils.getDB(dbName) + let doc: AnyDocument | undefined + try { + doc = await db.get(docId) + } catch { + doc = { _id: docId } } - ) - if (!lockResponse.executed) { - throw new Error(`Ignoring redlock conflict in write-through cache`) - } -}) + const keysToPersist = await cache.keys( + REDIS_KEYS(cacheKeyPrefix).DATA.GET_ALL + ) + for (const key of keysToPersist) { + const data = await cache.get(key, { useTenancy: false }) + doc[data.key] = data.value + } -export async function persistToDb( - cache: BaseCache, - { - dbName, - docId, - cacheKeyPrefix, - }: { - dbName: string - docId: string - cacheKeyPrefix: string - } -) { - const db = dbUtils.getDB(dbName) - let doc: AnyDocument | undefined - try { - doc = await db.get(docId) - } catch { - doc = { _id: docId } - } + await db.put(doc) - const keysToPersist = await cache.keys( - REDIS_KEYS(cacheKeyPrefix).DATA.GET_ALL - ) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - - await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) + for (const key of keysToPersist) { + await cache.delete(key, { useTenancy: false }) + } } } +export const processor = new DocWritethroughProcessor().init() + export class DocWritethrough { private db: Database private _docId: string diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 9bbcd6af44..b909f4624f 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,12 +1,11 @@ +import _ from "lodash" import { DBTestConfiguration, generator, structures } from "../../../tests" import { 
getDB } from "../../db" -import _ from "lodash" -import { - DocWritethrough, - docWritethroughProcessorQueue, -} from "../docWritethrough" +import { DocWritethrough, processor } from "../docWritethrough" + import InMemoryQueue from "../../queue/inMemoryQueue" +import { docWritethroughProcessorQueue } from "../docWritethrough" const WRITE_RATE_MS = 1000 @@ -240,12 +239,13 @@ describe("docWritethrough", () => { ) ) } - + const persistToDbSpy = jest.spyOn(processor as any, "persistToDb") const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") await config.doInTenant(async () => { await parallelPatch(5) expect(storeToCacheSpy).toBeCalledTimes(5) + expect(persistToDbSpy).not.toBeCalled() expect(await db.exists(documentId)).toBe(false) await travelForward(WRITE_RATE_MS) @@ -253,7 +253,7 @@ describe("docWritethrough", () => { await parallelPatch(40) expect(storeToCacheSpy).toBeCalledTimes(45) - + expect(persistToDbSpy).toBeCalledTimes(1) // Ideally we want to spy on persistToDb from ./docWritethrough, but due our barrel files configuration required quite of a complex setup. // We are relying on the document being stored only once (otherwise we would have _rev updated) expect(await db.get(documentId)).toEqual( From 4baadadaa8d2572617236ebb26d66cdbde88f611 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 12:22:20 +0100 Subject: [PATCH 143/174] Use pipeline instead of eval --- packages/backend-core/src/redis/redis.ts | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 8cfa3db5c1..6124f5f447 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -291,23 +291,16 @@ class RedisWrapper { return acc }, {} as Record) - const luaScript = ` - for i, key in ipairs(KEYS) do - redis.call('MSET', key, ARGV[i]) - ${ - expirySeconds !== null - ? 
`redis.call('EXPIRE', key, ARGV[#ARGV])` - : "" - } - end - ` - const keys = Object.keys(dataToStore) - const values = Object.values(dataToStore) + const pipeline = client.pipeline() + pipeline.mset(dataToStore) + if (expirySeconds !== null) { - values.push(expirySeconds) + for (const key of Object.keys(dataToStore)) { + pipeline.expire(key, expirySeconds) + } } - await client.eval(luaScript, keys.length, ...keys, ...values) + await pipeline.exec() } async getTTL(key: string) { From c198a5ae03164e2dff57281c43c1e8c3ec25a894 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 12:23:04 +0100 Subject: [PATCH 144/174] Update packages/backend-core/src/redis/redis.ts Co-authored-by: Sam Rose --- packages/backend-core/src/redis/redis.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 55d43e4f60..a5e1597c9f 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -335,7 +335,7 @@ class RedisWrapper { async increment(key: string) { const result = await this.getClient().incr(addDbPrefix(this._db, key)) if (isNaN(result)) { - throw new Error(`Redis ${key} does not contains a number`) + throw new Error(`Redis ${key} does not contain a number`) } return result } From 1f107041a108aeaf677da20659819bfe2d06ec03 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 6 Mar 2024 11:57:45 +0000 Subject: [PATCH 145/174] use vitest each --- .../shared-core/src/tests/filters.test.ts | 23 +++++++------------ 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts index 1f8f534f0d..de969562af 100644 --- a/packages/shared-core/src/tests/filters.test.ts +++ b/packages/shared-core/src/tests/filters.test.ts @@ -209,16 +209,19 @@ describe("runLuceneQuery", () => { } ) - it("should return matching results if allOr is true and 
only one filter matches", () => { + test.each([ + [false, []], + [true, [1, 2, 3]], + ])("should return %s if allOr is %s ", (allOr, expectedResult) => { const query = buildQuery({ - allOr: true, + allOr, oneOf: { staff_id: [10] }, contains: { description: ["box"] }, }) - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([ - 1, 2, 3, - ]) + expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual( + expectedResult + ) }) it("should return matching results if allOr is true and only one filter matches with different operands", () => { @@ -231,16 +234,6 @@ describe("runLuceneQuery", () => { expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2]) }) - it("should return nothing if allOr is false and only one filter matches", () => { - const query = buildQuery({ - allOr: false, - oneOf: { staff_id: [10] }, - contains: { description: ["box"] }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([]) - }) - it("should handle when a value is null or undefined", () => { const query = buildQuery({ allOr: true, From 5109477e526139202cb37392f2bba3347e4a82a5 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 13:39:27 +0100 Subject: [PATCH 146/174] Fix checks --- packages/backend-core/src/queue/inMemoryQueue.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 6c8107c7a4..985501bcbe 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -173,7 +173,7 @@ class InMemoryQueue implements Partial { async waitForCompletion() { do { await timeout(50) - } while (this.hasRunningJobs) + } while (this.hasRunningJobs()) } hasRunningJobs() { From 4ce85cde1afd34872bfe7c401d73cbf77651a660 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 14:07:39 +0100 Subject: [PATCH 147/174] Simplify, use only queues 
--- .../backend-core/src/cache/docWritethrough.ts | 154 ++++-------------- .../src/cache/tests/docWritethrough.spec.ts | 126 ++++++-------- 2 files changed, 79 insertions(+), 201 deletions(-) diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts index af3df11a9c..cee272cef6 100644 --- a/packages/backend-core/src/cache/docWritethrough.ts +++ b/packages/backend-core/src/cache/docWritethrough.ts @@ -1,100 +1,55 @@ import { AnyDocument, Database, LockName, LockType } from "@budibase/types" -import BaseCache from "./base" -import { getDocWritethroughClient } from "../redis/init" import { JobQueue, createQueue } from "../queue" import * as dbUtils from "../db" -import { Duration, newid } from "../utils" -import { locks } from ".." - -let CACHE: BaseCache | null = null -async function getCache() { - if (!CACHE) { - const client = await getDocWritethroughClient() - CACHE = new BaseCache(client) - } - return CACHE -} +import { string } from "yargs" +import { db } from ".." 
+import { locks } from "../redis" +import { Duration } from "../utils" interface ProcessDocMessage { dbName: string docId: string - cacheKeyPrefix: string - messageId: string + + data: Record } export const docWritethroughProcessorQueue = createQueue( - JobQueue.DOC_WRITETHROUGH_QUEUE, - { - jobOptions: { - attempts: 5, - backoff: { - type: "fixed", - delay: 1000, - }, - }, - } + JobQueue.DOC_WRITETHROUGH_QUEUE ) class DocWritethroughProcessor { init() { docWritethroughProcessorQueue.process(async message => { - const { cacheKeyPrefix, messageId } = message.data - - const cache = await getCache() - const latestMessageId = await cache.get( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID - ) - if (messageId !== latestMessageId) { - // Nothing to do, another message overrode it - return - } - - const lockResponse = await locks.doWithLock( + const result = await locks.doWithLock( { - type: LockType.TRY_TWICE, + type: LockType.DEFAULT, name: LockName.PERSIST_DOC_WRITETHROUGH, - resource: cacheKeyPrefix, + resource: `${message.data.dbName}:${message.data.docId}`, ttl: Duration.fromSeconds(60).toMs(), }, async () => { - const latestMessageId = await cache.get( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID - ) - if (messageId !== latestMessageId) { - // Nothing to do, another message overrode it - return - } - - await this.persistToDb(cache, message.data) - console.log("DocWritethrough persisted", { data: message.data }) - - await cache.deleteIfValue( - REDIS_KEYS(cacheKeyPrefix).LATEST_MESSAGE_ID, - latestMessageId - ) + await this.persistToDb(message.data) } ) - - if (!lockResponse.executed) { - throw new Error(`Ignoring redlock conflict in write-through cache`) + if (!result.executed) { + throw new Error( + `Error persisting docWritethrough message: ${message.id}` + ) } }) return this } - private async persistToDb( - cache: BaseCache, - { - dbName, - docId, - cacheKeyPrefix, - }: { - dbName: string - docId: string - cacheKeyPrefix: string - } - ) { + private async 
persistToDb({ + dbName, + docId, + data, + }: { + dbName: string + docId: string + data: Record + }) { const db = dbUtils.getDB(dbName) let doc: AnyDocument | undefined try { @@ -103,19 +58,8 @@ class DocWritethroughProcessor { doc = { _id: docId } } - const keysToPersist = await cache.keys( - REDIS_KEYS(cacheKeyPrefix).DATA.GET_ALL - ) - for (const key of keysToPersist) { - const data = await cache.get(key, { useTenancy: false }) - doc[data.key] = data.value - } - + doc = { ...doc, ...data } await db.put(doc) - - for (const key of keysToPersist) { - await cache.delete(key, { useTenancy: false }) - } } } @@ -124,15 +68,10 @@ export const processor = new DocWritethroughProcessor().init() export class DocWritethrough { private db: Database private _docId: string - private writeRateMs: number - private cacheKeyPrefix: string - - constructor(db: Database, docId: string, writeRateMs: number) { + constructor(db: Database, docId: string) { this.db = db this._docId = docId - this.writeRateMs = writeRateMs - this.cacheKeyPrefix = `${this.db.name}:${this.docId}` } get docId() { @@ -140,41 +79,10 @@ export class DocWritethrough { } async patch(data: Record) { - const cache = await getCache() - - await this.storeToCache(cache, data) - const messageId = newid() - await cache.store( - REDIS_KEYS(this.cacheKeyPrefix).LATEST_MESSAGE_ID, - messageId - ) - - docWritethroughProcessorQueue.add( - { - dbName: this.db.name, - docId: this.docId, - cacheKeyPrefix: this.cacheKeyPrefix, - messageId, - }, - { - delay: this.writeRateMs, - } - ) - } - - private async storeToCache(cache: BaseCache, data: Record) { - data = Object.entries(data).reduce((acc, [key, value]) => { - acc[REDIS_KEYS(this.cacheKeyPrefix).DATA.VALUE(key)] = { key, value } - return acc - }, {} as Record) - await cache.bulkStore(data, null) + await docWritethroughProcessorQueue.add({ + dbName: this.db.name, + docId: this.docId, + data, + }) } } - -const REDIS_KEYS = (prefix: string) => ({ - DATA: { - VALUE: (key: string) 
=> prefix + ":data:" + key, - GET_ALL: prefix + ":data:*", - }, - LATEST_MESSAGE_ID: prefix + ":info:latestMessageId", -}) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index b909f4624f..9beb25df93 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,3 +1,5 @@ +import tk from "timekeeper" + import _ from "lodash" import { DBTestConfiguration, generator, structures } from "../../../tests" import { getDB } from "../../db" @@ -7,24 +9,11 @@ import { DocWritethrough, processor } from "../docWritethrough" import InMemoryQueue from "../../queue/inMemoryQueue" import { docWritethroughProcessorQueue } from "../docWritethrough" -const WRITE_RATE_MS = 1000 - const initialTime = Date.now() -jest.useFakeTimers({ - now: initialTime, -}) - -function resetTime() { - jest.setSystemTime(initialTime) -} -async function travelForward(ms: number) { - await jest.advanceTimersByTimeAsync(ms) - +async function waitForQueueCompletion() { const queue: InMemoryQueue = docWritethroughProcessorQueue as never - while (queue.hasRunningJobs()) { - await jest.runOnlyPendingTimersAsync() - } + await queue.waitForCompletion() } describe("docWritethrough", () => { @@ -44,30 +33,28 @@ describe("docWritethrough", () => { } beforeEach(async () => { - resetTime() + jest.clearAllMocks() documentId = structures.uuid() - docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS) + docWritethrough = new DocWritethrough(db, documentId) }) - it("patching will not persist if timeout does not hit", async () => { + it("patching will not persist until the messages are persisted", async () => { await config.doInTenant(async () => { - await travelForward(WRITE_RATE_MS) await docWritethrough.patch(generatePatchObject(2)) await docWritethrough.patch(generatePatchObject(2)) - await travelForward(WRITE_RATE_MS - 1) 
expect(await db.exists(documentId)).toBe(false) }) }) - it("patching will persist if timeout hits", async () => { + it("patching will persist when the messages are persisted", async () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() // This will not be persisted const patch3 = generatePatchObject(3) @@ -77,9 +64,9 @@ describe("docWritethrough", () => { _id: documentId, ...patch1, ...patch2, - _rev: expect.stringMatching(/1-.+/), - createdAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), - updatedAt: new Date(initialTime + WRITE_RATE_MS).toISOString(), + _rev: expect.stringMatching(/2-.+/), + createdAt: new Date(initialTime).toISOString(), + updatedAt: new Date(initialTime).toISOString(), }) }) }) @@ -91,12 +78,12 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch1) await docWritethrough.patch(patch2) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -114,12 +101,13 @@ describe("docWritethrough", () => { const patch1 = generatePatchObject(2) const patch2 = generatePatchObject(2) await docWritethrough.patch(patch1) - await travelForward(WRITE_RATE_MS) const date1 = new Date() + await waitForQueueCompletion() await docWritethrough.patch(patch2) - await travelForward(WRITE_RATE_MS) + tk.travel(Date.now() + 100) const date2 = new Date() + await waitForQueueCompletion() expect(date1).not.toEqual(date2) expect(await db.get(documentId)).toEqual( @@ -135,7 +123,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const patch1 = generatePatchObject(2) await 
docWritethrough.patch(patch1) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const patch2 = generatePatchObject(1) await docWritethrough.patch(patch2) @@ -146,14 +134,14 @@ describe("docWritethrough", () => { }) ) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const patch3 = { ...generatePatchObject(3), [keyToOverride]: generator.word(), } await docWritethrough.patch(patch3) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( expect.objectContaining({ @@ -169,8 +157,7 @@ describe("docWritethrough", () => { await config.doInTenant(async () => { const secondDocWritethrough = new DocWritethrough( db, - structures.db.id(), - WRITE_RATE_MS + structures.db.id() ) const doc1Patch = generatePatchObject(2) @@ -178,13 +165,13 @@ describe("docWritethrough", () => { const doc2Patch = generatePatchObject(1) await secondDocWritethrough.patch(doc2Patch) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const doc1Patch2 = generatePatchObject(3) await docWritethrough.patch(doc1Patch2) const doc2Patch2 = generatePatchObject(3) await secondDocWritethrough.patch(doc2Patch2) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() expect(await db.get(docWritethrough.docId)).toEqual( expect.objectContaining({ @@ -207,9 +194,7 @@ describe("docWritethrough", () => { const initialPatch = generatePatchObject(5) await docWritethrough.patch(initialPatch) - await travelForward(WRITE_RATE_MS) - - await docWritethrough.patch({}) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( expect.objectContaining(initialPatch) @@ -217,10 +202,10 @@ describe("docWritethrough", () => { await db.remove(await db.get(documentId)) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() const extraPatch = generatePatchObject(5) await docWritethrough.patch(extraPatch) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() 
expect(await db.get(documentId)).toEqual( expect.objectContaining(extraPatch) @@ -231,59 +216,44 @@ describe("docWritethrough", () => { }) }) - it("concurrent calls will not cause multiple saves", async () => { + it("concurrent calls will not cause conflicts", async () => { async function parallelPatch(count: number) { - await Promise.all( - Array.from({ length: count }).map(() => - docWritethrough.patch(generatePatchObject(1)) - ) + const patches = Array.from({ length: count }).map(() => + generatePatchObject(1) ) + await Promise.all(patches.map(p => docWritethrough.patch(p))) + + return patches.reduce((acc, c) => { + acc = { ...acc, ...c } + return acc + }, {}) } - const persistToDbSpy = jest.spyOn(processor as any, "persistToDb") - const storeToCacheSpy = jest.spyOn(docWritethrough as any, "storeToCache") + const queueMessageSpy = jest.spyOn(docWritethroughProcessorQueue, "add") await config.doInTenant(async () => { - await parallelPatch(5) - expect(storeToCacheSpy).toBeCalledTimes(5) - expect(persistToDbSpy).not.toBeCalled() - expect(await db.exists(documentId)).toBe(false) + let patches = await parallelPatch(5) + expect(queueMessageSpy).toBeCalledTimes(5) - await travelForward(WRITE_RATE_MS) - - await parallelPatch(40) - - expect(storeToCacheSpy).toBeCalledTimes(45) - expect(persistToDbSpy).toBeCalledTimes(1) - // Ideally we want to spy on persistToDb from ./docWritethrough, but due our barrel files configuration required quite of a complex setup. 
- // We are relying on the document being stored only once (otherwise we would have _rev updated) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( - expect.objectContaining({ - _id: documentId, - _rev: expect.stringMatching(/1-.+/), - }) + expect.objectContaining(patches) ) - await parallelPatch(10) + patches = { ...patches, ...(await parallelPatch(40)) } + expect(queueMessageSpy).toBeCalledTimes(45) - expect(storeToCacheSpy).toBeCalledTimes(55) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( - expect.objectContaining({ - _id: documentId, - _rev: expect.stringMatching(/1-.+/), - }) + expect.objectContaining(patches) ) - await travelForward(WRITE_RATE_MS) + patches = { ...patches, ...(await parallelPatch(10)) } + expect(queueMessageSpy).toBeCalledTimes(55) - await parallelPatch(5) - await travelForward(WRITE_RATE_MS) + await waitForQueueCompletion() expect(await db.get(documentId)).toEqual( - expect.objectContaining({ - _id: documentId, - _rev: expect.stringMatching(/3-.+/), - }) + expect.objectContaining(patches) ) - expect(storeToCacheSpy).toBeCalledTimes(60) }) }) }) From a44faad046f17102fe68b9c4fffb72bace3318b6 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 6 Mar 2024 15:37:38 +0100 Subject: [PATCH 148/174] Add same key tests --- .../src/cache/tests/docWritethrough.spec.ts | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 9beb25df93..2b66b6cc21 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -256,5 +256,30 @@ describe("docWritethrough", () => { ) }) }) + + it("patches will execute in order", async () => { + let incrementalValue = 0 + const keyToOverride = generator.word() + async function incrementalPatches(count: number) { + for (let i = 0; i < count; 
i++) { + await docWritethrough.patch({ [keyToOverride]: incrementalValue++ }) + } + } + + await config.doInTenant(async () => { + await incrementalPatches(5) + + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ [keyToOverride]: 5 }) + ) + + await incrementalPatches(40) + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ [keyToOverride]: 45 }) + ) + }) + }) }) }) From 632b9a26f4313216c28458db46dc9334aea7e909 Mon Sep 17 00:00:00 2001 From: Peter Clement Date: Wed, 6 Mar 2024 14:42:30 +0000 Subject: [PATCH 149/174] remove log --- packages/shared-core/src/filters.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index 84b6076d56..d9fe533c88 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -12,7 +12,6 @@ import { import dayjs from "dayjs" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" import { deepGet } from "./helpers" -import test from "node:test" const HBS_REGEX = /{{([^{].*?)}}/g @@ -360,7 +359,6 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => { const oneOf = match( SearchQueryOperators.ONE_OF, (docValue: any, testValue: any) => { - console.log(testValue) if (typeof testValue === "string") { testValue = testValue.split(",") if (typeof docValue === "number") { From 10ac21525ba205ce7dc138426b545befa3b030a0 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 6 Mar 2024 14:58:34 +0000 Subject: [PATCH 150/174] Update submodules. 
---
 packages/account-portal | 2 +-
 packages/pro | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/account-portal b/packages/account-portal
index 4de0d98e2f..0c050591c2 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit 4de0d98e2f8d80ee7631dffe076063273812a441
+Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac
diff --git a/packages/pro b/packages/pro
index 60e47a8249..22a278da72 160000
--- a/packages/pro
+++ b/packages/pro
@@ -1 +1 @@
-Subproject commit 60e47a8249fd6291a6bc20fe3fe6776b11938fa1
+Subproject commit 22a278da720d92991dabdcd4cb6c96e7abe29781

From 1b387d359c669b9672ab1de10e2811bb72f26811 Mon Sep 17 00:00:00 2001
From: Conor Webb <126772285+ConorWebb96@users.noreply.github.com>
Date: Wed, 6 Mar 2024 16:32:00 +0000
Subject: [PATCH 151/174] Added icon to button component, reworked icon display
 code. (#12624)

* Added icons to buttons, removed svg code, added icon component code.
* Added icon functionality to button group component.
* Added gap to button manifest
* Added gap to button settings.
* Added gap setting to ButtonGroup component
* Added the ability to clear the selected icon.
* Added enter search to icon select * Removed use:styleable as its for the button * Moved non internal props up * Fixed broken DynamicFilter component icon * Updated DynamicFilter icon to a better suited one --------- Co-authored-by: melohagan <101575380+melohagan@users.noreply.github.com> --- .../controls/IconSelect/IconSelect.svelte | 17 ++++++++-- packages/client/manifest.json | 32 +++++++++++++++++++ .../client/src/components/app/Button.svelte | 24 +++++++------- .../src/components/app/ButtonGroup.svelte | 4 ++- .../app/dynamic-filter/DynamicFilter.svelte | 4 +-- 5 files changed, 65 insertions(+), 16 deletions(-) diff --git a/packages/builder/src/components/design/settings/controls/IconSelect/IconSelect.svelte b/packages/builder/src/components/design/settings/controls/IconSelect/IconSelect.svelte index 0c68c3c3e6..a28f5cfb3b 100644 --- a/packages/builder/src/components/design/settings/controls/IconSelect/IconSelect.svelte +++ b/packages/builder/src/components/design/settings/controls/IconSelect/IconSelect.svelte @@ -139,10 +139,22 @@ {/each}
-
- +
+ { + if (event.key === "Enter") { + searchForIcon() + } + }} + thin + placeholder="Search Icon" + />
+ {#if value} + + {/if}
@@ -239,6 +251,7 @@ flex-flow: row nowrap; width: 100%; padding-right: 15px; + gap: 10px; } .input-wrapper { width: 510px; diff --git a/packages/client/manifest.json b/packages/client/manifest.json index 43b75ebe26..10f9c5f412 100644 --- a/packages/client/manifest.json +++ b/packages/client/manifest.json @@ -525,6 +525,38 @@ "barTitle": "Disable button", "key": "disabled" }, + { + "type": "icon", + "label": "Icon", + "key": "icon" + }, + { + "type": "select", + "label": "Gap", + "key": "gap", + "showInBar": true, + "barStyle": "picker", + "dependsOn": "icon", + "options": [ + { + "label": "None", + "value": "N" + }, + { + "label": "Small", + "value": "S" + }, + { + "label": "Medium", + "value": "M" + }, + { + "label": "Large", + "value": "L" + } + ], + "defaultValue": "M" + }, { "type": "event", "label": "On click", diff --git a/packages/client/src/components/app/Button.svelte b/packages/client/src/components/app/Button.svelte index 361e64a983..c43face1bb 100644 --- a/packages/client/src/components/app/Button.svelte +++ b/packages/client/src/components/app/Button.svelte @@ -13,9 +13,10 @@ export let size = "M" export let type = "cta" export let quiet = false + export let icon = null + export let gap = "M" // For internal use only for now - not defined in the manifest - export let icon = null export let active = false const handleOnClick = async () => { @@ -47,7 +48,7 @@ {#key $component.editing} @@ -92,4 +85,13 @@ .active { color: var(--spectrum-global-color-blue-600); } + .gap-S { + gap: 8px; + } + .gap-M { + gap: 16px; + } + .gap-L { + gap: 32px; + } diff --git a/packages/client/src/components/app/ButtonGroup.svelte b/packages/client/src/components/app/ButtonGroup.svelte index 3ee703e253..2cf6b3db7d 100644 --- a/packages/client/src/components/app/ButtonGroup.svelte +++ b/packages/client/src/components/app/ButtonGroup.svelte @@ -20,7 +20,7 @@ wrap: true, }} > - {#each buttons as { text, type, quiet, disabled, onClick, size }} + {#each buttons as { text, type, 
quiet, disabled, onClick, size, icon, gap }} diff --git a/packages/client/src/components/app/dynamic-filter/DynamicFilter.svelte b/packages/client/src/components/app/dynamic-filter/DynamicFilter.svelte index 199a6122ab..549574e89b 100644 --- a/packages/client/src/components/app/dynamic-filter/DynamicFilter.svelte +++ b/packages/client/src/components/app/dynamic-filter/DynamicFilter.svelte @@ -92,9 +92,9 @@ {#if schemaLoaded}