diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml
index 6e04ca6f67..cb713c93ac 100644
--- a/.github/workflows/budibase_ci.yml
+++ b/.github/workflows/budibase_ci.yml
@@ -99,11 +99,6 @@ jobs:
         else
           yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
         fi
-      - uses: codecov/codecov-action@v3
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
-          name: codecov-umbrella
-          verbose: true
 
   test-worker:
     runs-on: ubuntu-latest
@@ -129,12 +124,6 @@
           yarn test --scope=@budibase/worker
         fi
 
-      - uses: codecov/codecov-action@v3
-        with:
-          token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
-          name: codecov-umbrella
-          verbose: true
-
   test-server:
     runs-on: ubuntu-latest
     steps:
@@ -159,12 +148,6 @@
           yarn test --scope=@budibase/server
         fi
 
-      - uses: codecov/codecov-action@v3
-        with:
-          token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
-          name: codecov-umbrella
-          verbose: true
-
   test-pro:
     runs-on: ubuntu-latest
     if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 0000000000..69d69ab7d0
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1,3 @@
+/packages/server @Budibase/backend
+/packages/worker @Budibase/backend
+/packages/backend-core @Budibase/backend
diff --git a/charts/budibase/templates/alb-ingress.yaml b/charts/budibase/templates/alb-ingress.yaml
index 01d7fe0bf0..89b4e9e2cb 100644
--- a/charts/budibase/templates/alb-ingress.yaml
+++ b/charts/budibase/templates/alb-ingress.yaml
@@ -7,8 +7,8 @@ metadata:
     kubernetes.io/ingress.class: alb
     alb.ingress.kubernetes.io/scheme: internet-facing
     alb.ingress.kubernetes.io/target-type: ip
-    alb.ingress.kubernetes.io/success-codes: 200,301
-    alb.ingress.kubernetes.io/healthcheck-path: /
+    alb.ingress.kubernetes.io/success-codes: '200'
+    alb.ingress.kubernetes.io/healthcheck-path: '/health'
 {{- if .Values.awsAlbIngress.certificateArn }}
     alb.ingress.kubernetes.io/actions.ssl-redirect: '{"Type": "redirect", "RedirectConfig": { "Protocol": "HTTPS", "Port": "443", "StatusCode": "HTTP_301"}}'
     alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS":443}]'
diff --git a/lerna.json b/lerna.json
index deb273884d..e6231eb660 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
 {
-  "version": "2.13.20",
+  "version": "2.13.30",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
diff --git a/packages/backend-core/CODEOWNERS b/packages/backend-core/CODEOWNERS
deleted file mode 100644
index 84313fb9cf..0000000000
--- a/packages/backend-core/CODEOWNERS
+++ /dev/null
@@ -1 +0,0 @@
-* @Budibase/backend
\ No newline at end of file
diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json
index dc8d71b52c..306aabfe6a 100644
--- a/packages/backend-core/package.json
+++ b/packages/backend-core/package.json
@@ -72,7 +72,7 @@
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
     "chance": "1.1.8",
-    "ioredis-mock": "8.7.0",
+    "ioredis-mock": "8.9.0",
     "jest": "29.6.2",
     "jest-environment-node": "29.6.2",
     "jest-serial-runner": "1.2.1",
diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts
index 266f1fe989..a7b2e2b4c6 100644
--- a/packages/backend-core/src/redis/redlockImpl.ts
+++ b/packages/backend-core/src/redis/redlockImpl.ts
@@ -2,8 +2,9 @@ import Redlock from "redlock"
 import { getLockClient } from "./init"
 import { LockOptions, LockType } from "@budibase/types"
from "@budibase/types" import * as context from "../context" -import env from "../environment" import { logWarn } from "../logging" +import { utils } from "@budibase/shared-core" +import { Duration } from "../utils" async function getClient( type: LockType, @@ -12,9 +13,7 @@ async function getClient( if (type === LockType.CUSTOM) { return newRedlock(opts) } - if (env.isTest() && type !== LockType.TRY_ONCE) { - return newRedlock(OPTIONS.TEST) - } + switch (type) { case LockType.TRY_ONCE: { return newRedlock(OPTIONS.TRY_ONCE) @@ -28,13 +27,16 @@ async function getClient( case LockType.DELAY_500: { return newRedlock(OPTIONS.DELAY_500) } + case LockType.AUTO_EXTEND: { + return newRedlock(OPTIONS.AUTO_EXTEND) + } default: { - throw new Error(`Could not get redlock client: ${type}`) + throw utils.unreachable(type) } } } -const OPTIONS = { +const OPTIONS: Record = { TRY_ONCE: { // immediately throws an error if the lock is already held retryCount: 0, @@ -42,11 +44,6 @@ const OPTIONS = { TRY_TWICE: { retryCount: 1, }, - TEST: { - // higher retry count in unit tests - // due to high contention. - retryCount: 100, - }, DEFAULT: { // the expected clock drift; for more details // see http://redis.io/topics/distlock @@ -67,10 +64,14 @@ const OPTIONS = { DELAY_500: { retryDelay: 500, }, + CUSTOM: {}, + AUTO_EXTEND: { + retryCount: -1, + }, } export async function newRedlock(opts: Redlock.Options = {}) { - let options = { ...OPTIONS.DEFAULT, ...opts } + const options = { ...OPTIONS.DEFAULT, ...opts } const redisWrapper = await getLockClient() const client = redisWrapper.getClient() return new Redlock([client], options) @@ -100,17 +101,36 @@ function getLockName(opts: LockOptions) { return name } +export const AUTO_EXTEND_POLLING_MS = Duration.fromSeconds(10).toMs() + export async function doWithLock( opts: LockOptions, task: () => Promise ): Promise> { const redlock = await getClient(opts.type, opts.customOptions) - let lock + let lock: Redlock.Lock | undefined + let timeout: NodeJS.Timeout | undefined try { const name = getLockName(opts) + const ttl = + opts.type === LockType.AUTO_EXTEND ? 
+      opts.type === LockType.AUTO_EXTEND ? AUTO_EXTEND_POLLING_MS : opts.ttl
+
     // create the lock
-    lock = await redlock.lock(name, opts.ttl)
+    lock = await redlock.lock(name, ttl)
+
+    if (opts.type === LockType.AUTO_EXTEND) {
+      // We keep extending the lock while the task is running
+      const extendInIntervals = (): void => {
+        timeout = setTimeout(async () => {
+          lock = await lock!.extend(ttl, () => opts.onExtend && opts.onExtend())
+
+          extendInIntervals()
+        }, ttl / 2)
+      }
+
+      extendInIntervals()
+    }
 
     // perform locked task
     // need to await to ensure completion before unlocking
@@ -131,8 +151,7 @@
       throw e
     }
   } finally {
-    if (lock) {
-      await lock.unlock()
-    }
+    clearTimeout(timeout)
+    await lock?.unlock()
   }
 }
diff --git a/packages/backend-core/src/redis/tests/redlockImpl.spec.ts b/packages/backend-core/src/redis/tests/redlockImpl.spec.ts
new file mode 100644
index 0000000000..a1e83d8e6c
--- /dev/null
+++ b/packages/backend-core/src/redis/tests/redlockImpl.spec.ts
@@ -0,0 +1,105 @@
+import { LockName, LockType, LockOptions } from "@budibase/types"
+import { AUTO_EXTEND_POLLING_MS, doWithLock } from "../redlockImpl"
+import { DBTestConfiguration, generator } from "../../../tests"
+
+describe("redlockImpl", () => {
+  beforeEach(() => {
+    jest.useFakeTimers()
+  })
+
+  describe("doWithLock", () => {
+    const config = new DBTestConfiguration()
+    const lockTtl = AUTO_EXTEND_POLLING_MS
+
+    function runLockWithExecutionTime({
+      opts,
+      task,
+      executionTimeMs,
+    }: {
+      opts: LockOptions
+      task: () => Promise<string>
+      executionTimeMs: number
+    }) {
+      return config.doInTenant(() =>
+        doWithLock(opts, async () => {
+          // Run in multiple intervals until hitting the expected time
+          const interval = lockTtl / 10
+          for (let i = executionTimeMs; i > 0; i -= interval) {
+            await jest.advanceTimersByTimeAsync(interval)
+          }
+          return task()
+        })
+      )
+    }
+
+    it.each(Object.values(LockType))(
+      "should return the task value and release the lock",
+      async (lockType: LockType) => {
+        const expectedResult = generator.guid()
+        const mockTask = jest.fn().mockResolvedValue(expectedResult)
+
+        const opts: LockOptions = {
+          name: LockName.PERSIST_WRITETHROUGH,
+          type: lockType,
+          ttl: lockTtl,
+        }
+
+        const result = await runLockWithExecutionTime({
+          opts,
+          task: mockTask,
+          executionTimeMs: 0,
+        })
+
+        expect(result.executed).toBe(true)
+        expect(result.executed && result.result).toBe(expectedResult)
+        expect(mockTask).toHaveBeenCalledTimes(1)
+      }
+    )
+
+    it("should extend when type is autoextend", async () => {
+      const expectedResult = generator.guid()
+      const mockTask = jest.fn().mockResolvedValue(expectedResult)
+      const mockOnExtend = jest.fn()
+
+      const opts: LockOptions = {
+        name: LockName.PERSIST_WRITETHROUGH,
+        type: LockType.AUTO_EXTEND,
+        onExtend: mockOnExtend,
+      }
+
+      const result = await runLockWithExecutionTime({
+        opts,
+        task: mockTask,
+        executionTimeMs: lockTtl * 2.5,
+      })
+
+      expect(result.executed).toBe(true)
+      expect(result.executed && result.result).toBe(expectedResult)
+      expect(mockTask).toHaveBeenCalledTimes(1)
+      expect(mockOnExtend).toHaveBeenCalledTimes(5)
+    })
+
+    it.each(Object.values(LockType).filter(t => t !== LockType.AUTO_EXTEND))(
+      "should timeout when type is %s",
+      async (lockType: LockType) => {
+        const mockTask = jest.fn().mockResolvedValue("mockResult")
+
+        const opts: LockOptions = {
+          name: LockName.PERSIST_WRITETHROUGH,
+          type: lockType,
+          ttl: lockTtl,
+        }
+
+        await expect(
+          runLockWithExecutionTime({
+            opts,
+            task: mockTask,
+            executionTimeMs: lockTtl * 2,
+          })
+        ).rejects.toThrowError(
+          `Unable to fully release the lock on resource "lock:${config.tenantId}_persist_writethrough".`
+        )
+      }
+    )
+  })
+})
diff --git a/packages/builder/src/components/automation/SetupPanel/TableSelector.svelte b/packages/builder/src/components/automation/SetupPanel/TableSelector.svelte
index 3434219384..1645ded66b 100644
--- a/packages/builder/src/components/automation/SetupPanel/TableSelector.svelte
+++ b/packages/builder/src/components/automation/SetupPanel/TableSelector.svelte
@@ -22,7 +22,7 @@
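A minimal usage sketch of the new `LockType.AUTO_EXTEND` mode introduced in the redlockImpl.ts hunks above. The import path for `doWithLock`, the chosen `LockName`, and `someLongMigration` are illustrative assumptions, not part of this PR; the auto-extension every half-TTL, the optional `onExtend` callback, and the `{ executed, result }` return shape follow the diff.

```ts
import { LockType, LockName } from "@budibase/types"
// Assumed import path; backend-core's actual export surface may differ.
import { doWithLock } from "@budibase/backend-core/redis"

// Hypothetical stand-in for a long-running job that outlives a fixed lock TTL.
async function someLongMigration(): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, 60_000))
}

async function run() {
  // With AUTO_EXTEND, the lock is taken with AUTO_EXTEND_POLLING_MS (10s) as
  // its TTL and re-extended every ttl / 2 until the task settles, so no
  // explicit ttl is passed here.
  const outcome = await doWithLock(
    {
      type: LockType.AUTO_EXTEND,
      name: LockName.PERSIST_WRITETHROUGH, // any LockName; picked for illustration
      onExtend: () => console.log("lock extended"),
    },
    async () => {
      await someLongMigration()
      return "done"
    }
  )

  // doWithLock resolves to a union: { executed: true, result } on success.
  if (outcome.executed) {
    console.log(outcome.result) // "done"
  }
}
```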