diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml
index fd4d8cf7c8..7d09451614 100644
--- a/.github/workflows/budibase_ci.yml
+++ b/.github/workflows/budibase_ci.yml
@@ -92,8 +92,6 @@ jobs:
 
   test-libraries:
     runs-on: ubuntu-latest
-    env:
-      REUSE_CONTAINERS: true
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -150,8 +148,6 @@ jobs:
 
   test-server:
     runs-on: budi-tubby-tornado-quad-core-150gb
-    env:
-      REUSE_CONTAINERS: true
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
diff --git a/globalSetup.ts b/globalSetup.ts
index 115796c395..dd1a7dbaa0 100644
--- a/globalSetup.ts
+++ b/globalSetup.ts
@@ -1,16 +1,49 @@
-import { GenericContainer, Wait } from "testcontainers"
+import {
+  GenericContainer,
+  Wait,
+  getContainerRuntimeClient,
+} from "testcontainers"
+import { ContainerInfo } from "dockerode"
 import path from "path"
 import lockfile from "proper-lockfile"
 
+async function getBudibaseContainers() {
+  const client = await getContainerRuntimeClient()
+  const containers = await client.container.list()
+  return containers.filter(
+    container =>
+      container.Labels["com.budibase"] === "true" &&
+      container.Labels["org.testcontainers"] === "true"
+  )
+}
+
+async function killContainers(containers: ContainerInfo[]) {
+  const client = await getContainerRuntimeClient()
+  for (const container of containers) {
+    const c = client.container.getById(container.Id)
+    await c.kill()
+    await c.remove()
+  }
+}
+
 export default async function setup() {
   const lockPath = path.resolve(__dirname, "globalSetup.ts")
-  if (process.env.REUSE_CONTAINERS) {
-    // If you run multiple tests at the same time, it's possible for the CouchDB
-    // shared container to get started multiple times despite having an
-    // identical reuse hash. To avoid that, we do a filesystem-based lock so
-    // that only one globalSetup.ts is running at a time.
-    lockfile.lockSync(lockPath)
-  }
+  // If you run multiple tests at the same time, it's possible for the CouchDB
+  // shared container to get started multiple times despite having an
+  // identical reuse hash. To avoid that, we do a filesystem-based lock so
+  // that only one globalSetup.ts is running at a time.
+  lockfile.lockSync(lockPath)
+
+  // Remove any containers that are older than 24 hours. This is to prevent
+  // containers from filling their volumes or accruing any other problems from
+  // being left up for very long periods of time.
+  const threshold = new Date(Date.now() - 1000 * 60 * 60 * 24)
+  const containers = (await getBudibaseContainers()).filter(container => {
+    const created = new Date(container.Created * 1000)
+    return created < threshold
+  })
+
+  await killContainers(containers)
 
   try {
     let couchdb = new GenericContainer("budibase/couchdb:v3.2.1-sqs")
@@ -28,20 +61,16 @@ export default async function setup() {
           target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
         },
       ])
+      .withLabels({ "com.budibase": "true" })
+      .withReuse()
       .withWaitStrategy(
         Wait.forSuccessfulCommand(
           "curl http://budibase:budibase@localhost:5984/_up"
         ).withStartupTimeout(20000)
       )
 
-    if (process.env.REUSE_CONTAINERS) {
-      couchdb = couchdb.withReuse()
-    }
-
     await couchdb.start()
   } finally {
-    if (process.env.REUSE_CONTAINERS) {
-      lockfile.unlockSync(lockPath)
-    }
+    lockfile.unlockSync(lockPath)
   }
 }
diff --git a/package.json b/package.json
index fcd6989b6c..98524e0ee4 100644
--- a/package.json
+++ b/package.json
@@ -59,7 +59,8 @@
     "dev:all": "yarn run kill-all && lerna run --stream dev",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
     "dev:docker": "./scripts/devDocker.sh",
-    "test": "REUSE_CONTAINERS=1 lerna run --concurrency 1 --stream test --stream",
+    "test": "lerna run --concurrency 1 --stream test --stream",
+    "test:containers:kill": "./scripts/killTestcontainers.sh",
     "lint:eslint": "eslint packages --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
     "lint": "yarn run lint:eslint && yarn run lint:prettier",
diff --git a/packages/backend-core/tests/core/utilities/testContainerUtils.ts b/packages/backend-core/tests/core/utilities/testContainerUtils.ts
index d0dd2c9b4d..32841e4c3a 100644
--- a/packages/backend-core/tests/core/utilities/testContainerUtils.ts
+++ b/packages/backend-core/tests/core/utilities/testContainerUtils.ts
@@ -28,7 +28,11 @@ function getTestcontainers(): ContainerInfo[] {
     .split("\n")
     .filter(x => x.length > 0)
     .map(x => JSON.parse(x) as ContainerInfo)
-    .filter(x => x.Labels.includes("org.testcontainers=true"))
+    .filter(
+      x =>
+        x.Labels.includes("org.testcontainers=true") &&
+        x.Labels.includes("com.budibase=true")
+    )
 }
 
 export function getContainerByImage(image: string) {
diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts
index 5034b5a8db..a54d0ac1a7 100644
--- a/packages/server/src/integrations/tests/utils/index.ts
+++ b/packages/server/src/integrations/tests/utils/index.ts
@@ -65,9 +65,7 @@ export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
 }
 
 export async function startContainer(container: GenericContainer) {
-  if (process.env.REUSE_CONTAINERS) {
-    container = container.withReuse()
-  }
+  container = container.withReuse().withLabels({ "com.budibase": "true" })
 
   const startedContainer = await container.start()
 
diff --git a/scripts/killTestcontainers.sh b/scripts/killTestcontainers.sh
new file mode 100755
index 0000000000..773d01a962
--- /dev/null
+++ b/scripts/killTestcontainers.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# Find all Docker containers with the label "org.testcontainers=true"
+containers=$(docker ps -q -f "label=org.testcontainers=true")
+
+# Check if there are any containers to stop
+if [ -z "$containers" ]; then
+    echo "No containers with label 'org.testcontainers=true' found."
+else
+    # Stop the containers
+    echo "Stopping containers..."
+    docker stop $containers
+
+    # Remove the containers
+    echo "Removing containers..."
+    docker rm $containers
+
+    echo "Containers have been stopped and removed."
+fi
\ No newline at end of file