diff --git a/.github/workflows/release-master.yml b/.github/workflows/release-master.yml
index 9ab8530341..df25182cd6 100644
--- a/.github/workflows/release-master.yml
+++ b/.github/workflows/release-master.yml
@@ -36,6 +36,7 @@ jobs:
- uses: actions/setup-node@v1
with:
node-version: 18.x
+ cache: yarn
- run: yarn install --frozen-lockfile
- name: Update versions
@@ -63,14 +64,64 @@ jobs:
echo "Using tag $version"
echo "version=$version" >> "$GITHUB_OUTPUT"
- - name: Build/release Docker images
+ - name: Setup Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v1
+
+ - name: Docker login
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
- yarn build:docker
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
- BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}
+
+ - name: Build worker docker
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ push: true
+ platforms: linux/amd64,linux/arm64
+ build-args: |
+ BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
+ tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+ file: ./packages/worker/Dockerfile.v2
+ cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+ cache-to: type=inline
+ env:
+ IMAGE_NAME: budibase/worker
+ IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
+ BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
+
+ - name: Build server docker
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ push: true
+ platforms: linux/amd64,linux/arm64
+ build-args: |
+ BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
+ tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+ file: ./packages/server/Dockerfile.v2
+ cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+ cache-to: type=inline
+ env:
+ IMAGE_NAME: budibase/apps
+ IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
+ BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
+
+ - name: Build proxy docker
+ uses: docker/build-push-action@v5
+ with:
+ context: ./hosting/proxy
+ push: true
+ platforms: linux/amd64,linux/arm64
+ tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+ file: ./hosting/proxy/Dockerfile
+ cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+ cache-to: type=inline
+ env:
+ IMAGE_NAME: budibase/proxy
+ IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
release-helm-chart:
needs: [release-images]
diff --git a/.github/workflows/release-singleimage.yml b/.github/workflows/release-singleimage.yml
index f7f87f6e4c..4d35916f4d 100644
--- a/.github/workflows/release-singleimage.yml
+++ b/.github/workflows/release-singleimage.yml
@@ -67,7 +67,7 @@ jobs:
push: true
platforms: linux/amd64,linux/arm64
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
- file: ./hosting/single/Dockerfile
+ file: ./hosting/single/Dockerfile.v2
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
@@ -76,4 +76,4 @@ jobs:
platforms: linux/amd64
build-args: TARGETBUILD=aas
tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
- file: ./hosting/single/Dockerfile
+ file: ./hosting/single/Dockerfile.v2
diff --git a/README.md b/README.md
index 9deb16cd4f..7827d4e48a 100644
--- a/README.md
+++ b/README.md
@@ -126,13 +126,6 @@ You can learn more about the Budibase API at the following places:
- [Build an app with Budibase and Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/)
-
-
-
-
-
-
-
## 🏁 Get started
Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean.
diff --git a/hosting/scripts/linux/release-to-docker-hub.sh b/hosting/scripts/linux/release-to-docker-hub.sh
deleted file mode 100755
index 599a10f914..0000000000
--- a/hosting/scripts/linux/release-to-docker-hub.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-tag=$1
-
-if [[ ! "$tag" ]]; then
- echo "No tag present. You must pass a tag to this script"
- exit 1
-fi
-
-echo "Tagging images with tag: $tag"
-
-docker tag proxy-service budibase/proxy:$tag
-docker tag app-service budibase/apps:$tag
-docker tag worker-service budibase/worker:$tag
-
-docker push --all-tags budibase/apps
-docker push --all-tags budibase/worker
-docker push --all-tags budibase/proxy
diff --git a/lerna.json b/lerna.json
index 78260dcc42..7ce2f17f89 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "2.11.45",
+ "version": "2.12.1",
"npmClient": "yarn",
"packages": ["packages/*", "packages/account-portal/packages/*"],
"useNx": true,
diff --git a/package.json b/package.json
index 8c10a69ee5..e9e17223f0 100644
--- a/package.json
+++ b/package.json
@@ -55,10 +55,6 @@
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"build:specs": "lerna run --stream specs",
- "build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
- "build:docker:proxy": "docker build hosting/proxy -t proxy-service",
- "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
- "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
diff --git a/packages/account-portal b/packages/account-portal
index 9ab3f75289..1ca7329ddc 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit 9ab3f75289b77e92cb9b5ebdefcc07c1608532b5
+Subproject commit 1ca7329ddcb5b1e3a66fdbc6803579ff1270c586
diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts
index e64c116663..c331d791a6 100644
--- a/packages/backend-core/src/cache/writethrough.ts
+++ b/packages/backend-core/src/cache/writethrough.ts
@@ -119,8 +119,8 @@ export class Writethrough {
this.writeRateMs = writeRateMs
}
- async put(doc: any) {
- return put(this.db, doc, this.writeRateMs)
+ async put(doc: any, writeRateMs: number = this.writeRateMs) {
+ return put(this.db, doc, writeRateMs)
}
async get(id: string) {
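The `put` change above adds a per-call override of the write interval. A minimal usage sketch, assuming a `Database` handle and a document are already available (helper name, values, and the import path are illustrative, not part of this diff):

```typescript
import { Database, Document } from "@budibase/types"
import { Writethrough } from "../cache/writethrough" // import path assumed

// Illustration only: cache a doc with the default interval, then force a flush.
async function cacheExample(db: Database, doc: Document) {
  const cache = new Writethrough(db, 10000) // instance default: write at most every 10s

  await cache.put(doc)    // uses the instance-level writeRateMs (10s)
  await cache.put(doc, 0) // new per-call override: write through immediately
}
```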
diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts
index b05cf79c8c..0d33031de5 100644
--- a/packages/backend-core/src/security/roles.ts
+++ b/packages/backend-core/src/security/roles.ts
@@ -122,7 +122,9 @@ export async function roleToNumber(id?: string) {
if (isBuiltin(id)) {
return builtinRoleToNumber(id)
}
- const hierarchy = (await getUserRoleHierarchy(id)) as RoleDoc[]
+ const hierarchy = (await getUserRoleHierarchy(id, {
+ defaultPublic: true,
+ })) as RoleDoc[]
for (let role of hierarchy) {
if (isBuiltin(role?.inherits)) {
return builtinRoleToNumber(role.inherits) + 1
@@ -192,12 +194,15 @@ export async function getRole(
/**
* Simple function to get all the roles based on the top level user role ID.
*/
-async function getAllUserRoles(userRoleId?: string): Promise<RoleDoc[]> {
+async function getAllUserRoles(
+ userRoleId?: string,
+ opts?: { defaultPublic?: boolean }
+): Promise<RoleDoc[]> {
// admins have access to all roles
if (userRoleId === BUILTIN_IDS.ADMIN) {
return getAllRoles()
}
- let currentRole = await getRole(userRoleId)
+ let currentRole = await getRole(userRoleId, opts)
let roles = currentRole ? [currentRole] : []
let roleIds = [userRoleId]
// get all the inherited roles
@@ -226,12 +231,16 @@ export async function getUserRoleIdHierarchy(
* Returns an ordered array of the user's inherited role IDs, this can be used
* to determine if a user can access something that requires a specific role.
* @param userRoleId The user's role ID, this can be found in their access token.
+ * @param opts optional - pass { defaultPublic: true } to fall back to the public role if none is found.
* @returns returns an ordered array of the roles, with the first being their
* highest level of access and the last being the lowest level.
*/
-export async function getUserRoleHierarchy(userRoleId?: string) {
+export async function getUserRoleHierarchy(
+ userRoleId?: string,
+ opts?: { defaultPublic?: boolean }
+) {
// special case, if they don't have a role then they are a public user
- return getAllUserRoles(userRoleId)
+ return getAllUserRoles(userRoleId, opts)
}
// this function checks that the provided permissions are in an array format
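For context, a hedged sketch of what the new `defaultPublic` option changes, assuming `getRole` falls back to the built-in public role when the requested role document is missing (that fallback lives outside this diff, and the import path is assumed):

```typescript
import { getUserRoleHierarchy, roleToNumber } from "../security/roles" // path assumed

// Illustration only: a dangling or deleted custom role no longer breaks the walk.
async function resolveAccess(roleId: string) {
  // With defaultPublic the hierarchy bottoms out at the public role instead of erroring.
  const hierarchy = await getUserRoleHierarchy(roleId, { defaultPublic: true })

  // roleToNumber passes defaultPublic through, so a screen referencing a removed
  // role resolves to the public access level rather than failing outright.
  const accessLevel = await roleToNumber(roleId)

  return { hierarchy, accessLevel }
}
```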
diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts
index a2539e836e..daa09bee6f 100644
--- a/packages/backend-core/src/users/db.ts
+++ b/packages/backend-core/src/users/db.ts
@@ -25,12 +25,17 @@ import {
import {
getAccountHolderFromUserIds,
isAdmin,
+ isCreator,
validateUniqueUser,
} from "./utils"
import { searchExistingEmails } from "./lookup"
import { hash } from "../utils"
-type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
+type QuotaUpdateFn = (
+  change: number,
+  creatorsChange: number,
+  cb?: () => Promise<any>
+) => Promise<any>
type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise
type FeatureFn = () => Promise
type GroupGetFn = (ids: string[]) => Promise
@@ -245,7 +250,8 @@ export class UserDB {
}
const change = dbUser ? 0 : 1 // no change if there is existing user
- return UserDB.quotas.addUsers(change, async () => {
+ const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
+ return UserDB.quotas.addUsers(change, creatorsChange, async () => {
await validateUniqueUser(email, tenantId)
let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
@@ -307,6 +313,7 @@ export class UserDB {
let usersToSave: any[] = []
let newUsers: any[] = []
+ let newCreators: any[] = []
const emails = newUsersRequested.map((user: User) => user.email)
const existingEmails = await searchExistingEmails(emails)
@@ -327,59 +334,66 @@ export class UserDB {
}
newUser.userGroups = groups
newUsers.push(newUser)
+ if (isCreator(newUser)) {
+ newCreators.push(newUser)
+ }
}
const account = await accountSdk.getAccountByTenantId(tenantId)
- return UserDB.quotas.addUsers(newUsers.length, async () => {
- // create the promises array that will be called by bulkDocs
- newUsers.forEach((user: any) => {
- usersToSave.push(
- UserDB.buildUser(
- user,
- {
- hashPassword: true,
- requirePassword: user.requirePassword,
- },
- tenantId,
- undefined, // no dbUser
- account
+ return UserDB.quotas.addUsers(
+ newUsers.length,
+ newCreators.length,
+ async () => {
+ // create the promises array that will be called by bulkDocs
+ newUsers.forEach((user: any) => {
+ usersToSave.push(
+ UserDB.buildUser(
+ user,
+ {
+ hashPassword: true,
+ requirePassword: user.requirePassword,
+ },
+ tenantId,
+ undefined, // no dbUser
+ account
+ )
)
- )
- })
+ })
- const usersToBulkSave = await Promise.all(usersToSave)
- await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
+ const usersToBulkSave = await Promise.all(usersToSave)
+ await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
- // Post-processing of bulk added users, e.g. events and cache operations
- for (const user of usersToBulkSave) {
- // TODO: Refactor to bulk insert users into the info db
- // instead of relying on looping tenant creation
- await platform.users.addUser(tenantId, user._id, user.email)
- await eventHelpers.handleSaveEvents(user, undefined)
- }
+ // Post-processing of bulk added users, e.g. events and cache operations
+ for (const user of usersToBulkSave) {
+ // TODO: Refactor to bulk insert users into the info db
+ // instead of relying on looping tenant creation
+ await platform.users.addUser(tenantId, user._id, user.email)
+ await eventHelpers.handleSaveEvents(user, undefined)
+ }
+
+ const saved = usersToBulkSave.map(user => {
+ return {
+ _id: user._id,
+ email: user.email,
+ }
+ })
+
+ // now update the groups
+ if (Array.isArray(saved) && groups) {
+ const groupPromises = []
+ const createdUserIds = saved.map(user => user._id)
+ for (let groupId of groups) {
+ groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
+ }
+ await Promise.all(groupPromises)
+ }
- const saved = usersToBulkSave.map(user => {
return {
- _id: user._id,
- email: user.email,
+ successful: saved,
+ unsuccessful,
}
- })
-
- // now update the groups
- if (Array.isArray(saved) && groups) {
- const groupPromises = []
- const createdUserIds = saved.map(user => user._id)
- for (let groupId of groups) {
- groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
- }
- await Promise.all(groupPromises)
}
-
- return {
- successful: saved,
- unsuccessful,
- }
- })
+ )
}
static async bulkDelete(userIds: string[]): Promise {
@@ -419,11 +433,12 @@ export class UserDB {
_deleted: true,
}))
const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
+ const creatorsToDelete = usersToDelete.filter(isCreator)
- await UserDB.quotas.removeUsers(toDelete.length)
for (let user of usersToDelete) {
await bulkDeleteProcessing(user)
}
+ await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
// Build Response
// index users by id
@@ -472,7 +487,8 @@ export class UserDB {
await db.remove(userId, dbUser._rev)
- await UserDB.quotas.removeUsers(1)
+ const creatorsToDelete = isCreator(dbUser) ? 1 : 0
+ await UserDB.quotas.removeUsers(1, creatorsToDelete)
await eventHelpers.handleDeleteEvents(dbUser)
await cache.user.invalidateUser(userId)
await sessions.invalidateSessions(userId, { reason: "deletion" })
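A condensed sketch of the creator-seat accounting the quota calls above now perform. The `isCreator` stand-in below is simplified for illustration (the real helper also covers app-level builder access), and the function itself is not part of the diff:

```typescript
import { User } from "@budibase/types"

// Simplified stand-in for the real isCreator helper - illustration only.
const isCreator = (user?: User) =>
  !!user?.admin?.global || !!user?.builder?.global

function quotaChanges(existing: User | undefined, incoming: User) {
  const change = existing ? 0 : 1 // total user count only moves for brand new users
  // creator seats are adjusted whenever creator status flips between the stored
  // document and the incoming one (save path); deletes count creators directly
  const creatorsChange = isCreator(existing) !== isCreator(incoming) ? 1 : 0
  return { change, creatorsChange }
}
```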
diff --git a/packages/backend-core/src/users/users.ts b/packages/backend-core/src/users/users.ts
index 6237c23972..bad108ab84 100644
--- a/packages/backend-core/src/users/users.ts
+++ b/packages/backend-core/src/users/users.ts
@@ -14,11 +14,11 @@ import {
} from "../db"
import {
BulkDocsResponse,
- ContextUser,
SearchQuery,
SearchQueryOperators,
SearchUsersRequest,
User,
+ ContextUser,
DatabaseQueryOpts,
} from "@budibase/types"
import { getGlobalDB } from "../context"
diff --git a/packages/backend-core/tests/core/users/users.spec.js b/packages/backend-core/tests/core/users/users.spec.js
new file mode 100644
index 0000000000..ae7109344a
--- /dev/null
+++ b/packages/backend-core/tests/core/users/users.spec.js
@@ -0,0 +1,54 @@
+const _ = require('lodash/fp')
+const {structures} = require("../../../tests")
+
+jest.mock("../../../src/context")
+jest.mock("../../../src/db")
+
+const context = require("../../../src/context")
+const db = require("../../../src/db")
+
+const {getCreatorCount} = require('../../../src/users/users')
+
+describe("Users", () => {
+
+ let getGlobalDBMock
+ let getGlobalUserParamsMock
+ let paginationMock
+
+ beforeEach(() => {
+ jest.resetAllMocks()
+
+ getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
+ getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
+ paginationMock = jest.spyOn(db, "pagination")
+ })
+
+ it("Retrieves the number of creators", async () => {
+ const getUsers = (offset, limit, creators = false) => {
+ const range = _.range(offset, limit)
+ const opts = creators ? {builder: {global: true}} : undefined
+ return range.map(() => structures.users.user(opts))
+ }
+ const page1Data = getUsers(0, 8)
+ const page2Data = getUsers(8, 12, true)
+ getGlobalDBMock.mockImplementation(() => ({
+ name : "fake-db",
+ allDocs: () => ({
+ rows: [...page1Data, ...page2Data]
+ })
+ }))
+ paginationMock.mockImplementationOnce(() => ({
+ data: page1Data,
+ hasNextPage: true,
+ nextPage: "1"
+ }))
+ paginationMock.mockImplementation(() => ({
+ data: page2Data,
+ hasNextPage: false,
+ nextPage: undefined
+ }))
+ const creatorsCount = await getCreatorCount()
+ expect(creatorsCount).toBe(4)
+ expect(paginationMock).toHaveBeenCalledTimes(2)
+ })
+})
diff --git a/packages/backend-core/tests/core/utilities/structures/licenses.ts b/packages/backend-core/tests/core/utilities/structures/licenses.ts
index 0e34f2e9bb..bb452f9ad5 100644
--- a/packages/backend-core/tests/core/utilities/structures/licenses.ts
+++ b/packages/backend-core/tests/core/utilities/structures/licenses.ts
@@ -123,6 +123,10 @@ export function customer(): Customer {
export function subscription(): Subscription {
return {
amount: 10000,
+ amounts: {
+ user: 10000,
+ creator: 0,
+ },
cancelAt: undefined,
currency: "usd",
currentPeriodEnd: 0,
@@ -131,6 +135,10 @@ export function subscription(): Subscription {
duration: PriceDuration.MONTHLY,
pastDueAt: undefined,
quantity: 0,
+ quantities: {
+ user: 0,
+ creator: 0,
+ },
status: "active",
}
}
diff --git a/packages/bbui/src/Form/Core/Dropzone.svelte b/packages/bbui/src/Form/Core/Dropzone.svelte
index e9ee75bd8b..0b6a9bb94f 100644
--- a/packages/bbui/src/Form/Core/Dropzone.svelte
+++ b/packages/bbui/src/Form/Core/Dropzone.svelte
@@ -159,8 +159,10 @@
{#if selectedImage.size}
{#if selectedImage.size <= BYTES_IN_MB}
- {`${selectedImage.size / BYTES_IN_KB} KB`}
- {:else}{`${selectedImage.size / BYTES_IN_MB} MB`}{/if}
+ {`${(selectedImage.size / BYTES_IN_KB).toFixed(1)} KB`}
+ {:else}{`${(selectedImage.size / BYTES_IN_MB).toFixed(
+ 1
+ )} MB`}{/if}
{/if}
{#if !disabled}
@@ -203,8 +205,8 @@
{#if file.size}
{#if file.size <= BYTES_IN_MB}
- {`${file.size / BYTES_IN_KB} KB`}
- {:else}{`${file.size / BYTES_IN_MB} MB`}{/if}
+ {`${(file.size / BYTES_IN_KB).toFixed(1)} KB`}
+ {:else}{`${(file.size / BYTES_IN_MB).toFixed(1)} MB`}{/if}
{/if}
{#if !disabled}
diff --git a/packages/builder/src/components/common/Dropzone.svelte b/packages/builder/src/components/common/Dropzone.svelte
index fd2359fd91..daa6ad1807 100644
--- a/packages/builder/src/components/common/Dropzone.svelte
+++ b/packages/builder/src/components/common/Dropzone.svelte
@@ -23,7 +23,7 @@
try {
return await API.uploadBuilderAttachment(data)
} catch (error) {
- notifications.error("Failed to upload attachment")
+ notifications.error(error.message || "Failed to upload attachment")
return []
}
}
diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js
index 18d6b3de3c..9b4640dbb4 100644
--- a/packages/client/src/utils/buttonActions.js
+++ b/packages/client/src/utils/buttonActions.js
@@ -103,7 +103,6 @@ const fetchRowHandler = async action => {
const deleteRowHandler = async action => {
const { tableId, rowId: rowConfig, notificationOverride } = action.parameters
-
if (tableId && rowConfig) {
try {
let requestConfig
@@ -129,9 +128,11 @@ const deleteRowHandler = async action => {
requestConfig = [parsedRowConfig]
} else if (Array.isArray(parsedRowConfig)) {
requestConfig = parsedRowConfig
+ } else if (Number.isInteger(parsedRowConfig)) {
+ requestConfig = [String(parsedRowConfig)]
}
- if (!requestConfig.length) {
+ if (!requestConfig && !parsedRowConfig) {
notificationStore.actions.warning("No valid rows were supplied")
return false
}
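Summarising the row ID shapes the delete action accepts after this change - a hedged, condensed sketch of just the normalisation branch (the surrounding handler, JSON parsing, and notifications are unchanged):

```typescript
// Illustration of the branches above; the real handler JSON-parses string input first.
function normaliseRowConfig(parsed: unknown): unknown[] | undefined {
  if (typeof parsed === "string") return [parsed]       // "row_123"
  if (Number.isInteger(parsed)) return [String(parsed)] // 42 -> ["42"] (new branch)
  if (Array.isArray(parsed)) return parsed              // ["row_1", "row_2"]
  if (typeof parsed === "object" && parsed !== null) return [parsed] // { _id, _rev }
  return undefined // falls through to the "No valid rows were supplied" warning
}
```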
diff --git a/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte b/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte
index a27c31bbe5..fc0001d55e 100644
--- a/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte
+++ b/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte
@@ -55,7 +55,7 @@
try {
return await API.uploadBuilderAttachment(data)
} catch (error) {
- $notifications.error("Failed to upload attachment")
+ $notifications.error(error.message || "Failed to upload attachment")
return []
}
}
diff --git a/packages/pro b/packages/pro
index 1001bb64d4..0db27cad42 160000
--- a/packages/pro
+++ b/packages/pro
@@ -1 +1 @@
-Subproject commit 1001bb64d4e77c3977da354ca28d754a1688f718
+Subproject commit 0db27cad42b1ea92621a2005bde355a4b818d7bc
diff --git a/packages/server/Dockerfile b/packages/server/Dockerfile
index e1b3b208c7..ea4c5b217a 100644
--- a/packages/server/Dockerfile
+++ b/packages/server/Dockerfile
@@ -38,7 +38,7 @@ RUN apt update && apt upgrade -y \
COPY package.json .
COPY dist/yarn.lock .
-RUN yarn install --production=true \
+RUN yarn install --production=true --network-timeout 1000000 \
# Remove unneeded data from file system to reduce image size
&& yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
&& rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp
diff --git a/packages/server/package.json b/packages/server/package.json
index 9cf126e0b2..1ede1857a0 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -18,7 +18,6 @@
"test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch",
- "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",
diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts
index 984cb16c06..8fbc0db910 100644
--- a/packages/server/src/api/controllers/static/index.ts
+++ b/packages/server/src/api/controllers/static/index.ts
@@ -1,3 +1,5 @@
+import { ValidFileExtensions } from "@budibase/shared-core"
+
require("svelte/register")
import { join } from "../../../utilities/centralPath"
@@ -11,34 +13,21 @@ import {
} from "../../../utilities/fileSystem"
import env from "../../../environment"
import { DocumentType } from "../../../db/utils"
-import { context, objectStore, utils, configs } from "@budibase/backend-core"
+import {
+ context,
+ objectStore,
+ utils,
+ configs,
+ BadRequestError,
+} from "@budibase/backend-core"
import AWS from "aws-sdk"
import fs from "fs"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"
-import { App, Ctx } from "@budibase/types"
+import { App, Ctx, ProcessAttachmentResponse, Upload } from "@budibase/types"
const send = require("koa-send")
-async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
- const response = await objectStore.upload({
- bucket,
- metadata,
- filename: s3Key,
- path: file.path,
- type: file.type,
- })
-
- // don't store a URL, work this out on the way out as the URL could change
- return {
- size: file.size,
- name: file.name,
- url: objectStore.getAppFileUrl(s3Key),
- extension: [...file.name.split(".")].pop(),
- key: response.Key,
- }
-}
-
export const toggleBetaUiFeature = async function (ctx: Ctx) {
const cookieName = `beta:${ctx.params.feature}`
@@ -72,23 +61,58 @@ export const serveBuilder = async function (ctx: Ctx) {
await send(ctx, ctx.file, { root: builderPath })
}
-export const uploadFile = async function (ctx: Ctx) {
+export const uploadFile = async function (
+ ctx: Ctx<{}, ProcessAttachmentResponse>
+) {
const file = ctx.request?.files?.file
+ if (!file) {
+ throw new BadRequestError("No file provided")
+ }
+
let files = file && Array.isArray(file) ? Array.from(file) : [file]
- const uploads = files.map(async (file: any) => {
- const fileExtension = [...file.name.split(".")].pop()
- // filenames converted to UUIDs so they are unique
- const processedFileName = `${uuid.v4()}.${fileExtension}`
+ ctx.body = await Promise.all(
+ files.map(async file => {
+ if (!file.name) {
+ throw new BadRequestError(
+ "Attempted to upload a file without a filename"
+ )
+ }
- return prepareUpload({
- file,
- s3Key: `${context.getProdAppId()}/attachments/${processedFileName}`,
- bucket: ObjectStoreBuckets.APPS,
+ const extension = [...file.name.split(".")].pop()
+ if (!extension) {
+ throw new BadRequestError(
+ `File "${file.name}" has no extension, an extension is required to upload a file`
+ )
+ }
+
+ if (!env.SELF_HOSTED && !ValidFileExtensions.includes(extension)) {
+ throw new BadRequestError(
+ `File "${file.name}" has an invalid extension: "${extension}"`
+ )
+ }
+
+ // filenames converted to UUIDs so they are unique
+ const processedFileName = `${uuid.v4()}.${extension}`
+
+ const s3Key = `${context.getProdAppId()}/attachments/${processedFileName}`
+
+ const response = await objectStore.upload({
+ bucket: ObjectStoreBuckets.APPS,
+ filename: s3Key,
+ path: file.path,
+ type: file.type,
+ })
+
+ return {
+ size: file.size,
+ name: file.name,
+ url: objectStore.getAppFileUrl(s3Key),
+ extension,
+ key: response.Key,
+ }
})
- })
-
- ctx.body = await Promise.all(uploads)
+ )
}
export const deleteObjects = async function (ctx: Ctx) {
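The validation steps of the rewritten `uploadFile` handler, pulled out into a hedged standalone sketch (cloud installs - i.e. not `SELF_HOSTED` - reject any extension outside the shared whitelist; the object store upload itself is omitted):

```typescript
import { ValidFileExtensions } from "@budibase/shared-core"
import { BadRequestError } from "@budibase/backend-core"

// Condensed from the handler above: validation only, no upload.
function validateAttachment(fileName: string | undefined, selfHosted: boolean): string {
  if (!fileName) {
    throw new BadRequestError("Attempted to upload a file without a filename")
  }
  const extension = [...fileName.split(".")].pop()
  if (!extension) {
    throw new BadRequestError(
      `File "${fileName}" has no extension, an extension is required to upload a file`
    )
  }
  if (!selfHosted && !ValidFileExtensions.includes(extension)) {
    throw new BadRequestError(
      `File "${fileName}" has an invalid extension: "${extension}"`
    )
  }
  return extension
}
```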
diff --git a/packages/server/src/api/routes/tests/attachment.spec.ts b/packages/server/src/api/routes/tests/attachment.spec.ts
new file mode 100644
index 0000000000..14d2e845f6
--- /dev/null
+++ b/packages/server/src/api/routes/tests/attachment.spec.ts
@@ -0,0 +1,49 @@
+import * as setup from "./utilities"
+import { APIError } from "@budibase/types"
+
+describe("/api/applications/:appId/sync", () => {
+ let config = setup.getConfig()
+
+ afterAll(setup.afterAll)
+ beforeAll(async () => {
+ await config.init()
+ })
+
+ describe("/api/attachments/process", () => {
+ it("should accept an image file upload", async () => {
+ let resp = await config.api.attachment.process(
+ "1px.jpg",
+ Buffer.from([0])
+ )
+ expect(resp.length).toBe(1)
+
+ let upload = resp[0]
+ expect(upload.url.endsWith(".jpg")).toBe(true)
+ expect(upload.extension).toBe("jpg")
+ expect(upload.size).toBe(1)
+ expect(upload.name).toBe("1px.jpg")
+ })
+
+ it("should reject an upload with a malicious file extension", async () => {
+ await config.withEnv({ SELF_HOSTED: undefined }, async () => {
+ let resp = (await config.api.attachment.process(
+ "ohno.exe",
+ Buffer.from([0]),
+ { expectStatus: 400 }
+ )) as unknown as APIError
+ expect(resp.message).toContain("invalid extension")
+ })
+ })
+
+ it("should reject an upload with no file", async () => {
+ let resp = (await config.api.attachment.process(
+ undefined as any,
+ undefined as any,
+ {
+ expectStatus: 400,
+ }
+ )) as unknown as APIError
+ expect(resp.message).toContain("No file provided")
+ })
+ })
+})
diff --git a/packages/server/src/api/routes/tests/static.spec.js b/packages/server/src/api/routes/tests/static.spec.js
index 13d963d057..a28d9ecd79 100644
--- a/packages/server/src/api/routes/tests/static.spec.js
+++ b/packages/server/src/api/routes/tests/static.spec.js
@@ -5,11 +5,15 @@ describe("/static", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let app
+ let cleanupEnv
- afterAll(setup.afterAll)
+ afterAll(() => {
+ setup.afterAll()
+ cleanupEnv()
+ })
beforeAll(async () => {
- config.modeSelf()
+ cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
app = await config.init()
})
diff --git a/packages/server/src/api/routes/tests/webhook.spec.ts b/packages/server/src/api/routes/tests/webhook.spec.ts
index e7046d07c8..118bfca95f 100644
--- a/packages/server/src/api/routes/tests/webhook.spec.ts
+++ b/packages/server/src/api/routes/tests/webhook.spec.ts
@@ -8,11 +8,15 @@ describe("/webhooks", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let webhook: Webhook
+ let cleanupEnv: () => void
- afterAll(setup.afterAll)
+ afterAll(() => {
+ setup.afterAll()
+ cleanupEnv()
+ })
const setupTest = async () => {
- config.modeSelf()
+ cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
await config.init()
const autoConfig = basicAutomation()
autoConfig.definition.trigger.schema = {
diff --git a/packages/server/src/integrations/tests/googlesheets.spec.ts b/packages/server/src/integrations/tests/googlesheets.spec.ts
index 748baddc39..a38c6bda45 100644
--- a/packages/server/src/integrations/tests/googlesheets.spec.ts
+++ b/packages/server/src/integrations/tests/googlesheets.spec.ts
@@ -35,13 +35,18 @@ import { FieldType, Table, TableSchema } from "@budibase/types"
describe("Google Sheets Integration", () => {
let integration: any,
config = new TestConfiguration()
+ let cleanupEnv: () => void
beforeAll(() => {
- config.setGoogleAuth("test")
+ cleanupEnv = config.setEnv({
+ GOOGLE_CLIENT_ID: "test",
+ GOOGLE_CLIENT_SECRET: "test",
+ })
})
afterAll(async () => {
- await config.end()
+ cleanupEnv()
+ config.end()
})
beforeEach(async () => {
diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts
index cec8c8aa12..5096b054a6 100644
--- a/packages/server/src/tests/utilities/TestConfiguration.ts
+++ b/packages/server/src/tests/utilities/TestConfiguration.ts
@@ -58,6 +58,7 @@ import {
} from "@budibase/types"
import API from "./api"
+import { cloneDeep } from "lodash"
type DefaultUserValues = {
globalUserId: string
@@ -188,30 +189,38 @@ class TestConfiguration {
}
}
- // MODES
- setMultiTenancy = (value: boolean) => {
- env._set("MULTI_TENANCY", value)
- coreEnv._set("MULTI_TENANCY", value)
+ async withEnv(newEnvVars: Partial<typeof env>, f: () => Promise<void>) {
+ let cleanup = this.setEnv(newEnvVars)
+ try {
+ await f()
+ } finally {
+ cleanup()
+ }
}
- setSelfHosted = (value: boolean) => {
- env._set("SELF_HOSTED", value)
- coreEnv._set("SELF_HOSTED", value)
- }
+ /*
+ * Sets the environment variables to the given values and returns a function
+ * that can be called to reset the environment variables to their original values.
+ */
+ setEnv(newEnvVars: Partial<typeof env>): () => void {
+ const oldEnv = cloneDeep(env)
+ const oldCoreEnv = cloneDeep(coreEnv)
- setGoogleAuth = (value: string) => {
- env._set("GOOGLE_CLIENT_ID", value)
- env._set("GOOGLE_CLIENT_SECRET", value)
- coreEnv._set("GOOGLE_CLIENT_ID", value)
- coreEnv._set("GOOGLE_CLIENT_SECRET", value)
- }
+ let key: keyof typeof newEnvVars
+ for (key in newEnvVars) {
+ env._set(key, newEnvVars[key])
+ coreEnv._set(key, newEnvVars[key])
+ }
- modeCloud = () => {
- this.setSelfHosted(false)
- }
+ return () => {
+ for (const [key, value] of Object.entries(oldEnv)) {
+ env._set(key, value)
+ }
- modeSelf = () => {
- this.setSelfHosted(true)
+ for (const [key, value] of Object.entries(oldCoreEnv)) {
+ coreEnv._set(key, value)
+ }
+ }
}
// UTILS
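Usage of the new helpers, restated as a standalone hedged sketch (assuming an existing `TestConfiguration` instance named `config`, e.g. from `setup.getConfig()` as in the spec files above):

```typescript
// Per-suite override: set in beforeAll, restore in afterAll (see webhook.spec.ts above)
let cleanupEnv: () => void
beforeAll(() => {
  cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
})
afterAll(() => {
  cleanupEnv()
})

// Per-test override: the original values are restored even if the callback throws
it("rejects disallowed extensions when not self-hosted", async () => {
  await config.withEnv({ SELF_HOSTED: undefined }, async () => {
    // assertions that depend on cloud (non self-hosted) behaviour go here
  })
})
```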
diff --git a/packages/server/src/tests/utilities/api/attachment.ts b/packages/server/src/tests/utilities/api/attachment.ts
new file mode 100644
index 0000000000..a466f1a67e
--- /dev/null
+++ b/packages/server/src/tests/utilities/api/attachment.ts
@@ -0,0 +1,35 @@
+import {
+ APIError,
+ Datasource,
+ ProcessAttachmentResponse,
+} from "@budibase/types"
+import TestConfiguration from "../TestConfiguration"
+import { TestAPI } from "./base"
+import fs from "fs"
+
+export class AttachmentAPI extends TestAPI {
+ constructor(config: TestConfiguration) {
+ super(config)
+ }
+
+ process = async (
+ name: string,
+ file: Buffer | fs.ReadStream | string,
+ { expectStatus } = { expectStatus: 200 }
+ ): Promise<ProcessAttachmentResponse> => {
+ const result = await this.request
+ .post(`/api/attachments/process`)
+ .attach("file", file, name)
+ .set(this.config.defaultHeaders())
+
+ if (result.statusCode !== expectStatus) {
+ throw new Error(
+ `Expected status ${expectStatus} but got ${
+ result.statusCode
+ }, body: ${JSON.stringify(result.body)}`
+ )
+ }
+
+ return result.body
+ }
+}
diff --git a/packages/server/src/tests/utilities/api/index.ts b/packages/server/src/tests/utilities/api/index.ts
index fce8237760..30ef7c478d 100644
--- a/packages/server/src/tests/utilities/api/index.ts
+++ b/packages/server/src/tests/utilities/api/index.ts
@@ -7,6 +7,7 @@ import { DatasourceAPI } from "./datasource"
import { LegacyViewAPI } from "./legacyView"
import { ScreenAPI } from "./screen"
import { ApplicationAPI } from "./application"
+import { AttachmentAPI } from "./attachment"
export default class API {
table: TableAPI
@@ -17,6 +18,7 @@ export default class API {
datasource: DatasourceAPI
screen: ScreenAPI
application: ApplicationAPI
+ attachment: AttachmentAPI
constructor(config: TestConfiguration) {
this.table = new TableAPI(config)
@@ -27,5 +29,6 @@ export default class API {
this.datasource = new DatasourceAPI(config)
this.screen = new ScreenAPI(config)
this.application = new ApplicationAPI(config)
+ this.attachment = new AttachmentAPI(config)
}
}
diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts
index cf3875b2ea..604f872c81 100644
--- a/packages/server/src/utilities/rowProcessor/index.ts
+++ b/packages/server/src/utilities/rowProcessor/index.ts
@@ -241,7 +241,7 @@ export async function outputProcessing(
continue
}
row[property].forEach((attachment: RowAttachment) => {
- attachment.url = objectStore.getAppFileUrl(attachment.key)
+ attachment.url ??= objectStore.getAppFileUrl(attachment.key)
})
}
} else if (
diff --git a/packages/shared-core/src/constants.ts b/packages/shared-core/src/constants.ts
index 725c246e2f..e7c6feb20a 100644
--- a/packages/shared-core/src/constants.ts
+++ b/packages/shared-core/src/constants.ts
@@ -96,3 +96,45 @@ export enum BuilderSocketEvent {
export const SocketSessionTTL = 60
export const ValidQueryNameRegex = /^[^()]*$/
export const ValidColumnNameRegex = /^[_a-zA-Z0-9\s]*$/g
+export const ValidFileExtensions = [
+ "avif",
+ "css",
+ "csv",
+ "docx",
+ "drawio",
+ "editorconfig",
+ "edl",
+ "enc",
+ "export",
+ "geojson",
+ "gif",
+ "htm",
+ "html",
+ "ics",
+ "iqy",
+ "jfif",
+ "jpeg",
+ "jpg",
+ "json",
+ "log",
+ "md",
+ "mid",
+ "odt",
+ "pdf",
+ "png",
+ "ris",
+ "rtf",
+ "svg",
+ "tex",
+ "toml",
+ "twig",
+ "txt",
+ "url",
+ "wav",
+ "webp",
+ "xls",
+ "xlsx",
+ "xml",
+ "yaml",
+ "yml",
+]
diff --git a/packages/types/src/api/web/app/attachment.ts b/packages/types/src/api/web/app/attachment.ts
new file mode 100644
index 0000000000..792bdf3885
--- /dev/null
+++ b/packages/types/src/api/web/app/attachment.ts
@@ -0,0 +1,9 @@
+export interface Upload {
+ size: number
+ name: string
+ url: string
+ extension: string
+ key: string
+}
+
+export type ProcessAttachmentResponse = Upload[]
diff --git a/packages/types/src/api/web/app/index.ts b/packages/types/src/api/web/app/index.ts
index 276d7fa7c1..f5b876009b 100644
--- a/packages/types/src/api/web/app/index.ts
+++ b/packages/types/src/api/web/app/index.ts
@@ -5,3 +5,4 @@ export * from "./view"
export * from "./rows"
export * from "./table"
export * from "./permission"
+export * from "./attachment"
diff --git a/packages/types/src/sdk/featureFlag.ts b/packages/types/src/sdk/featureFlag.ts
index 53aa4842c4..e3935bc7ee 100644
--- a/packages/types/src/sdk/featureFlag.ts
+++ b/packages/types/src/sdk/featureFlag.ts
@@ -1,5 +1,8 @@
export enum FeatureFlag {
LICENSING = "LICENSING",
+ // Feature IDs in Posthog
+ PER_CREATOR_PER_USER_PRICE = "18873",
+ PER_CREATOR_PER_USER_PRICE_ALERT = "18530",
}
export interface TenantFeatureFlags {
diff --git a/packages/types/src/sdk/licensing/billing.ts b/packages/types/src/sdk/licensing/billing.ts
index 35f366c811..bcbc7abd18 100644
--- a/packages/types/src/sdk/licensing/billing.ts
+++ b/packages/types/src/sdk/licensing/billing.ts
@@ -5,10 +5,17 @@ export interface Customer {
currency: string | null | undefined
}
+export interface SubscriptionItems {
+ user: number | undefined
+ creator: number | undefined
+}
+
export interface Subscription {
amount: number
+ amounts: SubscriptionItems | undefined
currency: string
quantity: number
+ quantities: SubscriptionItems | undefined
duration: PriceDuration
cancelAt: number | null | undefined
currentPeriodStart: number
diff --git a/packages/types/src/sdk/licensing/plan.ts b/packages/types/src/sdk/licensing/plan.ts
index 3e214a01ff..1604dfb8af 100644
--- a/packages/types/src/sdk/licensing/plan.ts
+++ b/packages/types/src/sdk/licensing/plan.ts
@@ -4,7 +4,9 @@ export enum PlanType {
PRO = "pro",
/** @deprecated */
TEAM = "team",
+ /** @deprecated */
PREMIUM = "premium",
+ PREMIUM_PLUS = "premium_plus",
BUSINESS = "business",
ENTERPRISE = "enterprise",
}
@@ -26,10 +28,12 @@ export interface AvailablePrice {
currency: string
duration: PriceDuration
priceId: string
+ type?: string
}
export enum PlanModel {
PER_USER = "perUser",
+ PER_CREATOR_PER_USER = "per_creator_per_user",
DAY_PASS = "dayPass",
}
diff --git a/packages/worker/Dockerfile b/packages/worker/Dockerfile
index 4230ee86f8..50f1bb78b9 100644
--- a/packages/worker/Dockerfile
+++ b/packages/worker/Dockerfile
@@ -14,7 +14,7 @@ RUN yarn global add pm2
COPY package.json .
COPY dist/yarn.lock .
-RUN yarn install --production=true
+RUN yarn install --production=true --network-timeout 1000000
# Remove unneeded data from file system to reduce image size
RUN apk del .gyp \
&& yarn cache clean
diff --git a/packages/worker/package.json b/packages/worker/package.json
index 47fe88461f..22e12f0e6b 100644
--- a/packages/worker/package.json
+++ b/packages/worker/package.json
@@ -20,7 +20,6 @@
"run:docker": "node dist/index.js",
"debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
- "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
"dev:stack:init": "node ./scripts/dev/manage.js init",
"dev:builder": "npm run dev:stack:init && nodemon",
"dev:built": "yarn run dev:stack:init && yarn run run:docker",