diff --git a/.github/workflows/release-master.yml b/.github/workflows/release-master.yml index 9ab8530341..df25182cd6 100644 --- a/.github/workflows/release-master.yml +++ b/.github/workflows/release-master.yml @@ -36,6 +36,7 @@ jobs: - uses: actions/setup-node@v1 with: node-version: 18.x + cache: yarn - run: yarn install --frozen-lockfile - name: Update versions @@ -63,14 +64,64 @@ jobs: echo "Using tag $version" echo "version=$version" >> "$GITHUB_OUTPUT" - - name: Build/release Docker images + - name: Setup Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + + - name: Docker login run: | docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - yarn build:docker env: DOCKER_USER: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }} - BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }} + + - name: Build worker docker + uses: docker/build-push-action@v5 + with: + context: . + push: true + platforms: linux/amd64,linux/arm64 + build-args: | + BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }} + tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + file: ./packages/worker/Dockerfile.v2 + cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest + cache-to: type=inline + env: + IMAGE_NAME: budibase/worker + IMAGE_TAG: ${{ steps.currenttag.outputs.version }} + BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }} + + - name: Build server docker + uses: docker/build-push-action@v5 + with: + context: . + push: true + platforms: linux/amd64,linux/arm64 + build-args: | + BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }} + tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + file: ./packages/server/Dockerfile.v2 + cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest + cache-to: type=inline + env: + IMAGE_NAME: budibase/apps + IMAGE_TAG: ${{ steps.currenttag.outputs.version }} + BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }} + + - name: Build proxy docker + uses: docker/build-push-action@v5 + with: + context: ./hosting/proxy + push: true + platforms: linux/amd64,linux/arm64 + tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + file: ./hosting/proxy/Dockerfile + cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest + cache-to: type=inline + env: + IMAGE_NAME: budibase/proxy + IMAGE_TAG: ${{ steps.currenttag.outputs.version }} release-helm-chart: needs: [release-images] diff --git a/.github/workflows/release-singleimage-test.yml b/.github/workflows/release-singleimage-test.yml deleted file mode 100644 index c3a14226ce..0000000000 --- a/.github/workflows/release-singleimage-test.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: Test - -on: - workflow_dispatch: - -env: - CI: true - PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} - REGISTRY_URL: registry.hub.docker.com - NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }} -jobs: - build: - name: "build" - runs-on: ubuntu-latest - strategy: - matrix: - node-version: [18.x] - steps: - - name: "Checkout" - uses: actions/checkout@v4 - with: - submodules: true - token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} - - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - cache: "yarn" - - name: Setup QEMU - uses: docker/setup-qemu-action@v3 - - name: Setup Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v3 - - name: Run Yarn - run: yarn - - name: Run Yarn Build - run: yarn build --scope @budibase/server --scope @budibase/worker - - name: Login to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ 
secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_API_KEY }} - - name: Get the latest release version - id: version - run: | - release_version=$(cat lerna.json | jq -r '.version') - echo $release_version - echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV - - name: Tag and release Budibase service docker image - uses: docker/build-push-action@v5 - with: - context: . - push: true - pull: true - platforms: linux/amd64,linux/arm64 - build-args: BUDIBASE_VERSION=0.0.0+test - tags: budibase/budibase-test:test - file: ./hosting/single/Dockerfile.v2 - cache-from: type=registry,ref=budibase/budibase-test:test - cache-to: type=inline - - name: Tag and release Budibase Azure App Service docker image - uses: docker/build-push-action@v2 - with: - context: . - push: true - platforms: linux/amd64 - build-args: | - TARGETBUILD=aas - BUDIBASE_VERSION=0.0.0+test - tags: budibase/budibase-test:aas - file: ./hosting/single/Dockerfile.v2 diff --git a/.github/workflows/release-singleimage.yml b/.github/workflows/release-singleimage.yml index f7f87f6e4c..a3444d5e7a 100644 --- a/.github/workflows/release-singleimage.yml +++ b/.github/workflows/release-singleimage.yml @@ -66,14 +66,21 @@ jobs: context: . push: true platforms: linux/amd64,linux/arm64 + build-args: BUDIBASE_VERSION=$BUDIBASE_VERSION tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }} - file: ./hosting/single/Dockerfile + file: ./hosting/single/Dockerfile.v2 + env: + BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }} - name: Tag and release Budibase Azure App Service docker image uses: docker/build-push-action@v2 with: context: . push: true platforms: linux/amd64 - build-args: TARGETBUILD=aas + build-args: | + TARGETBUILD=aas + BUDIBASE_VERSION=$BUDIBASE_VERSION tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }} - file: ./hosting/single/Dockerfile + file: ./hosting/single/Dockerfile.v2 + env: + BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }} diff --git a/README.md b/README.md index 9deb16cd4f..7827d4e48a 100644 --- a/README.md +++ b/README.md @@ -126,13 +126,6 @@ You can learn more about the Budibase API at the following places: - [Build an app with Budibase and Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/) -

- Budibase data -

- ## 🏁 Get started Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean. diff --git a/hosting/scripts/install-minio.sh b/hosting/scripts/install-minio.sh deleted file mode 100755 index 8297593599..0000000000 --- a/hosting/scripts/install-minio.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -if [[ $TARGETARCH == arm* ]] ; -then - echo "INSTALLING ARM64 MINIO" - wget https://dl.min.io/server/minio/release/linux-arm64/minio -else - echo "INSTALLING AMD64 MINIO" - wget https://dl.min.io/server/minio/release/linux-amd64/minio -fi -chmod +x minio diff --git a/hosting/scripts/linux/release-to-docker-hub.sh b/hosting/scripts/linux/release-to-docker-hub.sh deleted file mode 100755 index 599a10f914..0000000000 --- a/hosting/scripts/linux/release-to-docker-hub.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -tag=$1 - -if [[ ! "$tag" ]]; then - echo "No tag present. You must pass a tag to this script" - exit 1 -fi - -echo "Tagging images with tag: $tag" - -docker tag proxy-service budibase/proxy:$tag -docker tag app-service budibase/apps:$tag -docker tag worker-service budibase/worker:$tag - -docker push --all-tags budibase/apps -docker push --all-tags budibase/worker -docker push --all-tags budibase/proxy diff --git a/hosting/single/Dockerfile.v2 b/hosting/single/Dockerfile.v2 index 5b07a51b27..ec03a1b5a2 100644 --- a/hosting/single/Dockerfile.v2 +++ b/hosting/single/Dockerfile.v2 @@ -42,6 +42,7 @@ COPY packages/string-templates packages/string-templates FROM budibase/couchdb as runner ARG TARGETARCH ENV TARGETARCH $TARGETARCH +ENV NODE_MAJOR 18 #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service) # e.g. docker build --build-arg TARGETBUILD=aas .... ARG TARGETBUILD=single @@ -49,10 +50,10 @@ ENV TARGETBUILD $TARGETBUILD # install base dependencies RUN apt-get update && \ - apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server + apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server libaio1 # Install postgres client for pg_dump utils -RUN apt install software-properties-common apt-transport-https gpg -y \ +RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \ && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \ && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \ && apt update -y \ @@ -61,10 +62,8 @@ RUN apt install software-properties-common apt-transport-https gpg -y \ # install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx WORKDIR /nodejs -RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \ - bash /tmp/nodesource_setup.sh && \ - apt-get install -y --no-install-recommends libaio1 nodejs && \ - npm install --global yarn pm2 +COPY scripts/install-node.sh ./install.sh +RUN chmod +x install.sh && ./install.sh # setup nginx COPY hosting/single/nginx/nginx.conf /etc/nginx diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh index 9dc7aa25d8..770b23eec1 100644 --- a/hosting/single/runner.sh +++ b/hosting/single/runner.sh @@ -77,7 +77,7 @@ mkdir -p ${DATA_DIR}/minio chown -R couchdb:couchdb ${DATA_DIR}/couch redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 & /bbcouch-runner.sh & 
-/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 & +minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 & /etc/init.d/nginx restart if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then # Add monthly cron job to renew certbot certificate diff --git a/lerna.json b/lerna.json index 384473120b..f0f51242d1 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.11.45", + "version": "2.12.4", "npmClient": "yarn", "packages": [ "packages/*" diff --git a/package.json b/package.json index 100a306a35..417fb31e0e 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,6 @@ "build:sdk": "lerna run --stream build:sdk", "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular", "release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset", - "release:develop": "yarn release --dist-tag develop", "restore": "yarn run clean && yarn && yarn run build", "nuke": "yarn run nuke:packages && yarn run nuke:docker", "nuke:packages": "yarn run restore", @@ -55,10 +54,6 @@ "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"", "lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint", "build:specs": "lerna run --stream specs", - "build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -", - "build:docker:proxy": "docker build hosting/proxy -t proxy-service", - "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -", - "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -", "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild", "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild", "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -", diff --git a/packages/backend-core/__mocks__/aws-sdk.ts b/packages/backend-core/__mocks__/aws-sdk.ts index b8d91dbaa9..e3be511d08 100644 --- a/packages/backend-core/__mocks__/aws-sdk.ts +++ b/packages/backend-core/__mocks__/aws-sdk.ts @@ -3,6 +3,7 @@ const mockS3 = { deleteObject: jest.fn().mockReturnThis(), deleteObjects: jest.fn().mockReturnThis(), createBucket: jest.fn().mockReturnThis(), + getObject: jest.fn().mockReturnThis(), listObject: jest.fn().mockReturnThis(), getSignedUrl: jest.fn((operation: string, params: any) => { return `http://s3.example.com/${params.Bucket}/${params.Key}` diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index b23cd8e5b1..dc8d71b52c 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -21,7 +21,7 @@ "test:watch": "jest --watchAll" }, "dependencies": { - "@budibase/nano": "10.1.2", + "@budibase/nano": "10.1.3", "@budibase/pouchdb-replication-stream": "1.2.10", "@budibase/shared-core": "0.0.0", "@budibase/types": "0.0.0", diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts index e64c116663..c331d791a6 100644 --- a/packages/backend-core/src/cache/writethrough.ts +++ 
b/packages/backend-core/src/cache/writethrough.ts @@ -119,8 +119,8 @@ export class Writethrough { this.writeRateMs = writeRateMs } - async put(doc: any) { - return put(this.db, doc, this.writeRateMs) + async put(doc: any, writeRateMs: number = this.writeRateMs) { + return put(this.db, doc, writeRateMs) } async get(id: string) { diff --git a/packages/backend-core/src/db/constants.ts b/packages/backend-core/src/db/constants.ts index aea485e3e3..bfa7595d62 100644 --- a/packages/backend-core/src/db/constants.ts +++ b/packages/backend-core/src/db/constants.ts @@ -8,3 +8,7 @@ export const CONSTANT_INTERNAL_ROW_COLS = [ ] as const export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const + +export function isInternalColumnName(name: string): boolean { + return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name) +} diff --git a/packages/backend-core/src/docIds/params.ts b/packages/backend-core/src/docIds/params.ts index 36fd75622b..d9baee3dc6 100644 --- a/packages/backend-core/src/docIds/params.ts +++ b/packages/backend-core/src/docIds/params.ts @@ -6,6 +6,7 @@ import { ViewName, } from "../constants" import { getProdAppID } from "./conversions" +import { DatabaseQueryOpts } from "@budibase/types" /** * If creating DB allDocs/query params with only a single top level ID this can be used, this @@ -22,8 +23,8 @@ import { getProdAppID } from "./conversions" export function getDocParams( docType: string, docId?: string | null, - otherProps: any = {} -) { + otherProps: Partial = {} +): DatabaseQueryOpts { if (docId == null) { docId = "" } @@ -45,8 +46,8 @@ export function getDocParams( export function getRowParams( tableId?: string | null, rowId?: string | null, - otherProps = {} -) { + otherProps: Partial = {} +): DatabaseQueryOpts { if (tableId == null) { return getDocParams(DocumentType.ROW, null, otherProps) } @@ -88,7 +89,10 @@ export const isDatasourceId = (id: string) => { /** * Gets parameters for retrieving workspaces. */ -export function getWorkspaceParams(id = "", otherProps = {}) { +export function getWorkspaceParams( + id = "", + otherProps: Partial = {} +): DatabaseQueryOpts { return { ...otherProps, startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`, @@ -99,7 +103,10 @@ export function getWorkspaceParams(id = "", otherProps = {}) { /** * Gets parameters for retrieving users. */ -export function getGlobalUserParams(globalId: any, otherProps: any = {}) { +export function getGlobalUserParams( + globalId: any, + otherProps: Partial = {} +): DatabaseQueryOpts { if (!globalId) { globalId = "" } @@ -117,11 +124,17 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) { /** * Gets parameters for retrieving users, this is a utility function for the getDocParams function. 
*/ -export function getUserMetadataParams(userId?: string | null, otherProps = {}) { +export function getUserMetadataParams( + userId?: string | null, + otherProps: Partial = {} +): DatabaseQueryOpts { return getRowParams(InternalTable.USER_METADATA, userId, otherProps) } -export function getUsersByAppParams(appId: any, otherProps: any = {}) { +export function getUsersByAppParams( + appId: any, + otherProps: Partial = {} +): DatabaseQueryOpts { const prodAppId = getProdAppID(appId) return { ...otherProps, diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts index ffffd8240a..c7cf9f56cc 100644 --- a/packages/backend-core/src/index.ts +++ b/packages/backend-core/src/index.ts @@ -30,6 +30,7 @@ export * as timers from "./timers" export { default as env } from "./environment" export * as blacklist from "./blacklist" export * as docUpdates from "./docUpdates" +export * from "./utils/Duration" export { SearchParams } from "./db" // Add context to tenancy for backwards compatibility // only do this for external usages to prevent internal diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index af2ec6dbaa..a8add7ecb6 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -36,7 +36,7 @@ class InMemoryQueue { * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts = null) { + constructor(name: string, opts?: any) { this._name = name this._opts = opts this._messages = [] diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0658147709..c0d1861de3 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -2,11 +2,18 @@ import env from "../environment" import { getRedisOptions } from "../redis/utils" import { JobQueue } from "./constants" import InMemoryQueue from "./inMemoryQueue" -import BullQueue from "bull" +import BullQueue, { QueueOptions } from "bull" import { addListeners, StalledFn } from "./listeners" +import { Duration } from "../utils" import * as timers from "../timers" +import * as Redis from "ioredis" -const CLEANUP_PERIOD_MS = 60 * 1000 +// the queue lock is held for 5 minutes +const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() +// queue lock is refreshed every 30 seconds +const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs() +// cleanup the queue every 60 seconds +const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs() let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = [] let cleanupInterval: NodeJS.Timeout @@ -21,7 +28,14 @@ export function createQueue( opts: { removeStalledCb?: StalledFn } = {} ): BullQueue.Queue { const { opts: redisOpts, redisProtocolUrl } = getRedisOptions() - const queueConfig: any = redisProtocolUrl || { redis: redisOpts } + const queueConfig: QueueOptions = { + redis: redisProtocolUrl! 
|| (redisOpts as Redis.RedisOptions), + settings: { + maxStalledCount: 0, + lockDuration: QUEUE_LOCK_MS, + lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS, + }, + } let queue: any if (!env.isTest()) { queue = new BullQueue(jobQueue, queueConfig) diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts index b05cf79c8c..0d33031de5 100644 --- a/packages/backend-core/src/security/roles.ts +++ b/packages/backend-core/src/security/roles.ts @@ -122,7 +122,9 @@ export async function roleToNumber(id?: string) { if (isBuiltin(id)) { return builtinRoleToNumber(id) } - const hierarchy = (await getUserRoleHierarchy(id)) as RoleDoc[] + const hierarchy = (await getUserRoleHierarchy(id, { + defaultPublic: true, + })) as RoleDoc[] for (let role of hierarchy) { if (isBuiltin(role?.inherits)) { return builtinRoleToNumber(role.inherits) + 1 @@ -192,12 +194,15 @@ export async function getRole( /** * Simple function to get all the roles based on the top level user role ID. */ -async function getAllUserRoles(userRoleId?: string): Promise { +async function getAllUserRoles( + userRoleId?: string, + opts?: { defaultPublic?: boolean } +): Promise { // admins have access to all roles if (userRoleId === BUILTIN_IDS.ADMIN) { return getAllRoles() } - let currentRole = await getRole(userRoleId) + let currentRole = await getRole(userRoleId, opts) let roles = currentRole ? [currentRole] : [] let roleIds = [userRoleId] // get all the inherited roles @@ -226,12 +231,16 @@ export async function getUserRoleIdHierarchy( * Returns an ordered array of the user's inherited role IDs, this can be used * to determine if a user can access something that requires a specific role. * @param userRoleId The user's role ID, this can be found in their access token. + * @param opts optional - if want to default to public use this. * @returns returns an ordered array of the roles, with the first being their * highest level of access and the last being the lowest level. */ -export async function getUserRoleHierarchy(userRoleId?: string) { +export async function getUserRoleHierarchy( + userRoleId?: string, + opts?: { defaultPublic?: boolean } +) { // special case, if they don't have a role then they are a public user - return getAllUserRoles(userRoleId) + return getAllUserRoles(userRoleId, opts) } // this function checks that the provided permissions are in an array format diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts index a2539e836e..c071064713 100644 --- a/packages/backend-core/src/users/db.ts +++ b/packages/backend-core/src/users/db.ts @@ -25,12 +25,17 @@ import { import { getAccountHolderFromUserIds, isAdmin, + isCreator, validateUniqueUser, } from "./utils" import { searchExistingEmails } from "./lookup" import { hash } from "../utils" -type QuotaUpdateFn = (change: number, cb?: () => Promise) => Promise +type QuotaUpdateFn = ( + change: number, + creatorsChange: number, + cb?: () => Promise +) => Promise type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise type FeatureFn = () => Promise type GroupGetFn = (ids: string[]) => Promise @@ -160,13 +165,9 @@ export class UserDB { } static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) { - const params: any = { - include_docs: true, - limit: opts.limit || 50, - } let response: User[] = await usersCore.searchGlobalUsersByAppAccess( opts.appId, - params + { limit: opts.limit || 50 } ) return response } @@ -245,7 +246,8 @@ export class UserDB { } const change = dbUser ? 
0 : 1 // no change if there is existing user - return UserDB.quotas.addUsers(change, async () => { + const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0 + return UserDB.quotas.addUsers(change, creatorsChange, async () => { await validateUniqueUser(email, tenantId) let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser) @@ -307,6 +309,7 @@ export class UserDB { let usersToSave: any[] = [] let newUsers: any[] = [] + let newCreators: any[] = [] const emails = newUsersRequested.map((user: User) => user.email) const existingEmails = await searchExistingEmails(emails) @@ -327,59 +330,66 @@ export class UserDB { } newUser.userGroups = groups newUsers.push(newUser) + if (isCreator(newUser)) { + newCreators.push(newUser) + } } const account = await accountSdk.getAccountByTenantId(tenantId) - return UserDB.quotas.addUsers(newUsers.length, async () => { - // create the promises array that will be called by bulkDocs - newUsers.forEach((user: any) => { - usersToSave.push( - UserDB.buildUser( - user, - { - hashPassword: true, - requirePassword: user.requirePassword, - }, - tenantId, - undefined, // no dbUser - account + return UserDB.quotas.addUsers( + newUsers.length, + newCreators.length, + async () => { + // create the promises array that will be called by bulkDocs + newUsers.forEach((user: any) => { + usersToSave.push( + UserDB.buildUser( + user, + { + hashPassword: true, + requirePassword: user.requirePassword, + }, + tenantId, + undefined, // no dbUser + account + ) ) - ) - }) + }) - const usersToBulkSave = await Promise.all(usersToSave) - await usersCore.bulkUpdateGlobalUsers(usersToBulkSave) + const usersToBulkSave = await Promise.all(usersToSave) + await usersCore.bulkUpdateGlobalUsers(usersToBulkSave) - // Post-processing of bulk added users, e.g. events and cache operations - for (const user of usersToBulkSave) { - // TODO: Refactor to bulk insert users into the info db - // instead of relying on looping tenant creation - await platform.users.addUser(tenantId, user._id, user.email) - await eventHelpers.handleSaveEvents(user, undefined) - } + // Post-processing of bulk added users, e.g. 
events and cache operations + for (const user of usersToBulkSave) { + // TODO: Refactor to bulk insert users into the info db + // instead of relying on looping tenant creation + await platform.users.addUser(tenantId, user._id, user.email) + await eventHelpers.handleSaveEvents(user, undefined) + } + + const saved = usersToBulkSave.map(user => { + return { + _id: user._id, + email: user.email, + } + }) + + // now update the groups + if (Array.isArray(saved) && groups) { + const groupPromises = [] + const createdUserIds = saved.map(user => user._id) + for (let groupId of groups) { + groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds)) + } + await Promise.all(groupPromises) + } - const saved = usersToBulkSave.map(user => { return { - _id: user._id, - email: user.email, + successful: saved, + unsuccessful, } - }) - - // now update the groups - if (Array.isArray(saved) && groups) { - const groupPromises = [] - const createdUserIds = saved.map(user => user._id) - for (let groupId of groups) { - groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds)) - } - await Promise.all(groupPromises) } - - return { - successful: saved, - unsuccessful, - } - }) + ) } static async bulkDelete(userIds: string[]): Promise { @@ -419,11 +429,12 @@ export class UserDB { _deleted: true, })) const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete) + const creatorsToDelete = usersToDelete.filter(isCreator) - await UserDB.quotas.removeUsers(toDelete.length) for (let user of usersToDelete) { await bulkDeleteProcessing(user) } + await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length) // Build Response // index users by id @@ -472,7 +483,8 @@ export class UserDB { await db.remove(userId, dbUser._rev) - await UserDB.quotas.removeUsers(1) + const creatorsToDelete = isCreator(dbUser) ? 1 : 0 + await UserDB.quotas.removeUsers(1, creatorsToDelete) await eventHelpers.handleDeleteEvents(dbUser) await cache.user.invalidateUser(userId) await sessions.invalidateSessions(userId, { reason: "deletion" }) diff --git a/packages/backend-core/src/users/users.ts b/packages/backend-core/src/users/users.ts index 6237c23972..6dc8750b62 100644 --- a/packages/backend-core/src/users/users.ts +++ b/packages/backend-core/src/users/users.ts @@ -14,12 +14,13 @@ import { } from "../db" import { BulkDocsResponse, - ContextUser, SearchQuery, SearchQueryOperators, SearchUsersRequest, User, + ContextUser, DatabaseQueryOpts, + CouchFindOptions, } from "@budibase/types" import { getGlobalDB } from "../context" import * as context from "../context" @@ -140,7 +141,7 @@ export const getGlobalUserByEmail = async ( export const searchGlobalUsersByApp = async ( appId: any, - opts: any, + opts: DatabaseQueryOpts, getOpts?: GetOpts ) => { if (typeof appId !== "string") { @@ -166,7 +167,10 @@ export const searchGlobalUsersByApp = async ( Return any user who potentially has access to the application Admins, developers and app users with the explicitly role. 
*/ -export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => { +export const searchGlobalUsersByAppAccess = async ( + appId: any, + opts?: { limit?: number } +) => { const roleSelector = `roles.${appId}` let orQuery: any[] = [ @@ -187,7 +191,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => { orQuery.push(roleCheck) } - let searchOptions = { + let searchOptions: CouchFindOptions = { selector: { $or: orQuery, _id: { @@ -198,7 +202,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => { } const resp = await directCouchFind(context.getGlobalDBName(), searchOptions) - return resp?.rows + return resp.rows } export const getGlobalUserByAppPage = (appId: string, user: User) => { @@ -245,7 +249,8 @@ export const paginatedUsers = async ({ limit, }: SearchUsersRequest = {}) => { const db = getGlobalDB() - const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1 + const pageSize = limit ?? PAGE_LIMIT + const pageLimit = pageSize + 1 // get one extra document, to have the next page const opts: DatabaseQueryOpts = { include_docs: true, @@ -272,7 +277,7 @@ export const paginatedUsers = async ({ const response = await db.allDocs(getGlobalUserParams(null, opts)) userList = response.rows.map((row: any) => row.doc) } - return pagination(userList, pageLimit, { + return pagination(userList, pageSize, { paginate: true, property, getKey, diff --git a/packages/backend-core/src/utils/Duration.ts b/packages/backend-core/src/utils/Duration.ts new file mode 100644 index 0000000000..f376c2f7c7 --- /dev/null +++ b/packages/backend-core/src/utils/Duration.ts @@ -0,0 +1,49 @@ +export enum DurationType { + MILLISECONDS = "milliseconds", + SECONDS = "seconds", + MINUTES = "minutes", + HOURS = "hours", + DAYS = "days", +} + +const conversion: Record = { + milliseconds: 1, + seconds: 1000, + minutes: 60 * 1000, + hours: 60 * 60 * 1000, + days: 24 * 60 * 60 * 1000, +} + +export class Duration { + static convert(from: DurationType, to: DurationType, duration: number) { + const milliseconds = duration * conversion[from] + return milliseconds / conversion[to] + } + + static from(from: DurationType, duration: number) { + return { + to: (to: DurationType) => { + return Duration.convert(from, to, duration) + }, + toMs: () => { + return Duration.convert(from, DurationType.MILLISECONDS, duration) + }, + } + } + + static fromSeconds(duration: number) { + return Duration.from(DurationType.SECONDS, duration) + } + + static fromMinutes(duration: number) { + return Duration.from(DurationType.MINUTES, duration) + } + + static fromHours(duration: number) { + return Duration.from(DurationType.HOURS, duration) + } + + static fromDays(duration: number) { + return Duration.from(DurationType.DAYS, duration) + } +} diff --git a/packages/backend-core/src/utils/index.ts b/packages/backend-core/src/utils/index.ts index 318a7f13ba..ac17227459 100644 --- a/packages/backend-core/src/utils/index.ts +++ b/packages/backend-core/src/utils/index.ts @@ -1,3 +1,4 @@ export * from "./hashing" export * from "./utils" export * from "./stringUtils" +export * from "./Duration" diff --git a/packages/backend-core/src/utils/tests/Duration.spec.ts b/packages/backend-core/src/utils/tests/Duration.spec.ts new file mode 100644 index 0000000000..46b996f788 --- /dev/null +++ b/packages/backend-core/src/utils/tests/Duration.spec.ts @@ -0,0 +1,19 @@ +import { Duration, DurationType } from "../Duration" + +describe("duration", () => { + it("should convert minutes to milliseconds", () => { + 
expect(Duration.fromMinutes(5).toMs()).toBe(300000) + }) + + it("should convert seconds to milliseconds", () => { + expect(Duration.fromSeconds(30).toMs()).toBe(30000) + }) + + it("should convert days to milliseconds", () => { + expect(Duration.fromDays(1).toMs()).toBe(86400000) + }) + + it("should convert minutes to days", () => { + expect(Duration.fromMinutes(1440).to(DurationType.DAYS)).toBe(1) + }) +}) diff --git a/packages/backend-core/tests/core/users/users.spec.js b/packages/backend-core/tests/core/users/users.spec.js new file mode 100644 index 0000000000..ae7109344a --- /dev/null +++ b/packages/backend-core/tests/core/users/users.spec.js @@ -0,0 +1,54 @@ +const _ = require('lodash/fp') +const {structures} = require("../../../tests") + +jest.mock("../../../src/context") +jest.mock("../../../src/db") + +const context = require("../../../src/context") +const db = require("../../../src/db") + +const {getCreatorCount} = require('../../../src/users/users') + +describe("Users", () => { + + let getGlobalDBMock + let getGlobalUserParamsMock + let paginationMock + + beforeEach(() => { + jest.resetAllMocks() + + getGlobalDBMock = jest.spyOn(context, "getGlobalDB") + getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams") + paginationMock = jest.spyOn(db, "pagination") + }) + + it("Retrieves the number of creators", async () => { + const getUsers = (offset, limit, creators = false) => { + const range = _.range(offset, limit) + const opts = creators ? {builder: {global: true}} : undefined + return range.map(() => structures.users.user(opts)) + } + const page1Data = getUsers(0, 8) + const page2Data = getUsers(8, 12, true) + getGlobalDBMock.mockImplementation(() => ({ + name : "fake-db", + allDocs: () => ({ + rows: [...page1Data, ...page2Data] + }) + })) + paginationMock.mockImplementationOnce(() => ({ + data: page1Data, + hasNextPage: true, + nextPage: "1" + })) + paginationMock.mockImplementation(() => ({ + data: page2Data, + hasNextPage: false, + nextPage: undefined + })) + const creatorsCount = await getCreatorCount() + expect(creatorsCount).toBe(4) + expect(paginationMock).toHaveBeenCalledTimes(2) + }) +}) diff --git a/packages/backend-core/tests/core/utilities/mocks/date.ts b/packages/backend-core/tests/core/utilities/mocks/date.ts index f580b68349..1e6d105d93 100644 --- a/packages/backend-core/tests/core/utilities/mocks/date.ts +++ b/packages/backend-core/tests/core/utilities/mocks/date.ts @@ -1,2 +1,3 @@ export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z") + export const MOCK_DATE_TIMESTAMP = 1577836800000 diff --git a/packages/backend-core/tests/core/utilities/structures/licenses.ts b/packages/backend-core/tests/core/utilities/structures/licenses.ts index 0e34f2e9bb..bb452f9ad5 100644 --- a/packages/backend-core/tests/core/utilities/structures/licenses.ts +++ b/packages/backend-core/tests/core/utilities/structures/licenses.ts @@ -123,6 +123,10 @@ export function customer(): Customer { export function subscription(): Subscription { return { amount: 10000, + amounts: { + user: 10000, + creator: 0, + }, cancelAt: undefined, currency: "usd", currentPeriodEnd: 0, @@ -131,6 +135,10 @@ export function subscription(): Subscription { duration: PriceDuration.MONTHLY, pastDueAt: undefined, quantity: 0, + quantities: { + user: 0, + creator: 0, + }, status: "active", } } diff --git a/packages/bbui/src/Form/Core/Dropzone.svelte b/packages/bbui/src/Form/Core/Dropzone.svelte index e9ee75bd8b..0b6a9bb94f 100644 --- a/packages/bbui/src/Form/Core/Dropzone.svelte +++ 
b/packages/bbui/src/Form/Core/Dropzone.svelte @@ -159,8 +159,10 @@ {#if selectedImage.size}
{#if selectedImage.size <= BYTES_IN_MB} - {`${selectedImage.size / BYTES_IN_KB} KB`} - {:else}{`${selectedImage.size / BYTES_IN_MB} MB`}{/if} + {`${(selectedImage.size / BYTES_IN_KB).toFixed(1)} KB`} + {:else}{`${(selectedImage.size / BYTES_IN_MB).toFixed( + 1 + )} MB`}{/if}
{/if} {#if !disabled} @@ -203,8 +205,8 @@ {#if file.size}
{#if file.size <= BYTES_IN_MB} - {`${file.size / BYTES_IN_KB} KB`} - {:else}{`${file.size / BYTES_IN_MB} MB`}{/if} + {`${(file.size / BYTES_IN_KB).toFixed(1)} KB`} + {:else}{`${(file.size / BYTES_IN_MB).toFixed(1)} MB`}{/if}
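A side note on the two Dropzone hunks above: the size label is now rounded with `toFixed(1)` instead of printing the raw quotient. A standalone sketch of that formatting, assuming the decimal constants bbui appears to use (1000 bytes per KB, 1,000,000 per MB):

```ts
// Sketch of the size label logic in Dropzone.svelte, not the component itself.
const BYTES_IN_KB = 1000
const BYTES_IN_MB = 1000 * 1000

// Files up to 1 MB are shown in KB, larger files in MB, both to one decimal place.
function formatFileSize(sizeInBytes: number): string {
  if (sizeInBytes <= BYTES_IN_MB) {
    return `${(sizeInBytes / BYTES_IN_KB).toFixed(1)} KB`
  }
  return `${(sizeInBytes / BYTES_IN_MB).toFixed(1)} MB`
}

console.log(formatFileSize(523264)) // "523.3 KB"
console.log(formatFileSize(2400000)) // "2.4 MB"
```

With the old raw division the same 523,264-byte file would have been labelled "523.264 KB", so the change only affects presentation.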
{/if} {#if !disabled} diff --git a/packages/builder/.gitignore b/packages/builder/.gitignore index e5c961d509..acd1a70579 100644 --- a/packages/builder/.gitignore +++ b/packages/builder/.gitignore @@ -5,4 +5,4 @@ package-lock.json release/ dist/ routify -.routify/ \ No newline at end of file +.routify/ diff --git a/packages/builder/src/builderStore/store/frontend.js b/packages/builder/src/builderStore/store/frontend.js index a567caf87f..a4729b4a8a 100644 --- a/packages/builder/src/builderStore/store/frontend.js +++ b/packages/builder/src/builderStore/store/frontend.js @@ -580,7 +580,7 @@ export const getFrontendStore = () => { let table = validTables.find(table => { return ( table.sourceId !== BUDIBASE_INTERNAL_DB_ID && - table.type === DB_TYPE_INTERNAL + table.sourceType === DB_TYPE_INTERNAL ) }) if (table) { @@ -591,7 +591,7 @@ export const getFrontendStore = () => { table = validTables.find(table => { return ( table.sourceId === BUDIBASE_INTERNAL_DB_ID && - table.type === DB_TYPE_INTERNAL + table.sourceType === DB_TYPE_INTERNAL ) }) if (table) { @@ -599,7 +599,7 @@ export const getFrontendStore = () => { } // Finally try an external table - return validTables.find(table => table.type === DB_TYPE_EXTERNAL) + return validTables.find(table => table.sourceType === DB_TYPE_EXTERNAL) }, enrichEmptySettings: (component, opts) => { if (!component?._component) { diff --git a/packages/builder/src/builderStore/store/screenTemplates/rowListScreen.js b/packages/builder/src/builderStore/store/screenTemplates/rowListScreen.js index b17bd99e10..59bcd0d5e8 100644 --- a/packages/builder/src/builderStore/store/screenTemplates/rowListScreen.js +++ b/packages/builder/src/builderStore/store/screenTemplates/rowListScreen.js @@ -2,14 +2,14 @@ import sanitizeUrl from "./utils/sanitizeUrl" import { Screen } from "./utils/Screen" import { Component } from "./utils/Component" -export default function (datasources) { +export default function (datasources, mode = "table") { if (!Array.isArray(datasources)) { return [] } return datasources.map(datasource => { return { name: `${datasource.label} - List`, - create: () => createScreen(datasource), + create: () => createScreen(datasource, mode), id: ROW_LIST_TEMPLATE, resourceId: datasource.resourceId, } @@ -40,10 +40,24 @@ const generateTableBlock = datasource => { return tableBlock } -const createScreen = datasource => { +const generateGridBlock = datasource => { + const gridBlock = new Component("@budibase/standard-components/gridblock") + gridBlock + .customProps({ + table: datasource, + }) + .instanceName(`${datasource.label} - Grid block`) + return gridBlock +} + +const createScreen = (datasource, mode) => { return new Screen() .route(rowListUrl(datasource)) .instanceName(`${datasource.label} - List`) - .addChild(generateTableBlock(datasource)) + .addChild( + mode === "table" + ? 
generateTableBlock(datasource) + : generateGridBlock(datasource) + ) .json() } diff --git a/packages/builder/src/components/backend/DataTable/RelationshipDataTable.svelte b/packages/builder/src/components/backend/DataTable/RelationshipDataTable.svelte index 8ef870caca..4e67a92443 100644 --- a/packages/builder/src/components/backend/DataTable/RelationshipDataTable.svelte +++ b/packages/builder/src/components/backend/DataTable/RelationshipDataTable.svelte @@ -16,7 +16,6 @@ $: linkedTable = $tables.list.find(table => table._id === linkedTableId) $: schema = linkedTable?.schema $: table = $tables.list.find(table => table._id === tableId) - $: type = table?.type $: fetchData(tableId, rowId) $: { let rowLabel = row?.[table?.primaryDisplay] @@ -41,5 +40,5 @@ {#if row && row._id === rowId} - +
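The rowListScreen change above threads a `mode` flag through the screen template factory so autogenerated list screens can be built around the newer grid block rather than a table block. A minimal standalone sketch of that selection, using the gridblock key visible in the diff (the tableblock key is assumed by analogy, and the real factory wraps the chosen block in the builder's `Screen` and `Component` helpers):

```ts
// Sketch of the mode switch in rowListScreen.js, not the builder module itself.
type RowListMode = "table" | "grid"

const BLOCK_COMPONENTS: Record<RowListMode, string> = {
  table: "@budibase/standard-components/tableblock", // assumed key
  grid: "@budibase/standard-components/gridblock", // taken from the diff
}

// Pick which block component the generated list screen should contain.
function rowListBlock(mode: RowListMode = "table"): string {
  return BLOCK_COMPONENTS[mode]
}

console.log(rowListBlock()) // "@budibase/standard-components/tableblock"
console.log(rowListBlock("grid")) // "@budibase/standard-components/gridblock"
```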
{/if} diff --git a/packages/builder/src/components/backend/DataTable/Table.svelte b/packages/builder/src/components/backend/DataTable/Table.svelte index f8087d8a39..f7eccd5242 100644 --- a/packages/builder/src/components/backend/DataTable/Table.svelte +++ b/packages/builder/src/components/backend/DataTable/Table.svelte @@ -24,17 +24,23 @@ let selectedRows = [] let customRenderers = [] + let parsedSchema = {} + + $: if (schema) { + parsedSchema = Object.keys(schema).reduce((acc, key) => { + acc[key] = + typeof schema[key] === "string" ? { type: schema[key] } : schema[key] + + if (!canBeSortColumn(acc[key].type)) { + acc[key].sortable = false + } + return acc + }, {}) + } $: selectedRows, dispatch("selectionUpdated", selectedRows) $: isUsersTable = tableId === TableNames.USERS $: data && resetSelectedRows() - $: { - Object.values(schema || {}).forEach(col => { - if (!canBeSortColumn(col.type)) { - col.sortable = false - } - }) - } $: { if (isUsersTable) { customRenderers = [ @@ -44,24 +50,24 @@ }, ] UNEDITABLE_USER_FIELDS.forEach(field => { - if (schema[field]) { - schema[field].editable = false + if (parsedSchema[field]) { + parsedSchema[field].editable = false } }) - if (schema.email) { - schema.email.displayName = "Email" + if (parsedSchema.email) { + parsedSchema.email.displayName = "Email" } - if (schema.roleId) { - schema.roleId.displayName = "Role" + if (parsedSchema.roleId) { + parsedSchema.roleId.displayName = "Role" } - if (schema.firstName) { - schema.firstName.displayName = "First Name" + if (parsedSchema.firstName) { + parsedSchema.firstName.displayName = "First Name" } - if (schema.lastName) { - schema.lastName.displayName = "Last Name" + if (parsedSchema.lastName) { + parsedSchema.lastName.displayName = "Last Name" } - if (schema.status) { - schema.status.displayName = "Status" + if (parsedSchema.status) { + parsedSchema.status.displayName = "Status" } } } @@ -97,7 +103,7 @@
{ @@ -61,11 +61,10 @@
diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte index 467ae413c3..d5a9aba488 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte @@ -26,6 +26,7 @@ ALLOWABLE_NUMBER_TYPES, SWITCHABLE_TYPES, PrettyRelationshipDefinitions, + DB_TYPE_EXTERNAL, } from "constants/backend" import { getAutoColumnInformation, buildAutoColumn } from "builderStore/utils" import ConfirmDialog from "components/common/ConfirmDialog.svelte" @@ -254,10 +255,11 @@ !uneditable && editableColumn?.type !== AUTO_TYPE && !editableColumn.autocolumn - $: external = table.type === "external" + $: externalTable = table.sourceType === DB_TYPE_EXTERNAL // in the case of internal tables the sourceId will just be undefined $: tableOptions = $tables.list.filter( - opt => opt.type === table.type && table.sourceId === opt.sourceId + opt => + opt.sourceType === table.sourceType && table.sourceId === opt.sourceId ) $: typeEnabled = !originalName || @@ -409,7 +411,7 @@ editableColumn.type === FieldType.BB_REFERENCE && editableColumn.subtype === FieldSubtype.USERS - if (!external) { + if (!externalTable) { return [ FIELDS.STRING, FIELDS.BARCODEQR, @@ -441,7 +443,7 @@ isUsers ? FIELDS.USERS : FIELDS.USER, ] // no-sql or a spreadsheet - if (!external || table.sql) { + if (!externalTable || table.sql) { fields = [...fields, FIELDS.LINK, FIELDS.ARRAY] } return fields @@ -486,7 +488,7 @@ }) } const newError = {} - if (!external && fieldInfo.name?.startsWith("_")) { + if (!externalTable && fieldInfo.name?.startsWith("_")) { newError.name = `Column name cannot start with an underscore.` } else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) { newError.name = `Illegal character; must be alpha-numeric.` @@ -498,7 +500,7 @@ newError.name = `Column name already in use.` } - if (fieldInfo.type == "auto" && !fieldInfo.subtype) { + if (fieldInfo.type === "auto" && !fieldInfo.subtype) { newError.subtype = `Auto Column requires a type` } diff --git a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte index 43751ad944..eb1e7bc7ff 100644 --- a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte +++ b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte @@ -1,6 +1,6 @@ + +
+ {#if buttonCount} + 1} + /> + + + {/if} +
+ + diff --git a/packages/builder/src/components/design/settings/controls/ButtonConfiguration/ButtonSetting.svelte b/packages/builder/src/components/design/settings/controls/ButtonConfiguration/ButtonSetting.svelte new file mode 100644 index 0000000000..a05fd9a39b --- /dev/null +++ b/packages/builder/src/components/design/settings/controls/ButtonConfiguration/ButtonSetting.svelte @@ -0,0 +1,64 @@ + + +
+
+ +
{readableText || "Button"}
+
+
+ removeButton(item._id)} + /> +
+
+ + diff --git a/packages/builder/src/components/design/settings/controls/DraggableList.svelte b/packages/builder/src/components/design/settings/controls/DraggableList/DraggableList.svelte similarity index 82% rename from packages/builder/src/components/design/settings/controls/DraggableList.svelte rename to packages/builder/src/components/design/settings/controls/DraggableList/DraggableList.svelte index c8395b2a1f..1992299e90 100644 --- a/packages/builder/src/components/design/settings/controls/DraggableList.svelte +++ b/packages/builder/src/components/design/settings/controls/DraggableList/DraggableList.svelte @@ -1,10 +1,10 @@ @@ -79,11 +96,11 @@ bind:this={popover} on:open={() => { drawers = [] - $draggable.actions.select(field._id) + $draggable.actions.select(componentInstance._id) }} on:close={() => { open = false - if ($draggable.selected == field._id) { + if ($draggable.selected == componentInstance._id) { $draggable.actions.select() } }} @@ -92,33 +109,13 @@ showPopover={drawers.length == 0} clickOutsideOverride={drawers.length > 0} maxHeight={600} - handlePostionUpdate={(anchorBounds, eleBounds, cfg) => { - let { left, top } = cfg - let percentageOffset = 30 - // left-outside - left = anchorBounds.left - eleBounds.width - 18 - - // shift up from the anchor, if space allows - let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset - let defaultTop = anchorBounds.top - offsetPos - - if (window.innerHeight - defaultTop < eleBounds.height) { - top = window.innerHeight - eleBounds.height - 5 - } else { - top = anchorBounds.top - offsetPos - } - - return { ...cfg, left, top } - }} + handlePostionUpdate={customPositionHandler} > -
- - {field.field} -
+ diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldConfiguration.svelte b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldConfiguration.svelte index 4169cb7d3d..6c74705ab0 100644 --- a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldConfiguration.svelte +++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldConfiguration.svelte @@ -7,7 +7,7 @@ getComponentBindableProperties, } from "builderStore/dataBinding" import { currentAsset } from "builderStore" - import DraggableList from "../DraggableList.svelte" + import DraggableList from "../DraggableList/DraggableList.svelte" import { createEventDispatcher } from "svelte" import { store, selectedScreen } from "builderStore" import FieldSetting from "./FieldSetting.svelte" @@ -50,7 +50,7 @@ updateSanitsedFields(sanitisedValue) unconfigured = buildUnconfiguredOptions(schema, sanitisedFields) fieldList = [...sanitisedFields, ...unconfigured] - .map(buildSudoInstance) + .map(buildPseudoInstance) .filter(x => x != null) } @@ -104,7 +104,7 @@ }) } - const buildSudoInstance = instance => { + const buildPseudoInstance = instance => { if (instance._component) { return instance } diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte index b5cfcb12d9..1d9ce733b8 100644 --- a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte +++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte @@ -1,8 +1,11 @@
- -
{item.label || item.field}
+ > +
+ + {item.field} +
+ +
{readableText}
@@ -53,4 +81,20 @@ .list-item-body { justify-content: space-between; } + .type-icon { + display: flex; + gap: var(--spacing-m); + margin: var(--spacing-xl); + margin-bottom: 0px; + height: var(--spectrum-alias-item-height-m); + padding: 0px var(--spectrum-alias-item-padding-m); + border-width: var(--spectrum-actionbutton-border-size); + border-radius: var(--spectrum-alias-border-radius-regular); + border: 1px solid + var( + --spectrum-actionbutton-m-border-color, + var(--spectrum-alias-border-color) + ); + align-items: center; + } diff --git a/packages/builder/src/components/integration/QueryViewerSidePanel/PreviewPanel.svelte b/packages/builder/src/components/integration/QueryViewerSidePanel/PreviewPanel.svelte index f379ad18a1..3873669b63 100644 --- a/packages/builder/src/components/integration/QueryViewerSidePanel/PreviewPanel.svelte +++ b/packages/builder/src/components/integration/QueryViewerSidePanel/PreviewPanel.svelte @@ -23,7 +23,7 @@
-
+
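Looking back at the backend-core queue change earlier in this diff: queue.ts now builds its Bull `settings` from the new `Duration` helper, so a worker holds a job lock for five minutes, renews it every thirty seconds, and never re-runs stalled jobs (`maxStalledCount: 0`). A sketch of the same wiring, assuming `Duration` is re-exported from `@budibase/backend-core` as the new `export * from "./utils/Duration"` line suggests, and that a local Redis is reachable:

```ts
import BullQueue, { QueueOptions } from "bull"
import { Duration } from "@budibase/backend-core"

// Lock timings mirrored from packages/backend-core/src/queue/queue.ts.
const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // 300000
const QUEUE_LOCK_RENEW_MS = Duration.fromSeconds(30).toMs() // 30000

const queueConfig: QueueOptions = {
  redis: { host: "127.0.0.1", port: 6379 }, // assumed local Redis, not from the diff
  settings: {
    maxStalledCount: 0, // stalled jobs are failed instead of being retried
    lockDuration: QUEUE_LOCK_MS,
    lockRenewTime: QUEUE_LOCK_RENEW_MS,
  },
}

const queue = new BullQueue("sample-automation-queue", queueConfig)
queue.process(async job => {
  // Long-running work can exceed Bull's default 30s lock without being marked
  // as stalled, because the lock is renewed every QUEUE_LOCK_RENEW_MS.
  return job.data
})
```

The cleanup interval gets the same treatment in the diff: it is still sixty seconds, just expressed as `Duration.fromSeconds(60).toMs()`.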
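One more backend-core detail that is easy to miss in the users.ts hunk: `paginatedUsers` still asks CouchDB for one document more than the requested page size, but that extra row is now used only as a sentinel to detect whether another page exists, and `pagination()` receives the real page size rather than the inflated limit. A standalone sketch of the pattern, not the backend-core implementation itself:

```ts
// Fetch pageSize + 1 rows; the extra row only signals that a next page exists.
function paginate<T>(fetchedRows: T[], pageSize: number) {
  const hasNextPage = fetchedRows.length > pageSize
  return {
    data: hasNextPage ? fetchedRows.slice(0, pageSize) : fetchedRows,
    hasNextPage,
  }
}

// A page size of 8 means the query limit is 9.
const rows = Array.from({ length: 9 }, (_, i) => ({ _id: `user_${i}` }))
console.log(paginate(rows, 8).data.length) // 8
console.log(paginate(rows, 8).hasNextPage) // true
```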