Merge branch 'master' into grid-tweaks

Commit 87b69007f4 by Mihail Hadzhiev, 2023-11-02 09:52:52 +02:00, committed via GitHub
180 changed files with 3204 additions and 1226 deletions


@@ -36,6 +36,7 @@ jobs:
       - uses: actions/setup-node@v1
         with:
           node-version: 18.x
+          cache: yarn
       - run: yarn install --frozen-lockfile
       - name: Update versions
@@ -63,14 +64,64 @@ jobs:
           echo "Using tag $version"
           echo "version=$version" >> "$GITHUB_OUTPUT"
-      - name: Build/release Docker images
+      - name: Setup Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Docker login
         run: |
           docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-          yarn build:docker
         env:
           DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
           DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-          BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}
+      - name: Build worker docker
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: true
+          platforms: linux/amd64,linux/arm64
+          build-args: |
+            BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
+          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+          file: ./packages/worker/Dockerfile.v2
+          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+          cache-to: type=inline
+        env:
+          IMAGE_NAME: budibase/worker
+          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
+          BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
+      - name: Build server docker
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: true
+          platforms: linux/amd64,linux/arm64
+          build-args: |
+            BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
+          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+          file: ./packages/server/Dockerfile.v2
+          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+          cache-to: type=inline
+        env:
+          IMAGE_NAME: budibase/apps
+          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
+          BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
+      - name: Build proxy docker
+        uses: docker/build-push-action@v5
+        with:
+          context: ./hosting/proxy
+          push: true
+          platforms: linux/amd64,linux/arm64
+          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
+          file: ./hosting/proxy/Dockerfile
+          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
+          cache-to: type=inline
+        env:
+          IMAGE_NAME: budibase/proxy
+          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
   release-helm-chart:
     needs: [release-images]


@@ -1,72 +0,0 @@
name: Test
on:
workflow_dispatch:
env:
CI: true
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [18.x]
steps:
- name: "Checkout"
uses: actions/checkout@v4
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: "yarn"
- name: Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
- name: Run Yarn
run: yarn
- name: Run Yarn Build
run: yarn build --scope @budibase/server --scope @budibase/worker
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
build-args: BUDIBASE_VERSION=0.0.0+test
tags: budibase/budibase-test:test
file: ./hosting/single/Dockerfile.v2
cache-from: type=registry,ref=budibase/budibase-test:test
cache-to: type=inline
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: |
TARGETBUILD=aas
BUDIBASE_VERSION=0.0.0+test
tags: budibase/budibase-test:aas
file: ./hosting/single/Dockerfile.v2


@@ -66,14 +66,21 @@ jobs:
           context: .
           push: true
           platforms: linux/amd64,linux/arm64
+          build-args: BUDIBASE_VERSION=$BUDIBASE_VERSION
           tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
-          file: ./hosting/single/Dockerfile
+          file: ./hosting/single/Dockerfile.v2
+        env:
+          BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}
       - name: Tag and release Budibase Azure App Service docker image
         uses: docker/build-push-action@v2
         with:
           context: .
           push: true
           platforms: linux/amd64
-          build-args: TARGETBUILD=aas
+          build-args: |
+            TARGETBUILD=aas
+            BUDIBASE_VERSION=$BUDIBASE_VERSION
           tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
-          file: ./hosting/single/Dockerfile
+          file: ./hosting/single/Dockerfile.v2
+        env:
+          BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}


@@ -126,13 +126,6 @@ You can learn more about the Budibase API at the following places:
 - [Build an app with Budibase and Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/)
-<p align="center">
-  <img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1647858558/Feb%20release/Start_building_with_Budibase_s_API_3_rhlzhv.png">
-</p>
-<br /><br />
-<br /><br /><br />
 ## 🏁 Get started
 Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean.


@@ -1,10 +0,0 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio


@@ -1,18 +0,0 @@
#!/bin/bash
tag=$1
if [[ ! "$tag" ]]; then
echo "No tag present. You must pass a tag to this script"
exit 1
fi
echo "Tagging images with tag: $tag"
docker tag proxy-service budibase/proxy:$tag
docker tag app-service budibase/apps:$tag
docker tag worker-service budibase/worker:$tag
docker push --all-tags budibase/apps
docker push --all-tags budibase/worker
docker push --all-tags budibase/proxy


@@ -42,6 +42,7 @@ COPY packages/string-templates packages/string-templates
 FROM budibase/couchdb as runner
 ARG TARGETARCH
 ENV TARGETARCH $TARGETARCH
+ENV NODE_MAJOR 18
 #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
 # e.g. docker build --build-arg TARGETBUILD=aas ....
 ARG TARGETBUILD=single
@@ -49,10 +50,10 @@ ENV TARGETBUILD $TARGETBUILD
 # install base dependencies
 RUN apt-get update && \
-    apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
+    apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server libaio1
 # Install postgres client for pg_dump utils
-RUN apt install software-properties-common apt-transport-https gpg -y \
+RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
     && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
     && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
     && apt update -y \
@@ -61,10 +62,8 @@ RUN apt install software-properties-common apt-transport-https gpg -y \
 # install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
 WORKDIR /nodejs
-RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
-    bash /tmp/nodesource_setup.sh && \
-    apt-get install -y --no-install-recommends libaio1 nodejs && \
-    npm install --global yarn pm2
+COPY scripts/install-node.sh ./install.sh
+RUN chmod +x install.sh && ./install.sh
 # setup nginx
 COPY hosting/single/nginx/nginx.conf /etc/nginx


@@ -77,7 +77,7 @@ mkdir -p ${DATA_DIR}/minio
 chown -R couchdb:couchdb ${DATA_DIR}/couch
 redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
 /bbcouch-runner.sh &
-/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
+minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
 /etc/init.d/nginx restart
 if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
   # Add monthly cron job to renew certbot certificate


@@ -1,5 +1,5 @@
 {
-  "version": "2.11.45",
+  "version": "2.12.4",
   "npmClient": "yarn",
   "packages": [
     "packages/*"


@@ -33,7 +33,6 @@
     "build:sdk": "lerna run --stream build:sdk",
     "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
     "release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset",
-    "release:develop": "yarn release --dist-tag develop",
     "restore": "yarn run clean && yarn && yarn run build",
     "nuke": "yarn run nuke:packages && yarn run nuke:docker",
     "nuke:packages": "yarn run restore",
@@ -55,10 +54,6 @@
     "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
     "lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
     "build:specs": "lerna run --stream specs",
-    "build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
-    "build:docker:proxy": "docker build hosting/proxy -t proxy-service",
-    "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
-    "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
     "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
     "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
     "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",


@@ -3,6 +3,7 @@ const mockS3 = {
   deleteObject: jest.fn().mockReturnThis(),
   deleteObjects: jest.fn().mockReturnThis(),
   createBucket: jest.fn().mockReturnThis(),
+  getObject: jest.fn().mockReturnThis(),
   listObject: jest.fn().mockReturnThis(),
   getSignedUrl: jest.fn((operation: string, params: any) => {
     return `http://s3.example.com/${params.Bucket}/${params.Key}`


@@ -21,7 +21,7 @@
     "test:watch": "jest --watchAll"
   },
   "dependencies": {
-    "@budibase/nano": "10.1.2",
+    "@budibase/nano": "10.1.3",
     "@budibase/pouchdb-replication-stream": "1.2.10",
     "@budibase/shared-core": "0.0.0",
     "@budibase/types": "0.0.0",


@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }
-  async put(doc: any) {
-    return put(this.db, doc, this.writeRateMs)
+  async put(doc: any, writeRateMs: number = this.writeRateMs) {
+    return put(this.db, doc, writeRateMs)
   }
   async get(id: string) {
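The put() change above makes the cache write rate overridable per call. A minimal usage sketch, assuming the constructor takes a database handle plus a default rate and that the class is exported from backend-core's cache module (both assumptions, not shown in this diff):

```ts
// Sketch only - the import path and constructor signature are assumptions.
import { Writethrough } from "@budibase/backend-core/cache"

async function example(db: any) {
  const cache = new Writethrough(db, 10000) // default: persist at most every 10s

  // Default behaviour: throttled by the rate given to the constructor.
  await cache.put({ _id: "app_metadata", theme: "dark" })

  // New behaviour: a single call can override the rate, e.g. force an
  // immediate write-through by passing 0.
  await cache.put({ _id: "quota_doc", users: 42 }, 0)
}
```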


@@ -8,3 +8,7 @@ export const CONSTANT_INTERNAL_ROW_COLS = [
 ] as const
 export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
+export function isInternalColumnName(name: string): boolean {
+  return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
+}
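A small sketch of how the new helper can separate user-defined columns from Budibase's internal row columns; the export location is assumed here:

```ts
import { isInternalColumnName } from "@budibase/shared-core" // assumed export path

const row = { _id: "ro_1a2b", _rev: "1-abc", name: "Alice", age: 30 }

// Keep only the user-defined fields; internal columns such as _id/_rev are dropped.
const userFields = Object.keys(row).filter(key => !isInternalColumnName(key))
// -> ["name", "age"]
```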


@@ -6,6 +6,7 @@ import {
   ViewName,
 } from "../constants"
 import { getProdAppID } from "./conversions"
+import { DatabaseQueryOpts } from "@budibase/types"
 /**
  * If creating DB allDocs/query params with only a single top level ID this can be used, this
@@ -22,8 +23,8 @@ import { getProdAppID } from "./conversions"
 export function getDocParams(
   docType: string,
   docId?: string | null,
-  otherProps: any = {}
-) {
+  otherProps: Partial<DatabaseQueryOpts> = {}
+): DatabaseQueryOpts {
   if (docId == null) {
     docId = ""
   }
@@ -45,8 +46,8 @@ export function getDocParams(
 export function getRowParams(
   tableId?: string | null,
   rowId?: string | null,
-  otherProps = {}
-) {
+  otherProps: Partial<DatabaseQueryOpts> = {}
+): DatabaseQueryOpts {
   if (tableId == null) {
     return getDocParams(DocumentType.ROW, null, otherProps)
   }
@@ -88,7 +89,10 @@ export const isDatasourceId = (id: string) => {
 /**
  * Gets parameters for retrieving workspaces.
  */
-export function getWorkspaceParams(id = "", otherProps = {}) {
+export function getWorkspaceParams(
+  id = "",
+  otherProps: Partial<DatabaseQueryOpts> = {}
+): DatabaseQueryOpts {
   return {
     ...otherProps,
     startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
@@ -99,7 +103,10 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
 /**
  * Gets parameters for retrieving users.
  */
-export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
+export function getGlobalUserParams(
+  globalId: any,
+  otherProps: Partial<DatabaseQueryOpts> = {}
+): DatabaseQueryOpts {
   if (!globalId) {
     globalId = ""
   }
@@ -117,11 +124,17 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
 /**
  * Gets parameters for retrieving users, this is a utility function for the getDocParams function.
  */
-export function getUserMetadataParams(userId?: string | null, otherProps = {}) {
+export function getUserMetadataParams(
+  userId?: string | null,
+  otherProps: Partial<DatabaseQueryOpts> = {}
+): DatabaseQueryOpts {
   return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
 }
-export function getUsersByAppParams(appId: any, otherProps: any = {}) {
+export function getUsersByAppParams(
+  appId: any,
+  otherProps: Partial<DatabaseQueryOpts> = {}
+): DatabaseQueryOpts {
   const prodAppId = getProdAppID(appId)
   return {
     ...otherProps,
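These signatures now accept and return DatabaseQueryOpts rather than any, so callers get compile-time checking on CouchDB query options. A hedged sketch of the benefit; the "app" document type string and the import path are illustrative:

```ts
import { DatabaseQueryOpts } from "@budibase/types"
import { getDocParams } from "@budibase/backend-core/db" // assumed export path

// Typos such as `includeDocs` or `Limit` now fail to compile instead of being
// silently ignored at runtime.
const params: DatabaseQueryOpts = getDocParams("app", null, {
  include_docs: true,
  limit: 50,
})
```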


@@ -30,6 +30,7 @@ export * as timers from "./timers"
 export { default as env } from "./environment"
 export * as blacklist from "./blacklist"
 export * as docUpdates from "./docUpdates"
+export * from "./utils/Duration"
 export { SearchParams } from "./db"
 // Add context to tenancy for backwards compatibility
 // only do this for external usages to prevent internal


@@ -36,7 +36,7 @@ class InMemoryQueue {
    * @param opts This is not used by the in memory queue as there is no real use
    * case when in memory, but is the same API as Bull
    */
-  constructor(name: string, opts = null) {
+  constructor(name: string, opts?: any) {
     this._name = name
     this._opts = opts
     this._messages = []


@@ -2,11 +2,18 @@ import env from "../environment"
 import { getRedisOptions } from "../redis/utils"
 import { JobQueue } from "./constants"
 import InMemoryQueue from "./inMemoryQueue"
-import BullQueue from "bull"
+import BullQueue, { QueueOptions } from "bull"
 import { addListeners, StalledFn } from "./listeners"
+import { Duration } from "../utils"
 import * as timers from "../timers"
+import * as Redis from "ioredis"
-const CLEANUP_PERIOD_MS = 60 * 1000
+// the queue lock is held for 5 minutes
+const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
+// queue lock is refreshed every 30 seconds
+const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
+// cleanup the queue every 60 seconds
+const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
 let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
 let cleanupInterval: NodeJS.Timeout
@@ -21,7 +28,14 @@ export function createQueue<T>(
   opts: { removeStalledCb?: StalledFn } = {}
 ): BullQueue.Queue<T> {
   const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
-  const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
+  const queueConfig: QueueOptions = {
+    redis: redisProtocolUrl! || (redisOpts as Redis.RedisOptions),
+    settings: {
+      maxStalledCount: 0,
+      lockDuration: QUEUE_LOCK_MS,
+      lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,
+    },
+  }
   let queue: any
   if (!env.isTest()) {
     queue = new BullQueue(jobQueue, queueConfig)


@@ -122,7 +122,9 @@ export async function roleToNumber(id?: string) {
   if (isBuiltin(id)) {
     return builtinRoleToNumber(id)
   }
-  const hierarchy = (await getUserRoleHierarchy(id)) as RoleDoc[]
+  const hierarchy = (await getUserRoleHierarchy(id, {
+    defaultPublic: true,
+  })) as RoleDoc[]
   for (let role of hierarchy) {
     if (isBuiltin(role?.inherits)) {
       return builtinRoleToNumber(role.inherits) + 1
@@ -192,12 +194,15 @@
 /**
  * Simple function to get all the roles based on the top level user role ID.
  */
-async function getAllUserRoles(userRoleId?: string): Promise<RoleDoc[]> {
+async function getAllUserRoles(
+  userRoleId?: string,
+  opts?: { defaultPublic?: boolean }
+): Promise<RoleDoc[]> {
   // admins have access to all roles
   if (userRoleId === BUILTIN_IDS.ADMIN) {
     return getAllRoles()
   }
-  let currentRole = await getRole(userRoleId)
+  let currentRole = await getRole(userRoleId, opts)
   let roles = currentRole ? [currentRole] : []
   let roleIds = [userRoleId]
   // get all the inherited roles
@@ -226,12 +231,16 @@ export async function getUserRoleIdHierarchy(
  * Returns an ordered array of the user's inherited role IDs, this can be used
  * to determine if a user can access something that requires a specific role.
  * @param userRoleId The user's role ID, this can be found in their access token.
+ * @param opts optional - if want to default to public use this.
  * @returns returns an ordered array of the roles, with the first being their
  * highest level of access and the last being the lowest level.
  */
-export async function getUserRoleHierarchy(userRoleId?: string) {
+export async function getUserRoleHierarchy(
+  userRoleId?: string,
+  opts?: { defaultPublic?: boolean }
+) {
   // special case, if they don't have a role then they are a public user
-  return getAllUserRoles(userRoleId)
+  return getAllUserRoles(userRoleId, opts)
 }
 // this function checks that the provided permissions are in an array format


@@ -25,12 +25,17 @@ import {
 import {
   getAccountHolderFromUserIds,
   isAdmin,
+  isCreator,
   validateUniqueUser,
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"
-type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
+type QuotaUpdateFn = (
+  change: number,
+  creatorsChange: number,
+  cb?: () => Promise<any>
+) => Promise<any>
 type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
 type FeatureFn = () => Promise<Boolean>
 type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
@@ -160,13 +165,9 @@
   }
   static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
-    const params: any = {
-      include_docs: true,
-      limit: opts.limit || 50,
-    }
     let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
       opts.appId,
-      params
+      { limit: opts.limit || 50 }
     )
     return response
   }
@@ -245,7 +246,8 @@
   }
   const change = dbUser ? 0 : 1 // no change if there is existing user
-  return UserDB.quotas.addUsers(change, async () => {
+  const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
+  return UserDB.quotas.addUsers(change, creatorsChange, async () => {
     await validateUniqueUser(email, tenantId)
     let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
@@ -307,6 +309,7 @@
     let usersToSave: any[] = []
     let newUsers: any[] = []
+    let newCreators: any[] = []
     const emails = newUsersRequested.map((user: User) => user.email)
     const existingEmails = await searchExistingEmails(emails)
@@ -327,59 +330,66 @@
       }
       newUser.userGroups = groups
       newUsers.push(newUser)
+      if (isCreator(newUser)) {
+        newCreators.push(newUser)
+      }
     }
     const account = await accountSdk.getAccountByTenantId(tenantId)
-    return UserDB.quotas.addUsers(newUsers.length, async () => {
+    return UserDB.quotas.addUsers(
+      newUsers.length,
+      newCreators.length,
+      async () => {
         // create the promises array that will be called by bulkDocs
         newUsers.forEach((user: any) => {
           usersToSave.push(
             UserDB.buildUser(
               user,
               {
                 hashPassword: true,
                 requirePassword: user.requirePassword,
               },
               tenantId,
               undefined, // no dbUser
               account
             )
           )
         })
         const usersToBulkSave = await Promise.all(usersToSave)
         await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
         // Post-processing of bulk added users, e.g. events and cache operations
         for (const user of usersToBulkSave) {
           // TODO: Refactor to bulk insert users into the info db
           // instead of relying on looping tenant creation
           await platform.users.addUser(tenantId, user._id, user.email)
           await eventHelpers.handleSaveEvents(user, undefined)
         }
         const saved = usersToBulkSave.map(user => {
           return {
             _id: user._id,
             email: user.email,
           }
         })
         // now update the groups
         if (Array.isArray(saved) && groups) {
           const groupPromises = []
           const createdUserIds = saved.map(user => user._id)
           for (let groupId of groups) {
             groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
           }
           await Promise.all(groupPromises)
         }
         return {
           successful: saved,
           unsuccessful,
         }
-    })
+      }
+    )
   }
   static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
@@ -419,11 +429,12 @@
       _deleted: true,
     }))
     const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
+    const creatorsToDelete = usersToDelete.filter(isCreator)
-    await UserDB.quotas.removeUsers(toDelete.length)
     for (let user of usersToDelete) {
       await bulkDeleteProcessing(user)
     }
+    await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
     // Build Response
     // index users by id
@@ -472,7 +483,8 @@
     await db.remove(userId, dbUser._rev)
-    await UserDB.quotas.removeUsers(1)
+    const creatorsToDelete = isCreator(dbUser) ? 1 : 0
+    await UserDB.quotas.removeUsers(1, creatorsToDelete)
     await eventHelpers.handleDeleteEvents(dbUser)
     await cache.user.invalidateUser(userId)
     await sessions.invalidateSessions(userId, { reason: "deletion" })
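The quota hooks above now track creators separately from total users. A sketch of the two-argument call pattern; the quotas object below is a stand-in, not the real licensing client:

```ts
// QuotaUpdateFn matches the type declared in the diff above.
type QuotaUpdateFn = (
  change: number,
  creatorsChange: number,
  cb?: () => Promise<any>
) => Promise<any>

const quotas: { addUsers: QuotaUpdateFn; removeUsers: QuotaUpdateFn } = {
  addUsers: async (change, creatorsChange, cb) => {
    console.log(`users +${change}, creators +${creatorsChange}`)
    return cb ? cb() : undefined
  },
  removeUsers: async (change, creatorsChange) => {
    console.log(`users -${change}, creators -${creatorsChange}`)
  },
}

async function demo() {
  // Two users added, one of them a creator; the callback persists them.
  await quotas.addUsers(2, 1, async () => "save users here")
  // One non-creator removed.
  await quotas.removeUsers(1, 0)
}
```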


@@ -14,12 +14,13 @@ import {
 } from "../db"
 import {
   BulkDocsResponse,
+  ContextUser,
   SearchQuery,
   SearchQueryOperators,
   SearchUsersRequest,
   User,
-  ContextUser,
   DatabaseQueryOpts,
+  CouchFindOptions,
 } from "@budibase/types"
 import { getGlobalDB } from "../context"
 import * as context from "../context"
@@ -140,7 +141,7 @@
 export const searchGlobalUsersByApp = async (
   appId: any,
-  opts: any,
+  opts: DatabaseQueryOpts,
   getOpts?: GetOpts
 ) => {
   if (typeof appId !== "string") {
@@ -166,7 +167,10 @@
   Return any user who potentially has access to the application
   Admins, developers and app users with the explicitly role.
 */
-export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
+export const searchGlobalUsersByAppAccess = async (
+  appId: any,
+  opts?: { limit?: number }
+) => {
   const roleSelector = `roles.${appId}`
   let orQuery: any[] = [
@@ -187,7 +191,7 @@
     orQuery.push(roleCheck)
   }
-  let searchOptions = {
+  let searchOptions: CouchFindOptions = {
     selector: {
       $or: orQuery,
       _id: {
@@ -198,7 +202,7 @@
   }
   const resp = await directCouchFind(context.getGlobalDBName(), searchOptions)
-  return resp?.rows
+  return resp.rows
 }
 export const getGlobalUserByAppPage = (appId: string, user: User) => {
@@ -245,7 +249,8 @@
   limit,
 }: SearchUsersRequest = {}) => {
   const db = getGlobalDB()
-  const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
+  const pageSize = limit ?? PAGE_LIMIT
+  const pageLimit = pageSize + 1
   // get one extra document, to have the next page
   const opts: DatabaseQueryOpts = {
     include_docs: true,
@@ -272,7 +277,7 @@
     const response = await db.allDocs(getGlobalUserParams(null, opts))
     userList = response.rows.map((row: any) => row.doc)
   }
-  return pagination(userList, pageLimit, {
+  return pagination(userList, pageSize, {
     paginate: true,
     property,
     getKey,
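The paginatedUsers change keeps fetching one extra document (pageSize + 1) but passes the true page size to the pagination helper, so the extra row is only used to detect whether a next page exists. The same pattern in isolation:

```ts
// Generic limit+1 probe; fetchDocs stands in for a db.allDocs-style call.
async function fetchPage<T>(
  fetchDocs: (limit: number) => Promise<T[]>,
  pageSize: number
): Promise<{ data: T[]; hasNextPage: boolean }> {
  const rows = await fetchDocs(pageSize + 1) // ask for one extra row
  const hasNextPage = rows.length > pageSize
  return {
    data: hasNextPage ? rows.slice(0, pageSize) : rows, // never expose the probe row
    hasNextPage,
  }
}
```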


@@ -0,0 +1,49 @@
export enum DurationType {
MILLISECONDS = "milliseconds",
SECONDS = "seconds",
MINUTES = "minutes",
HOURS = "hours",
DAYS = "days",
}
const conversion: Record<DurationType, number> = {
milliseconds: 1,
seconds: 1000,
minutes: 60 * 1000,
hours: 60 * 60 * 1000,
days: 24 * 60 * 60 * 1000,
}
export class Duration {
static convert(from: DurationType, to: DurationType, duration: number) {
const milliseconds = duration * conversion[from]
return milliseconds / conversion[to]
}
static from(from: DurationType, duration: number) {
return {
to: (to: DurationType) => {
return Duration.convert(from, to, duration)
},
toMs: () => {
return Duration.convert(from, DurationType.MILLISECONDS, duration)
},
}
}
static fromSeconds(duration: number) {
return Duration.from(DurationType.SECONDS, duration)
}
static fromMinutes(duration: number) {
return Duration.from(DurationType.MINUTES, duration)
}
static fromHours(duration: number) {
return Duration.from(DurationType.HOURS, duration)
}
static fromDays(duration: number) {
return Duration.from(DurationType.DAYS, duration)
}
}
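A short usage sketch of the Duration helper; the package-level import relies on the re-export added to backend-core's index in this commit:

```ts
import { Duration, DurationType } from "@budibase/backend-core"

const lockMs = Duration.fromMinutes(5).toMs() // 300000
const renewMs = Duration.fromSeconds(30).toMs() // 30000
const days = Duration.fromMinutes(1440).to(DurationType.DAYS) // 1
```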


@@ -1,3 +1,4 @@
 export * from "./hashing"
 export * from "./utils"
 export * from "./stringUtils"
+export * from "./Duration"


@@ -0,0 +1,19 @@
import { Duration, DurationType } from "../Duration"
describe("duration", () => {
it("should convert minutes to milliseconds", () => {
expect(Duration.fromMinutes(5).toMs()).toBe(300000)
})
it("should convert seconds to milliseconds", () => {
expect(Duration.fromSeconds(30).toMs()).toBe(30000)
})
it("should convert days to milliseconds", () => {
expect(Duration.fromDays(1).toMs()).toBe(86400000)
})
it("should convert minutes to days", () => {
expect(Duration.fromMinutes(1440).to(DurationType.DAYS)).toBe(1)
})
})


@@ -0,0 +1,54 @@
const _ = require('lodash/fp')
const {structures} = require("../../../tests")
jest.mock("../../../src/context")
jest.mock("../../../src/db")
const context = require("../../../src/context")
const db = require("../../../src/db")
const {getCreatorCount} = require('../../../src/users/users')
describe("Users", () => {
let getGlobalDBMock
let getGlobalUserParamsMock
let paginationMock
beforeEach(() => {
jest.resetAllMocks()
getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
paginationMock = jest.spyOn(db, "pagination")
})
it("Retrieves the number of creators", async () => {
const getUsers = (offset, limit, creators = false) => {
const range = _.range(offset, limit)
const opts = creators ? {builder: {global: true}} : undefined
return range.map(() => structures.users.user(opts))
}
const page1Data = getUsers(0, 8)
const page2Data = getUsers(8, 12, true)
getGlobalDBMock.mockImplementation(() => ({
name : "fake-db",
allDocs: () => ({
rows: [...page1Data, ...page2Data]
})
}))
paginationMock.mockImplementationOnce(() => ({
data: page1Data,
hasNextPage: true,
nextPage: "1"
}))
paginationMock.mockImplementation(() => ({
data: page2Data,
hasNextPage: false,
nextPage: undefined
}))
const creatorsCount = await getCreatorCount()
expect(creatorsCount).toBe(4)
expect(paginationMock).toHaveBeenCalledTimes(2)
})
})
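The test above treats users created with `{ builder: { global: true } }` as creators. A hedged sketch of what an isCreator-style predicate might look like under that assumption; the real helper may check additional flags:

```ts
// Illustrative only - not the actual implementation from src/users.
interface MaybeCreator {
  admin?: { global?: boolean }
  builder?: { global?: boolean; apps?: string[] }
}

function isCreatorLike(user?: MaybeCreator): boolean {
  if (!user) {
    return false
  }
  return !!(
    user.admin?.global ||
    user.builder?.global ||
    (user.builder?.apps && user.builder.apps.length > 0)
  )
}
```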


@@ -1,2 +1,3 @@
 export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
+export const MOCK_DATE_TIMESTAMP = 1577836800000


@@ -123,6 +123,10 @@ export function customer(): Customer {
 export function subscription(): Subscription {
   return {
     amount: 10000,
+    amounts: {
+      user: 10000,
+      creator: 0,
+    },
     cancelAt: undefined,
     currency: "usd",
     currentPeriodEnd: 0,
@@ -131,6 +135,10 @@ export function subscription(): Subscription {
     duration: PriceDuration.MONTHLY,
     pastDueAt: undefined,
     quantity: 0,
+    quantities: {
+      user: 0,
+      creator: 0,
+    },
     status: "active",
   }
 }


@@ -159,8 +159,10 @@
         {#if selectedImage.size}
           <div class="filesize">
             {#if selectedImage.size <= BYTES_IN_MB}
-              {`${selectedImage.size / BYTES_IN_KB} KB`}
-            {:else}{`${selectedImage.size / BYTES_IN_MB} MB`}{/if}
+              {`${(selectedImage.size / BYTES_IN_KB).toFixed(1)} KB`}
+            {:else}{`${(selectedImage.size / BYTES_IN_MB).toFixed(
+              1
+            )} MB`}{/if}
           </div>
         {/if}
         {#if !disabled}
@@ -203,8 +205,8 @@
         {#if file.size}
           <div class="filesize">
             {#if file.size <= BYTES_IN_MB}
-              {`${file.size / BYTES_IN_KB} KB`}
-            {:else}{`${file.size / BYTES_IN_MB} MB`}{/if}
+              {`${(file.size / BYTES_IN_KB).toFixed(1)} KB`}
+            {:else}{`${(file.size / BYTES_IN_MB).toFixed(1)} MB`}{/if}
           </div>
         {/if}
         {#if !disabled}
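The change above rounds the displayed size to one decimal place instead of printing the raw division. The same logic as a standalone helper; the 1024-based constants are an assumption, since the component defines its own BYTES_IN_KB / BYTES_IN_MB:

```ts
const BYTES_IN_KB = 1024
const BYTES_IN_MB = 1024 * 1024

function formatFileSize(bytes: number): string {
  return bytes <= BYTES_IN_MB
    ? `${(bytes / BYTES_IN_KB).toFixed(1)} KB`
    : `${(bytes / BYTES_IN_MB).toFixed(1)} MB`
}

formatFileSize(345678) // "337.6 KB"
formatFileSize(5242880) // "5.0 MB"
```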


@@ -580,7 +580,7 @@ export const getFrontendStore = () => {
       let table = validTables.find(table => {
         return (
           table.sourceId !== BUDIBASE_INTERNAL_DB_ID &&
-          table.type === DB_TYPE_INTERNAL
+          table.sourceType === DB_TYPE_INTERNAL
         )
       })
       if (table) {
@@ -591,7 +591,7 @@
       table = validTables.find(table => {
         return (
           table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
-          table.type === DB_TYPE_INTERNAL
+          table.sourceType === DB_TYPE_INTERNAL
         )
       })
       if (table) {
@@ -599,7 +599,7 @@
       }
       // Finally try an external table
-      return validTables.find(table => table.type === DB_TYPE_EXTERNAL)
+      return validTables.find(table => table.sourceType === DB_TYPE_EXTERNAL)
     },
     enrichEmptySettings: (component, opts) => {
       if (!component?._component) {


@@ -2,14 +2,14 @@ import sanitizeUrl from "./utils/sanitizeUrl"
 import { Screen } from "./utils/Screen"
 import { Component } from "./utils/Component"
-export default function (datasources) {
+export default function (datasources, mode = "table") {
   if (!Array.isArray(datasources)) {
     return []
   }
   return datasources.map(datasource => {
     return {
       name: `${datasource.label} - List`,
-      create: () => createScreen(datasource),
+      create: () => createScreen(datasource, mode),
       id: ROW_LIST_TEMPLATE,
       resourceId: datasource.resourceId,
     }
@@ -40,10 +40,24 @@ const generateTableBlock = datasource => {
   return tableBlock
 }
-const createScreen = datasource => {
+const generateGridBlock = datasource => {
+  const gridBlock = new Component("@budibase/standard-components/gridblock")
+  gridBlock
+    .customProps({
+      table: datasource,
+    })
+    .instanceName(`${datasource.label} - Grid block`)
+  return gridBlock
+}
+const createScreen = (datasource, mode) => {
   return new Screen()
     .route(rowListUrl(datasource))
     .instanceName(`${datasource.label} - List`)
-    .addChild(generateTableBlock(datasource))
+    .addChild(
+      mode === "table"
+        ? generateTableBlock(datasource)
+        : generateGridBlock(datasource)
+    )
     .json()
 }
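With the new mode argument the row list template can emit either the table block or the grid block. A sketch of how a caller might pick between them; the "grid" value is an assumption, since the diff only names "table" explicitly and routes everything else to the grid block:

```ts
// Hypothetical caller of the template module shown above.
import getRowListScreens from "./rowListScreen" // assumed relative path

declare const datasources: { label: string; resourceId: string }[]

const tableScreens = getRowListScreens(datasources) // default "table" mode
const gridScreens = getRowListScreens(datasources, "grid") // falls through to gridblock
```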


@@ -16,7 +16,6 @@
   $: linkedTable = $tables.list.find(table => table._id === linkedTableId)
   $: schema = linkedTable?.schema
   $: table = $tables.list.find(table => table._id === tableId)
-  $: type = table?.type
   $: fetchData(tableId, rowId)
   $: {
     let rowLabel = row?.[table?.primaryDisplay]
@@ -41,5 +40,5 @@
 </script>
 {#if row && row._id === rowId}
-  <Table {title} {schema} {data} {type} />
+  <Table {title} {schema} {data} />
 {/if}


@@ -24,17 +24,23 @@
   let selectedRows = []
   let customRenderers = []
+  let parsedSchema = {}
+  $: if (schema) {
+    parsedSchema = Object.keys(schema).reduce((acc, key) => {
+      acc[key] =
+        typeof schema[key] === "string" ? { type: schema[key] } : schema[key]
+      if (!canBeSortColumn(acc[key].type)) {
+        acc[key].sortable = false
+      }
+      return acc
+    }, {})
+  }
   $: selectedRows, dispatch("selectionUpdated", selectedRows)
   $: isUsersTable = tableId === TableNames.USERS
   $: data && resetSelectedRows()
-  $: {
-    Object.values(schema || {}).forEach(col => {
-      if (!canBeSortColumn(col.type)) {
-        col.sortable = false
-      }
-    })
-  }
   $: {
     if (isUsersTable) {
       customRenderers = [
@@ -44,24 +50,24 @@
         },
       ]
       UNEDITABLE_USER_FIELDS.forEach(field => {
-        if (schema[field]) {
-          schema[field].editable = false
+        if (parsedSchema[field]) {
+          parsedSchema[field].editable = false
         }
       })
-      if (schema.email) {
-        schema.email.displayName = "Email"
+      if (parsedSchema.email) {
+        parsedSchema.email.displayName = "Email"
       }
-      if (schema.roleId) {
-        schema.roleId.displayName = "Role"
+      if (parsedSchema.roleId) {
+        parsedSchema.roleId.displayName = "Role"
       }
-      if (schema.firstName) {
-        schema.firstName.displayName = "First Name"
+      if (parsedSchema.firstName) {
+        parsedSchema.firstName.displayName = "First Name"
      }
-      if (schema.lastName) {
-        schema.lastName.displayName = "Last Name"
+      if (parsedSchema.lastName) {
+        parsedSchema.lastName.displayName = "Last Name"
      }
-      if (schema.status) {
-        schema.status.displayName = "Status"
+      if (parsedSchema.status) {
+        parsedSchema.status.displayName = "Status"
      }
    }
  }
@@ -97,7 +103,7 @@
 <div class="table-wrapper">
   <Table
     {data}
-    {schema}
+    schema={parsedSchema}
     {loading}
     {customRenderers}
     {rowCount}
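The parsedSchema block normalises entries that may arrive either as a bare type string or as a column object before disabling sorting on unsortable types. The same idea as a typed helper, with a stand-in for canBeSortColumn:

```ts
interface ColumnSchema {
  type: string
  sortable?: boolean
  [key: string]: unknown
}

// Stand-in for the component's canBeSortColumn helper.
const canBeSortColumn = (type: string) => !["attachment", "array"].includes(type)

function parseSchema(
  schema: Record<string, string | ColumnSchema>
): Record<string, ColumnSchema> {
  return Object.keys(schema).reduce((acc, key) => {
    const value = schema[key]
    acc[key] = typeof value === "string" ? { type: value } : { ...value }
    if (!canBeSortColumn(acc[key].type)) {
      acc[key].sortable = false
    }
    return acc
  }, {} as Record<string, ColumnSchema>)
}
```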


@@ -16,6 +16,7 @@
 import GridRelationshipButton from "components/backend/DataTable/buttons/grid/GridRelationshipButton.svelte"
 import GridEditColumnModal from "components/backend/DataTable/modals/grid/GridEditColumnModal.svelte"
 import GridUsersTableButton from "components/backend/DataTable/modals/grid/GridUsersTableButton.svelte"
+import { DB_TYPE_EXTERNAL } from "constants/backend"
 const userSchemaOverrides = {
   firstName: { displayName: "First name", disabled: true },
@@ -27,7 +28,7 @@
 $: id = $tables.selected?._id
 $: isUsersTable = id === TableNames.USERS
-$: isInternal = $tables.selected?.type !== "external"
+$: isInternal = $tables.selected?.sourceType !== DB_TYPE_EXTERNAL
 $: gridDatasource = {
   type: "table",
   tableId: id,
@@ -46,10 +47,7 @@
   tables.replaceTable(id, e.detail)
   // We need to refresh datasources when an external table changes.
-  // Type "external" may exist - sometimes type is "table" and sometimes it
-  // is "external" - it has different meanings in different endpoints.
-  // If we check both these then we hopefully catch all external tables.
-  if (e.detail?.type === "external" || e.detail?.sql) {
+  if (e.detail?.sourceType === DB_TYPE_EXTERNAL) {
     await datasources.fetch()
   }
 }


@@ -17,9 +17,9 @@
   let hideAutocolumns = true
   let data = []
   let loading = false
-  let type = "internal"
   $: name = view.name
+  $: schema = view.schema
   $: calculation = view.calculation
   $: supportedFormats = Object.values(ROW_EXPORT_FORMATS).filter(key => {
@@ -61,11 +61,10 @@
 <Table
   title={decodeURI(name)}
-  schema={view.schema}
+  {schema}
   tableId={view.tableId}
   {data}
   {loading}
-  {type}
   rowCount={10}
   allowEditing={false}
   bind:hideAutocolumns


@@ -10,6 +10,6 @@
 <ImportButton
   {disabled}
   tableId={$datasource?.tableId}
-  tableType={$definition?.type}
+  tableType={$definition?.sourceType}
   on:importrows={rows.actions.refreshData}
 />


@@ -26,6 +26,7 @@
   ALLOWABLE_NUMBER_TYPES,
   SWITCHABLE_TYPES,
   PrettyRelationshipDefinitions,
+  DB_TYPE_EXTERNAL,
 } from "constants/backend"
 import { getAutoColumnInformation, buildAutoColumn } from "builderStore/utils"
 import ConfirmDialog from "components/common/ConfirmDialog.svelte"
@@ -254,10 +255,11 @@
     !uneditable &&
     editableColumn?.type !== AUTO_TYPE &&
     !editableColumn.autocolumn
-  $: external = table.type === "external"
+  $: externalTable = table.sourceType === DB_TYPE_EXTERNAL
   // in the case of internal tables the sourceId will just be undefined
   $: tableOptions = $tables.list.filter(
-    opt => opt.type === table.type && table.sourceId === opt.sourceId
+    opt =>
+      opt.sourceType === table.sourceType && table.sourceId === opt.sourceId
   )
   $: typeEnabled =
     !originalName ||
@@ -409,7 +411,7 @@
       editableColumn.type === FieldType.BB_REFERENCE &&
       editableColumn.subtype === FieldSubtype.USERS
-    if (!external) {
+    if (!externalTable) {
       return [
         FIELDS.STRING,
         FIELDS.BARCODEQR,
@@ -441,7 +443,7 @@
       isUsers ? FIELDS.USERS : FIELDS.USER,
     ]
     // no-sql or a spreadsheet
-    if (!external || table.sql) {
+    if (!externalTable || table.sql) {
       fields = [...fields, FIELDS.LINK, FIELDS.ARRAY]
     }
     return fields
@@ -486,7 +488,7 @@
     })
   }
   const newError = {}
-  if (!external && fieldInfo.name?.startsWith("_")) {
+  if (!externalTable && fieldInfo.name?.startsWith("_")) {
     newError.name = `Column name cannot start with an underscore.`
   } else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {
     newError.name = `Illegal character; must be alpha-numeric.`
@@ -498,7 +500,7 @@
     newError.name = `Column name already in use.`
   }
-  if (fieldInfo.type == "auto" && !fieldInfo.subtype) {
+  if (fieldInfo.type === "auto" && !fieldInfo.subtype) {
     newError.subtype = `Auto Column requires a type`
   }


@@ -1,6 +1,6 @@
 <script>
   import { Select, Toggle, Multiselect } from "@budibase/bbui"
-  import { FIELDS } from "constants/backend"
+  import { DB_TYPE_INTERNAL, FIELDS } from "constants/backend"
   import { API } from "api"
   import { parseFile } from "./utils"
@@ -169,7 +169,7 @@
     </div>
   {/each}
 </div>
-{#if tableType === "internal"}
+{#if tableType === DB_TYPE_INTERNAL}
   <br />
   <Toggle
     bind:value={updateExistingRows}


@@ -8,6 +8,7 @@
   import {
     BUDIBASE_INTERNAL_DB_ID,
     BUDIBASE_DATASOURCE_TYPE,
+    DB_TYPE_INTERNAL,
   } from "constants/backend"
   $: tableNames = $tables.list.map(table => table.name)
@@ -55,8 +56,9 @@
     name,
     schema: { ...schema },
     rows,
-    type: "internal",
+    type: "table",
     sourceId: targetDatasourceId,
+    sourceType: DB_TYPE_INTERNAL,
   }
   // Only set primary display if defined


@@ -13,6 +13,7 @@
     notifications,
   } from "@budibase/bbui"
   import ConfirmDialog from "components/common/ConfirmDialog.svelte"
+  import { DB_TYPE_EXTERNAL } from "constants/backend"
   export let table
@@ -27,8 +28,8 @@
   let willBeDeleted
   let deleteTableName
-  $: external = table?.type === "external"
-  $: allowDeletion = !external || table?.created
+  $: externalTable = table?.sourceType === DB_TYPE_EXTERNAL
+  $: allowDeletion = !externalTable || table?.created
   function showDeleteModal() {
     templateScreens = $store.screens.filter(
@@ -48,7 +49,7 @@
     for (let screen of templateScreens) {
       await store.actions.screens.delete(screen)
     }
-    if (table.type === "external") {
+    if (table.sourceType === DB_TYPE_EXTERNAL) {
       await datasources.fetch()
     }
     notifications.success("Table deleted")
@@ -91,7 +92,7 @@
   <div slot="control" class="icon">
     <Icon s hoverable name="MoreSmallList" />
   </div>
-  {#if !external}
+  {#if !externalTable}
     <MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
   {/if}
   <MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>


@@ -23,7 +23,7 @@
   try {
     return await API.uploadBuilderAttachment(data)
   } catch (error) {
-    notifications.error("Failed to upload attachment")
+    notifications.error(error.message || "Failed to upload attachment")
     return []
   }
} }


@@ -39,7 +39,15 @@
     allowCreator
   ) => {
     if (allowedRoles?.length) {
-      return roles.filter(role => allowedRoles.includes(role._id))
+      const filteredRoles = roles.filter(role =>
+        allowedRoles.includes(role._id)
+      )
+      return [
+        ...filteredRoles,
+        ...(allowedRoles.includes(Constants.Roles.CREATOR)
+          ? [{ _id: Constants.Roles.CREATOR, name: "Creator", enabled: false }]
+          : []),
+      ]
     }
     let newRoles = [...roles]
@@ -129,8 +137,9 @@
   getOptionColour={getColor}
   getOptionIcon={getIcon}
   isOptionEnabled={option =>
-    option._id !== Constants.Roles.CREATOR ||
-    $licensing.perAppBuildersEnabled}
+    (option._id !== Constants.Roles.CREATOR ||
+      $licensing.perAppBuildersEnabled) &&
+    option.enabled !== false}
   {placeholder}
   {error}
 />


@@ -23,6 +23,7 @@ import BasicColumnEditor from "./controls/ColumnEditor/BasicColumnEditor.svelte"
 import GridColumnEditor from "./controls/ColumnEditor/GridColumnEditor.svelte"
 import BarButtonList from "./controls/BarButtonList.svelte"
 import FieldConfiguration from "./controls/FieldConfiguration/FieldConfiguration.svelte"
+import ButtonConfiguration from "./controls/ButtonConfiguration/ButtonConfiguration.svelte"
 import RelationshipFilterEditor from "./controls/RelationshipFilterEditor.svelte"
 const componentMap = {
@@ -48,6 +49,7 @@ const componentMap = {
   "filter/relationship": RelationshipFilterEditor,
   url: URLSelect,
   fieldConfiguration: FieldConfiguration,
+  buttonConfiguration: ButtonConfiguration,
   columns: ColumnEditor,
   "columns/basic": BasicColumnEditor,
   "columns/grid": GridColumnEditor,


@@ -0,0 +1,134 @@
<script>
import DraggableList from "../DraggableList/DraggableList.svelte"
import ButtonSetting from "./ButtonSetting.svelte"
import { createEventDispatcher } from "svelte"
import { store } from "builderStore"
import { Helpers } from "@budibase/bbui"
export let componentBindings
export let bindings
export let value
const dispatch = createEventDispatcher()
let focusItem
$: buttonList = sanitizeValue(value) || []
$: buttonCount = buttonList.length
$: itemProps = {
componentBindings: componentBindings || [],
bindings,
removeButton,
canRemove: buttonCount > 1,
}
const sanitizeValue = val => {
return val?.map(button => {
return button._component ? button : buildPseudoInstance(button)
})
}
const processItemUpdate = e => {
const updatedField = e.detail
const newButtonList = [...buttonList]
const fieldIdx = newButtonList.findIndex(pSetting => {
return pSetting._id === updatedField?._id
})
if (fieldIdx === -1) {
newButtonList.push(updatedField)
} else {
newButtonList[fieldIdx] = updatedField
}
dispatch("change", newButtonList)
}
const listUpdated = e => {
dispatch("change", [...e.detail])
}
const buildPseudoInstance = cfg => {
return store.actions.components.createInstance(
`@budibase/standard-components/button`,
{
_instanceName: Helpers.uuid(),
text: cfg.text,
type: cfg.type || "primary",
},
{}
)
}
const addButton = () => {
const newButton = buildPseudoInstance({
text: `Button ${buttonCount + 1}`,
})
dispatch("change", [...buttonList, newButton])
focusItem = newButton._id
}
const removeButton = id => {
dispatch(
"change",
buttonList.filter(button => button._id !== id)
)
}
</script>
<div class="button-configuration">
{#if buttonCount}
<DraggableList
on:change={listUpdated}
on:itemChange={processItemUpdate}
items={buttonList}
listItemKey={"_id"}
listType={ButtonSetting}
listTypeProps={itemProps}
focus={focusItem}
draggable={buttonCount > 1}
/>
<div class="list-footer" on:click={addButton}>
<div class="add-button">Add button</div>
</div>
{/if}
</div>
<style>
.button-configuration :global(.spectrum-ActionButton) {
width: 100%;
}
.button-configuration :global(.list-wrap > li:last-child),
.button-configuration :global(.list-wrap) {
border-bottom-left-radius: unset;
border-bottom-right-radius: unset;
border-bottom: 0px;
}
.list-footer {
width: 100%;
border-bottom-left-radius: 4px;
border-bottom-right-radius: 4px;
background-color: var(
--spectrum-table-background-color,
var(--spectrum-global-color-gray-50)
);
transition: background-color ease-in-out 130ms;
display: flex;
justify-content: center;
border: 1px solid
var(--spectrum-table-border-color, var(--spectrum-alias-border-color-mid));
cursor: pointer;
}
.add-button {
margin: var(--spacing-s);
}
.list-footer:hover {
background-color: var(
--spectrum-table-row-background-color-hover,
var(--spectrum-alias-highlight-hover)
);
}
</style>

@ -0,0 +1,64 @@
<script>
import EditComponentPopover from "../EditComponentPopover.svelte"
import { Icon } from "@budibase/bbui"
import { runtimeToReadableBinding } from "builderStore/dataBinding"
import { isJSBinding } from "@budibase/string-templates"
export let item
export let componentBindings
export let bindings
export let anchor
export let removeButton
export let canRemove
$: readableText = isJSBinding(item.text)
? "(JavaScript function)"
: runtimeToReadableBinding([...bindings, componentBindings], item.text)
</script>
<div class="list-item-body">
<div class="list-item-left">
<EditComponentPopover
{anchor}
componentInstance={item}
{componentBindings}
{bindings}
on:change
/>
<div class="field-label">{readableText || "Button"}</div>
</div>
<div class="list-item-right">
<Icon
disabled={!canRemove}
size="S"
name="Close"
hoverable
on:click={() => removeButton(item._id)}
/>
</div>
</div>
<style>
.field-label {
text-overflow: ellipsis;
white-space: nowrap;
overflow: hidden;
}
.list-item-body,
.list-item-left {
display: flex;
align-items: center;
gap: var(--spacing-m);
min-width: 0;
}
.list-item-body {
margin-top: 8px;
margin-bottom: 8px;
}
.list-item-right :global(div.spectrum-Switch) {
margin: 0px;
}
.list-item-body {
justify-content: space-between;
}
</style>

@ -1,10 +1,10 @@
<script> <script>
import { Icon } from "@budibase/bbui"
import { dndzone } from "svelte-dnd-action" import { dndzone } from "svelte-dnd-action"
import { createEventDispatcher } from "svelte" import { createEventDispatcher } from "svelte"
import { generate } from "shortid" import { generate } from "shortid"
import { setContext } from "svelte" import { setContext } from "svelte"
import { writable } from "svelte/store" import { writable, get } from "svelte/store"
import DragHandle from "./drag-handle.svelte"
export let items = [] export let items = []
export let showHandle = true export let showHandle = true
@ -12,6 +12,7 @@
export let listTypeProps = {} export let listTypeProps = {}
export let listItemKey export let listItemKey
export let draggable = true export let draggable = true
export let focus
let store = writable({ let store = writable({
selected: null, selected: null,
@ -27,6 +28,10 @@
setContext("draggable", store) setContext("draggable", store)
$: if (focus && store) {
get(store).actions.select(focus)
}
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
const flipDurationMs = 150 const flipDurationMs = 150
@ -82,13 +87,16 @@
> >
{#each draggableItems as draggable (draggable.id)} {#each draggableItems as draggable (draggable.id)}
<li <li
on:mousedown={() => {
get(store).actions.select()
}}
bind:this={anchors[draggable.id]} bind:this={anchors[draggable.id]}
class:highlighted={draggable.id === $store.selected} class:highlighted={draggable.id === $store.selected}
> >
<div class="left-content"> <div class="left-content">
{#if showHandle} {#if showHandle}
<div class="handle" aria-label="drag-handle"> <div class="handle">
<Icon name="DragHandle" size="XL" /> <DragHandle />
</div> </div>
{/if} {/if}
</div> </div>
@ -142,8 +150,9 @@
border-top-right-radius: 4px; border-top-right-radius: 4px;
} }
.list-wrap > li:last-child { .list-wrap > li:last-child {
border-top-left-radius: var(--spectrum-table-regular-border-radius); border-bottom-left-radius: 4px;
border-top-right-radius: var(--spectrum-table-regular-border-radius); border-bottom-right-radius: 4px;
border-bottom: 0px;
} }
.right-content { .right-content {
flex: 1; flex: 1;
@ -153,4 +162,15 @@
padding-left: var(--spacing-s); padding-left: var(--spacing-s);
padding-right: var(--spacing-s); padding-right: var(--spacing-s);
} }
.handle {
display: flex;
height: var(--spectrum-global-dimension-size-150);
}
.handle :global(svg) {
fill: var(--spectrum-global-color-gray-500);
margin-right: var(--spacing-m);
margin-left: 2px;
width: var(--spectrum-global-dimension-size-65);
height: 100%;
}
</style> </style>

@ -0,0 +1,31 @@
<svg
class="drag-handle spectrum-Icon spectrum-Icon--sizeS"
focusable="false"
aria-hidden="true"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="m1,11c0.55228,0 1,-0.4477 1,-1c0,-0.5523 -0.44772,-1 -1,-1c-0.55228,0 -1,0.4477 -1,1c0,0.5523 0.44772,1 1,1z"
/>
<path
d="m1,8c0.55228,0 1,-0.4477 1,-1c0,-0.55228 -0.44772,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.5523 0.44772,1 1,1z"
/>
<path
d="m1,5c0.55228,0 1,-0.44772 1,-1c0,-0.55228 -0.44772,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.55228 0.44772,1 1,1z"
/>
<path
d="m1,2c0.55228,0 1,-0.44772 1,-1c0,-0.55228 -0.44772,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.55228 0.44772,1 1,1z"
/>
<path
d="m4,11c0.5523,0 1,-0.4477 1,-1c0,-0.5523 -0.4477,-1 -1,-1c-0.55228,0 -1,0.4477 -1,1c0,0.5523 0.44772,1 1,1z"
/>
<path
d="m4,8c0.5523,0 1,-0.4477 1,-1c0,-0.55228 -0.4477,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.5523 0.44772,1 1,1z"
/>
<path
d="m4,5c0.5523,0 1,-0.44772 1,-1c0,-0.55228 -0.4477,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.55228 0.44772,1 1,1z"
/>
<path
d="m4,2c0.5523,0 1,-0.44772 1,-1c0,-0.55228 -0.4477,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.55228 0.44772,1 1,1z"
/>
</svg>

@ -3,31 +3,35 @@
import { store } from "builderStore" import { store } from "builderStore"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { createEventDispatcher } from "svelte" import { createEventDispatcher } from "svelte"
import ComponentSettingsSection from "../../../../../pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte" import ComponentSettingsSection from "../../../../pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte"
import { getContext } from "svelte" import { getContext } from "svelte"
export let anchor export let anchor
export let field export let componentInstance
export let componentBindings export let componentBindings
export let bindings export let bindings
export let parseSettings
const draggable = getContext("draggable") const draggable = getContext("draggable")
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
let popover let popover
let drawers = [] let drawers = []
let pseudoComponentInstance
let open = false let open = false
$: if (open && $draggable.selected && $draggable.selected != field._id) { // Auto hide the component when another item is selected
$: if (open && $draggable.selected != componentInstance._id) {
popover.hide() popover.hide()
} }
$: if (field) { // Open automatically if the component is marked as selected
pseudoComponentInstance = field $: if (!open && $draggable.selected === componentInstance._id && popover) {
popover.show()
open = true
} }
$: componentDef = store.actions.components.getDefinition( $: componentDef = store.actions.components.getDefinition(
pseudoComponentInstance._component componentInstance._component
) )
$: parsedComponentDef = processComponentDefinitionSettings(componentDef) $: parsedComponentDef = processComponentDefinitionSettings(componentDef)
@ -36,17 +40,16 @@
return {} return {}
} }
const clone = cloneDeep(componentDef) const clone = cloneDeep(componentDef)
const updatedSettings = clone.settings
.filter(setting => setting.key !== "field") if (typeof parseSettings === "function") {
.map(setting => { clone.settings = parseSettings(clone.settings)
return { ...setting, nested: true } }
})
clone.settings = updatedSettings
return clone return clone
} }
const updateSetting = async (setting, value) => { const updateSetting = async (setting, value) => {
const nestedComponentInstance = cloneDeep(pseudoComponentInstance) const nestedComponentInstance = cloneDeep(componentInstance)
const patchFn = store.actions.components.updateComponentSetting( const patchFn = store.actions.components.updateComponentSetting(
setting.key, setting.key,
@ -54,12 +57,26 @@
) )
patchFn(nestedComponentInstance) patchFn(nestedComponentInstance)
const update = { dispatch("change", nestedComponentInstance)
...nestedComponentInstance, }
active: pseudoComponentInstance.active,
const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
let { left, top } = cfg
let percentageOffset = 30
// left-outside
left = anchorBounds.left - eleBounds.width - 18
// shift up from the anchor, if space allows
let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
let defaultTop = anchorBounds.top - offsetPos
if (window.innerHeight - defaultTop < eleBounds.height) {
top = window.innerHeight - eleBounds.height - 5
} else {
top = anchorBounds.top - offsetPos
} }
dispatch("change", update) return { ...cfg, left, top }
} }
</script> </script>
@ -79,11 +96,11 @@
bind:this={popover} bind:this={popover}
on:open={() => { on:open={() => {
drawers = [] drawers = []
$draggable.actions.select(field._id) $draggable.actions.select(componentInstance._id)
}} }}
on:close={() => { on:close={() => {
open = false open = false
if ($draggable.selected == field._id) { if ($draggable.selected == componentInstance._id) {
$draggable.actions.select() $draggable.actions.select()
} }
}} }}
@ -92,33 +109,13 @@
showPopover={drawers.length == 0} showPopover={drawers.length == 0}
clickOutsideOverride={drawers.length > 0} clickOutsideOverride={drawers.length > 0}
maxHeight={600} maxHeight={600}
handlePostionUpdate={(anchorBounds, eleBounds, cfg) => { handlePostionUpdate={customPositionHandler}
let { left, top } = cfg
let percentageOffset = 30
// left-outside
left = anchorBounds.left - eleBounds.width - 18
// shift up from the anchor, if space allows
let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
let defaultTop = anchorBounds.top - offsetPos
if (window.innerHeight - defaultTop < eleBounds.height) {
top = window.innerHeight - eleBounds.height - 5
} else {
top = anchorBounds.top - offsetPos
}
return { ...cfg, left, top }
}}
> >
<span class="popover-wrap"> <span class="popover-wrap">
<Layout noPadding noGap> <Layout noPadding noGap>
<div class="type-icon"> <slot name="header" />
<Icon name={parsedComponentDef.icon} />
<span>{field.field}</span>
</div>
<ComponentSettingsSection <ComponentSettingsSection
componentInstance={pseudoComponentInstance} {componentInstance}
componentDefinition={parsedComponentDef} componentDefinition={parsedComponentDef}
isScreen={false} isScreen={false}
onUpdateSetting={updateSetting} onUpdateSetting={updateSetting}
@ -141,20 +138,4 @@
.popover-wrap { .popover-wrap {
background-color: var(--spectrum-alias-background-color-primary); background-color: var(--spectrum-alias-background-color-primary);
} }
.type-icon {
display: flex;
gap: var(--spacing-m);
margin: var(--spacing-xl);
margin-bottom: 0px;
height: var(--spectrum-alias-item-height-m);
padding: 0px var(--spectrum-alias-item-padding-m);
border-width: var(--spectrum-actionbutton-border-size);
border-radius: var(--spectrum-alias-border-radius-regular);
border: 1px solid
var(
--spectrum-actionbutton-m-border-color,
var(--spectrum-alias-border-color)
);
align-items: center;
}
</style> </style>
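As a rough illustration of customPositionHandler above, the same arithmetic run on made-up bounds; every number here is hypothetical.

// Hypothetical bounds, purely to illustrate the positioning maths above.
const anchorBounds = { left: 900, top: 400 }
const eleBounds = { width: 300, height: 500 }
const viewportHeight = 800 // stands in for window.innerHeight

const left = anchorBounds.left - eleBounds.width - 18 // 582: popover sits left of the anchor
const offsetPos = Math.floor(eleBounds.height / 100) * 30 // 150: shift up by roughly 30% of the height
const defaultTop = anchorBounds.top - offsetPos // 250
const top =
  viewportHeight - defaultTop < eleBounds.height
    ? viewportHeight - eleBounds.height - 5 // clamp to the bottom of the viewport
    : defaultTop // 250 here, since 800 - 250 still leaves room for 500px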

@ -7,7 +7,7 @@
getComponentBindableProperties, getComponentBindableProperties,
} from "builderStore/dataBinding" } from "builderStore/dataBinding"
import { currentAsset } from "builderStore" import { currentAsset } from "builderStore"
import DraggableList from "../DraggableList.svelte" import DraggableList from "../DraggableList/DraggableList.svelte"
import { createEventDispatcher } from "svelte" import { createEventDispatcher } from "svelte"
import { store, selectedScreen } from "builderStore" import { store, selectedScreen } from "builderStore"
import FieldSetting from "./FieldSetting.svelte" import FieldSetting from "./FieldSetting.svelte"
@ -50,7 +50,7 @@
updateSanitsedFields(sanitisedValue) updateSanitsedFields(sanitisedValue)
unconfigured = buildUnconfiguredOptions(schema, sanitisedFields) unconfigured = buildUnconfiguredOptions(schema, sanitisedFields)
fieldList = [...sanitisedFields, ...unconfigured] fieldList = [...sanitisedFields, ...unconfigured]
.map(buildSudoInstance) .map(buildPseudoInstance)
.filter(x => x != null) .filter(x => x != null)
} }
@ -104,7 +104,7 @@
}) })
} }
const buildSudoInstance = instance => { const buildPseudoInstance = instance => {
if (instance._component) { if (instance._component) {
return instance return instance
} }

@ -1,8 +1,11 @@
<script> <script>
import EditFieldPopover from "./EditFieldPopover.svelte" import EditComponentPopover from "../EditComponentPopover.svelte"
import { Toggle } from "@budibase/bbui" import { Toggle, Icon } from "@budibase/bbui"
import { createEventDispatcher } from "svelte" import { createEventDispatcher } from "svelte"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { store } from "builderStore"
import { runtimeToReadableBinding } from "builderStore/dataBinding"
import { isJSBinding } from "@budibase/string-templates"
export let item export let item
export let componentBindings export let componentBindings
@ -16,18 +19,43 @@
dispatch("change", { ...cloneDeep(item), active: e.detail }) dispatch("change", { ...cloneDeep(item), active: e.detail })
} }
} }
const getReadableText = () => {
if (item.label) {
return isJSBinding(item.label)
? "(JavaScript function)"
: runtimeToReadableBinding([...bindings, componentBindings], item.label)
}
return item.field
}
const parseSettings = settings => {
return settings
.filter(setting => setting.key !== "field")
.map(setting => {
return { ...setting, nested: true }
})
}
$: readableText = getReadableText(item)
$: componentDef = store.actions.components.getDefinition(item._component)
</script> </script>
<div class="list-item-body"> <div class="list-item-body">
<div class="list-item-left"> <div class="list-item-left">
<EditFieldPopover <EditComponentPopover
{anchor} {anchor}
field={item} componentInstance={item}
{componentBindings} {componentBindings}
{bindings} {bindings}
{parseSettings}
on:change on:change
/> >
<div class="field-label">{item.label || item.field}</div> <div slot="header" class="type-icon">
<Icon name={componentDef.icon} />
<span>{item.field}</span>
</div>
</EditComponentPopover>
<div class="field-label">{readableText}</div>
</div> </div>
<div class="list-item-right"> <div class="list-item-right">
<Toggle on:change={onToggle(item)} text="" value={item.active} thin /> <Toggle on:change={onToggle(item)} text="" value={item.active} thin />
@ -53,4 +81,20 @@
.list-item-body { .list-item-body {
justify-content: space-between; justify-content: space-between;
} }
.type-icon {
display: flex;
gap: var(--spacing-m);
margin: var(--spacing-xl);
margin-bottom: 0px;
height: var(--spectrum-alias-item-height-m);
padding: 0px var(--spectrum-alias-item-padding-m);
border-width: var(--spectrum-actionbutton-border-size);
border-radius: var(--spectrum-alias-border-radius-regular);
border: 1px solid
var(
--spectrum-actionbutton-m-border-color,
var(--spectrum-alias-border-color)
);
align-items: center;
}
</style> </style>

@ -23,7 +23,7 @@
</script> </script>
<div class="table"> <div class="table">
<Table {schema} data={rowsCopy} type="external" allowEditing={false} /> <Table {schema} data={rowsCopy} allowEditing={false} />
</div> </div>
<style> <style>

@ -196,8 +196,36 @@
} }
} }
const validateQuery = async () => {
const forbiddenBindings = /{{\s?user(\.(\w|\$)*\s?|\s?)}}/g
const bindingError = new Error(
"'user' is a protected binding and cannot be used"
)
if (forbiddenBindings.test(url)) {
throw bindingError
}
if (forbiddenBindings.test(query.fields.requestBody ?? "")) {
throw bindingError
}
Object.values(requestBindings).forEach(bindingValue => {
if (forbiddenBindings.test(bindingValue)) {
throw bindingError
}
})
Object.values(query.fields.headers).forEach(headerValue => {
if (forbiddenBindings.test(headerValue)) {
throw bindingError
}
})
}
async function runQuery() { async function runQuery() {
try { try {
await validateQuery()
response = await queries.preview(buildQuery()) response = await queries.preview(buildQuery())
if (response.rows.length === 0) { if (response.rows.length === 0) {
notifications.info("Request did not return any data") notifications.info("Request did not return any data")
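To make the protected-binding rule above concrete, a standalone check of what the pattern matches; the sample strings are illustrative only.

// Standalone sketch using the same pattern as validateQuery above.
const forbiddenBindings = /{{\s?user(\.(\w|\$)*\s?|\s?)}}/g

const samples = ["{{ user }}", "{{user._id}}", "{{ user.email }}", "{{ username }}"]
for (const sample of samples) {
  forbiddenBindings.lastIndex = 0 // reset because the pattern carries the global flag
  console.log(sample, forbiddenBindings.test(sample) ? "blocked" : "allowed")
}
// "{{ user }}", "{{user._id}}" and "{{ user.email }}" are blocked; "{{ username }}" is allowed.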

@ -516,6 +516,13 @@
} }
return null return null
} }
const parseRole = user => {
if (user.isAdminOrGlobalBuilder) {
return Constants.Roles.CREATOR
}
return user.role
}
</script> </script>
<svelte:window on:keydown={handleKeyDown} /> <svelte:window on:keydown={handleKeyDown} />
@ -725,7 +732,7 @@
<RoleSelect <RoleSelect
footer={getRoleFooter(user)} footer={getRoleFooter(user)}
placeholder={false} placeholder={false}
value={user.role} value={parseRole(user)}
allowRemove={user.role && !user.group} allowRemove={user.role && !user.group}
allowPublic={false} allowPublic={false}
allowCreator={true} allowCreator={true}
@ -744,7 +751,7 @@
autoWidth autoWidth
align="right" align="right"
allowedRoles={user.isAdminOrGlobalBuilder allowedRoles={user.isAdminOrGlobalBuilder
? [Constants.Roles.ADMIN] ? [Constants.Roles.CREATOR]
: null} : null}
/> />
</div> </div>

@ -2,6 +2,7 @@
import { ModalContent, Body, Input, notifications } from "@budibase/bbui" import { ModalContent, Body, Input, notifications } from "@budibase/bbui"
import { tables, datasources } from "stores/backend" import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
import { DB_TYPE_EXTERNAL } from "constants/backend"
export let datasource export let datasource
@ -16,9 +17,10 @@
function buildDefaultTable(tableName, datasourceId) { function buildDefaultTable(tableName, datasourceId) {
return { return {
name: tableName, name: tableName,
type: "external", type: "table",
primary: ["id"], primary: ["id"],
sourceId: datasourceId, sourceId: datasourceId,
sourceType: DB_TYPE_EXTERNAL,
schema: { schema: {
id: { id: {
autocolumn: true, autocolumn: true,

@ -5,7 +5,7 @@
import { tables, datasources } from "stores/backend" import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
import { onMount } from "svelte" import { onMount } from "svelte"
import { BUDIBASE_INTERNAL_DB_ID } from "constants/backend" import { BUDIBASE_INTERNAL_DB_ID, DB_TYPE_EXTERNAL } from "constants/backend"
import { TableNames } from "constants" import { TableNames } from "constants"
import { store } from "builderStore" import { store } from "builderStore"
@ -14,7 +14,7 @@
$: store.actions.websocket.selectResource(BUDIBASE_INTERNAL_DB_ID) $: store.actions.websocket.selectResource(BUDIBASE_INTERNAL_DB_ID)
$: internalTablesBySourceId = $tables.list.filter( $: internalTablesBySourceId = $tables.list.filter(
table => table =>
table.type !== "external" && table.sourceType !== DB_TYPE_EXTERNAL &&
table.sourceId === BUDIBASE_INTERNAL_DB_ID && table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
table._id !== TableNames.USERS table._id !== TableNames.USERS
) )

@ -4,7 +4,7 @@
import ICONS from "components/backend/DatasourceNavigator/icons" import ICONS from "components/backend/DatasourceNavigator/icons"
import { tables, datasources } from "stores/backend" import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
import { DEFAULT_BB_DATASOURCE_ID } from "constants/backend" import { DEFAULT_BB_DATASOURCE_ID, DB_TYPE_EXTERNAL } from "constants/backend"
import { onMount } from "svelte" import { onMount } from "svelte"
import { store } from "builderStore" import { store } from "builderStore"
@ -13,7 +13,8 @@
$: store.actions.websocket.selectResource(DEFAULT_BB_DATASOURCE_ID) $: store.actions.websocket.selectResource(DEFAULT_BB_DATASOURCE_ID)
$: internalTablesBySourceId = $tables.list.filter( $: internalTablesBySourceId = $tables.list.filter(
table => table =>
table.type !== "external" && table.sourceId === DEFAULT_BB_DATASOURCE_ID table.sourceType !== DB_TYPE_EXTERNAL &&
table.sourceId === DEFAULT_BB_DATASOURCE_ID
) )
onMount(() => { onMount(() => {

@ -91,7 +91,12 @@
/> />
{/if} {/if}
{#if section == "styles"} {#if section == "styles"}
<DesignSection {componentInstance} {componentDefinition} {bindings} /> <DesignSection
{componentInstance}
{componentBindings}
{componentDefinition}
{bindings}
/>
<CustomStylesSection <CustomStylesSection
{componentInstance} {componentInstance}
{componentDefinition} {componentDefinition}

@ -16,18 +16,32 @@
export let isScreen = false export let isScreen = false
export let onUpdateSetting export let onUpdateSetting
export let showSectionTitle = true export let showSectionTitle = true
export let tag
$: sections = getSections(componentInstance, componentDefinition, isScreen) $: sections = getSections(
componentInstance,
componentDefinition,
isScreen,
tag
)
const getSections = (instance, definition, isScreen) => { const getSections = (instance, definition, isScreen, tag) => {
const settings = definition?.settings ?? [] const settings = definition?.settings ?? []
const generalSettings = settings.filter(setting => !setting.section) const generalSettings = settings.filter(
const customSections = settings.filter(setting => setting.section) setting => !setting.section && setting.tag === tag
)
const customSections = settings.filter(
setting => setting.section && setting.tag === tag
)
let sections = [ let sections = [
{ ...(generalSettings?.length
name: "General", ? [
settings: generalSettings, {
}, name: "General",
settings: generalSettings,
},
]
: []),
...(customSections || []), ...(customSections || []),
] ]
@ -132,7 +146,7 @@
<div class="section-info"> <div class="section-info">
<InfoDisplay body={section.info} /> <InfoDisplay body={section.info} />
</div> </div>
{:else if idx === 0 && section.name === "General" && componentDefinition.info} {:else if idx === 0 && section.name === "General" && componentDefinition?.info && !tag}
<InfoDisplay <InfoDisplay
title={componentDefinition.name} title={componentDefinition.name}
body={componentDefinition.info} body={componentDefinition.info}
@ -181,7 +195,7 @@
</DetailSummary> </DetailSummary>
{/if} {/if}
{/each} {/each}
{#if componentDefinition?.block} {#if componentDefinition?.block && !tag}
<DetailSummary name="Eject" collapsible={false}> <DetailSummary name="Eject" collapsible={false}>
<EjectBlockButton /> <EjectBlockButton />
</DetailSummary> </DetailSummary>
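A small sketch of the tag split performed by getSections above: untagged settings render in the default pass, while settings tagged "style" only appear when the section is rendered with tag="style" (as the form manifest changes later in this diff do for label alignment and size). The sample settings are made up.

// Sketch of the filter inside getSections; the sample settings are illustrative.
interface Setting { key: string; section?: boolean; tag?: string }

const settings: Setting[] = [
  { key: "dataSource" },                  // untagged -> default settings pass
  { key: "labelPosition", tag: "style" }, // tagged   -> style pass only
  { key: "size", tag: "style" },
]

const generalFor = (tag?: string) =>
  settings.filter(setting => !setting.section && setting.tag === tag)

generalFor(undefined) // [{ key: "dataSource" }]
generalFor("style")   // labelPosition and size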

@ -1,10 +1,12 @@
<script> <script>
import StyleSection from "./StyleSection.svelte" import StyleSection from "./StyleSection.svelte"
import * as ComponentStyles from "./componentStyles" import * as ComponentStyles from "./componentStyles"
import ComponentSettingsSection from "./ComponentSettingsSection.svelte"
export let componentDefinition export let componentDefinition
export let componentInstance export let componentInstance
export let bindings export let bindings
export let componentBindings
const getStyles = def => { const getStyles = def => {
if (!def?.styles?.length) { if (!def?.styles?.length) {
@ -22,6 +24,19 @@
$: styles = getStyles(componentDefinition) $: styles = getStyles(componentDefinition)
</script> </script>
<!--
Load any general settings or sections tagged as "style"
-->
<ComponentSettingsSection
{componentInstance}
{componentDefinition}
isScreen={false}
showInstanceName={false}
{bindings}
{componentBindings}
tag="style"
/>
{#if styles?.length > 0} {#if styles?.length > 0}
{#each styles as style} {#each styles as style}
<StyleSection <StyleSection

@ -36,6 +36,7 @@
"heading", "heading",
"text", "text",
"button", "button",
"buttongroup",
"tag", "tag",
"spectrumcard", "spectrumcard",
"cardstat", "cardstat",

@ -12,6 +12,7 @@
import { capitalise } from "helpers" import { capitalise } from "helpers"
import { goto } from "@roxi/routify" import { goto } from "@roxi/routify"
let mode
let pendingScreen let pendingScreen
// Modal refs // Modal refs
@ -100,14 +101,15 @@
} }
// Handler for NewScreenModal // Handler for NewScreenModal
export const show = mode => { export const show = newMode => {
mode = newMode
selectedTemplates = null selectedTemplates = null
blankScreenUrl = null blankScreenUrl = null
screenMode = mode screenMode = mode
pendingScreen = null pendingScreen = null
screenAccessRole = Roles.BASIC screenAccessRole = Roles.BASIC
if (mode === "table") { if (mode === "table" || mode === "grid") {
datasourceModal.show() datasourceModal.show()
} else if (mode === "blank") { } else if (mode === "blank") {
let templates = getTemplates($tables.list) let templates = getTemplates($tables.list)
@ -123,6 +125,7 @@
// Handler for DatasourceModal confirmation, move to screen access select // Handler for DatasourceModal confirmation, move to screen access select
const confirmScreenDatasources = async ({ templates }) => { const confirmScreenDatasources = async ({ templates }) => {
console.log(templates)
selectedTemplates = templates selectedTemplates = templates
screenAccessRoleModal.show() screenAccessRoleModal.show()
} }
@ -177,6 +180,7 @@
<Modal bind:this={datasourceModal} autoFocus={false}> <Modal bind:this={datasourceModal} autoFocus={false}>
<DatasourceModal <DatasourceModal
{mode}
onConfirm={confirmScreenDatasources} onConfirm={confirmScreenDatasources}
initialScreens={!selectedTemplates ? [] : [...selectedTemplates]} initialScreens={!selectedTemplates ? [] : [...selectedTemplates]}
/> />

@ -7,6 +7,7 @@
import rowListScreen from "builderStore/store/screenTemplates/rowListScreen" import rowListScreen from "builderStore/store/screenTemplates/rowListScreen"
import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte" import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte"
export let mode
export let onCancel export let onCancel
export let onConfirm export let onConfirm
export let initialScreens = [] export let initialScreens = []
@ -24,7 +25,10 @@
screen => screen.resourceId !== resourceId screen => screen.resourceId !== resourceId
) )
} else { } else {
selectedScreens = [...selectedScreens, rowListScreen([datasource])[0]] selectedScreens = [
...selectedScreens,
rowListScreen([datasource], mode)[0],
]
} }
} }

Binary file not shown.

@ -3,6 +3,7 @@
import CreationPage from "components/common/CreationPage.svelte" import CreationPage from "components/common/CreationPage.svelte"
import blankImage from "./blank.png" import blankImage from "./blank.png"
import tableImage from "./table.png" import tableImage from "./table.png"
import gridImage from "./grid.png"
import CreateScreenModal from "./CreateScreenModal.svelte" import CreateScreenModal from "./CreateScreenModal.svelte"
import { store } from "builderStore" import { store } from "builderStore"
@ -43,6 +44,16 @@
<Body size="XS">View, edit and delete rows on a table</Body> <Body size="XS">View, edit and delete rows on a table</Body>
</div> </div>
</div> </div>
<div class="card" on:click={() => createScreenModal.show("grid")}>
<div class="image">
<img alt="" src={gridImage} />
</div>
<div class="text">
<Body size="S">Grid</Body>
<Body size="XS">View and manipulate rows on a grid</Body>
</div>
</div>
</div> </div>
</CreationPage> </CreationPage>
</div> </div>

@ -3,7 +3,6 @@
Heading, Heading,
Body, Body,
Button, Button,
ButtonGroup,
Table, Table,
Layout, Layout,
Modal, Modal,
@ -46,6 +45,10 @@
datasource: { datasource: {
type: "user", type: "user",
}, },
options: {
paginate: true,
limit: 10,
},
}) })
let groupsLoaded = !$licensing.groupsEnabled || $groups?.length let groupsLoaded = !$licensing.groupsEnabled || $groups?.length
@ -65,10 +68,12 @@
{ column: "role", component: RoleTableRenderer }, { column: "role", component: RoleTableRenderer },
] ]
let userData = [] let userData = []
let invitesLoaded = false
let pendingInvites = []
let parsedInvites = []
$: isOwner = $auth.accountPortalAccess && $admin.cloud $: isOwner = $auth.accountPortalAccess && $admin.cloud
$: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled $: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled
$: debouncedUpdateFetch(searchEmail) $: debouncedUpdateFetch(searchEmail)
$: schema = { $: schema = {
email: { email: {
@ -88,16 +93,6 @@
width: "1fr", width: "1fr",
}, },
} }
const getPendingSchema = tblSchema => {
if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
$: pendingSchema = getPendingSchema(schema) $: pendingSchema = getPendingSchema(schema)
$: userData = [] $: userData = []
$: inviteUsersResponse = { successful: [], unsuccessful: [] } $: inviteUsersResponse = { successful: [], unsuccessful: [] }
@ -121,9 +116,15 @@
} }
}) })
} }
let invitesLoaded = false
let pendingInvites = [] const getPendingSchema = tblSchema => {
let parsedInvites = [] if (!tblSchema) {
return {}
}
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
pendingSchema.email.displayName = "Pending Invites"
return pendingSchema
}
const invitesToSchema = invites => { const invitesToSchema = invites => {
return invites.map(invite => { return invites.map(invite => {
@ -143,7 +144,9 @@
const updateFetch = email => { const updateFetch = email => {
fetch.update({ fetch.update({
query: { query: {
email, string: {
email,
},
}, },
}) })
} }
@ -296,7 +299,7 @@
{/if} {/if}
<div class="controls"> <div class="controls">
{#if !readonly} {#if !readonly}
<ButtonGroup> <div class="buttons">
<Button <Button
disabled={readonly} disabled={readonly}
on:click={$licensing.userLimitReached on:click={$licensing.userLimitReached
@ -315,7 +318,7 @@
> >
Import Import
</Button> </Button>
</ButtonGroup> </div>
{:else} {:else}
<ScimBanner /> <ScimBanner />
{/if} {/if}
@ -390,12 +393,15 @@
</Modal> </Modal>
<style> <style>
.buttons {
display: flex;
gap: 10px;
}
.pagination { .pagination {
display: flex; display: flex;
flex-direction: row; flex-direction: row;
justify-content: flex-end; justify-content: flex-end;
} }
.controls { .controls {
display: flex; display: flex;
flex-direction: row; flex-direction: row;
@ -403,7 +409,6 @@
align-items: center; align-items: center;
gap: var(--spacing-xl); gap: var(--spacing-xl);
} }
.controls-right { .controls-right {
display: flex; display: flex;
flex-direction: row; flex-direction: row;
@ -411,7 +416,6 @@
align-items: center; align-items: center;
gap: var(--spacing-xl); gap: var(--spacing-xl);
} }
.controls-right :global(.spectrum-Search) { .controls-right :global(.spectrum-Search) {
width: 200px; width: 200px;
} }

@ -258,6 +258,186 @@
"description": "Contains your app screens", "description": "Contains your app screens",
"static": true "static": true
}, },
"buttongroup": {
"name": "Button group",
"icon": "Button",
"hasChildren": false,
"settings": [
{
"section": true,
"name": "Buttons",
"settings": [
{
"type": "buttonConfiguration",
"key": "buttons",
"nested": true,
"defaultValue": [
{
"type": "cta",
"text": "Button 1"
},
{
"type": "primary",
"text": "Button 2"
}
]
}
]
},
{
"section": true,
"name": "Layout",
"settings": [
{
"type": "select",
"label": "Direction",
"key": "direction",
"showInBar": true,
"barStyle": "buttons",
"options": [
{
"label": "Column",
"value": "column",
"barIcon": "ViewColumn",
"barTitle": "Column layout"
},
{
"label": "Row",
"value": "row",
"barIcon": "ViewRow",
"barTitle": "Row layout"
}
],
"defaultValue": "row"
},
{
"type": "select",
"label": "Horiz. align",
"key": "hAlign",
"showInBar": true,
"barStyle": "buttons",
"options": [
{
"label": "Left",
"value": "left",
"barIcon": "AlignLeft",
"barTitle": "Align left"
},
{
"label": "Center",
"value": "center",
"barIcon": "AlignCenter",
"barTitle": "Align center"
},
{
"label": "Right",
"value": "right",
"barIcon": "AlignRight",
"barTitle": "Align right"
},
{
"label": "Stretch",
"value": "stretch",
"barIcon": "MoveLeftRight",
"barTitle": "Align stretched horizontally"
}
],
"defaultValue": "left"
},
{
"type": "select",
"label": "Vert. align",
"key": "vAlign",
"showInBar": true,
"barStyle": "buttons",
"options": [
{
"label": "Top",
"value": "top",
"barIcon": "AlignTop",
"barTitle": "Align top"
},
{
"label": "Middle",
"value": "middle",
"barIcon": "AlignMiddle",
"barTitle": "Align middle"
},
{
"label": "Bottom",
"value": "bottom",
"barIcon": "AlignBottom",
"barTitle": "Align bottom"
},
{
"label": "Stretch",
"value": "stretch",
"barIcon": "MoveUpDown",
"barTitle": "Align stretched vertically"
}
],
"defaultValue": "top"
},
{
"type": "select",
"label": "Size",
"key": "size",
"showInBar": true,
"barStyle": "buttons",
"options": [
{
"label": "Shrink",
"value": "shrink",
"barIcon": "Minimize",
"barTitle": "Shrink container"
},
{
"label": "Grow",
"value": "grow",
"barIcon": "Maximize",
"barTitle": "Grow container"
}
],
"defaultValue": "shrink"
},
{
"type": "select",
"label": "Gap",
"key": "gap",
"showInBar": true,
"barStyle": "picker",
"options": [
{
"label": "None",
"value": "N"
},
{
"label": "Small",
"value": "S"
},
{
"label": "Medium",
"value": "M"
},
{
"label": "Large",
"value": "L"
}
],
"defaultValue": "M"
},
{
"type": "boolean",
"label": "Wrap",
"key": "wrap",
"showInBar": true,
"barIcon": "ModernGridView",
"barTitle": "Wrap"
}
]
}
]
},
"button": { "button": {
"name": "Button", "name": "Button",
"description": "A basic html button that is ready for styling", "description": "A basic html button that is ready for styling",
@ -2409,7 +2589,6 @@
"key": "disabled", "key": "disabled",
"defaultValue": false "defaultValue": false
}, },
{ {
"type": "text", "type": "text",
"label": "Initial form step", "label": "Initial form step",
@ -5288,17 +5467,17 @@
}, },
"settings": [ "settings": [
{ {
"type": "select", "type": "table",
"label": "Data",
"key": "dataSource"
},
{
"type": "radio",
"label": "Type", "label": "Type",
"key": "actionType", "key": "actionType",
"options": ["Create", "Update", "View"], "options": ["Create", "Update", "View"],
"defaultValue": "Create" "defaultValue": "Create"
}, },
{
"type": "table",
"label": "Data",
"key": "dataSource"
},
{ {
"type": "text", "type": "text",
"label": "Title", "label": "Title",
@ -5329,13 +5508,37 @@
}, },
{ {
"type": "text", "type": "text",
"label": "Empty text", "label": "No rows found",
"key": "noRowsMessage", "key": "noRowsMessage",
"defaultValue": "We couldn't find a row to display", "defaultValue": "We couldn't find a row to display",
"nested": true "nested": true
} }
] ]
}, },
{
"section": true,
"name": "Fields",
"settings": [
{
"type": "fieldConfiguration",
"key": "fields",
"nested": true,
"resetOn": "dataSource",
"selectAllFields": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false,
"dependsOn": {
"setting": "actionType",
"value": "View",
"invert": true
}
}
]
},
{ {
"section": true, "section": true,
"name": "Buttons", "name": "Buttons",
@ -5388,60 +5591,38 @@
] ]
}, },
{ {
"section": true, "tag": "style",
"name": "Fields", "type": "select",
"settings": [ "label": "Align labels",
"key": "labelPosition",
"defaultValue": "left",
"options": [
{ {
"type": "select", "label": "Left",
"label": "Align labels", "value": "left"
"key": "labelPosition",
"defaultValue": "left",
"options": [
{
"label": "Left",
"value": "left"
},
{
"label": "Above",
"value": "above"
}
]
}, },
{ {
"type": "select", "label": "Above",
"label": "Size", "value": "above"
"key": "size",
"options": [
{
"label": "Medium",
"value": "spectrum--medium"
},
{
"label": "Large",
"value": "spectrum--large"
}
],
"defaultValue": "spectrum--medium"
},
{
"type": "fieldConfiguration",
"key": "fields",
"nested": true,
"resetOn": "dataSource",
"selectAllFields": true
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false,
"dependsOn": {
"setting": "actionType",
"value": "View",
"invert": true
}
} }
] ]
},
{
"tag": "style",
"type": "select",
"label": "Size",
"key": "size",
"options": [
{
"label": "Medium",
"value": "spectrum--medium"
},
{
"label": "Large",
"value": "spectrum--large"
}
],
"defaultValue": "spectrum--medium"
} }
], ],
"context": [ "context": [

@ -0,0 +1,37 @@
<script>
import BlockComponent from "../BlockComponent.svelte"
import Block from "../Block.svelte"
export let buttons = []
export let direction
export let hAlign
export let vAlign
export let gap = "S"
</script>
<Block>
<BlockComponent
type="container"
props={{
direction,
hAlign,
vAlign,
gap,
wrap: true,
}}
>
{#each buttons as { text, type, quiet, disabled, onClick, size }}
<BlockComponent
type="button"
props={{
text: text || "Button",
onClick,
type,
quiet,
disabled,
size,
}}
/>
{/each}
</BlockComponent>
</Block>
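For reference, a plausible shape for the buttons prop consumed above, mirroring the buttongroup manifest defaults added later in this diff; quiet and disabled are optional and shown only as illustration.

// Illustrative prop value for the ButtonGroup block above.
const buttons = [
  { text: "Button 1", type: "cta" },
  { text: "Button 2", type: "primary", quiet: true },
]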

@ -220,15 +220,11 @@
</BlockComponent> </BlockComponent>
{/if} {/if}
</BlockComponent> </BlockComponent>
{#if description}
<BlockComponent
type="text"
props={{ text: description }}
order={1}
/>
{/if}
</BlockComponent> </BlockComponent>
{/if} {/if}
{#if description}
<BlockComponent type="text" props={{ text: description }} order={1} />
{/if}
{#key fields} {#key fields}
<BlockComponent type="fieldgroup" props={{ labelPosition }} order={1}> <BlockComponent type="fieldgroup" props={{ labelPosition }} order={1}>
{#each fields as field, idx} {#each fields as field, idx}

@ -19,6 +19,7 @@ export { default as dataprovider } from "./DataProvider.svelte"
export { default as divider } from "./Divider.svelte" export { default as divider } from "./Divider.svelte"
export { default as screenslot } from "./ScreenSlot.svelte" export { default as screenslot } from "./ScreenSlot.svelte"
export { default as button } from "./Button.svelte" export { default as button } from "./Button.svelte"
export { default as buttongroup } from "./ButtonGroup.svelte"
export { default as repeater } from "./Repeater.svelte" export { default as repeater } from "./Repeater.svelte"
export { default as text } from "./Text.svelte" export { default as text } from "./Text.svelte"
export { default as layout } from "./Layout.svelte" export { default as layout } from "./Layout.svelte"

@ -103,7 +103,6 @@ const fetchRowHandler = async action => {
const deleteRowHandler = async action => { const deleteRowHandler = async action => {
const { tableId, rowId: rowConfig, notificationOverride } = action.parameters const { tableId, rowId: rowConfig, notificationOverride } = action.parameters
if (tableId && rowConfig) { if (tableId && rowConfig) {
try { try {
let requestConfig let requestConfig
@ -129,9 +128,11 @@ const deleteRowHandler = async action => {
requestConfig = [parsedRowConfig] requestConfig = [parsedRowConfig]
} else if (Array.isArray(parsedRowConfig)) { } else if (Array.isArray(parsedRowConfig)) {
requestConfig = parsedRowConfig requestConfig = parsedRowConfig
} else if (Number.isInteger(parsedRowConfig)) {
requestConfig = [String(parsedRowConfig)]
} }
if (!requestConfig.length) { if (!requestConfig && !parsedRowConfig) {
notificationStore.actions.warning("No valid rows were supplied") notificationStore.actions.warning("No valid rows were supplied")
return false return false
} }
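A compact sketch of the normalisation above, assuming parsedRowConfig is whatever the rowId binding resolved to and that the earlier (unshown) branch handles a plain string id.

// Sketch only: mirrors the branches in deleteRowHandler above.
function normaliseRowIds(parsed: unknown): string[] | undefined {
  if (typeof parsed === "string") return [parsed]
  if (Array.isArray(parsed)) return parsed
  if (Number.isInteger(parsed)) return [String(parsed)] // new branch: a single numeric id
  return undefined
}

normaliseRowIds("ro_123")   // ["ro_123"]
normaliseRowIds(["a", "b"]) // ["a", "b"]
normaliseRowIds(42)         // ["42"]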

@ -140,4 +140,13 @@ export const buildTableEndpoints = API => ({
}, },
}) })
}, },
migrateColumn: async ({ tableId, oldColumn, newColumn }) => {
return await API.post({
url: `/api/tables/${tableId}/migrate`,
body: {
oldColumn,
newColumn,
},
})
},
}) })
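A hedged usage sketch of the new wrapper above; the table id, column names and type/subtype strings are assumptions, and the real values come from the grid definition and schema (see MigrationModal later in this diff).

// Illustrative call only: ids, names and type/subtype values are made up.
// `API` is the endpoint wrapper produced by buildTableEndpoints above.
async function migrateAssigneeColumn(API: {
  migrateColumn: (args: object) => Promise<unknown>
}) {
  // POSTs to /api/tables/ta_example/migrate with { oldColumn, newColumn } as the body
  return API.migrateColumn({
    tableId: "ta_example",
    oldColumn: { name: "Assignee", type: "link" },
    newColumn: { name: "Assignee migrated", type: "bb_reference", subtype: "user" },
  })
}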

@ -55,7 +55,7 @@
try { try {
return await API.uploadBuilderAttachment(data) return await API.uploadBuilderAttachment(data)
} catch (error) { } catch (error) {
$notifications.error("Failed to upload attachment") $notifications.error(error.message || "Failed to upload attachment")
return [] return []
} }
} }

@ -1,11 +1,20 @@
<script> <script>
import { getContext, onMount, tick } from "svelte" import { getContext, onMount, tick } from "svelte"
import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core" import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core"
import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui" import {
Icon,
Popover,
Menu,
MenuItem,
clickOutside,
Modal,
} from "@budibase/bbui"
import GridCell from "./GridCell.svelte" import GridCell from "./GridCell.svelte"
import { getColumnIcon } from "../lib/utils" import { getColumnIcon } from "../lib/utils"
import MigrationModal from "../controls/MigrationModal.svelte"
import { debounce } from "../../../utils/utils" import { debounce } from "../../../utils/utils"
import { FieldType, FormulaTypes } from "@budibase/types" import { FieldType, FormulaTypes } from "@budibase/types"
import { TableNames } from "../../../constants"
export let column export let column
export let idx export let idx
@ -45,6 +54,7 @@
let editIsOpen = false let editIsOpen = false
let timeout let timeout
let popover let popover
let migrationModal
let searchValue let searchValue
let input let input
@ -189,6 +199,11 @@
}) })
} }
const openMigrationModal = () => {
migrationModal.show()
open = false
}
const startSearching = async () => { const startSearching = async () => {
$focusedCellId = null $focusedCellId = null
searchValue = "" searchValue = ""
@ -224,6 +239,10 @@
onMount(() => subscribe("close-edit-column", cancelEdit)) onMount(() => subscribe("close-edit-column", cancelEdit))
</script> </script>
<Modal bind:this={migrationModal}>
<MigrationModal {column} />
</Modal>
<div <div
class="header-cell" class="header-cell"
class:open class:open
@ -363,6 +382,11 @@
> >
Hide column Hide column
</MenuItem> </MenuItem>
{#if $config.canEditColumns && column.schema.type === "link" && column.schema.tableId === TableNames.USERS}
<MenuItem icon="User" on:click={openMigrationModal}>
Migrate to user column
</MenuItem>
{/if}
</Menu> </Menu>
{/if} {/if}
</Popover> </Popover>

@ -0,0 +1,73 @@
<script>
import {
ModalContent,
notifications,
Input,
InlineAlert,
} from "@budibase/bbui"
import { getContext } from "svelte"
import { ValidColumnNameRegex } from "@budibase/shared-core"
import { FieldSubtype, FieldType, RelationshipType } from "@budibase/types"
const { API, definition, rows } = getContext("grid")
export let column
let newColumnName = `${column.schema.name} migrated`
$: error = checkNewColumnName(newColumnName)
const checkNewColumnName = newColumnName => {
if (newColumnName === "") {
return "Column name can't be empty."
}
if (newColumnName in $definition.schema) {
return "New column name can't be the same as an existing column name."
}
if (newColumnName.match(ValidColumnNameRegex) === null) {
return "Illegal character; must be alpha-numeric."
}
}
const migrateUserColumn = async () => {
let subtype = FieldSubtype.USERS
if (column.schema.relationshipType === RelationshipType.ONE_TO_MANY) {
subtype = FieldSubtype.USER
}
try {
await API.migrateColumn({
tableId: $definition._id,
oldColumn: column.schema,
newColumn: {
name: newColumnName,
type: FieldType.BB_REFERENCE,
subtype,
},
})
notifications.success("Column migrated")
} catch (e) {
notifications.error(`Failed to migrate: ${e.message}`)
}
await rows.actions.refreshData()
}
</script>
<ModalContent
title="Migrate column"
confirmText="Continue"
cancelText="Cancel"
onConfirm={migrateUserColumn}
disabled={error !== undefined}
size="M"
>
This operation will kick off a migration of the column "{column.schema.name}"
to a new column, with the name provided - this operation may take a moment to
complete.
<InlineAlert
type="error"
header="Are you sure?"
message="This will leave bindings which utilised the user relationship column in a state where they will need to be updated to use the new column instead."
/>
<Input bind:value={newColumnName} label="New column name" {error} />
</ModalContent>

@ -1 +1 @@
Subproject commit d24c0dc3a30014cbe61860252aa48104cad36376 Subproject commit 3820c0c93a3e448e10a60a9feb5396844b537ca8

@ -38,7 +38,7 @@ RUN apt update && apt upgrade -y \
COPY package.json . COPY package.json .
COPY dist/yarn.lock . COPY dist/yarn.lock .
RUN yarn install --production=true \ RUN yarn install --production=true --network-timeout 1000000 \
# Remove unneeded data from file system to reduce image size # Remove unneeded data from file system to reduce image size
&& yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \ && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
&& rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp

@ -44,7 +44,7 @@ RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
WORKDIR /string-templates WORKDIR /string-templates
COPY packages/string-templates/package.json package.json COPY packages/string-templates/package.json package.json
RUN ../scripts/removeWorkspaceDependencies.sh package.json RUN ../scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000
COPY packages/string-templates . COPY packages/string-templates .
@ -57,7 +57,7 @@ COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
RUN ./scripts/removeWorkspaceDependencies.sh package.json RUN ./scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true \ RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000 \
# Remove unneeded data from file system to reduce image size # Remove unneeded data from file system to reduce image size
&& yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python jq \ && yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python jq \
&& rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp && rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp

@ -70,6 +70,13 @@ module AwsMock {
Contents: {}, Contents: {},
}) })
) )
// @ts-ignore
this.getObject = jest.fn(
response({
Body: "",
})
)
} }
aws.DynamoDB = { DocumentClient } aws.DynamoDB = { DocumentClient }

@ -18,7 +18,6 @@
"test": "bash scripts/test.sh", "test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit", "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch", "test:watch": "jest --watch",
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
"run:docker": "node dist/index.js", "run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js", "run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up", "dev:stack:up": "node scripts/dev/manage.js up",

@ -2,7 +2,7 @@ version: "3.8"
services: services:
db: db:
container_name: postgres container_name: postgres
image: postgres:15 image: postgres:15-bullseye
restart: unless-stopped restart: unless-stopped
environment: environment:
POSTGRES_USER: root POSTGRES_USER: root

@ -32,11 +32,8 @@ import {
tenancy, tenancy,
users, users,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { USERS_TABLE_SCHEMA } from "../../constants" import { USERS_TABLE_SCHEMA, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
import { import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
buildDefaultDocs,
DEFAULT_BB_DATASOURCE_ID,
} from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests" import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import { stringToReadStream } from "../../utilities" import { stringToReadStream } from "../../utilities"
import { doesUserHaveLock } from "../../utilities/redis" import { doesUserHaveLock } from "../../utilities/redis"

@ -12,7 +12,6 @@ import {
CreateDatasourceResponse, CreateDatasourceResponse,
Datasource, Datasource,
DatasourcePlus, DatasourcePlus,
ExternalTable,
FetchDatasourceInfoRequest, FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse, FetchDatasourceInfoResponse,
IntegrationBase, IntegrationBase,
@ -59,7 +58,7 @@ async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
const connector = (await getConnector(datasource)) as DatasourcePlus const connector = (await getConnector(datasource)) as DatasourcePlus
return await connector.buildSchema( return await connector.buildSchema(
datasource._id!, datasource._id!,
datasource.entities! as Record<string, ExternalTable> datasource.entities! as Record<string, Table>
) )
} }

@ -1,12 +1,12 @@
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import { APP_PREFIX, DocumentType } from "../../../db/utils" import { APP_PREFIX, DocumentType } from "../../../db/utils"
export async function addRev( export async function addRev(
body: { _id?: string; _rev?: string }, body: { _id?: string; _rev?: string },
tableId?: string tableId?: string
) { ) {
if (!body._id || (tableId && isExternalTable(tableId))) { if (!body._id || (tableId && isExternalTableID(tableId))) {
return body return body
} }
let id = body._id let id = body._id

@ -1,4 +1,10 @@
import { context, db as dbCore, events, roles } from "@budibase/backend-core" import {
context,
db as dbCore,
events,
roles,
Header,
} from "@budibase/backend-core"
import { getUserMetadataParams, InternalTables } from "../../db/utils" import { getUserMetadataParams, InternalTables } from "../../db/utils"
import { Database, Role, UserCtx, UserRoles } from "@budibase/types" import { Database, Role, UserCtx, UserRoles } from "@budibase/types"
import { sdk as sharedSdk } from "@budibase/shared-core" import { sdk as sharedSdk } from "@budibase/shared-core"
@ -143,4 +149,20 @@ export async function accessible(ctx: UserCtx) {
} else { } else {
ctx.body = await roles.getUserRoleIdHierarchy(roleId!) ctx.body = await roles.getUserRoleIdHierarchy(roleId!)
} }
// If a custom role is provided in the header, filter out higher level roles
const roleHeader = ctx.header?.[Header.PREVIEW_ROLE] as string
if (roleHeader && !Object.keys(roles.BUILTIN_ROLE_IDS).includes(roleHeader)) {
const inherits = (await roles.getRole(roleHeader))?.inherits
const orderedRoles = ctx.body.reverse()
let filteredRoles = [roleHeader]
for (let role of orderedRoles) {
filteredRoles = [role, ...filteredRoles]
if (role === inherits) {
break
}
}
filteredRoles.pop()
ctx.body = [roleHeader, ...filteredRoles]
}
} }
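A worked illustration of the preview-role filtering above, using a hypothetical hierarchy and a custom role that inherits POWER; the ordering of getUserRoleIdHierarchy's output is an assumption here.

// Illustration only: hierarchy and role names are made up.
const hierarchy = ["ADMIN", "POWER", "BASIC", "PUBLIC"] // hypothetical ctx.body
const previewRole = "custom_preview"                    // from the preview-role header
const inherits = "POWER"                                // the custom role's parent

const ordered = [...hierarchy].reverse()
let filtered: string[] = [previewRole]
for (const role of ordered) {
  filtered = [role, ...filtered]
  if (role === inherits) break // stop once the inherited role is included
}
filtered.pop() // drop the duplicated preview role from the tail
const accessibleRoles = [previewRole, ...filtered]
// -> ["custom_preview", "POWER", "BASIC", "PUBLIC"]; ADMIN is filtered out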

@ -1,7 +1,7 @@
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import * as internal from "./internal" import * as internal from "./internal"
import * as external from "./external" import * as external from "./external"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import { import {
Ctx, Ctx,
UserCtx, UserCtx,
@ -30,7 +30,7 @@ import { Format } from "../view/exporters"
export * as views from "./views" export * as views from "./views"
function pickApi(tableId: any) { function pickApi(tableId: any) {
if (isExternalTable(tableId)) { if (isExternalTableID(tableId)) {
return external return external
} }
return internal return internal
@ -227,7 +227,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
export async function validate(ctx: Ctx<Row, ValidateResponse>) { export async function validate(ctx: Ctx<Row, ValidateResponse>) {
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
// external tables are hard to validate currently // external tables are hard to validate currently
if (isExternalTable(tableId)) { if (isExternalTableID(tableId)) {
ctx.body = { valid: true, errors: {} } ctx.body = { valid: true, errors: {} }
} else { } else {
ctx.body = await sdk.rows.utils.validate({ ctx.body = await sdk.rows.utils.validate({

@ -1,3 +1,5 @@
import { ValidFileExtensions } from "@budibase/shared-core"
require("svelte/register") require("svelte/register")
import { join } from "../../../utilities/centralPath" import { join } from "../../../utilities/centralPath"
@ -11,34 +13,21 @@ import {
} from "../../../utilities/fileSystem" } from "../../../utilities/fileSystem"
import env from "../../../environment" import env from "../../../environment"
import { DocumentType } from "../../../db/utils" import { DocumentType } from "../../../db/utils"
import { context, objectStore, utils, configs } from "@budibase/backend-core" import {
context,
objectStore,
utils,
configs,
BadRequestError,
} from "@budibase/backend-core"
import AWS from "aws-sdk" import AWS from "aws-sdk"
import fs from "fs" import fs from "fs"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as pro from "@budibase/pro" import * as pro from "@budibase/pro"
import { App, Ctx } from "@budibase/types" import { App, Ctx, ProcessAttachmentResponse, Upload } from "@budibase/types"
const send = require("koa-send") const send = require("koa-send")
async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
const response = await objectStore.upload({
bucket,
metadata,
filename: s3Key,
path: file.path,
type: file.type,
})
// don't store a URL, work this out on the way out as the URL could change
return {
size: file.size,
name: file.name,
url: objectStore.getAppFileUrl(s3Key),
extension: [...file.name.split(".")].pop(),
key: response.Key,
}
}
export const toggleBetaUiFeature = async function (ctx: Ctx) { export const toggleBetaUiFeature = async function (ctx: Ctx) {
const cookieName = `beta:${ctx.params.feature}` const cookieName = `beta:${ctx.params.feature}`
@ -72,23 +61,58 @@ export const serveBuilder = async function (ctx: Ctx) {
await send(ctx, ctx.file, { root: builderPath }) await send(ctx, ctx.file, { root: builderPath })
} }
export const uploadFile = async function (ctx: Ctx) { export const uploadFile = async function (
ctx: Ctx<{}, ProcessAttachmentResponse>
) {
const file = ctx.request?.files?.file const file = ctx.request?.files?.file
if (!file) {
throw new BadRequestError("No file provided")
}
let files = file && Array.isArray(file) ? Array.from(file) : [file] let files = file && Array.isArray(file) ? Array.from(file) : [file]
const uploads = files.map(async (file: any) => { ctx.body = await Promise.all(
const fileExtension = [...file.name.split(".")].pop() files.map(async file => {
// filenames converted to UUIDs so they are unique if (!file.name) {
const processedFileName = `${uuid.v4()}.${fileExtension}` throw new BadRequestError(
"Attempted to upload a file without a filename"
)
}
return prepareUpload({ const extension = [...file.name.split(".")].pop()
file, if (!extension) {
s3Key: `${context.getProdAppId()}/attachments/${processedFileName}`, throw new BadRequestError(
bucket: ObjectStoreBuckets.APPS, `File "${file.name}" has no extension, an extension is required to upload a file`
)
}
if (!env.SELF_HOSTED && !ValidFileExtensions.includes(extension)) {
throw new BadRequestError(
`File "${file.name}" has an invalid extension: "${extension}"`
)
}
// filenames converted to UUIDs so they are unique
const processedFileName = `${uuid.v4()}.${extension}`
const s3Key = `${context.getProdAppId()}/attachments/${processedFileName}`
const response = await objectStore.upload({
bucket: ObjectStoreBuckets.APPS,
filename: s3Key,
path: file.path,
type: file.type,
})
return {
size: file.size,
name: file.name,
url: objectStore.getAppFileUrl(s3Key),
extension,
key: response.Key,
}
}) })
}) )
ctx.body = await Promise.all(uploads)
} }
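A minimal sketch of how a client might exercise the rewritten upload handler above, assuming it is mounted at /api/attachments/process (the path used by the new test later in this diff) and reads the multipart field named "file"; the wrapper itself is hypothetical and relies on the fetch, FormData and Blob globals available in Node 18.

// Hypothetical client-side upload against the attachment processing endpoint.
async function uploadAttachment(
  baseUrl: string,
  fileName: string,
  data: Buffer,
  headers: Record<string, string> = {}
) {
  const form = new FormData()
  // The filename must carry an extension; in cloud (non self-hosted) mode only
  // extensions in ValidFileExtensions are accepted, otherwise a 400 is returned.
  form.append("file", new Blob([data]), fileName)
  const res = await fetch(`${baseUrl}/api/attachments/process`, {
    method: "POST",
    headers,
    body: form,
  })
  if (!res.ok) {
    throw new Error(`Upload failed with status ${res.status}`)
  }
  // On success the body is an array of { size, name, url, extension, key } objects.
  return res.json()
}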
export const deleteObjects = async function (ctx: Ctx) { export const deleteObjects = async function (ctx: Ctx) {

View File

@ -5,18 +5,27 @@ import {
isSchema, isSchema,
validate as validateSchema, validate as validateSchema,
} from "../../../utilities/schema" } from "../../../utilities/schema"
import { isExternalTable, isSQL } from "../../../integrations/utils" import {
isExternalTable,
isExternalTableID,
isSQL,
} from "../../../integrations/utils"
import { events } from "@budibase/backend-core" import { events } from "@budibase/backend-core"
import { import {
BulkImportRequest, BulkImportRequest,
BulkImportResponse, BulkImportResponse,
DocumentType,
FetchTablesResponse, FetchTablesResponse,
MigrateRequest,
MigrateResponse,
Row,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
Table, Table,
TableResponse, TableResponse,
TableSourceType,
UserCtx, UserCtx,
Row, SEPARATOR,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv" import { jsonFromCsvString } from "../../../utilities/csv"
@ -24,12 +33,10 @@ import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash" import { cloneDeep, isEqual } from "lodash"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) { function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) { if (table && isExternalTable(table)) {
tableId = table._id
}
if (table && table.type === "external") {
return external return external
} else if (tableId && isExternalTable(tableId)) { }
if (tableId && isExternalTableID(tableId)) {
return external return external
} }
return internal return internal
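The routing above depends on telling external table IDs apart from internal ones. A rough sketch of that idea, purely for illustration and assuming external table IDs are prefixed with their datasource document ID (the real check is isExternalTableID from ../../../integrations/utils):

// Illustrative only: classify a table ID by its prefix.
const DATASOURCE_DOC_PREFIX = "datasource" // assumed document prefix

function looksLikeExternalTableId(tableId: string): boolean {
  // Internal tables use IDs such as "ta_users"; external tables are scoped
  // to the datasource they belong to.
  return tableId.startsWith(DATASOURCE_DOC_PREFIX)
}

With that distinction in hand, pickApi only needs the table or its ID to choose between the internal and external implementations.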
@ -46,8 +53,8 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
if (entities) { if (entities) {
return Object.values(entities).map<Table>((entity: Table) => ({ return Object.values(entities).map<Table>((entity: Table) => ({
...entity, ...entity,
type: "external", sourceType: TableSourceType.EXTERNAL,
sourceId: datasource._id, sourceId: datasource._id!,
sql: isSQL(datasource), sql: isSQL(datasource),
})) }))
} else { } else {
@ -158,3 +165,19 @@ export async function validateExistingTableImport(ctx: UserCtx) {
ctx.status = 422 ctx.status = 422
} }
} }
export async function migrate(ctx: UserCtx<MigrateRequest, MigrateResponse>) {
const { oldColumn, newColumn } = ctx.request.body
let tableId = ctx.params.tableId as string
const table = await sdk.tables.getTable(tableId)
let result = await sdk.tables.migrate(table, oldColumn, newColumn)
for (let table of result.tablesUpdated) {
builderSocket?.emitTableUpdate(ctx, table, {
includeOriginator: true,
})
}
ctx.status = 200
ctx.body = { message: `Column ${oldColumn.name} migrated.` }
}
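A minimal sketch of calling this new endpoint from a script or test, assuming the MigrateRequest body is simply { oldColumn, newColumn } as destructured above and that the route added later in this diff (POST /api/tables/:tableId/migrate) is used; the base URL and headers are placeholders.

// Hypothetical request against the column migration endpoint.
async function migrateColumn(
  appUrl: string,
  tableId: string,
  oldColumn: unknown,
  newColumn: unknown,
  headers: Record<string, string> = {}
) {
  const res = await fetch(`${appUrl}/api/tables/${tableId}/migrate`, {
    method: "POST",
    headers: { ...headers, "Content-Type": "application/json" },
    // MigrateRequest mirrors the destructuring in the controller above.
    body: JSON.stringify({ oldColumn, newColumn }),
  })
  // On success the controller responds with { message: "Column <name> migrated." }.
  return res.json()
}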

View File

@ -7,6 +7,7 @@ import {
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
Table, Table,
TableSourceType,
UserCtx, UserCtx,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
@ -16,10 +17,11 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
let tableToSave: Table & { let tableToSave: Table & {
_rename?: RenameColumn _rename?: RenameColumn
} = { } = {
type: "table",
_id: generateTableID(), _id: generateTableID(),
views: {},
...rest, ...rest,
type: "table",
sourceType: TableSourceType.INTERNAL,
views: {},
} }
const renaming = tableToSave._rename const renaming = tableToSave._rename
delete tableToSave._rename delete tableToSave._rename
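The reordering above is load-bearing: properties written after a spread overwrite whatever the spread brought in, so placing type, sourceType and views after ...rest means the request body can no longer override them. A small illustrative sketch with placeholder values:

// Later properties win over earlier ones, including values pulled in by a spread.
const fromRequest = { name: "orders", type: "external", views: { stale: {} } }

const tableToSave = {
  _id: "ta_example", // placeholder for generateTableID()
  ...fromRequest,
  type: "table", // always wins over fromRequest.type
  sourceType: "internal", // placeholder for TableSourceType.INTERNAL
  views: {},
}

console.log(tableToSave.type) // "table"
console.log(tableToSave.views) // {}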

View File

@ -11,128 +11,24 @@ const { PermissionType, PermissionLevel } = permissions
const router: Router = new Router() const router: Router = new Router()
router router
/**
* @api {get} /api/:sourceId/:rowId/enrich Get an enriched row
* @apiName Get an enriched row
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This API is only useful when dealing with rows that have relationships.
* Normally when a row is returned from the API, relationships will only have the structure
* `{ primaryDisplay: "name", _id: ... }` but this call will return the full related rows
* for each relationship instead.
*
* @apiParam {string} rowId The ID of the row which is to be retrieved and enriched.
*
* @apiSuccess {object} row The response body will be the enriched row.
*/
.get( .get(
"/api/:sourceId/:rowId/enrich", "/api/:sourceId/:rowId/enrich",
paramSubResource("sourceId", "rowId"), paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetchEnrichedRow rowController.fetchEnrichedRow
) )
/**
* @api {get} /api/:sourceId/rows Get all rows in a table
* @apiName Get all rows in a table
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This is a deprecated endpoint that should not be used anymore; use the search endpoint instead.
* This endpoint gets all of the rows within the specified table - it is not heavily used
* due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
* will simply stop.
*
* @apiParam {string} sourceId The ID of the table to retrieve all rows within.
*
* @apiSuccess {object[]} rows The response body will be an array of all rows found.
*/
.get( .get(
"/api/:sourceId/rows", "/api/:sourceId/rows",
paramResource("sourceId"), paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetch rowController.fetch
) )
/**
* @api {get} /api/:sourceId/rows/:rowId Retrieve a single row
* @apiName Retrieve a single row
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
* a row by anything other than its _id field, use the search endpoint.
*
* @apiParam {string} sourceId The ID of the table to retrieve a row from.
* @apiParam {string} rowId The ID of the row to retrieve.
*
* @apiSuccess {object} body The response body will be the row that was found.
*/
.get( .get(
"/api/:sourceId/rows/:rowId", "/api/:sourceId/rows/:rowId",
paramSubResource("sourceId", "rowId"), paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.find rowController.find
) )
/**
* @api {post} /api/:sourceId/search Search for rows in a table
* @apiName Search for rows in a table
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This is the primary method of accessing rows in Budibase; the data provider
* and data UI in the builder are built atop it. All filtering, sorting and pagination are
* handled through this endpoint, for internal and external (datasource plus, e.g. SQL) tables.
*
* @apiParam {string} sourceId The ID of the table to retrieve rows from.
*
* @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
* defaults to false.
* @apiParam (Body) {object} [query] This contains a set of filters which should be applied; if none
* are specified then the request will be unfiltered. An example with all of the possible query
* options has been supplied below.
* @apiParam (Body) {number} [limit] This sets a limit for the number of rows that will be returned;
* this will be implemented at the database level, where supported, for performance reasons. This
* is useful when paginating to set exactly how many rows are returned per page.
* @apiParam (Body) {string} [bookmark] If pagination is enabled then a bookmark will be returned
* with each successful search request; this should be supplied back to get the next page.
* @apiParam (Body) {object} [sort] If sort is desired this should contain the name of the column to
* sort on.
* @apiParam (Body) {string} [sortOrder] If sort is enabled then this can be either "descending" or
* "ascending" as required.
* @apiParam (Body) {string} [sortType] If sort is enabled then you must specify the type of sort
* being used, either "string" or "number". This is only used for internal tables.
*
* @apiParamExample {json} Example:
* {
* "tableId": "ta_70260ff0b85c467ca74364aefc46f26d",
* "query": {
* "string": {},
* "fuzzy": {},
* "range": {
* "columnName": {
* "high": 20,
* "low": 10,
* }
* },
* "equal": {
* "columnName": "someValue"
* },
* "notEqual": {},
* "empty": {},
* "notEmpty": {},
* "oneOf": {
* "columnName": ["value"]
* }
* },
* "limit": 10,
* "sort": "name",
* "sortOrder": "descending",
* "sortType": "string",
* "paginate": true
* }
*
* @apiSuccess {object[]} rows An array of rows that was found based on the supplied parameters.
* @apiSuccess {boolean} hasNextPage If pagination was enabled then this specifies whether or
* not there is another page after this request.
* @apiSuccess {string} bookmark The bookmark to be sent with the next request to get the next
* page.
*/
.post( .post(
"/api/:sourceId/search", "/api/:sourceId/search",
internalSearchValidator(), internalSearchValidator(),
@ -148,30 +44,6 @@ router
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search rowController.search
) )
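The removed documentation above already shows an example search body; as a complement, a minimal sketch of paginating through results with the bookmark it describes (the endpoint path and the rows/hasNextPage/bookmark fields come from those docs, the wrapper itself is hypothetical):

// Hypothetical pagination loop over the row search endpoint.
async function searchAllRows(
  appUrl: string,
  sourceId: string,
  headers: Record<string, string> = {}
) {
  const rows: unknown[] = []
  let bookmark: string | undefined
  let hasNextPage = true
  while (hasNextPage) {
    const res = await fetch(`${appUrl}/api/${sourceId}/search`, {
      method: "POST",
      headers: { ...headers, "Content-Type": "application/json" },
      body: JSON.stringify({ paginate: true, limit: 10, bookmark }),
    })
    const page = await res.json()
    rows.push(...page.rows)
    // Each page returns a bookmark to feed into the next request.
    bookmark = page.bookmark
    hasNextPage = Boolean(page.hasNextPage)
  }
  return rows
}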
/**
* @api {post} /api/:sourceId/rows Creates a new row
* @apiName Creates a new row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API will create a new row based on the supplied body. If the
* body includes an "_id" field then it will update an existing row if the field
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
* already used by Budibase tables and cannot be used for columns.
*
* @apiParam {string} sourceId The ID of the table to save a row to.
*
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
* must also be provided.
* @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
* @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
* a column in the specified table. All other fields will be dropped and not stored.
*
* @apiSuccess {string} _id The ID of the row that was just saved; if it was just created, this
* is the row's new ID.
* @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/
.post( .post(
"/api/:sourceId/rows", "/api/:sourceId/rows",
paramResource("sourceId"), paramResource("sourceId"),
@ -179,14 +51,6 @@ router
trimViewRowInfo, trimViewRowInfo,
rowController.save rowController.save
) )
/**
* @api {patch} /api/:sourceId/rows Updates a row
* @apiName Update a row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint is identical to the row creation endpoint, except that it will
* error if an _id isn't provided; it only functions for existing rows.
*/
.patch( .patch(
"/api/:sourceId/rows", "/api/:sourceId/rows",
paramResource("sourceId"), paramResource("sourceId"),
@ -194,52 +58,12 @@ router
trimViewRowInfo, trimViewRowInfo,
rowController.patch rowController.patch
) )
/**
* @api {post} /api/:sourceId/rows/validate Validate inputs for a row
* @apiName Validate inputs for a row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription When attempting to save a row you may want to check if the row is valid
* given the table schema; this will iterate through all the constraints on the table and
* check whether the request body is valid.
*
* @apiParam {string} sourceId The ID of the table the row is to be validated for.
*
* @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
* against the table schema and constraints.
*
* @apiSuccess {boolean} valid If the inputs provided are acceptable within the table schema this
* will be true; if not, then the errors property will be populated.
* @apiSuccess {object} [errors] A key value map of information about fields on the input
* which do not match the table schema. The keys will be the names of the columns that have breached
* the schema.
*/
.post( .post(
"/api/:sourceId/rows/validate", "/api/:sourceId/rows/validate",
paramResource("sourceId"), paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.validate rowController.validate
) )
/**
* @api {delete} /api/:sourceId/rows Delete rows
* @apiName Delete rows
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint can delete a single row, or delete multiple rows in
* bulk.
*
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted.
* @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
* @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
* revision here.
*
* @apiSuccess {object[]|object} body If deleting in bulk then the response body will be an array
* of the deleted rows; if deleting a single row then the body will contain a "row" property which
* is the deleted row.
*/
.delete( .delete(
"/api/:sourceId/rows", "/api/:sourceId/rows",
paramResource("sourceId"), paramResource("sourceId"),
@ -247,20 +71,6 @@ router
trimViewRowInfo, trimViewRowInfo,
rowController.destroy rowController.destroy
) )
/**
* @api {post} /api/:sourceId/rows/exportRows Export Rows
* @apiName Export rows
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API can export a number of provided rows
*
* @apiParam {string} sourceId The ID of the table the rows are to be exported from.
*
* @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
*
* @apiSuccess {object[]|object}
*/
.post( .post(
"/api/:sourceId/rows/exportRows", "/api/:sourceId/rows/exportRows",
paramResource("sourceId"), paramResource("sourceId"),

View File

@ -9,99 +9,13 @@ const { BUILDER, PermissionLevel, PermissionType } = permissions
const router: Router = new Router() const router: Router = new Router()
router router
/**
* @api {get} /api/tables Fetch all tables
* @apiName Fetch all tables
* @apiGroup tables
* @apiPermission table read access
* @apiDescription This endpoint retrieves all of the tables which have been created in
* an app. This includes all of the external and internal tables; to tell the difference
* between these look for the "type" property on each table, either being "internal" or "external".
*
* @apiSuccess {object[]} body The response body will be the list of tables that was found - as
* this does not take any parameters the only error scenario is no access.
*/
.get("/api/tables", authorized(BUILDER), tableController.fetch) .get("/api/tables", authorized(BUILDER), tableController.fetch)
/**
* @api {get} /api/tables/:id Fetch a single table
* @apiName Fetch a single table
* @apiGroup tables
* @apiPermission table read access
* @apiDescription Retrieves a single table; this could be internal or external, based on
* the provided table ID.
*
* @apiParam {string} id The ID of the table which is to be retrieved.
*
* @apiSuccess {object[]} body The response body will be the table that was found.
*/
.get( .get(
"/api/tables/:tableId", "/api/tables/:tableId",
paramResource("tableId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.READ, { schema: true }), authorized(PermissionType.TABLE, PermissionLevel.READ, { schema: true }),
tableController.find tableController.find
) )
/**
* @api {post} /api/tables Save a table
* @apiName Save a table
* @apiGroup tables
* @apiPermission builder
* @apiDescription Create or update a table with this endpoint; this will function for both internal
* and external tables.
*
* @apiParam (Body) {string} [_id] If updating an existing table then the ID of the table must be specified.
* @apiParam (Body) {string} [_rev] If updating an existing internal table then the revision must also be specified.
* @apiParam (Body) {string} [type] This should either be "internal" or "external" depending on the table type -
* this will default to internal.
* @apiParam (Body) {string} [sourceId] If creating an external table then this should be set to the datasource ID. If
* building an internal table this does not need to be set, although it will be returned as "bb_internal".
* @apiParam (Body) {string} name The name of the table; this will be used in the UI. To rename the table, simply
* supply the table structure to this endpoint with the name changed.
* @apiParam (Body) {object} schema A key value object which has all of the columns in the table as the keys in this
* object. For each column a "type" and "constraints" must be specified, with some types requiring further information.
* More information about the schema structure can be found in the Typescript definitions.
* @apiParam (Body) {string} [primaryDisplay] The name of the column which should be used when displaying rows
* from this table as relationships.
* @apiParam (Body) {object[]} [indexes] Specifies the search indexes - this is deprecated behaviour with the introduction
* of lucene indexes. This functionality is only available for internal tables.
* @apiParam (Body) {object} [_rename] If a column is to be renamed then the "old" column name should be set in this
* structure, and the "updated" (new) column name should also be supplied. The schema should also be updated; this field
* lets the server know that a field hasn't just been deleted but that its data has moved to a new name, and it will fix
* the rows in the table accordingly. This functionality is only available for internal tables.
* @apiParam (Body) {object[]} [rows] When creating a table using a compatible data source, an array of objects to be imported into the new table can be provided.
*
* @apiParamExample {json} Example:
* {
* "_id": "ta_05541307fa0f4044abee071ca2a82119",
* "_rev": "10-0fbe4e78f69b255d79f1017e2eeef807",
* "type": "internal",
* "views": {},
* "name": "tableName",
* "schema": {
* "column": {
* "type": "string",
* "constraints": {
* "type": "string",
* "length": {
* "maximum": null
* },
* "presence": false
* },
* "name": "column"
* },
* },
* "primaryDisplay": "column",
* "indexes": [],
* "sourceId": "bb_internal",
* "_rename": {
* "old": "columnName",
* "updated": "newColumnName",
* },
* "rows": []
* }
*
* @apiSuccess {object} table The response body will contain the table structure after being cleaned up and
* saved to the database.
*/
.post( .post(
"/api/tables", "/api/tables",
// allows control over updating a table // allows control over updating a table
@ -125,41 +39,12 @@ router
authorized(BUILDER), authorized(BUILDER),
tableController.validateExistingTableImport tableController.validateExistingTableImport
) )
/**
* @api {post} /api/tables/:tableId/:revId Delete a table
* @apiName Delete a table
* @apiGroup tables
* @apiPermission builder
* @apiDescription This endpoint will delete a table and all of its associated data, for this reason it is
* quite dangerous - it will work for internal and external tables.
*
* @apiParam {string} tableId The ID of the table which is to be deleted.
* @apiParam {string} [revId] If deleting an internal table then the revision must also be supplied (_rev); for
* external tables this can simply be set to anything, e.g. "external".
*
* @apiSuccess {string} message A message stating that the table was deleted successfully.
*/
.delete( .delete(
"/api/tables/:tableId/:revId", "/api/tables/:tableId/:revId",
paramResource("tableId"), paramResource("tableId"),
authorized(BUILDER), authorized(BUILDER),
tableController.destroy tableController.destroy
) )
/**
* @api {post} /api/tables/:tableId/import Import CSV to existing table
* @apiName Import CSV to existing table
* @apiGroup tables
* @apiPermission builder
* @apiDescription This endpoint will import data to existing tables, internal or external. It is used in combination
* with the CSV validation endpoint. Take the output of the CSV validation endpoint and pass it to this endpoint to
* import the data; please note this will only import fields that already exist on the table/match the type.
*
* @apiParam {string} tableId The ID of the table which the data should be imported to.
*
* @apiParam (Body) {object[]} rows An array of objects representing the rows to be imported, key-value pairs not matching the table schema will be ignored.
*
* @apiSuccess {string} message A message stating that the data was imported successfully.
*/
.post( .post(
"/api/tables/:tableId/import", "/api/tables/:tableId/import",
paramResource("tableId"), paramResource("tableId"),
@ -167,4 +52,11 @@ router
tableController.bulkImport tableController.bulkImport
) )
.post(
"/api/tables/:tableId/migrate",
paramResource("tableId"),
authorized(BUILDER),
tableController.migrate
)
export default router export default router

View File

@ -7,7 +7,7 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"entities": [ "entities": [
{ {
"_id": "ta_users", "_id": "ta_users",
"_rev": "1-2375e1bc58aeec664dc1b1f04ad43e44", "_rev": "1-73b7912e6cbdd3d696febc60f3715844",
"createdAt": "2020-01-01T00:00:00.000Z", "createdAt": "2020-01-01T00:00:00.000Z",
"name": "Users", "name": "Users",
"primaryDisplay": "email", "primaryDisplay": "email",
@ -21,7 +21,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": true, "presence": true,
"type": "string", "type": "string",
}, },
"fieldName": "email",
"name": "email", "name": "email",
"type": "string", "type": "string",
}, },
@ -30,7 +29,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false, "presence": false,
"type": "string", "type": "string",
}, },
"fieldName": "firstName",
"name": "firstName", "name": "firstName",
"type": "string", "type": "string",
}, },
@ -39,7 +37,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false, "presence": false,
"type": "string", "type": "string",
}, },
"fieldName": "lastName",
"name": "lastName", "name": "lastName",
"type": "string", "type": "string",
}, },
@ -54,7 +51,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false, "presence": false,
"type": "string", "type": "string",
}, },
"fieldName": "roleId",
"name": "roleId", "name": "roleId",
"type": "options", "type": "options",
}, },
@ -67,11 +63,12 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
"presence": false, "presence": false,
"type": "string", "type": "string",
}, },
"fieldName": "status",
"name": "status", "name": "status",
"type": "options", "type": "options",
}, },
}, },
"sourceId": "bb_internal",
"sourceType": "internal",
"type": "table", "type": "table",
"updatedAt": "2020-01-01T00:00:00.000Z", "updatedAt": "2020-01-01T00:00:00.000Z",
"views": {}, "views": {},

View File

@ -0,0 +1,49 @@
import * as setup from "./utilities"
import { APIError } from "@budibase/types"
describe("/api/applications/:appId/sync", () => {
let config = setup.getConfig()
afterAll(setup.afterAll)
beforeAll(async () => {
await config.init()
})
describe("/api/attachments/process", () => {
it("should accept an image file upload", async () => {
let resp = await config.api.attachment.process(
"1px.jpg",
Buffer.from([0])
)
expect(resp.length).toBe(1)
let upload = resp[0]
expect(upload.url.endsWith(".jpg")).toBe(true)
expect(upload.extension).toBe("jpg")
expect(upload.size).toBe(1)
expect(upload.name).toBe("1px.jpg")
})
it("should reject an upload with a malicious file extension", async () => {
await config.withEnv({ SELF_HOSTED: undefined }, async () => {
let resp = (await config.api.attachment.process(
"ohno.exe",
Buffer.from([0]),
{ expectStatus: 400 }
)) as unknown as APIError
expect(resp.message).toContain("invalid extension")
})
})
it("should reject an upload with no file", async () => {
let resp = (await config.api.attachment.process(
undefined as any,
undefined as any,
{
expectStatus: 400,
}
)) as unknown as APIError
expect(resp.message).toContain("No file provided")
})
})
})
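The malicious-extension test above leans on config.withEnv to flip SELF_HOSTED off for a single callback. A minimal sketch of that kind of scoped override, assuming a plain process.env-backed helper (the real one belongs to the shared test configuration):

// Illustrative scoped env override: set values, run the callback, then restore.
async function withEnv<T>(
  overrides: Record<string, string | undefined>,
  fn: () => Promise<T>
): Promise<T> {
  const previous: Record<string, string | undefined> = {}
  for (const [key, value] of Object.entries(overrides)) {
    previous[key] = process.env[key]
    if (value === undefined) {
      delete process.env[key]
    } else {
      process.env[key] = value
    }
  }
  try {
    return await fn()
  } finally {
    for (const [key, value] of Object.entries(previous)) {
      if (value === undefined) {
        delete process.env[key]
      } else {
        process.env[key] = value
      }
    }
  }
}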

View File

@ -5,6 +5,8 @@ import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions" import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { mocks } from "@budibase/backend-core/tests" import { mocks } from "@budibase/backend-core/tests"
mocks.licenses.useBackups()
describe("/backups", () => { describe("/backups", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
@ -12,16 +14,17 @@ describe("/backups", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeEach(async () => {
tk.reset()
await config.init() await config.init()
}) })
describe("exportAppDump", () => { describe("/api/backups/export", () => {
it("should be able to export app", async () => { it("should be able to export app", async () => {
const res = await request const { body, headers } = await config.api.backup.exportBasicBackup(
.post(`/api/backups/export?appId=${config.getAppId()}`) config.getAppId()!
.set(config.defaultHeaders()) )
.expect(200) expect(body instanceof Buffer).toBe(true)
expect(res.headers["content-type"]).toEqual("application/gzip") expect(headers["content-type"]).toEqual("application/gzip")
expect(events.app.exported).toBeCalledTimes(1) expect(events.app.exported).toBeCalledTimes(1)
}) })
@ -36,11 +39,11 @@ describe("/backups", () => {
it("should infer the app name from the app", async () => { it("should infer the app name from the app", async () => {
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
const res = await request const { headers } = await config.api.backup.exportBasicBackup(
.post(`/api/backups/export?appId=${config.getAppId()}`) config.getAppId()!
.set(config.defaultHeaders()) )
expect(res.headers["content-disposition"]).toEqual( expect(headers["content-disposition"]).toEqual(
`attachment; filename="${ `attachment; filename="${
config.getApp()!.name config.getApp()!.name
}-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"` }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`
@ -48,6 +51,21 @@ describe("/backups", () => {
}) })
}) })
describe("/api/backups/import", () => {
it("should be able to import an app", async () => {
const appId = config.getAppId()!
const automation = await config.createAutomation()
await config.createAutomationLog(automation, appId)
await config.createScreen()
const exportRes = await config.api.backup.createBackup(appId)
expect(exportRes.backupId).toBeDefined()
const importRes = await config.api.backup.importBackup(
appId,
exportRes.backupId
)
})
})
describe("calculateBackupStats", () => { describe("calculateBackupStats", () => {
it("should be able to calculate the backup statistics", async () => { it("should be able to calculate the backup statistics", async () => {
await config.createAutomation() await config.createAutomation()

View File

@ -158,5 +158,25 @@ describe("/roles", () => {
expect(res.body.length).toBe(1) expect(res.body.length).toBe(1)
expect(res.body[0]).toBe("PUBLIC") expect(res.body[0]).toBe("PUBLIC")
}) })
it("should not fetch higher level accessible roles when a custom role header is provided", async () => {
await createRole({
name: `CUSTOM_ROLE`,
inherits: roles.BUILTIN_ROLE_IDS.BASIC,
permissionId: permissions.BuiltinPermissionID.READ_ONLY,
version: "name",
})
const res = await request
.get("/api/roles/accessible")
.set({
...config.defaultHeaders(),
"x-budibase-role": "CUSTOM_ROLE"
})
.expect(200)
expect(res.body.length).toBe(3)
expect(res.body[0]).toBe("CUSTOM_ROLE")
expect(res.body[1]).toBe("BASIC")
expect(res.body[2]).toBe("PUBLIC")
})
}) })
}) })
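For completeness, a minimal sketch of what the behaviour verified above looks like from a caller, assuming the same x-budibase-role header is honoured on the accessible-roles endpoint; the URL and headers are placeholders.

// Hypothetical client call pinned to a specific custom role.
async function fetchAccessibleRoles(
  appUrl: string,
  headers: Record<string, string> = {}
) {
  const res = await fetch(`${appUrl}/api/roles/accessible`, {
    headers: { ...headers, "x-budibase-role": "CUSTOM_ROLE" },
  })
  // Expected order per the test above: ["CUSTOM_ROLE", "BASIC", "PUBLIC"].
  return res.json()
}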

View File

@ -1,5 +1,5 @@
const setup = require("./utilities") const setup = require("./utilities")
const { basicScreen } = setup.structures const { basicScreen, powerScreen } = setup.structures
const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions") const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions")
const { roles } = require("@budibase/backend-core") const { roles } = require("@budibase/backend-core")
const { BUILTIN_ROLE_IDS } = roles const { BUILTIN_ROLE_IDS } = roles
@ -12,19 +12,14 @@ const route = "/test"
describe("/routing", () => { describe("/routing", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
let screen, screen2 let basic, power
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeAll(async () => { beforeAll(async () => {
await config.init() await config.init()
screen = basicScreen() basic = await config.createScreen(basicScreen(route))
screen.routing.route = route power = await config.createScreen(powerScreen(route))
screen = await config.createScreen(screen)
screen2 = basicScreen()
screen2.routing.roleId = BUILTIN_ROLE_IDS.POWER
screen2.routing.route = route
screen2 = await config.createScreen(screen2)
await config.publish() await config.publish()
}) })
@ -61,8 +56,8 @@ describe("/routing", () => {
expect(res.body.routes[route]).toEqual({ expect(res.body.routes[route]).toEqual({
subpaths: { subpaths: {
[route]: { [route]: {
screenId: screen._id, screenId: basic._id,
roleId: screen.routing.roleId roleId: basic.routing.roleId
} }
} }
}) })
@ -80,8 +75,8 @@ describe("/routing", () => {
expect(res.body.routes[route]).toEqual({ expect(res.body.routes[route]).toEqual({
subpaths: { subpaths: {
[route]: { [route]: {
screenId: screen2._id, screenId: power._id,
roleId: screen2.routing.roleId roleId: power.routing.roleId
} }
} }
}) })
@ -101,8 +96,8 @@ describe("/routing", () => {
expect(res.body.routes).toBeDefined() expect(res.body.routes).toBeDefined()
expect(res.body.routes[route].subpaths[route]).toBeDefined() expect(res.body.routes[route].subpaths[route]).toBeDefined()
const subpath = res.body.routes[route].subpaths[route] const subpath = res.body.routes[route].subpaths[route]
expect(subpath.screens[screen2.routing.roleId]).toEqual(screen2._id) expect(subpath.screens[power.routing.roleId]).toEqual(power._id)
expect(subpath.screens[screen.routing.roleId]).toEqual(screen._id) expect(subpath.screens[basic.routing.roleId]).toEqual(basic._id)
}) })
it("make sure it is a builder only endpoint", async () => { it("make sure it is a builder only endpoint", async () => {

View File

@ -10,6 +10,7 @@ import {
FieldSchema, FieldSchema,
FieldType, FieldType,
FieldTypeSubtypes, FieldTypeSubtypes,
INTERNAL_TABLE_SOURCE_ID,
MonthlyQuotaName, MonthlyQuotaName,
PermissionLevel, PermissionLevel,
QuotaUsageType, QuotaUsageType,
@ -21,6 +22,7 @@ import {
SortType, SortType,
StaticQuotaName, StaticQuotaName,
Table, Table,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { import {
expectAnyExternalColsAttributes, expectAnyExternalColsAttributes,
@ -65,6 +67,8 @@ describe.each([
type: "table", type: "table",
primary: ["id"], primary: ["id"],
primaryDisplay: "name", primaryDisplay: "name",
sourceType: TableSourceType.INTERNAL,
sourceId: INTERNAL_TABLE_SOURCE_ID,
schema: { schema: {
id: { id: {
type: FieldType.AUTO, type: FieldType.AUTO,
@ -134,9 +138,22 @@ describe.each([
} }
: undefined : undefined
async function createTable(
cfg: Omit<SaveTableRequest, "sourceId" | "sourceType">,
opts?: { skipReassigning: boolean }
) {
let table
if (dsProvider) {
table = await config.createExternalTable(cfg, opts)
} else {
table = await config.createTable(cfg, opts)
}
return table
}
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
const table = await config.createTable(tableConfig) let table = await createTable(tableConfig)
tableId = table._id! tableId = table._id!
}) })
@ -165,7 +182,7 @@ describe.each([
const queryUsage = await getQueryUsage() const queryUsage = await getQueryUsage()
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
const newTable = await config.createTable( const newTable = await createTable(
{ {
...tableConfig, ...tableConfig,
name: "TestTableAuto", name: "TestTableAuto",
@ -242,7 +259,7 @@ describe.each([
}) })
it("should list all rows for given tableId", async () => { it("should list all rows for given tableId", async () => {
const table = await config.createTable(generateTableConfig(), { const table = await createTable(generateTableConfig(), {
skipReassigning: true, skipReassigning: true,
}) })
const tableId = table._id! const tableId = table._id!
@ -323,7 +340,7 @@ describe.each([
inclusion: ["Alpha", "Beta", "Gamma"], inclusion: ["Alpha", "Beta", "Gamma"],
}, },
} }
const table = await config.createTable({ const table = await createTable({
name: "TestTable2", name: "TestTable2",
type: "table", type: "table",
schema: { schema: {
@ -438,7 +455,8 @@ describe.each([
describe("view save", () => { describe("view save", () => {
it("views have extra data trimmed", async () => { it("views have extra data trimmed", async () => {
const table = await config.createTable({ const table = await createTable({
type: "table",
name: "orders", name: "orders",
primary: ["OrderID"], primary: ["OrderID"],
schema: { schema: {
@ -494,7 +512,7 @@ describe.each([
describe("patch", () => { describe("patch", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should update only the fields that are supplied", async () => { it("should update only the fields that are supplied", async () => {
@ -548,7 +566,7 @@ describe.each([
describe("destroy", () => { describe("destroy", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should be able to delete a row", async () => { it("should be able to delete a row", async () => {
@ -566,7 +584,7 @@ describe.each([
describe("validate", () => { describe("validate", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should return no errors on valid row", async () => { it("should return no errors on valid row", async () => {
@ -603,7 +621,7 @@ describe.each([
describe("bulkDelete", () => { describe("bulkDelete", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should be able to delete a bulk set of rows", async () => { it("should be able to delete a bulk set of rows", async () => {
@ -687,7 +705,7 @@ describe.each([
describe("fetchView", () => { describe("fetchView", () => {
beforeEach(async () => { beforeEach(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should be able to fetch tables contents via 'view'", async () => { it("should be able to fetch tables contents via 'view'", async () => {
@ -735,7 +753,7 @@ describe.each([
describe("fetchEnrichedRows", () => { describe("fetchEnrichedRows", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should allow enriching some linked rows", async () => { it("should allow enriching some linked rows", async () => {
@ -808,7 +826,7 @@ describe.each([
describe("attachments", () => { describe("attachments", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should allow enriching attachment rows", async () => { it("should allow enriching attachment rows", async () => {
@ -839,7 +857,7 @@ describe.each([
describe("exportData", () => { describe("exportData", () => {
beforeAll(async () => { beforeAll(async () => {
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
table = await config.createTable(tableConfig) table = await createTable(tableConfig)
}) })
it("should allow exporting all columns", async () => { it("should allow exporting all columns", async () => {
@ -880,6 +898,8 @@ describe.each([
async function userTable(): Promise<Table> { async function userTable(): Promise<Table> {
return { return {
name: `users_${generator.word()}`, name: `users_${generator.word()}`,
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
type: "table", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
@ -925,7 +945,7 @@ describe.each([
describe("create", () => { describe("create", () => {
it("should persist a new row with only the provided view fields", async () => { it("should persist a new row with only the provided view fields", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const view = await config.createView({ const view = await config.createView({
schema: { schema: {
name: { visible: true }, name: { visible: true },
@ -960,7 +980,7 @@ describe.each([
describe("patch", () => { describe("patch", () => {
it("should update only the view fields for a row", async () => { it("should update only the view fields for a row", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const tableId = table._id! const tableId = table._id!
const view = await config.createView({ const view = await config.createView({
schema: { schema: {
@ -1001,7 +1021,7 @@ describe.each([
describe("destroy", () => { describe("destroy", () => {
it("should be able to delete a row", async () => { it("should be able to delete a row", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const tableId = table._id! const tableId = table._id!
const view = await config.createView({ const view = await config.createView({
schema: { schema: {
@ -1025,7 +1045,7 @@ describe.each([
}) })
it("should be able to delete multiple rows", async () => { it("should be able to delete multiple rows", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const tableId = table._id! const tableId = table._id!
const view = await config.createView({ const view = await config.createView({
schema: { schema: {
@ -1062,6 +1082,8 @@ describe.each([
async function userTable(): Promise<Table> { async function userTable(): Promise<Table> {
return { return {
name: `users_${generator.word()}`, name: `users_${generator.word()}`,
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
type: "table", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
@ -1088,7 +1110,7 @@ describe.each([
} }
it("returns empty rows from view when no schema is passed", async () => { it("returns empty rows from view when no schema is passed", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const rows = await Promise.all( const rows = await Promise.all(
Array.from({ length: 10 }, () => Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, { tableId: table._id }) config.api.row.save(table._id!, { tableId: table._id })
@ -1119,7 +1141,7 @@ describe.each([
}) })
it("searching respects the view filters", async () => { it("searching respects the view filters", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
await Promise.all( await Promise.all(
Array.from({ length: 10 }, () => Array.from({ length: 10 }, () =>
@ -1243,7 +1265,7 @@ describe.each([
describe("sorting", () => { describe("sorting", () => {
beforeAll(async () => { beforeAll(async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const users = [ const users = [
{ name: "Alice", age: 25 }, { name: "Alice", age: 25 },
{ name: "Bob", age: 30 }, { name: "Bob", age: 30 },
@ -1310,7 +1332,7 @@ describe.each([
}) })
it("when schema is defined, defined columns and row attributes are returned", async () => { it("when schema is defined, defined columns and row attributes are returned", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const rows = await Promise.all( const rows = await Promise.all(
Array.from({ length: 10 }, () => Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, { config.api.row.save(table._id!, {
@ -1341,7 +1363,7 @@ describe.each([
}) })
it("views without data can be returned", async () => { it("views without data can be returned", async () => {
const table = await config.createTable(await userTable()) const table = await createTable(await userTable())
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
const response = await config.api.viewV2.search(createViewResponse.id) const response = await config.api.viewV2.search(createViewResponse.id)
@ -1350,7 +1372,7 @@ describe.each([
}) })
it("respects the limit parameter", async () => { it("respects the limit parameter", async () => {
await config.createTable(await userTable()) await createTable(await userTable())
await Promise.all(Array.from({ length: 10 }, () => config.createRow())) await Promise.all(Array.from({ length: 10 }, () => config.createRow()))
const limit = generator.integer({ min: 1, max: 8 }) const limit = generator.integer({ min: 1, max: 8 })
@ -1365,7 +1387,7 @@ describe.each([
}) })
it("can handle pagination", async () => { it("can handle pagination", async () => {
await config.createTable(await userTable()) await createTable(await userTable())
await Promise.all(Array.from({ length: 10 }, () => config.createRow())) await Promise.all(Array.from({ length: 10 }, () => config.createRow()))
const createViewResponse = await config.createView() const createViewResponse = await config.createView()
@ -1443,7 +1465,7 @@ describe.each([
let tableId: string let tableId: string
beforeAll(async () => { beforeAll(async () => {
await config.createTable(await userTable()) await createTable(await userTable())
await Promise.all( await Promise.all(
Array.from({ length: 10 }, () => config.createRow()) Array.from({ length: 10 }, () => config.createRow())
) )
@ -1521,13 +1543,13 @@ describe.each([
let o2mTable: Table let o2mTable: Table
let m2mTable: Table let m2mTable: Table
beforeAll(async () => { beforeAll(async () => {
o2mTable = await config.createTable( o2mTable = await createTable(
{ ...generateTableConfig(), name: "o2m" }, { ...generateTableConfig(), name: "o2m" },
{ {
skipReassigning: true, skipReassigning: true,
} }
) )
m2mTable = await config.createTable( m2mTable = await createTable(
{ ...generateTableConfig(), name: "m2m" }, { ...generateTableConfig(), name: "m2m" },
{ {
skipReassigning: true, skipReassigning: true,
@ -1597,9 +1619,9 @@ describe.each([
const tableConfig = generateTableConfig() const tableConfig = generateTableConfig()
if (config.datasource) { if (config.datasource) {
tableConfig.sourceId = config.datasource._id tableConfig.sourceId = config.datasource._id!
if (config.datasource.plus) { if (config.datasource.plus) {
tableConfig.type = "external" tableConfig.sourceType = TableSourceType.EXTERNAL
} }
} }
const table = await config.api.table.create({ const table = await config.api.table.create({

View File

@ -5,11 +5,15 @@ describe("/static", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
let app let app
let cleanupEnv
afterAll(setup.afterAll) afterAll(() => {
setup.afterAll()
cleanupEnv()
})
beforeAll(async () => { beforeAll(async () => {
config.modeSelf() cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
app = await config.init() app = await config.init()
}) })
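The change above replaces config.modeSelf() with config.setEnv, which applies overrides immediately and hands back a cleanup function for afterAll. A minimal sketch of such a helper, assuming it simply records and restores previous process.env values (the real implementation lives in the shared test config):

// Illustrative setEnv: apply overrides now, return an undo function.
function setEnv(overrides: Record<string, string>): () => void {
  const previous: Record<string, string | undefined> = {}
  for (const [key, value] of Object.entries(overrides)) {
    previous[key] = process.env[key]
    process.env[key] = value
  }
  return () => {
    for (const [key, value] of Object.entries(previous)) {
      if (value === undefined) {
        delete process.env[key]
      } else {
        process.env[key] = value
      }
    }
  }
}

// Usage mirrors the test above:
// const cleanupEnv = setEnv({ SELF_HOSTED: "true" })
// ... run tests ...
// cleanupEnv()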

View File

@ -1,16 +1,24 @@
import { events, context } from "@budibase/backend-core" import { context, events } from "@budibase/backend-core"
import { import {
FieldType,
SaveTableRequest,
RelationshipType,
Table,
ViewCalculation,
AutoFieldSubTypes, AutoFieldSubTypes,
FieldSubtype,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
InternalTable,
RelationshipType,
Row,
SaveTableRequest,
Table,
TableSourceType,
User,
ViewCalculation,
} from "@budibase/types" } from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions" import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities" import * as setup from "./utilities"
const { basicTable } = setup.structures
import sdk from "../../../sdk" import sdk from "../../../sdk"
import uuid from "uuid"
const { basicTable } = setup.structures
describe("/tables", () => { describe("/tables", () => {
let request = setup.getRequest() let request = setup.getRequest()
@ -239,7 +247,8 @@ describe("/tables", () => {
.expect(200) .expect(200)
const fetchedTable = res.body[0] const fetchedTable = res.body[0]
expect(fetchedTable.name).toEqual(testTable.name) expect(fetchedTable.name).toEqual(testTable.name)
expect(fetchedTable.type).toEqual("internal") expect(fetchedTable.type).toEqual("table")
expect(fetchedTable.sourceType).toEqual("internal")
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
@ -417,4 +426,281 @@ describe("/tables", () => {
}) })
}) })
}) })
describe("migrate", () => {
let users: User[]
beforeAll(async () => {
users = await Promise.all([
config.createUser({ email: `${uuid.v4()}@example.com` }),
config.createUser({ email: `${uuid.v4()}@example.com` }),
config.createUser({ email: `${uuid.v4()}@example.com` }),
])
})
it("should successfully migrate a one-to-many user relationship to a user column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.ONE_TO_MANY,
tableId: InternalTable.USER_METADATA,
},
},
})
const rows = await Promise.all(
users.map(u =>
config.api.row.save(table._id!, { "user relationship": [u] })
)
)
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USER,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const migratedRows = await config.api.row.fetch(table._id!)
rows.sort((a, b) => a._id!.localeCompare(b._id!))
migratedRows.sort((a, b) => a._id!.localeCompare(b._id!))
for (const [i, row] of rows.entries()) {
const migratedRow = migratedRows[i]
expect(migratedRow["user column"]).toBeDefined()
expect(migratedRow["user relationship"]).not.toBeDefined()
expect(row["user relationship"][0]._id).toEqual(
migratedRow["user column"][0]._id
)
}
})
it("should successfully migrate a many-to-many user relationship to a users column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_MANY,
tableId: InternalTable.USER_METADATA,
},
},
})
const row1 = await config.api.row.save(table._id!, {
"user relationship": [users[0], users[1]],
})
const row2 = await config.api.row.save(table._id!, {
"user relationship": [users[1], users[2]],
})
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id])
)
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[1]._id, users[2]._id])
)
})
it("should successfully migrate a many-to-one user relationship to a users column", async () => {
const table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: InternalTable.USER_METADATA,
},
},
})
const row1 = await config.api.row.save(table._id!, {
"user relationship": [users[0], users[1]],
})
const row2 = await config.api.row.save(table._id!, {
"user relationship": [users[2]],
})
await config.api.table.migrate(table._id!, {
oldColumn: table.schema["user relationship"],
newColumn: {
name: "user column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
})
const migratedTable = await config.api.table.get(table._id!)
expect(migratedTable.schema["user column"]).toBeDefined()
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
.body as Row
expect(row1Migrated["user relationship"]).not.toBeDefined()
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
expect.arrayContaining([users[0]._id, users[1]._id])
)
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
.body as Row
expect(row2Migrated["user relationship"]).not.toBeDefined()
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([
users[2]._id,
])
})
describe("unhappy paths", () => {
let table: Table
beforeAll(async () => {
table = await config.api.table.create({
name: "table",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
"user relationship": {
type: FieldType.LINK,
fieldName: "test",
name: "user relationship",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: InternalTable.USER_METADATA,
},
num: {
type: FieldType.NUMBER,
name: "num",
constraints: {
type: "number",
presence: false,
},
},
},
})
})
it("should fail if the new column name is blank", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the new column name is a reserved name", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "_id",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the new column name is the same as an existing column", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: table.schema["user relationship"],
newColumn: {
name: "num",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
it("should fail if the old column name isn't a column in the table", async () => {
await config.api.table.migrate(
table._id!,
{
oldColumn: {
name: "not a column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
newColumn: {
name: "new column",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
},
{ expectStatus: 400 }
)
})
})
})
}) })

View File

@ -3,10 +3,12 @@ import {
CreateViewRequest, CreateViewRequest,
FieldSchema, FieldSchema,
FieldType, FieldType,
INTERNAL_TABLE_SOURCE_ID,
SearchQueryOperators, SearchQueryOperators,
SortOrder, SortOrder,
SortType, SortType,
Table, Table,
TableSourceType,
UIFieldMetadata, UIFieldMetadata,
UpdateViewRequest, UpdateViewRequest,
ViewV2, ViewV2,
@ -18,6 +20,8 @@ function priceTable(): Table {
return { return {
name: "table", name: "table",
type: "table", type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: { schema: {
Price: { Price: {
type: FieldType.NUMBER, type: FieldType.NUMBER,
@ -54,10 +58,10 @@ describe.each([
}, },
}) })
return config.createTable({ return config.createExternalTable({
...priceTable(), ...priceTable(),
sourceId: datasource._id, sourceId: datasource._id,
type: "external", sourceType: TableSourceType.EXTERNAL,
}) })
}, },
], ],

View File

@ -8,11 +8,15 @@ describe("/webhooks", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
let webhook: Webhook let webhook: Webhook
let cleanupEnv: () => void
afterAll(setup.afterAll) afterAll(() => {
setup.afterAll()
cleanupEnv()
})
const setupTest = async () => { const setupTest = async () => {
config.modeSelf() cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
await config.init() await config.init()
const autoConfig = basicAutomation() const autoConfig = basicAutomation()
autoConfig.definition.trigger.schema = { autoConfig.definition.trigger.schema = {

View File

@ -1,5 +1,11 @@
import { objectStore, roles, constants } from "@budibase/backend-core" import { constants, objectStore, roles } from "@budibase/backend-core"
import { FieldType as FieldTypes } from "@budibase/types" import {
FieldType as FieldTypes,
INTERNAL_TABLE_SOURCE_ID,
Table,
TableSourceType,
} from "@budibase/types"
export { export {
FieldType as FieldTypes, FieldType as FieldTypes,
RelationshipType, RelationshipType,
@ -70,9 +76,11 @@ export enum SortDirection {
DESCENDING = "DESCENDING", DESCENDING = "DESCENDING",
} }
export const USERS_TABLE_SCHEMA = { export const USERS_TABLE_SCHEMA: Table = {
_id: "ta_users", _id: "ta_users",
type: "table", type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
views: {}, views: {},
name: "Users", name: "Users",
// TODO: ADMIN PANEL - when implemented this doesn't need to be carried out // TODO: ADMIN PANEL - when implemented this doesn't need to be carried out
@ -87,12 +95,10 @@ export const USERS_TABLE_SCHEMA = {
}, },
presence: true, presence: true,
}, },
fieldName: "email",
name: "email", name: "email",
}, },
firstName: { firstName: {
name: "firstName", name: "firstName",
fieldName: "firstName",
type: FieldTypes.STRING, type: FieldTypes.STRING,
constraints: { constraints: {
type: FieldTypes.STRING, type: FieldTypes.STRING,
@ -101,7 +107,6 @@ export const USERS_TABLE_SCHEMA = {
}, },
lastName: { lastName: {
name: "lastName", name: "lastName",
fieldName: "lastName",
type: FieldTypes.STRING, type: FieldTypes.STRING,
constraints: { constraints: {
type: FieldTypes.STRING, type: FieldTypes.STRING,
@ -109,7 +114,6 @@ export const USERS_TABLE_SCHEMA = {
}, },
}, },
roleId: { roleId: {
fieldName: "roleId",
name: "roleId", name: "roleId",
type: FieldTypes.OPTIONS, type: FieldTypes.OPTIONS,
constraints: { constraints: {
@ -119,7 +123,6 @@ export const USERS_TABLE_SCHEMA = {
}, },
}, },
status: { status: {
fieldName: "status",
name: "status", name: "status",
type: FieldTypes.OPTIONS, type: FieldTypes.OPTIONS,
constraints: { constraints: {
@ -169,3 +172,8 @@ export enum AutomationErrors {
export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets
export const MAX_AUTOMATION_RECURRING_ERRORS = 5 export const MAX_AUTOMATION_RECURRING_ERRORS = 5
export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber" export const GOOGLE_SHEETS_PRIMARY_KEY = "rowNumber"
export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
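A minimal sketch of how these new default IDs might be consumed, assuming all that is needed is a membership check; the helper below is hypothetical and simply restates the constants declared above.

// Hypothetical guard built on the default IDs declared above.
const DEFAULT_TABLE_IDS = new Set([
  "ta_bb_jobs", // DEFAULT_JOBS_TABLE_ID
  "ta_bb_inventory", // DEFAULT_INVENTORY_TABLE_ID
  "ta_bb_expenses", // DEFAULT_EXPENSES_TABLE_ID
  "ta_bb_employee", // DEFAULT_EMPLOYEE_TABLE_ID
])

function isDefaultSampleTable(tableId: string): boolean {
  return DEFAULT_TABLE_IDS.has(tableId)
}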

Some files were not shown because too many files have changed in this diff.