Merge branch 'master' into grid-tweaks

commit 87b69007f4
@@ -36,6 +36,7 @@ jobs:
      - uses: actions/setup-node@v1
        with:
          node-version: 18.x
          cache: yarn

      - run: yarn install --frozen-lockfile
      - name: Update versions
@@ -63,14 +64,64 @@ jobs:
          echo "Using tag $version"
          echo "version=$version" >> "$GITHUB_OUTPUT"

      - name: Build/release Docker images
      - name: Setup Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1

      - name: Docker login
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
          yarn build:docker
        env:
          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
          BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}

      - name: Build worker docker
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          build-args: |
            BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
          file: ./packages/worker/Dockerfile.v2
          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
          cache-to: type=inline
        env:
          IMAGE_NAME: budibase/worker
          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
          BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}

      - name: Build server docker
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          build-args: |
            BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
          file: ./packages/server/Dockerfile.v2
          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
          cache-to: type=inline
        env:
          IMAGE_NAME: budibase/apps
          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
          BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}

      - name: Build proxy docker
        uses: docker/build-push-action@v5
        with:
          context: ./hosting/proxy
          push: true
          platforms: linux/amd64,linux/arm64
          tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
          file: ./hosting/proxy/Dockerfile
          cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
          cache-to: type=inline
        env:
          IMAGE_NAME: budibase/proxy
          IMAGE_TAG: ${{ steps.currenttag.outputs.version }}

  release-helm-chart:
    needs: [release-images]
@ -1,72 +0,0 @@
|
|||
name: Test
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
CI: true
|
||||
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||
REGISTRY_URL: registry.hub.docker.com
|
||||
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
|
||||
jobs:
|
||||
build:
|
||||
name: "build"
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [18.x]
|
||||
steps:
|
||||
- name: "Checkout"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: true
|
||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: "yarn"
|
||||
- name: Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Setup Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Run Yarn
|
||||
run: yarn
|
||||
- name: Run Yarn Build
|
||||
run: yarn build --scope @budibase/server --scope @budibase/worker
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_API_KEY }}
|
||||
- name: Get the latest release version
|
||||
id: version
|
||||
run: |
|
||||
release_version=$(cat lerna.json | jq -r '.version')
|
||||
echo $release_version
|
||||
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
|
||||
- name: Tag and release Budibase service docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: BUDIBASE_VERSION=0.0.0+test
|
||||
tags: budibase/budibase-test:test
|
||||
file: ./hosting/single/Dockerfile.v2
|
||||
cache-from: type=registry,ref=budibase/budibase-test:test
|
||||
cache-to: type=inline
|
||||
- name: Tag and release Budibase Azure App Service docker image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
platforms: linux/amd64
|
||||
build-args: |
|
||||
TARGETBUILD=aas
|
||||
BUDIBASE_VERSION=0.0.0+test
|
||||
tags: budibase/budibase-test:aas
|
||||
file: ./hosting/single/Dockerfile.v2
|
|
@@ -66,14 +66,21 @@ jobs:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          build-args: BUDIBASE_VERSION=$BUDIBASE_VERSION
          tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
          file: ./hosting/single/Dockerfile
          file: ./hosting/single/Dockerfile.v2
        env:
          BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}
      - name: Tag and release Budibase Azure App Service docker image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          platforms: linux/amd64
          build-args: TARGETBUILD=aas
          build-args: |
            TARGETBUILD=aas
            BUDIBASE_VERSION=$BUDIBASE_VERSION
          tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
          file: ./hosting/single/Dockerfile
          file: ./hosting/single/Dockerfile.v2
        env:
          BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}
@@ -126,13 +126,6 @@ You can learn more about the Budibase API at the following places:

- [Build an app with Budibase and Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/)

<p align="center">
  <img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1647858558/Feb%20release/Start_building_with_Budibase_s_API_3_rhlzhv.png">
</p>
<br /><br />

<br /><br /><br />

## 🏁 Get started

Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean.
@@ -1,10 +0,0 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
  echo "INSTALLING ARM64 MINIO"
  wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
  echo "INSTALLING AMD64 MINIO"
  wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio
@@ -1,18 +0,0 @@
#!/bin/bash

tag=$1

if [[ ! "$tag" ]]; then
  echo "No tag present. You must pass a tag to this script"
  exit 1
fi

echo "Tagging images with tag: $tag"

docker tag proxy-service budibase/proxy:$tag
docker tag app-service budibase/apps:$tag
docker tag worker-service budibase/worker:$tag

docker push --all-tags budibase/apps
docker push --all-tags budibase/worker
docker push --all-tags budibase/proxy
@@ -42,6 +42,7 @@ COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV NODE_MAJOR 18
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single

@@ -49,10 +50,10 @@ ENV TARGETBUILD $TARGETBUILD

# install base dependencies
RUN apt-get update && \
    apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
    apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server libaio1

# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
    && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
    && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
    && apt update -y \

@@ -61,10 +62,8 @@ RUN apt install software-properties-common apt-transport-https gpg -y \

# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
    bash /tmp/nodesource_setup.sh && \
    apt-get install -y --no-install-recommends libaio1 nodejs && \
    npm install --global yarn pm2
COPY scripts/install-node.sh ./install.sh
RUN chmod +x install.sh && ./install.sh

# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx
@@ -77,7 +77,7 @@ mkdir -p ${DATA_DIR}/minio
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
  # Add monthly cron job to renew certbot certificate
@@ -1,5 +1,5 @@
{
  "version": "2.11.45",
  "version": "2.12.4",
  "npmClient": "yarn",
  "packages": [
    "packages/*"
@ -33,7 +33,6 @@
|
|||
"build:sdk": "lerna run --stream build:sdk",
|
||||
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
|
||||
"release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset",
|
||||
"release:develop": "yarn release --dist-tag develop",
|
||||
"restore": "yarn run clean && yarn && yarn run build",
|
||||
"nuke": "yarn run nuke:packages && yarn run nuke:docker",
|
||||
"nuke:packages": "yarn run restore",
|
||||
|
@ -55,10 +54,6 @@
|
|||
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
|
||||
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
|
||||
"build:specs": "lerna run --stream specs",
|
||||
"build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
|
||||
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
|
||||
"build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
|
||||
"build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
|
||||
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
|
||||
"build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
|
||||
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
|
||||
|
|
|
@@ -3,6 +3,7 @@ const mockS3 = {
  deleteObject: jest.fn().mockReturnThis(),
  deleteObjects: jest.fn().mockReturnThis(),
  createBucket: jest.fn().mockReturnThis(),
  getObject: jest.fn().mockReturnThis(),
  listObject: jest.fn().mockReturnThis(),
  getSignedUrl: jest.fn((operation: string, params: any) => {
    return `http://s3.example.com/${params.Bucket}/${params.Key}`
@@ -21,7 +21,7 @@
    "test:watch": "jest --watchAll"
  },
  "dependencies": {
    "@budibase/nano": "10.1.2",
    "@budibase/nano": "10.1.3",
    "@budibase/pouchdb-replication-stream": "1.2.10",
    "@budibase/shared-core": "0.0.0",
    "@budibase/types": "0.0.0",
@@ -119,8 +119,8 @@ export class Writethrough {
    this.writeRateMs = writeRateMs
  }

  async put(doc: any) {
    return put(this.db, doc, this.writeRateMs)
  async put(doc: any, writeRateMs: number = this.writeRateMs) {
    return put(this.db, doc, writeRateMs)
  }

  async get(id: string) {
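The widened put() signature above lets callers override the cache's write interval per call instead of always using the instance-level rate. A minimal usage sketch; the constructor arguments, `db` handle, and `doc` value are assumptions for illustration, not taken from this diff:

```ts
// Sketch only – assumes `db` (a database handle) and `doc` are in scope,
// and that Writethrough/Duration are imported from backend-core as in this commit.
const cache = new Writethrough(db, Duration.fromSeconds(30).toMs())

await cache.put(doc) // uses the instance-level write rate
await cache.put(doc, Duration.fromSeconds(5).toMs()) // per-call override added by this change
```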
@@ -8,3 +8,7 @@ export const CONSTANT_INTERNAL_ROW_COLS = [
] as const

export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const

export function isInternalColumnName(name: string): boolean {
  return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
}
@ -6,6 +6,7 @@ import {
|
|||
ViewName,
|
||||
} from "../constants"
|
||||
import { getProdAppID } from "./conversions"
|
||||
import { DatabaseQueryOpts } from "@budibase/types"
|
||||
|
||||
/**
|
||||
* If creating DB allDocs/query params with only a single top level ID this can be used, this
|
||||
|
@ -22,8 +23,8 @@ import { getProdAppID } from "./conversions"
|
|||
export function getDocParams(
|
||||
docType: string,
|
||||
docId?: string | null,
|
||||
otherProps: any = {}
|
||||
) {
|
||||
otherProps: Partial<DatabaseQueryOpts> = {}
|
||||
): DatabaseQueryOpts {
|
||||
if (docId == null) {
|
||||
docId = ""
|
||||
}
|
||||
|
@ -45,8 +46,8 @@ export function getDocParams(
|
|||
export function getRowParams(
|
||||
tableId?: string | null,
|
||||
rowId?: string | null,
|
||||
otherProps = {}
|
||||
) {
|
||||
otherProps: Partial<DatabaseQueryOpts> = {}
|
||||
): DatabaseQueryOpts {
|
||||
if (tableId == null) {
|
||||
return getDocParams(DocumentType.ROW, null, otherProps)
|
||||
}
|
||||
|
@ -88,7 +89,10 @@ export const isDatasourceId = (id: string) => {
|
|||
/**
|
||||
* Gets parameters for retrieving workspaces.
|
||||
*/
|
||||
export function getWorkspaceParams(id = "", otherProps = {}) {
|
||||
export function getWorkspaceParams(
|
||||
id = "",
|
||||
otherProps: Partial<DatabaseQueryOpts> = {}
|
||||
): DatabaseQueryOpts {
|
||||
return {
|
||||
...otherProps,
|
||||
startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
|
||||
|
@ -99,7 +103,10 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
|
|||
/**
|
||||
* Gets parameters for retrieving users.
|
||||
*/
|
||||
export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
|
||||
export function getGlobalUserParams(
|
||||
globalId: any,
|
||||
otherProps: Partial<DatabaseQueryOpts> = {}
|
||||
): DatabaseQueryOpts {
|
||||
if (!globalId) {
|
||||
globalId = ""
|
||||
}
|
||||
|
@ -117,11 +124,17 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
|
|||
/**
|
||||
* Gets parameters for retrieving users, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
export function getUserMetadataParams(userId?: string | null, otherProps = {}) {
|
||||
export function getUserMetadataParams(
|
||||
userId?: string | null,
|
||||
otherProps: Partial<DatabaseQueryOpts> = {}
|
||||
): DatabaseQueryOpts {
|
||||
return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
|
||||
}
|
||||
|
||||
export function getUsersByAppParams(appId: any, otherProps: any = {}) {
|
||||
export function getUsersByAppParams(
|
||||
appId: any,
|
||||
otherProps: Partial<DatabaseQueryOpts> = {}
|
||||
): DatabaseQueryOpts {
|
||||
const prodAppId = getProdAppID(appId)
|
||||
return {
|
||||
...otherProps,
|
||||
|
|
|
@ -30,6 +30,7 @@ export * as timers from "./timers"
|
|||
export { default as env } from "./environment"
|
||||
export * as blacklist from "./blacklist"
|
||||
export * as docUpdates from "./docUpdates"
|
||||
export * from "./utils/Duration"
|
||||
export { SearchParams } from "./db"
|
||||
// Add context to tenancy for backwards compatibility
|
||||
// only do this for external usages to prevent internal
|
||||
|
|
|
@@ -36,7 +36,7 @@ class InMemoryQueue {
   * @param opts This is not used by the in memory queue as there is no real use
   * case when in memory, but is the same API as Bull
   */
  constructor(name: string, opts = null) {
  constructor(name: string, opts?: any) {
    this._name = name
    this._opts = opts
    this._messages = []
@@ -2,11 +2,18 @@ import env from "../environment"
import { getRedisOptions } from "../redis/utils"
import { JobQueue } from "./constants"
import InMemoryQueue from "./inMemoryQueue"
import BullQueue from "bull"
import BullQueue, { QueueOptions } from "bull"
import { addListeners, StalledFn } from "./listeners"
import { Duration } from "../utils"
import * as timers from "../timers"
import * as Redis from "ioredis"

const CLEANUP_PERIOD_MS = 60 * 1000
// the queue lock is held for 5 minutes
const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
// queue lock is refreshed every 30 seconds
const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
// cleanup the queue every 60 seconds
const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
let cleanupInterval: NodeJS.Timeout

@@ -21,7 +28,14 @@ export function createQueue<T>(
  opts: { removeStalledCb?: StalledFn } = {}
): BullQueue.Queue<T> {
  const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
  const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
  const queueConfig: QueueOptions = {
    redis: redisProtocolUrl! || (redisOpts as Redis.RedisOptions),
    settings: {
      maxStalledCount: 0,
      lockDuration: QUEUE_LOCK_MS,
      lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,
    },
  }
  let queue: any
  if (!env.isTest()) {
    queue = new BullQueue(jobQueue, queueConfig)
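The new QueueOptions above pin Bull's locking behaviour: a job's lock lasts five minutes, is renewed every thirty seconds, and maxStalledCount of 0 means a stalled job fails rather than being re-processed. A hedged sketch of the same configuration in isolation; the queue name, Redis connection, and processor are placeholders, not values from this diff:

```ts
import BullQueue, { QueueOptions } from "bull"

// Placeholder connection – the real one comes from getRedisOptions() in backend-core.
const config: QueueOptions = {
  redis: { host: "localhost", port: 6379 },
  settings: {
    maxStalledCount: 0, // stalled jobs fail instead of being retried
    lockDuration: 5 * 60 * 1000, // QUEUE_LOCK_MS
    lockRenewTime: 30 * 1000, // QUEUE_LOCK_RENEW_INTERNAL_MS
  },
}

const exampleQueue = new BullQueue("example-queue", config)
exampleQueue.process(async job => {
  // long-running work keeps its lock as long as renewal succeeds every 30s
  return job.data
})
```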
@ -122,7 +122,9 @@ export async function roleToNumber(id?: string) {
|
|||
if (isBuiltin(id)) {
|
||||
return builtinRoleToNumber(id)
|
||||
}
|
||||
const hierarchy = (await getUserRoleHierarchy(id)) as RoleDoc[]
|
||||
const hierarchy = (await getUserRoleHierarchy(id, {
|
||||
defaultPublic: true,
|
||||
})) as RoleDoc[]
|
||||
for (let role of hierarchy) {
|
||||
if (isBuiltin(role?.inherits)) {
|
||||
return builtinRoleToNumber(role.inherits) + 1
|
||||
|
@ -192,12 +194,15 @@ export async function getRole(
|
|||
/**
|
||||
* Simple function to get all the roles based on the top level user role ID.
|
||||
*/
|
||||
async function getAllUserRoles(userRoleId?: string): Promise<RoleDoc[]> {
|
||||
async function getAllUserRoles(
|
||||
userRoleId?: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
): Promise<RoleDoc[]> {
|
||||
// admins have access to all roles
|
||||
if (userRoleId === BUILTIN_IDS.ADMIN) {
|
||||
return getAllRoles()
|
||||
}
|
||||
let currentRole = await getRole(userRoleId)
|
||||
let currentRole = await getRole(userRoleId, opts)
|
||||
let roles = currentRole ? [currentRole] : []
|
||||
let roleIds = [userRoleId]
|
||||
// get all the inherited roles
|
||||
|
@ -226,12 +231,16 @@ export async function getUserRoleIdHierarchy(
|
|||
* Returns an ordered array of the user's inherited role IDs, this can be used
|
||||
* to determine if a user can access something that requires a specific role.
|
||||
* @param userRoleId The user's role ID, this can be found in their access token.
|
||||
* @param opts optional - if want to default to public use this.
|
||||
* @returns returns an ordered array of the roles, with the first being their
|
||||
* highest level of access and the last being the lowest level.
|
||||
*/
|
||||
export async function getUserRoleHierarchy(userRoleId?: string) {
|
||||
export async function getUserRoleHierarchy(
|
||||
userRoleId?: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
) {
|
||||
// special case, if they don't have a role then they are a public user
|
||||
return getAllUserRoles(userRoleId)
|
||||
return getAllUserRoles(userRoleId, opts)
|
||||
}
|
||||
|
||||
// this function checks that the provided permissions are in an array format
|
||||
|
|
|
@ -25,12 +25,17 @@ import {
|
|||
import {
|
||||
getAccountHolderFromUserIds,
|
||||
isAdmin,
|
||||
isCreator,
|
||||
validateUniqueUser,
|
||||
} from "./utils"
|
||||
import { searchExistingEmails } from "./lookup"
|
||||
import { hash } from "../utils"
|
||||
|
||||
type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
|
||||
type QuotaUpdateFn = (
|
||||
change: number,
|
||||
creatorsChange: number,
|
||||
cb?: () => Promise<any>
|
||||
) => Promise<any>
|
||||
type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
|
||||
type FeatureFn = () => Promise<Boolean>
|
||||
type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
|
||||
|
@ -160,13 +165,9 @@ export class UserDB {
|
|||
}
|
||||
|
||||
static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
|
||||
const params: any = {
|
||||
include_docs: true,
|
||||
limit: opts.limit || 50,
|
||||
}
|
||||
let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
|
||||
opts.appId,
|
||||
params
|
||||
{ limit: opts.limit || 50 }
|
||||
)
|
||||
return response
|
||||
}
|
||||
|
@ -245,7 +246,8 @@ export class UserDB {
|
|||
}
|
||||
|
||||
const change = dbUser ? 0 : 1 // no change if there is existing user
|
||||
return UserDB.quotas.addUsers(change, async () => {
|
||||
const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
|
||||
return UserDB.quotas.addUsers(change, creatorsChange, async () => {
|
||||
await validateUniqueUser(email, tenantId)
|
||||
|
||||
let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
|
||||
|
@ -307,6 +309,7 @@ export class UserDB {
|
|||
|
||||
let usersToSave: any[] = []
|
||||
let newUsers: any[] = []
|
||||
let newCreators: any[] = []
|
||||
|
||||
const emails = newUsersRequested.map((user: User) => user.email)
|
||||
const existingEmails = await searchExistingEmails(emails)
|
||||
|
@ -327,59 +330,66 @@ export class UserDB {
|
|||
}
|
||||
newUser.userGroups = groups
|
||||
newUsers.push(newUser)
|
||||
if (isCreator(newUser)) {
|
||||
newCreators.push(newUser)
|
||||
}
|
||||
}
|
||||
|
||||
const account = await accountSdk.getAccountByTenantId(tenantId)
|
||||
return UserDB.quotas.addUsers(newUsers.length, async () => {
|
||||
// create the promises array that will be called by bulkDocs
|
||||
newUsers.forEach((user: any) => {
|
||||
usersToSave.push(
|
||||
UserDB.buildUser(
|
||||
user,
|
||||
{
|
||||
hashPassword: true,
|
||||
requirePassword: user.requirePassword,
|
||||
},
|
||||
tenantId,
|
||||
undefined, // no dbUser
|
||||
account
|
||||
return UserDB.quotas.addUsers(
|
||||
newUsers.length,
|
||||
newCreators.length,
|
||||
async () => {
|
||||
// create the promises array that will be called by bulkDocs
|
||||
newUsers.forEach((user: any) => {
|
||||
usersToSave.push(
|
||||
UserDB.buildUser(
|
||||
user,
|
||||
{
|
||||
hashPassword: true,
|
||||
requirePassword: user.requirePassword,
|
||||
},
|
||||
tenantId,
|
||||
undefined, // no dbUser
|
||||
account
|
||||
)
|
||||
)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
const usersToBulkSave = await Promise.all(usersToSave)
|
||||
await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
|
||||
const usersToBulkSave = await Promise.all(usersToSave)
|
||||
await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
|
||||
|
||||
// Post-processing of bulk added users, e.g. events and cache operations
|
||||
for (const user of usersToBulkSave) {
|
||||
// TODO: Refactor to bulk insert users into the info db
|
||||
// instead of relying on looping tenant creation
|
||||
await platform.users.addUser(tenantId, user._id, user.email)
|
||||
await eventHelpers.handleSaveEvents(user, undefined)
|
||||
}
|
||||
// Post-processing of bulk added users, e.g. events and cache operations
|
||||
for (const user of usersToBulkSave) {
|
||||
// TODO: Refactor to bulk insert users into the info db
|
||||
// instead of relying on looping tenant creation
|
||||
await platform.users.addUser(tenantId, user._id, user.email)
|
||||
await eventHelpers.handleSaveEvents(user, undefined)
|
||||
}
|
||||
|
||||
const saved = usersToBulkSave.map(user => {
|
||||
return {
|
||||
_id: user._id,
|
||||
email: user.email,
|
||||
}
|
||||
})
|
||||
|
||||
// now update the groups
|
||||
if (Array.isArray(saved) && groups) {
|
||||
const groupPromises = []
|
||||
const createdUserIds = saved.map(user => user._id)
|
||||
for (let groupId of groups) {
|
||||
groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
|
||||
}
|
||||
await Promise.all(groupPromises)
|
||||
}
|
||||
|
||||
const saved = usersToBulkSave.map(user => {
|
||||
return {
|
||||
_id: user._id,
|
||||
email: user.email,
|
||||
successful: saved,
|
||||
unsuccessful,
|
||||
}
|
||||
})
|
||||
|
||||
// now update the groups
|
||||
if (Array.isArray(saved) && groups) {
|
||||
const groupPromises = []
|
||||
const createdUserIds = saved.map(user => user._id)
|
||||
for (let groupId of groups) {
|
||||
groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
|
||||
}
|
||||
await Promise.all(groupPromises)
|
||||
}
|
||||
|
||||
return {
|
||||
successful: saved,
|
||||
unsuccessful,
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
|
||||
|
@ -419,11 +429,12 @@ export class UserDB {
|
|||
_deleted: true,
|
||||
}))
|
||||
const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
|
||||
const creatorsToDelete = usersToDelete.filter(isCreator)
|
||||
|
||||
await UserDB.quotas.removeUsers(toDelete.length)
|
||||
for (let user of usersToDelete) {
|
||||
await bulkDeleteProcessing(user)
|
||||
}
|
||||
await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)
|
||||
|
||||
// Build Response
|
||||
// index users by id
|
||||
|
@ -472,7 +483,8 @@ export class UserDB {
|
|||
|
||||
await db.remove(userId, dbUser._rev)
|
||||
|
||||
await UserDB.quotas.removeUsers(1)
|
||||
const creatorsToDelete = isCreator(dbUser) ? 1 : 0
|
||||
await UserDB.quotas.removeUsers(1, creatorsToDelete)
|
||||
await eventHelpers.handleDeleteEvents(dbUser)
|
||||
await cache.user.invalidateUser(userId)
|
||||
await sessions.invalidateSessions(userId, { reason: "deletion" })
|
||||
|
|
|
@ -14,12 +14,13 @@ import {
|
|||
} from "../db"
|
||||
import {
|
||||
BulkDocsResponse,
|
||||
ContextUser,
|
||||
SearchQuery,
|
||||
SearchQueryOperators,
|
||||
SearchUsersRequest,
|
||||
User,
|
||||
ContextUser,
|
||||
DatabaseQueryOpts,
|
||||
CouchFindOptions,
|
||||
} from "@budibase/types"
|
||||
import { getGlobalDB } from "../context"
|
||||
import * as context from "../context"
|
||||
|
@ -140,7 +141,7 @@ export const getGlobalUserByEmail = async (
|
|||
|
||||
export const searchGlobalUsersByApp = async (
|
||||
appId: any,
|
||||
opts: any,
|
||||
opts: DatabaseQueryOpts,
|
||||
getOpts?: GetOpts
|
||||
) => {
|
||||
if (typeof appId !== "string") {
|
||||
|
@ -166,7 +167,10 @@ export const searchGlobalUsersByApp = async (
|
|||
Return any user who potentially has access to the application
|
||||
Admins, developers and app users with the explicitly role.
|
||||
*/
|
||||
export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
|
||||
export const searchGlobalUsersByAppAccess = async (
|
||||
appId: any,
|
||||
opts?: { limit?: number }
|
||||
) => {
|
||||
const roleSelector = `roles.${appId}`
|
||||
|
||||
let orQuery: any[] = [
|
||||
|
@ -187,7 +191,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
|
|||
orQuery.push(roleCheck)
|
||||
}
|
||||
|
||||
let searchOptions = {
|
||||
let searchOptions: CouchFindOptions = {
|
||||
selector: {
|
||||
$or: orQuery,
|
||||
_id: {
|
||||
|
@ -198,7 +202,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
|
|||
}
|
||||
|
||||
const resp = await directCouchFind(context.getGlobalDBName(), searchOptions)
|
||||
return resp?.rows
|
||||
return resp.rows
|
||||
}
|
||||
|
||||
export const getGlobalUserByAppPage = (appId: string, user: User) => {
|
||||
|
@ -245,7 +249,8 @@ export const paginatedUsers = async ({
|
|||
limit,
|
||||
}: SearchUsersRequest = {}) => {
|
||||
const db = getGlobalDB()
|
||||
const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
|
||||
const pageSize = limit ?? PAGE_LIMIT
|
||||
const pageLimit = pageSize + 1
|
||||
// get one extra document, to have the next page
|
||||
const opts: DatabaseQueryOpts = {
|
||||
include_docs: true,
|
||||
|
@ -272,7 +277,7 @@ export const paginatedUsers = async ({
|
|||
const response = await db.allDocs(getGlobalUserParams(null, opts))
|
||||
userList = response.rows.map((row: any) => row.doc)
|
||||
}
|
||||
return pagination(userList, pageLimit, {
|
||||
return pagination(userList, pageSize, {
|
||||
paginate: true,
|
||||
property,
|
||||
getKey,
|
||||
|
|
|
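The paginatedUsers change above requests one more document than the page size so the extra row can signal whether another page exists, then passes only the real page size to pagination(). A minimal sketch of that pattern; the fetchDocs callback is hypothetical, standing in for the db.allDocs() call in the real code:

```ts
// Hypothetical helper illustrating the "fetch pageSize + 1" pagination pattern.
async function fetchPage<T>(
  fetchDocs: (limit: number) => Promise<T[]>,
  pageSize: number
) {
  const rows = await fetchDocs(pageSize + 1) // get one extra document
  const hasNextPage = rows.length > pageSize
  return {
    data: rows.slice(0, pageSize), // never return the sentinel row
    hasNextPage,
  }
}
```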
@@ -0,0 +1,49 @@
export enum DurationType {
  MILLISECONDS = "milliseconds",
  SECONDS = "seconds",
  MINUTES = "minutes",
  HOURS = "hours",
  DAYS = "days",
}

const conversion: Record<DurationType, number> = {
  milliseconds: 1,
  seconds: 1000,
  minutes: 60 * 1000,
  hours: 60 * 60 * 1000,
  days: 24 * 60 * 60 * 1000,
}

export class Duration {
  static convert(from: DurationType, to: DurationType, duration: number) {
    const milliseconds = duration * conversion[from]
    return milliseconds / conversion[to]
  }

  static from(from: DurationType, duration: number) {
    return {
      to: (to: DurationType) => {
        return Duration.convert(from, to, duration)
      },
      toMs: () => {
        return Duration.convert(from, DurationType.MILLISECONDS, duration)
      },
    }
  }

  static fromSeconds(duration: number) {
    return Duration.from(DurationType.SECONDS, duration)
  }

  static fromMinutes(duration: number) {
    return Duration.from(DurationType.MINUTES, duration)
  }

  static fromHours(duration: number) {
    return Duration.from(DurationType.HOURS, duration)
  }

  static fromDays(duration: number) {
    return Duration.from(DurationType.DAYS, duration)
  }
}
@@ -1,3 +1,4 @@
export * from "./hashing"
export * from "./utils"
export * from "./stringUtils"
export * from "./Duration"
@@ -0,0 +1,19 @@
import { Duration, DurationType } from "../Duration"

describe("duration", () => {
  it("should convert minutes to milliseconds", () => {
    expect(Duration.fromMinutes(5).toMs()).toBe(300000)
  })

  it("should convert seconds to milliseconds", () => {
    expect(Duration.fromSeconds(30).toMs()).toBe(30000)
  })

  it("should convert days to milliseconds", () => {
    expect(Duration.fromDays(1).toMs()).toBe(86400000)
  })

  it("should convert minutes to days", () => {
    expect(Duration.fromMinutes(1440).to(DurationType.DAYS)).toBe(1)
  })
})
@ -0,0 +1,54 @@
|
|||
const _ = require('lodash/fp')
|
||||
const {structures} = require("../../../tests")
|
||||
|
||||
jest.mock("../../../src/context")
|
||||
jest.mock("../../../src/db")
|
||||
|
||||
const context = require("../../../src/context")
|
||||
const db = require("../../../src/db")
|
||||
|
||||
const {getCreatorCount} = require('../../../src/users/users')
|
||||
|
||||
describe("Users", () => {
|
||||
|
||||
let getGlobalDBMock
|
||||
let getGlobalUserParamsMock
|
||||
let paginationMock
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks()
|
||||
|
||||
getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
|
||||
getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
|
||||
paginationMock = jest.spyOn(db, "pagination")
|
||||
})
|
||||
|
||||
it("Retrieves the number of creators", async () => {
|
||||
const getUsers = (offset, limit, creators = false) => {
|
||||
const range = _.range(offset, limit)
|
||||
const opts = creators ? {builder: {global: true}} : undefined
|
||||
return range.map(() => structures.users.user(opts))
|
||||
}
|
||||
const page1Data = getUsers(0, 8)
|
||||
const page2Data = getUsers(8, 12, true)
|
||||
getGlobalDBMock.mockImplementation(() => ({
|
||||
name : "fake-db",
|
||||
allDocs: () => ({
|
||||
rows: [...page1Data, ...page2Data]
|
||||
})
|
||||
}))
|
||||
paginationMock.mockImplementationOnce(() => ({
|
||||
data: page1Data,
|
||||
hasNextPage: true,
|
||||
nextPage: "1"
|
||||
}))
|
||||
paginationMock.mockImplementation(() => ({
|
||||
data: page2Data,
|
||||
hasNextPage: false,
|
||||
nextPage: undefined
|
||||
}))
|
||||
const creatorsCount = await getCreatorCount()
|
||||
expect(creatorsCount).toBe(4)
|
||||
expect(paginationMock).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
})
|
|
@@ -1,2 +1,3 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")

export const MOCK_DATE_TIMESTAMP = 1577836800000
@ -123,6 +123,10 @@ export function customer(): Customer {
|
|||
export function subscription(): Subscription {
|
||||
return {
|
||||
amount: 10000,
|
||||
amounts: {
|
||||
user: 10000,
|
||||
creator: 0,
|
||||
},
|
||||
cancelAt: undefined,
|
||||
currency: "usd",
|
||||
currentPeriodEnd: 0,
|
||||
|
@ -131,6 +135,10 @@ export function subscription(): Subscription {
|
|||
duration: PriceDuration.MONTHLY,
|
||||
pastDueAt: undefined,
|
||||
quantity: 0,
|
||||
quantities: {
|
||||
user: 0,
|
||||
creator: 0,
|
||||
},
|
||||
status: "active",
|
||||
}
|
||||
}
|
||||
|
|
|
@ -159,8 +159,10 @@
|
|||
{#if selectedImage.size}
|
||||
<div class="filesize">
|
||||
{#if selectedImage.size <= BYTES_IN_MB}
|
||||
{`${selectedImage.size / BYTES_IN_KB} KB`}
|
||||
{:else}{`${selectedImage.size / BYTES_IN_MB} MB`}{/if}
|
||||
{`${(selectedImage.size / BYTES_IN_KB).toFixed(1)} KB`}
|
||||
{:else}{`${(selectedImage.size / BYTES_IN_MB).toFixed(
|
||||
1
|
||||
)} MB`}{/if}
|
||||
</div>
|
||||
{/if}
|
||||
{#if !disabled}
|
||||
|
@ -203,8 +205,8 @@
|
|||
{#if file.size}
|
||||
<div class="filesize">
|
||||
{#if file.size <= BYTES_IN_MB}
|
||||
{`${file.size / BYTES_IN_KB} KB`}
|
||||
{:else}{`${file.size / BYTES_IN_MB} MB`}{/if}
|
||||
{`${(file.size / BYTES_IN_KB).toFixed(1)} KB`}
|
||||
{:else}{`${(file.size / BYTES_IN_MB).toFixed(1)} MB`}{/if}
|
||||
</div>
|
||||
{/if}
|
||||
{#if !disabled}
|
||||
|
|
|
@@ -5,4 +5,4 @@ package-lock.json
release/
dist/
routify
.routify/
.routify/
@ -580,7 +580,7 @@ export const getFrontendStore = () => {
|
|||
let table = validTables.find(table => {
|
||||
return (
|
||||
table.sourceId !== BUDIBASE_INTERNAL_DB_ID &&
|
||||
table.type === DB_TYPE_INTERNAL
|
||||
table.sourceType === DB_TYPE_INTERNAL
|
||||
)
|
||||
})
|
||||
if (table) {
|
||||
|
@ -591,7 +591,7 @@ export const getFrontendStore = () => {
|
|||
table = validTables.find(table => {
|
||||
return (
|
||||
table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
|
||||
table.type === DB_TYPE_INTERNAL
|
||||
table.sourceType === DB_TYPE_INTERNAL
|
||||
)
|
||||
})
|
||||
if (table) {
|
||||
|
@ -599,7 +599,7 @@ export const getFrontendStore = () => {
|
|||
}
|
||||
|
||||
// Finally try an external table
|
||||
return validTables.find(table => table.type === DB_TYPE_EXTERNAL)
|
||||
return validTables.find(table => table.sourceType === DB_TYPE_EXTERNAL)
|
||||
},
|
||||
enrichEmptySettings: (component, opts) => {
|
||||
if (!component?._component) {
|
||||
|
|
|
@ -2,14 +2,14 @@ import sanitizeUrl from "./utils/sanitizeUrl"
|
|||
import { Screen } from "./utils/Screen"
|
||||
import { Component } from "./utils/Component"
|
||||
|
||||
export default function (datasources) {
|
||||
export default function (datasources, mode = "table") {
|
||||
if (!Array.isArray(datasources)) {
|
||||
return []
|
||||
}
|
||||
return datasources.map(datasource => {
|
||||
return {
|
||||
name: `${datasource.label} - List`,
|
||||
create: () => createScreen(datasource),
|
||||
create: () => createScreen(datasource, mode),
|
||||
id: ROW_LIST_TEMPLATE,
|
||||
resourceId: datasource.resourceId,
|
||||
}
|
||||
|
@ -40,10 +40,24 @@ const generateTableBlock = datasource => {
|
|||
return tableBlock
|
||||
}
|
||||
|
||||
const createScreen = datasource => {
|
||||
const generateGridBlock = datasource => {
|
||||
const gridBlock = new Component("@budibase/standard-components/gridblock")
|
||||
gridBlock
|
||||
.customProps({
|
||||
table: datasource,
|
||||
})
|
||||
.instanceName(`${datasource.label} - Grid block`)
|
||||
return gridBlock
|
||||
}
|
||||
|
||||
const createScreen = (datasource, mode) => {
|
||||
return new Screen()
|
||||
.route(rowListUrl(datasource))
|
||||
.instanceName(`${datasource.label} - List`)
|
||||
.addChild(generateTableBlock(datasource))
|
||||
.addChild(
|
||||
mode === "table"
|
||||
? generateTableBlock(datasource)
|
||||
: generateGridBlock(datasource)
|
||||
)
|
||||
.json()
|
||||
}
|
||||
|
|
|
@ -16,7 +16,6 @@
|
|||
$: linkedTable = $tables.list.find(table => table._id === linkedTableId)
|
||||
$: schema = linkedTable?.schema
|
||||
$: table = $tables.list.find(table => table._id === tableId)
|
||||
$: type = table?.type
|
||||
$: fetchData(tableId, rowId)
|
||||
$: {
|
||||
let rowLabel = row?.[table?.primaryDisplay]
|
||||
|
@ -41,5 +40,5 @@
|
|||
</script>
|
||||
|
||||
{#if row && row._id === rowId}
|
||||
<Table {title} {schema} {data} {type} />
|
||||
<Table {title} {schema} {data} />
|
||||
{/if}
|
||||
|
|
|
@ -24,17 +24,23 @@
|
|||
|
||||
let selectedRows = []
|
||||
let customRenderers = []
|
||||
let parsedSchema = {}
|
||||
|
||||
$: if (schema) {
|
||||
parsedSchema = Object.keys(schema).reduce((acc, key) => {
|
||||
acc[key] =
|
||||
typeof schema[key] === "string" ? { type: schema[key] } : schema[key]
|
||||
|
||||
if (!canBeSortColumn(acc[key].type)) {
|
||||
acc[key].sortable = false
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
}
|
||||
|
||||
$: selectedRows, dispatch("selectionUpdated", selectedRows)
|
||||
$: isUsersTable = tableId === TableNames.USERS
|
||||
$: data && resetSelectedRows()
|
||||
$: {
|
||||
Object.values(schema || {}).forEach(col => {
|
||||
if (!canBeSortColumn(col.type)) {
|
||||
col.sortable = false
|
||||
}
|
||||
})
|
||||
}
|
||||
$: {
|
||||
if (isUsersTable) {
|
||||
customRenderers = [
|
||||
|
@ -44,24 +50,24 @@
|
|||
},
|
||||
]
|
||||
UNEDITABLE_USER_FIELDS.forEach(field => {
|
||||
if (schema[field]) {
|
||||
schema[field].editable = false
|
||||
if (parsedSchema[field]) {
|
||||
parsedSchema[field].editable = false
|
||||
}
|
||||
})
|
||||
if (schema.email) {
|
||||
schema.email.displayName = "Email"
|
||||
if (parsedSchema.email) {
|
||||
parsedSchema.email.displayName = "Email"
|
||||
}
|
||||
if (schema.roleId) {
|
||||
schema.roleId.displayName = "Role"
|
||||
if (parsedSchema.roleId) {
|
||||
parsedSchema.roleId.displayName = "Role"
|
||||
}
|
||||
if (schema.firstName) {
|
||||
schema.firstName.displayName = "First Name"
|
||||
if (parsedSchema.firstName) {
|
||||
parsedSchema.firstName.displayName = "First Name"
|
||||
}
|
||||
if (schema.lastName) {
|
||||
schema.lastName.displayName = "Last Name"
|
||||
if (parsedSchema.lastName) {
|
||||
parsedSchema.lastName.displayName = "Last Name"
|
||||
}
|
||||
if (schema.status) {
|
||||
schema.status.displayName = "Status"
|
||||
if (parsedSchema.status) {
|
||||
parsedSchema.status.displayName = "Status"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -97,7 +103,7 @@
|
|||
<div class="table-wrapper">
|
||||
<Table
|
||||
{data}
|
||||
{schema}
|
||||
schema={parsedSchema}
|
||||
{loading}
|
||||
{customRenderers}
|
||||
{rowCount}
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
import GridRelationshipButton from "components/backend/DataTable/buttons/grid/GridRelationshipButton.svelte"
|
||||
import GridEditColumnModal from "components/backend/DataTable/modals/grid/GridEditColumnModal.svelte"
|
||||
import GridUsersTableButton from "components/backend/DataTable/modals/grid/GridUsersTableButton.svelte"
|
||||
import { DB_TYPE_EXTERNAL } from "constants/backend"
|
||||
|
||||
const userSchemaOverrides = {
|
||||
firstName: { displayName: "First name", disabled: true },
|
||||
|
@ -27,7 +28,7 @@
|
|||
|
||||
$: id = $tables.selected?._id
|
||||
$: isUsersTable = id === TableNames.USERS
|
||||
$: isInternal = $tables.selected?.type !== "external"
|
||||
$: isInternal = $tables.selected?.sourceType !== DB_TYPE_EXTERNAL
|
||||
$: gridDatasource = {
|
||||
type: "table",
|
||||
tableId: id,
|
||||
|
@ -46,10 +47,7 @@
|
|||
tables.replaceTable(id, e.detail)
|
||||
|
||||
// We need to refresh datasources when an external table changes.
|
||||
// Type "external" may exist - sometimes type is "table" and sometimes it
|
||||
// is "external" - it has different meanings in different endpoints.
|
||||
// If we check both these then we hopefully catch all external tables.
|
||||
if (e.detail?.type === "external" || e.detail?.sql) {
|
||||
if (e.detail?.sourceType === DB_TYPE_EXTERNAL) {
|
||||
await datasources.fetch()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,9 +17,9 @@
|
|||
let hideAutocolumns = true
|
||||
let data = []
|
||||
let loading = false
|
||||
let type = "internal"
|
||||
|
||||
$: name = view.name
|
||||
$: schema = view.schema
|
||||
$: calculation = view.calculation
|
||||
|
||||
$: supportedFormats = Object.values(ROW_EXPORT_FORMATS).filter(key => {
|
||||
|
@ -61,11 +61,10 @@
|
|||
|
||||
<Table
|
||||
title={decodeURI(name)}
|
||||
schema={view.schema}
|
||||
{schema}
|
||||
tableId={view.tableId}
|
||||
{data}
|
||||
{loading}
|
||||
{type}
|
||||
rowCount={10}
|
||||
allowEditing={false}
|
||||
bind:hideAutocolumns
|
||||
|
|
|
@@ -10,6 +10,6 @@
<ImportButton
  {disabled}
  tableId={$datasource?.tableId}
  tableType={$definition?.type}
  tableType={$definition?.sourceType}
  on:importrows={rows.actions.refreshData}
/>
@ -26,6 +26,7 @@
|
|||
ALLOWABLE_NUMBER_TYPES,
|
||||
SWITCHABLE_TYPES,
|
||||
PrettyRelationshipDefinitions,
|
||||
DB_TYPE_EXTERNAL,
|
||||
} from "constants/backend"
|
||||
import { getAutoColumnInformation, buildAutoColumn } from "builderStore/utils"
|
||||
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
|
||||
|
@ -254,10 +255,11 @@
|
|||
!uneditable &&
|
||||
editableColumn?.type !== AUTO_TYPE &&
|
||||
!editableColumn.autocolumn
|
||||
$: external = table.type === "external"
|
||||
$: externalTable = table.sourceType === DB_TYPE_EXTERNAL
|
||||
// in the case of internal tables the sourceId will just be undefined
|
||||
$: tableOptions = $tables.list.filter(
|
||||
opt => opt.type === table.type && table.sourceId === opt.sourceId
|
||||
opt =>
|
||||
opt.sourceType === table.sourceType && table.sourceId === opt.sourceId
|
||||
)
|
||||
$: typeEnabled =
|
||||
!originalName ||
|
||||
|
@ -409,7 +411,7 @@
|
|||
editableColumn.type === FieldType.BB_REFERENCE &&
|
||||
editableColumn.subtype === FieldSubtype.USERS
|
||||
|
||||
if (!external) {
|
||||
if (!externalTable) {
|
||||
return [
|
||||
FIELDS.STRING,
|
||||
FIELDS.BARCODEQR,
|
||||
|
@ -441,7 +443,7 @@
|
|||
isUsers ? FIELDS.USERS : FIELDS.USER,
|
||||
]
|
||||
// no-sql or a spreadsheet
|
||||
if (!external || table.sql) {
|
||||
if (!externalTable || table.sql) {
|
||||
fields = [...fields, FIELDS.LINK, FIELDS.ARRAY]
|
||||
}
|
||||
return fields
|
||||
|
@ -486,7 +488,7 @@
|
|||
})
|
||||
}
|
||||
const newError = {}
|
||||
if (!external && fieldInfo.name?.startsWith("_")) {
|
||||
if (!externalTable && fieldInfo.name?.startsWith("_")) {
|
||||
newError.name = `Column name cannot start with an underscore.`
|
||||
} else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {
|
||||
newError.name = `Illegal character; must be alpha-numeric.`
|
||||
|
@ -498,7 +500,7 @@
|
|||
newError.name = `Column name already in use.`
|
||||
}
|
||||
|
||||
if (fieldInfo.type == "auto" && !fieldInfo.subtype) {
|
||||
if (fieldInfo.type === "auto" && !fieldInfo.subtype) {
|
||||
newError.subtype = `Auto Column requires a type`
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
<script>
|
||||
import { Select, Toggle, Multiselect } from "@budibase/bbui"
|
||||
import { FIELDS } from "constants/backend"
|
||||
import { DB_TYPE_INTERNAL, FIELDS } from "constants/backend"
|
||||
import { API } from "api"
|
||||
import { parseFile } from "./utils"
|
||||
|
||||
|
@ -169,7 +169,7 @@
|
|||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{#if tableType === "internal"}
|
||||
{#if tableType === DB_TYPE_INTERNAL}
|
||||
<br />
|
||||
<Toggle
|
||||
bind:value={updateExistingRows}
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
import {
|
||||
BUDIBASE_INTERNAL_DB_ID,
|
||||
BUDIBASE_DATASOURCE_TYPE,
|
||||
DB_TYPE_INTERNAL,
|
||||
} from "constants/backend"
|
||||
|
||||
$: tableNames = $tables.list.map(table => table.name)
|
||||
|
@ -55,8 +56,9 @@
|
|||
name,
|
||||
schema: { ...schema },
|
||||
rows,
|
||||
type: "internal",
|
||||
type: "table",
|
||||
sourceId: targetDatasourceId,
|
||||
sourceType: DB_TYPE_INTERNAL,
|
||||
}
|
||||
|
||||
// Only set primary display if defined
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
notifications,
|
||||
} from "@budibase/bbui"
|
||||
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
|
||||
import { DB_TYPE_EXTERNAL } from "constants/backend"
|
||||
|
||||
export let table
|
||||
|
||||
|
@ -27,8 +28,8 @@
|
|||
let willBeDeleted
|
||||
let deleteTableName
|
||||
|
||||
$: external = table?.type === "external"
|
||||
$: allowDeletion = !external || table?.created
|
||||
$: externalTable = table?.sourceType === DB_TYPE_EXTERNAL
|
||||
$: allowDeletion = !externalTable || table?.created
|
||||
|
||||
function showDeleteModal() {
|
||||
templateScreens = $store.screens.filter(
|
||||
|
@ -48,7 +49,7 @@
|
|||
for (let screen of templateScreens) {
|
||||
await store.actions.screens.delete(screen)
|
||||
}
|
||||
if (table.type === "external") {
|
||||
if (table.sourceType === DB_TYPE_EXTERNAL) {
|
||||
await datasources.fetch()
|
||||
}
|
||||
notifications.success("Table deleted")
|
||||
|
@ -91,7 +92,7 @@
|
|||
<div slot="control" class="icon">
|
||||
<Icon s hoverable name="MoreSmallList" />
|
||||
</div>
|
||||
{#if !external}
|
||||
{#if !externalTable}
|
||||
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
|
||||
{/if}
|
||||
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
|
||||
|
|
|
@@ -23,7 +23,7 @@
    try {
      return await API.uploadBuilderAttachment(data)
    } catch (error) {
      notifications.error("Failed to upload attachment")
      notifications.error(error.message || "Failed to upload attachment")
      return []
    }
  }
@ -39,7 +39,15 @@
|
|||
allowCreator
|
||||
) => {
|
||||
if (allowedRoles?.length) {
|
||||
return roles.filter(role => allowedRoles.includes(role._id))
|
||||
const filteredRoles = roles.filter(role =>
|
||||
allowedRoles.includes(role._id)
|
||||
)
|
||||
return [
|
||||
...filteredRoles,
|
||||
...(allowedRoles.includes(Constants.Roles.CREATOR)
|
||||
? [{ _id: Constants.Roles.CREATOR, name: "Creator", enabled: false }]
|
||||
: []),
|
||||
]
|
||||
}
|
||||
let newRoles = [...roles]
|
||||
|
||||
|
@ -129,8 +137,9 @@
|
|||
getOptionColour={getColor}
|
||||
getOptionIcon={getIcon}
|
||||
isOptionEnabled={option =>
|
||||
option._id !== Constants.Roles.CREATOR ||
|
||||
$licensing.perAppBuildersEnabled}
|
||||
(option._id !== Constants.Roles.CREATOR ||
|
||||
$licensing.perAppBuildersEnabled) &&
|
||||
option.enabled !== false}
|
||||
{placeholder}
|
||||
{error}
|
||||
/>
|
||||
|
|
|
@ -23,6 +23,7 @@ import BasicColumnEditor from "./controls/ColumnEditor/BasicColumnEditor.svelte"
|
|||
import GridColumnEditor from "./controls/ColumnEditor/GridColumnEditor.svelte"
|
||||
import BarButtonList from "./controls/BarButtonList.svelte"
|
||||
import FieldConfiguration from "./controls/FieldConfiguration/FieldConfiguration.svelte"
|
||||
import ButtonConfiguration from "./controls/ButtonConfiguration/ButtonConfiguration.svelte"
|
||||
import RelationshipFilterEditor from "./controls/RelationshipFilterEditor.svelte"
|
||||
|
||||
const componentMap = {
|
||||
|
@ -48,6 +49,7 @@ const componentMap = {
|
|||
"filter/relationship": RelationshipFilterEditor,
|
||||
url: URLSelect,
|
||||
fieldConfiguration: FieldConfiguration,
|
||||
buttonConfiguration: ButtonConfiguration,
|
||||
columns: ColumnEditor,
|
||||
"columns/basic": BasicColumnEditor,
|
||||
"columns/grid": GridColumnEditor,
|
||||
|
|
|
@ -0,0 +1,134 @@
|
|||
<script>
|
||||
import DraggableList from "../DraggableList/DraggableList.svelte"
|
||||
import ButtonSetting from "./ButtonSetting.svelte"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import { store } from "builderStore"
|
||||
import { Helpers } from "@budibase/bbui"
|
||||
|
||||
export let componentBindings
|
||||
export let bindings
|
||||
export let value
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
let focusItem
|
||||
|
||||
$: buttonList = sanitizeValue(value) || []
|
||||
$: buttonCount = buttonList.length
|
||||
$: itemProps = {
|
||||
componentBindings: componentBindings || [],
|
||||
bindings,
|
||||
removeButton,
|
||||
canRemove: buttonCount > 1,
|
||||
}
|
||||
|
||||
const sanitizeValue = val => {
|
||||
return val?.map(button => {
|
||||
return button._component ? button : buildPseudoInstance(button)
|
||||
})
|
||||
}
|
||||
|
||||
const processItemUpdate = e => {
|
||||
const updatedField = e.detail
|
||||
const newButtonList = [...buttonList]
|
||||
const fieldIdx = newButtonList.findIndex(pSetting => {
|
||||
return pSetting._id === updatedField?._id
|
||||
})
|
||||
if (fieldIdx === -1) {
|
||||
newButtonList.push(updatedField)
|
||||
} else {
|
||||
newButtonList[fieldIdx] = updatedField
|
||||
}
|
||||
dispatch("change", newButtonList)
|
||||
}
|
||||
|
||||
const listUpdated = e => {
|
||||
dispatch("change", [...e.detail])
|
||||
}
|
||||
|
||||
const buildPseudoInstance = cfg => {
|
||||
return store.actions.components.createInstance(
|
||||
`@budibase/standard-components/button`,
|
||||
{
|
||||
_instanceName: Helpers.uuid(),
|
||||
text: cfg.text,
|
||||
type: cfg.type || "primary",
|
||||
},
|
||||
{}
|
||||
)
|
||||
}
|
||||
|
||||
const addButton = () => {
|
||||
const newButton = buildPseudoInstance({
|
||||
text: `Button ${buttonCount + 1}`,
|
||||
})
|
||||
dispatch("change", [...buttonList, newButton])
|
||||
focusItem = newButton._id
|
||||
}
|
||||
|
||||
const removeButton = id => {
|
||||
dispatch(
|
||||
"change",
|
||||
buttonList.filter(button => button._id !== id)
|
||||
)
|
||||
}
|
||||
</script>
|
||||
|
||||
<div class="button-configuration">
|
||||
{#if buttonCount}
|
||||
<DraggableList
|
||||
on:change={listUpdated}
|
||||
on:itemChange={processItemUpdate}
|
||||
items={buttonList}
|
||||
listItemKey={"_id"}
|
||||
listType={ButtonSetting}
|
||||
listTypeProps={itemProps}
|
||||
focus={focusItem}
|
||||
draggable={buttonCount > 1}
|
||||
/>
|
||||
|
||||
<div class="list-footer" on:click={addButton}>
|
||||
<div class="add-button">Add button</div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.button-configuration :global(.spectrum-ActionButton) {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.button-configuration :global(.list-wrap > li:last-child),
|
||||
.button-configuration :global(.list-wrap) {
|
||||
border-bottom-left-radius: unset;
|
||||
border-bottom-right-radius: unset;
|
||||
border-bottom: 0px;
|
||||
}
|
||||
|
||||
.list-footer {
|
||||
width: 100%;
|
||||
border-bottom-left-radius: 4px;
|
||||
border-bottom-right-radius: 4px;
|
||||
background-color: var(
|
||||
--spectrum-table-background-color,
|
||||
var(--spectrum-global-color-gray-50)
|
||||
);
|
||||
transition: background-color ease-in-out 130ms;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
border: 1px solid
|
||||
var(--spectrum-table-border-color, var(--spectrum-alias-border-color-mid));
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.add-button {
|
||||
margin: var(--spacing-s);
|
||||
}
|
||||
|
||||
.list-footer:hover {
|
||||
background-color: var(
|
||||
--spectrum-table-row-background-color-hover,
|
||||
var(--spectrum-alias-highlight-hover)
|
||||
);
|
||||
}
|
||||
</style>
|
|
@ -0,0 +1,64 @@
|
|||
<script>
|
||||
import EditComponentPopover from "../EditComponentPopover.svelte"
|
||||
import { Icon } from "@budibase/bbui"
|
||||
import { runtimeToReadableBinding } from "builderStore/dataBinding"
|
||||
import { isJSBinding } from "@budibase/string-templates"
|
||||
|
||||
export let item
|
||||
export let componentBindings
|
||||
export let bindings
|
||||
export let anchor
|
||||
export let removeButton
|
||||
export let canRemove
|
||||
|
||||
$: readableText = isJSBinding(item.text)
|
||||
? "(JavaScript function)"
|
||||
: runtimeToReadableBinding([...bindings, componentBindings], item.text)
|
||||
</script>
|
||||
|
||||
<div class="list-item-body">
|
||||
<div class="list-item-left">
|
||||
<EditComponentPopover
|
||||
{anchor}
|
||||
componentInstance={item}
|
||||
{componentBindings}
|
||||
{bindings}
|
||||
on:change
|
||||
/>
|
||||
<div class="field-label">{readableText || "Button"}</div>
|
||||
</div>
|
||||
<div class="list-item-right">
|
||||
<Icon
|
||||
disabled={!canRemove}
|
||||
size="S"
|
||||
name="Close"
|
||||
hoverable
|
||||
on:click={() => removeButton(item._id)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.field-label {
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
}
|
||||
.list-item-body,
|
||||
.list-item-left {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-m);
|
||||
min-width: 0;
|
||||
}
|
||||
.list-item-body {
|
||||
margin-top: 8px;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
.list-item-right :global(div.spectrum-Switch) {
|
||||
margin: 0px;
|
||||
}
|
||||
.list-item-body {
|
||||
justify-content: space-between;
|
||||
}
|
||||
</style>
|
|
@ -1,10 +1,10 @@
|
|||
<script>
|
||||
import { Icon } from "@budibase/bbui"
|
||||
import { dndzone } from "svelte-dnd-action"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import { generate } from "shortid"
|
||||
import { setContext } from "svelte"
|
||||
import { writable } from "svelte/store"
|
||||
import { writable, get } from "svelte/store"
|
||||
import DragHandle from "./drag-handle.svelte"
|
||||
|
||||
export let items = []
|
||||
export let showHandle = true
|
||||
|
@ -12,6 +12,7 @@
|
|||
export let listTypeProps = {}
|
||||
export let listItemKey
|
||||
export let draggable = true
|
||||
export let focus
|
||||
|
||||
let store = writable({
|
||||
selected: null,
|
||||
|
@ -27,6 +28,10 @@
|
|||
|
||||
setContext("draggable", store)
|
||||
|
||||
$: if (focus && store) {
|
||||
get(store).actions.select(focus)
|
||||
}
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
const flipDurationMs = 150
|
||||
|
||||
|
@ -82,13 +87,16 @@
|
|||
>
|
||||
{#each draggableItems as draggable (draggable.id)}
|
||||
<li
|
||||
on:mousedown={() => {
|
||||
get(store).actions.select()
|
||||
}}
|
||||
bind:this={anchors[draggable.id]}
|
||||
class:highlighted={draggable.id === $store.selected}
|
||||
>
|
||||
<div class="left-content">
|
||||
{#if showHandle}
|
||||
<div class="handle" aria-label="drag-handle">
|
||||
<Icon name="DragHandle" size="XL" />
|
||||
<div class="handle">
|
||||
<DragHandle />
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
@ -142,8 +150,9 @@
|
|||
border-top-right-radius: 4px;
|
||||
}
|
||||
.list-wrap > li:last-child {
|
||||
border-top-left-radius: var(--spectrum-table-regular-border-radius);
|
||||
border-top-right-radius: var(--spectrum-table-regular-border-radius);
|
||||
border-bottom-left-radius: 4px;
|
||||
border-bottom-right-radius: 4px;
|
||||
border-bottom: 0px;
|
||||
}
|
||||
.right-content {
|
||||
flex: 1;
|
||||
|
@ -153,4 +162,15 @@
|
|||
padding-left: var(--spacing-s);
|
||||
padding-right: var(--spacing-s);
|
||||
}
|
||||
.handle {
|
||||
display: flex;
|
||||
height: var(--spectrum-global-dimension-size-150);
|
||||
}
|
||||
.handle :global(svg) {
|
||||
fill: var(--spectrum-global-color-gray-500);
|
||||
margin-right: var(--spacing-m);
|
||||
margin-left: 2px;
|
||||
width: var(--spectrum-global-dimension-size-65);
|
||||
height: 100%;
|
||||
}
|
||||
</style>
|
|
@ -0,0 +1,31 @@
|
|||
<svg
|
||||
class="drag-handle spectrum-Icon spectrum-Icon--sizeS"
|
||||
focusable="false"
|
||||
aria-hidden="true"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path
|
||||
d="m1,11c0.55228,0 1,-0.4477 1,-1c0,-0.5523 -0.44772,-1 -1,-1c-0.55228,0 -1,0.4477 -1,1c0,0.5523 0.44772,1 1,1z"
|
||||
/>
|
||||
<path
|
||||
d="m1,8c0.55228,0 1,-0.4477 1,-1c0,-0.55228 -0.44772,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.5523 0.44772,1 1,1z"
|
||||
/>
|
||||
<path
|
||||
d="m1,5c0.55228,0 1,-0.44772 1,-1c0,-0.55228 -0.44772,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.55228 0.44772,1 1,1z"
|
||||
/>
|
||||
<path
|
||||
d="m1,2c0.55228,0 1,-0.44772 1,-1c0,-0.55228 -0.44772,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.55228 0.44772,1 1,1z"
|
||||
/>
|
||||
<path
|
||||
d="m4,11c0.5523,0 1,-0.4477 1,-1c0,-0.5523 -0.4477,-1 -1,-1c-0.55228,0 -1,0.4477 -1,1c0,0.5523 0.44772,1 1,1z"
|
||||
/>
|
||||
<path
|
||||
d="m4,8c0.5523,0 1,-0.4477 1,-1c0,-0.55228 -0.4477,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.5523 0.44772,1 1,1z"
|
||||
/>
|
||||
<path
|
||||
d="m4,5c0.5523,0 1,-0.44772 1,-1c0,-0.55228 -0.4477,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.55228 0.44772,1 1,1z"
|
||||
/>
|
||||
<path
|
||||
d="m4,2c0.5523,0 1,-0.44772 1,-1c0,-0.55228 -0.4477,-1 -1,-1c-0.55228,0 -1,0.44772 -1,1c0,0.55228 0.44772,1 1,1z"
|
||||
/>
|
||||
</svg>
|
|
@ -3,31 +3,35 @@
|
|||
import { store } from "builderStore"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import ComponentSettingsSection from "../../../../../pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte"
|
||||
import ComponentSettingsSection from "../../../../pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte"
|
||||
import { getContext } from "svelte"
|
||||
|
||||
export let anchor
|
||||
export let field
|
||||
export let componentInstance
|
||||
export let componentBindings
|
||||
export let bindings
|
||||
export let parseSettings
|
||||
|
||||
const draggable = getContext("draggable")
|
||||
const dispatch = createEventDispatcher()
|
||||
|
||||
let popover
|
||||
let drawers = []
|
||||
let pseudoComponentInstance
|
||||
let open = false
|
||||
|
||||
$: if (open && $draggable.selected && $draggable.selected != field._id) {
|
||||
// Auto hide the component when another item is selected
|
||||
$: if (open && $draggable.selected != componentInstance._id) {
|
||||
popover.hide()
|
||||
}
|
||||
|
||||
$: if (field) {
|
||||
pseudoComponentInstance = field
|
||||
// Open automatically if the component is marked as selected
|
||||
$: if (!open && $draggable.selected === componentInstance._id && popover) {
|
||||
popover.show()
|
||||
open = true
|
||||
}
|
||||
|
||||
$: componentDef = store.actions.components.getDefinition(
|
||||
pseudoComponentInstance._component
|
||||
componentInstance._component
|
||||
)
|
||||
$: parsedComponentDef = processComponentDefinitionSettings(componentDef)
|
||||
|
||||
|
@ -36,17 +40,16 @@
|
|||
return {}
|
||||
}
|
||||
const clone = cloneDeep(componentDef)
|
||||
const updatedSettings = clone.settings
|
||||
.filter(setting => setting.key !== "field")
|
||||
.map(setting => {
|
||||
return { ...setting, nested: true }
|
||||
})
|
||||
clone.settings = updatedSettings
|
||||
|
||||
if (typeof parseSettings === "function") {
|
||||
clone.settings = parseSettings(clone.settings)
|
||||
}
|
||||
|
||||
return clone
|
||||
}
|
||||
|
||||
const updateSetting = async (setting, value) => {
|
||||
const nestedComponentInstance = cloneDeep(pseudoComponentInstance)
|
||||
const nestedComponentInstance = cloneDeep(componentInstance)
|
||||
|
||||
const patchFn = store.actions.components.updateComponentSetting(
|
||||
setting.key,
|
||||
|
@ -54,12 +57,26 @@
|
|||
)
|
||||
patchFn(nestedComponentInstance)
|
||||
|
||||
const update = {
|
||||
...nestedComponentInstance,
|
||||
active: pseudoComponentInstance.active,
|
||||
dispatch("change", nestedComponentInstance)
|
||||
}
|
||||
|
||||
const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
|
||||
let { left, top } = cfg
|
||||
let percentageOffset = 30
|
||||
// left-outside
|
||||
left = anchorBounds.left - eleBounds.width - 18
|
||||
|
||||
// shift up from the anchor, if space allows
|
||||
let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
|
||||
let defaultTop = anchorBounds.top - offsetPos
|
||||
|
||||
if (window.innerHeight - defaultTop < eleBounds.height) {
|
||||
top = window.innerHeight - eleBounds.height - 5
|
||||
} else {
|
||||
top = anchorBounds.top - offsetPos
|
||||
}
|
||||
|
||||
dispatch("change", update)
|
||||
return { ...cfg, left, top }
|
||||
}
|
||||
</script>
|
||||
|
||||
|
@ -79,11 +96,11 @@
|
|||
bind:this={popover}
|
||||
on:open={() => {
|
||||
drawers = []
|
||||
$draggable.actions.select(field._id)
|
||||
$draggable.actions.select(componentInstance._id)
|
||||
}}
|
||||
on:close={() => {
|
||||
open = false
|
||||
if ($draggable.selected == field._id) {
|
||||
if ($draggable.selected == componentInstance._id) {
|
||||
$draggable.actions.select()
|
||||
}
|
||||
}}
|
||||
|
@ -92,33 +109,13 @@
|
|||
showPopover={drawers.length == 0}
|
||||
clickOutsideOverride={drawers.length > 0}
|
||||
maxHeight={600}
|
||||
handlePostionUpdate={(anchorBounds, eleBounds, cfg) => {
|
||||
let { left, top } = cfg
|
||||
let percentageOffset = 30
|
||||
// left-outside
|
||||
left = anchorBounds.left - eleBounds.width - 18
|
||||
|
||||
// shift up from the anchor, if space allows
|
||||
let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
|
||||
let defaultTop = anchorBounds.top - offsetPos
|
||||
|
||||
if (window.innerHeight - defaultTop < eleBounds.height) {
|
||||
top = window.innerHeight - eleBounds.height - 5
|
||||
} else {
|
||||
top = anchorBounds.top - offsetPos
|
||||
}
|
||||
|
||||
return { ...cfg, left, top }
|
||||
}}
|
||||
handlePostionUpdate={customPositionHandler}
|
||||
>
|
||||
<span class="popover-wrap">
|
||||
<Layout noPadding noGap>
|
||||
<div class="type-icon">
|
||||
<Icon name={parsedComponentDef.icon} />
|
||||
<span>{field.field}</span>
|
||||
</div>
|
||||
<slot name="header" />
|
||||
<ComponentSettingsSection
|
||||
componentInstance={pseudoComponentInstance}
|
||||
{componentInstance}
|
||||
componentDefinition={parsedComponentDef}
|
||||
isScreen={false}
|
||||
onUpdateSetting={updateSetting}
|
||||
|
@ -141,20 +138,4 @@
|
|||
.popover-wrap {
|
||||
background-color: var(--spectrum-alias-background-color-primary);
|
||||
}
|
||||
.type-icon {
|
||||
display: flex;
|
||||
gap: var(--spacing-m);
|
||||
margin: var(--spacing-xl);
|
||||
margin-bottom: 0px;
|
||||
height: var(--spectrum-alias-item-height-m);
|
||||
padding: 0px var(--spectrum-alias-item-padding-m);
|
||||
border-width: var(--spectrum-actionbutton-border-size);
|
||||
border-radius: var(--spectrum-alias-border-radius-regular);
|
||||
border: 1px solid
|
||||
var(
|
||||
--spectrum-actionbutton-m-border-color,
|
||||
var(--spectrum-alias-border-color)
|
||||
);
|
||||
align-items: center;
|
||||
}
|
||||
</style>
|
|
@ -7,7 +7,7 @@
|
|||
getComponentBindableProperties,
|
||||
} from "builderStore/dataBinding"
|
||||
import { currentAsset } from "builderStore"
|
||||
import DraggableList from "../DraggableList.svelte"
|
||||
import DraggableList from "../DraggableList/DraggableList.svelte"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import { store, selectedScreen } from "builderStore"
|
||||
import FieldSetting from "./FieldSetting.svelte"
|
||||
|
@ -50,7 +50,7 @@
|
|||
updateSanitsedFields(sanitisedValue)
|
||||
unconfigured = buildUnconfiguredOptions(schema, sanitisedFields)
|
||||
fieldList = [...sanitisedFields, ...unconfigured]
|
||||
.map(buildSudoInstance)
|
||||
.map(buildPseudoInstance)
|
||||
.filter(x => x != null)
|
||||
}
|
||||
|
||||
|
@ -104,7 +104,7 @@
|
|||
})
|
||||
}
|
||||
|
||||
const buildSudoInstance = instance => {
|
||||
const buildPseudoInstance = instance => {
|
||||
if (instance._component) {
|
||||
return instance
|
||||
}
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
<script>
|
||||
import EditFieldPopover from "./EditFieldPopover.svelte"
|
||||
import { Toggle } from "@budibase/bbui"
|
||||
import EditComponentPopover from "../EditComponentPopover.svelte"
|
||||
import { Toggle, Icon } from "@budibase/bbui"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import { store } from "builderStore"
|
||||
import { runtimeToReadableBinding } from "builderStore/dataBinding"
|
||||
import { isJSBinding } from "@budibase/string-templates"
|
||||
|
||||
export let item
|
||||
export let componentBindings
|
||||
|
@ -16,18 +19,43 @@
|
|||
dispatch("change", { ...cloneDeep(item), active: e.detail })
|
||||
}
|
||||
}
|
||||
const getReadableText = () => {
|
||||
if (item.label) {
|
||||
return isJSBinding(item.label)
|
||||
? "(JavaScript function)"
|
||||
: runtimeToReadableBinding([...bindings, componentBindings], item.label)
|
||||
}
|
||||
return item.field
|
||||
}
|
||||
|
||||
const parseSettings = settings => {
|
||||
return settings
|
||||
.filter(setting => setting.key !== "field")
|
||||
.map(setting => {
|
||||
return { ...setting, nested: true }
|
||||
})
|
||||
}
|
||||
|
||||
$: readableText = getReadableText(item)
|
||||
$: componentDef = store.actions.components.getDefinition(item._component)
|
||||
</script>
|
||||
|
||||
<div class="list-item-body">
|
||||
<div class="list-item-left">
|
||||
<EditFieldPopover
|
||||
<EditComponentPopover
|
||||
{anchor}
|
||||
field={item}
|
||||
componentInstance={item}
|
||||
{componentBindings}
|
||||
{bindings}
|
||||
{parseSettings}
|
||||
on:change
|
||||
/>
|
||||
<div class="field-label">{item.label || item.field}</div>
|
||||
>
|
||||
<div slot="header" class="type-icon">
|
||||
<Icon name={componentDef.icon} />
|
||||
<span>{item.field}</span>
|
||||
</div>
|
||||
</EditComponentPopover>
|
||||
<div class="field-label">{readableText}</div>
|
||||
</div>
|
||||
<div class="list-item-right">
|
||||
<Toggle on:change={onToggle(item)} text="" value={item.active} thin />
|
||||
|
@ -53,4 +81,20 @@
|
|||
.list-item-body {
|
||||
justify-content: space-between;
|
||||
}
|
||||
.type-icon {
|
||||
display: flex;
|
||||
gap: var(--spacing-m);
|
||||
margin: var(--spacing-xl);
|
||||
margin-bottom: 0px;
|
||||
height: var(--spectrum-alias-item-height-m);
|
||||
padding: 0px var(--spectrum-alias-item-padding-m);
|
||||
border-width: var(--spectrum-actionbutton-border-size);
|
||||
border-radius: var(--spectrum-alias-border-radius-regular);
|
||||
border: 1px solid
|
||||
var(
|
||||
--spectrum-actionbutton-m-border-color,
|
||||
var(--spectrum-alias-border-color)
|
||||
);
|
||||
align-items: center;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
</script>
|
||||
|
||||
<div class="table">
|
||||
<Table {schema} data={rowsCopy} type="external" allowEditing={false} />
|
||||
<Table {schema} data={rowsCopy} allowEditing={false} />
|
||||
</div>
|
||||
|
||||
<style>
|
||||
|
|
|
@ -196,8 +196,36 @@
}
}

const validateQuery = async () => {
const forbiddenBindings = /{{\s?user(\.(\w|\$)*\s?|\s?)}}/g
const bindingError = new Error(
"'user' is a protected binding and cannot be used"
)

if (forbiddenBindings.test(url)) {
throw bindingError
}

if (forbiddenBindings.test(query.fields.requestBody ?? "")) {
throw bindingError
}

Object.values(requestBindings).forEach(bindingValue => {
if (forbiddenBindings.test(bindingValue)) {
throw bindingError
}
})

Object.values(query.fields.headers).forEach(headerValue => {
if (forbiddenBindings.test(headerValue)) {
throw bindingError
}
})
}

async function runQuery() {
try {
await validateQuery()
response = await queries.preview(buildQuery())
if (response.rows.length === 0) {
notifications.info("Request did not return any data")
|
|
|
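The protected-binding check above can be exercised in isolation. A minimal sketch, assuming the same pattern; the helper name and sample inputs are illustrative, and the global flag is dropped here so repeated test() calls are not affected by RegExp lastIndex state.

// Sketch only: mirrors the "user is a protected binding" check added above.
const FORBIDDEN_USER_BINDING = /{{\s?user(\.(\w|\$)*\s?|\s?)}}/

function containsUserBinding(value: string): boolean {
  return FORBIDDEN_USER_BINDING.test(value)
}

containsUserBinding("https://example.com/{{ user.email }}") // true
containsUserBinding("{{user}}")                             // true
containsUserBinding("{{ binding.customerName }}")           // false
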
@ -516,6 +516,13 @@
|
|||
}
|
||||
return null
|
||||
}
|
||||
|
||||
const parseRole = user => {
|
||||
if (user.isAdminOrGlobalBuilder) {
|
||||
return Constants.Roles.CREATOR
|
||||
}
|
||||
return user.role
|
||||
}
|
||||
</script>
|
||||
|
||||
<svelte:window on:keydown={handleKeyDown} />
|
||||
|
@ -725,7 +732,7 @@
|
|||
<RoleSelect
|
||||
footer={getRoleFooter(user)}
|
||||
placeholder={false}
|
||||
value={user.role}
|
||||
value={parseRole(user)}
|
||||
allowRemove={user.role && !user.group}
|
||||
allowPublic={false}
|
||||
allowCreator={true}
|
||||
|
@ -744,7 +751,7 @@
|
|||
autoWidth
|
||||
align="right"
|
||||
allowedRoles={user.isAdminOrGlobalBuilder
|
||||
? [Constants.Roles.ADMIN]
|
||||
? [Constants.Roles.CREATOR]
|
||||
: null}
|
||||
/>
|
||||
</div>
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
import { ModalContent, Body, Input, notifications } from "@budibase/bbui"
|
||||
import { tables, datasources } from "stores/backend"
|
||||
import { goto } from "@roxi/routify"
|
||||
import { DB_TYPE_EXTERNAL } from "constants/backend"
|
||||
|
||||
export let datasource
|
||||
|
||||
|
@ -16,9 +17,10 @@
|
|||
function buildDefaultTable(tableName, datasourceId) {
|
||||
return {
|
||||
name: tableName,
|
||||
type: "external",
|
||||
type: "table",
|
||||
primary: ["id"],
|
||||
sourceId: datasourceId,
|
||||
sourceType: DB_TYPE_EXTERNAL,
|
||||
schema: {
|
||||
id: {
|
||||
autocolumn: true,
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
import { tables, datasources } from "stores/backend"
|
||||
import { goto } from "@roxi/routify"
|
||||
import { onMount } from "svelte"
|
||||
import { BUDIBASE_INTERNAL_DB_ID } from "constants/backend"
|
||||
import { BUDIBASE_INTERNAL_DB_ID, DB_TYPE_EXTERNAL } from "constants/backend"
|
||||
import { TableNames } from "constants"
|
||||
import { store } from "builderStore"
|
||||
|
||||
|
@ -14,7 +14,7 @@
|
|||
$: store.actions.websocket.selectResource(BUDIBASE_INTERNAL_DB_ID)
|
||||
$: internalTablesBySourceId = $tables.list.filter(
|
||||
table =>
|
||||
table.type !== "external" &&
|
||||
table.sourceType !== DB_TYPE_EXTERNAL &&
|
||||
table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
|
||||
table._id !== TableNames.USERS
|
||||
)
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
import ICONS from "components/backend/DatasourceNavigator/icons"
|
||||
import { tables, datasources } from "stores/backend"
|
||||
import { goto } from "@roxi/routify"
|
||||
import { DEFAULT_BB_DATASOURCE_ID } from "constants/backend"
|
||||
import { DEFAULT_BB_DATASOURCE_ID, DB_TYPE_EXTERNAL } from "constants/backend"
|
||||
import { onMount } from "svelte"
|
||||
import { store } from "builderStore"
|
||||
|
||||
|
@ -13,7 +13,8 @@
|
|||
$: store.actions.websocket.selectResource(DEFAULT_BB_DATASOURCE_ID)
|
||||
$: internalTablesBySourceId = $tables.list.filter(
|
||||
table =>
|
||||
table.type !== "external" && table.sourceId === DEFAULT_BB_DATASOURCE_ID
|
||||
table.sourceType !== DB_TYPE_EXTERNAL &&
|
||||
table.sourceId === DEFAULT_BB_DATASOURCE_ID
|
||||
)
|
||||
|
||||
onMount(() => {
|
||||
|
|
|
@ -91,7 +91,12 @@
|
|||
/>
|
||||
{/if}
|
||||
{#if section == "styles"}
|
||||
<DesignSection {componentInstance} {componentDefinition} {bindings} />
|
||||
<DesignSection
|
||||
{componentInstance}
|
||||
{componentBindings}
|
||||
{componentDefinition}
|
||||
{bindings}
|
||||
/>
|
||||
<CustomStylesSection
|
||||
{componentInstance}
|
||||
{componentDefinition}
|
||||
|
|
|
@ -16,18 +16,32 @@
|
|||
export let isScreen = false
|
||||
export let onUpdateSetting
|
||||
export let showSectionTitle = true
|
||||
export let tag
|
||||
|
||||
$: sections = getSections(componentInstance, componentDefinition, isScreen)
|
||||
$: sections = getSections(
|
||||
componentInstance,
|
||||
componentDefinition,
|
||||
isScreen,
|
||||
tag
|
||||
)
|
||||
|
||||
const getSections = (instance, definition, isScreen) => {
|
||||
const getSections = (instance, definition, isScreen, tag) => {
|
||||
const settings = definition?.settings ?? []
|
||||
const generalSettings = settings.filter(setting => !setting.section)
|
||||
const customSections = settings.filter(setting => setting.section)
|
||||
const generalSettings = settings.filter(
|
||||
setting => !setting.section && setting.tag === tag
|
||||
)
|
||||
const customSections = settings.filter(
|
||||
setting => setting.section && setting.tag === tag
|
||||
)
|
||||
let sections = [
|
||||
{
|
||||
name: "General",
|
||||
settings: generalSettings,
|
||||
},
|
||||
...(generalSettings?.length
|
||||
? [
|
||||
{
|
||||
name: "General",
|
||||
settings: generalSettings,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...(customSections || []),
|
||||
]
|
||||
|
||||
|
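The tag filter above means a setting only appears when its tag matches the tag passed to getSections; settings without a tag surface only when no tag is passed, since both sides are then undefined. A small sketch with illustrative data (the setting keys are borrowed from elsewhere in this diff):

// Sketch only: the same predicates as getSections, applied to example settings.
type Setting = { key?: string; name?: string; section?: boolean; tag?: string }

const splitByTag = (settings: Setting[], tag?: string) => ({
  general: settings.filter(s => !s.section && s.tag === tag),
  sections: settings.filter(s => s.section && s.tag === tag),
})

const settings: Setting[] = [
  { key: "dataSource" },                  // untagged general setting
  { section: true, name: "Fields" },      // untagged section
  { key: "labelPosition", tag: "style" }, // tagged, surfaced by DesignSection below
]

splitByTag(settings)          // => only the untagged entries
splitByTag(settings, "style") // => only the "style"-tagged entries
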
@ -132,7 +146,7 @@
|
|||
<div class="section-info">
|
||||
<InfoDisplay body={section.info} />
|
||||
</div>
|
||||
{:else if idx === 0 && section.name === "General" && componentDefinition.info}
|
||||
{:else if idx === 0 && section.name === "General" && componentDefinition?.info && !tag}
|
||||
<InfoDisplay
|
||||
title={componentDefinition.name}
|
||||
body={componentDefinition.info}
|
||||
|
@ -181,7 +195,7 @@
|
|||
</DetailSummary>
|
||||
{/if}
|
||||
{/each}
|
||||
{#if componentDefinition?.block}
|
||||
{#if componentDefinition?.block && !tag}
|
||||
<DetailSummary name="Eject" collapsible={false}>
|
||||
<EjectBlockButton />
|
||||
</DetailSummary>
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
<script>
|
||||
import StyleSection from "./StyleSection.svelte"
|
||||
import * as ComponentStyles from "./componentStyles"
|
||||
import ComponentSettingsSection from "./ComponentSettingsSection.svelte"
|
||||
|
||||
export let componentDefinition
|
||||
export let componentInstance
|
||||
export let bindings
|
||||
export let componentBindings
|
||||
|
||||
const getStyles = def => {
|
||||
if (!def?.styles?.length) {
|
||||
|
@ -22,6 +24,19 @@
|
|||
$: styles = getStyles(componentDefinition)
|
||||
</script>
|
||||
|
||||
<!--
|
||||
Load any general settings or sections tagged as "style"
|
||||
-->
|
||||
<ComponentSettingsSection
|
||||
{componentInstance}
|
||||
{componentDefinition}
|
||||
isScreen={false}
|
||||
showInstanceName={false}
|
||||
{bindings}
|
||||
{componentBindings}
|
||||
tag="style"
|
||||
/>
|
||||
|
||||
{#if styles?.length > 0}
|
||||
{#each styles as style}
|
||||
<StyleSection
|
||||
|
|
|
@ -36,6 +36,7 @@
|
|||
"heading",
|
||||
"text",
|
||||
"button",
|
||||
"buttongroup",
|
||||
"tag",
|
||||
"spectrumcard",
|
||||
"cardstat",
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
import { capitalise } from "helpers"
|
||||
import { goto } from "@roxi/routify"
|
||||
|
||||
let mode
|
||||
let pendingScreen
|
||||
|
||||
// Modal refs
|
||||
|
@ -100,14 +101,15 @@
|
|||
}
|
||||
|
||||
// Handler for NewScreenModal
|
||||
export const show = mode => {
|
||||
export const show = newMode => {
|
||||
mode = newMode
|
||||
selectedTemplates = null
|
||||
blankScreenUrl = null
|
||||
screenMode = mode
|
||||
pendingScreen = null
|
||||
screenAccessRole = Roles.BASIC
|
||||
|
||||
if (mode === "table") {
|
||||
if (mode === "table" || mode === "grid") {
|
||||
datasourceModal.show()
|
||||
} else if (mode === "blank") {
|
||||
let templates = getTemplates($tables.list)
|
||||
|
@ -123,6 +125,7 @@
|
|||
|
||||
// Handler for DatasourceModal confirmation, move to screen access select
|
||||
const confirmScreenDatasources = async ({ templates }) => {
|
||||
console.log(templates)
|
||||
selectedTemplates = templates
|
||||
screenAccessRoleModal.show()
|
||||
}
|
||||
|
@ -177,6 +180,7 @@
|
|||
|
||||
<Modal bind:this={datasourceModal} autoFocus={false}>
|
||||
<DatasourceModal
|
||||
{mode}
|
||||
onConfirm={confirmScreenDatasources}
|
||||
initialScreens={!selectedTemplates ? [] : [...selectedTemplates]}
|
||||
/>
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
import rowListScreen from "builderStore/store/screenTemplates/rowListScreen"
|
||||
import DatasourceTemplateRow from "./DatasourceTemplateRow.svelte"
|
||||
|
||||
export let mode
|
||||
export let onCancel
|
||||
export let onConfirm
|
||||
export let initialScreens = []
|
||||
|
@ -24,7 +25,10 @@
|
|||
screen => screen.resourceId !== resourceId
|
||||
)
|
||||
} else {
|
||||
selectedScreens = [...selectedScreens, rowListScreen([datasource])[0]]
|
||||
selectedScreens = [
|
||||
...selectedScreens,
|
||||
rowListScreen([datasource], mode)[0],
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Binary file not shown.
|
@ -3,6 +3,7 @@
|
|||
import CreationPage from "components/common/CreationPage.svelte"
|
||||
import blankImage from "./blank.png"
|
||||
import tableImage from "./table.png"
|
||||
import gridImage from "./grid.png"
|
||||
import CreateScreenModal from "./CreateScreenModal.svelte"
|
||||
import { store } from "builderStore"
|
||||
|
||||
|
@ -43,6 +44,16 @@
|
|||
<Body size="XS">View, edit and delete rows on a table</Body>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card" on:click={() => createScreenModal.show("grid")}>
|
||||
<div class="image">
|
||||
<img alt="" src={gridImage} />
|
||||
</div>
|
||||
<div class="text">
|
||||
<Body size="S">Grid</Body>
|
||||
<Body size="XS">View and manipulate rows on a grid</Body>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</CreationPage>
|
||||
</div>
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
Heading,
|
||||
Body,
|
||||
Button,
|
||||
ButtonGroup,
|
||||
Table,
|
||||
Layout,
|
||||
Modal,
|
||||
|
@ -46,6 +45,10 @@
|
|||
datasource: {
|
||||
type: "user",
|
||||
},
|
||||
options: {
|
||||
paginate: true,
|
||||
limit: 10,
|
||||
},
|
||||
})
|
||||
|
||||
let groupsLoaded = !$licensing.groupsEnabled || $groups?.length
|
||||
|
@ -65,10 +68,12 @@
|
|||
{ column: "role", component: RoleTableRenderer },
|
||||
]
|
||||
let userData = []
|
||||
let invitesLoaded = false
|
||||
let pendingInvites = []
|
||||
let parsedInvites = []
|
||||
|
||||
$: isOwner = $auth.accountPortalAccess && $admin.cloud
|
||||
$: readonly = !sdk.users.isAdmin($auth.user) || $features.isScimEnabled
|
||||
|
||||
$: debouncedUpdateFetch(searchEmail)
|
||||
$: schema = {
|
||||
email: {
|
||||
|
@ -88,16 +93,6 @@
|
|||
width: "1fr",
|
||||
},
|
||||
}
|
||||
|
||||
const getPendingSchema = tblSchema => {
|
||||
if (!tblSchema) {
|
||||
return {}
|
||||
}
|
||||
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
|
||||
pendingSchema.email.displayName = "Pending Invites"
|
||||
return pendingSchema
|
||||
}
|
||||
|
||||
$: pendingSchema = getPendingSchema(schema)
|
||||
$: userData = []
|
||||
$: inviteUsersResponse = { successful: [], unsuccessful: [] }
|
||||
|
@ -121,9 +116,15 @@
|
|||
}
|
||||
})
|
||||
}
|
||||
let invitesLoaded = false
|
||||
let pendingInvites = []
|
||||
let parsedInvites = []
|
||||
|
||||
const getPendingSchema = tblSchema => {
|
||||
if (!tblSchema) {
|
||||
return {}
|
||||
}
|
||||
let pendingSchema = JSON.parse(JSON.stringify(tblSchema))
|
||||
pendingSchema.email.displayName = "Pending Invites"
|
||||
return pendingSchema
|
||||
}
|
||||
|
||||
const invitesToSchema = invites => {
|
||||
return invites.map(invite => {
|
||||
|
@ -143,7 +144,9 @@
|
|||
const updateFetch = email => {
|
||||
fetch.update({
|
||||
query: {
|
||||
email,
|
||||
string: {
|
||||
email,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
@ -296,7 +299,7 @@
|
|||
{/if}
|
||||
<div class="controls">
|
||||
{#if !readonly}
|
||||
<ButtonGroup>
|
||||
<div class="buttons">
|
||||
<Button
|
||||
disabled={readonly}
|
||||
on:click={$licensing.userLimitReached
|
||||
|
@ -315,7 +318,7 @@
|
|||
>
|
||||
Import
|
||||
</Button>
|
||||
</ButtonGroup>
|
||||
</div>
|
||||
{:else}
|
||||
<ScimBanner />
|
||||
{/if}
|
||||
|
@ -390,12 +393,15 @@
|
|||
</Modal>
|
||||
|
||||
<style>
|
||||
.buttons {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
}
|
||||
.pagination {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.controls {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
|
@ -403,7 +409,6 @@
|
|||
align-items: center;
|
||||
gap: var(--spacing-xl);
|
||||
}
|
||||
|
||||
.controls-right {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
|
@ -411,7 +416,6 @@
|
|||
align-items: center;
|
||||
gap: var(--spacing-xl);
|
||||
}
|
||||
|
||||
.controls-right :global(.spectrum-Search) {
|
||||
width: 200px;
|
||||
}
|
||||
|
|
|
@ -258,6 +258,186 @@
|
|||
"description": "Contains your app screens",
|
||||
"static": true
|
||||
},
|
||||
"buttongroup": {
|
||||
"name": "Button group",
|
||||
"icon": "Button",
|
||||
"hasChildren": false,
|
||||
"settings": [
|
||||
{
|
||||
"section": true,
|
||||
"name": "Buttons",
|
||||
"settings": [
|
||||
{
|
||||
"type": "buttonConfiguration",
|
||||
"key": "buttons",
|
||||
"nested": true,
|
||||
"defaultValue": [
|
||||
{
|
||||
"type": "cta",
|
||||
"text": "Button 1"
|
||||
},
|
||||
{
|
||||
"type": "primary",
|
||||
"text": "Button 2"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"section": true,
|
||||
"name": "Layout",
|
||||
"settings": [
|
||||
{
|
||||
"type": "select",
|
||||
"label": "Direction",
|
||||
"key": "direction",
|
||||
"showInBar": true,
|
||||
"barStyle": "buttons",
|
||||
"options": [
|
||||
{
|
||||
"label": "Column",
|
||||
"value": "column",
|
||||
"barIcon": "ViewColumn",
|
||||
"barTitle": "Column layout"
|
||||
},
|
||||
{
|
||||
"label": "Row",
|
||||
"value": "row",
|
||||
"barIcon": "ViewRow",
|
||||
"barTitle": "Row layout"
|
||||
}
|
||||
],
|
||||
"defaultValue": "row"
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"label": "Horiz. align",
|
||||
"key": "hAlign",
|
||||
"showInBar": true,
|
||||
"barStyle": "buttons",
|
||||
"options": [
|
||||
{
|
||||
"label": "Left",
|
||||
"value": "left",
|
||||
"barIcon": "AlignLeft",
|
||||
"barTitle": "Align left"
|
||||
},
|
||||
{
|
||||
"label": "Center",
|
||||
"value": "center",
|
||||
"barIcon": "AlignCenter",
|
||||
"barTitle": "Align center"
|
||||
},
|
||||
{
|
||||
"label": "Right",
|
||||
"value": "right",
|
||||
"barIcon": "AlignRight",
|
||||
"barTitle": "Align right"
|
||||
},
|
||||
{
|
||||
"label": "Stretch",
|
||||
"value": "stretch",
|
||||
"barIcon": "MoveLeftRight",
|
||||
"barTitle": "Align stretched horizontally"
|
||||
}
|
||||
],
|
||||
"defaultValue": "left"
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"label": "Vert. align",
|
||||
"key": "vAlign",
|
||||
"showInBar": true,
|
||||
"barStyle": "buttons",
|
||||
"options": [
|
||||
{
|
||||
"label": "Top",
|
||||
"value": "top",
|
||||
"barIcon": "AlignTop",
|
||||
"barTitle": "Align top"
|
||||
},
|
||||
{
|
||||
"label": "Middle",
|
||||
"value": "middle",
|
||||
"barIcon": "AlignMiddle",
|
||||
"barTitle": "Align middle"
|
||||
},
|
||||
{
|
||||
"label": "Bottom",
|
||||
"value": "bottom",
|
||||
"barIcon": "AlignBottom",
|
||||
"barTitle": "Align bottom"
|
||||
},
|
||||
{
|
||||
"label": "Stretch",
|
||||
"value": "stretch",
|
||||
"barIcon": "MoveUpDown",
|
||||
"barTitle": "Align stretched vertically"
|
||||
}
|
||||
],
|
||||
"defaultValue": "top"
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"label": "Size",
|
||||
"key": "size",
|
||||
"showInBar": true,
|
||||
"barStyle": "buttons",
|
||||
"options": [
|
||||
{
|
||||
"label": "Shrink",
|
||||
"value": "shrink",
|
||||
"barIcon": "Minimize",
|
||||
"barTitle": "Shrink container"
|
||||
},
|
||||
{
|
||||
"label": "Grow",
|
||||
"value": "grow",
|
||||
"barIcon": "Maximize",
|
||||
"barTitle": "Grow container"
|
||||
}
|
||||
],
|
||||
"defaultValue": "shrink"
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"label": "Gap",
|
||||
"key": "gap",
|
||||
"showInBar": true,
|
||||
"barStyle": "picker",
|
||||
"options": [
|
||||
{
|
||||
"label": "None",
|
||||
"value": "N"
|
||||
},
|
||||
{
|
||||
"label": "Small",
|
||||
"value": "S"
|
||||
},
|
||||
{
|
||||
"label": "Medium",
|
||||
"value": "M"
|
||||
},
|
||||
{
|
||||
"label": "Large",
|
||||
"value": "L"
|
||||
}
|
||||
],
|
||||
"defaultValue": "M"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"label": "Wrap",
|
||||
"key": "wrap",
|
||||
"showInBar": true,
|
||||
"barIcon": "ModernGridView",
|
||||
"barTitle": "Wrap"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"button": {
|
||||
"name": "Button",
|
||||
"description": "A basic html button that is ready for styling",
|
||||
|
@ -2409,7 +2589,6 @@
|
|||
"key": "disabled",
|
||||
"defaultValue": false
|
||||
},
|
||||
|
||||
{
|
||||
"type": "text",
|
||||
"label": "Initial form step",
|
||||
|
@ -5288,17 +5467,17 @@
|
|||
},
|
||||
"settings": [
|
||||
{
|
||||
"type": "select",
|
||||
"type": "table",
|
||||
"label": "Data",
|
||||
"key": "dataSource"
|
||||
},
|
||||
{
|
||||
"type": "radio",
|
||||
"label": "Type",
|
||||
"key": "actionType",
|
||||
"options": ["Create", "Update", "View"],
|
||||
"defaultValue": "Create"
|
||||
},
|
||||
{
|
||||
"type": "table",
|
||||
"label": "Data",
|
||||
"key": "dataSource"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"label": "Title",
|
||||
|
@ -5329,13 +5508,37 @@
|
|||
},
|
||||
{
|
||||
"type": "text",
|
||||
"label": "Empty text",
|
||||
"label": "No rows found",
|
||||
"key": "noRowsMessage",
|
||||
"defaultValue": "We couldn't find a row to display",
|
||||
"nested": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"section": true,
|
||||
"name": "Fields",
|
||||
"settings": [
|
||||
{
|
||||
"type": "fieldConfiguration",
|
||||
"key": "fields",
|
||||
"nested": true,
|
||||
"resetOn": "dataSource",
|
||||
"selectAllFields": true
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"label": "Disabled",
|
||||
"key": "disabled",
|
||||
"defaultValue": false,
|
||||
"dependsOn": {
|
||||
"setting": "actionType",
|
||||
"value": "View",
|
||||
"invert": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"section": true,
|
||||
"name": "Buttons",
|
||||
|
@ -5388,60 +5591,38 @@
|
|||
]
|
||||
},
|
||||
{
|
||||
"section": true,
|
||||
"name": "Fields",
|
||||
"settings": [
|
||||
"tag": "style",
|
||||
"type": "select",
|
||||
"label": "Align labels",
|
||||
"key": "labelPosition",
|
||||
"defaultValue": "left",
|
||||
"options": [
|
||||
{
|
||||
"type": "select",
|
||||
"label": "Align labels",
|
||||
"key": "labelPosition",
|
||||
"defaultValue": "left",
|
||||
"options": [
|
||||
{
|
||||
"label": "Left",
|
||||
"value": "left"
|
||||
},
|
||||
{
|
||||
"label": "Above",
|
||||
"value": "above"
|
||||
}
|
||||
]
|
||||
"label": "Left",
|
||||
"value": "left"
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"label": "Size",
|
||||
"key": "size",
|
||||
"options": [
|
||||
{
|
||||
"label": "Medium",
|
||||
"value": "spectrum--medium"
|
||||
},
|
||||
{
|
||||
"label": "Large",
|
||||
"value": "spectrum--large"
|
||||
}
|
||||
],
|
||||
"defaultValue": "spectrum--medium"
|
||||
},
|
||||
{
|
||||
"type": "fieldConfiguration",
|
||||
"key": "fields",
|
||||
"nested": true,
|
||||
"resetOn": "dataSource",
|
||||
"selectAllFields": true
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"label": "Disabled",
|
||||
"key": "disabled",
|
||||
"defaultValue": false,
|
||||
"dependsOn": {
|
||||
"setting": "actionType",
|
||||
"value": "View",
|
||||
"invert": true
|
||||
}
|
||||
"label": "Above",
|
||||
"value": "above"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"tag": "style",
|
||||
"type": "select",
|
||||
"label": "Size",
|
||||
"key": "size",
|
||||
"options": [
|
||||
{
|
||||
"label": "Medium",
|
||||
"value": "spectrum--medium"
|
||||
},
|
||||
{
|
||||
"label": "Large",
|
||||
"value": "spectrum--large"
|
||||
}
|
||||
],
|
||||
"defaultValue": "spectrum--medium"
|
||||
}
|
||||
],
|
||||
"context": [
|
||||
|
|
|
@ -0,0 +1,37 @@
|
|||
<script>
|
||||
import BlockComponent from "../BlockComponent.svelte"
|
||||
import Block from "../Block.svelte"
|
||||
|
||||
export let buttons = []
|
||||
export let direction
|
||||
export let hAlign
|
||||
export let vAlign
|
||||
export let gap = "S"
|
||||
</script>
|
||||
|
||||
<Block>
|
||||
<BlockComponent
|
||||
type="container"
|
||||
props={{
|
||||
direction,
|
||||
hAlign,
|
||||
vAlign,
|
||||
gap,
|
||||
wrap: true,
|
||||
}}
|
||||
>
|
||||
{#each buttons as { text, type, quiet, disabled, onClick, size }}
|
||||
<BlockComponent
|
||||
type="button"
|
||||
props={{
|
||||
text: text || "Button",
|
||||
onClick,
|
||||
type,
|
||||
quiet,
|
||||
disabled,
|
||||
size,
|
||||
}}
|
||||
/>
|
||||
{/each}
|
||||
</BlockComponent>
|
||||
</Block>
|
|
@ -220,15 +220,11 @@
|
|||
</BlockComponent>
|
||||
{/if}
|
||||
</BlockComponent>
|
||||
{#if description}
|
||||
<BlockComponent
|
||||
type="text"
|
||||
props={{ text: description }}
|
||||
order={1}
|
||||
/>
|
||||
{/if}
|
||||
</BlockComponent>
|
||||
{/if}
|
||||
{#if description}
|
||||
<BlockComponent type="text" props={{ text: description }} order={1} />
|
||||
{/if}
|
||||
{#key fields}
|
||||
<BlockComponent type="fieldgroup" props={{ labelPosition }} order={1}>
|
||||
{#each fields as field, idx}
|
||||
|
|
|
@ -19,6 +19,7 @@ export { default as dataprovider } from "./DataProvider.svelte"
|
|||
export { default as divider } from "./Divider.svelte"
|
||||
export { default as screenslot } from "./ScreenSlot.svelte"
|
||||
export { default as button } from "./Button.svelte"
|
||||
export { default as buttongroup } from "./ButtonGroup.svelte"
|
||||
export { default as repeater } from "./Repeater.svelte"
|
||||
export { default as text } from "./Text.svelte"
|
||||
export { default as layout } from "./Layout.svelte"
|
||||
|
|
|
@ -103,7 +103,6 @@ const fetchRowHandler = async action => {

const deleteRowHandler = async action => {
const { tableId, rowId: rowConfig, notificationOverride } = action.parameters

if (tableId && rowConfig) {
try {
let requestConfig

@ -129,9 +128,11 @@ const deleteRowHandler = async action => {
requestConfig = [parsedRowConfig]
} else if (Array.isArray(parsedRowConfig)) {
requestConfig = parsedRowConfig
} else if (Number.isInteger(parsedRowConfig)) {
requestConfig = [String(parsedRowConfig)]
}

if (!requestConfig.length) {
if (!requestConfig && !parsedRowConfig) {
notificationStore.actions.warning("No valid rows were supplied")
return false
}
|
|
|
@ -140,4 +140,13 @@ export const buildTableEndpoints = API => ({
},
})
},
migrateColumn: async ({ tableId, oldColumn, newColumn }) => {
return await API.post({
url: `/api/tables/${tableId}/migrate`,
body: {
oldColumn,
newColumn,
},
})
},
})
|
|
|
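A usage sketch for the new migrateColumn endpoint wrapper; the table ID, column name, and the old column schema are placeholders, while the new column's shape follows the MigrationModal added earlier in this diff (FieldType.BB_REFERENCE plus a USER or USERS subtype).

import { FieldType, FieldSubtype } from "@budibase/types"

// Sketch only: `api` is the endpoint collection built above; the IDs and
// column values below are placeholders, not data from this commit.
async function migrateAssigneeColumn(api: { migrateColumn: Function }) {
  const oldColumn = { name: "Assignee", type: "link", tableId: "ta_users" }
  // POSTs to /api/tables/:tableId/migrate and resolves to the server handler's
  // { message: "Column Assignee migrated." } response.
  return await api.migrateColumn({
    tableId: "ta_0000000000",
    oldColumn,
    newColumn: {
      name: `${oldColumn.name} migrated`,
      type: FieldType.BB_REFERENCE,
      subtype: FieldSubtype.USER, // FieldSubtype.USERS for many-to-many relationships
    },
  })
}
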
@ -55,7 +55,7 @@
|
|||
try {
|
||||
return await API.uploadBuilderAttachment(data)
|
||||
} catch (error) {
|
||||
$notifications.error("Failed to upload attachment")
|
||||
$notifications.error(error.message || "Failed to upload attachment")
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,20 @@
|
|||
<script>
|
||||
import { getContext, onMount, tick } from "svelte"
|
||||
import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core"
|
||||
import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui"
|
||||
import {
|
||||
Icon,
|
||||
Popover,
|
||||
Menu,
|
||||
MenuItem,
|
||||
clickOutside,
|
||||
Modal,
|
||||
} from "@budibase/bbui"
|
||||
import GridCell from "./GridCell.svelte"
|
||||
import { getColumnIcon } from "../lib/utils"
|
||||
import MigrationModal from "../controls/MigrationModal.svelte"
|
||||
import { debounce } from "../../../utils/utils"
|
||||
import { FieldType, FormulaTypes } from "@budibase/types"
|
||||
import { TableNames } from "../../../constants"
|
||||
|
||||
export let column
|
||||
export let idx
|
||||
|
@ -45,6 +54,7 @@
|
|||
let editIsOpen = false
|
||||
let timeout
|
||||
let popover
|
||||
let migrationModal
|
||||
let searchValue
|
||||
let input
|
||||
|
||||
|
@ -189,6 +199,11 @@
|
|||
})
|
||||
}
|
||||
|
||||
const openMigrationModal = () => {
|
||||
migrationModal.show()
|
||||
open = false
|
||||
}
|
||||
|
||||
const startSearching = async () => {
|
||||
$focusedCellId = null
|
||||
searchValue = ""
|
||||
|
@ -224,6 +239,10 @@
|
|||
onMount(() => subscribe("close-edit-column", cancelEdit))
|
||||
</script>
|
||||
|
||||
<Modal bind:this={migrationModal}>
|
||||
<MigrationModal {column} />
|
||||
</Modal>
|
||||
|
||||
<div
|
||||
class="header-cell"
|
||||
class:open
|
||||
|
@ -363,6 +382,11 @@
|
|||
>
|
||||
Hide column
|
||||
</MenuItem>
|
||||
{#if $config.canEditColumns && column.schema.type === "link" && column.schema.tableId === TableNames.USERS}
|
||||
<MenuItem icon="User" on:click={openMigrationModal}>
|
||||
Migrate to user column
|
||||
</MenuItem>
|
||||
{/if}
|
||||
</Menu>
|
||||
{/if}
|
||||
</Popover>
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
<script>
|
||||
import {
|
||||
ModalContent,
|
||||
notifications,
|
||||
Input,
|
||||
InlineAlert,
|
||||
} from "@budibase/bbui"
|
||||
import { getContext } from "svelte"
|
||||
import { ValidColumnNameRegex } from "@budibase/shared-core"
|
||||
import { FieldSubtype, FieldType, RelationshipType } from "@budibase/types"
|
||||
|
||||
const { API, definition, rows } = getContext("grid")
|
||||
|
||||
export let column
|
||||
|
||||
let newColumnName = `${column.schema.name} migrated`
|
||||
$: error = checkNewColumnName(newColumnName)
|
||||
|
||||
const checkNewColumnName = newColumnName => {
|
||||
if (newColumnName === "") {
|
||||
return "Column name can't be empty."
|
||||
}
|
||||
if (newColumnName in $definition.schema) {
|
||||
return "New column name can't be the same as an existing column name."
|
||||
}
|
||||
if (newColumnName.match(ValidColumnNameRegex) === null) {
|
||||
return "Illegal character; must be alpha-numeric."
|
||||
}
|
||||
}
|
||||
|
||||
const migrateUserColumn = async () => {
|
||||
let subtype = FieldSubtype.USERS
|
||||
if (column.schema.relationshipType === RelationshipType.ONE_TO_MANY) {
|
||||
subtype = FieldSubtype.USER
|
||||
}
|
||||
|
||||
try {
|
||||
await API.migrateColumn({
|
||||
tableId: $definition._id,
|
||||
oldColumn: column.schema,
|
||||
newColumn: {
|
||||
name: newColumnName,
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype,
|
||||
},
|
||||
})
|
||||
notifications.success("Column migrated")
|
||||
} catch (e) {
|
||||
notifications.error(`Failed to migrate: ${e.message}`)
|
||||
}
|
||||
await rows.actions.refreshData()
|
||||
}
|
||||
</script>
|
||||
|
||||
<ModalContent
|
||||
title="Migrate column"
|
||||
confirmText="Continue"
|
||||
cancelText="Cancel"
|
||||
onConfirm={migrateUserColumn}
|
||||
disabled={error !== undefined}
|
||||
size="M"
|
||||
>
|
||||
This operation will kick off a migration of the column "{column.schema.name}"
|
||||
to a new column, with the name provided - this operation may take a moment to
|
||||
complete.
|
||||
|
||||
<InlineAlert
|
||||
type="error"
|
||||
header="Are you sure?"
|
||||
message="This will leave bindings which utilised the user relationship column in a state where they will need to be updated to use the new column instead."
|
||||
/>
|
||||
<Input bind:value={newColumnName} label="New column name" {error} />
|
||||
</ModalContent>
|
|
@ -1 +1 @@
|
|||
Subproject commit d24c0dc3a30014cbe61860252aa48104cad36376
|
||||
Subproject commit 3820c0c93a3e448e10a60a9feb5396844b537ca8
|
|
@ -38,7 +38,7 @@ RUN apt update && apt upgrade -y \
|
|||
|
||||
COPY package.json .
|
||||
COPY dist/yarn.lock .
|
||||
RUN yarn install --production=true \
|
||||
RUN yarn install --production=true --network-timeout 1000000 \
|
||||
# Remove unneeded data from file system to reduce image size
|
||||
&& yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python \
|
||||
&& rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp
|
||||
|
|
|
@ -44,7 +44,7 @@ RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
|
|||
WORKDIR /string-templates
|
||||
COPY packages/string-templates/package.json package.json
|
||||
RUN ../scripts/removeWorkspaceDependencies.sh package.json
|
||||
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true
|
||||
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000
|
||||
COPY packages/string-templates .
|
||||
|
||||
|
||||
|
@ -57,7 +57,7 @@ COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.
|
|||
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
|
||||
RUN ./scripts/removeWorkspaceDependencies.sh package.json
|
||||
|
||||
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true \
|
||||
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true --network-timeout 1000000 \
|
||||
# Remove unneeded data from file system to reduce image size
|
||||
&& yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python jq \
|
||||
&& rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp
|
||||
|
|
|
@ -70,6 +70,13 @@ module AwsMock {
|
|||
Contents: {},
|
||||
})
|
||||
)
|
||||
|
||||
// @ts-ignore
|
||||
this.getObject = jest.fn(
|
||||
response({
|
||||
Body: "",
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
aws.DynamoDB = { DocumentClient }
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
"test": "bash scripts/test.sh",
|
||||
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
|
||||
"test:watch": "jest --watch",
|
||||
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
|
||||
"run:docker": "node dist/index.js",
|
||||
"run:docker:cluster": "pm2-runtime start pm2.config.js",
|
||||
"dev:stack:up": "node scripts/dev/manage.js up",
|
||||
|
|
|
@ -2,7 +2,7 @@ version: "3.8"
|
|||
services:
|
||||
db:
|
||||
container_name: postgres
|
||||
image: postgres:15
|
||||
image: postgres:15-bullseye
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_USER: root
|
||||
|
|
|
@ -32,11 +32,8 @@ import {
|
|||
tenancy,
|
||||
users,
|
||||
} from "@budibase/backend-core"
|
||||
import { USERS_TABLE_SCHEMA } from "../../constants"
|
||||
import {
|
||||
buildDefaultDocs,
|
||||
DEFAULT_BB_DATASOURCE_ID,
|
||||
} from "../../db/defaultData/datasource_bb_default"
|
||||
import { USERS_TABLE_SCHEMA, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
|
||||
import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
|
||||
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
|
||||
import { stringToReadStream } from "../../utilities"
|
||||
import { doesUserHaveLock } from "../../utilities/redis"
|
||||
|
|
|
@ -12,7 +12,6 @@ import {
|
|||
CreateDatasourceResponse,
|
||||
Datasource,
|
||||
DatasourcePlus,
|
||||
ExternalTable,
|
||||
FetchDatasourceInfoRequest,
|
||||
FetchDatasourceInfoResponse,
|
||||
IntegrationBase,
|
||||
|
@ -59,7 +58,7 @@ async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
|
|||
const connector = (await getConnector(datasource)) as DatasourcePlus
|
||||
return await connector.buildSchema(
|
||||
datasource._id!,
|
||||
datasource.entities! as Record<string, ExternalTable>
|
||||
datasource.entities! as Record<string, Table>
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
import { context } from "@budibase/backend-core"
|
||||
import { isExternalTable } from "../../../integrations/utils"
|
||||
import { isExternalTableID } from "../../../integrations/utils"
|
||||
import { APP_PREFIX, DocumentType } from "../../../db/utils"
|
||||
|
||||
export async function addRev(
|
||||
body: { _id?: string; _rev?: string },
|
||||
tableId?: string
|
||||
) {
|
||||
if (!body._id || (tableId && isExternalTable(tableId))) {
|
||||
if (!body._id || (tableId && isExternalTableID(tableId))) {
|
||||
return body
|
||||
}
|
||||
let id = body._id
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
import { context, db as dbCore, events, roles } from "@budibase/backend-core"
|
||||
import {
|
||||
context,
|
||||
db as dbCore,
|
||||
events,
|
||||
roles,
|
||||
Header,
|
||||
} from "@budibase/backend-core"
|
||||
import { getUserMetadataParams, InternalTables } from "../../db/utils"
|
||||
import { Database, Role, UserCtx, UserRoles } from "@budibase/types"
|
||||
import { sdk as sharedSdk } from "@budibase/shared-core"
|
||||
|
@ -143,4 +149,20 @@ export async function accessible(ctx: UserCtx) {
} else {
ctx.body = await roles.getUserRoleIdHierarchy(roleId!)
}

// If a custom role is provided in the header, filter out higher level roles
const roleHeader = ctx.header?.[Header.PREVIEW_ROLE] as string
if (roleHeader && !Object.keys(roles.BUILTIN_ROLE_IDS).includes(roleHeader)) {
const inherits = (await roles.getRole(roleHeader))?.inherits
const orderedRoles = ctx.body.reverse()
let filteredRoles = [roleHeader]
for (let role of orderedRoles) {
filteredRoles = [role, ...filteredRoles]
if (role === inherits) {
break
}
}
filteredRoles.pop()
ctx.body = [roleHeader, ...filteredRoles]
}
}
|
|
|
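The filtering added above is easier to follow as a pure function. A distilled sketch, assuming ctx.body holds the accessible hierarchy with the highest role first (which the reverse() suggests); the role IDs in the example are illustrative.

// Sketch only: mirrors the preview-role filtering without the ctx/roles plumbing.
function filterForPreviewRole(
  accessible: string[], // highest role first
  previewRoleId: string,
  inherits?: string
): string[] {
  const ordered = [...accessible].reverse() // lowest role first
  let filtered = [previewRoleId]
  for (const role of ordered) {
    filtered = [role, ...filtered]
    if (role === inherits) {
      break
    }
  }
  filtered.pop() // drop the duplicated preview role that seeded the list
  return [previewRoleId, ...filtered]
}

filterForPreviewRole(["ADMIN", "POWER", "BASIC", "PUBLIC"], "custom_role", "BASIC")
// => ["custom_role", "BASIC", "PUBLIC"]
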
@ -1,7 +1,7 @@
|
|||
import { quotas } from "@budibase/pro"
|
||||
import * as internal from "./internal"
|
||||
import * as external from "./external"
|
||||
import { isExternalTable } from "../../../integrations/utils"
|
||||
import { isExternalTableID } from "../../../integrations/utils"
|
||||
import {
|
||||
Ctx,
|
||||
UserCtx,
|
||||
|
@ -30,7 +30,7 @@ import { Format } from "../view/exporters"
|
|||
export * as views from "./views"
|
||||
|
||||
function pickApi(tableId: any) {
|
||||
if (isExternalTable(tableId)) {
|
||||
if (isExternalTableID(tableId)) {
|
||||
return external
|
||||
}
|
||||
return internal
|
||||
|
@ -227,7 +227,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
|
|||
export async function validate(ctx: Ctx<Row, ValidateResponse>) {
|
||||
const tableId = utils.getTableId(ctx)
|
||||
// external tables are hard to validate currently
|
||||
if (isExternalTable(tableId)) {
|
||||
if (isExternalTableID(tableId)) {
|
||||
ctx.body = { valid: true, errors: {} }
|
||||
} else {
|
||||
ctx.body = await sdk.rows.utils.validate({
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import { ValidFileExtensions } from "@budibase/shared-core"
|
||||
|
||||
require("svelte/register")
|
||||
|
||||
import { join } from "../../../utilities/centralPath"
|
||||
|
@ -11,34 +13,21 @@ import {
|
|||
} from "../../../utilities/fileSystem"
|
||||
import env from "../../../environment"
|
||||
import { DocumentType } from "../../../db/utils"
|
||||
import { context, objectStore, utils, configs } from "@budibase/backend-core"
|
||||
import {
|
||||
context,
|
||||
objectStore,
|
||||
utils,
|
||||
configs,
|
||||
BadRequestError,
|
||||
} from "@budibase/backend-core"
|
||||
import AWS from "aws-sdk"
|
||||
import fs from "fs"
|
||||
import sdk from "../../../sdk"
|
||||
import * as pro from "@budibase/pro"
|
||||
import { App, Ctx } from "@budibase/types"
|
||||
import { App, Ctx, ProcessAttachmentResponse, Upload } from "@budibase/types"
|
||||
|
||||
const send = require("koa-send")
|
||||
|
||||
async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
|
||||
const response = await objectStore.upload({
|
||||
bucket,
|
||||
metadata,
|
||||
filename: s3Key,
|
||||
path: file.path,
|
||||
type: file.type,
|
||||
})
|
||||
|
||||
// don't store a URL, work this out on the way out as the URL could change
|
||||
return {
|
||||
size: file.size,
|
||||
name: file.name,
|
||||
url: objectStore.getAppFileUrl(s3Key),
|
||||
extension: [...file.name.split(".")].pop(),
|
||||
key: response.Key,
|
||||
}
|
||||
}
|
||||
|
||||
export const toggleBetaUiFeature = async function (ctx: Ctx) {
|
||||
const cookieName = `beta:${ctx.params.feature}`
|
||||
|
||||
|
@ -72,23 +61,58 @@ export const serveBuilder = async function (ctx: Ctx) {
|
|||
await send(ctx, ctx.file, { root: builderPath })
|
||||
}
|
||||
|
||||
export const uploadFile = async function (ctx: Ctx) {
|
||||
export const uploadFile = async function (
|
||||
ctx: Ctx<{}, ProcessAttachmentResponse>
|
||||
) {
|
||||
const file = ctx.request?.files?.file
|
||||
if (!file) {
|
||||
throw new BadRequestError("No file provided")
|
||||
}
|
||||
|
||||
let files = file && Array.isArray(file) ? Array.from(file) : [file]
|
||||
|
||||
const uploads = files.map(async (file: any) => {
|
||||
const fileExtension = [...file.name.split(".")].pop()
|
||||
// filenames converted to UUIDs so they are unique
|
||||
const processedFileName = `${uuid.v4()}.${fileExtension}`
|
||||
ctx.body = await Promise.all(
|
||||
files.map(async file => {
|
||||
if (!file.name) {
|
||||
throw new BadRequestError(
|
||||
"Attempted to upload a file without a filename"
|
||||
)
|
||||
}
|
||||
|
||||
return prepareUpload({
|
||||
file,
|
||||
s3Key: `${context.getProdAppId()}/attachments/${processedFileName}`,
|
||||
bucket: ObjectStoreBuckets.APPS,
|
||||
const extension = [...file.name.split(".")].pop()
|
||||
if (!extension) {
|
||||
throw new BadRequestError(
|
||||
`File "${file.name}" has no extension, an extension is required to upload a file`
|
||||
)
|
||||
}
|
||||
|
||||
if (!env.SELF_HOSTED && !ValidFileExtensions.includes(extension)) {
|
||||
throw new BadRequestError(
|
||||
`File "${file.name}" has an invalid extension: "${extension}"`
|
||||
)
|
||||
}
|
||||
|
||||
// filenames converted to UUIDs so they are unique
|
||||
const processedFileName = `${uuid.v4()}.${extension}`
|
||||
|
||||
const s3Key = `${context.getProdAppId()}/attachments/${processedFileName}`
|
||||
|
||||
const response = await objectStore.upload({
|
||||
bucket: ObjectStoreBuckets.APPS,
|
||||
filename: s3Key,
|
||||
path: file.path,
|
||||
type: file.type,
|
||||
})
|
||||
|
||||
return {
|
||||
size: file.size,
|
||||
name: file.name,
|
||||
url: objectStore.getAppFileUrl(s3Key),
|
||||
extension,
|
||||
key: response.Key,
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
ctx.body = await Promise.all(uploads)
|
||||
)
|
||||
}
|
||||
|
||||
export const deleteObjects = async function (ctx: Ctx) {
|
||||
|
|
|
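The per-file checks in the rewritten uploadFile can be summarised as a standalone helper. A sketch, assuming ValidFileExtensions from @budibase/shared-core as imported above; the helper name is illustrative and a plain Error stands in for BadRequestError.

import { ValidFileExtensions } from "@budibase/shared-core"

// Sketch only: mirrors the filename/extension validation from uploadFile above.
function validateUploadName(name: string | undefined, selfHosted: boolean): string {
  if (!name) {
    throw new Error("Attempted to upload a file without a filename")
  }
  const extension = [...name.split(".")].pop()
  if (!extension) {
    throw new Error(
      `File "${name}" has no extension, an extension is required to upload a file`
    )
  }
  if (!selfHosted && !ValidFileExtensions.includes(extension)) {
    throw new Error(`File "${name}" has an invalid extension: "${extension}"`)
  }
  return extension // used to build the `${uuid.v4()}.${extension}` object key
}
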
@ -5,18 +5,27 @@ import {
|
|||
isSchema,
|
||||
validate as validateSchema,
|
||||
} from "../../../utilities/schema"
|
||||
import { isExternalTable, isSQL } from "../../../integrations/utils"
|
||||
import {
|
||||
isExternalTable,
|
||||
isExternalTableID,
|
||||
isSQL,
|
||||
} from "../../../integrations/utils"
|
||||
import { events } from "@budibase/backend-core"
|
||||
import {
|
||||
BulkImportRequest,
|
||||
BulkImportResponse,
|
||||
DocumentType,
|
||||
FetchTablesResponse,
|
||||
MigrateRequest,
|
||||
MigrateResponse,
|
||||
Row,
|
||||
SaveTableRequest,
|
||||
SaveTableResponse,
|
||||
Table,
|
||||
TableResponse,
|
||||
TableSourceType,
|
||||
UserCtx,
|
||||
Row,
|
||||
SEPARATOR,
|
||||
} from "@budibase/types"
|
||||
import sdk from "../../../sdk"
|
||||
import { jsonFromCsvString } from "../../../utilities/csv"
|
||||
|
@ -24,12 +33,10 @@ import { builderSocket } from "../../../websockets"
|
|||
import { cloneDeep, isEqual } from "lodash"
|
||||
|
||||
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
|
||||
if (table && !tableId) {
|
||||
tableId = table._id
|
||||
}
|
||||
if (table && table.type === "external") {
|
||||
if (table && isExternalTable(table)) {
|
||||
return external
|
||||
} else if (tableId && isExternalTable(tableId)) {
|
||||
}
|
||||
if (tableId && isExternalTableID(tableId)) {
|
||||
return external
|
||||
}
|
||||
return internal
|
||||
|
@ -46,8 +53,8 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
|
|||
if (entities) {
|
||||
return Object.values(entities).map<Table>((entity: Table) => ({
|
||||
...entity,
|
||||
type: "external",
|
||||
sourceId: datasource._id,
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
sourceId: datasource._id!,
|
||||
sql: isSQL(datasource),
|
||||
}))
|
||||
} else {
|
||||
|
@ -158,3 +165,19 @@ export async function validateExistingTableImport(ctx: UserCtx) {
|
|||
ctx.status = 422
|
||||
}
|
||||
}
|
||||
|
||||
export async function migrate(ctx: UserCtx<MigrateRequest, MigrateResponse>) {
|
||||
const { oldColumn, newColumn } = ctx.request.body
|
||||
let tableId = ctx.params.tableId as string
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
let result = await sdk.tables.migrate(table, oldColumn, newColumn)
|
||||
|
||||
for (let table of result.tablesUpdated) {
|
||||
builderSocket?.emitTableUpdate(ctx, table, {
|
||||
includeOriginator: true,
|
||||
})
|
||||
}
|
||||
|
||||
ctx.status = 200
|
||||
ctx.body = { message: `Column ${oldColumn.name} migrated.` }
|
||||
}
|
||||
|
|
|
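For reference, the request and response shapes the new migrate handler implies, reconstructed from the handler body rather than copied from @budibase/types; treat the field types as assumptions.

// Sketch only: inferred shapes, not the actual MigrateRequest/MigrateResponse types.
interface MigrateRequestSketch {
  oldColumn: { name: string; type: string; [key: string]: any } // existing column schema
  newColumn: { name: string; type: string; subtype?: string; [key: string]: any }
}

interface MigrateResponseSketch {
  message: string // e.g. "Column Assignee migrated."
}
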
@ -7,6 +7,7 @@ import {
|
|||
SaveTableRequest,
|
||||
SaveTableResponse,
|
||||
Table,
|
||||
TableSourceType,
|
||||
UserCtx,
|
||||
} from "@budibase/types"
|
||||
import sdk from "../../../sdk"
|
||||
|
@ -16,10 +17,11 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
|||
let tableToSave: Table & {
|
||||
_rename?: RenameColumn
|
||||
} = {
|
||||
type: "table",
|
||||
_id: generateTableID(),
|
||||
views: {},
|
||||
...rest,
|
||||
type: "table",
|
||||
sourceType: TableSourceType.INTERNAL,
|
||||
views: {},
|
||||
}
|
||||
const renaming = tableToSave._rename
|
||||
delete tableToSave._rename
|
||||
|
|
|
@@ -11,128 +11,24 @@ const { PermissionType, PermissionLevel } = permissions
const router: Router = new Router()

router
  /**
   * @api {get} /api/:sourceId/:rowId/enrich Get an enriched row
   * @apiName Get an enriched row
   * @apiGroup rows
   * @apiPermission table read access
   * @apiDescription This API is only useful when dealing with rows that have relationships.
   * Normally when a row is a returned from the API relationships will only have the structure
   * `{ primaryDisplay: "name", _id: ... }` but this call will return the full related rows
   * for each relationship instead.
   *
   * @apiParam {string} rowId The ID of the row which is to be retrieved and enriched.
   *
   * @apiSuccess {object} row The response body will be the enriched row.
   */
  .get(
    "/api/:sourceId/:rowId/enrich",
    paramSubResource("sourceId", "rowId"),
    authorized(PermissionType.TABLE, PermissionLevel.READ),
    rowController.fetchEnrichedRow
  )
  /**
   * @api {get} /api/:sourceId/rows Get all rows in a table
   * @apiName Get all rows in a table
   * @apiGroup rows
   * @apiPermission table read access
   * @apiDescription This is a deprecated endpoint that should not be used anymore, instead use the search endpoint.
   * This endpoint gets all of the rows within the specified table - it is not heavily used
   * due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
   * will simply stop.
   *
   * @apiParam {string} sourceId The ID of the table to retrieve all rows within.
   *
   * @apiSuccess {object[]} rows The response body will be an array of all rows found.
   */
  .get(
    "/api/:sourceId/rows",
    paramResource("sourceId"),
    authorized(PermissionType.TABLE, PermissionLevel.READ),
    rowController.fetch
  )
  /**
   * @api {get} /api/:sourceId/rows/:rowId Retrieve a single row
   * @apiName Retrieve a single row
   * @apiGroup rows
   * @apiPermission table read access
   * @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
   * a row by anything other than its _id field, use the search endpoint.
   *
   * @apiParam {string} sourceId The ID of the table to retrieve a row from.
   * @apiParam {string} rowId The ID of the row to retrieve.
   *
   * @apiSuccess {object} body The response body will be the row that was found.
   */
  .get(
    "/api/:sourceId/rows/:rowId",
    paramSubResource("sourceId", "rowId"),
    authorized(PermissionType.TABLE, PermissionLevel.READ),
    rowController.find
  )
  /**
   * @api {post} /api/:sourceId/search Search for rows in a table
   * @apiName Search for rows in a table
   * @apiGroup rows
   * @apiPermission table read access
   * @apiDescription This is the primary method of accessing rows in Budibase, the data provider
   * and data UI in the builder are built atop this. All filtering, sorting and pagination is
   * handled through this, for internal and external (datasource plus, e.g. SQL) tables.
   *
   * @apiParam {string} sourceId The ID of the table to retrieve rows from.
   *
   * @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
   * defaults to false.
   * @apiParam (Body) {object} [query] This contains a set of filters which should be applied, if none
   * specified then the request will be unfiltered. An example with all of the possible query
   * options has been supplied below.
   * @apiParam (Body) {number} [limit] This sets a limit for the number of rows that will be returned,
   * this will be implemented at the database level if supported for performance reasons. This
   * is useful when paginating to set exactly how many rows per page.
   * @apiParam (Body) {string} [bookmark] If pagination is enabled then a bookmark will be returned
   * with each successful search request, this should be supplied back to get the next page.
   * @apiParam (Body) {object} [sort] If sort is desired this should contain the name of the column to
   * sort on.
   * @apiParam (Body) {string} [sortOrder] If sort is enabled then this can be either "descending" or
   * "ascending" as required.
   * @apiParam (Body) {string} [sortType] If sort is enabled then you must specify the type of search
   * being used, either "string" or "number". This is only used for internal tables.
   *
   * @apiParamExample {json} Example:
   * {
   *   "tableId": "ta_70260ff0b85c467ca74364aefc46f26d",
   *   "query": {
   *     "string": {},
   *     "fuzzy": {},
   *     "range": {
   *       "columnName": {
   *         "high": 20,
   *         "low": 10,
   *       }
   *     },
   *     "equal": {
   *       "columnName": "someValue"
   *     },
   *     "notEqual": {},
   *     "empty": {},
   *     "notEmpty": {},
   *     "oneOf": {
   *       "columnName": ["value"]
   *     }
   *   },
   *   "limit": 10,
   *   "sort": "name",
   *   "sortOrder": "descending",
   *   "sortType": "string",
   *   "paginate": true
   * }
   *
   * @apiSuccess {object[]} rows An array of rows that was found based on the supplied parameters.
   * @apiSuccess {boolean} hasNextPage If pagination was enabled then this specifies whether or
   * not there is another page after this request.
   * @apiSuccess {string} bookmark The bookmark to be sent with the next request to get the next
   * page.
   */
  .post(
    "/api/:sourceId/search",
    internalSearchValidator(),
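As a quick illustration of the search endpoint documented above, a client call might look like the following sketch; the fetch wrapper, host and auth handling are placeholders and not part of this diff, while the body fields mirror the apidoc:

// Hypothetical client-side call to POST /api/:sourceId/search.
async function searchRows(sourceId: string, bookmark?: string) {
  const res = await fetch(`/api/${sourceId}/search`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      query: { equal: { columnName: "someValue" } },
      limit: 10,
      sort: "name",
      sortOrder: "descending",
      sortType: "string",
      paginate: true,
      bookmark, // pass the bookmark from the previous page, if any
    }),
  })
  return res.json() // expected shape per the apidoc: { rows, hasNextPage, bookmark }
}
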
@@ -148,30 +44,6 @@ router
    authorized(PermissionType.TABLE, PermissionLevel.READ),
    rowController.search
  )
  /**
   * @api {post} /api/:sourceId/rows Creates a new row
   * @apiName Creates a new row
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription This API will create a new row based on the supplied body. If the
   * body includes an "_id" field then it will update an existing row if the field
   * links to one. Please note that "_id", "_rev" and "tableId" are fields that are
   * already used by Budibase tables and cannot be used for columns.
   *
   * @apiParam {string} sourceId The ID of the table to save a row to.
   *
   * @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
   * @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
   * must also be provided.
   * @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
   * @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
   * a column in the specified table. All other fields will be dropped and not stored.
   *
   * @apiSuccess {string} _id The ID of the row that was just saved, if it was just created this
   * is the rows new ID.
   * @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
   * @apiSuccess {object} body The contents of the row that was saved will be returned as well.
   */
  .post(
    "/api/:sourceId/rows",
    paramResource("sourceId"),
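To make the row-creation contract above concrete, a request body might look like this sketch; the column names are invented for illustration and are not part of this diff:

// Hypothetical body for POST /api/:sourceId/rows, following the apidoc above.
const newRowBody = {
  tableId: "ta_70260ff0b85c467ca74364aefc46f26d", // the table ID from the search example above
  name: "Example row", // "name" and "age" stand in for whatever columns the table defines
  age: 30,
  // include _id (and _rev for internal tables) only when updating an existing row
}
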
@@ -179,14 +51,6 @@ router
    trimViewRowInfo,
    rowController.save
  )
  /**
   * @api {patch} /api/:sourceId/rows Updates a row
   * @apiName Update a row
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription This endpoint is identical to the row creation endpoint but instead it will
   * error if an _id isn't provided, it will only function for existing rows.
   */
  .patch(
    "/api/:sourceId/rows",
    paramResource("sourceId"),

@@ -194,52 +58,12 @@ router
    trimViewRowInfo,
    rowController.patch
  )
  /**
   * @api {post} /api/:sourceId/rows/validate Validate inputs for a row
   * @apiName Validate inputs for a row
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription When attempting to save a row you may want to check if the row is valid
   * given the table schema, this will iterate through all the constraints on the table and
   * check if the request body is valid.
   *
   * @apiParam {string} sourceId The ID of the table the row is to be validated for.
   *
   * @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
   * against the table schema and constraints.
   *
   * @apiSuccess {boolean} valid If inputs provided are acceptable within the table schema this
   * will be true, if it is not then then errors property will be populated.
   * @apiSuccess {object} [errors] A key value map of information about fields on the input
   * which do not match the table schema. The key name will be the column names that have breached
   * the schema.
   */
  .post(
    "/api/:sourceId/rows/validate",
    paramResource("sourceId"),
    authorized(PermissionType.TABLE, PermissionLevel.WRITE),
    rowController.validate
  )
  /**
   * @api {delete} /api/:sourceId/rows Delete rows
   * @apiName Delete rows
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription This endpoint can delete a single row, or delete them in a bulk
   * fashion.
   *
   * @apiParam {string} sourceId The ID of the table the row is to be deleted from.
   *
   * @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
   * key of the request body that are to be deleted.
   * @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
   * @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
   * revision here.
   *
   * @apiSuccess {object[]|object} body If deleting bulk then the response body will be an array
   * of the deleted rows, if deleting a single row then the body will contain a "row" property which
   * is the deleted row.
   */
  .delete(
    "/api/:sourceId/rows",
    paramResource("sourceId"),

@@ -247,20 +71,6 @@ router
    trimViewRowInfo,
    rowController.destroy
  )

  /**
   * @api {post} /api/:sourceId/rows/exportRows Export Rows
   * @apiName Export rows
   * @apiGroup rows
   * @apiPermission table write access
   * @apiDescription This API can export a number of provided rows
   *
   * @apiParam {string} sourceId The ID of the table the row is to be deleted from.
   *
   * @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
   *
   * @apiSuccess {object[]|object}
   */
  .post(
    "/api/:sourceId/rows/exportRows",
    paramResource("sourceId"),
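For the delete endpoint documented above, the two accepted body shapes look roughly like the following sketch; the IDs and revision are placeholders, not values from this diff:

// Hypothetical bodies for DELETE /api/:sourceId/rows, per the apidoc above.
const bulkDeleteBody = {
  rows: [{ _id: "row-id-a" }, { _id: "row-id-b" }], // bulk: rows to delete
}
const singleDeleteBody = {
  _id: "row-id-a",
  _rev: "1-abc", // revision only needed for internal tables
}
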
@@ -9,99 +9,13 @@ const { BUILDER, PermissionLevel, PermissionType } = permissions
const router: Router = new Router()

router
  /**
   * @api {get} /api/tables Fetch all tables
   * @apiName Fetch all tables
   * @apiGroup tables
   * @apiPermission table read access
   * @apiDescription This endpoint retrieves all of the tables which have been created in
   * an app. This includes all of the external and internal tables; to tell the difference
   * between these look for the "type" property on each table, either being "internal" or "external".
   *
   * @apiSuccess {object[]} body The response body will be the list of tables that was found - as
   * this does not take any parameters the only error scenario is no access.
   */
  .get("/api/tables", authorized(BUILDER), tableController.fetch)
  /**
   * @api {get} /api/tables/:id Fetch a single table
   * @apiName Fetch a single table
   * @apiGroup tables
   * @apiPermission table read access
   * @apiDescription Retrieves a single table this could be be internal or external based on
   * the provided table ID.
   *
   * @apiParam {string} id The ID of the table which is to be retrieved.
   *
   * @apiSuccess {object[]} body The response body will be the table that was found.
   */
  .get(
    "/api/tables/:tableId",
    paramResource("tableId"),
    authorized(PermissionType.TABLE, PermissionLevel.READ, { schema: true }),
    tableController.find
  )
  /**
   * @api {post} /api/tables Save a table
   * @apiName Save a table
   * @apiGroup tables
   * @apiPermission builder
   * @apiDescription Create or update a table with this endpoint, this will function for both internal
   * external tables.
   *
   * @apiParam (Body) {string} [_id] If updating an existing table then the ID of the table must be specified.
   * @apiParam (Body) {string} [_rev] If updating an existing internal table then the revision must also be specified.
   * @apiParam (Body) {string} type] This should either be "internal" or "external" depending on the table type -
   * this will default to internal.
   * @apiParam (Body) {string} [sourceId] If creating an external table then this should be set to the datasource ID. If
   * building an internal table this does not need to be set, although it will be returned as "bb_internal".
   * @apiParam (Body) {string} name The name of the table, this will be used in the UI. To rename the table simply
   * supply the table structure to this endpoint with the name changed.
   * @apiParam (Body) {object} schema A key value object which has all of the columns in the table as the keys in this
   * object. For each column a "type" and "constraints" must be specified, with some types requiring further information.
   * More information about the schema structure can be found in the Typescript definitions.
   * @apiParam (Body) {string} [primaryDisplay] The name of the column which should be used when displaying rows
   * from this table as relationships.
   * @apiParam (Body) {object[]} [indexes] Specifies the search indexes - this is deprecated behaviour with the introduction
   * of lucene indexes. This functionality is only available for internal tables.
   * @apiParam (Body) {object} [_rename] If a column is to be renamed then the "old" column name should be set in this
   * structure, and the "updated", new column name should also be supplied. The schema should also be updated, this field
   * lets the server know that a field hasn't just been deleted, that the data has moved to a new name, this will fix
   * the rows in the table. This functionality is only available for internal tables.
   * @apiParam (Body) {object[]} [rows] When creating a table using a compatible data source, an array of objects to be imported into the new table can be provided.
   *
   * @apiParamExample {json} Example:
   * {
   *   "_id": "ta_05541307fa0f4044abee071ca2a82119",
   *   "_rev": "10-0fbe4e78f69b255d79f1017e2eeef807",
   *   "type": "internal",
   *   "views": {},
   *   "name": "tableName",
   *   "schema": {
   *     "column": {
   *       "type": "string",
   *       "constraints": {
   *         "type": "string",
   *         "length": {
   *           "maximum": null
   *         },
   *         "presence": false
   *       },
   *       "name": "column"
   *     },
   *   },
   *   "primaryDisplay": "column",
   *   "indexes": [],
   *   "sourceId": "bb_internal",
   *   "_rename": {
   *     "old": "columnName",
   *     "updated": "newColumnName",
   *   },
   *   "rows": []
   * }
   *
   * @apiSuccess {object} table The response body will contain the table structure after being cleaned up and
   * saved to the database.
   */
  .post(
    "/api/tables",
    // allows control over updating a table
@@ -125,41 +39,12 @@ router
    authorized(BUILDER),
    tableController.validateExistingTableImport
  )
  /**
   * @api {post} /api/tables/:tableId/:revId Delete a table
   * @apiName Delete a table
   * @apiGroup tables
   * @apiPermission builder
   * @apiDescription This endpoint will delete a table and all of its associated data, for this reason it is
   * quite dangerous - it will work for internal and external tables.
   *
   * @apiParam {string} tableId The ID of the table which is to be deleted.
   * @apiParam {string} [revId] If deleting an internal table then the revision must also be supplied (_rev), for
   * external tables this can simply be set to anything, e.g. "external".
   *
   * @apiSuccess {string} message A message stating that the table was deleted successfully.
   */
  .delete(
    "/api/tables/:tableId/:revId",
    paramResource("tableId"),
    authorized(BUILDER),
    tableController.destroy
  )
  /**
   * @api {post} /api/tables/:tableId/:revId Import CSV to existing table
   * @apiName Import CSV to existing table
   * @apiGroup tables
   * @apiPermission builder
   * @apiDescription This endpoint will import data to existing tables, internal or external. It is used in combination
   * with the CSV validation endpoint. Take the output of the CSV validation endpoint and pass it to this endpoint to
   * import the data; please note this will only import fields that already exist on the table/match the type.
   *
   * @apiParam {string} tableId The ID of the table which the data should be imported to.
   *
   * @apiParam (Body) {object[]} rows An array of objects representing the rows to be imported, key-value pairs not matching the table schema will be ignored.
   *
   * @apiSuccess {string} message A message stating that the data was imported successfully.
   */
  .post(
    "/api/tables/:tableId/import",
    paramResource("tableId"),

@@ -167,4 +52,11 @@ router
    tableController.bulkImport
  )

  .post(
    "/api/tables/:tableId/migrate",
    paramResource("tableId"),
    authorized(BUILDER),
    tableController.migrate
  )

export default router

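The new /api/tables/:tableId/migrate route registered above is what the migration tests later in this diff exercise through config.api.table.migrate. A hedged sketch of an equivalent raw HTTP call follows; the host, auth handling and the string values for the enum fields are assumptions, not confirmed by this diff:

// Hypothetical direct call to the new migrate route.
async function migrateColumn(tableId: string) {
  const res = await fetch(`/api/tables/${tableId}/migrate`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      oldColumn: { name: "user relationship" /* the existing LINK column schema */ },
      // string values assumed for FieldType.BB_REFERENCE / FieldSubtype.USER
      newColumn: { name: "user column", type: "bb_reference", subtype: "user" },
    }),
  })
  return res.json() // per the handler above: { message: "Column user relationship migrated." }
}
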
@@ -7,7 +7,7 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
  "entities": [
    {
      "_id": "ta_users",
      "_rev": "1-2375e1bc58aeec664dc1b1f04ad43e44",
      "_rev": "1-73b7912e6cbdd3d696febc60f3715844",
      "createdAt": "2020-01-01T00:00:00.000Z",
      "name": "Users",
      "primaryDisplay": "email",

@@ -21,7 +21,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
          "presence": true,
          "type": "string",
        },
        "fieldName": "email",
        "name": "email",
        "type": "string",
      },

@@ -30,7 +29,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
          "presence": false,
          "type": "string",
        },
        "fieldName": "firstName",
        "name": "firstName",
        "type": "string",
      },

@@ -39,7 +37,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
          "presence": false,
          "type": "string",
        },
        "fieldName": "lastName",
        "name": "lastName",
        "type": "string",
      },

@@ -54,7 +51,6 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
          "presence": false,
          "type": "string",
        },
        "fieldName": "roleId",
        "name": "roleId",
        "type": "options",
      },

@@ -67,11 +63,12 @@ exports[`/datasources fetch returns all the datasources from the server 1`] = `
          "presence": false,
          "type": "string",
        },
        "fieldName": "status",
        "name": "status",
        "type": "options",
      },
    },
    "sourceId": "bb_internal",
    "sourceType": "internal",
    "type": "table",
    "updatedAt": "2020-01-01T00:00:00.000Z",
    "views": {},

@@ -0,0 +1,49 @@
import * as setup from "./utilities"
import { APIError } from "@budibase/types"

describe("/api/applications/:appId/sync", () => {
  let config = setup.getConfig()

  afterAll(setup.afterAll)
  beforeAll(async () => {
    await config.init()
  })

  describe("/api/attachments/process", () => {
    it("should accept an image file upload", async () => {
      let resp = await config.api.attachment.process(
        "1px.jpg",
        Buffer.from([0])
      )
      expect(resp.length).toBe(1)

      let upload = resp[0]
      expect(upload.url.endsWith(".jpg")).toBe(true)
      expect(upload.extension).toBe("jpg")
      expect(upload.size).toBe(1)
      expect(upload.name).toBe("1px.jpg")
    })

    it("should reject an upload with a malicious file extension", async () => {
      await config.withEnv({ SELF_HOSTED: undefined }, async () => {
        let resp = (await config.api.attachment.process(
          "ohno.exe",
          Buffer.from([0]),
          { expectStatus: 400 }
        )) as unknown as APIError
        expect(resp.message).toContain("invalid extension")
      })
    })

    it("should reject an upload with no file", async () => {
      let resp = (await config.api.attachment.process(
        undefined as any,
        undefined as any,
        {
          expectStatus: 400,
        }
      )) as unknown as APIError
      expect(resp.message).toContain("No file provided")
    })
  })
})

@@ -5,6 +5,8 @@ import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { mocks } from "@budibase/backend-core/tests"

mocks.licenses.useBackups()

describe("/backups", () => {
  let request = setup.getRequest()
  let config = setup.getConfig()

@@ -12,16 +14,17 @@ describe("/backups", () => {
  afterAll(setup.afterAll)

  beforeEach(async () => {
    tk.reset()
    await config.init()
  })

  describe("exportAppDump", () => {
  describe("/api/backups/export", () => {
    it("should be able to export app", async () => {
      const res = await request
        .post(`/api/backups/export?appId=${config.getAppId()}`)
        .set(config.defaultHeaders())
        .expect(200)
      expect(res.headers["content-type"]).toEqual("application/gzip")
      const { body, headers } = await config.api.backup.exportBasicBackup(
        config.getAppId()!
      )
      expect(body instanceof Buffer).toBe(true)
      expect(headers["content-type"]).toEqual("application/gzip")
      expect(events.app.exported).toBeCalledTimes(1)
    })

@@ -36,11 +39,11 @@ describe("/backups", () => {
    it("should infer the app name from the app", async () => {
      tk.freeze(mocks.date.MOCK_DATE)

      const res = await request
        .post(`/api/backups/export?appId=${config.getAppId()}`)
        .set(config.defaultHeaders())
      const { headers } = await config.api.backup.exportBasicBackup(
        config.getAppId()!
      )

      expect(res.headers["content-disposition"]).toEqual(
      expect(headers["content-disposition"]).toEqual(
        `attachment; filename="${
          config.getApp()!.name
        }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`

@@ -48,6 +51,21 @@ describe("/backups", () => {
    })
  })

  describe("/api/backups/import", () => {
    it("should be able to import an app", async () => {
      const appId = config.getAppId()!
      const automation = await config.createAutomation()
      await config.createAutomationLog(automation, appId)
      await config.createScreen()
      const exportRes = await config.api.backup.createBackup(appId)
      expect(exportRes.backupId).toBeDefined()
      const importRes = await config.api.backup.importBackup(
        appId,
        exportRes.backupId
      )
    })
  })

  describe("calculateBackupStats", () => {
    it("should be able to calculate the backup statistics", async () => {
      await config.createAutomation()

@@ -158,5 +158,25 @@ describe("/roles", () => {
      expect(res.body.length).toBe(1)
      expect(res.body[0]).toBe("PUBLIC")
    })

    it("should not fetch higher level accessible roles when a custom role header is provided", async () => {
      await createRole({
        name: `CUSTOM_ROLE`,
        inherits: roles.BUILTIN_ROLE_IDS.BASIC,
        permissionId: permissions.BuiltinPermissionID.READ_ONLY,
        version: "name",
      })
      const res = await request
        .get("/api/roles/accessible")
        .set({
          ...config.defaultHeaders(),
          "x-budibase-role": "CUSTOM_ROLE"
        })
        .expect(200)
      expect(res.body.length).toBe(3)
      expect(res.body[0]).toBe("CUSTOM_ROLE")
      expect(res.body[1]).toBe("BASIC")
      expect(res.body[2]).toBe("PUBLIC")
    })
  })
})

@@ -1,5 +1,5 @@
const setup = require("./utilities")
const { basicScreen } = setup.structures
const { basicScreen, powerScreen } = setup.structures
const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions")
const { roles } = require("@budibase/backend-core")
const { BUILTIN_ROLE_IDS } = roles

@@ -12,19 +12,14 @@ const route = "/test"
describe("/routing", () => {
  let request = setup.getRequest()
  let config = setup.getConfig()
  let screen, screen2
  let basic, power

  afterAll(setup.afterAll)

  beforeAll(async () => {
    await config.init()
    screen = basicScreen()
    screen.routing.route = route
    screen = await config.createScreen(screen)
    screen2 = basicScreen()
    screen2.routing.roleId = BUILTIN_ROLE_IDS.POWER
    screen2.routing.route = route
    screen2 = await config.createScreen(screen2)
    basic = await config.createScreen(basicScreen(route))
    power = await config.createScreen(powerScreen(route))
    await config.publish()
  })

@@ -61,8 +56,8 @@ describe("/routing", () => {
      expect(res.body.routes[route]).toEqual({
        subpaths: {
          [route]: {
            screenId: screen._id,
            roleId: screen.routing.roleId
            screenId: basic._id,
            roleId: basic.routing.roleId
          }
        }
      })

@@ -80,8 +75,8 @@ describe("/routing", () => {
      expect(res.body.routes[route]).toEqual({
        subpaths: {
          [route]: {
            screenId: screen2._id,
            roleId: screen2.routing.roleId
            screenId: power._id,
            roleId: power.routing.roleId
          }
        }
      })

@@ -101,8 +96,8 @@ describe("/routing", () => {
      expect(res.body.routes).toBeDefined()
      expect(res.body.routes[route].subpaths[route]).toBeDefined()
      const subpath = res.body.routes[route].subpaths[route]
      expect(subpath.screens[screen2.routing.roleId]).toEqual(screen2._id)
      expect(subpath.screens[screen.routing.roleId]).toEqual(screen._id)
      expect(subpath.screens[power.routing.roleId]).toEqual(power._id)
      expect(subpath.screens[basic.routing.roleId]).toEqual(basic._id)
    })

    it("make sure it is a builder only endpoint", async () => {

@@ -10,6 +10,7 @@ import {
  FieldSchema,
  FieldType,
  FieldTypeSubtypes,
  INTERNAL_TABLE_SOURCE_ID,
  MonthlyQuotaName,
  PermissionLevel,
  QuotaUsageType,

@@ -21,6 +22,7 @@ import {
  SortType,
  StaticQuotaName,
  Table,
  TableSourceType,
} from "@budibase/types"
import {
  expectAnyExternalColsAttributes,

@@ -65,6 +67,8 @@ describe.each([
    type: "table",
    primary: ["id"],
    primaryDisplay: "name",
    sourceType: TableSourceType.INTERNAL,
    sourceId: INTERNAL_TABLE_SOURCE_ID,
    schema: {
      id: {
        type: FieldType.AUTO,

@@ -134,9 +138,22 @@ describe.each([
      }
    : undefined

  async function createTable(
    cfg: Omit<SaveTableRequest, "sourceId" | "sourceType">,
    opts?: { skipReassigning: boolean }
  ) {
    let table
    if (dsProvider) {
      table = await config.createExternalTable(cfg, opts)
    } else {
      table = await config.createTable(cfg, opts)
    }
    return table
  }

  beforeAll(async () => {
    const tableConfig = generateTableConfig()
    const table = await config.createTable(tableConfig)
    let table = await createTable(tableConfig)
    tableId = table._id!
  })

@@ -165,7 +182,7 @@ describe.each([
    const queryUsage = await getQueryUsage()

    const tableConfig = generateTableConfig()
    const newTable = await config.createTable(
    const newTable = await createTable(
      {
        ...tableConfig,
        name: "TestTableAuto",

@@ -242,7 +259,7 @@ describe.each([
    })

    it("should list all rows for given tableId", async () => {
      const table = await config.createTable(generateTableConfig(), {
      const table = await createTable(generateTableConfig(), {
        skipReassigning: true,
      })
      const tableId = table._id!

@@ -323,7 +340,7 @@ describe.each([
          inclusion: ["Alpha", "Beta", "Gamma"],
        },
      }
      const table = await config.createTable({
      const table = await createTable({
        name: "TestTable2",
        type: "table",
        schema: {

@@ -438,7 +455,8 @@ describe.each([

  describe("view save", () => {
    it("views have extra data trimmed", async () => {
      const table = await config.createTable({
      const table = await createTable({
        type: "table",
        name: "orders",
        primary: ["OrderID"],
        schema: {

@@ -494,7 +512,7 @@ describe.each([
  describe("patch", () => {
    beforeAll(async () => {
      const tableConfig = generateTableConfig()
      table = await config.createTable(tableConfig)
      table = await createTable(tableConfig)
    })

    it("should update only the fields that are supplied", async () => {

@@ -548,7 +566,7 @@ describe.each([
  describe("destroy", () => {
    beforeAll(async () => {
      const tableConfig = generateTableConfig()
      table = await config.createTable(tableConfig)
      table = await createTable(tableConfig)
    })

    it("should be able to delete a row", async () => {

@@ -566,7 +584,7 @@ describe.each([
  describe("validate", () => {
    beforeAll(async () => {
      const tableConfig = generateTableConfig()
      table = await config.createTable(tableConfig)
      table = await createTable(tableConfig)
    })

    it("should return no errors on valid row", async () => {

@@ -603,7 +621,7 @@ describe.each([
  describe("bulkDelete", () => {
    beforeAll(async () => {
      const tableConfig = generateTableConfig()
      table = await config.createTable(tableConfig)
      table = await createTable(tableConfig)
    })

    it("should be able to delete a bulk set of rows", async () => {

@@ -687,7 +705,7 @@ describe.each([
  describe("fetchView", () => {
    beforeEach(async () => {
      const tableConfig = generateTableConfig()
      table = await config.createTable(tableConfig)
      table = await createTable(tableConfig)
    })

    it("should be able to fetch tables contents via 'view'", async () => {

@@ -735,7 +753,7 @@ describe.each([
  describe("fetchEnrichedRows", () => {
    beforeAll(async () => {
      const tableConfig = generateTableConfig()
      table = await config.createTable(tableConfig)
      table = await createTable(tableConfig)
    })

    it("should allow enriching some linked rows", async () => {

@@ -808,7 +826,7 @@ describe.each([
  describe("attachments", () => {
    beforeAll(async () => {
      const tableConfig = generateTableConfig()
      table = await config.createTable(tableConfig)
      table = await createTable(tableConfig)
    })

    it("should allow enriching attachment rows", async () => {

@@ -839,7 +857,7 @@ describe.each([
  describe("exportData", () => {
    beforeAll(async () => {
      const tableConfig = generateTableConfig()
      table = await config.createTable(tableConfig)
      table = await createTable(tableConfig)
    })

    it("should allow exporting all columns", async () => {

@@ -880,6 +898,8 @@ describe.each([
  async function userTable(): Promise<Table> {
    return {
      name: `users_${generator.word()}`,
      sourceId: INTERNAL_TABLE_SOURCE_ID,
      sourceType: TableSourceType.INTERNAL,
      type: "table",
      primary: ["id"],
      schema: {

@@ -925,7 +945,7 @@ describe.each([

  describe("create", () => {
    it("should persist a new row with only the provided view fields", async () => {
      const table = await config.createTable(await userTable())
      const table = await createTable(await userTable())
      const view = await config.createView({
        schema: {
          name: { visible: true },

@@ -960,7 +980,7 @@ describe.each([

  describe("patch", () => {
    it("should update only the view fields for a row", async () => {
      const table = await config.createTable(await userTable())
      const table = await createTable(await userTable())
      const tableId = table._id!
      const view = await config.createView({
        schema: {

@@ -1001,7 +1021,7 @@ describe.each([

  describe("destroy", () => {
    it("should be able to delete a row", async () => {
      const table = await config.createTable(await userTable())
      const table = await createTable(await userTable())
      const tableId = table._id!
      const view = await config.createView({
        schema: {

@@ -1025,7 +1045,7 @@ describe.each([
    })

    it("should be able to delete multiple rows", async () => {
      const table = await config.createTable(await userTable())
      const table = await createTable(await userTable())
      const tableId = table._id!
      const view = await config.createView({
        schema: {

@@ -1062,6 +1082,8 @@ describe.each([
  async function userTable(): Promise<Table> {
    return {
      name: `users_${generator.word()}`,
      sourceId: INTERNAL_TABLE_SOURCE_ID,
      sourceType: TableSourceType.INTERNAL,
      type: "table",
      primary: ["id"],
      schema: {

@@ -1088,7 +1110,7 @@ describe.each([
  }

  it("returns empty rows from view when no schema is passed", async () => {
    const table = await config.createTable(await userTable())
    const table = await createTable(await userTable())
    const rows = await Promise.all(
      Array.from({ length: 10 }, () =>
        config.api.row.save(table._id!, { tableId: table._id })

@@ -1119,7 +1141,7 @@ describe.each([
  })

  it("searching respects the view filters", async () => {
    const table = await config.createTable(await userTable())
    const table = await createTable(await userTable())

    await Promise.all(
      Array.from({ length: 10 }, () =>

@@ -1243,7 +1265,7 @@ describe.each([

  describe("sorting", () => {
    beforeAll(async () => {
      const table = await config.createTable(await userTable())
      const table = await createTable(await userTable())
      const users = [
        { name: "Alice", age: 25 },
        { name: "Bob", age: 30 },

@@ -1310,7 +1332,7 @@ describe.each([
  })

  it("when schema is defined, defined columns and row attributes are returned", async () => {
    const table = await config.createTable(await userTable())
    const table = await createTable(await userTable())
    const rows = await Promise.all(
      Array.from({ length: 10 }, () =>
        config.api.row.save(table._id!, {

@@ -1341,7 +1363,7 @@ describe.each([
  })

  it("views without data can be returned", async () => {
    const table = await config.createTable(await userTable())
    const table = await createTable(await userTable())

    const createViewResponse = await config.createView()
    const response = await config.api.viewV2.search(createViewResponse.id)

@@ -1350,7 +1372,7 @@ describe.each([
  })

  it("respects the limit parameter", async () => {
    await config.createTable(await userTable())
    await createTable(await userTable())
    await Promise.all(Array.from({ length: 10 }, () => config.createRow()))

    const limit = generator.integer({ min: 1, max: 8 })

@@ -1365,7 +1387,7 @@ describe.each([
  })

  it("can handle pagination", async () => {
    await config.createTable(await userTable())
    await createTable(await userTable())
    await Promise.all(Array.from({ length: 10 }, () => config.createRow()))

    const createViewResponse = await config.createView()

@@ -1443,7 +1465,7 @@ describe.each([
    let tableId: string

    beforeAll(async () => {
      await config.createTable(await userTable())
      await createTable(await userTable())
      await Promise.all(
        Array.from({ length: 10 }, () => config.createRow())
      )

@@ -1521,13 +1543,13 @@ describe.each([
    let o2mTable: Table
    let m2mTable: Table
    beforeAll(async () => {
      o2mTable = await config.createTable(
      o2mTable = await createTable(
        { ...generateTableConfig(), name: "o2m" },
        {
          skipReassigning: true,
        }
      )
      m2mTable = await config.createTable(
      m2mTable = await createTable(
        { ...generateTableConfig(), name: "m2m" },
        {
          skipReassigning: true,

@@ -1597,9 +1619,9 @@ describe.each([
    const tableConfig = generateTableConfig()

    if (config.datasource) {
      tableConfig.sourceId = config.datasource._id
      tableConfig.sourceId = config.datasource._id!
      if (config.datasource.plus) {
        tableConfig.type = "external"
        tableConfig.sourceType = TableSourceType.EXTERNAL
      }
    }
    const table = await config.api.table.create({

@@ -5,11 +5,15 @@ describe("/static", () => {
  let request = setup.getRequest()
  let config = setup.getConfig()
  let app
  let cleanupEnv

  afterAll(setup.afterAll)
  afterAll(() => {
    setup.afterAll()
    cleanupEnv()
  })

  beforeAll(async () => {
    config.modeSelf()
    cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
    app = await config.init()
  })

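The static and webhook specs in this diff both switch from config.modeSelf() to config.setEnv({ SELF_HOSTED: "true" }), which appears to return a cleanup function that the tests call in afterAll. A rough sketch of that pattern follows; this is an invented stand-in, not the real test config implementation:

// Rough sketch of an env-override helper that hands back a cleanup function,
// mirroring how cleanupEnv is used above; the name and details are assumptions.
function setEnvSketch(overrides: Record<string, string>): () => void {
  const previous: Record<string, string | undefined> = {}
  for (const [key, value] of Object.entries(overrides)) {
    previous[key] = process.env[key]
    process.env[key] = value
  }
  // the returned cleanup restores whatever was set before the override
  return () => {
    for (const [key, value] of Object.entries(previous)) {
      if (value === undefined) delete process.env[key]
      else process.env[key] = value
    }
  }
}
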
@@ -1,16 +1,24 @@
import { events, context } from "@budibase/backend-core"
import { context, events } from "@budibase/backend-core"
import {
  FieldType,
  SaveTableRequest,
  RelationshipType,
  Table,
  ViewCalculation,
  AutoFieldSubTypes,
  FieldSubtype,
  FieldType,
  INTERNAL_TABLE_SOURCE_ID,
  InternalTable,
  RelationshipType,
  Row,
  SaveTableRequest,
  Table,
  TableSourceType,
  User,
  ViewCalculation,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
const { basicTable } = setup.structures
import sdk from "../../../sdk"
import uuid from "uuid"

const { basicTable } = setup.structures

describe("/tables", () => {
  let request = setup.getRequest()

@@ -239,7 +247,8 @@ describe("/tables", () => {
      .expect(200)
    const fetchedTable = res.body[0]
    expect(fetchedTable.name).toEqual(testTable.name)
    expect(fetchedTable.type).toEqual("internal")
    expect(fetchedTable.type).toEqual("table")
    expect(fetchedTable.sourceType).toEqual("internal")
  })

  it("should apply authorization to endpoint", async () => {

@@ -417,4 +426,281 @@ describe("/tables", () => {
      })
    })
  })

  describe("migrate", () => {
    let users: User[]
    beforeAll(async () => {
      users = await Promise.all([
        config.createUser({ email: `${uuid.v4()}@example.com` }),
        config.createUser({ email: `${uuid.v4()}@example.com` }),
        config.createUser({ email: `${uuid.v4()}@example.com` }),
      ])
    })

    it("should successfully migrate a one-to-many user relationship to a user column", async () => {
      const table = await config.api.table.create({
        name: "table",
        type: "table",
        sourceId: INTERNAL_TABLE_SOURCE_ID,
        sourceType: TableSourceType.INTERNAL,
        schema: {
          "user relationship": {
            type: FieldType.LINK,
            fieldName: "test",
            name: "user relationship",
            constraints: {
              type: "array",
              presence: false,
            },
            relationshipType: RelationshipType.ONE_TO_MANY,
            tableId: InternalTable.USER_METADATA,
          },
        },
      })

      const rows = await Promise.all(
        users.map(u =>
          config.api.row.save(table._id!, { "user relationship": [u] })
        )
      )

      await config.api.table.migrate(table._id!, {
        oldColumn: table.schema["user relationship"],
        newColumn: {
          name: "user column",
          type: FieldType.BB_REFERENCE,
          subtype: FieldSubtype.USER,
        },
      })

      const migratedTable = await config.api.table.get(table._id!)
      expect(migratedTable.schema["user column"]).toBeDefined()
      expect(migratedTable.schema["user relationship"]).not.toBeDefined()

      const migratedRows = await config.api.row.fetch(table._id!)

      rows.sort((a, b) => a._id!.localeCompare(b._id!))
      migratedRows.sort((a, b) => a._id!.localeCompare(b._id!))

      for (const [i, row] of rows.entries()) {
        const migratedRow = migratedRows[i]
        expect(migratedRow["user column"]).toBeDefined()
        expect(migratedRow["user relationship"]).not.toBeDefined()
        expect(row["user relationship"][0]._id).toEqual(
          migratedRow["user column"][0]._id
        )
      }
    })

it("should successfully migrate a many-to-many user relationship to a users column", async () => {
|
||||
const table = await config.api.table.create({
|
||||
name: "table",
|
||||
type: "table",
|
||||
sourceId: INTERNAL_TABLE_SOURCE_ID,
|
||||
sourceType: TableSourceType.INTERNAL,
|
||||
schema: {
|
||||
"user relationship": {
|
||||
type: FieldType.LINK,
|
||||
fieldName: "test",
|
||||
name: "user relationship",
|
||||
constraints: {
|
||||
type: "array",
|
||||
presence: false,
|
||||
},
|
||||
relationshipType: RelationshipType.MANY_TO_MANY,
|
||||
tableId: InternalTable.USER_METADATA,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const row1 = await config.api.row.save(table._id!, {
|
||||
"user relationship": [users[0], users[1]],
|
||||
})
|
||||
|
||||
const row2 = await config.api.row.save(table._id!, {
|
||||
"user relationship": [users[1], users[2]],
|
||||
})
|
||||
|
||||
await config.api.table.migrate(table._id!, {
|
||||
oldColumn: table.schema["user relationship"],
|
||||
newColumn: {
|
||||
name: "user column",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
})
|
||||
|
||||
const migratedTable = await config.api.table.get(table._id!)
|
||||
expect(migratedTable.schema["user column"]).toBeDefined()
|
||||
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
|
||||
|
||||
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
|
||||
.body as Row
|
||||
expect(row1Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[0]._id, users[1]._id])
|
||||
)
|
||||
|
||||
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
|
||||
.body as Row
|
||||
expect(row2Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[1]._id, users[2]._id])
|
||||
)
|
||||
})
|
||||
|
||||
it("should successfully migrate a many-to-one user relationship to a users column", async () => {
|
||||
const table = await config.api.table.create({
|
||||
name: "table",
|
||||
type: "table",
|
||||
sourceId: INTERNAL_TABLE_SOURCE_ID,
|
||||
sourceType: TableSourceType.INTERNAL,
|
||||
schema: {
|
||||
"user relationship": {
|
||||
type: FieldType.LINK,
|
||||
fieldName: "test",
|
||||
name: "user relationship",
|
||||
constraints: {
|
||||
type: "array",
|
||||
presence: false,
|
||||
},
|
||||
relationshipType: RelationshipType.MANY_TO_ONE,
|
||||
tableId: InternalTable.USER_METADATA,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const row1 = await config.api.row.save(table._id!, {
|
||||
"user relationship": [users[0], users[1]],
|
||||
})
|
||||
|
||||
const row2 = await config.api.row.save(table._id!, {
|
||||
"user relationship": [users[2]],
|
||||
})
|
||||
|
||||
await config.api.table.migrate(table._id!, {
|
||||
oldColumn: table.schema["user relationship"],
|
||||
newColumn: {
|
||||
name: "user column",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
})
|
||||
|
||||
const migratedTable = await config.api.table.get(table._id!)
|
||||
expect(migratedTable.schema["user column"]).toBeDefined()
|
||||
expect(migratedTable.schema["user relationship"]).not.toBeDefined()
|
||||
|
||||
const row1Migrated = (await config.api.row.get(table._id!, row1._id!))
|
||||
.body as Row
|
||||
expect(row1Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual(
|
||||
expect.arrayContaining([users[0]._id, users[1]._id])
|
||||
)
|
||||
|
||||
const row2Migrated = (await config.api.row.get(table._id!, row2._id!))
|
||||
.body as Row
|
||||
expect(row2Migrated["user relationship"]).not.toBeDefined()
|
||||
expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([
|
||||
users[2]._id,
|
||||
])
|
||||
})
|
||||
|
||||
describe("unhappy paths", () => {
|
||||
let table: Table
|
||||
beforeAll(async () => {
|
||||
table = await config.api.table.create({
|
||||
name: "table",
|
||||
type: "table",
|
||||
sourceId: INTERNAL_TABLE_SOURCE_ID,
|
||||
sourceType: TableSourceType.INTERNAL,
|
||||
schema: {
|
||||
"user relationship": {
|
||||
type: FieldType.LINK,
|
||||
fieldName: "test",
|
||||
name: "user relationship",
|
||||
constraints: {
|
||||
type: "array",
|
||||
presence: false,
|
||||
},
|
||||
relationshipType: RelationshipType.MANY_TO_ONE,
|
||||
tableId: InternalTable.USER_METADATA,
|
||||
},
|
||||
num: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "num",
|
||||
constraints: {
|
||||
type: "number",
|
||||
presence: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should fail if the new column name is blank", async () => {
|
||||
await config.api.table.migrate(
|
||||
table._id!,
|
||||
{
|
||||
oldColumn: table.schema["user relationship"],
|
||||
newColumn: {
|
||||
name: "",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
it("should fail if the new column name is a reserved name", async () => {
|
||||
await config.api.table.migrate(
|
||||
table._id!,
|
||||
{
|
||||
oldColumn: table.schema["user relationship"],
|
||||
newColumn: {
|
||||
name: "_id",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
it("should fail if the new column name is the same as an existing column", async () => {
|
||||
await config.api.table.migrate(
|
||||
table._id!,
|
||||
{
|
||||
oldColumn: table.schema["user relationship"],
|
||||
newColumn: {
|
||||
name: "num",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
)
|
||||
})
|
||||
|
||||
it("should fail if the old column name isn't a column in the table", async () => {
|
||||
await config.api.table.migrate(
|
||||
table._id!,
|
||||
{
|
||||
oldColumn: {
|
||||
name: "not a column",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
newColumn: {
|
||||
name: "new column",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: FieldSubtype.USERS,
|
||||
},
|
||||
},
|
||||
{ expectStatus: 400 }
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@@ -3,10 +3,12 @@ import {
  CreateViewRequest,
  FieldSchema,
  FieldType,
  INTERNAL_TABLE_SOURCE_ID,
  SearchQueryOperators,
  SortOrder,
  SortType,
  Table,
  TableSourceType,
  UIFieldMetadata,
  UpdateViewRequest,
  ViewV2,

@@ -18,6 +20,8 @@ function priceTable(): Table {
  return {
    name: "table",
    type: "table",
    sourceId: INTERNAL_TABLE_SOURCE_ID,
    sourceType: TableSourceType.INTERNAL,
    schema: {
      Price: {
        type: FieldType.NUMBER,

@@ -54,10 +58,10 @@ describe.each([
      },
    })

    return config.createTable({
    return config.createExternalTable({
      ...priceTable(),
      sourceId: datasource._id,
      type: "external",
      sourceType: TableSourceType.EXTERNAL,
    })
  },
],

@@ -8,11 +8,15 @@ describe("/webhooks", () => {
  let request = setup.getRequest()
  let config = setup.getConfig()
  let webhook: Webhook
  let cleanupEnv: () => void

  afterAll(setup.afterAll)
  afterAll(() => {
    setup.afterAll()
    cleanupEnv()
  })

  const setupTest = async () => {
    config.modeSelf()
    cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
    await config.init()
    const autoConfig = basicAutomation()
    autoConfig.definition.trigger.schema = {