Merge branch 'master' into account-portal-api-license-key

Commit a3189ac786 by Mitch-Budibase, 2023-10-24 17:33:27 +01:00
181 changed files with 2572 additions and 1814 deletions

View File

@ -18,8 +18,7 @@ env:
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
NX_BASE_BRANCH: origin/${{ github.base_ref }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' }}
jobs:
lint:
@ -231,7 +230,7 @@ jobs:
cache: "yarn"
- run: yarn --frozen-lockfile
- name: Build packages
run: yarn build --scope @budibase/server --scope @budibase/worker --scope @budibase/client --scope @budibase/backend-core
run: yarn build --scope @budibase/server --scope @budibase/worker
- name: Run tests
run: |
cd qa-core

View File

@ -4,6 +4,8 @@ on:
types: [created]
pull_request_target:
types: [opened,closed,synchronize]
branches:
- master
jobs:
CLAssistant:
@ -33,4 +35,4 @@ jobs:
#custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
#custom-allsigned-prcomment: 'pull request comment when all contributors has signed, defaults to **CLA Assistant Lite bot** All Contributors have signed the CLA.'
#lock-pullrequest-aftermerge: false - if you don't want this bot to automatically lock the pull request after merging (default - true)
#use-dco-flag: true - If you are using DCO instead of CLA

.github/workflows/deploy-qa.yml (new file, 20 lines)
View File

@ -0,0 +1,20 @@
name: Deploy QA
on:
push:
branches:
- master
workflow_dispatch:
jobs:
trigger-deploy-to-qa-env:
runs-on: ubuntu-latest
steps:
- uses: peter-evans/repository-dispatch@v2
env:
PAYLOAD_VERSION: ${{ github.sha }}
REF_NAME: ${{ github.ref_name}}
with:
repository: budibase/budibase-deploys
event-type: budicloud-qa-deploy
token: ${{ secrets.GH_ACCESS_TOKEN }}
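# On every push to master (or a manual workflow_dispatch) this fires a
# budicloud-qa-deploy event at budibase/budibase-deploys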

View File

@ -14,7 +14,6 @@ env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
jobs:
@ -110,7 +109,6 @@ jobs:
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
git push
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
@ -125,6 +123,7 @@ jobs:
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
REF_NAME: ${{ github.ref_name}}
with:
repository: budibase/budibase-deploys
event: budicloud-qa-deploy

View File

@ -54,6 +54,7 @@ jobs:
push: true
pull: true
platforms: linux/amd64,linux/arm64
build-args: BUDIBASE_VERSION=0.0.0+test
tags: budibase/budibase-test:test
file: ./hosting/single/Dockerfile.v2
cache-from: type=registry,ref=budibase/budibase-test:test
@ -64,6 +65,8 @@ jobs:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
build-args: |
TARGETBUILD=aas
BUDIBASE_VERSION=0.0.0+test
tags: budibase/budibase-test:aas
file: ./hosting/single/Dockerfile.v2

View File

@ -20,8 +20,8 @@ jobs:
with:
root-reserve-mb: 30000
swap-size-mb: 1024
remove-android: 'true'
remove-dotnet: 'true'
remove-android: "true"
remove-dotnet: "true"
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
@ -48,7 +48,7 @@ jobs:
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Run Yarn Build
run: yarn build:docker:pre
run: yarn build
- name: Login to Docker Hub
uses: docker/login-action@v2
with:

View File

@ -134,8 +134,6 @@ spec:
{{ end }}
- name: SELF_HOSTED
value: {{ .Values.globals.selfHosted | quote }}
- name: SENTRY_DSN
value: {{ .Values.globals.sentryDSN | quote }}
- name: POSTHOG_TOKEN
value: {{ .Values.globals.posthogToken | quote }}
- name: WORKER_URL

View File

@ -130,8 +130,6 @@ spec:
{{ end }}
- name: SELF_HOSTED
value: {{ .Values.globals.selfHosted | quote }}
- name: SENTRY_DSN
value: {{ .Values.globals.sentryDSN }}
- name: ENABLE_ANALYTICS
value: {{ .Values.globals.enableAnalytics | quote }}
- name: POSTHOG_TOKEN

View File

@ -78,7 +78,6 @@ globals:
budibaseEnv: PRODUCTION
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
enableAnalytics: "1"
sentryDSN: ""
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs

View File

@ -5,7 +5,7 @@ ENV COUCHDB_PASSWORD admin
EXPOSE 5984
RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | sudo apt-key add - && \
wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security/updates main' && \
apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bullseye main' && \

View File

@ -3,3 +3,6 @@
[couchdb]
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views
[chttpd_auth]
timeout = 7200 ; 2 hours in seconds
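; sessions idle for longer than this must re-authenticate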

View File

@ -4,7 +4,11 @@ version: "3"
services:
app-service:
build: ../packages/server
build:
context: ..
dockerfile: packages/server/Dockerfile.v2
args:
- BUDIBASE_VERSION=0.0.0+dev-docker
container_name: build-bbapps
environment:
SELF_HOSTED: 1
@ -19,7 +23,6 @@ services:
API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
JWT_SECRET: ${JWT_SECRET}
LOG_LEVEL: info
SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
ENABLE_ANALYTICS: "true"
REDIS_URL: redis-service:6379
REDIS_PASSWORD: ${REDIS_PASSWORD}
@ -29,11 +32,13 @@ services:
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
build: ../packages/worker
build:
context: ..
dockerfile: packages/worker/Dockerfile.v2
args:
- BUDIBASE_VERSION=0.0.0+dev-docker
container_name: build-bbworker
environment:
SELF_HOSTED: 1
@ -48,7 +53,6 @@ services:
COUCH_DB_USERNAME: ${COUCH_DB_USER}
COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
INTERNAL_API_KEY: ${INTERNAL_API_KEY}
REDIS_URL: redis-service:6379
REDIS_PASSWORD: ${REDIS_PASSWORD}

View File

@ -20,7 +20,6 @@ services:
API_ENCRYPTION_KEY: ${API_ENCRYPTION_KEY}
JWT_SECRET: ${JWT_SECRET}
LOG_LEVEL: info
SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
ENABLE_ANALYTICS: "true"
REDIS_URL: redis-service:6379
REDIS_PASSWORD: ${REDIS_PASSWORD}
@ -31,8 +30,8 @@ services:
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
restart: unless-stopped
@ -51,7 +50,6 @@ services:
COUCH_DB_USERNAME: ${COUCH_DB_USER}
COUCH_DB_PASSWORD: ${COUCH_DB_PASSWORD}
COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
INTERNAL_API_KEY: ${INTERNAL_API_KEY}
REDIS_URL: redis-service:6379
REDIS_PASSWORD: ${REDIS_PASSWORD}
@ -113,7 +111,12 @@ services:
PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
depends_on:
- couchdb-service
command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
command:
[
"sh",
"-c",
"sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;",
]
redis-service:
restart: unless-stopped

View File

@ -12,14 +12,14 @@ RUN chmod +x /cleanup.sh
WORKDIR /app
ADD packages/server .
COPY yarn.lock .
RUN yarn install --production=true --network-timeout 100000
RUN yarn install --production=true --network-timeout 1000000
RUN /cleanup.sh
# build worker
WORKDIR /worker
ADD packages/worker .
COPY yarn.lock .
RUN yarn install --production=true --network-timeout 100000
RUN yarn install --production=true --network-timeout 1000000
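# yarn's network timeout is in milliseconds, so this raises the limit from ~100s to ~16.7 minutes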
RUN /cleanup.sh
FROM budibase/couchdb

View File

@ -19,13 +19,15 @@ COPY packages/string-templates/package.json packages/string-templates/package.js
COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
RUN ./scripts/removeWorkspaceDependencies.sh
RUN ./scripts/removeWorkspaceDependencies.sh packages/server/package.json
RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json
# We will never want to sync pro, but the script is still required
RUN echo '' > scripts/syncProPackage.js
RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
RUN ./scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
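# the BuildKit cache mount keeps yarn's download cache out of the final image layers while still speeding up repeat builds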
# copy the actual code
COPY packages/server/dist packages/server/dist
@ -116,6 +118,10 @@ EXPOSE 443
EXPOSE 2222
VOLUME /data
ARG BUDIBASE_VERSION
# Ensuring the version argument is sent
RUN test -n "$BUDIBASE_VERSION"
ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
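# read at runtime by backend-core, which now prefers process.env.BUDIBASE_VERSION over the package.json version (see the environment change later in this diff)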
HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"

View File

@ -1,5 +1,5 @@
{
"version": "2.11.35",
"version": "2.11.43",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -3,14 +3,16 @@
"default": {
"runner": "nx-cloud",
"options": {
"cacheableOperations": ["build", "test", "check:types"],
"accessToken": "MmM4OGYxNzItMDBlYy00ZmE3LTk4MTYtNmJhYWMyZjBjZTUyfHJlYWQ="
"cacheableOperations": ["build", "test", "check:types"]
}
}
},
"targetDefaults": {
"build": {
"inputs": ["{workspaceRoot}/scripts/build.js"]
"inputs": [
"{workspaceRoot}/scripts/build.js",
"{workspaceRoot}/lerna.json"
]
}
}
}

View File

@ -46,7 +46,7 @@
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"test": "lerna run --stream test --stream",
"lint:eslint": "eslint packages qa-core --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",

View File

@ -35,7 +35,7 @@
"dotenv": "16.0.1",
"ioredis": "5.3.2",
"joi": "17.6.0",
"jsonwebtoken": "9.0.0",
"jsonwebtoken": "9.0.2",
"koa-passport": "4.1.4",
"koa-pino-logger": "4.0.0",
"lodash": "4.17.21",
@ -63,7 +63,7 @@
"@types/chance": "1.1.3",
"@types/cookies": "0.7.8",
"@types/jest": "29.5.5",
"@types/lodash": "4.14.180",
"@types/lodash": "4.14.200",
"@types/node": "18.17.0",
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "6.4.0",

View File

@ -33,8 +33,8 @@ function isInvalid(metadata?: { state: string }) {
* Get the requested app metadata by id.
* Use redis cache to first read the app metadata.
* If not present fallback to loading the app metadata directly and re-caching.
* @param {string} appId the id of the app to get metadata from.
* @returns {object} the app metadata.
* @param appId the id of the app to get metadata from.
* @returns the app metadata.
*/
export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
const client = await getAppClient()
@ -72,9 +72,9 @@ export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
/**
* Invalidate/reset the cached metadata when a change occurs in the db.
* @param appId {string} the cache key to bust/update.
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
* @return {Promise<void>} will respond with success when cache is updated.
* @param appId the cache key to bust/update.
* @param newMetadata optional - can simply provide the new metadata to update with.
* @return will respond with success when cache is updated.
*/
export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
if (!appId) {

View File

@ -61,9 +61,9 @@ async function populateUsersFromDB(
* Get the requested user by id.
* Use redis cache to first read the user.
* If not present fallback to loading the user directly and re-caching.
* @param {*} userId the id of the user to get
* @param {*} tenantId the tenant of the user to get
* @param {*} populateUser function to provide the user for re-caching. default to couch db
* @param userId the id of the user to get
* @param tenantId the tenant of the user to get
* @param populateUser function to provide the user for re-caching. default to couch db
* @returns
*/
export async function getUser(
@ -111,8 +111,8 @@ export async function getUser(
* Get the requested users by id.
* Use redis cache to first read the users.
* If not present fallback to loading the users directly and re-caching.
* @param {*} userIds the ids of the user to get
* @param {*} tenantId the tenant of the users to get
* @param userIds the ids of the user to get
* @param tenantId the tenant of the users to get
* @returns
*/
export async function getUsers(

View File

@ -23,7 +23,7 @@ import environment from "../environment"
/**
* Generates a new configuration ID.
* @returns {string} The new configuration ID which the config doc can be stored under.
* @returns The new configuration ID which the config doc can be stored under.
*/
export function generateConfigID(type: ConfigType) {
return `${DocumentType.CONFIG}${SEPARATOR}${type}`

View File

@ -62,7 +62,7 @@ export function isTenancyEnabled() {
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {null|string} The tenant ID found within the app ID.
* @return The tenant ID found within the app ID.
*/
export function getTenantIDFromAppID(appId: string) {
if (!appId) {

View File

@ -8,8 +8,8 @@ class Replication {
/**
*
* @param {String} source - the DB you want to replicate or rollback to
* @param {String} target - the DB you want to replicate to, or rollback from
* @param source - the DB you want to replicate or rollback to
* @param target - the DB you want to replicate to, or rollback from
*/
constructor({ source, target }: any) {
this.source = getPouchDB(source)
@ -38,7 +38,7 @@ class Replication {
/**
* Two way replication operation, intended to be promise based.
* @param {Object} opts - PouchDB replication options
* @param opts - PouchDB replication options
*/
sync(opts = {}) {
this.replication = this.promisify(this.source.sync, opts)
@ -47,7 +47,7 @@ class Replication {
/**
* One way replication operation, intended to be promise based.
* @param {Object} opts - PouchDB replication options
* @param opts - PouchDB replication options
*/
replicate(opts = {}) {
this.replication = this.promisify(this.source.replicate.to, opts)

View File

@ -599,10 +599,10 @@ async function runQuery<T>(
* Gets round the fixed limit of 200 results from a query by fetching as many
* pages as required and concatenating the results. This recursively operates
* until enough results have been found.
* @param dbName {string} Which database to run a lucene query on
* @param index {string} Which search index to utilise
* @param query {object} The JSON query structure
* @param params {object} The search params including:
* @param dbName Which database to run a lucene query on
* @param index Which search index to utilise
* @param query The JSON query structure
* @param params The search params including:
* tableId {string} The table ID to search
* sort {string} The sort column
* sortOrder {string} The sort order ("ascending" or "descending")
@ -655,10 +655,10 @@ async function recursiveSearch<T>(
* Performs a paginated search. A bookmark will be returned to allow the next
* page to be fetched. There is a max limit of 200 results per page in a
* paginated search.
* @param dbName {string} Which database to run a lucene query on
* @param index {string} Which search index to utilise
* @param query {object} The JSON query structure
* @param params {object} The search params including:
* @param dbName Which database to run a lucene query on
* @param index Which search index to utilise
* @param query The JSON query structure
* @param params The search params including:
* tableId {string} The table ID to search
* sort {string} The sort column
* sortOrder {string} The sort order ("ascending" or "descending")
@ -722,10 +722,10 @@ export async function paginatedSearch<T>(
* desired amount of results. There is a limit of 1000 results to avoid
* heavy performance hits, and to avoid client components breaking from
* handling too much data.
* @param dbName {string} Which database to run a lucene query on
* @param index {string} Which search index to utilise
* @param query {object} The JSON query structure
* @param params {object} The search params including:
* @param dbName Which database to run a lucene query on
* @param index Which search index to utilise
* @param query The JSON query structure
* @param params The search params including:
* tableId {string} The table ID to search
* sort {string} The sort column
* sortOrder {string} The sort order ("ascending" or "descending")

View File

@ -45,7 +45,7 @@ export async function getAllDbs(opts = { efficient: false }) {
* Lots of different points in the system need to find the full list of apps, this will
* enumerate the entire CouchDB cluster and get the list of databases (every app).
*
* @return {Promise<object[]>} returns the app information document stored in each app database.
* @return returns the app information document stored in each app database.
*/
export async function getAllApps({
dev,

View File

@ -25,7 +25,7 @@ export function isDevApp(app: App) {
/**
* Generates a development app ID from a real app ID.
* @returns {string} the dev app ID which can be used for dev database.
* @returns the dev app ID which can be used for dev database.
*/
export function getDevelopmentAppID(appId: string) {
if (!appId || appId.startsWith(APP_DEV_PREFIX)) {

View File

@ -8,7 +8,7 @@ import { newid } from "./newid"
/**
* Generates a new app ID.
* @returns {string} The new app ID which the app doc can be stored under.
* @returns The new app ID which the app doc can be stored under.
*/
export const generateAppID = (tenantId?: string | null) => {
let id = APP_PREFIX
@ -20,9 +20,9 @@ export const generateAppID = (tenantId?: string | null) => {
/**
* Gets a new row ID for the specified table.
* @param {string} tableId The table which the row is being created for.
* @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
* @returns {string} The new ID which a row doc can be stored under.
* @param tableId The table which the row is being created for.
* @param id If an ID is to be used then the UUID can be substituted for this.
* @returns The new ID which a row doc can be stored under.
*/
export function generateRowID(tableId: string, id?: string) {
id = id || newid()
@ -31,7 +31,7 @@ export function generateRowID(tableId: string, id?: string) {
/**
* Generates a new workspace ID.
* @returns {string} The new workspace ID which the workspace doc can be stored under.
* @returns The new workspace ID which the workspace doc can be stored under.
*/
export function generateWorkspaceID() {
return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
@ -39,7 +39,7 @@ export function generateWorkspaceID() {
/**
* Generates a new global user ID.
* @returns {string} The new user ID which the user doc can be stored under.
* @returns The new user ID which the user doc can be stored under.
*/
export function generateGlobalUserID(id?: any) {
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
@ -52,8 +52,8 @@ export function isGlobalUserID(id: string) {
/**
* Generates a new user ID based on the passed in global ID.
* @param {string} globalId The ID of the global user.
* @returns {string} The new user ID which the user doc can be stored under.
* @param globalId The ID of the global user.
* @returns The new user ID which the user doc can be stored under.
*/
export function generateUserMetadataID(globalId: string) {
return generateRowID(InternalTable.USER_METADATA, globalId)
@ -84,7 +84,7 @@ export function generateAppUserID(prodAppId: string, userId: string) {
/**
* Generates a new role ID.
* @returns {string} The new role ID which the role doc can be stored under.
* @returns The new role ID which the role doc can be stored under.
*/
export function generateRoleID(name: string) {
const prefix = `${DocumentType.ROLE}${SEPARATOR}`
@ -103,7 +103,7 @@ export function prefixRoleID(name: string) {
/**
* Generates a new dev info document ID - this is scoped to a user.
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
* @returns The new dev info ID which info for dev (like api key) can be stored under.
*/
export const generateDevInfoID = (userId: any) => {
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
@ -111,7 +111,7 @@ export const generateDevInfoID = (userId: any) => {
/**
* Generates a new plugin ID - to be used in the global DB.
* @returns {string} The new plugin ID which a plugin metadata document can be stored under.
* @returns The new plugin ID which a plugin metadata document can be stored under.
*/
export const generatePluginID = (name: string) => {
return `${DocumentType.PLUGIN}${SEPARATOR}${name}`

View File

@ -12,12 +12,12 @@ import { getProdAppID } from "./conversions"
* is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
* More complex cases such as link docs and rows which have multiple levels of IDs that their
* ID consists of need their own functions to build the allDocs parameters.
* @param {string} docType The type of document which input params are being built for, e.g. user,
* @param docType The type of document which input params are being built for, e.g. user,
* link, app, table and so on.
* @param {string|null} docId The ID of the document minus its type - this is only needed if looking
* @param docId The ID of the document minus its type - this is only needed if looking
* for a singular document.
* @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
* @returns {object} Parameters which can then be used with an allDocs request.
* @param otherProps Add any other properties onto the request, e.g. include_docs.
* @returns Parameters which can then be used with an allDocs request.
*/
export function getDocParams(
docType: string,
@ -36,11 +36,11 @@ export function getDocParams(
/**
* Gets the DB allDocs/query params for retrieving a row.
* @param {string|null} tableId The table in which the rows have been stored.
* @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
* @param tableId The table in which the rows have been stored.
* @param rowId The ID of the row which is being specifically queried for. This can be
* left null to get all the rows in the table.
* @param {object} otherProps Any other properties to add to the request.
* @returns {object} Parameters which can then be used with an allDocs request.
* @param otherProps Any other properties to add to the request.
* @returns Parameters which can then be used with an allDocs request.
*/
export function getRowParams(
tableId?: string | null,

View File

@ -75,12 +75,12 @@ function getPackageJsonFields(): {
const content = readFileSync(packageJsonFile!, "utf-8")
const parsedContent = JSON.parse(content)
return {
VERSION: parsedContent.version,
VERSION: process.env.BUDIBASE_VERSION || parsedContent.version,
SERVICE_NAME: parsedContent.name,
}
} catch {
// throwing an error here is confusing/causes backend-core to be hard to import
return { VERSION: "", SERVICE_NAME: "" }
return { VERSION: process.env.BUDIBASE_VERSION || "", SERVICE_NAME: "" }
}
}

View File

@ -1,8 +1,8 @@
/**
* Makes sure that a URL has the correct number of slashes, while maintaining the
* http(s):// double slashes.
* @param {string} url The URL to test and remove any extra double slashes.
* @return {string} The updated url.
* @param url The URL to test and remove any extra double slashes.
* @return The updated url.
*/
export function checkSlashesInUrl(url: string) {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")

View File

@ -13,10 +13,10 @@ export const options = {
/**
* Passport Local Authentication Middleware.
* @param {*} ctx the request structure
* @param {*} email username to login with
* @param {*} password plain text password to log in with
* @param {*} done callback from passport to return user information and errors
* @param ctx the request structure
* @param email username to login with
* @param password plain text password to log in with
* @param done callback from passport to return user information and errors
* @returns The authenticated user, or errors if they occur
*/
export async function authenticate(

View File

@ -17,15 +17,15 @@ const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
/**
* @param {*} issuer The identity provider base URL
* @param {*} sub The user ID
* @param {*} profile The user profile information. Created by passport from the /userinfo response
* @param {*} jwtClaims The parsed id_token claims
* @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
* @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
* @param {*} idToken The id_token - always a JWT
* @param {*} params The response body from requesting an access_token
* @param {*} done The passport callback: err, user, info
* @param issuer The identity provider base URL
* @param sub The user ID
* @param profile The user profile information. Created by passport from the /userinfo response
* @param jwtClaims The parsed id_token claims
* @param accessToken The access_token for contacting the identity provider - may or may not be a JWT
* @param refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
* @param idToken The id_token - always a JWT
* @param params The response body from requesting an access_token
* @param done The passport callback: err, user, info
*/
return async (
issuer: string,
@ -61,8 +61,8 @@ export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
}
/**
* @param {*} profile The structured profile created by passport using the user info endpoint
* @param {*} jwtClaims The claims returned in the id token
* @param profile The structured profile created by passport using the user info endpoint
* @param jwtClaims The claims returned in the id token
*/
function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
// profile not guaranteed to contain email e.g. github connected azure ad account

View File

@ -5,9 +5,9 @@ import { ConfigType, GoogleInnerConfig } from "@budibase/types"
/**
* Utility to handle authentication errors.
*
* @param {*} done The passport callback.
* @param {*} message Message that will be returned in the response body
* @param {*} err (Optional) error that will be logged
* @param done The passport callback.
* @param message Message that will be returned in the response body
* @param err (Optional) error that will be logged
*/
export function authError(done: Function, message: string, err?: any) {

View File

@ -6,10 +6,10 @@ import * as cloudfront from "../cloudfront"
* In production the client library is stored in the object store, however in development
* we use the symlinked version produced by lerna, located in node modules. We link to this
* via a specific endpoint (under /api/assets/client).
* @param {string} appId In production we need the appId to look up the correct bucket, as the
* @param appId In production we need the appId to look up the correct bucket, as the
* version of the client lib may differ between apps.
* @param {string} version The version to retrieve.
* @return {string} The URL to be inserted into appPackage response or server rendered
* @param version The version to retrieve.
* @return The URL to be inserted into appPackage response or server rendered
* app index file.
*/
export const clientLibraryUrl = (appId: string, version: string) => {

View File

@ -61,9 +61,9 @@ export function sanitizeBucket(input: string) {
/**
* Gets a connection to the object store using the S3 SDK.
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
* @param {object} opts configuration for the object store.
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
* @param bucket the name of the bucket which blobs will be uploaded/retrieved from.
* @param opts configuration for the object store.
* @return an S3 object store object, check S3 Nodejs SDK for usage.
* @constructor
*/
export const ObjectStore = (

View File

@ -5,9 +5,9 @@ import { timeout } from "../utils"
* Bull works with a Job wrapper around all messages that contains a lot more information about
* the state of the message, this object constructor implements the same schema of Bull jobs
* for the sake of maintaining API consistency.
* @param {string} queue The name of the queue which the message will be carried on.
* @param {object} message The JSON message which will be passed back to the consumer.
* @returns {Object} A new job which can now be put onto the queue, this is mostly an
* @param queue The name of the queue which the message will be carried on.
* @param message The JSON message which will be passed back to the consumer.
* @returns A new job which can now be put onto the queue, this is mostly an
* internal structure so that an in memory queue can be easily swapped for a Bull queue.
*/
function newJob(queue: string, message: any) {
@ -32,8 +32,8 @@ class InMemoryQueue {
_addCount: number
/**
* The constructor the queue, exactly the same as that of Bulls.
* @param {string} name The name of the queue which is being configured.
* @param {object|null} opts This is not used by the in memory queue as there is no real use
* @param name The name of the queue which is being configured.
* @param opts This is not used by the in memory queue as there is no real use
* case when in memory, but is the same API as Bull
*/
constructor(name: string, opts = null) {
@ -49,7 +49,7 @@ class InMemoryQueue {
* Same callback API as Bull, each callback passed to this will consume messages as they are
* available. Please note this is a queue service, not a notification service, so each
* consumer will receive different messages.
* @param {function<object>} func The callback function which will return a "Job", the same
* @param func The callback function which will return a "Job", the same
* as the Bull API, within this job the property "data" contains the JSON message. Please
* note this is incredibly limited compared to Bull as in reality the Job would contain
* a lot more information about the queue and current status of Bull cluster.
@ -73,9 +73,9 @@ class InMemoryQueue {
* Simple function to replicate the add message functionality of Bull, putting
* a new message on the queue. This then emits an event which will be used to
* return the message to a consumer (if one is attached).
* @param {object} msg A message to be transported over the queue, this should be
* @param msg A message to be transported over the queue, this should be
* a JSON message as this is required by Bull.
* @param {boolean} repeat serves no purpose for the import queue.
* @param repeat serves no purpose for the import queue.
*/
// eslint-disable-next-line no-unused-vars
add(msg: any, repeat: boolean) {
@ -96,7 +96,7 @@ class InMemoryQueue {
/**
* This removes a cron which has been implemented, this is part of Bull API.
* @param {string} cronJobId The cron which is to be removed.
* @param cronJobId The cron which is to be removed.
*/
removeRepeatableByKey(cronJobId: string) {
// TODO: implement for testing

View File

@ -142,7 +142,7 @@ function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
* this can only be done with redis streams because they will have an end.
* @param stream A redis stream, specifically as this type of stream will have an end.
* @param client The client to use for further lookups.
* @return {Promise<object>} The final output of the stream
* @return The final output of the stream
*/
function promisifyStream(stream: any, client: RedisWrapper) {
return new Promise((resolve, reject) => {

View File

@ -36,8 +36,8 @@ export function levelToNumber(perm: PermissionLevel) {
/**
* Given the specified permission level for the user return the levels they are allowed to carry out.
* @param {string} userPermLevel The permission level of the user.
* @return {string[]} All the permission levels this user is allowed to carry out.
* @param userPermLevel The permission level of the user.
* @return All the permission levels this user is allowed to carry out.
*/
export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
switch (userPermLevel) {

View File

@ -149,9 +149,9 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
/**
* Gets the role object, this is mainly useful for two purposes, to check if the level exists and
* to check if the role inherits any others.
* @param {string|null} roleId The level ID to lookup.
* @param {object|null} opts options for the function, like whether to halt errors, instead return public.
* @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
* @param roleId The level ID to lookup.
* @param opts options for the function, like whether to halt errors, instead return public.
* @returns The role object, which may contain an "inherits" property.
*/
export async function getRole(
roleId?: string,
@ -225,8 +225,8 @@ export async function getUserRoleIdHierarchy(
/**
* Returns an ordered array of the user's inherited role IDs, this can be used
* to determine if a user can access something that requires a specific role.
* @param {string} userRoleId The user's role ID, this can be found in their access token.
* @returns {Promise<object[]>} returns an ordered array of the roles, with the first being their
* @param userRoleId The user's role ID, this can be found in their access token.
* @returns returns an ordered array of the roles, with the first being their
* highest level of access and the last being the lowest level.
*/
export async function getUserRoleHierarchy(userRoleId?: string) {
@ -258,7 +258,7 @@ export async function getAllRoleIds(appId?: string) {
/**
* Given an app ID this will retrieve all of the roles that are currently within that app.
* @return {Promise<object[]>} An array of the role objects that were found.
* @return An array of the role objects that were found.
*/
export async function getAllRoles(appId?: string): Promise<RoleDoc[]> {
if (appId) {

View File

@ -21,7 +21,6 @@ import {
User,
UserStatus,
UserGroup,
ContextUser,
} from "@budibase/types"
import {
getAccountHolderFromUserIds,
@ -135,7 +134,7 @@ export class UserDB {
if (!fullUser.roles) {
fullUser.roles = {}
}
// add the active status to a user if its not provided
// add the active status to a user if it's not provided
if (fullUser.status == null) {
fullUser.status = UserStatus.ACTIVE
}
@ -160,14 +159,14 @@ export class UserDB {
}
}
static async getUsersByAppAccess(appId?: string) {
const opts: any = {
static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
const params: any = {
include_docs: true,
limit: 50,
limit: opts.limit || 50,
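// page size is now caller-configurable; the previous hard-coded 50 remains the default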
}
let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
appId,
opts
opts.appId,
params
)
return response
}

View File

@ -19,9 +19,11 @@ import {
SearchQueryOperators,
SearchUsersRequest,
User,
DatabaseQueryOpts,
} from "@budibase/types"
import * as context from "../context"
import { getGlobalDB } from "../context"
import * as context from "../context"
import { isCreator } from "./utils"
type GetOpts = { cleanup?: boolean }
@ -240,12 +242,14 @@ export const paginatedUsers = async ({
bookmark,
query,
appId,
limit,
}: SearchUsersRequest = {}) => {
const db = getGlobalDB()
const pageLimit = limit ? limit + 1 : PAGE_LIMIT + 1
// get one extra document, to have the next page
const opts: any = {
const opts: DatabaseQueryOpts = {
include_docs: true,
limit: PAGE_LIMIT + 1,
limit: pageLimit,
}
// add a startkey if the page was specified (anchor)
if (bookmark) {
@ -268,7 +272,7 @@ export const paginatedUsers = async ({
const response = await db.allDocs(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc)
}
return pagination(userList, PAGE_LIMIT, {
return pagination(userList, pageLimit, {
paginate: true,
property,
getKey,
@ -283,6 +287,19 @@ export async function getUserCount() {
return response.total_rows
}
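// Counts users that qualify as creators by walking every page of the
// paginated user list and tallying those matching isCreator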
export async function getCreatorCount() {
let creators = 0
async function iterate(startPage?: string) {
const page = await paginatedUsers({ bookmark: startPage })
creators += page.data.filter(isCreator).length
if (page.hasNextPage) {
await iterate(page.nextPage)
}
}
await iterate()
return creators
}
// used to remove the builder/admin permissions, for processing the
// user as an app user (they may have some specific role/group
export function removePortalUserPermissions(user: User | ContextUser) {

View File

@ -10,6 +10,7 @@ import { getAccountByTenantId } from "../accounts"
// extract from shared-core to make easily accessible from backend-core
export const isBuilder = sdk.users.isBuilder
export const isAdmin = sdk.users.isAdmin
export const isCreator = sdk.users.isCreator
export const isGlobalBuilder = sdk.users.isGlobalBuilder
export const isAdminOrBuilder = sdk.users.isAdminOrBuilder
export const hasAdminPermissions = sdk.users.hasAdminPermissions

View File

@ -79,8 +79,8 @@ export function isPublicApiRequest(ctx: Ctx): boolean {
/**
* Given a request tries to find the appId, which can be located in various places
* @param {object} ctx The main request body to look through.
* @returns {string|undefined} If an appId was found it will be returned.
* @param ctx The main request body to look through.
* @returns If an appId was found it will be returned.
*/
export async function getAppIdFromCtx(ctx: Ctx) {
// look in headers
@ -135,7 +135,7 @@ function parseAppIdFromUrl(url?: string) {
/**
* opens the contents of the specified encrypted JWT.
* @return {object} the contents of the token.
* @return the contents of the token.
*/
export function openJwt(token: string) {
if (!token) {
@ -169,8 +169,8 @@ export function isValidInternalAPIKey(apiKey: string) {
/**
* Get a cookie from context, and decrypt if necessary.
* @param {object} ctx The request which is to be manipulated.
* @param {string} name The name of the cookie to get.
* @param ctx The request which is to be manipulated.
* @param name The name of the cookie to get.
*/
export function getCookie(ctx: Ctx, name: string) {
const cookie = ctx.cookies.get(name)
@ -184,10 +184,10 @@ export function getCookie(ctx: Ctx, name: string) {
/**
* Store a cookie for the request - it will not expire.
* @param {object} ctx The request which is to be manipulated.
* @param {string} name The name of the cookie to set.
* @param {string|object} value The value of cookie which will be set.
* @param {object} opts options like whether to sign.
* @param ctx The request which is to be manipulated.
* @param name The name of the cookie to set.
* @param value The value of cookie which will be set.
* @param opts options like whether to sign.
*/
export function setCookie(
ctx: Ctx,
@ -223,8 +223,8 @@ export function clearCookie(ctx: Ctx, name: string) {
/**
* Checks if the API call being made (based on the provided ctx object) is from the client. If
* the call is not from a client app then it is from the builder.
* @param {object} ctx The koa context object to be tested.
* @return {boolean} returns true if the call is from the client lib (a built app rather than the builder).
* @param ctx The koa context object to be tested.
* @return returns true if the call is from the client lib (a built app rather than the builder).
*/
export function isClient(ctx: Ctx) {
return ctx.headers[Header.TYPE] === "client"

View File

@ -72,6 +72,11 @@ export function quotas(): Quotas {
value: 1,
triggers: [],
},
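// creators are now tracked as a quota of their own, alongside users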
creators: {
name: "Creators",
value: 1,
triggers: [],
},
userGroups: {
name: "User Groups",
value: 1,

View File

@ -1,6 +1,6 @@
import { MonthlyQuotaName, QuotaUsage } from "@budibase/types"
export const usage = (): QuotaUsage => {
export const usage = (users: number = 0, creators: number = 0): QuotaUsage => {
return {
_id: "usage_quota",
quotaReset: new Date().toISOString(),
@ -58,7 +58,8 @@ export const usage = (): QuotaUsage => {
usageQuota: {
apps: 0,
plugins: 0,
users: 0,
users,
creators,
userGroups: 0,
rows: 0,
triggers: {},

View File

@ -106,6 +106,13 @@
name: fieldName,
}
}
// Delete numeric only widths as these are grid widths and should be
// ignored
const width = fixedSchema[fieldName].width
if (width != null && `${width}`.trim().match(/^[0-9]+$/)) {
delete fixedSchema[fieldName].width
}
})
return fixedSchema
}

View File

@ -64,7 +64,6 @@
"@fortawesome/fontawesome-svg-core": "^6.2.1",
"@fortawesome/free-brands-svg-icons": "^6.2.1",
"@fortawesome/free-solid-svg-icons": "^6.2.1",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",
"codemirror": "^5.59.0",

View File

@ -1,37 +0,0 @@
import * as Sentry from "@sentry/browser"
export default class SentryClient {
constructor(dsn) {
this.dsn = dsn
}
init() {
if (this.dsn) {
Sentry.init({ dsn: this.dsn })
this.initalised = true
}
}
/**
* Capture an exception and send it to sentry.
* @param {Error} err - JS error object
*/
captureException(err) {
if (!this.initalised) return
Sentry.captureException(err)
}
/**
* Identify user in sentry.
* @param {String} id - Unique user id
*/
identify(id) {
if (!this.initalised) return
Sentry.configureScope(scope => {
scope.setUser({ id })
})
}
}

View File

@ -1,16 +1,14 @@
import { API } from "api"
import PosthogClient from "./PosthogClient"
import IntercomClient from "./IntercomClient"
import SentryClient from "./SentryClient"
import { Events, EventSource } from "./constants"
const posthog = new PosthogClient(process.env.POSTHOG_TOKEN)
const sentry = new SentryClient(process.env.SENTRY_DSN)
const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
class AnalyticsHub {
constructor() {
this.clients = [posthog, sentry, intercom]
this.clients = [posthog, intercom]
}
async activate() {
@ -23,12 +21,9 @@ class AnalyticsHub {
identify(id) {
posthog.identify(id)
sentry.identify(id)
}
captureException(err) {
sentry.captureException(err)
}
captureException(_err) {}
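// retained as a no-op so existing call sites keep working after the Sentry removal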
captureEvent(eventName, props = {}) {
posthog.captureEvent(eventName, props)

View File

@ -3,13 +3,10 @@
import { goto, params } from "@roxi/routify"
import { Table, Heading, Layout } from "@budibase/bbui"
import Spinner from "components/common/Spinner.svelte"
import {
TableNames,
UNEDITABLE_USER_FIELDS,
UNSORTABLE_TYPES,
} from "constants"
import { TableNames, UNEDITABLE_USER_FIELDS } from "constants"
import RoleCell from "./cells/RoleCell.svelte"
import { createEventDispatcher } from "svelte"
import { canBeSortColumn } from "@budibase/shared-core"
export let schema = {}
export let data = []
@ -32,12 +29,10 @@
$: isUsersTable = tableId === TableNames.USERS
$: data && resetSelectedRows()
$: {
UNSORTABLE_TYPES.forEach(type => {
Object.values(schema || {}).forEach(col => {
if (col.type === type) {
col.sortable = false
}
})
Object.values(schema || {}).forEach(col => {
if (!canBeSortColumn(col.type)) {
col.sortable = false
}
})
}
$: {

View File

@ -16,7 +16,11 @@
<DrawerContent>
<div class="container">
<Layout noPadding gap="S">
<Input bind:value={column.width} label="Width" placeholder="Auto" />
<Input
bind:value={column.width}
label="Width (must include a unit like px or %)"
placeholder="Auto"
/>
<Select
label="Alignment"
bind:value={column.align}

View File

@ -1,5 +1,9 @@
<script>
import { getContextProviderComponents } from "builderStore/dataBinding"
import {
getContextProviderComponents,
readableToRuntimeBinding,
runtimeToReadableBinding,
} from "builderStore/dataBinding"
import {
Button,
Popover,
@ -9,6 +13,11 @@
Heading,
Drawer,
DrawerContent,
Icon,
Modal,
ModalContent,
CoreDropzone,
notifications,
} from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import { store, currentAsset } from "builderStore"
@ -22,6 +31,8 @@
import BindingBuilder from "components/integration/QueryBindingBuilder.svelte"
import IntegrationQueryEditor from "components/integration/index.svelte"
import { makePropSafe as safe } from "@budibase/string-templates"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import { API } from "api"
export let value = {}
export let otherSources
@ -31,9 +42,13 @@
const dispatch = createEventDispatcher()
const arrayTypes = ["attachment", "array"]
let anchorRight, dropdownRight
let drawer
let tmpQueryParams
let tmpCustomData
let customDataValid = true
let modal
$: text = value?.label ?? "Choose an option"
$: tables = $tablesStore.list.map(m => ({
@ -125,6 +140,10 @@
value: `{{ literal ${runtimeBinding} }}`,
}
})
$: custom = {
type: "custom",
label: "JSON / CSV",
}
const handleSelected = selected => {
dispatch("change", selected)
@ -151,6 +170,11 @@
drawer.show()
}
const openCustomDrawer = () => {
tmpCustomData = runtimeToReadableBinding(bindings, value.data || "")
drawer.show()
}
const getQueryValue = queries => {
return queries.find(q => q._id === value._id) || value
}
@ -162,6 +186,35 @@
})
drawer.hide()
}
const saveCustomData = () => {
handleSelected({
...value,
data: readableToRuntimeBinding(bindings, tmpCustomData),
})
drawer.hide()
}
const promptForCSV = () => {
drawer.hide()
modal.show()
}
const handleCSV = async e => {
try {
const csv = await e.detail[0]?.text()
if (csv?.length) {
const js = await API.csvToJson(csv)
tmpCustomData = JSON.stringify(js)
}
modal.hide()
saveCustomData()
} catch (error) {
notifications.error("Failed to parse CSV")
modal.hide()
drawer.show()
}
}
</script>
<div class="container" bind:this={anchorRight}>
@ -172,7 +225,9 @@
on:click={dropdownRight.show}
/>
{#if value?.type === "query"}
<i class="ri-settings-5-line" on:click={openQueryParamsDrawer} />
<div class="icon">
<Icon hoverable name="Settings" on:click={openQueryParamsDrawer} />
</div>
<Drawer title={"Query Bindings"} bind:this={drawer}>
<Button slot="buttons" cta on:click={saveQueryParams}>Save</Button>
<DrawerContent slot="body">
@ -198,6 +253,29 @@
</DrawerContent>
</Drawer>
{/if}
{#if value?.type === "custom"}
<div class="icon">
<Icon hoverable name="Settings" on:click={openCustomDrawer} />
</div>
<Drawer title="Custom data" bind:this={drawer}>
<div slot="buttons" style="display:contents">
<Button primary on:click={promptForCSV}>Load CSV</Button>
<Button cta on:click={saveCustomData} disabled={!customDataValid}>
Save
</Button>
</div>
<div slot="description">Provide a JSON array to use as data</div>
<ClientBindingPanel
slot="body"
bind:valid={customDataValid}
value={tmpCustomData}
on:change={event => (tmpCustomData = event.detail)}
{bindings}
allowJS
allowHelpers
/>
</Drawer>
{/if}
</div>
<Popover bind:this={dropdownRight} anchor={anchorRight}>
<div class="dropdown">
@ -285,20 +363,27 @@
{/each}
</ul>
{/if}
{#if otherSources?.length}
<Divider />
<div class="title">
<Heading size="XS">Other</Heading>
</div>
<ul>
<Divider />
<div class="title">
<Heading size="XS">Other</Heading>
</div>
<ul>
<li on:click={() => handleSelected(custom)}>{custom.label}</li>
{#if otherSources?.length}
{#each otherSources as source}
<li on:click={() => handleSelected(source)}>{source.label}</li>
{/each}
</ul>
{/if}
{/if}
</ul>
</div>
</Popover>
<Modal bind:this={modal}>
<ModalContent title="Load CSV" showConfirmButton={false}>
<CoreDropzone compact extensions=".csv" on:change={handleCSV} />
</ModalContent>
</Modal>
<style>
.container {
display: flex;
@ -340,16 +425,7 @@
background-color: var(--spectrum-global-color-gray-200);
}
i {
margin-left: 5px;
display: flex;
align-items: center;
transition: all 0.2s;
}
i:hover {
transform: scale(1.1);
font-weight: 600;
cursor: pointer;
.icon {
margin-left: 8px;
}
</style>

View File

@ -6,7 +6,7 @@
} from "builderStore/dataBinding"
import { currentAsset } from "builderStore"
import { createEventDispatcher } from "svelte"
import { UNSORTABLE_TYPES } from "constants"
import { canBeSortColumn } from "@budibase/shared-core"
export let componentInstance = {}
export let value = ""
@ -20,7 +20,7 @@
const getSortableFields = schema => {
return Object.entries(schema || {})
.filter(entry => !UNSORTABLE_TYPES.includes(entry[1].type))
.filter(entry => canBeSortColumn(entry[1].type))
.map(entry => entry[0])
}

View File

@ -34,8 +34,6 @@ export const UNEDITABLE_USER_FIELDS = [
"lastName",
]
export const UNSORTABLE_TYPES = ["formula", "attachment", "array", "link"]
export const LAYOUT_NAMES = {
MASTER: {
PRIVATE: "layout_private_master",

View File

@ -114,8 +114,9 @@
query: {
appId: query || !filterByAppAccess ? null : prodAppId,
email: query,
paginated: query || !filterByAppAccess ? null : false,
},
limit: 50,
paginate: query || !filterByAppAccess ? null : false,
})
await usersFetch.refresh()

View File

@ -23,5 +23,7 @@
</script>
{#key $params.datasourceId}
<slot />
{#if $datasources.selected}
<slot />
{/if}
{/key}

View File

@ -16,8 +16,7 @@
let selectedPanel = null
let panelOptions = []
// datasources.selected can return null temporarily on datasource deletion
$: datasource = $datasources.selected || {}
$: datasource = $datasources.selected
$: getOptions(datasource)

View File

@ -1,5 +1,5 @@
<script>
import { isEmpty } from "lodash/fp"
import { helpers } from "@budibase/shared-core"
import { Input, DetailSummary, notifications } from "@budibase/bbui"
import { store } from "builderStore"
import PropertyControl from "components/design/settings/controls/PropertyControl.svelte"
@ -70,41 +70,43 @@
}
const shouldDisplay = (instance, setting) => {
// Parse dependant settings
if (setting.dependsOn) {
let dependantSetting = setting.dependsOn
let dependantValue = null
let invert = !!setting.dependsOn.invert
if (typeof setting.dependsOn === "object") {
dependantSetting = setting.dependsOn.setting
dependantValue = setting.dependsOn.value
let dependsOn = setting.dependsOn
if (dependsOn && !Array.isArray(dependsOn)) {
dependsOn = [dependsOn]
}
if (!dependsOn?.length) {
return true
}
// Ensure all conditions are met
return dependsOn.every(condition => {
let dependantSetting = condition
let dependantValues = null
let invert = !!condition.invert
if (typeof condition === "object") {
dependantSetting = condition.setting
dependantValues = condition.value
}
if (!dependantSetting) {
return false
}
// If no specific value is depended upon, check if a value exists at all
// for the dependent setting
if (dependantValue == null) {
const currentValue = instance[dependantSetting]
if (currentValue === false) {
return false
}
if (currentValue === true) {
return true
}
return !isEmpty(currentValue)
// Ensure values is an array
if (!Array.isArray(dependantValues)) {
dependantValues = [dependantValues]
}
// Otherwise check the value matches
if (invert) {
return instance[dependantSetting] !== dependantValue
} else {
return instance[dependantSetting] === dependantValue
}
}
return typeof setting.visible == "boolean" ? setting.visible : true
// If inverting, we want to ensure that we don't have any matches.
// If not inverting, we want to ensure that we do have any matches.
const currentVal = helpers.deepGet(instance, dependantSetting)
const anyMatches = dependantValues.some(dependantVal => {
if (dependantVal == null) {
return currentVal != null && currentVal !== false && currentVal !== ""
}
return dependantVal === currentVal
})
return anyMatches !== invert
})
}
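// Note: the rewrite above generalises dependsOn - a setting may declare
// several conditions, each condition may accept several values, and
// "invert" flips the match. A sketch of one condition in the new shape
// (the grid block manifest later in this commit uses this exact form):
//   "dependsOn": { "setting": "table.type", "value": ["table", "viewV2"] }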
const canRenderControl = (instance, setting, isScreen) => {

View File

@ -43,7 +43,7 @@
})
</script>
<TestimonialPage>
<TestimonialPage enabled={$organisation.testimonialsEnabled}>
<Layout gap="S" noPadding>
<img alt="logo" src={$organisation.logoUrl || Logo} />
<span class="heading-wrap">

View File

@ -53,7 +53,7 @@
})
</script>
<TestimonialPage>
<TestimonialPage enabled={$organisation.testimonialsEnabled}>
<Layout gap="S" noPadding>
{#if loaded}
<img alt="logo" src={$organisation.logoUrl || Logo} />

View File

@ -81,9 +81,9 @@ export function createDatasourcesStore() {
}))
}
const updateDatasource = response => {
const updateDatasource = (response, { ignoreErrors } = {}) => {
const { datasource, errors } = response
if (errors && Object.keys(errors).length > 0) {
if (!ignoreErrors && errors && Object.keys(errors).length > 0) {
throw new TableImportError(errors)
}
replaceDatasource(datasource._id, datasource)
@ -137,7 +137,7 @@ export function createDatasourcesStore() {
fetchSchema: integration.plus,
})
return updateDatasource(response)
return updateDatasource(response, { ignoreErrors: true })
}
const update = async ({ integration, datasource }) => {

View File

@ -80,7 +80,6 @@ export default defineConfig(({ mode }) => {
"process.env.INTERCOM_TOKEN": JSON.stringify(
process.env.INTERCOM_TOKEN
),
"process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
}),
copyFonts("fonts"),
...(isProduction ? [] : devOnlyPlugins),

View File

@ -5556,10 +5556,9 @@
"width": 600,
"height": 400
},
"info": "Grid Blocks are only compatible with internal or SQL tables",
"settings": [
{
"type": "table",
"type": "dataSource",
"label": "Data",
"key": "table",
"required": true
@ -5568,18 +5567,35 @@
"type": "columns/grid",
"label": "Columns",
"key": "columns",
"dependsOn": "table"
"dependsOn": [
"table",
{
"setting": "table.type",
"value": "custom",
"invert": true
}
]
},
{
"type": "filter",
"label": "Filtering",
"key": "initialFilter"
"key": "initialFilter",
"dependsOn": {
"setting": "table.type",
"value": "custom",
"invert": true
}
},
{
"type": "field/sortable",
"label": "Sort column",
"key": "initialSortColumn",
"placeholder": "Default"
"placeholder": "Default",
"dependsOn": {
"setting": "table.type",
"value": "custom",
"invert": true
}
},
{
"type": "select",
@ -5618,29 +5634,37 @@
"label": "Clicked row",
"key": "row"
}
],
"dependsOn": {
"setting": "allowEditRows",
"value": false
}
]
},
{
"type": "boolean",
"label": "Add rows",
"key": "allowAddRows",
"defaultValue": true
"defaultValue": true,
"dependsOn": {
"setting": "table.type",
"value": ["table", "viewV2"]
}
},
{
"type": "boolean",
"label": "Edit rows",
"key": "allowEditRows",
"defaultValue": true
"defaultValue": true,
"dependsOn": {
"setting": "table.type",
"value": ["table", "viewV2"]
}
},
{
"type": "boolean",
"label": "Delete rows",
"key": "allowDeleteRows",
"defaultValue": true
"defaultValue": true,
"dependsOn": {
"setting": "table.type",
"value": ["table", "viewV2"]
}
},
{
"type": "boolean",

View File

@ -32,7 +32,7 @@ export const API = createAPIClient({
},
// Show an error notification for all API failures.
// We could also log these to sentry.
// We could also log these to Posthog.
// Or we could check error.status and redirect to login on a 403 etc.
onError: error => {
const { status, method, url, message, handled, suppressErrors } =

View File

@ -81,6 +81,7 @@
sortOrder: $fetch.sortOrder,
},
limit,
primaryDisplay: $fetch.definition?.primaryDisplay,
}
const createFetch = datasource => {

View File

@ -4,6 +4,7 @@
import { getContext } from "svelte"
import { Grid } from "@budibase/frontend-core"
// table is actually any datasource, but called table for legacy compatibility
export let table
export let allowAddRows = true
export let allowEditRows = true
@ -21,7 +22,6 @@
$: columnWhitelist = columns?.map(col => col.name)
$: schemaOverrides = getSchemaOverrides(columns)
$: handleRowClick = allowEditRows ? undefined : onRowClick
const getSchemaOverrides = columns => {
let overrides = {}
@ -58,7 +58,7 @@
showControls={false}
notifySuccess={notificationStore.actions.success}
notifyError={notificationStore.actions.error}
on:rowclick={e => handleRowClick?.({ row: e.detail })}
on:rowclick={e => onRowClick?.({ row: e.detail })}
/>
</div>

View File

@ -2,8 +2,8 @@
import { getContext } from "svelte"
import { Table } from "@budibase/bbui"
import SlotRenderer from "./SlotRenderer.svelte"
import { UnsortableTypes } from "../../../constants"
import { onDestroy } from "svelte"
import { canBeSortColumn } from "@budibase/shared-core"
export let dataProvider
export let columns
@ -32,7 +32,8 @@
$: loading = dataProvider?.loading ?? false
$: data = dataProvider?.rows || []
$: fullSchema = dataProvider?.schema ?? {}
$: fields = getFields(fullSchema, columns, false)
$: primaryDisplay = dataProvider?.primaryDisplay
$: fields = getFields(fullSchema, columns, false, primaryDisplay)
$: schema = getFilteredSchema(fullSchema, fields, hasChildren)
$: setSorting = getAction(
dataProvider?.id,
@ -55,18 +56,13 @@
}
}
const getFields = (schema, customColumns, showAutoColumns) => {
// Check for an invalid column selection
let invalid = false
customColumns?.forEach(column => {
const columnName = typeof column === "string" ? column : column.name
if (schema[columnName] == null) {
invalid = true
}
})
// Use column selection if it exists
if (!invalid && customColumns?.length) {
const getFields = (
schema,
customColumns,
showAutoColumns,
primaryDisplay
) => {
if (customColumns?.length) {
return customColumns
}
@ -74,13 +70,38 @@
let columns = []
let autoColumns = []
Object.entries(schema).forEach(([field, fieldSchema]) => {
if (fieldSchema.visible === false) {
return
}
if (!fieldSchema?.autocolumn) {
columns.push(field)
} else if (showAutoColumns) {
autoColumns.push(field)
}
})
return columns.concat(autoColumns)
// Sort columns to respect grid metadata
const allCols = columns.concat(autoColumns)
return allCols.sort((a, b) => {
if (a === primaryDisplay) {
return -1
}
if (b === primaryDisplay) {
return 1
}
const aOrder = schema[a].order
const bOrder = schema[b].order
if (aOrder === bOrder) {
return 0
}
if (aOrder == null) {
return 1
}
if (bOrder == null) {
return -1
}
return aOrder < bOrder ? -1 : 1
})
}
const getFilteredSchema = (schema, fields, hasChildren) => {
@ -102,7 +123,7 @@
return
}
newSchema[columnName] = schema[columnName]
if (UnsortableTypes.includes(schema[columnName].type)) {
if (!canBeSortColumn(schema[columnName].type)) {
newSchema[columnName].sortable = false
}
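
The comparator added above pins the primary display column first, then orders by explicit order values, pushing columns without an order to the end. A quick worked example of that comparator with sample data:

const schema: Record<string, { order?: number }> = {
  name: {},           // primary display
  city: { order: 1 },
  age: { order: 2 },
  notes: {},          // no explicit order
}
const primaryDisplay = "name"
const sorted = Object.keys(schema).sort((a, b) => {
  if (a === primaryDisplay) return -1
  if (b === primaryDisplay) return 1
  const aOrder = schema[a].order
  const bOrder = schema[b].order
  if (aOrder === bOrder) return 0
  if (aOrder == null) return 1
  if (bOrder == null) return -1
  return aOrder < bOrder ? -1 : 1
})
console.log(sorted) // ["name", "city", "age", "notes"]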

View File

@ -1,13 +1,5 @@
import { FieldType as FieldTypes } from "@budibase/types"
export { FieldType as FieldTypes } from "@budibase/types"
export const UnsortableTypes = [
FieldTypes.FORMULA,
FieldTypes.ATTACHMENT,
FieldTypes.ARRAY,
FieldTypes.LINK,
]
export const ActionTypes = {
ValidateForm: "ValidateForm",
UpdateFieldValue: "UpdateFieldValue",

View File

@ -9,7 +9,9 @@ export const buildRelationshipEndpoints = API => ({
if (!tableId || !rowId) {
return []
}
const response = await API.get({ url: `/api/${tableId}/${rowId}/enrich` })
const response = await API.get({
url: `/api/${tableId}/${rowId}/enrich?field=${fieldName}`,
})
if (!fieldName) {
return response || []
} else {
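
Passing the field query parameter lets the server enrich just the requested relationship column instead of every link on the row. A hypothetical call, assuming the endpoint is exposed to callers as fetchRelationshipData:

// All identifiers here are illustrative
const loadRelated = async (API: any) => {
  return await API.fetchRelationshipData({
    tableId: "ta_orders",
    rowId: "ro_123",
    fieldName: "Line Items", // only this relationship gets enriched
  })
}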

View File

@ -34,7 +34,7 @@
column.schema.autocolumn ||
column.schema.disabled ||
column.schema.type === "formula" ||
(!$config.canEditRows && row._id)
(!$config.canEditRows && !row._isNewRow)
// Register this cell API if the row is focused
$: {

View File

@ -1,6 +1,6 @@
<script>
import { getContext, onMount, tick } from "svelte"
import { canBeDisplayColumn } from "@budibase/shared-core"
import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core"
import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui"
import GridCell from "./GridCell.svelte"
import { getColumnIcon } from "../lib/utils"
@ -23,6 +23,7 @@
columns,
definition,
datasource,
schema,
} = getContext("grid")
let anchor
@ -119,16 +120,16 @@
// Generate new name
let newName = `${column.name} copy`
let attempts = 2
while ($definition.schema[newName]) {
while ($schema[newName]) {
newName = `${column.name} copy ${attempts++}`
}
// Save schema with new column
const existingColumnDefinition = $definition.schema[column.name]
const existingColumnDefinition = $schema[column.name]
await datasource.actions.saveDefinition({
...$definition,
schema: {
...$definition.schema,
...$schema,
[newName]: {
...existingColumnDefinition,
name: newName,
@ -231,14 +232,16 @@
<MenuItem
icon="SortOrderUp"
on:click={sortAscending}
disabled={column.name === $sort.column && $sort.order === "ascending"}
disabled={!canBeSortColumn(column.schema.type) ||
(column.name === $sort.column && $sort.order === "ascending")}
>
Sort {ascendingLabel}
</MenuItem>
<MenuItem
icon="SortOrderDown"
on:click={sortDescending}
disabled={column.name === $sort.column && $sort.order === "descending"}
disabled={!canBeSortColumn(column.schema.type) ||
(column.name === $sort.column && $sort.order === "descending")}
>
Sort {descendingLabel}
</MenuItem>

View File

@ -260,29 +260,31 @@
class:wrap={editable || contentLines > 1}
on:wheel={e => (focused ? e.stopPropagation() : null)}
>
{#each value || [] as relationship}
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
<div class="badge">
<span
on:click={editable
? () => showRelationship(relationship._id)
: null}
>
{readable(
relationship[primaryDisplay] || relationship.primaryDisplay
)}
</span>
{#if editable}
<Icon
name="Close"
size="XS"
hoverable
on:click={() => toggleRow(relationship)}
/>
{/if}
</div>
{/if}
{/each}
{#if Array.isArray(value) && value.length}
{#each value as relationship}
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
<div class="badge">
<span
on:click={editable
? () => showRelationship(relationship._id)
: null}
>
{readable(
relationship[primaryDisplay] || relationship.primaryDisplay
)}
</span>
{#if editable}
<Icon
name="Close"
size="XS"
hoverable
on:click={() => toggleRow(relationship)}
/>
{/if}
</div>
{/if}
{/each}
{/if}
{#if editable}
<div class="add" on:click={open}>
<Icon name="Add" size="S" />
@ -318,7 +320,7 @@
<div class="searching">
<ProgressCircle size="S" />
</div>
{:else if searchResults?.length}
{:else if Array.isArray(searchResults) && searchResults.length}
<div class="results">
{#each searchResults as row, idx}
<div

View File

@ -1,6 +1,7 @@
<script>
import { getContext } from "svelte"
import { ActionButton, Popover, Select } from "@budibase/bbui"
import { canBeSortColumn } from "@budibase/shared-core"
const { sort, columns, stickyColumn } = getContext("grid")
@ -19,7 +20,7 @@
type: stickyColumn.schema?.type,
})
}
return [
options = [
...options,
...columns.map(col => ({
label: col.label || col.name,
@ -27,6 +28,7 @@
type: col.schema?.type,
})),
]
return options.filter(col => canBeSortColumn(col.type))
}
const getOrderOptions = (column, columnOptions) => {

View File

@ -141,7 +141,14 @@
</div>
</div>
{/if}
{#if $loaded}
{#if $error}
<div class="grid-error">
<div class="grid-error-title">There was a problem loading your grid</div>
<div class="grid-error-subtitle">
{$error}
</div>
</div>
{:else if $loaded}
<div class="grid-data-outer" use:clickOutside={ui.actions.blur}>
<div class="grid-data-inner">
<StickyColumn>
@ -171,13 +178,6 @@
</div>
</div>
</div>
{:else if $error}
<div class="grid-error">
<div class="grid-error-title">There was a problem loading your grid</div>
<div class="grid-error-subtitle">
{$error}
</div>
</div>
{/if}
{#if $loading && !$error}
<div in:fade|local={{ duration: 130 }} class="grid-loading">

View File

@ -18,6 +18,7 @@
contentLines,
isDragging,
dispatch,
rows,
} = getContext("grid")
$: rowSelected = !!$selectedRows[row._id]
@ -31,7 +32,7 @@
on:focus
on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
on:click={() => dispatch("rowclick", row)}
on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))}
>
{#each $renderedColumns as column, columnIdx (column.name)}
{@const cellId = `${row._id}-${column.name}`}

View File

@ -33,7 +33,7 @@
let visible = false
let isAdding = false
let newRow = {}
let newRow
let offset = 0
$: firstColumn = $stickyColumn || $renderedColumns[0]
@ -58,7 +58,9 @@
// Create row
const newRowIndex = offset ? undefined : 0
const savedRow = await rows.actions.addRow(newRow, newRowIndex)
let rowToCreate = { ...newRow }
delete rowToCreate._isNewRow
const savedRow = await rows.actions.addRow(rowToCreate, newRowIndex)
if (savedRow) {
// Reset state
clear()
@ -109,7 +111,7 @@
}
// Update state and select initial cell
newRow = {}
newRow = { _isNewRow: true }
visible = true
$hoveredRowId = NewRowID
if (firstColumn) {

View File

@ -74,7 +74,7 @@
class="row"
on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
on:click={() => dispatch("rowclick", row)}
on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))}
>
<GutterCell {row} {rowFocused} {rowHovered} {rowSelected} />
{#if $stickyColumn}

View File

@ -1,6 +1,6 @@
export const getColor = (idx, opacity = 0.3) => {
if (idx == null || idx === -1) {
return null
idx = 0
}
return `hsla(${((idx + 1) * 222) % 360}, 90%, 75%, ${opacity})`
}
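
For reference, the 222 degree hue step keeps consecutive colours well separated, and the change means a missing index now falls back to the first colour instead of returning null. The function above with worked values:

const getColor = (idx: number | null, opacity = 0.3) => {
  if (idx == null || idx === -1) {
    idx = 0
  }
  return `hsla(${((idx + 1) * 222) % 360}, 90%, 75%, ${opacity})`
}

getColor(0)    // "hsla(222, 90%, 75%, 0.3)"
getColor(1)    // "hsla(84, 90%, 75%, 0.3)"   (444 % 360)
getColor(2)    // "hsla(306, 90%, 75%, 0.3)"  (666 % 360)
getColor(null) // "hsla(222, 90%, 75%, 0.3)"  rather than null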

View File

@ -17,6 +17,7 @@
focusedCellAPI,
focusedRowId,
notifications,
isDatasourcePlus,
} = getContext("grid")
$: style = makeStyle($menu)
@ -75,7 +76,7 @@
</MenuItem>
<MenuItem
icon="Copy"
disabled={isNewRow || !$focusedRow?._id}
disabled={isNewRow || !$focusedRow?._id || !$isDatasourcePlus}
on:click={() => copyToClipboard($focusedRow?._id)}
on:click={menu.actions.close}
>

View File

@ -69,7 +69,7 @@ export const deriveStores = context => {
}
export const createActions = context => {
const { columns, stickyColumn, datasource, definition } = context
const { columns, stickyColumn, datasource, definition, schema } = context
// Updates the datasource's primary display column
const changePrimaryDisplay = async column => {
@ -101,7 +101,7 @@ export const createActions = context => {
const $columns = get(columns)
const $definition = get(definition)
const $stickyColumn = get(stickyColumn)
const newSchema = cloneDeep($definition.schema)
let newSchema = cloneDeep(get(schema)) || {}
// Build new updated datasource schema
Object.keys(newSchema).forEach(column => {
@ -142,26 +142,35 @@ export const createActions = context => {
}
export const initialise = context => {
const { definition, columns, stickyColumn, schema } = context
const { definition, columns, stickyColumn, enrichedSchema } = context
// Merge new schema fields with existing schema in order to preserve widths
schema.subscribe($schema => {
if (!$schema) {
enrichedSchema.subscribe($enrichedSchema => {
if (!$enrichedSchema) {
columns.set([])
stickyColumn.set(null)
return
}
const $definition = get(definition)
const $columns = get(columns)
const $stickyColumn = get(stickyColumn)
// Generate array of all columns to easily find pre-existing columns
let allColumns = $columns || []
if ($stickyColumn) {
allColumns.push($stickyColumn)
}
// Find primary display
let primaryDisplay
if ($definition.primaryDisplay && $schema[$definition.primaryDisplay]) {
primaryDisplay = $definition.primaryDisplay
const candidatePD = $definition.primaryDisplay || $stickyColumn?.name
if (candidatePD && $enrichedSchema[candidatePD]) {
primaryDisplay = candidatePD
}
// Get field list
let fields = []
Object.keys($schema).forEach(field => {
Object.keys($enrichedSchema).forEach(field => {
if (field !== primaryDisplay) {
fields.push(field)
}
@ -170,14 +179,18 @@ export const initialise = context => {
// Update columns, removing extraneous columns and adding missing ones
columns.set(
fields
.map(field => ({
name: field,
label: $schema[field].displayName || field,
schema: $schema[field],
width: $schema[field].width || DefaultColumnWidth,
visible: $schema[field].visible ?? true,
order: $schema[field].order,
}))
.map(field => {
const fieldSchema = $enrichedSchema[field]
const oldColumn = allColumns?.find(x => x.name === field)
return {
name: field,
label: fieldSchema.displayName || field,
schema: fieldSchema,
width: fieldSchema.width || oldColumn?.width || DefaultColumnWidth,
visible: fieldSchema.visible ?? true,
order: fieldSchema.order ?? oldColumn?.order,
}
})
.sort((a, b) => {
// Sort by order first
const orderA = a.order
@ -205,11 +218,13 @@ export const initialise = context => {
stickyColumn.set(null)
return
}
const stickySchema = $enrichedSchema[primaryDisplay]
const oldStickyColumn = allColumns?.find(x => x.name === primaryDisplay)
stickyColumn.set({
name: primaryDisplay,
label: $schema[primaryDisplay].displayName || primaryDisplay,
schema: $schema[primaryDisplay],
width: $schema[primaryDisplay].width || DefaultColumnWidth,
label: stickySchema.displayName || primaryDisplay,
schema: stickySchema,
width: stickySchema.width || oldStickyColumn?.width || DefaultColumnWidth,
visible: true,
order: 0,
left: GutterWidth,

View File

@ -37,9 +37,10 @@ export const deriveStores = context => {
[props, hasNonAutoColumn],
([$props, $hasNonAutoColumn]) => {
let config = { ...$props }
const type = $props.datasource?.type
// Disable some features if we're editing a view
if ($props.datasource?.type === "viewV2") {
if (type === "viewV2") {
config.canEditColumns = false
}
@ -48,6 +49,16 @@ export const deriveStores = context => {
config.canAddRows = false
}
// Disable features for non DS+
if (!["table", "viewV2"].includes(type)) {
config.canAddRows = false
config.canEditRows = false
config.canDeleteRows = false
config.canExpandRows = false
config.canSaveSchema = false
config.canEditColumns = false
}
return config
}
)
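
To make the derivation above concrete: for any datasource type other than table or viewV2, every mutating capability is forced off regardless of the props passed in. A condensed stand-in for the derived store body:

// Illustrative; the real code lives inside a derived svelte store
const deriveConfig = (props: Record<string, any>) => {
  const config = { ...props }
  const type = props.datasource?.type
  if (!["table", "viewV2"].includes(type)) {
    Object.assign(config, {
      canAddRows: false,
      canEditRows: false,
      canDeleteRows: false,
      canExpandRows: false,
      canSaveSchema: false,
      canEditColumns: false,
    })
  }
  return config
}

deriveConfig({ datasource: { type: "custom" }, canAddRows: true })
// -> canAddRows is forced back to false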

View File

@ -1,4 +1,5 @@
import { derived, get, writable } from "svelte/store"
import { getDatasourceDefinition } from "../../../fetch"
export const createStores = () => {
const definition = writable(null)
@ -9,21 +10,38 @@ export const createStores = () => {
}
export const deriveStores = context => {
const { definition, schemaOverrides, columnWhitelist } = context
const { definition, schemaOverrides, columnWhitelist, datasource } = context
const schema = derived(
[definition, schemaOverrides, columnWhitelist],
([$definition, $schemaOverrides, $columnWhitelist]) => {
if (!$definition?.schema) {
const schema = derived(definition, $definition => {
let schema = $definition?.schema
if (!schema) {
return null
}
// Ensure all schema entries are objects.
// Certain datasources, like queries, use primitive values.
Object.keys(schema || {}).forEach(key => {
if (typeof schema[key] !== "object") {
schema[key] = { type: schema[key] }
}
})
return schema
})
const enrichedSchema = derived(
[schema, schemaOverrides, columnWhitelist],
([$schema, $schemaOverrides, $columnWhitelist]) => {
if (!$schema) {
return null
}
let newSchema = { ...$definition?.schema }
let enrichedSchema = { ...$schema }
// Apply schema overrides
Object.keys($schemaOverrides || {}).forEach(field => {
if (newSchema[field]) {
newSchema[field] = {
...newSchema[field],
if (enrichedSchema[field]) {
enrichedSchema[field] = {
...enrichedSchema[field],
...$schemaOverrides[field],
}
}
@ -31,41 +49,64 @@ export const deriveStores = context => {
// Apply whitelist if specified
if ($columnWhitelist?.length) {
Object.keys(newSchema).forEach(key => {
Object.keys(enrichedSchema).forEach(key => {
if (!$columnWhitelist.includes(key)) {
delete newSchema[key]
delete enrichedSchema[key]
}
})
}
return newSchema
return enrichedSchema
}
)
const isDatasourcePlus = derived(datasource, $datasource => {
return ["table", "viewV2"].includes($datasource?.type)
})
return {
schema,
enrichedSchema,
isDatasourcePlus,
}
}
export const createActions = context => {
const { datasource, definition, config, dispatch, table, viewV2 } = context
const {
API,
datasource,
definition,
config,
dispatch,
table,
viewV2,
nonPlus,
} = context
// Gets the appropriate API for the configured datasource type
const getAPI = () => {
const $datasource = get(datasource)
switch ($datasource?.type) {
const type = $datasource?.type
if (!type) {
return null
}
switch (type) {
case "table":
return table
case "viewV2":
return viewV2
default:
return null
return nonPlus
}
}
// Refreshes the datasource definition
const refreshDefinition = async () => {
return await getAPI()?.actions.refreshDefinition()
const def = await getDatasourceDefinition({
API,
datasource: get(datasource),
})
definition.set(def)
}
// Saves the datasource definition
@ -113,6 +154,11 @@ export const createActions = context => {
return getAPI()?.actions.canUseColumn(name)
}
// Gets the features supported by the datasource
const getFeatures = () => {
return getAPI()?.actions.getFeatures()
}
return {
datasource: {
...datasource,
@ -125,6 +171,7 @@ export const createActions = context => {
getRow,
isDatasourceValid,
canUseColumn,
getFeatures,
},
},
}
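
Query datasources can declare their schema with primitive values rather than objects; the derived store above normalises them so the rest of the grid can rely on a consistent shape. An illustrative input and output of that normalisation:

const raw: Record<string, any> = { name: "string", age: "number" }
Object.keys(raw).forEach(key => {
  if (typeof raw[key] !== "object") {
    raw[key] = { type: raw[key] }
  }
})
// raw is now { name: { type: "string" }, age: { type: "number" } }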

View File

@ -0,0 +1,124 @@
import { get } from "svelte/store"
export const createActions = context => {
const { columns, stickyColumn, table, viewV2 } = context
const saveDefinition = async () => {
throw "This datasource does not support updating the definition"
}
const saveRow = async () => {
throw "This datasource does not support saving rows"
}
const deleteRows = async () => {
throw "This datasource does not support deleting rows"
}
const getRow = () => {
throw "This datasource does not support fetching individual rows"
}
const isDatasourceValid = datasource => {
// There are many different types and shapes of datasource, so we only
// check that a type exists and that no dedicated datasource API claims it
return (
!table.actions.isDatasourceValid(datasource) &&
!viewV2.actions.isDatasourceValid(datasource) &&
datasource?.type != null
)
}
const canUseColumn = name => {
const $columns = get(columns)
const $sticky = get(stickyColumn)
return $columns.some(col => col.name === name) || $sticky?.name === name
}
const getFeatures = () => {
// We don't support any features
return {}
}
return {
nonPlus: {
actions: {
saveDefinition,
addRow: saveRow,
updateRow: saveRow,
deleteRows,
getRow,
isDatasourceValid,
canUseColumn,
getFeatures,
},
},
}
}
// Small util to compare datasource definitions
const isSameDatasource = (a, b) => {
return JSON.stringify(a) === JSON.stringify(b)
}
export const initialise = context => {
const {
datasource,
sort,
filter,
nonPlus,
initialFilter,
initialSortColumn,
initialSortOrder,
fetch,
} = context
// Keep a list of subscriptions so that we can clear them when the datasource
// config changes
let unsubscribers = []
// Observe datasource changes and apply logic for non-plus datasources
datasource.subscribe($datasource => {
// Clear previous subscriptions
unsubscribers?.forEach(unsubscribe => unsubscribe())
unsubscribers = []
if (!nonPlus.actions.isDatasourceValid($datasource)) {
return
}
// Wipe state
filter.set(get(initialFilter))
sort.set({
column: get(initialSortColumn),
order: get(initialSortOrder) || "ascending",
})
// Update fetch when filter changes
unsubscribers.push(
filter.subscribe($filter => {
// Ensure we're updating the correct fetch
const $fetch = get(fetch)
if (!isSameDatasource($fetch?.options?.datasource, $datasource)) {
return
}
$fetch.update({
filter: $filter,
})
})
)
// Update fetch when sorting changes
unsubscribers.push(
sort.subscribe($sort => {
// Ensure we're updating the correct fetch
const $fetch = get(fetch)
if (!isSameDatasource($fetch?.options?.datasource, $datasource)) {
return
}
$fetch.update({
sortOrder: $sort.order || "ascending",
sortColumn: $sort.column,
})
})
)
})
}
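
The initialise block above re-wires the filter and sort subscriptions every time the datasource changes, so a stale subscription can never update a fetch it no longer owns. A generic sketch of that teardown pattern using svelte stores:

import { writable } from "svelte/store"

let unsubscribers: Array<() => void> = []
const source = writable("a")
const dependent = writable(0)

source.subscribe(() => {
  // Drop the subscriptions wired up for the previous source value
  unsubscribers.forEach(unsubscribe => unsubscribe())
  unsubscribers = []
  // Re-subscribe, keeping the teardown function that subscribe returns
  unsubscribers.push(
    dependent.subscribe(() => {
      // react to dependent changes only while this source value is current
    })
  )
})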

View File

@ -1,13 +1,10 @@
import { get } from "svelte/store"
import TableFetch from "../../../../fetch/TableFetch"
const SuppressErrors = true
export const createActions = context => {
const { definition, API, datasource, columns, stickyColumn } = context
const refreshDefinition = async () => {
definition.set(await API.fetchTableDefinition(get(datasource).tableId))
}
const { API, datasource, columns, stickyColumn } = context
const saveDefinition = async newDefinition => {
await API.saveTable(newDefinition)
@ -49,10 +46,13 @@ export const createActions = context => {
return $columns.some(col => col.name === name) || $sticky?.name === name
}
const getFeatures = () => {
return new TableFetch({ API }).determineFeatureFlags()
}
return {
table: {
actions: {
refreshDefinition,
saveDefinition,
addRow: saveRow,
updateRow: saveRow,
@ -60,6 +60,7 @@ export const createActions = context => {
getRow,
isDatasourceValid,
canUseColumn,
getFeatures,
},
},
}

View File

@ -1,22 +1,10 @@
import { get } from "svelte/store"
import ViewV2Fetch from "../../../../fetch/ViewV2Fetch"
const SuppressErrors = true
export const createActions = context => {
const { definition, API, datasource, columns, stickyColumn } = context
const refreshDefinition = async () => {
const $datasource = get(datasource)
if (!$datasource) {
definition.set(null)
return
}
const table = await API.fetchTableDefinition($datasource.tableId)
const view = Object.values(table?.views || {}).find(
view => view.id === $datasource.id
)
definition.set(view)
}
const { API, datasource, columns, stickyColumn } = context
const saveDefinition = async newDefinition => {
await API.viewV2.update(newDefinition)
@ -58,10 +46,13 @@ export const createActions = context => {
)
}
const getFeatures = () => {
return new ViewV2Fetch({ API }).determineFeatureFlags()
}
return {
viewV2: {
actions: {
refreshDefinition,
saveDefinition,
addRow: saveRow,
updateRow: saveRow,
@ -69,6 +60,7 @@ export const createActions = context => {
getRow,
isDatasourceValid,
canUseColumn,
getFeatures,
},
},
}

View File

@ -15,9 +15,10 @@ import * as Config from "./config"
import * as Sort from "./sort"
import * as Filter from "./filter"
import * as Notifications from "./notifications"
import * as Table from "./table"
import * as ViewV2 from "./viewV2"
import * as Datasource from "./datasource"
import * as Table from "./datasources/table"
import * as ViewV2 from "./datasources/viewV2"
import * as NonPlus from "./datasources/nonPlus"
const DependencyOrderedStores = [
Sort,
@ -26,6 +27,7 @@ const DependencyOrderedStores = [
Scroll,
Table,
ViewV2,
NonPlus,
Datasource,
Columns,
Rows,

View File

@ -1,7 +1,8 @@
import { writable, derived, get } from "svelte/store"
import { fetchData } from "../../../fetch/fetchData"
import { fetchData } from "../../../fetch"
import { NewRowID, RowPageSize } from "../lib/constants"
import { tick } from "svelte"
import { Helpers } from "@budibase/bbui"
export const createStores = () => {
const rows = writable([])
@ -76,11 +77,11 @@ export const createActions = context => {
columns,
rowChangeCache,
inProgressChanges,
previousFocusedRowId,
hasNextPage,
error,
notifications,
fetch,
isDatasourcePlus,
} = context
const instanceLoaded = writable(false)
@ -93,12 +94,14 @@ export const createActions = context => {
datasource.subscribe(async $datasource => {
// Unsub from previous fetch if one exists
unsubscribe?.()
unsubscribe = null
fetch.set(null)
instanceLoaded.set(false)
loading.set(true)
// Abandon if we don't have a valid datasource
if (!datasource.actions.isDatasourceValid($datasource)) {
error.set("Datasource is invalid")
return
}
@ -108,6 +111,10 @@ export const createActions = context => {
const $filter = get(filter)
const $sort = get(sort)
// Determine how many rows to fetch per page
const features = datasource.actions.getFeatures()
const limit = features?.supportsPagination ? RowPageSize : null
// Create new fetch model
const newFetch = fetchData({
API,
@ -116,7 +123,7 @@ export const createActions = context => {
filter: $filter,
sortColumn: $sort.column,
sortOrder: $sort.order,
limit: RowPageSize,
limit,
paginate: true,
},
})
@ -355,7 +362,7 @@ export const createActions = context => {
// Update row
const saved = await datasource.actions.updateRow({
...row,
...cleanRow(row),
...get(rowChangeCache)[rowId],
})
@ -411,8 +418,17 @@ export const createActions = context => {
}
let rowsToAppend = []
let newRow
const $isDatasourcePlus = get(isDatasourcePlus)
for (let i = 0; i < newRows.length; i++) {
newRow = newRows[i]
// Ensure we have a unique _id.
// This means generating one for non DS+, overwriting any that may already
// exist, as we cannot allow duplicates.
if (!$isDatasourcePlus) {
newRow._id = Helpers.uuid()
}
if (!rowCacheMap[newRow._id]) {
rowCacheMap[newRow._id] = true
rowsToAppend.push(newRow)
@ -449,15 +465,16 @@ export const createActions = context => {
return get(rowLookupMap)[id] != null
}
// Wipe the row change cache when changing row
previousFocusedRowId.subscribe(id => {
if (id && !get(inProgressChanges)[id]) {
rowChangeCache.update(state => {
delete state[id]
return state
})
// Cleans a row by removing any internal grid metadata from it.
// Call this before passing a row to any sort of external flow.
const cleanRow = row => {
let clone = { ...row }
delete clone.__idx
if (!get(isDatasourcePlus)) {
delete clone._id
}
})
return clone
}
return {
rows: {
@ -474,7 +491,22 @@ export const createActions = context => {
refreshRow,
replaceRow,
refreshData,
cleanRow,
},
},
}
}
export const initialise = context => {
const { rowChangeCache, inProgressChanges, previousFocusedRowId } = context
// Wipe the row change cache when changing row
previousFocusedRowId.subscribe(id => {
if (id && !get(inProgressChanges)[id]) {
rowChangeCache.update(state => {
delete state[id]
return state
})
}
})
}
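
cleanRow keeps grid-internal metadata from leaking into row click handlers and other external flows; for non DS+ datasources even _id is stripped, since it was generated client-side and is meaningless outside the grid. A standalone sketch (isDatasourcePlus passed as a plain boolean here for simplicity):

const cleanRow = (row: Record<string, any>, isDatasourcePlus: boolean) => {
  const clone = { ...row }
  delete clone.__idx // grid rendering index
  if (!isDatasourcePlus) {
    delete clone._id // generated via Helpers.uuid(), not a real row ID
  }
  return clone
}

cleanRow({ _id: "uuid", __idx: 4, name: "Readme" }, false)  // { name: "Readme" }
cleanRow({ _id: "ro_123", __idx: 4, name: "Readme" }, true) // { _id: "ro_123", name: "Readme" }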

View File

@ -17,7 +17,7 @@ export const createStores = context => {
}
export const initialise = context => {
const { sort, initialSortColumn, initialSortOrder, definition } = context
const { sort, initialSortColumn, initialSortOrder, schema } = context
// Reset sort when initial sort props change
initialSortColumn.subscribe(newSortColumn => {
@ -28,15 +28,12 @@ export const initialise = context => {
})
// Derive whether the current sort column exists in the schema
const sortColumnExists = derived(
[sort, definition],
([$sort, $definition]) => {
if (!$sort?.column || !$definition) {
return true
}
return $definition.schema?.[$sort.column] != null
const sortColumnExists = derived([sort, schema], ([$sort, $schema]) => {
if (!$sort?.column || !$schema) {
return true
}
)
return $schema[$sort.column] != null
})
// Clear sort state if our sort column does not exist
sortColumnExists.subscribe(exists => {

View File

@ -0,0 +1,145 @@
import DataFetch from "./DataFetch.js"
export default class CustomFetch extends DataFetch {
// Gets the correct Budibase type for a JS value
getType(value) {
if (value == null) {
return "string"
}
const type = typeof value
if (type === "object") {
if (Array.isArray(value)) {
// Use our custom array type to render badges
return "array"
}
// Use JSON for objects to ensure they are stringified
return "json"
} else if (!isNaN(value)) {
return "number"
} else {
return "string"
}
}
// Parses the custom data into an array format
parseCustomData(data) {
if (!data) {
return []
}
// Happy path - already an array
if (Array.isArray(data)) {
return data
}
// For strings, try JSON then fall back to attempting a CSV
if (typeof data === "string") {
try {
const js = JSON.parse(data)
return Array.isArray(js) ? js : [js]
} catch (error) {
// Ignore
}
// Try splitting by newlines first
if (data.includes("\n")) {
return data.split("\n").map(x => x.trim())
}
// Split by commas next
return data.split(",").map(x => x.trim())
}
// In other cases we just assume it's a single object and wrap it
return [data]
}
// Enriches the custom data to ensure the structure and format are usable
enrichCustomData(data) {
if (!data?.length) {
return []
}
// Filter out any invalid values
data = data.filter(x => x != null && x !== "" && !Array.isArray(x))
// Ensure all values are packed into objects
return data.map(value => {
if (typeof value === "object") {
return value
}
// Try parsing strings
if (typeof value === "string") {
const split = value.split(",").map(x => x.trim())
let obj = {}
for (let i = 0; i < split.length; i++) {
const suffix = i === 0 ? "" : ` ${i + 1}`
const key = `Value${suffix}`
obj[key] = split[i]
}
return obj
}
// For anything else, wrap in an object
return { Value: value }
})
}
// Extracts and parses the custom data from the datasource definition
getCustomData(datasource) {
return this.enrichCustomData(this.parseCustomData(datasource?.data))
}
async getDefinition(datasource) {
// Try to work out the schema from the array provided
let schema = {}
const data = this.getCustomData(datasource)
if (!data?.length) {
return { schema }
}
// Go through every object and extract all valid keys
for (let datum of data) {
for (let key of Object.keys(datum)) {
if (key === "_id") {
continue
}
if (!schema[key]) {
let type = this.getType(datum[key])
let constraints = {}
// Determine whether we should render text columns as options instead
if (type === "string") {
const uniqueValues = [...new Set(data.map(x => x[key]))]
const uniqueness = uniqueValues.length / data.length
if (uniqueness <= 0.8 && uniqueValues.length > 1) {
type = "options"
constraints.inclusion = uniqueValues
}
}
// Generate options for array columns
else if (type === "array") {
constraints.inclusion = [...new Set(data.map(x => x[key]).flat())]
}
schema[key] = {
type,
constraints,
}
}
}
}
return { schema }
}
async getData() {
const { datasource } = this.options
return {
rows: this.getCustomData(datasource),
hasNextPage: false,
cursor: null,
}
}
}
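
To make the parsing and schema inference above concrete, here are worked inputs and outputs (method calls shown unqualified for brevity; these are traced by hand from the logic above, not captured from a run):

parseCustomData('["a","b"]')       // valid JSON array -> ["a", "b"]
parseCustomData("red\ngreen,blue") // newlines win -> ["red", "green,blue"]
parseCustomData("red,green")       // then commas -> ["red", "green"]

enrichCustomData(["red", "green,blue"])
// -> [{ Value: "red" }, { Value: "green", "Value 2": "blue" }]

// Schema inference: given rows [{ colour: "red" }, { colour: "red" }, { colour: "green" }],
// uniqueness = 2 / 3 = 0.67 <= 0.8, so "colour" becomes an "options" column
// with constraints.inclusion = ["red", "green"] instead of a plain string.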

View File

@ -8,6 +8,7 @@ import FieldFetch from "./FieldFetch.js"
import JSONArrayFetch from "./JSONArrayFetch.js"
import UserFetch from "./UserFetch.js"
import GroupUserFetch from "./GroupUserFetch.js"
import CustomFetch from "./CustomFetch.js"
const DataFetchMap = {
table: TableFetch,
@ -17,6 +18,7 @@ const DataFetchMap = {
link: RelationshipFetch,
user: UserFetch,
groupUser: GroupUserFetch,
custom: CustomFetch,
// Client-specific datasource types
provider: NestedProviderFetch,
@ -24,7 +26,18 @@ const DataFetchMap = {
jsonarray: JSONArrayFetch,
}
// Constructs a new fetch model for a certain datasource
export const fetchData = ({ API, datasource, options }) => {
const Fetch = DataFetchMap[datasource?.type] || TableFetch
return new Fetch({ API, datasource, ...options })
}
// Fetches the definition of any type of datasource
export const getDatasourceDefinition = async ({ API, datasource }) => {
const handler = DataFetchMap[datasource?.type]
if (!handler) {
return null
}
const instance = new handler({ API })
return await instance.getDefinition(datasource)
}
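
The new getDatasourceDefinition helper means the grid can resolve a definition for any datasource type through the same DataFetchMap used for fetching rows. A sketch of both entry points together (the import path, API client, and custom datasource shape are illustrative):

import { fetchData, getDatasourceDefinition } from "./fetch"

const loadCustom = async (API: any) => {
  // Hypothetical custom datasource backed by inline data
  const datasource = { type: "custom", data: "red\ngreen,blue" }
  // Definition now comes from the fetch class itself - no network round trip
  const definition = await getDatasourceDefinition({ API, datasource })
  // The same map entry drives row fetching
  const fetch = fetchData({ API, datasource, options: {} })
  return { definition, fetch }
}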

View File

@ -1,5 +1,5 @@
export { createAPIClient } from "./api"
export { fetchData } from "./fetch/fetchData"
export { fetchData } from "./fetch"
export { Utils } from "./utils"
export * as Constants from "./constants"
export * from "./stores"

@ -1 +1 @@
Subproject commit 044bec6447066b215932d6726c437e7ec5a9e42e
Subproject commit d24c0dc3a30014cbe61860252aa48104cad36376

View File

@ -0,0 +1,84 @@
FROM node:18-slim
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.post-update="scripts/watchtower-hooks/post-update.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.post-check="scripts/watchtower-hooks/post-check.sh"
WORKDIR /app
ENV PORT=4001
ENV COUCH_DB_URL=https://couchdb.budi.live:5984
ENV BUDIBASE_ENVIRONMENT=PRODUCTION
ENV SERVICE=app-service
ENV POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
ENV TENANT_FEATURE_FLAGS=*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR
ENV ACCOUNT_PORTAL_URL=https://account.budibase.app
ENV TOP_LEVEL_PATH=/
# handle node-gyp
RUN apt-get update \
&& apt-get install -y --no-install-recommends g++ make python3 jq
RUN yarn global add pm2
# Install client for Oracle datasource
RUN apt-get install -y --no-install-recommends unzip libaio1
COPY packages/server/scripts/integrations/oracle/ scripts/integrations/oracle/
RUN /bin/bash -e scripts/integrations/oracle/instantclient/linux/x86-64/install.sh
# Install postgres client for pg_dump utils
RUN apt update && apt upgrade -y \
&& apt install software-properties-common apt-transport-https curl gpg -y \
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https curl gpg -y
WORKDIR /
COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
WORKDIR /string-templates
COPY packages/string-templates/package.json package.json
RUN ../scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true
COPY packages/string-templates .
WORKDIR /app
COPY packages/server/package.json .
COPY packages/server/dist/yarn.lock .
RUN cd ../string-templates && yarn link && cd - && yarn link @budibase/string-templates
COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
RUN ./scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production=true \
# Remove unneeded data from file system to reduce image size
&& yarn cache clean && apt-get remove -y --purge --auto-remove g++ make python3 jq \
&& rm -rf /tmp/* /root/.node-gyp /usr/local/lib/node_modules/npm/node_modules/node-gyp
COPY packages/server/dist/ dist/
COPY packages/server/docker_run.sh .
COPY packages/server/builder/ builder/
COPY packages/server/client/ client/
ARG BUDIBASE_VERSION
# Ensure the version argument is set
RUN test -n "$BUDIBASE_VERSION"
ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
EXPOSE 4001
# NODE_ENV=production has to be set after install, because setting it
# earlier stops yarn from installing the dev dependencies that are
# needed to get this environment up and running
ENV NODE_ENV=production
ENV CLUSTER_MODE=${CLUSTER_MODE}
ENV TOP_LEVEL_PATH=/app
CMD ["./docker_run.sh"]

View File

@ -18,7 +18,7 @@
"test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch",
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION --build-arg BUDIBASE_VERSION=$BUDIBASE_RELEASE_VERSION",
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",
@ -55,7 +55,6 @@
"@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "6.8.0",
"@koa/router": "8.0.8",
"@sentry/node": "6.17.7",
"@socket.io/redis-adapter": "^8.2.1",
"airtable": "0.10.1",
"arangojs": "7.2.0",
@ -123,7 +122,7 @@
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.8",
"@types/lodash": "4.14.180",
"@types/lodash": "4.14.200",
"@types/mssql": "8.1.2",
"@types/node": "18.17.0",
"@types/node-fetch": "2.6.4",

View File

@ -47,6 +47,7 @@ async function init() {
TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
HTTP_MIGRATIONS: "0",
HTTP_LOGGING: "0",
VERSION: "0.0.0+local",
}
let envFile = ""
Object.keys(envFileJson).forEach(key => {

View File

@ -4,7 +4,6 @@ import {
getQueryParams,
getTableParams,
} from "../../db/utils"
import { destroy as tableDestroy } from "./table/internal"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
@ -325,11 +324,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {
// Destroy the tables.
for (const table of datasourceTableDocs) {
await tableDestroy({
params: {
tableId: table._id,
},
})
await sdk.tables.internal.destroy(table)
}
}

View File

@ -40,7 +40,7 @@ class Routing {
/**
* Gets the full routing structure by querying the routing view and processing the result into the tree.
* @returns {Promise<object>} The routing structure, this is the full structure designed for use in the builder,
* @returns The routing structure. This is the full structure designed for use in the builder;
* if the client routing is required then the updateRoutingStructureForUserRole should be used.
*/
async function getRoutingStructure() {

View File

@ -280,17 +280,8 @@ function isEditableColumn(column: FieldSchema) {
return !(isExternalAutoColumn || isFormula)
}
export type ExternalRequestReturnType<T> = T extends Operation.READ
?
| Row[]
| {
row: Row
table: Table
}
: {
row: Row
table: Table
}
export type ExternalRequestReturnType<T extends Operation> =
T extends Operation.READ ? Row[] : { row: Row; table: Table }
export class ExternalRequest<T extends Operation> {
private readonly operation: T
@ -857,11 +848,12 @@ export class ExternalRequest<T extends Operation> {
}
const output = this.outputProcessing(response, table, relationships)
// if reading, the result will just be an array of rows, so return the whole thing
const result = (
operation === Operation.READ && Array.isArray(response)
? output
: { row: output[0], table }
) as ExternalRequestReturnType<T>
return result
if (operation === Operation.READ) {
return (
Array.isArray(output) ? output : [output]
) as ExternalRequestReturnType<T>
} else {
return { row: output[0], table } as ExternalRequestReturnType<T>
}
}
}
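
The rewritten conditional type lets TypeScript narrow the return type from the operation alone, which is what allows the casts to be deleted in the next file. A compile-time sketch (the import path is assumed):

import { Operation, Row, Table } from "@budibase/types"

type ExternalRequestReturnType<T extends Operation> =
  T extends Operation.READ ? Row[] : { row: Row; table: Table }

declare function handle<T extends Operation>(
  op: T
): Promise<ExternalRequestReturnType<T>>

const demo = async () => {
  const rows = await handle(Operation.READ)        // typed as Row[], no cast needed
  const destroyed = await handle(Operation.DELETE) // typed as { row: Row; table: Table }
}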

View File

@ -44,7 +44,7 @@ export async function handleRequest<T extends Operation>(
return [] as any
}
return new ExternalRequest(operation, tableId, opts?.datasource).run(
return new ExternalRequest<T>(operation, tableId, opts?.datasource).run(
opts || {}
)
}
@ -148,17 +148,17 @@ export async function find(ctx: UserCtx): Promise<Row> {
export async function destroy(ctx: UserCtx) {
const tableId = utils.getTableId(ctx)
const _id = ctx.request.body._id
const { row } = (await handleRequest(Operation.DELETE, tableId, {
const { row } = await handleRequest(Operation.DELETE, tableId, {
id: breakRowIdField(_id),
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})) as { row: Row }
})
return { response: { ok: true, id: _id }, row }
}
export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body
const tableId = utils.getTableId(ctx)
let promises: Promise<Row[] | { row: Row; table: Table }>[] = []
let promises: Promise<{ row: Row; table: Table }>[] = []
for (let row of rows) {
promises.push(
handleRequest(Operation.DELETE, tableId, {
@ -167,7 +167,7 @@ export async function bulkDestroy(ctx: UserCtx) {
})
)
}
const responses = (await Promise.all(promises)) as { row: Row }[]
const responses = await Promise.all(promises)
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
}
@ -183,11 +183,11 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
ctx.throw(400, "Datasource has not been configured for plus API.")
}
const tables = datasource.entities
const response = (await handleRequest(Operation.READ, tableId, {
const response = await handleRequest(Operation.READ, tableId, {
id,
datasource,
includeSqlRelationships: IncludeRelationship.INCLUDE,
})) as Row[]
})
const table: Table = tables[tableName]
const row = response[0]
// this seems like a lot of work, but basically we need to dig deeper for the enrich

Some files were not shown because too many files have changed in this diff