Merge branch 'develop' of github.com:Budibase/budibase into plugins-dev-experience
commit 76d427001a
@@ -12,19 +12,28 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - name: Fail if branch is not master
        if: github.ref != 'refs/heads/master'
        run: |
          echo "Ref is not master, you must run this job from master."
          exit 1

      - uses: actions/checkout@v2
        with:
          node-version: 14.x
          fetch_depth: 0

      - name: Get the latest budibase release version
        id: version
        run: |
          release_version=$(cat lerna.json | jq -r '.version')
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

      - name: Tag and release Docker images (Self Host)
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD

          # Get latest release version
          release_version=$(cat lerna.json | jq -r '.version')
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
          release_tag=v$release_version
          release_tag=v${{ env.RELEASE_VERSION }}

          # Pull apps and worker images
          docker pull budibase/apps:$release_tag
@@ -75,7 +75,7 @@ services:
    ports:
      - "${MAIN_PORT}:10000"
    container_name: bbproxy
    image: proxy-service
    image: budibase/proxy
    environment:
      - PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
    depends_on:
@@ -3,15 +3,14 @@
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
  # Azure AppService uses /home for persistent data & SSH on port 2222
  mkdir -p /home/{search,minio,couch}
  mkdir -p /home/couch/{dbs,views}
  chown -R couchdb:couchdb /home/couch/
  DATA_DIR=/home
  mkdir -p $DATA_DIR/{search,minio,couchdb}
  mkdir -p $DATA_DIR/couchdb/{dbs,views}
  chown -R couchdb:couchdb $DATA_DIR/couchdb/
  apt update
  apt-get install -y openssh-server
  sed -i 's#dir=/opt/couchdb/data/search#dir=/home/search#' /opt/clouseau/clouseau.ini
  sed -i 's#/minio/minio server /minio &#/minio/minio server /home/minio &#' /runner.sh
  sed -i 's#database_dir = ./data#database_dir = /home/couch/dbs#' /opt/couchdb/etc/default.ini
  sed -i 's#view_index_dir = ./data#view_index_dir = /home/couch/views#' /opt/couchdb/etc/default.ini
  sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
  /etc/init.d/ssh restart
fi

sed -i 's#DATA_DIR#$DATA_DIR#' /opt/clouseau/clouseau.ini /opt/couchdb/etc/local.ini
@@ -20,10 +20,10 @@ RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh

FROM couchdb:3.2.1
# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
ARG TARGETARCH amd64
ARG TARGETARCH=amd64
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD single
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD

COPY --from=build /app /app
@@ -35,6 +35,7 @@ ENV \
  BUDIBASE_ENVIRONMENT=PRODUCTION \
  CLUSTER_PORT=80 \
  # CUSTOM_DOMAIN=budi001.custom.com \
  DATA_DIR=/data \
  DEPLOYMENT_ENVIRONMENT=docker \
  MINIO_URL=http://localhost:9000 \
  POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU \
@@ -114,6 +115,7 @@ RUN chmod +x ./healthcheck.sh
ADD hosting/scripts/build-target-paths.sh .
RUN chmod +x ./build-target-paths.sh

# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
RUN /build-target-paths.sh
@@ -7,7 +7,7 @@ name=clouseau@127.0.0.1
cookie=monster

; the path where you would like to store the search index files
dir=/data/search
dir=DATA_DIR/search

; the number of search indexes that can be open simultaneously
max_indexes_open=500
@@ -1,5 +1,5 @@
; CouchDB Configuration Settings

[couchdb]
database_dir = /data/couch/dbs
view_index_dir = /data/couch/views
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views
@@ -1,7 +1,16 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
if [ -f "/data/.env" ]; then
  export $(cat /data/.env | xargs)
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")

# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
  DATA_DIR=/home
  /etc/init.d/ssh start
else
  DATA_DIR=${DATA_DIR:-/data}
fi

if [ -f "${DATA_DIR}/.env" ]; then
  export $(cat ${DATA_DIR}/.env | xargs)
fi
# first randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
@@ -14,21 +23,26 @@ done
if [[ -z "${COUCH_DB_URL}" ]]; then
  export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
fi
if [ ! -f "/data/.env" ]; then
  touch /data/.env
if [ ! -f "${DATA_DIR}/.env" ]; then
  touch ${DATA_DIR}/.env
  for ENV_VAR in "${ENV_VARS[@]}"
  do
    temp=$(eval "echo \$$ENV_VAR")
    echo "$ENV_VAR=$temp" >> /data/.env
    echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
  done
  echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
fi

export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984

# make these directories in runner, in case of mount
mkdir -p /data/couch/{dbs,views} /home/couch/{dbs,views}
chown -R couchdb:couchdb /data/couch /home/couch
mkdir -p ${DATA_DIR}/couchdb/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couchdb
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server /data/minio &
/minio/minio server ${DATA_DIR}/minio &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
@@ -1,5 +1,5 @@
{
  "version": "1.2.28-alpha.0",
  "version": "1.2.38",
  "npmClient": "yarn",
  "packages": [
    "packages/*"
@@ -1,6 +1,6 @@
{
  "name": "@budibase/backend-core",
  "version": "1.2.28-alpha.0",
  "version": "1.2.38",
  "description": "Budibase backend core libraries used in server and worker",
  "main": "dist/src/index.js",
  "types": "dist/src/index.d.ts",
@@ -20,7 +20,7 @@
    "test:watch": "jest --watchAll"
  },
  "dependencies": {
    "@budibase/types": "1.2.28-alpha.0",
    "@budibase/types": "^1.2.38",
    "@techpass/passport-openidconnect": "0.3.2",
    "aws-sdk": "2.1030.0",
    "bcrypt": "5.0.1",
@@ -1,6 +1,6 @@
const redis = require("../redis/init")
const { doWithDB } = require("../db")
const { DocumentTypes } = require("../db/constants")
const { DocumentType } = require("../db/constants")

const AppState = {
  INVALID: "invalid",
@@ -14,7 +14,7 @@ const populateFromDB = async appId => {
  return doWithDB(
    appId,
    db => {
      return db.get(DocumentTypes.APP_METADATA)
      return db.get(DocumentType.APP_METADATA)
    },
    { skip_setup: true }
  )
@@ -1,4 +1,4 @@
export enum ContextKeys {
export enum ContextKey {
  TENANT_ID = "tenantId",
  GLOBAL_DB = "globalDb",
  APP_ID = "appId",
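For orientation, a minimal usage sketch of the renamed context keys as they are exercised indirectly through the context helpers changed below; the import path, the return handling, and the "screen_" document prefix are assumptions for illustration and are not part of this commit.

// Hypothetical example (not in this diff): run a task inside an app context
// so that getAppDB() can resolve the database stored under ContextKey.CURRENT_DB.
import { doInAppContext, getAppDB } from "@budibase/backend-core/context" // import path assumed

export async function countScreens(appId: string): Promise<number> {
  return doInAppContext(appId, async () => {
    const db = getAppDB()
    // "screen_" key prefix is illustrative only
    const screens = await db.allDocs({ startkey: "screen_", endkey: "screen_\ufff0" })
    return screens.rows.length
  })
}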
@ -1,11 +1,11 @@
|
|||
import env from "../environment"
|
||||
import { SEPARATOR, DocumentTypes } from "../db/constants"
|
||||
import { SEPARATOR, DocumentType } from "../db/constants"
|
||||
import cls from "./FunctionContext"
|
||||
import { dangerousGetDB, closeDB } from "../db"
|
||||
import { baseGlobalDBName } from "../tenancy/utils"
|
||||
import { IdentityContext } from "@budibase/types"
|
||||
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
|
||||
import { ContextKeys } from "./constants"
|
||||
import { ContextKey } from "./constants"
|
||||
import {
|
||||
updateUsing,
|
||||
closeWithUsing,
|
||||
|
@ -33,8 +33,8 @@ export const closeTenancy = async () => {
|
|||
}
|
||||
await closeDB(db)
|
||||
// clear from context now that database is closed/task is finished
|
||||
cls.setOnContext(ContextKeys.TENANT_ID, null)
|
||||
cls.setOnContext(ContextKeys.GLOBAL_DB, null)
|
||||
cls.setOnContext(ContextKey.TENANT_ID, null)
|
||||
cls.setOnContext(ContextKey.GLOBAL_DB, null)
|
||||
}
|
||||
|
||||
// export const isDefaultTenant = () => {
|
||||
|
@ -54,7 +54,7 @@ export const getTenantIDFromAppID = (appId: string) => {
|
|||
return null
|
||||
}
|
||||
const split = appId.split(SEPARATOR)
|
||||
const hasDev = split[1] === DocumentTypes.DEV
|
||||
const hasDev = split[1] === DocumentType.DEV
|
||||
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
|
||||
return null
|
||||
}
|
||||
|
@ -83,14 +83,14 @@ export const doInTenant = (tenantId: string | null, task: any) => {
|
|||
// invoke the task
|
||||
return await task()
|
||||
} finally {
|
||||
await closeWithUsing(ContextKeys.TENANCY_IN_USE, () => {
|
||||
await closeWithUsing(ContextKey.TENANCY_IN_USE, () => {
|
||||
return closeTenancy()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const existing = cls.getFromContext(ContextKeys.TENANT_ID) === tenantId
|
||||
return updateUsing(ContextKeys.TENANCY_IN_USE, existing, internal)
|
||||
const existing = cls.getFromContext(ContextKey.TENANT_ID) === tenantId
|
||||
return updateUsing(ContextKey.TENANCY_IN_USE, existing, internal)
|
||||
}
|
||||
|
||||
export const doInAppContext = (appId: string, task: any) => {
|
||||
|
@ -108,7 +108,7 @@ export const doInAppContext = (appId: string, task: any) => {
|
|||
setAppTenantId(appId)
|
||||
}
|
||||
// set the app ID
|
||||
cls.setOnContext(ContextKeys.APP_ID, appId)
|
||||
cls.setOnContext(ContextKey.APP_ID, appId)
|
||||
|
||||
// preserve the identity
|
||||
if (identity) {
|
||||
|
@ -118,14 +118,14 @@ export const doInAppContext = (appId: string, task: any) => {
|
|||
// invoke the task
|
||||
return await task()
|
||||
} finally {
|
||||
await closeWithUsing(ContextKeys.APP_IN_USE, async () => {
|
||||
await closeWithUsing(ContextKey.APP_IN_USE, async () => {
|
||||
await closeAppDBs()
|
||||
await closeTenancy()
|
||||
})
|
||||
}
|
||||
}
|
||||
const existing = cls.getFromContext(ContextKeys.APP_ID) === appId
|
||||
return updateUsing(ContextKeys.APP_IN_USE, existing, internal)
|
||||
const existing = cls.getFromContext(ContextKey.APP_ID) === appId
|
||||
return updateUsing(ContextKey.APP_IN_USE, existing, internal)
|
||||
}
|
||||
|
||||
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
|
||||
|
@ -135,7 +135,7 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
|
|||
|
||||
async function internal(opts = { existing: false }) {
|
||||
if (!opts.existing) {
|
||||
cls.setOnContext(ContextKeys.IDENTITY, identity)
|
||||
cls.setOnContext(ContextKey.IDENTITY, identity)
|
||||
// set the tenant so that doInTenant will preserve identity
|
||||
if (identity.tenantId) {
|
||||
updateTenantId(identity.tenantId)
|
||||
|
@ -146,27 +146,27 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
|
|||
// invoke the task
|
||||
return await task()
|
||||
} finally {
|
||||
await closeWithUsing(ContextKeys.IDENTITY_IN_USE, async () => {
|
||||
await closeWithUsing(ContextKey.IDENTITY_IN_USE, async () => {
|
||||
setIdentity(null)
|
||||
await closeTenancy()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const existing = cls.getFromContext(ContextKeys.IDENTITY)
|
||||
return updateUsing(ContextKeys.IDENTITY_IN_USE, existing, internal)
|
||||
const existing = cls.getFromContext(ContextKey.IDENTITY)
|
||||
return updateUsing(ContextKey.IDENTITY_IN_USE, existing, internal)
|
||||
}
|
||||
|
||||
export const getIdentity = (): IdentityContext | undefined => {
|
||||
try {
|
||||
return cls.getFromContext(ContextKeys.IDENTITY)
|
||||
return cls.getFromContext(ContextKey.IDENTITY)
|
||||
} catch (e) {
|
||||
// do nothing - identity is not in context
|
||||
}
|
||||
}
|
||||
|
||||
export const updateTenantId = (tenantId: string | null) => {
|
||||
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
|
||||
cls.setOnContext(ContextKey.TENANT_ID, tenantId)
|
||||
if (env.USE_COUCH) {
|
||||
setGlobalDB(tenantId)
|
||||
}
|
||||
|
@ -176,7 +176,7 @@ export const updateAppId = async (appId: string) => {
|
|||
try {
|
||||
// have to close first, before removing the databases from context
|
||||
await closeAppDBs()
|
||||
cls.setOnContext(ContextKeys.APP_ID, appId)
|
||||
cls.setOnContext(ContextKey.APP_ID, appId)
|
||||
} catch (err) {
|
||||
if (env.isTest()) {
|
||||
TEST_APP_ID = appId
|
||||
|
@ -189,12 +189,12 @@ export const updateAppId = async (appId: string) => {
|
|||
export const setGlobalDB = (tenantId: string | null) => {
|
||||
const dbName = baseGlobalDBName(tenantId)
|
||||
const db = dangerousGetDB(dbName)
|
||||
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
|
||||
cls.setOnContext(ContextKey.GLOBAL_DB, db)
|
||||
return db
|
||||
}
|
||||
|
||||
export const getGlobalDB = () => {
|
||||
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
|
||||
const db = cls.getFromContext(ContextKey.GLOBAL_DB)
|
||||
if (!db) {
|
||||
throw new Error("Global DB not found")
|
||||
}
|
||||
|
@ -202,7 +202,7 @@ export const getGlobalDB = () => {
|
|||
}
|
||||
|
||||
export const isTenantIdSet = () => {
|
||||
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
|
||||
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
|
||||
return !!tenantId
|
||||
}
|
||||
|
||||
|
@ -210,7 +210,7 @@ export const getTenantId = () => {
|
|||
if (!isMultiTenant()) {
|
||||
return DEFAULT_TENANT_ID
|
||||
}
|
||||
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
|
||||
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
|
||||
if (!tenantId) {
|
||||
throw new Error("Tenant id not found")
|
||||
}
|
||||
|
@ -218,7 +218,7 @@ export const getTenantId = () => {
|
|||
}
|
||||
|
||||
export const getAppId = () => {
|
||||
const foundId = cls.getFromContext(ContextKeys.APP_ID)
|
||||
const foundId = cls.getFromContext(ContextKey.APP_ID)
|
||||
if (!foundId && env.isTest() && TEST_APP_ID) {
|
||||
return TEST_APP_ID
|
||||
} else {
|
||||
|
@ -231,7 +231,7 @@ export const getAppId = () => {
|
|||
* contained, dev or prod.
|
||||
*/
|
||||
export const getAppDB = (opts?: any) => {
|
||||
return getContextDB(ContextKeys.CURRENT_DB, opts)
|
||||
return getContextDB(ContextKey.CURRENT_DB, opts)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -239,7 +239,7 @@ export const getAppDB = (opts?: any) => {
|
|||
* contained a development app ID, this will open the prod one.
|
||||
*/
|
||||
export const getProdAppDB = (opts?: any) => {
|
||||
return getContextDB(ContextKeys.PROD_DB, opts)
|
||||
return getContextDB(ContextKey.PROD_DB, opts)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -247,5 +247,5 @@ export const getProdAppDB = (opts?: any) => {
|
|||
* contained a prod app ID, this will open the dev one.
|
||||
*/
|
||||
export const getDevAppDB = (opts?: any) => {
|
||||
return getContextDB(ContextKeys.DEV_DB, opts)
|
||||
return getContextDB(ContextKey.DEV_DB, opts)
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@ import {
|
|||
} from "./index"
|
||||
import cls from "./FunctionContext"
|
||||
import { IdentityContext } from "@budibase/types"
|
||||
import { ContextKeys } from "./constants"
|
||||
import { ContextKey } from "./constants"
|
||||
import { dangerousGetDB, closeDB } from "../db"
|
||||
import { isEqual } from "lodash"
|
||||
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
|
||||
|
@ -47,17 +47,13 @@ export const setAppTenantId = (appId: string) => {
|
|||
}
|
||||
|
||||
export const setIdentity = (identity: IdentityContext | null) => {
|
||||
cls.setOnContext(ContextKeys.IDENTITY, identity)
|
||||
cls.setOnContext(ContextKey.IDENTITY, identity)
|
||||
}
|
||||
|
||||
// this function makes sure the PouchDB objects are closed and
|
||||
// fully deleted when finished - this protects against memory leaks
|
||||
export async function closeAppDBs() {
|
||||
const dbKeys = [
|
||||
ContextKeys.CURRENT_DB,
|
||||
ContextKeys.PROD_DB,
|
||||
ContextKeys.DEV_DB,
|
||||
]
|
||||
const dbKeys = [ContextKey.CURRENT_DB, ContextKey.PROD_DB, ContextKey.DEV_DB]
|
||||
for (let dbKey of dbKeys) {
|
||||
const db = cls.getFromContext(dbKey)
|
||||
if (!db) {
|
||||
|
@ -68,16 +64,16 @@ export async function closeAppDBs() {
|
|||
cls.setOnContext(dbKey, null)
|
||||
}
|
||||
// clear the app ID now that the databases are closed
|
||||
if (cls.getFromContext(ContextKeys.APP_ID)) {
|
||||
cls.setOnContext(ContextKeys.APP_ID, null)
|
||||
if (cls.getFromContext(ContextKey.APP_ID)) {
|
||||
cls.setOnContext(ContextKey.APP_ID, null)
|
||||
}
|
||||
if (cls.getFromContext(ContextKeys.DB_OPTS)) {
|
||||
cls.setOnContext(ContextKeys.DB_OPTS, null)
|
||||
if (cls.getFromContext(ContextKey.DB_OPTS)) {
|
||||
cls.setOnContext(ContextKey.DB_OPTS, null)
|
||||
}
|
||||
}
|
||||
|
||||
export function getContextDB(key: string, opts: any) {
|
||||
const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
|
||||
const dbOptsKey = `${key}${ContextKey.DB_OPTS}`
|
||||
let storedOpts = cls.getFromContext(dbOptsKey)
|
||||
let db = cls.getFromContext(key)
|
||||
if (db && isEqual(opts, storedOpts)) {
|
||||
|
@ -88,13 +84,13 @@ export function getContextDB(key: string, opts: any) {
|
|||
let toUseAppId
|
||||
|
||||
switch (key) {
|
||||
case ContextKeys.CURRENT_DB:
|
||||
case ContextKey.CURRENT_DB:
|
||||
toUseAppId = appId
|
||||
break
|
||||
case ContextKeys.PROD_DB:
|
||||
case ContextKey.PROD_DB:
|
||||
toUseAppId = getProdAppID(appId)
|
||||
break
|
||||
case ContextKeys.DEV_DB:
|
||||
case ContextKey.DEV_DB:
|
||||
toUseAppId = getDevelopmentAppID(appId)
|
||||
break
|
||||
}
|
||||
|
|
|
@ -4,13 +4,13 @@ export const UNICODE_MAX = "\ufff0"
|
|||
/**
|
||||
* Can be used to create a few different forms of querying a view.
|
||||
*/
|
||||
export enum AutomationViewModes {
|
||||
export enum AutomationViewMode {
|
||||
ALL = "all",
|
||||
AUTOMATION = "automation",
|
||||
STATUS = "status",
|
||||
}
|
||||
|
||||
export enum ViewNames {
|
||||
export enum ViewName {
|
||||
USER_BY_APP = "by_app",
|
||||
USER_BY_EMAIL = "by_email2",
|
||||
BY_API_KEY = "by_api_key",
|
||||
|
@ -21,13 +21,13 @@ export enum ViewNames {
|
|||
}
|
||||
|
||||
export const DeprecatedViews = {
|
||||
[ViewNames.USER_BY_EMAIL]: [
|
||||
[ViewName.USER_BY_EMAIL]: [
|
||||
// removed due to inaccuracy in view doc filter logic
|
||||
"by_email",
|
||||
],
|
||||
}
|
||||
|
||||
export enum DocumentTypes {
|
||||
export enum DocumentType {
|
||||
USER = "us",
|
||||
GROUP = "gr",
|
||||
WORKSPACE = "workspace",
|
||||
|
@ -62,6 +62,6 @@ export const StaticDatabases = {
|
|||
},
|
||||
}
|
||||
|
||||
export const APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR
|
||||
export const APP_DEV = exports.DocumentTypes.APP_DEV + exports.SEPARATOR
|
||||
export const APP_PREFIX = DocumentType.APP + SEPARATOR
|
||||
export const APP_DEV = DocumentType.APP_DEV + SEPARATOR
|
||||
export const APP_DEV_PREFIX = APP_DEV
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { newid } from "../hashing"
|
||||
import { DEFAULT_TENANT_ID, Configs } from "../constants"
|
||||
import env from "../environment"
|
||||
import { SEPARATOR, DocumentTypes, UNICODE_MAX, ViewNames } from "./constants"
|
||||
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
|
||||
import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
|
||||
import fetch from "node-fetch"
|
||||
import { doWithDB, allDbs } from "./index"
|
||||
|
@ -58,7 +58,7 @@ export function getDocParams(
|
|||
/**
|
||||
* Retrieve the correct index for a view based on default design DB.
|
||||
*/
|
||||
export function getQueryIndex(viewName: ViewNames) {
|
||||
export function getQueryIndex(viewName: ViewName) {
|
||||
return `database/${viewName}`
|
||||
}
|
||||
|
||||
|
@ -67,7 +67,7 @@ export function getQueryIndex(viewName: ViewNames) {
|
|||
* @returns {string} The new workspace ID which the workspace doc can be stored under.
|
||||
*/
|
||||
export function generateWorkspaceID() {
|
||||
return `${DocumentTypes.WORKSPACE}${SEPARATOR}${newid()}`
|
||||
return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -76,8 +76,8 @@ export function generateWorkspaceID() {
|
|||
export function getWorkspaceParams(id = "", otherProps = {}) {
|
||||
return {
|
||||
...otherProps,
|
||||
startkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}`,
|
||||
endkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
|
||||
startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
|
||||
endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -86,7 +86,7 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
|
|||
* @returns {string} The new user ID which the user doc can be stored under.
|
||||
*/
|
||||
export function generateGlobalUserID(id?: any) {
|
||||
return `${DocumentTypes.USER}${SEPARATOR}${id || newid()}`
|
||||
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -102,8 +102,8 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
|
|||
// need to include this in case of pagination
|
||||
startkey: startkey
|
||||
? startkey
|
||||
: `${DocumentTypes.USER}${SEPARATOR}${globalId}`,
|
||||
endkey: `${DocumentTypes.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
|
||||
: `${DocumentType.USER}${SEPARATOR}${globalId}`,
|
||||
endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -121,7 +121,7 @@ export function getUsersByAppParams(appId: any, otherProps: any = {}) {
|
|||
* @param ownerId The owner/user of the template, this could be global or a workspace level.
|
||||
*/
|
||||
export function generateTemplateID(ownerId: any) {
|
||||
return `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
|
||||
return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
export function generateAppUserID(prodAppId: string, userId: string) {
|
||||
|
@ -143,7 +143,7 @@ export function getTemplateParams(
|
|||
if (templateId) {
|
||||
final = templateId
|
||||
} else {
|
||||
final = `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
|
||||
final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
|
||||
}
|
||||
return {
|
||||
...otherProps,
|
||||
|
@ -157,14 +157,14 @@ export function getTemplateParams(
|
|||
* @returns {string} The new role ID which the role doc can be stored under.
|
||||
*/
|
||||
export function generateRoleID(id: any) {
|
||||
return `${DocumentTypes.ROLE}${SEPARATOR}${id || newid()}`
|
||||
return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets parameters for retrieving a role, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
export function getRoleParams(roleId = null, otherProps = {}) {
|
||||
return getDocParams(DocumentTypes.ROLE, roleId, otherProps)
|
||||
return getDocParams(DocumentType.ROLE, roleId, otherProps)
|
||||
}
|
||||
|
||||
export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) {
|
||||
|
@ -211,9 +211,9 @@ export async function getAllDbs(opts = { efficient: false }) {
|
|||
await addDbs(couchUrl)
|
||||
} else {
|
||||
// get prod apps
|
||||
await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP, tenantId))
|
||||
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId))
|
||||
// get dev apps
|
||||
await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP_DEV, tenantId))
|
||||
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId))
|
||||
// add global db name
|
||||
dbs.push(getGlobalDBName(tenantId))
|
||||
}
|
||||
|
@ -233,14 +233,18 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
|
|||
}
|
||||
let dbs = await getAllDbs({ efficient })
|
||||
const appDbNames = dbs.filter((dbName: any) => {
|
||||
if (env.isTest() && !dbName) {
|
||||
return false
|
||||
}
|
||||
|
||||
const split = dbName.split(SEPARATOR)
|
||||
// it is an app, check the tenantId
|
||||
if (split[0] === DocumentTypes.APP) {
|
||||
if (split[0] === DocumentType.APP) {
|
||||
// tenantId is always right before the UUID
|
||||
const possibleTenantId = split[split.length - 2]
|
||||
|
||||
const noTenantId =
|
||||
split.length === 2 || possibleTenantId === DocumentTypes.DEV
|
||||
split.length === 2 || possibleTenantId === DocumentType.DEV
|
||||
|
||||
return (
|
||||
(tenantId === DEFAULT_TENANT_ID && noTenantId) ||
|
||||
|
@ -326,7 +330,7 @@ export async function dbExists(dbName: any) {
|
|||
export const generateConfigID = ({ type, workspace, user }: any) => {
|
||||
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
|
||||
|
||||
return `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`
|
||||
return `${DocumentType.CONFIG}${SEPARATOR}${scope}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -340,8 +344,8 @@ export const getConfigParams = (
|
|||
|
||||
return {
|
||||
...otherProps,
|
||||
startkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`,
|
||||
endkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
|
||||
startkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}`,
|
||||
endkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -350,7 +354,7 @@ export const getConfigParams = (
|
|||
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
|
||||
*/
|
||||
export const generateDevInfoID = (userId: any) => {
|
||||
return `${DocumentTypes.DEV_INFO}${SEPARATOR}${userId}`
|
||||
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
const {
|
||||
DocumentTypes,
|
||||
ViewNames,
|
||||
DocumentType,
|
||||
ViewName,
|
||||
DeprecatedViews,
|
||||
SEPARATOR,
|
||||
} = require("./utils")
|
||||
|
@ -44,14 +44,14 @@ exports.createNewUserEmailView = async () => {
|
|||
const view = {
|
||||
// if using variables in a map function need to inject them before use
|
||||
map: `function(doc) {
|
||||
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}")) {
|
||||
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
|
||||
emit(doc.email.toLowerCase(), doc._id)
|
||||
}
|
||||
}`,
|
||||
}
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[ViewNames.USER_BY_EMAIL]: view,
|
||||
[ViewName.USER_BY_EMAIL]: view,
|
||||
}
|
||||
await db.put(designDoc)
|
||||
}
|
||||
|
@ -68,7 +68,7 @@ exports.createUserAppView = async () => {
|
|||
const view = {
|
||||
// if using variables in a map function need to inject them before use
|
||||
map: `function(doc) {
|
||||
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}") && doc.roles) {
|
||||
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
|
||||
for (let prodAppId of Object.keys(doc.roles)) {
|
||||
let emitted = prodAppId + "${SEPARATOR}" + doc._id
|
||||
emit(emitted, null)
|
||||
|
@ -78,7 +78,7 @@ exports.createUserAppView = async () => {
|
|||
}
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[ViewNames.USER_BY_APP]: view,
|
||||
[ViewName.USER_BY_APP]: view,
|
||||
}
|
||||
await db.put(designDoc)
|
||||
}
|
||||
|
@ -93,14 +93,14 @@ exports.createApiKeyView = async () => {
|
|||
}
|
||||
const view = {
|
||||
map: `function(doc) {
|
||||
if (doc._id.startsWith("${DocumentTypes.DEV_INFO}") && doc.apiKey) {
|
||||
if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
|
||||
emit(doc.apiKey, doc.userId)
|
||||
}
|
||||
}`,
|
||||
}
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[ViewNames.BY_API_KEY]: view,
|
||||
[ViewName.BY_API_KEY]: view,
|
||||
}
|
||||
await db.put(designDoc)
|
||||
}
|
||||
|
@ -123,17 +123,17 @@ exports.createUserBuildersView = async () => {
|
|||
}
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[ViewNames.USER_BY_BUILDERS]: view,
|
||||
[ViewName.USER_BY_BUILDERS]: view,
|
||||
}
|
||||
await db.put(designDoc)
|
||||
}
|
||||
|
||||
exports.queryGlobalView = async (viewName, params, db = null) => {
|
||||
const CreateFuncByName = {
|
||||
[ViewNames.USER_BY_EMAIL]: exports.createNewUserEmailView,
|
||||
[ViewNames.BY_API_KEY]: exports.createApiKeyView,
|
||||
[ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView,
|
||||
[ViewNames.USER_BY_APP]: exports.createUserAppView,
|
||||
[ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView,
|
||||
[ViewName.BY_API_KEY]: exports.createApiKeyView,
|
||||
[ViewName.USER_BY_BUILDERS]: exports.createUserBuildersView,
|
||||
[ViewName.USER_BY_APP]: exports.createUserAppView,
|
||||
}
|
||||
// can pass DB in if working with something specific
|
||||
if (!db) {
|
||||
|
|
|
@ -1,27 +0,0 @@
|
|||
import { EventEmitter } from "events"
|
||||
import * as context from "../../context"
|
||||
import { Identity, Event } from "@budibase/types"
|
||||
|
||||
export interface EmittedEvent {
|
||||
tenantId: string
|
||||
identity: Identity
|
||||
appId: string | undefined
|
||||
properties: any
|
||||
}
|
||||
|
||||
class BBEventEmitter extends EventEmitter {
|
||||
emitEvent(event: Event, properties: any, identity: Identity) {
|
||||
const tenantId = context.getTenantId()
|
||||
const appId = context.getAppId()
|
||||
|
||||
const emittedEvent: EmittedEvent = {
|
||||
tenantId,
|
||||
identity,
|
||||
appId,
|
||||
properties,
|
||||
}
|
||||
this.emit(event, emittedEvent)
|
||||
}
|
||||
}
|
||||
|
||||
export const emitter = new BBEventEmitter()
|
|
@ -1 +0,0 @@
|
|||
export * from "./BBEventEmitter"
|
|
@ -2,41 +2,6 @@ import { Event } from "@budibase/types"
|
|||
import { processors } from "./processors"
|
||||
import * as identification from "./identification"
|
||||
import * as backfill from "./backfill"
|
||||
import { emitter, EmittedEvent } from "./emit"
|
||||
import * as context from "../context"
|
||||
import * as logging from "../logging"
|
||||
|
||||
const USE_EMITTER: any[] = [
|
||||
Event.SERVED_BUILDER,
|
||||
Event.SERVED_APP,
|
||||
Event.SERVED_APP_PREVIEW,
|
||||
]
|
||||
|
||||
for (let event of USE_EMITTER) {
|
||||
emitter.on(event, async (props: EmittedEvent) => {
|
||||
try {
|
||||
await context.doInTenant(props.tenantId, async () => {
|
||||
if (props.appId) {
|
||||
await context.doInAppContext(props.appId, async () => {
|
||||
await processors.processEvent(
|
||||
event as Event,
|
||||
props.identity,
|
||||
props.properties
|
||||
)
|
||||
})
|
||||
} else {
|
||||
await processors.processEvent(
|
||||
event as Event,
|
||||
props.identity,
|
||||
props.properties
|
||||
)
|
||||
}
|
||||
})
|
||||
} catch (e) {
|
||||
logging.logAlert(`Unable to process async event ${event}`, e)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export const publishEvent = async (
|
||||
event: Event,
|
||||
|
@ -46,11 +11,6 @@ export const publishEvent = async (
|
|||
// in future this should use async events via a distributed queue.
|
||||
const identity = await identification.getCurrentIdentity()
|
||||
|
||||
if (USE_EMITTER.includes(event)) {
|
||||
emitter.emitEvent(event, properties, identity)
|
||||
return
|
||||
}
|
||||
|
||||
const backfilling = await backfill.isBackfillingEvent(event)
|
||||
// no backfill - send the event and exit
|
||||
if (!backfilling) {
|
||||
|
|
|
@@ -7,22 +7,26 @@
  AppServedEvent,
} from "@budibase/types"

export async function servedBuilder() {
  const properties: BuilderServedEvent = {}
export async function servedBuilder(timezone: string) {
  const properties: BuilderServedEvent = {
    timezone,
  }
  await publishEvent(Event.SERVED_BUILDER, properties)
}

export async function servedApp(app: App) {
export async function servedApp(app: App, timezone: string) {
  const properties: AppServedEvent = {
    appVersion: app.version,
    timezone,
  }
  await publishEvent(Event.SERVED_APP, properties)
}

export async function servedAppPreview(app: App) {
export async function servedAppPreview(app: App, timezone: string) {
  const properties: AppPreviewServedEvent = {
    appId: app.appId,
    appVersion: app.version,
    timezone,
  }
  await publishEvent(Event.SERVED_APP_PREVIEW, properties)
}
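A hedged sketch of how a caller adapts to the new timezone parameter; the call site and the way the timezone is resolved are assumptions and are not shown in this diff.

// Hypothetical caller (not in this diff) passing a timezone to the new signatures.
import { App } from "@budibase/types"
import { servedBuilder, servedApp, servedAppPreview } from "./serve" // relative path assumed

export async function recordServeEvents(app: App) {
  // timezone source assumed; it could equally come from the client request
  const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone
  await servedBuilder(timezone)
  await servedApp(app, timezone)
  await servedAppPreview(app, timezone)
}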
@ -1,4 +1,5 @@
|
|||
import errors from "./errors"
|
||||
|
||||
const errorClasses = errors.errors
|
||||
import * as events from "./events"
|
||||
import * as migrations from "./migrations"
|
||||
|
|
|
@ -4,7 +4,7 @@ import { getUser } from "../cache/user"
|
|||
import { getSession, updateSessionTTL } from "../security/sessions"
|
||||
import { buildMatcherRegex, matches } from "./matchers"
|
||||
import { SEPARATOR } from "../db/constants"
|
||||
import { ViewNames } from "../db/utils"
|
||||
import { ViewName } from "../db/utils"
|
||||
import { queryGlobalView } from "../db/views"
|
||||
import { getGlobalDB, doInTenant } from "../tenancy"
|
||||
import { decrypt } from "../security/encryption"
|
||||
|
@ -43,7 +43,7 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
|
|||
const db = getGlobalDB()
|
||||
// api key is encrypted in the database
|
||||
const userId = await queryGlobalView(
|
||||
ViewNames.BY_API_KEY,
|
||||
ViewName.BY_API_KEY,
|
||||
{
|
||||
key: apiKey,
|
||||
},
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { DEFAULT_TENANT_ID } from "../constants"
|
||||
import { doWithDB } from "../db"
|
||||
import { DocumentTypes, StaticDatabases } from "../db/constants"
|
||||
import { DocumentType, StaticDatabases } from "../db/constants"
|
||||
import { getAllApps } from "../db/utils"
|
||||
import environment from "../environment"
|
||||
import {
|
||||
|
@ -21,10 +21,10 @@ import {
|
|||
export const getMigrationsDoc = async (db: any) => {
|
||||
// get the migrations doc
|
||||
try {
|
||||
return await db.get(DocumentTypes.MIGRATIONS)
|
||||
return await db.get(DocumentType.MIGRATIONS)
|
||||
} catch (err: any) {
|
||||
if (err.status && err.status === 404) {
|
||||
return { _id: DocumentTypes.MIGRATIONS }
|
||||
return { _id: DocumentType.MIGRATIONS }
|
||||
} else {
|
||||
console.error(err)
|
||||
throw err
|
||||
|
|
|
@ -3,7 +3,7 @@ const { BUILTIN_PERMISSION_IDS, PermissionLevels } = require("./permissions")
|
|||
const {
|
||||
generateRoleID,
|
||||
getRoleParams,
|
||||
DocumentTypes,
|
||||
DocumentType,
|
||||
SEPARATOR,
|
||||
} = require("../db/utils")
|
||||
const { getAppDB } = require("../context")
|
||||
|
@ -338,7 +338,7 @@ class AccessController {
|
|||
* Adds the "role_" for builtin role IDs which are to be written to the DB (for permissions).
|
||||
*/
|
||||
exports.getDBRoleID = roleId => {
|
||||
if (roleId.startsWith(DocumentTypes.ROLE)) {
|
||||
if (roleId.startsWith(DocumentType.ROLE)) {
|
||||
return roleId
|
||||
}
|
||||
return generateRoleID(roleId)
|
||||
|
@ -349,8 +349,8 @@ exports.getDBRoleID = roleId => {
|
|||
*/
|
||||
exports.getExternalRoleID = roleId => {
|
||||
// for built in roles we want to remove the DB role ID element (role_)
|
||||
if (roleId.startsWith(DocumentTypes.ROLE) && isBuiltin(roleId)) {
|
||||
return roleId.split(`${DocumentTypes.ROLE}${SEPARATOR}`)[1]
|
||||
if (roleId.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) {
|
||||
return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]
|
||||
}
|
||||
return roleId
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
const {
|
||||
ViewNames,
|
||||
ViewName,
|
||||
getUsersByAppParams,
|
||||
getProdAppID,
|
||||
generateAppUserID,
|
||||
|
@ -18,7 +18,7 @@ exports.getGlobalUserByEmail = async email => {
|
|||
throw "Must supply an email address to view"
|
||||
}
|
||||
|
||||
return await queryGlobalView(ViewNames.USER_BY_EMAIL, {
|
||||
return await queryGlobalView(ViewName.USER_BY_EMAIL, {
|
||||
key: email.toLowerCase(),
|
||||
include_docs: true,
|
||||
})
|
||||
|
@ -32,7 +32,7 @@ exports.searchGlobalUsersByApp = async (appId, opts) => {
|
|||
include_docs: true,
|
||||
})
|
||||
params.startkey = opts && opts.startkey ? opts.startkey : params.startkey
|
||||
let response = await queryGlobalView(ViewNames.USER_BY_APP, params)
|
||||
let response = await queryGlobalView(ViewName.USER_BY_APP, params)
|
||||
if (!response) {
|
||||
response = []
|
||||
}
|
||||
|
@ -56,7 +56,7 @@ exports.searchGlobalUsersByEmail = async (email, opts) => {
|
|||
const lcEmail = email.toLowerCase()
|
||||
// handle if passing up startkey for pagination
|
||||
const startkey = opts && opts.startkey ? opts.startkey : lcEmail
|
||||
let response = await queryGlobalView(ViewNames.USER_BY_EMAIL, {
|
||||
let response = await queryGlobalView(ViewName.USER_BY_EMAIL, {
|
||||
...opts,
|
||||
startkey,
|
||||
endkey: `${lcEmail}${UNICODE_MAX}`,
|
||||
|
|
|
@ -1,9 +1,4 @@
|
|||
const {
|
||||
DocumentTypes,
|
||||
SEPARATOR,
|
||||
ViewNames,
|
||||
getAllApps,
|
||||
} = require("./db/utils")
|
||||
const { DocumentType, SEPARATOR, ViewName, getAllApps } = require("./db/utils")
|
||||
const jwt = require("jsonwebtoken")
|
||||
const { options } = require("./middleware/passport/jwt")
|
||||
const { queryGlobalView } = require("./db/views")
|
||||
|
@ -17,7 +12,7 @@ const {
|
|||
const events = require("./events")
|
||||
const tenancy = require("./tenancy")
|
||||
|
||||
const APP_PREFIX = DocumentTypes.APP + SEPARATOR
|
||||
const APP_PREFIX = DocumentType.APP + SEPARATOR
|
||||
const PROD_APP_PREFIX = "/app/"
|
||||
|
||||
function confirmAppId(possibleAppId) {
|
||||
|
@ -154,7 +149,7 @@ exports.isClient = ctx => {
|
|||
}
|
||||
|
||||
const getBuilders = async () => {
|
||||
const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, {
|
||||
const builders = await queryGlobalView(ViewName.USER_BY_BUILDERS, {
|
||||
include_docs: false,
|
||||
})
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"name": "@budibase/bbui",
|
||||
"description": "A UI solution used in the different Budibase projects.",
|
||||
"version": "1.2.28-alpha.0",
|
||||
"version": "1.2.38",
|
||||
"license": "MPL-2.0",
|
||||
"svelte": "src/index.js",
|
||||
"module": "dist/bbui.es.js",
|
||||
|
@ -38,7 +38,7 @@
|
|||
],
|
||||
"dependencies": {
|
||||
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
|
||||
"@budibase/string-templates": "1.2.28-alpha.0",
|
||||
"@budibase/string-templates": "^1.2.38",
|
||||
"@spectrum-css/actionbutton": "^1.0.1",
|
||||
"@spectrum-css/actiongroup": "^1.0.1",
|
||||
"@spectrum-css/avatar": "^3.0.2",
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
export let appendTo = undefined
|
||||
export let timeOnly = false
|
||||
export let ignoreTimezones = false
|
||||
export let time24hr = false
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
const flatpickrId = `${uuid()}-wrapper`
|
||||
|
@ -37,6 +38,7 @@
|
|||
enableTime: timeOnly || enableTime || false,
|
||||
noCalendar: timeOnly || false,
|
||||
altInput: true,
|
||||
time_24hr: time24hr || false,
|
||||
altFormat: timeOnly ? "H:i" : enableTime ? "F j Y, H:i" : "F j, Y",
|
||||
wrap: true,
|
||||
appendTo,
|
||||
|
@ -49,6 +51,12 @@
|
|||
},
|
||||
}
|
||||
|
||||
$: redrawOptions = {
|
||||
timeOnly,
|
||||
enableTime,
|
||||
time24hr,
|
||||
}
|
||||
|
||||
const handleChange = event => {
|
||||
const [dates] = event.detail
|
||||
const noTimezone = enableTime && !timeOnly && ignoreTimezones
|
||||
|
@ -142,7 +150,7 @@
|
|||
}
|
||||
</script>
|
||||
|
||||
{#key timeOnly}
|
||||
{#key redrawOptions}
|
||||
<Flatpickr
|
||||
bind:flatpickr
|
||||
value={parseDate(value)}
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
export let error = null
|
||||
export let enableTime = true
|
||||
export let timeOnly = false
|
||||
export let time24hr = false
|
||||
export let placeholder = null
|
||||
export let appendTo = undefined
|
||||
export let ignoreTimezones = false
|
||||
|
@ -30,6 +31,7 @@
|
|||
{placeholder}
|
||||
{enableTime}
|
||||
{timeOnly}
|
||||
{time24hr}
|
||||
{appendTo}
|
||||
{ignoreTimezones}
|
||||
on:change={onChange}
|
||||
|
|
|
@ -23,7 +23,7 @@ filterTests(["smoke", "all"], () => {
|
|||
cy.get(interact.SPECTRUM_ICON).click({ force: true })
|
||||
})
|
||||
cy.get(interact.SPECTRUM_MENU).within(() => {
|
||||
cy.get(interact.SPECTRUM_MENU_ITEM).contains("Force Password Reset").click({ force: true })
|
||||
cy.get(interact.SPECTRUM_MENU_ITEM).contains("Force password reset").click({ force: true })
|
||||
})
|
||||
|
||||
cy.get(interact.SPECTRUM_DIALOG_GRID)
|
||||
|
@ -41,10 +41,25 @@ filterTests(["smoke", "all"], () => {
|
|||
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT).eq(i).type("test")
|
||||
}
|
||||
cy.get(interact.SPECTRUM_BUTTON).contains("Reset your password").click({ force: true })
|
||||
//cy.logoutNoAppGrid()
|
||||
})
|
||||
|
||||
it("should verify Standard Portal", () => {
|
||||
// Development access should be disabled (Admin access is already disabled)
|
||||
cy.login()
|
||||
cy.setUserRole("bbuser", "App User")
|
||||
bbUserLogin()
|
||||
|
||||
// Verify Standard Portal
|
||||
cy.get(interact.SPECTRUM_SIDENAV).should('not.exist') // No config sections
|
||||
cy.get(interact.CREATE_APP_BUTTON).should('not.exist') // No create app button
|
||||
cy.get(".app").should('not.exist') // No apps -> no roles assigned to user
|
||||
cy.get(interact.CONTAINER).should('contain', bbUserEmail) // Message containing users email
|
||||
|
||||
cy.logoutNoAppGrid()
|
||||
})
|
||||
|
||||
xit("should verify Admin Portal", () => {
|
||||
it("should verify Admin Portal", () => {
|
||||
cy.login()
|
||||
// Configure user role
|
||||
cy.setUserRole("bbuser", "Admin")
|
||||
|
@ -86,21 +101,6 @@ filterTests(["smoke", "all"], () => {
|
|||
cy.logOut()
|
||||
})
|
||||
|
||||
it("should verify Standard Portal", () => {
|
||||
// Development access should be disabled (Admin access is already disabled)
|
||||
cy.login()
|
||||
cy.setUserRole("bbuser", "App User")
|
||||
bbUserLogin()
|
||||
|
||||
// Verify Standard Portal
|
||||
cy.get(interact.SPECTRUM_SIDENAV).should('not.exist') // No config sections
|
||||
cy.get(interact.CREATE_APP_BUTTON).should('not.exist') // No create app button
|
||||
cy.get(".app").should('not.exist') // No apps -> no roles assigned to user
|
||||
cy.get(interact.CONTAINER).should('contain', bbUserEmail) // Message containing users email
|
||||
|
||||
cy.logoutNoAppGrid()
|
||||
})
|
||||
|
||||
const bbUserLogin = () => {
|
||||
// Login as bbuser
|
||||
cy.logOut()
|
||||
|
|
|
@ -17,7 +17,7 @@ filterTests(["smoke", "all"], () => {
|
|||
|
||||
it("should confirm App User role for a New User", () => {
|
||||
cy.contains("bbuser").click()
|
||||
cy.get(".spectrum-Form-itemField").eq(2).should('contain', 'App User')
|
||||
cy.get(".spectrum-Form-itemField").eq(3).should('contain', 'App User')
|
||||
|
||||
// User should not have app access
|
||||
cy.get(interact.LIST_ITEMS, { timeout: 500 }).should("contain", "No apps")
|
||||
|
@ -166,12 +166,12 @@ filterTests(["smoke", "all"], () => {
|
|||
|
||||
it("Should edit user details within user details page", () => {
|
||||
// Add First name
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(0).within(() => {
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(1).within(() => {
|
||||
cy.wait(500)
|
||||
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT, { timeout: 1000 }).wait(500).clear().click().type("bb")
|
||||
})
|
||||
// Add Last name
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(1).within(() => {
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(2).within(() => {
|
||||
cy.wait(500)
|
||||
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT, { timeout: 1000 }).click().wait(500).clear().type("test")
|
||||
})
|
||||
|
@ -180,10 +180,10 @@ filterTests(["smoke", "all"], () => {
|
|||
cy.reload()
|
||||
|
||||
// Confirm details have been saved
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(0).within(() => {
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(1).within(() => {
|
||||
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT).should('have.value', "bb")
|
||||
})
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(1).within(() => {
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(2).within(() => {
|
||||
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT, { timeout: 1000 }).should('have.value', "test")
|
||||
})
|
||||
})
|
||||
|
@ -193,13 +193,14 @@ filterTests(["smoke", "all"], () => {
|
|||
cy.get(interact.SPECTRUM_ICON).click({ force: true })
|
||||
})
|
||||
cy.get(interact.SPECTRUM_MENU).within(() => {
|
||||
cy.get(interact.SPECTRUM_MENU_ITEM).contains("Force Password Reset").click({ force: true })
|
||||
cy.get(interact.SPECTRUM_MENU_ITEM).contains("Force password reset").click({ force: true })
|
||||
})
|
||||
|
||||
// Reset password modal
|
||||
cy.get(interact.SPECTRUM_DIALOG_GRID)
|
||||
.find(interact.SPECTRUM_TEXTFIELD_INPUT).invoke('val').as('pwd')
|
||||
cy.get(interact.SPECTRUM_BUTTON).contains("Reset password").click({ force: true })
|
||||
cy.get(interact.SPECTRUM_BUTTON).contains("Reset password").should('not.exist')
|
||||
|
||||
// Logout, then login with new password
|
||||
cy.logOut()
|
||||
|
@ -214,6 +215,7 @@ filterTests(["smoke", "all"], () => {
|
|||
cy.get(interact.SPECTRUM_BUTTON).contains("Reset your password").click({ force: true })
|
||||
|
||||
// Confirm user logged in after password change
|
||||
cy.login("bbuser@test.com", "test")
|
||||
cy.get(".avatar > .icon").click({ force: true })
|
||||
|
||||
cy.get(".spectrum-Menu-item").contains("Update user information").click({ force: true })
|
||||
|
|
|
@ -19,10 +19,10 @@ filterTests(["smoke", "all"], () => {
|
|||
cy.contains("Users").click()
|
||||
cy.contains("test@test.com").click()
|
||||
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(0).within(() => {
|
||||
cy.get(interact.FIELD, { timeout: 1000 }).eq(1).within(() => {
|
||||
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT).should('have.value', fname)
|
||||
})
|
||||
cy.get(interact.FIELD).eq(1).within(() => {
|
||||
cy.get(interact.FIELD).eq(2).within(() => {
|
||||
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT).should('have.value', lname)
|
||||
})
|
||||
})
|
||||
|
@ -72,7 +72,7 @@ filterTests(["smoke", "all"], () => {
|
|||
})
|
||||
|
||||
// Logout & in with new password
|
||||
cy.logOut()
|
||||
//cy.logOut()
|
||||
cy.login("test@test.com", "newpwd")
|
||||
})
|
||||
|
||||
|
@ -90,7 +90,6 @@ filterTests(["smoke", "all"], () => {
|
|||
cy.get(interact.SPECTRUM_MENU_ITEM).contains("Open developer mode").click({ force: true })
|
||||
cy.get(interact.SPECTRUM_SIDENAV).should('exist') // config sections available
|
||||
cy.get(interact.CREATE_APP_BUTTON).should('exist') // create app button available
|
||||
cy.get(interact.APP_TABLE).should('exist') // App table available
|
||||
})
|
||||
|
||||
after(() => {
|
||||
|
|
|
@ -94,6 +94,7 @@ filterTests(['smoke', 'all'], () => {
|
|||
})
|
||||
|
||||
it("should create the first application from scratch with a default name", () => {
|
||||
cy.updateUserInformation("", "")
|
||||
cy.createApp("", false)
|
||||
cy.applicationInAppTable("My app")
|
||||
cy.deleteApp("My app")
|
||||
|
|
|
@ -48,7 +48,7 @@ filterTests(["smoke", "all"], () => {
|
|||
|
||||
it("deletes a row", () => {
|
||||
cy.get(interact.SPECTRUM_CHECKBOX_INPUT).check({ force: true })
|
||||
cy.contains("Delete 1 row(s)").click()
|
||||
cy.contains("Delete 1 row").click()
|
||||
cy.get(interact.SPECTRUM_MODAL).contains("Delete").click()
|
||||
cy.contains("RoverUpdated").should("not.exist")
|
||||
})
|
||||
|
|
|
@ -140,14 +140,14 @@ Cypress.Commands.add("setUserRole", (user, role) => {
|
|||
// Set Role
|
||||
cy.wait(500)
|
||||
cy.get(".spectrum-Form-itemField")
|
||||
.eq(2)
|
||||
.eq(3)
|
||||
.within(() => {
|
||||
cy.get(".spectrum-Picker-label").click({ force: true })
|
||||
})
|
||||
cy.get(".spectrum-Menu").within(() => {
|
||||
cy.get(".spectrum-Menu-itemLabel").contains(role).click({ force: true })
|
||||
})
|
||||
cy.get(".spectrum-Form-itemField").eq(2).should("contain", role)
|
||||
cy.get(".spectrum-Form-itemField").eq(3).should("contain", role)
|
||||
})
|
||||
|
||||
// APPLICATIONS
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/builder",
|
||||
"version": "1.2.28-alpha.0",
|
||||
"version": "1.2.38",
|
||||
"license": "GPL-3.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
|
@ -69,10 +69,10 @@
|
|||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "1.2.28-alpha.0",
|
||||
"@budibase/client": "1.2.28-alpha.0",
|
||||
"@budibase/frontend-core": "1.2.28-alpha.0",
|
||||
"@budibase/string-templates": "1.2.28-alpha.0",
|
||||
"@budibase/bbui": "^1.2.38",
|
||||
"@budibase/client": "^1.2.38",
|
||||
"@budibase/frontend-core": "^1.2.38",
|
||||
"@budibase/string-templates": "^1.2.38",
|
||||
"@sentry/browser": "5.19.1",
|
||||
"@spectrum-css/page": "^3.0.1",
|
||||
"@spectrum-css/vars": "^3.0.1",
|
||||
|
|
|
@ -79,7 +79,7 @@
|
|||
automationStore.actions.addTestDataToAutomation({
|
||||
body: {
|
||||
[key]: e.detail,
|
||||
...$automationStore.selectedAutomation.automation.testData.body,
|
||||
...$automationStore.selectedAutomation.automation.testData?.body,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
import { datasources, integrations, queries } from "stores/backend"
|
||||
import BindingBuilder from "components/integration/QueryBindingBuilder.svelte"
|
||||
import IntegrationQueryEditor from "components/integration/index.svelte"
|
||||
import { BUDIBASE_DATASOURCE_ID } from "constants/backend"
|
||||
|
||||
export let parameters
|
||||
export let bindings = []
|
||||
|
@ -11,6 +12,10 @@
|
|||
$: datasource = $datasources.list.find(
|
||||
ds => ds._id === parameters.datasourceId
|
||||
)
|
||||
// Executequery must exclude budibase datasource
|
||||
$: executeQueryDatasources = $datasources.list.filter(
|
||||
x => x._id !== BUDIBASE_DATASOURCE_ID
|
||||
)
|
||||
|
||||
function fetchQueryDefinition(query) {
|
||||
const source = $datasources.list.find(
|
||||
|
@ -24,7 +29,7 @@
|
|||
<Select
|
||||
label="Datasource"
|
||||
bind:value={parameters.datasourceId}
|
||||
options={$datasources.list}
|
||||
options={executeQueryDatasources}
|
||||
getOptionLabel={source => source.name}
|
||||
getOptionValue={source => source._id}
|
||||
/>
|
||||
|
|
|
@ -163,6 +163,8 @@ export const SWITCHABLE_TYPES = [
|
|||
...ALLOWABLE_NUMBER_TYPES,
|
||||
]
|
||||
|
||||
export const BUDIBASE_DATASOURCE_ID = "bb_internal"
|
||||
|
||||
export const IntegrationTypes = {
|
||||
POSTGRES: "POSTGRES",
|
||||
MONGODB: "MONGODB",
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
import { admin, auth } from "stores/portal"
|
||||
import { onMount } from "svelte"
|
||||
import { CookieUtils, Constants } from "@budibase/frontend-core"
|
||||
import { API } from "api"
|
||||
|
||||
let loaded = false
|
||||
|
||||
|
@ -53,6 +54,9 @@
|
|||
await auth.setOrganisation(urlTenantId)
|
||||
}
|
||||
}
|
||||
async function analyticsPing() {
|
||||
await API.analyticsPing({ source: "builder" })
|
||||
}
|
||||
|
||||
onMount(async () => {
|
||||
try {
|
||||
|
@ -73,6 +77,9 @@
|
|||
// being logged in
|
||||
}
|
||||
loaded = true
|
||||
|
||||
// lastly
|
||||
await analyticsPing()
|
||||
})
|
||||
|
||||
$: {
|
||||
|
|
|
@@ -55,13 +55,16 @@
  let saveId, url
  let response, schema, enabledHeaders
  let authConfigId
  let dynamicVariables, addVariableModal, varBinding
  let dynamicVariables, addVariableModal, varBinding, globalDynamicBindings
  let restBindings = getRestBindings()

  $: staticVariables = datasource?.config?.staticVariables || {}

  $: customRequestBindings = toBindingsArray(requestBindings, "Binding")
  $: dynamicRequestBindings = toBindingsArray(dynamicVariables, "Dynamic")
  $: globalDynamicRequestBindings = toBindingsArray(
    globalDynamicBindings,
    "Dynamic"
  )
  $: dataSourceStaticBindings = toBindingsArray(
    staticVariables,
    "Datasource.Static"
@@ -70,7 +73,7 @@
  $: mergedBindings = [
    ...restBindings,
    ...customRequestBindings,
    ...dynamicRequestBindings,
    ...globalDynamicRequestBindings,
    ...dataSourceStaticBindings,
  ]

@@ -231,11 +234,11 @@
  ]

  // convert dynamic variables list to simple key/val object
  const getDynamicVariables = (datasource, queryId) => {
  const getDynamicVariables = (datasource, queryId, matchFn) => {
    const variablesList = datasource?.config?.dynamicVariables
    if (variablesList && variablesList.length > 0) {
      const filtered = queryId
        ? variablesList.filter(variable => variable.queryId === queryId)
        ? variablesList.filter(variable => matchFn(variable, queryId))
        : variablesList
      return filtered.reduce(
        (acc, next) => ({ ...acc, [next.name]: next.value }),
@@ -367,12 +370,21 @@
    if (query && !query.fields.pagination) {
      query.fields.pagination = {}
    }
    dynamicVariables = getDynamicVariables(datasource, query._id)
    dynamicVariables = getDynamicVariables(
      datasource,
      query._id,
      (variable, queryId) => variable.queryId === queryId
    )
    globalDynamicBindings = getDynamicVariables(
      datasource,
      query._id,
      (variable, queryId) => variable.queryId !== queryId
    )

    prettifyQueryRequestBody(
      query,
      requestBindings,
      dynamicVariables,
      globalDynamicBindings,
      staticVariables,
      restBindings
    )
@@ -437,7 +449,7 @@
    valuePlaceholder="Default"
    bindings={[
      ...restBindings,
      ...dynamicRequestBindings,
      ...globalDynamicRequestBindings,
      ...dataSourceStaticBindings,
    ]}
    bindingDrawerLeft="260px"
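A worked example of the new matchFn argument (the sample datasource data is invented): the same helper now splits a datasource's dynamic variables into those owned by the query being edited and those owned by its other queries, the latter being surfaced as extra "Dynamic" bindings.

// Hypothetical data for illustration only.
const exampleDatasource = {
  config: {
    dynamicVariables: [
      { name: "token", value: "{{ data.0.token }}", queryId: "q1" },
      { name: "cursor", value: "{{ info.page }}", queryId: "q2" },
    ],
  },
}

// Variables belonging to the current query ("q1") stay editable as key/value pairs:
// getDynamicVariables(exampleDatasource, "q1", (v, id) => v.queryId === id)
//   -> { token: "{{ data.0.token }}" }
// Variables from the datasource's other queries become global dynamic bindings:
// getDynamicVariables(exampleDatasource, "q1", (v, id) => v.queryId !== id)
//   -> { cursor: "{{ info.page }}" }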
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/cli",
|
||||
"version": "1.2.28-alpha.0",
|
||||
"version": "1.2.38",
|
||||
"description": "Budibase CLI, for developers, self hosting and migrations.",
|
||||
"main": "src/index.js",
|
||||
"bin": {
|
||||
|
|
|
@ -43,18 +43,19 @@
|
|||
"@babel/helper-validator-identifier" "^7.16.7"
|
||||
to-fast-properties "^2.0.0"
|
||||
|
||||
"@budibase/backend-core@1.1.32-alpha.6":
|
||||
version "1.1.32-alpha.6"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.1.32-alpha.6.tgz#e9dc1a1989a2a6952f5ce002fcdfef66625f3de8"
|
||||
integrity sha512-8oT6veeSmymuJfnu1jAkDAWD4fLj5W0KxNq6GlC+eMWWDZloDF4fMWDpuYTFBeinq1z1GeSFXc9Ak6u+1Z7LtQ==
|
||||
"@budibase/backend-core@1.2.28-alpha.0":
|
||||
version "1.2.28-alpha.0"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.2.28-alpha.0.tgz#f4253825724327e6552000f8c7575134bfaa05cd"
|
||||
integrity sha512-ueu+NZgkiKrX49E0Zy2rrNE4NLe2HAyl3VolTrZfVxOzu1IvtQ/wJBcGDG84VvSliP+0bOOVO9TiGMY3bvZ/Hw==
|
||||
dependencies:
|
||||
"@budibase/types" "1.1.32-alpha.6"
|
||||
"@budibase/types" "1.2.28-alpha.0"
|
||||
"@techpass/passport-openidconnect" "0.3.2"
|
||||
aws-sdk "2.1030.0"
|
||||
bcrypt "5.0.1"
|
||||
dotenv "16.0.1"
|
||||
emitter-listener "1.1.2"
|
||||
ioredis "4.28.0"
|
||||
joi "17.6.0"
|
||||
jsonwebtoken "8.5.1"
|
||||
koa-passport "4.1.4"
|
||||
lodash "4.17.21"
|
||||
|
@ -101,10 +102,10 @@
|
|||
to-gfm-code-block "^0.1.1"
|
||||
year "^0.2.1"
|
||||
|
||||
"@budibase/string-templates@^1.2.28":
|
||||
version "1.2.28"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.2.28.tgz#563ae6ca150de508caf01add9b61468bd14771df"
|
||||
integrity sha512-AMn+fZ8M5h516JcFngvz9FvmjZjmD0rOs32eYrZ3OlZXmbVI5UQBCYhRyUbdFWJ2UbqPm8scsOSQgwJfq90/eQ==
|
||||
"@budibase/string-templates@1.2.28-alpha.0":
|
||||
version "1.2.28-alpha.0"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.2.28-alpha.0.tgz#895571c142bcd68852f87e06a395232b3eb5516a"
|
||||
integrity sha512-nXqa0IlSVW0og8NAJUW+ihUhdW8+rK0tskGWIwF+gEfAKd9NMyxoLswIAb7aYLmwdRRJFwhrpMEuF7ed8AojSQ==
|
||||
dependencies:
|
||||
"@budibase/handlebars-helpers" "^0.11.8"
|
||||
dayjs "^1.10.4"
|
||||
|
@ -113,15 +114,10 @@
|
|||
lodash "^4.17.20"
|
||||
vm2 "^3.9.4"
|
||||
|
||||
"@budibase/types@1.1.32-alpha.6":
|
||||
version "1.1.32-alpha.6"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.1.32-alpha.6.tgz#95d8d73c7ed6ebc22ff26a44365127a478e19409"
|
||||
integrity sha512-AKKxrzVqGtcSzZZ2fP6i2Vgv6ICN9NEEE1dmzRk9AImZS+XKQ9VgVpdE+4gHgFK7L0gBYAsiaoEpCbbrI/+NoQ==
|
||||
|
||||
"@budibase/types@^1.2.31":
|
||||
version "1.2.31"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.2.31.tgz#4715bca331ecd5eac23f95bfdee2eb147ef57814"
|
||||
integrity sha512-/R03MleZRMtf6JW/nCKBqd/bBIkbFnwr8EV1Y3t6EySh8fnhM2PdhlWlpf/BrE0zMoiuBn4JMFl2vJ2Mzo/aoA==
|
||||
"@budibase/types@1.2.28-alpha.0":
|
||||
version "1.2.28-alpha.0"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.2.28-alpha.0.tgz#61668b7d5d9b1f85c09d658deed87ec3dc28e290"
|
||||
integrity sha512-tYhdUl1+dEtG8h2xoGUl0NXZC5BZYQIhgPK7JkYrqFHuNx+1f6EoHPQ9MMb/WyOxIDZv4gY7QJLg0KeVflofbw==
|
||||
|
||||
"@eslint/eslintrc@^0.4.3":
|
||||
version "0.4.3"
|
||||
|
@ -2489,7 +2485,7 @@ jmespath@0.15.0:
|
|||
resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217"
|
||||
integrity sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==
|
||||
|
||||
joi@^17.6.0:
|
||||
joi@17.6.0, joi@^17.6.0:
|
||||
version "17.6.0"
|
||||
resolved "https://registry.yarnpkg.com/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2"
|
||||
integrity sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw==
|
||||
@@ -2875,6 +2875,12 @@
      "key": "timeOnly",
      "defaultValue": false
    },
    {
      "type": "boolean",
      "label": "24-Hour time",
      "key": "time24hr",
      "defaultValue": false
    },
    {
      "type": "boolean",
      "label": "Ignore time zones",
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/client",
|
||||
"version": "1.2.28-alpha.0",
|
||||
"version": "1.2.38",
|
||||
"license": "MPL-2.0",
|
||||
"module": "dist/budibase-client.js",
|
||||
"main": "dist/budibase-client.js",
|
||||
|
@ -19,9 +19,9 @@
|
|||
"dev:builder": "rollup -cw"
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "1.2.28-alpha.0",
|
||||
"@budibase/frontend-core": "1.2.28-alpha.0",
|
||||
"@budibase/string-templates": "1.2.28-alpha.0",
|
||||
"@budibase/bbui": "^1.2.38",
|
||||
"@budibase/frontend-core": "^1.2.38",
|
||||
"@budibase/string-templates": "^1.2.38",
|
||||
"@spectrum-css/button": "^3.0.3",
|
||||
"@spectrum-css/card": "^3.0.3",
|
||||
"@spectrum-css/divider": "^1.0.3",
|
||||
|
|
|
@@ -83,6 +83,8 @@
    dataLoaded = true
    if (get(builderStore).inBuilder) {
      builderStore.actions.notifyLoaded()
    } else {
      builderStore.actions.analyticsPing({ source: "app" })
    }
  })
</script>
@@ -8,6 +8,7 @@
  export let disabled = false
  export let enableTime = false
  export let timeOnly = false
  export let time24hr = false
  export let ignoreTimezones = false
  export let validation
  export let defaultValue

@@ -44,6 +45,7 @@
    appendTo={document.getElementById("flatpickr-root")}
    {enableTime}
    {timeOnly}
    {time24hr}
    {ignoreTimezones}
    {placeholder}
  />
@@ -1,4 +1,5 @@
import { writable, get } from "svelte/store"
import { API } from "api"
import { devToolsStore } from "./devTools.js"

const dispatchEvent = (type, data = {}) => {

@@ -48,6 +49,13 @@ const createBuilderStore = () => {
    notifyLoaded: () => {
      dispatchEvent("preview-loaded")
    },
    analyticsPing: async () => {
      try {
        await API.analyticsPing({ source: "app" })
      } catch (error) {
        // Do nothing
      }
    },
    moveComponent: (componentId, destinationComponentId, mode) => {
      dispatchEvent("move-component", {
        componentId,
@ -1,12 +1,12 @@
|
|||
{
|
||||
"name": "@budibase/frontend-core",
|
||||
"version": "1.2.28-alpha.0",
|
||||
"version": "1.2.38",
|
||||
"description": "Budibase frontend core libraries used in builder and client",
|
||||
"author": "Budibase",
|
||||
"license": "MPL-2.0",
|
||||
"svelte": "src/index.js",
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "1.2.28-alpha.0",
|
||||
"@budibase/bbui": "^1.2.38",
|
||||
"lodash": "^4.17.21",
|
||||
"svelte": "^3.46.2"
|
||||
}
|
||||
|
|
|
@@ -7,4 +7,11 @@ export const buildAnalyticsEndpoints = API => ({
      url: "/api/bbtel",
    })
  },
  analyticsPing: async ({ source }) => {
    const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone
    return await API.post({
      url: "/api/bbtel/ping",
      body: { source, timezone },
    })
  },
})
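As a usage note, a small illustration of the payload this helper ends up sending; the concrete values are examples, but the field names are exactly the ones built above.

// What API.analyticsPing({ source: "app" }) posts to /api/bbtel/ping:
const examplePingBody = {
  source: "app",
  // resolved in the browser, e.g. "Europe/London"
  timezone: Intl.DateTimeFormat().resolvedOptions().timeZone,
}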
@@ -8,6 +8,7 @@ module MongoMock {
    this.insertMany = jest.fn(() => ({ toArray: () => [] }))
    this.find = jest.fn(() => ({ toArray: () => [] }))
    this.findOne = jest.fn()
    this.findOneAndUpdate = jest.fn()
    this.count = jest.fn()
    this.deleteOne = jest.fn()
    this.deleteMany = jest.fn(() => ({ toArray: () => [] }))

@@ -19,6 +20,7 @@ module MongoMock {
      find: this.find,
      insertMany: this.insertMany,
      findOne: this.findOne,
      findOneAndUpdate: this.findOneAndUpdate,
      count: this.count,
      deleteOne: this.deleteOne,
      deleteMany: this.deleteMany,

@@ -31,5 +33,7 @@ module MongoMock {
    })
  }

  mongodb.ObjectID = require("mongodb").ObjectID

  module.exports = mongodb
}
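A quick, hypothetical Jest snippet showing what the stubbed cursor methods above return when a test exercises them; only the jest.fn shapes copied from the mock are used.

test("find is stubbed to return an empty cursor", () => {
  // same stub shape as the mock above
  const find = jest.fn(() => ({ toArray: () => [] }))
  expect(find({ name: "example" }).toArray()).toEqual([])
  expect(find).toHaveBeenCalledWith({ name: "example" })
})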
@ -1,7 +1,7 @@
|
|||
{
|
||||
"name": "@budibase/server",
|
||||
"email": "hi@budibase.com",
|
||||
"version": "1.2.28-alpha.0",
|
||||
"version": "1.2.38",
|
||||
"description": "Budibase Web Server",
|
||||
"main": "src/index.ts",
|
||||
"repository": {
|
||||
|
@ -77,11 +77,11 @@
|
|||
"license": "GPL-3.0",
|
||||
"dependencies": {
|
||||
"@apidevtools/swagger-parser": "10.0.3",
|
||||
"@budibase/backend-core": "1.2.28-alpha.0",
|
||||
"@budibase/client": "1.2.28-alpha.0",
|
||||
"@budibase/pro": "1.2.28-alpha.0",
|
||||
"@budibase/string-templates": "1.2.28-alpha.0",
|
||||
"@budibase/types": "1.2.28-alpha.0",
|
||||
"@budibase/backend-core": "^1.2.38",
|
||||
"@budibase/client": "^1.2.38",
|
||||
"@budibase/pro": "1.2.38",
|
||||
"@budibase/string-templates": "^1.2.38",
|
||||
"@budibase/types": "^1.2.38",
|
||||
"@bull-board/api": "3.7.0",
|
||||
"@bull-board/koa": "3.9.4",
|
||||
"@elastic/elasticsearch": "7.10.0",
|
||||
|
|
|
@@ -1,4 +1,7 @@
import { events } from "@budibase/backend-core"
import { AnalyticsPingRequest, PingSource } from "@budibase/types"
import { DocumentType, isDevAppID } from "../../db/utils"
import { context } from "@budibase/backend-core"

export const isEnabled = async (ctx: any) => {
  const enabled = await events.analytics.enabled()

@@ -6,3 +9,27 @@ export const isEnabled = async (ctx: any) => {
    enabled,
  }
}

export const ping = async (ctx: any) => {
  const body = ctx.request.body as AnalyticsPingRequest
  switch (body.source) {
    case PingSource.APP: {
      const db = context.getAppDB({ skip_setup: true })
      const appInfo = await db.get(DocumentType.APP_METADATA)
      let appId = context.getAppId()

      if (isDevAppID(appId)) {
        await events.serve.servedAppPreview(appInfo, body.timezone)
      } else {
        await events.serve.servedApp(appInfo, body.timezone)
      }
      break
    }
    case PingSource.BUILDER: {
      await events.serve.servedBuilder(body.timezone)
      break
    }
  }

  ctx.status = 200
}
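The AnalyticsPingRequest and PingSource types imported above come from @budibase/types and are not shown in this diff; inferred from how the controller and the client use them, a plausible shape (an assumption, not the published definition) would be:

// assumed shapes, inferred from the controller and client changes in this diff
export enum PingSource {
  APP = "app",
  BUILDER = "builder",
}

export interface AnalyticsPingRequest {
  source: PingSource
  timezone?: string
}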
@ -15,7 +15,7 @@ import {
|
|||
getLayoutParams,
|
||||
getScreenParams,
|
||||
generateDevAppID,
|
||||
DocumentTypes,
|
||||
DocumentType,
|
||||
AppStatus,
|
||||
} from "../../db/utils"
|
||||
const {
|
||||
|
@ -206,7 +206,7 @@ export const fetchAppDefinition = async (ctx: any) => {
|
|||
|
||||
export const fetchAppPackage = async (ctx: any) => {
|
||||
const db = context.getAppDB()
|
||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||
const application = await db.get(DocumentType.APP_METADATA)
|
||||
const layouts = await getLayouts()
|
||||
let screens = await getScreens()
|
||||
|
||||
|
@ -248,13 +248,13 @@ const performAppCreate = async (ctx: any) => {
|
|||
let _rev
|
||||
try {
|
||||
// if template there will be an existing doc
|
||||
const existing = await db.get(DocumentTypes.APP_METADATA)
|
||||
const existing = await db.get(DocumentType.APP_METADATA)
|
||||
_rev = existing._rev
|
||||
} catch (err) {
|
||||
// nothing to do
|
||||
}
|
||||
const newApplication: App = {
|
||||
_id: DocumentTypes.APP_METADATA,
|
||||
_id: DocumentType.APP_METADATA,
|
||||
_rev,
|
||||
appId: instance._id,
|
||||
type: "app",
|
||||
|
@ -383,7 +383,7 @@ export const update = async (ctx: any) => {
|
|||
export const updateClient = async (ctx: any) => {
|
||||
// Get current app version
|
||||
const db = context.getAppDB()
|
||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||
const application = await db.get(DocumentType.APP_METADATA)
|
||||
const currentVersion = application.version
|
||||
|
||||
// Update client library and manifest
|
||||
|
@ -407,7 +407,7 @@ export const updateClient = async (ctx: any) => {
|
|||
export const revertClient = async (ctx: any) => {
|
||||
// Check app can be reverted
|
||||
const db = context.getAppDB()
|
||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||
const application = await db.get(DocumentType.APP_METADATA)
|
||||
if (!application.revertableVersion) {
|
||||
ctx.throw(400, "There is no version to revert to")
|
||||
}
|
||||
|
@ -439,7 +439,7 @@ const destroyApp = async (ctx: any) => {
|
|||
}
|
||||
|
||||
const db = isUnpublish ? context.getProdAppDB() : context.getAppDB()
|
||||
const app = await db.get(DocumentTypes.APP_METADATA)
|
||||
const app = await db.get(DocumentType.APP_METADATA)
|
||||
const result = await db.destroy()
|
||||
|
||||
if (isUnpublish) {
|
||||
|
@ -526,7 +526,7 @@ export const sync = async (ctx: any, next: any) => {
|
|||
try {
|
||||
await replication.replicate({
|
||||
filter: function (doc: any) {
|
||||
return doc._id !== DocumentTypes.APP_METADATA
|
||||
return doc._id !== DocumentType.APP_METADATA
|
||||
},
|
||||
})
|
||||
} catch (err) {
|
||||
|
@ -550,7 +550,7 @@ export const sync = async (ctx: any, next: any) => {
|
|||
const updateAppPackage = async (appPackage: any, appId: any) => {
|
||||
return context.doInAppContext(appId, async () => {
|
||||
const db = context.getAppDB()
|
||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||
const application = await db.get(DocumentType.APP_METADATA)
|
||||
|
||||
const newAppPackage = { ...application, ...appPackage }
|
||||
if (appPackage._rev !== application._rev) {
|
||||
|
|
|
@ -3,7 +3,7 @@ const triggers = require("../../automations/triggers")
|
|||
const {
|
||||
getAutomationParams,
|
||||
generateAutomationID,
|
||||
DocumentTypes,
|
||||
DocumentType,
|
||||
} = require("../../db/utils")
|
||||
const {
|
||||
checkForWebhooks,
|
||||
|
@ -201,7 +201,7 @@ exports.clearLogError = async function (ctx) {
|
|||
const { automationId, appId } = ctx.request.body
|
||||
await doInAppContext(appId, async () => {
|
||||
const db = getProdAppDB()
|
||||
const metadata = await db.get(DocumentTypes.APP_METADATA)
|
||||
const metadata = await db.get(DocumentType.APP_METADATA)
|
||||
if (!automationId) {
|
||||
delete metadata.automationErrors
|
||||
} else if (
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
const { streamBackup } = require("../../utilities/fileSystem")
|
||||
const { events, context } = require("@budibase/backend-core")
|
||||
const { DocumentTypes } = require("../../db/utils")
|
||||
const { DocumentType } = require("../../db/utils")
|
||||
|
||||
exports.exportAppDump = async function (ctx) {
|
||||
let { appId, excludeRows } = ctx.query
|
||||
|
@ -12,7 +12,7 @@ exports.exportAppDump = async function (ctx) {
|
|||
|
||||
await context.doInAppContext(appId, async () => {
|
||||
const appDb = context.getAppDB()
|
||||
const app = await appDb.get(DocumentTypes.APP_METADATA)
|
||||
const app = await appDb.get(DocumentType.APP_METADATA)
|
||||
await events.app.exported(app)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ const {
|
|||
getGlobalDB,
|
||||
} = require("@budibase/backend-core/tenancy")
|
||||
const { create } = require("./application")
|
||||
const { getDocParams, DocumentTypes, isDevAppID } = require("../../db/utils")
|
||||
const { getDocParams, DocumentType, isDevAppID } = require("../../db/utils")
|
||||
|
||||
async function createApp(appName, appImport) {
|
||||
const ctx = {
|
||||
|
@ -31,7 +31,7 @@ exports.exportApps = async ctx => {
|
|||
}
|
||||
const apps = await getAllApps({ all: true })
|
||||
const globalDBString = await exportDB(getGlobalDBName(), {
|
||||
filter: doc => !doc._id.startsWith(DocumentTypes.USER),
|
||||
filter: doc => !doc._id.startsWith(DocumentType.USER),
|
||||
})
|
||||
let allDBs = {
|
||||
global: globalDBString,
|
||||
|
@ -97,7 +97,7 @@ exports.importApps = async ctx => {
|
|||
}
|
||||
|
||||
// if there are any users make sure to remove them
|
||||
let users = await getAllDocType(globalDb, DocumentTypes.USER)
|
||||
let users = await getAllDocType(globalDb, DocumentType.USER)
|
||||
let userDeletionPromises = []
|
||||
for (let user of users) {
|
||||
userDeletionPromises.push(globalDb.remove(user._id, user._rev))
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
const { DocumentTypes } = require("../../db/utils")
|
||||
const { DocumentType } = require("../../db/utils")
|
||||
const { getComponentLibraryManifest } = require("../../utilities/fileSystem")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
exports.fetchAppComponentDefinitions = async function (ctx) {
|
||||
const db = getAppDB()
|
||||
const app = await db.get(DocumentTypes.APP_METADATA)
|
||||
const app = await db.get(DocumentType.APP_METADATA)
|
||||
|
||||
let componentManifests = await Promise.all(
|
||||
app.componentLibraries.map(async library => {
|
||||
|
|
|
@ -2,7 +2,7 @@ const {
|
|||
generateDatasourceID,
|
||||
getDatasourceParams,
|
||||
getQueryParams,
|
||||
DocumentTypes,
|
||||
DocumentType,
|
||||
BudibaseInternalDB,
|
||||
getTableParams,
|
||||
} = require("../../db/utils")
|
||||
|
@ -132,7 +132,7 @@ exports.save = async function (ctx) {
|
|||
|
||||
const datasource = {
|
||||
_id: generateDatasourceID({ plus }),
|
||||
type: plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE,
|
||||
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
|
||||
...ctx.request.body.datasource,
|
||||
}
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@ import {
|
|||
getProdAppID,
|
||||
getDevelopmentAppID,
|
||||
} from "@budibase/backend-core/db"
|
||||
import { DocumentTypes, getAutomationParams } from "../../../db/utils"
|
||||
import { DocumentType, getAutomationParams } from "../../../db/utils"
|
||||
import {
|
||||
disableAllCrons,
|
||||
enableCronTrigger,
|
||||
|
@ -52,9 +52,9 @@ async function storeDeploymentHistory(deployment: any) {
|
|||
let deploymentDoc
|
||||
try {
|
||||
// theres only one deployment doc per app database
|
||||
deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
|
||||
} catch (err) {
|
||||
deploymentDoc = { _id: DocumentTypes.DEPLOYMENTS, history: {} }
|
||||
deploymentDoc = { _id: DocumentType.DEPLOYMENTS, history: {} }
|
||||
}
|
||||
|
||||
const deploymentId = deploymentJSON._id
|
||||
|
@ -115,7 +115,7 @@ async function deployApp(deployment: any) {
|
|||
await replication.replicate()
|
||||
console.log("replication complete.. replacing app meta doc")
|
||||
const db = getProdAppDB()
|
||||
const appDoc = await db.get(DocumentTypes.APP_METADATA)
|
||||
const appDoc = await db.get(DocumentType.APP_METADATA)
|
||||
|
||||
deployment.appUrl = appDoc.url
|
||||
|
||||
|
@ -146,7 +146,7 @@ async function deployApp(deployment: any) {
|
|||
export async function fetchDeployments(ctx: any) {
|
||||
try {
|
||||
const db = getAppDB()
|
||||
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
|
||||
const { updated, deployments } = await checkAllDeployments(deploymentDoc)
|
||||
if (updated) {
|
||||
await db.put(deployments)
|
||||
|
@ -160,7 +160,7 @@ export async function fetchDeployments(ctx: any) {
|
|||
export async function deploymentProgress(ctx: any) {
|
||||
try {
|
||||
const db = getAppDB()
|
||||
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
|
||||
ctx.body = deploymentDoc[ctx.params.deploymentId]
|
||||
} catch (err) {
|
||||
ctx.throw(
|
||||
|
@ -173,7 +173,7 @@ export async function deploymentProgress(ctx: any) {
|
|||
const isFirstDeploy = async () => {
|
||||
try {
|
||||
const db = getProdAppDB()
|
||||
await db.get(DocumentTypes.APP_METADATA)
|
||||
await db.get(DocumentType.APP_METADATA)
|
||||
} catch (e: any) {
|
||||
if (e.status === 404) {
|
||||
return true
|
||||
|
|
|
@ -4,7 +4,7 @@ const { checkSlashesInUrl } = require("../../utilities")
|
|||
const { request } = require("../../utilities/workerRequests")
|
||||
const { clearLock } = require("../../utilities/redis")
|
||||
const { Replication, getProdAppID } = require("@budibase/backend-core/db")
|
||||
const { DocumentTypes } = require("../../db/utils")
|
||||
const { DocumentType } = require("../../db/utils")
|
||||
const { app: appCache } = require("@budibase/backend-core/cache")
|
||||
const { getProdAppDB, getAppDB } = require("@budibase/backend-core/context")
|
||||
const { events } = require("@budibase/backend-core")
|
||||
|
@ -87,7 +87,7 @@ exports.revert = async ctx => {
|
|||
if (info.error) {
|
||||
throw info.error
|
||||
}
|
||||
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
|
||||
if (
|
||||
!deploymentDoc.history ||
|
||||
Object.keys(deploymentDoc.history).length === 0
|
||||
|
@ -110,7 +110,7 @@ exports.revert = async ctx => {
|
|||
|
||||
// update appID in reverted app to be dev version again
|
||||
const db = getAppDB()
|
||||
const appDoc = await db.get(DocumentTypes.APP_METADATA)
|
||||
const appDoc = await db.get(DocumentType.APP_METADATA)
|
||||
appDoc.appId = appId
|
||||
appDoc.instance._id = appId
|
||||
await db.put(appDoc)
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
const { cloneDeep } = require("lodash")
|
||||
const { definitions } = require("../../integrations")
|
||||
const { SourceNames } = require("../../definitions/datasource")
|
||||
const { SourceName } = require("@budibase/types")
|
||||
const googlesheets = require("../../integrations/googlesheets")
|
||||
const { featureFlags } = require("@budibase/backend-core")
|
||||
|
||||
|
@ -10,7 +10,7 @@ exports.fetch = async function (ctx) {
|
|||
|
||||
// for google sheets integration google verification
|
||||
if (featureFlags.isEnabled(featureFlags.FeatureFlag.GOOGLE_SHEETS)) {
|
||||
defs[SourceNames.GOOGLE_SHEETS] = googlesheets.schema
|
||||
defs[SourceName.GOOGLE_SHEETS] = googlesheets.schema
|
||||
}
|
||||
|
||||
ctx.body = defs
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
import { isExternalTable } from "../../../integrations/utils"
|
||||
import { APP_PREFIX, DocumentTypes } from "../../../db/utils"
|
||||
import { APP_PREFIX, DocumentType } from "../../../db/utils"
|
||||
|
||||
export async function addRev(
|
||||
body: { _id?: string; _rev?: string },
|
||||
|
@ -11,7 +11,7 @@ export async function addRev(
|
|||
}
|
||||
let id = body._id
|
||||
if (body._id.startsWith(APP_PREFIX)) {
|
||||
id = DocumentTypes.APP_METADATA
|
||||
id = DocumentType.APP_METADATA
|
||||
}
|
||||
const db = getAppDB()
|
||||
const dbDoc = await db.get(id)
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { ImportInfo } from "./base"
|
||||
import { Query, QueryParameter } from "../../../../../definitions/datasource"
|
||||
import { Query, QueryParameter } from "@budibase/types"
|
||||
import { OpenAPIV2 } from "openapi-types"
|
||||
import { OpenAPISource } from "./base/openapi"
|
||||
import { URL } from "url"
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { ImportInfo } from "./base"
|
||||
import { Query, QueryParameter } from "../../../../../definitions/datasource"
|
||||
import { Query, QueryParameter } from "@budibase/types"
|
||||
import { OpenAPIV3 } from "openapi-types"
|
||||
import { OpenAPISource } from "./base/openapi"
|
||||
import { URL } from "url"
|
||||
|
|
|
@ -1,18 +1,16 @@
|
|||
import {
|
||||
FilterTypes,
|
||||
IncludeRelationships,
|
||||
FilterType,
|
||||
IncludeRelationship,
|
||||
Operation,
|
||||
PaginationJson,
|
||||
RelationshipsJson,
|
||||
SearchFilters,
|
||||
SortJson,
|
||||
} from "../../../definitions/datasource"
|
||||
import {
|
||||
Datasource,
|
||||
FieldSchema,
|
||||
Row,
|
||||
Table,
|
||||
} from "../../../definitions/common"
|
||||
} from "@budibase/types"
|
||||
import {
|
||||
breakRowIdField,
|
||||
generateRowIdField,
|
||||
|
@ -128,7 +126,7 @@ module External {
|
|||
if (
|
||||
typeof filter !== "object" ||
|
||||
Object.keys(filter).length === 0 ||
|
||||
key === FilterTypes.ONE_OF
|
||||
key === FilterType.ONE_OF
|
||||
) {
|
||||
continue
|
||||
}
|
||||
|
@ -634,7 +632,7 @@ module External {
|
|||
*/
|
||||
buildFields(
|
||||
table: Table,
|
||||
includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE
|
||||
includeRelations: IncludeRelationship = IncludeRelationship.INCLUDE
|
||||
) {
|
||||
function extractRealFields(table: Table, existing: string[] = []) {
|
||||
return Object.entries(table.schema)
|
||||
|
|
|
@ -3,7 +3,7 @@ const {
|
|||
generateRowID,
|
||||
getRowParams,
|
||||
getTableIDFromRowID,
|
||||
DocumentTypes,
|
||||
DocumentType,
|
||||
InternalTables,
|
||||
} = require("../../../db/utils")
|
||||
const { dangerousGetDB } = require("@budibase/backend-core/db")
|
||||
|
@ -183,7 +183,7 @@ exports.fetchView = async ctx => {
|
|||
const viewName = ctx.params.viewName
|
||||
|
||||
// if this is a table view being looked for just transfer to that
|
||||
if (viewName.startsWith(DocumentTypes.TABLE)) {
|
||||
if (viewName.startsWith(DocumentType.TABLE)) {
|
||||
ctx.params.tableId = viewName
|
||||
return exports.fetch(ctx)
|
||||
}
|
||||
|
|
|
@ -14,11 +14,10 @@ const env = require("../../../environment")
|
|||
const { clientLibraryPath } = require("../../../utilities")
|
||||
const { upload } = require("../../../utilities/fileSystem")
|
||||
const { attachmentsRelativeURL } = require("../../../utilities")
|
||||
const { DocumentTypes, isDevAppID } = require("../../../db/utils")
|
||||
const { DocumentType } = require("../../../db/utils")
|
||||
const { getAppDB, getAppId } = require("@budibase/backend-core/context")
|
||||
const { setCookie, clearCookie } = require("@budibase/backend-core/utils")
|
||||
const AWS = require("aws-sdk")
|
||||
import { events } from "@budibase/backend-core"
|
||||
|
||||
const fs = require("fs")
|
||||
const {
|
||||
|
@ -75,9 +74,6 @@ export const toggleBetaUiFeature = async function (ctx: any) {
|
|||
export const serveBuilder = async function (ctx: any) {
|
||||
const builderPath = resolve(TOP_LEVEL_PATH, "builder")
|
||||
await send(ctx, ctx.file, { root: builderPath })
|
||||
if (ctx.file === "index.html") {
|
||||
await events.serve.servedBuilder()
|
||||
}
|
||||
}
|
||||
|
||||
export const uploadFile = async function (ctx: any) {
|
||||
|
@ -103,7 +99,7 @@ export const uploadFile = async function (ctx: any) {
|
|||
|
||||
export const serveApp = async function (ctx: any) {
|
||||
const db = getAppDB({ skip_setup: true })
|
||||
const appInfo = await db.get(DocumentTypes.APP_METADATA)
|
||||
const appInfo = await db.get(DocumentType.APP_METADATA)
|
||||
let appId = getAppId()
|
||||
|
||||
if (!env.isJest()) {
|
||||
|
@ -126,12 +122,6 @@ export const serveApp = async function (ctx: any) {
|
|||
// just return the app info for jest to assert on
|
||||
ctx.body = appInfo
|
||||
}
|
||||
|
||||
if (isDevAppID(appInfo.appId)) {
|
||||
await events.serve.servedAppPreview(appInfo)
|
||||
} else {
|
||||
await events.serve.servedApp(appInfo)
|
||||
}
|
||||
}
|
||||
|
||||
export const serveClientLibrary = async function (ctx: any) {
|
||||
|
|
|
@ -7,7 +7,7 @@ const { getTable } = require("../table/utils")
|
|||
const { FieldTypes } = require("../../../constants")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { events } = require("@budibase/backend-core")
|
||||
const { DocumentTypes } = require("../../../db/utils")
|
||||
const { DocumentType } = require("../../../db/utils")
|
||||
const { cloneDeep, isEqual } = require("lodash")
|
||||
|
||||
exports.fetch = async ctx => {
|
||||
|
@ -181,7 +181,7 @@ exports.exportView = async ctx => {
|
|||
ctx.attachment(filename)
|
||||
ctx.body = apiFileReturn(exporter(headers, rows))
|
||||
|
||||
if (viewName.startsWith(DocumentTypes.TABLE)) {
|
||||
if (viewName.startsWith(DocumentType.TABLE)) {
|
||||
await events.table.exported(table, format)
|
||||
} else {
|
||||
await events.view.exported(table, format)
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
const {
|
||||
ViewNames,
|
||||
ViewName,
|
||||
generateMemoryViewID,
|
||||
getMemoryViewParams,
|
||||
DocumentTypes,
|
||||
DocumentType,
|
||||
SEPARATOR,
|
||||
} = require("../../../db/utils")
|
||||
const env = require("../../../environment")
|
||||
|
@ -16,7 +16,7 @@ exports.getView = async viewName => {
|
|||
return designDoc.views[viewName]
|
||||
} else {
|
||||
// This is a table view, don't read the view from the DB
|
||||
if (viewName.startsWith(DocumentTypes.TABLE + SEPARATOR)) {
|
||||
if (viewName.startsWith(DocumentType.TABLE + SEPARATOR)) {
|
||||
return null
|
||||
}
|
||||
|
||||
|
@ -32,7 +32,7 @@ exports.getViews = async () => {
|
|||
const designDoc = await db.get("_design/database")
|
||||
for (let name of Object.keys(designDoc.views)) {
|
||||
// Only return custom views, not built ins
|
||||
if (Object.values(ViewNames).indexOf(name) !== -1) {
|
||||
if (Object.values(ViewName).indexOf(name) !== -1) {
|
||||
continue
|
||||
}
|
||||
response.push({
|
||||
|
|
|
@@ -4,5 +4,6 @@ const controller = require("../controllers/analytics")
const router = Router()

router.get("/api/bbtel", controller.isEnabled)
router.post("/api/bbtel/ping", controller.ping)

module.exports = router
@ -0,0 +1,59 @@
|
|||
const setup = require("./utilities")
|
||||
const { events, constants, db } = require("@budibase/backend-core")
|
||||
|
||||
describe("/static", () => {
|
||||
let request = setup.getRequest()
|
||||
let config = setup.getConfig()
|
||||
let app
|
||||
|
||||
const timezone = "Europe/London"
|
||||
|
||||
afterAll(setup.afterAll)
|
||||
|
||||
beforeEach(async () => {
|
||||
app = await config.init()
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
describe("/ping", () => {
|
||||
it("should ping from builder", async () => {
|
||||
await request
|
||||
.post("/api/bbtel/ping")
|
||||
.send({source: "builder", timezone})
|
||||
.set(config.defaultHeaders())
|
||||
.expect(200)
|
||||
|
||||
expect(events.serve.servedBuilder).toBeCalledTimes(1)
|
||||
expect(events.serve.servedBuilder).toBeCalledWith(timezone)
|
||||
expect(events.serve.servedApp).not.toBeCalled()
|
||||
expect(events.serve.servedAppPreview).not.toBeCalled()
|
||||
})
|
||||
|
||||
it("should ping from app preview", async () => {
|
||||
await request
|
||||
.post("/api/bbtel/ping")
|
||||
.send({source: "app", timezone})
|
||||
.set(config.defaultHeaders())
|
||||
.expect(200)
|
||||
|
||||
expect(events.serve.servedAppPreview).toBeCalledTimes(1)
|
||||
expect(events.serve.servedAppPreview).toBeCalledWith(config.getApp(), timezone)
|
||||
expect(events.serve.servedApp).not.toBeCalled()
|
||||
})
|
||||
|
||||
it("should ping from app", async () => {
|
||||
const headers = config.defaultHeaders()
|
||||
headers[constants.Headers.APP_ID] = config.prodAppId
|
||||
|
||||
await request
|
||||
.post("/api/bbtel/ping")
|
||||
.send({source: "app", timezone})
|
||||
.set(headers)
|
||||
.expect(200)
|
||||
|
||||
expect(events.serve.servedApp).toBeCalledTimes(1)
|
||||
expect(events.serve.servedApp).toBeCalledWith(config.getProdApp(), timezone)
|
||||
expect(events.serve.servedAppPreview).not.toBeCalled()
|
||||
})
|
||||
})
|
||||
})
@ -36,7 +36,6 @@ describe("/static", () => {
|
|||
.expect(200)
|
||||
|
||||
expect(res.text).toContain("<title>Budibase</title>")
|
||||
expect(events.serve.servedBuilder).toBeCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -56,9 +55,6 @@ describe("/static", () => {
|
|||
.expect(200)
|
||||
|
||||
expect(res.body.appId).toBe(config.prodAppId)
|
||||
expect(events.serve.servedApp).toBeCalledTimes(1)
|
||||
expect(events.serve.servedApp).toBeCalledWith(res.body)
|
||||
expect(events.serve.servedAppPreview).not.toBeCalled()
|
||||
})
|
||||
|
||||
it("should serve the app by url", async () => {
|
||||
|
@ -71,9 +67,6 @@ describe("/static", () => {
|
|||
.expect(200)
|
||||
|
||||
expect(res.body.appId).toBe(config.prodAppId)
|
||||
expect(events.serve.servedApp).toBeCalledTimes(1)
|
||||
expect(events.serve.servedApp).toBeCalledWith(res.body)
|
||||
expect(events.serve.servedAppPreview).not.toBeCalled()
|
||||
})
|
||||
|
||||
it("should serve the app preview by id", async () => {
|
||||
|
@ -83,9 +76,6 @@ describe("/static", () => {
|
|||
.expect(200)
|
||||
|
||||
expect(res.body.appId).toBe(config.appId)
|
||||
expect(events.serve.servedAppPreview).toBeCalledTimes(1)
|
||||
expect(events.serve.servedAppPreview).toBeCalledWith(res.body)
|
||||
expect(events.serve.servedApp).not.toBeCalled()
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@@ -14,6 +14,16 @@ const SortOrdersPretty = {
  [SortOrders.DESCENDING]: "Descending",
}

const EmptyFilterOptions = {
  RETURN_ALL: "all",
  RETURN_NONE: "none",
}

const EmptyFilterOptionsPretty = {
  [EmptyFilterOptions.RETURN_ALL]: "Return all table rows",
  [EmptyFilterOptions.RETURN_NONE]: "Return no rows",
}

exports.definition = {
  description: "Query rows from the database",
  icon: "Search",

@@ -52,6 +62,12 @@ exports.definition = {
        title: "Limit",
        customType: "queryLimit",
      },
      onEmptyFilter: {
        pretty: Object.values(EmptyFilterOptionsPretty),
        enum: Object.values(EmptyFilterOptions),
        type: "string",
        title: "When Filter Empty",
      },
    },
    required: ["tableId"],
  },

@@ -103,6 +119,10 @@ function typeCoercion(filters, table) {
  return filters
}

const hasNullFilters = filters =>
  filters.length === 0 ||
  filters.some(filter => filter.value === null || filter.value === "")

exports.run = async function ({ inputs, appId }) {
  const { tableId, filters, sortColumn, sortOrder, limit } = inputs
  const table = await getTable(appId, tableId)

@@ -127,9 +147,21 @@ exports.run = async function ({ inputs, appId }) {
    version: "1",
  })
  try {
    await rowController.search(ctx)
    let rows

    if (
      inputs.onEmptyFilter === EmptyFilterOptions.RETURN_NONE &&
      inputs["filters-def"] &&
      hasNullFilters(inputs["filters-def"])
    ) {
      rows = []
    } else {
      await rowController.search(ctx)
      rows = ctx.body ? ctx.body.rows : []
    }

    return {
      rows: ctx.body ? ctx.body.rows : [],
      rows,
      success: ctx.status === 200,
    }
  } catch (err) {
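To spell out the behaviour the hunks above introduce: with onEmptyFilter set to "none", the step only runs the row search when the configured filters actually constrain something. The helper below is copied from the diff; the sample inputs are hypothetical.

const hasNullFilters = (filters: { value?: any }[]) =>
  filters.length === 0 ||
  filters.some(filter => filter.value === null || filter.value === "")

hasNullFilters([])                       // true  -> with RETURN_NONE the step returns rows = []
hasNullFilters([{ value: null }])        // true  -> with RETURN_NONE the step returns rows = []
hasNullFilters([{ value: "Budibase" }])  // false -> rowController.search still runs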
@ -16,7 +16,7 @@ describe("Test a query step automation", () => {
|
|||
let table
|
||||
let config = setup.getConfig()
|
||||
|
||||
beforeEach(async () => {
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
table = await config.createTable()
|
||||
const row = {
|
||||
|
@ -48,4 +48,70 @@ describe("Test a query step automation", () => {
|
|||
expect(res.rows.length).toBe(2)
|
||||
expect(res.rows[0].name).toBe(NAME)
|
||||
})
|
||||
|
||||
it("Returns all rows when onEmptyFilter has no value and no filters are passed", async () => {
|
||||
const inputs = {
|
||||
tableId: table._id,
|
||||
filters: {},
|
||||
sortColumn: "name",
|
||||
sortOrder: "ascending",
|
||||
limit: 10,
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
expect(res.success).toBe(true)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(2)
|
||||
expect(res.rows[0].name).toBe(NAME)
|
||||
})
|
||||
|
||||
it("Returns no rows when onEmptyFilter is RETURN_NONE and theres no filters", async () => {
|
||||
const inputs = {
|
||||
tableId: table._id,
|
||||
filters: {},
|
||||
"filters-def": [],
|
||||
sortColumn: "name",
|
||||
sortOrder: "ascending",
|
||||
limit: 10,
|
||||
onEmptyFilter: "none",
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
expect(res.success).toBe(false)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(0)
|
||||
})
|
||||
|
||||
it("Returns no rows when onEmptyFilters RETURN_NONE and a filter is passed with a null value", async () => {
|
||||
const inputs = {
|
||||
tableId: table._id,
|
||||
onEmptyFilter: "none",
|
||||
filters: {},
|
||||
"filters-def": [
|
||||
{
|
||||
value: null
|
||||
}
|
||||
],
|
||||
sortColumn: "name",
|
||||
sortOrder: "ascending",
|
||||
limit: 10,
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
expect(res.success).toBe(false)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(0)
|
||||
})
|
||||
|
||||
it("Returns rows when onEmptyFilter is RETURN_ALL and no filter is passed", async () => {
|
||||
const inputs = {
|
||||
tableId: table._id,
|
||||
onEmptyFilter: "all",
|
||||
filters: {},
|
||||
sortColumn: "name",
|
||||
sortOrder: "ascending",
|
||||
limit: 10,
|
||||
}
|
||||
const res = await setup.runStep(setup.actions.QUERY_ROWS.stepId, inputs)
|
||||
expect(res.success).toBe(true)
|
||||
expect(res.rows).toBeDefined()
|
||||
expect(res.rows.length).toBe(2)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
const Sentry = require("@sentry/node")
|
||||
const { ViewNames, getQueryIndex } = require("../utils")
|
||||
const { ViewName, getQueryIndex } = require("../utils")
|
||||
const { FieldTypes } = require("../../constants")
|
||||
const { createLinkView } = require("../views/staticViews")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
@ -41,7 +41,7 @@ exports.getLinkDocuments = async function (args) {
|
|||
}
|
||||
params.include_docs = !!includeDocs
|
||||
try {
|
||||
let linkRows = (await db.query(getQueryIndex(ViewNames.LINK), params)).rows
|
||||
let linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
|
||||
// filter to get unique entries
|
||||
const foundIds = []
|
||||
linkRows = linkRows.filter(link => {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
const newid = require("./newid")
|
||||
const {
|
||||
DocumentTypes: CoreDocTypes,
|
||||
DocumentType: CoreDocTypes,
|
||||
getRoleParams,
|
||||
generateRoleID,
|
||||
APP_DEV_PREFIX,
|
||||
|
@ -12,7 +12,7 @@ const {
|
|||
getDevelopmentAppID,
|
||||
generateAppID,
|
||||
getQueryIndex,
|
||||
ViewNames,
|
||||
ViewName,
|
||||
} = require("@budibase/backend-core/db")
|
||||
|
||||
const UNICODE_MAX = "\ufff0"
|
||||
|
@ -23,7 +23,7 @@ const AppStatus = {
|
|||
DEPLOYED: "published",
|
||||
}
|
||||
|
||||
const DocumentTypes = {
|
||||
const DocumentType = {
|
||||
...CoreDocTypes,
|
||||
TABLE: "ta",
|
||||
ROW: "ro",
|
||||
|
@ -67,12 +67,12 @@ exports.APP_PREFIX = APP_PREFIX
|
|||
exports.APP_DEV_PREFIX = APP_DEV_PREFIX
|
||||
exports.isDevAppID = isDevAppID
|
||||
exports.isProdAppID = isProdAppID
|
||||
exports.USER_METDATA_PREFIX = `${DocumentTypes.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
|
||||
exports.LINK_USER_METADATA_PREFIX = `${DocumentTypes.LINK}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
|
||||
exports.TABLE_ROW_PREFIX = `${DocumentTypes.ROW}${SEPARATOR}${DocumentTypes.TABLE}`
|
||||
exports.ViewNames = ViewNames
|
||||
exports.USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
|
||||
exports.LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
|
||||
exports.TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}`
|
||||
exports.ViewName = ViewName
|
||||
exports.InternalTables = InternalTables
|
||||
exports.DocumentTypes = DocumentTypes
|
||||
exports.DocumentType = DocumentType
|
||||
exports.SEPARATOR = SEPARATOR
|
||||
exports.UNICODE_MAX = UNICODE_MAX
|
||||
exports.SearchIndexes = SearchIndexes
|
||||
|
@ -115,7 +115,7 @@ exports.getDocParams = getDocParams
|
|||
* Gets parameters for retrieving tables, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
exports.getTableParams = (tableId = null, otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.TABLE, tableId, otherProps)
|
||||
return getDocParams(DocumentType.TABLE, tableId, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -123,7 +123,7 @@ exports.getTableParams = (tableId = null, otherProps = {}) => {
|
|||
* @returns {string} The new table ID which the table doc can be stored under.
|
||||
*/
|
||||
exports.generateTableID = () => {
|
||||
return `${DocumentTypes.TABLE}${SEPARATOR}${newid()}`
|
||||
return `${DocumentType.TABLE}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -136,12 +136,12 @@ exports.generateTableID = () => {
|
|||
*/
|
||||
exports.getRowParams = (tableId = null, rowId = null, otherProps = {}) => {
|
||||
if (tableId == null) {
|
||||
return getDocParams(DocumentTypes.ROW, null, otherProps)
|
||||
return getDocParams(DocumentType.ROW, null, otherProps)
|
||||
}
|
||||
|
||||
const endOfKey = rowId == null ? `${tableId}${SEPARATOR}` : rowId
|
||||
|
||||
return getDocParams(DocumentTypes.ROW, endOfKey, otherProps)
|
||||
return getDocParams(DocumentType.ROW, endOfKey, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -151,9 +151,9 @@ exports.getRowParams = (tableId = null, rowId = null, otherProps = {}) => {
|
|||
*/
|
||||
exports.getTableIDFromRowID = rowId => {
|
||||
const components = rowId
|
||||
.split(DocumentTypes.TABLE + SEPARATOR)[1]
|
||||
.split(DocumentType.TABLE + SEPARATOR)[1]
|
||||
.split(SEPARATOR)
|
||||
return `${DocumentTypes.TABLE}${SEPARATOR}${components[0]}`
|
||||
return `${DocumentType.TABLE}${SEPARATOR}${components[0]}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -164,7 +164,7 @@ exports.getTableIDFromRowID = rowId => {
|
|||
*/
|
||||
exports.generateRowID = (tableId, id = null) => {
|
||||
id = id || newid()
|
||||
return `${DocumentTypes.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
|
||||
return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -187,7 +187,7 @@ exports.generateUserMetadataID = globalId => {
|
|||
* Breaks up the ID to get the global ID.
|
||||
*/
|
||||
exports.getGlobalIDFromUserMetadataID = id => {
|
||||
const prefix = `${DocumentTypes.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
|
||||
const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
|
||||
if (!id || !id.includes(prefix)) {
|
||||
return id
|
||||
}
|
||||
|
@ -198,7 +198,7 @@ exports.getGlobalIDFromUserMetadataID = id => {
|
|||
* Gets parameters for retrieving automations, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
exports.getAutomationParams = (automationId = null, otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.AUTOMATION, automationId, otherProps)
|
||||
return getDocParams(DocumentType.AUTOMATION, automationId, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -206,7 +206,7 @@ exports.getAutomationParams = (automationId = null, otherProps = {}) => {
|
|||
* @returns {string} The new automation ID which the automation doc can be stored under.
|
||||
*/
|
||||
exports.generateAutomationID = () => {
|
||||
return `${DocumentTypes.AUTOMATION}${SEPARATOR}${newid()}`
|
||||
return `${DocumentType.AUTOMATION}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -231,14 +231,14 @@ exports.generateLinkID = (
|
|||
const tables = `${SEPARATOR}${tableId1}${SEPARATOR}${tableId2}`
|
||||
const rows = `${SEPARATOR}${rowId1}${SEPARATOR}${rowId2}`
|
||||
const fields = `${SEPARATOR}${fieldName1}${SEPARATOR}${fieldName2}`
|
||||
return `${DocumentTypes.LINK}${tables}${rows}${fields}`
|
||||
return `${DocumentType.LINK}${tables}${rows}${fields}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets parameters for retrieving link docs, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
exports.getLinkParams = (otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.LINK, null, otherProps)
|
||||
return getDocParams(DocumentType.LINK, null, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -246,14 +246,14 @@ exports.getLinkParams = (otherProps = {}) => {
|
|||
* @returns {string} The new layout ID which the layout doc can be stored under.
|
||||
*/
|
||||
exports.generateLayoutID = id => {
|
||||
return `${DocumentTypes.LAYOUT}${SEPARATOR}${id || newid()}`
|
||||
return `${DocumentType.LAYOUT}${SEPARATOR}${id || newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets parameters for retrieving layout, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
exports.getLayoutParams = (layoutId = null, otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.LAYOUT, layoutId, otherProps)
|
||||
return getDocParams(DocumentType.LAYOUT, layoutId, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -261,14 +261,14 @@ exports.getLayoutParams = (layoutId = null, otherProps = {}) => {
|
|||
* @returns {string} The new screen ID which the screen doc can be stored under.
|
||||
*/
|
||||
exports.generateScreenID = () => {
|
||||
return `${DocumentTypes.SCREEN}${SEPARATOR}${newid()}`
|
||||
return `${DocumentType.SCREEN}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets parameters for retrieving screens, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
exports.getScreenParams = (screenId = null, otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.SCREEN, screenId, otherProps)
|
||||
return getDocParams(DocumentType.SCREEN, screenId, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -276,14 +276,14 @@ exports.getScreenParams = (screenId = null, otherProps = {}) => {
|
|||
* @returns {string} The new webhook ID which the webhook doc can be stored under.
|
||||
*/
|
||||
exports.generateWebhookID = () => {
|
||||
return `${DocumentTypes.WEBHOOK}${SEPARATOR}${newid()}`
|
||||
return `${DocumentType.WEBHOOK}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets parameters for retrieving a webhook, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
exports.getWebhookParams = (webhookId = null, otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.WEBHOOK, webhookId, otherProps)
|
||||
return getDocParams(DocumentType.WEBHOOK, webhookId, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -292,7 +292,7 @@ exports.getWebhookParams = (webhookId = null, otherProps = {}) => {
|
|||
*/
|
||||
exports.generateDatasourceID = ({ plus = false } = {}) => {
|
||||
return `${
|
||||
plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE
|
||||
plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE
|
||||
}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
|
@ -300,7 +300,7 @@ exports.generateDatasourceID = ({ plus = false } = {}) => {
|
|||
* Gets parameters for retrieving a datasource, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
exports.getDatasourceParams = (datasourceId = null, otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.DATASOURCE, datasourceId, otherProps)
|
||||
return getDocParams(DocumentType.DATASOURCE, datasourceId, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -309,7 +309,7 @@ exports.getDatasourceParams = (datasourceId = null, otherProps = {}) => {
|
|||
*/
|
||||
exports.generateQueryID = datasourceId => {
|
||||
return `${
|
||||
DocumentTypes.QUERY
|
||||
DocumentType.QUERY
|
||||
}${SEPARATOR}${datasourceId}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
|
@ -318,14 +318,14 @@ exports.generateQueryID = datasourceId => {
|
|||
* automations etc.
|
||||
*/
|
||||
exports.generateAutomationMetadataID = automationId => {
|
||||
return `${DocumentTypes.AUTOMATION_METADATA}${SEPARATOR}${automationId}`
|
||||
return `${DocumentType.AUTOMATION_METADATA}${SEPARATOR}${automationId}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve all automation metadata in an app database.
|
||||
*/
|
||||
exports.getAutomationMetadataParams = (otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.AUTOMATION_METADATA, null, otherProps)
|
||||
return getDocParams(DocumentType.AUTOMATION_METADATA, null, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -333,11 +333,11 @@ exports.getAutomationMetadataParams = (otherProps = {}) => {
|
|||
*/
|
||||
exports.getQueryParams = (datasourceId = null, otherProps = {}) => {
|
||||
if (datasourceId == null) {
|
||||
return getDocParams(DocumentTypes.QUERY, null, otherProps)
|
||||
return getDocParams(DocumentType.QUERY, null, otherProps)
|
||||
}
|
||||
|
||||
return getDocParams(
|
||||
DocumentTypes.QUERY,
|
||||
DocumentType.QUERY,
|
||||
`${datasourceId}${SEPARATOR}`,
|
||||
otherProps
|
||||
)
|
||||
|
@ -348,11 +348,11 @@ exports.getQueryParams = (datasourceId = null, otherProps = {}) => {
|
|||
* @returns {string} The ID of the flag document that was generated.
|
||||
*/
|
||||
exports.generateUserFlagID = userId => {
|
||||
return `${DocumentTypes.USER_FLAG}${SEPARATOR}${userId}`
|
||||
return `${DocumentType.USER_FLAG}${SEPARATOR}${userId}`
|
||||
}
|
||||
|
||||
exports.generateMetadataID = (type, entityId) => {
|
||||
return `${DocumentTypes.METADATA}${SEPARATOR}${type}${SEPARATOR}${entityId}`
|
||||
return `${DocumentType.METADATA}${SEPARATOR}${type}${SEPARATOR}${entityId}`
|
||||
}
|
||||
|
||||
exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
|
||||
|
@ -360,19 +360,19 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
|
|||
if (entityId != null) {
|
||||
docId += entityId
|
||||
}
|
||||
return getDocParams(DocumentTypes.METADATA, docId, otherProps)
|
||||
return getDocParams(DocumentType.METADATA, docId, otherProps)
|
||||
}
|
||||
|
||||
exports.generateMemoryViewID = viewName => {
|
||||
return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
|
||||
return `${DocumentType.MEM_VIEW}${SEPARATOR}${viewName}`
|
||||
}
|
||||
|
||||
exports.getMemoryViewParams = (otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
|
||||
return getDocParams(DocumentType.MEM_VIEW, null, otherProps)
|
||||
}
|
||||
|
||||
exports.generatePluginID = (name, version) => {
|
||||
return `${DocumentTypes.PLUGIN}${SEPARATOR}${name}${SEPARATOR}${version}`
|
||||
return `${DocumentType.PLUGIN}${SEPARATOR}${name}${SEPARATOR}${version}`
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,11 +1,6 @@
|
|||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const {
|
||||
DocumentTypes,
|
||||
SEPARATOR,
|
||||
ViewNames,
|
||||
SearchIndexes,
|
||||
} = require("../utils")
|
||||
const SCREEN_PREFIX = DocumentTypes.SCREEN + SEPARATOR
|
||||
const { DocumentType, SEPARATOR, ViewName, SearchIndexes } = require("../utils")
|
||||
const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR
|
||||
|
||||
/**************************************************
|
||||
* INFORMATION *
|
||||
|
@ -53,7 +48,7 @@ exports.createLinkView = async () => {
|
|||
}
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[ViewNames.LINK]: view,
|
||||
[ViewName.LINK]: view,
|
||||
}
|
||||
await db.put(designDoc)
|
||||
}
|
||||
|
@ -74,7 +69,7 @@ exports.createRoutingView = async () => {
|
|||
}
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[ViewNames.ROUTING]: view,
|
||||
[ViewName.ROUTING]: view,
|
||||
}
|
||||
await db.put(designDoc)
|
||||
}
|
||||
|
|
|
@@ -5,14 +5,14 @@ import {
  Document,
} from "@budibase/types"

export enum LoopStepTypes {
export enum LoopStepType {
  ARRAY = "Array",
  STRING = "String",
}

export interface LoopStep extends AutomationStep {
  inputs: {
    option: LoopStepTypes
    option: LoopStepType
    [key: string]: any
  }
}
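For context on the rename, a hypothetical value built against the new names; only the inputs portion of a LoopStep is shown, and the binding key is illustrative (the index signature allows arbitrary step-specific keys).

const loopInputs: LoopStep["inputs"] = {
  option: LoopStepType.ARRAY,
  binding: ["row1", "row2", "row3"], // illustrative extra key
}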
@ -1,70 +1,19 @@
|
|||
export { Query, Datasource } from "./datasource"
|
||||
import { Document } from "@budibase/types"
|
||||
export {
|
||||
Query,
|
||||
Datasource,
|
||||
FieldSchema,
|
||||
TableSchema,
|
||||
Table,
|
||||
Document,
|
||||
Row,
|
||||
} from "@budibase/types"
|
||||
|
||||
export interface Base {
|
||||
_id?: string
|
||||
_rev?: string
|
||||
}
|
||||
|
||||
export interface Application extends Base {
|
||||
export interface Application extends Document {
|
||||
_id: string
|
||||
appId?: string
|
||||
}
|
||||
|
||||
export interface FieldSchema {
|
||||
// TODO: replace with field types enum when done
|
||||
type: string
|
||||
externalType?: string
|
||||
fieldName?: string
|
||||
name: string
|
||||
tableId?: string
|
||||
relationshipType?: string
|
||||
through?: string
|
||||
foreignKey?: string
|
||||
autocolumn?: boolean
|
||||
subtype?: string
|
||||
throughFrom?: string
|
||||
throughTo?: string
|
||||
formula?: string
|
||||
formulaType?: string
|
||||
main?: boolean
|
||||
ignoreTimezones?: boolean
|
||||
meta?: {
|
||||
toTable: string
|
||||
toKey: string
|
||||
}
|
||||
constraints?: {
|
||||
type?: string
|
||||
email?: boolean
|
||||
inclusion?: string[]
|
||||
length?: {
|
||||
minimum?: string | number
|
||||
maximum?: string | number
|
||||
}
|
||||
presence?: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export interface TableSchema {
|
||||
[key: string]: FieldSchema
|
||||
}
|
||||
|
||||
export interface Table extends Base {
|
||||
type?: string
|
||||
views?: {}
|
||||
name: string
|
||||
primary?: string[]
|
||||
schema: TableSchema
|
||||
primaryDisplay?: string
|
||||
sourceId?: string
|
||||
relatedFormula?: string[]
|
||||
constrained?: string[]
|
||||
}
|
||||
|
||||
export interface Row extends Base {
|
||||
type?: string
|
||||
tableId?: string
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
interface JsonSchemaField {
|
||||
properties: {
|
||||
[key: string]: {
|
||||
|
@ -94,7 +43,7 @@ export interface AutomationStep {
|
|||
type: string
|
||||
}
|
||||
|
||||
export interface Automation extends Base {
|
||||
export interface Automation extends Document {
|
||||
name: string
|
||||
type: string
|
||||
appId?: string
|
||||
|
|
|
@ -1,162 +1,13 @@
|
|||
import { Row, Table, Base } from "./common"
|
||||
import {
|
||||
Operation,
|
||||
QueryTypes,
|
||||
SortDirection,
|
||||
SourceNames,
|
||||
} from "@budibase/types"
|
||||
|
||||
// these were previously exported here - moved to types for re-use
|
||||
export {
|
||||
Operation,
|
||||
SortDirection,
|
||||
QueryTypes,
|
||||
DatasourceFieldTypes,
|
||||
SourceNames,
|
||||
IncludeRelationships,
|
||||
FilterTypes,
|
||||
} from "@budibase/types"
|
||||
|
||||
export interface QueryDefinition {
|
||||
type: QueryTypes
|
||||
displayName?: string
|
||||
readable?: boolean
|
||||
customisable?: boolean
|
||||
fields?: object
|
||||
urlDisplay?: boolean
|
||||
}
|
||||
|
||||
export interface ExtraQueryConfig {
|
||||
[key: string]: {
|
||||
displayName: string
|
||||
type: string
|
||||
required: boolean
|
||||
data?: object
|
||||
}
|
||||
}
|
||||
|
||||
export interface Integration {
|
||||
docs: string
|
||||
plus?: boolean
|
||||
auth?: { type: string }
|
||||
relationships?: boolean
|
||||
description: string
|
||||
friendlyName: string
|
||||
type?: string
|
||||
datasource: {}
|
||||
query: {
|
||||
[key: string]: QueryDefinition
|
||||
}
|
||||
extra?: ExtraQueryConfig
|
||||
}
|
||||
|
||||
export interface SearchFilters {
|
||||
allOr?: boolean
|
||||
string?: {
|
||||
[key: string]: string
|
||||
}
|
||||
fuzzy?: {
|
||||
[key: string]: string
|
||||
}
|
||||
range?: {
|
||||
[key: string]: {
|
||||
high: number | string
|
||||
low: number | string
|
||||
}
|
||||
}
|
||||
equal?: {
|
||||
[key: string]: any
|
||||
}
|
||||
notEqual?: {
|
||||
[key: string]: any
|
||||
}
  empty?: {
    [key: string]: any
  }
  notEmpty?: {
    [key: string]: any
  }
  oneOf?: {
    [key: string]: any[]
  }
  contains?: {
    [key: string]: any
  }
}

export interface SortJson {
  [key: string]: SortDirection
}

export interface PaginationJson {
  limit: number
  page?: string | number
}

export interface RenameColumn {
  old: string
  updated: string
}

export interface RelationshipsJson {
  through?: string
  from?: string
  to?: string
  fromPrimary?: string
  toPrimary?: string
  tableName: string
  column: string
}

export interface QueryJson {
  endpoint: {
    datasourceId: string
    entityId: string
    operation: Operation
    schema?: string
  }
  resource: {
    fields: string[]
  }
  filters?: SearchFilters
  sort?: SortJson
  paginate?: PaginationJson
  body?: Row | Row[]
  table?: Table
  meta?: {
    table?: Table
    tables?: Record<string, Table>
    renamed: RenameColumn
  }
  extra?: {
    idFilter?: SearchFilters
  }
  relationships?: RelationshipsJson[]
}

export interface SqlQuery {
  sql: string
  bindings?: string[]
}

/********************************************
 * This file contains structures which are  *
 * internal to the server and don't need to *
 * be exposed for use by other services.    *
 ********************************************/

export interface QueryOptions {
  disableReturning?: boolean
}

export interface Datasource extends Base {
  type: string
  name: string
  source: SourceNames
  // the config is defined by the schema
  config: {
    [key: string]: string | number | boolean
  }
  plus: boolean
  entities?: {
    [key: string]: Table
  }
}

export enum AuthType {
  BASIC = "basic",
  BEARER = "bearer",

@@ -178,25 +29,6 @@ export interface BearerAuthConfig {
  token: string
}

export interface QueryParameter {
  name: string
  default: string
}

export interface RestQueryFields {
  path: string
  queryString?: string
  headers: { [key: string]: any }
  disabledHeaders: { [key: string]: any }
  requestBody: any
  bodyType: string
  json: object
  method: string
  authConfigId: string
  pagination: PaginationConfig | null
  paginationValues: PaginationValues | null
}

export interface RestConfig {
  url: string
  defaultHeaders: {

@@ -214,28 +46,3 @@ export interface RestConfig {
    }
  ]
}

export interface PaginationConfig {
  type: string
  location: string
  pageParam: string
  sizeParam: string | null
  responseParam: string | null
}

export interface PaginationValues {
  page: string | number | null
  limit: number | null
}

export interface Query {
  _id?: string
  datasourceId: string
  name: string
  parameters: QueryParameter[]
  fields: RestQueryFields | any
  transformer: string | null
  schema: any
  readable: boolean
  queryVerb: string
}
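For orientation, the sketch below shows roughly what a QueryJson for a simple read could look like against these interfaces. It is illustrative only: the datasource and table identifiers are placeholders, and it assumes the Operation enum has a READ member alongside the CREATE/UPDATE members visible elsewhere in this diff.

// Illustrative only - not part of this changeset.
import { Operation, QueryJson, SortDirection } from "@budibase/types"

const readCustomers: QueryJson = {
  endpoint: {
    datasourceId: "datasource_example", // placeholder ID
    entityId: "customers",              // placeholder table name
    operation: Operation.READ,          // assumes a READ member exists
  },
  resource: {
    fields: ["customers.id", "customers.name"],
  },
  filters: {
    string: { "customers.name": "bud" }, // "starts with" style filter
  },
  sort: { name: SortDirection.ASCENDING },
  paginate: { limit: 100, page: 1 },
}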
@@ -1,9 +1,9 @@
import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
  DatasourceFieldType,
  QueryType,
  IntegrationBase,
} from "@budibase/types"

module AirtableModule {
  const Airtable = require("airtable")

@@ -21,56 +21,61 @@ module AirtableModule {
    type: "Spreadsheet",
    datasource: {
      apiKey: {
        type: DatasourceFieldTypes.PASSWORD,
        type: DatasourceFieldType.PASSWORD,
        default: "enter api key",
        required: true,
      },
      base: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "mybase",
        required: true,
      },
    },
    query: {
      create: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      read: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          view: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          numRecords: {
            type: DatasourceFieldTypes.NUMBER,
            type: DatasourceFieldType.NUMBER,
            default: 10,
          },
        },
      },
      update: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        fields: {
          id: {
            type: DatasourceFieldTypes.STRING,
            display: "Record ID",
            type: DatasourceFieldType.STRING,
            required: true,
          },
          table: {
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      delete: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
    },
  }
@@ -1,9 +1,9 @@
import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
  DatasourceFieldType,
  QueryType,
  IntegrationBase,
} from "@budibase/types"

module ArangoModule {
  const { Database, aql } = require("arangojs")

@@ -24,35 +24,35 @@ module ArangoModule {
      "ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. ",
    datasource: {
      url: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "http://localhost:8529",
        required: true,
      },
      username: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "root",
        required: true,
      },
      password: {
        type: DatasourceFieldTypes.PASSWORD,
        type: DatasourceFieldType.PASSWORD,
        required: true,
      },
      databaseName: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "_system",
        required: true,
      },
      collection: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
    },
    query: {
      read: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      create: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
    },
  }
@@ -1,6 +0,0 @@
export interface IntegrationBase {
  create?(query: any): Promise<any[] | any>
  read?(query: any): Promise<any[] | any>
  update?(query: any): Promise<any[] | any>
  delete?(query: any): Promise<any[] | any>
}
@@ -1,13 +0,0 @@
import { Table } from "../../definitions/common"
import { IntegrationBase } from "./IntegrationBase"

export interface DatasourcePlus extends IntegrationBase {
  tables: Record<string, Table>
  schemaErrors: Record<string, string>

  // if the datasource supports the use of bindings directly (to protect against SQL injection)
  // this returns the format of the identifier
  getBindingIdentifier(): string
  getStringConcat(parts: string[]): string
  buildSchema(datasourceId: string, entities: Record<string, Table>): any
}
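The comment on getBindingIdentifier is the design note worth keeping in mind: SQL integrations pass user values to the driver as bindings instead of splicing them into the query string. A minimal, hypothetical sketch of the two string-building methods follows; the placeholder and concatenation syntax are illustrative, not what any particular Budibase integration actually returns.

// Hypothetical sketch only - real integrations return their driver's own syntax.
class ExampleBindingHelpers {
  // Positional "?" placeholders keep user input out of the SQL text itself,
  // which is the injection protection the interface comment refers to.
  getBindingIdentifier(): string {
    return "?"
  }

  // Joins already-safe identifiers/bindings with ANSI-style concatenation.
  getStringConcat(parts: string[]): string {
    return parts.join(" || ")
  }
}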
@@ -1,5 +1,4 @@
import { QueryJson } from "../../definitions/datasource"
import { Datasource } from "../../definitions/common"
import { QueryJson, Datasource } from "@budibase/types"
const { integrations } = require("../index")

export async function makeExternalQuery(
@@ -2,12 +2,12 @@ import { Knex, knex } from "knex"
import {
  Operation,
  QueryJson,
  QueryOptions,
  RelationshipsJson,
  SearchFilters,
  SortDirection,
} from "../../definitions/datasource"
import { isIsoDateString, SqlClients } from "../utils"
} from "@budibase/types"
import { QueryOptions } from "../../definitions/datasource"
import { isIsoDateString, SqlClient } from "../utils"
import SqlTableQueryBuilder from "./sqlTable"
import environment from "../../environment"
import { removeKeyNumbering } from "./utils"

@@ -28,14 +28,14 @@ function likeKey(client: string, key: string): string {
  }
  let start: string, end: string
  switch (client) {
    case SqlClients.MY_SQL:
    case SqlClient.MY_SQL:
      start = end = "`"
      break
    case SqlClients.ORACLE:
    case SqlClients.POSTGRES:
    case SqlClient.ORACLE:
    case SqlClient.POSTGRES:
      start = end = '"'
      break
    case SqlClients.MS_SQL:
    case SqlClient.MS_SQL:
      start = "["
      end = "]"
      break

@@ -103,7 +103,7 @@ function generateSelectStatement(
    if (
      columnName &&
      schema?.[columnName] &&
      knex.client.config.client === SqlClients.POSTGRES
      knex.client.config.client === SqlClient.POSTGRES
    ) {
      const externalType = schema[columnName].externalType
      if (externalType?.includes("money")) {

@@ -148,7 +148,7 @@ class InternalBuilder {
    const like = (key: string, value: any) => {
      const fnc = allOr ? "orWhere" : "where"
      // postgres supports ilike, nothing else does
      if (this.client === SqlClients.POSTGRES) {
      if (this.client === SqlClient.POSTGRES) {
        query = query[fnc](key, "ilike", `%${value}%`)
      } else {
        const rawFnc = `${fnc}Raw`

@@ -175,7 +175,7 @@ class InternalBuilder {
      iterate(filters.string, (key, value) => {
        const fnc = allOr ? "orWhere" : "where"
        // postgres supports ilike, nothing else does
        if (this.client === SqlClients.POSTGRES) {
        if (this.client === SqlClient.POSTGRES) {
          query = query[fnc](key, "ilike", `${value}%`)
        } else {
          const rawFnc = `${fnc}Raw`

@@ -231,7 +231,7 @@ class InternalBuilder {
    if (filters.contains) {
      const fnc = allOr ? "orWhere" : "where"
      const rawFnc = `${fnc}Raw`
      if (this.client === SqlClients.POSTGRES) {
      if (this.client === SqlClient.POSTGRES) {
        iterate(filters.contains, (key: string, value: any) => {
          const fieldNames = key.split(/\./g)
          const tableName = fieldNames[0]

@@ -244,7 +244,7 @@ class InternalBuilder {
            `"${tableName}"."${columnName}"::jsonb @> '[${value}]'`
          )
        })
      } else if (this.client === SqlClients.MY_SQL) {
      } else if (this.client === SqlClient.MY_SQL) {
        iterate(filters.contains, (key: string, value: any) => {
          if (typeof value === "string") {
            value = `"${value}"`

@@ -267,7 +267,7 @@ class InternalBuilder {
        const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
        query = query.orderBy(`${table?.name}.${key}`, direction)
      }
    } else if (this.client === SqlClients.MS_SQL && paginate?.limit) {
    } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
      // @ts-ignore
      query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
    }

@@ -416,7 +416,7 @@ class InternalBuilder {
      [tableName]: query,
    }).select(selectStatement)
    // have to add after as well (this breaks MS-SQL)
    if (this.client !== SqlClients.MS_SQL) {
    if (this.client !== SqlClient.MS_SQL) {
      preQuery = this.addSorting(preQuery, json)
    }
    // handle joins

@@ -567,9 +567,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
      // same as delete, manage returning
      if (operation === Operation.CREATE || operation === Operation.UPDATE) {
        let id
        if (sqlClient === SqlClients.MS_SQL) {
        if (sqlClient === SqlClient.MS_SQL) {
          id = results?.[0].id
        } else if (sqlClient === SqlClients.MY_SQL) {
        } else if (sqlClient === SqlClient.MY_SQL) {
          id = results?.insertId
        }
        row = processFn(
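The likeKey switch above encodes a small but easy-to-miss rule: each SQL dialect quotes identifiers differently. A standalone sketch of just that rule (the client strings here are illustrative knex client names, not the actual SqlClient enum values):

// Standalone illustration of the per-dialect identifier quoting used by likeKey.
function quoteIdentifier(client: string, key: string): string {
  let start: string, end: string
  switch (client) {
    case "mysql": // MySQL uses backticks
      start = end = "`"
      break
    case "pg": // Postgres and Oracle use double quotes
    case "oracledb":
      start = end = '"'
      break
    case "mssql": // SQL Server uses square brackets
      start = "["
      end = "]"
      break
    default:
      return key
  }
  return `${start}${key}${end}`
}

// quoteIdentifier("mssql", "customers") === "[customers]"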
@@ -1,10 +1,5 @@
import { Knex, knex } from "knex"
import { Table } from "../../definitions/common"
import {
  Operation,
  QueryJson,
  RenameColumn,
} from "../../definitions/datasource"
import { Operation, QueryJson, RenameColumn, Table } from "@budibase/types"
import { breakExternalTableId } from "../utils"
import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
@@ -1,9 +1,9 @@
import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
  DatasourceFieldType,
  QueryType,
  IntegrationBase,
} from "@budibase/types"

module CouchDBModule {
  const PouchDB = require("pouchdb")

@@ -21,30 +21,30 @@ module CouchDBModule {
      "Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.",
    datasource: {
      url: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
        default: "http://localhost:5984",
      },
      database: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
    },
    query: {
      create: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      read: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      update: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      delete: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        fields: {
          id: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
@@ -1,9 +1,9 @@
import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
  DatasourceFieldType,
  QueryType,
  IntegrationBase,
} from "@budibase/types"

module DynamoModule {
  const AWS = require("aws-sdk")

@@ -24,101 +24,101 @@ module DynamoModule {
    type: "Non-relational",
    datasource: {
      region: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
        default: "us-east-1",
      },
      accessKeyId: {
        type: DatasourceFieldTypes.PASSWORD,
        type: DatasourceFieldType.PASSWORD,
        required: true,
      },
      secretAccessKey: {
        type: DatasourceFieldTypes.PASSWORD,
        type: DatasourceFieldType.PASSWORD,
        required: true,
      },
      endpoint: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: false,
        default: "https://dynamodb.us-east-1.amazonaws.com",
      },
    },
    query: {
      create: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      read: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        readable: true,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          index: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
          },
        },
      },
      scan: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        readable: true,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          index: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
          },
        },
      },
      describe: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        readable: true,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      get: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        readable: true,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      update: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      delete: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        fields: {
          table: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
@@ -1,9 +1,9 @@
import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
  DatasourceFieldType,
  QueryType,
  IntegrationBase,
} from "@budibase/types"

module ElasticsearchModule {
  const { Client } = require("@elastic/elasticsearch")

@@ -20,55 +20,55 @@ module ElasticsearchModule {
    type: "Non-relational",
    datasource: {
      url: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
        default: "http://localhost:9200",
      },
    },
    query: {
      create: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        fields: {
          index: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      read: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        fields: {
          index: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      update: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        customisable: true,
        fields: {
          id: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          index: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      delete: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        fields: {
          index: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          id: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
@@ -1,9 +1,9 @@
import {
  DatasourceFieldTypes,
  DatasourceFieldType,
  Integration,
  QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
  QueryType,
  IntegrationBase,
} from "@budibase/types"
import { Firestore, WhereFilterOp } from "@google-cloud/firestore"

module Firebase {

@@ -21,46 +21,46 @@ module Firebase {
      "Cloud Firestore is a flexible, scalable database for mobile, web, and server development from Firebase and Google Cloud.",
    datasource: {
      email: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
      privateKey: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
      projectId: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
    },
    query: {
      create: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      read: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      update: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      delete: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
    },
    extra: {
      collection: {
        displayName: "Collection",
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
      filterField: {
        displayName: "Filter field",
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: false,
      },
      filter: {
        displayName: "Filter comparison",
        type: DatasourceFieldTypes.LIST,
        type: DatasourceFieldType.LIST,
        required: false,
        data: {
          read: [

@@ -79,7 +79,7 @@ module Firebase {
      },
      filterValue: {
        displayName: "Filter value",
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: false,
      },
    },
@@ -1,12 +1,13 @@
import {
  DatasourceFieldTypes,
  DatasourceFieldType,
  Integration,
  QueryType,
  Table,
  TableSchema,
  QueryJson,
  QueryTypes,
} from "../definitions/datasource"
  DatasourcePlus,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import { DatasourcePlus } from "./base/datasourcePlus"
import { Table, TableSchema } from "../definitions/common"
import { buildExternalTableId } from "./utils"
import { DataSourceOperation, FieldTypes } from "../constants"
import { GoogleSpreadsheet } from "google-spreadsheet"

@@ -53,59 +54,59 @@ module GoogleSheetsModule {
    datasource: {
      spreadsheetId: {
        display: "Google Sheet URL",
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
    },
    query: {
      create: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        fields: {
          sheet: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          row: {
            type: QueryTypes.JSON,
            type: QueryType.JSON,
            required: true,
          },
        },
      },
      read: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        fields: {
          sheet: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
        },
      },
      update: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        fields: {
          sheet: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          rowIndex: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          row: {
            type: QueryTypes.JSON,
            type: QueryType.JSON,
            required: true,
          },
        },
      },
      delete: {
        type: QueryTypes.FIELDS,
        type: QueryType.FIELDS,
        fields: {
          sheet: {
            type: DatasourceFieldTypes.STRING,
            type: DatasourceFieldType.STRING,
            required: true,
          },
          rowIndex: {
            type: DatasourceFieldTypes.NUMBER,
            type: DatasourceFieldType.NUMBER,
            required: true,
          },
        },
@@ -13,54 +13,54 @@ const googlesheets = require("./googlesheets")
const firebase = require("./firebase")
const redis = require("./redis")
const snowflake = require("./snowflake")
const { SourceNames } = require("../definitions/datasource")
const { SourceName } = require("@budibase/types")
const environment = require("../environment")

const DEFINITIONS = {
  [SourceNames.POSTGRES]: postgres.schema,
  [SourceNames.DYNAMODB]: dynamodb.schema,
  [SourceNames.MONGODB]: mongodb.schema,
  [SourceNames.ELASTICSEARCH]: elasticsearch.schema,
  [SourceNames.COUCHDB]: couchdb.schema,
  [SourceNames.SQL_SERVER]: sqlServer.schema,
  [SourceNames.S3]: s3.schema,
  [SourceNames.AIRTABLE]: airtable.schema,
  [SourceNames.MYSQL]: mysql.schema,
  [SourceNames.ARANGODB]: arangodb.schema,
  [SourceNames.REST]: rest.schema,
  [SourceNames.FIRESTORE]: firebase.schema,
  [SourceNames.REDIS]: redis.schema,
  [SourceNames.SNOWFLAKE]: snowflake.schema,
  [SourceName.POSTGRES]: postgres.schema,
  [SourceName.DYNAMODB]: dynamodb.schema,
  [SourceName.MONGODB]: mongodb.schema,
  [SourceName.ELASTICSEARCH]: elasticsearch.schema,
  [SourceName.COUCHDB]: couchdb.schema,
  [SourceName.SQL_SERVER]: sqlServer.schema,
  [SourceName.S3]: s3.schema,
  [SourceName.AIRTABLE]: airtable.schema,
  [SourceName.MYSQL]: mysql.schema,
  [SourceName.ARANGODB]: arangodb.schema,
  [SourceName.REST]: rest.schema,
  [SourceName.FIRESTORE]: firebase.schema,
  [SourceName.REDIS]: redis.schema,
  [SourceName.SNOWFLAKE]: snowflake.schema,
}

const INTEGRATIONS = {
  [SourceNames.POSTGRES]: postgres.integration,
  [SourceNames.DYNAMODB]: dynamodb.integration,
  [SourceNames.MONGODB]: mongodb.integration,
  [SourceNames.ELASTICSEARCH]: elasticsearch.integration,
  [SourceNames.COUCHDB]: couchdb.integration,
  [SourceNames.SQL_SERVER]: sqlServer.integration,
  [SourceNames.S3]: s3.integration,
  [SourceNames.AIRTABLE]: airtable.integration,
  [SourceNames.MYSQL]: mysql.integration,
  [SourceNames.ARANGODB]: arangodb.integration,
  [SourceNames.REST]: rest.integration,
  [SourceNames.FIRESTORE]: firebase.integration,
  [SourceNames.GOOGLE_SHEETS]: googlesheets.integration,
  [SourceNames.REDIS]: redis.integration,
  [SourceNames.FIREBASE]: firebase.integration,
  [SourceNames.SNOWFLAKE]: snowflake.integration,
  [SourceName.POSTGRES]: postgres.integration,
  [SourceName.DYNAMODB]: dynamodb.integration,
  [SourceName.MONGODB]: mongodb.integration,
  [SourceName.ELASTICSEARCH]: elasticsearch.integration,
  [SourceName.COUCHDB]: couchdb.integration,
  [SourceName.SQL_SERVER]: sqlServer.integration,
  [SourceName.S3]: s3.integration,
  [SourceName.AIRTABLE]: airtable.integration,
  [SourceName.MYSQL]: mysql.integration,
  [SourceName.ARANGODB]: arangodb.integration,
  [SourceName.REST]: rest.integration,
  [SourceName.FIRESTORE]: firebase.integration,
  [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
  [SourceName.REDIS]: redis.integration,
  [SourceName.FIREBASE]: firebase.integration,
  [SourceName.SNOWFLAKE]: snowflake.integration,
}

// optionally add oracle integration if the oracle binary can be installed
if (process.arch && !process.arch.startsWith("arm")) {
  const oracle = require("./oracle")
  DEFINITIONS[SourceNames.ORACLE] = oracle.schema
  INTEGRATIONS[SourceNames.ORACLE] = oracle.integration
  DEFINITIONS[SourceName.ORACLE] = oracle.schema
  INTEGRATIONS[SourceName.ORACLE] = oracle.integration
}

if (environment.SELF_HOSTED) {
  DEFINITIONS[SourceNames.GOOGLE_SHEETS] = googlesheets.schema
  DEFINITIONS[SourceName.GOOGLE_SHEETS] = googlesheets.schema
}

module.exports = {
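Both maps are keyed by the SourceName enum, so resolving an integration at runtime is a plain lookup. A minimal sketch of a helper a caller might write around the maps above (getIntegration is hypothetical and not part of this file):

// Hypothetical helper, assumed to live alongside DEFINITIONS and INTEGRATIONS.
function getIntegration(source) {
  const integration = INTEGRATIONS[source]
  if (!integration) {
    // e.g. Oracle on ARM builds, or Google Sheets when not self-hosted
    throw new Error(`No integration registered for source: ${source}`)
  }
  return { definition: DEFINITIONS[source], integration }
}

// getIntegration(SourceName.POSTGRES) -> { definition, integration }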
@@ -1,20 +1,21 @@
import {
  DatasourceFieldTypes,
  DatasourceFieldType,
  Integration,
  Operation,
  Table,
  TableSchema,
  QueryJson,
  QueryTypes,
  QueryType,
  SqlQuery,
} from "../definitions/datasource"
  DatasourcePlus,
} from "@budibase/types"
import {
  getSqlQuery,
  buildExternalTableId,
  convertSqlType,
  finaliseExternalTables,
  SqlClients,
  SqlClient,
} from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
import { Table, TableSchema } from "../definitions/common"
import Sql from "./base/sql"

module MSSQLModule {

@@ -47,48 +48,48 @@ module MSSQLModule {
    type: "Relational",
    datasource: {
      user: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
        default: "localhost",
      },
      password: {
        type: DatasourceFieldTypes.PASSWORD,
        type: DatasourceFieldType.PASSWORD,
        required: true,
      },
      server: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "localhost",
      },
      port: {
        type: DatasourceFieldTypes.NUMBER,
        type: DatasourceFieldType.NUMBER,
        required: false,
        default: 1433,
      },
      database: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "root",
      },
      schema: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: DEFAULT_SCHEMA,
      },
      encrypt: {
        type: DatasourceFieldTypes.BOOLEAN,
        type: DatasourceFieldType.BOOLEAN,
        default: true,
      },
    },
    query: {
      create: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      read: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      update: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      delete: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
    },
  }

@@ -112,7 +113,7 @@ module MSSQLModule {
      "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"

    constructor(config: MSSQLConfig) {
      super(SqlClients.MS_SQL)
      super(SqlClient.MS_SQL)
      this.config = config
      const clientCfg = {
        ...this.config,
@@ -1,9 +1,9 @@
import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
  DatasourceFieldType,
  QueryType,
  IntegrationBase,
} from "@budibase/types"
import {
  MongoClient,
  ObjectID,

@@ -29,38 +29,38 @@ module MongoDBModule {
      "MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.",
    datasource: {
      connectionString: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
        default: "mongodb://localhost:27017",
      },
      db: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
    },
    query: {
      create: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      read: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      update: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
      delete: {
        type: QueryTypes.JSON,
        type: QueryType.JSON,
      },
    },
    extra: {
      collection: {
        displayName: "Collection",
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
      actionTypes: {
        displayName: "Action Types",
        type: DatasourceFieldTypes.LIST,
        type: DatasourceFieldType.LIST,
        required: true,
        data: {
          read: ["find", "findOne", "findOneAndUpdate", "count", "distinct"],

@@ -92,12 +92,15 @@ module MongoDBModule {
        if (json[field] instanceof Object) {
          json[field] = self.createObjectIds(json[field])
        }
        if (field === "_id" && typeof json[field] === "string") {
          const id = json["_id"].match(
        if (
          (field === "_id" || field?.startsWith("$")) &&
          typeof json[field] === "string"
        ) {
          const id = json[field].match(
            /(?<=objectid\(['"]).*(?=['"]\))/gi
          )?.[0]
          if (id) {
            json["_id"] = ObjectID.createFromHexString(id)
            json[field] = ObjectID.createFromHexString(id)
          }
        }
      }

@@ -114,10 +117,31 @@ module MongoDBModule {
    }

    parseQueryParams(params: string, mode: string) {
      let queryParams = params.split(/(?<=}),[\n\s]*(?={)/g)
      let group1 = queryParams[0] ? JSON.parse(queryParams[0]) : {}
      let group2 = queryParams[1] ? JSON.parse(queryParams[1]) : {}
      let group3 = queryParams[2] ? JSON.parse(queryParams[2]) : {}
      let queryParams = []
      let openCount = 0
      let inQuotes = false
      let i = 0
      let startIndex = 0
      for (let c of params) {
        if (c === '"' && i > 0 && params[i - 1] !== "\\") {
          inQuotes = !inQuotes
        }
        if (c === "{" && !inQuotes) {
          openCount++
          if (openCount === 1) {
            startIndex = i
          }
        } else if (c === "}" && !inQuotes) {
          if (openCount === 1) {
            queryParams.push(JSON.parse(params.substring(startIndex, i + 1)))
          }
          openCount--
        }
        i++
      }
      let group1 = queryParams[0] ?? {}
      let group2 = queryParams[1] ?? {}
      let group3 = queryParams[2] ?? {}
      if (mode === "update") {
        return {
          filter: group1,

@@ -176,7 +200,10 @@ module MongoDBModule {
            return await collection.findOne(json)
          }
          case "findOneAndUpdate": {
            let findAndUpdateJson = json as {
            if (typeof query.json === "string") {
              json = this.parseQueryParams(query.json, "update")
            }
            let findAndUpdateJson = this.createObjectIds(json) as {
              filter: FilterQuery<any>
              update: UpdateQuery<any>
              options: FindOneAndUpdateOption<any>
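The rewritten parseQueryParams swaps the regex split for a brace- and quote-aware scanner, so braces inside quoted strings no longer break the grouping. A usage sketch follows; the input and the mongoIntegration variable are illustrative only.

// Two top-level JSON objects, with a "}, {" sequence hidden inside a quoted string.
const params = `{ "name": "a }, { b" }, { "$set": { "name": "Budibase" } }`

// The old regex split would have cut the string apart inside "a }, { b";
// the scanner only splits at top-level, unquoted braces. With mode "update"
// the first group becomes the filter (per the code above); the remaining
// groups presumably fill the update and options slots.
const parsed = mongoIntegration.parseQueryParams(params, "update")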
@@ -1,19 +1,20 @@
import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
  DatasourceFieldType,
  QueryType,
  QueryJson,
  SqlQuery,
} from "../definitions/datasource"
import { Table, TableSchema } from "../definitions/common"
  Table,
  TableSchema,
  DatasourcePlus,
} from "@budibase/types"
import {
  getSqlQuery,
  SqlClients,
  SqlClient,
  buildExternalTableId,
  convertSqlType,
  finaliseExternalTables,
} from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
import dayjs from "dayjs"
const { NUMBER_REGEX } = require("../utilities")
import Sql from "./base/sql"

@@ -41,51 +42,51 @@ module MySQLModule {
      "MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",
    datasource: {
      host: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "localhost",
        required: true,
      },
      port: {
        type: DatasourceFieldTypes.NUMBER,
        type: DatasourceFieldType.NUMBER,
        default: 3306,
        required: false,
      },
      user: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "root",
        required: true,
      },
      password: {
        type: DatasourceFieldTypes.PASSWORD,
        type: DatasourceFieldType.PASSWORD,
        default: "root",
        required: true,
      },
      database: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
      ssl: {
        type: DatasourceFieldTypes.OBJECT,
        type: DatasourceFieldType.OBJECT,
        required: false,
      },
      rejectUnauthorized: {
        type: DatasourceFieldTypes.BOOLEAN,
        type: DatasourceFieldType.BOOLEAN,
        default: true,
        required: false,
      },
    },
    query: {
      create: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      read: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      update: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      delete: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
    },
  }

@@ -119,7 +120,7 @@ module MySQLModule {
    public schemaErrors: Record<string, string> = {}

    constructor(config: MySQLConfig) {
      super(SqlClients.MY_SQL)
      super(SqlClient.MY_SQL)
      this.config = config
      if (config.ssl && Object.keys(config.ssl).length === 0) {
        delete config.ssl
@@ -1,17 +1,19 @@
import {
  DatasourceFieldTypes,
  DatasourceFieldType,
  Integration,
  Operation,
  QueryJson,
  QueryTypes,
  QueryType,
  SqlQuery,
} from "../definitions/datasource"
  Table,
  DatasourcePlus,
} from "@budibase/types"
import {
  buildExternalTableId,
  convertSqlType,
  finaliseExternalTables,
  getSqlQuery,
  SqlClients,
  SqlClient,
} from "./utils"
import oracledb, {
  BindParameters,

@@ -21,8 +23,6 @@ import oracledb, {
  Result,
} from "oracledb"
import Sql from "./base/sql"
import { Table } from "../definitions/common"
import { DatasourcePlus } from "./base/datasourcePlus"
import { FieldTypes } from "../constants"

module OracleModule {

@@ -45,40 +45,40 @@ module OracleModule {
      "Oracle Database is an object-relational database management system developed by Oracle Corporation",
    datasource: {
      host: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "localhost",
        required: true,
      },
      port: {
        type: DatasourceFieldTypes.NUMBER,
        type: DatasourceFieldType.NUMBER,
        required: true,
        default: 1521,
      },
      database: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
      user: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        required: true,
      },
      password: {
        type: DatasourceFieldTypes.PASSWORD,
        type: DatasourceFieldType.PASSWORD,
        required: true,
      },
    },
    query: {
      create: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      read: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      update: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      delete: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
    },
  }

@@ -172,7 +172,7 @@ module OracleModule {
        OR cons.status IS NULL)
    `
    constructor(config: OracleConfig) {
      super(SqlClients.ORACLE)
      super(SqlClient.ORACLE)
      this.config = config
    }
@@ -1,19 +1,19 @@
import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
  DatasourceFieldType,
  QueryType,
  QueryJson,
  SqlQuery,
} from "../definitions/datasource"
import { Table } from "../definitions/common"
  Table,
  DatasourcePlus,
} from "@budibase/types"
import {
  getSqlQuery,
  buildExternalTableId,
  convertSqlType,
  finaliseExternalTables,
  SqlClients,
  SqlClient,
} from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
import Sql from "./base/sql"

module PostgresModule {

@@ -52,63 +52,63 @@ module PostgresModule {
      "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
    datasource: {
      host: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "localhost",
        required: true,
      },
      port: {
        type: DatasourceFieldTypes.NUMBER,
        type: DatasourceFieldType.NUMBER,
        required: true,
        default: 5432,
      },
      database: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "postgres",
        required: true,
      },
      user: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "root",
        required: true,
      },
      password: {
        type: DatasourceFieldTypes.PASSWORD,
        type: DatasourceFieldType.PASSWORD,
        default: "root",
        required: true,
      },
      schema: {
        type: DatasourceFieldTypes.STRING,
        type: DatasourceFieldType.STRING,
        default: "public",
        required: true,
      },
      ssl: {
        type: DatasourceFieldTypes.BOOLEAN,
        type: DatasourceFieldType.BOOLEAN,
        default: false,
        required: false,
      },
      rejectUnauthorized: {
        type: DatasourceFieldTypes.BOOLEAN,
        type: DatasourceFieldType.BOOLEAN,
        default: false,
        required: false,
      },
      ca: {
        type: DatasourceFieldTypes.LONGFORM,
        type: DatasourceFieldType.LONGFORM,
        default: false,
        required: false,
      },
    },
    query: {
      create: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      read: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      update: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
      delete: {
        type: QueryTypes.SQL,
        type: QueryType.SQL,
      },
    },
  }

@@ -134,7 +134,7 @@ module PostgresModule {
    `

    constructor(config: PostgresConfig) {
      super(SqlClients.POSTGRES)
      super(SqlClient.POSTGRES)
      this.config = config

      let newConfig = {
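Tying this back to the Datasource shape from the definitions file earlier in the diff, a saved Postgres datasource would carry these fields in its config block. A hedged sketch using the schema defaults above; credentials are placeholders and the type discriminator value is assumed.

// Illustrative only - values mirror the schema defaults, credentials are placeholders.
import { Datasource, SourceName } from "@budibase/types"

const postgresDatasource: Partial<Datasource> = {
  type: "datasource", // assumed discriminator value
  name: "My Postgres",
  source: SourceName.POSTGRES,
  plus: true, // table-aware (DatasourcePlus) integration
  config: {
    host: "localhost",
    port: 5432,
    database: "postgres",
    user: "root",
    password: "changeme", // placeholder
    schema: "public",
    ssl: false,
    rejectUnauthorized: false,
  },
}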
@@ -1,5 +1,5 @@
import { findHBSBlocks, processStringSync } from "@budibase/string-templates"
import { DatasourcePlus } from "../base/datasourcePlus"
import { DatasourcePlus } from "@budibase/types"

const CONST_CHAR_REGEX = new RegExp("'[^']*'", "g")
Some files were not shown because too many files have changed in this diff.