Merge branch 'develop' of github.com:Budibase/budibase into new-design-ui

Andrew Kingston 2022-04-27 07:59:37 +01:00
commit 76feddfaff
139 changed files with 2829 additions and 1705 deletions

View File

@ -71,3 +71,57 @@ jobs:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-1
- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:preprod
docker tag proxy-service budibase/proxy:$PREPROD_TAG
docker push budibase/proxy:$PREPROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PREPROD_TAG: k8s-preprod
- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
-H 'Accept: application/vnd.github.v3.raw' \
-o values.preprod.yaml \
-L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml
wc -l values.preprod.yaml
- name: Deploy to Preprod Environment
uses: glopezep/helm@v1.7.1
with:
release: budibase-preprod
namespace: budibase
chart: charts/budibase
token: ${{ github.token }}
helm: helm3
values: |
globals:
appVersion: ${{ steps.previoustag.outputs.tag }}
ingress:
enabled: true
nginx: true
value-files: >-
[
"values.preprod.yaml"
]
env:
KUBECONFIG_FILE: '${{ secrets.PREPROD_KUBECONFIG }}'
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0
with:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Preprod Deployment Complete: ${{ steps.previoustag.outputs.tag }} deployed to Budibase Pre-prod."
embed-title: ${{ steps.previoustag.outputs.tag }}
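The "Pull values.yaml from budibase-infra" step above fetches a raw file through the GitHub contents API. A minimal sketch of the same request in Node using node-fetch (already a dependency elsewhere in this repo); the repo, path and token variable mirror the workflow, everything else is illustrative and not part of the commit:

const fetch = require("node-fetch")
const fs = require("fs")

async function pullValuesFile() {
  const url =
    "https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml"
  const response = await fetch(url, {
    headers: {
      // same secret the workflow passes to curl
      Authorization: `token ${process.env.GH_PERSONAL_TOKEN}`,
      // ask the contents API for the raw file body rather than JSON metadata
      Accept: "application/vnd.github.v3.raw",
    },
  })
  if (!response.ok) {
    throw new Error(`Failed to fetch values.yaml: ${response.status}`)
  }
  fs.writeFileSync("values.preprod.yaml", await response.text())
}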

View File

@ -12,5 +12,10 @@ spec:
resources:
requests:
storage: {{ .Values.services.objectStore.storage }}
{{- if (eq "-" .Values.services.objectStore.storageClass) }}
storageClassName: ""
{{- else }}
storageClassName: "{{ .Values.services.objectStore.storageClass }}"
{{- end }}
status: {}
{{- end }}

View File

@ -12,5 +12,10 @@ spec:
resources:
requests:
storage: {{ .Values.services.redis.storage }}
{{- if (eq "-" .Values.services.redis.storageClass) }}
storageClassName: ""
{{- else }}
storageClassName: "{{ .Values.services.redis.storageClass }}"
{{- end }}
status: {}
{{- end }}

View File

@ -47,6 +47,8 @@ ingress:
className: ""
annotations:
kubernetes.io/ingress.class: nginx
nginx.ingress.kubernetes.io/client-max-body-size: 150M
nginx.ingress.kubernetes.io/proxy-body-size: 50m
hosts:
- host: # change if using custom domain
paths:
@ -149,6 +151,11 @@ services:
url: "" # only change if pointing to existing redis cluster and enabled: false
password: "budibase" # recommended to override if using built-in redis
storage: 100Mi
## If defined, storageClassName: <storageClass>
## If set to "-", storageClassName: "", which disables dynamic provisioning
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: "-"
objectStore:
minio: true
@ -160,6 +167,11 @@ services:
region: "" # AWS_REGION if using S3 or existing minio secret
url: "http://minio-service:9000" # only change if pointing to existing minio cluster or S3 and minio: false
storage: 100Mi
## If defined, storageClassName: <storageClass>
## If set to "-", storageClassName: "", which disables dynamic provisioning
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: "-"
# Override values in couchDB subchart
couchdb:

View File

@ -27,6 +27,7 @@ services:
image: nginx:latest
volumes:
- ./.generated-nginx.dev.conf:/etc/nginx/nginx.conf
- ./proxy/error.html:/usr/share/nginx/html/error.html
ports:
- "${MAIN_PORT}:10000"
depends_on:

View File

@ -28,6 +28,12 @@ http {
ignore_invalid_headers off;
proxy_buffering off;
error_page 502 503 504 /error.html;
location = /error.html {
root /usr/share/nginx/html;
internal;
}
location /db/ {
proxy_pass http://couchdb-service:5984;
rewrite ^/db/(.*)$ /$1 break;

View File

@ -56,6 +56,12 @@ http {
set $csp_media "media-src 'self' https://js.intercomcdn.com";
set $csp_worker "worker-src 'none'";
error_page 502 503 504 /error.html;
location = /error.html {
root /usr/share/nginx/html;
internal;
}
# Security Headers
add_header X-Frame-Options SAMEORIGIN always;
add_header X-Content-Type-Options nosniff always;

View File

@ -1,2 +1,3 @@
FROM nginx:latest
COPY .generated-nginx.prod.conf /etc/nginx/nginx.conf
COPY error.html /usr/share/nginx/html/error.html

hosting/proxy/error.html (new file, 175 lines)
View File

@ -0,0 +1,175 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Budibase</title>
<meta name="viewport" content="width=device-width,initial-scale=1">
</head>
<script>
function checkStatusButton() {
if (window.location.href.includes("budibase.app")) {
var button = document.getElementById("statusButton")
button.removeAttribute("hidden")
}
}
function goToStatus() {
window.location.href = "https://status.budibase.com";
}
function goHome() {
window.location.href = window.location.origin;
}
function getStatus() {
var http = new XMLHttpRequest()
var url = window.location.href
http.open('GET', url, true)
http.send()
http.onreadystatechange = (e) => {
var status = http.status
document.getElementById("status").innerHTML = status
var message
if (status === 502) {
message = "Bad gateway. Please try again later."
} else if (status === 503) {
message = "Service Unavailable. Please try again later."
} else if (status === 504) {
message = "Gateway timeout. Please try again later."
} else {
message = "Please try again later."
}
document.getElementById("message").innerHTML = message
}
}
window.onload = function() {
checkStatusButton()
getStatus()
};
</script>
<style>
:root {
--spectrum-global-color-gray-600: rgb(144,144,144);
--spectrum-global-color-gray-900: rgb(255,255,255);
--spectrum-global-color-gray-800: rgb(227,227,227);
--spectrum-global-color-static-blue-600: rgb(20,115,230);
--spectrum-global-color-static-blue-hover: rgb( 18, 103, 207);
}
html, body {
background-color: #1a1a1a;
padding: 0;
margin: 0;
overflow: hidden;
color: #e7e7e7;
font-family: 'Roboto', sans-serif;
}
button {
color: #e7e7e7;
font-family: 'Roboto', sans-serif;
border: none;
font-size: 15px;
border-radius: 15px;
padding: 8px 22px;
}
button:hover {
cursor: pointer;
}
.main {
height: 100vh;
display: flex;
flex-direction: row;
align-items: center;
justify-content: center;
}
.info {
display: flex;
flex-direction: column;
align-items: left;
}
@media only screen and (max-width: 600px) {
.info {
align-items: center;
}
}
.status {
color: var(--spectrum-global-color-gray-600)
}
.title {
font-weight: 400;
color: var(--spectrum-global-color-gray-900)
}
.message {
font-weight: 200;
color: var(--spectrum-global-color-gray-800)
}
.buttons {
display: flex;
flex-direction: row;
margin-top: 15px;
}
.homeButton {
background-color: var(--spectrum-global-color-static-blue-600);
}
.homeButton:hover {
background-color: var(--spectrum-global-color-static-blue-hover);
}
.statusButton {
background-color: transparent;
margin-left: 20px;
border: none;
}
.hero {
height: 160px;
width: 160px;
margin-right: 80px;
}
.content {
display: flex;
flex-direction: row;
align-items: flex-end;
justify-content: center;
}
@media only screen and (max-width: 600px) {
.content {
flex-direction: column;
}
}
</style>
<script src="">
</script>
<body>
<div class="main">
<div class="content">
<div class="hero">
<img src="https://raw.githubusercontent.com/Budibase/budibase/master/packages/builder/assets/bb-space-man.svg" alt="Budibase Logo">
</div>
<div class="info">
<div>
<h4 id="status" class="status"></h4>
<h1 class="title">
Houston we have a problem!
</h1>
<h3 id="message" class="message">
</h3>
</div>
<div class="buttons">
<button class="homeButton" onclick=goHome()>Return home</button>
<button id="statusButton" class="statusButton" hidden="true" onclick=goToStatus()>Check out status</button>
</div>
</div>
</div>
</div>
</body>
</html>

View File

@ -1,5 +1,5 @@
{
"version": "1.0.105-alpha.38",
"version": "1.0.124-alpha.0",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -74,6 +74,7 @@
"mode:account": "yarn mode:cloud && yarn env:account:enable",
"security:audit": "node scripts/audit.js",
"postinstall": "husky install",
"install:pro": "bash scripts/pro/install.sh"
"install:pro": "bash scripts/pro/install.sh",
"dep:clean": "yarn clean && yarn bootstrap"
}
}

View File

@ -3,4 +3,5 @@ module.exports = {
...require("./src/db/constants"),
...require("./src/db"),
...require("./src/db/views"),
...require("./src/db/pouch"),
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.0.105-alpha.38",
"version": "1.0.124-alpha.0",
"description": "Budibase backend core libraries used in server and worker",
"main": "src/index.js",
"author": "Budibase",
@ -24,6 +24,10 @@
"passport-google-oauth": "^2.0.0", "passport-google-oauth": "^2.0.0",
"passport-jwt": "^4.0.0", "passport-jwt": "^4.0.0",
"passport-local": "^1.0.0", "passport-local": "^1.0.0",
"posthog-node": "^1.3.0",
"pouchdb": "7.3.0",
"pouchdb-find": "^7.2.2",
"pouchdb-replication-stream": "^1.2.9",
"sanitize-s3-objectkey": "^0.0.1", "sanitize-s3-objectkey": "^0.0.1",
"tar-fs": "^2.1.1", "tar-fs": "^2.1.1",
"uuid": "^8.3.2", "uuid": "^8.3.2",
@ -37,7 +41,6 @@
"devDependencies": {
"ioredis-mock": "^5.5.5",
"jest": "^26.6.3",
"pouchdb": "^7.2.1",
"pouchdb-adapter-memory": "^7.2.2",
"pouchdb-all-dbs": "^1.0.2"
},

View File

@ -1,5 +1,5 @@
const redis = require("../redis/authRedis")
const { getCouch } = require("../db")
const { doWithDB } = require("../db")
const { DocumentTypes } = require("../db/constants")
const AppState = {
@ -10,12 +10,14 @@ const EXPIRY_SECONDS = 3600
/**
 * The default populate app metadata function
 */
const populateFromDB = async (appId, CouchDB = null) => {
  if (!CouchDB) {
    CouchDB = getCouch()
  }
  const db = new CouchDB(appId, { skip_setup: true })
  return db.get(DocumentTypes.APP_METADATA)
}
const populateFromDB = async appId => {
  return doWithDB(
    appId,
    db => {
      return db.get(DocumentTypes.APP_METADATA)
    },
    { skip_setup: true }
  )
}
const isInvalid = metadata => {
@ -27,17 +29,16 @@ const isInvalid = metadata => {
* Use redis cache to first read the app metadata.
* If not present fallback to loading the app metadata directly and re-caching.
* @param {string} appId the id of the app to get metadata from.
* @param {object} CouchDB the database being passed
* @returns {object} the app metadata.
*/
exports.getAppMetadata = async (appId, CouchDB = null) => {
exports.getAppMetadata = async appId => {
const client = await redis.getAppClient()
// try cache
let metadata = await client.get(appId)
if (!metadata) {
  let expiry = EXPIRY_SECONDS
  try {
    metadata = await populateFromDB(appId, CouchDB)
    metadata = await populateFromDB(appId)
  } catch (err) {
    // app DB left around, but no metadata, it is invalid
    if (err && err.status === 404) {

View File

@ -1,5 +1,5 @@
const redis = require("../redis/authRedis")
const { getTenantId, lookupTenantId, getGlobalDB } = require("../tenancy")
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
const env = require("../environment")
const accounts = require("../cloud/accounts")
@ -9,9 +9,8 @@ const EXPIRY_SECONDS = 3600
* The default populate user function
*/
const populateFromDB = async (userId, tenantId) => {
  const user = await getGlobalDB(tenantId).get(userId)
  const user = await doWithGlobalDB(tenantId, db => db.get(userId))
  user.budibaseAccess = true
  if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
    const account = await accounts.getAccount(user.email)
    if (account) {

View File

@ -29,9 +29,7 @@ class API {
credentials: "include",
}
const resp = await fetch(`${this.host}${url}`, requestOptions)
return resp
return await fetch(`${this.host}${url}`, requestOptions)
}
post = this.apiCall("POST")

View File

@ -4,7 +4,11 @@ const { newid } = require("../hashing")
const REQUEST_ID_KEY = "requestId"
class FunctionContext {
  static getMiddleware(updateCtxFn = null, contextName = "session") {
  static getMiddleware(
updateCtxFn = null,
destroyFn = null,
contextName = "session"
) {
const namespace = this.createNamespace(contextName)
return async function (ctx, next) {
@ -18,7 +22,14 @@ class FunctionContext {
if (updateCtxFn) {
  updateCtxFn(ctx)
}
next().then(resolve).catch(reject)
next()
.then(resolve)
.catch(reject)
.finally(() => {
if (destroyFn) {
return destroyFn(ctx)
}
})
})
)
}
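A minimal sketch (not part of the commit) of how the extended getMiddleware signature is meant to be used: updateCtxFn runs inside the CLS namespace before the downstream middleware, and the new destroyFn runs in the finally block once next() has resolved or rejected. The Koa app and the context key used here are assumptions for illustration:

const Koa = require("koa")
const FunctionContext = require("./FunctionContext")

const app = new Koa()
app.use(
  FunctionContext.getMiddleware(
    ctx => {
      // set per-request state on the CLS context before handlers run
      FunctionContext.setOnContext("requestId", ctx.headers["x-request-id"])
    },
    async () => {
      // always runs after the request settles - release anything opened above
      console.log("request finished, cleaning up per-request resources")
    }
  )
)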

View File

@ -1,6 +1,6 @@
const { getGlobalUserParams, getAllApps } = require("../db/utils")
const { getDB } = require("../db")
const { doWithDB } = require("../db")
const { getGlobalDB } = require("../tenancy")
const { doWithGlobalDB } = require("../tenancy")
const { StaticDatabases } = require("../db/constants")
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
@ -8,11 +8,12 @@ const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
const removeTenantFromInfoDB = async tenantId => { const removeTenantFromInfoDB = async tenantId => {
try { try {
const infoDb = getDB(PLATFORM_INFO_DB) await doWithDB(PLATFORM_INFO_DB, async infoDb => {
let tenants = await infoDb.get(TENANT_DOC) let tenants = await infoDb.get(TENANT_DOC)
tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId) tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
await infoDb.put(tenants) await infoDb.put(tenants)
})
} catch (err) { } catch (err) {
console.error(`Error removing tenant ${tenantId} from info db`, err) console.error(`Error removing tenant ${tenantId} from info db`, err)
throw err throw err
@ -20,7 +21,7 @@ const removeTenantFromInfoDB = async tenantId => {
} }
exports.removeUserFromInfoDB = async dbUser => { exports.removeUserFromInfoDB = async dbUser => {
const infoDb = getDB(PLATFORM_INFO_DB) await doWithDB(PLATFORM_INFO_DB, async infoDb => {
const keys = [dbUser._id, dbUser.email] const keys = [dbUser._id, dbUser.email]
const userDocs = await infoDb.allDocs({ const userDocs = await infoDb.allDocs({
keys, keys,
@ -33,17 +34,18 @@ exports.removeUserFromInfoDB = async dbUser => {
} }
}) })
await infoDb.bulkDocs(toDelete) await infoDb.bulkDocs(toDelete)
})
} }
const removeUsersFromInfoDB = async tenantId => { const removeUsersFromInfoDB = async tenantId => {
return doWithGlobalDB(tenantId, async db => {
try { try {
const globalDb = getGlobalDB(tenantId) const allUsers = await db.allDocs(
const infoDb = getDB(PLATFORM_INFO_DB)
const allUsers = await globalDb.allDocs(
getGlobalUserParams(null, { getGlobalUserParams(null, {
include_docs: true, include_docs: true,
}) })
) )
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
const allEmails = allUsers.rows.map(row => row.doc.email) const allEmails = allUsers.rows.map(row => row.doc.email)
// get the id docs // get the id docs
let keys = allUsers.rows.map(row => row.id) let keys = allUsers.rows.map(row => row.id)
@ -61,26 +63,31 @@ const removeUsersFromInfoDB = async tenantId => {
} }
}) })
await infoDb.bulkDocs(toDelete) await infoDb.bulkDocs(toDelete)
})
} catch (err) { } catch (err) {
console.error(`Error removing tenant ${tenantId} users from info db`, err) console.error(`Error removing tenant ${tenantId} users from info db`, err)
throw err throw err
} }
})
} }
const removeGlobalDB = async tenantId => { const removeGlobalDB = async tenantId => {
return doWithGlobalDB(tenantId, async db => {
try { try {
const globalDb = getGlobalDB(tenantId) await db.destroy()
await globalDb.destroy()
} catch (err) { } catch (err) {
console.error(`Error removing tenant ${tenantId} users from info db`, err) console.error(`Error removing tenant ${tenantId} users from info db`, err)
throw err throw err
} }
})
} }
const removeTenantApps = async tenantId => { const removeTenantApps = async tenantId => {
try { try {
const apps = await getAllApps({ all: true }) const apps = await getAllApps({ all: true })
const destroyPromises = apps.map(app => getDB(app.appId).destroy()) const destroyPromises = apps.map(app =>
doWithDB(app.appId, db => db.destroy())
)
await Promise.allSettled(destroyPromises) await Promise.allSettled(destroyPromises)
} catch (err) { } catch (err) {
console.error(`Error removing tenant ${tenantId} apps`, err) console.error(`Error removing tenant ${tenantId} apps`, err)

View File

@ -1,9 +1,11 @@
const env = require("../environment")
const { Headers } = require("../../constants")
const { SEPARATOR, DocumentTypes } = require("../db/constants")
const { DEFAULT_TENANT_ID } = require("../constants")
const cls = require("./FunctionContext")
const { getCouch } = require("../db")
const { dangerousGetDB, closeDB } = require("../db")
const { getProdAppID, getDevelopmentAppID } = require("../db/conversions")
const { baseGlobalDBName } = require("../tenancy/utils")
const { isEqual } = require("lodash")
// some test cases call functions directly, need to
@ -12,6 +14,7 @@ let TEST_APP_ID = null
const ContextKeys = {
  TENANT_ID: "tenantId",
  GLOBAL_DB: "globalDb",
  APP_ID: "appId",
  // whatever the request app DB was
  CURRENT_DB: "currentDb",
@ -20,9 +23,37 @@ const ContextKeys = {
// get the dev app DB from the request // get the dev app DB from the request
DEV_DB: "devDb", DEV_DB: "devDb",
DB_OPTS: "dbOpts", DB_OPTS: "dbOpts",
// check if something else is using the context, don't close DB
IN_USE: "inUse",
} }
exports.DEFAULT_TENANT_ID = "default" exports.DEFAULT_TENANT_ID = DEFAULT_TENANT_ID
// this function makes sure the PouchDB objects are closed and
// fully deleted when finished - this protects against memory leaks
async function closeAppDBs() {
const dbKeys = [
ContextKeys.CURRENT_DB,
ContextKeys.PROD_DB,
ContextKeys.DEV_DB,
]
for (let dbKey of dbKeys) {
const db = cls.getFromContext(dbKey)
if (!db) {
continue
}
await closeDB(db)
// clear the DB from context, incase someone tries to use it again
cls.setOnContext(dbKey, null)
}
// clear the app ID now that the databases are closed
if (cls.getFromContext(ContextKeys.APP_ID)) {
cls.setOnContext(ContextKeys.APP_ID, null)
}
if (cls.getFromContext(ContextKeys.DB_OPTS)) {
cls.setOnContext(ContextKeys.DB_OPTS, null)
}
}
exports.isDefaultTenant = () => { exports.isDefaultTenant = () => {
return exports.getTenantId() === exports.DEFAULT_TENANT_ID return exports.getTenantId() === exports.DEFAULT_TENANT_ID
@ -34,14 +65,45 @@ exports.isMultiTenant = () => {
// used for automations, API endpoints should always be in context already // used for automations, API endpoints should always be in context already
exports.doInTenant = (tenantId, task) => { exports.doInTenant = (tenantId, task) => {
return cls.run(() => { // the internal function is so that we can re-use an existing
// context - don't want to close DB on a parent context
async function internal(opts = { existing: false }) {
// set the tenant id // set the tenant id
if (!opts.existing) {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId) cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
if (env.USE_COUCH) {
exports.setGlobalDB(tenantId)
}
}
try {
// invoke the task // invoke the task
return task() return await task()
} finally {
const using = cls.getFromContext(ContextKeys.IN_USE)
if (!using || using <= 1) {
if (env.USE_COUCH) {
await closeDB(exports.getGlobalDB())
}
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKeys.TENANT_ID, null)
cls.setOnContext(ContextKeys.GLOBAL_DB, null)
} else {
cls.setOnContext(using - 1)
}
}
}
const using = cls.getFromContext(ContextKeys.IN_USE)
if (using && cls.getFromContext(ContextKeys.TENANT_ID) === tenantId) {
cls.setOnContext(ContextKeys.IN_USE, using + 1)
return internal({ existing: true })
} else {
return cls.run(async () => {
cls.setOnContext(ContextKeys.IN_USE, 1)
return internal()
}) })
} }
}
/** /**
* Given an app ID this will attempt to retrieve the tenant ID from it. * Given an app ID this will attempt to retrieve the tenant ID from it.
@ -64,37 +126,59 @@ exports.getTenantIDFromAppID = appId => {
} }
const setAppTenantId = appId => { const setAppTenantId = appId => {
const appTenantId = this.getTenantIDFromAppID(appId) || this.DEFAULT_TENANT_ID const appTenantId =
this.updateTenantId(appTenantId) exports.getTenantIDFromAppID(appId) || exports.DEFAULT_TENANT_ID
exports.updateTenantId(appTenantId)
} }
exports.doInAppContext = (appId, task) => { exports.doInAppContext = (appId, task) => {
if (!appId) { if (!appId) {
throw new Error("appId is required") throw new Error("appId is required")
} }
return cls.run(() => {
// set the app tenant id
setAppTenantId(appId)
// the internal function is so that we can re-use an existing
// context - don't want to close DB on a parent context
async function internal(opts = { existing: false }) {
// set the app tenant id
if (!opts.existing) {
setAppTenantId(appId)
}
// set the app ID // set the app ID
cls.setOnContext(ContextKeys.APP_ID, appId) cls.setOnContext(ContextKeys.APP_ID, appId)
try {
// invoke the task // invoke the task
return task() return await task()
} finally {
const using = cls.getFromContext(ContextKeys.IN_USE)
if (!using || using <= 1) {
await closeAppDBs()
} else {
cls.setOnContext(using - 1)
}
}
}
const using = cls.getFromContext(ContextKeys.IN_USE)
if (using && cls.getFromContext(ContextKeys.APP_ID) === appId) {
cls.setOnContext(ContextKeys.IN_USE, using + 1)
return internal({ existing: true })
} else {
return cls.run(async () => {
cls.setOnContext(ContextKeys.IN_USE, 1)
return internal()
}) })
} }
}
exports.updateTenantId = tenantId => { exports.updateTenantId = tenantId => {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId) cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
exports.setGlobalDB(tenantId)
} }
exports.updateAppId = appId => { exports.updateAppId = async appId => {
try { try {
// have to close first, before removing the databases from context
await closeAppDBs()
cls.setOnContext(ContextKeys.APP_ID, appId) cls.setOnContext(ContextKeys.APP_ID, appId)
cls.setOnContext(ContextKeys.PROD_DB, null)
cls.setOnContext(ContextKeys.DEV_DB, null)
cls.setOnContext(ContextKeys.CURRENT_DB, null)
cls.setOnContext(ContextKeys.DB_OPTS, null)
} catch (err) { } catch (err) {
if (env.isTest()) { if (env.isTest()) {
TEST_APP_ID = appId TEST_APP_ID = appId
@ -111,8 +195,8 @@ exports.setTenantId = (
let tenantId let tenantId
// exit early if not multi-tenant // exit early if not multi-tenant
if (!exports.isMultiTenant()) { if (!exports.isMultiTenant()) {
cls.setOnContext(ContextKeys.TENANT_ID, this.DEFAULT_TENANT_ID) cls.setOnContext(ContextKeys.TENANT_ID, exports.DEFAULT_TENANT_ID)
return return exports.DEFAULT_TENANT_ID
} }
const allowQs = opts && opts.allowQs const allowQs = opts && opts.allowQs
@ -140,6 +224,22 @@ exports.setTenantId = (
if (tenantId) { if (tenantId) {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId) cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
} }
return tenantId
}
exports.setGlobalDB = tenantId => {
const dbName = baseGlobalDBName(tenantId)
const db = dangerousGetDB(dbName)
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
return db
}
exports.getGlobalDB = () => {
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
if (!db) {
throw new Error("Global DB not found")
}
return db
} }
exports.isTenantIdSet = () => { exports.isTenantIdSet = () => {
@ -167,7 +267,7 @@ exports.getAppId = () => {
} }
} }
function getDB(key, opts) { function getContextDB(key, opts) {
const dbOptsKey = `${key}${ContextKeys.DB_OPTS}` const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
let storedOpts = cls.getFromContext(dbOptsKey) let storedOpts = cls.getFromContext(dbOptsKey)
let db = cls.getFromContext(key) let db = cls.getFromContext(key)
@ -176,7 +276,6 @@ function getDB(key, opts) {
} }
const appId = exports.getAppId() const appId = exports.getAppId()
const CouchDB = getCouch()
let toUseAppId let toUseAppId
switch (key) { switch (key) {
@ -190,7 +289,7 @@ function getDB(key, opts) {
toUseAppId = getDevelopmentAppID(appId) toUseAppId = getDevelopmentAppID(appId)
break break
} }
db = new CouchDB(toUseAppId, opts) db = dangerousGetDB(toUseAppId, opts)
try { try {
cls.setOnContext(key, db) cls.setOnContext(key, db)
if (opts) { if (opts) {
@ -209,7 +308,7 @@ function getDB(key, opts) {
* contained, dev or prod. * contained, dev or prod.
*/ */
exports.getAppDB = opts => { exports.getAppDB = opts => {
return getDB(ContextKeys.CURRENT_DB, opts) return getContextDB(ContextKeys.CURRENT_DB, opts)
} }
/** /**
@ -217,7 +316,7 @@ exports.getAppDB = opts => {
* contained a development app ID, this will open the prod one. * contained a development app ID, this will open the prod one.
*/ */
exports.getProdAppDB = opts => { exports.getProdAppDB = opts => {
return getDB(ContextKeys.PROD_DB, opts) return getContextDB(ContextKeys.PROD_DB, opts)
} }
/** /**
@ -225,5 +324,5 @@ exports.getProdAppDB = opts => {
* contained a prod app ID, this will open the dev one. * contained a prod app ID, this will open the dev one.
*/ */
exports.getDevAppDB = opts => { exports.getDevAppDB = opts => {
return getDB(ContextKeys.DEV_DB, opts) return getContextDB(ContextKeys.DEV_DB, opts)
} }
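A hedged usage sketch (not from the commit) of the reworked context helpers above: doInTenant sets the tenant id and, when USE_COUCH is on, a global DB on the CLS context; getGlobalDB reads it back; a nested call for the same tenant bumps the IN_USE counter instead of opening a second handle, and everything is closed when the outermost call finishes:

const context = require("./index") // i.e. the context module shown above

async function countUsers(tenantId) {
  return context.doInTenant(tenantId, async () => {
    // set up by doInTenant, throws if called outside a tenant context
    const globalDb = context.getGlobalDB()
    const users = await globalDb.allDocs({ include_docs: true })

    // nested call for the same tenant re-uses the existing context
    await context.doInTenant(tenantId, async () => {
      console.log("still inside the same CLS context")
    })

    return users.rows.length
  })
}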

View File

@ -1,4 +1,4 @@
const { getDB } = require(".")
const { dangerousGetDB, closeDB } = require(".")
class Replication {
/**
@ -7,8 +7,12 @@ class Replication {
* @param {String} target - the DB you want to replicate to, or rollback from
*/
constructor({ source, target }) {
  this.source = getDB(source)
  this.target = getDB(target)
  this.source = dangerousGetDB(source)
  this.target = dangerousGetDB(target)
}
close() {
return Promise.all([closeDB(this.source), closeDB(this.target)])
} }
promisify(operation, opts = {}) {
@ -51,7 +55,7 @@ class Replication {
async rollback() {
  await this.target.destroy()
  // Recreate the DB again
  this.target = getDB(this.target.name)
  this.target = dangerousGetDB(this.target.name)
  await this.replicate()
}
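A short hedged sketch of using the new close() method: because the constructor now opens two PouchDB handles via dangerousGetDB, callers are expected to close them once the replication work is done. The module path and app IDs are placeholders:

const Replication = require("./Replication") // path assumed for illustration

async function promoteDevApp(devAppId, prodAppId) {
  const replication = new Replication({ source: devAppId, target: prodAppId })
  try {
    await replication.replicate()
  } finally {
    // release both PouchDB handles opened in the constructor
    await replication.close()
  }
}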

View File

@ -1,13 +1,67 @@
let Pouch

module.exports.setDB = pouch => {
  Pouch = pouch
}

module.exports.getDB = dbName => {
  return new Pouch(dbName)
}

module.exports.getCouch = () => {
  return Pouch
}

const pouch = require("./pouch")
const env = require("../environment")

let PouchDB
let initialised = false

const put =
  dbPut =>
  async (doc, options = {}) => {
    const response = await dbPut(doc, options)
    // TODO: add created / updated
    return response
  }

const checkInitialised = () => {
  if (!initialised) {
    throw new Error("init has not been called")
  }
}

exports.init = opts => {
  PouchDB = pouch.getPouch(opts)
  initialised = true
}

// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
// this function is prone to leaks, should only be used
// in situations that using the function doWithDB does not work
exports.dangerousGetDB = (dbName, opts) => {
  checkInitialised()
  const db = new PouchDB(dbName, opts)
  const dbPut = db.put
  db.put = put(dbPut)
  return db
}

// use this function if you have called dangerousGetDB - close
// the databases you've opened once finished
exports.closeDB = async db => {
  if (!db || env.isTest()) {
    return
  }
  try {
    return db.close()
  } catch (err) {
    // ignore error, already closed
  }
}

// we have to use a callback for this so that we can close
// the DB when we're done, without this manual requests would
// need to close the database when done with it to avoid memory leaks
exports.doWithDB = async (dbName, cb, opts) => {
  const db = exports.dangerousGetDB(dbName, opts)
  // need this to be async so that we can correctly close DB after all
  // async operations have been completed
  try {
    return await cb(db)
  } finally {
    await exports.closeDB(db)
  }
}

exports.allDbs = () => {
  checkInitialised()
  return PouchDB.allDbs()
}
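A minimal sketch (db name and doc id are examples) contrasting the two access styles described in the comments above: doWithDB closes the handle for you, while dangerousGetDB must be paired with closeDB by the caller:

const { doWithDB, dangerousGetDB, closeDB } = require("@budibase/backend-core/db")

async function readDoc(dbName, docId) {
  // preferred: the handle is closed in the finally block inside doWithDB
  return doWithDB(dbName, async db => db.get(docId), { skip_setup: true })
}

async function readDocManually(dbName, docId) {
  // fallback for cases where a callback does not fit - close it yourself
  const db = dangerousGetDB(dbName, { skip_setup: true })
  try {
    return await db.get(docId)
  } finally {
    await closeDB(db)
  }
}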

View File

@ -0,0 +1,93 @@
const PouchDB = require("pouchdb")
const env = require("../environment")
exports.getCouchUrl = () => {
if (!env.COUCH_DB_URL) return
// username and password already exist in URL
if (env.COUCH_DB_URL.includes("@")) {
return env.COUCH_DB_URL
}
const [protocol, ...rest] = env.COUCH_DB_URL.split("://")
if (!env.COUCH_DB_USERNAME || !env.COUCH_DB_PASSWORD) {
throw new Error(
"CouchDB configuration invalid. You must provide a fully qualified CouchDB url, or the COUCH_DB_USER and COUCH_DB_PASSWORD environment variables."
)
}
return `${protocol}://${env.COUCH_DB_USERNAME}:${env.COUCH_DB_PASSWORD}@${rest}`
}
exports.splitCouchUrl = url => {
const [protocol, rest] = url.split("://")
const [auth, host] = rest.split("@")
const [username, password] = auth.split(":")
return {
url: `${protocol}://${host}`,
auth: {
username,
password,
},
}
}
/**
* Return a constructor for PouchDB.
* This should be rarely used outside of the main application config.
* Exposed for exceptional cases such as in-memory views.
*/
exports.getPouch = (opts = {}) => {
let auth = {
username: env.COUCH_DB_USERNAME,
password: env.COUCH_DB_PASSWORD,
}
let url = exports.getCouchUrl() || "http://localhost:4005"
// need to update security settings
if (!auth.username || !auth.password || url.includes("@")) {
const split = exports.splitCouchUrl(url)
url = split.url
auth = split.auth
}
const authCookie = Buffer.from(`${auth.username}:${auth.password}`).toString(
"base64"
)
let POUCH_DB_DEFAULTS = {
prefix: url,
fetch: (url, opts) => {
// use a specific authorization cookie - be very explicit about how we authenticate
opts.headers.set("Authorization", `Basic ${authCookie}`)
return PouchDB.fetch(url, opts)
},
}
if (opts.inMemory) {
const inMemory = require("pouchdb-adapter-memory")
PouchDB.plugin(inMemory)
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "memory",
}
}
if (opts.replication) {
const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
}
if (opts.find) {
const find = require("pouchdb-find")
PouchDB.plugin(find)
}
const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
if (opts.allDbs) {
const allDbs = require("pouchdb-all-dbs")
allDbs(Pouch)
}
return Pouch
}
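A worked example (values are illustrative, not real credentials) of how getCouchUrl and splitCouchUrl above relate when the credentials are supplied separately from the URL:

// with COUCH_DB_URL=http://localhost:5984, COUCH_DB_USERNAME=admin and
// COUCH_DB_PASSWORD=secret set before the process starts
const { getCouchUrl, splitCouchUrl } = require("./pouch")

const full = getCouchUrl()
// "http://admin:secret@localhost:5984" - credentials spliced into the URL

const parts = splitCouchUrl(full)
// { url: "http://localhost:5984", auth: { username: "admin", password: "secret" } }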

View File

@ -11,7 +11,8 @@ const {
} = require("./constants")
const { getTenantId, getGlobalDBName } = require("../tenancy")
const fetch = require("node-fetch")
const { getCouch } = require("./index")
const { doWithDB, allDbs } = require("./index")
const { getCouchUrl } = require("./pouch")
const { getAppMetadata } = require("../cache/appMetadata")
const { checkSlashesInUrl } = require("../helpers")
const {
@ -150,25 +151,6 @@ exports.getRoleParams = (roleId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.ROLE, roleId, otherProps) return getDocParams(DocumentTypes.ROLE, roleId, otherProps)
} }
exports.getCouchUrl = () => {
if (!env.COUCH_DB_URL) return
// username and password already exist in URL
if (env.COUCH_DB_URL.includes("@")) {
return env.COUCH_DB_URL
}
const [protocol, ...rest] = env.COUCH_DB_URL.split("://")
if (!env.COUCH_DB_USERNAME || !env.COUCH_DB_PASSWORD) {
throw new Error(
"CouchDB configuration invalid. You must provide a fully qualified CouchDB url, or the COUCH_DB_USER and COUCH_DB_PASSWORD environment variables."
)
}
return `${protocol}://${env.COUCH_DB_USERNAME}:${env.COUCH_DB_PASSWORD}@${rest}`
}
exports.getStartEndKeyURL = (base, baseKey, tenantId = null) => { exports.getStartEndKeyURL = (base, baseKey, tenantId = null) => {
const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : "" const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : ""
return `${base}?startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"` return `${base}?startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
@ -184,7 +166,7 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
const efficient = opts && opts.efficient
// specifically for testing we use the pouch package for this
if (env.isTest()) {
  return getCouch().allDbs()
  return allDbs()
}
let dbs = []
async function addDbs(url) {
@ -196,7 +178,7 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
throw "Cannot connect to CouchDB instance" throw "Cannot connect to CouchDB instance"
} }
} }
let couchUrl = `${exports.getCouchUrl()}/_all_dbs` let couchUrl = `${getCouchUrl()}/_all_dbs`
let tenantId = getTenantId() let tenantId = getTenantId()
if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) { if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) {
// just get all DBs when: // just get all DBs when:
@ -227,7 +209,6 @@ exports.getAllDbs = async (opts = { efficient: false }) => {
* @return {Promise<object[]>} returns the app information document stored in each app database. * @return {Promise<object[]>} returns the app information document stored in each app database.
*/ */
exports.getAllApps = async ({ dev, all, idsOnly, efficient } = {}) => { exports.getAllApps = async ({ dev, all, idsOnly, efficient } = {}) => {
const CouchDB = getCouch()
let tenantId = getTenantId() let tenantId = getTenantId()
if (!env.MULTI_TENANCY && !tenantId) { if (!env.MULTI_TENANCY && !tenantId) {
tenantId = DEFAULT_TENANT_ID tenantId = DEFAULT_TENANT_ID
@ -255,7 +236,7 @@ exports.getAllApps = async ({ dev, all, idsOnly, efficient } = {}) => {
}
const appPromises = appDbNames.map(app =>
  // skip setup otherwise databases could be re-created
  getAppMetadata(app, CouchDB)
  getAppMetadata(app)
)
if (appPromises.length === 0) {
  return []
@ -299,10 +280,11 @@ exports.getDevAppIDs = async () => {
} }
exports.dbExists = async dbName => { exports.dbExists = async dbName => {
const CouchDB = getCouch()
let exists = false let exists = false
return doWithDB(
dbName,
async db => {
try { try {
const db = CouchDB(dbName, { skip_setup: true })
// check if database exists // check if database exists
const info = await db.info() const info = await db.info()
if (info && !info.error) { if (info && !info.error) {
@ -312,6 +294,9 @@ exports.dbExists = async dbName => {
exists = false exists = false
} }
return exists return exists
},
{ skip_setup: true }
)
} }
/** /**
@ -436,3 +421,4 @@ exports.generateConfigID = generateConfigID
exports.getConfigParams = getConfigParams exports.getConfigParams = getConfigParams
exports.getScopedFullConfig = getScopedFullConfig exports.getScopedFullConfig = getScopedFullConfig
exports.generateDevInfoID = generateDevInfoID exports.generateDevInfoID = generateDevInfoID
exports.getPlatformUrl = getPlatformUrl

View File

@ -30,9 +30,18 @@ module.exports = {
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
PLATFORM_URL: process.env.PLATFORM_URL,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
USE_COUCH: process.env.USE_COUCH || true,
isTest,
_set(key, value) {
  process.env[key] = value
  module.exports[key] = value
},
}
// clean up any environment variable edge cases
for (let [key, value] of Object.entries(module.exports)) {
// handle the edge case of "0" to disable an environment variable
if (value === "0") {
module.exports[key] = 0
}
}
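An illustrative note on the edge case the clean-up loop handles: exporting a flag as 0 in the shell arrives as the string "0", which is truthy in JavaScript, so process.env.USE_COUCH || true would still leave a truthy value. The loop rewrites "0" to the number 0, so plain truthiness checks behave as expected:

// shell: export USE_COUCH=0
const env = require("./environment") // path assumed for illustration

if (env.USE_COUCH) {
  // without the clean-up loop this branch would still run, because the string
  // "0" is truthy; with it, env.USE_COUCH is the number 0 and this is skipped
}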

View File

@ -1,8 +1,8 @@
const { setDB } = require("./db")
const db = require("./db")
module.exports = {
  init(pouch) {
    setDB(pouch)
  },
  init(opts = {}) {
    db.init(opts.db)
  },
  // some default exports from the library, however these ideally shouldn't
  // be used, instead the syntax require("@budibase/backend-core/db") should be used

View File

@ -1,8 +1,8 @@
const google = require("../google")
const { Cookies, Configs } = require("../../../constants")
const { clearCookie, getCookie } = require("../../../utils")
const { getDB } = require("../../../db")
const { getScopedConfig } = require("../../../db/utils")
const { getScopedConfig, getPlatformUrl } = require("../../../db/utils")
const { doWithDB } = require("../../../db")
const environment = require("../../../environment")
const { getGlobalDB } = require("../../../tenancy")
@ -13,18 +13,28 @@ async function fetchGoogleCreds() {
type: Configs.GOOGLE, type: Configs.GOOGLE,
}) })
// or fall back to env variables // or fall back to env variables
const config = googleConfig || { return (
googleConfig || {
clientID: environment.GOOGLE_CLIENT_ID, clientID: environment.GOOGLE_CLIENT_ID,
clientSecret: environment.GOOGLE_CLIENT_SECRET, clientSecret: environment.GOOGLE_CLIENT_SECRET,
} }
)
}
return config async function platformUrl() {
const db = getGlobalDB()
const publicConfig = await getScopedConfig(db, {
type: Configs.SETTINGS,
})
return getPlatformUrl(publicConfig)
} }
async function preAuth(passport, ctx, next) { async function preAuth(passport, ctx, next) {
// get the relevant config // get the relevant config
const googleConfig = await fetchGoogleCreds() const googleConfig = await fetchGoogleCreds()
let callbackUrl = `${environment.PLATFORM_URL}/api/global/auth/datasource/google/callback` const platUrl = await platformUrl()
let callbackUrl = `${platUrl}/api/global/auth/datasource/google/callback`
const strategy = await google.strategyFactory(googleConfig, callbackUrl) const strategy = await google.strategyFactory(googleConfig, callbackUrl)
if (!ctx.query.appId || !ctx.query.datasourceId) { if (!ctx.query.appId || !ctx.query.datasourceId) {
@ -41,14 +51,15 @@ async function preAuth(passport, ctx, next) {
async function postAuth(passport, ctx, next) { async function postAuth(passport, ctx, next) {
// get the relevant config // get the relevant config
const config = await fetchGoogleCreds() const config = await fetchGoogleCreds()
const platUrl = await platformUrl()
let callbackUrl = `${environment.PLATFORM_URL}/api/global/auth/datasource/google/callback` let callbackUrl = `${platUrl}/api/global/auth/datasource/google/callback`
const strategy = await google.strategyFactory( const strategy = await google.strategyFactory(
config, config,
callbackUrl, callbackUrl,
(accessToken, refreshToken, profile, done) => { (accessToken, refreshToken, profile, done) => {
clearCookie(ctx, Cookies.DatasourceAuth) clearCookie(ctx, Cookies.DatasourceAuth)
done(null, { accessToken, refreshToken }) done(null, { refreshToken })
} }
) )
@ -59,7 +70,7 @@ async function postAuth(passport, ctx, next) {
{ successRedirect: "/", failureRedirect: "/error" }, { successRedirect: "/", failureRedirect: "/error" },
async (err, tokens) => { async (err, tokens) => {
// update the DB for the datasource with all the user info // update the DB for the datasource with all the user info
const db = getDB(authStateCookie.appId) await doWithDB(authStateCookie.appId, async db => {
const datasource = await db.get(authStateCookie.datasourceId) const datasource = await db.get(authStateCookie.datasourceId)
if (!datasource.config) { if (!datasource.config) {
datasource.config = {} datasource.config = {}
@ -69,6 +80,7 @@ async function postAuth(passport, ctx, next) {
ctx.redirect( ctx.redirect(
`/builder/app/${authStateCookie.appId}/data/datasource/${authStateCookie.datasourceId}` `/builder/app/${authStateCookie.appId}/data/datasource/${authStateCookie.datasourceId}`
) )
})
} }
)(ctx, next) )(ctx, next)
} }

View File

@ -2,7 +2,7 @@ const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
const { authenticateThirdParty } = require("./third-party-common")
const buildVerifyFn = async saveUserFn => {
const buildVerifyFn = saveUserFn => {
  return (accessToken, refreshToken, profile, done) => {
    const thirdPartyUser = {
      provider: profile.provider, // should always be 'google'

View File

@ -2,17 +2,13 @@
require("../../../tests/utilities/dbConfig")
const database = require("../../../db")
const { authenticateThirdParty } = require("../third-party-common")
const { data } = require("./utilities/mock-data")
const { DEFAULT_TENANT_ID } = require("../../../constants")
const {
  StaticDatabases,
  generateGlobalUserID
} = require("../../../db/utils")
const { generateGlobalUserID } = require("../../../db/utils")
const { newid } = require("../../../hashing")
const { doWithGlobalDB, doInTenant } = require("../../../tenancy")
let db
const done = jest.fn()
@ -21,7 +17,15 @@ const getErrorMessage = () => {
} }
const saveUser = async (user) => { const saveUser = async (user) => {
return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
return await db.put(user) return await db.put(user)
})
}
function authenticate(user, requireLocal, saveFn) {
return doInTenant(DEFAULT_TENANT_ID, () => {
return authenticateThirdParty(user, requireLocal, done, saveFn)
})
} }
describe("third party common", () => { describe("third party common", () => {
@ -29,35 +33,36 @@ describe("third party common", () => {
let thirdPartyUser let thirdPartyUser
beforeEach(() => { beforeEach(() => {
db = database.getDB(StaticDatabases.GLOBAL.name)
thirdPartyUser = data.buildThirdPartyUser() thirdPartyUser = data.buildThirdPartyUser()
}) })
afterEach(async () => { afterEach(async () => {
return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
jest.clearAllMocks() jest.clearAllMocks()
await db.destroy() await db.destroy()
}) })
})
describe("validation", () => { describe("validation", () => {
const testValidation = async (message) => { const testValidation = async (message) => {
await authenticateThirdParty(thirdPartyUser, false, done, saveUser) await authenticate(thirdPartyUser, false, saveUser)
expect(done.mock.calls.length).toBe(1) expect(done.mock.calls.length).toBe(1)
expect(getErrorMessage()).toContain(message) expect(getErrorMessage()).toContain(message)
} }
it("provider fails", async () => { it("provider fails", async () => {
delete thirdPartyUser.provider delete thirdPartyUser.provider
testValidation("third party user provider required") await testValidation("third party user provider required")
}) })
it("user id fails", async () => { it("user id fails", async () => {
delete thirdPartyUser.userId delete thirdPartyUser.userId
testValidation("third party user id required") await testValidation("third party user id required")
}) })
it("email fails", async () => { it("email fails", async () => {
delete thirdPartyUser.email delete thirdPartyUser.email
testValidation("third party user email required") await testValidation("third party user email required")
}) })
}) })
@ -81,7 +86,7 @@ describe("third party common", () => {
describe("when the user doesn't exist", () => { describe("when the user doesn't exist", () => {
describe("when a local account is required", () => { describe("when a local account is required", () => {
it("returns an error message", async () => { it("returns an error message", async () => {
await authenticateThirdParty(thirdPartyUser, true, done, saveUser) await authenticate(thirdPartyUser, true, saveUser)
expect(done.mock.calls.length).toBe(1) expect(done.mock.calls.length).toBe(1)
expect(getErrorMessage()).toContain("Email does not yet exist. You must set up your local budibase account first.") expect(getErrorMessage()).toContain("Email does not yet exist. You must set up your local budibase account first.")
}) })
@ -89,7 +94,7 @@ describe("third party common", () => {
describe("when a local account isn't required", () => { describe("when a local account isn't required", () => {
it("creates and authenticates the user", async () => { it("creates and authenticates the user", async () => {
await authenticateThirdParty(thirdPartyUser, false, done, saveUser) await authenticate(thirdPartyUser, false, saveUser)
const user = expectUserIsAuthenticated() const user = expectUserIsAuthenticated()
expectUserIsSynced(user, thirdPartyUser) expectUserIsSynced(user, thirdPartyUser)
expect(user.roles).toStrictEqual({}) expect(user.roles).toStrictEqual({})
@ -103,12 +108,15 @@ describe("third party common", () => {
let email let email
const createUser = async () => { const createUser = async () => {
return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
dbUser = { dbUser = {
_id: id, _id: id,
email: email, email: email,
} }
const response = await db.put(dbUser) const response = await db.put(dbUser)
dbUser._rev = response.rev dbUser._rev = response.rev
return dbUser
})
} }
const expectUserIsUpdated = (user) => { const expectUserIsUpdated = (user) => {
@ -126,7 +134,7 @@ describe("third party common", () => {
}) })
it("syncs and authenticates the user", async () => { it("syncs and authenticates the user", async () => {
await authenticateThirdParty(thirdPartyUser, true, done, saveUser) await authenticate(thirdPartyUser, true, saveUser)
const user = expectUserIsAuthenticated() const user = expectUserIsAuthenticated()
expectUserIsSynced(user, thirdPartyUser) expectUserIsSynced(user, thirdPartyUser)
@ -142,7 +150,7 @@ describe("third party common", () => {
}) })
it("syncs and authenticates the user", async () => { it("syncs and authenticates the user", async () => {
await authenticateThirdParty(thirdPartyUser, true, done, saveUser) await authenticate(thirdPartyUser, true, saveUser)
const user = expectUserIsAuthenticated() const user = expectUserIsAuthenticated()
expectUserIsSynced(user, thirdPartyUser) expectUserIsSynced(user, thirdPartyUser)
@ -160,7 +168,7 @@ describe("third party common", () => {
}) })
it("syncs and authenticates the user", async () => { it("syncs and authenticates the user", async () => {
await authenticateThirdParty(thirdPartyUser, true, done, saveUser) await authenticate(thirdPartyUser, true, saveUser)
const user = expectUserIsAuthenticated() const user = expectUserIsAuthenticated()
expectUserIsSynced(user, thirdPartyUser) expectUserIsSynced(user, thirdPartyUser)

View File

@ -1,4 +1,5 @@
const { setTenantId } = require("../tenancy")
const { setTenantId, setGlobalDB, getGlobalDB } = require("../tenancy")
const { closeDB } = require("../db")
const ContextFactory = require("../context/FunctionContext")
const { buildMatcherRegex, matches } = require("./matchers")
@ -10,10 +11,17 @@ module.exports = (
const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns) const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
const noTenancyOptions = buildMatcherRegex(noTenancyPatterns) const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
return ContextFactory.getMiddleware(ctx => { const updateCtxFn = ctx => {
const allowNoTenant = const allowNoTenant =
opts.noTenancyRequired || !!matches(ctx, noTenancyOptions) opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
const allowQs = !!matches(ctx, allowQsOptions) const allowQs = !!matches(ctx, allowQsOptions)
setTenantId(ctx, { allowQs, allowNoTenant }) const tenantId = setTenantId(ctx, { allowQs, allowNoTenant })
}) setGlobalDB(tenantId)
}
const destroyFn = async () => {
const db = getGlobalDB()
await closeDB(db)
}
return ContextFactory.getMiddleware(updateCtxFn, destroyFn)
} }

View File

@ -1,4 +1,5 @@
const { DEFAULT_TENANT_ID } = require("../constants")
const { doWithDB } = require("../db")
const { DocumentTypes } = require("../db/constants")
const { getAllApps } = require("../db/utils")
const environment = require("../environment")
@ -26,7 +27,7 @@ exports.getMigrationsDoc = async db => {
} }
} }
const runMigration = async (CouchDB, migration, options = {}) => {
const runMigration = async (migration, options = {}) => {
  const tenantId = getTenantId()
  const migrationType = migration.type
  const migrationName = migration.name
@ -46,7 +47,7 @@ const runMigration = async (CouchDB, migration, options = {}) => {
// run the migration against each db
for (const dbName of dbNames) {
  const db = new CouchDB(dbName)
  await doWithDB(dbName, async db => {
    try {
      const doc = await exports.getMigrationsDoc(db)
@ -62,7 +63,7 @@ const runMigration = async (CouchDB, migration, options = {}) => {
) )
} else { } else {
// the migration has already been performed // the migration has already been performed
continue return
} }
} }
@ -85,10 +86,11 @@ const runMigration = async (CouchDB, migration, options = {}) => {
) )
throw err throw err
} }
})
} }
} }
exports.runMigrations = async (CouchDB, migrations, options = {}) => {
exports.runMigrations = async (migrations, options = {}) => {
  console.log("Running migrations")
  let tenantIds
  if (environment.MULTI_TENANCY) {
@ -108,9 +110,7 @@ exports.runMigrations = async (CouchDB, migrations, options = {}) => {
// for all migrations
for (const migration of migrations) {
  // run the migration
  await doInTenant(tenantId, () =>
    runMigration(CouchDB, migration, options)
  )
  await doInTenant(tenantId, () => runMigration(migration, options))
}
}
console.log("Migrations complete")
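A hedged sketch of calling the reworked runMigrations, which no longer takes a CouchDB constructor and opens each database through doWithDB internally. The type and name fields are read by runMigration above; the fn property and the literal values are assumptions about the migration shape, not shown in this diff:

const { runMigrations } = require("./index") // the migrations module above

const MIGRATIONS = [
  {
    type: "global",            // which databases the migration targets (value illustrative)
    name: "example_migration", // recorded in the migrations doc so it only runs once
    fn: async db => {
      // hypothetical migration body, receives the per-database handle
      await db.put({ _id: "_design/example", views: {} })
    },
  },
]

runMigrations(MIGRATIONS).catch(err => {
  console.error("Migrations failed", err)
  process.exit(1)
})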

View File

@ -1,7 +1,7 @@
require("../../tests/utilities/dbConfig")
const { runMigrations, getMigrationsDoc } = require("../index")
const CouchDB = require("../../db").getCouch()
const { dangerousGetDB } = require("../../db")
const {
  StaticDatabases,
} = require("../../db/utils")
@ -20,7 +20,7 @@ describe("migrations", () => {
}]
beforeEach(() => {
  db = new CouchDB(StaticDatabases.GLOBAL.name)
  db = dangerousGetDB(StaticDatabases.GLOBAL.name)
})
afterEach(async () => {
@ -29,7 +29,7 @@ describe("migrations", () => {
})
const migrate = () => {
  return runMigrations(CouchDB, MIGRATIONS)
  return runMigrations(MIGRATIONS)
}
it("should run a new migration", async () => {

View File

@ -7,7 +7,7 @@ const {
SEPARATOR,
} = require("../db/utils")
const { getAppDB } = require("../context")
const { getDB } = require("../db")
const { doWithDB } = require("../db")
const BUILTIN_IDS = { const BUILTIN_IDS = {
ADMIN: "ADMIN", ADMIN: "ADMIN",
@ -199,7 +199,12 @@ exports.checkForRoleResourceArray = (rolePerms, resourceId) => {
* @return {Promise<object[]>} An array of the role objects that were found.
*/
exports.getAllRoles = async appId => {
  const db = appId ? getDB(appId) : getAppDB()
  if (appId) {
return doWithDB(appId, internal)
} else {
return internal(getAppDB())
}
async function internal(db) {
const body = await db.allDocs( const body = await db.allDocs(
getRoleParams(null, { getRoleParams(null, {
include_docs: true, include_docs: true,
@ -237,6 +242,7 @@ exports.getAllRoles = async appId => {
} }
return roles return roles
} }
}
/** /**
* This retrieves the required role for a resource * This retrieves the required role for a resource
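getAllRoles keeps one read path but now obtains its handle in two ways: an explicit appId borrows a connection through doWithDB, while the no-argument case reuses the request-scoped handle from getAppDB, and both feed the new internal function. A condensed sketch of that shape (the query and mapping are simplified, not the full role merging shown above):

const { getAppDB } = require("../context")
const { doWithDB } = require("../db")

exports.getAllRoles = async appId => {
  // identical work either way; only the source of the db handle differs
  async function internal(db) {
    const body = await db.allDocs({ include_docs: true }) // params simplified
    return body.rows.map(row => row.doc)
  }
  if (appId) {
    return doWithDB(appId, internal)
  }
  return internal(getAppDB())
}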
View File
@ -1,5 +1,6 @@
const { getDB } = require("../db") const { doWithDB } = require("../db")
const { SEPARATOR, StaticDatabases } = require("../db/constants") const { StaticDatabases } = require("../db/constants")
const { baseGlobalDBName } = require("./utils")
const { const {
getTenantId, getTenantId,
DEFAULT_TENANT_ID, DEFAULT_TENANT_ID,
@ -23,7 +24,7 @@ exports.addTenantToUrl = url => {
} }
exports.doesTenantExist = async tenantId => { exports.doesTenantExist = async tenantId => {
const db = getDB(PLATFORM_INFO_DB) return doWithDB(PLATFORM_INFO_DB, async db => {
let tenants let tenants
try { try {
tenants = await db.get(TENANT_DOC) tenants = await db.get(TENANT_DOC)
@ -36,10 +37,11 @@ exports.doesTenantExist = async tenantId => {
Array.isArray(tenants.tenantIds) && Array.isArray(tenants.tenantIds) &&
tenants.tenantIds.indexOf(tenantId) !== -1 tenants.tenantIds.indexOf(tenantId) !== -1
) )
})
} }
exports.tryAddTenant = async (tenantId, userId, email) => { exports.tryAddTenant = async (tenantId, userId, email) => {
const db = getDB(PLATFORM_INFO_DB) return doWithDB(PLATFORM_INFO_DB, async db => {
const getDoc = async id => { const getDoc = async id => {
if (!id) { if (!id) {
return null return null
@ -76,6 +78,7 @@ exports.tryAddTenant = async (tenantId, userId, email) => {
promises.push(db.put(tenants)) promises.push(db.put(tenants))
} }
await Promise.all(promises) await Promise.all(promises)
})
} }
exports.getGlobalDBName = (tenantId = null) => { exports.getGlobalDBName = (tenantId = null) => {
@ -84,23 +87,15 @@ exports.getGlobalDBName = (tenantId = null) => {
if (!tenantId) { if (!tenantId) {
tenantId = getTenantId() tenantId = getTenantId()
} }
return baseGlobalDBName(tenantId)
let dbName
if (tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
} }
exports.getGlobalDB = (tenantId = null) => { exports.doWithGlobalDB = (tenantId, cb) => {
const dbName = exports.getGlobalDBName(tenantId) return doWithDB(exports.getGlobalDBName(tenantId), cb)
return getDB(dbName)
} }
exports.lookupTenantId = async userId => { exports.lookupTenantId = async userId => {
const db = getDB(StaticDatabases.PLATFORM_INFO.name) return doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
let tenantId = env.MULTI_TENANCY ? DEFAULT_TENANT_ID : null let tenantId = env.MULTI_TENANCY ? DEFAULT_TENANT_ID : null
try { try {
const doc = await db.get(userId) const doc = await db.get(userId)
@ -111,16 +106,18 @@ exports.lookupTenantId = async userId => {
// just return the default // just return the default
} }
return tenantId return tenantId
})
} }
// lookup, could be email or userId, either will return a doc // lookup, could be email or userId, either will return a doc
exports.getTenantUser = async identifier => { exports.getTenantUser = async identifier => {
const db = getDB(PLATFORM_INFO_DB) return doWithDB(PLATFORM_INFO_DB, async db => {
try { try {
return await db.get(identifier) return await db.get(identifier)
} catch (err) { } catch (err) {
return null return null
} }
})
} }
exports.isUserInAppTenant = (appId, user = null) => { exports.isUserInAppTenant = (appId, user = null) => {
@ -135,7 +132,7 @@ exports.isUserInAppTenant = (appId, user = null) => {
} }
exports.getTenantIds = async () => { exports.getTenantIds = async () => {
const db = getDB(PLATFORM_INFO_DB) return doWithDB(PLATFORM_INFO_DB, async db => {
let tenants let tenants
try { try {
tenants = await db.get(TENANT_DOC) tenants = await db.get(TENANT_DOC)
@ -144,4 +141,5 @@ exports.getTenantIds = async () => {
return [] return []
} }
return (tenants && tenants.tenantIds) || [] return (tenants && tenants.tenantIds) || []
})
} }
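Every helper in this file that used to hold a platform-info DB handle now wraps its body in doWithDB and returns whatever the callback resolves to, and doWithGlobalDB becomes the callback-style replacement for the removed getGlobalDB export. A usage sketch (the caller, tenant id and the work done inside are illustrative assumptions):

const { doWithGlobalDB } = require("../tenancy") // require path is illustrative

async function countGlobalDocs(tenantId) {
  return doWithGlobalDB(tenantId, async db => {
    // the handle is only valid for the lifetime of this callback
    const resp = await db.allDocs({ include_docs: false })
    return resp.total_rows
  })
}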
View File
@ -0,0 +1,12 @@
const { DEFAULT_TENANT_ID } = require("../constants")
const { StaticDatabases, SEPARATOR } = require("../db/constants")
exports.baseGlobalDBName = tenantId => {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}
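The new helper centralises the tenant-to-global-database-name mapping that getGlobalDBName previously computed inline. A quick sketch of what it returns, assuming the constants imported above and an example "acme" tenant:

const { baseGlobalDBName } = require("./utils")
const { StaticDatabases, SEPARATOR } = require("../db/constants")

// the default (or missing) tenant maps straight to the shared global database
baseGlobalDBName() === StaticDatabases.GLOBAL.name // true

// any other tenant is prefixed: `acme${SEPARATOR}${StaticDatabases.GLOBAL.name}`
baseGlobalDBName("acme")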
View File
@ -1,17 +0,0 @@
const PouchDB = require("pouchdb")
const env = require("../../environment")
let POUCH_DB_DEFAULTS
// should always be test but good to do the sanity check
if (env.isTest()) {
PouchDB.plugin(require("pouchdb-adapter-memory"))
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "memory",
}
}
const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
module.exports = Pouch
View File
@ -1,3 +1,5 @@
const packageConfiguration = require("../../index") const core = require("../../index")
const CouchDB = require("./db") const dbConfig = {
packageConfiguration.init(CouchDB) inMemory: true,
}
core.init({ db: dbConfig })
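With the PouchDB memory-adapter wiring deleted above, tests opt into an in-memory database layer by passing a db config to core.init instead of handing over a PouchDB class. A hedged sketch of a test built on top of this bootstrap (the database name and document are made up):

require("../../tests/utilities/dbConfig") // runs core.init({ db: { inMemory: true } })
const { doWithDB } = require("../../db")

it("reads back what it wrote", async () => {
  await doWithDB("unit-test-db", async db => {
    await db.put({ _id: "doc1", hello: "world" })
    const doc = await db.get("doc1")
    expect(doc.hello).toBe("world")
  })
})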
View File
@ -10,7 +10,7 @@ const { options } = require("./middleware/passport/jwt")
const { queryGlobalView } = require("./db/views") const { queryGlobalView } = require("./db/views")
const { Headers, UserStatus, Cookies, MAX_VALID_DATE } = require("./constants") const { Headers, UserStatus, Cookies, MAX_VALID_DATE } = require("./constants")
const { const {
getGlobalDB, doWithGlobalDB,
updateTenantId, updateTenantId,
getTenantUser, getTenantUser,
tryAddTenant, tryAddTenant,
@ -176,11 +176,25 @@ exports.getGlobalUserByEmail = async email => {
}) })
} }
exports.getBuildersCount = async () => { const getBuilders = async () => {
const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, { const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, {
include_docs: false, include_docs: false,
}) })
return builders ? builders.length : 0
if (!builders) {
return []
}
if (Array.isArray(builders)) {
return builders
} else {
return [builders]
}
}
exports.getBuildersCount = async () => {
const builders = await getBuilders()
return builders.length
} }
exports.saveUser = async ( exports.saveUser = async (
@ -195,7 +209,7 @@ exports.saveUser = async (
// need to set the context for this request, as specified // need to set the context for this request, as specified
updateTenantId(tenantId) updateTenantId(tenantId)
// specify the tenancy incase we're making a new admin user (public) // specify the tenancy incase we're making a new admin user (public)
const db = getGlobalDB(tenantId) return doWithGlobalDB(tenantId, async db => {
let { email, password, _id } = user let { email, password, _id } = user
// make sure another user isn't using the same email // make sure another user isn't using the same email
let dbUser let dbUser
@ -271,6 +285,7 @@ exports.saveUser = async (
throw err throw err
} }
} }
})
} }
/** /**
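getBuildersCount is now built on a small normalising step because queryGlobalView can return nothing, a single document, or an array depending on how many rows match. The same guard in isolation, with illustrative names:

// normalise a global view result into an array before counting it
const toArray = result => {
  if (!result) {
    return []
  }
  return Array.isArray(result) ? result : [result]
}

// getBuildersCount() then reduces to toArray(viewResult).length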
View File
@ -258,6 +258,13 @@
dependencies: dependencies:
"@babel/helper-plugin-utils" "^7.14.5" "@babel/helper-plugin-utils" "^7.14.5"
"@babel/runtime@^7.15.4":
version "7.17.9"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.17.9.tgz#d19fbf802d01a8cb6cf053a64e472d42c434ba72"
integrity sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg==
dependencies:
regenerator-runtime "^0.13.4"
"@babel/template@^7.16.0", "@babel/template@^7.3.3": "@babel/template@^7.16.0", "@babel/template@^7.3.3":
version "7.16.0" version "7.16.0"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6"
@ -857,6 +864,21 @@ aws4@^1.8.0:
resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59"
integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==
axios-retry@^3.1.9:
version "3.2.4"
resolved "https://registry.yarnpkg.com/axios-retry/-/axios-retry-3.2.4.tgz#f447a53c3456f5bfeca18f20c3a3272207d082ae"
integrity sha512-Co3UXiv4npi6lM963mfnuH90/YFLKWWDmoBYfxkHT5xtkSSWNqK9zdG3fw5/CP/dsoKB5aMMJCsgab+tp1OxLQ==
dependencies:
"@babel/runtime" "^7.15.4"
is-retry-allowed "^2.2.0"
axios@0.24.0:
version "0.24.0"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6"
integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==
dependencies:
follow-redirects "^1.14.4"
babel-jest@^26.6.3: babel-jest@^26.6.3:
version "26.6.3" version "26.6.3"
resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056" resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056"
@ -1048,7 +1070,7 @@ buffer-from@1.1.1:
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
buffer-from@^1.0.0: buffer-from@1.1.2, buffer-from@^1.0.0:
version "1.1.2" version "1.1.2"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
@ -1139,6 +1161,11 @@ char-regex@^1.0.2:
resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==
charenc@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
integrity sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=
chownr@^1.1.1: chownr@^1.1.1:
version "1.1.4" version "1.1.4"
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
@ -1273,6 +1300,11 @@ component-emitter@^1.2.1:
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==
component-type@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/component-type/-/component-type-1.2.1.tgz#8a47901700238e4fc32269771230226f24b415a9"
integrity sha1-ikeQFwAjjk/DIml3EjAibyS0Fak=
concat-map@0.0.1: concat-map@0.0.1:
version "0.0.1" version "0.0.1"
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
@ -1315,6 +1347,11 @@ cross-spawn@^7.0.0:
shebang-command "^2.0.0" shebang-command "^2.0.0"
which "^2.0.1" which "^2.0.1"
crypt@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
integrity sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=
cryptiles@2.x.x: cryptiles@2.x.x:
version "2.0.5" version "2.0.5"
resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8" resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
@ -1777,6 +1814,13 @@ fetch-cookie@0.10.1:
dependencies: dependencies:
tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0" tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0"
fetch-cookie@0.11.0:
version "0.11.0"
resolved "https://registry.yarnpkg.com/fetch-cookie/-/fetch-cookie-0.11.0.tgz#e046d2abadd0ded5804ce7e2cae06d4331c15407"
integrity sha512-BQm7iZLFhMWFy5CZ/162sAGjBfdNWb7a8LEqqnzsHFhxT/X/SVj/z2t2nu3aJvjlbQkrAlTUApplPRjWyH4mhA==
dependencies:
tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0"
fill-range@^4.0.0: fill-range@^4.0.0:
version "4.0.0" version "4.0.0"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"
@ -1802,6 +1846,11 @@ find-up@^4.0.0, find-up@^4.1.0:
locate-path "^5.0.0" locate-path "^5.0.0"
path-exists "^4.0.0" path-exists "^4.0.0"
follow-redirects@^1.14.4:
version "1.14.9"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.9.tgz#dd4ea157de7bfaf9ea9b3fbd85aa16951f78d8d7"
integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==
for-in@^1.0.2: for-in@^1.0.2:
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
@ -2175,7 +2224,7 @@ inflight@^1.0.4:
once "^1.3.0" once "^1.3.0"
wrappy "1" wrappy "1"
inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1: inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3:
version "2.0.4" version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@ -2226,7 +2275,7 @@ is-arrayish@^0.2.1:
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
is-buffer@^1.1.5: is-buffer@^1.1.5, is-buffer@~1.1.6:
version "1.1.6" version "1.1.6"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
@ -2328,6 +2377,11 @@ is-potential-custom-element-name@^1.0.1:
resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5"
integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==
is-retry-allowed@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz#88f34cbd236e043e71b6932d09b0c65fb7b4d71d"
integrity sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==
is-stream@^1.1.0: is-stream@^1.1.0:
version "1.1.0" version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
@ -2360,7 +2414,7 @@ isarray@0.0.1:
resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=
isarray@1.0.0, isarray@^1.0.0: isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0:
version "1.0.0" version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
@ -2824,6 +2878,11 @@ jodid25519@^1.0.0:
dependencies: dependencies:
jsbn "~0.1.0" jsbn "~0.1.0"
join-component@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/join-component/-/join-component-1.1.0.tgz#b8417b750661a392bee2c2537c68b2a9d4977cd5"
integrity sha1-uEF7dQZho5K+4sJTfGiyqdSXfNU=
js-tokens@^4.0.0: js-tokens@^4.0.0:
version "4.0.0" version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
@ -2902,7 +2961,7 @@ json-stable-stringify@^1.0.1:
dependencies: dependencies:
jsonify "~0.0.0" jsonify "~0.0.0"
json-stringify-safe@~5.0.1: json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1:
version "5.0.1" version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=
@ -3204,6 +3263,11 @@ lodash.once@^4.0.0:
resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=
lodash.pick@^4.0.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3"
integrity sha1-UvBWEP/53tQiYRRB7R/BI6AwAbM=
lodash@^4.14.0: lodash@^4.14.0:
version "4.17.4" version "4.17.4"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae"
@ -3252,6 +3316,15 @@ map-visit@^1.0.0:
dependencies: dependencies:
object-visit "^1.0.0" object-visit "^1.0.0"
md5@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f"
integrity sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==
dependencies:
charenc "0.0.2"
crypt "0.0.2"
is-buffer "~1.1.6"
memdown@1.4.1: memdown@1.4.1:
version "1.4.1" version "1.4.1"
resolved "https://registry.yarnpkg.com/memdown/-/memdown-1.4.1.tgz#b4e4e192174664ffbae41361aa500f3119efe215" resolved "https://registry.yarnpkg.com/memdown/-/memdown-1.4.1.tgz#b4e4e192174664ffbae41361aa500f3119efe215"
@ -3372,6 +3445,11 @@ ms@2.1.2, ms@^2.1.1:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
ms@^2.1.3:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
nanomatch@^1.2.9: nanomatch@^1.2.9:
version "1.2.13" version "1.2.13"
resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
@ -3399,6 +3477,16 @@ natural-compare@^1.4.0:
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
ndjson@^1.4.3:
version "1.5.0"
resolved "https://registry.yarnpkg.com/ndjson/-/ndjson-1.5.0.tgz#ae603b36b134bcec347b452422b0bf98d5832ec8"
integrity sha1-rmA7NrE0vOw0e0UkIrC/mNWDLsg=
dependencies:
json-stringify-safe "^5.0.1"
minimist "^1.2.0"
split2 "^2.1.0"
through2 "^2.0.3"
nice-try@^1.0.4: nice-try@^1.0.4:
version "1.0.5" version "1.0.5"
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
@ -3409,7 +3497,7 @@ node-fetch@2.6.0:
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA== integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
node-fetch@^2.6.1: node-fetch@2.6.7, node-fetch@^2.6.1:
version "2.6.7" version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
@ -3769,6 +3857,42 @@ posix-character-classes@^0.1.0:
resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=
posthog-node@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-1.3.0.tgz#804ed2f213a2f05253f798bf9569d55a9cad94f7"
integrity sha512-2+VhqiY/rKIqKIXyvemBFHbeijHE25sP7eKltnqcFqAssUE6+sX6vusN9A4luzToOqHQkUZexiCKxvuGagh7JA==
dependencies:
axios "0.24.0"
axios-retry "^3.1.9"
component-type "^1.2.1"
join-component "^1.1.0"
md5 "^2.3.0"
ms "^2.1.3"
remove-trailing-slash "^0.1.1"
uuid "^8.3.2"
pouch-stream@^0.4.0:
version "0.4.1"
resolved "https://registry.yarnpkg.com/pouch-stream/-/pouch-stream-0.4.1.tgz#0c6d8475c9307677627991a2f079b301c3b89bdd"
integrity sha1-DG2EdckwdndieZGi8HmzAcO4m90=
dependencies:
inherits "^2.0.1"
readable-stream "^1.0.27-1"
pouchdb-abstract-mapreduce@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-abstract-mapreduce/-/pouchdb-abstract-mapreduce-7.2.2.tgz#dd1b10a83f8d24361dce9aaaab054614b39f766f"
integrity sha512-7HWN/2yV2JkwMnGnlp84lGvFtnm0Q55NiBUdbBcaT810+clCGKvhssBCrXnmwShD1SXTwT83aszsgiSfW+SnBA==
dependencies:
pouchdb-binary-utils "7.2.2"
pouchdb-collate "7.2.2"
pouchdb-collections "7.2.2"
pouchdb-errors "7.2.2"
pouchdb-fetch "7.2.2"
pouchdb-mapreduce-utils "7.2.2"
pouchdb-md5 "7.2.2"
pouchdb-utils "7.2.2"
pouchdb-adapter-leveldb-core@7.2.2: pouchdb-adapter-leveldb-core@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-adapter-leveldb-core/-/pouchdb-adapter-leveldb-core-7.2.2.tgz#e0aa6a476e2607d7ae89f4a803c9fba6e6d05a8a" resolved "https://registry.yarnpkg.com/pouchdb-adapter-leveldb-core/-/pouchdb-adapter-leveldb-core-7.2.2.tgz#e0aa6a476e2607d7ae89f4a803c9fba6e6d05a8a"
@ -3828,6 +3952,11 @@ pouchdb-binary-utils@7.2.2:
dependencies: dependencies:
buffer-from "1.1.1" buffer-from "1.1.1"
pouchdb-collate@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-collate/-/pouchdb-collate-7.2.2.tgz#fc261f5ef837c437e3445fb0abc3f125d982c37c"
integrity sha512-/SMY9GGasslknivWlCVwXMRMnQ8myKHs4WryQ5535nq1Wj/ehpqWloMwxEQGvZE1Sda3LOm7/5HwLTcB8Our+w==
pouchdb-collections@7.2.2: pouchdb-collections@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-collections/-/pouchdb-collections-7.2.2.tgz#aeed77f33322429e3f59d59ea233b48ff0e68572" resolved "https://registry.yarnpkg.com/pouchdb-collections/-/pouchdb-collections-7.2.2.tgz#aeed77f33322429e3f59d59ea233b48ff0e68572"
@ -3840,6 +3969,28 @@ pouchdb-errors@7.2.2:
dependencies: dependencies:
inherits "2.0.4" inherits "2.0.4"
pouchdb-fetch@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-fetch/-/pouchdb-fetch-7.2.2.tgz#492791236d60c899d7e9973f9aca0d7b9cc02230"
integrity sha512-lUHmaG6U3zjdMkh8Vob9GvEiRGwJfXKE02aZfjiVQgew+9SLkuOxNw3y2q4d1B6mBd273y1k2Lm0IAziRNxQnA==
dependencies:
abort-controller "3.0.0"
fetch-cookie "0.10.1"
node-fetch "2.6.0"
pouchdb-find@^7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-find/-/pouchdb-find-7.2.2.tgz#1227afdd761812d508fe0794b3e904518a721089"
integrity sha512-BmFeFVQ0kHmDehvJxNZl9OmIztCjPlZlVSdpijuFbk/Fi1EFPU1BAv3kLC+6DhZuOqU/BCoaUBY9sn66pPY2ag==
dependencies:
pouchdb-abstract-mapreduce "7.2.2"
pouchdb-collate "7.2.2"
pouchdb-errors "7.2.2"
pouchdb-fetch "7.2.2"
pouchdb-md5 "7.2.2"
pouchdb-selector-core "7.2.2"
pouchdb-utils "7.2.2"
pouchdb-json@7.2.2: pouchdb-json@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-json/-/pouchdb-json-7.2.2.tgz#b939be24b91a7322e9a24b8880a6e21514ec5e1f" resolved "https://registry.yarnpkg.com/pouchdb-json/-/pouchdb-json-7.2.2.tgz#b939be24b91a7322e9a24b8880a6e21514ec5e1f"
@ -3847,6 +3998,16 @@ pouchdb-json@7.2.2:
dependencies: dependencies:
vuvuzela "1.0.3" vuvuzela "1.0.3"
pouchdb-mapreduce-utils@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-mapreduce-utils/-/pouchdb-mapreduce-utils-7.2.2.tgz#13a46a3cc2a3f3b8e24861da26966904f2963146"
integrity sha512-rAllb73hIkU8rU2LJNbzlcj91KuulpwQu804/F6xF3fhZKC/4JQMClahk+N/+VATkpmLxp1zWmvmgdlwVU4HtQ==
dependencies:
argsarray "0.0.1"
inherits "2.0.4"
pouchdb-collections "7.2.2"
pouchdb-utils "7.2.2"
pouchdb-md5@7.2.2: pouchdb-md5@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-md5/-/pouchdb-md5-7.2.2.tgz#415401acc5a844112d765bd1fb4e5d9f38fb0838" resolved "https://registry.yarnpkg.com/pouchdb-md5/-/pouchdb-md5-7.2.2.tgz#415401acc5a844112d765bd1fb4e5d9f38fb0838"
@ -3860,13 +4021,34 @@ pouchdb-merge@7.2.2:
resolved "https://registry.yarnpkg.com/pouchdb-merge/-/pouchdb-merge-7.2.2.tgz#940d85a2b532d6a93a6cab4b250f5648511bcc16" resolved "https://registry.yarnpkg.com/pouchdb-merge/-/pouchdb-merge-7.2.2.tgz#940d85a2b532d6a93a6cab4b250f5648511bcc16"
integrity sha512-6yzKJfjIchBaS7Tusuk8280WJdESzFfQ0sb4jeMUNnrqs4Cx3b0DIEOYTRRD9EJDM+je7D3AZZ4AT0tFw8gb4A== integrity sha512-6yzKJfjIchBaS7Tusuk8280WJdESzFfQ0sb4jeMUNnrqs4Cx3b0DIEOYTRRD9EJDM+je7D3AZZ4AT0tFw8gb4A==
pouchdb-promise@6.4.3: pouchdb-promise@6.4.3, pouchdb-promise@^6.0.4:
version "6.4.3" version "6.4.3"
resolved "https://registry.yarnpkg.com/pouchdb-promise/-/pouchdb-promise-6.4.3.tgz#74516f4acf74957b54debd0fb2c0e5b5a68ca7b3" resolved "https://registry.yarnpkg.com/pouchdb-promise/-/pouchdb-promise-6.4.3.tgz#74516f4acf74957b54debd0fb2c0e5b5a68ca7b3"
integrity sha512-ruJaSFXwzsxRHQfwNHjQfsj58LBOY1RzGzde4PM5CWINZwFjCQAhZwfMrch2o/0oZT6d+Xtt0HTWhq35p3b0qw== integrity sha512-ruJaSFXwzsxRHQfwNHjQfsj58LBOY1RzGzde4PM5CWINZwFjCQAhZwfMrch2o/0oZT6d+Xtt0HTWhq35p3b0qw==
dependencies: dependencies:
lie "3.1.1" lie "3.1.1"
pouchdb-replication-stream@^1.2.9:
version "1.2.9"
resolved "https://registry.yarnpkg.com/pouchdb-replication-stream/-/pouchdb-replication-stream-1.2.9.tgz#aa4fa5d8f52df4825392f18e07c7e11acffc650a"
integrity sha1-qk+l2PUt9IJTkvGOB8fhGs/8ZQo=
dependencies:
argsarray "0.0.1"
inherits "^2.0.3"
lodash.pick "^4.0.0"
ndjson "^1.4.3"
pouch-stream "^0.4.0"
pouchdb-promise "^6.0.4"
through2 "^2.0.0"
pouchdb-selector-core@7.2.2:
version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-selector-core/-/pouchdb-selector-core-7.2.2.tgz#264d7436a8c8ac3801f39960e79875ef7f3879a0"
integrity sha512-XYKCNv9oiNmSXV5+CgR9pkEkTFqxQGWplnVhO3W9P154H08lU0ZoNH02+uf+NjZ2kjse7Q1fxV4r401LEcGMMg==
dependencies:
pouchdb-collate "7.2.2"
pouchdb-utils "7.2.2"
pouchdb-utils@7.2.2: pouchdb-utils@7.2.2:
version "7.2.2" version "7.2.2"
resolved "https://registry.yarnpkg.com/pouchdb-utils/-/pouchdb-utils-7.2.2.tgz#c17c4788f1d052b0daf4ef8797bbc4aaa3945aa4" resolved "https://registry.yarnpkg.com/pouchdb-utils/-/pouchdb-utils-7.2.2.tgz#c17c4788f1d052b0daf4ef8797bbc4aaa3945aa4"
@ -3881,17 +4063,17 @@ pouchdb-utils@7.2.2:
pouchdb-md5 "7.2.2" pouchdb-md5 "7.2.2"
uuid "8.1.0" uuid "8.1.0"
pouchdb@^7.2.1: pouchdb@7.3.0:
version "7.2.2" version "7.3.0"
resolved "https://registry.yarnpkg.com/pouchdb/-/pouchdb-7.2.2.tgz#fcae82862db527e4cf7576ed8549d1384961f364" resolved "https://registry.yarnpkg.com/pouchdb/-/pouchdb-7.3.0.tgz#440fbef12dfd8f9002320802528665e883a3b7f8"
integrity sha512-5gf5nw5XH/2H/DJj8b0YkvG9fhA/4Jt6kL0Y8QjtztVjb1y4J19Rg4rG+fUbXu96gsUrlyIvZ3XfM0b4mogGmw== integrity sha512-OwsIQGXsfx3TrU1pLruj6PGSwFH+h5k4hGNxFkZ76Um7/ZI8F5TzUHFrpldVVIhfXYi2vP31q0q7ot1FSLFYOw==
dependencies: dependencies:
abort-controller "3.0.0" abort-controller "3.0.0"
argsarray "0.0.1" argsarray "0.0.1"
buffer-from "1.1.1" buffer-from "1.1.2"
clone-buffer "1.0.0" clone-buffer "1.0.0"
double-ended-queue "2.1.0-0" double-ended-queue "2.1.0-0"
fetch-cookie "0.10.1" fetch-cookie "0.11.0"
immediate "3.3.0" immediate "3.3.0"
inherits "2.0.4" inherits "2.0.4"
level "6.0.1" level "6.0.1"
@ -3900,11 +4082,11 @@ pouchdb@^7.2.1:
leveldown "5.6.0" leveldown "5.6.0"
levelup "4.4.0" levelup "4.4.0"
ltgt "2.2.1" ltgt "2.2.1"
node-fetch "2.6.0" node-fetch "2.6.7"
readable-stream "1.1.14" readable-stream "1.1.14"
spark-md5 "3.0.1" spark-md5 "3.0.2"
through2 "3.0.2" through2 "3.0.2"
uuid "8.1.0" uuid "8.3.2"
vuvuzela "1.0.3" vuvuzela "1.0.3"
prelude-ls@~1.1.2: prelude-ls@~1.1.2:
@ -3927,6 +4109,11 @@ private@^0.1.6, private@~0.1.5:
resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff"
integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==
process-nextick-args@~2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
prompts@^2.0.1: prompts@^2.0.1:
version "2.4.2" version "2.4.2"
resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069"
@ -4012,7 +4199,7 @@ read-pkg@^5.2.0:
parse-json "^5.0.0" parse-json "^5.0.0"
type-fest "^0.6.0" type-fest "^0.6.0"
readable-stream@1.1.14: readable-stream@1.1.14, readable-stream@^1.0.27-1:
version "1.1.14" version "1.1.14"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9"
integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk= integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk=
@ -4036,6 +4223,19 @@ readable-stream@~0.0.2:
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-0.0.4.tgz#f32d76e3fb863344a548d79923007173665b3b8d" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-0.0.4.tgz#f32d76e3fb863344a548d79923007173665b3b8d"
integrity sha1-8y124/uGM0SlSNeZIwBxc2ZbO40= integrity sha1-8y124/uGM0SlSNeZIwBxc2ZbO40=
readable-stream@~2.3.6:
version "2.3.7"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.3"
isarray "~1.0.0"
process-nextick-args "~2.0.0"
safe-buffer "~5.1.1"
string_decoder "~1.1.1"
util-deprecate "~1.0.1"
readline-sync@^1.4.9: readline-sync@^1.4.9:
version "1.4.10" version "1.4.10"
resolved "https://registry.yarnpkg.com/readline-sync/-/readline-sync-1.4.10.tgz#41df7fbb4b6312d673011594145705bf56d8873b" resolved "https://registry.yarnpkg.com/readline-sync/-/readline-sync-1.4.10.tgz#41df7fbb4b6312d673011594145705bf56d8873b"
@ -4068,6 +4268,11 @@ redis-parser@^3.0.0:
dependencies: dependencies:
redis-errors "^1.0.0" redis-errors "^1.0.0"
regenerator-runtime@^0.13.4:
version "0.13.9"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==
regex-not@^1.0.0, regex-not@^1.0.2: regex-not@^1.0.0, regex-not@^1.0.2:
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
@ -4081,6 +4286,11 @@ remove-trailing-separator@^1.0.1:
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8=
remove-trailing-slash@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/remove-trailing-slash/-/remove-trailing-slash-0.1.1.tgz#be2285a59f39c74d1bce4f825950061915e3780d"
integrity sha512-o4S4Qh6L2jpnCy83ysZDau+VORNvnFw07CKSAymkd6ICNVEPisMyzlc00KlvvicsxKck94SEwhDnMNdICzO+tA==
repeat-element@^1.1.2: repeat-element@^1.1.2:
version "1.1.4" version "1.1.4"
resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9"
@ -4202,7 +4412,7 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
safe-buffer@~5.1.1: safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.2" version "5.1.2"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
@ -4425,6 +4635,11 @@ spark-md5@3.0.1:
resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.1.tgz#83a0e255734f2ab4e5c466e5a2cfc9ba2aa2124d" resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.1.tgz#83a0e255734f2ab4e5c466e5a2cfc9ba2aa2124d"
integrity sha512-0tF3AGSD1ppQeuffsLDIOWlKUd3lS92tFxcsrh5Pe3ZphhnoK+oXIBTzOAThZCiuINZLvpiLH/1VS1/ANEJVig== integrity sha512-0tF3AGSD1ppQeuffsLDIOWlKUd3lS92tFxcsrh5Pe3ZphhnoK+oXIBTzOAThZCiuINZLvpiLH/1VS1/ANEJVig==
spark-md5@3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.2.tgz#7952c4a30784347abcee73268e473b9c0167e3fc"
integrity sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==
spdx-correct@^3.0.0: spdx-correct@^3.0.0:
version "3.1.1" version "3.1.1"
resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9"
@ -4458,6 +4673,13 @@ split-string@^3.0.1, split-string@^3.0.2:
dependencies: dependencies:
extend-shallow "^3.0.0" extend-shallow "^3.0.0"
split2@^2.1.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/split2/-/split2-2.2.0.tgz#186b2575bcf83e85b7d18465756238ee4ee42493"
integrity sha512-RAb22TG39LhI31MbreBgIuKiIKhVsawfTgEGqKHTK87aG+ul/PB8Sqoi3I7kVdRWiCfrKxK3uo4/YUkpNvhPbw==
dependencies:
through2 "^2.0.2"
sprintf-js@^1.1.1: sprintf-js@^1.1.1:
version "1.1.2" version "1.1.2"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.2.tgz#da1765262bf8c0f571749f2ad6c26300207ae673" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.2.tgz#da1765262bf8c0f571749f2ad6c26300207ae673"
@ -4548,6 +4770,13 @@ string_decoder@~0.10.x:
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=
string_decoder@~1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
dependencies:
safe-buffer "~5.1.0"
stringstream@~0.0.4: stringstream@~0.0.4:
version "0.0.6" version "0.0.6"
resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.6.tgz#7880225b0d4ad10e30927d167a1d6f2fd3b33a72" resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.6.tgz#7880225b0d4ad10e30927d167a1d6f2fd3b33a72"
@ -4663,6 +4892,14 @@ through2@3.0.2:
inherits "^2.0.4" inherits "^2.0.4"
readable-stream "2 || 3" readable-stream "2 || 3"
through2@^2.0.0, through2@^2.0.2, through2@^2.0.3:
version "2.0.5"
resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd"
integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==
dependencies:
readable-stream "~2.3.6"
xtend "~4.0.1"
through@~2.3.4: through@~2.3.4:
version "2.3.8" version "2.3.8"
resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
@ -4857,7 +5094,7 @@ use@^3.1.0:
resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f"
integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==
util-deprecate@^1.0.1: util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
@ -4877,6 +5114,11 @@ uuid@8.1.0:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.1.0.tgz#6f1536eb43249f473abc6bd58ff983da1ca30d8d" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.1.0.tgz#6f1536eb43249f473abc6bd58ff983da1ca30d8d"
integrity sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg== integrity sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg==
uuid@8.3.2, uuid@^8.3.0, uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
uuid@^3.0.0: uuid@^3.0.0:
version "3.0.1" version "3.0.1"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1"
@ -4887,11 +5129,6 @@ uuid@^3.3.2:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
uuid@^8.3.0, uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
v8-to-istanbul@^7.0.0: v8-to-istanbul@^7.0.0:
version "7.1.2" version "7.1.2"
resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz#30898d1a7fa0c84d225a2c1434fb958f290883c1" resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz#30898d1a7fa0c84d225a2c1434fb958f290883c1"
@ -5084,7 +5321,7 @@ xmlchars@^2.2.0:
resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"
integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==
xtend@^4.0.2, xtend@~4.0.0: xtend@^4.0.2, xtend@~4.0.0, xtend@~4.0.1:
version "4.0.2" version "4.0.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
View File
@ -1,7 +1,7 @@
{ {
"name": "@budibase/bbui", "name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.", "description": "A UI solution used in the different Budibase projects.",
"version": "1.0.105-alpha.38", "version": "1.0.124-alpha.0",
"license": "MPL-2.0", "license": "MPL-2.0",
"svelte": "src/index.js", "svelte": "src/index.js",
"module": "dist/bbui.es.js", "module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
], ],
"dependencies": { "dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1", "@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "^1.0.105-alpha.38", "@budibase/string-templates": "^1.0.124-alpha.0",
"@spectrum-css/actionbutton": "^1.0.1", "@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1", "@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2", "@spectrum-css/avatar": "^3.0.2",
View File
@ -67,6 +67,7 @@
data-cy={dataCy} data-cy={dataCy}
> >
<div class="spectrum-Dialog-grid"> <div class="spectrum-Dialog-grid">
{#if title || $$slots.header}
<h1 <h1
class="spectrum-Dialog-heading spectrum-Dialog-heading--noHeader" class="spectrum-Dialog-heading spectrum-Dialog-heading--noHeader"
class:noDivider={!showDivider} class:noDivider={!showDivider}
@ -78,9 +79,10 @@
<slot name="header" /> <slot name="header" />
{/if} {/if}
</h1> </h1>
{#if showDivider && (title || $$slots.header)} {#if showDivider}
<Divider size="M" /> <Divider size="M" />
{/if} {/if}
{/if}
<!-- TODO: Remove content-grid class once Layout components are in bbui --> <!-- TODO: Remove content-grid class once Layout components are in bbui -->
<section class="spectrum-Dialog-content content-grid"> <section class="spectrum-Dialog-content content-grid">
View File
@ -12,8 +12,15 @@
export let portalTarget export let portalTarget
export let dataCy export let dataCy
let clazz export let direction = "bottom"
export { clazz as class } export let showTip = false
let tipSvg =
'<svg xmlns="http://www.w3.org/svg/2000" width="23" height="12" class="spectrum-Popover-tip" > <path class="spectrum-Popover-tip-triangle" d="M 0.7071067811865476 0 L 11.414213562373096 10.707106781186548 L 22.121320343559645 0" /> </svg>'
$: tooltipClasses = showTip
? `spectrum-Popover--withTip spectrum-Popover--${direction}`
: ""
export const show = () => { export const show = () => {
dispatch("open") dispatch("open")
@ -41,10 +48,14 @@
use:positionDropdown={{ anchor, align }} use:positionDropdown={{ anchor, align }}
use:clickOutside={hide} use:clickOutside={hide}
on:keydown={handleEscape} on:keydown={handleEscape}
class={"spectrum-Popover is-open " + (clazz || "")} class={"spectrum-Popover is-open " + (tooltipClasses || "")}
role="presentation" role="presentation"
data-cy={dataCy} data-cy={dataCy}
> >
{#if showTip}
{@html tipSvg}
{/if}
<slot /> <slot />
</div> </div>
</Portal> </Portal>
@ -54,4 +65,13 @@
.spectrum-Popover { .spectrum-Popover {
min-width: var(--spectrum-global-dimension-size-2000) !important; min-width: var(--spectrum-global-dimension-size-2000) !important;
} }
.spectrum-Popover.is-open.spectrum-Popover--withTip {
margin-top: var(--spacing-xs);
margin-left: var(--spacing-xl);
}
:global(.spectrum-Popover--bottom .spectrum-Popover-tip),
:global(.spectrum-Popover--top .spectrum-Popover-tip) {
left: 90%;
margin-left: calc(var(--spectrum-global-dimension-size-150) * -1);
}
</style> </style>
View File
@ -1,82 +0,0 @@
<script>
import { setContext } from "svelte"
import Popover from "../Popover/Popover.svelte"
export let disabled = false
export let align = "left"
export let anchor
export let showTip = true
export let direction = "bottom"
export let dataCy = null
let dropdown
let tipSvg =
'<svg xmlns="http://www.w3.org/svg/2000" width="23" height="12" class="spectrum-Popover-tip" > <path class="spectrum-Popover-tip-triangle" d="M 0.7071067811865476 0 L 11.414213562373096 10.707106781186548 L 22.121320343559645 0" /> </svg>'
// This is needed because display: contents is considered "invisible".
// It should only ever be an action button, so should be fine.
function getAnchor(node) {
if (!anchor) {
anchor = node.firstChild
}
}
//need this for the publish/view behaviours
export const hide = () => {
dropdown.hide()
}
export const show = () => {
dropdown.show()
}
const openMenu = event => {
if (!disabled) {
event.stopPropagation()
show()
}
}
setContext("popoverMenu", { show, hide })
</script>
<div class="popover-menu">
<div use:getAnchor on:click={openMenu}>
<slot name="control" />
</div>
<Popover
bind:this={dropdown}
{anchor}
{align}
class={showTip
? `spectrum-Popover--withTip spectrum-Popover--${direction}`
: ""}
>
{#if showTip}
{@html tipSvg}
{/if}
<div class="popover-container" data-cy={dataCy}>
<div class="popover-menu-wrap">
<slot />
</div>
</div>
</Popover>
</div>
<style>
:global(.spectrum-Popover.is-open.spectrum-Popover--withTip) {
margin-top: var(--spacing-xs);
margin-left: var(--spacing-xl);
}
.popover-menu-wrap {
padding: 10px;
}
.popover-menu :global(.icon) {
display: flex;
}
:global(.spectrum-Popover--bottom .spectrum-Popover-tip) {
left: 90%;
margin-left: calc(var(--spectrum-global-dimension-size-150) * -1);
}
</style>
View File
@ -25,7 +25,6 @@ export { default as RadioGroup } from "./Form/RadioGroup.svelte"
export { default as Checkbox } from "./Form/Checkbox.svelte" export { default as Checkbox } from "./Form/Checkbox.svelte"
export { default as DetailSummary } from "./DetailSummary/DetailSummary.svelte" export { default as DetailSummary } from "./DetailSummary/DetailSummary.svelte"
export { default as Popover } from "./Popover/Popover.svelte" export { default as Popover } from "./Popover/Popover.svelte"
export { default as PopoverMenu } from "./Popover/PopoverMenu.svelte"
export { default as ProgressBar } from "./ProgressBar/ProgressBar.svelte" export { default as ProgressBar } from "./ProgressBar/ProgressBar.svelte"
export { default as ProgressCircle } from "./ProgressCircle/ProgressCircle.svelte" export { default as ProgressCircle } from "./ProgressCircle/ProgressCircle.svelte"
export { default as Label } from "./Label/Label.svelte" export { default as Label } from "./Label/Label.svelte"
View File
@ -23,8 +23,8 @@ filterTests(['all'], () => {
cy.get(".spectrum-Button").contains("Edit").click({ force: true }) cy.get(".spectrum-Button").contains("Edit").click({ force: true })
}) })
cy.get(".app-status-icon svg[aria-label='GlobeStrike']").should("exist") cy.get(".deployment-top-nav svg[aria-label='GlobeStrike']").should("exist")
cy.get(".app-status-icon svg[aria-label='Globe']").should("not.exist") cy.get(".deployment-top-nav svg[aria-label='Globe']").should("not.exist")
}) })
it("Should publish an application and correctly reflect that", () => { it("Should publish an application and correctly reflect that", () => {
@ -61,13 +61,13 @@ filterTests(['all'], () => {
cy.get(".spectrum-Button").contains("Edit").click({ force: true }) cy.get(".spectrum-Button").contains("Edit").click({ force: true })
}) })
cy.get(".app-status-icon svg[aria-label='Globe']").should("exist").click({ force: true }) cy.get(".deployment-top-nav svg[aria-label='Globe']").should("exist").click({ force: true })
cy.get("[data-cy='publish-popover-menu']").should("be.visible") cy.get("[data-cy='publish-popover-menu']").should("be.visible")
.within(() => { .within(() => {
cy.get("[data-cy='publish-popover-action']").should("exist") cy.get("[data-cy='publish-popover-action']").should("exist")
cy.get("button").contains("View App").should("exist") cy.get("button").contains("View app").should("exist")
cy.get(".publish-popover-message").should("have.text", "Last Published: a few seconds ago") cy.get(".publish-popover-message").should("have.text", "Last published a few seconds ago")
}) })
}) })
@ -89,7 +89,7 @@ filterTests(['all'], () => {
}) })
//The published status //The published status
cy.get(".app-status-icon svg[aria-label='Globe']").should("exist") cy.get(".deployment-top-nav svg[aria-label='Globe']").should("exist")
.click({ force: true }) .click({ force: true })
cy.get("[data-cy='publish-popover-menu']").should("be.visible") cy.get("[data-cy='publish-popover-menu']").should("be.visible")
@ -101,7 +101,7 @@ filterTests(['all'], () => {
cy.get(".confirm-wrap button").click({ force: true } cy.get(".confirm-wrap button").click({ force: true }
)}) )})
cy.get(".app-status-icon svg[aria-label='GlobeStrike']").should("exist") cy.get(".deployment-top-nav svg[aria-label='GlobeStrike']").should("exist")
cy.visit(`${Cypress.config().baseUrl}/builder`) cy.visit(`${Cypress.config().baseUrl}/builder`)
View File
@ -11,7 +11,7 @@ filterTests(['all'], () => {
cy.applicationInAppTable("Cypress Tests") cy.applicationInAppTable("Cypress Tests")
cy.get(".appTable") cy.get(".appTable")
.within(() => { .within(() => {
cy.get("[data-cy='app-row-actions-menu']").eq(0).click() cy.get(".app-row-actions-icon").eq(0).click()
}) })
cy.get(".spectrum-Menu").contains("Edit icon").click() cy.get(".spectrum-Menu").contains("Edit icon").click()
// Select random icon // Select random icon
View File
@ -11,7 +11,7 @@ filterTests(['smoke', 'all'], () => {
cy.createTestTableWithData() cy.createTestTableWithData()
cy.wait(2000) cy.wait(2000)
cy.contains("Automate").click() cy.contains("Automate").click()
cy.get("[data-cy='new-screen'] > .spectrum-Icon").click() cy.get(".add-button .spectrum-Icon").click()
cy.get(".modal-inner-wrapper").within(() => { cy.get(".modal-inner-wrapper").within(() => {
cy.get("input").type("Add Row") cy.get("input").type("Add Row")
cy.contains("Row Created").click({ force: true }) cy.contains("Row Created").click({ force: true })
View File
@ -125,7 +125,7 @@ filterTests(['smoke', 'all'], () => {
it("renames a view", () => { it("renames a view", () => {
cy.contains(".nav-item", "Test View") cy.contains(".nav-item", "Test View")
.find(".actions .icon") .find(".actions .icon.open-popover")
.click({ force: true }) .click({ force: true })
cy.get(".spectrum-Menu-itemLabel").contains("Edit").click() cy.get(".spectrum-Menu-itemLabel").contains("Edit").click()
cy.get(".modal-inner-wrapper").within(() => { cy.get(".modal-inner-wrapper").within(() => {
@ -138,7 +138,7 @@ filterTests(['smoke', 'all'], () => {
it("deletes a view", () => { it("deletes a view", () => {
cy.contains(".nav-item", "Test View Updated") cy.contains(".nav-item", "Test View Updated")
.find(".actions .icon") .find(".actions .icon.open-popover")
.click({ force: true }) .click({ force: true })
cy.contains("Delete").click() cy.contains("Delete").click()
cy.contains("Delete View").click() cy.contains("Delete View").click()
View File
@ -99,7 +99,7 @@ filterTests(['all'], () => {
cy.searchForApplication(originalName) cy.searchForApplication(originalName)
cy.get(".appTable") cy.get(".appTable")
.within(() => { .within(() => {
cy.get("[data-cy='app-row-actions-menu']").eq(0).click() cy.get("[aria-label='More']").eq(0).click()
}) })
// Check for when an app is published // Check for when an app is published
if (published == true) { if (published == true) {
View File
@@ -10,9 +10,9 @@ filterTests(['smoke', 'all'], () => {
it("should try to revert an unpublished app", () => {
// Click revert icon
cy.get(".toprightnav").within(() => {
- cy.get("[data-cy='revert-application-topnav']").click({ force: true })
+ cy.get("[aria-label='Revert']").click({ force: true })
})
- cy.get(".spectrum-Dialog-grid").within(() => {
+ cy.get(".spectrum-Modal").within(() => {
// Enter app name before revert
cy.get("input").type("Cypress Tests")
cy.intercept('**/revert').as('revertApp')
@@ -41,7 +41,7 @@ filterTests(['smoke', 'all'], () => {
cy.addComponent("Elements", "Button")
// Click Revert
cy.get(".toprightnav").within(() => {
- cy.get("[data-cy='revert-application-topnav']").click({ force: true })
+ cy.get("[aria-label='Revert']").click({ force: true })
})
cy.get(".spectrum-Dialog-grid").within(() => {
// Click Revert
@@ -58,7 +58,7 @@ filterTests(['smoke', 'all'], () => {
it("should enter incorrect app name when reverting", () => {
// Click Revert
cy.get(".toprightnav").within(() => {
- cy.get("[data-cy='revert-application-topnav']").click({ force: true })
+ cy.get("[aria-label='Revert']").click({ force: true })
})
// Enter incorrect app name
cy.get(".spectrum-Dialog-grid").within(() => {

View File

@@ -292,7 +292,7 @@ Cypress.Commands.add("createScreen", (route, accessLevelLabel) => {
cy.contains("Design").click()
cy.get("[aria-label=AddCircle]").click()
cy.get(".spectrum-Modal").within(() => {
- cy.get(".item").contains("Blank screen").click()
+ cy.get("[data-cy='blank-screen']").click()
cy.get(".spectrum-Button").contains("Continue").click({ force: true })
cy.wait(500)
})
@@ -473,6 +473,7 @@ Cypress.Commands.add("selectExternalDatasource", datasourceName => {
cy.get(".add-button").click()
})
// Clicks specified datasource & continue
+ cy.wait(1000)
cy.get(".item-list").contains(datasourceName).click()
cy.get(".spectrum-Dialog-grid").within(() => {
cy.get(".spectrum-Button").contains("Continue").click({ force: true })
@@ -495,7 +496,7 @@ Cypress.Commands.add("addDatasourceConfig", (datasource, skipFetch) => {
} else {
cy.get("input")
.clear({ force: true })
- .type(Cypress.env("mysql").HOST, { force: true })
+ .type(Cypress.env("HOST_IP"), { force: true })
}
})
})

View File

@@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
- "version": "1.0.105-alpha.38",
+ "version": "1.0.124-alpha.0",
"license": "GPL-3.0",
"private": true,
"scripts": {
@@ -65,10 +65,10 @@
}
},
"dependencies": {
- "@budibase/bbui": "^1.0.105-alpha.38",
- "@budibase/client": "^1.0.105-alpha.38",
- "@budibase/frontend-core": "^1.0.105-alpha.38",
- "@budibase/string-templates": "^1.0.105-alpha.38",
+ "@budibase/bbui": "^1.0.124-alpha.0",
+ "@budibase/client": "^1.0.124-alpha.0",
+ "@budibase/frontend-core": "^1.0.124-alpha.0",
+ "@budibase/string-templates": "^1.0.124-alpha.0",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View File

@@ -48,10 +48,6 @@
$automationStore.selectedAutomation?.automation?.definition?.steps.length +
1
- $: hasCompletedInputs = Object.keys(
-   block.schema?.inputs?.properties || {}
- ).every(x => block?.inputs[x])
$: loopingSelected =
$automationStore.selectedAutomation?.automation.definition.steps.find(
x => x.blockToLoop === block.id
@@ -290,13 +286,7 @@
</Modal>
</div>
<div class="separator" />
- <Icon
-   on:click={() => actionModal.show()}
-   disabled={!hasCompletedInputs}
-   hoverable
-   name="AddCircle"
-   size="S"
- />
+ <Icon on:click={() => actionModal.show()} hoverable name="AddCircle" size="S" />
{#if isTrigger ? totalBlocks > 1 : blockIdx !== totalBlocks - 2}
<div class="separator" />
{/if}

View File

@@ -16,7 +16,7 @@
</Modal>
</Tab>
</Tabs>
- <div class="add-button" data-cy="new-screen">
+ <div class="add-button">
<Icon hoverable name="AddCircle" on:click={modal.show} />
</div>
</div>

View File

@@ -25,11 +25,11 @@
import QueryParamSelector from "./QueryParamSelector.svelte"
import CronBuilder from "./CronBuilder.svelte"
import Editor from "components/integration/QueryEditor.svelte"
- import { debounce } from "lodash"
import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte"
import { LuceneUtils } from "@budibase/frontend-core"
import { getSchemaForTable } from "builderStore/dataBinding"
+ import { Utils } from "@budibase/frontend-core"
export let block
export let testData
@@ -54,7 +54,7 @@
$: schema = getSchemaForTable(tableId, { searchableSchema: true }).schema
$: schemaFields = Object.values(schema || {})
- const onChange = debounce(async function (e, key) {
+ const onChange = Utils.sequential(async (e, key) => {
try {
if (isTestModal) {
// Special case for webhook, as it requires a body, but the schema already brings back the body's contents
@@ -82,7 +82,7 @@
} catch (error) {
notifications.error("Error saving automation")
}
- }, 800)
+ })
function getAvailableBindings(block, automation) {
if (!block || !automation) {
@@ -226,6 +226,7 @@
on:change={e => onChange(e, key)}
{bindings}
fillWidth
+ updateOnChange={false}
/>
{:else}
<DrawerBindableInput
@@ -237,6 +238,7 @@
on:change={e => onChange(e, key)}
{bindings}
allowJS={false}
+ updateOnChange={false}
/>
{/if}
{:else if value.customType === "query"}
@@ -310,6 +312,7 @@
type={value.customType}
on:change={e => onChange(e, key)}
{bindings}
+ updateOnChange={false}
/>
{:else}
<div class="test">
@@ -321,6 +324,7 @@
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
+ updateOnChange={false}
/>
</div>
{/if}
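For context on the change above: `debounce` only persisted the last edit made inside the 800ms window, while a sequential executor persists every change in arrival order. A minimal sketch of what such a `sequential` wrapper could look like (illustrative only; the real `Utils.sequential` in `@budibase/frontend-core` may be implemented differently):

// Hypothetical sketch: queue async calls so each one starts only after the
// previous call has settled, preserving the order of saves.
const sequential = fn => {
  let chain = Promise.resolve()
  return (...args) => {
    const run = chain.then(() => fn(...args))
    // keep the chain alive even when a call rejects
    chain = run.catch(() => {})
    return run
  }
}

// Usage mirroring the onChange handler above:
const onChange = sequential(async (e, key) => {
  console.log("saving automation input", key, e)
})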

View File

@@ -43,6 +43,11 @@
}
const coerce = (value, type) => {
+ const re = new RegExp(/{{([^{].*?)}}/g)
+ if (re.test(value)) {
+   return value
+ }
if (type === "boolean") {
if (typeof value === "boolean") {
return value
@@ -120,6 +125,7 @@
{bindings}
fillWidth={true}
allowJS={true}
+ updateOnChange={false}
/>
{/if}
{:else if !rowControl}
@@ -137,6 +143,7 @@
{bindings}
fillWidth={true}
allowJS={true}
+ updateOnChange={false}
/>
{/if}
{/if}
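The regex guard added to `coerce` above leaves handlebars-style bindings untouched so they can be resolved later rather than being mangled by type coercion. A rough standalone sketch of that behaviour (simplified; the real component handles more types):

// Skip coercion for "{{ ... }}" binding strings; coerce plain literals only.
const coerce = (value, type) => {
  const re = new RegExp(/{{([^{].*?)}}/g)
  if (typeof value === "string" && re.test(value)) {
    return value
  }
  if (type === "boolean") {
    return typeof value === "boolean" ? value : value === "true"
  }
  if (type === "number") {
    return typeof value === "number" ? value : parseFloat(value)
  }
  return value
}

console.log(coerce("{{ trigger.row.age }}", "number")) // binding returned as-is
console.log(coerce("42", "number")) // 42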

View File

@@ -60,5 +60,6 @@
{bindings}
fillWidth={true}
allowJS={true}
+ updateOnChange={false}
/>
{/if}

View File

@@ -30,6 +30,10 @@
label: "DateTime",
value: "datetime",
},
+ {
+   label: "Array",
+   value: "array",
+ },
]
function addField() {
@@ -70,6 +74,7 @@
secondary
placeholder="Enter field name"
on:change={fieldNameChanged(field.name)}
+ updateOnChange={false}
/>
<Select
value={field.type}

View File

@@ -60,6 +60,7 @@ export function getBindings({
)
const label = path == null ? column : `${path}.0.${column}`
+ const binding = path == null ? `[${column}]` : `${path}.0.[${column}]`
// only supply a description for relationship paths
const description =
path == null
@@ -73,8 +74,8 @@
description,
// don't include path, it messes things up, relationship path
// will be replaced by the main array binding
- readableBinding: column,
- runtimeBinding: `[${column}]`,
+ readableBinding: label,
+ runtimeBinding: binding,
})
}
return bindings
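The change above makes relationship bindings carry their full path in both the readable and runtime forms. A small sketch of how those two strings are built for a column, with and without a relationship path (the helper name is illustrative):

// Build the readable label and the runtime binding for a column.
const buildBinding = (column, path = null) => {
  const label = path == null ? column : `${path}.0.${column}`
  const binding = path == null ? `[${column}]` : `${path}.0.[${column}]`
  return { readableBinding: label, runtimeBinding: binding }
}

console.log(buildBinding("name"))
// { readableBinding: "name", runtimeBinding: "[name]" }
console.log(buildBinding("name", "people"))
// { readableBinding: "people.0.name", runtimeBinding: "people.0.[name]" }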

View File

@@ -15,7 +15,6 @@
import ArrayRenderer from "components/common/renderers/ArrayRenderer.svelte"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import { goto } from "@roxi/routify"
- import GoogleButton from "../_components/GoogleButton.svelte"
export let datasource
export let save
@@ -161,11 +160,6 @@
Fetch tables
</Button>
<Button cta icon="Add" on:click={createNewTable}>New table</Button>
- {#if integration.auth}
-   {#if integration.auth.type === "google"}
-     <GoogleButton {datasource} />
-   {/if}
- {/if}
</div>
</div>
<Body>

View File

@@ -40,7 +40,7 @@
</script>
<ActionMenu>
- <div slot="control" class="icon">
+ <div slot="control" class="icon open-popover">
<Icon s hoverable name="MoreSmallList" />
</div>
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>

View File

@@ -17,6 +17,7 @@
export let disabled = false
export let fillWidth
export let allowJS = true
+ export let updateOnChange = true
const dispatch = createEventDispatcher()
let bindingDrawer
@@ -44,6 +45,7 @@
value={isJS ? "(JavaScript function)" : readableValue}
on:change={event => onChange(event.detail)}
{placeholder}
+ {updateOnChange}
/>
{#if !disabled}
<div class="icon" on:click={bindingDrawer.show}>

View File

@@ -15,6 +15,7 @@
export let placeholder
export let label
export let allowJS = false
+ export let updateOnChange = true
const dispatch = createEventDispatcher()
let bindingModal
@@ -41,6 +42,7 @@
value={isJS ? "(JavaScript function)" : readableValue}
on:change={event => onChange(event.detail)}
{placeholder}
+ {updateOnChange}
/>
<div class="icon" on:click={bindingModal.show}>
<Icon size="S" name="FlashOn" />

View File

@@ -0,0 +1,189 @@
<script>
import {
notifications,
Popover,
Layout,
Heading,
Body,
Button,
Icon,
} from "@budibase/bbui"
import { processStringSync } from "@budibase/string-templates"
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
import analytics, { Events, EventSource } from "analytics"
import { checkIncomingDeploymentStatus } from "components/deploy/utils"
import { API } from "api"
import { onMount } from "svelte"
import DeployModal from "components/deploy/DeployModal.svelte"
import { apps } from "stores/portal"
export let application
let publishPopover
let publishPopoverAnchor
let unpublishModal
$: filteredApps = $apps.filter(
app => app.devId === application && app.status === "published"
)
$: selectedApp = filteredApps?.length ? filteredApps[0] : null
$: deployments = []
$: latestDeployments = deployments
.filter(deployment => deployment.status === "SUCCESS")
.sort((a, b) => a.updatedAt > b.updatedAt)
$: isPublished = selectedApp && latestDeployments?.length > 0
const reviewPendingDeployments = (deployments, newDeployments) => {
if (deployments.length > 0) {
const pending = checkIncomingDeploymentStatus(deployments, newDeployments)
if (pending.length) {
notifications.warning(
"Deployment has been queued and will be processed shortly"
)
}
}
}
async function fetchDeployments() {
try {
const newDeployments = await API.getAppDeployments()
reviewPendingDeployments(deployments, newDeployments)
return newDeployments
} catch (err) {
notifications.error("Error fetching deployment history")
}
}
const viewApp = () => {
analytics.captureEvent(Events.APP.VIEW_PUBLISHED, {
appId: selectedApp.appId,
eventSource: EventSource.PORTAL,
})
if (selectedApp.url) {
window.open(`/app${selectedApp.url}`)
} else {
window.open(`/${selectedApp.prodId}`)
}
}
const unpublishApp = () => {
publishPopover.hide()
unpublishModal.show()
}
const confirmUnpublishApp = async () => {
if (!application || !isPublished) {
//confirm the app has loaded.
return
}
try {
analytics.captureEvent(Events.APP.UNPUBLISHED, {
appId: selectedApp.appId,
})
await API.unpublishApp(selectedApp.prodId)
await apps.load()
notifications.success("App unpublished successfully")
} catch (err) {
notifications.error("Error unpublishing app")
}
}
const completePublish = async () => {
try {
await apps.load()
deployments = await fetchDeployments()
} catch (err) {
notifications.error("Error refreshing app")
}
}
onMount(async () => {
if (!$apps.length) {
await apps.load()
}
deployments = await fetchDeployments()
})
</script>
<div class="deployment-top-nav">
{#if isPublished}
<div class="publish-popover">
<div bind:this={publishPopoverAnchor}>
<Icon
size="M"
hoverable
name="Globe"
tooltip="Your published app"
on:click={publishPopover.show()}
/>
</div>
<Popover
bind:this={publishPopover}
align="right"
disabled={!isPublished}
dataCy="publish-popover-menu"
showTip={true}
anchor={publishPopoverAnchor}
>
<Layout gap="M">
<Heading size="XS">Your published app</Heading>
<Body size="S">
<span class="publish-popover-message">
{processStringSync(
"Last published {{ duration time 'millisecond' }} ago",
{
time:
new Date().getTime() -
new Date(latestDeployments[0].updatedAt).getTime(),
}
)}
</span>
</Body>
<div class="publish-popover-actions">
<Button
warning={true}
icon="GlobeStrike"
disabled={!isPublished}
on:click={unpublishApp}
dataCy="publish-popover-action"
>
Unpublish
</Button>
<Button cta on:click={viewApp}>View app</Button>
</div>
</Layout>
</Popover>
</div>
{/if}
{#if !isPublished}
<Icon
size="M"
name="GlobeStrike"
disabled
tooltip="Your app has not been published yet"
/>
{/if}
</div>
<ConfirmDialog
bind:this={unpublishModal}
title="Confirm unpublish"
okText="Unpublish app"
onOk={confirmUnpublishApp}
dataCy={"unpublish-modal"}
>
Are you sure you want to unpublish the app <b>{selectedApp?.name}</b>?
</ConfirmDialog>
<DeployModal onOk={completePublish} />
<style>
.publish-popover-actions :global([data-cy="publish-popover-action"]) {
margin-right: var(--spacing-s);
}
:global([data-cy="publish-popover-menu"]) {
padding: 10px;
}
</style>

View File

@@ -89,7 +89,9 @@
</Button>
</div>
<ActionMenu align="right" dataCy="app-row-actions-menu-popover">
- <Icon hoverable slot="control" name="More" dataCy="app-row-actions-menu" />
+ <span slot="control" class="app-row-actions-icon">
+   <Icon hoverable name="More" />
+ </span>
{#if app.lockedYou}
<MenuItem on:click={() => releaseLock(app)} icon="LockOpen">
Release lock

View File

@ -1,63 +1,18 @@
<script>
import { store, automationStore } from "builderStore"
import { roles, flags } from "stores/backend"
- import {
-   Icon,
-   Tabs,
-   Tab,
-   notifications,
-   PopoverMenu,
-   Layout,
-   Button,
-   Heading,
-   Body,
- } from "@budibase/bbui"
+ import { Icon, Tabs, Tab, Heading, notifications } from "@budibase/bbui"
- import DeployModal from "components/deploy/DeployModal.svelte"
import RevertModal from "components/deploy/RevertModal.svelte"
- import ConfirmDialog from "components/common/ConfirmDialog.svelte"
+ import DeployNavigation from "components/deploy/DeployNavigation.svelte"
import { API } from "api"
- import { auth, apps } from "stores/portal"
import { isActive, goto, layout, redirect } from "@roxi/routify"
import { capitalise } from "helpers"
import { onMount, onDestroy } from "svelte"
- import { processStringSync } from "@budibase/string-templates"
- import { checkIncomingDeploymentStatus } from "components/deploy/utils"
- import analytics, { Events, EventSource } from "analytics"
export let application
// Get Package and set store
let promise = getPackage()
let unpublishModal
let publishPopover
$: enrichedApps = enrichApps($apps, $auth.user)
const enrichApps = (apps, user) => {
const enrichedApps = apps
.map(app => ({
...app,
deployed: app.status === "published",
lockedYou: app.lockedBy && app.lockedBy.email === user?.email,
lockedOther: app.lockedBy && app.lockedBy.email !== user?.email,
}))
.filter(app => {
return app.devId === application
})
return enrichedApps
}
$: selectedApp = enrichedApps.length > 0 ? enrichedApps[0] : {}
$: deployments = []
$: latestDeployments = deployments
.filter(deployment => deployment.status === "SUCCESS")
.sort((a, b) => a.updatedAt > b.updatedAt)
$: isPublished =
selectedApp.deployed && latestDeployments && latestDeployments?.length
? true
: false
// Sync once when you load the app
let hasSynced = false
@@ -65,25 +20,11 @@
$: selected = capitalise(
$layout.children.find(layout => $isActive(layout.path))?.title ?? "data"
)
$: appInfo = $apps?.find(app => app.devId === application)
$: published = appInfo?.status === "published"
const previewApp = () => {
window.open(`/${application}`)
}
const viewApp = () => {
analytics.captureEvent(Events.APP.VIEW_PUBLISHED, {
appId: selectedApp.appId,
eventSource: EventSource.PORTAL,
})
if (selectedApp.url) {
window.open(`/app${selectedApp.url}`)
} else {
window.open(`/${selectedApp.prodId}`)
}
}
async function getPackage() {
try {
const pkg = await API.fetchAppPackage(application)
@@ -114,74 +55,20 @@
})
}
const reviewPendingDeployments = (deployments, newDeployments) => {
if (deployments.length > 0) {
const pending = checkIncomingDeploymentStatus(deployments, newDeployments)
if (pending.length) {
notifications.warning(
"Deployment has been queued and will be processed shortly"
)
}
}
}
async function fetchDeployments() {
try {
const newDeployments = await API.getAppDeployments()
reviewPendingDeployments(deployments, newDeployments)
return newDeployments
} catch (err) {
notifications.error("Error fetching deployment history")
}
}
onMount(async () => {
if (!hasSynced && application) {
try {
await API.syncApp(application)
- await apps.load()
} catch (error) {
notifications.error("Failed to sync with production database")
}
hasSynced = true
}
- deployments = await fetchDeployments()
})
onDestroy(() => {
store.actions.reset()
})
const unpublishApp = () => {
publishPopover.hide()
unpublishModal.show()
}
const completePublish = async () => {
try {
await apps.load()
deployments = await fetchDeployments()
} catch (err) {
notifications.error("Error refreshing app")
}
}
const confirmUnpublishApp = async () => {
if (!application || !isPublished) {
//confirm the app has loaded.
return
}
try {
analytics.captureEvent(Events.APP.UNPUBLISHED, {
appId: selectedApp.appId,
})
await API.unpublishApp(selectedApp.prodId)
await apps.load()
notifications.success("App unpublished successfully")
} catch (err) {
notifications.error("Error unpublishing app")
}
}
</script>
{#await promise}
@@ -215,87 +102,20 @@
<RevertModal />
<Icon
name="Visibility"
+ tooltip="Open app preview"
hoverable
on:click={previewApp}
- tooltip="View app preview"
/>
- {#if isPublished}
+ <DeployNavigation {application} />
<PopoverMenu
bind:this={publishPopover}
align="right"
disabled={!isPublished}
dataCy="publish-popover-menu"
>
<div slot="control" class="icon app-status-icon">
<Icon
size="M"
hoverable
name="Globe"
disabled={!isPublished}
tooltip="Your published app"
/>
</div>
<Layout gap="M">
<Heading size="XS">Your published app</Heading>
<Body size="S">
{#if isPublished}
{processStringSync(
"Last published {{ duration time 'millisecond' }} ago",
{
time:
new Date().getTime() -
new Date(latestDeployments[0].updatedAt).getTime(),
}
)}
{/if}
</Body>
<div class="publish-popover-actions">
<Button
warning={true}
icon="GlobeStrike"
disabled={!isPublished}
on:click={unpublishApp}
dataCy="publish-popover-action"
>
Unpublish
</Button>
<Button cta on:click={viewApp}>View app</Button>
</div>
</Layout>
</PopoverMenu>
{/if}
{#if !isPublished}
<Icon
size="M"
name="GlobeStrike"
disabled
tooltip="Your app has not been published yet"
/>
{/if}
<DeployModal onOk={completePublish} />
</div>
</div>
<slot />
<ConfirmDialog
bind:this={unpublishModal}
title="Confirm unpublish"
okText="Unpublish app"
onOk={confirmUnpublishApp}
dataCy={"unpublish-modal"}
>
Are you sure you want to unpublish the app <b>{selectedApp?.name}</b>?
</ConfirmDialog>
</div>
{:catch error}
<p>Something went wrong: {error.message}</p>
{/await}
<style>
.publish-popover-actions :global([data-cy="publish-popover-action"]) {
margin-right: var(--spacing-s);
}
.loading {
min-height: 100%;
height: 100%;

View File

@@ -136,7 +136,7 @@
notifications.success("Request sent successfully")
}
} catch (error) {
- notifications.error("Error running query")
+ notifications.error(`Query Error: ${error.message}`)
}
}

View File

@@ -160,6 +160,10 @@
}
docs.forEach(element => {
+ // Delete unsupported fields
+ delete element.createdAt
+ delete element.updatedAt
if (element.type === ConfigTypes.OIDC) {
// Add a UUID here so each config is distinguishable when it arrives at the login page
for (let config of element.config.configs) {

View File

@@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
- "version": "1.0.105-alpha.38",
+ "version": "1.0.124-alpha.0",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

View File

@@ -1,6 +1,6 @@
{
"name": "@budibase/client",
- "version": "1.0.105-alpha.38",
+ "version": "1.0.124-alpha.0",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
- "@budibase/bbui": "^1.0.105-alpha.38",
- "@budibase/frontend-core": "^1.0.105-alpha.38",
- "@budibase/string-templates": "^1.0.105-alpha.38",
+ "@budibase/bbui": "^1.0.124-alpha.0",
+ "@budibase/frontend-core": "^1.0.124-alpha.0",
+ "@budibase/string-templates": "^1.0.124-alpha.0",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

View File

@@ -34,6 +34,7 @@ export const API = createAPIClient({
// Or we could check error.status and redirect to login on a 403 etc.
onError: error => {
const { status, method, url, message, handled } = error || {}
+ const ignoreErrorUrls = ["bbtel", "/api/global/self"]
// Log any errors that we haven't manually handled
if (!handled) {
@@ -45,7 +46,14 @@ export const API = createAPIClient({
if (message) {
// Don't notify if the URL contains the word analytics as it may be
// blocked by browser extensions
- if (!url?.includes("analytics")) {
+ let ignore = false
+ for (let ignoreUrl of ignoreErrorUrls) {
+   if (url?.includes(ignoreUrl)) {
+     ignore = true
+     break
+   }
+ }
+ if (!ignore) {
notificationStore.actions.error(message)
}
}
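The handler above now suppresses error notifications for a short list of URL fragments instead of the single analytics check. The filter boils down to something like this standalone sketch:

// Returns true when an error for this request URL should be surfaced.
const ignoreErrorUrls = ["bbtel", "/api/global/self"]

const shouldNotify = url => {
  if (!url) {
    return true
  }
  return !ignoreErrorUrls.some(fragment => url.includes(fragment))
}

console.log(shouldNotify("/api/global/self")) // false
console.log(shouldNotify("/api/tables"))      // true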

View File

@@ -36,8 +36,13 @@
div {
font-style: italic;
}
+ @media (hover: hover) {
.hoverable:hover {
color: var(--spectrum-alias-icon-color-selected-hover) !important;
cursor: pointer;
}
+ }
+ .hoverable:active {
+   color: var(--spectrum-alias-icon-color-selected-hover) !important;
+ }
</style>

View File

@@ -775,9 +775,9 @@ minimatch@^3.0.2, minimatch@^3.0.4:
brace-expansion "^1.1.7"
minimist@^1.2.0:
- version "1.2.5"
- resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
- integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
+ version "1.2.6"
+ resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
+ integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
nanoid@^2.1.0:
version "2.1.11"
@@ -790,9 +790,9 @@ nanoid@^3.1.30, nanoid@^3.1.32:
integrity sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==
nanoid@^3.3.1:
- version "3.3.2"
- resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.2.tgz#c89622fafb4381cd221421c69ec58547a1eec557"
- integrity sha512-CuHBogktKwpm5g2sRgv83jEy2ijFzBwMoYA60orPDR7ynsLijJDqgsi4RDGj3OJpy3Ieb+LYwiRmIOGyytgITA==
+ version "3.3.1"
+ resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.1.tgz#6347a18cac88af88f58af0b3594b723d5e99bb35"
+ integrity sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw==
node-releases@^2.0.1:
version "2.0.1"

View File

@@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
- "version": "1.0.105-alpha.38",
+ "version": "1.0.124-alpha.0",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
- "@budibase/bbui": "^1.0.105-alpha.38",
+ "@budibase/bbui": "^1.0.124-alpha.0",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View File

@@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
- "version": "1.0.105-alpha.38",
+ "version": "1.0.124-alpha.0",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@@ -68,10 +68,10 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "^10.0.3",
- "@budibase/backend-core": "^1.0.105-alpha.38",
- "@budibase/client": "^1.0.105-alpha.38",
- "@budibase/pro": "1.0.105-alpha.38",
- "@budibase/string-templates": "^1.0.105-alpha.38",
+ "@budibase/backend-core": "^1.0.124-alpha.0",
+ "@budibase/client": "^1.0.124-alpha.0",
+ "@budibase/pro": "1.0.124-alpha.0",
+ "@budibase/string-templates": "^1.0.124-alpha.0",
"@bull-board/api": "^3.7.0",
"@bull-board/koa": "^3.7.0",
"@elastic/elasticsearch": "7.10.0",
@@ -121,7 +121,7 @@
"pg": "8.5.1",
"pino-pretty": "4.0.0",
"posthog-node": "^1.1.4",
- "pouchdb": "7.2.1",
+ "pouchdb": "7.3.0",
"pouchdb-adapter-memory": "^7.2.1",
"pouchdb-all-dbs": "1.0.2",
"pouchdb-find": "^7.2.2",

View File

@@ -2,7 +2,8 @@
const yargs = require("yargs")
const fs = require("fs")
const { join } = require("path")
- const CouchDB = require("../src/db")
+ require("../src/db").init()
+ const { doWithDB } = require("@budibase/backend-core/db")
// load environment
const env = require("../src/environment")
const {
@@ -47,14 +48,15 @@ yargs
const writeStream = fs.createWriteStream(join(exportPath, "dump.text"))
// perform couch dump
- const instanceDb = new CouchDB(appId)
- await instanceDb.dump(writeStream, {
+ await doWithDB(appId, async db => {
+   return db.dump(writeStream, {
filter: doc =>
!(
doc._id.includes(USER_METDATA_PREFIX) ||
doc.includes(LINK_USER_METADATA_PREFIX)
),
})
+ })
console.log(`Template ${name} exported to ${exportPath}`)
}
)
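The script above now routes its CouchDB access through `doWithDB` instead of constructing a client directly. The real helper lives in `@budibase/backend-core/db`; this is only a hypothetical sketch of the open-use-close pattern it represents:

// Hypothetical doWithDB: open a database, run the callback, always close it.
const PouchDB = require("pouchdb")

async function doWithDB(dbName, callback) {
  const db = new PouchDB(dbName)
  try {
    return await callback(db)
  } finally {
    await db.close()
  }
}

// Usage shaped like the template export above (filter options omitted):
// await doWithDB(appId, db => db.dump(writeStream, { filter: doc => true }))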

View File

@@ -26,6 +26,7 @@ CREATE TABLE Products (
updated time
);
INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07');
+ INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11');
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01');
INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31');
INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00');

View File

@@ -1,50 +0,0 @@
/**
* Script to replicate your PouchDb (in your home directory) to a remote CouchDB
* USAGE...
* node scripts/replicateApp <app_name> <remote_url>
* e.g. node scripts/replicateApp Mike http://admin:password@127.0.0.1:5984
*/
const CouchDB = require("../src/db")
const { DocumentTypes } = require("../src/db/utils")
const { getAllDbs } = require("@budibase/backend-core/db")
const appName = process.argv[2].toLowerCase()
const remoteUrl = process.argv[3]
console.log(`Replicating from ${appName} to ${remoteUrl}/${appName}`)
const run = async () => {
const dbs = await getAllDbs()
const appDbNames = dbs.filter(dbName => dbName.startsWith("inst_app"))
let apps = []
for (let dbName of appDbNames) {
const db = new CouchDB(dbName)
apps.push(db.get(DocumentTypes.APP_METADATA))
}
apps = await Promise.all(apps)
const app = apps.find(
a => a.name === appName || a.name.toLowerCase() === appName
)
if (!app) {
console.log(
`Could not find app... apps: ${apps.map(app => app.name).join(", ")}`
)
return
}
const instanceDb = new CouchDB(app.appId)
const remoteDb = new CouchDB(`${remoteUrl}/${appName}`)
instanceDb.replicate
.to(remoteDb)
.on("complete", function () {
console.log("SUCCESS!")
})
.on("error", function (err) {
console.log(`FAILED: ${err}`)
})
}
run()

View File

@@ -131,7 +131,7 @@ async function createInstance(template: any) {
const tenantId = isMultiTenant() ? getTenantId() : null
const baseAppId = generateAppID(tenantId)
const appId = generateDevAppID(baseAppId)
- updateAppId(appId)
+ await updateAppId(appId)
const db = getAppDB()
await db.put({
@@ -471,6 +471,8 @@ export const sync = async (ctx: any, next: any) => {
})
} catch (err) {
error = err
+ } finally {
+   await replication.close()
}
// sync the users

View File

@@ -93,6 +93,7 @@ async function initDeployedApp(prodAppId: any) {
}
async function deployApp(deployment: any) {
+ let replication
try {
const appId = getAppId()
const devAppId = getDevelopmentAppID(appId)
@@ -102,10 +103,9 @@ async function deployApp(deployment: any) {
source: devAppId,
target: productionAppId,
}
- const replication = new Replication(config)
+ replication = new Replication(config)
console.log("Replication object created")
await replication.replicate()
console.log("replication complete.. replacing app meta doc")
const db = getProdAppDB()
@@ -129,6 +129,10 @@ async function deployApp(deployment: any) {
...err,
message: `Deployment Failed: ${err.message}`,
}
+ } finally {
+   if (replication) {
+     await replication.close()
+   }
}
}
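The deploy change above hoists `replication` out of the `try` block so the connection can be closed in `finally` on both success and failure. The same pattern in isolation, with `createReplication` standing in for the Budibase `Replication` class:

// Always release the replication handle, even when the deploy fails.
async function deployWithCleanup(createReplication, config) {
  let replication
  try {
    replication = createReplication(config)
    await replication.replicate()
    return { status: "SUCCESS" }
  } catch (err) {
    return { status: "FAILURE", message: `Deployment Failed: ${err.message}` }
  } finally {
    if (replication) {
      await replication.close()
    }
  }
}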

View File

@@ -83,7 +83,9 @@ exports.revert = async ctx => {
try {
const db = getProdAppDB({ skip_setup: true })
const info = await db.info()
- if (info.error) throw info.error
+ if (info.error) {
+   throw info.error
+ }
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
if (
!deploymentDoc.history ||
@@ -95,12 +97,11 @@ exports.revert = async ctx => {
return ctx.throw(400, "App has not yet been deployed")
}
- try {
const replication = new Replication({
source: productionAppId,
target: appId,
})
+ try {
await replication.rollback()
// update appID in reverted app to be dev version again
const db = getAppDB()
@@ -114,6 +115,8 @@ exports.revert = async ctx => {
}
} catch (err) {
ctx.throw(400, `Unable to revert. ${err}`)
+ } finally {
+   await replication.close()
}
}

View File

@@ -1,12 +1,4 @@
+ const TestConfig = require("../../../../../tests/utilities/TestConfiguration")
- const bulkDocs = jest.fn()
- const db = jest.fn(() => {
-   return {
-     bulkDocs
-   }
- })
- jest.mock("../../../../../db", () => db)
- require("@budibase/backend-core").init(require("../../../../../db"))
const { RestImporter } = require("../index")
@@ -48,6 +40,12 @@
}
describe("Rest Importer", () => {
+ const config = new TestConfig(false)
+ beforeEach(async () => {
+   await config.init()
+ })
let restImporter
const init = async (data) => {
@@ -105,11 +103,9 @@ describe("Rest Importer", () => {
const testImportQueries = async (key, data, assertions) => {
await init(data)
- bulkDocs.mockReturnValue([])
const importResult = await restImporter.importQueries("datasourceId")
expect(importResult.errorQueries.length).toBe(0)
expect(importResult.queries.length).toBe(assertions[key].count)
- expect(bulkDocs).toHaveBeenCalledTimes(1)
jest.clearAllMocks()
}

View File

@@ -323,6 +323,28 @@ module External {
return { row: newRow, manyRelationships }
}
+ squashRelationshipColumns(
+   table: Table,
+   row: Row,
+   relationships: RelationshipsJson[]
+ ): Row {
+   for (let relationship of relationships) {
+     const linkedTable = this.tables[relationship.tableName]
+     if (!linkedTable || !row[relationship.column]) {
+       continue
+     }
+     const display = linkedTable.primaryDisplay
+     for (let key of Object.keys(row[relationship.column])) {
+       const related: Row = row[relationship.column][key]
+       row[relationship.column][key] = {
+         primaryDisplay: display ? related[display] : undefined,
+         _id: related._id,
+       }
+     }
+   }
+   return row
+ }
/**
* This iterates through the returned rows and works out what elements of the rows
* actually match up to another row (based on primary keys) - this is pretty specific
@@ -354,12 +376,6 @@
if (!linked._id) {
continue
}
- // if not returning full docs then get the minimal links out
- const display = linkedTable.primaryDisplay
- linked = {
-   primaryDisplay: display ? linked[display] : undefined,
-   _id: linked._id,
- }
columns[relationship.column] = linked
}
for (let [column, related] of Object.entries(columns)) {
@@ -417,7 +433,9 @@
relationships
)
}
- return processFormulas(table, Object.values(finalRows))
+ return processFormulas(table, Object.values(finalRows)).map((row: Row) =>
+   this.squashRelationshipColumns(table, row, relationships)
+ )
}
/**
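To make the effect of `squashRelationshipColumns` concrete, here is a plain-JavaScript version of the same idea with a small worked example (the `Table`/`RelationshipsJson` types and `this.tables` lookup are Budibase internals, so this is illustrative only):

// Reduce each related row to { _id, primaryDisplay } for lighter responses.
function squashRelationshipColumns(tables, row, relationships) {
  for (const relationship of relationships) {
    const linkedTable = tables[relationship.tableName]
    if (!linkedTable || !row[relationship.column]) {
      continue
    }
    const display = linkedTable.primaryDisplay
    for (const key of Object.keys(row[relationship.column])) {
      const related = row[relationship.column][key]
      row[relationship.column][key] = {
        primaryDisplay: display ? related[display] : undefined,
        _id: related._id,
      }
    }
  }
  return row
}

const tables = { people: { primaryDisplay: "FirstName" } }
const row = { people: { 0: { _id: "ro_1", FirstName: "Mike", Age: 28 } } }
squashRelationshipColumns(tables, row, [{ tableName: "people", column: "people" }])
console.log(row.people[0]) // { primaryDisplay: "Mike", _id: "ro_1" }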

View File

@@ -6,6 +6,7 @@ const {
DocumentTypes,
InternalTables,
} = require("../../../db/utils")
+ const { dangerousGetDB } = require("@budibase/backend-core/db")
const userController = require("../user")
const {
inputProcessing,
@@ -250,7 +251,7 @@ exports.fetch = async ctx => {
}
exports.find = async ctx => {
- const db = getAppDB()
+ const db = dangerousGetDB(ctx.appId)
const table = await db.get(ctx.params.tableId)
let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId)
row = await outputProcessing(table, row)

View File

@@ -9,9 +9,31 @@ import {
} from "./utils"
const { getAppDB } = require("@budibase/backend-core/context")
import { isTest } from "../../../environment"
- import { cleanupAttachments } from "../../../utilities/rowProcessor"
+ import {
+   cleanupAttachments,
+   fixAutoColumnSubType,
+ } from "../../../utilities/rowProcessor"
import { runStaticFormulaChecks } from "./bulkFormula"
- import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
+ import { Table } from "../../../definitions/common"
+ import { quotas } from "@budibase/pro"
+ function checkAutoColumns(table: Table, oldTable: Table) {
+   if (!table.schema) {
+     return table
+   }
+   for (let [key, schema] of Object.entries(table.schema)) {
+     if (!schema.autocolumn || schema.subtype) {
+       continue
+     }
+     const oldSchema = oldTable && oldTable.schema[key]
+     if (oldSchema && oldSchema.subtype) {
+       table.schema[key].subtype = oldSchema.subtype
+     } else {
+       table.schema[key] = fixAutoColumnSubType(schema)
+     }
+   }
+   return table
+ }
export async function save(ctx: any) {
const db = getAppDB()
@@ -29,9 +51,12 @@
oldTable = await db.get(ctx.request.body._id)
}
+ // check all types are correct
if (hasTypeChanged(tableToSave, oldTable)) {
ctx.throw(400, "A column type has changed.")
}
+ // check that subtypes have been maintained
+ tableToSave = checkAutoColumns(tableToSave, oldTable)
// saving a table is a complex operation, involving many different steps, this
// has been broken out into a utility to make it more obvious/easier to manipulate
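The new `checkAutoColumns` pass keeps auto-column subtypes stable across saves: an existing subtype is carried over from the old schema, and missing ones are back-filled. A rough sketch with a simplified stand-in for `fixAutoColumnSubType` (the real helper lives in the row processor and its name-to-subtype mapping is assumed here):

// Guess a subtype from the default auto column names when none is stored.
// NOTE: the mapping below is an assumption for illustration purposes.
const fixAutoColumnSubType = schema => {
  const byName = {
    "Created By": "createdBy",
    "Created At": "createdAt",
    "Updated By": "updatedBy",
    "Updated At": "updatedAt",
    "Auto ID": "autoID",
  }
  return { ...schema, subtype: byName[schema.name] || schema.subtype }
}

function checkAutoColumns(table, oldTable) {
  if (!table.schema) {
    return table
  }
  for (const [key, schema] of Object.entries(table.schema)) {
    if (!schema.autocolumn || schema.subtype) {
      continue
    }
    const oldSchema = oldTable && oldTable.schema[key]
    table.schema[key] =
      oldSchema && oldSchema.subtype
        ? { ...schema, subtype: oldSchema.subtype }
        : fixAutoColumnSubType(schema)
  }
  return table
}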

View File

@@ -54,7 +54,7 @@ exports.destroy = async ctx => {
}
exports.buildSchema = async ctx => {
- updateAppId(ctx.params.instance)
+ await updateAppId(ctx.params.instance)
const db = getAppDB()
const webhook = await db.get(ctx.params.id)
webhook.bodySchema = toJsonSchema(ctx.request.body)
@@ -80,7 +80,7 @@ exports.buildSchema = async ctx => {
exports.trigger = async ctx => {
const prodAppId = getProdAppID(ctx.params.instance)
- updateAppId(prodAppId)
+ await updateAppId(prodAppId)
try {
const db = getAppDB()
const webhook = await db.get(ctx.params.id)

View File

@@ -43,6 +43,7 @@ describe("run misc tests", () => {
describe("test table utilities", () => {
it("should be able to import a CSV", async () => {
+ return config.doInContext(null, async () => {
const table = await config.createTable({
name: "table",
type: "table",
@@ -93,3 +94,4 @@ describe("run misc tests", () => {
})
})
})
+ })

View File

@@ -2,7 +2,6 @@ const setup = require("./utilities")
const { basicScreen } = setup.structures
const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions")
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
- const { doInAppContext } = require("@budibase/backend-core/context")
const route = "/test"

View File

@@ -2,6 +2,7 @@ const { outputProcessing } = require("../../../utilities/rowProcessor")
const setup = require("./utilities")
const { basicRow } = setup.structures
const { doInAppContext } = require("@budibase/backend-core/context")
+ const { doInTenant } = require("@budibase/backend-core/tenancy")
// mock the fetch for the search system
jest.mock("node-fetch")
@@ -340,6 +341,7 @@ describe("/rows", () => {
describe("fetchEnrichedRows", () => {
it("should allow enriching some linked rows", async () => {
+ const { table, firstRow, secondRow } = await doInTenant(setup.structures.TENANT_ID, async () => {
const table = await config.createLinkedTable()
const firstRow = await config.createRow({
name: "Test Contact",
@@ -352,6 +354,8 @@ describe("/rows", () => {
link: [{_id: firstRow._id}],
tableId: table._id,
})
+ return { table, firstRow, secondRow }
+ })
// test basic enrichment
const resBasic = await request

View File

@@ -1,4 +1,5 @@
- const { checkBuilderEndpoint, getDB } = require("./utilities/TestFunctions")
+ const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
+ const { getAppDB } = require("@budibase/backend-core/context")
const setup = require("./utilities")
const { basicTable } = setup.structures
@@ -122,7 +123,7 @@ describe("/tables", () => {
describe("indexing", () => {
it("should be able to create a table with indexes", async () => {
- const db = getDB(config)
+ const db = getAppDB(config)
const indexCount = (await db.getIndexes()).total_rows
const table = basicTable()
table.indexes = ["name"]

View File

@@ -2,6 +2,7 @@ import * as rowController from "../../../controllers/row"
import * as appController from "../../../controllers/application"
import { AppStatus } from "../../../../db/utils"
import { BUILTIN_ROLE_IDS } from "@budibase/backend-core/roles"
+ import { doInTenant } from "@budibase/backend-core/tenancy"
import { TENANT_ID } from "../../../../tests/utilities/structures"
import { getAppDB, doInAppContext } from "@budibase/backend-core/context"
import * as env from "../../../../environment"
@@ -32,6 +33,7 @@ export const getAllTableRows = async (config: any) => {
}
export const clearAllApps = async (tenantId = TENANT_ID) => {
+ await doInTenant(tenantId, async () => {
const req: any = { query: { status: AppStatus.DEV }, user: { tenantId } }
await appController.fetch(req)
const apps = req.body
@@ -43,6 +45,7 @@ export const clearAllApps = async (tenantId = TENANT_ID) => {
const req = new Request(null, { appId })
await runRequest(appId, appController.destroy, req)
}
+ })
}
export const clearAllAutomations = async (config: any) => { export const clearAllAutomations = async (config: any) => {

View File

@@ -1,8 +1,8 @@
// need to load environment first
import { ExtendableContext } from "koa"
import * as env from "./environment"
- const CouchDB = require("./db")
- require("@budibase/backend-core").init(CouchDB)
+ import db from "./db"
+ db.init()
const Koa = require("koa")
const destroyable = require("server-destroy")
const koaBody = require("koa-body")

View File

@@ -1,6 +1,7 @@
const { execSync } = require("child_process")
const { processStringSync } = require("@budibase/string-templates")
const automationUtils = require("../automationUtils")
+ const environment = require("../../environment")
exports.definition = {
name: "Bash Scripting",
@@ -51,7 +52,9 @@ exports.run = async function ({ inputs, context }) {
let stdout,
success = true
try {
- stdout = execSync(command, { timeout: 500 }).toString()
+ stdout = execSync(command, {
+   timeout: environment.QUERY_THREAD_TIMEOUT || 500,
+ }).toString()
} catch (err) {
stdout = err.message
success = false
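The bash step above now reads its timeout from the environment instead of hard-coding 500ms. A standalone sketch of that pattern (a raw process env var is used here for illustration; the real step reads Budibase's environment module):

// Run a shell command with a configurable timeout, defaulting to 500ms.
const { execSync } = require("child_process")

const runScript = (
  command,
  timeout = parseInt(process.env.QUERY_THREAD_TIMEOUT) || 500
) => {
  try {
    return { success: true, stdout: execSync(command, { timeout }).toString() }
  } catch (err) {
    return { success: false, stdout: err.message }
  }
}

console.log(runScript("echo hello"))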

View File

@@ -26,7 +26,7 @@ describe("test the delete row action", () => {
expect(res.row._id).toEqual(row._id)
let error
try {
- await config.getRow(table._id, res.id)
+ await config.getRow(table._id, res.row._id)
} catch (err) {
error = err
}

View File

@@ -1,4 +1,6 @@
const TestConfig = require("../../../tests/utilities/TestConfiguration")
+ const { TENANT_ID } = require("../../../tests/utilities/structures")
+ const { doInTenant } = require("@budibase/backend-core/tenancy")
const actions = require("../../actions")
const emitter = require("../../../events/index")
const env = require("../../../environment")
@@ -31,6 +33,7 @@ exports.runInProd = async fn => {
}
exports.runStep = async function runStep(stepId, inputs) {
+ return doInTenant(TENANT_ID, async () => {
let step = await actions.getAction(stepId)
expect(step).toBeDefined()
return step({
@@ -40,6 +43,7 @@ exports.runStep = async function runStep(stepId, inputs) {
apiKey: exports.apiKey,
emitter,
})
+ })
}
exports.apiKey = "test"

View File

@@ -1,12 +1,11 @@
import { Thread, ThreadType } from "../threads"
import { definitions } from "./triggerInfo"
import * as webhooks from "../api/controllers/webhook"
- import CouchDB from "../db"
import { queue } from "./bullboard"
import newid from "../db/newid"
import { updateEntityMetadata } from "../utilities"
import { MetadataTypes, WebhookType } from "../constants"
- import { getProdAppID } from "@budibase/backend-core/db"
+ import { getProdAppID, doWithDB } from "@budibase/backend-core/db"
import { cloneDeep } from "lodash/fp"
import { getAppDB, getAppId } from "@budibase/backend-core/context"
import { tenancy } from "@budibase/backend-core"
@@ -113,10 +112,11 @@ export async function enableCronTrigger(appId: any, automation: any) {
// can't use getAppDB here as this is likely to be called from dev app,
// but this call could be for dev app or prod app, need to just use what
// was passed in
- const db = new CouchDB(appId)
+ await doWithDB(appId, async (db: any) => {
const response = await db.put(automation)
automation._id = response.id
automation._rev = response.rev
+ })
}
return automation
}

View File

@@ -162,6 +162,14 @@ exports.AutoFieldSubTypes = {
AUTO_ID: "autoID",
}
+ exports.AutoFieldDefaultNames = {
+   CREATED_BY: "Created By",
+   CREATED_AT: "Created At",
+   UPDATED_BY: "Updated By",
+   UPDATED_AT: "Updated At",
+   AUTO_ID: "Auto ID",
+ }
exports.OBJ_STORE_DIRECTORY = "/prod-budi-app-assets"
exports.BaseQueryVerbs = {
CREATE: "create",

View File

@@ -1,31 +0,0 @@
const PouchDB = require("pouchdb")
const { getCouchUrl } = require("@budibase/backend-core/db")
const replicationStream = require("pouchdb-replication-stream")
const allDbs = require("pouchdb-all-dbs")
const find = require("pouchdb-find")
const env = require("../environment")
const COUCH_DB_URL = getCouchUrl() || "http://localhost:4005"
PouchDB.plugin(replicationStream.plugin)
PouchDB.plugin(find)
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
let POUCH_DB_DEFAULTS = {
prefix: COUCH_DB_URL,
}
if (env.isTest()) {
PouchDB.plugin(require("pouchdb-adapter-memory"))
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "memory",
}
}
const Pouch = PouchDB.defaults(POUCH_DB_DEFAULTS)
// have to still have pouch alldbs for testing
allDbs(Pouch)
module.exports = Pouch

View File

@@ -1,18 +1,16 @@
- const PouchDB = require("pouchdb")
- const memory = require("pouchdb-adapter-memory")
const newid = require("./newid")
- PouchDB.plugin(memory)
- const Pouch = PouchDB.defaults({
-   prefix: undefined,
-   adapter: "memory",
- })
+ // bypass the main application db config
+ // use in memory pouchdb directly
+ const { getPouch, closeDB } = require("@budibase/backend-core/db")
+ const Pouch = getPouch({ inMemory: true })
exports.runView = async (view, calculation, group, data) => {
// use a different ID each time for the DB, make sure they
// are always unique for each query, don't want overlap
// which could cause 409s
const db = new Pouch(newid())
+ try {
// write all the docs to the in memory Pouch (remove revs)
await db.bulkDocs(
data.map(row => ({
@@ -43,6 +41,9 @@ exports.runView = async (view, calculation, group, data) => {
row._rev = found._rev
}
}
- await db.destroy()
return response
+ } finally {
+   await db.destroy()
+   await closeDB(db)
+ }
}

View File

@@ -1,3 +1,16 @@
- const client = require("./client")
+ const core = require("@budibase/backend-core")
+ const env = require("../environment")
- module.exports = client
+ exports.init = () => {
+   const dbConfig = {
+     replication: true,
+     find: true,
+   }
+   if (env.isTest()) {
+     dbConfig.inMemory = true
+     dbConfig.allDbs = true
+   }
+   core.init({ db: dbConfig })
+ }

View File

@@ -376,6 +376,7 @@ class LinkController {
if (field.autocolumn) {
linkedField.autocolumn = field.autocolumn
+ linkedField.subtype = field.subtype
}
// check the linked table to make sure we aren't overwriting an existing column
Some files were not shown because too many files have changed in this diff.