diff --git a/packages/backend-core/context.js b/packages/backend-core/context.js new file mode 100644 index 0000000000..4bc100687d --- /dev/null +++ b/packages/backend-core/context.js @@ -0,0 +1,17 @@ +const { + getAppDB, + getDevAppDB, + getProdAppDB, + getAppId, + updateAppId, + doInAppContext, +} = require("./src/context") + +module.exports = { + getAppDB, + getDevAppDB, + getProdAppDB, + getAppId, + updateAppId, + doInAppContext, +} diff --git a/packages/backend-core/db.js b/packages/backend-core/db.js index 47854ca9c7..d2adf6c092 100644 --- a/packages/backend-core/db.js +++ b/packages/backend-core/db.js @@ -1,5 +1,6 @@ module.exports = { ...require("./src/db/utils"), ...require("./src/db/constants"), + ...require("./src/db"), ...require("./src/db/views"), } diff --git a/packages/backend-core/deprovision.js b/packages/backend-core/deprovision.js index b4b8dc6110..672da214ff 100644 --- a/packages/backend-core/deprovision.js +++ b/packages/backend-core/deprovision.js @@ -1 +1 @@ -module.exports = require("./src/tenancy/deprovision") +module.exports = require("./src/context/deprovision") diff --git a/packages/backend-core/src/tenancy/FunctionContext.js b/packages/backend-core/src/context/FunctionContext.js similarity index 70% rename from packages/backend-core/src/tenancy/FunctionContext.js rename to packages/backend-core/src/context/FunctionContext.js index d97a3a30b4..1a3f65056e 100644 --- a/packages/backend-core/src/tenancy/FunctionContext.js +++ b/packages/backend-core/src/context/FunctionContext.js @@ -4,8 +4,8 @@ const { newid } = require("../hashing") const REQUEST_ID_KEY = "requestId" class FunctionContext { - static getMiddleware(updateCtxFn = null) { - const namespace = this.createNamespace() + static getMiddleware(updateCtxFn = null, contextName = "session") { + const namespace = this.createNamespace(contextName) return async function (ctx, next) { await new Promise( @@ -24,14 +24,14 @@ class FunctionContext { } } - static run(callback) { - const namespace = this.createNamespace() + static run(callback, contextName = "session") { + const namespace = this.createNamespace(contextName) return namespace.runAndReturn(callback) } - static setOnContext(key, value) { - const namespace = this.createNamespace() + static setOnContext(key, value, contextName = "session") { + const namespace = this.createNamespace(contextName) namespace.set(key, value) } @@ -55,16 +55,16 @@ class FunctionContext { } } - static destroyNamespace() { + static destroyNamespace(name = "session") { if (this._namespace) { - cls.destroyNamespace("session") + cls.destroyNamespace(name) this._namespace = null } } - static createNamespace() { + static createNamespace(name = "session") { if (!this._namespace) { - this._namespace = cls.createNamespace("session") + this._namespace = cls.createNamespace(name) } return this._namespace } diff --git a/packages/backend-core/src/tenancy/deprovision.js b/packages/backend-core/src/context/deprovision.js similarity index 98% rename from packages/backend-core/src/tenancy/deprovision.js rename to packages/backend-core/src/context/deprovision.js index 608ca1b84a..1fbc2c8398 100644 --- a/packages/backend-core/src/tenancy/deprovision.js +++ b/packages/backend-core/src/context/deprovision.js @@ -1,6 +1,6 @@ const { getGlobalUserParams, getAllApps } = require("../db/utils") const { getDB, getCouch } = require("../db") -const { getGlobalDB } = require("./tenancy") +const { getGlobalDB } = require("../tenancy") const { StaticDatabases } = require("../db/constants") const TENANT_DOC = 
StaticDatabases.PLATFORM_INFO.docs.tenants diff --git a/packages/backend-core/src/context/index.js b/packages/backend-core/src/context/index.js new file mode 100644 index 0000000000..1c1238278e --- /dev/null +++ b/packages/backend-core/src/context/index.js @@ -0,0 +1,195 @@ +const env = require("../environment") +const { Headers } = require("../../constants") +const cls = require("./FunctionContext") +const { getCouch } = require("../db") +const { getProdAppID, getDevelopmentAppID } = require("../db/conversions") +const { isEqual } = require("lodash") + +// some test cases call functions directly, need to +// store an app ID to pretend there is a context +let TEST_APP_ID = null + +const ContextKeys = { + TENANT_ID: "tenantId", + APP_ID: "appId", + // whatever the request app DB was + CURRENT_DB: "currentDb", + // get the prod app DB from the request + PROD_DB: "prodDb", + // get the dev app DB from the request + DEV_DB: "devDb", + DB_OPTS: "dbOpts", +} + +exports.DEFAULT_TENANT_ID = "default" + +exports.isDefaultTenant = () => { + return exports.getTenantId() === exports.DEFAULT_TENANT_ID +} + +exports.isMultiTenant = () => { + return env.MULTI_TENANCY +} + +// used for automations, API endpoints should always be in context already +exports.doInTenant = (tenantId, task) => { + return cls.run(() => { + // set the tenant id + cls.setOnContext(ContextKeys.TENANT_ID, tenantId) + + // invoke the task + return task() + }) +} + +exports.doInAppContext = (appId, task) => { + return cls.run(() => { + // set the app ID + cls.setOnContext(ContextKeys.APP_ID, appId) + + // invoke the task + return task() + }) +} + +exports.updateTenantId = tenantId => { + cls.setOnContext(ContextKeys.TENANT_ID, tenantId) +} + +exports.updateAppId = appId => { + try { + cls.setOnContext(ContextKeys.APP_ID, appId) + cls.setOnContext(ContextKeys.PROD_DB, null) + cls.setOnContext(ContextKeys.DEV_DB, null) + cls.setOnContext(ContextKeys.CURRENT_DB, null) + cls.setOnContext(ContextKeys.DB_OPTS, null) + } catch (err) { + if (env.isTest()) { + TEST_APP_ID = appId + } else { + throw err + } + } +} + +exports.setTenantId = ( + ctx, + opts = { allowQs: false, allowNoTenant: false } +) => { + let tenantId + // exit early if not multi-tenant + if (!exports.isMultiTenant()) { + cls.setOnContext(ContextKeys.TENANT_ID, this.DEFAULT_TENANT_ID) + return + } + + const allowQs = opts && opts.allowQs + const allowNoTenant = opts && opts.allowNoTenant + const header = ctx.request.headers[Headers.TENANT_ID] + const user = ctx.user || {} + if (allowQs) { + const query = ctx.request.query || {} + tenantId = query.tenantId + } + // override query string (if allowed) by user, or header + // URL params cannot be used in a middleware, as they are + // processed later in the chain + tenantId = user.tenantId || header || tenantId + + // Set the tenantId from the subdomain + if (!tenantId) { + tenantId = ctx.subdomains && ctx.subdomains[0] + } + + if (!tenantId && !allowNoTenant) { + ctx.throw(403, "Tenant id not set") + } + // check tenant ID just incase no tenant was allowed + if (tenantId) { + cls.setOnContext(ContextKeys.TENANT_ID, tenantId) + } +} + +exports.isTenantIdSet = () => { + const tenantId = cls.getFromContext(ContextKeys.TENANT_ID) + return !!tenantId +} + +exports.getTenantId = () => { + if (!exports.isMultiTenant()) { + return exports.DEFAULT_TENANT_ID + } + const tenantId = cls.getFromContext(ContextKeys.TENANT_ID) + if (!tenantId) { + throw Error("Tenant id not found") + } + return tenantId +} + +exports.getAppId = () => { + const 
foundId = cls.getFromContext(ContextKeys.APP_ID) + if (!foundId && env.isTest() && TEST_APP_ID) { + return TEST_APP_ID + } else { + return foundId + } +} + +function getDB(key, opts) { + const dbOptsKey = `${key}${ContextKeys.DB_OPTS}` + let storedOpts = cls.getFromContext(dbOptsKey) + let db = cls.getFromContext(key) + if (db && isEqual(opts, storedOpts)) { + return db + } + const appId = exports.getAppId() + const CouchDB = getCouch() + let toUseAppId + switch (key) { + case ContextKeys.CURRENT_DB: + toUseAppId = appId + break + case ContextKeys.PROD_DB: + toUseAppId = getProdAppID(appId) + break + case ContextKeys.DEV_DB: + toUseAppId = getDevelopmentAppID(appId) + break + } + db = new CouchDB(toUseAppId, opts) + try { + cls.setOnContext(key, db) + if (opts) { + cls.setOnContext(dbOptsKey, opts) + } + } catch (err) { + if (!env.isTest()) { + throw err + } + } + return db +} + +/** + * Opens the app database based on whatever the request + * contained, dev or prod. + */ +exports.getAppDB = opts => { + return getDB(ContextKeys.CURRENT_DB, opts) +} + +/** + * This specifically gets the prod app ID, if the request + * contained a development app ID, this will open the prod one. + */ +exports.getProdAppDB = opts => { + return getDB(ContextKeys.PROD_DB, opts) +} + +/** + * This specifically gets the dev app ID, if the request + * contained a prod app ID, this will open the dev one. + */ +exports.getDevAppDB = opts => { + return getDB(ContextKeys.DEV_DB, opts) +} diff --git a/packages/backend-core/src/db/constants.js b/packages/backend-core/src/db/constants.js index 2affb09c7c..b41a9a9c08 100644 --- a/packages/backend-core/src/db/constants.js +++ b/packages/backend-core/src/db/constants.js @@ -32,3 +32,7 @@ exports.StaticDatabases = { }, }, } + +exports.APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR +exports.APP_DEV = exports.APP_DEV_PREFIX = + exports.DocumentTypes.APP_DEV + exports.SEPARATOR diff --git a/packages/backend-core/src/db/conversions.js b/packages/backend-core/src/db/conversions.js new file mode 100644 index 0000000000..50d896322f --- /dev/null +++ b/packages/backend-core/src/db/conversions.js @@ -0,0 +1,46 @@ +const NO_APP_ERROR = "No app provided" +const { APP_DEV_PREFIX, APP_PREFIX } = require("./constants") + +exports.isDevAppID = appId => { + if (!appId) { + throw NO_APP_ERROR + } + return appId.startsWith(APP_DEV_PREFIX) +} + +exports.isProdAppID = appId => { + if (!appId) { + throw NO_APP_ERROR + } + return appId.startsWith(APP_PREFIX) && !exports.isDevAppID(appId) +} + +exports.isDevApp = app => { + if (!app) { + throw NO_APP_ERROR + } + return exports.isDevAppID(app.appId) +} + +/** + * Convert a development app ID to a deployed app ID. + */ +exports.getProdAppID = appId => { + // if dev, convert it + if (appId.startsWith(APP_DEV_PREFIX)) { + const id = appId.split(APP_DEV_PREFIX)[1] + return `${APP_PREFIX}${id}` + } + return appId +} + +/** + * Convert a deployed app ID to a development app ID. 
+ */ +exports.getDevelopmentAppID = appId => { + if (!appId.startsWith(APP_DEV_PREFIX)) { + const id = appId.split(APP_PREFIX)[1] + return `${APP_DEV_PREFIX}${id}` + } + return appId +} diff --git a/packages/backend-core/src/db/utils.js b/packages/backend-core/src/db/utils.js index 2bc5462646..2800cf43c2 100644 --- a/packages/backend-core/src/db/utils.js +++ b/packages/backend-core/src/db/utils.js @@ -2,7 +2,13 @@ const { newid } = require("../hashing") const Replication = require("./Replication") const { DEFAULT_TENANT_ID, Configs } = require("../constants") const env = require("../environment") -const { StaticDatabases, SEPARATOR, DocumentTypes } = require("./constants") +const { + StaticDatabases, + SEPARATOR, + DocumentTypes, + APP_PREFIX, + APP_DEV, +} = require("./constants") const { getTenantId, getTenantIDFromAppID, @@ -12,8 +18,13 @@ const fetch = require("node-fetch") const { getCouch } = require("./index") const { getAppMetadata } = require("../cache/appMetadata") const { checkSlashesInUrl } = require("../helpers") - -const NO_APP_ERROR = "No app provided" +const { + isDevApp, + isProdAppID, + isDevAppID, + getDevelopmentAppID, + getProdAppID, +} = require("./conversions") const UNICODE_MAX = "\ufff0" @@ -24,10 +35,15 @@ exports.ViewNames = { exports.StaticDatabases = StaticDatabases exports.DocumentTypes = DocumentTypes -exports.APP_PREFIX = DocumentTypes.APP + SEPARATOR -exports.APP_DEV = exports.APP_DEV_PREFIX = DocumentTypes.APP_DEV + SEPARATOR +exports.APP_PREFIX = APP_PREFIX +exports.APP_DEV = exports.APP_DEV_PREFIX = APP_DEV exports.SEPARATOR = SEPARATOR exports.getTenantIDFromAppID = getTenantIDFromAppID +exports.isDevApp = isDevApp +exports.isProdAppID = isProdAppID +exports.isDevAppID = isDevAppID +exports.getDevelopmentAppID = getDevelopmentAppID +exports.getProdAppID = getProdAppID /** * If creating DB allDocs/query params with only a single top level ID this can be used, this @@ -52,27 +68,6 @@ function getDocParams(docType, docId = null, otherProps = {}) { } } -exports.isDevAppID = appId => { - if (!appId) { - throw NO_APP_ERROR - } - return appId.startsWith(exports.APP_DEV_PREFIX) -} - -exports.isProdAppID = appId => { - if (!appId) { - throw NO_APP_ERROR - } - return appId.startsWith(exports.APP_PREFIX) && !exports.isDevAppID(appId) -} - -function isDevApp(app) { - if (!app) { - throw NO_APP_ERROR - } - return exports.isDevAppID(app.appId) -} - /** * Generates a new workspace ID. * @returns {string} The new workspace ID which the workspace doc can be stored under. @@ -157,29 +152,6 @@ exports.getRoleParams = (roleId = null, otherProps = {}) => { return getDocParams(DocumentTypes.ROLE, roleId, otherProps) } -/** - * Convert a development app ID to a deployed app ID. - */ -exports.getDeployedAppID = appId => { - // if dev, convert it - if (appId.startsWith(exports.APP_DEV_PREFIX)) { - const id = appId.split(exports.APP_DEV_PREFIX)[1] - return `${exports.APP_PREFIX}${id}` - } - return appId -} - -/** - * Convert a deployed app ID to a development app ID. 
- */ -exports.getDevelopmentAppID = appId => { - if (!appId.startsWith(exports.APP_DEV_PREFIX)) { - const id = appId.split(exports.APP_PREFIX)[1] - return `${exports.APP_DEV_PREFIX}${id}` - } - return appId -} - exports.getCouchUrl = () => { if (!env.COUCH_DB_URL) return @@ -225,7 +197,7 @@ exports.getAllDbs = async () => { } let couchUrl = `${exports.getCouchUrl()}/_all_dbs` let tenantId = getTenantId() - if (!env.MULTI_TENANCY || tenantId == DEFAULT_TENANT_ID) { + if (!env.MULTI_TENANCY || tenantId === DEFAULT_TENANT_ID) { // just get all DBs when: // - single tenancy // - default tenant @@ -250,11 +222,10 @@ exports.getAllDbs = async () => { /** * Lots of different points in the system need to find the full list of apps, this will * enumerate the entire CouchDB cluster and get the list of databases (every app). - * NOTE: this operation is fine in self hosting, but cannot be used when hosting many - * different users/companies apps as there is no security around it - all apps are returned. * @return {Promise} returns the app information document stored in each app database. */ -exports.getAllApps = async (CouchDB, { dev, all, idsOnly } = {}) => { +exports.getAllApps = async ({ dev, all, idsOnly } = {}) => { + const CouchDB = getCouch() let tenantId = getTenantId() if (!env.MULTI_TENANCY && !tenantId) { tenantId = DEFAULT_TENANT_ID @@ -310,8 +281,8 @@ exports.getAllApps = async (CouchDB, { dev, all, idsOnly } = {}) => { /** * Utility function for getAllApps but filters to production apps only. */ -exports.getDeployedAppIDs = async CouchDB => { - return (await exports.getAllApps(CouchDB, { idsOnly: true })).filter( +exports.getProdAppIDs = async () => { + return (await exports.getAllApps({ idsOnly: true })).filter( id => !exports.isDevAppID(id) ) } @@ -319,13 +290,14 @@ exports.getDeployedAppIDs = async CouchDB => { /** * Utility function for the inverse of above. */ -exports.getDevAppIDs = async CouchDB => { - return (await exports.getAllApps(CouchDB, { idsOnly: true })).filter(id => +exports.getDevAppIDs = async () => { + return (await exports.getAllApps({ idsOnly: true })).filter(id => exports.isDevAppID(id) ) } -exports.dbExists = async (CouchDB, dbName) => { +exports.dbExists = async dbName => { + const CouchDB = getCouch() let exists = false try { const db = CouchDB(dbName, { skip_setup: true }) diff --git a/packages/backend-core/src/middleware/appTenancy.js b/packages/backend-core/src/middleware/appTenancy.js index 30fc4f7453..b0430a0051 100644 --- a/packages/backend-core/src/middleware/appTenancy.js +++ b/packages/backend-core/src/middleware/appTenancy.js @@ -3,8 +3,9 @@ const { updateTenantId, isTenantIdSet, DEFAULT_TENANT_ID, + updateAppId, } = require("../tenancy") -const ContextFactory = require("../tenancy/FunctionContext") +const ContextFactory = require("../context/FunctionContext") const { getTenantIDFromAppID } = require("../db/utils") module.exports = () => { @@ -21,5 +22,6 @@ module.exports = () => { const appId = ctx.appId ? ctx.appId : ctx.user ? 
ctx.user.appId : null const tenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID updateTenantId(tenantId) + updateAppId(appId) }) } diff --git a/packages/backend-core/src/middleware/tenancy.js b/packages/backend-core/src/middleware/tenancy.js index adfd36a503..5bb81f8824 100644 --- a/packages/backend-core/src/middleware/tenancy.js +++ b/packages/backend-core/src/middleware/tenancy.js @@ -1,5 +1,5 @@ const { setTenantId } = require("../tenancy") -const ContextFactory = require("../tenancy/FunctionContext") +const ContextFactory = require("../context/FunctionContext") const { buildMatcherRegex, matches } = require("./matchers") module.exports = ( diff --git a/packages/backend-core/src/security/roles.js b/packages/backend-core/src/security/roles.js index 8529dde6f4..82bfbd5212 100644 --- a/packages/backend-core/src/security/roles.js +++ b/packages/backend-core/src/security/roles.js @@ -1,4 +1,3 @@ -const { getDB } = require("../db") const { cloneDeep } = require("lodash/fp") const { BUILTIN_PERMISSION_IDS } = require("./permissions") const { @@ -7,6 +6,8 @@ const { DocumentTypes, SEPARATOR, } = require("../db/utils") +const { getAppDB } = require("../context") +const { getDB } = require("../db") const BUILTIN_IDS = { ADMIN: "ADMIN", @@ -111,11 +112,10 @@ exports.lowerBuiltinRoleID = (roleId1, roleId2) => { /** * Gets the role object, this is mainly useful for two purposes, to check if the level exists and * to check if the role inherits any others. - * @param {string} appId The app in which to look for the role. * @param {string|null} roleId The level ID to lookup. * @returns {Promise} The role object, which may contain an "inherits" property. */ -exports.getRole = async (appId, roleId) => { +exports.getRole = async roleId => { if (!roleId) { return null } @@ -128,7 +128,7 @@ exports.getRole = async (appId, roleId) => { ) } try { - const db = getDB(appId) + const db = getAppDB() const dbRole = await db.get(exports.getDBRoleID(roleId)) role = Object.assign(role, dbRole) // finalise the ID @@ -145,11 +145,11 @@ exports.getRole = async (appId, roleId) => { /** * Simple function to get all the roles based on the top level user role ID. */ -async function getAllUserRoles(appId, userRoleId) { +async function getAllUserRoles(userRoleId) { if (!userRoleId) { return [BUILTIN_IDS.BASIC] } - let currentRole = await exports.getRole(appId, userRoleId) + let currentRole = await exports.getRole(userRoleId) let roles = currentRole ? [currentRole] : [] let roleIds = [userRoleId] // get all the inherited roles @@ -159,7 +159,7 @@ async function getAllUserRoles(appId, userRoleId) { roleIds.indexOf(currentRole.inherits) === -1 ) { roleIds.push(currentRole.inherits) - currentRole = await exports.getRole(appId, currentRole.inherits) + currentRole = await exports.getRole(currentRole.inherits) roles.push(currentRole) } return roles @@ -168,29 +168,23 @@ async function getAllUserRoles(appId, userRoleId) { /** * Returns an ordered array of the user's inherited role IDs, this can be used * to determine if a user can access something that requires a specific role. - * @param {string} appId The ID of the application from which roles should be obtained. * @param {string} userRoleId The user's role ID, this can be found in their access token. * @param {object} opts Various options, such as whether to only retrieve the IDs (default true). * @returns {Promise} returns an ordered array of the roles, with the first being their * highest level of access and the last being the lowest level. 
*/ -exports.getUserRoleHierarchy = async ( - appId, - userRoleId, - opts = { idOnly: true } -) => { +exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => { // special case, if they don't have a role then they are a public user - const roles = await getAllUserRoles(appId, userRoleId) + const roles = await getAllUserRoles(userRoleId) return opts.idOnly ? roles.map(role => role._id) : roles } /** * Given an app ID this will retrieve all of the roles that are currently within that app. - * @param {string} appId The ID of the app to retrieve the roles from. * @return {Promise} An array of the role objects that were found. */ exports.getAllRoles = async appId => { - const db = getDB(appId) + const db = appId ? getDB(appId) : getAppDB() const body = await db.allDocs( getRoleParams(null, { include_docs: true, @@ -218,19 +212,17 @@ exports.getAllRoles = async appId => { } /** - * This retrieves the required role/ - * @param appId + * This retrieves the required role * @param permLevel * @param resourceId * @param subResourceId * @return {Promise<{permissions}|Object>} */ exports.getRequiredResourceRole = async ( - appId, permLevel, { resourceId, subResourceId } ) => { - const roles = await exports.getAllRoles(appId) + const roles = await exports.getAllRoles() let main = [], sub = [] for (let role of roles) { @@ -251,8 +243,7 @@ exports.getRequiredResourceRole = async ( } class AccessController { - constructor(appId) { - this.appId = appId + constructor() { this.userHierarchies = {} } @@ -270,7 +261,7 @@ class AccessController { } let roleIds = this.userHierarchies[userRoleId] if (!roleIds) { - roleIds = await exports.getUserRoleHierarchy(this.appId, userRoleId) + roleIds = await exports.getUserRoleHierarchy(userRoleId) this.userHierarchies[userRoleId] = roleIds } diff --git a/packages/backend-core/src/tenancy/context.js b/packages/backend-core/src/tenancy/context.js deleted file mode 100644 index 01d1fdc604..0000000000 --- a/packages/backend-core/src/tenancy/context.js +++ /dev/null @@ -1,84 +0,0 @@ -const env = require("../environment") -const { Headers } = require("../../constants") -const cls = require("./FunctionContext") - -exports.DEFAULT_TENANT_ID = "default" - -exports.isDefaultTenant = () => { - return exports.getTenantId() === exports.DEFAULT_TENANT_ID -} - -exports.isMultiTenant = () => { - return env.MULTI_TENANCY -} - -const TENANT_ID = "tenantId" - -// used for automations, API endpoints should always be in context already -exports.doInTenant = (tenantId, task) => { - return cls.run(() => { - // set the tenant id - cls.setOnContext(TENANT_ID, tenantId) - - // invoke the task - return task() - }) -} - -exports.updateTenantId = tenantId => { - cls.setOnContext(TENANT_ID, tenantId) -} - -exports.setTenantId = ( - ctx, - opts = { allowQs: false, allowNoTenant: false } -) => { - let tenantId - // exit early if not multi-tenant - if (!exports.isMultiTenant()) { - cls.setOnContext(TENANT_ID, this.DEFAULT_TENANT_ID) - return - } - - const allowQs = opts && opts.allowQs - const allowNoTenant = opts && opts.allowNoTenant - const header = ctx.request.headers[Headers.TENANT_ID] - const user = ctx.user || {} - if (allowQs) { - const query = ctx.request.query || {} - tenantId = query.tenantId - } - // override query string (if allowed) by user, or header - // URL params cannot be used in a middleware, as they are - // processed later in the chain - tenantId = user.tenantId || header || tenantId - - // Set the tenantId from the subdomain - if (!tenantId) { - tenantId = 
ctx.subdomains && ctx.subdomains[0] - } - - if (!tenantId && !allowNoTenant) { - ctx.throw(403, "Tenant id not set") - } - // check tenant ID just incase no tenant was allowed - if (tenantId) { - cls.setOnContext(TENANT_ID, tenantId) - } -} - -exports.isTenantIdSet = () => { - const tenantId = cls.getFromContext(TENANT_ID) - return !!tenantId -} - -exports.getTenantId = () => { - if (!exports.isMultiTenant()) { - return exports.DEFAULT_TENANT_ID - } - const tenantId = cls.getFromContext(TENANT_ID) - if (!tenantId) { - throw Error("Tenant id not found") - } - return tenantId -} diff --git a/packages/backend-core/src/tenancy/index.js b/packages/backend-core/src/tenancy/index.js index 2fe257d885..c847033a12 100644 --- a/packages/backend-core/src/tenancy/index.js +++ b/packages/backend-core/src/tenancy/index.js @@ -1,4 +1,4 @@ module.exports = { - ...require("./context"), + ...require("../context"), ...require("./tenancy"), } diff --git a/packages/backend-core/src/tenancy/tenancy.js b/packages/backend-core/src/tenancy/tenancy.js index de597eac01..8360198b60 100644 --- a/packages/backend-core/src/tenancy/tenancy.js +++ b/packages/backend-core/src/tenancy/tenancy.js @@ -1,6 +1,6 @@ const { getDB } = require("../db") const { SEPARATOR, StaticDatabases, DocumentTypes } = require("../db/constants") -const { getTenantId, DEFAULT_TENANT_ID, isMultiTenant } = require("./context") +const { getTenantId, DEFAULT_TENANT_ID, isMultiTenant } = require("../context") const env = require("../environment") const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants diff --git a/packages/builder/cypress/setup.js b/packages/builder/cypress/setup.js index cf5ec53ceb..0f3d333cb7 100644 --- a/packages/builder/cypress/setup.js +++ b/packages/builder/cypress/setup.js @@ -3,9 +3,6 @@ const path = require("path") const tmpdir = path.join(require("os").tmpdir(), ".budibase") -// these run on ports we don't normally use so that they can run alongside the -const fs = require("fs") - // normal development system const WORKER_PORT = "10002" const MAIN_PORT = cypressConfig.env.PORT @@ -29,22 +26,20 @@ process.env.ALLOW_DEV_AUTOMATIONS = 1 // Stop info logs polluting test outputs process.env.LOG_LEVEL = "error" -async function run() { +exports.run = ( + serverLoc = "../../server/dist", + workerLoc = "../../worker/dist" +) => { // require("dotenv").config({ path: resolve(dir, ".env") }) - if (!fs.existsSync("../server/dist")) { - console.error("Unable to run cypress, need to build server first") - process.exit(-1) - } - // don't make this a variable or top level require // it will cause environment module to be loaded prematurely - const server = require("../../server/dist/app") + require(serverLoc) process.env.PORT = WORKER_PORT - const worker = require("../../worker/dist/index") + require(workerLoc) // reload main port for rest of system process.env.PORT = MAIN_PORT - server.on("close", () => console.log("Server Closed")) - worker.on("close", () => console.log("Worker Closed")) } -run() +if (require.main === module) { + exports.run() +} diff --git a/packages/builder/cypress/ts/setup.ts b/packages/builder/cypress/ts/setup.ts new file mode 100644 index 0000000000..b6b12bf730 --- /dev/null +++ b/packages/builder/cypress/ts/setup.ts @@ -0,0 +1,4 @@ +// @ts-ignore +import { run } from "../setup" + +run("../../server/src/index", "../../worker/src/index") diff --git a/packages/builder/package.json b/packages/builder/package.json index bfc8d4395b..b4cef67afc 100644 --- a/packages/builder/package.json +++ 
b/packages/builder/package.json @@ -11,12 +11,13 @@ "dev:builder": "routify -c dev:vite", "dev:vite": "vite --host 0.0.0.0", "rollup": "rollup -c -w", - "cy:setup": "node ./cypress/setup.js", + "cy:setup": "ts-node ./cypress/ts/setup.ts", + "cy:setup:ci": "node ./cypress/setup.js", "cy:run": "cypress run", "cy:open": "cypress open", "cy:run:ci": "cypress run --record", "cy:test": "start-server-and-test cy:setup http://localhost:10001/builder cy:run", - "cy:ci": "start-server-and-test cy:setup http://localhost:10001/builder cy:run", + "cy:ci": "start-server-and-test cy:setup:ci http://localhost:10001/builder cy:run", "cy:debug": "start-server-and-test cy:setup http://localhost:10001/builder cy:open" }, "jest": { @@ -106,6 +107,8 @@ "start-server-and-test": "^1.12.1", "svelte": "^3.38.2", "svelte-jester": "^1.3.2", + "ts-node": "^10.4.0", + "typescript": "^4.5.5", "vite": "^2.1.5" }, "gitHead": "115189f72a850bfb52b65ec61d932531bf327072" diff --git a/packages/builder/tsconfig.json b/packages/builder/tsconfig.json new file mode 100644 index 0000000000..6a5ba315a1 --- /dev/null +++ b/packages/builder/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "lib": ["es2019"], + "allowJs": true, + "outDir": "dist", + "strict": true, + "noImplicitAny": true, + "esModuleInterop": true, + "resolveJsonModule": true, + "incremental": true + }, + "include": [ + "./src/**/*" + ], + "exclude": [ + "node_modules", + "**/*.json", + "**/*.spec.ts", + "**/*.spec.js" + ] +} diff --git a/packages/builder/yarn.lock b/packages/builder/yarn.lock index f827c20328..dcaa00b14c 100644 --- a/packages/builder/yarn.lock +++ b/packages/builder/yarn.lock @@ -970,10 +970,10 @@ svelte-flatpickr "^3.2.3" svelte-portal "^1.0.0" -"@budibase/bbui@^1.0.46", "@budibase/bbui@^1.0.46-alpha.3": - version "1.0.46" - resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.46.tgz#7306d4eda7f2c827577a4affa1fd314b38ba1198" - integrity sha512-padm0qq2SBNIslXEQW+HIv32pkIHFzloR93FDzSXh0sO43Q+/d2gbAhjI9ZUSAVncx9JNc46dolL1CwrvHFElg== +"@budibase/bbui@^1.0.46-alpha.6", "@budibase/bbui@^1.0.47": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.47.tgz#df2848b89f881fe603e7156855d6a6c31d4f58bf" + integrity sha512-RRm/BgK5aSx2/vGjMGljw240/48Ksc3/h4yB1nhQj8Xx3fKhlGnWDvWNy+sakvA6+fJvEXuti8RoxHtQ6lXmqA== dependencies: "@adobe/spectrum-css-workflow-icons" "^1.2.1" "@spectrum-css/actionbutton" "^1.0.1" @@ -1020,14 +1020,14 @@ svelte-flatpickr "^3.2.3" svelte-portal "^1.0.0" -"@budibase/client@^1.0.46-alpha.3": - version "1.0.46" - resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.46.tgz#e6ef8945b9d7046b6e6d6761628aa1d85387acca" - integrity sha512-jI3z1G/EsfJNCQCvrqzsR4vR1zLoVefzCXCEASIPg9BPzdiAFSwuUJVLijLFIIKfuDVeveUll94fgu7XNY8U2w== +"@budibase/client@^1.0.46-alpha.6": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.47.tgz#ce9e2fbd300e5dc389ea29a3a3347897f096c824" + integrity sha512-jB/al8v+nY/VLc6sH5Jt9JzWONVo+24/cI95iXlZSV5xwiKIVGj4+2F5QjKZ0c9Gm7SrrfP2T571N+4XaXNCGg== dependencies: - "@budibase/bbui" "^1.0.46" + "@budibase/bbui" "^1.0.47" "@budibase/standard-components" "^0.9.139" - "@budibase/string-templates" "^1.0.46" + "@budibase/string-templates" "^1.0.47" regexparam "^1.3.0" shortid "^2.2.15" svelte-spa-router "^3.0.5" @@ -1082,10 +1082,10 @@ svelte-apexcharts "^1.0.2" svelte-flatpickr "^3.1.0" -"@budibase/string-templates@^1.0.46", "@budibase/string-templates@^1.0.46-alpha.3": - version 
"1.0.46" - resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.46.tgz#5beef1687b451e4512a465b4e143c8ab46234006" - integrity sha512-t4ZAUkSz2XatjAN0faex5ovmD3mFz672lV/aBk7tfLFzZiKlWjngqdwpLLQNnsqeGvYo75JP2J06j86SX6O83w== +"@budibase/string-templates@^1.0.46-alpha.6", "@budibase/string-templates@^1.0.47": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.47.tgz#626b9fc4542c7b36a0ae24e820d25a704c527bec" + integrity sha512-87BUfOPr8FGKH8Pt88jhKNGT9PcOmkLRCeen4xi1dI113pAQznBO9vgV+cXOChUBBEQka9Rrt85LMJXidiwVgg== dependencies: "@budibase/handlebars-helpers" "^0.11.7" dayjs "^1.10.4" @@ -1102,6 +1102,18 @@ exec-sh "^0.3.2" minimist "^1.2.0" +"@cspotcode/source-map-consumer@0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz#33bf4b7b39c178821606f669bbc447a6a629786b" + integrity sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg== + +"@cspotcode/source-map-support@0.7.0": + version "0.7.0" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz#4789840aa859e46d2f3173727ab707c66bf344f5" + integrity sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA== + dependencies: + "@cspotcode/source-map-consumer" "0.8.0" + "@cypress/listr-verbose-renderer@^0.4.1": version "0.4.1" resolved "https://registry.yarnpkg.com/@cypress/listr-verbose-renderer/-/listr-verbose-renderer-0.4.1.tgz#a77492f4b11dcc7c446a34b3e28721afd33c642a" @@ -1795,6 +1807,26 @@ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== +"@tsconfig/node10@^1.0.7": + version "1.0.8" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.8.tgz#c1e4e80d6f964fbecb3359c43bd48b40f7cadad9" + integrity sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg== + +"@tsconfig/node12@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.9.tgz#62c1f6dee2ebd9aead80dc3afa56810e58e1a04c" + integrity sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw== + +"@tsconfig/node14@^1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.1.tgz#95f2d167ffb9b8d2068b0b235302fafd4df711f2" + integrity sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg== + +"@tsconfig/node16@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.2.tgz#423c77877d0569db20e1fc80885ac4118314010e" + integrity sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA== + "@types/aria-query@^4.2.0": version "4.2.2" resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" @@ -1971,6 +2003,11 @@ acorn-walk@^7.1.1: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== +acorn-walk@^8.1.1: + version "8.2.0" + resolved 
"https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" @@ -1981,6 +2018,11 @@ acorn@^8.2.4: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.5.0.tgz#4512ccb99b3698c752591e9bb4472e38ad43cee2" integrity sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q== +acorn@^8.4.1: + version "8.7.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" + integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== + agent-base@6: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" @@ -2087,6 +2129,11 @@ arch@^2.1.2: resolved "https://registry.yarnpkg.com/arch/-/arch-2.2.0.tgz#1bc47818f305764f23ab3306b0bfc086c5a29d11" integrity sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ== +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + argparse@^1.0.10, argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -2720,6 +2767,11 @@ core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + cross-spawn@^6.0.0: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" @@ -2965,6 +3017,11 @@ diff-sequences@^27.0.6: resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.0.6.tgz#3305cb2e55a033924054695cc66019fd7f8e5723" integrity sha512-ag6wfpBFyNXZ0p8pcuIDS//D8H062ZQJ3fzYxjpmeKjnz8W4pekL3AI8VohmyZmsWW2PWaHgjsmqR6L13101VQ== +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -5004,6 +5061,11 @@ make-dir@^3.0.0: dependencies: semver "^6.0.0" +make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + makeerror@1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" @@ -6587,6 +6649,24 @@ tr46@~0.0.3: resolved 
"https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= +ts-node@^10.4.0: + version "10.4.0" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.4.0.tgz#680f88945885f4e6cf450e7f0d6223dd404895f7" + integrity sha512-g0FlPvvCXSIO1JDF6S232P5jPYqBkRL9qly81ZgAOSU7rwI0stphCgd2kLiCrU9DjQCrJMWEqcNSjQL02s6d8A== + dependencies: + "@cspotcode/source-map-support" "0.7.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + yn "3.1.1" + tslib@^1.9.0, tslib@^1.9.3: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" @@ -6655,6 +6735,11 @@ typeof-article@^0.1.1: dependencies: kind-of "^3.1.0" +typescript@^4.5.5: + version "4.5.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3" + integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA== + uglify-js@^3.1.4: version "3.14.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.14.5.tgz#cdabb7d4954231d80cb4a927654c4655e51f4859" @@ -7011,6 +7096,11 @@ year@^0.2.1: resolved "https://registry.yarnpkg.com/year/-/year-0.2.1.tgz#4083ae520a318b23ec86037f3000cb892bdf9bb0" integrity sha1-QIOuUgoxiyPshgN/MADLiSvfm7A= +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + yup@0.29.2: version "0.29.2" resolved "https://registry.yarnpkg.com/yup/-/yup-0.29.2.tgz#5302abd9024cca335b987793f8df868e410b7b67" diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js index eb1f7bc5e6..00d3efccb8 100644 --- a/packages/server/src/api/controllers/application.js +++ b/packages/server/src/api/controllers/application.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const env = require("../../environment") const packageJson = require("../../../package.json") const { @@ -29,7 +28,7 @@ const { processObject } = require("@budibase/string-templates") const { getAllApps, isDevAppID, - getDeployedAppID, + getProdAppID, Replication, } = require("@budibase/backend-core/db") const { USERS_TABLE_SCHEMA } = require("../../constants") @@ -45,11 +44,17 @@ const { getTenantId, isMultiTenant } = require("@budibase/backend-core/tenancy") const { syncGlobalUsers } = require("./user") const { app: appCache } = require("@budibase/backend-core/cache") const { cleanupAutomations } = require("../../automations/utils") +const { + getAppDB, + getProdAppDB, + updateAppId, +} = require("@budibase/backend-core/context") const URL_REGEX_SLASH = /\/|\\/g // utility function, need to do away with this -async function getLayouts(db) { +async function getLayouts() { + const db = getAppDB() return ( await db.allDocs( getLayoutParams(null, { @@ -59,7 +64,8 @@ async function getLayouts(db) { ).rows.map(row => row.doc) } -async function getScreens(db) { +async function getScreens() { + const db = getAppDB() return ( await db.allDocs( getScreenParams(null, { @@ -117,8 +123,9 @@ async function createInstance(template) { const tenantId = isMultiTenant() ? 
getTenantId() : null const baseAppId = generateAppID(tenantId) const appId = generateDevAppID(baseAppId) + updateAppId(appId) - const db = new CouchDB(appId) + const db = getAppDB() await db.put({ _id: "_design/database", // view collation information, read before writing any complex views: @@ -128,9 +135,9 @@ async function createInstance(template) { // NOTE: indexes need to be created before any tables/templates // add view for linked rows - await createLinkView(appId) - await createRoutingView(appId) - await createAllSearchIndex(appId) + await createLinkView() + await createRoutingView() + await createAllSearchIndex() // replicate the template data to the instance DB // this is currently very hard to test, downloading and importing template files @@ -156,7 +163,7 @@ async function createInstance(template) { exports.fetch = async ctx => { const dev = ctx.query && ctx.query.status === AppStatus.DEV const all = ctx.query && ctx.query.status === AppStatus.ALL - const apps = await getAllApps(CouchDB, { dev, all }) + const apps = await getAllApps({ dev, all }) // get the locks for all the dev apps if (dev || all) { @@ -179,12 +186,11 @@ exports.fetch = async ctx => { } exports.fetchAppDefinition = async ctx => { - const db = new CouchDB(ctx.params.appId) - const layouts = await getLayouts(db) + const layouts = await getLayouts() const userRoleId = getUserRoleId(ctx) - const accessController = new AccessController(ctx.params.appId) + const accessController = new AccessController() const screens = await accessController.checkScreensAccess( - await getScreens(db), + await getScreens(), userRoleId ) ctx.body = { @@ -195,15 +201,15 @@ exports.fetchAppDefinition = async ctx => { } exports.fetchAppPackage = async ctx => { - const db = new CouchDB(ctx.params.appId) + const db = getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) - const layouts = await getLayouts(db) - let screens = await getScreens(db) + const layouts = await getLayouts() + let screens = await getScreens() // Only filter screens if the user is not a builder if (!(ctx.user.builder && ctx.user.builder.global)) { const userRoleId = getUserRoleId(ctx) - const accessController = new AccessController(ctx.params.appId) + const accessController = new AccessController() screens = await accessController.checkScreensAccess(screens, userRoleId) } @@ -216,7 +222,7 @@ exports.fetchAppPackage = async ctx => { } exports.create = async ctx => { - const apps = await getAllApps(CouchDB, { dev: true }) + const apps = await getAllApps({ dev: true }) const name = ctx.request.body.name checkAppName(ctx, apps, name) const url = exports.getAppUrl(ctx) @@ -234,7 +240,7 @@ exports.create = async ctx => { const instance = await createInstance(instanceConfig) const appId = instance._id - const db = new CouchDB(appId) + const db = getAppDB() let _rev try { // if template there will be an existing doc @@ -280,7 +286,7 @@ exports.create = async ctx => { // This endpoint currently operates as a PATCH rather than a PUT // Thus name and url fields are handled only if present exports.update = async ctx => { - const apps = await getAllApps(CouchDB, { dev: true }) + const apps = await getAllApps({ dev: true }) // validation const name = ctx.request.body.name if (name) { @@ -299,7 +305,7 @@ exports.update = async ctx => { exports.updateClient = async ctx => { // Get current app version - const db = new CouchDB(ctx.params.appId) + const db = getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) const currentVersion = 
application.version @@ -321,7 +327,7 @@ exports.updateClient = async ctx => { exports.revertClient = async ctx => { // Check app can be reverted - const db = new CouchDB(ctx.params.appId) + const db = getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) if (!application.revertableVersion) { ctx.throw(400, "There is no version to revert to") @@ -343,7 +349,7 @@ exports.revertClient = async ctx => { } exports.delete = async ctx => { - const db = new CouchDB(ctx.params.appId) + const db = getAppDB() const result = await db.destroy() /* istanbul ignore next */ @@ -368,10 +374,11 @@ exports.sync = async (ctx, next) => { } // replicate prod to dev - const prodAppId = getDeployedAppID(appId) + const prodAppId = getProdAppID(appId) try { - const prodDb = new CouchDB(prodAppId, { skip_setup: true }) + // specific case, want to make sure setup is skipped + const prodDb = getProdAppDB({ skip_setup: true }) const info = await prodDb.info() if (info.error) throw info.error } catch (err) { @@ -399,7 +406,7 @@ exports.sync = async (ctx, next) => { } // sync the users - await syncGlobalUsers(appId) + await syncGlobalUsers() if (error) { ctx.throw(400, error) @@ -411,7 +418,7 @@ exports.sync = async (ctx, next) => { } const updateAppPackage = async (appPackage, appId) => { - const db = new CouchDB(appId) + const db = getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) const newAppPackage = { ...application, ...appPackage } @@ -430,7 +437,7 @@ const updateAppPackage = async (appPackage, appId) => { } const createEmptyAppPackage = async (ctx, app) => { - const db = new CouchDB(app.appId) + const db = getAppDB() let screensAndLayouts = [] for (let layout of BASE_LAYOUTS) { diff --git a/packages/server/src/api/controllers/auth.js b/packages/server/src/api/controllers/auth.js index f1b665c069..30c0e5d09c 100644 --- a/packages/server/src/api/controllers/auth.js +++ b/packages/server/src/api/controllers/auth.js @@ -1,11 +1,10 @@ -const CouchDB = require("../../db") const { outputProcessing } = require("../../utilities/rowProcessor") const { InternalTables } = require("../../db/utils") const { getFullUser } = require("../../utilities/users") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") +const { getAppDB, getAppId } = require("@budibase/backend-core/context") exports.fetchSelf = async ctx => { - const appId = ctx.appId let userId = ctx.user.userId || ctx.user._id /* istanbul ignore next */ if (!userId) { @@ -19,8 +18,8 @@ exports.fetchSelf = async ctx => { // forward the csrf token from the session user.csrfToken = ctx.user.csrfToken - if (appId) { - const db = new CouchDB(appId) + if (getAppId()) { + const db = getAppDB() // remove the full roles structure delete user.roles try { @@ -29,7 +28,7 @@ exports.fetchSelf = async ctx => { // make sure there is never a stale csrf token delete metadata.csrfToken // specifically needs to make sure is enriched - ctx.body = await outputProcessing(ctx, userTable, { + ctx.body = await outputProcessing(userTable, { ...user, ...metadata, }) diff --git a/packages/server/src/api/controllers/automation.js b/packages/server/src/api/controllers/automation.js index 05337579a0..74942dad40 100644 --- a/packages/server/src/api/controllers/automation.js +++ b/packages/server/src/api/controllers/automation.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const actions = require("../../automations/actions") const triggers = require("../../automations/triggers") const { getAutomationParams, generateAutomationID } = 
require("../../db/utils") @@ -10,6 +9,7 @@ const { const { deleteEntityMetadata } = require("../../utilities") const { MetadataTypes } = require("../../constants") const { setTestFlag, clearTestFlag } = require("../../utilities/redis") +const { getAppDB } = require("@budibase/backend-core/context") const ACTION_DEFS = removeDeprecated(actions.ACTION_DEFINITIONS) const TRIGGER_DEFS = removeDeprecated(triggers.TRIGGER_DEFINITIONS) @@ -20,14 +20,9 @@ const TRIGGER_DEFS = removeDeprecated(triggers.TRIGGER_DEFINITIONS) * * *************************/ -async function cleanupAutomationMetadata(appId, automationId) { +async function cleanupAutomationMetadata(automationId) { + await deleteEntityMetadata(MetadataTypes.AUTOMATION_TEST_INPUT, automationId) await deleteEntityMetadata( - appId, - MetadataTypes.AUTOMATION_TEST_INPUT, - automationId - ) - await deleteEntityMetadata( - appId, MetadataTypes.AUTOMATION_TEST_HISTORY, automationId ) @@ -58,7 +53,7 @@ function cleanAutomationInputs(automation) { } exports.create = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() let automation = ctx.request.body automation.appId = ctx.appId @@ -72,7 +67,6 @@ exports.create = async function (ctx) { automation.type = "automation" automation = cleanAutomationInputs(automation) automation = await checkForWebhooks({ - appId: ctx.appId, newAuto: automation, }) const response = await db.put(automation) @@ -89,13 +83,12 @@ exports.create = async function (ctx) { } exports.update = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() let automation = ctx.request.body automation.appId = ctx.appId const oldAutomation = await db.get(automation._id) automation = cleanAutomationInputs(automation) automation = await checkForWebhooks({ - appId: ctx.appId, oldAuto: oldAutomation, newAuto: automation, }) @@ -131,7 +124,7 @@ exports.update = async function (ctx) { } exports.fetch = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const response = await db.allDocs( getAutomationParams(null, { include_docs: true, @@ -141,20 +134,19 @@ exports.fetch = async function (ctx) { } exports.find = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() ctx.body = await db.get(ctx.params.id) } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const automationId = ctx.params.id const oldAutomation = await db.get(automationId) await checkForWebhooks({ - appId: ctx.appId, oldAuto: oldAutomation, }) // delete metadata first - await cleanupAutomationMetadata(ctx.appId, automationId) + await cleanupAutomationMetadata(automationId) ctx.body = await db.remove(automationId, ctx.params.rev) } @@ -180,12 +172,11 @@ module.exports.getDefinitionList = async function (ctx) { *********************/ exports.trigger = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() let automation = await db.get(ctx.params.id) await triggers.externalTrigger(automation, { ...ctx.request.body, - appId, + appId: ctx.appId, }) ctx.body = { message: `Automation ${automation._id} has been triggered.`, @@ -205,8 +196,7 @@ function prepareTestInput(input) { } exports.test = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() let automation = await db.get(ctx.params.id) await setTestFlag(automation._id) const testInput = prepareTestInput(ctx.request.body) @@ -214,7 +204,7 @@ exports.test = async function 
(ctx) { automation, { ...testInput, - appId, + appId: ctx.appId, }, { getResponses: true } ) diff --git a/packages/server/src/api/controllers/cloud.js b/packages/server/src/api/controllers/cloud.js index ea6cc9b71e..38804f4d4a 100644 --- a/packages/server/src/api/controllers/cloud.js +++ b/packages/server/src/api/controllers/cloud.js @@ -1,6 +1,5 @@ const env = require("../../environment") const { getAllApps } = require("@budibase/backend-core/db") -const CouchDB = require("../../db") const { exportDB, sendTempFile, @@ -30,7 +29,7 @@ exports.exportApps = async ctx => { if (env.SELF_HOSTED || !env.MULTI_TENANCY) { ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.") } - const apps = await getAllApps(CouchDB, { all: true }) + const apps = await getAllApps({ all: true }) const globalDBString = await exportDB(getGlobalDBName(), { filter: doc => !doc._id.startsWith(DocumentTypes.USER), }) @@ -63,7 +62,7 @@ async function hasBeenImported() { if (!env.SELF_HOSTED || env.MULTI_TENANCY) { return true } - const apps = await getAllApps(CouchDB, { all: true }) + const apps = await getAllApps({ all: true }) return apps.length !== 0 } diff --git a/packages/server/src/api/controllers/component.js b/packages/server/src/api/controllers/component.js index 06cb2cd211..2d0aaea23a 100644 --- a/packages/server/src/api/controllers/component.js +++ b/packages/server/src/api/controllers/component.js @@ -1,15 +1,14 @@ -const CouchDB = require("../../db") const { DocumentTypes } = require("../../db/utils") const { getComponentLibraryManifest } = require("../../utilities/fileSystem") +const { getAppDB } = require("@budibase/backend-core/context") exports.fetchAppComponentDefinitions = async function (ctx) { - const appId = ctx.params.appId || ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const app = await db.get(DocumentTypes.APP_METADATA) let componentManifests = await Promise.all( app.componentLibraries.map(async library => { - let manifest = await getComponentLibraryManifest(appId, library) + let manifest = await getComponentLibraryManifest(library) return { manifest, diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js index 5ab3c0a865..999f322563 100644 --- a/packages/server/src/api/controllers/datasource.js +++ b/packages/server/src/api/controllers/datasource.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const { generateDatasourceID, getDatasourceParams, @@ -11,12 +10,11 @@ const { BuildSchemaErrors, InvalidColumns } = require("../../constants") const { integrations } = require("../../integrations") const { getDatasourceAndQuery } = require("./row/utils") const { invalidateDynamicVariables } = require("../../threads/utils") +const { getAppDB } = require("@budibase/backend-core/context") exports.fetch = async function (ctx) { - const database = new CouchDB(ctx.appId) - // Get internal tables - const db = new CouchDB(ctx.appId) + const db = getAppDB() const internalTables = await db.allDocs( getTableParams(null, { include_docs: true, @@ -31,7 +29,7 @@ exports.fetch = async function (ctx) { // Get external datasources const datasources = ( - await database.allDocs( + await db.allDocs( getDatasourceParams(null, { include_docs: true, }) @@ -49,7 +47,7 @@ exports.fetch = async function (ctx) { } exports.buildSchemaFromDb = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const datasource = await db.get(ctx.params.datasourceId) const { tables, error } = await 
buildSchemaHelper(datasource) @@ -98,7 +96,7 @@ const invalidateVariables = async (existingDatasource, updatedDatasource) => { } exports.update = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const datasourceId = ctx.params.datasourceId let datasource = await db.get(datasourceId) const auth = datasource.config.auth @@ -126,7 +124,7 @@ exports.update = async function (ctx) { } exports.save = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const plus = ctx.request.body.datasource.plus const fetchSchema = ctx.request.body.fetchSchema @@ -162,7 +160,7 @@ exports.save = async function (ctx) { } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() // Delete all queries for the datasource const queries = await db.allDocs( @@ -184,7 +182,7 @@ exports.destroy = async function (ctx) { } exports.find = async function (ctx) { - const database = new CouchDB(ctx.appId) + const database = getAppDB() ctx.body = await database.get(ctx.params.datasourceId) } @@ -192,7 +190,7 @@ exports.find = async function (ctx) { exports.query = async function (ctx) { const queryJson = ctx.request.body try { - ctx.body = await getDatasourceAndQuery(ctx.appId, queryJson) + ctx.body = await getDatasourceAndQuery(queryJson) } catch (err) { ctx.throw(400, err) } diff --git a/packages/server/src/api/controllers/deploy/Deployment.js b/packages/server/src/api/controllers/deploy/Deployment.js index b398aa2e6d..65cca97d07 100644 --- a/packages/server/src/api/controllers/deploy/Deployment.js +++ b/packages/server/src/api/controllers/deploy/Deployment.js @@ -1,18 +1,14 @@ const newid = require("../../../db/newid") +const { getAppId } = require("@budibase/backend-core/context") /** * This is used to pass around information about the deployment that is occurring */ class Deployment { - constructor(appId, id = null) { - this.appId = appId + constructor(id = null) { this._id = id || newid() } - getAppId() { - return this.appId - } - setVerification(verification) { if (!verification) { return @@ -43,7 +39,7 @@ class Deployment { getJSON() { const obj = { _id: this._id, - appId: this.appId, + appId: getAppId(), status: this.status, } if (this.err) { diff --git a/packages/server/src/api/controllers/deploy/index.js b/packages/server/src/api/controllers/deploy/index.js index 76d7b75912..4186a192a4 100644 --- a/packages/server/src/api/controllers/deploy/index.js +++ b/packages/server/src/api/controllers/deploy/index.js @@ -1,12 +1,20 @@ -const CouchDB = require("../../../db") const Deployment = require("./Deployment") -const { Replication, getDeployedAppID } = require("@budibase/backend-core/db") +const { + Replication, + getProdAppID, + getDevelopmentAppID, +} = require("@budibase/backend-core/db") const { DocumentTypes, getAutomationParams } = require("../../../db/utils") const { disableAllCrons, enableCronTrigger, } = require("../../../automations/utils") const { app: appCache } = require("@budibase/backend-core/cache") +const { + getAppId, + getAppDB, + getProdAppDB, +} = require("@budibase/backend-core/context") // the max time we can wait for an invalidation to complete before considering it failed const MAX_PENDING_TIME_MS = 30 * 60000 @@ -34,9 +42,8 @@ async function checkAllDeployments(deployments) { } async function storeDeploymentHistory(deployment) { - const appId = deployment.getAppId() const deploymentJSON = deployment.getJSON() - const db = new CouchDB(appId) + const db = getAppDB() let deploymentDoc try { @@ 
-64,7 +71,7 @@ async function storeDeploymentHistory(deployment) { } async function initDeployedApp(prodAppId) { - const db = new CouchDB(prodAppId) + const db = getProdAppDB() console.log("Reading automation docs") const automations = ( await db.allDocs( @@ -88,10 +95,12 @@ async function initDeployedApp(prodAppId) { async function deployApp(deployment) { try { - const productionAppId = getDeployedAppID(deployment.appId) + const appId = getAppId() + const devAppId = getDevelopmentAppID(appId) + const productionAppId = getProdAppID(appId) const replication = new Replication({ - source: deployment.appId, + source: devAppId, target: productionAppId, }) @@ -99,7 +108,7 @@ async function deployApp(deployment) { await replication.replicate() console.log("replication complete.. replacing app meta doc") - const db = new CouchDB(productionAppId) + const db = getProdAppDB() const appDoc = await db.get(DocumentTypes.APP_METADATA) appDoc.appId = productionAppId appDoc.instance._id = productionAppId @@ -122,8 +131,7 @@ async function deployApp(deployment) { exports.fetchDeployments = async function (ctx) { try { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) const { updated, deployments } = await checkAllDeployments( deploymentDoc, @@ -140,8 +148,7 @@ exports.fetchDeployments = async function (ctx) { exports.deploymentProgress = async function (ctx) { try { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) ctx.body = deploymentDoc[ctx.params.deploymentId] } catch (err) { @@ -153,7 +160,7 @@ exports.deploymentProgress = async function (ctx) { } exports.deployApp = async function (ctx) { - let deployment = new Deployment(ctx.appId) + let deployment = new Deployment() console.log("Deployment object created") deployment.setStatus(DeploymentStatus.PENDING) console.log("Deployment object set to pending") diff --git a/packages/server/src/api/controllers/dev.js b/packages/server/src/api/controllers/dev.js index 3126454a6b..bec9478245 100644 --- a/packages/server/src/api/controllers/dev.js +++ b/packages/server/src/api/controllers/dev.js @@ -1,12 +1,12 @@ const fetch = require("node-fetch") -const CouchDB = require("../../db") const env = require("../../environment") const { checkSlashesInUrl } = require("../../utilities") const { request } = require("../../utilities/workerRequests") const { clearLock } = require("../../utilities/redis") -const { Replication } = require("@budibase/backend-core/db") +const { Replication, getProdAppID } = require("@budibase/backend-core/db") const { DocumentTypes } = require("../../db/utils") const { app: appCache } = require("@budibase/backend-core/cache") +const { getProdAppDB, getAppDB } = require("@budibase/backend-core/context") async function redirect(ctx, method, path = "global") { const { devPath } = ctx.params @@ -77,11 +77,11 @@ exports.clearLock = async ctx => { exports.revert = async ctx => { const { appId } = ctx.params - const productionAppId = appId.replace("_dev", "") + const productionAppId = getProdAppID(appId) // App must have been deployed first try { - const db = new CouchDB(productionAppId, { skip_setup: true }) + const db = getProdAppDB({ skip_setup: true }) const info = await db.info() if (info.error) throw info.error const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) @@ -103,7 +103,7 @@ exports.revert = async ctx => { await replication.rollback() // update 
appID in reverted app to be dev version again - const db = new CouchDB(appId) + const db = getAppDB() const appDoc = await db.get(DocumentTypes.APP_METADATA) appDoc.appId = appId appDoc.instance._id = appId diff --git a/packages/server/src/api/controllers/layout.js b/packages/server/src/api/controllers/layout.js index c3cae1b6a7..a92eec424a 100644 --- a/packages/server/src/api/controllers/layout.js +++ b/packages/server/src/api/controllers/layout.js @@ -2,11 +2,11 @@ const { EMPTY_LAYOUT, BASE_LAYOUT_PROP_IDS, } = require("../../constants/layouts") -const CouchDB = require("../../db") const { generateLayoutID, getScreenParams } = require("../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") exports.save = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() let layout = ctx.request.body if (!layout.props) { @@ -26,7 +26,7 @@ exports.save = async function (ctx) { } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const layoutId = ctx.params.layoutId, layoutRev = ctx.params.layoutRev diff --git a/packages/server/src/api/controllers/metadata.js b/packages/server/src/api/controllers/metadata.js index 75236650fd..e68db9b003 100644 --- a/packages/server/src/api/controllers/metadata.js +++ b/packages/server/src/api/controllers/metadata.js @@ -1,7 +1,7 @@ const { MetadataTypes } = require("../../constants") -const CouchDB = require("../../db") const { generateMetadataID } = require("../../db/utils") const { saveEntityMetadata, deleteEntityMetadata } = require("../../utilities") +const { getAppDB } = require("@budibase/backend-core/context") exports.getTypes = async ctx => { ctx.body = { @@ -14,17 +14,12 @@ exports.saveMetadata = async ctx => { if (type === MetadataTypes.AUTOMATION_TEST_HISTORY) { ctx.throw(400, "Cannot save automation history type") } - ctx.body = await saveEntityMetadata( - ctx.appId, - type, - entityId, - ctx.request.body - ) + ctx.body = await saveEntityMetadata(type, entityId, ctx.request.body) } exports.deleteMetadata = async ctx => { const { type, entityId } = ctx.params - await deleteEntityMetadata(ctx.appId, type, entityId) + await deleteEntityMetadata(type, entityId) ctx.body = { message: "Metadata deleted successfully", } @@ -32,7 +27,7 @@ exports.deleteMetadata = async ctx => { exports.getMetadata = async ctx => { const { type, entityId } = ctx.params - const db = new CouchDB(ctx.appId) + const db = getAppDB() const id = generateMetadataID(type, entityId) try { ctx.body = await db.get(id) diff --git a/packages/server/src/api/controllers/permission.js b/packages/server/src/api/controllers/permission.js index 5c42fe77ef..0e37a3e7d3 100644 --- a/packages/server/src/api/controllers/permission.js +++ b/packages/server/src/api/controllers/permission.js @@ -6,12 +6,12 @@ const { getBuiltinRoles, } = require("@budibase/backend-core/roles") const { getRoleParams } = require("../../db/utils") -const CouchDB = require("../../db") const { CURRENTLY_SUPPORTED_LEVELS, getBasePermissions, } = require("../../utilities/security") const { removeFromArray } = require("../../utilities") +const { getAppDB } = require("@budibase/backend-core/context") const PermissionUpdateType = { REMOVE: "remove", @@ -35,7 +35,7 @@ async function updatePermissionOnRole( { roleId, resourceId, level }, updateType ) { - const db = new CouchDB(appId) + const db = getAppDB() const remove = updateType === PermissionUpdateType.REMOVE const isABuiltin = isBuiltin(roleId) const dbRoleId = 
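
// A note on the ID helpers in the deploy/revert hunks above: the removed code derived the
// production ID by string surgery (appId.replace("_dev", "")), whereas the diff applies
// getDevelopmentAppID and getProdAppID to the same ID, so they are assumed to accept
// either form. Usage sketch; the wrapper function is illustrative.
const {
  Replication,
  getProdAppID,
  getDevelopmentAppID,
} = require("@budibase/backend-core/db")

function buildDeployReplication(anyAppId) {
  return new Replication({
    source: getDevelopmentAppID(anyAppId), // dev copy of the app
    target: getProdAppID(anyAppId), // deployed copy
  })
}
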
getDBRoleID(roleId) @@ -106,7 +106,7 @@ exports.fetchLevels = function (ctx) { } exports.fetch = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const roles = await getAllDBRoles(db) let permissions = {} // create an object with structure role ID -> resource ID -> level @@ -133,7 +133,7 @@ exports.fetch = async function (ctx) { exports.getResourcePerms = async function (ctx) { const resourceId = ctx.params.resourceId - const db = new CouchDB(ctx.appId) + const db = getAppDB() const body = await db.allDocs( getRoleParams(null, { include_docs: true, diff --git a/packages/server/src/api/controllers/query/import/index.ts b/packages/server/src/api/controllers/query/import/index.ts index 933d6b101c..593fb05fd3 100644 --- a/packages/server/src/api/controllers/query/import/index.ts +++ b/packages/server/src/api/controllers/query/import/index.ts @@ -1,10 +1,11 @@ -import CouchDB from "../../../../db" import { queryValidation } from "../validation" import { generateQueryID } from "../../../../db/utils" import { ImportInfo, ImportSource } from "./sources/base" import { OpenAPI2 } from "./sources/openapi2" import { Query } from "./../../../../definitions/common" import { Curl } from "./sources/curl" +// @ts-ignore +import { getAppDB } from "@budibase/backend-core/context" interface ImportResult { errorQueries: Query[] queries: Query[] @@ -33,10 +34,7 @@ export class RestImporter { return this.source.getInfo() } - importQueries = async ( - appId: string, - datasourceId: string - ): Promise => { + importQueries = async (datasourceId: string): Promise => { // constuct the queries let queries = await this.source.getQueries(datasourceId) @@ -58,7 +56,7 @@ export class RestImporter { }) // persist queries - const db = new CouchDB(appId) + const db = getAppDB() const response = await db.bulkDocs(queries) // create index to seperate queries and errors diff --git a/packages/server/src/api/controllers/query/import/tests/index.spec.js b/packages/server/src/api/controllers/query/import/tests/index.spec.js index 5a509d2258..8d074ea885 100644 --- a/packages/server/src/api/controllers/query/import/tests/index.spec.js +++ b/packages/server/src/api/controllers/query/import/tests/index.spec.js @@ -6,6 +6,7 @@ const db = jest.fn(() => { } }) jest.mock("../../../../../db", () => db) +require("@budibase/backend-core").init(require("../../../../../db")) const { RestImporter } = require("../index") @@ -77,7 +78,7 @@ describe("Rest Importer", () => { const testImportQueries = async (key, data, assertions) => { await init(data) bulkDocs.mockReturnValue([]) - const importResult = await restImporter.importQueries("appId", "datasourceId") + const importResult = await restImporter.importQueries("datasourceId") expect(importResult.errorQueries.length).toBe(0) expect(importResult.queries.length).toBe(assertions[key].count) expect(bulkDocs).toHaveBeenCalledTimes(1) diff --git a/packages/server/src/api/controllers/query/index.js b/packages/server/src/api/controllers/query/index.js index 9cf7612e8a..7a179bab35 100644 --- a/packages/server/src/api/controllers/query/index.js +++ b/packages/server/src/api/controllers/query/index.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const { generateQueryID, getQueryParams, @@ -10,6 +9,7 @@ const { save: saveDatasource } = require("../datasource") const { RestImporter } = require("./import") const { invalidateDynamicVariables } = require("../../../threads/utils") const environment = require("../../../environment") +const { getAppDB } = 
require("@budibase/backend-core/context") const Runner = new Thread(ThreadType.QUERY, { timeoutMs: environment.QUERY_THREAD_TIMEOUT || 10000, @@ -28,7 +28,7 @@ function enrichQueries(input) { } exports.fetch = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const body = await db.allDocs( getQueryParams(null, { @@ -69,7 +69,7 @@ exports.import = async ctx => { datasourceId = body.datasourceId } - const importResult = await importer.importQueries(ctx.appId, datasourceId) + const importResult = await importer.importQueries(datasourceId) ctx.body = { ...importResult, @@ -79,7 +79,7 @@ exports.import = async ctx => { } exports.save = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const query = ctx.request.body if (!query._id) { @@ -94,7 +94,7 @@ exports.save = async function (ctx) { } exports.find = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const query = enrichQueries(await db.get(ctx.params.queryId)) // remove properties that could be dangerous in real app if (isProdAppID(ctx.appId)) { @@ -105,7 +105,7 @@ exports.find = async function (ctx) { } exports.preview = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const datasource = await db.get(ctx.request.body.datasourceId) // preview may not have a queryId as it hasn't been saved, but if it does @@ -136,7 +136,7 @@ exports.preview = async function (ctx) { } async function execute(ctx, opts = { rowsOnly: false }) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const query = await db.get(ctx.params.queryId) const datasource = await db.get(query.datasourceId) @@ -181,7 +181,8 @@ exports.executeV2 = async function (ctx) { return execute(ctx, { rowsOnly: false }) } -const removeDynamicVariables = async (db, queryId) => { +const removeDynamicVariables = async queryId => { + const db = getAppDB() const query = await db.get(queryId) const datasource = await db.get(query.datasourceId) const dynamicVariables = datasource.config.dynamicVariables @@ -202,8 +203,8 @@ const removeDynamicVariables = async (db, queryId) => { } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) - await removeDynamicVariables(db, ctx.params.queryId) + const db = getAppDB() + await removeDynamicVariables(ctx.params.queryId) await db.remove(ctx.params.queryId, ctx.params.revId) ctx.message = `Query deleted.` ctx.status = 200 diff --git a/packages/server/src/api/controllers/role.js b/packages/server/src/api/controllers/role.js index b79907031d..11b4b9a520 100644 --- a/packages/server/src/api/controllers/role.js +++ b/packages/server/src/api/controllers/role.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const { Role, getRole, @@ -10,6 +9,7 @@ const { getUserMetadataParams, InternalTables, } = require("../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") const UpdateRolesOptions = { CREATED: "created", @@ -40,15 +40,15 @@ async function updateRolesOnUserTable(db, roleId, updateOption) { } exports.fetch = async function (ctx) { - ctx.body = await getAllRoles(ctx.appId) + ctx.body = await getAllRoles() } exports.find = async function (ctx) { - ctx.body = await getRole(ctx.appId, ctx.params.roleId) + ctx.body = await getRole(ctx.params.roleId) } exports.save = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() let { _id, name, inherits, permissionId } = ctx.request.body if (!_id) { _id = generateRoleID() @@ -69,7 +69,7 @@ exports.save 
= async function (ctx) { } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const roleId = ctx.params.roleId if (isBuiltin(roleId)) { ctx.throw(400, "Cannot delete builtin role.") diff --git a/packages/server/src/api/controllers/routing.js b/packages/server/src/api/controllers/routing.js index d45d33ed07..d6ba9d6ac2 100644 --- a/packages/server/src/api/controllers/routing.js +++ b/packages/server/src/api/controllers/routing.js @@ -39,12 +39,11 @@ Routing.prototype.addScreenId = function (fullpath, roleId, screenId) { /** * Gets the full routing structure by querying the routing view and processing the result into the tree. - * @param {string} appId The application to produce the routing structure for. * @returns {Promise} The routing structure, this is the full structure designed for use in the builder, * if the client routing is required then the updateRoutingStructureForUserRole should be used. */ -async function getRoutingStructure(appId) { - const screenRoutes = await getRoutingInfo(appId) +async function getRoutingStructure() { + const screenRoutes = await getRoutingInfo() const routing = new Routing() for (let screenRoute of screenRoutes) { @@ -57,13 +56,13 @@ async function getRoutingStructure(appId) { } exports.fetch = async ctx => { - ctx.body = await getRoutingStructure(ctx.appId) + ctx.body = await getRoutingStructure() } exports.clientFetch = async ctx => { - const routing = await getRoutingStructure(ctx.appId) + const routing = await getRoutingStructure() let roleId = ctx.user.role._id - const roleIds = await getUserRoleHierarchy(ctx.appId, roleId) + const roleIds = await getUserRoleHierarchy(roleId) for (let topLevel of Object.values(routing.routes)) { for (let subpathKey of Object.keys(topLevel.subpaths)) { let found = false diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 0bffd134c1..5aa486546b 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -19,6 +19,19 @@ import { isRowId, convertRowId, } from "../../../integrations/utils" +import { getDatasourceAndQuery } from "./utils" +import { + DataSourceOperation, + FieldTypes, + RelationshipTypes, +} from "../../../constants" +import { breakExternalTableId, isSQL } from "../../../integrations/utils" +import { processObjectSync } from "@budibase/string-templates" +// @ts-ignore +import { cloneDeep } from "lodash/fp" +import { processFormulas } from "../../../utilities/rowProcessor/utils" +// @ts-ignore +import { getAppDB } from "@budibase/backend-core/context" interface ManyRelationship { tableId?: string @@ -38,18 +51,6 @@ interface RunConfig { } module External { - const { getDatasourceAndQuery } = require("./utils") - const { - DataSourceOperation, - FieldTypes, - RelationshipTypes, - } = require("../../../constants") - const { breakExternalTableId, isSQL } = require("../../../integrations/utils") - const { processObjectSync } = require("@budibase/string-templates") - const { cloneDeep } = require("lodash/fp") - const CouchDB = require("../../../db") - const { processFormulas } = require("../../../utilities/rowProcessor/utils") - function buildFilters( id: string | undefined, filters: SearchFilters, @@ -210,19 +211,12 @@ module External { } class ExternalRequest { - private readonly appId: string private operation: Operation private tableId: string private datasource: Datasource private tables: { 
[key: string]: Table } = {} - constructor( - appId: string, - operation: Operation, - tableId: string, - datasource: Datasource - ) { - this.appId = appId + constructor(operation: Operation, tableId: string, datasource: Datasource) { this.operation = operation this.tableId = tableId this.datasource = datasource @@ -231,12 +225,14 @@ module External { } } - getTable(tableId: string | undefined): Table { + getTable(tableId: string | undefined): Table | undefined { if (!tableId) { throw "Table ID is unknown, cannot find table" } const { tableName } = breakExternalTableId(tableId) - return this.tables[tableName] + if (tableName) { + return this.tables[tableName] + } } inputProcessing(row: Row | undefined, table: Table) { @@ -272,9 +268,11 @@ module External { newRow[key] = row[key] continue } - const { tableName: linkTableName } = breakExternalTableId(field.tableId) + const { tableName: linkTableName } = breakExternalTableId( + field?.tableId + ) // table has to exist for many to many - if (!this.tables[linkTableName]) { + if (!linkTableName || !this.tables[linkTableName]) { continue } const linkTable = this.tables[linkTableName] @@ -422,7 +420,7 @@ module External { } const { tableName: linkTableName } = breakExternalTableId(field.tableId) // no table to link to, this is not a valid relationships - if (!this.tables[linkTableName]) { + if (!linkTableName || !this.tables[linkTableName]) { continue } const linkTable = this.tables[linkTableName] @@ -460,6 +458,9 @@ module External { async lookupRelations(tableId: string, row: Row) { const related: { [key: string]: any } = {} const { tableName } = breakExternalTableId(tableId) + if (!tableName) { + return related + } const table = this.tables[tableName] // @ts-ignore const primaryKey = table.primary[0] @@ -484,7 +485,7 @@ module External { if (!lookupField || !row[lookupField]) { continue } - const response = await getDatasourceAndQuery(this.appId, { + const response = await getDatasourceAndQuery({ endpoint: getEndpoint(tableId, DataSourceOperation.READ), filters: { equal: { @@ -515,28 +516,30 @@ module External { row: Row, relationships: ManyRelationship[] ) { - const { appId } = this // if we're creating (in a through table) need to wipe the existing ones first const promises = [] const related = await this.lookupRelations(mainTableId, row) for (let relationship of relationships) { const { key, tableId, isUpdate, id, ...rest } = relationship - const body = processObjectSync(rest, row) + const body: { [key: string]: any } = processObjectSync(rest, row, {}) const linkTable = this.getTable(tableId) // @ts-ignore - const linkPrimary = linkTable.primary[0] - const rows = related[key]?.rows || [] + const linkPrimary = linkTable?.primary[0] + if (!linkTable || !linkPrimary) { + return + } + const rows = related[key].rows || [] const found = rows.find( (row: { [key: string]: any }) => row[linkPrimary] === relationship.id || - row[linkPrimary] === body[linkPrimary] + row[linkPrimary] === body?.[linkPrimary] ) const operation = isUpdate ? 
DataSourceOperation.UPDATE : DataSourceOperation.CREATE if (!found) { promises.push( - getDatasourceAndQuery(appId, { + getDatasourceAndQuery({ endpoint: getEndpoint(tableId, operation), // if we're doing many relationships then we're writing, only one response body, @@ -552,9 +555,12 @@ module External { for (let [colName, { isMany, rows, tableId }] of Object.entries( related )) { - const table: Table = this.getTable(tableId) + const table: Table | undefined = this.getTable(tableId) // if its not the foreign key skip it, nothing to do - if (table.primary && table.primary.indexOf(colName) !== -1) { + if ( + !table || + (table.primary && table.primary.indexOf(colName) !== -1) + ) { continue } for (let row of rows) { @@ -566,7 +572,7 @@ module External { : DataSourceOperation.UPDATE const body = isMany ? null : { [colName]: null } promises.push( - getDatasourceAndQuery(this.appId, { + getDatasourceAndQuery({ endpoint: getEndpoint(tableId, op), body, filters, @@ -605,20 +611,25 @@ module External { continue } const { tableName: linkTableName } = breakExternalTableId(field.tableId) - const linkTable = this.tables[linkTableName] - if (linkTable) { - const linkedFields = extractRealFields(linkTable, fields) - fields = fields.concat(linkedFields) + if (linkTableName) { + const linkTable = this.tables[linkTableName] + if (linkTable) { + const linkedFields = extractRealFields(linkTable, fields) + fields = fields.concat(linkedFields) + } } } return fields } async run(config: RunConfig) { - const { appId, operation, tableId } = this + const { operation, tableId } = this let { datasourceId, tableName } = breakExternalTableId(tableId) + if (!tableName) { + throw "Unable to run without a table name" + } if (!this.datasource) { - const db = new CouchDB(appId) + const db = getAppDB() this.datasource = await db.get(datasourceId) if (!this.datasource || !this.datasource.entities) { throw "No tables found, fetch tables before query." 
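
// The ExternalRequest changes double as a strictness pass: breakExternalTableId may not
// yield a table name and getTable can now return undefined, so callers guard before
// dereferencing. Stripped-down sketch of that guard; the function name is illustrative.
const { breakExternalTableId } = require("../../../integrations/utils")

function resolveLinkTable(tables, tableId) {
  const { tableName } = breakExternalTableId(tableId)
  if (!tableName || !tables[tableName]) {
    // the relationship points at a table we don't know about, skip it
    return undefined
  }
  return tables[tableName]
}
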
@@ -670,7 +681,7 @@ module External { }, } // can't really use response right now - const response = await getDatasourceAndQuery(appId, json) + const response = await getDatasourceAndQuery(json) // handle many to many relationships now if we know the ID (could be auto increment) if ( operation !== DataSourceOperation.READ && diff --git a/packages/server/src/api/controllers/row/external.js b/packages/server/src/api/controllers/row/external.js index b8620f7bc3..0bd57d256f 100644 --- a/packages/server/src/api/controllers/row/external.js +++ b/packages/server/src/api/controllers/row/external.js @@ -9,9 +9,9 @@ const { breakRowIdField, } = require("../../../integrations/utils") const ExternalRequest = require("./ExternalRequest") -const CouchDB = require("../../../db") +const { getAppDB } = require("@budibase/backend-core/context") -async function handleRequest(appId, operation, tableId, opts = {}) { +async function handleRequest(operation, tableId, opts = {}) { // make sure the filters are cleaned up, no empty strings for equals, fuzzy or string if (opts && opts.filters) { for (let filterField of NoEmptyFilterStrings) { @@ -25,9 +25,7 @@ async function handleRequest(appId, operation, tableId, opts = {}) { } } } - return new ExternalRequest(appId, operation, tableId, opts.datasource).run( - opts - ) + return new ExternalRequest(operation, tableId, opts.datasource).run(opts) } exports.handleRequest = handleRequest @@ -181,7 +179,7 @@ exports.fetchEnrichedRow = async ctx => { const id = ctx.params.rowId const tableId = ctx.params.tableId const { datasourceId, tableName } = breakExternalTableId(tableId) - const db = new CouchDB(appId) + const db = getAppDB() const datasource = await db.get(datasourceId) if (!datasource || !datasource.entities) { ctx.throw(400, "Datasource has not been configured for plus API.") diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.js index 0e9c2e651d..e1ea32e557 100644 --- a/packages/server/src/api/controllers/row/internal.js +++ b/packages/server/src/api/controllers/row/internal.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const linkRows = require("../../../db/linkedRows") const { generateRowID, @@ -25,6 +24,7 @@ const { getFromMemoryDoc, } = require("../view/utils") const { cloneDeep } = require("lodash/fp") +const { getAppDB } = require("@budibase/backend-core/context") const { finaliseRow, updateRelatedFormula } = require("./staticFormula") const CALCULATION_TYPES = { @@ -76,8 +76,7 @@ async function getRawTableData(ctx, db, tableId) { } exports.patch = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const inputs = ctx.request.body const tableId = inputs.tableId const isUserTable = tableId === InternalTables.USER_METADATA @@ -116,14 +115,13 @@ exports.patch = async ctx => { // returned row is cleaned and prepared for writing to DB row = await linkRows.updateLinks({ - appId, eventType: linkRows.EventType.ROW_UPDATE, row, tableId: row.tableId, table, }) // check if any attachments removed - await cleanupAttachments(appId, table, { oldRow, row }) + await cleanupAttachments(table, { oldRow, row }) if (isUserTable) { // the row has been updated, need to put it into the ctx @@ -132,15 +130,14 @@ exports.patch = async ctx => { return { row: ctx.body, table } } - return finaliseRow(ctx.appId, table, row, { + return finaliseRow(table, row, { oldTable: dbTable, updateFormula: true, }) } exports.save = async function (ctx) { - const appId = 
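
// handleRequest loses its appId argument as well, so a row operation against an external
// datasource is fully described by (operation, tableId, opts). Call sketch; the filter
// contents and wrapper are illustrative.
const { handleRequest } = require("./external")
const { DataSourceOperation } = require("../../../constants")

async function readExternalRows(tableId) {
  return handleRequest(DataSourceOperation.READ, tableId, {
    filters: { equal: { id: 1 } },
  })
}
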
ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() let inputs = ctx.request.body inputs.tableId = ctx.params.tableId @@ -162,21 +159,19 @@ exports.save = async function (ctx) { // make sure link rows are up to date row = await linkRows.updateLinks({ - appId, eventType: linkRows.EventType.ROW_SAVE, row, tableId: row.tableId, table, }) - return finaliseRow(ctx.appId, table, row, { + return finaliseRow(table, row, { oldTable: dbTable, updateFormula: true, }) } exports.fetchView = async ctx => { - const appId = ctx.appId const viewName = ctx.params.viewName // if this is a table view being looked for just transfer to that @@ -185,7 +180,7 @@ exports.fetchView = async ctx => { return exports.fetch(ctx) } - const db = new CouchDB(appId) + const db = getAppDB() const { calculation, group, field } = ctx.query const viewInfo = await getView(db, viewName) let response @@ -212,7 +207,7 @@ exports.fetchView = async ctx => { schema: {}, } } - rows = await outputProcessing(ctx, table, response.rows) + rows = await outputProcessing(table, response.rows) } if (calculation === CALCULATION_TYPES.STATS) { @@ -239,27 +234,24 @@ exports.fetchView = async ctx => { } exports.fetch = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const tableId = ctx.params.tableId let table = await db.get(tableId) let rows = await getRawTableData(ctx, db, tableId) - return outputProcessing(ctx, table, rows) + return outputProcessing(table, rows) } exports.find = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const table = await db.get(ctx.params.tableId) - let row = await findRow(ctx, db, ctx.params.tableId, ctx.params.rowId) - row = await outputProcessing(ctx, table, row) + let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId) + row = await outputProcessing(table, row) return row } exports.destroy = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const { _id, _rev } = ctx.request.body let row = await db.get(_id) @@ -268,18 +260,17 @@ exports.destroy = async function (ctx) { } const table = await db.get(row.tableId) // update the row to include full relationships before deleting them - row = await outputProcessing(ctx, table, row, { squash: false }) + row = await outputProcessing(table, row, { squash: false }) // now remove the relationships await linkRows.updateLinks({ - appId, eventType: linkRows.EventType.ROW_DELETE, row, tableId: row.tableId, }) // remove any attachments that were on the row from object storage - await cleanupAttachments(appId, table, { row }) + await cleanupAttachments(table, { row }) // remove any static formula - await updateRelatedFormula(appId, table, row) + await updateRelatedFormula(table, row) let response if (ctx.params.tableId === InternalTables.USER_METADATA) { @@ -295,20 +286,18 @@ exports.destroy = async function (ctx) { } exports.bulkDestroy = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const tableId = ctx.params.tableId const table = await db.get(tableId) let { rows } = ctx.request.body // before carrying out any updates, make sure the rows are ready to be returned // they need to be the full rows (including previous relationships) for automations - rows = await outputProcessing(ctx, table, rows, { squash: false }) + rows = await outputProcessing(table, rows, { squash: false }) // remove the relationships first let updates = rows.map(row => 
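
// Row lifecycle helpers shed appId too: linkRows.updateLinks is called with only the data
// it acts on. The call shape below mirrors the save handler above; the wrapper is
// illustrative.
const linkRows = require("../../../db/linkedRows")

async function syncRowLinks(row, table) {
  return linkRows.updateLinks({
    eventType: linkRows.EventType.ROW_SAVE,
    row,
    tableId: row.tableId,
    table,
  })
}
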
linkRows.updateLinks({ - appId, eventType: linkRows.EventType.ROW_DELETE, row, tableId: row.tableId, @@ -327,8 +316,8 @@ exports.bulkDestroy = async ctx => { await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true }))) } // remove any attachments that were on the rows from object storage - await cleanupAttachments(appId, table, { rows }) - await updateRelatedFormula(appId, table, rows) + await cleanupAttachments(table, { rows }) + await updateRelatedFormula(table, rows) await Promise.all(updates) return { response: { ok: true }, rows } } @@ -339,28 +328,27 @@ exports.search = async ctx => { return { rows: await exports.fetch(ctx) } } - const appId = ctx.appId const { tableId } = ctx.params - const db = new CouchDB(appId) + const db = getAppDB() const { paginate, query, ...params } = ctx.request.body params.version = ctx.version params.tableId = tableId let response if (paginate) { - response = await paginatedSearch(appId, query, params) + response = await paginatedSearch(query, params) } else { - response = await fullSearch(appId, query, params) + response = await fullSearch(query, params) } // Enrich search results with relationships if (response.rows && response.rows.length) { // enrich with global users if from users table if (tableId === InternalTables.USER_METADATA) { - response.rows = await getGlobalUsersFromMetadata(appId, response.rows) + response.rows = await getGlobalUsersFromMetadata(response.rows) } const table = await db.get(tableId) - response.rows = await outputProcessing(ctx, table, response.rows) + response.rows = await outputProcessing(table, response.rows) } return response @@ -368,25 +356,22 @@ exports.search = async ctx => { exports.validate = async ctx => { return validate({ - appId: ctx.appId, tableId: ctx.params.tableId, row: ctx.request.body, }) } exports.fetchEnrichedRow = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const tableId = ctx.params.tableId const rowId = ctx.params.rowId // need table to work out where links go in row let [table, row] = await Promise.all([ db.get(tableId), - findRow(ctx, db, tableId, rowId), + findRow(ctx, tableId, rowId), ]) // get the link docs const linkVals = await linkRows.getLinkDocuments({ - appId, tableId, rowId, }) @@ -413,7 +398,7 @@ exports.fetchEnrichedRow = async ctx => { for (let [tableId, rows] of Object.entries(groups)) { // need to include the IDs in these rows for any links they may have linkedRows = linkedRows.concat( - await outputProcessing(ctx, tables[tableId], rows) + await outputProcessing(tables[tableId], rows) ) } diff --git a/packages/server/src/api/controllers/row/internalSearch.js b/packages/server/src/api/controllers/row/internalSearch.js index a185386b7a..611b3272f3 100644 --- a/packages/server/src/api/controllers/row/internalSearch.js +++ b/packages/server/src/api/controllers/row/internalSearch.js @@ -1,14 +1,14 @@ const { SearchIndexes } = require("../../../db/utils") const fetch = require("node-fetch") const { getCouchUrl } = require("@budibase/backend-core/db") +const { getAppId } = require("@budibase/backend-core/context") /** * Class to build lucene query URLs. * Optionally takes a base lucene query object. 
*/ class QueryBuilder { - constructor(appId, base) { - this.appId = appId + constructor(base) { this.query = { string: {}, fuzzy: {}, @@ -241,7 +241,8 @@ class QueryBuilder { } async run() { - const url = `${getCouchUrl()}/${this.appId}/_design/database/_search/${ + const appId = getAppId() + const url = `${getCouchUrl()}/${appId}/_design/database/_search/${ SearchIndexes.ROWS }` const body = this.buildSearchBody() @@ -278,7 +279,6 @@ const runQuery = async (url, body) => { * Gets round the fixed limit of 200 results from a query by fetching as many * pages as required and concatenating the results. This recursively operates * until enough results have been found. - * @param appId {string} The app ID to search * @param query {object} The JSON query structure * @param params {object} The search params including: * tableId {string} The table ID to search @@ -291,7 +291,7 @@ const runQuery = async (url, body) => { * rows {array|null} Current results in the recursive search * @returns {Promise<*[]|*>} */ -const recursiveSearch = async (appId, query, params) => { +const recursiveSearch = async (query, params) => { const bookmark = params.bookmark const rows = params.rows || [] if (rows.length >= params.limit) { @@ -301,7 +301,7 @@ const recursiveSearch = async (appId, query, params) => { if (rows.length > params.limit - 200) { pageSize = params.limit - rows.length } - const page = await new QueryBuilder(appId, query) + const page = await new QueryBuilder(query) .setVersion(params.version) .setTable(params.tableId) .setBookmark(bookmark) @@ -321,14 +321,13 @@ const recursiveSearch = async (appId, query, params) => { bookmark: page.bookmark, rows: [...rows, ...page.rows], } - return await recursiveSearch(appId, query, newParams) + return await recursiveSearch(query, newParams) } /** * Performs a paginated search. A bookmark will be returned to allow the next * page to be fetched. There is a max limit off 200 results per page in a * paginated search. - * @param appId {string} The app ID to search * @param query {object} The JSON query structure * @param params {object} The search params including: * tableId {string} The table ID to search @@ -340,13 +339,13 @@ const recursiveSearch = async (appId, query, params) => { * bookmark {string} The bookmark to resume from * @returns {Promise<{hasNextPage: boolean, rows: *[]}>} */ -exports.paginatedSearch = async (appId, query, params) => { +exports.paginatedSearch = async (query, params) => { let limit = params.limit if (limit == null || isNaN(limit) || limit < 0) { limit = 50 } limit = Math.min(limit, 200) - const search = new QueryBuilder(appId, query) + const search = new QueryBuilder(query) .setVersion(params.version) .setTable(params.tableId) .setSort(params.sort) @@ -375,7 +374,6 @@ exports.paginatedSearch = async (appId, query, params) => { * desired amount of results. There is a limit of 1000 results to avoid * heavy performance hits, and to avoid client components breaking from * handling too much data. 
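
// The lucene search path follows suit: QueryBuilder drops its appId constructor argument
// and run() rebuilds the search URL from getAppId(). Usage sketch for the exported
// helpers; the query contents and wrapper are illustrative.
const { paginatedSearch } = require("./internalSearch")

async function searchRowsByName(tableId, name) {
  // query and params follow the shapes described in the JSDoc above; no appId is passed
  return paginatedSearch({ string: { name } }, { tableId, limit: 50 })
}
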
- * @param appId {string} The app ID to search * @param query {object} The JSON query structure * @param params {object} The search params including: * tableId {string} The table ID to search @@ -386,12 +384,12 @@ exports.paginatedSearch = async (appId, query, params) => { * limit {number} The desired number of results * @returns {Promise<{rows: *}>} */ -exports.fullSearch = async (appId, query, params) => { +exports.fullSearch = async (query, params) => { let limit = params.limit if (limit == null || isNaN(limit) || limit < 0) { limit = 1000 } params.limit = Math.min(limit, 1000) - const rows = await recursiveSearch(appId, query, params) + const rows = await recursiveSearch(query, params) return { rows } } diff --git a/packages/server/src/api/controllers/row/staticFormula.js b/packages/server/src/api/controllers/row/staticFormula.js index fc0edd1cb4..bc62c08198 100644 --- a/packages/server/src/api/controllers/row/staticFormula.js +++ b/packages/server/src/api/controllers/row/staticFormula.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const { getRowParams } = require("../../../db/utils") const { outputProcessing, @@ -8,6 +7,7 @@ const { const { FieldTypes, FormulaTypes } = require("../../../constants") const { isEqual } = require("lodash") const { cloneDeep } = require("lodash/fp") +const { getAppDB } = require("@budibase/backend-core/context") /** * This function runs through a list of enriched rows, looks at the rows which @@ -15,8 +15,8 @@ const { cloneDeep } = require("lodash/fp") * updated. * NOTE: this will only for affect static formulas. */ -exports.updateRelatedFormula = async (appId, table, enrichedRows) => { - const db = new CouchDB(appId) +exports.updateRelatedFormula = async (table, enrichedRows) => { + const db = getAppDB() // no formula to update, we're done if (!table.relatedFormula) { return @@ -57,7 +57,7 @@ exports.updateRelatedFormula = async (appId, table, enrichedRows) => { // re-enrich rows for all the related, don't update the related formula for them promises = promises.concat( relatedRows[tableId].map(related => - exports.finaliseRow(appId, relatedTable, related, { + exports.finaliseRow(relatedTable, related, { updateFormula: false, }) ) @@ -69,8 +69,8 @@ exports.updateRelatedFormula = async (appId, table, enrichedRows) => { await Promise.all(promises) } -exports.updateAllFormulasInTable = async (appId, table) => { - const db = new CouchDB(appId) +exports.updateAllFormulasInTable = async table => { + const db = getAppDB() // start by getting the raw rows (which will be written back to DB after update) let rows = ( await db.allDocs( @@ -81,7 +81,7 @@ exports.updateAllFormulasInTable = async (appId, table) => { ).rows.map(row => row.doc) // now enrich the rows, note the clone so that we have the base state of the // rows so that we don't write any of the enriched information back - let enrichedRows = await outputProcessing({ appId }, table, cloneDeep(rows), { + let enrichedRows = await outputProcessing(table, cloneDeep(rows), { squash: false, }) const updatedRows = [] @@ -109,15 +109,14 @@ exports.updateAllFormulasInTable = async (appId, table) => { * expects the row to be totally enriched/contain all relationships. 
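
// outputProcessing, used throughout the row and formula hunks, now enriches rows from
// (table, rows, opts) with no ctx or appId. Sketch of a call; it assumes outputProcessing
// is exported from the same rowProcessor module as cleanupAttachments, and squash: false
// is the option these hunks use when full relationship docs are needed.
const { outputProcessing } = require("../../../utilities/rowProcessor")

async function enrichRowsForAutomation(table, rows) {
  // keep full relationship documents instead of squashing them for the client
  return outputProcessing(table, rows, { squash: false })
}
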
*/ exports.finaliseRow = async ( - appId, table, row, { oldTable, updateFormula } = { updateFormula: true } ) => { - const db = new CouchDB(appId) + const db = getAppDB() row.type = "row" // process the row before return, to include relationships - let enrichedRow = await outputProcessing({ appId }, table, cloneDeep(row), { + let enrichedRow = await outputProcessing(table, cloneDeep(row), { squash: false, }) // use enriched row to generate formulas for saving, specifically only use as context @@ -151,7 +150,7 @@ exports.finaliseRow = async ( enrichedRow = await processFormulas(table, enrichedRow, { dynamic: false }) // this updates the related formulas in other rows based on the relations to this row if (updateFormula) { - await exports.updateRelatedFormula(appId, table, enrichedRow) + await exports.updateRelatedFormula(table, enrichedRow) } return { row: enrichedRow, table } } diff --git a/packages/server/src/api/controllers/row/utils.js b/packages/server/src/api/controllers/row/utils.js index 51bc03eba4..4235e70127 100644 --- a/packages/server/src/api/controllers/row/utils.js +++ b/packages/server/src/api/controllers/row/utils.js @@ -1,11 +1,11 @@ const validateJs = require("validate.js") const { cloneDeep } = require("lodash/fp") -const CouchDB = require("../../../db") const { InternalTables } = require("../../../db/utils") const userController = require("../user") const { FieldTypes } = require("../../../constants") const { processStringSync } = require("@budibase/string-templates") const { makeExternalQuery } = require("../../../integrations/base/utils") +const { getAppDB } = require("@budibase/backend-core/context") validateJs.extend(validateJs.validators.datetime, { parse: function (value) { @@ -17,14 +17,15 @@ validateJs.extend(validateJs.validators.datetime, { }, }) -exports.getDatasourceAndQuery = async (appId, json) => { +exports.getDatasourceAndQuery = async json => { const datasourceId = json.endpoint.datasourceId - const db = new CouchDB(appId) + const db = getAppDB() const datasource = await db.get(datasourceId) return makeExternalQuery(datasource, json) } -exports.findRow = async (ctx, db, tableId, rowId) => { +exports.findRow = async (ctx, tableId, rowId) => { + const db = getAppDB() let row // TODO remove special user case in future if (tableId === InternalTables.USER_METADATA) { @@ -42,9 +43,9 @@ exports.findRow = async (ctx, db, tableId, rowId) => { return row } -exports.validate = async ({ appId, tableId, row, table }) => { +exports.validate = async ({ tableId, row, table }) => { if (!table) { - const db = new CouchDB(appId) + const db = getAppDB() table = await db.get(tableId) } const errors = {} diff --git a/packages/server/src/api/controllers/screen.js b/packages/server/src/api/controllers/screen.js index 5e0eeb5176..e166ab3eb8 100644 --- a/packages/server/src/api/controllers/screen.js +++ b/packages/server/src/api/controllers/screen.js @@ -1,10 +1,9 @@ -const CouchDB = require("../../db") const { getScreenParams, generateScreenID } = require("../../db/utils") const { AccessController } = require("@budibase/backend-core/roles") +const { getAppDB } = require("@budibase/backend-core/context") exports.fetch = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const screens = ( await db.allDocs( @@ -14,15 +13,14 @@ exports.fetch = async ctx => { ) ).rows.map(element => element.doc) - ctx.body = await new AccessController(appId).checkScreensAccess( + ctx.body = await new AccessController().checkScreensAccess( screens, 
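
// finaliseRow keeps its options bag but loses the leading appId, matching how the patch
// and save handlers call it. Call-shape sketch; the wrapper is illustrative.
const { finaliseRow } = require("./staticFormula")

async function persistRow(table, dbTable, row) {
  // dbTable is the previously stored version of the table, passed through as oldTable
  return finaliseRow(table, row, { oldTable: dbTable, updateFormula: true })
}
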
ctx.user.role._id ) } exports.save = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() let screen = ctx.request.body if (!screen._id) { @@ -39,7 +37,7 @@ exports.save = async ctx => { } exports.destroy = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() await db.remove(ctx.params.screenId, ctx.params.screenRev) ctx.body = { message: "Screen deleted successfully", diff --git a/packages/server/src/api/controllers/static/index.js b/packages/server/src/api/controllers/static/index.js index 11bb14e282..cafe999150 100644 --- a/packages/server/src/api/controllers/static/index.js +++ b/packages/server/src/api/controllers/static/index.js @@ -6,7 +6,6 @@ const uuid = require("uuid") const { ObjectStoreBuckets } = require("../../../constants") const { processString } = require("@budibase/string-templates") const { getAllApps } = require("@budibase/backend-core/db") -const CouchDB = require("../../../db") const { loadHandlebarsFile, NODE_MODULES_PATH, @@ -17,6 +16,7 @@ const { clientLibraryPath } = require("../../../utilities") const { upload } = require("../../../utilities/fileSystem") const { attachmentsRelativeURL } = require("../../../utilities") const { DocumentTypes } = require("../../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") const AWS = require("aws-sdk") const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1" @@ -44,7 +44,7 @@ async function getAppIdFromUrl(ctx) { let possibleAppUrl = `/${encodeURI(ctx.params.appId).toLowerCase()}` // search prod apps for a url that matches, exclude dev where id is always used - const apps = await getAllApps(CouchDB, { dev: false }) + const apps = await getAllApps({ dev: false }) const app = apps.filter( a => a.url && a.url.toLowerCase() === possibleAppUrl )[0] @@ -85,7 +85,7 @@ exports.uploadFile = async function (ctx) { exports.serveApp = async function (ctx) { let appId = await getAppIdFromUrl(ctx) const App = require("./templates/BudibaseApp.svelte").default - const db = new CouchDB(appId, { skip_setup: true }) + const db = getAppDB({ skip_setup: true }) const appInfo = await db.get(DocumentTypes.APP_METADATA) const { head, html, css } = App.render({ @@ -111,7 +111,7 @@ exports.serveClientLibrary = async function (ctx) { } exports.getSignedUploadURL = async function (ctx) { - const database = new CouchDB(ctx.appId) + const database = getAppDB() // Ensure datasource is valid let datasource diff --git a/packages/server/src/api/controllers/table/bulkFormula.js b/packages/server/src/api/controllers/table/bulkFormula.js index 1866d8e650..27f62415c9 100644 --- a/packages/server/src/api/controllers/table/bulkFormula.js +++ b/packages/server/src/api/controllers/table/bulkFormula.js @@ -1,10 +1,10 @@ -const CouchDB = require("../../../db") const { FieldTypes, FormulaTypes } = require("../../../constants") const { getAllInternalTables, clearColumns } = require("./utils") const { doesContainStrings } = require("@budibase/string-templates") const { cloneDeep } = require("lodash/fp") const { isEqual, uniq } = require("lodash") const { updateAllFormulasInTable } = require("../row/staticFormula") +const { getAppDB } = require("@budibase/backend-core/context") function isStaticFormula(column) { return ( @@ -37,14 +37,9 @@ function getFormulaThatUseColumn(table, columnNames) { * This functions checks for when a related table, column or related column is deleted, if any * tables need to have the formula column removed. 
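
// getAllApps is another call that stops receiving the CouchDB constructor: backend-core is
// initialised with it once (the jest spec above calls require("@budibase/backend-core").init)
// and later calls only pass options. Sketch of the deployed-app URL lookup used by
// serveApp; the function name is illustrative.
const { getAllApps } = require("@budibase/backend-core/db")

async function findDeployedAppByUrl(url) {
  const apps = await getAllApps({ dev: false }) // prod apps only, dev apps are addressed by ID
  return apps.find(app => app.url && app.url.toLowerCase() === url.toLowerCase())
}
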
*/ -async function checkIfFormulaNeedsCleared( - appId, - table, - { oldTable, deletion } -) { - const db = new CouchDB(appId) +async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) { // start by retrieving all tables, remove the current table from the list - const tables = (await getAllInternalTables(appId)).filter( + const tables = (await getAllInternalTables()).filter( tbl => tbl._id !== table._id ) const schemaToUse = oldTable ? oldTable.schema : table.schema @@ -60,7 +55,7 @@ async function checkIfFormulaNeedsCleared( } const columnsToDelete = getFormulaThatUseColumn(tableToUse, removed.name) if (columnsToDelete.length > 0) { - await clearColumns(db, table, columnsToDelete) + await clearColumns(table, columnsToDelete) } // need a special case, where a column has been removed from this table, but was used // in a different, related tables formula @@ -85,7 +80,7 @@ async function checkIfFormulaNeedsCleared( ) } if (relatedFormulaToRemove.length > 0) { - await clearColumns(db, relatedTable, uniq(relatedFormulaToRemove)) + await clearColumns(relatedTable, uniq(relatedFormulaToRemove)) } } } @@ -99,13 +94,12 @@ async function checkIfFormulaNeedsCleared( * specifically only for static formula. */ async function updateRelatedFormulaLinksOnTables( - appId, table, { deletion } = { deletion: false } ) { - const db = new CouchDB(appId) + const db = getAppDB() // start by retrieving all tables, remove the current table from the list - const tables = (await getAllInternalTables(appId)).filter( + const tables = (await getAllInternalTables()).filter( tbl => tbl._id !== table._id ) // clone the tables, so we can compare at end @@ -155,7 +149,7 @@ async function updateRelatedFormulaLinksOnTables( } } -async function checkIfFormulaUpdated(appId, table, { oldTable }) { +async function checkIfFormulaUpdated(table, { oldTable }) { // look to see if any formula values have changed const shouldUpdate = Object.values(table.schema).find( column => @@ -166,18 +160,14 @@ async function checkIfFormulaUpdated(appId, table, { oldTable }) { ) // if a static formula column has updated, then need to run the update if (shouldUpdate != null) { - await updateAllFormulasInTable(appId, table) + await updateAllFormulasInTable(table) } } -exports.runStaticFormulaChecks = async ( - appId, - table, - { oldTable, deletion } -) => { - await updateRelatedFormulaLinksOnTables(appId, table, { deletion }) - await checkIfFormulaNeedsCleared(appId, table, { oldTable, deletion }) +exports.runStaticFormulaChecks = async (table, { oldTable, deletion }) => { + await updateRelatedFormulaLinksOnTables(table, { deletion }) + await checkIfFormulaNeedsCleared(table, { oldTable, deletion }) if (!deletion) { - await checkIfFormulaUpdated(appId, table, { oldTable }) + await checkIfFormulaUpdated(table, { oldTable }) } } diff --git a/packages/server/src/api/controllers/table/external.js b/packages/server/src/api/controllers/table/external.js index 2453ca7a37..b27eebb0c4 100644 --- a/packages/server/src/api/controllers/table/external.js +++ b/packages/server/src/api/controllers/table/external.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const { buildExternalTableId, breakExternalTableId, @@ -19,6 +18,7 @@ const { makeExternalQuery } = require("../../../integrations/base/utils") const { cloneDeep } = require("lodash/fp") const csvParser = require("../../../utilities/csvParser") const { handleRequest } = require("../row/external") +const { getAppDB } = require("@budibase/backend-core/context") async function 
makeTableRequest( datasource, @@ -159,7 +159,6 @@ function isRelationshipSetup(column) { } exports.save = async function (ctx) { - const appId = ctx.appId const table = ctx.request.body // can't do this right now delete table.dataImport @@ -176,14 +175,14 @@ exports.save = async function (ctx) { let oldTable if (ctx.request.body && ctx.request.body._id) { - oldTable = await getTable(appId, ctx.request.body._id) + oldTable = await getTable(ctx.request.body._id) } if (hasTypeChanged(tableToSave, oldTable)) { ctx.throw(400, "A column type has changed.") } - const db = new CouchDB(appId) + const db = getAppDB() const datasource = await db.get(datasourceId) const oldTables = cloneDeep(datasource.entities) const tables = datasource.entities @@ -267,14 +266,13 @@ exports.save = async function (ctx) { } exports.destroy = async function (ctx) { - const appId = ctx.appId - const tableToDelete = await getTable(appId, ctx.params.tableId) + const tableToDelete = await getTable(ctx.params.tableId) if (!tableToDelete || !tableToDelete.created) { ctx.throw(400, "Cannot delete tables which weren't created in Budibase.") } const datasourceId = getDatasourceId(tableToDelete) - const db = new CouchDB(appId) + const db = getAppDB() const datasource = await db.get(datasourceId) const tables = datasource.entities @@ -290,8 +288,7 @@ exports.destroy = async function (ctx) { } exports.bulkImport = async function (ctx) { - const appId = ctx.appId - const table = await getTable(appId, ctx.params.tableId) + const table = await getTable(ctx.params.tableId) const { dataImport } = ctx.request.body if (!dataImport || !dataImport.schema || !dataImport.csvString) { ctx.throw(400, "Provided data import information is invalid.") @@ -300,7 +297,7 @@ exports.bulkImport = async function (ctx) { ...dataImport, existingTable: table, }) - await handleRequest(appId, DataSourceOperation.BULK_CREATE, table._id, { + await handleRequest(DataSourceOperation.BULK_CREATE, table._id, { rows, }) return table diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js index 2f6bfd0cb3..3e1845b91f 100644 --- a/packages/server/src/api/controllers/table/index.js +++ b/packages/server/src/api/controllers/table/index.js @@ -1,9 +1,9 @@ -const CouchDB = require("../../../db") const internal = require("./internal") const external = require("./external") const csvParser = require("../../../utilities/csvParser") const { isExternalTable, isSQL } = require("../../../integrations/utils") const { getDatasourceParams } = require("../../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") const { getTable, getAllInternalTables } = require("./utils") function pickApi({ tableId, table }) { @@ -20,9 +20,9 @@ function pickApi({ tableId, table }) { // covers both internal and external exports.fetch = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() - const internal = await getAllInternalTables(ctx.appId) + const internal = await getAllInternalTables() const externalTables = await db.allDocs( getDatasourceParams("plus", { @@ -49,7 +49,7 @@ exports.fetch = async function (ctx) { exports.find = async function (ctx) { const tableId = ctx.params.id - ctx.body = await getTable(ctx.appId, tableId) + ctx.body = await getTable(tableId) } exports.save = async function (ctx) { @@ -88,7 +88,7 @@ exports.validateCSVSchema = async function (ctx) { const { csvString, schema = {}, tableId } = ctx.request.body let existingTable if (tableId) { - existingTable = 
await getTable(ctx.appId, tableId) + existingTable = await getTable(tableId) } let result = await csvParser.parse(csvString, schema) if (existingTable) { diff --git a/packages/server/src/api/controllers/table/internal.js b/packages/server/src/api/controllers/table/internal.js index f38a114c25..476e7a52af 100644 --- a/packages/server/src/api/controllers/table/internal.js +++ b/packages/server/src/api/controllers/table/internal.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const linkRows = require("../../../db/linkedRows") const { getRowParams, generateTableID } = require("../../../db/utils") const { FieldTypes } = require("../../../constants") @@ -9,12 +8,13 @@ const { handleDataImport, } = require("./utils") const usageQuota = require("../../../utilities/usageQuota") +const { getAppDB } = require("@budibase/backend-core/context") +const env = require("../../../environment") const { cleanupAttachments } = require("../../../utilities/rowProcessor") const { runStaticFormulaChecks } = require("./bulkFormula") exports.save = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const { dataImport, ...rest } = ctx.request.body let tableToSave = { type: "table", @@ -36,8 +36,7 @@ exports.save = async function (ctx) { // saving a table is a complex operation, involving many different steps, this // has been broken out into a utility to make it more obvious/easier to manipulate const tableSaveFunctions = new TableSaveFunctions({ - db, - ctx, + user: ctx.user, oldTable, dataImport, }) @@ -82,7 +81,6 @@ exports.save = async function (ctx) { // update linked rows try { const linkResp = await linkRows.updateLinks({ - appId, eventType: oldTable ? linkRows.EventType.TABLE_UPDATED : linkRows.EventType.TABLE_SAVE, @@ -107,13 +105,12 @@ exports.save = async function (ctx) { tableToSave = await tableSaveFunctions.after(tableToSave) // has to run after, make sure it has _id - await runStaticFormulaChecks(appId, tableToSave, { oldTable }) + await runStaticFormulaChecks(tableToSave, { oldTable }) return tableToSave } exports.destroy = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const tableToDelete = await db.get(ctx.params.tableId) // Delete all rows for that table @@ -127,7 +124,6 @@ exports.destroy = async function (ctx) { // update linked rows await linkRows.updateLinks({ - appId, eventType: linkRows.EventType.TABLE_DELETE, table: tableToDelete, }) @@ -136,24 +132,25 @@ exports.destroy = async function (ctx) { await db.remove(tableToDelete) // remove table search index - const currentIndexes = await db.getIndexes() - const existingIndex = currentIndexes.indexes.find( - existing => existing.name === `search:${ctx.params.tableId}` - ) - if (existingIndex) { - await db.deleteIndex(existingIndex) + if (!env.isTest()) { + const currentIndexes = await db.getIndexes() + const existingIndex = currentIndexes.indexes.find( + existing => existing.name === `search:${ctx.params.tableId}` + ) + if (existingIndex) { + await db.deleteIndex(existingIndex) + } } // has to run after, make sure it has _id - await runStaticFormulaChecks(appId, tableToDelete, { deletion: true }) - await cleanupAttachments(appId, tableToDelete, { rows }) + await runStaticFormulaChecks(tableToDelete, { deletion: true }) + await cleanupAttachments(tableToDelete, { rows }) return tableToDelete } exports.bulkImport = async function (ctx) { - const appId = ctx.appId - const table = await getTable(appId, ctx.params.tableId) + 
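
// A behavioural change hides in the internal table destroy hunk above: search-index cleanup
// is now skipped when env.isTest() is true, presumably because the test database doesn't
// support those index calls. Sketch of the guard using the same db methods the hunk touches;
// the wrapper is illustrative.
const env = require("../../../environment")

async function removeSearchIndex(db, tableId) {
  if (env.isTest()) {
    return // index management is skipped under the test environment
  }
  const { indexes } = await db.getIndexes()
  const existing = indexes.find(index => index.name === `search:${tableId}`)
  if (existing) {
    await db.deleteIndex(existing)
  }
}
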
const table = await getTable(ctx.params.tableId) const { dataImport } = ctx.request.body - await handleDataImport(appId, ctx.user, table, dataImport) + await handleDataImport(ctx.user, table, dataImport) return table } diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js index f1907666c9..0e299dbd0d 100644 --- a/packages/server/src/api/controllers/table/utils.js +++ b/packages/server/src/api/controllers/table/utils.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const csvParser = require("../../../utilities/csvParser") const { getRowParams, @@ -26,10 +25,11 @@ const { const { getViews, saveView } = require("../view/utils") const viewTemplate = require("../view/viewBuilder") const usageQuota = require("../../../utilities/usageQuota") +const { getAppDB } = require("@budibase/backend-core/context") const { cloneDeep } = require("lodash/fp") -exports.clearColumns = async (appId, table, columnNames) => { - const db = new CouchDB(appId) +exports.clearColumns = async (table, columnNames) => { + const db = getAppDB() const rows = await db.allDocs( getRowParams(table._id, null, { include_docs: true, @@ -43,7 +43,8 @@ exports.clearColumns = async (appId, table, columnNames) => { ) } -exports.checkForColumnUpdates = async (appId, db, oldTable, updatedTable) => { +exports.checkForColumnUpdates = async (oldTable, updatedTable) => { + const db = getAppDB() let updatedRows = [] const rename = updatedTable._rename let deletedColumns = [] @@ -73,9 +74,9 @@ exports.checkForColumnUpdates = async (appId, db, oldTable, updatedTable) => { }) // cleanup any attachments from object storage for deleted attachment columns - await cleanupAttachments(appId, updatedTable, { oldTable, rows: rawRows }) + await cleanupAttachments(updatedTable, { oldTable, rows: rawRows }) // Update views - await exports.checkForViewUpdates(db, updatedTable, rename, deletedColumns) + await exports.checkForViewUpdates(updatedTable, rename, deletedColumns) delete updatedTable._rename } return { rows: updatedRows, table: updatedTable } @@ -102,12 +103,12 @@ exports.makeSureTableUpToDate = (table, tableToSave) => { return tableToSave } -exports.handleDataImport = async (appId, user, table, dataImport) => { +exports.handleDataImport = async (user, table, dataImport) => { if (!dataImport || !dataImport.csvString) { return table } - const db = new CouchDB(appId) + const db = getAppDB() // Populate the table with rows imported from CSV in a bulk update const data = await csvParser.transform({ ...dataImport, @@ -152,8 +153,8 @@ exports.handleDataImport = async (appId, user, table, dataImport) => { return table } -exports.handleSearchIndexes = async (appId, table) => { - const db = new CouchDB(appId) +exports.handleSearchIndexes = async table => { + const db = getAppDB() // create relevant search indexes if (table.indexes && table.indexes.length > 0) { const currentIndexes = await db.getIndexes() @@ -210,12 +211,9 @@ exports.checkStaticTables = table => { } class TableSaveFunctions { - constructor({ db, ctx, oldTable, dataImport }) { - this.db = db - this.ctx = ctx - if (this.ctx && this.ctx.user) { - this.appId = this.ctx.appId - } + constructor({ user, oldTable, dataImport }) { + this.db = getAppDB() + this.user = user this.oldTable = oldTable this.dataImport = dataImport // any rows that need updated @@ -233,25 +231,15 @@ class TableSaveFunctions { // when confirmed valid async mid(table) { - let response = await exports.checkForColumnUpdates( - this.appId, - this.db, - 
this.oldTable, - table - ) + let response = await exports.checkForColumnUpdates(this.oldTable, table) this.rows = this.rows.concat(response.rows) return table } // after saving async after(table) { - table = await exports.handleSearchIndexes(this.appId, table) - table = await exports.handleDataImport( - this.appId, - this.ctx.user, - table, - this.dataImport - ) + table = await exports.handleSearchIndexes(table) + table = await exports.handleDataImport(this.user, table, this.dataImport) return table } @@ -260,8 +248,8 @@ class TableSaveFunctions { } } -exports.getAllInternalTables = async appId => { - const db = new CouchDB(appId) +exports.getAllInternalTables = async () => { + const db = getAppDB() const internalTables = await db.allDocs( getTableParams(null, { include_docs: true, @@ -274,8 +262,8 @@ exports.getAllInternalTables = async appId => { })) } -exports.getAllExternalTables = async (appId, datasourceId) => { - const db = new CouchDB(appId) +exports.getAllExternalTables = async datasourceId => { + const db = getAppDB() const datasource = await db.get(datasourceId) if (!datasource || !datasource.entities) { throw "Datasource is not configured fully." @@ -283,25 +271,25 @@ exports.getAllExternalTables = async (appId, datasourceId) => { return datasource.entities } -exports.getExternalTable = async (appId, datasourceId, tableName) => { - const entities = await exports.getAllExternalTables(appId, datasourceId) +exports.getExternalTable = async (datasourceId, tableName) => { + const entities = await exports.getAllExternalTables(datasourceId) return entities[tableName] } -exports.getTable = async (appId, tableId) => { - const db = new CouchDB(appId) +exports.getTable = async tableId => { + const db = getAppDB() if (isExternalTable(tableId)) { let { datasourceId, tableName } = breakExternalTableId(tableId) const datasource = await db.get(datasourceId) - const table = await exports.getExternalTable(appId, datasourceId, tableName) + const table = await exports.getExternalTable(datasourceId, tableName) return { ...table, sql: isSQL(datasource) } } else { return db.get(tableId) } } -exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => { - const views = await getViews(db) +exports.checkForViewUpdates = async (table, rename, deletedColumns) => { + const views = await getViews() const tableViews = views.filter(view => view.meta.tableId === table._id) // Check each table view to see if impacted by this table action @@ -363,7 +351,7 @@ exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => { // Update view if required if (needsUpdated) { const newViewTemplate = viewTemplate(view.meta) - await saveView(db, null, view.name, newViewTemplate) + await saveView(null, view.name, newViewTemplate) if (!newViewTemplate.meta.schema) { newViewTemplate.meta.schema = table.schema } diff --git a/packages/server/src/api/controllers/user.js b/packages/server/src/api/controllers/user.js index 1bd8bd6a12..ca7ef24162 100644 --- a/packages/server/src/api/controllers/user.js +++ b/packages/server/src/api/controllers/user.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const { generateUserMetadataID, getUserMetadataParams, @@ -11,12 +10,14 @@ const { isEqual } = require("lodash") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") const { getDevelopmentAppID, - getDeployedAppIDs, + getProdAppIDs, + dbExists, } = require("@budibase/backend-core/db") -const { doesDatabaseExist } = require("../../utilities") const { UserStatus } = 
require("@budibase/backend-core/constants") +const { getAppDB } = require("@budibase/backend-core/context") -async function rawMetadata(db) { +async function rawMetadata() { + const db = getAppDB() return ( await db.allDocs( getUserMetadataParams(null, { @@ -54,13 +55,10 @@ function combineMetadataAndUser(user, metadata) { return null } -exports.syncGlobalUsers = async appId => { +exports.syncGlobalUsers = async () => { // sync user metadata - const db = new CouchDB(appId) - const [users, metadata] = await Promise.all([ - getGlobalUsers(appId), - rawMetadata(db), - ]) + const db = getAppDB() + const [users, metadata] = await Promise.all([getGlobalUsers(), rawMetadata()]) const toWrite = [] for (let user of users) { const combined = await combineMetadataAndUser(user, metadata) @@ -94,7 +92,7 @@ exports.syncUser = async function (ctx) { let prodAppIds // if they are a builder then get all production app IDs if ((user.builder && user.builder.global) || deleting) { - prodAppIds = await getDeployedAppIDs(CouchDB) + prodAppIds = await getProdAppIDs() } else { prodAppIds = Object.entries(roles) .filter(entry => entry[1] !== BUILTIN_ROLE_IDS.PUBLIC) @@ -104,10 +102,10 @@ exports.syncUser = async function (ctx) { const roleId = roles[prodAppId] const devAppId = getDevelopmentAppID(prodAppId) for (let appId of [prodAppId, devAppId]) { - if (!(await doesDatabaseExist(appId))) { + if (!(await dbExists(appId))) { continue } - const db = new CouchDB(appId) + const db = getAppDB() const metadataId = generateUserMetadataID(userId) let metadata try { @@ -143,8 +141,8 @@ exports.syncUser = async function (ctx) { } exports.fetchMetadata = async function (ctx) { - const database = new CouchDB(ctx.appId) - const global = await getGlobalUsers(ctx.appId) + const database = getAppDB() + const global = await getGlobalUsers() const metadata = await rawMetadata(database) const users = [] for (let user of global) { @@ -173,8 +171,7 @@ exports.updateSelfMetadata = async function (ctx) { } exports.updateMetadata = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const user = ctx.request.body // this isn't applicable to the user delete user.roles @@ -186,7 +183,7 @@ exports.updateMetadata = async function (ctx) { } exports.destroyMetadata = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() try { const dbUser = await db.get(ctx.params.id) await db.remove(dbUser._id, dbUser._rev) @@ -209,7 +206,7 @@ exports.setFlag = async function (ctx) { ctx.throw(400, "Must supply a 'flag' field in request body.") } const flagDocId = generateUserFlagID(userId) - const db = new CouchDB(ctx.appId) + const db = getAppDB() let doc try { doc = await db.get(flagDocId) @@ -224,7 +221,7 @@ exports.setFlag = async function (ctx) { exports.getFlags = async function (ctx) { const userId = ctx.user._id const docId = generateUserFlagID(userId) - const db = new CouchDB(ctx.appId) + const db = getAppDB() let doc try { doc = await db.get(docId) diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js index e3232323bf..fd6b32f3d6 100644 --- a/packages/server/src/api/controllers/view/index.js +++ b/packages/server/src/api/controllers/view/index.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const viewTemplate = require("./viewBuilder") const { apiFileReturn } = require("../../../utilities/fileSystem") const exporters = require("./exporters") @@ -6,14 +5,14 @@ const { saveView, getView, getViews, 
deleteView } = require("./utils") const { fetchView } = require("../row") const { getTable } = require("../table/utils") const { FieldTypes } = require("../../../constants") +const { getAppDB } = require("@budibase/backend-core/context") exports.fetch = async ctx => { - const db = new CouchDB(ctx.appId) - ctx.body = await getViews(db) + ctx.body = await getViews() } exports.save = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const { originalName, ...viewToSave } = ctx.request.body const view = viewTemplate(viewToSave) @@ -21,7 +20,7 @@ exports.save = async ctx => { ctx.throw(400, "Cannot create view without a name") } - await saveView(db, originalName, viewToSave.name, view) + await saveView(originalName, viewToSave.name, view) // add views to table document const table = await db.get(ctx.request.body.tableId) @@ -42,9 +41,9 @@ exports.save = async ctx => { } exports.destroy = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const viewName = decodeURI(ctx.params.viewName) - const view = await deleteView(db, viewName) + const view = await deleteView(viewName) const table = await db.get(view.meta.tableId) delete table.views[viewName] await db.put(table) @@ -53,9 +52,8 @@ exports.destroy = async ctx => { } exports.exportView = async ctx => { - const db = new CouchDB(ctx.appId) const viewName = decodeURI(ctx.query.view) - const view = await getView(db, viewName) + const view = await getView(viewName) const format = ctx.query.format if (!format || !Object.values(exporters.ExportFormats).includes(format)) { @@ -83,7 +81,7 @@ exports.exportView = async ctx => { let schema = view && view.meta && view.meta.schema if (!schema) { const tableId = ctx.params.tableId || view.meta.tableId - const table = await getTable(ctx.appId, tableId) + const table = await getTable(tableId) schema = table.schema } diff --git a/packages/server/src/api/controllers/view/utils.js b/packages/server/src/api/controllers/view/utils.js index 27fccaf47f..59d169ef7f 100644 --- a/packages/server/src/api/controllers/view/utils.js +++ b/packages/server/src/api/controllers/view/utils.js @@ -6,8 +6,10 @@ const { SEPARATOR, } = require("../../../db/utils") const env = require("../../../environment") +const { getAppDB } = require("@budibase/backend-core/context") -exports.getView = async (db, viewName) => { +exports.getView = async viewName => { + const db = getAppDB() if (env.SELF_HOSTED) { const designDoc = await db.get("_design/database") return designDoc.views[viewName] @@ -22,7 +24,8 @@ exports.getView = async (db, viewName) => { } } -exports.getViews = async db => { +exports.getViews = async () => { + const db = getAppDB() const response = [] if (env.SELF_HOSTED) { const designDoc = await db.get("_design/database") @@ -54,7 +57,8 @@ exports.getViews = async db => { return response } -exports.saveView = async (db, originalName, viewName, viewTemplate) => { +exports.saveView = async (originalName, viewName, viewTemplate) => { + const db = getAppDB() if (env.SELF_HOSTED) { const designDoc = await db.get("_design/database") designDoc.views = { @@ -91,7 +95,8 @@ exports.saveView = async (db, originalName, viewName, viewTemplate) => { } } -exports.deleteView = async (db, viewName) => { +exports.deleteView = async viewName => { + const db = getAppDB() if (env.SELF_HOSTED) { const designDoc = await db.get("_design/database") const view = designDoc.views[viewName] diff --git a/packages/server/src/api/controllers/webhook.js b/packages/server/src/api/controllers/webhook.js index 
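// Editor's sketch, assuming the same "_design/database" design document the view utils above
// read and write: helpers in this style fetch their own DB handle from context instead of
// taking a `db` argument. listViewNames is a hypothetical name, not part of the PR.
const { getAppDB } = require("@budibase/backend-core/context")

exports.listViewNames = async () => {
  const db = getAppDB()
  const designDoc = await db.get("_design/database")
  // user-defined views live as named map/reduce definitions on the design doc
  return Object.keys(designDoc.views || {})
}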
0230fb481b..49ab652cbf 100644 --- a/packages/server/src/api/controllers/webhook.js +++ b/packages/server/src/api/controllers/webhook.js @@ -1,9 +1,9 @@ -const CouchDB = require("../../db") const { generateWebhookID, getWebhookParams } = require("../../db/utils") const toJsonSchema = require("to-json-schema") const validate = require("jsonschema").validate const triggers = require("../../automations/triggers") -const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getProdAppID } = require("@budibase/backend-core/db") +const { getAppDB, updateAppId } = require("@budibase/backend-core/context") const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema" @@ -23,7 +23,7 @@ exports.WebhookType = { } exports.fetch = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const response = await db.allDocs( getWebhookParams(null, { include_docs: true, @@ -33,7 +33,7 @@ exports.fetch = async ctx => { } exports.save = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const webhook = ctx.request.body webhook.appId = ctx.appId @@ -52,12 +52,13 @@ exports.save = async ctx => { } exports.destroy = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() ctx.body = await db.remove(ctx.params.id, ctx.params.rev) } exports.buildSchema = async ctx => { - const db = new CouchDB(ctx.params.instance) + updateAppId(ctx.params.instance) + const db = getAppDB() const webhook = await db.get(ctx.params.id) webhook.bodySchema = toJsonSchema(ctx.request.body) // update the automation outputs @@ -81,9 +82,10 @@ exports.buildSchema = async ctx => { } exports.trigger = async ctx => { - const prodAppId = getDeployedAppID(ctx.params.instance) + const prodAppId = getProdAppID(ctx.params.instance) + updateAppId(prodAppId) try { - const db = new CouchDB(prodAppId) + const db = getAppDB() const webhook = await db.get(ctx.params.id) // validate against the schema if (webhook.bodySchema) { diff --git a/packages/server/src/api/routes/tests/automation.spec.js b/packages/server/src/api/routes/tests/automation.spec.js index c412c34fdc..3e5725bb95 100644 --- a/packages/server/src/api/routes/tests/automation.spec.js +++ b/packages/server/src/api/routes/tests/automation.spec.js @@ -145,6 +145,7 @@ describe("/automations", () => { let table = await config.createTable() automation.definition.trigger.inputs.tableId = table._id automation.definition.steps[0].inputs.row.tableId = table._id + automation.appId = config.appId automation = await config.createAutomation(automation) await setup.delay(500) const res = await testAutomation(config, automation) diff --git a/packages/server/src/api/routes/tests/misc.spec.js b/packages/server/src/api/routes/tests/misc.spec.js index ae5c0cca60..e5b87543d2 100644 --- a/packages/server/src/api/routes/tests/misc.spec.js +++ b/packages/server/src/api/routes/tests/misc.spec.js @@ -82,7 +82,6 @@ describe("run misc tests", () => { dataImport.schema[col] = { type: "string" } } await tableUtils.handleDataImport( - config.getAppId(), { userId: "test" }, table, dataImport diff --git a/packages/server/src/api/routes/tests/routing.spec.js b/packages/server/src/api/routes/tests/routing.spec.js index fdc414448c..d6d05c3322 100644 --- a/packages/server/src/api/routes/tests/routing.spec.js +++ b/packages/server/src/api/routes/tests/routing.spec.js @@ -1,10 +1,15 @@ const setup = require("./utilities") const { basicScreen } = setup.structures -const { checkBuilderEndpoint } = require("./utilities/TestFunctions") +const { 
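// Editor's sketch (hypothetical helper, not from this PR): the webhook endpoints above target a
// specific copy of an app, so they switch the context app ID explicitly before grabbing the DB.
const { getProdAppID } = require("@budibase/backend-core/db")
const { getAppDB, updateAppId } = require("@budibase/backend-core/context")

async function loadWebhookFromProdApp(instanceAppId, webhookId) {
  // trigger endpoints always read from the deployed (prod) copy of the app
  updateAppId(getProdAppID(instanceAppId))
  const db = getAppDB()
  return db.get(webhookId)
}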
checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") +const { doInAppContext } = require("@budibase/backend-core/context") const route = "/test" +// there are checks which are disabled in test env, +// these checks need to be enabled for this test + + describe("/routing", () => { let request = setup.getRequest() let config = setup.getConfig() @@ -26,20 +31,24 @@ describe("/routing", () => { describe("fetch", () => { it("prevents a public user from accessing development app", async () => { - await request - .get(`/api/routing/client`) - .set(config.publicHeaders({ prodApp: false })) - .expect(302) + await runInProd(() => { + return request + .get(`/api/routing/client`) + .set(config.publicHeaders({ prodApp: false })) + .expect(302) + }) }) it("prevents a non builder from accessing development app", async () => { - await request - .get(`/api/routing/client`) - .set(await config.roleHeaders({ - roleId: BUILTIN_ROLE_IDS.BASIC, - prodApp: false - })) - .expect(302) + await runInProd(async () => { + return request + .get(`/api/routing/client`) + .set(await config.roleHeaders({ + roleId: BUILTIN_ROLE_IDS.BASIC, + prodApp: false + })) + .expect(302) + }) }) it("returns the correct routing for basic user", async () => { const res = await request diff --git a/packages/server/src/api/routes/tests/row.spec.js b/packages/server/src/api/routes/tests/row.spec.js index 01284552c5..8354f01ad7 100644 --- a/packages/server/src/api/routes/tests/row.spec.js +++ b/packages/server/src/api/routes/tests/row.spec.js @@ -1,6 +1,7 @@ const { outputProcessing } = require("../../../utilities/rowProcessor") const setup = require("./utilities") const { basicRow } = setup.structures +const { doInAppContext } = require("@budibase/backend-core/context") // mock the fetch for the search system jest.mock("node-fetch") @@ -387,10 +388,12 @@ describe("/rows", () => { }) // the environment needs configured for this await setup.switchToSelfHosted(async () => { - const enriched = await outputProcessing({ appId: config.getAppId() }, table, [row]) - expect(enriched[0].attachment[0].url).toBe( - `/prod-budi-app-assets/${config.getAppId()}/attachments/test/thing.csv` - ) + doInAppContext(config.getAppId(), async () => { + const enriched = await outputProcessing(table, [row]) + expect(enriched[0].attachment[0].url).toBe( + `/prod-budi-app-assets/${config.getAppId()}/attachments/test/thing.csv` + ) + }) }) }) }) diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.js b/packages/server/src/api/routes/tests/utilities/TestFunctions.js index 9bd54f0d75..c752507d25 100644 --- a/packages/server/src/api/routes/tests/utilities/TestFunctions.js +++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.js @@ -1,9 +1,10 @@ const rowController = require("../../../controllers/row") const appController = require("../../../controllers/application") -const CouchDB = require("../../../../db") const { AppStatus } = require("../../../../db/utils") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") const { TENANT_ID } = require("../../../../tests/utilities/structures") +const { getAppDB, doInAppContext } = require("@budibase/backend-core/context") +const env = require("../../../../environment") function Request(appId, params) { this.appId = appId @@ -11,9 +12,15 @@ function Request(appId, params) { this.request = {} } +function runRequest(appId, controlFunc, request) { + return doInAppContext(appId, async () => { + return 
controlFunc(request) + }) +} + exports.getAllTableRows = async config => { const req = new Request(config.appId, { tableId: config.table._id }) - await rowController.fetch(req) + await runRequest(config.appId, rowController.fetch, req) return req.body } @@ -26,14 +33,17 @@ exports.clearAllApps = async (tenantId = TENANT_ID) => { } for (let app of apps) { const { appId } = app - await appController.delete(new Request(null, { appId })) + const req = new Request(null, { appId }) + await runRequest(appId, appController.delete, req) } } exports.clearAllAutomations = async config => { const automations = await config.getAllAutomations() for (let auto of automations) { - await config.deleteAutomation(auto) + await doInAppContext(config.appId, async () => { + await config.deleteAutomation(auto) + }) } } @@ -96,20 +106,32 @@ exports.checkPermissionsEndpoint = async ({ .expect(403) } -exports.getDB = config => { - return new CouchDB(config.getAppId()) +exports.getDB = () => { + return getAppDB() } exports.testAutomation = async (config, automation) => { - return await config.request - .post(`/api/automations/${automation._id}/test`) - .send({ - row: { - name: "Test", - description: "TEST", - }, - }) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + return runRequest(automation.appId, async () => { + return await config.request + .post(`/api/automations/${automation._id}/test`) + .send({ + row: { + name: "Test", + description: "TEST", + }, + }) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(200) + }) +} + +exports.runInProd = async func => { + const nodeEnv = env.NODE_ENV + const workerId = env.JEST_WORKER_ID + env._set("NODE_ENV", "PRODUCTION") + env._set("JEST_WORKER_ID", null) + await func() + env._set("NODE_ENV", nodeEnv) + env._set("JEST_WORKER_ID", workerId) } diff --git a/packages/server/src/automations/automationUtils.js b/packages/server/src/automations/automationUtils.js index aab341a1f8..9360840efd 100644 --- a/packages/server/src/automations/automationUtils.js +++ b/packages/server/src/automations/automationUtils.js @@ -53,13 +53,12 @@ exports.cleanInputValues = (inputs, schema) => { * the automation but is instead part of the Table/Table. This function will get the table schema and use it to instead * perform the cleanInputValues function on the input row. * - * @param {string} appId The instance which the Table/Table is contained under. * @param {string} tableId The ID of the Table/Table which the schema is to be retrieved for. * @param {object} row The input row structure which requires clean-up after having been through template statements. * @returns {Promise} The cleaned up rows object, will should now have all the required primitive types. 
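// Editor's sketch, not a change request: runInProd (defined above) restores NODE_ENV and
// JEST_WORKER_ID after the callback; a try/finally variant would keep the restore even when the
// wrapped assertion throws. Hypothetical name, same env._set API as this repo's tests use.
const runInProdSafely = async func => {
  const nodeEnv = env.NODE_ENV
  const workerId = env.JEST_WORKER_ID
  env._set("NODE_ENV", "PRODUCTION")
  env._set("JEST_WORKER_ID", null)
  try {
    await func()
  } finally {
    env._set("NODE_ENV", nodeEnv)
    env._set("JEST_WORKER_ID", workerId)
  }
}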
*/ -exports.cleanUpRow = async (appId, tableId, row) => { - let table = await getTable(appId, tableId) +exports.cleanUpRow = async (tableId, row) => { + let table = await getTable(tableId) return exports.cleanInputValues(row, { properties: table.schema }) } diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js index 1937121062..a16521d25d 100644 --- a/packages/server/src/automations/steps/createRow.js +++ b/packages/server/src/automations/steps/createRow.js @@ -78,7 +78,6 @@ exports.run = async function ({ inputs, appId, emitter }) { try { inputs.row = await automationUtils.cleanUpRow( - appId, inputs.row.tableId, inputs.row ) diff --git a/packages/server/src/automations/steps/updateRow.js b/packages/server/src/automations/steps/updateRow.js index a9569932fa..f66fcf9432 100644 --- a/packages/server/src/automations/steps/updateRow.js +++ b/packages/server/src/automations/steps/updateRow.js @@ -87,7 +87,7 @@ exports.run = async function ({ inputs, appId, emitter }) { try { if (tableId) { - inputs.row = await automationUtils.cleanUpRow(appId, tableId, inputs.row) + inputs.row = await automationUtils.cleanUpRow(tableId, inputs.row) } await rowController.patch(ctx) return { diff --git a/packages/server/src/automations/triggers.js b/packages/server/src/automations/triggers.js index 49e50ec34f..deff9f7503 100644 --- a/packages/server/src/automations/triggers.js +++ b/packages/server/src/automations/triggers.js @@ -1,4 +1,3 @@ -const CouchDB = require("../db") const emitter = require("../events/index") const { getAutomationParams } = require("../db/utils") const { coerce } = require("../utilities/rowProcessor") @@ -9,6 +8,7 @@ const { queue } = require("./bullboard") const { checkTestFlag } = require("../utilities/redis") const utils = require("./utils") const env = require("../environment") +const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") const TRIGGER_DEFINITIONS = definitions const JOB_OPTS = { @@ -21,39 +21,41 @@ async function queueRelevantRowAutomations(event, eventType) { throw `No appId specified for ${eventType} - check event emitters.` } - const db = new CouchDB(event.appId) - let automations = await db.allDocs( - getAutomationParams(null, { include_docs: true }) - ) + doInAppContext(event.appId, async () => { + const db = getAppDB() + let automations = await db.allDocs( + getAutomationParams(null, { include_docs: true }) + ) - // filter down to the correct event type - automations = automations.rows - .map(automation => automation.doc) - .filter(automation => { - const trigger = automation.definition.trigger - return trigger && trigger.event === eventType - }) + // filter down to the correct event type + automations = automations.rows + .map(automation => automation.doc) + .filter(automation => { + const trigger = automation.definition.trigger + return trigger && trigger.event === eventType + }) - for (let automation of automations) { - let automationDef = automation.definition - let automationTrigger = automationDef ? 
automationDef.trigger : {} - // don't queue events which are for dev apps, only way to test automations is - // running tests on them, in production the test flag will never - // be checked due to lazy evaluation (first always false) - if ( - !env.ALLOW_DEV_AUTOMATIONS && - isDevAppID(event.appId) && - !(await checkTestFlag(automation._id)) - ) { - continue + for (let automation of automations) { + let automationDef = automation.definition + let automationTrigger = automationDef ? automationDef.trigger : {} + // don't queue events which are for dev apps, only way to test automations is + // running tests on them, in production the test flag will never + // be checked due to lazy evaluation (first always false) + if ( + !env.ALLOW_DEV_AUTOMATIONS && + isDevAppID(event.appId) && + !(await checkTestFlag(automation._id)) + ) { + continue + } + if ( + automationTrigger.inputs && + automationTrigger.inputs.tableId === event.row.tableId + ) { + await queue.add({ automation, event }, JOB_OPTS) + } } - if ( - automationTrigger.inputs && - automationTrigger.inputs.tableId === event.row.tableId - ) { - await queue.add({ automation, event }, JOB_OPTS) - } - } + }) } emitter.on("row:save", async function (event) { diff --git a/packages/server/src/automations/utils.js b/packages/server/src/automations/utils.js index 4a554793f8..3ee1f535c7 100644 --- a/packages/server/src/automations/utils.js +++ b/packages/server/src/automations/utils.js @@ -6,8 +6,9 @@ const { queue } = require("./bullboard") const newid = require("../db/newid") const { updateEntityMetadata } = require("../utilities") const { MetadataTypes } = require("../constants") -const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getProdAppID } = require("@budibase/backend-core/db") const { cloneDeep } = require("lodash/fp") +const { getAppDB, getAppId } = require("@budibase/backend-core/context") const WH_STEP_ID = definitions.WEBHOOK.stepId const CRON_STEP_ID = definitions.CRON.stepId @@ -27,7 +28,6 @@ exports.processEvent = async job => { exports.updateTestHistory = async (appId, automation, history) => { return updateEntityMetadata( - appId, MetadataTypes.AUTOMATION_TEST_HISTORY, automation._id, metadata => { @@ -93,6 +93,9 @@ exports.enableCronTrigger = async (appId, automation) => { ) // Assign cron job ID from bull so we can remove it later if the cron trigger is removed trigger.cronJobId = job.id + // can't use getAppDB here as this is likely to be called from dev app, + // but this call could be for dev app or prod app, need to just use what + // was passed in const db = new CouchDB(appId) const response = await db.put(automation) automation._id = response.id @@ -109,7 +112,8 @@ exports.enableCronTrigger = async (appId, automation) => { * @returns {Promise} After this is complete the new automation object may have been updated and should be * written to DB (this does not write to DB as it would be wasteful to repeat). */ -exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => { +exports.checkForWebhooks = async ({ oldAuto, newAuto }) => { + const appId = getAppId() const oldTrigger = oldAuto ? oldAuto.definition.trigger : null const newTrigger = newAuto ? 
newAuto.definition.trigger : null const triggerChanged = @@ -128,7 +132,7 @@ exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => { oldTrigger.webhookId ) { try { - let db = new CouchDB(appId) + let db = getAppDB() // need to get the webhook to get the rev const webhook = await db.get(oldTrigger.webhookId) const ctx = { @@ -166,7 +170,7 @@ exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => { // the app ID has to be development for this endpoint // it can only be used when building the app // but the trigger endpoint will always be used in production - const prodAppId = getDeployedAppID(appId) + const prodAppId = getProdAppID(appId) newTrigger.inputs = { schemaUrl: `api/webhooks/schema/${appId}/${id}`, triggerUrl: `api/webhooks/trigger/${prodAppId}/${id}`, diff --git a/packages/server/src/db/linkedRows/LinkController.js b/packages/server/src/db/linkedRows/LinkController.js index b66e2debb5..86c32bf94f 100644 --- a/packages/server/src/db/linkedRows/LinkController.js +++ b/packages/server/src/db/linkedRows/LinkController.js @@ -1,4 +1,3 @@ -const CouchDB = require("../index") const { IncludeDocs, getLinkDocuments } = require("./linkUtils") const { generateLinkID, @@ -7,6 +6,7 @@ const { } = require("../utils") const Sentry = require("@sentry/node") const { FieldTypes, RelationshipTypes } = require("../../constants") +const { getAppDB } = require("@budibase/backend-core/context") /** * Creates a new link document structure which can be put to the database. It is important to @@ -52,9 +52,8 @@ function LinkDocument( } class LinkController { - constructor({ appId, tableId, row, table, oldTable }) { - this._appId = appId - this._db = new CouchDB(appId) + constructor({ tableId, row, table, oldTable }) { + this._db = getAppDB() this._tableId = tableId this._row = row this._table = table @@ -99,7 +98,6 @@ class LinkController { */ getRowLinkDocs(rowId) { return getLinkDocuments({ - appId: this._appId, tableId: this._tableId, rowId, includeDocs: IncludeDocs.INCLUDE, @@ -111,7 +109,6 @@ class LinkController { */ getTableLinkDocs() { return getLinkDocuments({ - appId: this._appId, tableId: this._tableId, includeDocs: IncludeDocs.INCLUDE, }) @@ -230,7 +227,6 @@ class LinkController { if (linkedSchema.relationshipType === RelationshipTypes.ONE_TO_MANY) { let links = ( await getLinkDocuments({ - appId: this._appId, tableId: field.tableId, rowId: linkId, includeDocs: IncludeDocs.EXCLUDE, diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js index eab287aa33..6cb45f9781 100644 --- a/packages/server/src/db/linkedRows/index.js +++ b/packages/server/src/db/linkedRows/index.js @@ -9,12 +9,12 @@ const { getLinkedTable, } = require("./linkUtils") const { flatten } = require("lodash") -const CouchDB = require("../../db") const { FieldTypes } = require("../../constants") const { getMultiIDParams, USER_METDATA_PREFIX } = require("../../db/utils") const { partition } = require("lodash") const { getGlobalUsersFromMetadata } = require("../../utilities/global") const { processFormulas } = require("../../utilities/rowProcessor/utils") +const { getAppDB } = require("@budibase/backend-core/context") /** * This functionality makes sure that when rows with links are created, updated or deleted they are processed @@ -48,14 +48,13 @@ function clearRelationshipFields(table, rows) { return rows } -async function getLinksForRows(appId, rows) { +async function getLinksForRows(rows) { const tableIds = [...new Set(rows.map(el => el.tableId))] // start by 
getting all the link values for performance reasons const responses = flatten( await Promise.all( tableIds.map(tableId => getLinkDocuments({ - appId, tableId: tableId, includeDocs: IncludeDocs.EXCLUDE, }) @@ -72,9 +71,9 @@ async function getLinksForRows(appId, rows) { ) } -async function getFullLinkedDocs(appId, links) { +async function getFullLinkedDocs(links) { // create DBs - const db = new CouchDB(appId) + const db = getAppDB() const linkedRowIds = links.map(link => link.id) const uniqueRowIds = [...new Set(linkedRowIds)] let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map( @@ -88,7 +87,7 @@ async function getFullLinkedDocs(appId, links) { let [users, other] = partition(linked, linkRow => linkRow._id.startsWith(USER_METDATA_PREFIX) ) - users = await getGlobalUsersFromMetadata(appId, users) + users = await getGlobalUsersFromMetadata(users) return [...other, ...users] } @@ -96,20 +95,16 @@ async function getFullLinkedDocs(appId, links) { * Update link documents for a row or table - this is to be called by the API controller when a change is occurring. * @param {string} args.eventType states what type of change which is occurring, means this can be expanded upon in the * future quite easily (all updates go through one function). - * @param {string} args.appId The ID of the instance in which the change is occurring. * @param {string} args.tableId The ID of the of the table which is being changed. - * @param {object|null} args.row The row which is changing, e.g. created, updated or deleted. - * @param {object|null} args.table If the table has already been retrieved this can be used to reduce database gets. - * @param {object|null} args.oldTable If the table is being updated then the old table can be provided for differencing. + * @param {object|undefined} args.row The row which is changing, e.g. created, updated or deleted. + * @param {object|undefined} args.table If the table has already been retrieved this can be used to reduce database gets. + * @param {object|undefined} args.oldTable If the table is being updated then the old table can be provided for differencing. * @returns {Promise} When the update is complete this will respond successfully. Returns the row for * row operations and the table for table operations. */ exports.updateLinks = async function (args) { - const { eventType, appId, row, tableId, table, oldTable } = args + const { eventType, row, tableId, table, oldTable } = args const baseReturnObj = row == null ? table : row - if (appId == null) { - throw "Cannot operate without an instance ID." - } // make sure table ID is set if (tableId == null && table != null) { args.tableId = table._id @@ -146,26 +141,23 @@ exports.updateLinks = async function (args) { /** * Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row. * This is required for formula fields, this may only be utilised internally (for now). - * @param {string} appId The ID of the app which this request is in the context of. * @param {object} table The table from which the rows originated. * @param {array} rows The rows which are to be enriched. * @return {Promise<*>} returns the rows with all of the enriched relationships on it. 
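// Editor's sketch of the same refactor in the link layer (illustrative, not from the PR):
// bulk-fetching linked documents now goes through the context DB; getMultiIDParams is the
// helper imported from this package's db/utils above.
const { getAppDB } = require("@budibase/backend-core/context")
const { getMultiIDParams } = require("../../db/utils")

async function fetchDocsByIds(rowIds) {
  const db = getAppDB()
  const uniqueIds = [...new Set(rowIds)]
  const response = await db.allDocs(getMultiIDParams(uniqueIds))
  return response.rows.map(row => row.doc)
}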
*/ -exports.attachFullLinkedDocs = async (appId, table, rows) => { +exports.attachFullLinkedDocs = async (table, rows) => { const linkedTableIds = getLinkedTableIDs(table) if (linkedTableIds.length === 0) { return rows } - // create DBs - const db = new CouchDB(appId) // get all the links - const links = (await getLinksForRows(appId, rows)).filter(link => + const links = (await getLinksForRows(rows)).filter(link => rows.some(row => row._id === link.thisId) ) // clear any existing links that could be dupe'd rows = clearRelationshipFields(table, rows) // now get the docs and combine into the rows - let linked = await getFullLinkedDocs(appId, links) + let linked = await getFullLinkedDocs(links) const linkedTables = [] for (let row of rows) { for (let link of links.filter(link => link.thisId === row._id)) { @@ -176,11 +168,7 @@ exports.attachFullLinkedDocs = async (appId, table, rows) => { if (linkedRow) { const linkedTableId = linkedRow.tableId || getRelatedTableForField(table, link.fieldName) - const linkedTable = await getLinkedTable( - db, - linkedTableId, - linkedTables - ) + const linkedTable = await getLinkedTable(linkedTableId, linkedTables) if (linkedTable) { row[link.fieldName].push(processFormulas(linkedTable, linkedRow)) } @@ -192,18 +180,16 @@ exports.attachFullLinkedDocs = async (appId, table, rows) => { /** * This function will take the given enriched rows and squash the links to only contain the primary display field. - * @param {string} appId The app in which the tables/rows/links exist. * @param {object} table The table from which the rows originated. * @param {array} enriched The pre-enriched rows (full docs) which are to be squashed. * @returns {Promise} The rows after having their links squashed to only contain the ID and primary display. 
*/ -exports.squashLinksToPrimaryDisplay = async (appId, table, enriched) => { - const db = new CouchDB(appId) +exports.squashLinksToPrimaryDisplay = async (table, enriched) => { // will populate this as we find them const linkedTables = [table] for (let row of enriched) { // this only fetches the table if its not already in array - const rowTable = await getLinkedTable(db, row.tableId, linkedTables) + const rowTable = await getLinkedTable(row.tableId, linkedTables) for (let [column, schema] of Object.entries(rowTable.schema)) { if (schema.type !== FieldTypes.LINK || !Array.isArray(row[column])) { continue @@ -211,7 +197,7 @@ exports.squashLinksToPrimaryDisplay = async (appId, table, enriched) => { const newLinks = [] for (let link of row[column]) { const linkTblId = link.tableId || getRelatedTableForField(table, column) - const linkedTable = await getLinkedTable(db, linkTblId, linkedTables) + const linkedTable = await getLinkedTable(linkTblId, linkedTables) const obj = { _id: link._id } if (link[linkedTable.primaryDisplay]) { obj.primaryDisplay = link[linkedTable.primaryDisplay] diff --git a/packages/server/src/db/linkedRows/linkUtils.js b/packages/server/src/db/linkedRows/linkUtils.js index 12e72af78d..5af4aa919a 100644 --- a/packages/server/src/db/linkedRows/linkUtils.js +++ b/packages/server/src/db/linkedRows/linkUtils.js @@ -1,8 +1,8 @@ -const CouchDB = require("../index") const Sentry = require("@sentry/node") const { ViewNames, getQueryIndex } = require("../utils") const { FieldTypes } = require("../../constants") const { createLinkView } = require("../views/staticViews") +const { getAppDB } = require("@budibase/backend-core/context") /** * Only needed so that boolean parameters are being used for includeDocs @@ -17,7 +17,6 @@ exports.createLinkView = createLinkView /** * Gets the linking documents, not the linked documents themselves. - * @param {string} args.appId The instance in which we are searching for linked rows. * @param {string} args.tableId The table which we are searching for linked rows against. * @param {string|null} args.fieldName The name of column/field which is being altered, only looking for * linking documents that are related to it. If this is not specified then the table level will be assumed. @@ -30,8 +29,8 @@ exports.createLinkView = createLinkView * (if any). 
*/ exports.getLinkDocuments = async function (args) { - const { appId, tableId, rowId, includeDocs } = args - const db = new CouchDB(appId) + const { tableId, rowId, includeDocs } = args + const db = getAppDB() let params if (rowId != null) { params = { key: [tableId, rowId] } @@ -68,7 +67,7 @@ exports.getLinkDocuments = async function (args) { } catch (err) { // check if the view doesn't exist, it should for all new instances if (err != null && err.name === "not_found") { - await exports.createLinkView(appId) + await exports.createLinkView() return exports.getLinkDocuments(arguments[0]) } else { /* istanbul ignore next */ @@ -89,7 +88,8 @@ exports.getLinkedTableIDs = table => { .map(column => column.tableId) } -exports.getLinkedTable = async (db, id, tables) => { +exports.getLinkedTable = async (id, tables) => { + const db = getAppDB() let linkedTable = tables.find(table => table._id === id) if (linkedTable) { return linkedTable diff --git a/packages/server/src/db/tests/linkController.spec.js b/packages/server/src/db/tests/linkController.spec.js index d45bd99ea2..180cc2b3a0 100644 --- a/packages/server/src/db/tests/linkController.spec.js +++ b/packages/server/src/db/tests/linkController.spec.js @@ -20,7 +20,6 @@ describe("test the link controller", () => { function createLinkController(table, row = null, oldTable = null) { const linkConfig = { - appId: config.getAppId(), tableId: table._id, table, } diff --git a/packages/server/src/db/tests/linkTests.spec.js b/packages/server/src/db/tests/linkTests.spec.js index 8dad7be049..9a309df70a 100644 --- a/packages/server/src/db/tests/linkTests.spec.js +++ b/packages/server/src/db/tests/linkTests.spec.js @@ -1,8 +1,8 @@ const TestConfig = require("../../tests/utilities/TestConfiguration") -const { basicTable, basicLinkedRow } = require("../../tests/utilities/structures") +const { basicTable } = require("../../tests/utilities/structures") const linkUtils = require("../linkedRows/linkUtils") -const links = require("../linkedRows") const CouchDB = require("../index") +const { getAppDB } = require("@budibase/backend-core/context") describe("test link functionality", () => { const config = new TestConfig(false) @@ -11,18 +11,18 @@ describe("test link functionality", () => { let db, table beforeEach(async () => { await config.init() - db = new CouchDB(config.getAppId()) + db = getAppDB() table = await config.createTable() }) it("should be able to retrieve a linked table from a list", async () => { - const retrieved = await linkUtils.getLinkedTable(db, table._id, [table]) + const retrieved = await linkUtils.getLinkedTable(table._id, [table]) expect(retrieved._id).toBe(table._id) }) it("should be able to retrieve a table from DB and update list", async () => { const tables = [] - const retrieved = await linkUtils.getLinkedTable(db, table._id, tables) + const retrieved = await linkUtils.getLinkedTable(table._id, tables) expect(retrieved._id).toBe(table._id) expect(tables[0]).toBeDefined() }) @@ -51,7 +51,6 @@ describe("test link functionality", () => { const db = new CouchDB("test") await db.put({ _id: "_design/database", views: {} }) const output = await linkUtils.getLinkDocuments({ - appId: "test", tableId: "test", rowId: "test", includeDocs: false, diff --git a/packages/server/src/db/views/staticViews.js b/packages/server/src/db/views/staticViews.js index 8e7b101ef5..50b7c305d3 100644 --- a/packages/server/src/db/views/staticViews.js +++ b/packages/server/src/db/views/staticViews.js @@ -1,4 +1,4 @@ -const CouchDB = require("../index") +const { getAppDB 
} = require("@budibase/backend-core/context") const { DocumentTypes, SEPARATOR, @@ -21,12 +21,11 @@ const SCREEN_PREFIX = DocumentTypes.SCREEN + SEPARATOR /** * Creates the link view for the instance, this will overwrite the existing one, but this should only * be called if it is found that the view does not exist. - * @param {string} appId The instance to which the view should be added. * @returns {Promise} The view now exists, please note that the next view of this query will actually build it, * so it may be slow. */ -exports.createLinkView = async appId => { - const db = new CouchDB(appId) +exports.createLinkView = async () => { + const db = getAppDB() const designDoc = await db.get("_design/database") const view = { map: function (doc) { @@ -57,8 +56,8 @@ exports.createLinkView = async appId => { await db.put(designDoc) } -exports.createRoutingView = async appId => { - const db = new CouchDB(appId) +exports.createRoutingView = async () => { + const db = getAppDB() const designDoc = await db.get("_design/database") const view = { // if using variables in a map function need to inject them before use @@ -78,8 +77,8 @@ exports.createRoutingView = async appId => { await db.put(designDoc) } -async function searchIndex(appId, indexName, fnString) { - const db = new CouchDB(appId) +async function searchIndex(indexName, fnString) { + const db = getAppDB() const designDoc = await db.get("_design/database") designDoc.indexes = { [indexName]: { @@ -90,9 +89,8 @@ async function searchIndex(appId, indexName, fnString) { await db.put(designDoc) } -exports.createAllSearchIndex = async appId => { +exports.createAllSearchIndex = async () => { await searchIndex( - appId, SearchIndexes.ROWS, function (doc) { function idx(input, prev) { diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js index 99343937d9..7ed8b16b6f 100644 --- a/packages/server/src/environment.js +++ b/packages/server/src/environment.js @@ -2,7 +2,8 @@ function isTest() { return ( process.env.NODE_ENV === "jest" || process.env.NODE_ENV === "cypress" || - process.env.JEST_WORKER_ID != null + (process.env.JEST_WORKER_ID != null && + process.env.JEST_WORKER_ID !== "null") ) } diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts index 8fe8fedcc8..1341f5abca 100644 --- a/packages/server/src/integrations/utils.ts +++ b/packages/server/src/integrations/utils.ts @@ -52,7 +52,10 @@ export function buildExternalTableId(datasourceId: string, tableName: string) { return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}` } -export function breakExternalTableId(tableId: string) { +export function breakExternalTableId(tableId: string | undefined) { + if (!tableId) { + return {} + } const parts = tableId.split(DOUBLE_SEPARATOR) let tableName = parts.pop() // if they need joined diff --git a/packages/server/src/middleware/authorized.js b/packages/server/src/middleware/authorized.js index 7125ec3246..c8d6497ca3 100644 --- a/packages/server/src/middleware/authorized.js +++ b/packages/server/src/middleware/authorized.js @@ -10,6 +10,7 @@ const { const builderMiddleware = require("./builder") const { isWebhookEndpoint } = require("./utils") const { buildCsrfMiddleware } = require("@budibase/backend-core/auth") +const { getAppId } = require("@budibase/backend-core/context") function hasResource(ctx) { return ctx.resourceId != null @@ -45,7 +46,7 @@ const checkAuthorizedResource = async ( ) => { // get the user's roles const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC - const 
userRoles = await getUserRoleHierarchy(ctx.appId, roleId, { + const userRoles = await getUserRoleHierarchy(roleId, { idOnly: false, }) const permError = "User does not have permission" @@ -81,8 +82,9 @@ module.exports = // get the resource roles let resourceRoles = [] - if (ctx.appId && hasResource(ctx)) { - resourceRoles = await getRequiredResourceRole(ctx.appId, permLevel, ctx) + const appId = getAppId() + if (appId && hasResource(ctx)) { + resourceRoles = await getRequiredResourceRole(permLevel, ctx) } // if the resource is public, proceed diff --git a/packages/server/src/middleware/builder.js b/packages/server/src/middleware/builder.js index d2a8ee80f0..a6404780ff 100644 --- a/packages/server/src/middleware/builder.js +++ b/packages/server/src/middleware/builder.js @@ -5,7 +5,7 @@ const { checkDebounce, setDebounce, } = require("../utilities/redis") -const CouchDB = require("../db") +const { getDB } = require("@budibase/backend-core/db") const { DocumentTypes } = require("../db/utils") const { PermissionTypes } = require("@budibase/backend-core/permissions") const { app: appCache } = require("@budibase/backend-core/cache") @@ -48,7 +48,7 @@ async function updateAppUpdatedAt(ctx) { if (ctx.method === "GET" || (await checkDebounce(appId))) { return } - const db = new CouchDB(appId) + const db = getDB(appId) const metadata = await db.get(DocumentTypes.APP_METADATA) metadata.updatedAt = new Date().toISOString() const response = await db.put(metadata) diff --git a/packages/server/src/middleware/currentapp.js b/packages/server/src/middleware/currentapp.js index 69f80c895b..70dd1bf578 100644 --- a/packages/server/src/middleware/currentapp.js +++ b/packages/server/src/middleware/currentapp.js @@ -11,9 +11,9 @@ const { generateUserMetadataID, isDevAppID } = require("../db/utils") const { dbExists } = require("@budibase/backend-core/db") const { isUserInAppTenant } = require("@budibase/backend-core/tenancy") const { getCachedSelf } = require("../utilities/global") -const CouchDB = require("../db") const env = require("../environment") const { isWebhookEndpoint } = require("./utils") +const { doInAppContext } = require("@budibase/backend-core/context") module.exports = async (ctx, next) => { // try to get the appID from the request @@ -31,7 +31,7 @@ module.exports = async (ctx, next) => { // check the app exists referenced in cookie if (appCookie) { const appId = appCookie.appId - const exists = await dbExists(CouchDB, appId) + const exists = await dbExists(appId) if (!exists) { clearCookie(ctx, Cookies.CurrentApp) return next() @@ -41,13 +41,15 @@ module.exports = async (ctx, next) => { } // deny access to application preview - if ( - isDevAppID(requestAppId) && - !isWebhookEndpoint(ctx) && - (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) - ) { - clearCookie(ctx, Cookies.CurrentApp) - return ctx.redirect("/") + if (!env.isTest()) { + if ( + isDevAppID(requestAppId) && + !isWebhookEndpoint(ctx) && + (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) + ) { + clearCookie(ctx, Cookies.CurrentApp) + return ctx.redirect("/") + } } let appId, @@ -68,44 +70,46 @@ module.exports = async (ctx, next) => { return next() } - let noCookieSet = false - // if the user not in the right tenant then make sure they have no permissions - // need to judge this only based on the request app ID, - if ( - env.MULTI_TENANCY && - ctx.user && - requestAppId && - !isUserInAppTenant(requestAppId) - ) { - // don't error, simply remove the users rights (they are a public user) - delete ctx.user.builder 
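// Editor's sketch (assumed shape, not the PR's middleware): the currentapp change above boils
// down to resolving an app ID and then running the rest of the request inside the app context,
// so downstream handlers can call getAppDB()/getAppId() without being handed an appId.
const { doInAppContext } = require("@budibase/backend-core/context")

function appContextMiddleware(resolveAppId) {
  return async (ctx, next) => {
    const appId = resolveAppId(ctx)
    if (!appId) {
      return next()
    }
    ctx.appId = appId
    return doInAppContext(appId, () => next())
  }
}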
- delete ctx.user.admin - delete ctx.user.roles - roleId = BUILTIN_ROLE_IDS.PUBLIC - noCookieSet = true - } - - ctx.appId = appId - if (roleId) { - ctx.roleId = roleId - const userId = ctx.user ? generateUserMetadataID(ctx.user._id) : null - ctx.user = { - ...ctx.user, - // override userID with metadata one - _id: userId, - userId, - roleId, - role: await getRole(appId, roleId), + return doInAppContext(appId, async () => { + let noCookieSet = false + // if the user not in the right tenant then make sure they have no permissions + // need to judge this only based on the request app ID, + if ( + env.MULTI_TENANCY && + ctx.user && + requestAppId && + !isUserInAppTenant(requestAppId) + ) { + // don't error, simply remove the users rights (they are a public user) + delete ctx.user.builder + delete ctx.user.admin + delete ctx.user.roles + roleId = BUILTIN_ROLE_IDS.PUBLIC + noCookieSet = true } - } - if ( - (requestAppId !== appId || - appCookie == null || - appCookie.appId !== requestAppId) && - !noCookieSet - ) { - setCookie(ctx, { appId }, Cookies.CurrentApp) - } - return next() + ctx.appId = appId + if (roleId) { + ctx.roleId = roleId + const userId = ctx.user ? generateUserMetadataID(ctx.user._id) : null + ctx.user = { + ...ctx.user, + // override userID with metadata one + _id: userId, + userId, + roleId, + role: await getRole(roleId), + } + } + if ( + (requestAppId !== appId || + appCookie == null || + appCookie.appId !== requestAppId) && + !noCookieSet + ) { + setCookie(ctx, { appId }, Cookies.CurrentApp) + } + + return next() + }) } diff --git a/packages/server/src/middleware/tests/authorized.spec.js b/packages/server/src/middleware/tests/authorized.spec.js index 04ef6e2b07..9cfa9d368f 100644 --- a/packages/server/src/middleware/tests/authorized.spec.js +++ b/packages/server/src/middleware/tests/authorized.spec.js @@ -11,6 +11,9 @@ const authorizedMiddleware = require("../authorized") const env = require("../../environment") const { PermissionTypes, PermissionLevels } = require("@budibase/backend-core/permissions") require("@budibase/backend-core").init(require("../../db")) +const { doInAppContext } = require("@budibase/backend-core/context") + +const APP_ID = "" class TestConfiguration { constructor(role) { @@ -23,7 +26,7 @@ class TestConfiguration { request: { url: "" }, - appId: "", + appId: APP_ID, auth: {}, next: this.next, throw: this.throw, @@ -32,7 +35,9 @@ class TestConfiguration { } executeMiddleware() { - return this.middleware(this.ctx, this.next) + return doInAppContext(APP_ID, () => { + return this.middleware(this.ctx, this.next) + }) } setUser(user) { diff --git a/packages/server/src/middleware/tests/currentapp.spec.js b/packages/server/src/middleware/tests/currentapp.spec.js index 27c88f3b48..4e53a6a4c0 100644 --- a/packages/server/src/middleware/tests/currentapp.spec.js +++ b/packages/server/src/middleware/tests/currentapp.spec.js @@ -1,6 +1,11 @@ mockAuthWithNoCookie() mockWorker() +jest.mock("@budibase/backend-core/db", () => ({ + ...jest.requireActual("@budibase/backend-core/db"), + dbExists: () => true, +})) + function mockWorker() { jest.mock("../../utilities/workerRequests", () => ({ getGlobalSelf: () => { @@ -50,6 +55,7 @@ function mockAuthWithCookie() { return "app_test" }, setCookie: jest.fn(), + clearCookie: jest.fn(), getCookie: () => ({appId: "app_different", roleId: "PUBLIC"}), })) jest.mock("@budibase/backend-core/constants", () => ({ diff --git a/packages/server/src/middleware/usageQuota.js b/packages/server/src/middleware/usageQuota.js index 
2cd0836113..d8f028de3a 100644 --- a/packages/server/src/middleware/usageQuota.js +++ b/packages/server/src/middleware/usageQuota.js @@ -1,10 +1,10 @@ -const CouchDB = require("../db") const usageQuota = require("../utilities/usageQuota") const { getUniqueRows } = require("../utilities/usageQuota/rows") const { isExternalTable, isRowId: isExternalRowId, } = require("../integrations/utils") +const { getAppDB } = require("@budibase/backend-core/context") // currently only counting new writes and deletes const METHOD_MAP = { @@ -46,7 +46,7 @@ module.exports = async (ctx, next) => { const usageId = ctx.request.body._id try { if (ctx.appId) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() await db.get(usageId) } return next() diff --git a/packages/server/src/migrations/functions/usageQuotas/syncApps.ts b/packages/server/src/migrations/functions/usageQuotas/syncApps.ts index 0fba4f0f7f..e8559a3af9 100644 --- a/packages/server/src/migrations/functions/usageQuotas/syncApps.ts +++ b/packages/server/src/migrations/functions/usageQuotas/syncApps.ts @@ -1,12 +1,13 @@ -const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") -const { getAllApps } = require("@budibase/backend-core/db") -import CouchDB from "../../../db" +// @ts-ignore +import { getGlobalDB, getTenantId } from "@budibase/backend-core/tenancy" +// @ts-ignore +import { getAllApps } from "@budibase/backend-core/db" import { getUsageQuotaDoc } from "../../../utilities/usageQuota" export const run = async () => { const db = getGlobalDB() // get app count - const devApps = await getAllApps(CouchDB, { dev: true }) + const devApps = await getAllApps({ dev: true }) const appCount = devApps ? devApps.length : 0 // sync app count diff --git a/packages/server/src/migrations/functions/usageQuotas/syncRows.ts b/packages/server/src/migrations/functions/usageQuotas/syncRows.ts index 58767d0c0a..b4323135c9 100644 --- a/packages/server/src/migrations/functions/usageQuotas/syncRows.ts +++ b/packages/server/src/migrations/functions/usageQuotas/syncRows.ts @@ -1,13 +1,14 @@ -const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") -const { getAllApps } = require("@budibase/backend-core/db") -import CouchDB from "../../../db" +// @ts-ignore +import { getGlobalDB, getTenantId } from "@budibase/backend-core/tenancy" +// @ts-ignore +import { getAllApps } from "@budibase/backend-core/db" import { getUsageQuotaDoc } from "../../../utilities/usageQuota" import { getUniqueRows } from "../../../utilities/usageQuota/rows" export const run = async () => { const db = getGlobalDB() // get all rows in all apps - const allApps = await getAllApps(CouchDB, { all: true }) + const allApps = await getAllApps({ all: true }) const appIds = allApps ? allApps.map((app: { appId: any }) => app.appId) : [] const rows = await getUniqueRows(appIds) const rowCount = rows ? 
rows.length : 0 diff --git a/packages/server/src/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.js index 68aa68dc66..6c2b7d4f98 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.js +++ b/packages/server/src/tests/utilities/TestConfiguration.js @@ -1,3 +1,6 @@ +const core = require("@budibase/backend-core") +const CouchDB = require("../../db") +core.init(CouchDB) const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") const env = require("../../environment") const { @@ -17,13 +20,11 @@ const supertest = require("supertest") const { cleanup } = require("../../utilities/fileSystem") const { Cookies, Headers } = require("@budibase/backend-core/constants") const { jwt } = require("@budibase/backend-core/auth") -const core = require("@budibase/backend-core") const { getGlobalDB } = require("@budibase/backend-core/tenancy") const { createASession } = require("@budibase/backend-core/sessions") const { user: userCache } = require("@budibase/backend-core/cache") -const CouchDB = require("../../db") const newid = require("../../db/newid") -core.init(CouchDB) +const context = require("@budibase/backend-core/context") const GLOBAL_USER_ID = "us_uuid1" const EMAIL = "babs@babs.com" @@ -65,11 +66,21 @@ class TestConfiguration { request.request = { body: config, } - if (params) { - request.params = params + async function run() { + if (params) { + request.params = params + } + await controlFunc(request) + return request.body + } + // check if already in a context + if (context.getAppId() == null) { + return context.doInAppContext(this.appId, async () => { + return run() + }) + } else { + return run() } - await controlFunc(request) - return request.body } async globalUser({ @@ -175,6 +186,7 @@ class TestConfiguration { // create dev app this.app = await this._req({ name: appName }, null, controllers.app.create) this.appId = this.app.appId + context.updateAppId(this.appId) // create production app this.prodApp = await this.deploy() @@ -187,14 +199,16 @@ class TestConfiguration { } async deploy() { - const deployment = await this._req(null, null, controllers.deploy.deployApp) - const prodAppId = deployment.appId.replace("_dev", "") - const appPackage = await this._req( - null, - { appId: prodAppId }, - controllers.app.fetchAppPackage - ) - return appPackage.application + await this._req(null, null, controllers.deploy.deployApp) + const prodAppId = this.getAppId().replace("_dev", "") + return context.doInAppContext(prodAppId, async () => { + const appPackage = await this._req( + null, + { appId: prodAppId }, + controllers.app.fetchAppPackage + ) + return appPackage.application + }) } async updateTable(config = null) { @@ -423,42 +437,47 @@ class TestConfiguration { async login({ roleId, userId, builder, prodApp = false } = {}) { const appId = prodApp ? this.prodAppId : this.appId - - userId = !userId ? `us_uuid1` : userId - if (!this.request) { - throw "Server has not been opened, cannot login." - } - // make sure the user exists in the global DB - if (roleId !== BUILTIN_ROLE_IDS.PUBLIC) { - await this.globalUser({ - userId, - builder, - roles: { [this.prodAppId]: roleId }, + return context.doInAppContext(appId, async () => { + userId = !userId ? `us_uuid1` : userId + if (!this.request) { + throw "Server has not been opened, cannot login." 
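// --- Illustrative sketch (not part of the patch): the conditional-context pattern that
// TestConfiguration._req above relies on. If the caller is already inside an app
// context the task simply runs; otherwise doInAppContext() establishes one for the
// duration of the task, so everything underneath can rely on getAppDB()/getAppId()
// rather than an explicit appId argument. The helper name is illustrative; the
// imports are the context exports introduced by this PR.
const { doInAppContext, getAppId } = require("@budibase/backend-core/context")

async function runWithAppContext(appId, task) {
  // a middleware or an outer doInAppContext() call may have set the context already
  if (getAppId() != null) {
    return task()
  }
  // otherwise bind the app ID for the lifetime of this task
  return doInAppContext(appId, task)
}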
+ } + // make sure the user exists in the global DB + if (roleId !== BUILTIN_ROLE_IDS.PUBLIC) { + await this.globalUser({ + id: userId, + builder, + roles: { [this.prodAppId]: roleId }, + }) + } + await createASession(userId, { + sessionId: "sessionid", + tenantId: TENANT_ID, }) - } - // have to fake this - const auth = { - userId, - sessionId: "sessionid", - tenantId: TENANT_ID, - } - const app = { - roleId: roleId, - appId, - } - const authToken = jwt.sign(auth, env.JWT_SECRET) - const appToken = jwt.sign(app, env.JWT_SECRET) + // have to fake this + const auth = { + userId, + sessionId: "sessionid", + tenantId: TENANT_ID, + } + const app = { + roleId: roleId, + appId, + } + const authToken = jwt.sign(auth, env.JWT_SECRET) + const appToken = jwt.sign(app, env.JWT_SECRET) - // returning necessary request headers - await userCache.invalidateUser(userId) - return { - Accept: "application/json", - Cookie: [ - `${Cookies.Auth}=${authToken}`, - `${Cookies.CurrentApp}=${appToken}`, - ], - [Headers.APP_ID]: appId, - } + // returning necessary request headers + await userCache.invalidateUser(userId) + return { + Accept: "application/json", + Cookie: [ + `${Cookies.Auth}=${authToken}`, + `${Cookies.CurrentApp}=${appToken}`, + ], + [Headers.APP_ID]: appId, + } + }) } } diff --git a/packages/server/src/threads/automation.js b/packages/server/src/threads/automation.js index 2a39773520..c0843a286c 100644 --- a/packages/server/src/threads/automation.js +++ b/packages/server/src/threads/automation.js @@ -5,11 +5,11 @@ const automationUtils = require("../automations/automationUtils") const AutomationEmitter = require("../events/AutomationEmitter") const { processObject } = require("@budibase/string-templates") const { DEFAULT_TENANT_ID } = require("@budibase/backend-core/constants") -const CouchDB = require("../db") const { DocumentTypes, isDevAppID } = require("../db/utils") const { doInTenant } = require("@budibase/backend-core/tenancy") const usage = require("../utilities/usageQuota") const { definitions: triggerDefs } = require("../automations/triggerInfo") +const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId const CRON_STEP_ID = triggerDefs.CRON.stepId @@ -59,11 +59,10 @@ class Orchestrator { } async getApp() { - const appId = this._appId if (this._app) { return this._app } - const db = new CouchDB(appId) + const db = getAppDB() this._app = await db.get(DocumentTypes.APP_METADATA) return this._app } @@ -131,16 +130,19 @@ class Orchestrator { } module.exports = (input, callback) => { - const automationOrchestrator = new Orchestrator( - input.data.automation, - input.data.event - ) - automationOrchestrator - .execute() - .then(response => { - callback(null, response) - }) - .catch(err => { - callback(err) - }) + const appId = input.data.event.appId + doInAppContext(appId, () => { + const automationOrchestrator = new Orchestrator( + input.data.automation, + input.data.event + ) + automationOrchestrator + .execute() + .then(response => { + callback(null, response) + }) + .catch(err => { + callback(err) + }) + }) } diff --git a/packages/server/src/threads/query.js b/packages/server/src/threads/query.js index ff3e101d48..be0260882e 100644 --- a/packages/server/src/threads/query.js +++ b/packages/server/src/threads/query.js @@ -3,14 +3,13 @@ threadUtils.threadSetup() const ScriptRunner = require("../utilities/scriptRunner") const { integrations } = require("../integrations") const { processStringSync } = 
require("@budibase/string-templates") -const CouchDB = require("../db") +const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") const IS_TRIPLE_BRACE = new RegExp(/^{{3}.*}{3}$/) const IS_HANDLEBARS = new RegExp(/^{{2}.*}{2}$/) class QueryRunner { constructor(input, flags = { noRecursiveQuery: false }) { - this.appId = input.appId this.datasource = input.datasource this.queryVerb = input.queryVerb this.fields = input.fields @@ -104,12 +103,11 @@ class QueryRunner { } async runAnotherQuery(queryId, parameters) { - const db = new CouchDB(this.appId) + const db = getAppDB() const query = await db.get(queryId) const datasource = await db.get(query.datasourceId) return new QueryRunner( { - appId: this.appId, datasource, queryVerb: query.queryVerb, fields: query.fields, @@ -223,12 +221,14 @@ class QueryRunner { } module.exports = (input, callback) => { - const Runner = new QueryRunner(input) - Runner.execute() - .then(response => { - callback(null, response) - }) - .catch(err => { - callback(err) - }) + doInAppContext(input.appId, () => { + const Runner = new QueryRunner(input) + Runner.execute() + .then(response => { + callback(null, response) + }) + .catch(err => { + callback(err) + }) + }) } diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js index b8ddb1a356..904b4ced18 100644 --- a/packages/server/src/utilities/fileSystem/index.js +++ b/packages/server/src/utilities/fileSystem/index.js @@ -1,5 +1,4 @@ const { budibaseTempDir } = require("../budibaseDir") -const { isDev } = require("../index") const fs = require("fs") const { join } = require("path") const uuid = require("uuid/v4") @@ -20,6 +19,7 @@ const { LINK_USER_METADATA_PREFIX, } = require("../../db/utils") const MemoryStream = require("memorystream") +const { getAppId } = require("@budibase/backend-core/context") const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules") @@ -51,7 +51,7 @@ exports.init = () => { * everything required to function is ready. 
*/ exports.checkDevelopmentEnvironment = () => { - if (!isDev()) { + if (!env.isDev() || env.isTest()) { return } if (!fs.existsSync(budibaseTempDir())) { @@ -251,7 +251,8 @@ exports.downloadTemplate = async (type, name) => { /** * Retrieves component libraries from object store (or tmp symlink if in local) */ -exports.getComponentLibraryManifest = async (appId, library) => { +exports.getComponentLibraryManifest = async library => { + const appId = getAppId() const filename = "manifest.json" /* istanbul ignore next */ // when testing in cypress and so on we need to get the package diff --git a/packages/server/src/utilities/global.js b/packages/server/src/utilities/global.js index 7ef1c09405..f8ec5ea647 100644 --- a/packages/server/src/utilities/global.js +++ b/packages/server/src/utilities/global.js @@ -3,7 +3,7 @@ const { getGlobalIDFromUserMetadataID, } = require("../db/utils") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") -const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getProdAppID } = require("@budibase/backend-core/db") const { getGlobalUserParams } = require("@budibase/backend-core/db") const { user: userCache } = require("@budibase/backend-core/cache") const { @@ -11,8 +11,10 @@ const { isUserInAppTenant, } = require("@budibase/backend-core/tenancy") const env = require("../environment") +const { getAppId } = require("@budibase/backend-core/context") -exports.updateAppRole = (appId, user) => { +exports.updateAppRole = (user, { appId } = {}) => { + appId = appId || getAppId() if (!user || !user.roles) { return user } @@ -24,7 +26,7 @@ exports.updateAppRole = (appId, user) => { return user } // always use the deployed app - user.roleId = user.roles[getDeployedAppID(appId)] + user.roleId = user.roles[getProdAppID(appId)] // if a role wasn't found then either set as admin (builder) or public (everyone else) if (!user.roleId && user.builder && user.builder.global) { user.roleId = BUILTIN_ROLE_IDS.ADMIN @@ -35,18 +37,18 @@ exports.updateAppRole = (appId, user) => { return user } -function processUser(appId, user) { +function processUser(user, { appId } = {}) { if (user) { delete user.password } - return exports.updateAppRole(appId, user) + return exports.updateAppRole(user, { appId }) } exports.getCachedSelf = async (ctx, appId) => { // this has to be tenant aware, can't depend on the context to find it out // running some middlewares before the tenancy causes context to break const user = await userCache.getUser(ctx.user._id) - return processUser(appId, user) + return processUser(user, { appId }) } exports.getRawGlobalUser = async userId => { @@ -54,12 +56,13 @@ exports.getRawGlobalUser = async userId => { return db.get(getGlobalIDFromUserMetadataID(userId)) } -exports.getGlobalUser = async (appId, userId) => { +exports.getGlobalUser = async userId => { let user = await exports.getRawGlobalUser(userId) - return processUser(appId, user) + return processUser(user) } -exports.getGlobalUsers = async (appId = null, users = null) => { +exports.getGlobalUsers = async (users = null) => { + const appId = getAppId() const db = getGlobalDB() let globalUsers if (users) { @@ -86,11 +89,11 @@ exports.getGlobalUsers = async (appId = null, users = null) => { if (!appId) { return globalUsers } - return globalUsers.map(user => exports.updateAppRole(appId, user)) + return globalUsers.map(user => exports.updateAppRole(user)) } -exports.getGlobalUsersFromMetadata = async (appId, users) => { - const globalUsers = await exports.getGlobalUsers(appId, users) 
+exports.getGlobalUsersFromMetadata = async users => { + const globalUsers = await exports.getGlobalUsers(users) return users.map(user => { const globalUser = globalUsers.find( globalUser => globalUser && user._id.includes(globalUser._id) diff --git a/packages/server/src/utilities/index.js b/packages/server/src/utilities/index.js index 0dba11141c..d1e277541a 100644 --- a/packages/server/src/utilities/index.js +++ b/packages/server/src/utilities/index.js @@ -1,9 +1,9 @@ const env = require("../environment") const { OBJ_STORE_DIRECTORY } = require("../constants") const { sanitizeKey } = require("@budibase/backend-core/objectStore") -const CouchDB = require("../db") const { generateMetadataID } = require("../db/utils") const Readable = require("stream").Readable +const { getAppDB } = require("@budibase/backend-core/context") const BB_CDN = "https://cdn.budi.live" @@ -73,8 +73,8 @@ exports.attachmentsRelativeURL = attachmentKey => { ) } -exports.updateEntityMetadata = async (appId, type, entityId, updateFn) => { - const db = new CouchDB(appId) +exports.updateEntityMetadata = async (type, entityId, updateFn) => { + const db = getAppDB() const id = generateMetadataID(type, entityId) // read it to see if it exists, we'll overwrite it no matter what let rev, @@ -99,14 +99,14 @@ exports.updateEntityMetadata = async (appId, type, entityId, updateFn) => { } } -exports.saveEntityMetadata = async (appId, type, entityId, metadata) => { - return exports.updateEntityMetadata(appId, type, entityId, () => { +exports.saveEntityMetadata = async (type, entityId, metadata) => { + return exports.updateEntityMetadata(type, entityId, () => { return metadata }) } -exports.deleteEntityMetadata = async (appId, type, entityId) => { - const db = new CouchDB(appId) +exports.deleteEntityMetadata = async (type, entityId) => { + const db = getAppDB() const id = generateMetadataID(type, entityId) let rev try { @@ -141,16 +141,6 @@ exports.stringToReadStream = string => { }) } -exports.doesDatabaseExist = async dbName => { - try { - const db = new CouchDB(dbName, { skip_setup: true }) - const info = await db.info() - return info && !info.error - } catch (err) { - return false - } -} - exports.formatBytes = bytes => { const units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"] const byteIncrements = 1024 diff --git a/packages/server/src/utilities/routing/index.js b/packages/server/src/utilities/routing/index.js index 541733dcc4..b68001c3c3 100644 --- a/packages/server/src/utilities/routing/index.js +++ b/packages/server/src/utilities/routing/index.js @@ -1,9 +1,9 @@ -const CouchDB = require("../../db") const { createRoutingView } = require("../../db/views/staticViews") const { ViewNames, getQueryIndex, UNICODE_MAX } = require("../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") -exports.getRoutingInfo = async appId => { - const db = new CouchDB(appId) +exports.getRoutingInfo = async () => { + const db = getAppDB() try { const allRouting = await db.query(getQueryIndex(ViewNames.ROUTING), { startKey: "", @@ -14,8 +14,8 @@ exports.getRoutingInfo = async appId => { // check if the view doesn't exist, it should for all new instances /* istanbul ignore next */ if (err != null && err.name === "not_found") { - await createRoutingView(appId) - return exports.getRoutingInfo(appId) + await createRoutingView() + return exports.getRoutingInfo() } else { throw err } diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js index 
dc56312d63..18e0b14de6 100644 --- a/packages/server/src/utilities/rowProcessor/index.js +++ b/packages/server/src/utilities/rowProcessor/index.js @@ -7,10 +7,10 @@ const { deleteFiles } = require("../../utilities/fileSystem/utilities") const { ObjectStoreBuckets } = require("../../constants") const { isProdAppID, - getDeployedAppID, + getProdAppID, dbExists, } = require("@budibase/backend-core/db") -const CouchDB = require("../../db") +const { getAppId } = require("@budibase/backend-core/context") const BASE_AUTO_ID = 1 @@ -253,26 +253,20 @@ exports.inputProcessing = ( /** * This function enriches the input rows with anything they are supposed to contain, for example * link records or attachment links. - * @param {string} appId the app in which the request is looking for enriched rows. * @param {object} table the table from which these rows came from originally, this is used to determine * the schema of the rows and then enrich. * @param {object[]|object} rows the rows which are to be enriched. * @param {object} opts used to set some options for the output, such as disabling relationship squashing. * @returns {object[]|object} the enriched rows will be returned. */ -exports.outputProcessing = async ( - { appId }, - table, - rows, - opts = { squash: true } -) => { +exports.outputProcessing = async (table, rows, opts = { squash: true }) => { let wasArray = true if (!(rows instanceof Array)) { rows = [rows] wasArray = false } // attach any linked row information - let enriched = await linkRows.attachFullLinkedDocs(appId, table, rows) + let enriched = await linkRows.attachFullLinkedDocs(table, rows) // process formulas enriched = processFormulas(table, enriched, { dynamic: true }) @@ -291,18 +285,13 @@ exports.outputProcessing = async ( } } if (opts.squash) { - enriched = await linkRows.squashLinksToPrimaryDisplay( - appId, - table, - enriched - ) + enriched = await linkRows.squashLinksToPrimaryDisplay(table, enriched) } return wasArray ? enriched : enriched[0] } /** * Clean up any attachments that were attached to a row. - * @param {string} appId The ID of the app from which a row is being deleted. * @param {object} table The table from which a row is being removed. * @param {any} row optional - the row being removed. * @param {any} rows optional - if multiple rows being deleted can do this in bulk. @@ -311,15 +300,12 @@ exports.outputProcessing = async ( * deleted attachment columns. * @return {Promise} When all attachments have been removed this will return. */ -exports.cleanupAttachments = async ( - appId, - table, - { row, rows, oldRow, oldTable } -) => { +exports.cleanupAttachments = async (table, { row, rows, oldRow, oldTable }) => { + const appId = getAppId() if (!isProdAppID(appId)) { - const prodAppId = getDeployedAppID(appId) + const prodAppId = getProdAppID(appId) // if prod exists, then don't allow deleting - const exists = await dbExists(CouchDB, prodAppId) + const exists = await dbExists(prodAppId) if (exists) { return } diff --git a/packages/server/src/utilities/usageQuota/index.js b/packages/server/src/utilities/usageQuota/index.js index b0ff310aa3..e27877b977 100644 --- a/packages/server/src/utilities/usageQuota/index.js +++ b/packages/server/src/utilities/usageQuota/index.js @@ -52,6 +52,7 @@ exports.getUsageQuotaDoc = async db => { * Given a specified tenantId this will add to the usage object for the specified property. * @param {string} property The property which is to be added to (within the nested usageQuota object). 
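// --- Illustrative sketch (not part of the patch): the dev/prod guard used by
// cleanupAttachments above, shown in isolation. The app ID now comes from the
// request context instead of a parameter, and attachment deletion is skipped for a
// dev app whose production counterpart still exists, since the deployed app would
// still reference the same files. The helper name and shape are illustrative; the
// imports are the ones this PR switches to.
const { getAppId } = require("@budibase/backend-core/context")
const {
  isProdAppID,
  getProdAppID,
  dbExists,
} = require("@budibase/backend-core/db")

async function devAppSharesFilesWithProd() {
  const appId = getAppId()
  if (isProdAppID(appId)) {
    return false
  }
  // dev app: check whether a deployed (prod) database exists for it
  const prodAppId = getProdAppID(appId)
  return await dbExists(prodAppId)
}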
* @param {number} usage The amount (this can be negative) to adjust the number by. + * @param {object} opts optional - options such as dryRun, to check what update will do. * @returns {Promise} When this completes the API key will now be up to date - the quota period may have * also been reset after this call. */ diff --git a/packages/server/src/utilities/usageQuota/rows.js b/packages/server/src/utilities/usageQuota/rows.js index 67ad07410d..378caffc46 100644 --- a/packages/server/src/utilities/usageQuota/rows.js +++ b/packages/server/src/utilities/usageQuota/rows.js @@ -23,6 +23,7 @@ const getAppPairs = appIds => { } const getAppRows = async appId => { + // need to specify the app ID, as this is used for different apps in one call const appDb = new CouchDB(appId) const response = await appDb.allDocs( getRowParams(null, null, { diff --git a/packages/server/src/utilities/users.js b/packages/server/src/utilities/users.js index 6144397bf1..b3601986d8 100644 --- a/packages/server/src/utilities/users.js +++ b/packages/server/src/utilities/users.js @@ -1,13 +1,13 @@ -const CouchDB = require("../db") const { InternalTables } = require("../db/utils") const { getGlobalUser } = require("../utilities/global") +const { getAppDB } = require("@budibase/backend-core/context") exports.getFullUser = async (ctx, userId) => { - const global = await getGlobalUser(ctx.appId, userId) + const global = await getGlobalUser(userId) let metadata try { // this will throw an error if the db doesn't exist, or there is no appId - const db = new CouchDB(ctx.appId) + const db = getAppDB() metadata = await db.get(userId) } catch (err) { // it is fine if there is no user metadata, just remove global db info diff --git a/packages/server/src/utilities/workerRequests.js b/packages/server/src/utilities/workerRequests.js index 5e46f1678f..91db63d2a4 100644 --- a/packages/server/src/utilities/workerRequests.js +++ b/packages/server/src/utilities/workerRequests.js @@ -1,7 +1,7 @@ const fetch = require("node-fetch") const env = require("../environment") const { checkSlashesInUrl } = require("./index") -const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getProdAppID } = require("@budibase/backend-core/db") const { updateAppRole } = require("./global") const { Headers } = require("@budibase/backend-core/constants") const { getTenantId, isTenantIdSet } = require("@budibase/backend-core/tenancy") @@ -70,15 +70,15 @@ exports.getGlobalSelf = async (ctx, appId = null) => { } let json = await response.json() if (appId) { - json = updateAppRole(appId, json) + json = updateAppRole(json) } return json } exports.removeAppFromUserRoles = async (ctx, appId) => { - const deployedAppId = getDeployedAppID(appId) + const prodAppId = getProdAppID(appId) const response = await fetch( - checkSlashesInUrl(env.WORKER_URL + `/api/global/roles/${deployedAppId}`), + checkSlashesInUrl(env.WORKER_URL + `/api/global/roles/${prodAppId}`), request(ctx, { method: "DELETE", }) diff --git a/packages/worker/src/api/controllers/global/configs.js b/packages/worker/src/api/controllers/global/configs.js index fc0aa868a3..604e7d0e93 100644 --- a/packages/worker/src/api/controllers/global/configs.js +++ b/packages/worker/src/api/controllers/global/configs.js @@ -11,7 +11,6 @@ const { upload, ObjectStoreBuckets, } = require("@budibase/backend-core/objectStore") -const CouchDB = require("../../../db") const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") const env = require("../../../environment") const { googleCallbackUrl, 
oidcCallbackUrl } = require("./auth") @@ -252,7 +251,7 @@ exports.configChecklist = async function (ctx) { // TODO: Watch get started video // Apps exist - const apps = await getAllApps(CouchDB, { idsOnly: true }) + const apps = await getAllApps({ idsOnly: true }) // They have set up SMTP const smtpConfig = await getScopedFullConfig(db, { diff --git a/packages/worker/src/api/controllers/global/roles.js b/packages/worker/src/api/controllers/global/roles.js index 3c977a6290..96de0e4753 100644 --- a/packages/worker/src/api/controllers/global/roles.js +++ b/packages/worker/src/api/controllers/global/roles.js @@ -1,15 +1,15 @@ const { getAllRoles } = require("@budibase/backend-core/roles") const { getAllApps, - getDeployedAppID, + getProdAppID, DocumentTypes, } = require("@budibase/backend-core/db") -const CouchDB = require("../../../db") +const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") exports.fetch = async ctx => { const tenantId = ctx.user.tenantId // always use the dev apps as they'll be most up to date (true) - const apps = await getAllApps(CouchDB, { tenantId, all: true }) + const apps = await getAllApps({ tenantId, all: true }) const promises = [] for (let app of apps) { // use dev app IDs @@ -18,7 +18,7 @@ exports.fetch = async ctx => { const roles = await Promise.all(promises) const response = {} for (let app of apps) { - const deployedAppId = getDeployedAppID(app.appId) + const deployedAppId = getProdAppID(app.appId) response[deployedAppId] = { roles: roles.shift(), name: app.name, @@ -31,12 +31,14 @@ exports.fetch = async ctx => { exports.find = async ctx => { const appId = ctx.params.appId - const db = new CouchDB(appId) - const app = await db.get(DocumentTypes.APP_METADATA) - ctx.body = { - roles: await getAllRoles(appId), - name: app.name, - version: app.version, - url: app.url, - } + await doInAppContext(appId, async () => { + const db = getAppDB() + const app = await db.get(DocumentTypes.APP_METADATA) + ctx.body = { + roles: await getAllRoles(), + name: app.name, + version: app.version, + url: app.url, + } + }) } diff --git a/packages/worker/yarn.lock b/packages/worker/yarn.lock index 48df51e68f..e8e277349e 100644 --- a/packages/worker/yarn.lock +++ b/packages/worker/yarn.lock @@ -286,67 +286,6 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/backend-core@^1.0.49-alpha.4": - version "1.0.49-alpha.4" - resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.49-alpha.4.tgz#c9620f95a06e77f665b2a64c32eeb1f355841a73" - integrity sha512-j9+GXXZXvtShX1jMnkRH43eJjjqdAqbmlR5zmguw6TI2Ft7sjB9FZ+/NK07X58Uvc0sVFizw/n6iKGQOUwWDdg== - dependencies: - "@techpass/passport-openidconnect" "^0.3.0" - aws-sdk "^2.901.0" - bcryptjs "^2.4.3" - cls-hooked "^4.2.2" - ioredis "^4.27.1" - jsonwebtoken "^8.5.1" - koa-passport "^4.1.4" - lodash "^4.17.21" - lodash.isarguments "^3.1.0" - node-fetch "^2.6.1" - passport-google-auth "^1.0.2" - passport-google-oauth "^2.0.0" - passport-jwt "^4.0.0" - passport-local "^1.0.0" - sanitize-s3-objectkey "^0.0.1" - tar-fs "^2.1.1" - uuid "^8.3.2" - zlib "^1.0.5" - -"@budibase/handlebars-helpers@^0.11.7": - version "0.11.8" - resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.11.8.tgz#6953d29673a8c5c407e096c0a84890465c7ce841" - integrity 
sha512-ggWJUt0GqsHFAEup5tlWlcrmYML57nKhpNGGLzVsqXVYN8eVmf3xluYmmMe7fDYhQH0leSprrdEXmsdFQF3HAQ== - dependencies: - array-sort "^1.0.0" - define-property "^2.0.2" - extend-shallow "^3.0.2" - for-in "^1.0.2" - get-object "^0.2.0" - get-value "^3.0.1" - handlebars "^4.7.7" - handlebars-utils "^1.0.6" - has-value "^2.0.2" - helper-md "^0.2.2" - html-tag "^2.0.0" - is-even "^1.0.0" - is-glob "^4.0.1" - kind-of "^6.0.3" - micromatch "^3.1.5" - relative "^3.0.2" - striptags "^3.1.1" - to-gfm-code-block "^0.1.1" - year "^0.2.1" - -"@budibase/string-templates@^1.0.49-alpha.4": - version "1.0.49-alpha.4" - resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.49-alpha.4.tgz#cb3c138fe734436d404314f0af691a8f6fbb7e8f" - integrity sha512-1v2marwEfziTGihtr2PHDo2rMGRB3WWxC2CCobjSFN0sPy1PlRHoF3QZVutMqoh0C4gVZ50rww1vw8xj2rNgvQ== - dependencies: - "@budibase/handlebars-helpers" "^0.11.7" - dayjs "^1.10.4" - handlebars "^4.7.6" - handlebars-utils "^1.0.6" - lodash "^4.17.20" - vm2 "^3.9.4" - "@cspotcode/source-map-consumer@0.8.0": version "0.8.0" resolved "https://registry.yarnpkg.com/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz#33bf4b7b39c178821606f669bbc447a6a629786b" @@ -1242,7 +1181,7 @@ arg@^4.1.0: resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== -argparse@^1.0.10, argparse@^1.0.7: +argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== @@ -1264,40 +1203,11 @@ argsarray@0.0.1: resolved "https://registry.yarnpkg.com/argsarray/-/argsarray-0.0.1.tgz#6e7207b4ecdb39b0af88303fa5ae22bda8df61cb" integrity sha1-bnIHtOzbObCviDA/pa4ivajfYcs= -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - -array-sort@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-sort/-/array-sort-1.0.0.tgz#e4c05356453f56f53512a7d1d6123f2c54c0a88a" - integrity sha512-ihLeJkonmdiAsD7vpgN3CRcx2J2S0TiYW+IS/5zHBI7mKUq3ySvBdzzBfD236ubDBQFiiyG3SWCPc+msQ9KoYg== - dependencies: - default-compare "^1.0.0" - get-value "^2.0.6" - kind-of "^5.0.2" - array-union@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -array-unique@^0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= - asap@^2.0.0: version "2.0.6" resolved 
"https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" @@ -1315,11 +1225,6 @@ assert-plus@1.0.0, assert-plus@^1.0.0: resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= - ast-types@0.9.6: version "0.9.6" resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.9.6.tgz#102c9e9e9005d3e7e3829bf0c4fa24ee862ee9b9" @@ -1330,43 +1235,17 @@ astral-regex@^1.0.0: resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg== -async-hook-jl@^1.7.6: - version "1.7.6" - resolved "https://registry.yarnpkg.com/async-hook-jl/-/async-hook-jl-1.7.6.tgz#4fd25c2f864dbaf279c610d73bf97b1b28595e68" - integrity sha512-gFaHkFfSxTjvoxDMYqDuGHlcRyUuamF8s+ZTtJdDzqjws4mCt7v0vuV79/E2Wr2/riMQgtG4/yUtXWs1gZ7JMg== - dependencies: - stack-chain "^1.3.7" - -async@~2.1.4: - version "2.1.5" - resolved "https://registry.yarnpkg.com/async/-/async-2.1.5.tgz#e587c68580994ac67fc56ff86d3ac56bdbe810bc" - integrity sha1-5YfGhYCZSsZ/xW/4bTrFa9voELw= - dependencies: - lodash "^4.14.0" - asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= -atob@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" - integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== - atomic-sleep@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== -autolinker@~0.28.0: - version "0.28.1" - resolved "https://registry.yarnpkg.com/autolinker/-/autolinker-0.28.1.tgz#0652b491881879f0775dace0cdca3233942a4e47" - integrity sha1-BlK0kYgYefB3XazgzcoyM5QqTkc= - dependencies: - gulp-header "^1.7.1" - -aws-sdk@^2.811.0, aws-sdk@^2.901.0: +aws-sdk@^2.811.0: version "2.1066.0" resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1066.0.tgz#2a9b00d983f3c740a7adda18d4e9a5c34d4d3887" integrity sha512-9BZPdJgIvau8Jf2l3PxInNqQd733uKLqGGDywMV71duxNTLgdBZe2zvCkbgl22+ldC8R2LVMdS64DzchfQIxHg== @@ -1472,19 +1351,6 @@ base64url@3.x.x, base64url@^3.0.1: resolved "https://registry.yarnpkg.com/base64url/-/base64url-3.0.1.tgz#6399d572e2bc3f90a9a8b22d5dbb0a32d33f788d" integrity sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A== -base@^0.11.1: - version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" - integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" - bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved 
"https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" @@ -1502,15 +1368,6 @@ binary-extensions@^2.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== -bl@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - boxen@^5.0.0: version "5.1.2" resolved "https://registry.yarnpkg.com/boxen/-/boxen-5.1.2.tgz#788cb686fc83c1f486dfa8a40c68fc2b831d2b50" @@ -1533,22 +1390,6 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -braces@^2.3.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== - dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" - braces@^3.0.1, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" @@ -1628,21 +1469,6 @@ bytes@^3.0.0: resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" - integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== - dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value "^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" - cache-content-type@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-content-type/-/cache-content-type-1.0.1.tgz#035cde2b08ee2129f4a8315ea8f00a00dba1453c" @@ -1762,11 +1588,6 @@ chokidar@^3.5.2: optionalDependencies: fsevents "~2.3.2" -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - ci-info@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" @@ -1782,16 +1603,6 @@ cjs-module-lexer@^1.0.0: resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" - integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== - dependencies: - 
arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" - cli-boxes@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f" @@ -1830,20 +1641,6 @@ clone-response@^1.0.2: dependencies: mimic-response "^1.0.0" -cls-hooked@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/cls-hooked/-/cls-hooked-4.2.2.tgz#ad2e9a4092680cdaffeb2d3551da0e225eae1908" - integrity sha512-J4Xj5f5wq/4jAvcdgoGsL3G103BtWpZrMo8NEinRltN+xpTZdI+M38pyQqhuFU/P792xkMFvnKSf+Lm81U1bxw== - dependencies: - async-hook-jl "^1.7.6" - emitter-listener "^1.0.1" - semver "^5.4.1" - -cluster-key-slot@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz#30474b2a981fb12172695833052bc0d01336d10d" - integrity sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw== - co-body@^5.1.1: version "5.2.0" resolved "https://registry.yarnpkg.com/co-body/-/co-body-5.2.0.tgz#5a0a658c46029131e0e3a306f67647302f71c124" @@ -1864,14 +1661,6 @@ collect-v8-coverage@^1.0.0: resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - dependencies: - map-visit "^1.0.0" - object-visit "^1.0.0" - color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" @@ -1923,7 +1712,7 @@ commoner@^0.10.1: q "^1.1.2" recast "^0.11.17" -component-emitter@^1.2.1, component-emitter@^1.3.0: +component-emitter@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== @@ -1940,13 +1729,6 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-with-sourcemaps@*: - version "1.1.0" - resolved "https://registry.yarnpkg.com/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz#d4ea93f05ae25790951b99e7b3b09e3908a4082e" - integrity sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg== - dependencies: - source-map "^0.6.1" - configstore@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96" @@ -1996,11 +1778,6 @@ cookies@~0.8.0: depd "~2.0.0" keygrip "~1.1.0" -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= - copyfiles@^2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/copyfiles/-/copyfiles-2.4.1.tgz#d2dcff60aaad1015f09d0b66e7f0f1c5cd3c5da5" @@ -2099,11 +1876,6 @@ dateformat@^4.5.1: resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5" integrity 
sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA== -dayjs@^1.10.4: - version "1.10.7" - resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.10.7.tgz#2cf5f91add28116748440866a0a1d26f3a6ce468" - integrity sha512-P6twpd70BcPK34K26uJ1KT3wlhpuOAPoMwJzpsIWUxHZ7wpmbdZL/hQqBDfz7hGurYSa5PhzdhDHtt319hL3ig== - debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3: version "4.3.3" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.3.tgz#04266e0b70a98d4462e6e288e38259213332b664" @@ -2111,13 +1883,6 @@ debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, d dependencies: ms "2.1.2" -debug@^2.2.0, debug@^2.3.3: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - debug@^3.1.0, debug@^3.2.7: version "3.2.7" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" @@ -2130,11 +1895,6 @@ decimal.js@^10.2.1: resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" integrity sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ== -decode-uri-component@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" - integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= - decompress-response@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" @@ -2174,13 +1934,6 @@ deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -default-compare@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/default-compare/-/default-compare-1.0.0.tgz#cb61131844ad84d84788fb68fd01681ca7781a2f" - integrity sha512-QWfXlM0EkAbqOCbD/6HjdwT19j7WCkMyiRhWilc4H9/5h/RzTF9gv5LYh1+CmDV5d1rki6KAWLtQale0xt20eQ== - dependencies: - kind-of "^5.0.2" - defer-to-connect@^1.0.1: version "1.1.3" resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" @@ -2199,28 +1952,6 @@ deferred-leveldown@~5.3.0: abstract-leveldown "~6.2.1" inherits "^2.0.3" -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= - dependencies: - is-descriptor "^0.1.0" - -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= - dependencies: - is-descriptor "^1.0.0" - -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== - dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" - defined@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" @@ -2236,11 +1967,6 @@ delegates@^1.0.0: resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= -denque@^1.1.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/denque/-/denque-1.5.1.tgz#07f670e29c9a78f8faecb2566a1e2c11929c5cbf" - integrity sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw== - depd@^2.0.0, depd@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" @@ -2360,13 +2086,6 @@ electron-to-chromium@^1.4.17: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.58.tgz#cd980b08338210b591c25492857a518fe286b1d4" integrity sha512-7LXwnKyqcEaMFVXOer+2JPfFs1D+ej7yRRrfZoIH1YlLQZ81OvBNwSCBBLtExVkoMQQgOWwO0FbZVge6U/8rhQ== -emitter-listener@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/emitter-listener/-/emitter-listener-1.1.2.tgz#56b140e8f6992375b3d7cb2cab1cc7432d9632e8" - integrity sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ== - dependencies: - shimmer "^1.2.0" - emittery@^0.8.1: version "0.8.1" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" @@ -2397,7 +2116,7 @@ encoding-down@^6.3.0: level-codec "^9.0.0" level-errors "^2.0.0" -end-of-stream@^1.1.0, end-of-stream@^1.4.1: +end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== @@ -2411,11 +2130,6 @@ end-stream@~0.1.0: dependencies: write-stream "~0.4.3" -ent@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" - integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= - errno@~0.1.1: version "0.1.8" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" @@ -2625,19 +2339,6 @@ exit@^0.1.2: resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= -expand-brackets@^2.1.4: - version "2.1.4" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= - dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - expect@^27.4.6: version "27.4.6" resolved "https://registry.yarnpkg.com/expect/-/expect-27.4.6.tgz#f335e128b0335b6ceb4fcab67ece7cbd14c942e6" @@ -2648,21 +2349,6 @@ expect@^27.4.6: jest-matcher-utils "^27.4.6" jest-message-util "^27.4.6" -extend-shallow@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" - integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= - dependencies: - is-extendable "^0.1.0" - -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" - integrity 
sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" @@ -2677,20 +2363,6 @@ external-editor@^3.0.3: iconv-lite "^0.4.24" tmp "^0.0.33" -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" @@ -2779,16 +2451,6 @@ file-entry-cache@^5.0.1: dependencies: flat-cache "^2.0.1" -fill-range@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= - dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" - fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -2823,11 +2485,6 @@ flatted@^2.0.0: resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.2.tgz#4575b21e2bcee7434aa9be662f4b7b5f9c2b5138" integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA== -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= - forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" @@ -2875,28 +2532,11 @@ formidable@^2.0.1: once "1.4.0" qs "6.9.3" -fragment-cache@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= - dependencies: - map-cache "^0.2.2" - fresh@~0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - -fs-exists-sync@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add" - integrity sha1-mC1ok6+RjnLQjeyehnP/K1qNat0= - fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -2936,14 +2576,6 @@ get-intrinsic@^1.0.2: has "^1.0.3" has-symbols "^1.0.1" -get-object@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/get-object/-/get-object-0.2.0.tgz#d92ff7d5190c64530cda0543dac63a3d47fe8c0c" - integrity sha1-2S/31RkMZFMM2gVD2sY6PUf+jAw= - dependencies: - is-number "^2.0.2" - isobject 
"^0.2.0" - get-package-type@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" @@ -2968,18 +2600,6 @@ get-stream@^6.0.0: resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= - -get-value@^3.0.0, get-value@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-3.0.1.tgz#5efd2a157f1d6a516d7524e124ac52d0a39ef5a8" - integrity sha512-mKZj9JLQrwMBtj5wxi6MH8Z5eSKaERpAwjg43dPtlGI1ZVEgH/qC7T8/6R2OBSUA+zzHBZgICsVJaEIV2tKTDA== - dependencies: - isobject "^3.0.1" - getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" @@ -3048,32 +2668,6 @@ globby@^11.0.3: merge2 "^1.4.1" slash "^3.0.0" -google-auth-library@~0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-0.10.0.tgz#6e15babee85fd1dd14d8d128a295b6838d52136e" - integrity sha1-bhW6vuhf0d0U2NEoopW2g41SE24= - dependencies: - gtoken "^1.2.1" - jws "^3.1.4" - lodash.noop "^3.0.1" - request "^2.74.0" - -google-p12-pem@^0.1.0: - version "0.1.2" - resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-0.1.2.tgz#33c46ab021aa734fa0332b3960a9a3ffcb2f3177" - integrity sha1-M8RqsCGqc0+gMys5YKmj/8svMXc= - dependencies: - node-forge "^0.7.1" - -googleapis@^16.0.0: - version "16.1.0" - resolved "https://registry.yarnpkg.com/googleapis/-/googleapis-16.1.0.tgz#0f19f2d70572d918881a0f626e3b1a2fa8629576" - integrity sha1-Dxny1wVy2RiIGg9ibjsaL6hilXY= - dependencies: - async "~2.1.4" - google-auth-library "~0.10.0" - string-template "~1.0.0" - got@^11.8.1: version "11.8.3" resolved "https://registry.yarnpkg.com/got/-/got-11.8.3.tgz#f496c8fdda5d729a90b4905d2b07dbd148170770" @@ -3113,45 +2707,6 @@ graceful-fs@^4.1.2, graceful-fs@^4.2.4: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96" integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ== -gtoken@^1.2.1: - version "1.2.3" - resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-1.2.3.tgz#5509571b8afd4322e124cf66cf68115284c476d8" - integrity sha512-wQAJflfoqSgMWrSBk9Fg86q+sd6s7y6uJhIvvIPz++RElGlMtEqsdAR2oWwZ/WTEtp7P9xFbJRrT976oRgzJ/w== - dependencies: - google-p12-pem "^0.1.0" - jws "^3.0.0" - mime "^1.4.1" - request "^2.72.0" - -gulp-header@^1.7.1: - version "1.8.12" - resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.12.tgz#ad306be0066599127281c4f8786660e705080a84" - integrity sha512-lh9HLdb53sC7XIZOYzTXM4lFuXElv3EVkSDhsd7DoJBj7hm+Ni7D3qYbb+Rr8DuM8nRanBvkVO9d7askreXGnQ== - dependencies: - concat-with-sourcemaps "*" - lodash.template "^4.4.0" - through2 "^2.0.0" - -handlebars-utils@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/handlebars-utils/-/handlebars-utils-1.0.6.tgz#cb9db43362479054782d86ffe10f47abc76357f9" - integrity sha512-d5mmoQXdeEqSKMtQQZ9WkiUcO1E3tPbWxluCK9hVgIDPzQa9WsKo3Lbe/sGflTe7TomHEeZaOgwIkyIr1kfzkw== - dependencies: - kind-of "^6.0.0" - typeof-article "^0.1.1" - -handlebars@^4.7.6, 
handlebars@^4.7.7: - version "4.7.7" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" - integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== - dependencies: - minimist "^1.2.5" - neo-async "^2.6.0" - source-map "^0.6.1" - wordwrap "^1.0.0" - optionalDependencies: - uglify-js "^3.1.4" - har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" @@ -3187,52 +2742,6 @@ has-tostringtag@^1.0.0: dependencies: has-symbols "^1.0.2" -has-value@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" - integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= - dependencies: - get-value "^2.0.3" - has-values "^0.1.4" - isobject "^2.0.0" - -has-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" - integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= - dependencies: - get-value "^2.0.6" - has-values "^1.0.0" - isobject "^3.0.0" - -has-value@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-2.0.2.tgz#d0f12e8780ba8e90e66ad1a21c707fdb67c25658" - integrity sha512-ybKOlcRsK2MqrM3Hmz/lQxXHZ6ejzSPzpNabKB45jb5qDgJvKPa3SdapTsTLwEb9WltgWpOmNax7i+DzNOk4TA== - dependencies: - get-value "^3.0.0" - has-values "^2.0.1" - -has-values@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" - integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= - -has-values@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" - integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= - dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" - -has-values@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-2.0.1.tgz#3876200ff86d8a8546a9264a952c17d5fc17579d" - integrity sha512-+QdH3jOmq9P8GfdjFg0eJudqx1FqU62NQJ4P16rOEHeRdl7ckgwn6uqQjzYE0ZoHVV/e5E2esuJ5Gl5+HUW19w== - dependencies: - kind-of "^6.0.2" - has-yarn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77" @@ -3245,16 +2754,6 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" -helper-md@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/helper-md/-/helper-md-0.2.2.tgz#c1f59d7e55bbae23362fd8a0e971607aec69d41f" - integrity sha1-wfWdflW7riM2L9ig6XFgeuxp1B8= - dependencies: - ent "^2.2.0" - extend-shallow "^2.0.1" - fs-exists-sync "^0.1.0" - remarkable "^1.6.2" - hexoid@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/hexoid/-/hexoid-1.0.0.tgz#ad10c6573fb907de23d9ec63a711267d9dc9bc18" @@ -3272,14 +2771,6 @@ html-escaper@^2.0.0: resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== -html-tag@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/html-tag/-/html-tag-2.0.0.tgz#36c3bc8d816fd30b570d5764a497a641640c2fed" - integrity sha512-XxzooSo6oBoxBEUazgjdXj7VwTn/iSTSZzTYKzYY6I916tkaYzypHxy+pbVU1h+0UQ9JlVf5XkNQyxOAiiQO1g== - dependencies: - is-self-closing "^1.0.1" - kind-of "^6.0.0" - http-assert@^1.3.0: 
version "1.5.0" resolved "https://registry.yarnpkg.com/http-assert/-/http-assert-1.5.0.tgz#c389ccd87ac16ed2dfa6246fd73b926aa00e6b8f" @@ -3473,37 +2964,6 @@ inquirer@^7.0.0: strip-ansi "^6.0.0" through "^2.3.6" -ioredis@^4.27.1: - version "4.28.3" - resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.3.tgz#b13fce8a6a7c525ba22e666d72980a3c0ba799aa" - integrity sha512-9JOWVgBnuSxpIgfpjc1OeY1OLmA4t2KOWWURTDRXky+eWO0LZhI33pQNT9gYxANUXfh5p/zYephYni6GPRsksQ== - dependencies: - cluster-key-slot "^1.1.0" - debug "^4.3.1" - denque "^1.1.0" - lodash.defaults "^4.2.0" - lodash.flatten "^4.4.0" - lodash.isarguments "^3.1.0" - p-map "^2.1.0" - redis-commands "1.7.0" - redis-errors "^1.2.0" - redis-parser "^3.0.0" - standard-as-callback "^2.1.0" - -is-accessor-descriptor@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" - integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= - dependencies: - kind-of "^3.0.2" - -is-accessor-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" - integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== - dependencies: - kind-of "^6.0.0" - is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" @@ -3511,11 +2971,6 @@ is-binary-path@~2.1.0: dependencies: binary-extensions "^2.0.0" -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - is-ci@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" @@ -3535,57 +2990,6 @@ is-core-module@^2.8.1: dependencies: has "^1.0.3" -is-data-descriptor@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" - integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= - dependencies: - kind-of "^3.0.2" - -is-data-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" - integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== - dependencies: - kind-of "^6.0.0" - -is-descriptor@^0.1.0: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" - integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== - dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" - -is-descriptor@^1.0.0, is-descriptor@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" - integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== - dependencies: - is-accessor-descriptor "^1.0.0" - is-data-descriptor "^1.0.0" - kind-of "^6.0.2" - -is-even@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/is-even/-/is-even-1.0.0.tgz#76b5055fbad8d294a86b6a949015e1c97b717c06" - integrity sha1-drUFX7rY0pSoa2qUkBXhyXtxfAY= - dependencies: - is-odd "^0.1.2" - -is-extendable@^0.1.0, is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= - -is-extendable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" - integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== - dependencies: - is-plain-object "^2.0.4" - is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" @@ -3633,20 +3037,6 @@ is-npm@^5.0.0: resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-5.0.0.tgz#43e8d65cc56e1b67f8d47262cf667099193f45a8" integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA== -is-number@^2.0.2: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" - integrity sha1-Afy7s5NGOlSPL0ZszhbezknbkI8= - dependencies: - kind-of "^3.0.2" - -is-number@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" - integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= - dependencies: - kind-of "^3.0.2" - is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" @@ -3657,37 +3047,16 @@ is-obj@^2.0.0: resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982" integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== -is-odd@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/is-odd/-/is-odd-0.1.2.tgz#bc573b5ce371ef2aad6e6f49799b72bef13978a7" - integrity sha1-vFc7XONx7yqtbm9JeZtyvvE5eKc= - dependencies: - is-number "^3.0.0" - is-path-inside@^3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== -is-plain-object@^2.0.3, is-plain-object@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - is-potential-custom-element-name@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== -is-self-closing@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-self-closing/-/is-self-closing-1.0.1.tgz#5f406b527c7b12610176320338af0fa3896416e4" - integrity sha512-E+60FomW7Blv5GXTlYee2KDrnG6srxF7Xt1SjrhWUGUEsTFIqY/nq2y3DaftCsgUMdh89V07IVfhY9KIJhLezg== - dependencies: - self-closing-tags "^1.0.1" - is-stream@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" @@ -3707,11 +3076,6 @@ is-typedarray@^1.0.0, is-typedarray@~1.0.0: resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= -is-windows@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" - integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== - is-yarn-global@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.3.0.tgz#d502d3382590ea3004893746754c89139973e232" @@ -3722,7 +3086,7 @@ isarray@0.0.1: resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= -isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: +isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= @@ -3732,23 +3096,6 @@ isexe@^2.0.0: resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= -isobject@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-0.2.0.tgz#a3432192f39b910b5f02cc989487836ec70aa85e" - integrity sha1-o0MhkvObkQtfAsyYlIeDbscKqF4= - -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= - dependencies: - isarray "1.0.0" - -isobject@^3.0.0, isobject@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= - isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" @@ -4334,7 +3681,7 @@ json5@2.x, json5@^2.1.2: dependencies: minimist "^1.2.5" -jsonwebtoken@^8.2.0, jsonwebtoken@^8.5.1: +jsonwebtoken@^8.2.0: version "8.5.1" resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== @@ -4380,7 +3727,7 @@ jwa@^1.4.1: ecdsa-sig-formatter "1.0.11" safe-buffer "^5.0.1" -jws@^3.0.0, jws@^3.1.4, jws@^3.2.2: +jws@^3.2.2: version "3.2.2" resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== @@ -4409,30 +3756,6 @@ keyv@^4.0.0: dependencies: json-buffer "3.0.1" -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.1.0, kind-of@^3.2.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= - dependencies: - is-buffer "^1.1.5" - -kind-of@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" - integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= - dependencies: - is-buffer "^1.1.5" - -kind-of@^5.0.0, kind-of@^5.0.2: - version "5.1.0" - 
resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" - integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== - -kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - kleur@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" @@ -4674,31 +3997,11 @@ locate-path@^5.0.0: dependencies: p-locate "^4.1.0" -lodash._reinterpolate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d" - integrity sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= - -lodash.defaults@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" - integrity sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw= - -lodash.flatten@^4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" - integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8= - lodash.includes@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8= -lodash.isarguments@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" - integrity sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo= - lodash.isboolean@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" @@ -4729,32 +4032,12 @@ lodash.memoize@4.x: resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= -lodash.noop@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/lodash.noop/-/lodash.noop-3.0.1.tgz#38188f4d650a3a474258439b96ec45b32617133c" - integrity sha1-OBiPTWUKOkdCWEObluxFsyYXEzw= - lodash.once@^4.0.0: version "4.1.1" resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= -lodash.template@^4.4.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab" - integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== - dependencies: - lodash._reinterpolate "^3.0.0" - lodash.templatesettings "^4.0.0" - -lodash.templatesettings@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz#e481310f049d3cf6d47e912ad09313b154f0fb33" - integrity sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ== - dependencies: - lodash._reinterpolate "^3.0.0" - -lodash@^4.14.0, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: +lodash@^4.17.14, lodash@^4.17.19, lodash@^4.7.0: version "4.17.21" resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -4805,18 +4088,6 @@ makeerror@1.0.12: dependencies: tmpl "1.0.5" -map-cache@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" - integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= - -map-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" - integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= - dependencies: - object-visit "^1.0.0" - media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" @@ -4849,25 +4120,6 @@ methods@^1.1.2: resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= -micromatch@^3.1.5: - version "3.1.10" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" - integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - braces "^2.3.1" - define-property "^2.0.2" - extend-shallow "^3.0.2" - extglob "^2.0.4" - fragment-cache "^0.2.1" - kind-of "^6.0.2" - nanomatch "^1.2.9" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.2" - micromatch@^4.0.4: version "4.0.4" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" @@ -4888,11 +4140,6 @@ mime-types@^2.1.12, mime-types@^2.1.18, mime-types@~2.1.19, mime-types@~2.1.24: dependencies: mime-db "1.51.0" -mime@^1.4.1: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - mime@^2.5.0: version "2.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" @@ -4925,19 +4172,6 @@ minimist@^1.2.0, minimist@^1.2.5: resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== -mixin-deep@^1.2.0: - version "1.3.2" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" - integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== - dependencies: - for-in "^1.0.2" - is-extendable "^1.0.1" - -mkdirp-classic@^0.5.2: - version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" - integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== - mkdirp@^0.5.0, mkdirp@^0.5.1: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" @@ -4955,11 +4189,6 @@ mri@1.1.4: resolved "https://registry.yarnpkg.com/mri/-/mri-1.1.4.tgz#7cb1dd1b9b40905f1fac053abe25b6720f44744a" integrity 
sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w== -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= - ms@2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" @@ -4975,23 +4204,6 @@ mute-stream@0.0.8: resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== -nanomatch@^1.2.9: - version "1.2.13" - resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" - integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - define-property "^2.0.2" - extend-shallow "^3.0.2" - fragment-cache "^0.2.1" - is-windows "^1.0.2" - kind-of "^6.0.2" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - napi-macros@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" @@ -5007,11 +4219,6 @@ negotiator@0.6.2: resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== -neo-async@^2.6.0: - version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" @@ -5029,11 +4236,6 @@ node-fetch@^2.6.1: dependencies: whatwg-url "^5.0.0" -node-forge@^0.7.1: - version "0.7.6" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac" - integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw== - node-gyp-build@~4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" @@ -5127,34 +4329,11 @@ object-assign@^2.0.0: resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-2.1.1.tgz#43c36e5d569ff8e4816c4efa8be02d26967c18aa" integrity sha1-Q8NuXVaf+OSBbE76i+AtJpZ8GKo= -object-copy@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" - integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= - dependencies: - copy-descriptor "^0.1.0" - define-property "^0.2.5" - kind-of "^3.0.3" - object-inspect@^1.9.0: version "1.12.0" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.0.tgz#6e2c120e868fd1fd18cb4f18c31741d0d6e776f0" integrity sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== -object-visit@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" - integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= - dependencies: - isobject "^3.0.0" - 
-object.pick@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" - integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= - dependencies: - isobject "^3.0.1" - on-finished@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" @@ -5222,11 +4401,6 @@ p-locate@^4.1.0: dependencies: p-limit "^2.2.0" -p-map@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" - integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== - p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" @@ -5259,19 +4433,6 @@ parseurl@^1.3.2: resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== -pascalcase@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" - integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= - -passport-google-auth@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/passport-google-auth/-/passport-google-auth-1.0.2.tgz#8b300b5aa442ef433de1d832ed3112877d0b2938" - integrity sha1-izALWqRC70M94dgy7TESh30LKTg= - dependencies: - googleapis "^16.0.0" - passport-strategy "1.x" - passport-google-oauth1@1.x.x: version "1.0.0" resolved "https://registry.yarnpkg.com/passport-google-oauth1/-/passport-google-oauth1-1.0.0.tgz#af74a803df51ec646f66a44d82282be6f108e0cc" @@ -5329,7 +4490,7 @@ passport-oauth2@1.x.x: uid2 "0.0.x" utils-merge "1.x.x" -passport-strategy@1.x, passport-strategy@1.x.x, passport-strategy@^1.0.0: +passport-strategy@1.x.x, passport-strategy@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" integrity sha1-tVOaqPwiWj0a0XlHbd8ja0QPUuQ= @@ -5461,11 +4622,6 @@ pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" -posix-character-classes@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" - integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= - pouchdb-adapter-leveldb-core@7.2.2: version "7.2.2" resolved "https://registry.yarnpkg.com/pouchdb-adapter-leveldb-core/-/pouchdb-adapter-leveldb-core-7.2.2.tgz#e0aa6a476e2607d7ae89f4a803c9fba6e6d05a8a" @@ -5788,7 +4944,7 @@ readable-stream@1.1.14: isarray "0.0.1" string_decoder "~0.10.x" -"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: +"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -5842,31 +4998,6 @@ recast@^0.11.17: private "~0.1.5" source-map "~0.5.0" -redis-commands@1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.7.0.tgz#15a6fea2d58281e27b1cd1acfb4b293e278c3a89" - integrity 
sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ== - -redis-errors@^1.0.0, redis-errors@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/redis-errors/-/redis-errors-1.2.0.tgz#eb62d2adb15e4eaf4610c04afe1529384250abad" - integrity sha1-62LSrbFeTq9GEMBK/hUpOEJQq60= - -redis-parser@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/redis-parser/-/redis-parser-3.0.0.tgz#b66d828cdcafe6b4b8a428a7def4c6bcac31c8b4" - integrity sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ= - dependencies: - redis-errors "^1.0.0" - -regex-not@^1.0.0, regex-not@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" - integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== - dependencies: - extend-shallow "^3.0.2" - safe-regex "^1.1.0" - regexpp@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f" @@ -5886,32 +5017,7 @@ registry-url@^5.0.0: dependencies: rc "^1.2.8" -relative@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/relative/-/relative-3.0.2.tgz#0dcd8ec54a5d35a3c15e104503d65375b5a5367f" - integrity sha1-Dc2OxUpdNaPBXhBFA9ZTdbWlNn8= - dependencies: - isobject "^2.0.0" - -remarkable@^1.6.2: - version "1.7.4" - resolved "https://registry.yarnpkg.com/remarkable/-/remarkable-1.7.4.tgz#19073cb960398c87a7d6546eaa5e50d2022fcd00" - integrity sha512-e6NKUXgX95whv7IgddywbeN/ItCkWbISmc2DiqHJb0wTrqZIexqdco5b8Z3XZoo/48IdNVKM9ZCvTPJ4F5uvhg== - dependencies: - argparse "^1.0.10" - autolinker "~0.28.0" - -repeat-element@^1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" - integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== - -repeat-string@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= - -request@^2.72.0, request@^2.74.0, request@^2.88.0: +request@^2.88.0: version "2.88.2" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== @@ -5972,11 +5078,6 @@ resolve-path@^1.4.0: http-errors "~1.6.2" path-is-absolute "1.0.1" -resolve-url@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" - integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= - resolve.exports@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" @@ -6013,11 +5114,6 @@ restore-cursor@^3.1.0: onetime "^5.1.0" signal-exit "^3.0.2" -ret@~0.1.10: - version "0.1.15" - resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" - integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== - reusify@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" @@ -6071,23 +5167,11 @@ safe-buffer@~5.1.0, safe-buffer@~5.1.1: resolved 
"https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" - integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= - dependencies: - ret "~0.1.10" - "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -sanitize-s3-objectkey@^0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/sanitize-s3-objectkey/-/sanitize-s3-objectkey-0.0.1.tgz#efa9887cd45275b40234fb4bb12fc5754fe64e7e" - integrity sha512-ZTk7aqLxy4sD40GWcYWoLfbe05XLmkKvh6vGKe13ADlei24xlezcvjgKy1qRArlaIbIMYaqK7PCalvZtulZlaQ== - sax@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" @@ -6105,11 +5189,6 @@ saxes@^5.0.1: dependencies: xmlchars "^2.2.0" -self-closing-tags@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/self-closing-tags/-/self-closing-tags-1.0.1.tgz#6c5fa497994bb826b484216916371accee490a5d" - integrity sha512-7t6hNbYMxM+VHXTgJmxwgZgLGktuXtVVD5AivWzNTdJBM4DBjnDKDzkf2SrNjihaArpeJYNjxkELBu1evI4lQA== - semver-diff@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b" @@ -6124,7 +5203,7 @@ semver@7.x, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5: dependencies: lru-cache "^6.0.0" -semver@^5.4.1, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1: +semver@^5.5.0, semver@^5.6.0, semver@^5.7.1: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== @@ -6139,16 +5218,6 @@ server-destroy@^1.0.1: resolved "https://registry.yarnpkg.com/server-destroy/-/server-destroy-1.0.1.tgz#f13bf928e42b9c3e79383e61cc3998b5d14e6cdd" integrity sha1-8Tv5KOQrnD55OD5hzDmYtdFObN0= -set-value@^2.0.0, set-value@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" - integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== - dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.3" - split-string "^3.0.1" - setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" @@ -6183,11 +5252,6 @@ shebang-regex@^3.0.0: resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== -shimmer@^1.2.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/shimmer/-/shimmer-1.2.1.tgz#610859f7de327b587efebf501fb43117f9aff337" - integrity sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw== - side-channel@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" @@ -6221,36 +5285,6 @@ slice-ansi@^2.1.0: astral-regex "^1.0.0" is-fullwidth-code-point "^2.0.0" -snapdragon-node@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" - integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== - dependencies: - define-property "^1.0.0" - isobject "^3.0.0" - snapdragon-util "^3.0.1" - -snapdragon-util@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" - integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== - dependencies: - kind-of "^3.2.0" - -snapdragon@^0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" - integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== - dependencies: - base "^0.11.1" - debug "^2.2.0" - define-property "^0.2.5" - extend-shallow "^2.0.1" - map-cache "^0.2.2" - source-map "^0.5.6" - source-map-resolve "^0.5.0" - use "^3.1.0" - sonic-boom@^1.0.2: version "1.4.1" resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-1.4.1.tgz#d35d6a74076624f12e6f917ade7b9d75e918f53e" @@ -6259,17 +5293,6 @@ sonic-boom@^1.0.2: atomic-sleep "^1.0.0" flatstr "^1.0.12" -source-map-resolve@^0.5.0: - version "0.5.3" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" - integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" - resolve-url "^0.2.1" - source-map-url "^0.4.0" - urix "^0.1.0" - source-map-support@^0.5.6: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" @@ -6278,11 +5301,6 @@ source-map-support@^0.5.6: buffer-from "^1.0.0" source-map "^0.6.0" -source-map-url@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" - integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== - source-map@^0.4.2: version "0.4.4" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" @@ -6290,7 +5308,7 @@ source-map@^0.4.2: dependencies: amdefine ">=0.0.4" -source-map@^0.5.0, source-map@^0.5.6, source-map@~0.5.0: +source-map@^0.5.0, source-map@~0.5.0: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= @@ -6310,13 +5328,6 @@ spark-md5@3.0.1: resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.1.tgz#83a0e255734f2ab4e5c466e5a2cfc9ba2aa2124d" integrity sha512-0tF3AGSD1ppQeuffsLDIOWlKUd3lS92tFxcsrh5Pe3ZphhnoK+oXIBTzOAThZCiuINZLvpiLH/1VS1/ANEJVig== -split-string@^3.0.1, split-string@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" - integrity 
sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== - dependencies: - extend-shallow "^3.0.0" - split2@^3.1.1: version "3.2.2" resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f" @@ -6344,11 +5355,6 @@ sshpk@^1.7.0: safer-buffer "^2.0.2" tweetnacl "~0.14.0" -stack-chain@^1.3.7: - version "1.3.7" - resolved "https://registry.yarnpkg.com/stack-chain/-/stack-chain-1.3.7.tgz#d192c9ff4ea6a22c94c4dd459171e3f00cea1285" - integrity sha1-0ZLJ/06moiyUxN1FkXHj8AzqEoU= - stack-utils@^2.0.3: version "2.0.5" resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" @@ -6356,19 +5362,6 @@ stack-utils@^2.0.3: dependencies: escape-string-regexp "^2.0.0" -standard-as-callback@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/standard-as-callback/-/standard-as-callback-2.1.0.tgz#8953fc05359868a77b5b9739a665c5977bb7df45" - integrity sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A== - -static-extend@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" - integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= - dependencies: - define-property "^0.2.5" - object-copy "^0.1.0" - "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@^1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" @@ -6392,11 +5385,6 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -string-template@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/string-template/-/string-template-1.0.0.tgz#9e9f2233dc00f218718ec379a28a5673ecca8b96" - integrity sha1-np8iM9wA8hhxjsN5oopWc+zKi5Y= - string-width@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -6468,11 +5456,6 @@ strip-json-comments@~2.0.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= -striptags@^3.1.1: - version "3.2.0" - resolved "https://registry.yarnpkg.com/striptags/-/striptags-3.2.0.tgz#cc74a137db2de8b0b9a370006334161f7dd67052" - integrity sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw== - sublevel-pouchdb@7.2.2: version "7.2.2" resolved "https://registry.yarnpkg.com/sublevel-pouchdb/-/sublevel-pouchdb-7.2.2.tgz#49e46cd37883bf7ff5006d7c5b9bcc7bcc1f422f" @@ -6557,27 +5540,6 @@ table@^5.2.3: slice-ansi "^2.1.0" string-width "^3.0.0" -tar-fs@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.1.4" - -tar-stream@^2.1.4: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - terminal-link@^2.0.0: version "2.1.1" resolved 
"https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" @@ -6613,7 +5575,7 @@ through2@3.0.2: inherits "^2.0.4" readable-stream "2 || 3" -through2@^2.0.0, through2@^2.0.1: +through2@^2.0.1: version "2.0.5" resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== @@ -6648,31 +5610,11 @@ to-fast-properties@^2.0.0: resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= -to-gfm-code-block@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/to-gfm-code-block/-/to-gfm-code-block-0.1.1.tgz#25d045a5fae553189e9637b590900da732d8aa82" - integrity sha1-JdBFpfrlUxielje1kJANpzLYqoI= - -to-object-path@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" - integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= - dependencies: - kind-of "^3.0.2" - to-readable-stream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== -to-regex-range@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" - integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= - dependencies: - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" @@ -6680,16 +5622,6 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -to-regex@^3.0.1, to-regex@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" - integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== - dependencies: - define-property "^2.0.2" - extend-shallow "^3.0.2" - regex-not "^1.0.2" - safe-regex "^1.1.0" - toidentifier@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" @@ -6834,23 +5766,11 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typeof-article@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/typeof-article/-/typeof-article-0.1.1.tgz#9f07e733c3fbb646ffa9e61c08debacd460e06af" - integrity sha1-nwfnM8P7tkb/qeYcCN66zUYOBq8= - dependencies: - kind-of "^3.1.0" - typescript@4.3.5: version "4.3.5" resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.3.5.tgz#4d1c37cc16e893973c45a06886b7113234f119f4" integrity sha512-DqQgihaQ9cUrskJo9kIyW/+g0Vxsk8cDtZ52a3NGh0YNTfpUSArXSohyUGnvbPazEPLu398C0UxmKSOrPumUzA== -uglify-js@^3.1.4: - version "3.15.0" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.15.0.tgz#2d6a689d94783cab43975721977a13c2afec28f1" - integrity sha512-x+xdeDWq7FiORDvyIJ0q/waWd4PhjBNOm5dQUOq2AKC0IEjxOS66Ha9tctiVDGcRQuh69K7fgU5oRuTK4cysSg== - uid2@0.0.x: version "0.0.4" resolved "https://registry.yarnpkg.com/uid2/-/uid2-0.0.4.tgz#033f3b1d5d32505f5ce5f888b9f3b667123c0a44" @@ -6861,16 +5781,6 @@ undefsafe@^2.0.5: 
resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.5.tgz#38733b9327bdcd226db889fb723a6efd162e6e2c" integrity sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA== -union-value@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" - integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== - dependencies: - arr-union "^3.1.0" - get-value "^2.0.6" - is-extendable "^0.1.1" - set-value "^2.0.1" - unique-string@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" @@ -6888,14 +5798,6 @@ unpipe@1.0.0: resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= -unset-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" - integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= - dependencies: - has-value "^0.3.1" - isobject "^3.0.0" - untildify@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" @@ -6938,11 +5840,6 @@ urijs@^1.19.2: resolved "https://registry.yarnpkg.com/urijs/-/urijs-1.19.7.tgz#4f594e59113928fea63c00ce688fb395b1168ab9" integrity sha512-Id+IKjdU0Hx+7Zx717jwLPsPeUqz7rAtuVBRLLs+qn+J2nf9NGITWVCxcijgYxBqe83C7sqsQPs6H1pyz3x9gA== -urix@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" - integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= - url-parse-lax@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" @@ -6958,11 +5855,6 @@ url@0.10.3: punycode "1.3.2" querystring "0.2.0" -use@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" - integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== - util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" @@ -6988,11 +5880,6 @@ uuid@^3.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuid@^8.3.2: - version "8.3.2" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" - integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== - v8-compile-cache@^2.0.3: version "2.3.0" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" @@ -7021,11 +5908,6 @@ verror@1.10.0: core-util-is "1.0.2" extsprintf "^1.2.0" -vm2@^3.9.4: - version "3.9.5" - resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.5.tgz#5288044860b4bbace443101fcd3bddb2a0aa2496" - integrity sha512-LuCAHZN75H9tdrAiLFf030oW7nJV5xwNMuk1ymOZwopmuK3d2H4L1Kv4+GFHgarKiLfXXLFU+7LDABHnwOkWng== - vuvuzela@1.0.3: version "1.0.3" resolved 
"https://registry.yarnpkg.com/vuvuzela/-/vuvuzela-1.0.3.tgz#3be145e58271c73ca55279dd851f12a682114b0b" @@ -7130,11 +6012,6 @@ word-wrap@~1.2.3: resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== -wordwrap@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= - wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" @@ -7246,11 +6123,6 @@ yargs@^16.1.0, yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" -year@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/year/-/year-0.2.1.tgz#4083ae520a318b23ec86037f3000cb892bdf9bb0" - integrity sha1-QIOuUgoxiyPshgN/MADLiSvfm7A= - ylru@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/ylru/-/ylru-1.2.1.tgz#f576b63341547989c1de7ba288760923b27fe84f" @@ -7260,8 +6132,3 @@ yn@3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== - -zlib@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/zlib/-/zlib-1.0.5.tgz#6e7c972fc371c645a6afb03ab14769def114fcc0" - integrity sha1-bnyXL8NxxkWmr7A6sUdp3vEU/MA=