From d2fe119d902daf5079f8d90f8e368325ac432d08 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 27 Jan 2022 18:18:31 +0000 Subject: [PATCH 1/9] Main body of work, refactoring most usages. --- packages/backend-core/context.js | 15 +++ packages/backend-core/src/db/utils.js | 16 +-- .../backend-core/src/middleware/appTenancy.js | 2 + packages/backend-core/src/security/roles.js | 38 +++--- .../src/tenancy/FunctionContext.js | 20 ++-- packages/backend-core/src/tenancy/context.js | 113 ++++++++++++++++-- .../server/src/api/controllers/application.js | 55 +++++---- packages/server/src/api/controllers/cloud.js | 5 +- .../server/src/api/controllers/component.js | 7 +- .../server/src/api/controllers/datasource.js | 20 ++-- packages/server/src/api/controllers/layout.js | 6 +- .../server/src/api/controllers/permission.js | 8 +- packages/server/src/api/controllers/role.js | 10 +- .../server/src/api/controllers/routing.js | 2 +- .../api/controllers/row/ExternalRequest.ts | 82 +++++++------ .../src/api/controllers/row/external.js | 6 +- .../src/api/controllers/row/internal.js | 51 +++----- .../src/api/controllers/row/internalSearch.js | 24 ++-- .../server/src/api/controllers/row/utils.js | 13 +- packages/server/src/api/controllers/screen.js | 12 +- .../src/api/controllers/static/index.js | 8 +- .../src/api/controllers/table/external.js | 17 ++- .../server/src/api/controllers/table/index.js | 8 +- .../src/api/controllers/table/internal.js | 8 +- .../server/src/api/controllers/table/utils.js | 59 ++++----- packages/server/src/api/controllers/user.js | 31 +++-- .../server/src/api/controllers/view/index.js | 18 ++- .../server/src/api/controllers/view/utils.js | 13 +- .../server/src/api/routes/tests/misc.spec.js | 1 - .../routes/tests/utilities/TestFunctions.js | 6 +- .../server/src/automations/automationUtils.js | 5 +- .../server/src/automations/steps/createRow.js | 1 - .../server/src/automations/steps/updateRow.js | 2 +- .../src/db/linkedRows/LinkController.js | 10 +- packages/server/src/db/linkedRows/index.js | 41 ++----- .../server/src/db/linkedRows/linkUtils.js | 12 +- .../src/db/tests/linkController.spec.js | 1 - .../server/src/db/tests/linkTests.spec.js | 11 +- packages/server/src/db/views/staticViews.js | 18 ++- packages/server/src/integrations/utils.ts | 5 +- packages/server/src/middleware/authorized.js | 4 +- packages/server/src/middleware/currentapp.js | 3 +- .../src/migrations/usageQuotas/syncApps.js | 3 +- .../src/migrations/usageQuotas/syncRows.js | 3 +- .../src/tests/utilities/TestConfiguration.js | 3 + .../server/src/utilities/fileSystem/index.js | 4 +- packages/server/src/utilities/global.js | 23 ++-- .../src/utilities/rowProcessor/index.js | 17 +-- packages/server/src/utilities/users.js | 6 +- .../server/src/utilities/workerRequests.js | 2 +- .../src/api/controllers/global/configs.js | 3 +- .../src/api/controllers/global/roles.js | 2 +- 52 files changed, 453 insertions(+), 400 deletions(-) create mode 100644 packages/backend-core/context.js diff --git a/packages/backend-core/context.js b/packages/backend-core/context.js new file mode 100644 index 0000000000..b3d004b209 --- /dev/null +++ b/packages/backend-core/context.js @@ -0,0 +1,15 @@ +const { + getAppDB, + getDevAppDB, + getProdAppDB, + getAppId, + updateAppId, +} = require("./src/tenancy/context") + +module.exports = { + getAppDB, + getDevAppDB, + getProdAppDB, + getAppId, + updateAppId, +} diff --git a/packages/backend-core/src/db/utils.js b/packages/backend-core/src/db/utils.js index 2bc5462646..181467b402 100644 --- 
a/packages/backend-core/src/db/utils.js +++ b/packages/backend-core/src/db/utils.js @@ -250,11 +250,10 @@ exports.getAllDbs = async () => { /** * Lots of different points in the system need to find the full list of apps, this will * enumerate the entire CouchDB cluster and get the list of databases (every app). - * NOTE: this operation is fine in self hosting, but cannot be used when hosting many - * different users/companies apps as there is no security around it - all apps are returned. * @return {Promise} returns the app information document stored in each app database. */ -exports.getAllApps = async (CouchDB, { dev, all, idsOnly } = {}) => { +exports.getAllApps = async ({ dev, all, idsOnly } = {}) => { + const CouchDB = getCouch() let tenantId = getTenantId() if (!env.MULTI_TENANCY && !tenantId) { tenantId = DEFAULT_TENANT_ID @@ -310,8 +309,8 @@ exports.getAllApps = async (CouchDB, { dev, all, idsOnly } = {}) => { /** * Utility function for getAllApps but filters to production apps only. */ -exports.getDeployedAppIDs = async CouchDB => { - return (await exports.getAllApps(CouchDB, { idsOnly: true })).filter( +exports.getDeployedAppIDs = async () => { + return (await exports.getAllApps({ idsOnly: true })).filter( id => !exports.isDevAppID(id) ) } @@ -319,13 +318,14 @@ exports.getDeployedAppIDs = async CouchDB => { /** * Utility function for the inverse of above. */ -exports.getDevAppIDs = async CouchDB => { - return (await exports.getAllApps(CouchDB, { idsOnly: true })).filter(id => +exports.getDevAppIDs = async () => { + return (await exports.getAllApps({ idsOnly: true })).filter(id => exports.isDevAppID(id) ) } -exports.dbExists = async (CouchDB, dbName) => { +exports.dbExists = async dbName => { + const CouchDB = getCouch() let exists = false try { const db = CouchDB(dbName, { skip_setup: true }) diff --git a/packages/backend-core/src/middleware/appTenancy.js b/packages/backend-core/src/middleware/appTenancy.js index 30fc4f7453..60d7448af2 100644 --- a/packages/backend-core/src/middleware/appTenancy.js +++ b/packages/backend-core/src/middleware/appTenancy.js @@ -3,6 +3,7 @@ const { updateTenantId, isTenantIdSet, DEFAULT_TENANT_ID, + updateAppId, } = require("../tenancy") const ContextFactory = require("../tenancy/FunctionContext") const { getTenantIDFromAppID } = require("../db/utils") @@ -21,5 +22,6 @@ module.exports = () => { const appId = ctx.appId ? ctx.appId : ctx.user ? ctx.user.appId : null const tenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID updateTenantId(tenantId) + updateAppId(appId) }) } diff --git a/packages/backend-core/src/security/roles.js b/packages/backend-core/src/security/roles.js index 8529dde6f4..2be5058cbb 100644 --- a/packages/backend-core/src/security/roles.js +++ b/packages/backend-core/src/security/roles.js @@ -1,4 +1,3 @@ -const { getDB } = require("../db") const { cloneDeep } = require("lodash/fp") const { BUILTIN_PERMISSION_IDS } = require("./permissions") const { @@ -7,6 +6,7 @@ const { DocumentTypes, SEPARATOR, } = require("../db/utils") +const { getAppDB } = require("../tenancy/context") const BUILTIN_IDS = { ADMIN: "ADMIN", @@ -111,11 +111,10 @@ exports.lowerBuiltinRoleID = (roleId1, roleId2) => { /** * Gets the role object, this is mainly useful for two purposes, to check if the level exists and * to check if the role inherits any others. - * @param {string} appId The app in which to look for the role. * @param {string|null} roleId The level ID to lookup. * @returns {Promise} The role object, which may contain an "inherits" property. 
*/ -exports.getRole = async (appId, roleId) => { +exports.getRole = async roleId => { if (!roleId) { return null } @@ -128,7 +127,7 @@ exports.getRole = async (appId, roleId) => { ) } try { - const db = getDB(appId) + const db = getAppDB() const dbRole = await db.get(exports.getDBRoleID(roleId)) role = Object.assign(role, dbRole) // finalise the ID @@ -145,11 +144,11 @@ exports.getRole = async (appId, roleId) => { /** * Simple function to get all the roles based on the top level user role ID. */ -async function getAllUserRoles(appId, userRoleId) { +async function getAllUserRoles(userRoleId) { if (!userRoleId) { return [BUILTIN_IDS.BASIC] } - let currentRole = await exports.getRole(appId, userRoleId) + let currentRole = await exports.getRole(userRoleId) let roles = currentRole ? [currentRole] : [] let roleIds = [userRoleId] // get all the inherited roles @@ -159,7 +158,7 @@ async function getAllUserRoles(appId, userRoleId) { roleIds.indexOf(currentRole.inherits) === -1 ) { roleIds.push(currentRole.inherits) - currentRole = await exports.getRole(appId, currentRole.inherits) + currentRole = await exports.getRole(currentRole.inherits) roles.push(currentRole) } return roles @@ -168,29 +167,23 @@ async function getAllUserRoles(appId, userRoleId) { /** * Returns an ordered array of the user's inherited role IDs, this can be used * to determine if a user can access something that requires a specific role. - * @param {string} appId The ID of the application from which roles should be obtained. * @param {string} userRoleId The user's role ID, this can be found in their access token. * @param {object} opts Various options, such as whether to only retrieve the IDs (default true). * @returns {Promise} returns an ordered array of the roles, with the first being their * highest level of access and the last being the lowest level. */ -exports.getUserRoleHierarchy = async ( - appId, - userRoleId, - opts = { idOnly: true } -) => { +exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => { // special case, if they don't have a role then they are a public user - const roles = await getAllUserRoles(appId, userRoleId) + const roles = await getAllUserRoles(userRoleId) return opts.idOnly ? roles.map(role => role._id) : roles } /** * Given an app ID this will retrieve all of the roles that are currently within that app. - * @param {string} appId The ID of the app to retrieve the roles from. * @return {Promise} An array of the role objects that were found. 
*/ -exports.getAllRoles = async appId => { - const db = getDB(appId) +exports.getAllRoles = async () => { + const db = getAppDB() const body = await db.allDocs( getRoleParams(null, { include_docs: true, @@ -218,19 +211,17 @@ exports.getAllRoles = async appId => { } /** - * This retrieves the required role/ - * @param appId + * This retrieves the required role * @param permLevel * @param resourceId * @param subResourceId * @return {Promise<{permissions}|Object>} */ exports.getRequiredResourceRole = async ( - appId, permLevel, { resourceId, subResourceId } ) => { - const roles = await exports.getAllRoles(appId) + const roles = await exports.getAllRoles() let main = [], sub = [] for (let role of roles) { @@ -251,8 +242,7 @@ exports.getRequiredResourceRole = async ( } class AccessController { - constructor(appId) { - this.appId = appId + constructor() { this.userHierarchies = {} } @@ -270,7 +260,7 @@ class AccessController { } let roleIds = this.userHierarchies[userRoleId] if (!roleIds) { - roleIds = await exports.getUserRoleHierarchy(this.appId, userRoleId) + roleIds = await exports.getUserRoleHierarchy(userRoleId) this.userHierarchies[userRoleId] = roleIds } diff --git a/packages/backend-core/src/tenancy/FunctionContext.js b/packages/backend-core/src/tenancy/FunctionContext.js index d97a3a30b4..1a3f65056e 100644 --- a/packages/backend-core/src/tenancy/FunctionContext.js +++ b/packages/backend-core/src/tenancy/FunctionContext.js @@ -4,8 +4,8 @@ const { newid } = require("../hashing") const REQUEST_ID_KEY = "requestId" class FunctionContext { - static getMiddleware(updateCtxFn = null) { - const namespace = this.createNamespace() + static getMiddleware(updateCtxFn = null, contextName = "session") { + const namespace = this.createNamespace(contextName) return async function (ctx, next) { await new Promise( @@ -24,14 +24,14 @@ class FunctionContext { } } - static run(callback) { - const namespace = this.createNamespace() + static run(callback, contextName = "session") { + const namespace = this.createNamespace(contextName) return namespace.runAndReturn(callback) } - static setOnContext(key, value) { - const namespace = this.createNamespace() + static setOnContext(key, value, contextName = "session") { + const namespace = this.createNamespace(contextName) namespace.set(key, value) } @@ -55,16 +55,16 @@ class FunctionContext { } } - static destroyNamespace() { + static destroyNamespace(name = "session") { if (this._namespace) { - cls.destroyNamespace("session") + cls.destroyNamespace(name) this._namespace = null } } - static createNamespace() { + static createNamespace(name = "session") { if (!this._namespace) { - this._namespace = cls.createNamespace("session") + this._namespace = cls.createNamespace(name) } return this._namespace } diff --git a/packages/backend-core/src/tenancy/context.js b/packages/backend-core/src/tenancy/context.js index 01d1fdc604..ac2cfbeae9 100644 --- a/packages/backend-core/src/tenancy/context.js +++ b/packages/backend-core/src/tenancy/context.js @@ -1,6 +1,25 @@ const env = require("../environment") const { Headers } = require("../../constants") const cls = require("./FunctionContext") +const { getCouch } = require("../db") +const { getDeployedAppID, getDevelopmentAppID } = require("../db/utils") +const { isEqual } = require("lodash") + +// some test cases call functions directly, need to +// store an app ID to pretend there is a context +let TEST_APP_ID = null + +const ContextKeys = { + TENANT_ID: "tenantId", + APP_ID: "appId", + // whatever the request app DB was + 
CURRENT_DB: "currentDb", + // get the prod app DB from the request + PROD_DB: "prodDb", + // get the dev app DB from the request + DEV_DB: "devDb", + DB_OPTS: "dbOpts", +} exports.DEFAULT_TENANT_ID = "default" @@ -12,13 +31,11 @@ exports.isMultiTenant = () => { return env.MULTI_TENANCY } -const TENANT_ID = "tenantId" - // used for automations, API endpoints should always be in context already exports.doInTenant = (tenantId, task) => { return cls.run(() => { // set the tenant id - cls.setOnContext(TENANT_ID, tenantId) + cls.setOnContext(ContextKeys.TENANT_ID, tenantId) // invoke the task return task() @@ -26,7 +43,19 @@ exports.doInTenant = (tenantId, task) => { } exports.updateTenantId = tenantId => { - cls.setOnContext(TENANT_ID, tenantId) + cls.setOnContext(ContextKeys.TENANT_ID, tenantId) +} + +exports.updateAppId = appId => { + try { + cls.setOnContext(ContextKeys.APP_ID, appId) + } catch (err) { + if (env.isTest()) { + TEST_APP_ID = appId + } else { + throw err + } + } } exports.setTenantId = ( @@ -36,7 +65,7 @@ exports.setTenantId = ( let tenantId // exit early if not multi-tenant if (!exports.isMultiTenant()) { - cls.setOnContext(TENANT_ID, this.DEFAULT_TENANT_ID) + cls.setOnContext(ContextKeys.TENANT_ID, this.DEFAULT_TENANT_ID) return } @@ -63,12 +92,12 @@ exports.setTenantId = ( } // check tenant ID just incase no tenant was allowed if (tenantId) { - cls.setOnContext(TENANT_ID, tenantId) + cls.setOnContext(ContextKeys.TENANT_ID, tenantId) } } exports.isTenantIdSet = () => { - const tenantId = cls.getFromContext(TENANT_ID) + const tenantId = cls.getFromContext(ContextKeys.TENANT_ID) return !!tenantId } @@ -76,9 +105,77 @@ exports.getTenantId = () => { if (!exports.isMultiTenant()) { return exports.DEFAULT_TENANT_ID } - const tenantId = cls.getFromContext(TENANT_ID) + const tenantId = cls.getFromContext(ContextKeys.TENANT_ID) if (!tenantId) { throw Error("Tenant id not found") } return tenantId } + +exports.getAppId = () => { + const foundId = cls.getFromContext(ContextKeys.APP_ID) + if (!foundId && env.isTest() && TEST_APP_ID) { + return TEST_APP_ID + } else { + return foundId + } +} + +function getDB(key, opts) { + const dbOptsKey = `${key}${ContextKeys.DB_OPTS}` + let storedOpts = cls.getFromContext(dbOptsKey) + let db = cls.getFromContext(key) + if (db && isEqual(opts, storedOpts)) { + return db + } + const appId = exports.getAppId() + const CouchDB = getCouch() + let toUseAppId + switch (key) { + case ContextKeys.CURRENT_DB: + toUseAppId = appId + break + case ContextKeys.PROD_DB: + toUseAppId = getDeployedAppID(appId) + break + case ContextKeys.DEV_DB: + toUseAppId = getDevelopmentAppID(appId) + break + } + db = new CouchDB(toUseAppId, opts) + try { + cls.setOnContext(key, db) + if (opts) { + cls.setOnContext(dbOptsKey, opts) + } + } catch (err) { + if (!env.isTest()) { + throw err + } + } + return db +} + +/** + * Opens the app database based on whatever the request + * contained, dev or prod. + */ +exports.getAppDB = opts => { + return getDB(ContextKeys.CURRENT_DB, opts) +} + +/** + * This specifically gets the prod app ID, if the request + * contained a development app ID, this will open the prod one. + */ +exports.getProdAppDB = opts => { + return getDB(ContextKeys.PROD_DB, opts) +} + +/** + * This specifically gets the dev app ID, if the request + * contained a prod app ID, this will open the dev one. 
+ */ +exports.getDevAppDB = opts => { + return getDB(ContextKeys.DEV_DB, opts) +} diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js index 7aaeebc025..9197fa30a1 100644 --- a/packages/server/src/api/controllers/application.js +++ b/packages/server/src/api/controllers/application.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const env = require("../../environment") const packageJson = require("../../../package.json") const { @@ -45,11 +44,13 @@ const { getTenantId, isMultiTenant } = require("@budibase/backend-core/tenancy") const { syncGlobalUsers } = require("./user") const { app: appCache } = require("@budibase/backend-core/cache") const { cleanupAutomations } = require("../../automations/utils") +const context = require("@budibase/backend-core/context") const URL_REGEX_SLASH = /\/|\\/g // utility function, need to do away with this -async function getLayouts(db) { +async function getLayouts() { + const db = context.getAppDB() return ( await db.allDocs( getLayoutParams(null, { @@ -59,7 +60,8 @@ async function getLayouts(db) { ).rows.map(row => row.doc) } -async function getScreens(db) { +async function getScreens() { + const db = context.getAppDB() return ( await db.allDocs( getScreenParams(null, { @@ -116,8 +118,9 @@ async function createInstance(template) { const tenantId = isMultiTenant() ? getTenantId() : null const baseAppId = generateAppID(tenantId) const appId = generateDevAppID(baseAppId) + context.updateAppId(appId) - const db = new CouchDB(appId) + const db = context.getAppDB() await db.put({ _id: "_design/database", // view collation information, read before writing any complex views: @@ -127,9 +130,9 @@ async function createInstance(template) { // NOTE: indexes need to be created before any tables/templates // add view for linked rows - await createLinkView(appId) - await createRoutingView(appId) - await createAllSearchIndex(appId) + await createLinkView() + await createRoutingView() + await createAllSearchIndex() // replicate the template data to the instance DB // this is currently very hard to test, downloading and importing template files @@ -155,7 +158,7 @@ async function createInstance(template) { exports.fetch = async ctx => { const dev = ctx.query && ctx.query.status === AppStatus.DEV const all = ctx.query && ctx.query.status === AppStatus.ALL - const apps = await getAllApps(CouchDB, { dev, all }) + const apps = await getAllApps({ dev, all }) // get the locks for all the dev apps if (dev || all) { @@ -178,12 +181,11 @@ exports.fetch = async ctx => { } exports.fetchAppDefinition = async ctx => { - const db = new CouchDB(ctx.params.appId) - const layouts = await getLayouts(db) + const layouts = await getLayouts() const userRoleId = getUserRoleId(ctx) - const accessController = new AccessController(ctx.params.appId) + const accessController = new AccessController() const screens = await accessController.checkScreensAccess( - await getScreens(db), + await getScreens(), userRoleId ) ctx.body = { @@ -194,15 +196,15 @@ exports.fetchAppDefinition = async ctx => { } exports.fetchAppPackage = async ctx => { - const db = new CouchDB(ctx.params.appId) + const db = context.getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) - const layouts = await getLayouts(db) - let screens = await getScreens(db) + const layouts = await getLayouts() + let screens = await getScreens() // Only filter screens if the user is not a builder if (!(ctx.user.builder && ctx.user.builder.global)) { const 
userRoleId = getUserRoleId(ctx) - const accessController = new AccessController(ctx.params.appId) + const accessController = new AccessController() screens = await accessController.checkScreensAccess(screens, userRoleId) } @@ -215,7 +217,7 @@ exports.fetchAppPackage = async ctx => { } exports.create = async ctx => { - const apps = await getAllApps(CouchDB, { dev: true }) + const apps = await getAllApps({ dev: true }) const name = ctx.request.body.name checkAppName(ctx, apps, name) const url = await getAppUrl(ctx) @@ -233,7 +235,7 @@ exports.create = async ctx => { const instance = await createInstance(instanceConfig) const appId = instance._id - const db = new CouchDB(appId) + const db = context.getAppDB() let _rev try { // if template there will be an existing doc @@ -277,7 +279,7 @@ exports.create = async ctx => { } exports.update = async ctx => { - const apps = await getAllApps(CouchDB, { dev: true }) + const apps = await getAllApps({ dev: true }) // validation const name = ctx.request.body.name checkAppName(ctx, apps, name, ctx.params.appId) @@ -292,7 +294,7 @@ exports.update = async ctx => { exports.updateClient = async ctx => { // Get current app version - const db = new CouchDB(ctx.params.appId) + const db = context.getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) const currentVersion = application.version @@ -314,7 +316,7 @@ exports.updateClient = async ctx => { exports.revertClient = async ctx => { // Check app can be reverted - const db = new CouchDB(ctx.params.appId) + const db = context.getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) if (!application.revertableVersion) { ctx.throw(400, "There is no version to revert to") @@ -336,7 +338,7 @@ exports.revertClient = async ctx => { } exports.delete = async ctx => { - const db = new CouchDB(ctx.params.appId) + const db = context.getAppDB() const result = await db.destroy() /* istanbul ignore next */ @@ -364,7 +366,8 @@ exports.sync = async (ctx, next) => { const prodAppId = getDeployedAppID(appId) try { - const prodDb = new CouchDB(prodAppId, { skip_setup: true }) + // specific case, want to make sure setup is skipped + const prodDb = context.getProdAppDB({ skip_setup: true }) const info = await prodDb.info() if (info.error) throw info.error } catch (err) { @@ -392,7 +395,7 @@ exports.sync = async (ctx, next) => { } // sync the users - await syncGlobalUsers(appId) + await syncGlobalUsers() if (error) { ctx.throw(400, error) @@ -404,7 +407,7 @@ exports.sync = async (ctx, next) => { } const updateAppPackage = async (appPackage, appId) => { - const db = new CouchDB(appId) + const db = context.getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) const newAppPackage = { ...application, ...appPackage } @@ -423,7 +426,7 @@ const updateAppPackage = async (appPackage, appId) => { } const createEmptyAppPackage = async (ctx, app) => { - const db = new CouchDB(app.appId) + const db = context.getAppDB() let screensAndLayouts = [] for (let layout of BASE_LAYOUTS) { diff --git a/packages/server/src/api/controllers/cloud.js b/packages/server/src/api/controllers/cloud.js index ea6cc9b71e..38804f4d4a 100644 --- a/packages/server/src/api/controllers/cloud.js +++ b/packages/server/src/api/controllers/cloud.js @@ -1,6 +1,5 @@ const env = require("../../environment") const { getAllApps } = require("@budibase/backend-core/db") -const CouchDB = require("../../db") const { exportDB, sendTempFile, @@ -30,7 +29,7 @@ exports.exportApps = async ctx => { if (env.SELF_HOSTED || !env.MULTI_TENANCY) 
{ ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.") } - const apps = await getAllApps(CouchDB, { all: true }) + const apps = await getAllApps({ all: true }) const globalDBString = await exportDB(getGlobalDBName(), { filter: doc => !doc._id.startsWith(DocumentTypes.USER), }) @@ -63,7 +62,7 @@ async function hasBeenImported() { if (!env.SELF_HOSTED || env.MULTI_TENANCY) { return true } - const apps = await getAllApps(CouchDB, { all: true }) + const apps = await getAllApps({ all: true }) return apps.length !== 0 } diff --git a/packages/server/src/api/controllers/component.js b/packages/server/src/api/controllers/component.js index 06cb2cd211..2d0aaea23a 100644 --- a/packages/server/src/api/controllers/component.js +++ b/packages/server/src/api/controllers/component.js @@ -1,15 +1,14 @@ -const CouchDB = require("../../db") const { DocumentTypes } = require("../../db/utils") const { getComponentLibraryManifest } = require("../../utilities/fileSystem") +const { getAppDB } = require("@budibase/backend-core/context") exports.fetchAppComponentDefinitions = async function (ctx) { - const appId = ctx.params.appId || ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const app = await db.get(DocumentTypes.APP_METADATA) let componentManifests = await Promise.all( app.componentLibraries.map(async library => { - let manifest = await getComponentLibraryManifest(appId, library) + let manifest = await getComponentLibraryManifest(library) return { manifest, diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js index 5ab3c0a865..999f322563 100644 --- a/packages/server/src/api/controllers/datasource.js +++ b/packages/server/src/api/controllers/datasource.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const { generateDatasourceID, getDatasourceParams, @@ -11,12 +10,11 @@ const { BuildSchemaErrors, InvalidColumns } = require("../../constants") const { integrations } = require("../../integrations") const { getDatasourceAndQuery } = require("./row/utils") const { invalidateDynamicVariables } = require("../../threads/utils") +const { getAppDB } = require("@budibase/backend-core/context") exports.fetch = async function (ctx) { - const database = new CouchDB(ctx.appId) - // Get internal tables - const db = new CouchDB(ctx.appId) + const db = getAppDB() const internalTables = await db.allDocs( getTableParams(null, { include_docs: true, @@ -31,7 +29,7 @@ exports.fetch = async function (ctx) { // Get external datasources const datasources = ( - await database.allDocs( + await db.allDocs( getDatasourceParams(null, { include_docs: true, }) @@ -49,7 +47,7 @@ exports.fetch = async function (ctx) { } exports.buildSchemaFromDb = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const datasource = await db.get(ctx.params.datasourceId) const { tables, error } = await buildSchemaHelper(datasource) @@ -98,7 +96,7 @@ const invalidateVariables = async (existingDatasource, updatedDatasource) => { } exports.update = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const datasourceId = ctx.params.datasourceId let datasource = await db.get(datasourceId) const auth = datasource.config.auth @@ -126,7 +124,7 @@ exports.update = async function (ctx) { } exports.save = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const plus = ctx.request.body.datasource.plus const fetchSchema = ctx.request.body.fetchSchema @@ -162,7 +160,7 @@ 
exports.save = async function (ctx) { } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() // Delete all queries for the datasource const queries = await db.allDocs( @@ -184,7 +182,7 @@ exports.destroy = async function (ctx) { } exports.find = async function (ctx) { - const database = new CouchDB(ctx.appId) + const database = getAppDB() ctx.body = await database.get(ctx.params.datasourceId) } @@ -192,7 +190,7 @@ exports.find = async function (ctx) { exports.query = async function (ctx) { const queryJson = ctx.request.body try { - ctx.body = await getDatasourceAndQuery(ctx.appId, queryJson) + ctx.body = await getDatasourceAndQuery(queryJson) } catch (err) { ctx.throw(400, err) } diff --git a/packages/server/src/api/controllers/layout.js b/packages/server/src/api/controllers/layout.js index c3cae1b6a7..a92eec424a 100644 --- a/packages/server/src/api/controllers/layout.js +++ b/packages/server/src/api/controllers/layout.js @@ -2,11 +2,11 @@ const { EMPTY_LAYOUT, BASE_LAYOUT_PROP_IDS, } = require("../../constants/layouts") -const CouchDB = require("../../db") const { generateLayoutID, getScreenParams } = require("../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") exports.save = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() let layout = ctx.request.body if (!layout.props) { @@ -26,7 +26,7 @@ exports.save = async function (ctx) { } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const layoutId = ctx.params.layoutId, layoutRev = ctx.params.layoutRev diff --git a/packages/server/src/api/controllers/permission.js b/packages/server/src/api/controllers/permission.js index 5c42fe77ef..0e37a3e7d3 100644 --- a/packages/server/src/api/controllers/permission.js +++ b/packages/server/src/api/controllers/permission.js @@ -6,12 +6,12 @@ const { getBuiltinRoles, } = require("@budibase/backend-core/roles") const { getRoleParams } = require("../../db/utils") -const CouchDB = require("../../db") const { CURRENTLY_SUPPORTED_LEVELS, getBasePermissions, } = require("../../utilities/security") const { removeFromArray } = require("../../utilities") +const { getAppDB } = require("@budibase/backend-core/context") const PermissionUpdateType = { REMOVE: "remove", @@ -35,7 +35,7 @@ async function updatePermissionOnRole( { roleId, resourceId, level }, updateType ) { - const db = new CouchDB(appId) + const db = getAppDB() const remove = updateType === PermissionUpdateType.REMOVE const isABuiltin = isBuiltin(roleId) const dbRoleId = getDBRoleID(roleId) @@ -106,7 +106,7 @@ exports.fetchLevels = function (ctx) { } exports.fetch = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const roles = await getAllDBRoles(db) let permissions = {} // create an object with structure role ID -> resource ID -> level @@ -133,7 +133,7 @@ exports.fetch = async function (ctx) { exports.getResourcePerms = async function (ctx) { const resourceId = ctx.params.resourceId - const db = new CouchDB(ctx.appId) + const db = getAppDB() const body = await db.allDocs( getRoleParams(null, { include_docs: true, diff --git a/packages/server/src/api/controllers/role.js b/packages/server/src/api/controllers/role.js index b79907031d..11b4b9a520 100644 --- a/packages/server/src/api/controllers/role.js +++ b/packages/server/src/api/controllers/role.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const { Role, getRole, @@ -10,6 +9,7 @@ const { 
getUserMetadataParams, InternalTables, } = require("../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") const UpdateRolesOptions = { CREATED: "created", @@ -40,15 +40,15 @@ async function updateRolesOnUserTable(db, roleId, updateOption) { } exports.fetch = async function (ctx) { - ctx.body = await getAllRoles(ctx.appId) + ctx.body = await getAllRoles() } exports.find = async function (ctx) { - ctx.body = await getRole(ctx.appId, ctx.params.roleId) + ctx.body = await getRole(ctx.params.roleId) } exports.save = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() let { _id, name, inherits, permissionId } = ctx.request.body if (!_id) { _id = generateRoleID() @@ -69,7 +69,7 @@ exports.save = async function (ctx) { } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const roleId = ctx.params.roleId if (isBuiltin(roleId)) { ctx.throw(400, "Cannot delete builtin role.") diff --git a/packages/server/src/api/controllers/routing.js b/packages/server/src/api/controllers/routing.js index d45d33ed07..aeb728454b 100644 --- a/packages/server/src/api/controllers/routing.js +++ b/packages/server/src/api/controllers/routing.js @@ -63,7 +63,7 @@ exports.fetch = async ctx => { exports.clientFetch = async ctx => { const routing = await getRoutingStructure(ctx.appId) let roleId = ctx.user.role._id - const roleIds = await getUserRoleHierarchy(ctx.appId, roleId) + const roleIds = await getUserRoleHierarchy(roleId) for (let topLevel of Object.values(routing.routes)) { for (let subpathKey of Object.keys(topLevel.subpaths)) { let found = false diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index af199561dc..317511f508 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -19,6 +19,19 @@ import { isRowId, convertRowId, } from "../../../integrations/utils" +import { getDatasourceAndQuery } from "./utils" +import { + DataSourceOperation, + FieldTypes, + RelationshipTypes, +} from "../../../constants" +import { breakExternalTableId, isSQL } from "../../../integrations/utils" +import { processObjectSync } from "@budibase/string-templates" +// @ts-ignore +import { cloneDeep } from "lodash/fp" +import { processFormulas } from "../../../utilities/rowProcessor/utils" +// @ts-ignore +import { getAppDB } from "@budibase/backend-core/context" interface ManyRelationship { tableId?: string @@ -38,18 +51,6 @@ interface RunConfig { } module External { - const { getDatasourceAndQuery } = require("./utils") - const { - DataSourceOperation, - FieldTypes, - RelationshipTypes, - } = require("../../../constants") - const { breakExternalTableId, isSQL } = require("../../../integrations/utils") - const { processObjectSync } = require("@budibase/string-templates") - const { cloneDeep } = require("lodash/fp") - const CouchDB = require("../../../db") - const { processFormulas } = require("../../../utilities/rowProcessor/utils") - function buildFilters( id: string | undefined, filters: SearchFilters, @@ -210,19 +211,16 @@ module External { } class ExternalRequest { - private readonly appId: string private operation: Operation private tableId: string private datasource: Datasource private tables: { [key: string]: Table } = {} constructor( - appId: string, operation: Operation, tableId: string, datasource: Datasource ) { - this.appId = appId this.operation = operation 
this.tableId = tableId this.datasource = datasource @@ -231,12 +229,14 @@ module External { } } - getTable(tableId: string | undefined): Table { + getTable(tableId: string | undefined): Table | undefined { if (!tableId) { throw "Table ID is unknown, cannot find table" } const { tableName } = breakExternalTableId(tableId) - return this.tables[tableName] + if (tableName) { + return this.tables[tableName] + } } inputProcessing(row: Row | undefined, table: Table) { @@ -272,9 +272,9 @@ module External { newRow[key] = row[key] continue } - const { tableName: linkTableName } = breakExternalTableId(field.tableId) + const { tableName: linkTableName } = breakExternalTableId(field?.tableId) // table has to exist for many to many - if (!this.tables[linkTableName]) { + if (!linkTableName || !this.tables[linkTableName]) { continue } const linkTable = this.tables[linkTableName] @@ -422,7 +422,7 @@ module External { } const { tableName: linkTableName } = breakExternalTableId(field.tableId) // no table to link to, this is not a valid relationships - if (!this.tables[linkTableName]) { + if (!linkTableName || !this.tables[linkTableName]) { continue } const linkTable = this.tables[linkTableName] @@ -460,6 +460,9 @@ module External { async lookupRelations(tableId: string, row: Row) { const related: { [key: string]: any } = {} const { tableName } = breakExternalTableId(tableId) + if (!tableName) { + return related + } const table = this.tables[tableName] // @ts-ignore const primaryKey = table.primary[0] @@ -484,7 +487,7 @@ module External { if (!lookupField || !row[lookupField]) { continue } - const response = await getDatasourceAndQuery(this.appId, { + const response = await getDatasourceAndQuery({ endpoint: getEndpoint(tableId, DataSourceOperation.READ), filters: { equal: { @@ -515,28 +518,30 @@ module External { row: Row, relationships: ManyRelationship[] ) { - const { appId } = this // if we're creating (in a through table) need to wipe the existing ones first const promises = [] const related = await this.lookupRelations(mainTableId, row) for (let relationship of relationships) { const { key, tableId, isUpdate, id, ...rest } = relationship - const body = processObjectSync(rest, row) + const body: { [key: string]: any } = processObjectSync(rest, row, {}) const linkTable = this.getTable(tableId) // @ts-ignore - const linkPrimary = linkTable.primary[0] + const linkPrimary = linkTable?.primary[0] + if (!linkTable || !linkPrimary) { + return + } const rows = related[key].rows || [] const found = rows.find( (row: { [key: string]: any }) => row[linkPrimary] === relationship.id || - row[linkPrimary] === body[linkPrimary] + row[linkPrimary] === body?.[linkPrimary] ) const operation = isUpdate ? DataSourceOperation.UPDATE : DataSourceOperation.CREATE if (!found) { promises.push( - getDatasourceAndQuery(appId, { + getDatasourceAndQuery({ endpoint: getEndpoint(tableId, operation), // if we're doing many relationships then we're writing, only one response body, @@ -552,9 +557,9 @@ module External { for (let [colName, { isMany, rows, tableId }] of Object.entries( related )) { - const table: Table = this.getTable(tableId) + const table: Table | undefined = this.getTable(tableId) // if its not the foreign key skip it, nothing to do - if (table.primary && table.primary.indexOf(colName) !== -1) { + if (!table || (table.primary && table.primary.indexOf(colName) !== -1)) { continue } for (let row of rows) { @@ -566,7 +571,7 @@ module External { : DataSourceOperation.UPDATE const body = isMany ? 
null : { [colName]: null } promises.push( - getDatasourceAndQuery(this.appId, { + getDatasourceAndQuery({ endpoint: getEndpoint(tableId, op), body, filters, @@ -605,20 +610,25 @@ module External { continue } const { tableName: linkTableName } = breakExternalTableId(field.tableId) - const linkTable = this.tables[linkTableName] - if (linkTable) { - const linkedFields = extractRealFields(linkTable, fields) - fields = fields.concat(linkedFields) + if (linkTableName) { + const linkTable = this.tables[linkTableName] + if (linkTable) { + const linkedFields = extractRealFields(linkTable, fields) + fields = fields.concat(linkedFields) + } } } return fields } async run(config: RunConfig) { - const { appId, operation, tableId } = this + const { operation, tableId } = this let { datasourceId, tableName } = breakExternalTableId(tableId) + if (!tableName) { + throw "Unable to run without a table name" + } if (!this.datasource) { - const db = new CouchDB(appId) + const db = getAppDB() this.datasource = await db.get(datasourceId) if (!this.datasource || !this.datasource.entities) { throw "No tables found, fetch tables before query." @@ -670,7 +680,7 @@ module External { }, } // can't really use response right now - const response = await getDatasourceAndQuery(appId, json) + const response = await getDatasourceAndQuery(json) // handle many to many relationships now if we know the ID (could be auto increment) if ( operation !== DataSourceOperation.READ && diff --git a/packages/server/src/api/controllers/row/external.js b/packages/server/src/api/controllers/row/external.js index b8620f7bc3..4e79975893 100644 --- a/packages/server/src/api/controllers/row/external.js +++ b/packages/server/src/api/controllers/row/external.js @@ -11,7 +11,7 @@ const { const ExternalRequest = require("./ExternalRequest") const CouchDB = require("../../../db") -async function handleRequest(appId, operation, tableId, opts = {}) { +async function handleRequest(operation, tableId, opts = {}) { // make sure the filters are cleaned up, no empty strings for equals, fuzzy or string if (opts && opts.filters) { for (let filterField of NoEmptyFilterStrings) { @@ -25,9 +25,7 @@ async function handleRequest(appId, operation, tableId, opts = {}) { } } } - return new ExternalRequest(appId, operation, tableId, opts.datasource).run( - opts - ) + return new ExternalRequest(operation, tableId, opts.datasource).run(opts) } exports.handleRequest = handleRequest diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.js index 75caaf2fda..8449530ce3 100644 --- a/packages/server/src/api/controllers/row/internal.js +++ b/packages/server/src/api/controllers/row/internal.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const linkRows = require("../../../db/linkedRows") const { getRowParams, @@ -27,6 +26,7 @@ const { getFromMemoryDoc, } = require("../view/utils") const { cloneDeep } = require("lodash/fp") +const { getAppDB } = require("@budibase/backend-core/context") const CALCULATION_TYPES = { SUM: "sum", @@ -106,8 +106,7 @@ async function getView(db, viewName) { } exports.patch = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const inputs = ctx.request.body const tableId = inputs.tableId const isUserTable = tableId === InternalTables.USER_METADATA @@ -146,14 +145,13 @@ exports.patch = async ctx => { // returned row is cleaned and prepared for writing to DB row = await linkRows.updateLinks({ - appId, eventType: 
linkRows.EventType.ROW_UPDATE, row, tableId: row.tableId, table, }) // check if any attachments removed - await cleanupAttachments(appId, table, { oldRow, row }) + await cleanupAttachments(table, { oldRow, row }) if (isUserTable) { // the row has been updated, need to put it into the ctx @@ -166,8 +164,7 @@ exports.patch = async ctx => { } exports.save = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() let inputs = ctx.request.body inputs.tableId = ctx.params.tableId @@ -189,7 +186,6 @@ exports.save = async function (ctx) { // make sure link rows are up to date row = await linkRows.updateLinks({ - appId, eventType: linkRows.EventType.ROW_SAVE, row, tableId: row.tableId, @@ -200,7 +196,6 @@ exports.save = async function (ctx) { } exports.fetchView = async ctx => { - const appId = ctx.appId const viewName = ctx.params.viewName // if this is a table view being looked for just transfer to that @@ -209,7 +204,7 @@ exports.fetchView = async ctx => { return exports.fetch(ctx) } - const db = new CouchDB(appId) + const db = getAppDB() const { calculation, group, field } = ctx.query const viewInfo = await getView(db, viewName) let response @@ -263,8 +258,7 @@ exports.fetchView = async ctx => { } exports.fetch = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const tableId = ctx.params.tableId let table = await db.get(tableId) @@ -273,17 +267,15 @@ exports.fetch = async ctx => { } exports.find = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const table = await db.get(ctx.params.tableId) - let row = await findRow(ctx, db, ctx.params.tableId, ctx.params.rowId) + let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId) row = await outputProcessing(ctx, table, row) return row } exports.destroy = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const { _id, _rev } = ctx.request.body let row = await db.get(_id) @@ -295,13 +287,12 @@ exports.destroy = async function (ctx) { row = await outputProcessing(ctx, table, row, { squash: false }) // now remove the relationships await linkRows.updateLinks({ - appId, eventType: linkRows.EventType.ROW_DELETE, row, tableId: row.tableId, }) // remove any attachments that were on the row from object storage - await cleanupAttachments(appId, table, { row }) + await cleanupAttachments(table, { row }) let response if (ctx.params.tableId === InternalTables.USER_METADATA) { @@ -317,8 +308,7 @@ exports.destroy = async function (ctx) { } exports.bulkDestroy = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const tableId = ctx.params.tableId const table = await db.get(tableId) let { rows } = ctx.request.body @@ -330,7 +320,6 @@ exports.bulkDestroy = async ctx => { // remove the relationships first let updates = rows.map(row => linkRows.updateLinks({ - appId, eventType: linkRows.EventType.ROW_DELETE, row, tableId: row.tableId, @@ -349,7 +338,7 @@ exports.bulkDestroy = async ctx => { await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true }))) } // remove any attachments that were on the rows from object storage - await cleanupAttachments(appId, table, { rows }) + await cleanupAttachments(table, { rows }) await Promise.all(updates) return { response: { ok: true }, rows } } @@ -360,25 +349,24 @@ exports.search = async ctx => { return { rows: await exports.fetch(ctx) } } - const appId = ctx.appId const { tableId } 
= ctx.params - const db = new CouchDB(appId) + const db = getAppDB() const { paginate, query, ...params } = ctx.request.body params.version = ctx.version params.tableId = tableId let response if (paginate) { - response = await paginatedSearch(appId, query, params) + response = await paginatedSearch(query, params) } else { - response = await fullSearch(appId, query, params) + response = await fullSearch(query, params) } // Enrich search results with relationships if (response.rows && response.rows.length) { // enrich with global users if from users table if (tableId === InternalTables.USER_METADATA) { - response.rows = await getGlobalUsersFromMetadata(appId, response.rows) + response.rows = await getGlobalUsersFromMetadata(response.rows) } const table = await db.get(tableId) response.rows = await outputProcessing(ctx, table, response.rows) @@ -389,25 +377,22 @@ exports.search = async ctx => { exports.validate = async ctx => { return validate({ - appId: ctx.appId, tableId: ctx.params.tableId, row: ctx.request.body, }) } exports.fetchEnrichedRow = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const tableId = ctx.params.tableId const rowId = ctx.params.rowId // need table to work out where links go in row let [table, row] = await Promise.all([ db.get(tableId), - findRow(ctx, db, tableId, rowId), + findRow(ctx, tableId, rowId), ]) // get the link docs const linkVals = await linkRows.getLinkDocuments({ - appId, tableId, rowId, }) diff --git a/packages/server/src/api/controllers/row/internalSearch.js b/packages/server/src/api/controllers/row/internalSearch.js index 3a2586331a..21991f4de3 100644 --- a/packages/server/src/api/controllers/row/internalSearch.js +++ b/packages/server/src/api/controllers/row/internalSearch.js @@ -1,14 +1,14 @@ const { SearchIndexes } = require("../../../db/utils") const fetch = require("node-fetch") const { getCouchUrl } = require("@budibase/backend-core/db") +const { getAppId } = require("@budibase/backend-core/context") /** * Class to build lucene query URLs. * Optionally takes a base lucene query object. */ class QueryBuilder { - constructor(appId, base) { - this.appId = appId + constructor(base) { this.query = { string: {}, fuzzy: {}, @@ -233,7 +233,8 @@ class QueryBuilder { } async run() { - const url = `${getCouchUrl()}/${this.appId}/_design/database/_search/${ + const appId = getAppId() + const url = `${getCouchUrl()}/${appId}/_design/database/_search/${ SearchIndexes.ROWS }` const body = this.buildSearchBody() @@ -270,7 +271,6 @@ const runQuery = async (url, body) => { * Gets round the fixed limit of 200 results from a query by fetching as many * pages as required and concatenating the results. This recursively operates * until enough results have been found. 
- * @param appId {string} The app ID to search * @param query {object} The JSON query structure * @param params {object} The search params including: * tableId {string} The table ID to search @@ -283,7 +283,7 @@ const runQuery = async (url, body) => { * rows {array|null} Current results in the recursive search * @returns {Promise<*[]|*>} */ -const recursiveSearch = async (appId, query, params) => { +const recursiveSearch = async (query, params) => { const bookmark = params.bookmark const rows = params.rows || [] if (rows.length >= params.limit) { @@ -293,7 +293,7 @@ const recursiveSearch = async (appId, query, params) => { if (rows.length > params.limit - 200) { pageSize = params.limit - rows.length } - const page = await new QueryBuilder(appId, query) + const page = await new QueryBuilder(query) .setVersion(params.version) .setTable(params.tableId) .setBookmark(bookmark) @@ -313,14 +313,13 @@ const recursiveSearch = async (appId, query, params) => { bookmark: page.bookmark, rows: [...rows, ...page.rows], } - return await recursiveSearch(appId, query, newParams) + return await recursiveSearch(query, newParams) } /** * Performs a paginated search. A bookmark will be returned to allow the next * page to be fetched. There is a max limit off 200 results per page in a * paginated search. - * @param appId {string} The app ID to search * @param query {object} The JSON query structure * @param params {object} The search params including: * tableId {string} The table ID to search @@ -332,13 +331,13 @@ const recursiveSearch = async (appId, query, params) => { * bookmark {string} The bookmark to resume from * @returns {Promise<{hasNextPage: boolean, rows: *[]}>} */ -exports.paginatedSearch = async (appId, query, params) => { +exports.paginatedSearch = async (query, params) => { let limit = params.limit if (limit == null || isNaN(limit) || limit < 0) { limit = 50 } limit = Math.min(limit, 200) - const search = new QueryBuilder(appId, query) + const search = new QueryBuilder(query) .setVersion(params.version) .setTable(params.tableId) .setSort(params.sort) @@ -367,7 +366,6 @@ exports.paginatedSearch = async (appId, query, params) => { * desired amount of results. There is a limit of 1000 results to avoid * heavy performance hits, and to avoid client components breaking from * handling too much data. 
- * @param appId {string} The app ID to search * @param query {object} The JSON query structure * @param params {object} The search params including: * tableId {string} The table ID to search @@ -378,12 +376,12 @@ exports.paginatedSearch = async (appId, query, params) => { * limit {number} The desired number of results * @returns {Promise<{rows: *}>} */ -exports.fullSearch = async (appId, query, params) => { +exports.fullSearch = async (query, params) => { let limit = params.limit if (limit == null || isNaN(limit) || limit < 0) { limit = 1000 } params.limit = Math.min(limit, 1000) - const rows = await recursiveSearch(appId, query, params) + const rows = await recursiveSearch(query, params) return { rows } } diff --git a/packages/server/src/api/controllers/row/utils.js b/packages/server/src/api/controllers/row/utils.js index 51bc03eba4..4235e70127 100644 --- a/packages/server/src/api/controllers/row/utils.js +++ b/packages/server/src/api/controllers/row/utils.js @@ -1,11 +1,11 @@ const validateJs = require("validate.js") const { cloneDeep } = require("lodash/fp") -const CouchDB = require("../../../db") const { InternalTables } = require("../../../db/utils") const userController = require("../user") const { FieldTypes } = require("../../../constants") const { processStringSync } = require("@budibase/string-templates") const { makeExternalQuery } = require("../../../integrations/base/utils") +const { getAppDB } = require("@budibase/backend-core/context") validateJs.extend(validateJs.validators.datetime, { parse: function (value) { @@ -17,14 +17,15 @@ validateJs.extend(validateJs.validators.datetime, { }, }) -exports.getDatasourceAndQuery = async (appId, json) => { +exports.getDatasourceAndQuery = async json => { const datasourceId = json.endpoint.datasourceId - const db = new CouchDB(appId) + const db = getAppDB() const datasource = await db.get(datasourceId) return makeExternalQuery(datasource, json) } -exports.findRow = async (ctx, db, tableId, rowId) => { +exports.findRow = async (ctx, tableId, rowId) => { + const db = getAppDB() let row // TODO remove special user case in future if (tableId === InternalTables.USER_METADATA) { @@ -42,9 +43,9 @@ exports.findRow = async (ctx, db, tableId, rowId) => { return row } -exports.validate = async ({ appId, tableId, row, table }) => { +exports.validate = async ({ tableId, row, table }) => { if (!table) { - const db = new CouchDB(appId) + const db = getAppDB() table = await db.get(tableId) } const errors = {} diff --git a/packages/server/src/api/controllers/screen.js b/packages/server/src/api/controllers/screen.js index 5e0eeb5176..e166ab3eb8 100644 --- a/packages/server/src/api/controllers/screen.js +++ b/packages/server/src/api/controllers/screen.js @@ -1,10 +1,9 @@ -const CouchDB = require("../../db") const { getScreenParams, generateScreenID } = require("../../db/utils") const { AccessController } = require("@budibase/backend-core/roles") +const { getAppDB } = require("@budibase/backend-core/context") exports.fetch = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const screens = ( await db.allDocs( @@ -14,15 +13,14 @@ exports.fetch = async ctx => { ) ).rows.map(element => element.doc) - ctx.body = await new AccessController(appId).checkScreensAccess( + ctx.body = await new AccessController().checkScreensAccess( screens, ctx.user.role._id ) } exports.save = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() let screen = ctx.request.body if (!screen._id) 
{ @@ -39,7 +37,7 @@ exports.save = async ctx => { } exports.destroy = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() await db.remove(ctx.params.screenId, ctx.params.screenRev) ctx.body = { message: "Screen deleted successfully", diff --git a/packages/server/src/api/controllers/static/index.js b/packages/server/src/api/controllers/static/index.js index 11bb14e282..cafe999150 100644 --- a/packages/server/src/api/controllers/static/index.js +++ b/packages/server/src/api/controllers/static/index.js @@ -6,7 +6,6 @@ const uuid = require("uuid") const { ObjectStoreBuckets } = require("../../../constants") const { processString } = require("@budibase/string-templates") const { getAllApps } = require("@budibase/backend-core/db") -const CouchDB = require("../../../db") const { loadHandlebarsFile, NODE_MODULES_PATH, @@ -17,6 +16,7 @@ const { clientLibraryPath } = require("../../../utilities") const { upload } = require("../../../utilities/fileSystem") const { attachmentsRelativeURL } = require("../../../utilities") const { DocumentTypes } = require("../../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") const AWS = require("aws-sdk") const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1" @@ -44,7 +44,7 @@ async function getAppIdFromUrl(ctx) { let possibleAppUrl = `/${encodeURI(ctx.params.appId).toLowerCase()}` // search prod apps for a url that matches, exclude dev where id is always used - const apps = await getAllApps(CouchDB, { dev: false }) + const apps = await getAllApps({ dev: false }) const app = apps.filter( a => a.url && a.url.toLowerCase() === possibleAppUrl )[0] @@ -85,7 +85,7 @@ exports.uploadFile = async function (ctx) { exports.serveApp = async function (ctx) { let appId = await getAppIdFromUrl(ctx) const App = require("./templates/BudibaseApp.svelte").default - const db = new CouchDB(appId, { skip_setup: true }) + const db = getAppDB({ skip_setup: true }) const appInfo = await db.get(DocumentTypes.APP_METADATA) const { head, html, css } = App.render({ @@ -111,7 +111,7 @@ exports.serveClientLibrary = async function (ctx) { } exports.getSignedUploadURL = async function (ctx) { - const database = new CouchDB(ctx.appId) + const database = getAppDB() // Ensure datasource is valid let datasource diff --git a/packages/server/src/api/controllers/table/external.js b/packages/server/src/api/controllers/table/external.js index 2453ca7a37..b27eebb0c4 100644 --- a/packages/server/src/api/controllers/table/external.js +++ b/packages/server/src/api/controllers/table/external.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const { buildExternalTableId, breakExternalTableId, @@ -19,6 +18,7 @@ const { makeExternalQuery } = require("../../../integrations/base/utils") const { cloneDeep } = require("lodash/fp") const csvParser = require("../../../utilities/csvParser") const { handleRequest } = require("../row/external") +const { getAppDB } = require("@budibase/backend-core/context") async function makeTableRequest( datasource, @@ -159,7 +159,6 @@ function isRelationshipSetup(column) { } exports.save = async function (ctx) { - const appId = ctx.appId const table = ctx.request.body // can't do this right now delete table.dataImport @@ -176,14 +175,14 @@ exports.save = async function (ctx) { let oldTable if (ctx.request.body && ctx.request.body._id) { - oldTable = await getTable(appId, ctx.request.body._id) + oldTable = await getTable(ctx.request.body._id) } if (hasTypeChanged(tableToSave, oldTable)) { ctx.throw(400, "A column type 
has changed.") } - const db = new CouchDB(appId) + const db = getAppDB() const datasource = await db.get(datasourceId) const oldTables = cloneDeep(datasource.entities) const tables = datasource.entities @@ -267,14 +266,13 @@ exports.save = async function (ctx) { } exports.destroy = async function (ctx) { - const appId = ctx.appId - const tableToDelete = await getTable(appId, ctx.params.tableId) + const tableToDelete = await getTable(ctx.params.tableId) if (!tableToDelete || !tableToDelete.created) { ctx.throw(400, "Cannot delete tables which weren't created in Budibase.") } const datasourceId = getDatasourceId(tableToDelete) - const db = new CouchDB(appId) + const db = getAppDB() const datasource = await db.get(datasourceId) const tables = datasource.entities @@ -290,8 +288,7 @@ exports.destroy = async function (ctx) { } exports.bulkImport = async function (ctx) { - const appId = ctx.appId - const table = await getTable(appId, ctx.params.tableId) + const table = await getTable(ctx.params.tableId) const { dataImport } = ctx.request.body if (!dataImport || !dataImport.schema || !dataImport.csvString) { ctx.throw(400, "Provided data import information is invalid.") @@ -300,7 +297,7 @@ exports.bulkImport = async function (ctx) { ...dataImport, existingTable: table, }) - await handleRequest(appId, DataSourceOperation.BULK_CREATE, table._id, { + await handleRequest(DataSourceOperation.BULK_CREATE, table._id, { rows, }) return table diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js index 20dc10017d..f3493edb3b 100644 --- a/packages/server/src/api/controllers/table/index.js +++ b/packages/server/src/api/controllers/table/index.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const internal = require("./internal") const external = require("./external") const csvParser = require("../../../utilities/csvParser") @@ -9,6 +8,7 @@ const { BudibaseInternalDB, } = require("../../../db/utils") const { getTable } = require("./utils") +const { getAppDB } = require("@budibase/backend-core/context") function pickApi({ tableId, table }) { if (table && !tableId) { @@ -24,7 +24,7 @@ function pickApi({ tableId, table }) { // covers both internal and external exports.fetch = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const internalTables = await db.allDocs( getTableParams(null, { @@ -63,7 +63,7 @@ exports.fetch = async function (ctx) { exports.find = async function (ctx) { const tableId = ctx.params.id - ctx.body = await getTable(ctx.appId, tableId) + ctx.body = await getTable(tableId) } exports.save = async function (ctx) { @@ -102,7 +102,7 @@ exports.validateCSVSchema = async function (ctx) { const { csvString, schema = {}, tableId } = ctx.request.body let existingTable if (tableId) { - existingTable = await getTable(ctx.appId, tableId) + existingTable = await getTable(tableId) } let result = await csvParser.parse(csvString, schema) if (existingTable) { diff --git a/packages/server/src/api/controllers/table/internal.js b/packages/server/src/api/controllers/table/internal.js index 9f09e78219..7f7fe1cb3c 100644 --- a/packages/server/src/api/controllers/table/internal.js +++ b/packages/server/src/api/controllers/table/internal.js @@ -34,8 +34,7 @@ exports.save = async function (ctx) { // saving a table is a complex operation, involving many different steps, this // has been broken out into a utility to make it more obvious/easier to manipulate const tableSaveFunctions = new TableSaveFunctions({ - db, - 
ctx, + user: ctx.user, oldTable, dataImport, }) @@ -145,9 +144,8 @@ exports.destroy = async function (ctx) { } exports.bulkImport = async function (ctx) { - const appId = ctx.appId - const table = await getTable(appId, ctx.params.tableId) + const table = await getTable(ctx.params.tableId) const { dataImport } = ctx.request.body - await handleDataImport(appId, ctx.user, table, dataImport) + await handleDataImport(ctx.user, table, dataImport) return table } diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js index 86e2837e15..76dbada59a 100644 --- a/packages/server/src/api/controllers/table/utils.js +++ b/packages/server/src/api/controllers/table/utils.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const csvParser = require("../../../utilities/csvParser") const { getRowParams, @@ -17,8 +16,10 @@ const { const { getViews, saveView } = require("../view/utils") const viewTemplate = require("../view/viewBuilder") const usageQuota = require("../../../utilities/usageQuota") +const { getAppDB } = require("@budibase/backend-core/context") -exports.checkForColumnUpdates = async (db, oldTable, updatedTable) => { +exports.checkForColumnUpdates = async (oldTable, updatedTable) => { + const db = getAppDB() let updatedRows = [] const rename = updatedTable._rename let deletedColumns = [] @@ -46,7 +47,7 @@ exports.checkForColumnUpdates = async (db, oldTable, updatedTable) => { }) // Update views - await exports.checkForViewUpdates(db, updatedTable, rename, deletedColumns) + await exports.checkForViewUpdates(updatedTable, rename, deletedColumns) delete updatedTable._rename } return { rows: updatedRows, table: updatedTable } @@ -73,12 +74,12 @@ exports.makeSureTableUpToDate = (table, tableToSave) => { return tableToSave } -exports.handleDataImport = async (appId, user, table, dataImport) => { +exports.handleDataImport = async (user, table, dataImport) => { if (!dataImport || !dataImport.csvString) { return table } - const db = new CouchDB(appId) + const db = getAppDB() // Populate the table with rows imported from CSV in a bulk update const data = await csvParser.transform({ ...dataImport, @@ -123,8 +124,8 @@ exports.handleDataImport = async (appId, user, table, dataImport) => { return table } -exports.handleSearchIndexes = async (appId, table) => { - const db = new CouchDB(appId) +exports.handleSearchIndexes = async table => { + const db = getAppDB() // create relevant search indexes if (table.indexes && table.indexes.length > 0) { const currentIndexes = await db.getIndexes() @@ -181,12 +182,9 @@ exports.checkStaticTables = table => { } class TableSaveFunctions { - constructor({ db, ctx, oldTable, dataImport }) { - this.db = db - this.ctx = ctx - if (this.ctx && this.ctx.user) { - this.appId = this.ctx.appId - } + constructor({ user, oldTable, dataImport }) { + this.db = getAppDB() + this.user = user this.oldTable = oldTable this.dataImport = dataImport // any rows that need updated @@ -204,24 +202,15 @@ class TableSaveFunctions { // when confirmed valid async mid(table) { - let response = await exports.checkForColumnUpdates( - this.db, - this.oldTable, - table - ) + let response = await exports.checkForColumnUpdates(this.oldTable, table) this.rows = this.rows.concat(response.rows) return table } // after saving async after(table) { - table = await exports.handleSearchIndexes(this.appId, table) - table = await exports.handleDataImport( - this.appId, - this.ctx.user, - table, - this.dataImport - ) + table = await 
exports.handleSearchIndexes(table) + table = await exports.handleDataImport(this.user, table, this.dataImport) return table } @@ -230,8 +219,8 @@ class TableSaveFunctions { } } -exports.getAllExternalTables = async (appId, datasourceId) => { - const db = new CouchDB(appId) +exports.getAllExternalTables = async datasourceId => { + const db = getAppDB() const datasource = await db.get(datasourceId) if (!datasource || !datasource.entities) { throw "Datasource is not configured fully." @@ -239,25 +228,25 @@ exports.getAllExternalTables = async (appId, datasourceId) => { return datasource.entities } -exports.getExternalTable = async (appId, datasourceId, tableName) => { - const entities = await exports.getAllExternalTables(appId, datasourceId) +exports.getExternalTable = async (datasourceId, tableName) => { + const entities = await exports.getAllExternalTables(datasourceId) return entities[tableName] } -exports.getTable = async (appId, tableId) => { - const db = new CouchDB(appId) +exports.getTable = async tableId => { + const db = getAppDB() if (isExternalTable(tableId)) { let { datasourceId, tableName } = breakExternalTableId(tableId) const datasource = await db.get(datasourceId) - const table = await exports.getExternalTable(appId, datasourceId, tableName) + const table = await exports.getExternalTable(datasourceId, tableName) return { ...table, sql: isSQL(datasource) } } else { return db.get(tableId) } } -exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => { - const views = await getViews(db) +exports.checkForViewUpdates = async (table, rename, deletedColumns) => { + const views = await getViews() const tableViews = views.filter(view => view.meta.tableId === table._id) // Check each table view to see if impacted by this table action @@ -319,7 +308,7 @@ exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => { // Update view if required if (needsUpdated) { const newViewTemplate = viewTemplate(view.meta) - await saveView(db, null, view.name, newViewTemplate) + await saveView(null, view.name, newViewTemplate) if (!newViewTemplate.meta.schema) { newViewTemplate.meta.schema = table.schema } diff --git a/packages/server/src/api/controllers/user.js b/packages/server/src/api/controllers/user.js index d87afc4309..31194c3e96 100644 --- a/packages/server/src/api/controllers/user.js +++ b/packages/server/src/api/controllers/user.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const { generateUserMetadataID, getUserMetadataParams, @@ -15,8 +14,10 @@ const { } = require("@budibase/backend-core/db") const { doesDatabaseExist } = require("../../utilities") const { UserStatus } = require("@budibase/backend-core/constants") +const { getAppDB } = require("@budibase/backend-core/context") -async function rawMetadata(db) { +async function rawMetadata() { + const db = getAppDB() return ( await db.allDocs( getUserMetadataParams(null, { @@ -54,13 +55,10 @@ function combineMetadataAndUser(user, metadata) { return null } -exports.syncGlobalUsers = async appId => { +exports.syncGlobalUsers = async () => { // sync user metadata - const db = new CouchDB(appId) - const [users, metadata] = await Promise.all([ - getGlobalUsers(appId), - rawMetadata(db), - ]) + const db = getAppDB() + const [users, metadata] = await Promise.all([getGlobalUsers(), rawMetadata()]) const toWrite = [] for (let user of users) { const combined = await combineMetadataAndUser(user, metadata) @@ -94,7 +92,7 @@ exports.syncUser = async function (ctx) { let prodAppIds // if they are a builder then get 
all production app IDs if ((user.builder && user.builder.global) || deleting) { - prodAppIds = await getDeployedAppIDs(CouchDB) + prodAppIds = await getDeployedAppIDs() } else { prodAppIds = Object.entries(roles) .filter(entry => entry[1] !== BUILTIN_ROLE_IDS.PUBLIC) @@ -107,7 +105,7 @@ exports.syncUser = async function (ctx) { if (!(await doesDatabaseExist(appId))) { continue } - const db = new CouchDB(appId) + const db = getAppDB() const metadataId = generateUserMetadataID(userId) let metadata try { @@ -143,8 +141,8 @@ exports.syncUser = async function (ctx) { } exports.fetchMetadata = async function (ctx) { - const database = new CouchDB(ctx.appId) - const global = await getGlobalUsers(ctx.appId) + const database = getAppDB() + const global = await getGlobalUsers() const metadata = await rawMetadata(database) const users = [] for (let user of global) { @@ -171,8 +169,7 @@ exports.updateSelfMetadata = async function (ctx) { } exports.updateMetadata = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const user = ctx.request.body // this isn't applicable to the user delete user.roles @@ -184,7 +181,7 @@ exports.updateMetadata = async function (ctx) { } exports.destroyMetadata = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() try { const dbUser = await db.get(ctx.params.id) await db.remove(dbUser._id, dbUser._rev) @@ -207,7 +204,7 @@ exports.setFlag = async function (ctx) { ctx.throw(400, "Must supply a 'flag' field in request body.") } const flagDocId = generateUserFlagID(userId) - const db = new CouchDB(ctx.appId) + const db = getAppDB() let doc try { doc = await db.get(flagDocId) @@ -222,7 +219,7 @@ exports.setFlag = async function (ctx) { exports.getFlags = async function (ctx) { const userId = ctx.user._id const docId = generateUserFlagID(userId) - const db = new CouchDB(ctx.appId) + const db = getAppDB() let doc try { doc = await db.get(docId) diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js index e3232323bf..fd6b32f3d6 100644 --- a/packages/server/src/api/controllers/view/index.js +++ b/packages/server/src/api/controllers/view/index.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const viewTemplate = require("./viewBuilder") const { apiFileReturn } = require("../../../utilities/fileSystem") const exporters = require("./exporters") @@ -6,14 +5,14 @@ const { saveView, getView, getViews, deleteView } = require("./utils") const { fetchView } = require("../row") const { getTable } = require("../table/utils") const { FieldTypes } = require("../../../constants") +const { getAppDB } = require("@budibase/backend-core/context") exports.fetch = async ctx => { - const db = new CouchDB(ctx.appId) - ctx.body = await getViews(db) + ctx.body = await getViews() } exports.save = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const { originalName, ...viewToSave } = ctx.request.body const view = viewTemplate(viewToSave) @@ -21,7 +20,7 @@ exports.save = async ctx => { ctx.throw(400, "Cannot create view without a name") } - await saveView(db, originalName, viewToSave.name, view) + await saveView(originalName, viewToSave.name, view) // add views to table document const table = await db.get(ctx.request.body.tableId) @@ -42,9 +41,9 @@ exports.save = async ctx => { } exports.destroy = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const viewName = decodeURI(ctx.params.viewName) - const 
view = await deleteView(db, viewName) + const view = await deleteView(viewName) const table = await db.get(view.meta.tableId) delete table.views[viewName] await db.put(table) @@ -53,9 +52,8 @@ exports.destroy = async ctx => { } exports.exportView = async ctx => { - const db = new CouchDB(ctx.appId) const viewName = decodeURI(ctx.query.view) - const view = await getView(db, viewName) + const view = await getView(viewName) const format = ctx.query.format if (!format || !Object.values(exporters.ExportFormats).includes(format)) { @@ -83,7 +81,7 @@ exports.exportView = async ctx => { let schema = view && view.meta && view.meta.schema if (!schema) { const tableId = ctx.params.tableId || view.meta.tableId - const table = await getTable(ctx.appId, tableId) + const table = await getTable(tableId) schema = table.schema } diff --git a/packages/server/src/api/controllers/view/utils.js b/packages/server/src/api/controllers/view/utils.js index 27fccaf47f..59d169ef7f 100644 --- a/packages/server/src/api/controllers/view/utils.js +++ b/packages/server/src/api/controllers/view/utils.js @@ -6,8 +6,10 @@ const { SEPARATOR, } = require("../../../db/utils") const env = require("../../../environment") +const { getAppDB } = require("@budibase/backend-core/context") -exports.getView = async (db, viewName) => { +exports.getView = async viewName => { + const db = getAppDB() if (env.SELF_HOSTED) { const designDoc = await db.get("_design/database") return designDoc.views[viewName] @@ -22,7 +24,8 @@ exports.getView = async (db, viewName) => { } } -exports.getViews = async db => { +exports.getViews = async () => { + const db = getAppDB() const response = [] if (env.SELF_HOSTED) { const designDoc = await db.get("_design/database") @@ -54,7 +57,8 @@ exports.getViews = async db => { return response } -exports.saveView = async (db, originalName, viewName, viewTemplate) => { +exports.saveView = async (originalName, viewName, viewTemplate) => { + const db = getAppDB() if (env.SELF_HOSTED) { const designDoc = await db.get("_design/database") designDoc.views = { @@ -91,7 +95,8 @@ exports.saveView = async (db, originalName, viewName, viewTemplate) => { } } -exports.deleteView = async (db, viewName) => { +exports.deleteView = async viewName => { + const db = getAppDB() if (env.SELF_HOSTED) { const designDoc = await db.get("_design/database") const view = designDoc.views[viewName] diff --git a/packages/server/src/api/routes/tests/misc.spec.js b/packages/server/src/api/routes/tests/misc.spec.js index ae5c0cca60..e5b87543d2 100644 --- a/packages/server/src/api/routes/tests/misc.spec.js +++ b/packages/server/src/api/routes/tests/misc.spec.js @@ -82,7 +82,6 @@ describe("run misc tests", () => { dataImport.schema[col] = { type: "string" } } await tableUtils.handleDataImport( - config.getAppId(), { userId: "test" }, table, dataImport diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.js b/packages/server/src/api/routes/tests/utilities/TestFunctions.js index 9bd54f0d75..e9e15b7619 100644 --- a/packages/server/src/api/routes/tests/utilities/TestFunctions.js +++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.js @@ -1,9 +1,9 @@ const rowController = require("../../../controllers/row") const appController = require("../../../controllers/application") -const CouchDB = require("../../../../db") const { AppStatus } = require("../../../../db/utils") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") const { TENANT_ID } = require("../../../../tests/utilities/structures") +const { getAppDB 
} = require("@budibase/backend-core/context") function Request(appId, params) { this.appId = appId @@ -96,8 +96,8 @@ exports.checkPermissionsEndpoint = async ({ .expect(403) } -exports.getDB = config => { - return new CouchDB(config.getAppId()) +exports.getDB = () => { + return getAppDB() } exports.testAutomation = async (config, automation) => { diff --git a/packages/server/src/automations/automationUtils.js b/packages/server/src/automations/automationUtils.js index aab341a1f8..9360840efd 100644 --- a/packages/server/src/automations/automationUtils.js +++ b/packages/server/src/automations/automationUtils.js @@ -53,13 +53,12 @@ exports.cleanInputValues = (inputs, schema) => { * the automation but is instead part of the Table/Table. This function will get the table schema and use it to instead * perform the cleanInputValues function on the input row. * - * @param {string} appId The instance which the Table/Table is contained under. * @param {string} tableId The ID of the Table/Table which the schema is to be retrieved for. * @param {object} row The input row structure which requires clean-up after having been through template statements. * @returns {Promise} The cleaned up rows object, will should now have all the required primitive types. */ -exports.cleanUpRow = async (appId, tableId, row) => { - let table = await getTable(appId, tableId) +exports.cleanUpRow = async (tableId, row) => { + let table = await getTable(tableId) return exports.cleanInputValues(row, { properties: table.schema }) } diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js index 1937121062..a16521d25d 100644 --- a/packages/server/src/automations/steps/createRow.js +++ b/packages/server/src/automations/steps/createRow.js @@ -78,7 +78,6 @@ exports.run = async function ({ inputs, appId, emitter }) { try { inputs.row = await automationUtils.cleanUpRow( - appId, inputs.row.tableId, inputs.row ) diff --git a/packages/server/src/automations/steps/updateRow.js b/packages/server/src/automations/steps/updateRow.js index a9569932fa..f66fcf9432 100644 --- a/packages/server/src/automations/steps/updateRow.js +++ b/packages/server/src/automations/steps/updateRow.js @@ -87,7 +87,7 @@ exports.run = async function ({ inputs, appId, emitter }) { try { if (tableId) { - inputs.row = await automationUtils.cleanUpRow(appId, tableId, inputs.row) + inputs.row = await automationUtils.cleanUpRow(tableId, inputs.row) } await rowController.patch(ctx) return { diff --git a/packages/server/src/db/linkedRows/LinkController.js b/packages/server/src/db/linkedRows/LinkController.js index b66e2debb5..86c32bf94f 100644 --- a/packages/server/src/db/linkedRows/LinkController.js +++ b/packages/server/src/db/linkedRows/LinkController.js @@ -1,4 +1,3 @@ -const CouchDB = require("../index") const { IncludeDocs, getLinkDocuments } = require("./linkUtils") const { generateLinkID, @@ -7,6 +6,7 @@ const { } = require("../utils") const Sentry = require("@sentry/node") const { FieldTypes, RelationshipTypes } = require("../../constants") +const { getAppDB } = require("@budibase/backend-core/context") /** * Creates a new link document structure which can be put to the database. 
It is important to @@ -52,9 +52,8 @@ function LinkDocument( } class LinkController { - constructor({ appId, tableId, row, table, oldTable }) { - this._appId = appId - this._db = new CouchDB(appId) + constructor({ tableId, row, table, oldTable }) { + this._db = getAppDB() this._tableId = tableId this._row = row this._table = table @@ -99,7 +98,6 @@ class LinkController { */ getRowLinkDocs(rowId) { return getLinkDocuments({ - appId: this._appId, tableId: this._tableId, rowId, includeDocs: IncludeDocs.INCLUDE, @@ -111,7 +109,6 @@ class LinkController { */ getTableLinkDocs() { return getLinkDocuments({ - appId: this._appId, tableId: this._tableId, includeDocs: IncludeDocs.INCLUDE, }) @@ -230,7 +227,6 @@ class LinkController { if (linkedSchema.relationshipType === RelationshipTypes.ONE_TO_MANY) { let links = ( await getLinkDocuments({ - appId: this._appId, tableId: field.tableId, rowId: linkId, includeDocs: IncludeDocs.EXCLUDE, diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js index 6835719e5f..f2872d808a 100644 --- a/packages/server/src/db/linkedRows/index.js +++ b/packages/server/src/db/linkedRows/index.js @@ -9,12 +9,12 @@ const { getLinkedTable, } = require("./linkUtils") const { flatten } = require("lodash") -const CouchDB = require("../../db") const { FieldTypes } = require("../../constants") const { getMultiIDParams, USER_METDATA_PREFIX } = require("../../db/utils") const { partition } = require("lodash") const { getGlobalUsersFromMetadata } = require("../../utilities/global") const { processFormulas } = require("../../utilities/rowProcessor/utils") +const { getAppDB } = require("@budibase/backend-core/context") /** * This functionality makes sure that when rows with links are created, updated or deleted they are processed @@ -48,14 +48,13 @@ function clearRelationshipFields(table, rows) { return rows } -async function getLinksForRows(appId, rows) { +async function getLinksForRows(rows) { const tableIds = [...new Set(rows.map(el => el.tableId))] // start by getting all the link values for performance reasons const responses = flatten( await Promise.all( tableIds.map(tableId => getLinkDocuments({ - appId, tableId: tableId, includeDocs: IncludeDocs.EXCLUDE, }) @@ -72,9 +71,9 @@ async function getLinksForRows(appId, rows) { ) } -async function getFullLinkedDocs(ctx, appId, links) { +async function getFullLinkedDocs(links) { // create DBs - const db = new CouchDB(appId) + const db = getAppDB() const linkedRowIds = links.map(link => link.id) const uniqueRowIds = [...new Set(linkedRowIds)] let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map( @@ -88,7 +87,7 @@ async function getFullLinkedDocs(ctx, appId, links) { let [users, other] = partition(linked, linkRow => linkRow._id.startsWith(USER_METDATA_PREFIX) ) - users = await getGlobalUsersFromMetadata(appId, users) + users = await getGlobalUsersFromMetadata(users) return [...other, ...users] } @@ -96,7 +95,6 @@ async function getFullLinkedDocs(ctx, appId, links) { * Update link documents for a row or table - this is to be called by the API controller when a change is occurring. * @param {string} args.eventType states what type of change which is occurring, means this can be expanded upon in the * future quite easily (all updates go through one function). - * @param {string} args.appId The ID of the instance in which the change is occurring. * @param {string} args.tableId The ID of the of the table which is being changed. 
* @param {object|null} args.row The row which is changing, e.g. created, updated or deleted. * @param {object|null} args.table If the table has already been retrieved this can be used to reduce database gets. @@ -105,11 +103,8 @@ async function getFullLinkedDocs(ctx, appId, links) { * row operations and the table for table operations. */ exports.updateLinks = async function (args) { - const { eventType, appId, row, tableId, table, oldTable } = args + const { eventType, row, tableId, table, oldTable } = args const baseReturnObj = row == null ? table : row - if (appId == null) { - throw "Cannot operate without an instance ID." - } // make sure table ID is set if (tableId == null && table != null) { args.tableId = table._id @@ -146,27 +141,23 @@ exports.updateLinks = async function (args) { /** * Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row. * This is required for formula fields, this may only be utilised internally (for now). - * @param {object} ctx The request which is looking for rows. * @param {object} table The table from which the rows originated. * @param {array} rows The rows which are to be enriched. * @return {Promise<*>} returns the rows with all of the enriched relationships on it. */ -exports.attachFullLinkedDocs = async (ctx, table, rows) => { - const appId = ctx.appId +exports.attachFullLinkedDocs = async (table, rows) => { const linkedTableIds = getLinkedTableIDs(table) if (linkedTableIds.length === 0) { return rows } - // create DBs - const db = new CouchDB(appId) // get all the links - const links = (await getLinksForRows(appId, rows)).filter(link => + const links = (await getLinksForRows(rows)).filter(link => rows.some(row => row._id === link.thisId) ) // clear any existing links that could be dupe'd rows = clearRelationshipFields(table, rows) // now get the docs and combine into the rows - let linked = await getFullLinkedDocs(ctx, appId, links) + let linked = await getFullLinkedDocs(links) const linkedTables = [] for (let row of rows) { for (let link of links.filter(link => link.thisId === row._id)) { @@ -177,11 +168,7 @@ exports.attachFullLinkedDocs = async (ctx, table, rows) => { if (linkedRow) { const linkedTableId = linkedRow.tableId || getRelatedTableForField(table, link.fieldName) - const linkedTable = await getLinkedTable( - db, - linkedTableId, - linkedTables - ) + const linkedTable = await getLinkedTable(linkedTableId, linkedTables) if (linkedTable) { row[link.fieldName].push(processFormulas(linkedTable, linkedRow)) } @@ -193,18 +180,16 @@ exports.attachFullLinkedDocs = async (ctx, table, rows) => { /** * This function will take the given enriched rows and squash the links to only contain the primary display field. - * @param {string} appId The app in which the tables/rows/links exist. * @param {object} table The table from which the rows originated. * @param {array} enriched The pre-enriched rows (full docs) which are to be squashed. * @returns {Promise} The rows after having their links squashed to only contain the ID and primary display. 
*/ -exports.squashLinksToPrimaryDisplay = async (appId, table, enriched) => { - const db = new CouchDB(appId) +exports.squashLinksToPrimaryDisplay = async (table, enriched) => { // will populate this as we find them const linkedTables = [table] for (let row of enriched) { // this only fetches the table if its not already in array - const rowTable = await getLinkedTable(db, row.tableId, linkedTables) + const rowTable = await getLinkedTable(row.tableId, linkedTables) for (let [column, schema] of Object.entries(rowTable.schema)) { if (schema.type !== FieldTypes.LINK || !Array.isArray(row[column])) { continue @@ -212,7 +197,7 @@ exports.squashLinksToPrimaryDisplay = async (appId, table, enriched) => { const newLinks = [] for (let link of row[column]) { const linkTblId = link.tableId || getRelatedTableForField(table, column) - const linkedTable = await getLinkedTable(db, linkTblId, linkedTables) + const linkedTable = await getLinkedTable(linkTblId, linkedTables) const obj = { _id: link._id } if (link[linkedTable.primaryDisplay]) { obj.primaryDisplay = link[linkedTable.primaryDisplay] diff --git a/packages/server/src/db/linkedRows/linkUtils.js b/packages/server/src/db/linkedRows/linkUtils.js index 12e72af78d..5af4aa919a 100644 --- a/packages/server/src/db/linkedRows/linkUtils.js +++ b/packages/server/src/db/linkedRows/linkUtils.js @@ -1,8 +1,8 @@ -const CouchDB = require("../index") const Sentry = require("@sentry/node") const { ViewNames, getQueryIndex } = require("../utils") const { FieldTypes } = require("../../constants") const { createLinkView } = require("../views/staticViews") +const { getAppDB } = require("@budibase/backend-core/context") /** * Only needed so that boolean parameters are being used for includeDocs @@ -17,7 +17,6 @@ exports.createLinkView = createLinkView /** * Gets the linking documents, not the linked documents themselves. - * @param {string} args.appId The instance in which we are searching for linked rows. * @param {string} args.tableId The table which we are searching for linked rows against. * @param {string|null} args.fieldName The name of column/field which is being altered, only looking for * linking documents that are related to it. If this is not specified then the table level will be assumed. @@ -30,8 +29,8 @@ exports.createLinkView = createLinkView * (if any). 
*/ exports.getLinkDocuments = async function (args) { - const { appId, tableId, rowId, includeDocs } = args - const db = new CouchDB(appId) + const { tableId, rowId, includeDocs } = args + const db = getAppDB() let params if (rowId != null) { params = { key: [tableId, rowId] } @@ -68,7 +67,7 @@ exports.getLinkDocuments = async function (args) { } catch (err) { // check if the view doesn't exist, it should for all new instances if (err != null && err.name === "not_found") { - await exports.createLinkView(appId) + await exports.createLinkView() return exports.getLinkDocuments(arguments[0]) } else { /* istanbul ignore next */ @@ -89,7 +88,8 @@ exports.getLinkedTableIDs = table => { .map(column => column.tableId) } -exports.getLinkedTable = async (db, id, tables) => { +exports.getLinkedTable = async (id, tables) => { + const db = getAppDB() let linkedTable = tables.find(table => table._id === id) if (linkedTable) { return linkedTable diff --git a/packages/server/src/db/tests/linkController.spec.js b/packages/server/src/db/tests/linkController.spec.js index d45bd99ea2..180cc2b3a0 100644 --- a/packages/server/src/db/tests/linkController.spec.js +++ b/packages/server/src/db/tests/linkController.spec.js @@ -20,7 +20,6 @@ describe("test the link controller", () => { function createLinkController(table, row = null, oldTable = null) { const linkConfig = { - appId: config.getAppId(), tableId: table._id, table, } diff --git a/packages/server/src/db/tests/linkTests.spec.js b/packages/server/src/db/tests/linkTests.spec.js index 8dad7be049..9a309df70a 100644 --- a/packages/server/src/db/tests/linkTests.spec.js +++ b/packages/server/src/db/tests/linkTests.spec.js @@ -1,8 +1,8 @@ const TestConfig = require("../../tests/utilities/TestConfiguration") -const { basicTable, basicLinkedRow } = require("../../tests/utilities/structures") +const { basicTable } = require("../../tests/utilities/structures") const linkUtils = require("../linkedRows/linkUtils") -const links = require("../linkedRows") const CouchDB = require("../index") +const { getAppDB } = require("@budibase/backend-core/context") describe("test link functionality", () => { const config = new TestConfig(false) @@ -11,18 +11,18 @@ describe("test link functionality", () => { let db, table beforeEach(async () => { await config.init() - db = new CouchDB(config.getAppId()) + db = getAppDB() table = await config.createTable() }) it("should be able to retrieve a linked table from a list", async () => { - const retrieved = await linkUtils.getLinkedTable(db, table._id, [table]) + const retrieved = await linkUtils.getLinkedTable(table._id, [table]) expect(retrieved._id).toBe(table._id) }) it("should be able to retrieve a table from DB and update list", async () => { const tables = [] - const retrieved = await linkUtils.getLinkedTable(db, table._id, tables) + const retrieved = await linkUtils.getLinkedTable(table._id, tables) expect(retrieved._id).toBe(table._id) expect(tables[0]).toBeDefined() }) @@ -51,7 +51,6 @@ describe("test link functionality", () => { const db = new CouchDB("test") await db.put({ _id: "_design/database", views: {} }) const output = await linkUtils.getLinkDocuments({ - appId: "test", tableId: "test", rowId: "test", includeDocs: false, diff --git a/packages/server/src/db/views/staticViews.js b/packages/server/src/db/views/staticViews.js index 8e7b101ef5..50b7c305d3 100644 --- a/packages/server/src/db/views/staticViews.js +++ b/packages/server/src/db/views/staticViews.js @@ -1,4 +1,4 @@ -const CouchDB = require("../index") +const { getAppDB 
} = require("@budibase/backend-core/context") const { DocumentTypes, SEPARATOR, @@ -21,12 +21,11 @@ const SCREEN_PREFIX = DocumentTypes.SCREEN + SEPARATOR /** * Creates the link view for the instance, this will overwrite the existing one, but this should only * be called if it is found that the view does not exist. - * @param {string} appId The instance to which the view should be added. * @returns {Promise} The view now exists, please note that the next view of this query will actually build it, * so it may be slow. */ -exports.createLinkView = async appId => { - const db = new CouchDB(appId) +exports.createLinkView = async () => { + const db = getAppDB() const designDoc = await db.get("_design/database") const view = { map: function (doc) { @@ -57,8 +56,8 @@ exports.createLinkView = async appId => { await db.put(designDoc) } -exports.createRoutingView = async appId => { - const db = new CouchDB(appId) +exports.createRoutingView = async () => { + const db = getAppDB() const designDoc = await db.get("_design/database") const view = { // if using variables in a map function need to inject them before use @@ -78,8 +77,8 @@ exports.createRoutingView = async appId => { await db.put(designDoc) } -async function searchIndex(appId, indexName, fnString) { - const db = new CouchDB(appId) +async function searchIndex(indexName, fnString) { + const db = getAppDB() const designDoc = await db.get("_design/database") designDoc.indexes = { [indexName]: { @@ -90,9 +89,8 @@ async function searchIndex(appId, indexName, fnString) { await db.put(designDoc) } -exports.createAllSearchIndex = async appId => { +exports.createAllSearchIndex = async () => { await searchIndex( - appId, SearchIndexes.ROWS, function (doc) { function idx(input, prev) { diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts index b9e643e26a..398f7fe56a 100644 --- a/packages/server/src/integrations/utils.ts +++ b/packages/server/src/integrations/utils.ts @@ -52,7 +52,10 @@ export function buildExternalTableId(datasourceId: string, tableName: string) { return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}` } -export function breakExternalTableId(tableId: string) { +export function breakExternalTableId(tableId: string | undefined) { + if (!tableId) { + return {} + } const parts = tableId.split(DOUBLE_SEPARATOR) let tableName = parts.pop() // if they need joined diff --git a/packages/server/src/middleware/authorized.js b/packages/server/src/middleware/authorized.js index b463895a80..e3414192af 100644 --- a/packages/server/src/middleware/authorized.js +++ b/packages/server/src/middleware/authorized.js @@ -43,13 +43,13 @@ module.exports = // need to check this first, in-case public access, don't check authed until last const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC - const hierarchy = await getUserRoleHierarchy(ctx.appId, roleId, { + const hierarchy = await getUserRoleHierarchy(roleId, { idOnly: false, }) const permError = "User does not have permission" let possibleRoleIds = [] if (hasResource(ctx)) { - possibleRoleIds = await getRequiredResourceRole(ctx.appId, permLevel, ctx) + possibleRoleIds = await getRequiredResourceRole(permLevel, ctx) } // check if we found a role, if not fallback to base permissions if (possibleRoleIds.length > 0) { diff --git a/packages/server/src/middleware/currentapp.js b/packages/server/src/middleware/currentapp.js index 69f80c895b..43f5ed9d46 100644 --- a/packages/server/src/middleware/currentapp.js +++ b/packages/server/src/middleware/currentapp.js @@ -11,7 
+11,6 @@ const { generateUserMetadataID, isDevAppID } = require("../db/utils") const { dbExists } = require("@budibase/backend-core/db") const { isUserInAppTenant } = require("@budibase/backend-core/tenancy") const { getCachedSelf } = require("../utilities/global") -const CouchDB = require("../db") const env = require("../environment") const { isWebhookEndpoint } = require("./utils") @@ -31,7 +30,7 @@ module.exports = async (ctx, next) => { // check the app exists referenced in cookie if (appCookie) { const appId = appCookie.appId - const exists = await dbExists(CouchDB, appId) + const exists = await dbExists(appId) if (!exists) { clearCookie(ctx, Cookies.CurrentApp) return next() diff --git a/packages/server/src/migrations/usageQuotas/syncApps.js b/packages/server/src/migrations/usageQuotas/syncApps.js index ee106129e6..e373c397ac 100644 --- a/packages/server/src/migrations/usageQuotas/syncApps.js +++ b/packages/server/src/migrations/usageQuotas/syncApps.js @@ -1,12 +1,11 @@ const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") const { getAllApps } = require("@budibase/backend-core/db") -const CouchDB = require("../../db") const { getUsageQuotaDoc } = require("../../utilities/usageQuota") exports.run = async () => { const db = getGlobalDB() // get app count - const devApps = await getAllApps(CouchDB, { dev: true }) + const devApps = await getAllApps({ dev: true }) const appCount = devApps ? devApps.length : 0 // sync app count diff --git a/packages/server/src/migrations/usageQuotas/syncRows.js b/packages/server/src/migrations/usageQuotas/syncRows.js index 7990f405de..5bdda08d8e 100644 --- a/packages/server/src/migrations/usageQuotas/syncRows.js +++ b/packages/server/src/migrations/usageQuotas/syncRows.js @@ -1,13 +1,12 @@ const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") const { getAllApps } = require("@budibase/backend-core/db") -const CouchDB = require("../../db") const { getUsageQuotaDoc } = require("../../utilities/usageQuota") const { getUniqueRows } = require("../../utilities/usageQuota/rows") exports.run = async () => { const db = getGlobalDB() // get all rows in all apps - const allApps = await getAllApps(CouchDB, { all: true }) + const allApps = await getAllApps({ all: true }) const appIds = allApps ? allApps.map(app => app.appId) : [] const rows = await getUniqueRows(appIds) const rowCount = rows ? 
rows.length : 0 diff --git a/packages/server/src/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.js index 7aefe4fb78..d10ccdd230 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.js +++ b/packages/server/src/tests/utilities/TestConfiguration.js @@ -23,6 +23,7 @@ const { createASession } = require("@budibase/backend-core/sessions") const { user: userCache } = require("@budibase/backend-core/cache") const CouchDB = require("../../db") const newid = require("../../db/newid") +const context = require("@budibase/backend-core/context") core.init(CouchDB) const GLOBAL_USER_ID = "us_uuid1" @@ -50,6 +51,7 @@ class TestConfiguration { } async _req(config, params, controlFunc) { + context.updateAppId(this.appId) const request = {} // fake cookies, we don't need them request.cookies = { set: () => {}, get: () => {} } @@ -165,6 +167,7 @@ class TestConfiguration { // create dev app this.app = await this._req({ name: appName }, null, controllers.app.create) this.appId = this.app.appId + context.updateAppId(this.appId) // create production app this.prodApp = await this.deploy() diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js index b8ddb1a356..7a9c2f350c 100644 --- a/packages/server/src/utilities/fileSystem/index.js +++ b/packages/server/src/utilities/fileSystem/index.js @@ -20,6 +20,7 @@ const { LINK_USER_METADATA_PREFIX, } = require("../../db/utils") const MemoryStream = require("memorystream") +const { getAppId } = require("@budibase/backend-core/context") const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules") @@ -251,7 +252,8 @@ exports.downloadTemplate = async (type, name) => { /** * Retrieves component libraries from object store (or tmp symlink if in local) */ -exports.getComponentLibraryManifest = async (appId, library) => { +exports.getComponentLibraryManifest = async library => { + const appId = getAppId() const filename = "manifest.json" /* istanbul ignore next */ // when testing in cypress and so on we need to get the package diff --git a/packages/server/src/utilities/global.js b/packages/server/src/utilities/global.js index 7ef1c09405..959eb59932 100644 --- a/packages/server/src/utilities/global.js +++ b/packages/server/src/utilities/global.js @@ -11,8 +11,10 @@ const { isUserInAppTenant, } = require("@budibase/backend-core/tenancy") const env = require("../environment") +const { getAppId } = require("@budibase/backend-core/context") -exports.updateAppRole = (appId, user) => { +exports.updateAppRole = (user, { appId } = {}) => { + appId = appId || getAppId() if (!user || !user.roles) { return user } @@ -35,18 +37,18 @@ exports.updateAppRole = (appId, user) => { return user } -function processUser(appId, user) { +function processUser(user, { appId } = {}) { if (user) { delete user.password } - return exports.updateAppRole(appId, user) + return exports.updateAppRole(user, { appId }) } exports.getCachedSelf = async (ctx, appId) => { // this has to be tenant aware, can't depend on the context to find it out // running some middlewares before the tenancy causes context to break const user = await userCache.getUser(ctx.user._id) - return processUser(appId, user) + return processUser(user, { appId }) } exports.getRawGlobalUser = async userId => { const db = getGlobalDB() return db.get(getGlobalIDFromUserMetadataID(userId)) } -exports.getGlobalUser = async (appId, userId) =>
{ +exports.getGlobalUser = async userId => { let user = await exports.getRawGlobalUser(userId) - return processUser(appId, user) + return processUser(user) } -exports.getGlobalUsers = async (appId = null, users = null) => { +exports.getGlobalUsers = async (users = null) => { + const appId = getAppId() const db = getGlobalDB() let globalUsers if (users) { @@ -86,11 +89,11 @@ exports.getGlobalUsers = async (appId = null, users = null) => { if (!appId) { return globalUsers } - return globalUsers.map(user => exports.updateAppRole(appId, user)) + return globalUsers.map(user => exports.updateAppRole(user)) } -exports.getGlobalUsersFromMetadata = async (appId, users) => { - const globalUsers = await exports.getGlobalUsers(appId, users) +exports.getGlobalUsersFromMetadata = async users => { + const globalUsers = await exports.getGlobalUsers(users) return users.map(user => { const globalUser = globalUsers.find( globalUser => globalUser && user._id.includes(globalUser._id) diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js index 4f5d72c179..55c7494928 100644 --- a/packages/server/src/utilities/rowProcessor/index.js +++ b/packages/server/src/utilities/rowProcessor/index.js @@ -10,7 +10,7 @@ const { getDeployedAppID, dbExists, } = require("@budibase/backend-core/db") -const CouchDB = require("../../db") +const { getAppId } = require("@budibase/backend-core/context") const BASE_AUTO_ID = 1 @@ -263,14 +263,13 @@ exports.outputProcessing = async ( rows, opts = { squash: true } ) => { - const appId = ctx.appId let wasArray = true if (!(rows instanceof Array)) { rows = [rows] wasArray = false } // attach any linked row information - let enriched = await linkRows.attachFullLinkedDocs(ctx, table, rows) + let enriched = await linkRows.attachFullLinkedDocs(table, rows) // process formulas enriched = processFormulas(table, enriched) @@ -289,29 +288,25 @@ exports.outputProcessing = async ( } } if (opts.squash) { - enriched = await linkRows.squashLinksToPrimaryDisplay( - appId, - table, - enriched - ) + enriched = await linkRows.squashLinksToPrimaryDisplay(table, enriched) } return wasArray ? enriched : enriched[0] } /** * Clean up any attachments that were attached to a row. - * @param {string} appId The ID of the app from which a row is being deleted. * @param {object} table The table from which a row is being removed. * @param {any} row optional - the row being removed. * @param {any} rows optional - if multiple rows being deleted can do this in bulk. * @param {any} oldRow optional - if updating a row this will determine the difference. * @return {Promise} When all attachments have been removed this will return. 
*/ -exports.cleanupAttachments = async (appId, table, { row, rows, oldRow }) => { +exports.cleanupAttachments = async (table, { row, rows, oldRow }) => { + const appId = getAppId() if (!isProdAppID(appId)) { const prodAppId = getDeployedAppID(appId) // if prod exists, then don't allow deleting - const exists = await dbExists(CouchDB, prodAppId) + const exists = await dbExists(prodAppId) if (exists) { return } diff --git a/packages/server/src/utilities/users.js b/packages/server/src/utilities/users.js index 6144397bf1..b3601986d8 100644 --- a/packages/server/src/utilities/users.js +++ b/packages/server/src/utilities/users.js @@ -1,13 +1,13 @@ -const CouchDB = require("../db") const { InternalTables } = require("../db/utils") const { getGlobalUser } = require("../utilities/global") +const { getAppDB } = require("@budibase/backend-core/context") exports.getFullUser = async (ctx, userId) => { - const global = await getGlobalUser(ctx.appId, userId) + const global = await getGlobalUser(userId) let metadata try { // this will throw an error if the db doesn't exist, or there is no appId - const db = new CouchDB(ctx.appId) + const db = getAppDB() metadata = await db.get(userId) } catch (err) { // it is fine if there is no user metadata, just remove global db info diff --git a/packages/server/src/utilities/workerRequests.js b/packages/server/src/utilities/workerRequests.js index 5e46f1678f..a7fa92b295 100644 --- a/packages/server/src/utilities/workerRequests.js +++ b/packages/server/src/utilities/workerRequests.js @@ -70,7 +70,7 @@ exports.getGlobalSelf = async (ctx, appId = null) => { } let json = await response.json() if (appId) { - json = updateAppRole(appId, json) + json = updateAppRole(json) } return json } diff --git a/packages/worker/src/api/controllers/global/configs.js b/packages/worker/src/api/controllers/global/configs.js index fc0aa868a3..604e7d0e93 100644 --- a/packages/worker/src/api/controllers/global/configs.js +++ b/packages/worker/src/api/controllers/global/configs.js @@ -11,7 +11,6 @@ const { upload, ObjectStoreBuckets, } = require("@budibase/backend-core/objectStore") -const CouchDB = require("../../../db") const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") const env = require("../../../environment") const { googleCallbackUrl, oidcCallbackUrl } = require("./auth") @@ -252,7 +251,7 @@ exports.configChecklist = async function (ctx) { // TODO: Watch get started video // Apps exist - const apps = await getAllApps(CouchDB, { idsOnly: true }) + const apps = await getAllApps({ idsOnly: true }) // They have set up SMTP const smtpConfig = await getScopedFullConfig(db, { diff --git a/packages/worker/src/api/controllers/global/roles.js b/packages/worker/src/api/controllers/global/roles.js index 3c977a6290..ee55256f35 100644 --- a/packages/worker/src/api/controllers/global/roles.js +++ b/packages/worker/src/api/controllers/global/roles.js @@ -9,7 +9,7 @@ const CouchDB = require("../../../db") exports.fetch = async ctx => { const tenantId = ctx.user.tenantId // always use the dev apps as they'll be most up to date (true) - const apps = await getAllApps(CouchDB, { tenantId, all: true }) + const apps = await getAllApps({ tenantId, all: true }) const promises = [] for (let app of apps) { // use dev app IDs From d3a90acc55654fa92a912885ffa3f63d96e28c0e Mon Sep 17 00:00:00 2001 From: Michael Drury Date: Fri, 28 Jan 2022 00:05:39 +0000 Subject: [PATCH 2/9] Final pass refactoring - need to test but all code in server converted. 
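For reviewers, a rough sketch of the calling pattern this refactor is aiming for
(illustrative only: the countDocs helper below is hypothetical and not part of this
change). Code that runs outside the app-tenancy middleware wraps its work in
doInAppContext so that getAppDB() can resolve the current app database without
threading an explicit appId/CouchDB parameter through every call, mirroring how
doInTenant already behaves. This assumes cls.run returns the task's result, which
the existing doInTenant usage relies on.

    const {
      doInAppContext,
      getAppDB,
    } = require("@budibase/backend-core/context")

    // e.g. a background job or test helper that previously built new CouchDB(appId)
    async function countDocs(appId) {
      return doInAppContext(appId, async () => {
        // resolved from the app ID set on the context above
        const db = getAppDB()
        const response = await db.allDocs({ include_docs: false })
        return response.rows.length
      })
    }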
--- packages/backend-core/context.js | 2 + packages/backend-core/src/db/constants.js | 4 + packages/backend-core/src/db/conversions.js | 46 + packages/backend-core/src/db/utils.js | 70 +- packages/backend-core/src/tenancy/context.js | 12 +- packages/server/src/api/controllers/auth.js | 7 +- .../server/src/api/controllers/automation.js | 36 +- .../src/api/controllers/deploy/Deployment.js | 10 +- .../src/api/controllers/deploy/index.js | 33 +- packages/server/src/api/controllers/dev.js | 10 +- .../server/src/api/controllers/metadata.js | 13 +- .../src/api/controllers/query/import/index.ts | 6 +- .../query/import/tests/index.spec.js | 2 +- .../server/src/api/controllers/query/index.js | 21 +- .../server/src/api/controllers/routing.js | 7 +- .../src/api/controllers/row/external.js | 4 +- .../src/api/controllers/table/internal.js | 10 +- packages/server/src/api/controllers/user.js | 4 +- .../server/src/api/controllers/webhook.js | 18 +- packages/server/src/automations/triggers.js | 64 +- packages/server/src/automations/utils.js | 7 +- packages/server/src/db/linkedRows/index.js | 6 +- packages/server/src/middleware/usageQuota.js | 4 +- .../src/tests/utilities/TestConfiguration.js | 4 +- packages/server/src/threads/automation.js | 32 +- packages/server/src/threads/query.js | 24 +- packages/server/src/utilities/index.js | 24 +- .../server/src/utilities/routing/index.js | 10 +- packages/server/yarn.lock | 783 +----------------- 29 files changed, 269 insertions(+), 1004 deletions(-) create mode 100644 packages/backend-core/src/db/conversions.js diff --git a/packages/backend-core/context.js b/packages/backend-core/context.js index b3d004b209..5cf9642392 100644 --- a/packages/backend-core/context.js +++ b/packages/backend-core/context.js @@ -4,6 +4,7 @@ const { getProdAppDB, getAppId, updateAppId, + doInAppContext, } = require("./src/tenancy/context") module.exports = { @@ -12,4 +13,5 @@ module.exports = { getProdAppDB, getAppId, updateAppId, + doInAppContext, } diff --git a/packages/backend-core/src/db/constants.js b/packages/backend-core/src/db/constants.js index 2affb09c7c..b41a9a9c08 100644 --- a/packages/backend-core/src/db/constants.js +++ b/packages/backend-core/src/db/constants.js @@ -32,3 +32,7 @@ exports.StaticDatabases = { }, }, } + +exports.APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR +exports.APP_DEV = exports.APP_DEV_PREFIX = + exports.DocumentTypes.APP_DEV + exports.SEPARATOR diff --git a/packages/backend-core/src/db/conversions.js b/packages/backend-core/src/db/conversions.js new file mode 100644 index 0000000000..766ec1ad06 --- /dev/null +++ b/packages/backend-core/src/db/conversions.js @@ -0,0 +1,46 @@ +const NO_APP_ERROR = "No app provided" +const { APP_DEV_PREFIX, APP_PREFIX } = require("./constants") + +exports.isDevAppID = appId => { + if (!appId) { + throw NO_APP_ERROR + } + return appId.startsWith(APP_DEV_PREFIX) +} + +exports.isProdAppID = appId => { + if (!appId) { + throw NO_APP_ERROR + } + return appId.startsWith(APP_PREFIX) && !exports.isDevAppID(appId) +} + +exports.isDevApp = app => { + if (!app) { + throw NO_APP_ERROR + } + return exports.isDevAppID(app.appId) +} + +/** + * Convert a development app ID to a deployed app ID. + */ +exports.getDeployedAppID = appId => { + // if dev, convert it + if (appId.startsWith(APP_DEV_PREFIX)) { + const id = appId.split(APP_DEV_PREFIX)[1] + return `${APP_PREFIX}${id}` + } + return appId +} + +/** + * Convert a deployed app ID to a development app ID. 
+ */ +exports.getDevelopmentAppID = appId => { + if (!appId.startsWith(APP_DEV_PREFIX)) { + const id = appId.split(APP_PREFIX)[1] + return `${APP_DEV_PREFIX}${id}` + } + return appId +} diff --git a/packages/backend-core/src/db/utils.js b/packages/backend-core/src/db/utils.js index 181467b402..7190a1221b 100644 --- a/packages/backend-core/src/db/utils.js +++ b/packages/backend-core/src/db/utils.js @@ -2,7 +2,13 @@ const { newid } = require("../hashing") const Replication = require("./Replication") const { DEFAULT_TENANT_ID, Configs } = require("../constants") const env = require("../environment") -const { StaticDatabases, SEPARATOR, DocumentTypes } = require("./constants") +const { + StaticDatabases, + SEPARATOR, + DocumentTypes, + APP_PREFIX, + APP_DEV, +} = require("./constants") const { getTenantId, getTenantIDFromAppID, @@ -12,8 +18,13 @@ const fetch = require("node-fetch") const { getCouch } = require("./index") const { getAppMetadata } = require("../cache/appMetadata") const { checkSlashesInUrl } = require("../helpers") - -const NO_APP_ERROR = "No app provided" +const { + isDevApp, + isProdAppID, + isDevAppID, + getDevelopmentAppID, + getDeployedAppID, +} = require("./conversions") const UNICODE_MAX = "\ufff0" @@ -24,10 +35,15 @@ exports.ViewNames = { exports.StaticDatabases = StaticDatabases exports.DocumentTypes = DocumentTypes -exports.APP_PREFIX = DocumentTypes.APP + SEPARATOR -exports.APP_DEV = exports.APP_DEV_PREFIX = DocumentTypes.APP_DEV + SEPARATOR +exports.APP_PREFIX = APP_PREFIX +exports.APP_DEV = exports.APP_DEV_PREFIX = APP_DEV exports.SEPARATOR = SEPARATOR exports.getTenantIDFromAppID = getTenantIDFromAppID +exports.isDevApp = isDevApp +exports.isProdAppID = isProdAppID +exports.isDevAppID = isDevAppID +exports.getDevelopmentAppID = getDevelopmentAppID +exports.getDeployedAppID = getDeployedAppID /** * If creating DB allDocs/query params with only a single top level ID this can be used, this @@ -52,27 +68,6 @@ function getDocParams(docType, docId = null, otherProps = {}) { } } -exports.isDevAppID = appId => { - if (!appId) { - throw NO_APP_ERROR - } - return appId.startsWith(exports.APP_DEV_PREFIX) -} - -exports.isProdAppID = appId => { - if (!appId) { - throw NO_APP_ERROR - } - return appId.startsWith(exports.APP_PREFIX) && !exports.isDevAppID(appId) -} - -function isDevApp(app) { - if (!app) { - throw NO_APP_ERROR - } - return exports.isDevAppID(app.appId) -} - /** * Generates a new workspace ID. * @returns {string} The new workspace ID which the workspace doc can be stored under. @@ -157,29 +152,6 @@ exports.getRoleParams = (roleId = null, otherProps = {}) => { return getDocParams(DocumentTypes.ROLE, roleId, otherProps) } -/** - * Convert a development app ID to a deployed app ID. - */ -exports.getDeployedAppID = appId => { - // if dev, convert it - if (appId.startsWith(exports.APP_DEV_PREFIX)) { - const id = appId.split(exports.APP_DEV_PREFIX)[1] - return `${exports.APP_PREFIX}${id}` - } - return appId -} - -/** - * Convert a deployed app ID to a development app ID. 
- */ -exports.getDevelopmentAppID = appId => { - if (!appId.startsWith(exports.APP_DEV_PREFIX)) { - const id = appId.split(exports.APP_PREFIX)[1] - return `${exports.APP_DEV_PREFIX}${id}` - } - return appId -} - exports.getCouchUrl = () => { if (!env.COUCH_DB_URL) return diff --git a/packages/backend-core/src/tenancy/context.js b/packages/backend-core/src/tenancy/context.js index ac2cfbeae9..d54622f979 100644 --- a/packages/backend-core/src/tenancy/context.js +++ b/packages/backend-core/src/tenancy/context.js @@ -2,7 +2,7 @@ const env = require("../environment") const { Headers } = require("../../constants") const cls = require("./FunctionContext") const { getCouch } = require("../db") -const { getDeployedAppID, getDevelopmentAppID } = require("../db/utils") +const { getDeployedAppID, getDevelopmentAppID } = require("../db/conversions") const { isEqual } = require("lodash") // some test cases call functions directly, need to @@ -42,6 +42,16 @@ exports.doInTenant = (tenantId, task) => { }) } +exports.doInAppContext = (appId, task) => { + return cls.run(() => { + // set the app ID + cls.setOnContext(ContextKeys.APP_ID, appId) + + // invoke the task + return task() + }) +} + exports.updateTenantId = tenantId => { cls.setOnContext(ContextKeys.TENANT_ID, tenantId) } diff --git a/packages/server/src/api/controllers/auth.js b/packages/server/src/api/controllers/auth.js index 3f680225af..b082bb889e 100644 --- a/packages/server/src/api/controllers/auth.js +++ b/packages/server/src/api/controllers/auth.js @@ -1,11 +1,10 @@ -const CouchDB = require("../../db") const { outputProcessing } = require("../../utilities/rowProcessor") const { InternalTables } = require("../../db/utils") const { getFullUser } = require("../../utilities/users") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") +const { getAppDB, getAppId } = require("@budibase/backend-core/context") exports.fetchSelf = async ctx => { - const appId = ctx.appId let userId = ctx.user.userId || ctx.user._id /* istanbul ignore next */ if (!userId) { @@ -17,8 +16,8 @@ exports.fetchSelf = async ctx => { // this shouldn't be returned by the app self delete user.roles - if (appId) { - const db = new CouchDB(appId) + if (getAppId()) { + const db = getAppDB() // remove the full roles structure delete user.roles try { diff --git a/packages/server/src/api/controllers/automation.js b/packages/server/src/api/controllers/automation.js index 05337579a0..74942dad40 100644 --- a/packages/server/src/api/controllers/automation.js +++ b/packages/server/src/api/controllers/automation.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../db") const actions = require("../../automations/actions") const triggers = require("../../automations/triggers") const { getAutomationParams, generateAutomationID } = require("../../db/utils") @@ -10,6 +9,7 @@ const { const { deleteEntityMetadata } = require("../../utilities") const { MetadataTypes } = require("../../constants") const { setTestFlag, clearTestFlag } = require("../../utilities/redis") +const { getAppDB } = require("@budibase/backend-core/context") const ACTION_DEFS = removeDeprecated(actions.ACTION_DEFINITIONS) const TRIGGER_DEFS = removeDeprecated(triggers.TRIGGER_DEFINITIONS) @@ -20,14 +20,9 @@ const TRIGGER_DEFS = removeDeprecated(triggers.TRIGGER_DEFINITIONS) * * *************************/ -async function cleanupAutomationMetadata(appId, automationId) { +async function cleanupAutomationMetadata(automationId) { + await deleteEntityMetadata(MetadataTypes.AUTOMATION_TEST_INPUT, automationId) await 
deleteEntityMetadata( - appId, - MetadataTypes.AUTOMATION_TEST_INPUT, - automationId - ) - await deleteEntityMetadata( - appId, MetadataTypes.AUTOMATION_TEST_HISTORY, automationId ) @@ -58,7 +53,7 @@ function cleanAutomationInputs(automation) { } exports.create = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() let automation = ctx.request.body automation.appId = ctx.appId @@ -72,7 +67,6 @@ exports.create = async function (ctx) { automation.type = "automation" automation = cleanAutomationInputs(automation) automation = await checkForWebhooks({ - appId: ctx.appId, newAuto: automation, }) const response = await db.put(automation) @@ -89,13 +83,12 @@ exports.create = async function (ctx) { } exports.update = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() let automation = ctx.request.body automation.appId = ctx.appId const oldAutomation = await db.get(automation._id) automation = cleanAutomationInputs(automation) automation = await checkForWebhooks({ - appId: ctx.appId, oldAuto: oldAutomation, newAuto: automation, }) @@ -131,7 +124,7 @@ exports.update = async function (ctx) { } exports.fetch = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const response = await db.allDocs( getAutomationParams(null, { include_docs: true, @@ -141,20 +134,19 @@ exports.fetch = async function (ctx) { } exports.find = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() ctx.body = await db.get(ctx.params.id) } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const automationId = ctx.params.id const oldAutomation = await db.get(automationId) await checkForWebhooks({ - appId: ctx.appId, oldAuto: oldAutomation, }) // delete metadata first - await cleanupAutomationMetadata(ctx.appId, automationId) + await cleanupAutomationMetadata(automationId) ctx.body = await db.remove(automationId, ctx.params.rev) } @@ -180,12 +172,11 @@ module.exports.getDefinitionList = async function (ctx) { *********************/ exports.trigger = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() let automation = await db.get(ctx.params.id) await triggers.externalTrigger(automation, { ...ctx.request.body, - appId, + appId: ctx.appId, }) ctx.body = { message: `Automation ${automation._id} has been triggered.`, @@ -205,8 +196,7 @@ function prepareTestInput(input) { } exports.test = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() let automation = await db.get(ctx.params.id) await setTestFlag(automation._id) const testInput = prepareTestInput(ctx.request.body) @@ -214,7 +204,7 @@ exports.test = async function (ctx) { automation, { ...testInput, - appId, + appId: ctx.appId, }, { getResponses: true } ) diff --git a/packages/server/src/api/controllers/deploy/Deployment.js b/packages/server/src/api/controllers/deploy/Deployment.js index b398aa2e6d..65cca97d07 100644 --- a/packages/server/src/api/controllers/deploy/Deployment.js +++ b/packages/server/src/api/controllers/deploy/Deployment.js @@ -1,18 +1,14 @@ const newid = require("../../../db/newid") +const { getAppId } = require("@budibase/backend-core/context") /** * This is used to pass around information about the deployment that is occurring */ class Deployment { - constructor(appId, id = null) { - this.appId = appId + constructor(id = null) { this._id = id || newid() } - getAppId() { - return this.appId 
- } - setVerification(verification) { if (!verification) { return @@ -43,7 +39,7 @@ class Deployment { getJSON() { const obj = { _id: this._id, - appId: this.appId, + appId: getAppId(), status: this.status, } if (this.err) { diff --git a/packages/server/src/api/controllers/deploy/index.js b/packages/server/src/api/controllers/deploy/index.js index 76d7b75912..22c7d5ce3a 100644 --- a/packages/server/src/api/controllers/deploy/index.js +++ b/packages/server/src/api/controllers/deploy/index.js @@ -1,12 +1,20 @@ -const CouchDB = require("../../../db") const Deployment = require("./Deployment") -const { Replication, getDeployedAppID } = require("@budibase/backend-core/db") +const { + Replication, + getDeployedAppID, + getDevelopmentAppID, +} = require("@budibase/backend-core/db") const { DocumentTypes, getAutomationParams } = require("../../../db/utils") const { disableAllCrons, enableCronTrigger, } = require("../../../automations/utils") const { app: appCache } = require("@budibase/backend-core/cache") +const { + getAppId, + getAppDB, + getProdAppDB, +} = require("@budibase/backend-core/context") // the max time we can wait for an invalidation to complete before considering it failed const MAX_PENDING_TIME_MS = 30 * 60000 @@ -34,9 +42,8 @@ async function checkAllDeployments(deployments) { } async function storeDeploymentHistory(deployment) { - const appId = deployment.getAppId() const deploymentJSON = deployment.getJSON() - const db = new CouchDB(appId) + const db = getAppDB() let deploymentDoc try { @@ -64,7 +71,7 @@ async function storeDeploymentHistory(deployment) { } async function initDeployedApp(prodAppId) { - const db = new CouchDB(prodAppId) + const db = getProdAppDB() console.log("Reading automation docs") const automations = ( await db.allDocs( @@ -88,10 +95,12 @@ async function initDeployedApp(prodAppId) { async function deployApp(deployment) { try { - const productionAppId = getDeployedAppID(deployment.appId) + const appId = getAppId() + const devAppId = getDevelopmentAppID(appId) + const productionAppId = getDeployedAppID(appId) const replication = new Replication({ - source: deployment.appId, + source: devAppId, target: productionAppId, }) @@ -99,7 +108,7 @@ async function deployApp(deployment) { await replication.replicate() console.log("replication complete.. 
replacing app meta doc") - const db = new CouchDB(productionAppId) + const db = getProdAppDB() const appDoc = await db.get(DocumentTypes.APP_METADATA) appDoc.appId = productionAppId appDoc.instance._id = productionAppId @@ -122,8 +131,7 @@ async function deployApp(deployment) { exports.fetchDeployments = async function (ctx) { try { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) const { updated, deployments } = await checkAllDeployments( deploymentDoc, @@ -140,8 +148,7 @@ exports.fetchDeployments = async function (ctx) { exports.deploymentProgress = async function (ctx) { try { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) ctx.body = deploymentDoc[ctx.params.deploymentId] } catch (err) { @@ -153,7 +160,7 @@ exports.deploymentProgress = async function (ctx) { } exports.deployApp = async function (ctx) { - let deployment = new Deployment(ctx.appId) + let deployment = new Deployment() console.log("Deployment object created") deployment.setStatus(DeploymentStatus.PENDING) console.log("Deployment object set to pending") diff --git a/packages/server/src/api/controllers/dev.js b/packages/server/src/api/controllers/dev.js index 3126454a6b..a27fab9a83 100644 --- a/packages/server/src/api/controllers/dev.js +++ b/packages/server/src/api/controllers/dev.js @@ -1,12 +1,12 @@ const fetch = require("node-fetch") -const CouchDB = require("../../db") const env = require("../../environment") const { checkSlashesInUrl } = require("../../utilities") const { request } = require("../../utilities/workerRequests") const { clearLock } = require("../../utilities/redis") -const { Replication } = require("@budibase/backend-core/db") +const { Replication, getDeployedAppID } = require("@budibase/backend-core/db") const { DocumentTypes } = require("../../db/utils") const { app: appCache } = require("@budibase/backend-core/cache") +const { getProdAppDB, getAppDB } = require("@budibase/backend-core/context") async function redirect(ctx, method, path = "global") { const { devPath } = ctx.params @@ -77,11 +77,11 @@ exports.clearLock = async ctx => { exports.revert = async ctx => { const { appId } = ctx.params - const productionAppId = appId.replace("_dev", "") + const productionAppId = getDeployedAppID(appId) // App must have been deployed first try { - const db = new CouchDB(productionAppId, { skip_setup: true }) + const db = getProdAppDB({ skip_setup: true }) const info = await db.info() if (info.error) throw info.error const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS) @@ -103,7 +103,7 @@ exports.revert = async ctx => { await replication.rollback() // update appID in reverted app to be dev version again - const db = new CouchDB(appId) + const db = getAppDB() const appDoc = await db.get(DocumentTypes.APP_METADATA) appDoc.appId = appId appDoc.instance._id = appId diff --git a/packages/server/src/api/controllers/metadata.js b/packages/server/src/api/controllers/metadata.js index 75236650fd..e68db9b003 100644 --- a/packages/server/src/api/controllers/metadata.js +++ b/packages/server/src/api/controllers/metadata.js @@ -1,7 +1,7 @@ const { MetadataTypes } = require("../../constants") -const CouchDB = require("../../db") const { generateMetadataID } = require("../../db/utils") const { saveEntityMetadata, deleteEntityMetadata } = require("../../utilities") +const { getAppDB } = require("@budibase/backend-core/context") 
exports.getTypes = async ctx => { ctx.body = { @@ -14,17 +14,12 @@ exports.saveMetadata = async ctx => { if (type === MetadataTypes.AUTOMATION_TEST_HISTORY) { ctx.throw(400, "Cannot save automation history type") } - ctx.body = await saveEntityMetadata( - ctx.appId, - type, - entityId, - ctx.request.body - ) + ctx.body = await saveEntityMetadata(type, entityId, ctx.request.body) } exports.deleteMetadata = async ctx => { const { type, entityId } = ctx.params - await deleteEntityMetadata(ctx.appId, type, entityId) + await deleteEntityMetadata(type, entityId) ctx.body = { message: "Metadata deleted successfully", } @@ -32,7 +27,7 @@ exports.deleteMetadata = async ctx => { exports.getMetadata = async ctx => { const { type, entityId } = ctx.params - const db = new CouchDB(ctx.appId) + const db = getAppDB() const id = generateMetadataID(type, entityId) try { ctx.body = await db.get(id) diff --git a/packages/server/src/api/controllers/query/import/index.ts b/packages/server/src/api/controllers/query/import/index.ts index 933d6b101c..26b1853a03 100644 --- a/packages/server/src/api/controllers/query/import/index.ts +++ b/packages/server/src/api/controllers/query/import/index.ts @@ -1,10 +1,11 @@ -import CouchDB from "../../../../db" import { queryValidation } from "../validation" import { generateQueryID } from "../../../../db/utils" import { ImportInfo, ImportSource } from "./sources/base" import { OpenAPI2 } from "./sources/openapi2" import { Query } from "./../../../../definitions/common" import { Curl } from "./sources/curl" +// @ts-ignore +import { getAppDB } from "@budibase/backend-core/context" interface ImportResult { errorQueries: Query[] queries: Query[] @@ -34,7 +35,6 @@ export class RestImporter { } importQueries = async ( - appId: string, datasourceId: string ): Promise => { // constuct the queries @@ -58,7 +58,7 @@ export class RestImporter { }) // persist queries - const db = new CouchDB(appId) + const db = getAppDB() const response = await db.bulkDocs(queries) // create index to seperate queries and errors diff --git a/packages/server/src/api/controllers/query/import/tests/index.spec.js b/packages/server/src/api/controllers/query/import/tests/index.spec.js index 5a509d2258..36227a4c55 100644 --- a/packages/server/src/api/controllers/query/import/tests/index.spec.js +++ b/packages/server/src/api/controllers/query/import/tests/index.spec.js @@ -77,7 +77,7 @@ describe("Rest Importer", () => { const testImportQueries = async (key, data, assertions) => { await init(data) bulkDocs.mockReturnValue([]) - const importResult = await restImporter.importQueries("appId", "datasourceId") + const importResult = await restImporter.importQueries("datasourceId") expect(importResult.errorQueries.length).toBe(0) expect(importResult.queries.length).toBe(assertions[key].count) expect(bulkDocs).toHaveBeenCalledTimes(1) diff --git a/packages/server/src/api/controllers/query/index.js b/packages/server/src/api/controllers/query/index.js index 9cf7612e8a..7a179bab35 100644 --- a/packages/server/src/api/controllers/query/index.js +++ b/packages/server/src/api/controllers/query/index.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const { generateQueryID, getQueryParams, @@ -10,6 +9,7 @@ const { save: saveDatasource } = require("../datasource") const { RestImporter } = require("./import") const { invalidateDynamicVariables } = require("../../../threads/utils") const environment = require("../../../environment") +const { getAppDB } = require("@budibase/backend-core/context") const Runner = new 
Thread(ThreadType.QUERY, { timeoutMs: environment.QUERY_THREAD_TIMEOUT || 10000, @@ -28,7 +28,7 @@ function enrichQueries(input) { } exports.fetch = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const body = await db.allDocs( getQueryParams(null, { @@ -69,7 +69,7 @@ exports.import = async ctx => { datasourceId = body.datasourceId } - const importResult = await importer.importQueries(ctx.appId, datasourceId) + const importResult = await importer.importQueries(datasourceId) ctx.body = { ...importResult, @@ -79,7 +79,7 @@ exports.import = async ctx => { } exports.save = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const query = ctx.request.body if (!query._id) { @@ -94,7 +94,7 @@ exports.save = async function (ctx) { } exports.find = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const query = enrichQueries(await db.get(ctx.params.queryId)) // remove properties that could be dangerous in real app if (isProdAppID(ctx.appId)) { @@ -105,7 +105,7 @@ exports.find = async function (ctx) { } exports.preview = async function (ctx) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const datasource = await db.get(ctx.request.body.datasourceId) // preview may not have a queryId as it hasn't been saved, but if it does @@ -136,7 +136,7 @@ exports.preview = async function (ctx) { } async function execute(ctx, opts = { rowsOnly: false }) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const query = await db.get(ctx.params.queryId) const datasource = await db.get(query.datasourceId) @@ -181,7 +181,8 @@ exports.executeV2 = async function (ctx) { return execute(ctx, { rowsOnly: false }) } -const removeDynamicVariables = async (db, queryId) => { +const removeDynamicVariables = async queryId => { + const db = getAppDB() const query = await db.get(queryId) const datasource = await db.get(query.datasourceId) const dynamicVariables = datasource.config.dynamicVariables @@ -202,8 +203,8 @@ const removeDynamicVariables = async (db, queryId) => { } exports.destroy = async function (ctx) { - const db = new CouchDB(ctx.appId) - await removeDynamicVariables(db, ctx.params.queryId) + const db = getAppDB() + await removeDynamicVariables(ctx.params.queryId) await db.remove(ctx.params.queryId, ctx.params.revId) ctx.message = `Query deleted.` ctx.status = 200 diff --git a/packages/server/src/api/controllers/routing.js b/packages/server/src/api/controllers/routing.js index aeb728454b..ca4dea2738 100644 --- a/packages/server/src/api/controllers/routing.js +++ b/packages/server/src/api/controllers/routing.js @@ -39,12 +39,11 @@ Routing.prototype.addScreenId = function (fullpath, roleId, screenId) { /** * Gets the full routing structure by querying the routing view and processing the result into the tree. - * @param {string} appId The application to produce the routing structure for. * @returns {Promise} The routing structure, this is the full structure designed for use in the builder, * if the client routing is required then the updateRoutingStructureForUserRole should be used. 
*/ -async function getRoutingStructure(appId) { - const screenRoutes = await getRoutingInfo(appId) +async function getRoutingStructure() { + const screenRoutes = await getRoutingInfo() const routing = new Routing() for (let screenRoute of screenRoutes) { @@ -57,7 +56,7 @@ async function getRoutingStructure(appId) { } exports.fetch = async ctx => { - ctx.body = await getRoutingStructure(ctx.appId) + ctx.body = await getRoutingStructure() } exports.clientFetch = async ctx => { diff --git a/packages/server/src/api/controllers/row/external.js b/packages/server/src/api/controllers/row/external.js index 4e79975893..0bd57d256f 100644 --- a/packages/server/src/api/controllers/row/external.js +++ b/packages/server/src/api/controllers/row/external.js @@ -9,7 +9,7 @@ const { breakRowIdField, } = require("../../../integrations/utils") const ExternalRequest = require("./ExternalRequest") -const CouchDB = require("../../../db") +const { getAppDB } = require("@budibase/backend-core/context") async function handleRequest(operation, tableId, opts = {}) { // make sure the filters are cleaned up, no empty strings for equals, fuzzy or string @@ -179,7 +179,7 @@ exports.fetchEnrichedRow = async ctx => { const id = ctx.params.rowId const tableId = ctx.params.tableId const { datasourceId, tableName } = breakExternalTableId(tableId) - const db = new CouchDB(appId) + const db = getAppDB() const datasource = await db.get(datasourceId) if (!datasource || !datasource.entities) { ctx.throw(400, "Datasource has not been configured for plus API.") diff --git a/packages/server/src/api/controllers/table/internal.js b/packages/server/src/api/controllers/table/internal.js index 7f7fe1cb3c..3f443bfd1d 100644 --- a/packages/server/src/api/controllers/table/internal.js +++ b/packages/server/src/api/controllers/table/internal.js @@ -1,4 +1,3 @@ -const CouchDB = require("../../../db") const linkRows = require("../../../db/linkedRows") const { getRowParams, generateTableID } = require("../../../db/utils") const { FieldTypes } = require("../../../constants") @@ -9,10 +8,10 @@ const { handleDataImport, } = require("./utils") const usageQuota = require("../../../utilities/usageQuota") +const { getAppDB } = require("@budibase/backend-core/context") exports.save = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const { dataImport, ...rest } = ctx.request.body let tableToSave = { type: "table", @@ -79,7 +78,6 @@ exports.save = async function (ctx) { // update linked rows try { const linkResp = await linkRows.updateLinks({ - appId, eventType: oldTable ? 
linkRows.EventType.TABLE_UPDATED : linkRows.EventType.TABLE_SAVE, @@ -108,8 +106,7 @@ exports.save = async function (ctx) { } exports.destroy = async function (ctx) { - const appId = ctx.appId - const db = new CouchDB(appId) + const db = getAppDB() const tableToDelete = await db.get(ctx.params.tableId) // Delete all rows for that table @@ -123,7 +120,6 @@ exports.destroy = async function (ctx) { // update linked rows await linkRows.updateLinks({ - appId, eventType: linkRows.EventType.TABLE_DELETE, table: tableToDelete, }) diff --git a/packages/server/src/api/controllers/user.js b/packages/server/src/api/controllers/user.js index 31194c3e96..5524a08bab 100644 --- a/packages/server/src/api/controllers/user.js +++ b/packages/server/src/api/controllers/user.js @@ -11,8 +11,8 @@ const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") const { getDevelopmentAppID, getDeployedAppIDs, + dbExists, } = require("@budibase/backend-core/db") -const { doesDatabaseExist } = require("../../utilities") const { UserStatus } = require("@budibase/backend-core/constants") const { getAppDB } = require("@budibase/backend-core/context") @@ -102,7 +102,7 @@ exports.syncUser = async function (ctx) { const roleId = roles[prodAppId] const devAppId = getDevelopmentAppID(prodAppId) for (let appId of [prodAppId, devAppId]) { - if (!(await doesDatabaseExist(appId))) { + if (!(await dbExists(appId))) { continue } const db = getAppDB() diff --git a/packages/server/src/api/controllers/webhook.js b/packages/server/src/api/controllers/webhook.js index 0230fb481b..7375b3e750 100644 --- a/packages/server/src/api/controllers/webhook.js +++ b/packages/server/src/api/controllers/webhook.js @@ -1,9 +1,9 @@ -const CouchDB = require("../../db") const { generateWebhookID, getWebhookParams } = require("../../db/utils") const toJsonSchema = require("to-json-schema") const validate = require("jsonschema").validate const triggers = require("../../automations/triggers") const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getAppDB, updateAppId } = require("@budibase/backend-core/context") const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema" @@ -23,7 +23,7 @@ exports.WebhookType = { } exports.fetch = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const response = await db.allDocs( getWebhookParams(null, { include_docs: true, @@ -33,7 +33,7 @@ exports.fetch = async ctx => { } exports.save = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() const webhook = ctx.request.body webhook.appId = ctx.appId @@ -52,12 +52,13 @@ exports.save = async ctx => { } exports.destroy = async ctx => { - const db = new CouchDB(ctx.appId) + const db = getAppDB() ctx.body = await db.remove(ctx.params.id, ctx.params.rev) } exports.buildSchema = async ctx => { - const db = new CouchDB(ctx.params.instance) + updateAppId(ctx.params.instance) + const db = getAppDB() const webhook = await db.get(ctx.params.id) webhook.bodySchema = toJsonSchema(ctx.request.body) // update the automation outputs @@ -81,9 +82,10 @@ exports.buildSchema = async ctx => { } exports.trigger = async ctx => { - const prodAppId = getDeployedAppID(ctx.params.instance) + const deployedAppId = getDeployedAppID(ctx.params.instance) + updateAppId(deployedAppId) try { - const db = new CouchDB(prodAppId) + const db = getAppDB() const webhook = await db.get(ctx.params.id) // validate against the schema if (webhook.bodySchema) { @@ -96,7 +98,7 @@ exports.trigger = async ctx => { await 
triggers.externalTrigger(target, { body: ctx.request.body, ...ctx.request.body, - appId: prodAppId, + appId: deployedAppId, }) } ctx.status = 200 diff --git a/packages/server/src/automations/triggers.js b/packages/server/src/automations/triggers.js index 49e50ec34f..deff9f7503 100644 --- a/packages/server/src/automations/triggers.js +++ b/packages/server/src/automations/triggers.js @@ -1,4 +1,3 @@ -const CouchDB = require("../db") const emitter = require("../events/index") const { getAutomationParams } = require("../db/utils") const { coerce } = require("../utilities/rowProcessor") @@ -9,6 +8,7 @@ const { queue } = require("./bullboard") const { checkTestFlag } = require("../utilities/redis") const utils = require("./utils") const env = require("../environment") +const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") const TRIGGER_DEFINITIONS = definitions const JOB_OPTS = { @@ -21,39 +21,41 @@ async function queueRelevantRowAutomations(event, eventType) { throw `No appId specified for ${eventType} - check event emitters.` } - const db = new CouchDB(event.appId) - let automations = await db.allDocs( - getAutomationParams(null, { include_docs: true }) - ) + doInAppContext(event.appId, async () => { + const db = getAppDB() + let automations = await db.allDocs( + getAutomationParams(null, { include_docs: true }) + ) - // filter down to the correct event type - automations = automations.rows - .map(automation => automation.doc) - .filter(automation => { - const trigger = automation.definition.trigger - return trigger && trigger.event === eventType - }) + // filter down to the correct event type + automations = automations.rows + .map(automation => automation.doc) + .filter(automation => { + const trigger = automation.definition.trigger + return trigger && trigger.event === eventType + }) - for (let automation of automations) { - let automationDef = automation.definition - let automationTrigger = automationDef ? automationDef.trigger : {} - // don't queue events which are for dev apps, only way to test automations is - // running tests on them, in production the test flag will never - // be checked due to lazy evaluation (first always false) - if ( - !env.ALLOW_DEV_AUTOMATIONS && - isDevAppID(event.appId) && - !(await checkTestFlag(automation._id)) - ) { - continue + for (let automation of automations) { + let automationDef = automation.definition + let automationTrigger = automationDef ? 
automationDef.trigger : {} + // don't queue events which are for dev apps, only way to test automations is + // running tests on them, in production the test flag will never + // be checked due to lazy evaluation (first always false) + if ( + !env.ALLOW_DEV_AUTOMATIONS && + isDevAppID(event.appId) && + !(await checkTestFlag(automation._id)) + ) { + continue + } + if ( + automationTrigger.inputs && + automationTrigger.inputs.tableId === event.row.tableId + ) { + await queue.add({ automation, event }, JOB_OPTS) + } } - if ( - automationTrigger.inputs && - automationTrigger.inputs.tableId === event.row.tableId - ) { - await queue.add({ automation, event }, JOB_OPTS) - } - } + }) } emitter.on("row:save", async function (event) { diff --git a/packages/server/src/automations/utils.js b/packages/server/src/automations/utils.js index 4a554793f8..6c1d8b2fdf 100644 --- a/packages/server/src/automations/utils.js +++ b/packages/server/src/automations/utils.js @@ -8,6 +8,7 @@ const { updateEntityMetadata } = require("../utilities") const { MetadataTypes } = require("../constants") const { getDeployedAppID } = require("@budibase/backend-core/db") const { cloneDeep } = require("lodash/fp") +const { getAppDB, getAppId } = require("@budibase/backend-core/context") const WH_STEP_ID = definitions.WEBHOOK.stepId const CRON_STEP_ID = definitions.CRON.stepId @@ -27,7 +28,6 @@ exports.processEvent = async job => { exports.updateTestHistory = async (appId, automation, history) => { return updateEntityMetadata( - appId, MetadataTypes.AUTOMATION_TEST_HISTORY, automation._id, metadata => { @@ -109,7 +109,8 @@ exports.enableCronTrigger = async (appId, automation) => { * @returns {Promise} After this is complete the new automation object may have been updated and should be * written to DB (this does not write to DB as it would be wasteful to repeat). */ -exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => { +exports.checkForWebhooks = async ({ oldAuto, newAuto }) => { + const appId = getAppId() const oldTrigger = oldAuto ? oldAuto.definition.trigger : null const newTrigger = newAuto ? newAuto.definition.trigger : null const triggerChanged = @@ -128,7 +129,7 @@ exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => { oldTrigger.webhookId ) { try { - let db = new CouchDB(appId) + let db = getAppDB() // need to get the webhook to get the rev const webhook = await db.get(oldTrigger.webhookId) const ctx = { diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js index f2872d808a..6cb45f9781 100644 --- a/packages/server/src/db/linkedRows/index.js +++ b/packages/server/src/db/linkedRows/index.js @@ -96,9 +96,9 @@ async function getFullLinkedDocs(links) { * @param {string} args.eventType states what type of change which is occurring, means this can be expanded upon in the * future quite easily (all updates go through one function). * @param {string} args.tableId The ID of the of the table which is being changed. - * @param {object|null} args.row The row which is changing, e.g. created, updated or deleted. - * @param {object|null} args.table If the table has already been retrieved this can be used to reduce database gets. - * @param {object|null} args.oldTable If the table is being updated then the old table can be provided for differencing. + * @param {object|undefined} args.row The row which is changing, e.g. created, updated or deleted. 
+ * @param {object|undefined} args.table If the table has already been retrieved this can be used to reduce database gets. + * @param {object|undefined} args.oldTable If the table is being updated then the old table can be provided for differencing. * @returns {Promise} When the update is complete this will respond successfully. Returns the row for * row operations and the table for table operations. */ diff --git a/packages/server/src/middleware/usageQuota.js b/packages/server/src/middleware/usageQuota.js index 4bafa75132..c13847705b 100644 --- a/packages/server/src/middleware/usageQuota.js +++ b/packages/server/src/middleware/usageQuota.js @@ -1,4 +1,3 @@ -const CouchDB = require("../db") const usageQuota = require("../utilities/usageQuota") const { getUniqueRows } = require("../utilities/usageQuota/rows") const { @@ -6,6 +5,7 @@ const { isRowId: isExternalRowId, } = require("../integrations/utils") const migration = require("../migrations/usageQuotas") +const { getAppDB } = require("@budibase/backend-core/context") // currently only counting new writes and deletes const METHOD_MAP = { @@ -47,7 +47,7 @@ module.exports = async (ctx, next) => { const usageId = ctx.request.body._id try { if (ctx.appId) { - const db = new CouchDB(ctx.appId) + const db = getAppDB() await db.get(usageId) } return next() diff --git a/packages/server/src/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.js index d10ccdd230..48c8a88410 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.js +++ b/packages/server/src/tests/utilities/TestConfiguration.js @@ -180,8 +180,8 @@ class TestConfiguration { } async deploy() { - const deployment = await this._req(null, null, controllers.deploy.deployApp) - const prodAppId = deployment.appId.replace("_dev", "") + await this._req(null, null, controllers.deploy.deployApp) + const prodAppId = this.getAppId().replace("_dev", "") const appPackage = await this._req( null, { appId: prodAppId }, diff --git a/packages/server/src/threads/automation.js b/packages/server/src/threads/automation.js index 2a39773520..c0843a286c 100644 --- a/packages/server/src/threads/automation.js +++ b/packages/server/src/threads/automation.js @@ -5,11 +5,11 @@ const automationUtils = require("../automations/automationUtils") const AutomationEmitter = require("../events/AutomationEmitter") const { processObject } = require("@budibase/string-templates") const { DEFAULT_TENANT_ID } = require("@budibase/backend-core/constants") -const CouchDB = require("../db") const { DocumentTypes, isDevAppID } = require("../db/utils") const { doInTenant } = require("@budibase/backend-core/tenancy") const usage = require("../utilities/usageQuota") const { definitions: triggerDefs } = require("../automations/triggerInfo") +const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId const CRON_STEP_ID = triggerDefs.CRON.stepId @@ -59,11 +59,10 @@ class Orchestrator { } async getApp() { - const appId = this._appId if (this._app) { return this._app } - const db = new CouchDB(appId) + const db = getAppDB() this._app = await db.get(DocumentTypes.APP_METADATA) return this._app } @@ -131,16 +130,19 @@ class Orchestrator { } module.exports = (input, callback) => { - const automationOrchestrator = new Orchestrator( - input.data.automation, - input.data.event - ) - automationOrchestrator - .execute() - .then(response => { - callback(null, response) - }) - .catch(err => { - callback(err) - }) + 
const appId = input.data.event.appId + doInAppContext(appId, () => { + const automationOrchestrator = new Orchestrator( + input.data.automation, + input.data.event + ) + automationOrchestrator + .execute() + .then(response => { + callback(null, response) + }) + .catch(err => { + callback(err) + }) + }) } diff --git a/packages/server/src/threads/query.js b/packages/server/src/threads/query.js index ff3e101d48..be0260882e 100644 --- a/packages/server/src/threads/query.js +++ b/packages/server/src/threads/query.js @@ -3,14 +3,13 @@ threadUtils.threadSetup() const ScriptRunner = require("../utilities/scriptRunner") const { integrations } = require("../integrations") const { processStringSync } = require("@budibase/string-templates") -const CouchDB = require("../db") +const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") const IS_TRIPLE_BRACE = new RegExp(/^{{3}.*}{3}$/) const IS_HANDLEBARS = new RegExp(/^{{2}.*}{2}$/) class QueryRunner { constructor(input, flags = { noRecursiveQuery: false }) { - this.appId = input.appId this.datasource = input.datasource this.queryVerb = input.queryVerb this.fields = input.fields @@ -104,12 +103,11 @@ class QueryRunner { } async runAnotherQuery(queryId, parameters) { - const db = new CouchDB(this.appId) + const db = getAppDB() const query = await db.get(queryId) const datasource = await db.get(query.datasourceId) return new QueryRunner( { - appId: this.appId, datasource, queryVerb: query.queryVerb, fields: query.fields, @@ -223,12 +221,14 @@ class QueryRunner { } module.exports = (input, callback) => { - const Runner = new QueryRunner(input) - Runner.execute() - .then(response => { - callback(null, response) - }) - .catch(err => { - callback(err) - }) + doInAppContext(input.appId, () => { + const Runner = new QueryRunner(input) + Runner.execute() + .then(response => { + callback(null, response) + }) + .catch(err => { + callback(err) + }) + }) } diff --git a/packages/server/src/utilities/index.js b/packages/server/src/utilities/index.js index 0dba11141c..d1e277541a 100644 --- a/packages/server/src/utilities/index.js +++ b/packages/server/src/utilities/index.js @@ -1,9 +1,9 @@ const env = require("../environment") const { OBJ_STORE_DIRECTORY } = require("../constants") const { sanitizeKey } = require("@budibase/backend-core/objectStore") -const CouchDB = require("../db") const { generateMetadataID } = require("../db/utils") const Readable = require("stream").Readable +const { getAppDB } = require("@budibase/backend-core/context") const BB_CDN = "https://cdn.budi.live" @@ -73,8 +73,8 @@ exports.attachmentsRelativeURL = attachmentKey => { ) } -exports.updateEntityMetadata = async (appId, type, entityId, updateFn) => { - const db = new CouchDB(appId) +exports.updateEntityMetadata = async (type, entityId, updateFn) => { + const db = getAppDB() const id = generateMetadataID(type, entityId) // read it to see if it exists, we'll overwrite it no matter what let rev, @@ -99,14 +99,14 @@ exports.updateEntityMetadata = async (appId, type, entityId, updateFn) => { } } -exports.saveEntityMetadata = async (appId, type, entityId, metadata) => { - return exports.updateEntityMetadata(appId, type, entityId, () => { +exports.saveEntityMetadata = async (type, entityId, metadata) => { + return exports.updateEntityMetadata(type, entityId, () => { return metadata }) } -exports.deleteEntityMetadata = async (appId, type, entityId) => { - const db = new CouchDB(appId) +exports.deleteEntityMetadata = async (type, entityId) => { + const db = getAppDB() const id = 
generateMetadataID(type, entityId) let rev try { @@ -141,16 +141,6 @@ exports.stringToReadStream = string => { }) } -exports.doesDatabaseExist = async dbName => { - try { - const db = new CouchDB(dbName, { skip_setup: true }) - const info = await db.info() - return info && !info.error - } catch (err) { - return false - } -} - exports.formatBytes = bytes => { const units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"] const byteIncrements = 1024 diff --git a/packages/server/src/utilities/routing/index.js b/packages/server/src/utilities/routing/index.js index 541733dcc4..b68001c3c3 100644 --- a/packages/server/src/utilities/routing/index.js +++ b/packages/server/src/utilities/routing/index.js @@ -1,9 +1,9 @@ -const CouchDB = require("../../db") const { createRoutingView } = require("../../db/views/staticViews") const { ViewNames, getQueryIndex, UNICODE_MAX } = require("../../db/utils") +const { getAppDB } = require("@budibase/backend-core/context") -exports.getRoutingInfo = async appId => { - const db = new CouchDB(appId) +exports.getRoutingInfo = async () => { + const db = getAppDB() try { const allRouting = await db.query(getQueryIndex(ViewNames.ROUTING), { startKey: "", @@ -14,8 +14,8 @@ exports.getRoutingInfo = async appId => { // check if the view doesn't exist, it should for all new instances /* istanbul ignore next */ if (err != null && err.name === "not_found") { - await createRoutingView(appId) - return exports.getRoutingInfo(appId) + await createRoutingView() + return exports.getRoutingInfo() } else { throw err } diff --git a/packages/server/yarn.lock b/packages/server/yarn.lock index df4e50c48f..f1aa43dfde 100644 --- a/packages/server/yarn.lock +++ b/packages/server/yarn.lock @@ -983,30 +983,6 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/backend-core@^1.0.46-alpha.5": - version "1.0.47" - resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.47.tgz#af1e501e20f8a648a40fe7d336b89e65f058c803" - integrity sha512-nj+MC2j6WEH+6LEJhs+zMbnm4BRGCaX7kXvlyq7EXA9h6QOxrNkB/PNFqEumkMJGjorkZAQ/qe8MUEjcE26QBw== - dependencies: - "@techpass/passport-openidconnect" "^0.3.0" - aws-sdk "^2.901.0" - bcryptjs "^2.4.3" - cls-hooked "^4.2.2" - ioredis "^4.27.1" - jsonwebtoken "^8.5.1" - koa-passport "^4.1.4" - lodash "^4.17.21" - lodash.isarguments "^3.1.0" - node-fetch "^2.6.1" - passport-google-auth "^1.0.2" - passport-google-oauth "^2.0.0" - passport-jwt "^4.0.0" - passport-local "^1.0.0" - sanitize-s3-objectkey "^0.0.1" - tar-fs "^2.1.1" - uuid "^8.3.2" - zlib "^1.0.5" - "@budibase/bbui@^0.9.139": version "0.9.187" resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.187.tgz#84f0a37301cfa41f50eaa335243ac08923d9e34f" @@ -1056,95 +1032,6 @@ svelte-flatpickr "^3.2.3" svelte-portal "^1.0.0" -"@budibase/bbui@^1.0.47": - version "1.0.47" - resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.47.tgz#df2848b89f881fe603e7156855d6a6c31d4f58bf" - integrity sha512-RRm/BgK5aSx2/vGjMGljw240/48Ksc3/h4yB1nhQj8Xx3fKhlGnWDvWNy+sakvA6+fJvEXuti8RoxHtQ6lXmqA== - dependencies: - "@adobe/spectrum-css-workflow-icons" "^1.2.1" - "@spectrum-css/actionbutton" "^1.0.1" - "@spectrum-css/actiongroup" "^1.0.1" - "@spectrum-css/avatar" "^3.0.2" - "@spectrum-css/button" "^3.0.1" - "@spectrum-css/buttongroup" "^3.0.2" - "@spectrum-css/checkbox" "^3.0.2" - "@spectrum-css/dialog" 
"^3.0.1" - "@spectrum-css/divider" "^1.0.3" - "@spectrum-css/dropzone" "^3.0.2" - "@spectrum-css/fieldgroup" "^3.0.2" - "@spectrum-css/fieldlabel" "^3.0.1" - "@spectrum-css/icon" "^3.0.1" - "@spectrum-css/illustratedmessage" "^3.0.2" - "@spectrum-css/inlinealert" "^2.0.1" - "@spectrum-css/inputgroup" "^3.0.2" - "@spectrum-css/label" "^2.0.10" - "@spectrum-css/link" "^3.1.1" - "@spectrum-css/menu" "^3.0.1" - "@spectrum-css/modal" "^3.0.1" - "@spectrum-css/pagination" "^3.0.3" - "@spectrum-css/picker" "^1.0.1" - "@spectrum-css/popover" "^3.0.1" - "@spectrum-css/progressbar" "^1.0.2" - "@spectrum-css/progresscircle" "^1.0.2" - "@spectrum-css/radio" "^3.0.2" - "@spectrum-css/search" "^3.0.2" - "@spectrum-css/sidenav" "^3.0.2" - "@spectrum-css/statuslight" "^3.0.2" - "@spectrum-css/stepper" "^3.0.3" - "@spectrum-css/switch" "^1.0.2" - "@spectrum-css/table" "^3.0.1" - "@spectrum-css/tabs" "^3.0.1" - "@spectrum-css/tags" "^3.0.2" - "@spectrum-css/textfield" "^3.0.1" - "@spectrum-css/toast" "^3.0.1" - "@spectrum-css/tooltip" "^3.0.3" - "@spectrum-css/treeview" "^3.0.2" - "@spectrum-css/typography" "^3.0.1" - "@spectrum-css/underlay" "^2.0.9" - "@spectrum-css/vars" "^3.0.1" - dayjs "^1.10.4" - svelte-flatpickr "^3.2.3" - svelte-portal "^1.0.0" - -"@budibase/client@^1.0.46-alpha.5": - version "1.0.47" - resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.47.tgz#ce9e2fbd300e5dc389ea29a3a3347897f096c824" - integrity sha512-jB/al8v+nY/VLc6sH5Jt9JzWONVo+24/cI95iXlZSV5xwiKIVGj4+2F5QjKZ0c9Gm7SrrfP2T571N+4XaXNCGg== - dependencies: - "@budibase/bbui" "^1.0.47" - "@budibase/standard-components" "^0.9.139" - "@budibase/string-templates" "^1.0.47" - regexparam "^1.3.0" - shortid "^2.2.15" - svelte-spa-router "^3.0.5" - -"@budibase/handlebars-helpers@^0.11.7": - version "0.11.7" - resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.11.7.tgz#8e5f9843d7dd10503e9f608555a96ccf4d836c46" - integrity sha512-PvGHAv22cWSFExs1kc0WglwsmCEUEOqWvSp6JCFZwtc3qAAr5yMfLK8WGVQ63ALvyzWZiyxF+yrlzeeaohCMJw== - dependencies: - array-sort "^1.0.0" - define-property "^2.0.2" - extend-shallow "^3.0.2" - for-in "^1.0.2" - get-object "^0.2.0" - get-value "^3.0.1" - handlebars "^4.7.7" - handlebars-utils "^1.0.6" - has-value "^2.0.2" - helper-date "^1.0.1" - helper-markdown "^1.0.0" - helper-md "^0.2.2" - html-tag "^2.0.0" - is-even "^1.0.0" - is-glob "^4.0.1" - kind-of "^6.0.3" - micromatch "^3.1.5" - relative "^3.0.2" - striptags "^3.1.1" - to-gfm-code-block "^0.1.1" - year "^0.2.1" - "@budibase/standard-components@^0.9.139": version "0.9.139" resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.139.tgz#cf8e2b759ae863e469e50272b3ca87f2827e66e3" @@ -1163,18 +1050,6 @@ svelte-apexcharts "^1.0.2" svelte-flatpickr "^3.1.0" -"@budibase/string-templates@^1.0.46-alpha.5", "@budibase/string-templates@^1.0.47": - version "1.0.47" - resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.47.tgz#626b9fc4542c7b36a0ae24e820d25a704c527bec" - integrity sha512-87BUfOPr8FGKH8Pt88jhKNGT9PcOmkLRCeen4xi1dI113pAQznBO9vgV+cXOChUBBEQka9Rrt85LMJXidiwVgg== - dependencies: - "@budibase/handlebars-helpers" "^0.11.7" - dayjs "^1.10.4" - handlebars "^4.7.6" - handlebars-utils "^1.0.6" - lodash "^4.17.20" - vm2 "^3.9.4" - "@bull-board/api@3.7.0", "@bull-board/api@^3.7.0": version "3.7.0" resolved "https://registry.yarnpkg.com/@bull-board/api/-/api-3.7.0.tgz#231f687187c0cb34e0b97f463917b6aaeb4ef6af" @@ -2098,11 +1973,6 @@ resolved 
"https://registry.yarnpkg.com/@spectrum-css/illustratedmessage/-/illustratedmessage-3.0.8.tgz#69ef0c935bcc5027f233a78de5aeb0064bf033cb" integrity sha512-HvC4dywDi11GdrXQDCvKQ0vFlrXLTyJuc9UKf7meQLCGoJbGYDBwe+tHXNK1c6gPMD9BoL6pPMP1K/vRzR4EBQ== -"@spectrum-css/inlinealert@^2.0.1": - version "2.0.6" - resolved "https://registry.yarnpkg.com/@spectrum-css/inlinealert/-/inlinealert-2.0.6.tgz#4c5e923a1f56a96cc1adb30ef1f06ae04f2c6376" - integrity sha512-OpvvoWP02wWyCnF4IgG8SOPkXymovkC9cGtgMS1FdDubnG3tJZB/JeKTsRR9C9Vt3WBaOmISRdSKlZ4lC9CFzA== - "@spectrum-css/inputgroup@^3.0.2": version "3.0.8" resolved "https://registry.yarnpkg.com/@spectrum-css/inputgroup/-/inputgroup-3.0.8.tgz#fc23afc8a73c24d17249c9d2337e8b42085b298b" @@ -2252,17 +2122,6 @@ dependencies: defer-to-connect "^1.0.1" -"@techpass/passport-openidconnect@^0.3.0": - version "0.3.2" - resolved "https://registry.yarnpkg.com/@techpass/passport-openidconnect/-/passport-openidconnect-0.3.2.tgz#f8fd5d97256286665dbf26dac92431f977ab1e63" - integrity sha512-fnCtEiexXSHA029B//hJcCJlLJrT3lhpNCyA0rnz58Qttz0BLGCVv6yMT8HmOnGThH6vcDOVwdgKM3kbCQtEhw== - dependencies: - base64url "^3.0.1" - oauth "^0.9.15" - passport-strategy "^1.0.0" - request "^2.88.0" - webfinger "^0.4.2" - "@tootallnate/once@1": version "1.1.2" resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" @@ -3132,7 +2991,7 @@ arg@^4.1.0: resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== -argparse@^1.0.10, argparse@^1.0.7: +argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== @@ -3179,15 +3038,6 @@ array-equal@^1.0.0: resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" integrity sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM= -array-sort@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-sort/-/array-sort-1.0.0.tgz#e4c05356453f56f53512a7d1d6123f2c54c0a88a" - integrity sha512-ihLeJkonmdiAsD7vpgN3CRcx2J2S0TiYW+IS/5zHBI7mKUq3ySvBdzzBfD236ubDBQFiiyG3SWCPc+msQ9KoYg== - dependencies: - default-compare "^1.0.0" - get-value "^2.0.6" - kind-of "^5.0.2" - array-union@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" @@ -3240,13 +3090,6 @@ astral-regex@^1.0.0: resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg== -async-hook-jl@^1.7.6: - version "1.7.6" - resolved "https://registry.yarnpkg.com/async-hook-jl/-/async-hook-jl-1.7.6.tgz#4fd25c2f864dbaf279c610d73bf97b1b28595e68" - integrity sha512-gFaHkFfSxTjvoxDMYqDuGHlcRyUuamF8s+ZTtJdDzqjws4mCt7v0vuV79/E2Wr2/riMQgtG4/yUtXWs1gZ7JMg== - dependencies: - stack-chain "^1.3.7" - async-limiter@~1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" @@ -3269,13 +3112,6 @@ async@^3.1.0: resolved "https://registry.yarnpkg.com/async/-/async-3.2.2.tgz#2eb7671034bb2194d45d30e31e24ec7e7f9670cd" integrity 
sha512-H0E+qZaDEfx/FY4t7iLRv1W2fFI6+pyCeTw1uN20AQPiwqwM6ojPxHxdLv4z8hi2DtnW9BOckSspLucW7pIE5g== -async@~2.1.4: - version "2.1.5" - resolved "https://registry.yarnpkg.com/async/-/async-2.1.5.tgz#e587c68580994ac67fc56ff86d3ac56bdbe810bc" - integrity sha1-5YfGhYCZSsZ/xW/4bTrFa9voELw= - dependencies: - lodash "^4.14.0" - asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -3291,13 +3127,6 @@ atomic-sleep@^1.0.0: resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== -autolinker@~0.28.0: - version "0.28.1" - resolved "https://registry.yarnpkg.com/autolinker/-/autolinker-0.28.1.tgz#0652b491881879f0775dace0cdca3233942a4e47" - integrity sha1-BlK0kYgYefB3XazgzcoyM5QqTkc= - dependencies: - gulp-header "^1.7.1" - aws-sdk@^2.767.0: version "2.1030.0" resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1030.0.tgz#24a856af3d2b8b37c14a8f59974993661c66fd82" @@ -3313,21 +3142,6 @@ aws-sdk@^2.767.0: uuid "3.3.2" xml2js "0.4.19" -aws-sdk@^2.901.0: - version "2.1046.0" - resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1046.0.tgz#9147b0fa1c86acbebd1a061e951ab5012f4499d7" - integrity sha512-ocwHclMXdIA+NWocUyvp9Ild3/zy2vr5mHp3mTyodf0WU5lzBE8PocCVLSWhMAXLxyia83xv2y5f5AzAcetbqA== - dependencies: - buffer "4.9.2" - events "1.1.1" - ieee754 "1.1.13" - jmespath "0.15.0" - querystring "0.2.0" - sax "1.2.1" - url "0.10.3" - uuid "3.3.2" - xml2js "0.4.19" - aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" @@ -3510,11 +3324,6 @@ base64-js@^1.0.2, base64-js@^1.3.0, base64-js@^1.3.1: resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== -base64url@3.x.x, base64url@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/base64url/-/base64url-3.0.1.tgz#6399d572e2bc3f90a9a8b22d5dbb0a32d33f788d" - integrity sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A== - base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" @@ -3535,7 +3344,7 @@ bcrypt-pbkdf@^1.0.0: dependencies: tweetnacl "^0.14.3" -bcryptjs@2.4.3, bcryptjs@^2.4.3: +bcryptjs@2.4.3: version "2.4.3" resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb" integrity sha1-mrVie5PmBiH/fNrF2pczAn3x0Ms= @@ -3585,15 +3394,6 @@ bl@^3.0.0: dependencies: readable-stream "^3.0.1" -bl@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - bluebird@^3.5.1, bluebird@^3.7.2: version "3.7.2" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" @@ -3941,11 +3741,6 @@ chokidar@^3.5.2: optionalDependencies: fsevents "~2.3.2" -chownr@^1.1.1: - version "1.1.4" - resolved 
"https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - chrome-trace-event@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" @@ -4032,15 +3827,6 @@ clone-response@1.0.2, clone-response@^1.0.2: dependencies: mimic-response "^1.0.0" -cls-hooked@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/cls-hooked/-/cls-hooked-4.2.2.tgz#ad2e9a4092680cdaffeb2d3551da0e225eae1908" - integrity sha512-J4Xj5f5wq/4jAvcdgoGsL3G103BtWpZrMo8NEinRltN+xpTZdI+M38pyQqhuFU/P792xkMFvnKSf+Lm81U1bxw== - dependencies: - async-hook-jl "^1.7.6" - emitter-listener "^1.0.1" - semver "^5.4.1" - cluster-key-slot@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz#30474b2a981fb12172695833052bc0d01336d10d" @@ -4196,13 +3982,6 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-with-sourcemaps@*: - version "1.1.0" - resolved "https://registry.yarnpkg.com/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz#d4ea93f05ae25790951b99e7b3b09e3908a4082e" - integrity sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg== - dependencies: - source-map "^0.6.1" - condense-newlines@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/condense-newlines/-/condense-newlines-0.2.1.tgz#3de985553139475d32502c83b02f60684d24c55f" @@ -4449,13 +4228,6 @@ date-utils@*: resolved "https://registry.yarnpkg.com/date-utils/-/date-utils-1.2.21.tgz#61fb16cdc1274b3c9acaaffe9fc69df8720a2b64" integrity sha1-YfsWzcEnSzyayq/+n8ad+HIKK2Q= -date.js@^0.3.1: - version "0.3.3" - resolved "https://registry.yarnpkg.com/date.js/-/date.js-0.3.3.tgz#ef1e92332f507a638795dbb985e951882e50bbda" - integrity sha512-HgigOS3h3k6HnW011nAb43c5xx5rBXk8P2v/WIT9Zv4koIaVXiH2BURguI78VVp+5Qc076T7OR378JViCnZtBw== - dependencies: - debug "~3.1.0" - dateformat@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-3.0.3.tgz#a6e37499a4d9a9cf85ef5872044d62901c9889ae" @@ -4599,13 +4371,6 @@ deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -default-compare@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/default-compare/-/default-compare-1.0.0.tgz#cb61131844ad84d84788fb68fd01681ca7781a2f" - integrity sha512-QWfXlM0EkAbqOCbD/6HjdwT19j7WCkMyiRhWilc4H9/5h/RzTF9gv5LYh1+CmDV5d1rki6KAWLtQale0xt20eQ== - dependencies: - kind-of "^5.0.2" - default-shell@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/default-shell/-/default-shell-1.0.1.tgz#752304bddc6174f49eb29cb988feea0b8813c8bc" @@ -4875,13 +4640,6 @@ electron-to-chromium@^1.3.896: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.900.tgz#5be2c5818a2a012c511b4b43e87b6ab7a296d4f5" integrity sha512-SuXbQD8D4EjsaBaJJxySHbC+zq8JrFfxtb4GIr4E9n1BcROyMcRrJCYQNpJ9N+Wjf5mFp7Wp0OHykd14JNEzzQ== -emitter-listener@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/emitter-listener/-/emitter-listener-1.1.2.tgz#56b140e8f6992375b3d7cb2cab1cc7432d9632e8" - 
integrity sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ== - dependencies: - shimmer "^1.2.0" - emittery@^0.8.1: version "0.8.1" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" @@ -4922,7 +4680,7 @@ encoding-down@^6.3.0: level-codec "^9.0.0" level-errors "^2.0.0" -end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1: +end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== @@ -4944,11 +4702,6 @@ enhanced-resolve@^5.8.3: graceful-fs "^4.2.4" tapable "^2.2.0" -ent@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" - integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= - entities@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.1.0.tgz#992d3129cf7df6870b96c57858c249a120f8b8b5" @@ -5888,11 +5641,6 @@ fs-constants@^1.0.0: resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== -fs-exists-sync@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add" - integrity sha1-mC1ok6+RjnLQjeyehnP/K1qNat0= - fs-extra@8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" @@ -5984,14 +5732,6 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: has "^1.0.3" has-symbols "^1.0.1" -get-object@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/get-object/-/get-object-0.2.0.tgz#d92ff7d5190c64530cda0543dac63a3d47fe8c0c" - integrity sha1-2S/31RkMZFMM2gVD2sY6PUf+jAw= - dependencies: - is-number "^2.0.2" - isobject "^0.2.0" - get-package-type@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" @@ -6054,13 +5794,6 @@ get-value@^2.0.3, get-value@^2.0.6: resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= -get-value@^3.0.0, get-value@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-3.0.1.tgz#5efd2a157f1d6a516d7524e124ac52d0a39ef5a8" - integrity sha512-mKZj9JLQrwMBtj5wxi6MH8Z5eSKaERpAwjg43dPtlGI1ZVEgH/qC7T8/6R2OBSUA+zzHBZgICsVJaEIV2tKTDA== - dependencies: - isobject "^3.0.1" - getopts@2.2.5: version "2.2.5" resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.2.5.tgz#67a0fe471cacb9c687d817cab6450b96dde8313b" @@ -6185,23 +5918,6 @@ google-auth-library@^7.11.0: jws "^4.0.0" lru-cache "^6.0.0" -google-auth-library@~0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-0.10.0.tgz#6e15babee85fd1dd14d8d128a295b6838d52136e" - integrity sha1-bhW6vuhf0d0U2NEoopW2g41SE24= - dependencies: - gtoken "^1.2.1" - jws "^3.1.4" - lodash.noop "^3.0.1" - request "^2.74.0" - -google-p12-pem@^0.1.0: - version "0.1.2" - resolved 
"https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-0.1.2.tgz#33c46ab021aa734fa0332b3960a9a3ffcb2f3177" - integrity sha1-M8RqsCGqc0+gMys5YKmj/8svMXc= - dependencies: - node-forge "^0.7.1" - google-p12-pem@^3.0.3: version "3.1.2" resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-3.1.2.tgz#c3d61c2da8e10843ff830fdb0d2059046238c1d4" @@ -6218,15 +5934,6 @@ google-spreadsheet@^3.2.0: google-auth-library "^6.1.3" lodash "^4.17.21" -googleapis@^16.0.0: - version "16.1.0" - resolved "https://registry.yarnpkg.com/googleapis/-/googleapis-16.1.0.tgz#0f19f2d70572d918881a0f626e3b1a2fa8629576" - integrity sha1-Dxny1wVy2RiIGg9ibjsaL6hilXY= - dependencies: - async "~2.1.4" - google-auth-library "~0.10.0" - string-template "~1.0.0" - got@^8.3.1: version "8.3.2" resolved "https://registry.yarnpkg.com/got/-/got-8.3.2.tgz#1d23f64390e97f776cac52e5b936e5f514d2e937" @@ -6272,16 +5979,6 @@ graceful-fs@^4.1.10, graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1. resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a" integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg== -gtoken@^1.2.1: - version "1.2.3" - resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-1.2.3.tgz#5509571b8afd4322e124cf66cf68115284c476d8" - integrity sha512-wQAJflfoqSgMWrSBk9Fg86q+sd6s7y6uJhIvvIPz++RElGlMtEqsdAR2oWwZ/WTEtp7P9xFbJRrT976oRgzJ/w== - dependencies: - google-p12-pem "^0.1.0" - jws "^3.0.0" - mime "^1.4.1" - request "^2.72.0" - gtoken@^5.0.4: version "5.3.1" resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-5.3.1.tgz#c1c2598a826f2b5df7c6bb53d7be6cf6d50c3c78" @@ -6291,24 +5988,7 @@ gtoken@^5.0.4: google-p12-pem "^3.0.3" jws "^4.0.0" -gulp-header@^1.7.1: - version "1.8.12" - resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.12.tgz#ad306be0066599127281c4f8786660e705080a84" - integrity sha512-lh9HLdb53sC7XIZOYzTXM4lFuXElv3EVkSDhsd7DoJBj7hm+Ni7D3qYbb+Rr8DuM8nRanBvkVO9d7askreXGnQ== - dependencies: - concat-with-sourcemaps "*" - lodash.template "^4.4.0" - through2 "^2.0.0" - -handlebars-utils@^1.0.2, handlebars-utils@^1.0.4, handlebars-utils@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/handlebars-utils/-/handlebars-utils-1.0.6.tgz#cb9db43362479054782d86ffe10f47abc76357f9" - integrity sha512-d5mmoQXdeEqSKMtQQZ9WkiUcO1E3tPbWxluCK9hVgIDPzQa9WsKo3Lbe/sGflTe7TomHEeZaOgwIkyIr1kfzkw== - dependencies: - kind-of "^6.0.0" - typeof-article "^0.1.1" - -handlebars@^4.7.6, handlebars@^4.7.7: +handlebars@^4.7.7: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== @@ -6390,14 +6070,6 @@ has-value@^1.0.0: has-values "^1.0.0" isobject "^3.0.0" -has-value@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-2.0.2.tgz#d0f12e8780ba8e90e66ad1a21c707fdb67c25658" - integrity sha512-ybKOlcRsK2MqrM3Hmz/lQxXHZ6ejzSPzpNabKB45jb5qDgJvKPa3SdapTsTLwEb9WltgWpOmNax7i+DzNOk4TA== - dependencies: - get-value "^3.0.0" - has-values "^2.0.1" - has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" @@ -6411,13 +6083,6 @@ has-values@^1.0.0: is-number "^3.0.0" kind-of "^4.0.0" -has-values@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/has-values/-/has-values-2.0.1.tgz#3876200ff86d8a8546a9264a952c17d5fc17579d" - integrity sha512-+QdH3jOmq9P8GfdjFg0eJudqx1FqU62NQJ4P16rOEHeRdl7ckgwn6uqQjzYE0ZoHVV/e5E2esuJ5Gl5+HUW19w== - dependencies: - kind-of "^6.0.2" - has-yarn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77" @@ -6430,39 +6095,6 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" -helper-date@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/helper-date/-/helper-date-1.0.1.tgz#12fedea3ad8e44a7ca4c4efb0ff4104a5120cffb" - integrity sha512-wU3VOwwTJvGr/w5rZr3cprPHO+hIhlblTJHD6aFBrKLuNbf4lAmkawd2iK3c6NbJEvY7HAmDpqjOFSI5/+Ey2w== - dependencies: - date.js "^0.3.1" - handlebars-utils "^1.0.4" - moment "^2.18.1" - -helper-markdown@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/helper-markdown/-/helper-markdown-1.0.0.tgz#ee7e9fc554675007d37eb90f7853b13ce74f3e10" - integrity sha512-AnDqMS4ejkQK0MXze7pA9TM3pu01ZY+XXsES6gEE0RmCGk5/NIfvTn0NmItfyDOjRAzyo9z6X7YHbHX4PzIvOA== - dependencies: - handlebars-utils "^1.0.2" - highlight.js "^9.12.0" - remarkable "^1.7.1" - -helper-md@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/helper-md/-/helper-md-0.2.2.tgz#c1f59d7e55bbae23362fd8a0e971607aec69d41f" - integrity sha1-wfWdflW7riM2L9ig6XFgeuxp1B8= - dependencies: - ent "^2.2.0" - extend-shallow "^2.0.1" - fs-exists-sync "^0.1.0" - remarkable "^1.6.2" - -highlight.js@^9.12.0: - version "9.18.5" - resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.18.5.tgz#d18a359867f378c138d6819edfc2a8acd5f29825" - integrity sha512-a5bFyofd/BHCX52/8i8uJkjr9DYwXIPnM/plwI6W7ezItLGqzt7X2G2nXuYSfsIJdkwwj/g9DG1LkcGJI/dDoA== - hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" @@ -6492,14 +6124,6 @@ html-escaper@^2.0.0: resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== -html-tag@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/html-tag/-/html-tag-2.0.0.tgz#36c3bc8d816fd30b570d5764a497a641640c2fed" - integrity sha512-XxzooSo6oBoxBEUazgjdXj7VwTn/iSTSZzTYKzYY6I916tkaYzypHxy+pbVU1h+0UQ9JlVf5XkNQyxOAiiQO1g== - dependencies: - is-self-closing "^1.0.1" - kind-of "^6.0.0" - http-assert@^1.3.0: version "1.5.0" resolved "https://registry.yarnpkg.com/http-assert/-/http-assert-1.5.0.tgz#c389ccd87ac16ed2dfa6246fd73b926aa00e6b8f" @@ -6763,23 +6387,6 @@ ioredis@^4.27.0: redis-parser "^3.0.0" standard-as-callback "^2.1.0" -ioredis@^4.27.1: - version "4.28.2" - resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.2.tgz#493ccd5d869fd0ec86c96498192718171f6c9203" - integrity sha512-kQ+Iv7+c6HsDdPP2XUHaMv8DhnSeAeKEwMbaoqsXYbO+03dItXt7+5jGQDRyjdRUV2rFJbzg7P4Qt1iX2tqkOg== - dependencies: - cluster-key-slot "^1.1.0" - debug "^4.3.1" - denque "^1.1.0" - lodash.defaults "^4.2.0" - lodash.flatten "^4.4.0" - lodash.isarguments "^3.1.0" - p-map "^2.1.0" - redis-commands "1.7.0" - redis-errors "^1.2.0" - redis-parser "^3.0.0" - standard-as-callback "^2.1.0" - ip-regex@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" @@ -6904,13 +6511,6 @@ is-docker@^2.0.0, is-docker@^2.1.1: resolved 
"https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== -is-even@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-even/-/is-even-1.0.0.tgz#76b5055fbad8d294a86b6a949015e1c97b717c06" - integrity sha1-drUFX7rY0pSoa2qUkBXhyXtxfAY= - dependencies: - is-odd "^0.1.2" - is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" @@ -7000,13 +6600,6 @@ is-number-object@^1.0.4: dependencies: has-tostringtag "^1.0.0" -is-number@^2.0.2: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" - integrity sha1-Afy7s5NGOlSPL0ZszhbezknbkI8= - dependencies: - kind-of "^3.0.2" - is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" @@ -7029,13 +6622,6 @@ is-object@^1.0.1: resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== -is-odd@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/is-odd/-/is-odd-0.1.2.tgz#bc573b5ce371ef2aad6e6f49799b72bef13978a7" - integrity sha1-vFc7XONx7yqtbm9JeZtyvvE5eKc= - dependencies: - is-number "^3.0.0" - is-path-inside@^3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -7081,13 +6667,6 @@ is-retry-allowed@^2.2.0: resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz#88f34cbd236e043e71b6932d09b0c65fb7b4d71d" integrity sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg== -is-self-closing@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-self-closing/-/is-self-closing-1.0.1.tgz#5f406b527c7b12610176320338af0fa3896416e4" - integrity sha512-E+60FomW7Blv5GXTlYee2KDrnG6srxF7Xt1SjrhWUGUEsTFIqY/nq2y3DaftCsgUMdh89V07IVfhY9KIJhLezg== - dependencies: - self-closing-tags "^1.0.1" - is-shared-array-buffer@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz#97b0c85fbdacb59c9c446fe653b82cf2b5b7cfe6" @@ -7180,11 +6759,6 @@ isexe@^2.0.0: resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= -isobject@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-0.2.0.tgz#a3432192f39b910b5f02cc989487836ec70aa85e" - integrity sha1-o0MhkvObkQtfAsyYlIeDbscKqF4= - isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" @@ -8243,22 +7817,6 @@ jsonschema@1.4.0: resolved "https://registry.yarnpkg.com/jsonschema/-/jsonschema-1.4.0.tgz#1afa34c4bc22190d8e42271ec17ac8b3404f87b2" integrity sha512-/YgW6pRMr6M7C+4o8kS+B/2myEpHCrxO4PEWnqJNBFMjn7EWXqlQ4tGwL6xTHeRplwuZmcAncdvfOad1nT2yMw== -jsonwebtoken@^8.2.0, jsonwebtoken@^8.5.1: - version "8.5.1" - resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" - integrity 
sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== - dependencies: - jws "^3.2.2" - lodash.includes "^4.3.0" - lodash.isboolean "^3.0.3" - lodash.isinteger "^4.0.4" - lodash.isnumber "^3.0.3" - lodash.isplainobject "^4.0.6" - lodash.isstring "^4.0.1" - lodash.once "^4.0.0" - ms "^2.1.1" - semver "^5.6.0" - jsprim@^1.2.2: version "1.4.0" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.0.tgz#a3b87e40298d8c380552d8cc7628a0bb95a22918" @@ -8307,7 +7865,7 @@ jwa@^2.0.0: ecdsa-sig-formatter "1.0.11" safe-buffer "^5.0.1" -jws@3.x.x, jws@^3.0.0, jws@^3.1.4, jws@^3.2.2: +jws@3.x.x: version "3.2.2" resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== @@ -8342,7 +7900,7 @@ keyv@3.0.0, keyv@^3.0.0: dependencies: json-buffer "3.0.0" -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.1.0, kind-of@^3.2.0: +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= @@ -8356,12 +7914,12 @@ kind-of@^4.0.0: dependencies: is-buffer "^1.1.5" -kind-of@^5.0.0, kind-of@^5.0.2: +kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== -kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: +kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.3" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== @@ -8463,13 +8021,6 @@ koa-mount@^4.0.0: debug "^4.0.1" koa-compose "^4.1.0" -koa-passport@^4.1.4: - version "4.1.4" - resolved "https://registry.yarnpkg.com/koa-passport/-/koa-passport-4.1.4.tgz#5f1665c1c2a37ace79af9f970b770885ca30ccfa" - integrity sha512-dJBCkl4X+zdYxbI2V2OtoGy0PUenpvp2ZLLWObc8UJhsId0iQpTFT8RVcuA0709AL2txGwRHnSPoT1bYNGa6Kg== - dependencies: - passport "^0.4.0" - koa-pino-logger@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/koa-pino-logger/-/koa-pino-logger-3.0.0.tgz#27600b4f3639e8767dfc6b66493109c5457f53ba" @@ -8812,11 +8363,6 @@ locate-path@^5.0.0: dependencies: p-locate "^4.1.0" -lodash._reinterpolate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d" - integrity sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= - lodash.debounce@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" @@ -8837,46 +8383,16 @@ lodash.get@^4.4.2: resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= -lodash.includes@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" - integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8= - lodash.isarguments@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" integrity sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo= 
-lodash.isboolean@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" - integrity sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY= - lodash.isequal@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0" integrity sha1-QVxEePK8wwEgwizhDtMib30+GOA= -lodash.isinteger@^4.0.4: - version "4.0.4" - resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" - integrity sha1-YZwK89A/iwTDH1iChAt3sRzWg0M= - -lodash.isnumber@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" - integrity sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w= - -lodash.isplainobject@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" - integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs= - -lodash.isstring@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" - integrity sha1-1SfftUVuynzJu5XV2ur4i6VKVFE= - lodash.keys@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-4.2.0.tgz#a08602ac12e4fb83f91fc1fb7a360a4d9ba35205" @@ -8892,21 +8408,11 @@ lodash.merge@^4.6.2: resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash.noop@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/lodash.noop/-/lodash.noop-3.0.1.tgz#38188f4d650a3a474258439b96ec45b32617133c" - integrity sha1-OBiPTWUKOkdCWEObluxFsyYXEzw= - lodash.omit@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.omit/-/lodash.omit-4.5.0.tgz#6eb19ae5a1ee1dd9df0b969e66ce0b7fa30b5e60" integrity sha1-brGa5aHuHdnfC5aeZs4Lf6MLXmA= -lodash.once@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" - integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= - lodash.pick@^4.0.0: version "4.4.0" resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" @@ -8917,21 +8423,6 @@ lodash.sortby@^4.7.0: resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= -lodash.template@^4.4.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab" - integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== - dependencies: - lodash._reinterpolate "^3.0.0" - lodash.templatesettings "^4.0.0" - -lodash.templatesettings@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz#e481310f049d3cf6d47e912ad09313b154f0fb33" - integrity sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ== - dependencies: - lodash._reinterpolate "^3.0.0" - lodash.without@^4.4.0: version "4.4.0" resolved 
"https://registry.yarnpkg.com/lodash.without/-/lodash.without-4.4.0.tgz#3cd4574a00b67bae373a94b748772640507b7aac" @@ -8942,7 +8433,7 @@ lodash.xor@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.xor/-/lodash.xor-4.5.0.tgz#4d48ed7e98095b0632582ba714d3ff8ae8fb1db6" integrity sha1-TUjtfpgJWwYyWCunFNP/iuj7HbY= -lodash@4.17.21, lodash@^4.14.0, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.3, lodash@^4.7.0: +lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.17.3, lodash@^4.7.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -9139,7 +8630,7 @@ methods@^1.0.1, methods@^1.1.1, methods@^1.1.2: resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= -micromatch@^3.1.10, micromatch@^3.1.4, micromatch@^3.1.5: +micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== @@ -9240,11 +8731,6 @@ mixin-deep@^1.2.0: for-in "^1.0.2" is-extendable "^1.0.1" -mkdirp-classic@^0.5.2: - version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" - integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== - mkdirp@^0.5.0, mkdirp@^0.5.1: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" @@ -9264,7 +8750,7 @@ moment-timezone@^0.5.31: dependencies: moment ">= 2.9.0" -"moment@>= 2.9.0", moment@^2.18.1: +"moment@>= 2.9.0": version "2.29.1" resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== @@ -9364,11 +8850,6 @@ nan@^2.12.1: resolved "https://registry.yarnpkg.com/nan/-/nan-2.15.0.tgz#3f34a473ff18e15c1b5626b62903b5ad6e665fee" integrity sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ== -nanoid@^2.1.0: - version "2.1.11" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-2.1.11.tgz#ec24b8a758d591561531b4176a01e3ab4f0f0280" - integrity sha512-s/snB+WGm6uwi0WjsZdaVcuf3KJXlfGl2LcxgwkEwJF0D/BWzVWAZW/XY4bFaiR7s0Jk3FPvlnepg1H1b1UwlA== - nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" @@ -9448,11 +8929,6 @@ node-forge@^0.10.0: resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== -node-forge@^0.7.1: - version "0.7.6" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac" - integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw== - node-gyp-build@~4.1.0: version "4.1.1" resolved 
"https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" @@ -9580,11 +9056,6 @@ oauth-sign@~0.9.0: resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== -oauth@0.9.x, oauth@^0.9.15: - version "0.9.15" - resolved "https://registry.yarnpkg.com/oauth/-/oauth-0.9.15.tgz#bd1fefaf686c96b75475aed5196412ff60cfb9c1" - integrity sha1-vR/vr2hslrdUda7VGWQS/2DPucE= - object-assign@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-2.1.1.tgz#43c36e5d569ff8e4816c4efa8be02d26967c18aa" @@ -9895,84 +9366,6 @@ pascalcase@^0.1.1: resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= -passport-google-auth@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/passport-google-auth/-/passport-google-auth-1.0.2.tgz#8b300b5aa442ef433de1d832ed3112877d0b2938" - integrity sha1-izALWqRC70M94dgy7TESh30LKTg= - dependencies: - googleapis "^16.0.0" - passport-strategy "1.x" - -passport-google-oauth1@1.x.x: - version "1.0.0" - resolved "https://registry.yarnpkg.com/passport-google-oauth1/-/passport-google-oauth1-1.0.0.tgz#af74a803df51ec646f66a44d82282be6f108e0cc" - integrity sha1-r3SoA99R7GRvZqRNgigr5vEI4Mw= - dependencies: - passport-oauth1 "1.x.x" - -passport-google-oauth20@2.x.x: - version "2.0.0" - resolved "https://registry.yarnpkg.com/passport-google-oauth20/-/passport-google-oauth20-2.0.0.tgz#0d241b2d21ebd3dc7f2b60669ec4d587e3a674ef" - integrity sha512-KSk6IJ15RoxuGq7D1UKK/8qKhNfzbLeLrG3gkLZ7p4A6DBCcv7xpyQwuXtWdpyR0+E0mwkpjY1VfPOhxQrKzdQ== - dependencies: - passport-oauth2 "1.x.x" - -passport-google-oauth@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/passport-google-oauth/-/passport-google-oauth-2.0.0.tgz#f6eb4bc96dd6c16ec0ecfdf4e05ec48ca54d4dae" - integrity sha512-JKxZpBx6wBQXX1/a1s7VmdBgwOugohH+IxCy84aPTZNq/iIPX6u7Mqov1zY7MKRz3niFPol0KJz8zPLBoHKtYA== - dependencies: - passport-google-oauth1 "1.x.x" - passport-google-oauth20 "2.x.x" - -passport-jwt@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/passport-jwt/-/passport-jwt-4.0.0.tgz#7f0be7ba942e28b9f5d22c2ebbb8ce96ef7cf065" - integrity sha512-BwC0n2GP/1hMVjR4QpnvqA61TxenUMlmfNjYNgK0ZAs0HK4SOQkHcSv4L328blNTLtHq7DbmvyNJiH+bn6C5Mg== - dependencies: - jsonwebtoken "^8.2.0" - passport-strategy "^1.0.0" - -passport-local@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/passport-local/-/passport-local-1.0.0.tgz#1fe63268c92e75606626437e3b906662c15ba6ee" - integrity sha1-H+YyaMkudWBmJkN+O5BmYsFbpu4= - dependencies: - passport-strategy "1.x.x" - -passport-oauth1@1.x.x: - version "1.2.0" - resolved "https://registry.yarnpkg.com/passport-oauth1/-/passport-oauth1-1.2.0.tgz#5229d431781bf5b265bec86ce9a9cce58a756cf9" - integrity sha512-Sv2YWodC6jN12M/OXwmR4BIXeeIHjjbwYTQw4kS6tHK4zYzSEpxBgSJJnknBjICA5cj0ju3FSnG1XmHgIhYnLg== - dependencies: - oauth "0.9.x" - passport-strategy "1.x.x" - utils-merge "1.x.x" - -passport-oauth2@1.x.x: - version "1.6.1" - resolved "https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.6.1.tgz#c5aee8f849ce8bd436c7f81d904a3cd1666f181b" - integrity sha512-ZbV43Hq9d/SBSYQ22GOiglFsjsD1YY/qdiptA+8ej+9C1dL1TVB+mBE5kDH/D4AJo50+2i8f4bx0vg4/yDDZCQ== - dependencies: - base64url "3.x.x" - oauth "0.9.x" - passport-strategy 
"1.x.x" - uid2 "0.0.x" - utils-merge "1.x.x" - -passport-strategy@1.x, passport-strategy@1.x.x, passport-strategy@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" - integrity sha1-tVOaqPwiWj0a0XlHbd8ja0QPUuQ= - -passport@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270" - integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg== - dependencies: - passport-strategy "1.x.x" - pause "0.0.1" - path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" @@ -10027,11 +9420,6 @@ path-type@^4.0.0: resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -pause@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/pause/-/pause-0.0.1.tgz#1d408b3fdb76923b9543d96fb4c9dfd535d9cb5d" - integrity sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10= - pend@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" @@ -10755,7 +10143,7 @@ readable-stream@1.1.14, readable-stream@^1.0.27-1: isarray "0.0.1" string_decoder "~0.10.x" -"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.0.1, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: +"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.0.1, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -10896,16 +10284,6 @@ regex-not@^1.0.0, regex-not@^1.0.2: extend-shallow "^3.0.2" safe-regex "^1.1.0" -regexparam@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-2.0.0.tgz#059476767d5f5f87f735fc7922d133fd1a118c8c" - integrity sha512-gJKwd2MVPWHAIFLsaYDZfyKzHNS4o7E/v8YmNf44vmeV2e4YfVoDToTOKTvE7ab68cRJ++kLuEXJBaEeJVt5ow== - -regexparam@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-1.3.0.tgz#2fe42c93e32a40eff6235d635e0ffa344b92965f" - integrity sha512-6IQpFBv6e5vz1QAqI+V4k8P2e/3gRrqfCJ9FI+O1FLQTO+Uz6RXZEZOPmTJ6hlGj7gkERzY5BRCv09whKP96/g== - regexpp@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f" @@ -10949,21 +10327,6 @@ regjsparser@^0.7.0: dependencies: jsesc "~0.5.0" -relative@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/relative/-/relative-3.0.2.tgz#0dcd8ec54a5d35a3c15e104503d65375b5a5367f" - integrity sha1-Dc2OxUpdNaPBXhBFA9ZTdbWlNn8= - dependencies: - isobject "^2.0.0" - -remarkable@^1.6.2, remarkable@^1.7.1: - version "1.7.4" - resolved "https://registry.yarnpkg.com/remarkable/-/remarkable-1.7.4.tgz#19073cb960398c87a7d6546eaa5e50d2022fcd00" - integrity sha512-e6NKUXgX95whv7IgddywbeN/ItCkWbISmc2DiqHJb0wTrqZIexqdco5b8Z3XZoo/48IdNVKM9ZCvTPJ4F5uvhg== - dependencies: - argparse "^1.0.10" - autolinker "~0.28.0" - remove-trailing-separator@^1.0.1: version "1.1.0" resolved 
"https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" @@ -11000,7 +10363,7 @@ request-promise-native@^1.0.5: stealthy-require "^1.1.1" tough-cookie "^2.3.3" -request@^2.72.0, request@^2.74.0, request@^2.87.0, request@^2.88.0: +request@^2.87.0: version "2.88.2" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== @@ -11212,11 +10575,6 @@ sane@^4.0.3: minimist "^1.1.1" walker "~1.0.5" -sanitize-s3-objectkey@^0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/sanitize-s3-objectkey/-/sanitize-s3-objectkey-0.0.1.tgz#efa9887cd45275b40234fb4bb12fc5754fe64e7e" - integrity sha512-ZTk7aqLxy4sD40GWcYWoLfbe05XLmkKvh6vGKe13ADlei24xlezcvjgKy1qRArlaIbIMYaqK7PCalvZtulZlaQ== - saslprep@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/saslprep/-/saslprep-1.0.3.tgz#4c02f946b56cf54297e347ba1093e7acac4cf226" @@ -11229,7 +10587,7 @@ sax@1.2.1: resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" integrity sha1-e45lYZCyKOgaZq6nSEgNgozS03o= -sax@>=0.1.1, sax@>=0.6.0, sax@^1.2.4: +sax@>=0.6.0, sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== @@ -11262,11 +10620,6 @@ seek-bzip@^1.0.5: dependencies: commander "^2.8.1" -self-closing-tags@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/self-closing-tags/-/self-closing-tags-1.0.1.tgz#6c5fa497994bb826b484216916371accee490a5d" - integrity sha512-7t6hNbYMxM+VHXTgJmxwgZgLGktuXtVVD5AivWzNTdJBM4DBjnDKDzkf2SrNjihaArpeJYNjxkELBu1evI4lQA== - semver-diff@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b" @@ -11274,7 +10627,7 @@ semver-diff@^3.1.1: dependencies: semver "^6.3.0" -"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@^5.4.1, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1: +"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== @@ -11385,18 +10738,6 @@ shell-path@^2.1.0: dependencies: shell-env "^0.3.0" -shimmer@^1.2.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/shimmer/-/shimmer-1.2.1.tgz#610859f7de327b587efebf501fb43117f9aff337" - integrity sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw== - -shortid@^2.2.15: - version "2.2.16" - resolved "https://registry.yarnpkg.com/shortid/-/shortid-2.2.16.tgz#b742b8f0cb96406fd391c76bfc18a67a57fe5608" - integrity sha512-Ugt+GIZqvGXCIItnsL+lvFJOiN7RYqlGy7QE41O3YC1xbNSeDGIRO7xg2JJXIAj1cAGnOeC1r7/T9pgrtQbv4g== - dependencies: - nanoid "^2.1.0" - side-channel@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" @@ -11671,11 +11012,6 @@ sshpk@^1.7.0: safer-buffer "^2.0.2" tweetnacl "~0.14.0" -stack-chain@^1.3.7: - version "1.3.7" - resolved 
"https://registry.yarnpkg.com/stack-chain/-/stack-chain-1.3.7.tgz#d192c9ff4ea6a22c94c4dd459171e3f00cea1285" - integrity sha1-0ZLJ/06moiyUxN1FkXHj8AzqEoU= - stack-trace@0.0.x: version "0.0.10" resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" @@ -11723,11 +11059,6 @@ stealthy-require@^1.1.1: resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= -step@0.0.x: - version "0.0.6" - resolved "https://registry.yarnpkg.com/step/-/step-0.0.6.tgz#143e7849a5d7d3f4a088fe29af94915216eeede2" - integrity sha1-FD54SaXX0/SgiP4pr5SRUhbu7eI= - strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" @@ -11746,11 +11077,6 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -string-template@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/string-template/-/string-template-1.0.0.tgz#9e9f2233dc00f218718ec379a28a5673ecca8b96" - integrity sha1-np8iM9wA8hhxjsN5oopWc+zKi5Y= - string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -11884,11 +11210,6 @@ strip-outer@^1.0.0: dependencies: escape-string-regexp "^1.0.2" -striptags@^3.1.1: - version "3.2.0" - resolved "https://registry.yarnpkg.com/striptags/-/striptags-3.2.0.tgz#cc74a137db2de8b0b9a370006334161f7dd67052" - integrity sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw== - style-loader@^3.3.1: version "3.3.1" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" @@ -11983,13 +11304,6 @@ svelte-portal@^1.0.0: resolved "https://registry.yarnpkg.com/svelte-portal/-/svelte-portal-1.0.0.tgz#36a47c5578b1a4d9b4dc60fa32a904640ec4cdd3" integrity sha512-nHf+DS/jZ6jjnZSleBMSaZua9JlG5rZv9lOGKgJuaZStfevtjIlUJrkLc3vbV8QdBvPPVmvcjTlazAzfKu0v3Q== -svelte-spa-router@^3.0.5: - version "3.2.0" - resolved "https://registry.yarnpkg.com/svelte-spa-router/-/svelte-spa-router-3.2.0.tgz#fae3311d292451236cb57131262406cf312b15ee" - integrity sha512-igemo5Vs82TGBBw+DjWt6qKameXYzNs6aDXcTxou5XbEvOjiRcAM6MLkdVRCatn6u8r42dE99bt/br7T4qe/AQ== - dependencies: - regexparam "2.0.0" - svelte@^3.38.2: version "3.44.1" resolved "https://registry.yarnpkg.com/svelte/-/svelte-3.44.1.tgz#5cc772a8340f4519a4ecd1ac1a842325466b1a63" @@ -12077,16 +11391,6 @@ tapable@^2.1.1, tapable@^2.2.0: resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== -tar-fs@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.1.4" - tar-stream@^1.5.2: version "1.6.2" resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.2.tgz#8ea55dab37972253d9a9af90fdcd559ae435c555" @@ -12100,17 +11404,6 @@ tar-stream@^1.5.2: to-buffer "^1.1.1" xtend "^4.0.0" -tar-stream@^2.1.4: - version "2.2.0" - resolved 
"https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - tarn@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/tarn/-/tarn-1.1.5.tgz#7be88622e951738b9fa3fb77477309242cdddc2d" @@ -12302,11 +11595,6 @@ to-fast-properties@^2.0.0: resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= -to-gfm-code-block@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/to-gfm-code-block/-/to-gfm-code-block-0.1.1.tgz#25d045a5fae553189e9637b590900da732d8aa82" - integrity sha1-JdBFpfrlUxielje1kJANpzLYqoI= - to-json-schema@0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/to-json-schema/-/to-json-schema-0.2.5.tgz#ef3c3f11ad64460dcfbdbafd0fd525d69d62a98f" @@ -12543,13 +11831,6 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typeof-article@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/typeof-article/-/typeof-article-0.1.1.tgz#9f07e733c3fbb646ffa9e61c08debacd460e06af" - integrity sha1-nwfnM8P7tkb/qeYcCN66zUYOBq8= - dependencies: - kind-of "^3.1.0" - typescript@^4.3.5: version "4.3.5" resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.3.5.tgz#4d1c37cc16e893973c45a06886b7113234f119f4" @@ -12565,11 +11846,6 @@ uglify-js@^3.1.4: resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.14.3.tgz#c0f25dfea1e8e5323eccf59610be08b6043c15cf" integrity sha512-mic3aOdiq01DuSVx0TseaEzMIVqebMZ0Z3vaeDhFEh9bsc24hV1TFvN74reA2vs08D0ZWfNjAcJ3UbVLaBss+g== -uid2@0.0.x: - version "0.0.4" - resolved "https://registry.yarnpkg.com/uid2/-/uid2-0.0.4.tgz#033f3b1d5d32505f5ce5f888b9f3b667123c0a44" - integrity sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA== - unbox-primitive@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471" @@ -12773,11 +12049,6 @@ util.promisify@^1.0.0, util.promisify@^1.0.1: has-symbols "^1.0.1" object.getownpropertydescriptors "^2.1.1" -utils-merge@1.x.x: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= - uuid@3.3.2, uuid@^3.1.0, uuid@^3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" @@ -12842,7 +12113,7 @@ verror@1.3.6: dependencies: extsprintf "1.0.2" -vm2@^3.9.3, vm2@^3.9.4: +vm2@^3.9.3: version "3.9.5" resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.5.tgz#5288044860b4bbace443101fcd3bddb2a0aa2496" integrity sha512-LuCAHZN75H9tdrAiLFf030oW7nJV5xwNMuk1ymOZwopmuK3d2H4L1Kv4+GFHgarKiLfXXLFU+7LDABHnwOkWng== @@ -12881,14 +12152,6 @@ watchpack@^2.2.0: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" -webfinger@^0.4.2: - version "0.4.2" - resolved "https://registry.yarnpkg.com/webfinger/-/webfinger-0.4.2.tgz#3477a6d97799461896039fcffc650b73468ee76d" - integrity sha1-NHem2XeZRhiWA5/P/GULc0aO520= - dependencies: - step "0.0.x" - xml2js "0.1.x" - webidl-conversions@^3.0.0: version "3.0.1" resolved 
"https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" @@ -13205,13 +12468,6 @@ xml-parse-from-string@^1.0.0: resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28" integrity sha1-qQKekp09vN7RafPG4oI42VpdWig= -xml2js@0.1.x: - version "0.1.14" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c" - integrity sha1-UnTmf1pkxfkpdM2FE54DMq3GuQw= - dependencies: - sax ">=0.1.1" - xml2js@0.4.19: version "0.4.19" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7" @@ -13353,11 +12609,6 @@ yauzl@^2.4.2: buffer-crc32 "~0.2.3" fd-slicer "~1.1.0" -year@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/year/-/year-0.2.1.tgz#4083ae520a318b23ec86037f3000cb892bdf9bb0" - integrity sha1-QIOuUgoxiyPshgN/MADLiSvfm7A= - ylru@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/ylru/-/ylru-1.2.1.tgz#f576b63341547989c1de7ba288760923b27fe84f" @@ -13379,7 +12630,7 @@ z-schema@^5.0.1: optionalDependencies: commander "^2.7.1" -zlib@1.0.5, zlib@^1.0.5: +zlib@1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/zlib/-/zlib-1.0.5.tgz#6e7c972fc371c645a6afb03ab14769def114fcc0" integrity sha1-bnyXL8NxxkWmr7A6sUdp3vEU/MA= From d5f8cc1023de5315e473958ecd8962451ef2dd1a Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 28 Jan 2022 15:43:51 +0000 Subject: [PATCH 3/9] Tests updating, all now passing, fixed some issues discovered by them. --- packages/backend-core/src/tenancy/context.js | 4 + .../query/import/tests/index.spec.js | 1 + .../server/src/api/controllers/routing.js | 2 +- .../src/api/routes/tests/automation.spec.js | 1 + .../src/api/routes/tests/routing.spec.js | 33 +++-- .../routes/tests/utilities/TestFunctions.js | 52 +++++--- packages/server/src/environment.js | 3 +- packages/server/src/middleware/currentapp.js | 95 +++++++------- .../src/middleware/tests/authorized.spec.js | 9 +- .../src/middleware/tests/currentapp.spec.js | 6 + .../src/tests/utilities/TestConfiguration.js | 116 ++++++++++-------- packages/server/src/utilities/global.js | 2 +- 12 files changed, 195 insertions(+), 129 deletions(-) diff --git a/packages/backend-core/src/tenancy/context.js b/packages/backend-core/src/tenancy/context.js index d54622f979..3d2c862d71 100644 --- a/packages/backend-core/src/tenancy/context.js +++ b/packages/backend-core/src/tenancy/context.js @@ -59,6 +59,10 @@ exports.updateTenantId = tenantId => { exports.updateAppId = appId => { try { cls.setOnContext(ContextKeys.APP_ID, appId) + cls.setOnContext(ContextKeys.PROD_DB, null) + cls.setOnContext(ContextKeys.DEV_DB, null) + cls.setOnContext(ContextKeys.CURRENT_DB, null) + cls.setOnContext(ContextKeys.DB_OPTS, null) } catch (err) { if (env.isTest()) { TEST_APP_ID = appId diff --git a/packages/server/src/api/controllers/query/import/tests/index.spec.js b/packages/server/src/api/controllers/query/import/tests/index.spec.js index 36227a4c55..8d074ea885 100644 --- a/packages/server/src/api/controllers/query/import/tests/index.spec.js +++ b/packages/server/src/api/controllers/query/import/tests/index.spec.js @@ -6,6 +6,7 @@ const db = jest.fn(() => { } }) jest.mock("../../../../../db", () => db) +require("@budibase/backend-core").init(require("../../../../../db")) const { RestImporter } = require("../index") diff --git a/packages/server/src/api/controllers/routing.js 
b/packages/server/src/api/controllers/routing.js index ca4dea2738..d6ba9d6ac2 100644 --- a/packages/server/src/api/controllers/routing.js +++ b/packages/server/src/api/controllers/routing.js @@ -60,7 +60,7 @@ exports.fetch = async ctx => { } exports.clientFetch = async ctx => { - const routing = await getRoutingStructure(ctx.appId) + const routing = await getRoutingStructure() let roleId = ctx.user.role._id const roleIds = await getUserRoleHierarchy(roleId) for (let topLevel of Object.values(routing.routes)) { diff --git a/packages/server/src/api/routes/tests/automation.spec.js b/packages/server/src/api/routes/tests/automation.spec.js index c412c34fdc..3e5725bb95 100644 --- a/packages/server/src/api/routes/tests/automation.spec.js +++ b/packages/server/src/api/routes/tests/automation.spec.js @@ -145,6 +145,7 @@ describe("/automations", () => { let table = await config.createTable() automation.definition.trigger.inputs.tableId = table._id automation.definition.steps[0].inputs.row.tableId = table._id + automation.appId = config.appId automation = await config.createAutomation(automation) await setup.delay(500) const res = await testAutomation(config, automation) diff --git a/packages/server/src/api/routes/tests/routing.spec.js b/packages/server/src/api/routes/tests/routing.spec.js index fdc414448c..d6d05c3322 100644 --- a/packages/server/src/api/routes/tests/routing.spec.js +++ b/packages/server/src/api/routes/tests/routing.spec.js @@ -1,10 +1,15 @@ const setup = require("./utilities") const { basicScreen } = setup.structures -const { checkBuilderEndpoint } = require("./utilities/TestFunctions") +const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") +const { doInAppContext } = require("@budibase/backend-core/context") const route = "/test" +// there are checks which are disabled in test env, +// these checks need to be enabled for this test + + describe("/routing", () => { let request = setup.getRequest() let config = setup.getConfig() @@ -26,20 +31,24 @@ describe("/routing", () => { describe("fetch", () => { it("prevents a public user from accessing development app", async () => { - await request - .get(`/api/routing/client`) - .set(config.publicHeaders({ prodApp: false })) - .expect(302) + await runInProd(() => { + return request + .get(`/api/routing/client`) + .set(config.publicHeaders({ prodApp: false })) + .expect(302) + }) }) it("prevents a non builder from accessing development app", async () => { - await request - .get(`/api/routing/client`) - .set(await config.roleHeaders({ - roleId: BUILTIN_ROLE_IDS.BASIC, - prodApp: false - })) - .expect(302) + await runInProd(async () => { + return request + .get(`/api/routing/client`) + .set(await config.roleHeaders({ + roleId: BUILTIN_ROLE_IDS.BASIC, + prodApp: false + })) + .expect(302) + }) }) it("returns the correct routing for basic user", async () => { const res = await request diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.js b/packages/server/src/api/routes/tests/utilities/TestFunctions.js index e9e15b7619..c752507d25 100644 --- a/packages/server/src/api/routes/tests/utilities/TestFunctions.js +++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.js @@ -3,7 +3,8 @@ const appController = require("../../../controllers/application") const { AppStatus } = require("../../../../db/utils") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") const { TENANT_ID } = 
require("../../../../tests/utilities/structures") -const { getAppDB } = require("@budibase/backend-core/context") +const { getAppDB, doInAppContext } = require("@budibase/backend-core/context") +const env = require("../../../../environment") function Request(appId, params) { this.appId = appId @@ -11,9 +12,15 @@ function Request(appId, params) { this.request = {} } +function runRequest(appId, controlFunc, request) { + return doInAppContext(appId, async () => { + return controlFunc(request) + }) +} + exports.getAllTableRows = async config => { const req = new Request(config.appId, { tableId: config.table._id }) - await rowController.fetch(req) + await runRequest(config.appId, rowController.fetch, req) return req.body } @@ -26,14 +33,17 @@ exports.clearAllApps = async (tenantId = TENANT_ID) => { } for (let app of apps) { const { appId } = app - await appController.delete(new Request(null, { appId })) + const req = new Request(null, { appId }) + await runRequest(appId, appController.delete, req) } } exports.clearAllAutomations = async config => { const automations = await config.getAllAutomations() for (let auto of automations) { - await config.deleteAutomation(auto) + await doInAppContext(config.appId, async () => { + await config.deleteAutomation(auto) + }) } } @@ -101,15 +111,27 @@ exports.getDB = () => { } exports.testAutomation = async (config, automation) => { - return await config.request - .post(`/api/automations/${automation._id}/test`) - .send({ - row: { - name: "Test", - description: "TEST", - }, - }) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + return runRequest(automation.appId, async () => { + return await config.request + .post(`/api/automations/${automation._id}/test`) + .send({ + row: { + name: "Test", + description: "TEST", + }, + }) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(200) + }) +} + +exports.runInProd = async func => { + const nodeEnv = env.NODE_ENV + const workerId = env.JEST_WORKER_ID + env._set("NODE_ENV", "PRODUCTION") + env._set("JEST_WORKER_ID", null) + await func() + env._set("NODE_ENV", nodeEnv) + env._set("JEST_WORKER_ID", workerId) } diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js index 614f41a29f..05edb0a95e 100644 --- a/packages/server/src/environment.js +++ b/packages/server/src/environment.js @@ -2,7 +2,8 @@ function isTest() { return ( process.env.NODE_ENV === "jest" || process.env.NODE_ENV === "cypress" || - process.env.JEST_WORKER_ID != null + (process.env.JEST_WORKER_ID != null && + process.env.JEST_WORKER_ID !== "null") ) } diff --git a/packages/server/src/middleware/currentapp.js b/packages/server/src/middleware/currentapp.js index 43f5ed9d46..70dd1bf578 100644 --- a/packages/server/src/middleware/currentapp.js +++ b/packages/server/src/middleware/currentapp.js @@ -13,6 +13,7 @@ const { isUserInAppTenant } = require("@budibase/backend-core/tenancy") const { getCachedSelf } = require("../utilities/global") const env = require("../environment") const { isWebhookEndpoint } = require("./utils") +const { doInAppContext } = require("@budibase/backend-core/context") module.exports = async (ctx, next) => { // try to get the appID from the request @@ -40,13 +41,15 @@ module.exports = async (ctx, next) => { } // deny access to application preview - if ( - isDevAppID(requestAppId) && - !isWebhookEndpoint(ctx) && - (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) - ) { - clearCookie(ctx, Cookies.CurrentApp) - return ctx.redirect("/") + if 
(!env.isTest()) { + if ( + isDevAppID(requestAppId) && + !isWebhookEndpoint(ctx) && + (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) + ) { + clearCookie(ctx, Cookies.CurrentApp) + return ctx.redirect("/") + } } let appId, @@ -67,44 +70,46 @@ module.exports = async (ctx, next) => { return next() } - let noCookieSet = false - // if the user not in the right tenant then make sure they have no permissions - // need to judge this only based on the request app ID, - if ( - env.MULTI_TENANCY && - ctx.user && - requestAppId && - !isUserInAppTenant(requestAppId) - ) { - // don't error, simply remove the users rights (they are a public user) - delete ctx.user.builder - delete ctx.user.admin - delete ctx.user.roles - roleId = BUILTIN_ROLE_IDS.PUBLIC - noCookieSet = true - } - - ctx.appId = appId - if (roleId) { - ctx.roleId = roleId - const userId = ctx.user ? generateUserMetadataID(ctx.user._id) : null - ctx.user = { - ...ctx.user, - // override userID with metadata one - _id: userId, - userId, - roleId, - role: await getRole(appId, roleId), + return doInAppContext(appId, async () => { + let noCookieSet = false + // if the user not in the right tenant then make sure they have no permissions + // need to judge this only based on the request app ID, + if ( + env.MULTI_TENANCY && + ctx.user && + requestAppId && + !isUserInAppTenant(requestAppId) + ) { + // don't error, simply remove the users rights (they are a public user) + delete ctx.user.builder + delete ctx.user.admin + delete ctx.user.roles + roleId = BUILTIN_ROLE_IDS.PUBLIC + noCookieSet = true } - } - if ( - (requestAppId !== appId || - appCookie == null || - appCookie.appId !== requestAppId) && - !noCookieSet - ) { - setCookie(ctx, { appId }, Cookies.CurrentApp) - } - return next() + ctx.appId = appId + if (roleId) { + ctx.roleId = roleId + const userId = ctx.user ? 
generateUserMetadataID(ctx.user._id) : null + ctx.user = { + ...ctx.user, + // override userID with metadata one + _id: userId, + userId, + roleId, + role: await getRole(roleId), + } + } + if ( + (requestAppId !== appId || + appCookie == null || + appCookie.appId !== requestAppId) && + !noCookieSet + ) { + setCookie(ctx, { appId }, Cookies.CurrentApp) + } + + return next() + }) } diff --git a/packages/server/src/middleware/tests/authorized.spec.js b/packages/server/src/middleware/tests/authorized.spec.js index 9775965b5a..205d0b8d2c 100644 --- a/packages/server/src/middleware/tests/authorized.spec.js +++ b/packages/server/src/middleware/tests/authorized.spec.js @@ -11,6 +11,9 @@ const authorizedMiddleware = require("../authorized") const env = require("../../environment") const { PermissionTypes, PermissionLevels } = require("@budibase/backend-core/permissions") require("@budibase/backend-core").init(require("../../db")) +const { doInAppContext } = require("@budibase/backend-core/context") + +const APP_ID = "" class TestConfiguration { constructor(role) { @@ -22,7 +25,7 @@ class TestConfiguration { request: { url: "" }, - appId: "", + appId: APP_ID, auth: {}, next: this.next, throw: this.throw @@ -30,7 +33,9 @@ class TestConfiguration { } executeMiddleware() { - return this.middleware(this.ctx, this.next) + return doInAppContext(APP_ID, () => { + return this.middleware(this.ctx, this.next) + }) } setUser(user) { diff --git a/packages/server/src/middleware/tests/currentapp.spec.js b/packages/server/src/middleware/tests/currentapp.spec.js index 27c88f3b48..4e53a6a4c0 100644 --- a/packages/server/src/middleware/tests/currentapp.spec.js +++ b/packages/server/src/middleware/tests/currentapp.spec.js @@ -1,6 +1,11 @@ mockAuthWithNoCookie() mockWorker() +jest.mock("@budibase/backend-core/db", () => ({ + ...jest.requireActual("@budibase/backend-core/db"), + dbExists: () => true, +})) + function mockWorker() { jest.mock("../../utilities/workerRequests", () => ({ getGlobalSelf: () => { @@ -50,6 +55,7 @@ function mockAuthWithCookie() { return "app_test" }, setCookie: jest.fn(), + clearCookie: jest.fn(), getCookie: () => ({appId: "app_different", roleId: "PUBLIC"}), })) jest.mock("@budibase/backend-core/constants", () => ({ diff --git a/packages/server/src/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.js index 48c8a88410..f08067ea2e 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.js +++ b/packages/server/src/tests/utilities/TestConfiguration.js @@ -1,3 +1,6 @@ +const core = require("@budibase/backend-core") +const CouchDB = require("../../db") +core.init(CouchDB) const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") const env = require("../../environment") const { @@ -17,14 +20,11 @@ const supertest = require("supertest") const { cleanup } = require("../../utilities/fileSystem") const { Cookies, Headers } = require("@budibase/backend-core/constants") const { jwt } = require("@budibase/backend-core/auth") -const core = require("@budibase/backend-core") const { getGlobalDB } = require("@budibase/backend-core/tenancy") const { createASession } = require("@budibase/backend-core/sessions") const { user: userCache } = require("@budibase/backend-core/cache") -const CouchDB = require("../../db") const newid = require("../../db/newid") const context = require("@budibase/backend-core/context") -core.init(CouchDB) const GLOBAL_USER_ID = "us_uuid1" const EMAIL = "babs@babs.com" @@ -51,7 +51,6 @@ class TestConfiguration { } async 
_req(config, params, controlFunc) { - context.updateAppId(this.appId) const request = {} // fake cookies, we don't need them request.cookies = { set: () => {}, get: () => {} } @@ -62,11 +61,21 @@ class TestConfiguration { request.request = { body: config, } - if (params) { - request.params = params + async function run() { + if (params) { + request.params = params + } + await controlFunc(request) + return request.body + } + // check if already in a context + if (context.getAppId() == null) { + return context.doInAppContext(this.appId, async () => { + return run() + }) + } else { + return run() } - await controlFunc(request) - return request.body } async globalUser({ @@ -182,12 +191,14 @@ class TestConfiguration { async deploy() { await this._req(null, null, controllers.deploy.deployApp) const prodAppId = this.getAppId().replace("_dev", "") - const appPackage = await this._req( - null, - { appId: prodAppId }, - controllers.app.fetchAppPackage - ) - return appPackage.application + return context.doInAppContext(prodAppId, async () => { + const appPackage = await this._req( + null, + { appId: prodAppId }, + controllers.app.fetchAppPackage + ) + return appPackage.application + }) } async updateTable(config = null) { @@ -416,46 +427,47 @@ class TestConfiguration { async login({ roleId, userId, builder, prodApp = false } = {}) { const appId = prodApp ? this.prodAppId : this.appId - - userId = !userId ? `us_uuid1` : userId - if (!this.request) { - throw "Server has not been opened, cannot login." - } - // make sure the user exists in the global DB - if (roleId !== BUILTIN_ROLE_IDS.PUBLIC) { - await this.globalUser({ - userId, - builder, - roles: { [this.prodAppId]: roleId }, + return context.doInAppContext(appId, async () => { + userId = !userId ? `us_uuid1` : userId + if (!this.request) { + throw "Server has not been opened, cannot login." 
+ } + // make sure the user exists in the global DB + if (roleId !== BUILTIN_ROLE_IDS.PUBLIC) { + await this.globalUser({ + id: userId, + builder, + roles: { [this.prodAppId]: roleId }, + }) + } + await createASession(userId, { + sessionId: "sessionid", + tenantId: TENANT_ID, }) - } - await createASession(userId, { - sessionId: "sessionid", - tenantId: TENANT_ID, - }) - // have to fake this - const auth = { - userId, - sessionId: "sessionid", - tenantId: TENANT_ID, - } - const app = { - roleId: roleId, - appId, - } - const authToken = jwt.sign(auth, env.JWT_SECRET) - const appToken = jwt.sign(app, env.JWT_SECRET) + // have to fake this + const auth = { + userId, + sessionId: "sessionid", + tenantId: TENANT_ID, + } + const app = { + roleId: roleId, + appId, + } + const authToken = jwt.sign(auth, env.JWT_SECRET) + const appToken = jwt.sign(app, env.JWT_SECRET) - // returning necessary request headers - await userCache.invalidateUser(userId) - return { - Accept: "application/json", - Cookie: [ - `${Cookies.Auth}=${authToken}`, - `${Cookies.CurrentApp}=${appToken}`, - ], - [Headers.APP_ID]: appId, - } + // returning necessary request headers + await userCache.invalidateUser(userId) + return { + Accept: "application/json", + Cookie: [ + `${Cookies.Auth}=${authToken}`, + `${Cookies.CurrentApp}=${appToken}`, + ], + [Headers.APP_ID]: appId, + } + }) } } diff --git a/packages/server/src/utilities/global.js b/packages/server/src/utilities/global.js index 959eb59932..317d80689a 100644 --- a/packages/server/src/utilities/global.js +++ b/packages/server/src/utilities/global.js @@ -48,7 +48,7 @@ exports.getCachedSelf = async (ctx, appId) => { // this has to be tenant aware, can't depend on the context to find it out // running some middlewares before the tenancy causes context to break const user = await userCache.getUser(ctx.user._id) - return processUser(user, appId) + return processUser(user, { appId }) } exports.getRawGlobalUser = async userId => { From 91a90e62b298e2ef641c5c81f4e3647038d147ae Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Fri, 28 Jan 2022 18:52:34 +0000 Subject: [PATCH 4/9] Fixing some issues discovered by the cypress tests, cleaning up how Couch is used. 
--- packages/backend-core/db.js | 1 + packages/backend-core/src/security/roles.js | 5 +- packages/builder/cypress/setup.js | 23 ++-- packages/builder/cypress/ts/setup.ts | 4 + packages/builder/package.json | 2 + packages/builder/tsconfig.json | 23 ++++ packages/builder/yarn.lock | 118 +++++++++++++++--- .../src/api/controllers/table/internal.js | 15 ++- packages/server/src/middleware/builder.js | 4 +- .../server/src/utilities/fileSystem/index.js | 3 +- .../src/api/controllers/global/roles.js | 20 +-- 11 files changed, 169 insertions(+), 49 deletions(-) create mode 100644 packages/builder/cypress/ts/setup.ts create mode 100644 packages/builder/tsconfig.json diff --git a/packages/backend-core/db.js b/packages/backend-core/db.js index a7b38821a7..37b2ffbfa5 100644 --- a/packages/backend-core/db.js +++ b/packages/backend-core/db.js @@ -1,4 +1,5 @@ module.exports = { ...require("./src/db/utils"), ...require("./src/db/constants"), + ...require("./src/db"), } diff --git a/packages/backend-core/src/security/roles.js b/packages/backend-core/src/security/roles.js index 2be5058cbb..ca61ab41c6 100644 --- a/packages/backend-core/src/security/roles.js +++ b/packages/backend-core/src/security/roles.js @@ -7,6 +7,7 @@ const { SEPARATOR, } = require("../db/utils") const { getAppDB } = require("../tenancy/context") +const { getDB } = require("../db") const BUILTIN_IDS = { ADMIN: "ADMIN", @@ -182,8 +183,8 @@ exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => { * Given an app ID this will retrieve all of the roles that are currently within that app. * @return {Promise} An array of the role objects that were found. */ -exports.getAllRoles = async () => { - const db = getAppDB() +exports.getAllRoles = async appId => { + const db = appId ? getDB(appId) : getAppDB() const body = await db.allDocs( getRoleParams(null, { include_docs: true, diff --git a/packages/builder/cypress/setup.js b/packages/builder/cypress/setup.js index 7657303853..07b3cd45c5 100644 --- a/packages/builder/cypress/setup.js +++ b/packages/builder/cypress/setup.js @@ -3,9 +3,6 @@ const path = require("path") const tmpdir = path.join(require("os").tmpdir(), ".budibase") -// these run on ports we don't normally use so that they can run alongside the -const fs = require("fs") - // normal development system const WORKER_PORT = "10002" const MAIN_PORT = cypressConfig.env.PORT @@ -29,22 +26,20 @@ process.env.ALLOW_DEV_AUTOMATIONS = 1 // Stop info logs polluting test outputs process.env.LOG_LEVEL = "error" -async function run() { +exports.run = ( + serverLoc = "../../server/dist", + workerLoc = "../../worker/src/index" +) => { // require("dotenv").config({ path: resolve(dir, ".env") }) - if (!fs.existsSync("../server/dist")) { - console.error("Unable to run cypress, need to build server first") - process.exit(-1) - } - // don't make this a variable or top level require // it will cause environment module to be loaded prematurely - const server = require("../../server/dist/app") + require(serverLoc) process.env.PORT = WORKER_PORT - const worker = require("../../worker/src/index") + require(workerLoc) // reload main port for rest of system process.env.PORT = MAIN_PORT - server.on("close", () => console.log("Server Closed")) - worker.on("close", () => console.log("Worker Closed")) } -run() +if (require.main === module) { + exports.run() +} diff --git a/packages/builder/cypress/ts/setup.ts b/packages/builder/cypress/ts/setup.ts new file mode 100644 index 0000000000..a983b0ba3e --- /dev/null +++ 
b/packages/builder/cypress/ts/setup.ts @@ -0,0 +1,4 @@ +// @ts-ignore +import { run } from "../setup" + +run("../../server/src/index") \ No newline at end of file diff --git a/packages/builder/package.json b/packages/builder/package.json index 18e2f0d4ef..3435402bac 100644 --- a/packages/builder/package.json +++ b/packages/builder/package.json @@ -106,6 +106,8 @@ "start-server-and-test": "^1.12.1", "svelte": "^3.38.2", "svelte-jester": "^1.3.2", + "ts-node": "^10.4.0", + "typescript": "^4.5.5", "vite": "^2.1.5" }, "gitHead": "115189f72a850bfb52b65ec61d932531bf327072" diff --git a/packages/builder/tsconfig.json b/packages/builder/tsconfig.json new file mode 100644 index 0000000000..6a5ba315a1 --- /dev/null +++ b/packages/builder/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "lib": ["es2019"], + "allowJs": true, + "outDir": "dist", + "strict": true, + "noImplicitAny": true, + "esModuleInterop": true, + "resolveJsonModule": true, + "incremental": true + }, + "include": [ + "./src/**/*" + ], + "exclude": [ + "node_modules", + "**/*.json", + "**/*.spec.ts", + "**/*.spec.js" + ] +} diff --git a/packages/builder/yarn.lock b/packages/builder/yarn.lock index f827c20328..dcaa00b14c 100644 --- a/packages/builder/yarn.lock +++ b/packages/builder/yarn.lock @@ -970,10 +970,10 @@ svelte-flatpickr "^3.2.3" svelte-portal "^1.0.0" -"@budibase/bbui@^1.0.46", "@budibase/bbui@^1.0.46-alpha.3": - version "1.0.46" - resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.46.tgz#7306d4eda7f2c827577a4affa1fd314b38ba1198" - integrity sha512-padm0qq2SBNIslXEQW+HIv32pkIHFzloR93FDzSXh0sO43Q+/d2gbAhjI9ZUSAVncx9JNc46dolL1CwrvHFElg== +"@budibase/bbui@^1.0.46-alpha.6", "@budibase/bbui@^1.0.47": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.47.tgz#df2848b89f881fe603e7156855d6a6c31d4f58bf" + integrity sha512-RRm/BgK5aSx2/vGjMGljw240/48Ksc3/h4yB1nhQj8Xx3fKhlGnWDvWNy+sakvA6+fJvEXuti8RoxHtQ6lXmqA== dependencies: "@adobe/spectrum-css-workflow-icons" "^1.2.1" "@spectrum-css/actionbutton" "^1.0.1" @@ -1020,14 +1020,14 @@ svelte-flatpickr "^3.2.3" svelte-portal "^1.0.0" -"@budibase/client@^1.0.46-alpha.3": - version "1.0.46" - resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.46.tgz#e6ef8945b9d7046b6e6d6761628aa1d85387acca" - integrity sha512-jI3z1G/EsfJNCQCvrqzsR4vR1zLoVefzCXCEASIPg9BPzdiAFSwuUJVLijLFIIKfuDVeveUll94fgu7XNY8U2w== +"@budibase/client@^1.0.46-alpha.6": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.47.tgz#ce9e2fbd300e5dc389ea29a3a3347897f096c824" + integrity sha512-jB/al8v+nY/VLc6sH5Jt9JzWONVo+24/cI95iXlZSV5xwiKIVGj4+2F5QjKZ0c9Gm7SrrfP2T571N+4XaXNCGg== dependencies: - "@budibase/bbui" "^1.0.46" + "@budibase/bbui" "^1.0.47" "@budibase/standard-components" "^0.9.139" - "@budibase/string-templates" "^1.0.46" + "@budibase/string-templates" "^1.0.47" regexparam "^1.3.0" shortid "^2.2.15" svelte-spa-router "^3.0.5" @@ -1082,10 +1082,10 @@ svelte-apexcharts "^1.0.2" svelte-flatpickr "^3.1.0" -"@budibase/string-templates@^1.0.46", "@budibase/string-templates@^1.0.46-alpha.3": - version "1.0.46" - resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.46.tgz#5beef1687b451e4512a465b4e143c8ab46234006" - integrity sha512-t4ZAUkSz2XatjAN0faex5ovmD3mFz672lV/aBk7tfLFzZiKlWjngqdwpLLQNnsqeGvYo75JP2J06j86SX6O83w== +"@budibase/string-templates@^1.0.46-alpha.6", "@budibase/string-templates@^1.0.47": + version "1.0.47" + resolved 
"https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.47.tgz#626b9fc4542c7b36a0ae24e820d25a704c527bec" + integrity sha512-87BUfOPr8FGKH8Pt88jhKNGT9PcOmkLRCeen4xi1dI113pAQznBO9vgV+cXOChUBBEQka9Rrt85LMJXidiwVgg== dependencies: "@budibase/handlebars-helpers" "^0.11.7" dayjs "^1.10.4" @@ -1102,6 +1102,18 @@ exec-sh "^0.3.2" minimist "^1.2.0" +"@cspotcode/source-map-consumer@0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz#33bf4b7b39c178821606f669bbc447a6a629786b" + integrity sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg== + +"@cspotcode/source-map-support@0.7.0": + version "0.7.0" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz#4789840aa859e46d2f3173727ab707c66bf344f5" + integrity sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA== + dependencies: + "@cspotcode/source-map-consumer" "0.8.0" + "@cypress/listr-verbose-renderer@^0.4.1": version "0.4.1" resolved "https://registry.yarnpkg.com/@cypress/listr-verbose-renderer/-/listr-verbose-renderer-0.4.1.tgz#a77492f4b11dcc7c446a34b3e28721afd33c642a" @@ -1795,6 +1807,26 @@ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== +"@tsconfig/node10@^1.0.7": + version "1.0.8" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.8.tgz#c1e4e80d6f964fbecb3359c43bd48b40f7cadad9" + integrity sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg== + +"@tsconfig/node12@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.9.tgz#62c1f6dee2ebd9aead80dc3afa56810e58e1a04c" + integrity sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw== + +"@tsconfig/node14@^1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.1.tgz#95f2d167ffb9b8d2068b0b235302fafd4df711f2" + integrity sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg== + +"@tsconfig/node16@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.2.tgz#423c77877d0569db20e1fc80885ac4118314010e" + integrity sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA== + "@types/aria-query@^4.2.0": version "4.2.2" resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" @@ -1971,6 +2003,11 @@ acorn-walk@^7.1.1: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== +acorn-walk@^8.1.1: + version "8.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" @@ -1981,6 +2018,11 @@ acorn@^8.2.4: resolved 
"https://registry.yarnpkg.com/acorn/-/acorn-8.5.0.tgz#4512ccb99b3698c752591e9bb4472e38ad43cee2" integrity sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q== +acorn@^8.4.1: + version "8.7.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" + integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== + agent-base@6: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" @@ -2087,6 +2129,11 @@ arch@^2.1.2: resolved "https://registry.yarnpkg.com/arch/-/arch-2.2.0.tgz#1bc47818f305764f23ab3306b0bfc086c5a29d11" integrity sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ== +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + argparse@^1.0.10, argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -2720,6 +2767,11 @@ core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + cross-spawn@^6.0.0: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" @@ -2965,6 +3017,11 @@ diff-sequences@^27.0.6: resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.0.6.tgz#3305cb2e55a033924054695cc66019fd7f8e5723" integrity sha512-ag6wfpBFyNXZ0p8pcuIDS//D8H062ZQJ3fzYxjpmeKjnz8W4pekL3AI8VohmyZmsWW2PWaHgjsmqR6L13101VQ== +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -5004,6 +5061,11 @@ make-dir@^3.0.0: dependencies: semver "^6.0.0" +make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + makeerror@1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" @@ -6587,6 +6649,24 @@ tr46@~0.0.3: resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= +ts-node@^10.4.0: + version "10.4.0" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.4.0.tgz#680f88945885f4e6cf450e7f0d6223dd404895f7" + integrity sha512-g0FlPvvCXSIO1JDF6S232P5jPYqBkRL9qly81ZgAOSU7rwI0stphCgd2kLiCrU9DjQCrJMWEqcNSjQL02s6d8A== + 
dependencies: + "@cspotcode/source-map-support" "0.7.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + yn "3.1.1" + tslib@^1.9.0, tslib@^1.9.3: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" @@ -6655,6 +6735,11 @@ typeof-article@^0.1.1: dependencies: kind-of "^3.1.0" +typescript@^4.5.5: + version "4.5.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3" + integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA== + uglify-js@^3.1.4: version "3.14.5" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.14.5.tgz#cdabb7d4954231d80cb4a927654c4655e51f4859" @@ -7011,6 +7096,11 @@ year@^0.2.1: resolved "https://registry.yarnpkg.com/year/-/year-0.2.1.tgz#4083ae520a318b23ec86037f3000cb892bdf9bb0" integrity sha1-QIOuUgoxiyPshgN/MADLiSvfm7A= +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + yup@0.29.2: version "0.29.2" resolved "https://registry.yarnpkg.com/yup/-/yup-0.29.2.tgz#5302abd9024cca335b987793f8df868e410b7b67" diff --git a/packages/server/src/api/controllers/table/internal.js b/packages/server/src/api/controllers/table/internal.js index 3f443bfd1d..4131f8bdcc 100644 --- a/packages/server/src/api/controllers/table/internal.js +++ b/packages/server/src/api/controllers/table/internal.js @@ -9,6 +9,7 @@ const { } = require("./utils") const usageQuota = require("../../../utilities/usageQuota") const { getAppDB } = require("@budibase/backend-core/context") +const env = require("../../../environment") exports.save = async function (ctx) { const db = getAppDB() @@ -128,12 +129,14 @@ exports.destroy = async function (ctx) { await db.remove(tableToDelete) // remove table search index - const currentIndexes = await db.getIndexes() - const existingIndex = currentIndexes.indexes.find( - existing => existing.name === `search:${ctx.params.tableId}` - ) - if (existingIndex) { - await db.deleteIndex(existingIndex) + if (!env.isTest()) { + const currentIndexes = await db.getIndexes() + const existingIndex = currentIndexes.indexes.find( + existing => existing.name === `search:${ctx.params.tableId}` + ) + if (existingIndex) { + await db.deleteIndex(existingIndex) + } } return tableToDelete diff --git a/packages/server/src/middleware/builder.js b/packages/server/src/middleware/builder.js index d2a8ee80f0..a6404780ff 100644 --- a/packages/server/src/middleware/builder.js +++ b/packages/server/src/middleware/builder.js @@ -5,7 +5,7 @@ const { checkDebounce, setDebounce, } = require("../utilities/redis") -const CouchDB = require("../db") +const { getDB } = require("@budibase/backend-core/db") const { DocumentTypes } = require("../db/utils") const { PermissionTypes } = require("@budibase/backend-core/permissions") const { app: appCache } = require("@budibase/backend-core/cache") @@ -48,7 +48,7 @@ async function updateAppUpdatedAt(ctx) { if (ctx.method === "GET" || (await checkDebounce(appId))) { return } - const db = new CouchDB(appId) + const db = getDB(appId) const metadata = await db.get(DocumentTypes.APP_METADATA) metadata.updatedAt = new 
Date().toISOString() const response = await db.put(metadata) diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js index 7a9c2f350c..904b4ced18 100644 --- a/packages/server/src/utilities/fileSystem/index.js +++ b/packages/server/src/utilities/fileSystem/index.js @@ -1,5 +1,4 @@ const { budibaseTempDir } = require("../budibaseDir") -const { isDev } = require("../index") const fs = require("fs") const { join } = require("path") const uuid = require("uuid/v4") @@ -52,7 +51,7 @@ exports.init = () => { * everything required to function is ready. */ exports.checkDevelopmentEnvironment = () => { - if (!isDev()) { + if (!env.isDev() || env.isTest()) { return } if (!fs.existsSync(budibaseTempDir())) { diff --git a/packages/worker/src/api/controllers/global/roles.js b/packages/worker/src/api/controllers/global/roles.js index ee55256f35..72fd42d452 100644 --- a/packages/worker/src/api/controllers/global/roles.js +++ b/packages/worker/src/api/controllers/global/roles.js @@ -4,7 +4,7 @@ const { getDeployedAppID, DocumentTypes, } = require("@budibase/backend-core/db") -const CouchDB = require("../../../db") +const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") exports.fetch = async ctx => { const tenantId = ctx.user.tenantId @@ -31,12 +31,14 @@ exports.fetch = async ctx => { exports.find = async ctx => { const appId = ctx.params.appId - const db = new CouchDB(appId) - const app = await db.get(DocumentTypes.APP_METADATA) - ctx.body = { - roles: await getAllRoles(appId), - name: app.name, - version: app.version, - url: app.url, - } + await doInAppContext(appId, async () => { + const db = getAppDB() + const app = await db.get(DocumentTypes.APP_METADATA) + ctx.body = { + roles: await getAllRoles(), + name: app.name, + version: app.version, + url: app.url, + } + }) } From 9b4f684f05dd37f0219a9e10b4a9d8d934332368 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 31 Jan 2022 11:51:52 +0000 Subject: [PATCH 5/9] Cypress setup, allowing option for using typescript locally. --- packages/builder/package.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/builder/package.json b/packages/builder/package.json index 3435402bac..17f737f89b 100644 --- a/packages/builder/package.json +++ b/packages/builder/package.json @@ -11,12 +11,13 @@ "dev:builder": "routify -c dev:vite", "dev:vite": "vite --host 0.0.0.0", "rollup": "rollup -c -w", - "cy:setup": "node ./cypress/setup.js", + "cy:setup": "ts-node ./cypress/ts/setup.ts", + "cy:setup:ci": "node ./cypress/setup.js", "cy:run": "cypress run", "cy:open": "cypress open", "cy:run:ci": "cypress run --record", "cy:test": "start-server-and-test cy:setup http://localhost:10001/builder cy:run", - "cy:ci": "start-server-and-test cy:setup http://localhost:10001/builder cy:run", + "cy:ci": "start-server-and-test cy:setup:ci http://localhost:10001/builder cy:run", "cy:debug": "start-server-and-test cy:setup http://localhost:10001/builder cy:open" }, "jest": { From 1095ad17c6c55ca253d94841bacbe70ec96d1b11 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 31 Jan 2022 17:27:47 +0000 Subject: [PATCH 6/9] Adding comments to areas that need to keep using old db creation method. 
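Both call sites receive an explicit app ID that may not match the app currently bound to the context (or they loop over several apps in one call), so getAppDB() is not an option there and the old constructor style has to stay. A minimal sketch of the shape being kept, assuming the require paths of usageQuota/rows.js and the (tableId, rowId, options) form of getRowParams shown in the hunk below; countRowsFor is a hypothetical helper, not part of the diff:

    const CouchDB = require("../../db")
    const { getRowParams } = require("../../db/utils")

    // quota syncing walks several app databases in a single call, so each one
    // must be opened with its explicit app ID rather than via getAppDB()
    async function countRowsFor(appId) {
      const appDb = new CouchDB(appId)
      const response = await appDb.allDocs(
        getRowParams(null, null, { include_docs: false })
      )
      return response.rows.length
    }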
--- packages/server/src/automations/utils.js | 3 +++ packages/server/src/utilities/usageQuota/rows.js | 1 + 2 files changed, 4 insertions(+) diff --git a/packages/server/src/automations/utils.js b/packages/server/src/automations/utils.js index 6c1d8b2fdf..1d8a03f26d 100644 --- a/packages/server/src/automations/utils.js +++ b/packages/server/src/automations/utils.js @@ -93,6 +93,9 @@ exports.enableCronTrigger = async (appId, automation) => { ) // Assign cron job ID from bull so we can remove it later if the cron trigger is removed trigger.cronJobId = job.id + // can't use getAppDB here as this is likely to be called from dev app, + // but this call could be for dev app or prod app, need to just use what + // was passed in const db = new CouchDB(appId) const response = await db.put(automation) automation._id = response.id diff --git a/packages/server/src/utilities/usageQuota/rows.js b/packages/server/src/utilities/usageQuota/rows.js index 67ad07410d..378caffc46 100644 --- a/packages/server/src/utilities/usageQuota/rows.js +++ b/packages/server/src/utilities/usageQuota/rows.js @@ -23,6 +23,7 @@ const getAppPairs = appIds => { } const getAppRows = async appId => { + // need to specify the app ID, as this is used for different apps in one call const appDb = new CouchDB(appId) const response = await appDb.allDocs( getRowParams(null, null, { From dbc048b365432f9aa38f6388e4c0b2b508241aa1 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 31 Jan 2022 17:42:51 +0000 Subject: [PATCH 7/9] Review comments, renaming deployed -> prod in terms of app IDs. --- packages/backend-core/src/db/conversions.js | 2 +- packages/backend-core/src/db/utils.js | 8 ++--- packages/backend-core/src/tenancy/context.js | 4 +-- .../server/src/api/controllers/application.js | 34 +++++++++++-------- .../src/api/controllers/deploy/index.js | 4 +-- packages/server/src/api/controllers/dev.js | 4 +-- packages/server/src/api/controllers/user.js | 4 +-- .../server/src/api/controllers/webhook.js | 8 ++--- packages/server/src/automations/utils.js | 4 +-- packages/server/src/utilities/global.js | 4 +-- .../src/utilities/rowProcessor/index.js | 4 +-- .../server/src/utilities/workerRequests.js | 6 ++-- .../src/api/controllers/global/roles.js | 4 +-- 13 files changed, 47 insertions(+), 43 deletions(-) diff --git a/packages/backend-core/src/db/conversions.js b/packages/backend-core/src/db/conversions.js index 766ec1ad06..50d896322f 100644 --- a/packages/backend-core/src/db/conversions.js +++ b/packages/backend-core/src/db/conversions.js @@ -25,7 +25,7 @@ exports.isDevApp = app => { /** * Convert a development app ID to a deployed app ID. 
*/ -exports.getDeployedAppID = appId => { +exports.getProdAppID = appId => { // if dev, convert it if (appId.startsWith(APP_DEV_PREFIX)) { const id = appId.split(APP_DEV_PREFIX)[1] diff --git a/packages/backend-core/src/db/utils.js b/packages/backend-core/src/db/utils.js index 7190a1221b..2800cf43c2 100644 --- a/packages/backend-core/src/db/utils.js +++ b/packages/backend-core/src/db/utils.js @@ -23,7 +23,7 @@ const { isProdAppID, isDevAppID, getDevelopmentAppID, - getDeployedAppID, + getProdAppID, } = require("./conversions") const UNICODE_MAX = "\ufff0" @@ -43,7 +43,7 @@ exports.isDevApp = isDevApp exports.isProdAppID = isProdAppID exports.isDevAppID = isDevAppID exports.getDevelopmentAppID = getDevelopmentAppID -exports.getDeployedAppID = getDeployedAppID +exports.getProdAppID = getProdAppID /** * If creating DB allDocs/query params with only a single top level ID this can be used, this @@ -197,7 +197,7 @@ exports.getAllDbs = async () => { } let couchUrl = `${exports.getCouchUrl()}/_all_dbs` let tenantId = getTenantId() - if (!env.MULTI_TENANCY || tenantId == DEFAULT_TENANT_ID) { + if (!env.MULTI_TENANCY || tenantId === DEFAULT_TENANT_ID) { // just get all DBs when: // - single tenancy // - default tenant @@ -281,7 +281,7 @@ exports.getAllApps = async ({ dev, all, idsOnly } = {}) => { /** * Utility function for getAllApps but filters to production apps only. */ -exports.getDeployedAppIDs = async () => { +exports.getProdAppIDs = async () => { return (await exports.getAllApps({ idsOnly: true })).filter( id => !exports.isDevAppID(id) ) diff --git a/packages/backend-core/src/tenancy/context.js b/packages/backend-core/src/tenancy/context.js index 3d2c862d71..1c1238278e 100644 --- a/packages/backend-core/src/tenancy/context.js +++ b/packages/backend-core/src/tenancy/context.js @@ -2,7 +2,7 @@ const env = require("../environment") const { Headers } = require("../../constants") const cls = require("./FunctionContext") const { getCouch } = require("../db") -const { getDeployedAppID, getDevelopmentAppID } = require("../db/conversions") +const { getProdAppID, getDevelopmentAppID } = require("../db/conversions") const { isEqual } = require("lodash") // some test cases call functions directly, need to @@ -150,7 +150,7 @@ function getDB(key, opts) { toUseAppId = appId break case ContextKeys.PROD_DB: - toUseAppId = getDeployedAppID(appId) + toUseAppId = getProdAppID(appId) break case ContextKeys.DEV_DB: toUseAppId = getDevelopmentAppID(appId) diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js index c16c54f1e3..00d3efccb8 100644 --- a/packages/server/src/api/controllers/application.js +++ b/packages/server/src/api/controllers/application.js @@ -28,7 +28,7 @@ const { processObject } = require("@budibase/string-templates") const { getAllApps, isDevAppID, - getDeployedAppID, + getProdAppID, Replication, } = require("@budibase/backend-core/db") const { USERS_TABLE_SCHEMA } = require("../../constants") @@ -44,13 +44,17 @@ const { getTenantId, isMultiTenant } = require("@budibase/backend-core/tenancy") const { syncGlobalUsers } = require("./user") const { app: appCache } = require("@budibase/backend-core/cache") const { cleanupAutomations } = require("../../automations/utils") -const context = require("@budibase/backend-core/context") +const { + getAppDB, + getProdAppDB, + updateAppId, +} = require("@budibase/backend-core/context") const URL_REGEX_SLASH = /\/|\\/g // utility function, need to do away with this async function getLayouts() { - 
const db = context.getAppDB() + const db = getAppDB() return ( await db.allDocs( getLayoutParams(null, { @@ -61,7 +65,7 @@ async function getLayouts() { } async function getScreens() { - const db = context.getAppDB() + const db = getAppDB() return ( await db.allDocs( getScreenParams(null, { @@ -119,9 +123,9 @@ async function createInstance(template) { const tenantId = isMultiTenant() ? getTenantId() : null const baseAppId = generateAppID(tenantId) const appId = generateDevAppID(baseAppId) - context.updateAppId(appId) + updateAppId(appId) - const db = context.getAppDB() + const db = getAppDB() await db.put({ _id: "_design/database", // view collation information, read before writing any complex views: @@ -197,7 +201,7 @@ exports.fetchAppDefinition = async ctx => { } exports.fetchAppPackage = async ctx => { - const db = context.getAppDB() + const db = getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) const layouts = await getLayouts() let screens = await getScreens() @@ -236,7 +240,7 @@ exports.create = async ctx => { const instance = await createInstance(instanceConfig) const appId = instance._id - const db = context.getAppDB() + const db = getAppDB() let _rev try { // if template there will be an existing doc @@ -301,7 +305,7 @@ exports.update = async ctx => { exports.updateClient = async ctx => { // Get current app version - const db = context.getAppDB() + const db = getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) const currentVersion = application.version @@ -323,7 +327,7 @@ exports.updateClient = async ctx => { exports.revertClient = async ctx => { // Check app can be reverted - const db = context.getAppDB() + const db = getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) if (!application.revertableVersion) { ctx.throw(400, "There is no version to revert to") @@ -345,7 +349,7 @@ exports.revertClient = async ctx => { } exports.delete = async ctx => { - const db = context.getAppDB() + const db = getAppDB() const result = await db.destroy() /* istanbul ignore next */ @@ -370,11 +374,11 @@ exports.sync = async (ctx, next) => { } // replicate prod to dev - const prodAppId = getDeployedAppID(appId) + const prodAppId = getProdAppID(appId) try { // specific case, want to make sure setup is skipped - const prodDb = context.getProdAppDB({ skip_setup: true }) + const prodDb = getProdAppDB({ skip_setup: true }) const info = await prodDb.info() if (info.error) throw info.error } catch (err) { @@ -414,7 +418,7 @@ exports.sync = async (ctx, next) => { } const updateAppPackage = async (appPackage, appId) => { - const db = context.getAppDB() + const db = getAppDB() const application = await db.get(DocumentTypes.APP_METADATA) const newAppPackage = { ...application, ...appPackage } @@ -433,7 +437,7 @@ const updateAppPackage = async (appPackage, appId) => { } const createEmptyAppPackage = async (ctx, app) => { - const db = context.getAppDB() + const db = getAppDB() let screensAndLayouts = [] for (let layout of BASE_LAYOUTS) { diff --git a/packages/server/src/api/controllers/deploy/index.js b/packages/server/src/api/controllers/deploy/index.js index 22c7d5ce3a..4186a192a4 100644 --- a/packages/server/src/api/controllers/deploy/index.js +++ b/packages/server/src/api/controllers/deploy/index.js @@ -1,7 +1,7 @@ const Deployment = require("./Deployment") const { Replication, - getDeployedAppID, + getProdAppID, getDevelopmentAppID, } = require("@budibase/backend-core/db") const { DocumentTypes, getAutomationParams } = require("../../../db/utils") 
@@ -97,7 +97,7 @@ async function deployApp(deployment) { try { const appId = getAppId() const devAppId = getDevelopmentAppID(appId) - const productionAppId = getDeployedAppID(appId) + const productionAppId = getProdAppID(appId) const replication = new Replication({ source: devAppId, diff --git a/packages/server/src/api/controllers/dev.js b/packages/server/src/api/controllers/dev.js index a27fab9a83..bec9478245 100644 --- a/packages/server/src/api/controllers/dev.js +++ b/packages/server/src/api/controllers/dev.js @@ -3,7 +3,7 @@ const env = require("../../environment") const { checkSlashesInUrl } = require("../../utilities") const { request } = require("../../utilities/workerRequests") const { clearLock } = require("../../utilities/redis") -const { Replication, getDeployedAppID } = require("@budibase/backend-core/db") +const { Replication, getProdAppID } = require("@budibase/backend-core/db") const { DocumentTypes } = require("../../db/utils") const { app: appCache } = require("@budibase/backend-core/cache") const { getProdAppDB, getAppDB } = require("@budibase/backend-core/context") @@ -77,7 +77,7 @@ exports.clearLock = async ctx => { exports.revert = async ctx => { const { appId } = ctx.params - const productionAppId = getDeployedAppID(appId) + const productionAppId = getProdAppID(appId) // App must have been deployed first try { diff --git a/packages/server/src/api/controllers/user.js b/packages/server/src/api/controllers/user.js index 208d3a60a3..ca7ef24162 100644 --- a/packages/server/src/api/controllers/user.js +++ b/packages/server/src/api/controllers/user.js @@ -10,7 +10,7 @@ const { isEqual } = require("lodash") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") const { getDevelopmentAppID, - getDeployedAppIDs, + getProdAppIDs, dbExists, } = require("@budibase/backend-core/db") const { UserStatus } = require("@budibase/backend-core/constants") @@ -92,7 +92,7 @@ exports.syncUser = async function (ctx) { let prodAppIds // if they are a builder then get all production app IDs if ((user.builder && user.builder.global) || deleting) { - prodAppIds = await getDeployedAppIDs() + prodAppIds = await getProdAppIDs() } else { prodAppIds = Object.entries(roles) .filter(entry => entry[1] !== BUILTIN_ROLE_IDS.PUBLIC) diff --git a/packages/server/src/api/controllers/webhook.js b/packages/server/src/api/controllers/webhook.js index 7375b3e750..49ab652cbf 100644 --- a/packages/server/src/api/controllers/webhook.js +++ b/packages/server/src/api/controllers/webhook.js @@ -2,7 +2,7 @@ const { generateWebhookID, getWebhookParams } = require("../../db/utils") const toJsonSchema = require("to-json-schema") const validate = require("jsonschema").validate const triggers = require("../../automations/triggers") -const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getProdAppID } = require("@budibase/backend-core/db") const { getAppDB, updateAppId } = require("@budibase/backend-core/context") const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema" @@ -82,8 +82,8 @@ exports.buildSchema = async ctx => { } exports.trigger = async ctx => { - const deployedAppId = getDeployedAppID(ctx.params.instance) - updateAppId(deployedAppId) + const prodAppId = getProdAppID(ctx.params.instance) + updateAppId(prodAppId) try { const db = getAppDB() const webhook = await db.get(ctx.params.id) @@ -98,7 +98,7 @@ exports.trigger = async ctx => { await triggers.externalTrigger(target, { body: ctx.request.body, ...ctx.request.body, - appId: deployedAppId, + appId: prodAppId, }) } 
ctx.status = 200 diff --git a/packages/server/src/automations/utils.js b/packages/server/src/automations/utils.js index 1d8a03f26d..3ee1f535c7 100644 --- a/packages/server/src/automations/utils.js +++ b/packages/server/src/automations/utils.js @@ -6,7 +6,7 @@ const { queue } = require("./bullboard") const newid = require("../db/newid") const { updateEntityMetadata } = require("../utilities") const { MetadataTypes } = require("../constants") -const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getProdAppID } = require("@budibase/backend-core/db") const { cloneDeep } = require("lodash/fp") const { getAppDB, getAppId } = require("@budibase/backend-core/context") @@ -170,7 +170,7 @@ exports.checkForWebhooks = async ({ oldAuto, newAuto }) => { // the app ID has to be development for this endpoint // it can only be used when building the app // but the trigger endpoint will always be used in production - const prodAppId = getDeployedAppID(appId) + const prodAppId = getProdAppID(appId) newTrigger.inputs = { schemaUrl: `api/webhooks/schema/${appId}/${id}`, triggerUrl: `api/webhooks/trigger/${prodAppId}/${id}`, diff --git a/packages/server/src/utilities/global.js b/packages/server/src/utilities/global.js index 317d80689a..f8ec5ea647 100644 --- a/packages/server/src/utilities/global.js +++ b/packages/server/src/utilities/global.js @@ -3,7 +3,7 @@ const { getGlobalIDFromUserMetadataID, } = require("../db/utils") const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles") -const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getProdAppID } = require("@budibase/backend-core/db") const { getGlobalUserParams } = require("@budibase/backend-core/db") const { user: userCache } = require("@budibase/backend-core/cache") const { @@ -26,7 +26,7 @@ exports.updateAppRole = (user, { appId } = {}) => { return user } // always use the deployed app - user.roleId = user.roles[getDeployedAppID(appId)] + user.roleId = user.roles[getProdAppID(appId)] // if a role wasn't found then either set as admin (builder) or public (everyone else) if (!user.roleId && user.builder && user.builder.global) { user.roleId = BUILTIN_ROLE_IDS.ADMIN diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js index ab1a4bf92a..18e0b14de6 100644 --- a/packages/server/src/utilities/rowProcessor/index.js +++ b/packages/server/src/utilities/rowProcessor/index.js @@ -7,7 +7,7 @@ const { deleteFiles } = require("../../utilities/fileSystem/utilities") const { ObjectStoreBuckets } = require("../../constants") const { isProdAppID, - getDeployedAppID, + getProdAppID, dbExists, } = require("@budibase/backend-core/db") const { getAppId } = require("@budibase/backend-core/context") @@ -303,7 +303,7 @@ exports.outputProcessing = async (table, rows, opts = { squash: true }) => { exports.cleanupAttachments = async (table, { row, rows, oldRow, oldTable }) => { const appId = getAppId() if (!isProdAppID(appId)) { - const prodAppId = getDeployedAppID(appId) + const prodAppId = getProdAppID(appId) // if prod exists, then don't allow deleting const exists = await dbExists(prodAppId) if (exists) { diff --git a/packages/server/src/utilities/workerRequests.js b/packages/server/src/utilities/workerRequests.js index a7fa92b295..91db63d2a4 100644 --- a/packages/server/src/utilities/workerRequests.js +++ b/packages/server/src/utilities/workerRequests.js @@ -1,7 +1,7 @@ const fetch = require("node-fetch") const env = require("../environment") const { 
checkSlashesInUrl } = require("./index") -const { getDeployedAppID } = require("@budibase/backend-core/db") +const { getProdAppID } = require("@budibase/backend-core/db") const { updateAppRole } = require("./global") const { Headers } = require("@budibase/backend-core/constants") const { getTenantId, isTenantIdSet } = require("@budibase/backend-core/tenancy") @@ -76,9 +76,9 @@ exports.getGlobalSelf = async (ctx, appId = null) => { } exports.removeAppFromUserRoles = async (ctx, appId) => { - const deployedAppId = getDeployedAppID(appId) + const prodAppId = getProdAppID(appId) const response = await fetch( - checkSlashesInUrl(env.WORKER_URL + `/api/global/roles/${deployedAppId}`), + checkSlashesInUrl(env.WORKER_URL + `/api/global/roles/${prodAppId}`), request(ctx, { method: "DELETE", }) diff --git a/packages/worker/src/api/controllers/global/roles.js b/packages/worker/src/api/controllers/global/roles.js index 72fd42d452..96de0e4753 100644 --- a/packages/worker/src/api/controllers/global/roles.js +++ b/packages/worker/src/api/controllers/global/roles.js @@ -1,7 +1,7 @@ const { getAllRoles } = require("@budibase/backend-core/roles") const { getAllApps, - getDeployedAppID, + getProdAppID, DocumentTypes, } = require("@budibase/backend-core/db") const { doInAppContext, getAppDB } = require("@budibase/backend-core/context") @@ -18,7 +18,7 @@ exports.fetch = async ctx => { const roles = await Promise.all(promises) const response = {} for (let app of apps) { - const deployedAppId = getDeployedAppID(app.appId) + const deployedAppId = getProdAppID(app.appId) response[deployedAppId] = { roles: roles.shift(), name: app.name, From 72534c6ed20559efb4d2d70cda140d5421759eb2 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 31 Jan 2022 17:53:19 +0000 Subject: [PATCH 8/9] Refactoring backend-core tenancy to context, splitting out tenancy functionality to its own area. 
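After this split the app/context helpers live under src/context while the tenant helpers stay under src/tenancy, and the tenancy barrel keeps re-exporting the context module so existing imports continue to work. A quick illustration from consumer code (not part of this diff) of the two equivalent entry points:

    // either path should resolve to the same context implementation after the refactor
    const { getAppDB, updateAppId } = require("@budibase/backend-core/context")
    const { getAppDB: viaTenancy, getGlobalDB } = require("@budibase/backend-core/tenancy")

    // expected to log true, since src/tenancy/index.js spreads the context exports
    console.log(getAppDB === viaTenancy)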
--- packages/backend-core/context.js | 2 +- packages/backend-core/deprovision.js | 2 +- .../backend-core/src/{tenancy => context}/FunctionContext.js | 0 packages/backend-core/src/{tenancy => context}/deprovision.js | 2 +- .../backend-core/src/{tenancy/context.js => context/index.js} | 0 packages/backend-core/src/middleware/appTenancy.js | 2 +- packages/backend-core/src/middleware/tenancy.js | 2 +- packages/backend-core/src/security/roles.js | 2 +- packages/backend-core/src/tenancy/index.js | 2 +- packages/backend-core/src/tenancy/tenancy.js | 2 +- 10 files changed, 8 insertions(+), 8 deletions(-) rename packages/backend-core/src/{tenancy => context}/FunctionContext.js (100%) rename packages/backend-core/src/{tenancy => context}/deprovision.js (98%) rename packages/backend-core/src/{tenancy/context.js => context/index.js} (100%) diff --git a/packages/backend-core/context.js b/packages/backend-core/context.js index 5cf9642392..4bc100687d 100644 --- a/packages/backend-core/context.js +++ b/packages/backend-core/context.js @@ -5,7 +5,7 @@ const { getAppId, updateAppId, doInAppContext, -} = require("./src/tenancy/context") +} = require("./src/context") module.exports = { getAppDB, diff --git a/packages/backend-core/deprovision.js b/packages/backend-core/deprovision.js index b4b8dc6110..672da214ff 100644 --- a/packages/backend-core/deprovision.js +++ b/packages/backend-core/deprovision.js @@ -1 +1 @@ -module.exports = require("./src/tenancy/deprovision") +module.exports = require("./src/context/deprovision") diff --git a/packages/backend-core/src/tenancy/FunctionContext.js b/packages/backend-core/src/context/FunctionContext.js similarity index 100% rename from packages/backend-core/src/tenancy/FunctionContext.js rename to packages/backend-core/src/context/FunctionContext.js diff --git a/packages/backend-core/src/tenancy/deprovision.js b/packages/backend-core/src/context/deprovision.js similarity index 98% rename from packages/backend-core/src/tenancy/deprovision.js rename to packages/backend-core/src/context/deprovision.js index 608ca1b84a..1fbc2c8398 100644 --- a/packages/backend-core/src/tenancy/deprovision.js +++ b/packages/backend-core/src/context/deprovision.js @@ -1,6 +1,6 @@ const { getGlobalUserParams, getAllApps } = require("../db/utils") const { getDB, getCouch } = require("../db") -const { getGlobalDB } = require("./tenancy") +const { getGlobalDB } = require("../tenancy") const { StaticDatabases } = require("../db/constants") const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants diff --git a/packages/backend-core/src/tenancy/context.js b/packages/backend-core/src/context/index.js similarity index 100% rename from packages/backend-core/src/tenancy/context.js rename to packages/backend-core/src/context/index.js diff --git a/packages/backend-core/src/middleware/appTenancy.js b/packages/backend-core/src/middleware/appTenancy.js index 60d7448af2..b0430a0051 100644 --- a/packages/backend-core/src/middleware/appTenancy.js +++ b/packages/backend-core/src/middleware/appTenancy.js @@ -5,7 +5,7 @@ const { DEFAULT_TENANT_ID, updateAppId, } = require("../tenancy") -const ContextFactory = require("../tenancy/FunctionContext") +const ContextFactory = require("../context/FunctionContext") const { getTenantIDFromAppID } = require("../db/utils") module.exports = () => { diff --git a/packages/backend-core/src/middleware/tenancy.js b/packages/backend-core/src/middleware/tenancy.js index adfd36a503..5bb81f8824 100644 --- a/packages/backend-core/src/middleware/tenancy.js +++ 
b/packages/backend-core/src/middleware/tenancy.js @@ -1,5 +1,5 @@ const { setTenantId } = require("../tenancy") -const ContextFactory = require("../tenancy/FunctionContext") +const ContextFactory = require("../context/FunctionContext") const { buildMatcherRegex, matches } = require("./matchers") module.exports = ( diff --git a/packages/backend-core/src/security/roles.js b/packages/backend-core/src/security/roles.js index ca61ab41c6..82bfbd5212 100644 --- a/packages/backend-core/src/security/roles.js +++ b/packages/backend-core/src/security/roles.js @@ -6,7 +6,7 @@ const { DocumentTypes, SEPARATOR, } = require("../db/utils") -const { getAppDB } = require("../tenancy/context") +const { getAppDB } = require("../context") const { getDB } = require("../db") const BUILTIN_IDS = { diff --git a/packages/backend-core/src/tenancy/index.js b/packages/backend-core/src/tenancy/index.js index 2fe257d885..c847033a12 100644 --- a/packages/backend-core/src/tenancy/index.js +++ b/packages/backend-core/src/tenancy/index.js @@ -1,4 +1,4 @@ module.exports = { - ...require("./context"), + ...require("../context"), ...require("./tenancy"), } diff --git a/packages/backend-core/src/tenancy/tenancy.js b/packages/backend-core/src/tenancy/tenancy.js index de597eac01..8360198b60 100644 --- a/packages/backend-core/src/tenancy/tenancy.js +++ b/packages/backend-core/src/tenancy/tenancy.js @@ -1,6 +1,6 @@ const { getDB } = require("../db") const { SEPARATOR, StaticDatabases, DocumentTypes } = require("../db/constants") -const { getTenantId, DEFAULT_TENANT_ID, isMultiTenant } = require("./context") +const { getTenantId, DEFAULT_TENANT_ID, isMultiTenant } = require("../context") const env = require("../environment") const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants From b847b578c78128ad893c8980bff6abf672706789 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 1 Feb 2022 16:02:44 +0000 Subject: [PATCH 9/9] Making sure worker runs through ts-node in cypress, locally, then built in CI. --- packages/builder/cypress/setup.js | 2 +- packages/builder/cypress/ts/setup.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/builder/cypress/setup.js b/packages/builder/cypress/setup.js index 07b3cd45c5..0f3d333cb7 100644 --- a/packages/builder/cypress/setup.js +++ b/packages/builder/cypress/setup.js @@ -28,7 +28,7 @@ process.env.LOG_LEVEL = "error" exports.run = ( serverLoc = "../../server/dist", - workerLoc = "../../worker/src/index" + workerLoc = "../../worker/dist" ) => { // require("dotenv").config({ path: resolve(dir, ".env") }) // don't make this a variable or top level require diff --git a/packages/builder/cypress/ts/setup.ts b/packages/builder/cypress/ts/setup.ts index 44d739182c..b6b12bf730 100644 --- a/packages/builder/cypress/ts/setup.ts +++ b/packages/builder/cypress/ts/setup.ts @@ -1,4 +1,4 @@ // @ts-ignore import { run } from "../setup" -run("../../server/src/index") +run("../../server/src/index", "../../worker/src/index")