Initial version of memory leak protection, making sure that PouchDB databases are closed correctly after use. This combines closures wrapping DB access (doWithDB replaces getDB, leaving only a dangerousGetDB function for use in very specific scenarios) with closing the DB when CLS-hooked functions finish. Also moves the global DB init into the tenancy middleware, since it is used everywhere in the worker/app services - this means that not every getGlobalDB call needs an async closure around it.
parent 41144db055
commit 192fb1307e
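Note: the core primitive introduced by this change is doWithDB(dbName, callback, opts), which opens a PouchDB handle, runs the callback against it, and closes the handle once the callback resolves, so call sites no longer have to close databases themselves. Below is a minimal sketch of a migrated call site, assuming only the @budibase/backend-core/db exports shown in the diff; the fetchAppMetadata name is illustrative, not part of this commit.

const { doWithDB, DocumentTypes } = require("@budibase/backend-core/db")

// Before: the handle was created with getDB and leaked unless the caller
// remembered to close it.
// const db = getDB(appId, { skip_setup: true })
// return db.get(DocumentTypes.APP_METADATA)

// After: doWithDB opens the database, awaits the callback and then closes
// the handle before returning the callback's result.
async function fetchAppMetadata(appId) {
  return doWithDB(
    appId,
    async db => db.get(DocumentTypes.APP_METADATA),
    { skip_setup: true }
  )
}

Request-scoped databases (the global DB set up by the tenancy middleware) are handled differently: FunctionContext.getMiddleware now accepts a destroyFn, which closes them when the CLS-wrapped request finishes.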
@ -1,5 +1,5 @@
|
|||
const redis = require("../redis/authRedis")
|
||||
const { getDB } = require("../db")
|
||||
const { doWithDB } = require("../db")
|
||||
const { DocumentTypes } = require("../db/constants")
|
||||
|
||||
const AppState = {
|
||||
|
@ -11,8 +11,13 @@ const EXPIRY_SECONDS = 3600
|
|||
* The default populate app metadata function
|
||||
*/
|
||||
const populateFromDB = async appId => {
|
||||
const db = getDB(appId, { skip_setup: true })
|
||||
return db.get(DocumentTypes.APP_METADATA)
|
||||
return doWithDB(
|
||||
appId,
|
||||
db => {
|
||||
return db.get(DocumentTypes.APP_METADATA)
|
||||
},
|
||||
{ skip_setup: true }
|
||||
)
|
||||
}
|
||||
|
||||
const isInvalid = metadata => {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
const redis = require("../redis/authRedis")
|
||||
const { getTenantId, lookupTenantId, getGlobalDB } = require("../tenancy")
|
||||
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
|
||||
const env = require("../environment")
|
||||
const accounts = require("../cloud/accounts")
|
||||
|
||||
|
@ -9,9 +9,8 @@ const EXPIRY_SECONDS = 3600
|
|||
* The default populate user function
|
||||
*/
|
||||
const populateFromDB = async (userId, tenantId) => {
|
||||
const user = await getGlobalDB(tenantId).get(userId)
|
||||
const user = await doWithGlobalDB(tenantId, db => db.get(userId))
|
||||
user.budibaseAccess = true
|
||||
|
||||
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
|
||||
const account = await accounts.getAccount(user.email)
|
||||
if (account) {
|
||||
|
|
|
@ -4,7 +4,11 @@ const { newid } = require("../hashing")
|
|||
const REQUEST_ID_KEY = "requestId"
|
||||
|
||||
class FunctionContext {
|
||||
static getMiddleware(updateCtxFn = null, contextName = "session") {
|
||||
static getMiddleware(
|
||||
updateCtxFn = null,
|
||||
destroyFn = null,
|
||||
contextName = "session"
|
||||
) {
|
||||
const namespace = this.createNamespace(contextName)
|
||||
|
||||
return async function (ctx, next) {
|
||||
|
@ -18,7 +22,14 @@ class FunctionContext {
|
|||
if (updateCtxFn) {
|
||||
updateCtxFn(ctx)
|
||||
}
|
||||
next().then(resolve).catch(reject)
|
||||
next()
|
||||
.then(resolve)
|
||||
.catch(reject)
|
||||
.finally(() => {
|
||||
if (destroyFn) {
|
||||
return destroyFn(ctx)
|
||||
}
|
||||
})
|
||||
})
|
||||
)
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
const { getGlobalUserParams, getAllApps } = require("../db/utils")
|
||||
const { getDB } = require("../db")
|
||||
const { getGlobalDB } = require("../tenancy")
|
||||
const { doWithDB } = require("../db")
|
||||
const { doWithGlobalDB } = require("../tenancy")
|
||||
const { StaticDatabases } = require("../db/constants")
|
||||
|
||||
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
|
||||
|
@ -8,11 +8,12 @@ const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
|
|||
|
||||
const removeTenantFromInfoDB = async tenantId => {
|
||||
try {
|
||||
const infoDb = getDB(PLATFORM_INFO_DB)
|
||||
let tenants = await infoDb.get(TENANT_DOC)
|
||||
tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
|
||||
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
|
||||
let tenants = await infoDb.get(TENANT_DOC)
|
||||
tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
|
||||
|
||||
await infoDb.put(tenants)
|
||||
await infoDb.put(tenants)
|
||||
})
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} from info db`, err)
|
||||
throw err
|
||||
|
@ -20,36 +21,8 @@ const removeTenantFromInfoDB = async tenantId => {
|
|||
}
|
||||
|
||||
exports.removeUserFromInfoDB = async dbUser => {
|
||||
const infoDb = getDB(PLATFORM_INFO_DB)
|
||||
const keys = [dbUser._id, dbUser.email]
|
||||
const userDocs = await infoDb.allDocs({
|
||||
keys,
|
||||
include_docs: true,
|
||||
})
|
||||
const toDelete = userDocs.rows.map(row => {
|
||||
return {
|
||||
...row.doc,
|
||||
_deleted: true,
|
||||
}
|
||||
})
|
||||
await infoDb.bulkDocs(toDelete)
|
||||
}
|
||||
|
||||
const removeUsersFromInfoDB = async tenantId => {
|
||||
try {
|
||||
const globalDb = getGlobalDB(tenantId)
|
||||
const infoDb = getDB(PLATFORM_INFO_DB)
|
||||
const allUsers = await globalDb.allDocs(
|
||||
getGlobalUserParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
const allEmails = allUsers.rows.map(row => row.doc.email)
|
||||
// get the id docs
|
||||
let keys = allUsers.rows.map(row => row.id)
|
||||
// and the email docs
|
||||
keys = keys.concat(allEmails)
|
||||
// retrieve the docs and delete them
|
||||
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
|
||||
const keys = [dbUser._id, dbUser.email]
|
||||
const userDocs = await infoDb.allDocs({
|
||||
keys,
|
||||
include_docs: true,
|
||||
|
@ -61,26 +34,60 @@ const removeUsersFromInfoDB = async tenantId => {
|
|||
}
|
||||
})
|
||||
await infoDb.bulkDocs(toDelete)
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} users from info db`, err)
|
||||
throw err
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const removeUsersFromInfoDB = async tenantId => {
|
||||
return doWithGlobalDB(tenantId, async db => {
|
||||
try {
|
||||
const allUsers = await db.allDocs(
|
||||
getGlobalUserParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
|
||||
const allEmails = allUsers.rows.map(row => row.doc.email)
|
||||
// get the id docs
|
||||
let keys = allUsers.rows.map(row => row.id)
|
||||
// and the email docs
|
||||
keys = keys.concat(allEmails)
|
||||
// retrieve the docs and delete them
|
||||
const userDocs = await infoDb.allDocs({
|
||||
keys,
|
||||
include_docs: true,
|
||||
})
|
||||
const toDelete = userDocs.rows.map(row => {
|
||||
return {
|
||||
...row.doc,
|
||||
_deleted: true,
|
||||
}
|
||||
})
|
||||
await infoDb.bulkDocs(toDelete)
|
||||
})
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} users from info db`, err)
|
||||
throw err
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const removeGlobalDB = async tenantId => {
|
||||
try {
|
||||
const globalDb = getGlobalDB(tenantId)
|
||||
await globalDb.destroy()
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} users from info db`, err)
|
||||
throw err
|
||||
}
|
||||
return doWithGlobalDB(tenantId, async db => {
|
||||
try {
|
||||
await db.destroy()
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} users from info db`, err)
|
||||
throw err
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const removeTenantApps = async tenantId => {
|
||||
try {
|
||||
const apps = await getAllApps({ all: true })
|
||||
const destroyPromises = apps.map(app => getDB(app.appId).destroy())
|
||||
const destroyPromises = apps.map(app =>
|
||||
doWithDB(app.appId, db => db.destroy())
|
||||
)
|
||||
await Promise.allSettled(destroyPromises)
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} apps`, err)
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
const env = require("../environment")
|
||||
const { Headers } = require("../../constants")
|
||||
const { SEPARATOR, DocumentTypes } = require("../db/constants")
|
||||
const { DEFAULT_TENANT_ID } = require("../constants")
|
||||
const cls = require("./FunctionContext")
|
||||
const { getDB } = require("../db")
|
||||
const { dangerousGetDB } = require("../db")
|
||||
const { getProdAppID, getDevelopmentAppID } = require("../db/conversions")
|
||||
const { baseGlobalDBName } = require("../tenancy/utils")
|
||||
const { isEqual } = require("lodash")
|
||||
|
||||
// some test cases call functions directly, need to
|
||||
|
@ -12,6 +14,7 @@ let TEST_APP_ID = null
|
|||
|
||||
const ContextKeys = {
|
||||
TENANT_ID: "tenantId",
|
||||
GLOBAL_DB: "globalDb",
|
||||
APP_ID: "appId",
|
||||
// whatever the request app DB was
|
||||
CURRENT_DB: "currentDb",
|
||||
|
@ -22,7 +25,28 @@ const ContextKeys = {
|
|||
DB_OPTS: "dbOpts",
|
||||
}
|
||||
|
||||
exports.DEFAULT_TENANT_ID = "default"
|
||||
exports.DEFAULT_TENANT_ID = DEFAULT_TENANT_ID
|
||||
|
||||
// this function makes sure the PouchDB objects are closed and
|
||||
// fully deleted when finished - this protects against memory leaks
|
||||
async function closeAppDBs() {
|
||||
const dbKeys = [
|
||||
ContextKeys.CURRENT_DB,
|
||||
ContextKeys.PROD_DB,
|
||||
ContextKeys.DEV_DB,
|
||||
]
|
||||
for (let dbKey of dbKeys) {
|
||||
const db = cls.getFromContext(dbKey)
|
||||
if (!db) {
|
||||
continue
|
||||
}
|
||||
try {
|
||||
await db.close()
|
||||
} catch (err) {
|
||||
// ignore error, it's most likely already closed
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.isDefaultTenant = () => {
|
||||
return exports.getTenantId() === exports.DEFAULT_TENANT_ID
|
||||
|
@ -34,13 +58,29 @@ exports.isMultiTenant = () => {
|
|||
|
||||
// used for automations, API endpoints should always be in context already
|
||||
exports.doInTenant = (tenantId, task) => {
|
||||
return cls.run(() => {
|
||||
// the internal function is so that we can re-use an existing
|
||||
// context - don't want to close DB on a parent context
|
||||
async function internal(opts = { existing: false }) {
|
||||
// set the tenant id
|
||||
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
|
||||
if (!opts.existing) {
|
||||
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
|
||||
exports.setGlobalDB(tenantId)
|
||||
}
|
||||
|
||||
// invoke the task
|
||||
return task()
|
||||
})
|
||||
const response = await task()
|
||||
if (!opts.existing) {
|
||||
await exports.getGlobalDB().close()
|
||||
}
|
||||
return response
|
||||
}
|
||||
if (cls.getFromContext(ContextKeys.TENANT_ID) === tenantId) {
|
||||
return internal({ existing: true })
|
||||
} else {
|
||||
return cls.run(async () => {
|
||||
return internal()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -64,24 +104,38 @@ exports.getTenantIDFromAppID = appId => {
|
|||
}
|
||||
|
||||
const setAppTenantId = appId => {
|
||||
const appTenantId = this.getTenantIDFromAppID(appId) || this.DEFAULT_TENANT_ID
|
||||
this.updateTenantId(appTenantId)
|
||||
const appTenantId =
|
||||
exports.getTenantIDFromAppID(appId) || exports.DEFAULT_TENANT_ID
|
||||
exports.updateTenantId(appTenantId)
|
||||
}
|
||||
|
||||
exports.doInAppContext = (appId, task) => {
|
||||
if (!appId) {
|
||||
throw new Error("appId is required")
|
||||
}
|
||||
return cls.run(() => {
|
||||
// the internal function is so that we can re-use an existing
|
||||
// context - don't want to close DB on a parent context
|
||||
async function internal(opts = { existing: false }) {
|
||||
// set the app tenant id
|
||||
setAppTenantId(appId)
|
||||
|
||||
if (!opts.existing) {
|
||||
setAppTenantId(appId)
|
||||
}
|
||||
// set the app ID
|
||||
cls.setOnContext(ContextKeys.APP_ID, appId)
|
||||
|
||||
// invoke the task
|
||||
return task()
|
||||
})
|
||||
const response = await task()
|
||||
if (!opts.existing) {
|
||||
await closeAppDBs()
|
||||
}
|
||||
return response
|
||||
}
|
||||
if (appId === cls.getFromContext(ContextKeys.APP_ID)) {
|
||||
return internal({ existing: true })
|
||||
} else {
|
||||
return cls.run(async () => {
|
||||
return internal()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
exports.updateTenantId = tenantId => {
|
||||
|
@ -90,11 +144,13 @@ exports.updateTenantId = tenantId => {
|
|||
|
||||
exports.updateAppId = appId => {
|
||||
try {
|
||||
const promise = closeAppDBs()
|
||||
cls.setOnContext(ContextKeys.APP_ID, appId)
|
||||
cls.setOnContext(ContextKeys.PROD_DB, null)
|
||||
cls.setOnContext(ContextKeys.DEV_DB, null)
|
||||
cls.setOnContext(ContextKeys.CURRENT_DB, null)
|
||||
cls.setOnContext(ContextKeys.DB_OPTS, null)
|
||||
return promise
|
||||
} catch (err) {
|
||||
if (env.isTest()) {
|
||||
TEST_APP_ID = appId
|
||||
|
@ -111,8 +167,8 @@ exports.setTenantId = (
|
|||
let tenantId
|
||||
// exit early if not multi-tenant
|
||||
if (!exports.isMultiTenant()) {
|
||||
cls.setOnContext(ContextKeys.TENANT_ID, this.DEFAULT_TENANT_ID)
|
||||
return
|
||||
cls.setOnContext(ContextKeys.TENANT_ID, exports.DEFAULT_TENANT_ID)
|
||||
return exports.DEFAULT_TENANT_ID
|
||||
}
|
||||
|
||||
const allowQs = opts && opts.allowQs
|
||||
|
@ -140,6 +196,22 @@ exports.setTenantId = (
|
|||
if (tenantId) {
|
||||
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
|
||||
}
|
||||
return tenantId
|
||||
}
|
||||
|
||||
exports.setGlobalDB = tenantId => {
|
||||
const dbName = baseGlobalDBName(tenantId)
|
||||
const db = dangerousGetDB(dbName)
|
||||
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
|
||||
return db
|
||||
}
|
||||
|
||||
exports.getGlobalDB = () => {
|
||||
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
|
||||
if (!db) {
|
||||
throw new Error("Global DB not found")
|
||||
}
|
||||
return db
|
||||
}
|
||||
|
||||
exports.isTenantIdSet = () => {
|
||||
|
@ -187,7 +259,7 @@ function getContextDB(key, opts) {
|
|||
toUseAppId = getDevelopmentAppID(appId)
|
||||
break
|
||||
}
|
||||
db = getDB(toUseAppId, opts)
|
||||
db = dangerousGetDB(toUseAppId, opts)
|
||||
try {
|
||||
cls.setOnContext(key, db)
|
||||
if (opts) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const { getDB } = require(".")
|
||||
const { dangerousGetDB } = require(".")
|
||||
|
||||
class Replication {
|
||||
/**
|
||||
|
@ -7,8 +7,8 @@ class Replication {
|
|||
* @param {String} target - the DB you want to replicate to, or rollback from
|
||||
*/
|
||||
constructor({ source, target }) {
|
||||
this.source = getDB(source)
|
||||
this.target = getDB(target)
|
||||
this.source = dangerousGetDB(source)
|
||||
this.target = dangerousGetDB(target)
|
||||
}
|
||||
|
||||
promisify(operation, opts = {}) {
|
||||
|
@ -51,7 +51,7 @@ class Replication {
|
|||
async rollback() {
|
||||
await this.target.destroy()
|
||||
// Recreate the DB again
|
||||
this.target = getDB(this.target.name)
|
||||
this.target = dangerousGetDB(this.target.name)
|
||||
await this.replicate()
|
||||
}
|
||||
|
||||
|
|
|
@ -22,7 +22,10 @@ exports.init = opts => {
|
|||
initialised = true
|
||||
}
|
||||
|
||||
exports.getDB = (dbName, opts) => {
|
||||
// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
|
||||
// this function is prone to leaks, should only be used
|
||||
// in situations that using the function doWithDB does not work
|
||||
exports.dangerousGetDB = (dbName, opts) => {
|
||||
checkInitialised()
|
||||
const db = new PouchDB(dbName, opts)
|
||||
const dbPut = db.put
|
||||
|
@ -30,6 +33,22 @@ exports.getDB = (dbName, opts) => {
|
|||
return db
|
||||
}
|
||||
|
||||
// we have to use a callback for this so that we can close
|
||||
// the DB when we're done, without this manual requests would
|
||||
// need to close the database when done with it to avoid memory leaks
|
||||
exports.doWithDB = async (dbName, cb, opts) => {
|
||||
const db = exports.dangerousGetDB(dbName, opts)
|
||||
// need this to be async so that we can correctly close DB after all
|
||||
// async operations have been completed
|
||||
const resp = await cb(db)
|
||||
try {
|
||||
await db.close()
|
||||
} catch (err) {
|
||||
// ignore error - the database may not have opened or is already closed
|
||||
}
|
||||
return resp
|
||||
}
|
||||
|
||||
exports.allDbs = () => {
|
||||
checkInitialised()
|
||||
return PouchDB.allDbs()
|
||||
|
|
|
@ -11,7 +11,7 @@ const {
|
|||
} = require("./constants")
|
||||
const { getTenantId, getGlobalDBName } = require("../tenancy")
|
||||
const fetch = require("node-fetch")
|
||||
const { getDB, allDbs } = require("./index")
|
||||
const { doWithDB, allDbs } = require("./index")
|
||||
const { getCouchUrl } = require("./pouch")
|
||||
const { getAppMetadata } = require("../cache/appMetadata")
|
||||
const { checkSlashesInUrl } = require("../helpers")
|
||||
|
@ -280,17 +280,22 @@ exports.getDevAppIDs = async () => {
|
|||
|
||||
exports.dbExists = async dbName => {
|
||||
let exists = false
|
||||
try {
|
||||
const db = getDB(dbName, { skip_setup: true })
|
||||
// check if database exists
|
||||
const info = await db.info()
|
||||
if (info && !info.error) {
|
||||
exists = true
|
||||
}
|
||||
} catch (err) {
|
||||
exists = false
|
||||
}
|
||||
return exists
|
||||
return doWithDB(
|
||||
dbName,
|
||||
async db => {
|
||||
try {
|
||||
// check if database exists
|
||||
const info = await db.info()
|
||||
if (info && !info.error) {
|
||||
exists = true
|
||||
}
|
||||
} catch (err) {
|
||||
exists = false
|
||||
}
|
||||
return exists
|
||||
},
|
||||
{ skip_setup: true }
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
const google = require("../google")
|
||||
const { Cookies, Configs } = require("../../../constants")
|
||||
const { clearCookie, getCookie } = require("../../../utils")
|
||||
const { getDB } = require("../../../db")
|
||||
const { doWithDB } = require("../../../db")
|
||||
const { getScopedConfig } = require("../../../db/utils")
|
||||
const environment = require("../../../environment")
|
||||
const { getGlobalDB } = require("../../../tenancy")
|
||||
|
@ -13,12 +13,12 @@ async function fetchGoogleCreds() {
|
|||
type: Configs.GOOGLE,
|
||||
})
|
||||
// or fall back to env variables
|
||||
const config = googleConfig || {
|
||||
clientID: environment.GOOGLE_CLIENT_ID,
|
||||
clientSecret: environment.GOOGLE_CLIENT_SECRET,
|
||||
}
|
||||
|
||||
return config
|
||||
return (
|
||||
googleConfig || {
|
||||
clientID: environment.GOOGLE_CLIENT_ID,
|
||||
clientSecret: environment.GOOGLE_CLIENT_SECRET,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async function preAuth(passport, ctx, next) {
|
||||
|
@ -59,16 +59,17 @@ async function postAuth(passport, ctx, next) {
|
|||
{ successRedirect: "/", failureRedirect: "/error" },
|
||||
async (err, tokens) => {
|
||||
// update the DB for the datasource with all the user info
|
||||
const db = getDB(authStateCookie.appId)
|
||||
const datasource = await db.get(authStateCookie.datasourceId)
|
||||
if (!datasource.config) {
|
||||
datasource.config = {}
|
||||
}
|
||||
datasource.config.auth = { type: "google", ...tokens }
|
||||
await db.put(datasource)
|
||||
ctx.redirect(
|
||||
`/builder/app/${authStateCookie.appId}/data/datasource/${authStateCookie.datasourceId}`
|
||||
)
|
||||
await doWithDB(authStateCookie.appId, async db => {
|
||||
const datasource = await db.get(authStateCookie.datasourceId)
|
||||
if (!datasource.config) {
|
||||
datasource.config = {}
|
||||
}
|
||||
datasource.config.auth = { type: "google", ...tokens }
|
||||
await db.put(datasource)
|
||||
ctx.redirect(
|
||||
`/builder/app/${authStateCookie.appId}/data/datasource/${authStateCookie.datasourceId}`
|
||||
)
|
||||
})
|
||||
}
|
||||
)(ctx, next)
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
require("../../../tests/utilities/dbConfig")
|
||||
|
||||
const database = require("../../../db")
|
||||
const { dangerousGetDB } = require("../../../db")
|
||||
const { authenticateThirdParty } = require("../third-party-common")
|
||||
const { data } = require("./utilities/mock-data")
|
||||
|
||||
|
@ -29,7 +29,7 @@ describe("third party common", () => {
|
|||
let thirdPartyUser
|
||||
|
||||
beforeEach(() => {
|
||||
db = database.getDB(StaticDatabases.GLOBAL.name)
|
||||
db = dangerousGetDB(StaticDatabases.GLOBAL.name)
|
||||
thirdPartyUser = data.buildThirdPartyUser()
|
||||
})
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const { setTenantId } = require("../tenancy")
|
||||
const { setTenantId, setGlobalDB, getGlobalDB } = require("../tenancy")
|
||||
const ContextFactory = require("../context/FunctionContext")
|
||||
const { buildMatcherRegex, matches } = require("./matchers")
|
||||
|
||||
|
@ -10,10 +10,16 @@ module.exports = (
|
|||
const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
|
||||
const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
|
||||
|
||||
return ContextFactory.getMiddleware(ctx => {
|
||||
const updateCtxFn = ctx => {
|
||||
const allowNoTenant =
|
||||
opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
|
||||
const allowQs = !!matches(ctx, allowQsOptions)
|
||||
setTenantId(ctx, { allowQs, allowNoTenant })
|
||||
})
|
||||
const tenantId = setTenantId(ctx, { allowQs, allowNoTenant })
|
||||
setGlobalDB(tenantId)
|
||||
}
|
||||
const destroyFn = async () => {
|
||||
await getGlobalDB().close()
|
||||
}
|
||||
|
||||
return ContextFactory.getMiddleware(updateCtxFn, destroyFn)
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
const { DEFAULT_TENANT_ID } = require("../constants")
|
||||
const { getDB } = require("../db")
|
||||
const { doWithDB } = require("../db")
|
||||
const { DocumentTypes } = require("../db/constants")
|
||||
const { getAllApps } = require("../db/utils")
|
||||
const environment = require("../environment")
|
||||
|
@ -47,45 +47,46 @@ const runMigration = async (migration, options = {}) => {
|
|||
|
||||
// run the migration against each db
|
||||
for (const dbName of dbNames) {
|
||||
const db = getDB(dbName)
|
||||
try {
|
||||
const doc = await exports.getMigrationsDoc(db)
|
||||
await doWithDB(dbName, async db => {
|
||||
try {
|
||||
const doc = await exports.getMigrationsDoc(db)
|
||||
|
||||
// exit if the migration has been performed already
|
||||
if (doc[migrationName]) {
|
||||
if (
|
||||
options.force &&
|
||||
options.force[migrationType] &&
|
||||
options.force[migrationType].includes(migrationName)
|
||||
) {
|
||||
console.log(
|
||||
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Forcing`
|
||||
)
|
||||
} else {
|
||||
// the migration has already been performed
|
||||
continue
|
||||
// exit if the migration has been performed already
|
||||
if (doc[migrationName]) {
|
||||
if (
|
||||
options.force &&
|
||||
options.force[migrationType] &&
|
||||
options.force[migrationType].includes(migrationName)
|
||||
) {
|
||||
console.log(
|
||||
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Forcing`
|
||||
)
|
||||
} else {
|
||||
// the migration has already been performed
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
console.log(
|
||||
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Running`
|
||||
)
|
||||
// run the migration with tenant context
|
||||
await migration.fn(db)
|
||||
console.log(
|
||||
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Complete`
|
||||
)
|
||||
|
||||
// mark as complete
|
||||
doc[migrationName] = Date.now()
|
||||
await db.put(doc)
|
||||
} catch (err) {
|
||||
console.error(
|
||||
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Error: `,
|
||||
err
|
||||
)
|
||||
throw err
|
||||
}
|
||||
|
||||
console.log(
|
||||
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Running`
|
||||
)
|
||||
// run the migration with tenant context
|
||||
await migration.fn(db)
|
||||
console.log(
|
||||
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Complete`
|
||||
)
|
||||
|
||||
// mark as complete
|
||||
doc[migrationName] = Date.now()
|
||||
await db.put(doc)
|
||||
} catch (err) {
|
||||
console.error(
|
||||
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Error: `,
|
||||
err
|
||||
)
|
||||
throw err
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
require("../../tests/utilities/dbConfig")
|
||||
|
||||
const { runMigrations, getMigrationsDoc } = require("../index")
|
||||
const { getDB } = require("../../db")
|
||||
const { dangerousGetDB } = require("../../db")
|
||||
const {
|
||||
StaticDatabases,
|
||||
} = require("../../db/utils")
|
||||
|
@ -20,7 +20,7 @@ describe("migrations", () => {
|
|||
}]
|
||||
|
||||
beforeEach(() => {
|
||||
db = getDB(StaticDatabases.GLOBAL.name)
|
||||
db = dangerousGetDB(StaticDatabases.GLOBAL.name)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
|
|
|
@ -7,7 +7,7 @@ const {
|
|||
SEPARATOR,
|
||||
} = require("../db/utils")
|
||||
const { getAppDB } = require("../context")
|
||||
const { getDB } = require("../db")
|
||||
const { doWithDB } = require("../db")
|
||||
|
||||
const BUILTIN_IDS = {
|
||||
ADMIN: "ADMIN",
|
||||
|
@ -199,43 +199,49 @@ exports.checkForRoleResourceArray = (rolePerms, resourceId) => {
|
|||
* @return {Promise<object[]>} An array of the role objects that were found.
|
||||
*/
|
||||
exports.getAllRoles = async appId => {
|
||||
const db = appId ? getDB(appId) : getAppDB()
|
||||
const body = await db.allDocs(
|
||||
getRoleParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
let roles = body.rows.map(row => row.doc)
|
||||
const builtinRoles = exports.getBuiltinRoles()
|
||||
if (appId) {
|
||||
return doWithDB(appId, internal)
|
||||
} else {
|
||||
return internal(getAppDB())
|
||||
}
|
||||
async function internal(db) {
|
||||
const body = await db.allDocs(
|
||||
getRoleParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
let roles = body.rows.map(row => row.doc)
|
||||
const builtinRoles = exports.getBuiltinRoles()
|
||||
|
||||
// need to combine builtin with any DB record of them (for sake of permissions)
|
||||
for (let builtinRoleId of EXTERNAL_BUILTIN_ROLE_IDS) {
|
||||
const builtinRole = builtinRoles[builtinRoleId]
|
||||
const dbBuiltin = roles.filter(
|
||||
dbRole => exports.getExternalRoleID(dbRole._id) === builtinRoleId
|
||||
)[0]
|
||||
if (dbBuiltin == null) {
|
||||
roles.push(builtinRole || builtinRoles.BASIC)
|
||||
} else {
|
||||
// remove role and add back after combining with the builtin
|
||||
roles = roles.filter(role => role._id !== dbBuiltin._id)
|
||||
dbBuiltin._id = exports.getExternalRoleID(dbBuiltin._id)
|
||||
roles.push(Object.assign(builtinRole, dbBuiltin))
|
||||
// need to combine builtin with any DB record of them (for sake of permissions)
|
||||
for (let builtinRoleId of EXTERNAL_BUILTIN_ROLE_IDS) {
|
||||
const builtinRole = builtinRoles[builtinRoleId]
|
||||
const dbBuiltin = roles.filter(
|
||||
dbRole => exports.getExternalRoleID(dbRole._id) === builtinRoleId
|
||||
)[0]
|
||||
if (dbBuiltin == null) {
|
||||
roles.push(builtinRole || builtinRoles.BASIC)
|
||||
} else {
|
||||
// remove role and add back after combining with the builtin
|
||||
roles = roles.filter(role => role._id !== dbBuiltin._id)
|
||||
dbBuiltin._id = exports.getExternalRoleID(dbBuiltin._id)
|
||||
roles.push(Object.assign(builtinRole, dbBuiltin))
|
||||
}
|
||||
}
|
||||
// check permissions
|
||||
for (let role of roles) {
|
||||
if (!role.permissions) {
|
||||
continue
|
||||
}
|
||||
for (let resourceId of Object.keys(role.permissions)) {
|
||||
role.permissions = exports.checkForRoleResourceArray(
|
||||
role.permissions,
|
||||
resourceId
|
||||
)
|
||||
}
|
||||
}
|
||||
return roles
|
||||
}
|
||||
// check permissions
|
||||
for (let role of roles) {
|
||||
if (!role.permissions) {
|
||||
continue
|
||||
}
|
||||
for (let resourceId of Object.keys(role.permissions)) {
|
||||
role.permissions = exports.checkForRoleResourceArray(
|
||||
role.permissions,
|
||||
resourceId
|
||||
)
|
||||
}
|
||||
}
|
||||
return roles
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
const { getDB } = require("../db")
|
||||
const { SEPARATOR, StaticDatabases } = require("../db/constants")
|
||||
const { doWithDB } = require("../db")
|
||||
const { StaticDatabases } = require("../db/constants")
|
||||
const { baseGlobalDBName } = require("./utils")
|
||||
const {
|
||||
getTenantId,
|
||||
DEFAULT_TENANT_ID,
|
||||
|
@ -23,59 +24,61 @@ exports.addTenantToUrl = url => {
|
|||
}
|
||||
|
||||
exports.doesTenantExist = async tenantId => {
|
||||
const db = getDB(PLATFORM_INFO_DB)
|
||||
let tenants
|
||||
try {
|
||||
tenants = await db.get(TENANT_DOC)
|
||||
} catch (err) {
|
||||
// if there's an error the doc doesn't exist, so no tenants exist
|
||||
return false
|
||||
}
|
||||
return (
|
||||
tenants &&
|
||||
Array.isArray(tenants.tenantIds) &&
|
||||
tenants.tenantIds.indexOf(tenantId) !== -1
|
||||
)
|
||||
return doWithDB(PLATFORM_INFO_DB, async db => {
|
||||
let tenants
|
||||
try {
|
||||
tenants = await db.get(TENANT_DOC)
|
||||
} catch (err) {
|
||||
// if there's an error the doc doesn't exist, so no tenants exist
|
||||
return false
|
||||
}
|
||||
return (
|
||||
tenants &&
|
||||
Array.isArray(tenants.tenantIds) &&
|
||||
tenants.tenantIds.indexOf(tenantId) !== -1
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
exports.tryAddTenant = async (tenantId, userId, email) => {
|
||||
const db = getDB(PLATFORM_INFO_DB)
|
||||
const getDoc = async id => {
|
||||
if (!id) {
|
||||
return null
|
||||
return doWithDB(PLATFORM_INFO_DB, async db => {
|
||||
const getDoc = async id => {
|
||||
if (!id) {
|
||||
return null
|
||||
}
|
||||
try {
|
||||
return await db.get(id)
|
||||
} catch (err) {
|
||||
return { _id: id }
|
||||
}
|
||||
}
|
||||
try {
|
||||
return await db.get(id)
|
||||
} catch (err) {
|
||||
return { _id: id }
|
||||
let [tenants, userIdDoc, emailDoc] = await Promise.all([
|
||||
getDoc(TENANT_DOC),
|
||||
getDoc(userId),
|
||||
getDoc(email),
|
||||
])
|
||||
if (!Array.isArray(tenants.tenantIds)) {
|
||||
tenants = {
|
||||
_id: TENANT_DOC,
|
||||
tenantIds: [],
|
||||
}
|
||||
}
|
||||
}
|
||||
let [tenants, userIdDoc, emailDoc] = await Promise.all([
|
||||
getDoc(TENANT_DOC),
|
||||
getDoc(userId),
|
||||
getDoc(email),
|
||||
])
|
||||
if (!Array.isArray(tenants.tenantIds)) {
|
||||
tenants = {
|
||||
_id: TENANT_DOC,
|
||||
tenantIds: [],
|
||||
let promises = []
|
||||
if (userIdDoc) {
|
||||
userIdDoc.tenantId = tenantId
|
||||
promises.push(db.put(userIdDoc))
|
||||
}
|
||||
}
|
||||
let promises = []
|
||||
if (userIdDoc) {
|
||||
userIdDoc.tenantId = tenantId
|
||||
promises.push(db.put(userIdDoc))
|
||||
}
|
||||
if (emailDoc) {
|
||||
emailDoc.tenantId = tenantId
|
||||
emailDoc.userId = userId
|
||||
promises.push(db.put(emailDoc))
|
||||
}
|
||||
if (tenants.tenantIds.indexOf(tenantId) === -1) {
|
||||
tenants.tenantIds.push(tenantId)
|
||||
promises.push(db.put(tenants))
|
||||
}
|
||||
await Promise.all(promises)
|
||||
if (emailDoc) {
|
||||
emailDoc.tenantId = tenantId
|
||||
emailDoc.userId = userId
|
||||
promises.push(db.put(emailDoc))
|
||||
}
|
||||
if (tenants.tenantIds.indexOf(tenantId) === -1) {
|
||||
tenants.tenantIds.push(tenantId)
|
||||
promises.push(db.put(tenants))
|
||||
}
|
||||
await Promise.all(promises)
|
||||
})
|
||||
}
|
||||
|
||||
exports.getGlobalDBName = (tenantId = null) => {
|
||||
|
@ -84,43 +87,37 @@ exports.getGlobalDBName = (tenantId = null) => {
|
|||
if (!tenantId) {
|
||||
tenantId = getTenantId()
|
||||
}
|
||||
|
||||
let dbName
|
||||
if (tenantId === DEFAULT_TENANT_ID) {
|
||||
dbName = StaticDatabases.GLOBAL.name
|
||||
} else {
|
||||
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
|
||||
}
|
||||
return dbName
|
||||
return baseGlobalDBName(tenantId)
|
||||
}
|
||||
|
||||
exports.getGlobalDB = (tenantId = null) => {
|
||||
const dbName = exports.getGlobalDBName(tenantId)
|
||||
return getDB(dbName)
|
||||
exports.doWithGlobalDB = (tenantId, cb) => {
|
||||
return doWithDB(exports.getGlobalDBName(tenantId), cb)
|
||||
}
|
||||
|
||||
exports.lookupTenantId = async userId => {
|
||||
const db = getDB(StaticDatabases.PLATFORM_INFO.name)
|
||||
let tenantId = env.MULTI_TENANCY ? DEFAULT_TENANT_ID : null
|
||||
try {
|
||||
const doc = await db.get(userId)
|
||||
if (doc && doc.tenantId) {
|
||||
tenantId = doc.tenantId
|
||||
return doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
|
||||
let tenantId = env.MULTI_TENANCY ? DEFAULT_TENANT_ID : null
|
||||
try {
|
||||
const doc = await db.get(userId)
|
||||
if (doc && doc.tenantId) {
|
||||
tenantId = doc.tenantId
|
||||
}
|
||||
} catch (err) {
|
||||
// just return the default
|
||||
}
|
||||
} catch (err) {
|
||||
// just return the default
|
||||
}
|
||||
return tenantId
|
||||
return tenantId
|
||||
})
|
||||
}
|
||||
|
||||
// lookup, could be email or userId, either will return a doc
|
||||
exports.getTenantUser = async identifier => {
|
||||
const db = getDB(PLATFORM_INFO_DB)
|
||||
try {
|
||||
return await db.get(identifier)
|
||||
} catch (err) {
|
||||
return null
|
||||
}
|
||||
return doWithDB(PLATFORM_INFO_DB, async db => {
|
||||
try {
|
||||
return await db.get(identifier)
|
||||
} catch (err) {
|
||||
return null
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
exports.isUserInAppTenant = (appId, user = null) => {
|
||||
|
@ -135,13 +132,14 @@ exports.isUserInAppTenant = (appId, user = null) => {
|
|||
}
|
||||
|
||||
exports.getTenantIds = async () => {
|
||||
const db = getDB(PLATFORM_INFO_DB)
|
||||
let tenants
|
||||
try {
|
||||
tenants = await db.get(TENANT_DOC)
|
||||
} catch (err) {
|
||||
// if there's an error the doc doesn't exist, so no tenants exist
|
||||
return []
|
||||
}
|
||||
return (tenants && tenants.tenantIds) || []
|
||||
return doWithDB(PLATFORM_INFO_DB, async db => {
|
||||
let tenants
|
||||
try {
|
||||
tenants = await db.get(TENANT_DOC)
|
||||
} catch (err) {
|
||||
// if there's an error the doc doesn't exist, so no tenants exist
|
||||
return []
|
||||
}
|
||||
return (tenants && tenants.tenantIds) || []
|
||||
})
|
||||
}
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
const { DEFAULT_TENANT_ID } = require("../constants")
|
||||
const { StaticDatabases, SEPARATOR } = require("../db/constants")
|
||||
|
||||
exports.baseGlobalDBName = tenantId => {
|
||||
let dbName
|
||||
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
|
||||
dbName = StaticDatabases.GLOBAL.name
|
||||
} else {
|
||||
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
|
||||
}
|
||||
return dbName
|
||||
}
|
|
@ -10,7 +10,7 @@ const { options } = require("./middleware/passport/jwt")
|
|||
const { queryGlobalView } = require("./db/views")
|
||||
const { Headers, UserStatus, Cookies, MAX_VALID_DATE } = require("./constants")
|
||||
const {
|
||||
getGlobalDB,
|
||||
doWithGlobalDB,
|
||||
updateTenantId,
|
||||
getTenantUser,
|
||||
tryAddTenant,
|
||||
|
@ -188,82 +188,83 @@ exports.saveUser = async (
|
|||
// need to set the context for this request, as specified
|
||||
updateTenantId(tenantId)
|
||||
// specify the tenancy incase we're making a new admin user (public)
|
||||
const db = getGlobalDB(tenantId)
|
||||
let { email, password, _id } = user
|
||||
// make sure another user isn't using the same email
|
||||
let dbUser
|
||||
if (email) {
|
||||
// check budibase users inside the tenant
|
||||
dbUser = await exports.getGlobalUserByEmail(email)
|
||||
if (dbUser != null && (dbUser._id !== _id || Array.isArray(dbUser))) {
|
||||
throw `Email address ${email} already in use.`
|
||||
}
|
||||
|
||||
// check budibase users in other tenants
|
||||
if (env.MULTI_TENANCY) {
|
||||
const tenantUser = await getTenantUser(email)
|
||||
if (tenantUser != null && tenantUser.tenantId !== tenantId) {
|
||||
return doWithGlobalDB(tenantId, async db => {
|
||||
let { email, password, _id } = user
|
||||
// make sure another user isn't using the same email
|
||||
let dbUser
|
||||
if (email) {
|
||||
// check budibase users inside the tenant
|
||||
dbUser = await exports.getGlobalUserByEmail(email)
|
||||
if (dbUser != null && (dbUser._id !== _id || Array.isArray(dbUser))) {
|
||||
throw `Email address ${email} already in use.`
|
||||
}
|
||||
}
|
||||
|
||||
// check root account users in account portal
|
||||
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
|
||||
const account = await accounts.getAccount(email)
|
||||
if (account && account.verified && account.tenantId !== tenantId) {
|
||||
throw `Email address ${email} already in use.`
|
||||
// check budibase users in other tenants
|
||||
if (env.MULTI_TENANCY) {
|
||||
const tenantUser = await getTenantUser(email)
|
||||
if (tenantUser != null && tenantUser.tenantId !== tenantId) {
|
||||
throw `Email address ${email} already in use.`
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
dbUser = await db.get(_id)
|
||||
}
|
||||
|
||||
// get the password, make sure one is defined
|
||||
let hashedPassword
|
||||
if (password) {
|
||||
hashedPassword = hashPassword ? await hash(password) : password
|
||||
} else if (dbUser) {
|
||||
hashedPassword = dbUser.password
|
||||
} else if (requirePassword) {
|
||||
throw "Password must be specified."
|
||||
}
|
||||
|
||||
_id = _id || generateGlobalUserID()
|
||||
user = {
|
||||
createdAt: Date.now(),
|
||||
...dbUser,
|
||||
...user,
|
||||
_id,
|
||||
password: hashedPassword,
|
||||
tenantId,
|
||||
}
|
||||
// make sure the roles object is always present
|
||||
if (!user.roles) {
|
||||
user.roles = {}
|
||||
}
|
||||
// add the active status to a user if it's not provided
|
||||
if (user.status == null) {
|
||||
user.status = UserStatus.ACTIVE
|
||||
}
|
||||
try {
|
||||
const response = await db.put({
|
||||
password: hashedPassword,
|
||||
...user,
|
||||
})
|
||||
await tryAddTenant(tenantId, _id, email)
|
||||
await userCache.invalidateUser(response.id)
|
||||
return {
|
||||
_id: response.id,
|
||||
_rev: response.rev,
|
||||
email,
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.status === 409) {
|
||||
throw "User exists already"
|
||||
// check root account users in account portal
|
||||
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
|
||||
const account = await accounts.getAccount(email)
|
||||
if (account && account.verified && account.tenantId !== tenantId) {
|
||||
throw `Email address ${email} already in use.`
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw err
|
||||
dbUser = await db.get(_id)
|
||||
}
|
||||
}
|
||||
|
||||
// get the password, make sure one is defined
|
||||
let hashedPassword
|
||||
if (password) {
|
||||
hashedPassword = hashPassword ? await hash(password) : password
|
||||
} else if (dbUser) {
|
||||
hashedPassword = dbUser.password
|
||||
} else if (requirePassword) {
|
||||
throw "Password must be specified."
|
||||
}
|
||||
|
||||
_id = _id || generateGlobalUserID()
|
||||
user = {
|
||||
createdAt: Date.now(),
|
||||
...dbUser,
|
||||
...user,
|
||||
_id,
|
||||
password: hashedPassword,
|
||||
tenantId,
|
||||
}
|
||||
// make sure the roles object is always present
|
||||
if (!user.roles) {
|
||||
user.roles = {}
|
||||
}
|
||||
// add the active status to a user if it's not provided
|
||||
if (user.status == null) {
|
||||
user.status = UserStatus.ACTIVE
|
||||
}
|
||||
try {
|
||||
const response = await db.put({
|
||||
password: hashedPassword,
|
||||
...user,
|
||||
})
|
||||
await tryAddTenant(tenantId, _id, email)
|
||||
await userCache.invalidateUser(response.id)
|
||||
return {
|
||||
_id: response.id,
|
||||
_rev: response.rev,
|
||||
email,
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.status === 409) {
|
||||
throw "User exists already"
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -258,6 +258,13 @@
|
|||
dependencies:
|
||||
"@babel/helper-plugin-utils" "^7.14.5"
|
||||
|
||||
"@babel/runtime@^7.15.4":
|
||||
version "7.17.9"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.17.9.tgz#d19fbf802d01a8cb6cf053a64e472d42c434ba72"
|
||||
integrity sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.13.4"
|
||||
|
||||
"@babel/template@^7.16.0", "@babel/template@^7.3.3":
|
||||
version "7.16.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.0.tgz#d16a35ebf4cd74e202083356fab21dd89363ddd6"
|
||||
|
@ -857,6 +864,21 @@ aws4@^1.8.0:
|
|||
resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59"
|
||||
integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==
|
||||
|
||||
axios-retry@^3.1.9:
|
||||
version "3.2.4"
|
||||
resolved "https://registry.yarnpkg.com/axios-retry/-/axios-retry-3.2.4.tgz#f447a53c3456f5bfeca18f20c3a3272207d082ae"
|
||||
integrity sha512-Co3UXiv4npi6lM963mfnuH90/YFLKWWDmoBYfxkHT5xtkSSWNqK9zdG3fw5/CP/dsoKB5aMMJCsgab+tp1OxLQ==
|
||||
dependencies:
|
||||
"@babel/runtime" "^7.15.4"
|
||||
is-retry-allowed "^2.2.0"
|
||||
|
||||
axios@0.24.0:
|
||||
version "0.24.0"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6"
|
||||
integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==
|
||||
dependencies:
|
||||
follow-redirects "^1.14.4"
|
||||
|
||||
babel-jest@^26.6.3:
|
||||
version "26.6.3"
|
||||
resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056"
|
||||
|
@ -1139,6 +1161,11 @@ char-regex@^1.0.2:
|
|||
resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
|
||||
integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==
|
||||
|
||||
charenc@0.0.2:
|
||||
version "0.0.2"
|
||||
resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
|
||||
integrity sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=
|
||||
|
||||
chownr@^1.1.1:
|
||||
version "1.1.4"
|
||||
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
|
||||
|
@ -1273,6 +1300,11 @@ component-emitter@^1.2.1:
|
|||
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
|
||||
integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==
|
||||
|
||||
component-type@^1.2.1:
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/component-type/-/component-type-1.2.1.tgz#8a47901700238e4fc32269771230226f24b415a9"
|
||||
integrity sha1-ikeQFwAjjk/DIml3EjAibyS0Fak=
|
||||
|
||||
concat-map@0.0.1:
|
||||
version "0.0.1"
|
||||
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
|
||||
|
@ -1315,6 +1347,11 @@ cross-spawn@^7.0.0:
|
|||
shebang-command "^2.0.0"
|
||||
which "^2.0.1"
|
||||
|
||||
crypt@0.0.2:
|
||||
version "0.0.2"
|
||||
resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
|
||||
integrity sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=
|
||||
|
||||
cryptiles@2.x.x:
|
||||
version "2.0.5"
|
||||
resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
|
||||
|
@ -1802,6 +1839,11 @@ find-up@^4.0.0, find-up@^4.1.0:
|
|||
locate-path "^5.0.0"
|
||||
path-exists "^4.0.0"
|
||||
|
||||
follow-redirects@^1.14.4:
|
||||
version "1.14.9"
|
||||
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.9.tgz#dd4ea157de7bfaf9ea9b3fbd85aa16951f78d8d7"
|
||||
integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==
|
||||
|
||||
for-in@^1.0.2:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
|
||||
|
@ -2226,7 +2268,7 @@ is-arrayish@^0.2.1:
|
|||
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
|
||||
integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
|
||||
|
||||
is-buffer@^1.1.5:
|
||||
is-buffer@^1.1.5, is-buffer@~1.1.6:
|
||||
version "1.1.6"
|
||||
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
|
||||
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
|
||||
|
@ -2328,6 +2370,11 @@ is-potential-custom-element-name@^1.0.1:
|
|||
resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5"
|
||||
integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==
|
||||
|
||||
is-retry-allowed@^2.2.0:
|
||||
version "2.2.0"
|
||||
resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz#88f34cbd236e043e71b6932d09b0c65fb7b4d71d"
|
||||
integrity sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==
|
||||
|
||||
is-stream@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
|
||||
|
@ -2824,6 +2871,11 @@ jodid25519@^1.0.0:
|
|||
dependencies:
|
||||
jsbn "~0.1.0"
|
||||
|
||||
join-component@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/join-component/-/join-component-1.1.0.tgz#b8417b750661a392bee2c2537c68b2a9d4977cd5"
|
||||
integrity sha1-uEF7dQZho5K+4sJTfGiyqdSXfNU=
|
||||
|
||||
js-tokens@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
|
||||
|
@ -3257,6 +3309,15 @@ map-visit@^1.0.0:
|
|||
dependencies:
|
||||
object-visit "^1.0.0"
|
||||
|
||||
md5@^2.3.0:
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f"
|
||||
integrity sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==
|
||||
dependencies:
|
||||
charenc "0.0.2"
|
||||
crypt "0.0.2"
|
||||
is-buffer "~1.1.6"
|
||||
|
||||
memdown@1.4.1:
|
||||
version "1.4.1"
|
||||
resolved "https://registry.yarnpkg.com/memdown/-/memdown-1.4.1.tgz#b4e4e192174664ffbae41361aa500f3119efe215"
|
||||
|
@ -3377,6 +3438,11 @@ ms@2.1.2, ms@^2.1.1:
|
|||
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
|
||||
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
|
||||
|
||||
ms@^2.1.3:
|
||||
version "2.1.3"
|
||||
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
|
||||
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
|
||||
|
||||
nanomatch@^1.2.9:
|
||||
version "1.2.13"
|
||||
resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
|
||||
|
@ -4195,6 +4261,11 @@ redis-parser@^3.0.0:
|
|||
dependencies:
|
||||
redis-errors "^1.0.0"
|
||||
|
||||
regenerator-runtime@^0.13.4:
|
||||
version "0.13.9"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
|
||||
integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==
|
||||
|
||||
regex-not@^1.0.0, regex-not@^1.0.2:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
|
||||
|
@ -4208,6 +4279,11 @@ remove-trailing-separator@^1.0.1:
|
|||
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
|
||||
integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8=
|
||||
|
||||
remove-trailing-slash@^0.1.1:
|
||||
version "0.1.1"
|
||||
resolved "https://registry.yarnpkg.com/remove-trailing-slash/-/remove-trailing-slash-0.1.1.tgz#be2285a59f39c74d1bce4f825950061915e3780d"
|
||||
integrity sha512-o4S4Qh6L2jpnCy83ysZDau+VORNvnFw07CKSAymkd6ICNVEPisMyzlc00KlvvicsxKck94SEwhDnMNdICzO+tA==
|
||||
|
||||
repeat-element@^1.1.2:
|
||||
version "1.1.4"
|
||||
resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9"
|
||||
|
|
|
@ -3,7 +3,7 @@ const yargs = require("yargs")
|
|||
const fs = require("fs")
|
||||
const { join } = require("path")
|
||||
require("../src/db").init()
|
||||
const { getDB } = require("@budibase/backend-core/db")
|
||||
const { doWithDB } = require("@budibase/backend-core/db")
|
||||
// load environment
|
||||
const env = require("../src/environment")
|
||||
const {
|
||||
|
@ -48,13 +48,14 @@ yargs
|
|||
const writeStream = fs.createWriteStream(join(exportPath, "dump.text"))
|
||||
// perform couch dump
|
||||
|
||||
const instanceDb = getDB(appId)
|
||||
await instanceDb.dump(writeStream, {
|
||||
filter: doc =>
|
||||
!(
|
||||
doc._id.includes(USER_METDATA_PREFIX) ||
|
||||
doc.includes(LINK_USER_METADATA_PREFIX)
|
||||
),
|
||||
await doWithDB(appId, async db => {
|
||||
return db.dump(writeStream, {
|
||||
filter: doc =>
|
||||
!(
|
||||
doc._id.includes(USER_METDATA_PREFIX) ||
|
||||
doc.includes(LINK_USER_METADATA_PREFIX)
|
||||
),
|
||||
})
|
||||
})
|
||||
console.log(`Template ${name} exported to ${exportPath}`)
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
require("../src/db").init()
|
||||
const { DocumentTypes } = require("../src/db/utils")
|
||||
const { getAllDbs, getDB } = require("@budibase/backend-core/db")
|
||||
const { getAllDbs, dangerousGetDB } = require("@budibase/backend-core/db")
|
||||
const appName = process.argv[2].toLowerCase()
|
||||
const remoteUrl = process.argv[3]
|
||||
|
||||
|
@ -18,7 +18,7 @@ const run = async () => {
|
|||
const appDbNames = dbs.filter(dbName => dbName.startsWith("inst_app"))
|
||||
let apps = []
|
||||
for (let dbName of appDbNames) {
|
||||
const db = getDB(dbName)
|
||||
const db = dangerousGetDB(dbName)
|
||||
apps.push(db.get(DocumentTypes.APP_METADATA))
|
||||
}
|
||||
apps = await Promise.all(apps)
|
||||
|
@ -33,8 +33,8 @@ const run = async () => {
|
|||
return
|
||||
}
|
||||
|
||||
const instanceDb = getDB(app.appId)
|
||||
const remoteDb = getDB(`${remoteUrl}/${appName}`)
|
||||
const instanceDb = dangerousGetDB(app.appId)
|
||||
const remoteDb = dangerousGetDB(`${remoteUrl}/${appName}`)
|
||||
|
||||
instanceDb.replicate
|
||||
.to(remoteDb)
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
const { checkBuilderEndpoint, getDB } = require("./utilities/TestFunctions")
|
||||
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const setup = require("./utilities")
|
||||
const { basicTable } = setup.structures
|
||||
|
||||
|
@ -122,7 +123,7 @@ describe("/tables", () => {
|
|||
|
||||
describe("indexing", () => {
|
||||
it("should be able to create a table with indexes", async () => {
|
||||
const db = getDB(config)
|
||||
const db = getAppDB(config)
|
||||
const indexCount = (await db.getIndexes()).total_rows
|
||||
const table = basicTable()
|
||||
table.indexes = ["name"]
|
||||
|
|
|
@ -3,7 +3,7 @@ const appController = require("../../../controllers/application")
|
|||
const { AppStatus } = require("../../../../db/utils")
|
||||
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
|
||||
const { TENANT_ID } = require("../../../../tests/utilities/structures")
|
||||
const { getAppDB, doInAppContext } = require("@budibase/backend-core/context")
|
||||
const { doInAppContext } = require("@budibase/backend-core/context")
|
||||
const env = require("../../../../environment")
|
||||
|
||||
function Request(appId, params) {
|
||||
|
@ -106,10 +106,6 @@ exports.checkPermissionsEndpoint = async ({
|
|||
.expect(403)
|
||||
}
|
||||
|
||||
exports.getDB = () => {
|
||||
return getAppDB()
|
||||
}
|
||||
|
||||
exports.testAutomation = async (config, automation) => {
|
||||
return runRequest(automation.appId, async () => {
|
||||
return await config.request
|
||||
|
|
|
@ -7,7 +7,7 @@ const { updateEntityMetadata } = require("../utilities")
|
|||
const { MetadataTypes, WebhookType } = require("../constants")
|
||||
const { getProdAppID } = require("@budibase/backend-core/db")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { getDB } = require("@budibase/backend-core/db")
|
||||
const { doWithDB } = require("@budibase/backend-core/db")
|
||||
const { getAppDB, getAppId } = require("@budibase/backend-core/context")
|
||||
|
||||
const WH_STEP_ID = definitions.WEBHOOK.stepId
|
||||
|
@ -101,10 +101,11 @@ exports.enableCronTrigger = async (appId, automation) => {
|
|||
// can't use getAppDB here as this is likely to be called from dev app,
|
||||
// but this call could be for dev app or prod app, need to just use what
|
||||
// was passed in
|
||||
const db = getDB(appId)
|
||||
const response = await db.put(automation)
|
||||
automation._id = response.id
|
||||
automation._rev = response.rev
|
||||
await doWithDB(appId, async db => {
|
||||
const response = await db.put(automation)
|
||||
automation._id = response.id
|
||||
automation._rev = response.rev
|
||||
})
|
||||
}
|
||||
return automation
|
||||
}
|
||||
|
|
|
@ -41,5 +41,6 @@ exports.runView = async (view, calculation, group, data) => {
|
|||
}
|
||||
}
|
||||
await db.destroy()
|
||||
await db.close()
|
||||
return response
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ const TestConfig = require("../../tests/utilities/TestConfiguration")
|
|||
const { basicTable } = require("../../tests/utilities/structures")
|
||||
const linkUtils = require("../linkedRows/linkUtils")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { getDB } = require("@budibase/backend-core/db")
|
||||
const { doWithDB } = require("@budibase/backend-core/db")
|
||||
|
||||
describe("test link functionality", () => {
|
||||
const config = new TestConfig(false)
|
||||
|
@ -48,12 +48,13 @@ describe("test link functionality", () => {
|
|||
describe("getLinkDocuments", () => {
|
||||
it("should create the link view when it doesn't exist", async () => {
|
||||
// create the DB and a very basic app design DB
|
||||
const db = getDB("test")
|
||||
await db.put({ _id: "_design/database", views: {} })
|
||||
const output = await linkUtils.getLinkDocuments({
|
||||
tableId: "test",
|
||||
rowId: "test",
|
||||
includeDocs: false,
|
||||
const output = await doWithDB("test", async db => {
|
||||
await db.put({ _id: "_design/database", views: {} })
|
||||
return await linkUtils.getLinkDocuments({
|
||||
tableId: "test",
|
||||
rowId: "test",
|
||||
includeDocs: false,
|
||||
})
|
||||
})
|
||||
expect(Array.isArray(output)).toBe(true)
|
||||
})
|
||||
|
|
|
@@ -53,51 +53,55 @@ module CouchDBModule {
 
   class CouchDBIntegration implements IntegrationBase {
     private config: CouchDBConfig
-    private client: any
+    private readonly client: any
 
     constructor(config: CouchDBConfig) {
       this.config = config
       this.client = new PouchDB(`${config.url}/${config.database}`)
     }
 
-    async create(query: { json: object }) {
+    async query(
+      command: string,
+      errorMsg: string,
+      query: { json?: object; id?: string }
+    ) {
       try {
-        return this.client.post(query.json)
+        const response = await this.client[command](query.id || query.json)
+        await this.client.close()
+        return response
       } catch (err) {
-        console.error("Error writing to couchDB", err)
+        console.error(errorMsg, err)
         throw err
       }
     }
 
+    async create(query: { json: object }) {
+      return this.query("post", "Error writing to couchDB", query)
+    }
+
     async read(query: { json: object }) {
-      try {
-        const result = await this.client.allDocs({
+      const result = await this.query("allDocs", "Error querying couchDB", {
+        json: {
           include_docs: true,
           ...query.json,
-        })
-        return result.rows.map((row: { doc: object }) => row.doc)
-      } catch (err) {
-        console.error("Error querying couchDB", err)
-        throw err
-      }
+        },
+      })
+      return result.rows.map((row: { doc: object }) => row.doc)
     }
 
     async update(query: { json: object }) {
-      try {
-        return this.client.put(query.json)
-      } catch (err) {
-        console.error("Error updating couchDB document", err)
-        throw err
-      }
+      return this.query("put", "Error updating couchDB document", query)
     }
 
     async delete(query: { id: string }) {
-      try {
-        return await this.client.remove(query.id)
-      } catch (err) {
-        console.error("Error deleting couchDB document", err)
-        throw err
-      }
+      const doc = await this.query(
+        "get",
+        "Cannot find doc to be deleted",
+        query
+      )
+      return this.query("remove", "Error deleting couchDB document", {
+        json: doc,
+      })
     }
   }
 

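The new query helper above funnels every CRUD method through one code path: it dispatches dynamically on the PouchDB client (this.client[command](...)), closes the client after a successful call, and rethrows with a per-operation error message. The same idea in isolation, as a standalone sketch (the runCommand name and the example URL are illustrative, not part of this diff):

const PouchDB = require("pouchdb")

// dispatch a named PouchDB method, close the handle on success, rethrow with context
async function runCommand(client, command, errorMsg, query) {
  try {
    const response = await client[command](query.id || query.json)
    await client.close()
    return response
  } catch (err) {
    console.error(errorMsg, err)
    throw err
  }
}

// e.g. (URL assumed):
// const client = new PouchDB("http://localhost:5984/mydb")
// await runCommand(client, "post", "Error writing to couchDB", { json: { foo: "bar" } })

Worth noting as a design point: the close happens only on the success path, so a failed call surfaces the error to the caller without closing the client here.
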
@@ -5,7 +5,7 @@ const {
   checkDebounce,
   setDebounce,
 } = require("../utilities/redis")
-const { getDB } = require("@budibase/backend-core/db")
+const { doWithDB } = require("@budibase/backend-core/db")
 const { DocumentTypes } = require("../db/utils")
 const { PermissionTypes } = require("@budibase/backend-core/permissions")
 const { app: appCache } = require("@budibase/backend-core/cache")

@@ -48,14 +48,15 @@ async function updateAppUpdatedAt(ctx) {
   if (ctx.method === "GET" || (await checkDebounce(appId))) {
     return
   }
-  const db = getDB(appId)
-  const metadata = await db.get(DocumentTypes.APP_METADATA)
-  metadata.updatedAt = new Date().toISOString()
-  const response = await db.put(metadata)
-  metadata._rev = response.rev
-  await appCache.invalidateAppMetadata(appId, metadata)
-  // set a new debounce record with a short TTL
-  await setDebounce(appId, DEBOUNCE_TIME_SEC)
+  await doWithDB(appId, async db => {
+    const metadata = await db.get(DocumentTypes.APP_METADATA)
+    metadata.updatedAt = new Date().toISOString()
+    const response = await db.put(metadata)
+    metadata._rev = response.rev
+    await appCache.invalidateAppMetadata(appId, metadata)
+    // set a new debounce record with a short TTL
+    await setDebounce(appId, DEBOUNCE_TIME_SEC)
+  })
 }
 
 module.exports = async (ctx, permType) => {

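Wrapping the metadata update in a closure like this means the app DB handle is released before the request carries on through the middleware chain. A rough sketch of the same style in a standalone Koa middleware (the ctx.appId property and the response header are assumptions for illustration only):

const { doWithDB } = require("@budibase/backend-core/db")

// touch the app DB inside the scoped helper so nothing stays open after the request
module.exports = async (ctx, next) => {
  const appId = ctx.appId
  if (appId) {
    await doWithDB(appId, async db => {
      const info = await db.info()
      ctx.set("x-app-doc-count", String(info.doc_count))
    })
  }
  return next()
}
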
@@ -1,4 +1,4 @@
-const { DocumentTypes, getDB } = require("@budibase/backend-core/db")
+const { DocumentTypes, doWithDB } = require("@budibase/backend-core/db")
 const TestConfig = require("../../../tests/utilities/TestConfiguration")
 
 const migration = require("../appUrls")

@@ -14,14 +14,13 @@ describe("run", () => {
 
   it("runs successfully", async () => {
     const app = await config.createApp("testApp")
-    const appDb = getDB(app.appId)
-    let metadata = await appDb.get(DocumentTypes.APP_METADATA)
-    delete metadata.url
-    await appDb.put(metadata)
-
-    await migration.run(appDb)
-
-    metadata = await appDb.get(DocumentTypes.APP_METADATA)
+    const metadata = await doWithDB(app.appId, async db => {
+      const metadataDoc = await db.get(DocumentTypes.APP_METADATA)
+      delete metadataDoc.url
+      await db.put(metadataDoc)
+      await migration.run(db)
+      return await db.get(DocumentTypes.APP_METADATA)
+    })
     expect(metadata.url).toEqual("/testapp")
   })
 })

@@ -18,7 +18,7 @@ const supertest = require("supertest")
 const { cleanup } = require("../../utilities/fileSystem")
 const { Cookies, Headers } = require("@budibase/backend-core/constants")
 const { jwt } = require("@budibase/backend-core/auth")
-const { getGlobalDB } = require("@budibase/backend-core/tenancy")
+const { doWithGlobalDB } = require("@budibase/backend-core/tenancy")
 const { createASession } = require("@budibase/backend-core/sessions")
 const { user: userCache } = require("@budibase/backend-core/cache")
 const newid = require("../../db/newid")

@@ -84,17 +84,18 @@ class TestConfiguration {
   }
 
   async generateApiKey(userId = GLOBAL_USER_ID) {
-    const db = getGlobalDB(TENANT_ID)
-    const id = generateDevInfoID(userId)
-    let devInfo
-    try {
-      devInfo = await db.get(id)
-    } catch (err) {
-      devInfo = { _id: id, userId }
-    }
-    devInfo.apiKey = encrypt(`${TENANT_ID}${SEPARATOR}${newid()}`)
-    await db.put(devInfo)
-    return devInfo.apiKey
+    return doWithGlobalDB(TENANT_ID, async db => {
+      const id = generateDevInfoID(userId)
+      let devInfo
+      try {
+        devInfo = await db.get(id)
+      } catch (err) {
+        devInfo = { _id: id, userId }
+      }
+      devInfo.apiKey = encrypt(`${TENANT_ID}${SEPARATOR}${newid()}`)
+      await db.put(devInfo)
+      return devInfo.apiKey
+    })
   }
 
   async globalUser({

@@ -103,34 +104,35 @@ class TestConfiguration {
     email = EMAIL,
     roles,
   } = {}) {
-    const db = getGlobalDB(TENANT_ID)
-    let existing
-    try {
-      existing = await db.get(id)
-    } catch (err) {
-      existing = { email }
-    }
-    const user = {
-      _id: id,
-      ...existing,
-      roles: roles || {},
-      tenantId: TENANT_ID,
-    }
-    await createASession(id, {
-      sessionId: "sessionid",
-      tenantId: TENANT_ID,
-      csrfToken: CSRF_TOKEN,
+    return doWithGlobalDB(TENANT_ID, async db => {
+      let existing
+      try {
+        existing = await db.get(id)
+      } catch (err) {
+        existing = { email }
+      }
+      const user = {
+        _id: id,
+        ...existing,
+        roles: roles || {},
+        tenantId: TENANT_ID,
+      }
+      await createASession(id, {
+        sessionId: "sessionid",
+        tenantId: TENANT_ID,
+        csrfToken: CSRF_TOKEN,
+      })
+      if (builder) {
+        user.builder = { global: true }
+      } else {
+        user.builder = { global: false }
+      }
+      const resp = await db.put(user)
+      return {
+        _rev: resp._rev,
+        ...user,
+      }
     })
-    if (builder) {
-      user.builder = { global: true }
-    } else {
-      user.builder = { global: false }
-    }
-    const resp = await db.put(user)
-    return {
-      _rev: resp._rev,
-      ...user,
-    }
   }
 
   // use a new id as the name to avoid name collisions

@@ -2,7 +2,7 @@ const { budibaseTempDir } = require("../budibaseDir")
 const fs = require("fs")
 const { join } = require("path")
 const uuid = require("uuid/v4")
-const { getDB } = require("@budibase/backend-core/db")
+const { doWithDB } = require("@budibase/backend-core/db")
 const { ObjectStoreBuckets } = require("../../constants")
 const {
   upload,

@@ -151,41 +151,41 @@ exports.streamBackup = async appId => {
  * @return {*} either a readable stream or a string
  */
 exports.exportDB = async (dbName, { stream, filter, exportName } = {}) => {
-  const instanceDb = getDB(dbName)
-
-  // Stream the dump if required
-  if (stream) {
-    const memStream = new MemoryStream()
-    instanceDb.dump(memStream, { filter })
-    return memStream
-  }
-
-  // Write the dump to file if required
-  if (exportName) {
-    const path = join(budibaseTempDir(), exportName)
-    const writeStream = fs.createWriteStream(path)
-    await instanceDb.dump(writeStream, { filter })
-
-    // Upload the dump to the object store if self hosted
-    if (env.SELF_HOSTED) {
-      await streamUpload(
-        ObjectStoreBuckets.BACKUPS,
-        join(dbName, exportName),
-        fs.createReadStream(path)
-      )
-    }
-
-    return fs.createReadStream(path)
-  }
-
-  // Stringify the dump in memory if required
-  const memStream = new MemoryStream()
-  let appString = ""
-  memStream.on("data", chunk => {
-    appString += chunk.toString()
-  })
-  await instanceDb.dump(memStream, { filter })
-  return appString
-}
+  return doWithDB(dbName, async db => {
+    // Stream the dump if required
+    if (stream) {
+      const memStream = new MemoryStream()
+      db.dump(memStream, { filter })
+      return memStream
+    }
+
+    // Write the dump to file if required
+    if (exportName) {
+      const path = join(budibaseTempDir(), exportName)
+      const writeStream = fs.createWriteStream(path)
+      await db.dump(writeStream, { filter })
+
+      // Upload the dump to the object store if self hosted
+      if (env.SELF_HOSTED) {
+        await streamUpload(
+          ObjectStoreBuckets.BACKUPS,
+          join(dbName, exportName),
+          fs.createReadStream(path)
+        )
+      }
+
+      return fs.createReadStream(path)
+    }
+
+    // Stringify the dump in memory if required
+    const memStream = new MemoryStream()
+    let appString = ""
+    memStream.on("data", chunk => {
+      appString += chunk.toString()
+    })
+    await db.dump(memStream, { filter })
+    return appString
+  })
+}
 
 /**

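The rewritten exportDB keeps its three output modes (a raw dump stream, a named export written to the temp directory and uploaded when self hosted, or the whole dump stringified in memory), all now resolved from inside the closure. A hedged usage sketch (the import path is assumed):

const { exportDB } = require("../utilities/fileSystem") // path assumed

async function demo(dbName) {
  // stream mode: resolves to a MemoryStream that can be piped onwards
  const dump = await exportDB(dbName, { stream: true })
  dump.pipe(process.stdout)

  // file mode: writes <tmp>/<exportName> and resolves to a read stream of that file
  const fileStream = await exportDB(dbName, { exportName: "backup.txt" })

  // default mode: resolves to the dump as a single string
  const asString = await exportDB(dbName)
  return { fileStream, length: asString.length }
}
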
@@ -2,7 +2,7 @@ const { getRowParams, USER_METDATA_PREFIX } = require("../../db/utils")
 const {
   isDevAppID,
   getDevelopmentAppID,
-  getDB,
+  doWithDB,
 } = require("@budibase/backend-core/db")
 
 const ROW_EXCLUSIONS = [USER_METDATA_PREFIX]

@@ -27,22 +27,23 @@ const getAppPairs = appIds => {
 
 const getAppRows = async appId => {
   // need to specify the app ID, as this is used for different apps in one call
-  const appDb = getDB(appId)
-  const response = await appDb.allDocs(
-    getRowParams(null, null, {
-      include_docs: false,
-    })
-  )
-  return response.rows
-    .map(r => r.id)
-    .filter(id => {
-      for (let exclusion of ROW_EXCLUSIONS) {
-        if (id.startsWith(exclusion)) {
-          return false
-        }
-      }
-      return true
-    })
+  return doWithDB(appId, async db => {
+    const response = await db.allDocs(
+      getRowParams(null, null, {
+        include_docs: false,
+      })
+    )
+    return response.rows
+      .map(r => r.id)
+      .filter(id => {
+        for (let exclusion of ROW_EXCLUSIONS) {
+          if (id.startsWith(exclusion)) {
+            return false
+          }
+        }
+        return true
+      })
+  })
 }
 
 /**

@@ -15,6 +15,7 @@ const { invalidateSessions } = require("@budibase/backend-core/sessions")
 const accounts = require("@budibase/backend-core/accounts")
 const {
   getGlobalDB,
+  doWithGlobalDB,
   getTenantId,
   getTenantUser,
   doesTenantExist,

@@ -51,26 +52,27 @@ exports.adminUser = async ctx => {
     ctx.throw(403, "Organisation already exists.")
   }
 
-  const db = getGlobalDB(tenantId)
-  const response = await db.allDocs(
-    getGlobalUserParams(null, {
-      include_docs: true,
-    })
-  )
-
-  // write usage quotas for cloud
-  if (!env.SELF_HOSTED) {
-    // could be a scenario where it exists, make sure its clean
-    try {
-      const usageQuota = await db.get(StaticDatabases.GLOBAL.docs.usageQuota)
-      if (usageQuota) {
-        await db.remove(usageQuota._id, usageQuota._rev)
-      }
-    } catch (err) {
-      // don't worry about errors
-    }
-    await db.put(generateNewUsageQuotaDoc())
-  }
+  const response = await doWithGlobalDB(tenantId, async db => {
+    const response = await db.allDocs(
+      getGlobalUserParams(null, {
+        include_docs: true,
+      })
+    )
+    // write usage quotas for cloud
+    if (!env.SELF_HOSTED) {
+      // could be a scenario where it exists, make sure its clean
+      try {
+        const usageQuota = await db.get(StaticDatabases.GLOBAL.docs.usageQuota)
+        if (usageQuota) {
+          await db.remove(usageQuota._id, usageQuota._rev)
+        }
+      } catch (err) {
+        // don't worry about errors
+      }
+      await db.put(generateNewUsageQuotaDoc())
+    }
+    return response
+  })
 
   if (response.rows.some(row => row.doc.admin)) {
     ctx.throw(

@@ -1,36 +1,42 @@
-const { StaticDatabases, getDB } = require("@budibase/backend-core/db")
+const { StaticDatabases, doWithDB } = require("@budibase/backend-core/db")
 const { getTenantId } = require("@budibase/backend-core/tenancy")
 const { deleteTenant } = require("@budibase/backend-core/deprovision")
 
 exports.exists = async ctx => {
   const tenantId = ctx.request.params
-  const db = getDB(StaticDatabases.PLATFORM_INFO.name)
-  let exists = false
-  try {
-    const tenantsDoc = await db.get(StaticDatabases.PLATFORM_INFO.docs.tenants)
-    if (tenantsDoc) {
-      exists = tenantsDoc.tenantIds.indexOf(tenantId) !== -1
-    }
-  } catch (err) {
-    // if error it doesn't exist
-  }
   ctx.body = {
-    exists,
+    exists: await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
+      let exists = false
+      try {
+        const tenantsDoc = await db.get(
+          StaticDatabases.PLATFORM_INFO.docs.tenants
+        )
+        if (tenantsDoc) {
+          exists = tenantsDoc.tenantIds.indexOf(tenantId) !== -1
+        }
+      } catch (err) {
+        // if error it doesn't exist
+      }
+      return exists
+    }),
   }
 }
 
 exports.fetch = async ctx => {
-  const db = getDB(StaticDatabases.PLATFORM_INFO.name)
-  let tenants = []
-  try {
-    const tenantsDoc = await db.get(StaticDatabases.PLATFORM_INFO.docs.tenants)
-    if (tenantsDoc) {
-      tenants = tenantsDoc.tenantIds
-    }
-  } catch (err) {
-    // if error it doesn't exist
-  }
-  ctx.body = tenants
+  ctx.body = await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
+    let tenants = []
+    try {
+      const tenantsDoc = await db.get(
+        StaticDatabases.PLATFORM_INFO.docs.tenants
+      )
+      if (tenantsDoc) {
+        tenants = tenantsDoc.tenantIds
+      }
+    } catch (err) {
+      // if error it doesn't exist
+    }
+    return tenants
+  })
 }
 
 exports.delete = async ctx => {

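Both handlers above now build ctx.body directly from whatever the closure resolves to. The same tenant lookup as a standalone helper, reusing only names that appear in this diff:

const { doWithDB, StaticDatabases } = require("@budibase/backend-core/db")

// check whether a tenant ID is registered in the platform info DB
async function tenantExists(tenantId) {
  return doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
    try {
      const doc = await db.get(StaticDatabases.PLATFORM_INFO.docs.tenants)
      return doc.tenantIds.indexOf(tenantId) !== -1
    } catch (err) {
      // a missing tenants doc just means nothing has been registered yet
      return false
    }
  })
}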