Merge pull request #4276 from Budibase/lab-day/refactor-app-db
Refactoring app DB usage - one DB load per request
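In rough terms, the refactor stores the request's app ID on async (CLS) context via middleware, and code pulls the app database from @budibase/backend-core/context instead of constructing new CouchDB(appId) at every call site, so each request resolves its DB handle once and reuses it. A minimal usage sketch (the app ID below is made up purely for illustration):

    const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")

    // before: every controller/helper built its own handle with new CouchDB(ctx.appId)
    // after: anything running inside the request context asks for the shared handle
    doInAppContext("app_dev_c0ffee", async () => {
      const db = getAppDB() // resolved once, then cached on the request context
      const docs = await db.allDocs({ include_docs: true })
      console.log(docs.rows.length)
    })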
commit 6107f1472c

@@ -0,0 +1,17 @@
const {
  getAppDB,
  getDevAppDB,
  getProdAppDB,
  getAppId,
  updateAppId,
  doInAppContext,
} = require("./src/context")

module.exports = {
  getAppDB,
  getDevAppDB,
  getProdAppDB,
  getAppId,
  updateAppId,
  doInAppContext,
}

@@ -1,5 +1,6 @@
module.exports = {
  ...require("./src/db/utils"),
  ...require("./src/db/constants"),
  ...require("./src/db"),
  ...require("./src/db/views"),
}

@@ -1 +1 @@
-module.exports = require("./src/tenancy/deprovision")
+module.exports = require("./src/context/deprovision")
@@ -4,8 +4,8 @@ const { newid } = require("../hashing")
const REQUEST_ID_KEY = "requestId"

class FunctionContext {
-  static getMiddleware(updateCtxFn = null) {
-    const namespace = this.createNamespace()
+  static getMiddleware(updateCtxFn = null, contextName = "session") {
+    const namespace = this.createNamespace(contextName)

    return async function (ctx, next) {
      await new Promise(

@@ -24,14 +24,14 @@ class FunctionContext {
      }
  }

-  static run(callback) {
-    const namespace = this.createNamespace()
+  static run(callback, contextName = "session") {
+    const namespace = this.createNamespace(contextName)

    return namespace.runAndReturn(callback)
  }

-  static setOnContext(key, value) {
-    const namespace = this.createNamespace()
+  static setOnContext(key, value, contextName = "session") {
+    const namespace = this.createNamespace(contextName)
    namespace.set(key, value)
  }

@@ -55,16 +55,16 @@ class FunctionContext {
    }
  }

-  static destroyNamespace() {
+  static destroyNamespace(name = "session") {
    if (this._namespace) {
-      cls.destroyNamespace("session")
+      cls.destroyNamespace(name)
      this._namespace = null
    }
  }

-  static createNamespace() {
+  static createNamespace(name = "session") {
    if (!this._namespace) {
-      this._namespace = cls.createNamespace("session")
+      this._namespace = cls.createNamespace(name)
    }
    return this._namespace
  }

@@ -1,6 +1,6 @@
const { getGlobalUserParams, getAllApps } = require("../db/utils")
const { getDB, getCouch } = require("../db")
-const { getGlobalDB } = require("./tenancy")
+const { getGlobalDB } = require("../tenancy")
const { StaticDatabases } = require("../db/constants")

const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
@@ -0,0 +1,195 @@
const env = require("../environment")
const { Headers } = require("../../constants")
const cls = require("./FunctionContext")
const { getCouch } = require("../db")
const { getProdAppID, getDevelopmentAppID } = require("../db/conversions")
const { isEqual } = require("lodash")

// some test cases call functions directly, need to
// store an app ID to pretend there is a context
let TEST_APP_ID = null

const ContextKeys = {
  TENANT_ID: "tenantId",
  APP_ID: "appId",
  // whatever the request app DB was
  CURRENT_DB: "currentDb",
  // get the prod app DB from the request
  PROD_DB: "prodDb",
  // get the dev app DB from the request
  DEV_DB: "devDb",
  DB_OPTS: "dbOpts",
}

exports.DEFAULT_TENANT_ID = "default"

exports.isDefaultTenant = () => {
  return exports.getTenantId() === exports.DEFAULT_TENANT_ID
}

exports.isMultiTenant = () => {
  return env.MULTI_TENANCY
}

// used for automations, API endpoints should always be in context already
exports.doInTenant = (tenantId, task) => {
  return cls.run(() => {
    // set the tenant id
    cls.setOnContext(ContextKeys.TENANT_ID, tenantId)

    // invoke the task
    return task()
  })
}

exports.doInAppContext = (appId, task) => {
  return cls.run(() => {
    // set the app ID
    cls.setOnContext(ContextKeys.APP_ID, appId)

    // invoke the task
    return task()
  })
}

exports.updateTenantId = tenantId => {
  cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
}

exports.updateAppId = appId => {
  try {
    cls.setOnContext(ContextKeys.APP_ID, appId)
    cls.setOnContext(ContextKeys.PROD_DB, null)
    cls.setOnContext(ContextKeys.DEV_DB, null)
    cls.setOnContext(ContextKeys.CURRENT_DB, null)
    cls.setOnContext(ContextKeys.DB_OPTS, null)
  } catch (err) {
    if (env.isTest()) {
      TEST_APP_ID = appId
    } else {
      throw err
    }
  }
}

exports.setTenantId = (
  ctx,
  opts = { allowQs: false, allowNoTenant: false }
) => {
  let tenantId
  // exit early if not multi-tenant
  if (!exports.isMultiTenant()) {
    cls.setOnContext(ContextKeys.TENANT_ID, this.DEFAULT_TENANT_ID)
    return
  }

  const allowQs = opts && opts.allowQs
  const allowNoTenant = opts && opts.allowNoTenant
  const header = ctx.request.headers[Headers.TENANT_ID]
  const user = ctx.user || {}
  if (allowQs) {
    const query = ctx.request.query || {}
    tenantId = query.tenantId
  }
  // override query string (if allowed) by user, or header
  // URL params cannot be used in a middleware, as they are
  // processed later in the chain
  tenantId = user.tenantId || header || tenantId

  // Set the tenantId from the subdomain
  if (!tenantId) {
    tenantId = ctx.subdomains && ctx.subdomains[0]
  }

  if (!tenantId && !allowNoTenant) {
    ctx.throw(403, "Tenant id not set")
  }
  // check tenant ID just incase no tenant was allowed
  if (tenantId) {
    cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
  }
}

exports.isTenantIdSet = () => {
  const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
  return !!tenantId
}

exports.getTenantId = () => {
  if (!exports.isMultiTenant()) {
    return exports.DEFAULT_TENANT_ID
  }
  const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
  if (!tenantId) {
    throw Error("Tenant id not found")
  }
  return tenantId
}

exports.getAppId = () => {
  const foundId = cls.getFromContext(ContextKeys.APP_ID)
  if (!foundId && env.isTest() && TEST_APP_ID) {
    return TEST_APP_ID
  } else {
    return foundId
  }
}

function getDB(key, opts) {
  const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
  let storedOpts = cls.getFromContext(dbOptsKey)
  let db = cls.getFromContext(key)
  if (db && isEqual(opts, storedOpts)) {
    return db
  }
  const appId = exports.getAppId()
  const CouchDB = getCouch()
  let toUseAppId
  switch (key) {
    case ContextKeys.CURRENT_DB:
      toUseAppId = appId
      break
    case ContextKeys.PROD_DB:
      toUseAppId = getProdAppID(appId)
      break
    case ContextKeys.DEV_DB:
      toUseAppId = getDevelopmentAppID(appId)
      break
  }
  db = new CouchDB(toUseAppId, opts)
  try {
    cls.setOnContext(key, db)
    if (opts) {
      cls.setOnContext(dbOptsKey, opts)
    }
  } catch (err) {
    if (!env.isTest()) {
      throw err
    }
  }
  return db
}

/**
 * Opens the app database based on whatever the request
 * contained, dev or prod.
 */
exports.getAppDB = opts => {
  return getDB(ContextKeys.CURRENT_DB, opts)
}

/**
 * This specifically gets the prod app ID, if the request
 * contained a development app ID, this will open the prod one.
 */
exports.getProdAppDB = opts => {
  return getDB(ContextKeys.PROD_DB, opts)
}

/**
 * This specifically gets the dev app ID, if the request
 * contained a prod app ID, this will open the dev one.
 */
exports.getDevAppDB = opts => {
  return getDB(ContextKeys.DEV_DB, opts)
}
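To make the distinction between the three getters above concrete, here is a small usage sketch (the IDs are hypothetical; "app_dev_" and "app_" are the prefixes defined in the db constants):

    const { doInAppContext, getAppDB, getProdAppDB, getDevAppDB } = require("@budibase/backend-core/context")

    doInAppContext("app_dev_96a1b2c3", () => {
      getAppDB()     // DB for "app_dev_96a1b2c3" - whatever ID the request carried
      getProdAppDB() // DB for "app_96a1b2c3"     - always the deployed copy
      getDevAppDB()  // DB for "app_dev_96a1b2c3" - always the development copy
    })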
@@ -32,3 +32,7 @@ exports.StaticDatabases = {
    },
  },
}
+
+exports.APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR
+exports.APP_DEV = exports.APP_DEV_PREFIX =
+  exports.DocumentTypes.APP_DEV + exports.SEPARATOR
@@ -0,0 +1,46 @@
const NO_APP_ERROR = "No app provided"
const { APP_DEV_PREFIX, APP_PREFIX } = require("./constants")

exports.isDevAppID = appId => {
  if (!appId) {
    throw NO_APP_ERROR
  }
  return appId.startsWith(APP_DEV_PREFIX)
}

exports.isProdAppID = appId => {
  if (!appId) {
    throw NO_APP_ERROR
  }
  return appId.startsWith(APP_PREFIX) && !exports.isDevAppID(appId)
}

exports.isDevApp = app => {
  if (!app) {
    throw NO_APP_ERROR
  }
  return exports.isDevAppID(app.appId)
}

/**
 * Convert a development app ID to a deployed app ID.
 */
exports.getProdAppID = appId => {
  // if dev, convert it
  if (appId.startsWith(APP_DEV_PREFIX)) {
    const id = appId.split(APP_DEV_PREFIX)[1]
    return `${APP_PREFIX}${id}`
  }
  return appId
}

/**
 * Convert a deployed app ID to a development app ID.
 */
exports.getDevelopmentAppID = appId => {
  if (!appId.startsWith(APP_DEV_PREFIX)) {
    const id = appId.split(APP_PREFIX)[1]
    return `${APP_DEV_PREFIX}${id}`
  }
  return appId
}
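The conversion helpers added above are pure string transforms on the ID prefixes; a quick sketch of their behaviour (IDs hypothetical):

    const { getProdAppID, getDevelopmentAppID, isDevAppID, isProdAppID } = require("./conversions")

    getProdAppID("app_dev_96a1b2c3")        // -> "app_96a1b2c3"
    getDevelopmentAppID("app_96a1b2c3")     // -> "app_dev_96a1b2c3"
    isDevAppID("app_dev_96a1b2c3")          // -> true
    isProdAppID("app_96a1b2c3")             // -> true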
@@ -2,7 +2,13 @@ const { newid } = require("../hashing")
const Replication = require("./Replication")
const { DEFAULT_TENANT_ID, Configs } = require("../constants")
const env = require("../environment")
-const { StaticDatabases, SEPARATOR, DocumentTypes } = require("./constants")
+const {
+  StaticDatabases,
+  SEPARATOR,
+  DocumentTypes,
+  APP_PREFIX,
+  APP_DEV,
+} = require("./constants")
const {
  getTenantId,
  getTenantIDFromAppID,

@@ -12,8 +18,13 @@ const fetch = require("node-fetch")
const { getCouch } = require("./index")
const { getAppMetadata } = require("../cache/appMetadata")
const { checkSlashesInUrl } = require("../helpers")

-const NO_APP_ERROR = "No app provided"
+const {
+  isDevApp,
+  isProdAppID,
+  isDevAppID,
+  getDevelopmentAppID,
+  getProdAppID,
+} = require("./conversions")

const UNICODE_MAX = "\ufff0"

@@ -24,10 +35,15 @@ exports.ViewNames = {
exports.StaticDatabases = StaticDatabases

exports.DocumentTypes = DocumentTypes
-exports.APP_PREFIX = DocumentTypes.APP + SEPARATOR
-exports.APP_DEV = exports.APP_DEV_PREFIX = DocumentTypes.APP_DEV + SEPARATOR
+exports.APP_PREFIX = APP_PREFIX
+exports.APP_DEV = exports.APP_DEV_PREFIX = APP_DEV
exports.SEPARATOR = SEPARATOR
exports.getTenantIDFromAppID = getTenantIDFromAppID
+exports.isDevApp = isDevApp
+exports.isProdAppID = isProdAppID
+exports.isDevAppID = isDevAppID
+exports.getDevelopmentAppID = getDevelopmentAppID
+exports.getProdAppID = getProdAppID

/**
 * If creating DB allDocs/query params with only a single top level ID this can be used, this

@@ -52,27 +68,6 @@ function getDocParams(docType, docId = null, otherProps = {}) {
  }
}

-exports.isDevAppID = appId => {
-  if (!appId) {
-    throw NO_APP_ERROR
-  }
-  return appId.startsWith(exports.APP_DEV_PREFIX)
-}
-
-exports.isProdAppID = appId => {
-  if (!appId) {
-    throw NO_APP_ERROR
-  }
-  return appId.startsWith(exports.APP_PREFIX) && !exports.isDevAppID(appId)
-}
-
-function isDevApp(app) {
-  if (!app) {
-    throw NO_APP_ERROR
-  }
-  return exports.isDevAppID(app.appId)
-}
-
/**
 * Generates a new workspace ID.
 * @returns {string} The new workspace ID which the workspace doc can be stored under.

@@ -157,29 +152,6 @@ exports.getRoleParams = (roleId = null, otherProps = {}) => {
  return getDocParams(DocumentTypes.ROLE, roleId, otherProps)
}

-/**
- * Convert a development app ID to a deployed app ID.
- */
-exports.getDeployedAppID = appId => {
-  // if dev, convert it
-  if (appId.startsWith(exports.APP_DEV_PREFIX)) {
-    const id = appId.split(exports.APP_DEV_PREFIX)[1]
-    return `${exports.APP_PREFIX}${id}`
-  }
-  return appId
-}
-
-/**
- * Convert a deployed app ID to a development app ID.
- */
-exports.getDevelopmentAppID = appId => {
-  if (!appId.startsWith(exports.APP_DEV_PREFIX)) {
-    const id = appId.split(exports.APP_PREFIX)[1]
-    return `${exports.APP_DEV_PREFIX}${id}`
-  }
-  return appId
-}
-
exports.getCouchUrl = () => {
  if (!env.COUCH_DB_URL) return

@@ -225,7 +197,7 @@ exports.getAllDbs = async () => {
  }
  let couchUrl = `${exports.getCouchUrl()}/_all_dbs`
  let tenantId = getTenantId()
-  if (!env.MULTI_TENANCY || tenantId == DEFAULT_TENANT_ID) {
+  if (!env.MULTI_TENANCY || tenantId === DEFAULT_TENANT_ID) {
    // just get all DBs when:
    // - single tenancy
    // - default tenant

@@ -250,11 +222,10 @@ exports.getAllDbs = async () => {
/**
 * Lots of different points in the system need to find the full list of apps, this will
 * enumerate the entire CouchDB cluster and get the list of databases (every app).
 * NOTE: this operation is fine in self hosting, but cannot be used when hosting many
 * different users/companies apps as there is no security around it - all apps are returned.
 * @return {Promise<object[]>} returns the app information document stored in each app database.
 */
-exports.getAllApps = async (CouchDB, { dev, all, idsOnly } = {}) => {
+exports.getAllApps = async ({ dev, all, idsOnly } = {}) => {
+  const CouchDB = getCouch()
  let tenantId = getTenantId()
  if (!env.MULTI_TENANCY && !tenantId) {
    tenantId = DEFAULT_TENANT_ID

@@ -310,8 +281,8 @@ exports.getAllApps = async (CouchDB, { dev, all, idsOnly } = {}) => {
/**
 * Utility function for getAllApps but filters to production apps only.
 */
-exports.getDeployedAppIDs = async CouchDB => {
-  return (await exports.getAllApps(CouchDB, { idsOnly: true })).filter(
+exports.getProdAppIDs = async () => {
+  return (await exports.getAllApps({ idsOnly: true })).filter(
    id => !exports.isDevAppID(id)
  )
}

@@ -319,13 +290,14 @@ exports.getDeployedAppIDs = async CouchDB =>
/**
 * Utility function for the inverse of above.
 */
-exports.getDevAppIDs = async CouchDB => {
-  return (await exports.getAllApps(CouchDB, { idsOnly: true })).filter(id =>
+exports.getDevAppIDs = async () => {
+  return (await exports.getAllApps({ idsOnly: true })).filter(id =>
    exports.isDevAppID(id)
  )
}

-exports.dbExists = async (CouchDB, dbName) => {
+exports.dbExists = async dbName => {
+  const CouchDB = getCouch()
  let exists = false
  try {
    const db = CouchDB(dbName, { skip_setup: true })
@@ -3,8 +3,9 @@ const {
  updateTenantId,
  isTenantIdSet,
  DEFAULT_TENANT_ID,
+  updateAppId,
} = require("../tenancy")
-const ContextFactory = require("../tenancy/FunctionContext")
+const ContextFactory = require("../context/FunctionContext")
const { getTenantIDFromAppID } = require("../db/utils")

module.exports = () => {

@@ -21,5 +22,6 @@ module.exports = () => {
    const appId = ctx.appId ? ctx.appId : ctx.user ? ctx.user.appId : null
    const tenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID
    updateTenantId(tenantId)
+    updateAppId(appId)
  })
}

@@ -1,5 +1,5 @@
const { setTenantId } = require("../tenancy")
-const ContextFactory = require("../tenancy/FunctionContext")
+const ContextFactory = require("../context/FunctionContext")
const { buildMatcherRegex, matches } = require("./matchers")

module.exports = (
@@ -1,4 +1,3 @@
-const { getDB } = require("../db")
const { cloneDeep } = require("lodash/fp")
const { BUILTIN_PERMISSION_IDS } = require("./permissions")
const {

@@ -7,6 +6,8 @@ const {
  DocumentTypes,
  SEPARATOR,
} = require("../db/utils")
+const { getAppDB } = require("../context")
+const { getDB } = require("../db")

const BUILTIN_IDS = {
  ADMIN: "ADMIN",

@@ -111,11 +112,10 @@ exports.lowerBuiltinRoleID = (roleId1, roleId2) => {
/**
 * Gets the role object, this is mainly useful for two purposes, to check if the level exists and
 * to check if the role inherits any others.
- * @param {string} appId The app in which to look for the role.
 * @param {string|null} roleId The level ID to lookup.
 * @returns {Promise<Role|object|null>} The role object, which may contain an "inherits" property.
 */
-exports.getRole = async (appId, roleId) => {
+exports.getRole = async roleId => {
  if (!roleId) {
    return null
  }

@@ -128,7 +128,7 @@ exports.getRole = async (appId, roleId) => {
    )
  }
  try {
-    const db = getDB(appId)
+    const db = getAppDB()
    const dbRole = await db.get(exports.getDBRoleID(roleId))
    role = Object.assign(role, dbRole)
    // finalise the ID

@@ -145,11 +145,11 @@ exports.getRole = async (appId, roleId) => {
/**
 * Simple function to get all the roles based on the top level user role ID.
 */
-async function getAllUserRoles(appId, userRoleId) {
+async function getAllUserRoles(userRoleId) {
  if (!userRoleId) {
    return [BUILTIN_IDS.BASIC]
  }
-  let currentRole = await exports.getRole(appId, userRoleId)
+  let currentRole = await exports.getRole(userRoleId)
  let roles = currentRole ? [currentRole] : []
  let roleIds = [userRoleId]
  // get all the inherited roles

@@ -159,7 +159,7 @@ async function getAllUserRoles(appId, userRoleId) {
    roleIds.indexOf(currentRole.inherits) === -1
  ) {
    roleIds.push(currentRole.inherits)
-    currentRole = await exports.getRole(appId, currentRole.inherits)
+    currentRole = await exports.getRole(currentRole.inherits)
    roles.push(currentRole)
  }
  return roles

@@ -168,29 +168,23 @@ async function getAllUserRoles(appId, userRoleId) {
/**
 * Returns an ordered array of the user's inherited role IDs, this can be used
 * to determine if a user can access something that requires a specific role.
- * @param {string} appId The ID of the application from which roles should be obtained.
 * @param {string} userRoleId The user's role ID, this can be found in their access token.
 * @param {object} opts Various options, such as whether to only retrieve the IDs (default true).
 * @returns {Promise<string[]>} returns an ordered array of the roles, with the first being their
 * highest level of access and the last being the lowest level.
 */
-exports.getUserRoleHierarchy = async (
-  appId,
-  userRoleId,
-  opts = { idOnly: true }
-) => {
+exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => {
  // special case, if they don't have a role then they are a public user
-  const roles = await getAllUserRoles(appId, userRoleId)
+  const roles = await getAllUserRoles(userRoleId)
  return opts.idOnly ? roles.map(role => role._id) : roles
}

/**
 * Given an app ID this will retrieve all of the roles that are currently within that app.
 * @param {string} appId The ID of the app to retrieve the roles from.
 * @return {Promise<object[]>} An array of the role objects that were found.
 */
exports.getAllRoles = async appId => {
-  const db = getDB(appId)
+  const db = appId ? getDB(appId) : getAppDB()
  const body = await db.allDocs(
    getRoleParams(null, {
      include_docs: true,

@@ -218,19 +212,17 @@
}

/**
- * This retrieves the required role/
- * @param appId
+ * This retrieves the required role
 * @param permLevel
 * @param resourceId
 * @param subResourceId
 * @return {Promise<{permissions}|Object>}
 */
exports.getRequiredResourceRole = async (
-  appId,
  permLevel,
  { resourceId, subResourceId }
) => {
-  const roles = await exports.getAllRoles(appId)
+  const roles = await exports.getAllRoles()
  let main = [],
    sub = []
  for (let role of roles) {

@@ -251,8 +243,7 @@ exports.getRequiredResourceRole = async (
  }
}

class AccessController {
-  constructor(appId) {
-    this.appId = appId
+  constructor() {
    this.userHierarchies = {}
  }

@@ -270,7 +261,7 @@ class AccessController {
    }
    let roleIds = this.userHierarchies[userRoleId]
    if (!roleIds) {
-      roleIds = await exports.getUserRoleHierarchy(this.appId, userRoleId)
+      roleIds = await exports.getUserRoleHierarchy(userRoleId)
      this.userHierarchies[userRoleId] = roleIds
    }
@@ -1,84 +0,0 @@
const env = require("../environment")
const { Headers } = require("../../constants")
const cls = require("./FunctionContext")

exports.DEFAULT_TENANT_ID = "default"

exports.isDefaultTenant = () => {
  return exports.getTenantId() === exports.DEFAULT_TENANT_ID
}

exports.isMultiTenant = () => {
  return env.MULTI_TENANCY
}

const TENANT_ID = "tenantId"

// used for automations, API endpoints should always be in context already
exports.doInTenant = (tenantId, task) => {
  return cls.run(() => {
    // set the tenant id
    cls.setOnContext(TENANT_ID, tenantId)

    // invoke the task
    return task()
  })
}

exports.updateTenantId = tenantId => {
  cls.setOnContext(TENANT_ID, tenantId)
}

exports.setTenantId = (
  ctx,
  opts = { allowQs: false, allowNoTenant: false }
) => {
  let tenantId
  // exit early if not multi-tenant
  if (!exports.isMultiTenant()) {
    cls.setOnContext(TENANT_ID, this.DEFAULT_TENANT_ID)
    return
  }

  const allowQs = opts && opts.allowQs
  const allowNoTenant = opts && opts.allowNoTenant
  const header = ctx.request.headers[Headers.TENANT_ID]
  const user = ctx.user || {}
  if (allowQs) {
    const query = ctx.request.query || {}
    tenantId = query.tenantId
  }
  // override query string (if allowed) by user, or header
  // URL params cannot be used in a middleware, as they are
  // processed later in the chain
  tenantId = user.tenantId || header || tenantId

  // Set the tenantId from the subdomain
  if (!tenantId) {
    tenantId = ctx.subdomains && ctx.subdomains[0]
  }

  if (!tenantId && !allowNoTenant) {
    ctx.throw(403, "Tenant id not set")
  }
  // check tenant ID just incase no tenant was allowed
  if (tenantId) {
    cls.setOnContext(TENANT_ID, tenantId)
  }
}

exports.isTenantIdSet = () => {
  const tenantId = cls.getFromContext(TENANT_ID)
  return !!tenantId
}

exports.getTenantId = () => {
  if (!exports.isMultiTenant()) {
    return exports.DEFAULT_TENANT_ID
  }
  const tenantId = cls.getFromContext(TENANT_ID)
  if (!tenantId) {
    throw Error("Tenant id not found")
  }
  return tenantId
}
@@ -1,4 +1,4 @@
module.exports = {
-  ...require("./context"),
+  ...require("../context"),
  ...require("./tenancy"),
}

@@ -1,6 +1,6 @@
const { getDB } = require("../db")
const { SEPARATOR, StaticDatabases, DocumentTypes } = require("../db/constants")
-const { getTenantId, DEFAULT_TENANT_ID, isMultiTenant } = require("./context")
+const { getTenantId, DEFAULT_TENANT_ID, isMultiTenant } = require("../context")
const env = require("../environment")

const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
@@ -3,9 +3,6 @@ const path = require("path")

const tmpdir = path.join(require("os").tmpdir(), ".budibase")

// these run on ports we don't normally use so that they can run alongside the
const fs = require("fs")

// normal development system
const WORKER_PORT = "10002"
const MAIN_PORT = cypressConfig.env.PORT

@@ -29,22 +26,20 @@ process.env.ALLOW_DEV_AUTOMATIONS = 1
// Stop info logs polluting test outputs
process.env.LOG_LEVEL = "error"

-async function run() {
+exports.run = (
+  serverLoc = "../../server/dist",
+  workerLoc = "../../worker/dist"
+) => {
  // require("dotenv").config({ path: resolve(dir, ".env") })
  if (!fs.existsSync("../server/dist")) {
    console.error("Unable to run cypress, need to build server first")
    process.exit(-1)
  }

  // don't make this a variable or top level require
  // it will cause environment module to be loaded prematurely
-  const server = require("../../server/dist/app")
+  require(serverLoc)
  process.env.PORT = WORKER_PORT
-  const worker = require("../../worker/dist/index")
+  require(workerLoc)
  // reload main port for rest of system
  process.env.PORT = MAIN_PORT
  server.on("close", () => console.log("Server Closed"))
  worker.on("close", () => console.log("Worker Closed"))
}

-run()
+if (require.main === module) {
+  exports.run()
+}
@@ -0,0 +1,4 @@
// @ts-ignore
import { run } from "../setup"

run("../../server/src/index", "../../worker/src/index")

@@ -11,12 +11,13 @@
    "dev:builder": "routify -c dev:vite",
    "dev:vite": "vite --host 0.0.0.0",
    "rollup": "rollup -c -w",
-    "cy:setup": "node ./cypress/setup.js",
+    "cy:setup": "ts-node ./cypress/ts/setup.ts",
+    "cy:setup:ci": "node ./cypress/setup.js",
    "cy:run": "cypress run",
    "cy:open": "cypress open",
    "cy:run:ci": "cypress run --record",
    "cy:test": "start-server-and-test cy:setup http://localhost:10001/builder cy:run",
-    "cy:ci": "start-server-and-test cy:setup http://localhost:10001/builder cy:run",
+    "cy:ci": "start-server-and-test cy:setup:ci http://localhost:10001/builder cy:run",
    "cy:debug": "start-server-and-test cy:setup http://localhost:10001/builder cy:open"
  },
  "jest": {

@@ -106,6 +107,8 @@
    "start-server-and-test": "^1.12.1",
    "svelte": "^3.38.2",
    "svelte-jester": "^1.3.2",
+    "ts-node": "^10.4.0",
+    "typescript": "^4.5.5",
    "vite": "^2.1.5"
  },
  "gitHead": "115189f72a850bfb52b65ec61d932531bf327072"
@@ -0,0 +1,23 @@
{
  "compilerOptions": {
    "target": "es6",
    "module": "commonjs",
    "lib": ["es2019"],
    "allowJs": true,
    "outDir": "dist",
    "strict": true,
    "noImplicitAny": true,
    "esModuleInterop": true,
    "resolveJsonModule": true,
    "incremental": true
  },
  "include": [
    "./src/**/*"
  ],
  "exclude": [
    "node_modules",
    "**/*.json",
    "**/*.spec.ts",
    "**/*.spec.js"
  ]
}
@@ -970,10 +970,10 @@
    svelte-flatpickr "^3.2.3"
    svelte-portal "^1.0.0"

-"@budibase/bbui@^1.0.46", "@budibase/bbui@^1.0.46-alpha.3":
-  version "1.0.46"
-  resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.46.tgz#7306d4eda7f2c827577a4affa1fd314b38ba1198"
-  integrity sha512-padm0qq2SBNIslXEQW+HIv32pkIHFzloR93FDzSXh0sO43Q+/d2gbAhjI9ZUSAVncx9JNc46dolL1CwrvHFElg==
+"@budibase/bbui@^1.0.46-alpha.6", "@budibase/bbui@^1.0.47":
+  version "1.0.47"
+  resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.47.tgz#df2848b89f881fe603e7156855d6a6c31d4f58bf"
+  integrity sha512-RRm/BgK5aSx2/vGjMGljw240/48Ksc3/h4yB1nhQj8Xx3fKhlGnWDvWNy+sakvA6+fJvEXuti8RoxHtQ6lXmqA==
  dependencies:
    "@adobe/spectrum-css-workflow-icons" "^1.2.1"
    "@spectrum-css/actionbutton" "^1.0.1"

@@ -1020,14 +1020,14 @@
    svelte-flatpickr "^3.2.3"
    svelte-portal "^1.0.0"

-"@budibase/client@^1.0.46-alpha.3":
-  version "1.0.46"
-  resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.46.tgz#e6ef8945b9d7046b6e6d6761628aa1d85387acca"
-  integrity sha512-jI3z1G/EsfJNCQCvrqzsR4vR1zLoVefzCXCEASIPg9BPzdiAFSwuUJVLijLFIIKfuDVeveUll94fgu7XNY8U2w==
+"@budibase/client@^1.0.46-alpha.6":
+  version "1.0.47"
+  resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.47.tgz#ce9e2fbd300e5dc389ea29a3a3347897f096c824"
+  integrity sha512-jB/al8v+nY/VLc6sH5Jt9JzWONVo+24/cI95iXlZSV5xwiKIVGj4+2F5QjKZ0c9Gm7SrrfP2T571N+4XaXNCGg==
  dependencies:
-    "@budibase/bbui" "^1.0.46"
+    "@budibase/bbui" "^1.0.47"
    "@budibase/standard-components" "^0.9.139"
-    "@budibase/string-templates" "^1.0.46"
+    "@budibase/string-templates" "^1.0.47"
    regexparam "^1.3.0"
    shortid "^2.2.15"
    svelte-spa-router "^3.0.5"

@@ -1082,10 +1082,10 @@
    svelte-apexcharts "^1.0.2"
    svelte-flatpickr "^3.1.0"

-"@budibase/string-templates@^1.0.46", "@budibase/string-templates@^1.0.46-alpha.3":
-  version "1.0.46"
-  resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.46.tgz#5beef1687b451e4512a465b4e143c8ab46234006"
-  integrity sha512-t4ZAUkSz2XatjAN0faex5ovmD3mFz672lV/aBk7tfLFzZiKlWjngqdwpLLQNnsqeGvYo75JP2J06j86SX6O83w==
+"@budibase/string-templates@^1.0.46-alpha.6", "@budibase/string-templates@^1.0.47":
+  version "1.0.47"
+  resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.47.tgz#626b9fc4542c7b36a0ae24e820d25a704c527bec"
+  integrity sha512-87BUfOPr8FGKH8Pt88jhKNGT9PcOmkLRCeen4xi1dI113pAQznBO9vgV+cXOChUBBEQka9Rrt85LMJXidiwVgg==
  dependencies:
    "@budibase/handlebars-helpers" "^0.11.7"
    dayjs "^1.10.4"

@@ -1102,6 +1102,18 @@
    exec-sh "^0.3.2"
    minimist "^1.2.0"

+"@cspotcode/source-map-consumer@0.8.0":
+  version "0.8.0"
+  resolved "https://registry.yarnpkg.com/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz#33bf4b7b39c178821606f669bbc447a6a629786b"
+  integrity sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg==
+
+"@cspotcode/source-map-support@0.7.0":
+  version "0.7.0"
+  resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz#4789840aa859e46d2f3173727ab707c66bf344f5"
+  integrity sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA==
+  dependencies:
+    "@cspotcode/source-map-consumer" "0.8.0"
+
"@cypress/listr-verbose-renderer@^0.4.1":
  version "0.4.1"
  resolved "https://registry.yarnpkg.com/@cypress/listr-verbose-renderer/-/listr-verbose-renderer-0.4.1.tgz#a77492f4b11dcc7c446a34b3e28721afd33c642a"

@@ -1795,6 +1807,26 @@
  resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82"
  integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==

+"@tsconfig/node10@^1.0.7":
+  version "1.0.8"
+  resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.8.tgz#c1e4e80d6f964fbecb3359c43bd48b40f7cadad9"
+  integrity sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg==
+
+"@tsconfig/node12@^1.0.7":
+  version "1.0.9"
+  resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.9.tgz#62c1f6dee2ebd9aead80dc3afa56810e58e1a04c"
+  integrity sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw==
+
+"@tsconfig/node14@^1.0.0":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.1.tgz#95f2d167ffb9b8d2068b0b235302fafd4df711f2"
+  integrity sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg==
+
+"@tsconfig/node16@^1.0.2":
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.2.tgz#423c77877d0569db20e1fc80885ac4118314010e"
+  integrity sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==
+
"@types/aria-query@^4.2.0":
  version "4.2.2"
  resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc"

@@ -1971,6 +2003,11 @@ acorn-walk@^7.1.1:
  resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc"
  integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==

+acorn-walk@^8.1.1:
+  version "8.2.0"
+  resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1"
+  integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==
+
acorn@^7.1.1:
  version "7.4.1"
  resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"

@@ -1981,6 +2018,11 @@ acorn@^8.2.4:
  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.5.0.tgz#4512ccb99b3698c752591e9bb4472e38ad43cee2"
  integrity sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==

+acorn@^8.4.1:
+  version "8.7.0"
+  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf"
+  integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==
+
agent-base@6:
  version "6.0.2"
  resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77"

@@ -2087,6 +2129,11 @@ arch@^2.1.2:
  resolved "https://registry.yarnpkg.com/arch/-/arch-2.2.0.tgz#1bc47818f305764f23ab3306b0bfc086c5a29d11"
  integrity sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==

+arg@^4.1.0:
+  version "4.1.3"
+  resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
+  integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
+
argparse@^1.0.10, argparse@^1.0.7:
  version "1.0.10"
  resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"

@@ -2720,6 +2767,11 @@ core-util-is@~1.0.0:
  resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
  integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==

+create-require@^1.1.0:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333"
+  integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==
+
cross-spawn@^6.0.0:
  version "6.0.5"
  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"

@@ -2965,6 +3017,11 @@ diff-sequences@^27.0.6:
  resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.0.6.tgz#3305cb2e55a033924054695cc66019fd7f8e5723"
  integrity sha512-ag6wfpBFyNXZ0p8pcuIDS//D8H062ZQJ3fzYxjpmeKjnz8W4pekL3AI8VohmyZmsWW2PWaHgjsmqR6L13101VQ==

+diff@^4.0.1:
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
+  integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
+
dir-glob@^3.0.1:
  version "3.0.1"
  resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f"

@@ -5004,6 +5061,11 @@ make-dir@^3.0.0:
  dependencies:
    semver "^6.0.0"

+make-error@^1.1.1:
+  version "1.3.6"
+  resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
+  integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
+
makeerror@1.0.12:
  version "1.0.12"
  resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a"

@@ -6587,6 +6649,24 @@ tr46@~0.0.3:
  resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
  integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=

+ts-node@^10.4.0:
+  version "10.4.0"
+  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.4.0.tgz#680f88945885f4e6cf450e7f0d6223dd404895f7"
+  integrity sha512-g0FlPvvCXSIO1JDF6S232P5jPYqBkRL9qly81ZgAOSU7rwI0stphCgd2kLiCrU9DjQCrJMWEqcNSjQL02s6d8A==
+  dependencies:
+    "@cspotcode/source-map-support" "0.7.0"
+    "@tsconfig/node10" "^1.0.7"
+    "@tsconfig/node12" "^1.0.7"
+    "@tsconfig/node14" "^1.0.0"
+    "@tsconfig/node16" "^1.0.2"
+    acorn "^8.4.1"
+    acorn-walk "^8.1.1"
+    arg "^4.1.0"
+    create-require "^1.1.0"
+    diff "^4.0.1"
+    make-error "^1.1.1"
+    yn "3.1.1"
+
tslib@^1.9.0, tslib@^1.9.3:
  version "1.14.1"
  resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"

@@ -6655,6 +6735,11 @@ typeof-article@^0.1.1:
  dependencies:
    kind-of "^3.1.0"

+typescript@^4.5.5:
+  version "4.5.5"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3"
+  integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==
+
uglify-js@^3.1.4:
  version "3.14.5"
  resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.14.5.tgz#cdabb7d4954231d80cb4a927654c4655e51f4859"

@@ -7011,6 +7096,11 @@ year@^0.2.1:
  resolved "https://registry.yarnpkg.com/year/-/year-0.2.1.tgz#4083ae520a318b23ec86037f3000cb892bdf9bb0"
  integrity sha1-QIOuUgoxiyPshgN/MADLiSvfm7A=

+yn@3.1.1:
+  version "3.1.1"
+  resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
+  integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==
+
yup@0.29.2:
  version "0.29.2"
  resolved "https://registry.yarnpkg.com/yup/-/yup-0.29.2.tgz#5302abd9024cca335b987793f8df868e410b7b67"
@@ -1,4 +1,3 @@
-const CouchDB = require("../../db")
const env = require("../../environment")
const packageJson = require("../../../package.json")
const {

@@ -29,7 +28,7 @@ const { processObject } = require("@budibase/string-templates")
const {
  getAllApps,
  isDevAppID,
-  getDeployedAppID,
+  getProdAppID,
  Replication,
} = require("@budibase/backend-core/db")
const { USERS_TABLE_SCHEMA } = require("../../constants")

@@ -45,11 +44,17 @@ const { getTenantId, isMultiTenant } = require("@budibase/backend-core/tenancy")
const { syncGlobalUsers } = require("./user")
const { app: appCache } = require("@budibase/backend-core/cache")
const { cleanupAutomations } = require("../../automations/utils")
+const {
+  getAppDB,
+  getProdAppDB,
+  updateAppId,
+} = require("@budibase/backend-core/context")

const URL_REGEX_SLASH = /\/|\\/g

// utility function, need to do away with this
-async function getLayouts(db) {
+async function getLayouts() {
+  const db = getAppDB()
  return (
    await db.allDocs(
      getLayoutParams(null, {

@@ -59,7 +64,8 @@ async function getLayouts(db) {
  ).rows.map(row => row.doc)
}

-async function getScreens(db) {
+async function getScreens() {
+  const db = getAppDB()
  return (
    await db.allDocs(
      getScreenParams(null, {

@@ -117,8 +123,9 @@ async function createInstance(template) {
  const tenantId = isMultiTenant() ? getTenantId() : null
  const baseAppId = generateAppID(tenantId)
  const appId = generateDevAppID(baseAppId)
+  updateAppId(appId)

-  const db = new CouchDB(appId)
+  const db = getAppDB()
  await db.put({
    _id: "_design/database",
    // view collation information, read before writing any complex views:

@@ -128,9 +135,9 @@

  // NOTE: indexes need to be created before any tables/templates
  // add view for linked rows
-  await createLinkView(appId)
-  await createRoutingView(appId)
-  await createAllSearchIndex(appId)
+  await createLinkView()
+  await createRoutingView()
+  await createAllSearchIndex()

  // replicate the template data to the instance DB
  // this is currently very hard to test, downloading and importing template files

@@ -156,7 +163,7 @@
exports.fetch = async ctx => {
  const dev = ctx.query && ctx.query.status === AppStatus.DEV
  const all = ctx.query && ctx.query.status === AppStatus.ALL
-  const apps = await getAllApps(CouchDB, { dev, all })
+  const apps = await getAllApps({ dev, all })

  // get the locks for all the dev apps
  if (dev || all) {

@@ -179,12 +186,11 @@ exports.fetch = async ctx => {
}

exports.fetchAppDefinition = async ctx => {
-  const db = new CouchDB(ctx.params.appId)
-  const layouts = await getLayouts(db)
+  const layouts = await getLayouts()
  const userRoleId = getUserRoleId(ctx)
-  const accessController = new AccessController(ctx.params.appId)
+  const accessController = new AccessController()
  const screens = await accessController.checkScreensAccess(
-    await getScreens(db),
+    await getScreens(),
    userRoleId
  )
  ctx.body = {

@@ -195,15 +201,15 @@
}

exports.fetchAppPackage = async ctx => {
-  const db = new CouchDB(ctx.params.appId)
+  const db = getAppDB()
  const application = await db.get(DocumentTypes.APP_METADATA)
-  const layouts = await getLayouts(db)
-  let screens = await getScreens(db)
+  const layouts = await getLayouts()
+  let screens = await getScreens()

  // Only filter screens if the user is not a builder
  if (!(ctx.user.builder && ctx.user.builder.global)) {
    const userRoleId = getUserRoleId(ctx)
-    const accessController = new AccessController(ctx.params.appId)
+    const accessController = new AccessController()
    screens = await accessController.checkScreensAccess(screens, userRoleId)
  }

@@ -216,7 +222,7 @@
}

exports.create = async ctx => {
-  const apps = await getAllApps(CouchDB, { dev: true })
+  const apps = await getAllApps({ dev: true })
  const name = ctx.request.body.name
  checkAppName(ctx, apps, name)
  const url = exports.getAppUrl(ctx)

@@ -234,7 +240,7 @@ exports.create = async ctx => {
  const instance = await createInstance(instanceConfig)
  const appId = instance._id

-  const db = new CouchDB(appId)
+  const db = getAppDB()
  let _rev
  try {
    // if template there will be an existing doc

@@ -280,7 +286,7 @@ exports.create = async ctx => {
// This endpoint currently operates as a PATCH rather than a PUT
// Thus name and url fields are handled only if present
exports.update = async ctx => {
-  const apps = await getAllApps(CouchDB, { dev: true })
+  const apps = await getAllApps({ dev: true })
  // validation
  const name = ctx.request.body.name
  if (name) {

@@ -299,7 +305,7 @@ exports.update = async ctx => {

exports.updateClient = async ctx => {
  // Get current app version
-  const db = new CouchDB(ctx.params.appId)
+  const db = getAppDB()
  const application = await db.get(DocumentTypes.APP_METADATA)
  const currentVersion = application.version

@@ -321,7 +327,7 @@ exports.updateClient = async ctx => {

exports.revertClient = async ctx => {
  // Check app can be reverted
-  const db = new CouchDB(ctx.params.appId)
+  const db = getAppDB()
  const application = await db.get(DocumentTypes.APP_METADATA)
  if (!application.revertableVersion) {
    ctx.throw(400, "There is no version to revert to")

@@ -343,7 +349,7 @@ exports.revertClient = async ctx => {
}

exports.delete = async ctx => {
-  const db = new CouchDB(ctx.params.appId)
+  const db = getAppDB()

  const result = await db.destroy()
  /* istanbul ignore next */

@@ -368,10 +374,11 @@ exports.sync = async (ctx, next) => {
  }

  // replicate prod to dev
-  const prodAppId = getDeployedAppID(appId)
+  const prodAppId = getProdAppID(appId)

  try {
-    const prodDb = new CouchDB(prodAppId, { skip_setup: true })
+    // specific case, want to make sure setup is skipped
+    const prodDb = getProdAppDB({ skip_setup: true })
    const info = await prodDb.info()
    if (info.error) throw info.error
  } catch (err) {

@@ -399,7 +406,7 @@ exports.sync = async (ctx, next) => {
  }

  // sync the users
-  await syncGlobalUsers(appId)
+  await syncGlobalUsers()

  if (error) {
    ctx.throw(400, error)

@@ -411,7 +418,7 @@ exports.sync = async (ctx, next) => {
}

const updateAppPackage = async (appPackage, appId) => {
-  const db = new CouchDB(appId)
+  const db = getAppDB()
  const application = await db.get(DocumentTypes.APP_METADATA)

  const newAppPackage = { ...application, ...appPackage }

@@ -430,7 +437,7 @@ const updateAppPackage = async (appPackage, appId) => {
}

const createEmptyAppPackage = async (ctx, app) => {
-  const db = new CouchDB(app.appId)
+  const db = getAppDB()

  let screensAndLayouts = []
  for (let layout of BASE_LAYOUTS) {
@@ -1,11 +1,10 @@
-const CouchDB = require("../../db")
const { outputProcessing } = require("../../utilities/rowProcessor")
const { InternalTables } = require("../../db/utils")
const { getFullUser } = require("../../utilities/users")
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
+const { getAppDB, getAppId } = require("@budibase/backend-core/context")

exports.fetchSelf = async ctx => {
-  const appId = ctx.appId
  let userId = ctx.user.userId || ctx.user._id
  /* istanbul ignore next */
  if (!userId) {

@@ -19,8 +18,8 @@ exports.fetchSelf = async ctx => {
  // forward the csrf token from the session
  user.csrfToken = ctx.user.csrfToken

-  if (appId) {
-    const db = new CouchDB(appId)
+  if (getAppId()) {
+    const db = getAppDB()
    // remove the full roles structure
    delete user.roles
    try {

@@ -29,7 +28,7 @@ exports.fetchSelf = async ctx => {
      // make sure there is never a stale csrf token
      delete metadata.csrfToken
      // specifically needs to make sure is enriched
-      ctx.body = await outputProcessing(ctx, userTable, {
+      ctx.body = await outputProcessing(userTable, {
        ...user,
        ...metadata,
      })
@@ -1,4 +1,3 @@
-const CouchDB = require("../../db")
const actions = require("../../automations/actions")
const triggers = require("../../automations/triggers")
const { getAutomationParams, generateAutomationID } = require("../../db/utils")

@@ -10,6 +9,7 @@ const {
const { deleteEntityMetadata } = require("../../utilities")
const { MetadataTypes } = require("../../constants")
const { setTestFlag, clearTestFlag } = require("../../utilities/redis")
+const { getAppDB } = require("@budibase/backend-core/context")

const ACTION_DEFS = removeDeprecated(actions.ACTION_DEFINITIONS)
const TRIGGER_DEFS = removeDeprecated(triggers.TRIGGER_DEFINITIONS)

@@ -20,14 +20,9 @@ const TRIGGER_DEFS = removeDeprecated(triggers.TRIGGER_DEFINITIONS)
 * *
 *************************/

-async function cleanupAutomationMetadata(appId, automationId) {
+async function cleanupAutomationMetadata(automationId) {
+  await deleteEntityMetadata(MetadataTypes.AUTOMATION_TEST_INPUT, automationId)
-  await deleteEntityMetadata(
-    appId,
-    MetadataTypes.AUTOMATION_TEST_INPUT,
-    automationId
-  )
  await deleteEntityMetadata(
-    appId,
    MetadataTypes.AUTOMATION_TEST_HISTORY,
    automationId
  )

@@ -58,7 +53,7 @@ function cleanAutomationInputs(automation) {
}

exports.create = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  let automation = ctx.request.body
  automation.appId = ctx.appId

@@ -72,7 +67,6 @@ exports.create = async function (ctx) {
  automation.type = "automation"
  automation = cleanAutomationInputs(automation)
  automation = await checkForWebhooks({
-    appId: ctx.appId,
    newAuto: automation,
  })
  const response = await db.put(automation)

@@ -89,13 +83,12 @@ exports.create = async function (ctx) {
}

exports.update = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  let automation = ctx.request.body
  automation.appId = ctx.appId
  const oldAutomation = await db.get(automation._id)
  automation = cleanAutomationInputs(automation)
  automation = await checkForWebhooks({
-    appId: ctx.appId,
    oldAuto: oldAutomation,
    newAuto: automation,
  })

@@ -131,7 +124,7 @@ exports.update = async function (ctx) {
}

exports.fetch = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  const response = await db.allDocs(
    getAutomationParams(null, {
      include_docs: true,

@@ -141,20 +134,19 @@ exports.fetch = async function (ctx) {
}

exports.find = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  ctx.body = await db.get(ctx.params.id)
}

exports.destroy = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  const automationId = ctx.params.id
  const oldAutomation = await db.get(automationId)
  await checkForWebhooks({
-    appId: ctx.appId,
    oldAuto: oldAutomation,
  })
  // delete metadata first
-  await cleanupAutomationMetadata(ctx.appId, automationId)
+  await cleanupAutomationMetadata(automationId)
  ctx.body = await db.remove(automationId, ctx.params.rev)
}

@@ -180,12 +172,11 @@ module.exports.getDefinitionList = async function (ctx) {
 *********************/

exports.trigger = async function (ctx) {
-  const appId = ctx.appId
-  const db = new CouchDB(appId)
+  const db = getAppDB()
  let automation = await db.get(ctx.params.id)
  await triggers.externalTrigger(automation, {
    ...ctx.request.body,
-    appId,
+    appId: ctx.appId,
  })
  ctx.body = {
    message: `Automation ${automation._id} has been triggered.`,

@@ -205,8 +196,7 @@ function prepareTestInput(input) {
}

exports.test = async function (ctx) {
-  const appId = ctx.appId
-  const db = new CouchDB(appId)
+  const db = getAppDB()
  let automation = await db.get(ctx.params.id)
  await setTestFlag(automation._id)
  const testInput = prepareTestInput(ctx.request.body)

@@ -214,7 +204,7 @@ exports.test = async function (ctx) {
    automation,
    {
      ...testInput,
-      appId,
+      appId: ctx.appId,
    },
    { getResponses: true }
  )
@@ -1,6 +1,5 @@
const env = require("../../environment")
const { getAllApps } = require("@budibase/backend-core/db")
-const CouchDB = require("../../db")
const {
  exportDB,
  sendTempFile,

@@ -30,7 +29,7 @@ exports.exportApps = async ctx => {
  if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
    ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
  }
-  const apps = await getAllApps(CouchDB, { all: true })
+  const apps = await getAllApps({ all: true })
  const globalDBString = await exportDB(getGlobalDBName(), {
    filter: doc => !doc._id.startsWith(DocumentTypes.USER),
  })

@@ -63,7 +62,7 @@ async function hasBeenImported() {
  if (!env.SELF_HOSTED || env.MULTI_TENANCY) {
    return true
  }
-  const apps = await getAllApps(CouchDB, { all: true })
+  const apps = await getAllApps({ all: true })
  return apps.length !== 0
}
@@ -1,15 +1,14 @@
-const CouchDB = require("../../db")
const { DocumentTypes } = require("../../db/utils")
const { getComponentLibraryManifest } = require("../../utilities/fileSystem")
+const { getAppDB } = require("@budibase/backend-core/context")

exports.fetchAppComponentDefinitions = async function (ctx) {
  const appId = ctx.params.appId || ctx.appId
-  const db = new CouchDB(appId)
+  const db = getAppDB()
  const app = await db.get(DocumentTypes.APP_METADATA)

  let componentManifests = await Promise.all(
    app.componentLibraries.map(async library => {
-      let manifest = await getComponentLibraryManifest(appId, library)
+      let manifest = await getComponentLibraryManifest(library)

      return {
        manifest,
@@ -1,4 +1,3 @@
-const CouchDB = require("../../db")
const {
  generateDatasourceID,
  getDatasourceParams,

@@ -11,12 +10,11 @@
const { BuildSchemaErrors, InvalidColumns } = require("../../constants")
const { integrations } = require("../../integrations")
const { getDatasourceAndQuery } = require("./row/utils")
const { invalidateDynamicVariables } = require("../../threads/utils")
+const { getAppDB } = require("@budibase/backend-core/context")

exports.fetch = async function (ctx) {
-  const database = new CouchDB(ctx.appId)

  // Get internal tables
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  const internalTables = await db.allDocs(
    getTableParams(null, {
      include_docs: true,

@@ -31,7 +29,7 @@ exports.fetch = async function (ctx) {

  // Get external datasources
  const datasources = (
-    await database.allDocs(
+    await db.allDocs(
      getDatasourceParams(null, {
        include_docs: true,
      })

@@ -49,7 +47,7 @@ exports.fetch = async function (ctx) {
}

exports.buildSchemaFromDb = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  const datasource = await db.get(ctx.params.datasourceId)

  const { tables, error } = await buildSchemaHelper(datasource)

@@ -98,7 +96,7 @@ const invalidateVariables = async (existingDatasource, updatedDatasource) => {
}

exports.update = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  const datasourceId = ctx.params.datasourceId
  let datasource = await db.get(datasourceId)
  const auth = datasource.config.auth

@@ -126,7 +124,7 @@ exports.update = async function (ctx) {
}

exports.save = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()
  const plus = ctx.request.body.datasource.plus
  const fetchSchema = ctx.request.body.fetchSchema

@@ -162,7 +160,7 @@ exports.save = async function (ctx) {
}

exports.destroy = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
+  const db = getAppDB()

  // Delete all queries for the datasource
  const queries = await db.allDocs(

@@ -184,7 +182,7 @@ exports.destroy = async function (ctx) {
}

exports.find = async function (ctx) {
-  const database = new CouchDB(ctx.appId)
+  const database = getAppDB()
  ctx.body = await database.get(ctx.params.datasourceId)
}

@@ -192,7 +190,7 @@ exports.find = async function (ctx) {
exports.query = async function (ctx) {
  const queryJson = ctx.request.body
  try {
-    ctx.body = await getDatasourceAndQuery(ctx.appId, queryJson)
+    ctx.body = await getDatasourceAndQuery(queryJson)
  } catch (err) {
    ctx.throw(400, err)
  }
@ -1,18 +1,14 @@
|
|||
const newid = require("../../../db/newid")
|
||||
const { getAppId } = require("@budibase/backend-core/context")
|
||||
|
||||
/**
|
||||
* This is used to pass around information about the deployment that is occurring
|
||||
*/
|
||||
class Deployment {
|
||||
constructor(appId, id = null) {
|
||||
this.appId = appId
|
||||
constructor(id = null) {
|
||||
this._id = id || newid()
|
||||
}
|
||||
|
||||
getAppId() {
|
||||
return this.appId
|
||||
}
|
||||
|
||||
setVerification(verification) {
|
||||
if (!verification) {
|
||||
return
|
||||
|
@ -43,7 +39,7 @@ class Deployment {
|
|||
getJSON() {
|
||||
const obj = {
|
||||
_id: this._id,
|
||||
appId: this.appId,
|
||||
appId: getAppId(),
|
||||
status: this.status,
|
||||
}
|
||||
if (this.err) {
|
||||
|
|
|
@ -1,12 +1,20 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const Deployment = require("./Deployment")
|
||||
const { Replication, getDeployedAppID } = require("@budibase/backend-core/db")
|
||||
const {
|
||||
Replication,
|
||||
getProdAppID,
|
||||
getDevelopmentAppID,
|
||||
} = require("@budibase/backend-core/db")
|
||||
const { DocumentTypes, getAutomationParams } = require("../../../db/utils")
|
||||
const {
|
||||
disableAllCrons,
|
||||
enableCronTrigger,
|
||||
} = require("../../../automations/utils")
|
||||
const { app: appCache } = require("@budibase/backend-core/cache")
|
||||
const {
|
||||
getAppId,
|
||||
getAppDB,
|
||||
getProdAppDB,
|
||||
} = require("@budibase/backend-core/context")
|
||||
|
||||
// the max time we can wait for an invalidation to complete before considering it failed
|
||||
const MAX_PENDING_TIME_MS = 30 * 60000
|
||||
|
@ -34,9 +42,8 @@ async function checkAllDeployments(deployments) {
|
|||
}
|
||||
|
||||
async function storeDeploymentHistory(deployment) {
|
||||
const appId = deployment.getAppId()
|
||||
const deploymentJSON = deployment.getJSON()
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
|
||||
let deploymentDoc
|
||||
try {
|
||||
|
@ -64,7 +71,7 @@ async function storeDeploymentHistory(deployment) {
|
|||
}
|
||||
|
||||
async function initDeployedApp(prodAppId) {
|
||||
const db = new CouchDB(prodAppId)
|
||||
const db = getProdAppDB()
|
||||
console.log("Reading automation docs")
|
||||
const automations = (
|
||||
await db.allDocs(
|
||||
|
@ -88,10 +95,12 @@ async function initDeployedApp(prodAppId) {
|
|||
|
||||
async function deployApp(deployment) {
|
||||
try {
|
||||
const productionAppId = getDeployedAppID(deployment.appId)
|
||||
const appId = getAppId()
|
||||
const devAppId = getDevelopmentAppID(appId)
|
||||
const productionAppId = getProdAppID(appId)
|
||||
|
||||
const replication = new Replication({
|
||||
source: deployment.appId,
|
||||
source: devAppId,
|
||||
target: productionAppId,
|
||||
})
|
||||
|
||||
|
@ -99,7 +108,7 @@ async function deployApp(deployment) {
|
|||
|
||||
await replication.replicate()
|
||||
console.log("replication complete.. replacing app meta doc")
|
||||
const db = new CouchDB(productionAppId)
|
||||
const db = getProdAppDB()
|
||||
const appDoc = await db.get(DocumentTypes.APP_METADATA)
|
||||
appDoc.appId = productionAppId
|
||||
appDoc.instance._id = productionAppId
|
||||
|
@ -122,8 +131,7 @@ async function deployApp(deployment) {
|
|||
|
||||
exports.fetchDeployments = async function (ctx) {
|
||||
try {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
const { updated, deployments } = await checkAllDeployments(
|
||||
deploymentDoc,
|
||||
|
@ -140,8 +148,7 @@ exports.fetchDeployments = async function (ctx) {
|
|||
|
||||
exports.deploymentProgress = async function (ctx) {
|
||||
try {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
ctx.body = deploymentDoc[ctx.params.deploymentId]
|
||||
} catch (err) {
|
||||
|
@ -153,7 +160,7 @@ exports.deploymentProgress = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.deployApp = async function (ctx) {
|
||||
let deployment = new Deployment(ctx.appId)
|
||||
let deployment = new Deployment()
|
||||
console.log("Deployment object created")
|
||||
deployment.setStatus(DeploymentStatus.PENDING)
|
||||
console.log("Deployment object set to pending")
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
const fetch = require("node-fetch")
|
||||
const CouchDB = require("../../db")
|
||||
const env = require("../../environment")
|
||||
const { checkSlashesInUrl } = require("../../utilities")
|
||||
const { request } = require("../../utilities/workerRequests")
|
||||
const { clearLock } = require("../../utilities/redis")
|
||||
const { Replication } = require("@budibase/backend-core/db")
|
||||
const { Replication, getProdAppID } = require("@budibase/backend-core/db")
|
||||
const { DocumentTypes } = require("../../db/utils")
|
||||
const { app: appCache } = require("@budibase/backend-core/cache")
|
||||
const { getProdAppDB, getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
async function redirect(ctx, method, path = "global") {
|
||||
const { devPath } = ctx.params
|
||||
|
@ -77,11 +77,11 @@ exports.clearLock = async ctx => {
|
|||
|
||||
exports.revert = async ctx => {
|
||||
const { appId } = ctx.params
|
||||
const productionAppId = appId.replace("_dev", "")
|
||||
const productionAppId = getProdAppID(appId)
|
||||
|
||||
// App must have been deployed first
|
||||
try {
|
||||
const db = new CouchDB(productionAppId, { skip_setup: true })
|
||||
const db = getProdAppDB({ skip_setup: true })
|
||||
const info = await db.info()
|
||||
if (info.error) throw info.error
|
||||
const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
|
||||
|
@ -103,7 +103,7 @@ exports.revert = async ctx => {
|
|||
|
||||
await replication.rollback()
|
||||
// update appID in reverted app to be dev version again
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const appDoc = await db.get(DocumentTypes.APP_METADATA)
|
||||
appDoc.appId = appId
|
||||
appDoc.instance._id = appId
|
||||
|
|
|
@ -2,11 +2,11 @@ const {
|
|||
EMPTY_LAYOUT,
|
||||
BASE_LAYOUT_PROP_IDS,
|
||||
} = require("../../constants/layouts")
|
||||
const CouchDB = require("../../db")
|
||||
const { generateLayoutID, getScreenParams } = require("../../db/utils")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
let layout = ctx.request.body
|
||||
|
||||
if (!layout.props) {
|
||||
|
@ -26,7 +26,7 @@ exports.save = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const layoutId = ctx.params.layoutId,
|
||||
layoutRev = ctx.params.layoutRev
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
const { MetadataTypes } = require("../../constants")
|
||||
const CouchDB = require("../../db")
|
||||
const { generateMetadataID } = require("../../db/utils")
|
||||
const { saveEntityMetadata, deleteEntityMetadata } = require("../../utilities")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
exports.getTypes = async ctx => {
|
||||
ctx.body = {
|
||||
|
@ -14,17 +14,12 @@ exports.saveMetadata = async ctx => {
|
|||
if (type === MetadataTypes.AUTOMATION_TEST_HISTORY) {
|
||||
ctx.throw(400, "Cannot save automation history type")
|
||||
}
|
||||
ctx.body = await saveEntityMetadata(
|
||||
ctx.appId,
|
||||
type,
|
||||
entityId,
|
||||
ctx.request.body
|
||||
)
|
||||
ctx.body = await saveEntityMetadata(type, entityId, ctx.request.body)
|
||||
}
|
||||
|
||||
exports.deleteMetadata = async ctx => {
|
||||
const { type, entityId } = ctx.params
|
||||
await deleteEntityMetadata(ctx.appId, type, entityId)
|
||||
await deleteEntityMetadata(type, entityId)
|
||||
ctx.body = {
|
||||
message: "Metadata deleted successfully",
|
||||
}
|
||||
|
@ -32,7 +27,7 @@ exports.deleteMetadata = async ctx => {
|
|||
|
||||
exports.getMetadata = async ctx => {
|
||||
const { type, entityId } = ctx.params
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const id = generateMetadataID(type, entityId)
|
||||
try {
|
||||
ctx.body = await db.get(id)
|
||||
|
|
|
@ -6,12 +6,12 @@ const {
|
|||
getBuiltinRoles,
|
||||
} = require("@budibase/backend-core/roles")
|
||||
const { getRoleParams } = require("../../db/utils")
|
||||
const CouchDB = require("../../db")
|
||||
const {
|
||||
CURRENTLY_SUPPORTED_LEVELS,
|
||||
getBasePermissions,
|
||||
} = require("../../utilities/security")
|
||||
const { removeFromArray } = require("../../utilities")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
const PermissionUpdateType = {
|
||||
REMOVE: "remove",
|
||||
|
@ -35,7 +35,7 @@ async function updatePermissionOnRole(
|
|||
{ roleId, resourceId, level },
|
||||
updateType
|
||||
) {
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const remove = updateType === PermissionUpdateType.REMOVE
|
||||
const isABuiltin = isBuiltin(roleId)
|
||||
const dbRoleId = getDBRoleID(roleId)
|
||||
|
@ -106,7 +106,7 @@ exports.fetchLevels = function (ctx) {
|
|||
}
|
||||
|
||||
exports.fetch = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const roles = await getAllDBRoles(db)
|
||||
let permissions = {}
|
||||
// create an object with structure role ID -> resource ID -> level
|
||||
|
@ -133,7 +133,7 @@ exports.fetch = async function (ctx) {
|
|||
|
||||
exports.getResourcePerms = async function (ctx) {
|
||||
const resourceId = ctx.params.resourceId
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const body = await db.allDocs(
|
||||
getRoleParams(null, {
|
||||
include_docs: true,
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
import CouchDB from "../../../../db"
|
||||
import { queryValidation } from "../validation"
|
||||
import { generateQueryID } from "../../../../db/utils"
|
||||
import { ImportInfo, ImportSource } from "./sources/base"
|
||||
import { OpenAPI2 } from "./sources/openapi2"
|
||||
import { Query } from "./../../../../definitions/common"
|
||||
import { Curl } from "./sources/curl"
|
||||
// @ts-ignore
|
||||
import { getAppDB } from "@budibase/backend-core/context"
|
||||
interface ImportResult {
|
||||
errorQueries: Query[]
|
||||
queries: Query[]
|
||||
|
@ -33,10 +34,7 @@ export class RestImporter {
|
|||
return this.source.getInfo()
|
||||
}
|
||||
|
||||
importQueries = async (
|
||||
appId: string,
|
||||
datasourceId: string
|
||||
): Promise<ImportResult> => {
|
||||
importQueries = async (datasourceId: string): Promise<ImportResult> => {
|
||||
// constuct the queries
|
||||
let queries = await this.source.getQueries(datasourceId)
|
||||
|
||||
|
@ -58,7 +56,7 @@ export class RestImporter {
|
|||
})
|
||||
|
||||
// persist queries
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const response = await db.bulkDocs(queries)
|
||||
|
||||
// create index to seperate queries and errors
|
||||
|
|
|
@ -6,6 +6,7 @@ const db = jest.fn(() => {
|
|||
}
|
||||
})
|
||||
jest.mock("../../../../../db", () => db)
|
||||
require("@budibase/backend-core").init(require("../../../../../db"))
|
||||
|
||||
const { RestImporter } = require("../index")
|
||||
|
||||
|
@ -77,7 +78,7 @@ describe("Rest Importer", () => {
|
|||
const testImportQueries = async (key, data, assertions) => {
|
||||
await init(data)
|
||||
bulkDocs.mockReturnValue([])
|
||||
const importResult = await restImporter.importQueries("appId", "datasourceId")
|
||||
const importResult = await restImporter.importQueries("datasourceId")
|
||||
expect(importResult.errorQueries.length).toBe(0)
|
||||
expect(importResult.queries.length).toBe(assertions[key].count)
|
||||
expect(bulkDocs).toHaveBeenCalledTimes(1)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const {
|
||||
generateQueryID,
|
||||
getQueryParams,
|
||||
|
@ -10,6 +9,7 @@ const { save: saveDatasource } = require("../datasource")
|
|||
const { RestImporter } = require("./import")
|
||||
const { invalidateDynamicVariables } = require("../../../threads/utils")
|
||||
const environment = require("../../../environment")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
const Runner = new Thread(ThreadType.QUERY, {
|
||||
timeoutMs: environment.QUERY_THREAD_TIMEOUT || 10000,
|
||||
|
@ -28,7 +28,7 @@ function enrichQueries(input) {
|
|||
}
|
||||
|
||||
exports.fetch = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
|
||||
const body = await db.allDocs(
|
||||
getQueryParams(null, {
|
||||
|
@ -69,7 +69,7 @@ exports.import = async ctx => {
|
|||
datasourceId = body.datasourceId
|
||||
}
|
||||
|
||||
const importResult = await importer.importQueries(ctx.appId, datasourceId)
|
||||
const importResult = await importer.importQueries(datasourceId)
|
||||
|
||||
ctx.body = {
|
||||
...importResult,
|
||||
|
@ -79,7 +79,7 @@ exports.import = async ctx => {
|
|||
}
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const query = ctx.request.body
|
||||
|
||||
if (!query._id) {
|
||||
|
@ -94,7 +94,7 @@ exports.save = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.find = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const query = enrichQueries(await db.get(ctx.params.queryId))
|
||||
// remove properties that could be dangerous in real app
|
||||
if (isProdAppID(ctx.appId)) {
|
||||
|
@ -105,7 +105,7 @@ exports.find = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.preview = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
|
||||
const datasource = await db.get(ctx.request.body.datasourceId)
|
||||
// preview may not have a queryId as it hasn't been saved, but if it does
|
||||
|
@ -136,7 +136,7 @@ exports.preview = async function (ctx) {
|
|||
}
|
||||
|
||||
async function execute(ctx, opts = { rowsOnly: false }) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
|
||||
const query = await db.get(ctx.params.queryId)
|
||||
const datasource = await db.get(query.datasourceId)
|
||||
|
@ -181,7 +181,8 @@ exports.executeV2 = async function (ctx) {
|
|||
return execute(ctx, { rowsOnly: false })
|
||||
}
|
||||
|
||||
const removeDynamicVariables = async (db, queryId) => {
|
||||
const removeDynamicVariables = async queryId => {
|
||||
const db = getAppDB()
|
||||
const query = await db.get(queryId)
|
||||
const datasource = await db.get(query.datasourceId)
|
||||
const dynamicVariables = datasource.config.dynamicVariables
|
||||
|
@ -202,8 +203,8 @@ const removeDynamicVariables = async (db, queryId) => {
|
|||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
await removeDynamicVariables(db, ctx.params.queryId)
|
||||
const db = getAppDB()
|
||||
await removeDynamicVariables(ctx.params.queryId)
|
||||
await db.remove(ctx.params.queryId, ctx.params.revId)
|
||||
ctx.message = `Query deleted.`
|
||||
ctx.status = 200
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../db")
|
||||
const {
|
||||
Role,
|
||||
getRole,
|
||||
|
@ -10,6 +9,7 @@ const {
|
|||
getUserMetadataParams,
|
||||
InternalTables,
|
||||
} = require("../../db/utils")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
const UpdateRolesOptions = {
|
||||
CREATED: "created",
|
||||
|
@ -40,15 +40,15 @@ async function updateRolesOnUserTable(db, roleId, updateOption) {
|
|||
}
|
||||
|
||||
exports.fetch = async function (ctx) {
|
||||
ctx.body = await getAllRoles(ctx.appId)
|
||||
ctx.body = await getAllRoles()
|
||||
}
|
||||
|
||||
exports.find = async function (ctx) {
|
||||
ctx.body = await getRole(ctx.appId, ctx.params.roleId)
|
||||
ctx.body = await getRole(ctx.params.roleId)
|
||||
}
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
let { _id, name, inherits, permissionId } = ctx.request.body
|
||||
if (!_id) {
|
||||
_id = generateRoleID()
|
||||
|
@ -69,7 +69,7 @@ exports.save = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const roleId = ctx.params.roleId
|
||||
if (isBuiltin(roleId)) {
|
||||
ctx.throw(400, "Cannot delete builtin role.")
|
||||
|
|
|
@ -39,12 +39,11 @@ Routing.prototype.addScreenId = function (fullpath, roleId, screenId) {
|
|||
|
||||
/**
|
||||
* Gets the full routing structure by querying the routing view and processing the result into the tree.
|
||||
* @param {string} appId The application to produce the routing structure for.
|
||||
* @returns {Promise<object>} The routing structure, this is the full structure designed for use in the builder,
|
||||
* if the client routing is required then the updateRoutingStructureForUserRole should be used.
|
||||
*/
|
||||
async function getRoutingStructure(appId) {
|
||||
const screenRoutes = await getRoutingInfo(appId)
|
||||
async function getRoutingStructure() {
|
||||
const screenRoutes = await getRoutingInfo()
|
||||
const routing = new Routing()
|
||||
|
||||
for (let screenRoute of screenRoutes) {
|
||||
|
@ -57,13 +56,13 @@ async function getRoutingStructure(appId) {
|
|||
}
|
||||
|
||||
exports.fetch = async ctx => {
|
||||
ctx.body = await getRoutingStructure(ctx.appId)
|
||||
ctx.body = await getRoutingStructure()
|
||||
}
|
||||
|
||||
exports.clientFetch = async ctx => {
|
||||
const routing = await getRoutingStructure(ctx.appId)
|
||||
const routing = await getRoutingStructure()
|
||||
let roleId = ctx.user.role._id
|
||||
const roleIds = await getUserRoleHierarchy(ctx.appId, roleId)
|
||||
const roleIds = await getUserRoleHierarchy(roleId)
|
||||
for (let topLevel of Object.values(routing.routes)) {
|
||||
for (let subpathKey of Object.keys(topLevel.subpaths)) {
|
||||
let found = false
|
||||
|
|
|
@ -19,6 +19,19 @@ import {
|
|||
isRowId,
|
||||
convertRowId,
|
||||
} from "../../../integrations/utils"
|
||||
import { getDatasourceAndQuery } from "./utils"
|
||||
import {
|
||||
DataSourceOperation,
|
||||
FieldTypes,
|
||||
RelationshipTypes,
|
||||
} from "../../../constants"
|
||||
import { breakExternalTableId, isSQL } from "../../../integrations/utils"
|
||||
import { processObjectSync } from "@budibase/string-templates"
|
||||
// @ts-ignore
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import { processFormulas } from "../../../utilities/rowProcessor/utils"
|
||||
// @ts-ignore
|
||||
import { getAppDB } from "@budibase/backend-core/context"
|
||||
|
||||
interface ManyRelationship {
|
||||
tableId?: string
|
||||
|
@ -38,18 +51,6 @@ interface RunConfig {
|
|||
}
|
||||
|
||||
module External {
|
||||
const { getDatasourceAndQuery } = require("./utils")
|
||||
const {
|
||||
DataSourceOperation,
|
||||
FieldTypes,
|
||||
RelationshipTypes,
|
||||
} = require("../../../constants")
|
||||
const { breakExternalTableId, isSQL } = require("../../../integrations/utils")
|
||||
const { processObjectSync } = require("@budibase/string-templates")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const CouchDB = require("../../../db")
|
||||
const { processFormulas } = require("../../../utilities/rowProcessor/utils")
|
||||
|
||||
function buildFilters(
|
||||
id: string | undefined,
|
||||
filters: SearchFilters,
|
||||
|
@ -210,19 +211,12 @@ module External {
|
|||
}
|
||||
|
||||
class ExternalRequest {
|
||||
private readonly appId: string
|
||||
private operation: Operation
|
||||
private tableId: string
|
||||
private datasource: Datasource
|
||||
private tables: { [key: string]: Table } = {}
|
||||
|
||||
constructor(
|
||||
appId: string,
|
||||
operation: Operation,
|
||||
tableId: string,
|
||||
datasource: Datasource
|
||||
) {
|
||||
this.appId = appId
|
||||
constructor(operation: Operation, tableId: string, datasource: Datasource) {
|
||||
this.operation = operation
|
||||
this.tableId = tableId
|
||||
this.datasource = datasource
|
||||
|
@ -231,12 +225,14 @@ module External {
|
|||
}
|
||||
}
|
||||
|
||||
getTable(tableId: string | undefined): Table {
|
||||
getTable(tableId: string | undefined): Table | undefined {
|
||||
if (!tableId) {
|
||||
throw "Table ID is unknown, cannot find table"
|
||||
}
|
||||
const { tableName } = breakExternalTableId(tableId)
|
||||
return this.tables[tableName]
|
||||
if (tableName) {
|
||||
return this.tables[tableName]
|
||||
}
|
||||
}
|
||||
|
||||
inputProcessing(row: Row | undefined, table: Table) {
|
||||
|
@ -272,9 +268,11 @@ module External {
|
|||
newRow[key] = row[key]
|
||||
continue
|
||||
}
|
||||
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
|
||||
const { tableName: linkTableName } = breakExternalTableId(
|
||||
field?.tableId
|
||||
)
|
||||
// table has to exist for many to many
|
||||
if (!this.tables[linkTableName]) {
|
||||
if (!linkTableName || !this.tables[linkTableName]) {
|
||||
continue
|
||||
}
|
||||
const linkTable = this.tables[linkTableName]
|
||||
|
@ -422,7 +420,7 @@ module External {
|
|||
}
|
||||
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
|
||||
// no table to link to, this is not a valid relationships
|
||||
if (!this.tables[linkTableName]) {
|
||||
if (!linkTableName || !this.tables[linkTableName]) {
|
||||
continue
|
||||
}
|
||||
const linkTable = this.tables[linkTableName]
|
||||
|
@ -460,6 +458,9 @@ module External {
|
|||
async lookupRelations(tableId: string, row: Row) {
|
||||
const related: { [key: string]: any } = {}
|
||||
const { tableName } = breakExternalTableId(tableId)
|
||||
if (!tableName) {
|
||||
return related
|
||||
}
|
||||
const table = this.tables[tableName]
|
||||
// @ts-ignore
|
||||
const primaryKey = table.primary[0]
|
||||
|
@ -484,7 +485,7 @@ module External {
|
|||
if (!lookupField || !row[lookupField]) {
|
||||
continue
|
||||
}
|
||||
const response = await getDatasourceAndQuery(this.appId, {
|
||||
const response = await getDatasourceAndQuery({
|
||||
endpoint: getEndpoint(tableId, DataSourceOperation.READ),
|
||||
filters: {
|
||||
equal: {
|
||||
|
@ -515,28 +516,30 @@ module External {
|
|||
row: Row,
|
||||
relationships: ManyRelationship[]
|
||||
) {
|
||||
const { appId } = this
|
||||
// if we're creating (in a through table) need to wipe the existing ones first
|
||||
const promises = []
|
||||
const related = await this.lookupRelations(mainTableId, row)
|
||||
for (let relationship of relationships) {
|
||||
const { key, tableId, isUpdate, id, ...rest } = relationship
|
||||
const body = processObjectSync(rest, row)
|
||||
const body: { [key: string]: any } = processObjectSync(rest, row, {})
|
||||
const linkTable = this.getTable(tableId)
|
||||
// @ts-ignore
|
||||
const linkPrimary = linkTable.primary[0]
|
||||
const rows = related[key]?.rows || []
|
||||
const linkPrimary = linkTable?.primary[0]
|
||||
if (!linkTable || !linkPrimary) {
|
||||
return
|
||||
}
|
||||
const rows = related[key].rows || []
|
||||
const found = rows.find(
|
||||
(row: { [key: string]: any }) =>
|
||||
row[linkPrimary] === relationship.id ||
|
||||
row[linkPrimary] === body[linkPrimary]
|
||||
row[linkPrimary] === body?.[linkPrimary]
|
||||
)
|
||||
const operation = isUpdate
|
||||
? DataSourceOperation.UPDATE
|
||||
: DataSourceOperation.CREATE
|
||||
if (!found) {
|
||||
promises.push(
|
||||
getDatasourceAndQuery(appId, {
|
||||
getDatasourceAndQuery({
|
||||
endpoint: getEndpoint(tableId, operation),
|
||||
// if we're doing many relationships then we're writing, only one response
|
||||
body,
|
||||
|
@ -552,9 +555,12 @@ module External {
|
|||
for (let [colName, { isMany, rows, tableId }] of Object.entries(
|
||||
related
|
||||
)) {
|
||||
const table: Table = this.getTable(tableId)
|
||||
const table: Table | undefined = this.getTable(tableId)
|
||||
// if its not the foreign key skip it, nothing to do
|
||||
if (table.primary && table.primary.indexOf(colName) !== -1) {
|
||||
if (
|
||||
!table ||
|
||||
(table.primary && table.primary.indexOf(colName) !== -1)
|
||||
) {
|
||||
continue
|
||||
}
|
||||
for (let row of rows) {
|
||||
|
@ -566,7 +572,7 @@ module External {
|
|||
: DataSourceOperation.UPDATE
|
||||
const body = isMany ? null : { [colName]: null }
|
||||
promises.push(
|
||||
getDatasourceAndQuery(this.appId, {
|
||||
getDatasourceAndQuery({
|
||||
endpoint: getEndpoint(tableId, op),
|
||||
body,
|
||||
filters,
|
||||
|
@ -605,20 +611,25 @@ module External {
|
|||
continue
|
||||
}
|
||||
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
|
||||
const linkTable = this.tables[linkTableName]
|
||||
if (linkTable) {
|
||||
const linkedFields = extractRealFields(linkTable, fields)
|
||||
fields = fields.concat(linkedFields)
|
||||
if (linkTableName) {
|
||||
const linkTable = this.tables[linkTableName]
|
||||
if (linkTable) {
|
||||
const linkedFields = extractRealFields(linkTable, fields)
|
||||
fields = fields.concat(linkedFields)
|
||||
}
|
||||
}
|
||||
}
|
||||
return fields
|
||||
}
|
||||
|
||||
async run(config: RunConfig) {
|
||||
const { appId, operation, tableId } = this
|
||||
const { operation, tableId } = this
|
||||
let { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
if (!tableName) {
|
||||
throw "Unable to run without a table name"
|
||||
}
|
||||
if (!this.datasource) {
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
this.datasource = await db.get(datasourceId)
|
||||
if (!this.datasource || !this.datasource.entities) {
|
||||
throw "No tables found, fetch tables before query."
|
||||
|
@ -670,7 +681,7 @@ module External {
|
|||
},
|
||||
}
|
||||
// can't really use response right now
|
||||
const response = await getDatasourceAndQuery(appId, json)
|
||||
const response = await getDatasourceAndQuery(json)
|
||||
// handle many to many relationships now if we know the ID (could be auto increment)
|
||||
if (
|
||||
operation !== DataSourceOperation.READ &&
|
||||
|
|
|
@ -9,9 +9,9 @@ const {
|
|||
breakRowIdField,
|
||||
} = require("../../../integrations/utils")
|
||||
const ExternalRequest = require("./ExternalRequest")
|
||||
const CouchDB = require("../../../db")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
async function handleRequest(appId, operation, tableId, opts = {}) {
|
||||
async function handleRequest(operation, tableId, opts = {}) {
|
||||
// make sure the filters are cleaned up, no empty strings for equals, fuzzy or string
|
||||
if (opts && opts.filters) {
|
||||
for (let filterField of NoEmptyFilterStrings) {
|
||||
|
@ -25,9 +25,7 @@ async function handleRequest(appId, operation, tableId, opts = {}) {
|
|||
}
|
||||
}
|
||||
}
|
||||
return new ExternalRequest(appId, operation, tableId, opts.datasource).run(
|
||||
opts
|
||||
)
|
||||
return new ExternalRequest(operation, tableId, opts.datasource).run(opts)
|
||||
}
|
||||
|
||||
exports.handleRequest = handleRequest
|
||||
|
@ -181,7 +179,7 @@ exports.fetchEnrichedRow = async ctx => {
|
|||
const id = ctx.params.rowId
|
||||
const tableId = ctx.params.tableId
|
||||
const { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const datasource = await db.get(datasourceId)
|
||||
if (!datasource || !datasource.entities) {
|
||||
ctx.throw(400, "Datasource has not been configured for plus API.")
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const linkRows = require("../../../db/linkedRows")
|
||||
const {
|
||||
generateRowID,
|
||||
|
@ -25,6 +24,7 @@ const {
|
|||
getFromMemoryDoc,
|
||||
} = require("../view/utils")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { finaliseRow, updateRelatedFormula } = require("./staticFormula")
|
||||
|
||||
const CALCULATION_TYPES = {
|
||||
|
@ -76,8 +76,7 @@ async function getRawTableData(ctx, db, tableId) {
|
|||
}
|
||||
|
||||
exports.patch = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const inputs = ctx.request.body
|
||||
const tableId = inputs.tableId
|
||||
const isUserTable = tableId === InternalTables.USER_METADATA
|
||||
|
@ -116,14 +115,13 @@ exports.patch = async ctx => {
|
|||
|
||||
// returned row is cleaned and prepared for writing to DB
|
||||
row = await linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: linkRows.EventType.ROW_UPDATE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
table,
|
||||
})
|
||||
// check if any attachments removed
|
||||
await cleanupAttachments(appId, table, { oldRow, row })
|
||||
await cleanupAttachments(table, { oldRow, row })
|
||||
|
||||
if (isUserTable) {
|
||||
// the row has been updated, need to put it into the ctx
|
||||
|
@ -132,15 +130,14 @@ exports.patch = async ctx => {
|
|||
return { row: ctx.body, table }
|
||||
}
|
||||
|
||||
return finaliseRow(ctx.appId, table, row, {
|
||||
return finaliseRow(table, row, {
|
||||
oldTable: dbTable,
|
||||
updateFormula: true,
|
||||
})
|
||||
}
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
let inputs = ctx.request.body
|
||||
inputs.tableId = ctx.params.tableId
|
||||
|
||||
|
@ -162,21 +159,19 @@ exports.save = async function (ctx) {
|
|||
|
||||
// make sure link rows are up to date
|
||||
row = await linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: linkRows.EventType.ROW_SAVE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
table,
|
||||
})
|
||||
|
||||
return finaliseRow(ctx.appId, table, row, {
|
||||
return finaliseRow(table, row, {
|
||||
oldTable: dbTable,
|
||||
updateFormula: true,
|
||||
})
|
||||
}
|
||||
|
||||
exports.fetchView = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const viewName = ctx.params.viewName
|
||||
|
||||
// if this is a table view being looked for just transfer to that
|
||||
|
@ -185,7 +180,7 @@ exports.fetchView = async ctx => {
|
|||
return exports.fetch(ctx)
|
||||
}
|
||||
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const { calculation, group, field } = ctx.query
|
||||
const viewInfo = await getView(db, viewName)
|
||||
let response
|
||||
|
@ -212,7 +207,7 @@ exports.fetchView = async ctx => {
|
|||
schema: {},
|
||||
}
|
||||
}
|
||||
rows = await outputProcessing(ctx, table, response.rows)
|
||||
rows = await outputProcessing(table, response.rows)
|
||||
}
|
||||
|
||||
if (calculation === CALCULATION_TYPES.STATS) {
|
||||
|
@ -239,27 +234,24 @@ exports.fetchView = async ctx => {
|
|||
}
|
||||
|
||||
exports.fetch = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
|
||||
const tableId = ctx.params.tableId
|
||||
let table = await db.get(tableId)
|
||||
let rows = await getRawTableData(ctx, db, tableId)
|
||||
return outputProcessing(ctx, table, rows)
|
||||
return outputProcessing(table, rows)
|
||||
}
|
||||
|
||||
exports.find = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const table = await db.get(ctx.params.tableId)
|
||||
let row = await findRow(ctx, db, ctx.params.tableId, ctx.params.rowId)
|
||||
row = await outputProcessing(ctx, table, row)
|
||||
let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId)
|
||||
row = await outputProcessing(table, row)
|
||||
return row
|
||||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const { _id, _rev } = ctx.request.body
|
||||
let row = await db.get(_id)
|
||||
|
||||
|
@ -268,18 +260,17 @@ exports.destroy = async function (ctx) {
|
|||
}
|
||||
const table = await db.get(row.tableId)
|
||||
// update the row to include full relationships before deleting them
|
||||
row = await outputProcessing(ctx, table, row, { squash: false })
|
||||
row = await outputProcessing(table, row, { squash: false })
|
||||
// now remove the relationships
|
||||
await linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: linkRows.EventType.ROW_DELETE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
})
|
||||
// remove any attachments that were on the row from object storage
|
||||
await cleanupAttachments(appId, table, { row })
|
||||
await cleanupAttachments(table, { row })
|
||||
// remove any static formula
|
||||
await updateRelatedFormula(appId, table, row)
|
||||
await updateRelatedFormula(table, row)
|
||||
|
||||
let response
|
||||
if (ctx.params.tableId === InternalTables.USER_METADATA) {
|
||||
|
@ -295,20 +286,18 @@ exports.destroy = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.bulkDestroy = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const tableId = ctx.params.tableId
|
||||
const table = await db.get(tableId)
|
||||
let { rows } = ctx.request.body
|
||||
|
||||
// before carrying out any updates, make sure the rows are ready to be returned
|
||||
// they need to be the full rows (including previous relationships) for automations
|
||||
rows = await outputProcessing(ctx, table, rows, { squash: false })
|
||||
rows = await outputProcessing(table, rows, { squash: false })
|
||||
|
||||
// remove the relationships first
|
||||
let updates = rows.map(row =>
|
||||
linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: linkRows.EventType.ROW_DELETE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
|
@ -327,8 +316,8 @@ exports.bulkDestroy = async ctx => {
|
|||
await db.bulkDocs(rows.map(row => ({ ...row, _deleted: true })))
|
||||
}
|
||||
// remove any attachments that were on the rows from object storage
|
||||
await cleanupAttachments(appId, table, { rows })
|
||||
await updateRelatedFormula(appId, table, rows)
|
||||
await cleanupAttachments(table, { rows })
|
||||
await updateRelatedFormula(table, rows)
|
||||
await Promise.all(updates)
|
||||
return { response: { ok: true }, rows }
|
||||
}
|
||||
|
@ -339,28 +328,27 @@ exports.search = async ctx => {
|
|||
return { rows: await exports.fetch(ctx) }
|
||||
}
|
||||
|
||||
const appId = ctx.appId
|
||||
const { tableId } = ctx.params
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const { paginate, query, ...params } = ctx.request.body
|
||||
params.version = ctx.version
|
||||
params.tableId = tableId
|
||||
|
||||
let response
|
||||
if (paginate) {
|
||||
response = await paginatedSearch(appId, query, params)
|
||||
response = await paginatedSearch(query, params)
|
||||
} else {
|
||||
response = await fullSearch(appId, query, params)
|
||||
response = await fullSearch(query, params)
|
||||
}
|
||||
|
||||
// Enrich search results with relationships
|
||||
if (response.rows && response.rows.length) {
|
||||
// enrich with global users if from users table
|
||||
if (tableId === InternalTables.USER_METADATA) {
|
||||
response.rows = await getGlobalUsersFromMetadata(appId, response.rows)
|
||||
response.rows = await getGlobalUsersFromMetadata(response.rows)
|
||||
}
|
||||
const table = await db.get(tableId)
|
||||
response.rows = await outputProcessing(ctx, table, response.rows)
|
||||
response.rows = await outputProcessing(table, response.rows)
|
||||
}
|
||||
|
||||
return response
|
||||
|
@ -368,25 +356,22 @@ exports.search = async ctx => {
|
|||
|
||||
exports.validate = async ctx => {
|
||||
return validate({
|
||||
appId: ctx.appId,
|
||||
tableId: ctx.params.tableId,
|
||||
row: ctx.request.body,
|
||||
})
|
||||
}
|
||||
|
||||
exports.fetchEnrichedRow = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const tableId = ctx.params.tableId
|
||||
const rowId = ctx.params.rowId
|
||||
// need table to work out where links go in row
|
||||
let [table, row] = await Promise.all([
|
||||
db.get(tableId),
|
||||
findRow(ctx, db, tableId, rowId),
|
||||
findRow(ctx, tableId, rowId),
|
||||
])
|
||||
// get the link docs
|
||||
const linkVals = await linkRows.getLinkDocuments({
|
||||
appId,
|
||||
tableId,
|
||||
rowId,
|
||||
})
|
||||
|
@ -413,7 +398,7 @@ exports.fetchEnrichedRow = async ctx => {
|
|||
for (let [tableId, rows] of Object.entries(groups)) {
|
||||
// need to include the IDs in these rows for any links they may have
|
||||
linkedRows = linkedRows.concat(
|
||||
await outputProcessing(ctx, tables[tableId], rows)
|
||||
await outputProcessing(tables[tableId], rows)
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@ -1,14 +1,14 @@
|
|||
const { SearchIndexes } = require("../../../db/utils")
|
||||
const fetch = require("node-fetch")
|
||||
const { getCouchUrl } = require("@budibase/backend-core/db")
|
||||
const { getAppId } = require("@budibase/backend-core/context")
|
||||
|
||||
/**
|
||||
* Class to build lucene query URLs.
|
||||
* Optionally takes a base lucene query object.
|
||||
*/
|
||||
class QueryBuilder {
|
||||
constructor(appId, base) {
|
||||
this.appId = appId
|
||||
constructor(base) {
|
||||
this.query = {
|
||||
string: {},
|
||||
fuzzy: {},
|
||||
|
@ -241,7 +241,8 @@ class QueryBuilder {
|
|||
}
|
||||
|
||||
async run() {
|
||||
const url = `${getCouchUrl()}/${this.appId}/_design/database/_search/${
|
||||
const appId = getAppId()
|
||||
const url = `${getCouchUrl()}/${appId}/_design/database/_search/${
|
||||
SearchIndexes.ROWS
|
||||
}`
|
||||
const body = this.buildSearchBody()
|
||||
|
@ -278,7 +279,6 @@ const runQuery = async (url, body) => {
|
|||
* Gets round the fixed limit of 200 results from a query by fetching as many
|
||||
* pages as required and concatenating the results. This recursively operates
|
||||
* until enough results have been found.
|
||||
* @param appId {string} The app ID to search
|
||||
* @param query {object} The JSON query structure
|
||||
* @param params {object} The search params including:
|
||||
* tableId {string} The table ID to search
|
||||
|
@ -291,7 +291,7 @@ const runQuery = async (url, body) => {
|
|||
* rows {array|null} Current results in the recursive search
|
||||
* @returns {Promise<*[]|*>}
|
||||
*/
|
||||
const recursiveSearch = async (appId, query, params) => {
|
||||
const recursiveSearch = async (query, params) => {
|
||||
const bookmark = params.bookmark
|
||||
const rows = params.rows || []
|
||||
if (rows.length >= params.limit) {
|
||||
|
@ -301,7 +301,7 @@ const recursiveSearch = async (appId, query, params) => {
|
|||
if (rows.length > params.limit - 200) {
|
||||
pageSize = params.limit - rows.length
|
||||
}
|
||||
const page = await new QueryBuilder(appId, query)
|
||||
const page = await new QueryBuilder(query)
|
||||
.setVersion(params.version)
|
||||
.setTable(params.tableId)
|
||||
.setBookmark(bookmark)
|
||||
|
@ -321,14 +321,13 @@ const recursiveSearch = async (appId, query, params) => {
|
|||
bookmark: page.bookmark,
|
||||
rows: [...rows, ...page.rows],
|
||||
}
|
||||
return await recursiveSearch(appId, query, newParams)
|
||||
return await recursiveSearch(query, newParams)
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a paginated search. A bookmark will be returned to allow the next
|
||||
* page to be fetched. There is a max limit off 200 results per page in a
|
||||
* paginated search.
|
||||
* @param appId {string} The app ID to search
|
||||
* @param query {object} The JSON query structure
|
||||
* @param params {object} The search params including:
|
||||
* tableId {string} The table ID to search
|
||||
|
@ -340,13 +339,13 @@ const recursiveSearch = async (appId, query, params) => {
|
|||
* bookmark {string} The bookmark to resume from
|
||||
* @returns {Promise<{hasNextPage: boolean, rows: *[]}>}
|
||||
*/
|
||||
exports.paginatedSearch = async (appId, query, params) => {
|
||||
exports.paginatedSearch = async (query, params) => {
|
||||
let limit = params.limit
|
||||
if (limit == null || isNaN(limit) || limit < 0) {
|
||||
limit = 50
|
||||
}
|
||||
limit = Math.min(limit, 200)
|
||||
const search = new QueryBuilder(appId, query)
|
||||
const search = new QueryBuilder(query)
|
||||
.setVersion(params.version)
|
||||
.setTable(params.tableId)
|
||||
.setSort(params.sort)
|
||||
|
@ -375,7 +374,6 @@ exports.paginatedSearch = async (appId, query, params) => {
|
|||
* desired amount of results. There is a limit of 1000 results to avoid
|
||||
* heavy performance hits, and to avoid client components breaking from
|
||||
* handling too much data.
|
||||
* @param appId {string} The app ID to search
|
||||
* @param query {object} The JSON query structure
|
||||
* @param params {object} The search params including:
|
||||
* tableId {string} The table ID to search
|
||||
|
@ -386,12 +384,12 @@ exports.paginatedSearch = async (appId, query, params) => {
|
|||
* limit {number} The desired number of results
|
||||
* @returns {Promise<{rows: *}>}
|
||||
*/
|
||||
exports.fullSearch = async (appId, query, params) => {
|
||||
exports.fullSearch = async (query, params) => {
|
||||
let limit = params.limit
|
||||
if (limit == null || isNaN(limit) || limit < 0) {
|
||||
limit = 1000
|
||||
}
|
||||
params.limit = Math.min(limit, 1000)
|
||||
const rows = await recursiveSearch(appId, query, params)
|
||||
const rows = await recursiveSearch(query, params)
|
||||
return { rows }
|
||||
}
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const { getRowParams } = require("../../../db/utils")
|
||||
const {
|
||||
outputProcessing,
|
||||
|
@ -8,6 +7,7 @@ const {
|
|||
const { FieldTypes, FormulaTypes } = require("../../../constants")
|
||||
const { isEqual } = require("lodash")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
/**
|
||||
* This function runs through a list of enriched rows, looks at the rows which
|
||||
|
@ -15,8 +15,8 @@ const { cloneDeep } = require("lodash/fp")
|
|||
* updated.
|
||||
* NOTE: this will only for affect static formulas.
|
||||
*/
|
||||
exports.updateRelatedFormula = async (appId, table, enrichedRows) => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.updateRelatedFormula = async (table, enrichedRows) => {
|
||||
const db = getAppDB()
|
||||
// no formula to update, we're done
|
||||
if (!table.relatedFormula) {
|
||||
return
|
||||
|
@ -57,7 +57,7 @@ exports.updateRelatedFormula = async (appId, table, enrichedRows) => {
|
|||
// re-enrich rows for all the related, don't update the related formula for them
|
||||
promises = promises.concat(
|
||||
relatedRows[tableId].map(related =>
|
||||
exports.finaliseRow(appId, relatedTable, related, {
|
||||
exports.finaliseRow(relatedTable, related, {
|
||||
updateFormula: false,
|
||||
})
|
||||
)
|
||||
|
@ -69,8 +69,8 @@ exports.updateRelatedFormula = async (appId, table, enrichedRows) => {
|
|||
await Promise.all(promises)
|
||||
}
|
||||
|
||||
exports.updateAllFormulasInTable = async (appId, table) => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.updateAllFormulasInTable = async table => {
|
||||
const db = getAppDB()
|
||||
// start by getting the raw rows (which will be written back to DB after update)
|
||||
let rows = (
|
||||
await db.allDocs(
|
||||
|
@ -81,7 +81,7 @@ exports.updateAllFormulasInTable = async (appId, table) => {
|
|||
).rows.map(row => row.doc)
|
||||
// now enrich the rows, note the clone so that we have the base state of the
|
||||
// rows so that we don't write any of the enriched information back
|
||||
let enrichedRows = await outputProcessing({ appId }, table, cloneDeep(rows), {
|
||||
let enrichedRows = await outputProcessing(table, cloneDeep(rows), {
|
||||
squash: false,
|
||||
})
|
||||
const updatedRows = []
|
||||
|
@ -109,15 +109,14 @@ exports.updateAllFormulasInTable = async (appId, table) => {
|
|||
* expects the row to be totally enriched/contain all relationships.
|
||||
*/
|
||||
exports.finaliseRow = async (
|
||||
appId,
|
||||
table,
|
||||
row,
|
||||
{ oldTable, updateFormula } = { updateFormula: true }
|
||||
) => {
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
row.type = "row"
|
||||
// process the row before return, to include relationships
|
||||
let enrichedRow = await outputProcessing({ appId }, table, cloneDeep(row), {
|
||||
let enrichedRow = await outputProcessing(table, cloneDeep(row), {
|
||||
squash: false,
|
||||
})
|
||||
// use enriched row to generate formulas for saving, specifically only use as context
|
||||
|
@ -151,7 +150,7 @@ exports.finaliseRow = async (
|
|||
enrichedRow = await processFormulas(table, enrichedRow, { dynamic: false })
|
||||
// this updates the related formulas in other rows based on the relations to this row
|
||||
if (updateFormula) {
|
||||
await exports.updateRelatedFormula(appId, table, enrichedRow)
|
||||
await exports.updateRelatedFormula(table, enrichedRow)
|
||||
}
|
||||
return { row: enrichedRow, table }
|
||||
}
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
const validateJs = require("validate.js")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const CouchDB = require("../../../db")
|
||||
const { InternalTables } = require("../../../db/utils")
|
||||
const userController = require("../user")
|
||||
const { FieldTypes } = require("../../../constants")
|
||||
const { processStringSync } = require("@budibase/string-templates")
|
||||
const { makeExternalQuery } = require("../../../integrations/base/utils")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
validateJs.extend(validateJs.validators.datetime, {
|
||||
parse: function (value) {
|
||||
|
@ -17,14 +17,15 @@ validateJs.extend(validateJs.validators.datetime, {
|
|||
},
|
||||
})
|
||||
|
||||
exports.getDatasourceAndQuery = async (appId, json) => {
|
||||
exports.getDatasourceAndQuery = async json => {
|
||||
const datasourceId = json.endpoint.datasourceId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const datasource = await db.get(datasourceId)
|
||||
return makeExternalQuery(datasource, json)
|
||||
}
|
||||
|
||||
exports.findRow = async (ctx, db, tableId, rowId) => {
|
||||
exports.findRow = async (ctx, tableId, rowId) => {
|
||||
const db = getAppDB()
|
||||
let row
|
||||
// TODO remove special user case in future
|
||||
if (tableId === InternalTables.USER_METADATA) {
|
||||
|
@ -42,9 +43,9 @@ exports.findRow = async (ctx, db, tableId, rowId) => {
|
|||
return row
|
||||
}
|
||||
|
||||
exports.validate = async ({ appId, tableId, row, table }) => {
|
||||
exports.validate = async ({ tableId, row, table }) => {
|
||||
if (!table) {
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
table = await db.get(tableId)
|
||||
}
|
||||
const errors = {}
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
const CouchDB = require("../../db")
|
||||
const { getScreenParams, generateScreenID } = require("../../db/utils")
|
||||
const { AccessController } = require("@budibase/backend-core/roles")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
exports.fetch = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
|
||||
const screens = (
|
||||
await db.allDocs(
|
||||
|
@ -14,15 +13,14 @@ exports.fetch = async ctx => {
|
|||
)
|
||||
).rows.map(element => element.doc)
|
||||
|
||||
ctx.body = await new AccessController(appId).checkScreensAccess(
|
||||
ctx.body = await new AccessController().checkScreensAccess(
|
||||
screens,
|
||||
ctx.user.role._id
|
||||
)
|
||||
}
|
||||
|
||||
exports.save = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
let screen = ctx.request.body
|
||||
|
||||
if (!screen._id) {
|
||||
|
@ -39,7 +37,7 @@ exports.save = async ctx => {
|
|||
}
|
||||
|
||||
exports.destroy = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
await db.remove(ctx.params.screenId, ctx.params.screenRev)
|
||||
ctx.body = {
|
||||
message: "Screen deleted successfully",
|
||||
|
|
|
@ -6,7 +6,6 @@ const uuid = require("uuid")
|
|||
const { ObjectStoreBuckets } = require("../../../constants")
|
||||
const { processString } = require("@budibase/string-templates")
|
||||
const { getAllApps } = require("@budibase/backend-core/db")
|
||||
const CouchDB = require("../../../db")
|
||||
const {
|
||||
loadHandlebarsFile,
|
||||
NODE_MODULES_PATH,
|
||||
|
@ -17,6 +16,7 @@ const { clientLibraryPath } = require("../../../utilities")
|
|||
const { upload } = require("../../../utilities/fileSystem")
|
||||
const { attachmentsRelativeURL } = require("../../../utilities")
|
||||
const { DocumentTypes } = require("../../../db/utils")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const AWS = require("aws-sdk")
|
||||
const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
|
||||
|
||||
|
@ -44,7 +44,7 @@ async function getAppIdFromUrl(ctx) {
|
|||
let possibleAppUrl = `/${encodeURI(ctx.params.appId).toLowerCase()}`
|
||||
|
||||
// search prod apps for a url that matches, exclude dev where id is always used
|
||||
const apps = await getAllApps(CouchDB, { dev: false })
|
||||
const apps = await getAllApps({ dev: false })
|
||||
const app = apps.filter(
|
||||
a => a.url && a.url.toLowerCase() === possibleAppUrl
|
||||
)[0]
|
||||
|
@ -85,7 +85,7 @@ exports.uploadFile = async function (ctx) {
|
|||
exports.serveApp = async function (ctx) {
|
||||
let appId = await getAppIdFromUrl(ctx)
|
||||
const App = require("./templates/BudibaseApp.svelte").default
|
||||
const db = new CouchDB(appId, { skip_setup: true })
|
||||
const db = getAppDB({ skip_setup: true })
|
||||
const appInfo = await db.get(DocumentTypes.APP_METADATA)
|
||||
|
||||
const { head, html, css } = App.render({
|
||||
|
@ -111,7 +111,7 @@ exports.serveClientLibrary = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.getSignedUploadURL = async function (ctx) {
|
||||
const database = new CouchDB(ctx.appId)
|
||||
const database = getAppDB()
|
||||
|
||||
// Ensure datasource is valid
|
||||
let datasource
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const { FieldTypes, FormulaTypes } = require("../../../constants")
|
||||
const { getAllInternalTables, clearColumns } = require("./utils")
|
||||
const { doesContainStrings } = require("@budibase/string-templates")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { isEqual, uniq } = require("lodash")
|
||||
const { updateAllFormulasInTable } = require("../row/staticFormula")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
function isStaticFormula(column) {
|
||||
return (
|
||||
|
@ -37,14 +37,9 @@ function getFormulaThatUseColumn(table, columnNames) {
|
|||
* This functions checks for when a related table, column or related column is deleted, if any
|
||||
* tables need to have the formula column removed.
|
||||
*/
|
||||
async function checkIfFormulaNeedsCleared(
|
||||
appId,
|
||||
table,
|
||||
{ oldTable, deletion }
|
||||
) {
|
||||
const db = new CouchDB(appId)
|
||||
async function checkIfFormulaNeedsCleared(table, { oldTable, deletion }) {
|
||||
// start by retrieving all tables, remove the current table from the list
|
||||
const tables = (await getAllInternalTables(appId)).filter(
|
||||
const tables = (await getAllInternalTables()).filter(
|
||||
tbl => tbl._id !== table._id
|
||||
)
|
||||
const schemaToUse = oldTable ? oldTable.schema : table.schema
|
||||
|
@ -60,7 +55,7 @@ async function checkIfFormulaNeedsCleared(
|
|||
}
|
||||
const columnsToDelete = getFormulaThatUseColumn(tableToUse, removed.name)
|
||||
if (columnsToDelete.length > 0) {
|
||||
await clearColumns(db, table, columnsToDelete)
|
||||
await clearColumns(table, columnsToDelete)
|
||||
}
|
||||
// need a special case, where a column has been removed from this table, but was used
|
||||
// in a different, related tables formula
|
||||
|
@ -85,7 +80,7 @@ async function checkIfFormulaNeedsCleared(
|
|||
)
|
||||
}
|
||||
if (relatedFormulaToRemove.length > 0) {
|
||||
await clearColumns(db, relatedTable, uniq(relatedFormulaToRemove))
|
||||
await clearColumns(relatedTable, uniq(relatedFormulaToRemove))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -99,13 +94,12 @@ async function checkIfFormulaNeedsCleared(
|
|||
* specifically only for static formula.
|
||||
*/
|
||||
async function updateRelatedFormulaLinksOnTables(
|
||||
appId,
|
||||
table,
|
||||
{ deletion } = { deletion: false }
|
||||
) {
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
// start by retrieving all tables, remove the current table from the list
|
||||
const tables = (await getAllInternalTables(appId)).filter(
|
||||
const tables = (await getAllInternalTables()).filter(
|
||||
tbl => tbl._id !== table._id
|
||||
)
|
||||
// clone the tables, so we can compare at end
|
||||
|
@ -155,7 +149,7 @@ async function updateRelatedFormulaLinksOnTables(
|
|||
}
|
||||
}
|
||||
|
||||
async function checkIfFormulaUpdated(appId, table, { oldTable }) {
|
||||
async function checkIfFormulaUpdated(table, { oldTable }) {
|
||||
// look to see if any formula values have changed
|
||||
const shouldUpdate = Object.values(table.schema).find(
|
||||
column =>
|
||||
|
@ -166,18 +160,14 @@ async function checkIfFormulaUpdated(appId, table, { oldTable }) {
|
|||
)
|
||||
// if a static formula column has updated, then need to run the update
|
||||
if (shouldUpdate != null) {
|
||||
await updateAllFormulasInTable(appId, table)
|
||||
await updateAllFormulasInTable(table)
|
||||
}
|
||||
}
|
||||
|
||||
exports.runStaticFormulaChecks = async (
|
||||
appId,
|
||||
table,
|
||||
{ oldTable, deletion }
|
||||
) => {
|
||||
await updateRelatedFormulaLinksOnTables(appId, table, { deletion })
|
||||
await checkIfFormulaNeedsCleared(appId, table, { oldTable, deletion })
|
||||
exports.runStaticFormulaChecks = async (table, { oldTable, deletion }) => {
|
||||
await updateRelatedFormulaLinksOnTables(table, { deletion })
|
||||
await checkIfFormulaNeedsCleared(table, { oldTable, deletion })
|
||||
if (!deletion) {
|
||||
await checkIfFormulaUpdated(appId, table, { oldTable })
|
||||
await checkIfFormulaUpdated(table, { oldTable })
|
||||
}
|
||||
}
|
||||
|
|
|
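With the DB resolved from context, the static-formula helpers above lose their appId parameters entirely; callers only pass the data they actually own. A hedged sketch of how a table controller might now invoke the checks (runStaticFormulaChecks and doInAppContext are the names used in this PR; the wrapper function is illustrative):

const { doInAppContext } = require("@budibase/backend-core/context")
const { runStaticFormulaChecks } = require("./bulkFormula")

// illustrative: re-run formula bookkeeping after a table save
async function afterTableSave(appId, table, oldTable) {
  await doInAppContext(appId, async () => {
    // no appId argument - the helpers pull the app DB from the context
    await runStaticFormulaChecks(table, { oldTable })
  })
}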
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const {
|
||||
buildExternalTableId,
|
||||
breakExternalTableId,
|
||||
|
@ -19,6 +18,7 @@ const { makeExternalQuery } = require("../../../integrations/base/utils")
|
|||
const { cloneDeep } = require("lodash/fp")
|
||||
const csvParser = require("../../../utilities/csvParser")
|
||||
const { handleRequest } = require("../row/external")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
async function makeTableRequest(
|
||||
datasource,
|
||||
|
@ -159,7 +159,6 @@ function isRelationshipSetup(column) {
|
|||
}
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const table = ctx.request.body
|
||||
// can't do this right now
|
||||
delete table.dataImport
|
||||
|
@ -176,14 +175,14 @@ exports.save = async function (ctx) {
|
|||
|
||||
let oldTable
|
||||
if (ctx.request.body && ctx.request.body._id) {
|
||||
oldTable = await getTable(appId, ctx.request.body._id)
|
||||
oldTable = await getTable(ctx.request.body._id)
|
||||
}
|
||||
|
||||
if (hasTypeChanged(tableToSave, oldTable)) {
|
||||
ctx.throw(400, "A column type has changed.")
|
||||
}
|
||||
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const datasource = await db.get(datasourceId)
|
||||
const oldTables = cloneDeep(datasource.entities)
|
||||
const tables = datasource.entities
|
||||
|
@ -267,14 +266,13 @@ exports.save = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const tableToDelete = await getTable(appId, ctx.params.tableId)
|
||||
const tableToDelete = await getTable(ctx.params.tableId)
|
||||
if (!tableToDelete || !tableToDelete.created) {
|
||||
ctx.throw(400, "Cannot delete tables which weren't created in Budibase.")
|
||||
}
|
||||
const datasourceId = getDatasourceId(tableToDelete)
|
||||
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const datasource = await db.get(datasourceId)
|
||||
const tables = datasource.entities
|
||||
|
||||
|
@ -290,8 +288,7 @@ exports.destroy = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.bulkImport = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const table = await getTable(appId, ctx.params.tableId)
|
||||
const table = await getTable(ctx.params.tableId)
|
||||
const { dataImport } = ctx.request.body
|
||||
if (!dataImport || !dataImport.schema || !dataImport.csvString) {
|
||||
ctx.throw(400, "Provided data import information is invalid.")
|
||||
|
@ -300,7 +297,7 @@ exports.bulkImport = async function (ctx) {
|
|||
...dataImport,
|
||||
existingTable: table,
|
||||
})
|
||||
await handleRequest(appId, DataSourceOperation.BULK_CREATE, table._id, {
|
||||
await handleRequest(DataSourceOperation.BULK_CREATE, table._id, {
|
||||
rows,
|
||||
})
|
||||
return table
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const internal = require("./internal")
|
||||
const external = require("./external")
|
||||
const csvParser = require("../../../utilities/csvParser")
|
||||
const { isExternalTable, isSQL } = require("../../../integrations/utils")
|
||||
const { getDatasourceParams } = require("../../../db/utils")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { getTable, getAllInternalTables } = require("./utils")
|
||||
|
||||
function pickApi({ tableId, table }) {
|
||||
|
@ -20,9 +20,9 @@ function pickApi({ tableId, table }) {
|
|||
|
||||
// covers both internal and external
|
||||
exports.fetch = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
|
||||
const internal = await getAllInternalTables(ctx.appId)
|
||||
const internal = await getAllInternalTables()
|
||||
|
||||
const externalTables = await db.allDocs(
|
||||
getDatasourceParams("plus", {
|
||||
|
@ -49,7 +49,7 @@ exports.fetch = async function (ctx) {
|
|||
|
||||
exports.find = async function (ctx) {
|
||||
const tableId = ctx.params.id
|
||||
ctx.body = await getTable(ctx.appId, tableId)
|
||||
ctx.body = await getTable(tableId)
|
||||
}
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
|
@ -88,7 +88,7 @@ exports.validateCSVSchema = async function (ctx) {
|
|||
const { csvString, schema = {}, tableId } = ctx.request.body
|
||||
let existingTable
|
||||
if (tableId) {
|
||||
existingTable = await getTable(ctx.appId, tableId)
|
||||
existingTable = await getTable(tableId)
|
||||
}
|
||||
let result = await csvParser.parse(csvString, schema)
|
||||
if (existingTable) {
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const linkRows = require("../../../db/linkedRows")
|
||||
const { getRowParams, generateTableID } = require("../../../db/utils")
|
||||
const { FieldTypes } = require("../../../constants")
|
||||
|
@ -9,12 +8,13 @@ const {
|
|||
handleDataImport,
|
||||
} = require("./utils")
|
||||
const usageQuota = require("../../../utilities/usageQuota")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const env = require("../../../environment")
|
||||
const { cleanupAttachments } = require("../../../utilities/rowProcessor")
|
||||
const { runStaticFormulaChecks } = require("./bulkFormula")
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const { dataImport, ...rest } = ctx.request.body
|
||||
let tableToSave = {
|
||||
type: "table",
|
||||
|
@ -36,8 +36,7 @@ exports.save = async function (ctx) {
|
|||
// saving a table is a complex operation, involving many different steps, this
|
||||
// has been broken out into a utility to make it more obvious/easier to manipulate
|
||||
const tableSaveFunctions = new TableSaveFunctions({
|
||||
db,
|
||||
ctx,
|
||||
user: ctx.user,
|
||||
oldTable,
|
||||
dataImport,
|
||||
})
|
||||
|
@ -82,7 +81,6 @@ exports.save = async function (ctx) {
|
|||
// update linked rows
|
||||
try {
|
||||
const linkResp = await linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: oldTable
|
||||
? linkRows.EventType.TABLE_UPDATED
|
||||
: linkRows.EventType.TABLE_SAVE,
|
||||
|
@ -107,13 +105,12 @@ exports.save = async function (ctx) {
|
|||
|
||||
tableToSave = await tableSaveFunctions.after(tableToSave)
|
||||
// has to run after, make sure it has _id
|
||||
await runStaticFormulaChecks(appId, tableToSave, { oldTable })
|
||||
await runStaticFormulaChecks(tableToSave, { oldTable })
|
||||
return tableToSave
|
||||
}
|
||||
|
||||
exports.destroy = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const tableToDelete = await db.get(ctx.params.tableId)
|
||||
|
||||
// Delete all rows for that table
|
||||
|
@ -127,7 +124,6 @@ exports.destroy = async function (ctx) {
|
|||
|
||||
// update linked rows
|
||||
await linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: linkRows.EventType.TABLE_DELETE,
|
||||
table: tableToDelete,
|
||||
})
|
||||
|
@ -136,24 +132,25 @@ exports.destroy = async function (ctx) {
|
|||
await db.remove(tableToDelete)
|
||||
|
||||
// remove table search index
|
||||
const currentIndexes = await db.getIndexes()
|
||||
const existingIndex = currentIndexes.indexes.find(
|
||||
existing => existing.name === `search:${ctx.params.tableId}`
|
||||
)
|
||||
if (existingIndex) {
|
||||
await db.deleteIndex(existingIndex)
|
||||
if (!env.isTest()) {
|
||||
const currentIndexes = await db.getIndexes()
|
||||
const existingIndex = currentIndexes.indexes.find(
|
||||
existing => existing.name === `search:${ctx.params.tableId}`
|
||||
)
|
||||
if (existingIndex) {
|
||||
await db.deleteIndex(existingIndex)
|
||||
}
|
||||
}
|
||||
|
||||
// has to run after, make sure it has _id
|
||||
await runStaticFormulaChecks(appId, tableToDelete, { deletion: true })
|
||||
await cleanupAttachments(appId, tableToDelete, { rows })
|
||||
await runStaticFormulaChecks(tableToDelete, { deletion: true })
|
||||
await cleanupAttachments(tableToDelete, { rows })
|
||||
return tableToDelete
|
||||
}
|
||||
|
||||
exports.bulkImport = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const table = await getTable(appId, ctx.params.tableId)
|
||||
const table = await getTable(ctx.params.tableId)
|
||||
const { dataImport } = ctx.request.body
|
||||
await handleDataImport(appId, ctx.user, table, dataImport)
|
||||
await handleDataImport(ctx.user, table, dataImport)
|
||||
return table
|
||||
}
|
||||
|
|
|
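The destroy path above also gains an env.isTest() guard so that search-index cleanup is skipped under Jest. A small sketch of that guarded cleanup step, assuming a PouchDB-style db with getIndexes/deleteIndex as used in the diff:

const { getAppDB } = require("@budibase/backend-core/context")
const env = require("../../../environment")

// illustrative: remove the per-table search index unless running in tests
async function removeTableSearchIndex(tableId) {
  if (env.isTest()) {
    return
  }
  const db = getAppDB()
  const { indexes } = await db.getIndexes()
  const existing = indexes.find(idx => idx.name === `search:${tableId}`)
  if (existing) {
    await db.deleteIndex(existing)
  }
}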
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const csvParser = require("../../../utilities/csvParser")
|
||||
const {
|
||||
getRowParams,
|
||||
|
@ -26,10 +25,11 @@ const {
|
|||
const { getViews, saveView } = require("../view/utils")
|
||||
const viewTemplate = require("../view/viewBuilder")
|
||||
const usageQuota = require("../../../utilities/usageQuota")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
|
||||
exports.clearColumns = async (appId, table, columnNames) => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.clearColumns = async (table, columnNames) => {
|
||||
const db = getAppDB()
|
||||
const rows = await db.allDocs(
|
||||
getRowParams(table._id, null, {
|
||||
include_docs: true,
|
||||
|
@ -43,7 +43,8 @@ exports.clearColumns = async (appId, table, columnNames) => {
|
|||
)
|
||||
}
|
||||
|
||||
exports.checkForColumnUpdates = async (appId, db, oldTable, updatedTable) => {
|
||||
exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
|
||||
const db = getAppDB()
|
||||
let updatedRows = []
|
||||
const rename = updatedTable._rename
|
||||
let deletedColumns = []
|
||||
|
@ -73,9 +74,9 @@ exports.checkForColumnUpdates = async (appId, db, oldTable, updatedTable) => {
|
|||
})
|
||||
|
||||
// cleanup any attachments from object storage for deleted attachment columns
|
||||
await cleanupAttachments(appId, updatedTable, { oldTable, rows: rawRows })
|
||||
await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
|
||||
// Update views
|
||||
await exports.checkForViewUpdates(db, updatedTable, rename, deletedColumns)
|
||||
await exports.checkForViewUpdates(updatedTable, rename, deletedColumns)
|
||||
delete updatedTable._rename
|
||||
}
|
||||
return { rows: updatedRows, table: updatedTable }
|
||||
|
@ -102,12 +103,12 @@ exports.makeSureTableUpToDate = (table, tableToSave) => {
|
|||
return tableToSave
|
||||
}
|
||||
|
||||
exports.handleDataImport = async (appId, user, table, dataImport) => {
|
||||
exports.handleDataImport = async (user, table, dataImport) => {
|
||||
if (!dataImport || !dataImport.csvString) {
|
||||
return table
|
||||
}
|
||||
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
// Populate the table with rows imported from CSV in a bulk update
|
||||
const data = await csvParser.transform({
|
||||
...dataImport,
|
||||
|
@ -152,8 +153,8 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
|
|||
return table
|
||||
}
|
||||
|
||||
exports.handleSearchIndexes = async (appId, table) => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.handleSearchIndexes = async table => {
|
||||
const db = getAppDB()
|
||||
// create relevant search indexes
|
||||
if (table.indexes && table.indexes.length > 0) {
|
||||
const currentIndexes = await db.getIndexes()
|
||||
|
@ -210,12 +211,9 @@ exports.checkStaticTables = table => {
|
|||
}
|
||||
|
||||
class TableSaveFunctions {
|
||||
constructor({ db, ctx, oldTable, dataImport }) {
|
||||
this.db = db
|
||||
this.ctx = ctx
|
||||
if (this.ctx && this.ctx.user) {
|
||||
this.appId = this.ctx.appId
|
||||
}
|
||||
constructor({ user, oldTable, dataImport }) {
|
||||
this.db = getAppDB()
|
||||
this.user = user
|
||||
this.oldTable = oldTable
|
||||
this.dataImport = dataImport
|
||||
// any rows that need to be updated
|
||||
|
@ -233,25 +231,15 @@ class TableSaveFunctions {
|
|||
|
||||
// when confirmed valid
|
||||
async mid(table) {
|
||||
let response = await exports.checkForColumnUpdates(
|
||||
this.appId,
|
||||
this.db,
|
||||
this.oldTable,
|
||||
table
|
||||
)
|
||||
let response = await exports.checkForColumnUpdates(this.oldTable, table)
|
||||
this.rows = this.rows.concat(response.rows)
|
||||
return table
|
||||
}
|
||||
|
||||
// after saving
|
||||
async after(table) {
|
||||
table = await exports.handleSearchIndexes(this.appId, table)
|
||||
table = await exports.handleDataImport(
|
||||
this.appId,
|
||||
this.ctx.user,
|
||||
table,
|
||||
this.dataImport
|
||||
)
|
||||
table = await exports.handleSearchIndexes(table)
|
||||
table = await exports.handleDataImport(this.user, table, this.dataImport)
|
||||
return table
|
||||
}
|
||||
|
||||
|
@ -260,8 +248,8 @@ class TableSaveFunctions {
|
|||
}
|
||||
}
|
||||
|
||||
exports.getAllInternalTables = async appId => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.getAllInternalTables = async () => {
|
||||
const db = getAppDB()
|
||||
const internalTables = await db.allDocs(
|
||||
getTableParams(null, {
|
||||
include_docs: true,
|
||||
|
@ -274,8 +262,8 @@ exports.getAllInternalTables = async appId => {
|
|||
}))
|
||||
}
|
||||
|
||||
exports.getAllExternalTables = async (appId, datasourceId) => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.getAllExternalTables = async datasourceId => {
|
||||
const db = getAppDB()
|
||||
const datasource = await db.get(datasourceId)
|
||||
if (!datasource || !datasource.entities) {
|
||||
throw "Datasource is not configured fully."
|
||||
|
@ -283,25 +271,25 @@ exports.getAllExternalTables = async (appId, datasourceId) => {
|
|||
return datasource.entities
|
||||
}
|
||||
|
||||
exports.getExternalTable = async (appId, datasourceId, tableName) => {
|
||||
const entities = await exports.getAllExternalTables(appId, datasourceId)
|
||||
exports.getExternalTable = async (datasourceId, tableName) => {
|
||||
const entities = await exports.getAllExternalTables(datasourceId)
|
||||
return entities[tableName]
|
||||
}
|
||||
|
||||
exports.getTable = async (appId, tableId) => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.getTable = async tableId => {
|
||||
const db = getAppDB()
|
||||
if (isExternalTable(tableId)) {
|
||||
let { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
const datasource = await db.get(datasourceId)
|
||||
const table = await exports.getExternalTable(appId, datasourceId, tableName)
|
||||
const table = await exports.getExternalTable(datasourceId, tableName)
|
||||
return { ...table, sql: isSQL(datasource) }
|
||||
} else {
|
||||
return db.get(tableId)
|
||||
}
|
||||
}
|
||||
|
||||
exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => {
|
||||
const views = await getViews(db)
|
||||
exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
|
||||
const views = await getViews()
|
||||
const tableViews = views.filter(view => view.meta.tableId === table._id)
|
||||
|
||||
// Check each table view to see if impacted by this table action
|
||||
|
@ -363,7 +351,7 @@ exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => {
|
|||
// Update view if required
|
||||
if (needsUpdated) {
|
||||
const newViewTemplate = viewTemplate(view.meta)
|
||||
await saveView(db, null, view.name, newViewTemplate)
|
||||
await saveView(null, view.name, newViewTemplate)
|
||||
if (!newViewTemplate.meta.schema) {
|
||||
newViewTemplate.meta.schema = table.schema
|
||||
}
|
||||
|
|
|
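TableSaveFunctions now resolves its own DB and takes the acting user directly rather than the whole Koa ctx. A hedged usage sketch matching the new constructor signature above, assuming the class is exported from ./utils as the require in internal.js suggests; the intermediate validation steps are elided:

const { TableSaveFunctions } = require("./utils")

// illustrative: the save pipeline no longer needs db/ctx injected
async function saveTable(user, tableToSave, oldTable, dataImport) {
  const tableSaveFunctions = new TableSaveFunctions({
    user,
    oldTable,
    dataImport,
  })
  // ... validation and writes happen here ...
  // post-save hooks: search indexes + CSV import, both context-aware now
  tableToSave = await tableSaveFunctions.after(tableToSave)
  return tableToSave
}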
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../db")
|
||||
const {
|
||||
generateUserMetadataID,
|
||||
getUserMetadataParams,
|
||||
|
@ -11,12 +10,14 @@ const { isEqual } = require("lodash")
|
|||
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
|
||||
const {
|
||||
getDevelopmentAppID,
|
||||
getDeployedAppIDs,
|
||||
getProdAppIDs,
|
||||
dbExists,
|
||||
} = require("@budibase/backend-core/db")
|
||||
const { doesDatabaseExist } = require("../../utilities")
|
||||
const { UserStatus } = require("@budibase/backend-core/constants")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
async function rawMetadata(db) {
|
||||
async function rawMetadata() {
|
||||
const db = getAppDB()
|
||||
return (
|
||||
await db.allDocs(
|
||||
getUserMetadataParams(null, {
|
||||
|
@ -54,13 +55,10 @@ function combineMetadataAndUser(user, metadata) {
|
|||
return null
|
||||
}
|
||||
|
||||
exports.syncGlobalUsers = async appId => {
|
||||
exports.syncGlobalUsers = async () => {
|
||||
// sync user metadata
|
||||
const db = new CouchDB(appId)
|
||||
const [users, metadata] = await Promise.all([
|
||||
getGlobalUsers(appId),
|
||||
rawMetadata(db),
|
||||
])
|
||||
const db = getAppDB()
|
||||
const [users, metadata] = await Promise.all([getGlobalUsers(), rawMetadata()])
|
||||
const toWrite = []
|
||||
for (let user of users) {
|
||||
const combined = await combineMetadataAndUser(user, metadata)
|
||||
|
@ -94,7 +92,7 @@ exports.syncUser = async function (ctx) {
|
|||
let prodAppIds
|
||||
// if they are a builder then get all production app IDs
|
||||
if ((user.builder && user.builder.global) || deleting) {
|
||||
prodAppIds = await getDeployedAppIDs(CouchDB)
|
||||
prodAppIds = await getProdAppIDs()
|
||||
} else {
|
||||
prodAppIds = Object.entries(roles)
|
||||
.filter(entry => entry[1] !== BUILTIN_ROLE_IDS.PUBLIC)
|
||||
|
@ -104,10 +102,10 @@ exports.syncUser = async function (ctx) {
|
|||
const roleId = roles[prodAppId]
|
||||
const devAppId = getDevelopmentAppID(prodAppId)
|
||||
for (let appId of [prodAppId, devAppId]) {
|
||||
if (!(await doesDatabaseExist(appId))) {
|
||||
if (!(await dbExists(appId))) {
|
||||
continue
|
||||
}
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const metadataId = generateUserMetadataID(userId)
|
||||
let metadata
|
||||
try {
|
||||
|
@ -143,8 +141,8 @@ exports.syncUser = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.fetchMetadata = async function (ctx) {
|
||||
const database = new CouchDB(ctx.appId)
|
||||
const global = await getGlobalUsers(ctx.appId)
|
||||
const database = getAppDB()
|
||||
const global = await getGlobalUsers()
|
||||
const metadata = await rawMetadata(database)
|
||||
const users = []
|
||||
for (let user of global) {
|
||||
|
@ -173,8 +171,7 @@ exports.updateSelfMetadata = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.updateMetadata = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const user = ctx.request.body
|
||||
// this isn't applicable to the user
|
||||
delete user.roles
|
||||
|
@ -186,7 +183,7 @@ exports.updateMetadata = async function (ctx) {
|
|||
}
|
||||
|
||||
exports.destroyMetadata = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
try {
|
||||
const dbUser = await db.get(ctx.params.id)
|
||||
await db.remove(dbUser._id, dbUser._rev)
|
||||
|
@ -209,7 +206,7 @@ exports.setFlag = async function (ctx) {
|
|||
ctx.throw(400, "Must supply a 'flag' field in request body.")
|
||||
}
|
||||
const flagDocId = generateUserFlagID(userId)
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
let doc
|
||||
try {
|
||||
doc = await db.get(flagDocId)
|
||||
|
@ -224,7 +221,7 @@ exports.setFlag = async function (ctx) {
|
|||
exports.getFlags = async function (ctx) {
|
||||
const userId = ctx.user._id
|
||||
const docId = generateUserFlagID(userId)
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
let doc
|
||||
try {
|
||||
doc = await db.get(docId)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../../../db")
|
||||
const viewTemplate = require("./viewBuilder")
|
||||
const { apiFileReturn } = require("../../../utilities/fileSystem")
|
||||
const exporters = require("./exporters")
|
||||
|
@ -6,14 +5,14 @@ const { saveView, getView, getViews, deleteView } = require("./utils")
|
|||
const { fetchView } = require("../row")
|
||||
const { getTable } = require("../table/utils")
|
||||
const { FieldTypes } = require("../../../constants")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
exports.fetch = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
ctx.body = await getViews(db)
|
||||
ctx.body = await getViews()
|
||||
}
|
||||
|
||||
exports.save = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const { originalName, ...viewToSave } = ctx.request.body
|
||||
const view = viewTemplate(viewToSave)
|
||||
|
||||
|
@ -21,7 +20,7 @@ exports.save = async ctx => {
|
|||
ctx.throw(400, "Cannot create view without a name")
|
||||
}
|
||||
|
||||
await saveView(db, originalName, viewToSave.name, view)
|
||||
await saveView(originalName, viewToSave.name, view)
|
||||
|
||||
// add views to table document
|
||||
const table = await db.get(ctx.request.body.tableId)
|
||||
|
@ -42,9 +41,9 @@ exports.save = async ctx => {
|
|||
}
|
||||
|
||||
exports.destroy = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const viewName = decodeURI(ctx.params.viewName)
|
||||
const view = await deleteView(db, viewName)
|
||||
const view = await deleteView(viewName)
|
||||
const table = await db.get(view.meta.tableId)
|
||||
delete table.views[viewName]
|
||||
await db.put(table)
|
||||
|
@ -53,9 +52,8 @@ exports.destroy = async ctx => {
|
|||
}
|
||||
|
||||
exports.exportView = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const viewName = decodeURI(ctx.query.view)
|
||||
const view = await getView(db, viewName)
|
||||
const view = await getView(viewName)
|
||||
|
||||
const format = ctx.query.format
|
||||
if (!format || !Object.values(exporters.ExportFormats).includes(format)) {
|
||||
|
@ -83,7 +81,7 @@ exports.exportView = async ctx => {
|
|||
let schema = view && view.meta && view.meta.schema
|
||||
if (!schema) {
|
||||
const tableId = ctx.params.tableId || view.meta.tableId
|
||||
const table = await getTable(ctx.appId, tableId)
|
||||
const table = await getTable(tableId)
|
||||
schema = table.schema
|
||||
}
|
||||
|
||||
|
|
|
@ -6,8 +6,10 @@ const {
|
|||
SEPARATOR,
|
||||
} = require("../../../db/utils")
|
||||
const env = require("../../../environment")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
exports.getView = async (db, viewName) => {
|
||||
exports.getView = async viewName => {
|
||||
const db = getAppDB()
|
||||
if (env.SELF_HOSTED) {
|
||||
const designDoc = await db.get("_design/database")
|
||||
return designDoc.views[viewName]
|
||||
|
@ -22,7 +24,8 @@ exports.getView = async (db, viewName) => {
|
|||
}
|
||||
}
|
||||
|
||||
exports.getViews = async db => {
|
||||
exports.getViews = async () => {
|
||||
const db = getAppDB()
|
||||
const response = []
|
||||
if (env.SELF_HOSTED) {
|
||||
const designDoc = await db.get("_design/database")
|
||||
|
@ -54,7 +57,8 @@ exports.getViews = async db => {
|
|||
return response
|
||||
}
|
||||
|
||||
exports.saveView = async (db, originalName, viewName, viewTemplate) => {
|
||||
exports.saveView = async (originalName, viewName, viewTemplate) => {
|
||||
const db = getAppDB()
|
||||
if (env.SELF_HOSTED) {
|
||||
const designDoc = await db.get("_design/database")
|
||||
designDoc.views = {
|
||||
|
@ -91,7 +95,8 @@ exports.saveView = async (db, originalName, viewName, viewTemplate) => {
|
|||
}
|
||||
}
|
||||
|
||||
exports.deleteView = async (db, viewName) => {
|
||||
exports.deleteView = async viewName => {
|
||||
const db = getAppDB()
|
||||
if (env.SELF_HOSTED) {
|
||||
const designDoc = await db.get("_design/database")
|
||||
const view = designDoc.views[viewName]
|
||||
|
|
|
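The view helpers follow suit: getView, getViews, saveView and deleteView now take only view-level arguments and fetch the design doc from the context DB themselves. A short sketch of a save-then-read round trip under an app context; the viewTemplate shape and wrapper are illustrative:

const { doInAppContext } = require("@budibase/backend-core/context")
const { saveView, getView } = require("./utils")

// illustrative round trip against the app's design doc
async function upsertView(appId, name, template) {
  return doInAppContext(appId, async () => {
    // originalName is null when the view is new, per the controller above
    await saveView(null, name, template)
    return getView(name)
  })
}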
@ -1,9 +1,9 @@
|
|||
const CouchDB = require("../../db")
|
||||
const { generateWebhookID, getWebhookParams } = require("../../db/utils")
|
||||
const toJsonSchema = require("to-json-schema")
|
||||
const validate = require("jsonschema").validate
|
||||
const triggers = require("../../automations/triggers")
|
||||
const { getDeployedAppID } = require("@budibase/backend-core/db")
|
||||
const { getProdAppID } = require("@budibase/backend-core/db")
|
||||
const { getAppDB, updateAppId } = require("@budibase/backend-core/context")
|
||||
|
||||
const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema"
|
||||
|
||||
|
@ -23,7 +23,7 @@ exports.WebhookType = {
|
|||
}
|
||||
|
||||
exports.fetch = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const response = await db.allDocs(
|
||||
getWebhookParams(null, {
|
||||
include_docs: true,
|
||||
|
@ -33,7 +33,7 @@ exports.fetch = async ctx => {
|
|||
}
|
||||
|
||||
exports.save = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
const webhook = ctx.request.body
|
||||
webhook.appId = ctx.appId
|
||||
|
||||
|
@ -52,12 +52,13 @@ exports.save = async ctx => {
|
|||
}
|
||||
|
||||
exports.destroy = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const db = getAppDB()
|
||||
ctx.body = await db.remove(ctx.params.id, ctx.params.rev)
|
||||
}
|
||||
|
||||
exports.buildSchema = async ctx => {
|
||||
const db = new CouchDB(ctx.params.instance)
|
||||
updateAppId(ctx.params.instance)
|
||||
const db = getAppDB()
|
||||
const webhook = await db.get(ctx.params.id)
|
||||
webhook.bodySchema = toJsonSchema(ctx.request.body)
|
||||
// update the automation outputs
|
||||
|
@ -81,9 +82,10 @@ exports.buildSchema = async ctx => {
|
|||
}
|
||||
|
||||
exports.trigger = async ctx => {
|
||||
const prodAppId = getDeployedAppID(ctx.params.instance)
|
||||
const prodAppId = getProdAppID(ctx.params.instance)
|
||||
updateAppId(prodAppId)
|
||||
try {
|
||||
const db = new CouchDB(prodAppId)
|
||||
const db = getAppDB()
|
||||
const webhook = await db.get(ctx.params.id)
|
||||
// validate against the schema
|
||||
if (webhook.bodySchema) {
|
||||
|
|
|
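The webhook endpoints are the one place the bound app ID is changed mid-request: buildSchema rebinds to the instance in the URL, and trigger rebinds to the production app before reading the webhook. A hedged sketch of that rebinding step, using getProdAppID and updateAppId as the diff does:

const { getProdAppID } = require("@budibase/backend-core/db")
const { updateAppId, getAppDB } = require("@budibase/backend-core/context")

// illustrative: triggers always execute against the deployed (prod) app
async function loadWebhookForTrigger(instanceId, webhookId) {
  const prodAppId = getProdAppID(instanceId)
  updateAppId(prodAppId)
  const db = getAppDB() // now the prod app DB
  return db.get(webhookId)
}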
@ -145,6 +145,7 @@ describe("/automations", () => {
|
|||
let table = await config.createTable()
|
||||
automation.definition.trigger.inputs.tableId = table._id
|
||||
automation.definition.steps[0].inputs.row.tableId = table._id
|
||||
automation.appId = config.appId
|
||||
automation = await config.createAutomation(automation)
|
||||
await setup.delay(500)
|
||||
const res = await testAutomation(config, automation)
|
||||
|
|
|
@ -82,7 +82,6 @@ describe("run misc tests", () => {
|
|||
dataImport.schema[col] = { type: "string" }
|
||||
}
|
||||
await tableUtils.handleDataImport(
|
||||
config.getAppId(),
|
||||
{ userId: "test" },
|
||||
table,
|
||||
dataImport
|
||||
|
|
|
@ -1,10 +1,15 @@
|
|||
const setup = require("./utilities")
|
||||
const { basicScreen } = setup.structures
|
||||
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
|
||||
const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions")
|
||||
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
|
||||
const { doInAppContext } = require("@budibase/backend-core/context")
|
||||
|
||||
const route = "/test"
|
||||
|
||||
// there are checks which are disabled in test env,
|
||||
// these checks need to be enabled for this test
|
||||
|
||||
|
||||
describe("/routing", () => {
|
||||
let request = setup.getRequest()
|
||||
let config = setup.getConfig()
|
||||
|
@ -26,20 +31,24 @@ describe("/routing", () => {
|
|||
|
||||
describe("fetch", () => {
|
||||
it("prevents a public user from accessing development app", async () => {
|
||||
await request
|
||||
.get(`/api/routing/client`)
|
||||
.set(config.publicHeaders({ prodApp: false }))
|
||||
.expect(302)
|
||||
await runInProd(() => {
|
||||
return request
|
||||
.get(`/api/routing/client`)
|
||||
.set(config.publicHeaders({ prodApp: false }))
|
||||
.expect(302)
|
||||
})
|
||||
})
|
||||
|
||||
it("prevents a non builder from accessing development app", async () => {
|
||||
await request
|
||||
.get(`/api/routing/client`)
|
||||
.set(await config.roleHeaders({
|
||||
roleId: BUILTIN_ROLE_IDS.BASIC,
|
||||
prodApp: false
|
||||
}))
|
||||
.expect(302)
|
||||
await runInProd(async () => {
|
||||
return request
|
||||
.get(`/api/routing/client`)
|
||||
.set(await config.roleHeaders({
|
||||
roleId: BUILTIN_ROLE_IDS.BASIC,
|
||||
prodApp: false
|
||||
}))
|
||||
.expect(302)
|
||||
})
|
||||
})
|
||||
it("returns the correct routing for basic user", async () => {
|
||||
const res = await request
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
const { outputProcessing } = require("../../../utilities/rowProcessor")
|
||||
const setup = require("./utilities")
|
||||
const { basicRow } = setup.structures
|
||||
const { doInAppContext } = require("@budibase/backend-core/context")
|
||||
|
||||
// mock the fetch for the search system
|
||||
jest.mock("node-fetch")
|
||||
|
@ -387,10 +388,12 @@ describe("/rows", () => {
|
|||
})
|
||||
// the environment needs to be configured for this
|
||||
await setup.switchToSelfHosted(async () => {
|
||||
const enriched = await outputProcessing({ appId: config.getAppId() }, table, [row])
|
||||
expect(enriched[0].attachment[0].url).toBe(
|
||||
`/prod-budi-app-assets/${config.getAppId()}/attachments/test/thing.csv`
|
||||
)
|
||||
doInAppContext(config.getAppId(), async () => {
|
||||
const enriched = await outputProcessing(table, [row])
|
||||
expect(enriched[0].attachment[0].url).toBe(
|
||||
`/prod-budi-app-assets/${config.getAppId()}/attachments/test/thing.csv`
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
const rowController = require("../../../controllers/row")
|
||||
const appController = require("../../../controllers/application")
|
||||
const CouchDB = require("../../../../db")
|
||||
const { AppStatus } = require("../../../../db/utils")
|
||||
const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
|
||||
const { TENANT_ID } = require("../../../../tests/utilities/structures")
|
||||
const { getAppDB, doInAppContext } = require("@budibase/backend-core/context")
|
||||
const env = require("../../../../environment")
|
||||
|
||||
function Request(appId, params) {
|
||||
this.appId = appId
|
||||
|
@ -11,9 +12,15 @@ function Request(appId, params) {
|
|||
this.request = {}
|
||||
}
|
||||
|
||||
function runRequest(appId, controlFunc, request) {
|
||||
return doInAppContext(appId, async () => {
|
||||
return controlFunc(request)
|
||||
})
|
||||
}
|
||||
|
||||
exports.getAllTableRows = async config => {
|
||||
const req = new Request(config.appId, { tableId: config.table._id })
|
||||
await rowController.fetch(req)
|
||||
await runRequest(config.appId, rowController.fetch, req)
|
||||
return req.body
|
||||
}
|
||||
|
||||
|
@ -26,14 +33,17 @@ exports.clearAllApps = async (tenantId = TENANT_ID) => {
|
|||
}
|
||||
for (let app of apps) {
|
||||
const { appId } = app
|
||||
await appController.delete(new Request(null, { appId }))
|
||||
const req = new Request(null, { appId })
|
||||
await runRequest(appId, appController.delete, req)
|
||||
}
|
||||
}
|
||||
|
||||
exports.clearAllAutomations = async config => {
|
||||
const automations = await config.getAllAutomations()
|
||||
for (let auto of automations) {
|
||||
await config.deleteAutomation(auto)
|
||||
await doInAppContext(config.appId, async () => {
|
||||
await config.deleteAutomation(auto)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -96,20 +106,32 @@ exports.checkPermissionsEndpoint = async ({
|
|||
.expect(403)
|
||||
}
|
||||
|
||||
exports.getDB = config => {
|
||||
return new CouchDB(config.getAppId())
|
||||
exports.getDB = () => {
|
||||
return getAppDB()
|
||||
}
|
||||
|
||||
exports.testAutomation = async (config, automation) => {
|
||||
return await config.request
|
||||
.post(`/api/automations/${automation._id}/test`)
|
||||
.send({
|
||||
row: {
|
||||
name: "Test",
|
||||
description: "TEST",
|
||||
},
|
||||
})
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
return runRequest(automation.appId, async () => {
|
||||
return await config.request
|
||||
.post(`/api/automations/${automation._id}/test`)
|
||||
.send({
|
||||
row: {
|
||||
name: "Test",
|
||||
description: "TEST",
|
||||
},
|
||||
})
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
})
|
||||
}
|
||||
|
||||
exports.runInProd = async func => {
|
||||
const nodeEnv = env.NODE_ENV
|
||||
const workerId = env.JEST_WORKER_ID
|
||||
env._set("NODE_ENV", "PRODUCTION")
|
||||
env._set("JEST_WORKER_ID", null)
|
||||
await func()
|
||||
env._set("NODE_ENV", nodeEnv)
|
||||
env._set("JEST_WORKER_ID", workerId)
|
||||
}
|
||||
|
|
|
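runInProd above works by temporarily rewriting NODE_ENV and JEST_WORKER_ID around the callback. A slightly more defensive variant would restore them in a finally block so a failing assertion cannot leave the process flagged as production; this is only a sketch of that idea, not part of the PR:

const env = require("../../../../environment")

// illustrative hardening of the helper shown above
exports.runInProdSafe = async func => {
  const nodeEnv = env.NODE_ENV
  const workerId = env.JEST_WORKER_ID
  env._set("NODE_ENV", "PRODUCTION")
  env._set("JEST_WORKER_ID", null)
  try {
    await func()
  } finally {
    // restored even if the callback throws
    env._set("NODE_ENV", nodeEnv)
    env._set("JEST_WORKER_ID", workerId)
  }
}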
@ -53,13 +53,12 @@ exports.cleanInputValues = (inputs, schema) => {
|
|||
* the automation but is instead part of the Table/Table. This function will get the table schema and use it to instead
|
||||
* perform the cleanInputValues function on the input row.
|
||||
*
|
||||
* @param {string} appId The instance which the Table/Table is contained under.
|
||||
* @param {string} tableId The ID of the Table/Table which the schema is to be retrieved for.
|
||||
* @param {object} row The input row structure which requires clean-up after having been through template statements.
|
||||
* @returns {Promise<Object>} The cleaned up rows object, will should now have all the required primitive types.
|
||||
*/
|
||||
exports.cleanUpRow = async (appId, tableId, row) => {
|
||||
let table = await getTable(appId, tableId)
|
||||
exports.cleanUpRow = async (tableId, row) => {
|
||||
let table = await getTable(tableId)
|
||||
return exports.cleanInputValues(row, { properties: table.schema })
|
||||
}
|
||||
|
||||
|
|
|
@ -78,7 +78,6 @@ exports.run = async function ({ inputs, appId, emitter }) {
|
|||
|
||||
try {
|
||||
inputs.row = await automationUtils.cleanUpRow(
|
||||
appId,
|
||||
inputs.row.tableId,
|
||||
inputs.row
|
||||
)
|
||||
|
|
|
@ -87,7 +87,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
|
|||
|
||||
try {
|
||||
if (tableId) {
|
||||
inputs.row = await automationUtils.cleanUpRow(appId, tableId, inputs.row)
|
||||
inputs.row = await automationUtils.cleanUpRow(tableId, inputs.row)
|
||||
}
|
||||
await rowController.patch(ctx)
|
||||
return {
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../db")
|
||||
const emitter = require("../events/index")
|
||||
const { getAutomationParams } = require("../db/utils")
|
||||
const { coerce } = require("../utilities/rowProcessor")
|
||||
|
@ -9,6 +8,7 @@ const { queue } = require("./bullboard")
|
|||
const { checkTestFlag } = require("../utilities/redis")
|
||||
const utils = require("./utils")
|
||||
const env = require("../environment")
|
||||
const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
const TRIGGER_DEFINITIONS = definitions
|
||||
const JOB_OPTS = {
|
||||
|
@ -21,39 +21,41 @@ async function queueRelevantRowAutomations(event, eventType) {
|
|||
throw `No appId specified for ${eventType} - check event emitters.`
|
||||
}
|
||||
|
||||
const db = new CouchDB(event.appId)
|
||||
let automations = await db.allDocs(
|
||||
getAutomationParams(null, { include_docs: true })
|
||||
)
|
||||
doInAppContext(event.appId, async () => {
|
||||
const db = getAppDB()
|
||||
let automations = await db.allDocs(
|
||||
getAutomationParams(null, { include_docs: true })
|
||||
)
|
||||
|
||||
// filter down to the correct event type
|
||||
automations = automations.rows
|
||||
.map(automation => automation.doc)
|
||||
.filter(automation => {
|
||||
const trigger = automation.definition.trigger
|
||||
return trigger && trigger.event === eventType
|
||||
})
|
||||
// filter down to the correct event type
|
||||
automations = automations.rows
|
||||
.map(automation => automation.doc)
|
||||
.filter(automation => {
|
||||
const trigger = automation.definition.trigger
|
||||
return trigger && trigger.event === eventType
|
||||
})
|
||||
|
||||
for (let automation of automations) {
|
||||
let automationDef = automation.definition
|
||||
let automationTrigger = automationDef ? automationDef.trigger : {}
|
||||
// don't queue events which are for dev apps, only way to test automations is
|
||||
// running tests on them, in production the test flag will never
|
||||
// be checked due to lazy evaluation (first always false)
|
||||
if (
|
||||
!env.ALLOW_DEV_AUTOMATIONS &&
|
||||
isDevAppID(event.appId) &&
|
||||
!(await checkTestFlag(automation._id))
|
||||
) {
|
||||
continue
|
||||
for (let automation of automations) {
|
||||
let automationDef = automation.definition
|
||||
let automationTrigger = automationDef ? automationDef.trigger : {}
|
||||
// don't queue events which are for dev apps, only way to test automations is
|
||||
// running tests on them, in production the test flag will never
|
||||
// be checked due to lazy evaluation (first always false)
|
||||
if (
|
||||
!env.ALLOW_DEV_AUTOMATIONS &&
|
||||
isDevAppID(event.appId) &&
|
||||
!(await checkTestFlag(automation._id))
|
||||
) {
|
||||
continue
|
||||
}
|
||||
if (
|
||||
automationTrigger.inputs &&
|
||||
automationTrigger.inputs.tableId === event.row.tableId
|
||||
) {
|
||||
await queue.add({ automation, event }, JOB_OPTS)
|
||||
}
|
||||
}
|
||||
if (
|
||||
automationTrigger.inputs &&
|
||||
automationTrigger.inputs.tableId === event.row.tableId
|
||||
) {
|
||||
await queue.add({ automation, event }, JOB_OPTS)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
emitter.on("row:save", async function (event) {
|
||||
|
|
|
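queueRelevantRowAutomations now opens an app context around all of its DB work, so the queueing logic itself never touches CouchDB constructors. A condensed sketch of the same shape, with the event and queue handling trimmed to the lookup step:

const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
const { getAutomationParams } = require("../db/utils")

// illustrative: every row event runs its automation lookup inside the
// context of the app that emitted it
async function queueForEvent(event, eventType) {
  return doInAppContext(event.appId, async () => {
    const db = getAppDB()
    const automations = await db.allDocs(
      getAutomationParams(null, { include_docs: true })
    )
    return automations.rows
      .map(row => row.doc)
      .filter(auto => {
        const trigger = auto.definition.trigger
        return trigger && trigger.event === eventType
      })
  })
}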
@ -6,8 +6,9 @@ const { queue } = require("./bullboard")
|
|||
const newid = require("../db/newid")
|
||||
const { updateEntityMetadata } = require("../utilities")
|
||||
const { MetadataTypes } = require("../constants")
|
||||
const { getDeployedAppID } = require("@budibase/backend-core/db")
|
||||
const { getProdAppID } = require("@budibase/backend-core/db")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { getAppDB, getAppId } = require("@budibase/backend-core/context")
|
||||
|
||||
const WH_STEP_ID = definitions.WEBHOOK.stepId
|
||||
const CRON_STEP_ID = definitions.CRON.stepId
|
||||
|
@ -27,7 +28,6 @@ exports.processEvent = async job => {
|
|||
|
||||
exports.updateTestHistory = async (appId, automation, history) => {
|
||||
return updateEntityMetadata(
|
||||
appId,
|
||||
MetadataTypes.AUTOMATION_TEST_HISTORY,
|
||||
automation._id,
|
||||
metadata => {
|
||||
|
@ -93,6 +93,9 @@ exports.enableCronTrigger = async (appId, automation) => {
|
|||
)
|
||||
// Assign cron job ID from bull so we can remove it later if the cron trigger is removed
|
||||
trigger.cronJobId = job.id
|
||||
// can't use getAppDB here as this is likely to be called from dev app,
|
||||
// but this call could be for either a dev or prod app, so just use what
|
||||
// was passed in
|
||||
const db = new CouchDB(appId)
|
||||
const response = await db.put(automation)
|
||||
automation._id = response.id
|
||||
|
@ -109,7 +112,8 @@ exports.enableCronTrigger = async (appId, automation) => {
|
|||
* @returns {Promise<object|undefined>} After this is complete the new automation object may have been updated and should be
|
||||
* written to DB (this does not write to DB as it would be wasteful to repeat).
|
||||
*/
|
||||
exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => {
|
||||
exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
|
||||
const appId = getAppId()
|
||||
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
|
||||
const newTrigger = newAuto ? newAuto.definition.trigger : null
|
||||
const triggerChanged =
|
||||
|
@ -128,7 +132,7 @@ exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => {
|
|||
oldTrigger.webhookId
|
||||
) {
|
||||
try {
|
||||
let db = new CouchDB(appId)
|
||||
let db = getAppDB()
|
||||
// need to get the webhook to get the rev
|
||||
const webhook = await db.get(oldTrigger.webhookId)
|
||||
const ctx = {
|
||||
|
@ -166,7 +170,7 @@ exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => {
|
|||
// the app ID has to be development for this endpoint
|
||||
// it can only be used when building the app
|
||||
// but the trigger endpoint will always be used in production
|
||||
const prodAppId = getDeployedAppID(appId)
|
||||
const prodAppId = getProdAppID(appId)
|
||||
newTrigger.inputs = {
|
||||
schemaUrl: `api/webhooks/schema/${appId}/${id}`,
|
||||
triggerUrl: `api/webhooks/trigger/${prodAppId}/${id}`,
|
||||
|
|
|
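checkForWebhooks is the half-way case: it still needs an app ID to build the schema and trigger URLs, but it now pulls it from the context instead of its arguments, while enableCronTrigger keeps an explicit appId because, as the new comment notes, the call can be for either the dev or prod app. A sketch of calling the webhook check from a save path, with the wrapper and require paths illustrative:

const { doInAppContext } = require("@budibase/backend-core/context")
const { checkForWebhooks } = require("./utils")

// illustrative: the automation save path already runs inside an app context,
// so only the old/new automation documents are passed along
async function onAutomationSave(appId, oldAuto, newAuto) {
  return doInAppContext(appId, async () => {
    return checkForWebhooks({ oldAuto, newAuto })
  })
}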
@ -1,4 +1,3 @@
|
|||
const CouchDB = require("../index")
|
||||
const { IncludeDocs, getLinkDocuments } = require("./linkUtils")
|
||||
const {
|
||||
generateLinkID,
|
||||
|
@ -7,6 +6,7 @@ const {
|
|||
} = require("../utils")
|
||||
const Sentry = require("@sentry/node")
|
||||
const { FieldTypes, RelationshipTypes } = require("../../constants")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
/**
|
||||
* Creates a new link document structure which can be put to the database. It is important to
|
||||
|
@ -52,9 +52,8 @@ function LinkDocument(
|
|||
}
|
||||
|
||||
class LinkController {
|
||||
constructor({ appId, tableId, row, table, oldTable }) {
|
||||
this._appId = appId
|
||||
this._db = new CouchDB(appId)
|
||||
constructor({ tableId, row, table, oldTable }) {
|
||||
this._db = getAppDB()
|
||||
this._tableId = tableId
|
||||
this._row = row
|
||||
this._table = table
|
||||
|
@ -99,7 +98,6 @@ class LinkController {
|
|||
*/
|
||||
getRowLinkDocs(rowId) {
|
||||
return getLinkDocuments({
|
||||
appId: this._appId,
|
||||
tableId: this._tableId,
|
||||
rowId,
|
||||
includeDocs: IncludeDocs.INCLUDE,
|
||||
|
@ -111,7 +109,6 @@ class LinkController {
|
|||
*/
|
||||
getTableLinkDocs() {
|
||||
return getLinkDocuments({
|
||||
appId: this._appId,
|
||||
tableId: this._tableId,
|
||||
includeDocs: IncludeDocs.INCLUDE,
|
||||
})
|
||||
|
@ -230,7 +227,6 @@ class LinkController {
|
|||
if (linkedSchema.relationshipType === RelationshipTypes.ONE_TO_MANY) {
|
||||
let links = (
|
||||
await getLinkDocuments({
|
||||
appId: this._appId,
|
||||
tableId: field.tableId,
|
||||
rowId: linkId,
|
||||
includeDocs: IncludeDocs.EXCLUDE,
|
||||
|
|
|
@ -9,12 +9,12 @@ const {
|
|||
getLinkedTable,
|
||||
} = require("./linkUtils")
|
||||
const { flatten } = require("lodash")
|
||||
const CouchDB = require("../../db")
|
||||
const { FieldTypes } = require("../../constants")
|
||||
const { getMultiIDParams, USER_METDATA_PREFIX } = require("../../db/utils")
|
||||
const { partition } = require("lodash")
|
||||
const { getGlobalUsersFromMetadata } = require("../../utilities/global")
|
||||
const { processFormulas } = require("../../utilities/rowProcessor/utils")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
/**
|
||||
* This functionality makes sure that when rows with links are created, updated or deleted they are processed
|
||||
|
@ -48,14 +48,13 @@ function clearRelationshipFields(table, rows) {
|
|||
return rows
|
||||
}
|
||||
|
||||
async function getLinksForRows(appId, rows) {
|
||||
async function getLinksForRows(rows) {
|
||||
const tableIds = [...new Set(rows.map(el => el.tableId))]
|
||||
// start by getting all the link values for performance reasons
|
||||
const responses = flatten(
|
||||
await Promise.all(
|
||||
tableIds.map(tableId =>
|
||||
getLinkDocuments({
|
||||
appId,
|
||||
tableId: tableId,
|
||||
includeDocs: IncludeDocs.EXCLUDE,
|
||||
})
|
||||
|
@ -72,9 +71,9 @@ async function getLinksForRows(appId, rows) {
|
|||
)
|
||||
}
|
||||
|
||||
async function getFullLinkedDocs(appId, links) {
|
||||
async function getFullLinkedDocs(links) {
|
||||
// create DBs
|
||||
const db = new CouchDB(appId)
|
||||
const db = getAppDB()
|
||||
const linkedRowIds = links.map(link => link.id)
|
||||
const uniqueRowIds = [...new Set(linkedRowIds)]
|
||||
let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
|
||||
|
@ -88,7 +87,7 @@ async function getFullLinkedDocs(appId, links) {
|
|||
let [users, other] = partition(linked, linkRow =>
|
||||
linkRow._id.startsWith(USER_METDATA_PREFIX)
|
||||
)
|
||||
users = await getGlobalUsersFromMetadata(appId, users)
|
||||
users = await getGlobalUsersFromMetadata(users)
|
||||
return [...other, ...users]
|
||||
}
|
||||
|
||||
|
@ -96,20 +95,16 @@ async function getFullLinkedDocs(appId, links) {
|
|||
* Update link documents for a row or table - this is to be called by the API controller when a change is occurring.
|
||||
* @param {string} args.eventType states what type of change which is occurring, means this can be expanded upon in the
|
||||
* future quite easily (all updates go through one function).
|
||||
* @param {string} args.appId The ID of the instance in which the change is occurring.
|
||||
* @param {string} args.tableId The ID of the of the table which is being changed.
|
||||
* @param {object|null} args.row The row which is changing, e.g. created, updated or deleted.
|
||||
* @param {object|null} args.table If the table has already been retrieved this can be used to reduce database gets.
|
||||
* @param {object|null} args.oldTable If the table is being updated then the old table can be provided for differencing.
|
||||
* @param {object|undefined} args.row The row which is changing, e.g. created, updated or deleted.
|
||||
* @param {object|undefined} args.table If the table has already been retrieved this can be used to reduce database gets.
|
||||
* @param {object|undefined} args.oldTable If the table is being updated then the old table can be provided for differencing.
|
||||
* @returns {Promise<object>} When the update is complete this will respond successfully. Returns the row for
|
||||
* row operations and the table for table operations.
|
||||
*/
|
||||
exports.updateLinks = async function (args) {
|
||||
const { eventType, appId, row, tableId, table, oldTable } = args
|
||||
const { eventType, row, tableId, table, oldTable } = args
|
||||
const baseReturnObj = row == null ? table : row
|
||||
if (appId == null) {
|
||||
throw "Cannot operate without an instance ID."
|
||||
}
|
||||
// make sure table ID is set
|
||||
if (tableId == null && table != null) {
|
||||
args.tableId = table._id
|
||||
|
@ -146,26 +141,23 @@ exports.updateLinks = async function (args) {
|
|||
/**
|
||||
* Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row.
|
||||
* This is required for formula fields, this may only be utilised internally (for now).
|
||||
* @param {string} appId The ID of the app which this request is in the context of.
|
||||
* @param {object} table The table from which the rows originated.
|
||||
* @param {array<object>} rows The rows which are to be enriched.
|
||||
* @return {Promise<*>} returns the rows with all of the enriched relationships on it.
|
||||
*/
|
||||
exports.attachFullLinkedDocs = async (appId, table, rows) => {
|
||||
exports.attachFullLinkedDocs = async (table, rows) => {
|
||||
const linkedTableIds = getLinkedTableIDs(table)
|
||||
if (linkedTableIds.length === 0) {
|
||||
return rows
|
||||
}
|
||||
// create DBs
|
||||
const db = new CouchDB(appId)
|
||||
// get all the links
|
||||
const links = (await getLinksForRows(appId, rows)).filter(link =>
|
||||
const links = (await getLinksForRows(rows)).filter(link =>
|
||||
rows.some(row => row._id === link.thisId)
|
||||
)
|
||||
// clear any existing links that could be dupe'd
|
||||
rows = clearRelationshipFields(table, rows)
|
||||
// now get the docs and combine into the rows
|
||||
let linked = await getFullLinkedDocs(appId, links)
|
||||
let linked = await getFullLinkedDocs(links)
|
||||
const linkedTables = []
|
||||
for (let row of rows) {
|
||||
for (let link of links.filter(link => link.thisId === row._id)) {
|
||||
|
@ -176,11 +168,7 @@ exports.attachFullLinkedDocs = async (appId, table, rows) => {
|
|||
if (linkedRow) {
|
||||
const linkedTableId =
|
||||
linkedRow.tableId || getRelatedTableForField(table, link.fieldName)
|
||||
const linkedTable = await getLinkedTable(
|
||||
db,
|
||||
linkedTableId,
|
||||
linkedTables
|
||||
)
|
||||
const linkedTable = await getLinkedTable(linkedTableId, linkedTables)
|
||||
if (linkedTable) {
|
||||
row[link.fieldName].push(processFormulas(linkedTable, linkedRow))
|
||||
}
|
||||
|
@ -192,18 +180,16 @@ exports.attachFullLinkedDocs = async (appId, table, rows) => {
|
|||
|
||||
/**
|
||||
* This function will take the given enriched rows and squash the links to only contain the primary display field.
|
||||
* @param {string} appId The app in which the tables/rows/links exist.
|
||||
* @param {object} table The table from which the rows originated.
|
||||
* @param {array<object>} enriched The pre-enriched rows (full docs) which are to be squashed.
|
||||
* @returns {Promise<Array>} The rows after having their links squashed to only contain the ID and primary display.
|
||||
*/
|
||||
exports.squashLinksToPrimaryDisplay = async (appId, table, enriched) => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.squashLinksToPrimaryDisplay = async (table, enriched) => {
|
||||
// will populate this as we find them
|
||||
const linkedTables = [table]
|
||||
for (let row of enriched) {
|
||||
// this only fetches the table if its not already in array
|
||||
const rowTable = await getLinkedTable(db, row.tableId, linkedTables)
|
||||
const rowTable = await getLinkedTable(row.tableId, linkedTables)
|
||||
for (let [column, schema] of Object.entries(rowTable.schema)) {
|
||||
if (schema.type !== FieldTypes.LINK || !Array.isArray(row[column])) {
|
||||
continue
|
||||
|
@ -211,7 +197,7 @@ exports.squashLinksToPrimaryDisplay = async (appId, table, enriched) => {
|
|||
const newLinks = []
|
||||
for (let link of row[column]) {
|
||||
const linkTblId = link.tableId || getRelatedTableForField(table, column)
|
||||
const linkedTable = await getLinkedTable(db, linkTblId, linkedTables)
|
||||
const linkedTable = await getLinkedTable(linkTblId, linkedTables)
|
||||
const obj = { _id: link._id }
|
||||
if (link[linkedTable.primaryDisplay]) {
|
||||
obj.primaryDisplay = link[linkedTable.primaryDisplay]
|
||||
|
|
|
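The row-enrichment helpers drop appId in the same way, which also simplifies their JSDoc. A short sketch of the enrichment pipeline as the new signatures read; attachFullLinkedDocs and squashLinksToPrimaryDisplay are the exports above, the wrapper and its require path are illustrative:

const { doInAppContext } = require("@budibase/backend-core/context")
const linkRows = require("../../../db/linkedRows")

// illustrative: enrich a set of rows with their linked documents, then
// squash each link down to { _id, primaryDisplay } for the client
async function enrichRows(appId, table, rows) {
  return doInAppContext(appId, async () => {
    const withLinks = await linkRows.attachFullLinkedDocs(table, rows)
    return linkRows.squashLinksToPrimaryDisplay(table, withLinks)
  })
}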
@ -1,8 +1,8 @@
|
|||
const CouchDB = require("../index")
|
||||
const Sentry = require("@sentry/node")
|
||||
const { ViewNames, getQueryIndex } = require("../utils")
|
||||
const { FieldTypes } = require("../../constants")
|
||||
const { createLinkView } = require("../views/staticViews")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
/**
|
||||
* Only needed so that boolean parameters are being used for includeDocs
|
||||
|
@ -17,7 +17,6 @@ exports.createLinkView = createLinkView
|
|||
|
||||
/**
|
||||
* Gets the linking documents, not the linked documents themselves.
|
||||
* @param {string} args.appId The instance in which we are searching for linked rows.
|
||||
* @param {string} args.tableId The table which we are searching for linked rows against.
|
||||
* @param {string|null} args.fieldName The name of column/field which is being altered, only looking for
|
||||
* linking documents that are related to it. If this is not specified then the table level will be assumed.
|
||||
|
@ -30,8 +29,8 @@ exports.createLinkView = createLinkView
|
|||
* (if any).
|
||||
*/
|
||||
exports.getLinkDocuments = async function (args) {
|
||||
const { appId, tableId, rowId, includeDocs } = args
|
||||
const db = new CouchDB(appId)
|
||||
const { tableId, rowId, includeDocs } = args
|
||||
const db = getAppDB()
|
||||
let params
|
||||
if (rowId != null) {
|
||||
params = { key: [tableId, rowId] }
|
||||
|
@ -68,7 +67,7 @@ exports.getLinkDocuments = async function (args) {
|
|||
} catch (err) {
|
||||
// check if the view doesn't exist, it should for all new instances
|
||||
if (err != null && err.name === "not_found") {
|
||||
await exports.createLinkView(appId)
|
||||
await exports.createLinkView()
|
||||
return exports.getLinkDocuments(arguments[0])
|
||||
} else {
|
||||
/* istanbul ignore next */
|
||||
|
@ -89,7 +88,8 @@ exports.getLinkedTableIDs = table => {
|
|||
.map(column => column.tableId)
|
||||
}
|
||||
|
||||
exports.getLinkedTable = async (db, id, tables) => {
|
||||
exports.getLinkedTable = async (id, tables) => {
|
||||
const db = getAppDB()
|
||||
let linkedTable = tables.find(table => table._id === id)
|
||||
if (linkedTable) {
|
||||
return linkedTable
|
||||
|
|
|
@ -20,7 +20,6 @@ describe("test the link controller", () => {
|
|||
|
||||
function createLinkController(table, row = null, oldTable = null) {
|
||||
const linkConfig = {
|
||||
appId: config.getAppId(),
|
||||
tableId: table._id,
|
||||
table,
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
const TestConfig = require("../../tests/utilities/TestConfiguration")
|
||||
const { basicTable, basicLinkedRow } = require("../../tests/utilities/structures")
|
||||
const { basicTable } = require("../../tests/utilities/structures")
|
||||
const linkUtils = require("../linkedRows/linkUtils")
|
||||
const links = require("../linkedRows")
|
||||
const CouchDB = require("../index")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
|
||||
describe("test link functionality", () => {
|
||||
const config = new TestConfig(false)
|
||||
|
@ -11,18 +11,18 @@ describe("test link functionality", () => {
|
|||
let db, table
|
||||
beforeEach(async () => {
|
||||
await config.init()
|
||||
db = new CouchDB(config.getAppId())
|
||||
db = getAppDB()
|
||||
table = await config.createTable()
|
||||
})
|
||||
|
||||
it("should be able to retrieve a linked table from a list", async () => {
|
||||
const retrieved = await linkUtils.getLinkedTable(db, table._id, [table])
|
||||
const retrieved = await linkUtils.getLinkedTable(table._id, [table])
|
||||
expect(retrieved._id).toBe(table._id)
|
||||
})
|
||||
|
||||
it("should be able to retrieve a table from DB and update list", async () => {
|
||||
const tables = []
|
||||
const retrieved = await linkUtils.getLinkedTable(db, table._id, tables)
|
||||
const retrieved = await linkUtils.getLinkedTable(table._id, tables)
|
||||
expect(retrieved._id).toBe(table._id)
|
||||
expect(tables[0]).toBeDefined()
|
||||
})
|
||||
|
@ -51,7 +51,6 @@ describe("test link functionality", () => {
|
|||
const db = new CouchDB("test")
|
||||
await db.put({ _id: "_design/database", views: {} })
|
||||
const output = await linkUtils.getLinkDocuments({
|
||||
appId: "test",
|
||||
tableId: "test",
|
||||
rowId: "test",
|
||||
includeDocs: false,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const CouchDB = require("../index")
|
||||
const { getAppDB } = require("@budibase/backend-core/context")
|
||||
const {
|
||||
DocumentTypes,
|
||||
SEPARATOR,
|
||||
|
@ -21,12 +21,11 @@ const SCREEN_PREFIX = DocumentTypes.SCREEN + SEPARATOR
|
|||
/**
|
||||
* Creates the link view for the instance, this will overwrite the existing one, but this should only
|
||||
* be called if it is found that the view does not exist.
|
||||
* @param {string} appId The instance to which the view should be added.
|
||||
* @returns {Promise<void>} The view now exists; please note that the next query of this view will actually build it,
|
||||
* so it may be slow.
|
||||
*/
|
||||
exports.createLinkView = async appId => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.createLinkView = async () => {
|
||||
const db = getAppDB()
|
||||
const designDoc = await db.get("_design/database")
|
||||
const view = {
|
||||
map: function (doc) {
|
||||
|
@ -57,8 +56,8 @@ exports.createLinkView = async appId => {
|
|||
await db.put(designDoc)
|
||||
}
|
||||
|
||||
exports.createRoutingView = async appId => {
|
||||
const db = new CouchDB(appId)
|
||||
exports.createRoutingView = async () => {
|
||||
const db = getAppDB()
|
||||
const designDoc = await db.get("_design/database")
|
||||
const view = {
|
||||
// if using variables in a map function need to inject them before use
|
||||
|
@ -78,8 +77,8 @@ exports.createRoutingView = async appId => {
|
|||
await db.put(designDoc)
|
||||
}
|
||||
|
||||
async function searchIndex(appId, indexName, fnString) {
|
||||
const db = new CouchDB(appId)
|
||||
async function searchIndex(indexName, fnString) {
|
||||
const db = getAppDB()
|
||||
const designDoc = await db.get("_design/database")
|
||||
designDoc.indexes = {
|
||||
[indexName]: {
|
||||
|
@ -90,9 +89,8 @@ async function searchIndex(appId, indexName, fnString) {
|
|||
await db.put(designDoc)
|
||||
}
|
||||
|
||||
exports.createAllSearchIndex = async appId => {
|
||||
exports.createAllSearchIndex = async () => {
|
||||
await searchIndex(
|
||||
appId,
|
||||
SearchIndexes.ROWS,
|
||||
function (doc) {
|
||||
function idx(input, prev) {
|
||||
|
|
|
@@ -2,7 +2,8 @@ function isTest() {
   return (
     process.env.NODE_ENV === "jest" ||
     process.env.NODE_ENV === "cypress" ||
-    process.env.JEST_WORKER_ID != null
+    (process.env.JEST_WORKER_ID != null &&
+      process.env.JEST_WORKER_ID !== "null")
   )
 }

@@ -52,7 +52,10 @@ export function buildExternalTableId(datasourceId: string, tableName: string) {
   return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}`
 }

-export function breakExternalTableId(tableId: string) {
+export function breakExternalTableId(tableId: string | undefined) {
+  if (!tableId) {
+    return {}
+  }
   const parts = tableId.split(DOUBLE_SEPARATOR)
   let tableName = parts.pop()
   // if they need joined
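The `breakExternalTableId` change makes the helper tolerate a missing ID instead of crashing on `.split()`. A tiny illustrative check of that behaviour, assumed directly from the added guard (nothing else about the ID format is implied):

const { breakExternalTableId } = require("./integrations/utils")

// before the change this would have thrown on undefined.split(...);
// with the guard it simply yields an empty object
console.log(breakExternalTableId(undefined)) // {}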
@@ -10,6 +10,7 @@ const {
 const builderMiddleware = require("./builder")
 const { isWebhookEndpoint } = require("./utils")
 const { buildCsrfMiddleware } = require("@budibase/backend-core/auth")
+const { getAppId } = require("@budibase/backend-core/context")

 function hasResource(ctx) {
   return ctx.resourceId != null

@@ -45,7 +46,7 @@ const checkAuthorizedResource = async (
 ) => {
   // get the user's roles
   const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC
-  const userRoles = await getUserRoleHierarchy(ctx.appId, roleId, {
+  const userRoles = await getUserRoleHierarchy(roleId, {
     idOnly: false,
   })
   const permError = "User does not have permission"

@@ -81,8 +82,9 @@ module.exports =

     // get the resource roles
     let resourceRoles = []
-    if (ctx.appId && hasResource(ctx)) {
-      resourceRoles = await getRequiredResourceRole(ctx.appId, permLevel, ctx)
+    const appId = getAppId()
+    if (appId && hasResource(ctx)) {
+      resourceRoles = await getRequiredResourceRole(permLevel, ctx)
     }

     // if the resource is public, proceed
@@ -5,7 +5,7 @@ const {
   checkDebounce,
   setDebounce,
 } = require("../utilities/redis")
-const CouchDB = require("../db")
+const { getDB } = require("@budibase/backend-core/db")
 const { DocumentTypes } = require("../db/utils")
 const { PermissionTypes } = require("@budibase/backend-core/permissions")
 const { app: appCache } = require("@budibase/backend-core/cache")

@@ -48,7 +48,7 @@ async function updateAppUpdatedAt(ctx) {
   if (ctx.method === "GET" || (await checkDebounce(appId))) {
     return
   }
-  const db = new CouchDB(appId)
+  const db = getDB(appId)
   const metadata = await db.get(DocumentTypes.APP_METADATA)
   metadata.updatedAt = new Date().toISOString()
   const response = await db.put(metadata)
@@ -11,9 +11,9 @@ const { generateUserMetadataID, isDevAppID } = require("../db/utils")
 const { dbExists } = require("@budibase/backend-core/db")
 const { isUserInAppTenant } = require("@budibase/backend-core/tenancy")
 const { getCachedSelf } = require("../utilities/global")
-const CouchDB = require("../db")
 const env = require("../environment")
 const { isWebhookEndpoint } = require("./utils")
+const { doInAppContext } = require("@budibase/backend-core/context")

 module.exports = async (ctx, next) => {
   // try to get the appID from the request

@@ -31,7 +31,7 @@ module.exports = async (ctx, next) => {
   // check the app exists referenced in cookie
   if (appCookie) {
     const appId = appCookie.appId
-    const exists = await dbExists(CouchDB, appId)
+    const exists = await dbExists(appId)
     if (!exists) {
       clearCookie(ctx, Cookies.CurrentApp)
       return next()

@@ -41,13 +41,15 @@ module.exports = async (ctx, next) => {
   }

   // deny access to application preview
-  if (
-    isDevAppID(requestAppId) &&
-    !isWebhookEndpoint(ctx) &&
-    (!ctx.user || !ctx.user.builder || !ctx.user.builder.global)
-  ) {
-    clearCookie(ctx, Cookies.CurrentApp)
-    return ctx.redirect("/")
+  if (!env.isTest()) {
+    if (
+      isDevAppID(requestAppId) &&
+      !isWebhookEndpoint(ctx) &&
+      (!ctx.user || !ctx.user.builder || !ctx.user.builder.global)
+    ) {
+      clearCookie(ctx, Cookies.CurrentApp)
+      return ctx.redirect("/")
+    }
   }

   let appId,

@@ -68,44 +70,46 @@ module.exports = async (ctx, next) => {
     return next()
   }

-  let noCookieSet = false
-  // if the user not in the right tenant then make sure they have no permissions
-  // need to judge this only based on the request app ID,
-  if (
-    env.MULTI_TENANCY &&
-    ctx.user &&
-    requestAppId &&
-    !isUserInAppTenant(requestAppId)
-  ) {
-    // don't error, simply remove the users rights (they are a public user)
-    delete ctx.user.builder
-    delete ctx.user.admin
-    delete ctx.user.roles
-    roleId = BUILTIN_ROLE_IDS.PUBLIC
-    noCookieSet = true
-  }
-
-  ctx.appId = appId
-  if (roleId) {
-    ctx.roleId = roleId
-    const userId = ctx.user ? generateUserMetadataID(ctx.user._id) : null
-    ctx.user = {
-      ...ctx.user,
-      // override userID with metadata one
-      _id: userId,
-      userId,
-      roleId,
-      role: await getRole(appId, roleId),
-    }
-  }
-  if (
-    (requestAppId !== appId ||
-      appCookie == null ||
-      appCookie.appId !== requestAppId) &&
-    !noCookieSet
-  ) {
-    setCookie(ctx, { appId }, Cookies.CurrentApp)
-  }
-
-  return next()
+  return doInAppContext(appId, async () => {
+    let noCookieSet = false
+    // if the user not in the right tenant then make sure they have no permissions
+    // need to judge this only based on the request app ID,
+    if (
+      env.MULTI_TENANCY &&
+      ctx.user &&
+      requestAppId &&
+      !isUserInAppTenant(requestAppId)
+    ) {
+      // don't error, simply remove the users rights (they are a public user)
+      delete ctx.user.builder
+      delete ctx.user.admin
+      delete ctx.user.roles
+      roleId = BUILTIN_ROLE_IDS.PUBLIC
+      noCookieSet = true
+    }
+
+    ctx.appId = appId
+    if (roleId) {
+      ctx.roleId = roleId
+      const userId = ctx.user ? generateUserMetadataID(ctx.user._id) : null
+      ctx.user = {
+        ...ctx.user,
+        // override userID with metadata one
+        _id: userId,
+        userId,
+        roleId,
+        role: await getRole(roleId),
+      }
+    }
+    if (
+      (requestAppId !== appId ||
+        appCookie == null ||
+        appCookie.appId !== requestAppId) &&
+      !noCookieSet
+    ) {
+      setCookie(ctx, { appId }, Cookies.CurrentApp)
+    }
+
+    return next()
+  })
 }
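Wrapping the remainder of the middleware in `doInAppContext(appId, ...)` is what makes the rest of the PR possible: everything downstream of this middleware runs with the app ID (and a single, lazily opened app DB) held on the request context, so controllers and utilities can drop their `appId` parameters. A minimal sketch of the downstream pattern, assuming the context exports used above; the handler itself is hypothetical:

const { getAppDB, getAppId } = require("@budibase/backend-core/context")

// hypothetical downstream controller: by the time this runs, the middleware
// has already wrapped next() in doInAppContext(appId, ...)
async function fetchSomething(ctx) {
  const db = getAppDB() // the same DB load is reused for the whole request
  ctx.body = {
    appId: getAppId(), // no longer read from ctx.appId
    doc: await db.get(ctx.params.id),
  }
}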
@@ -11,6 +11,9 @@ const authorizedMiddleware = require("../authorized")
 const env = require("../../environment")
 const { PermissionTypes, PermissionLevels } = require("@budibase/backend-core/permissions")
 require("@budibase/backend-core").init(require("../../db"))
+const { doInAppContext } = require("@budibase/backend-core/context")
+
+const APP_ID = ""

 class TestConfiguration {
   constructor(role) {

@@ -23,7 +26,7 @@ class TestConfiguration {
       request: {
         url: ""
       },
-      appId: "",
+      appId: APP_ID,
       auth: {},
       next: this.next,
       throw: this.throw,

@@ -32,7 +35,9 @@ class TestConfiguration {
   }

   executeMiddleware() {
-    return this.middleware(this.ctx, this.next)
+    return doInAppContext(APP_ID, () => {
+      return this.middleware(this.ctx, this.next)
+    })
   }

   setUser(user) {
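The middleware tests show the same requirement from the other side: anything that eventually calls `getAppDB()`/`getAppId()` has to execute inside an app context, so the test harness wraps execution in `doInAppContext`. A hedged sketch of a standalone jest test doing the same thing (test name and expectation are illustrative):

const { doInAppContext, getAppId } = require("@budibase/backend-core/context")

test("runs code under an app context", async () => {
  // illustrative: code under test that reads the context must be wrapped
  await doInAppContext("app_test", async () => {
    expect(getAppId()).toBe("app_test")
  })
})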
@@ -1,6 +1,11 @@
 mockAuthWithNoCookie()
 mockWorker()

+jest.mock("@budibase/backend-core/db", () => ({
+  ...jest.requireActual("@budibase/backend-core/db"),
+  dbExists: () => true,
+}))
+
 function mockWorker() {
   jest.mock("../../utilities/workerRequests", () => ({
     getGlobalSelf: () => {

@@ -50,6 +55,7 @@ function mockAuthWithCookie() {
       return "app_test"
     },
     setCookie: jest.fn(),
     clearCookie: jest.fn(),
+    getCookie: () => ({appId: "app_different", roleId: "PUBLIC"}),
   }))
   jest.mock("@budibase/backend-core/constants", () => ({
@@ -1,10 +1,10 @@
-const CouchDB = require("../db")
 const usageQuota = require("../utilities/usageQuota")
 const { getUniqueRows } = require("../utilities/usageQuota/rows")
 const {
   isExternalTable,
   isRowId: isExternalRowId,
 } = require("../integrations/utils")
+const { getAppDB } = require("@budibase/backend-core/context")

 // currently only counting new writes and deletes
 const METHOD_MAP = {

@@ -46,7 +46,7 @@ module.exports = async (ctx, next) => {
   const usageId = ctx.request.body._id
   try {
     if (ctx.appId) {
-      const db = new CouchDB(ctx.appId)
+      const db = getAppDB()
       await db.get(usageId)
     }
     return next()
@@ -1,12 +1,13 @@
-const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy")
-const { getAllApps } = require("@budibase/backend-core/db")
-import CouchDB from "../../../db"
+// @ts-ignore
+import { getGlobalDB, getTenantId } from "@budibase/backend-core/tenancy"
+// @ts-ignore
+import { getAllApps } from "@budibase/backend-core/db"
 import { getUsageQuotaDoc } from "../../../utilities/usageQuota"

 export const run = async () => {
   const db = getGlobalDB()
   // get app count
-  const devApps = await getAllApps(CouchDB, { dev: true })
+  const devApps = await getAllApps({ dev: true })
   const appCount = devApps ? devApps.length : 0

   // sync app count
@@ -1,13 +1,14 @@
-const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy")
-const { getAllApps } = require("@budibase/backend-core/db")
-import CouchDB from "../../../db"
+// @ts-ignore
+import { getGlobalDB, getTenantId } from "@budibase/backend-core/tenancy"
+// @ts-ignore
+import { getAllApps } from "@budibase/backend-core/db"
 import { getUsageQuotaDoc } from "../../../utilities/usageQuota"
 import { getUniqueRows } from "../../../utilities/usageQuota/rows"

 export const run = async () => {
   const db = getGlobalDB()
   // get all rows in all apps
-  const allApps = await getAllApps(CouchDB, { all: true })
+  const allApps = await getAllApps({ all: true })
   const appIds = allApps ? allApps.map((app: { appId: any }) => app.appId) : []
   const rows = await getUniqueRows(appIds)
   const rowCount = rows ? rows.length : 0
@@ -1,3 +1,6 @@
+const core = require("@budibase/backend-core")
+const CouchDB = require("../../db")
+core.init(CouchDB)
 const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
 const env = require("../../environment")
 const {

@@ -17,13 +20,11 @@ const supertest = require("supertest")
 const { cleanup } = require("../../utilities/fileSystem")
 const { Cookies, Headers } = require("@budibase/backend-core/constants")
 const { jwt } = require("@budibase/backend-core/auth")
-const core = require("@budibase/backend-core")
 const { getGlobalDB } = require("@budibase/backend-core/tenancy")
 const { createASession } = require("@budibase/backend-core/sessions")
 const { user: userCache } = require("@budibase/backend-core/cache")
-const CouchDB = require("../../db")
 const newid = require("../../db/newid")
-core.init(CouchDB)
+const context = require("@budibase/backend-core/context")

 const GLOBAL_USER_ID = "us_uuid1"
 const EMAIL = "babs@babs.com"

@@ -65,11 +66,21 @@ class TestConfiguration {
     request.request = {
       body: config,
     }
-    if (params) {
-      request.params = params
-    }
-    await controlFunc(request)
-    return request.body
+    async function run() {
+      if (params) {
+        request.params = params
+      }
+      await controlFunc(request)
+      return request.body
+    }
+    // check if already in a context
+    if (context.getAppId() == null) {
+      return context.doInAppContext(this.appId, async () => {
+        return run()
+      })
+    } else {
+      return run()
+    }
   }

   async globalUser({

@@ -175,6 +186,7 @@ class TestConfiguration {
     // create dev app
     this.app = await this._req({ name: appName }, null, controllers.app.create)
     this.appId = this.app.appId
+    context.updateAppId(this.appId)

     // create production app
     this.prodApp = await this.deploy()

@@ -187,14 +199,16 @@ class TestConfiguration {
   }

   async deploy() {
-    const deployment = await this._req(null, null, controllers.deploy.deployApp)
-    const prodAppId = deployment.appId.replace("_dev", "")
-    const appPackage = await this._req(
-      null,
-      { appId: prodAppId },
-      controllers.app.fetchAppPackage
-    )
-    return appPackage.application
+    await this._req(null, null, controllers.deploy.deployApp)
+    const prodAppId = this.getAppId().replace("_dev", "")
+    return context.doInAppContext(prodAppId, async () => {
+      const appPackage = await this._req(
+        null,
+        { appId: prodAppId },
+        controllers.app.fetchAppPackage
+      )
+      return appPackage.application
+    })
   }

   async updateTable(config = null) {

@@ -423,42 +437,47 @@ class TestConfiguration {

   async login({ roleId, userId, builder, prodApp = false } = {}) {
     const appId = prodApp ? this.prodAppId : this.appId

-    userId = !userId ? `us_uuid1` : userId
-    if (!this.request) {
-      throw "Server has not been opened, cannot login."
-    }
-    // make sure the user exists in the global DB
-    if (roleId !== BUILTIN_ROLE_IDS.PUBLIC) {
-      await this.globalUser({
-        userId,
-        builder,
-        roles: { [this.prodAppId]: roleId },
-    // have to fake this
-    const auth = {
-      userId,
-      sessionId: "sessionid",
-      tenantId: TENANT_ID,
-    }
-    const app = {
-      roleId: roleId,
-      appId,
-    }
-    const authToken = jwt.sign(auth, env.JWT_SECRET)
-    const appToken = jwt.sign(app, env.JWT_SECRET)
-
-    // returning necessary request headers
-    await userCache.invalidateUser(userId)
-    return {
-      Accept: "application/json",
-      Cookie: [
-        `${Cookies.Auth}=${authToken}`,
-        `${Cookies.CurrentApp}=${appToken}`,
-      ],
-      [Headers.APP_ID]: appId,
-    }
+    return context.doInAppContext(appId, async () => {
+      userId = !userId ? `us_uuid1` : userId
+      if (!this.request) {
+        throw "Server has not been opened, cannot login."
+      }
+      // make sure the user exists in the global DB
+      if (roleId !== BUILTIN_ROLE_IDS.PUBLIC) {
+        await this.globalUser({
+          id: userId,
+          builder,
+          roles: { [this.prodAppId]: roleId },
+        })
+      }
+      await createASession(userId, {
+        sessionId: "sessionid",
+        tenantId: TENANT_ID,
+      })
+      // have to fake this
+      const auth = {
+        userId,
+        sessionId: "sessionid",
+        tenantId: TENANT_ID,
+      }
+      const app = {
+        roleId: roleId,
+        appId,
+      }
+      const authToken = jwt.sign(auth, env.JWT_SECRET)
+      const appToken = jwt.sign(app, env.JWT_SECRET)

+      // returning necessary request headers
+      await userCache.invalidateUser(userId)
+      return {
+        Accept: "application/json",
+        Cookie: [
+          `${Cookies.Auth}=${authToken}`,
+          `${Cookies.CurrentApp}=${appToken}`,
+        ],
+        [Headers.APP_ID]: appId,
+      }
+    })
   }
 }

@@ -5,11 +5,11 @@ const automationUtils = require("../automations/automationUtils")
 const AutomationEmitter = require("../events/AutomationEmitter")
 const { processObject } = require("@budibase/string-templates")
 const { DEFAULT_TENANT_ID } = require("@budibase/backend-core/constants")
-const CouchDB = require("../db")
 const { DocumentTypes, isDevAppID } = require("../db/utils")
 const { doInTenant } = require("@budibase/backend-core/tenancy")
 const usage = require("../utilities/usageQuota")
 const { definitions: triggerDefs } = require("../automations/triggerInfo")
+const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")

 const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId
 const CRON_STEP_ID = triggerDefs.CRON.stepId

@@ -59,11 +59,10 @@ class Orchestrator {
   }

   async getApp() {
-    const appId = this._appId
     if (this._app) {
       return this._app
     }
-    const db = new CouchDB(appId)
+    const db = getAppDB()
     this._app = await db.get(DocumentTypes.APP_METADATA)
     return this._app
   }

@@ -131,16 +130,19 @@ class Orchestrator {
 }

 module.exports = (input, callback) => {
-  const automationOrchestrator = new Orchestrator(
-    input.data.automation,
-    input.data.event
-  )
-  automationOrchestrator
-    .execute()
-    .then(response => {
-      callback(null, response)
-    })
-    .catch(err => {
-      callback(err)
-    })
+  const appId = input.data.event.appId
+  doInAppContext(appId, () => {
+    const automationOrchestrator = new Orchestrator(
+      input.data.automation,
+      input.data.event
+    )
+    automationOrchestrator
+      .execute()
+      .then(response => {
+        callback(null, response)
+      })
+      .catch(err => {
+        callback(err)
+      })
+  })
 }
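Automation runs execute on worker threads, where the request-scoped context set up by the middleware is not available, so the thread entry point re-establishes it from the event payload before constructing the Orchestrator. A sketch of the same idea for any thread entry, assuming `doInAppContext` behaves as used in the hunk above; the job body is illustrative:

const { doInAppContext } = require("@budibase/backend-core/context")

// illustrative worker entry: rebuild the app context from the job payload,
// then let the job body use getAppDB()/getAppId() as if it were in a request
module.exports = (input, callback) => {
  doInAppContext(input.data.event.appId, () => {
    runJob(input) // hypothetical job body
      .then(response => callback(null, response))
      .catch(err => callback(err))
  })
}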
@@ -3,14 +3,13 @@ threadUtils.threadSetup()
 const ScriptRunner = require("../utilities/scriptRunner")
 const { integrations } = require("../integrations")
 const { processStringSync } = require("@budibase/string-templates")
-const CouchDB = require("../db")
+const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")

 const IS_TRIPLE_BRACE = new RegExp(/^{{3}.*}{3}$/)
 const IS_HANDLEBARS = new RegExp(/^{{2}.*}{2}$/)

 class QueryRunner {
   constructor(input, flags = { noRecursiveQuery: false }) {
-    this.appId = input.appId
     this.datasource = input.datasource
     this.queryVerb = input.queryVerb
     this.fields = input.fields

@@ -104,12 +103,11 @@ class QueryRunner {
   }

   async runAnotherQuery(queryId, parameters) {
-    const db = new CouchDB(this.appId)
+    const db = getAppDB()
     const query = await db.get(queryId)
     const datasource = await db.get(query.datasourceId)
     return new QueryRunner(
       {
-        appId: this.appId,
         datasource,
         queryVerb: query.queryVerb,
         fields: query.fields,

@@ -223,12 +221,14 @@ class QueryRunner {
 }

 module.exports = (input, callback) => {
-  const Runner = new QueryRunner(input)
-  Runner.execute()
-    .then(response => {
-      callback(null, response)
-    })
-    .catch(err => {
-      callback(err)
-    })
+  doInAppContext(input.appId, () => {
+    const Runner = new QueryRunner(input)
+    Runner.execute()
+      .then(response => {
+        callback(null, response)
+      })
+      .catch(err => {
+        callback(err)
+      })
+  })
 }
@@ -1,5 +1,4 @@
 const { budibaseTempDir } = require("../budibaseDir")
-const { isDev } = require("../index")
 const fs = require("fs")
 const { join } = require("path")
 const uuid = require("uuid/v4")

@@ -20,6 +19,7 @@ const {
   LINK_USER_METADATA_PREFIX,
 } = require("../../db/utils")
 const MemoryStream = require("memorystream")
+const { getAppId } = require("@budibase/backend-core/context")

 const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
 const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")

@@ -51,7 +51,7 @@ exports.init = () => {
 * everything required to function is ready.
 */
 exports.checkDevelopmentEnvironment = () => {
-  if (!isDev()) {
+  if (!env.isDev() || env.isTest()) {
     return
   }
   if (!fs.existsSync(budibaseTempDir())) {

@@ -251,7 +251,8 @@ exports.downloadTemplate = async (type, name) => {
 /**
 * Retrieves component libraries from object store (or tmp symlink if in local)
 */
-exports.getComponentLibraryManifest = async (appId, library) => {
+exports.getComponentLibraryManifest = async library => {
+  const appId = getAppId()
   const filename = "manifest.json"
   /* istanbul ignore next */
   // when testing in cypress and so on we need to get the package
@@ -3,7 +3,7 @@ const {
   getGlobalIDFromUserMetadataID,
 } = require("../db/utils")
 const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
-const { getDeployedAppID } = require("@budibase/backend-core/db")
+const { getProdAppID } = require("@budibase/backend-core/db")
 const { getGlobalUserParams } = require("@budibase/backend-core/db")
 const { user: userCache } = require("@budibase/backend-core/cache")
 const {

@@ -11,8 +11,10 @@ const {
   isUserInAppTenant,
 } = require("@budibase/backend-core/tenancy")
 const env = require("../environment")
+const { getAppId } = require("@budibase/backend-core/context")

-exports.updateAppRole = (appId, user) => {
+exports.updateAppRole = (user, { appId } = {}) => {
+  appId = appId || getAppId()
   if (!user || !user.roles) {
     return user
   }

@@ -24,7 +26,7 @@ exports.updateAppRole = (appId, user) => {
     return user
   }
   // always use the deployed app
-  user.roleId = user.roles[getDeployedAppID(appId)]
+  user.roleId = user.roles[getProdAppID(appId)]
   // if a role wasn't found then either set as admin (builder) or public (everyone else)
   if (!user.roleId && user.builder && user.builder.global) {
     user.roleId = BUILTIN_ROLE_IDS.ADMIN

@@ -35,18 +37,18 @@ exports.updateAppRole = (appId, user) => {
   return user
 }

-function processUser(appId, user) {
+function processUser(user, { appId } = {}) {
   if (user) {
     delete user.password
   }
-  return exports.updateAppRole(appId, user)
+  return exports.updateAppRole(user, { appId })
 }

 exports.getCachedSelf = async (ctx, appId) => {
   // this has to be tenant aware, can't depend on the context to find it out
   // running some middlewares before the tenancy causes context to break
   const user = await userCache.getUser(ctx.user._id)
-  return processUser(appId, user)
+  return processUser(user, { appId })
 }

 exports.getRawGlobalUser = async userId => {

@@ -54,12 +56,13 @@ exports.getRawGlobalUser = async userId => {
   return db.get(getGlobalIDFromUserMetadataID(userId))
 }

-exports.getGlobalUser = async (appId, userId) => {
+exports.getGlobalUser = async userId => {
   let user = await exports.getRawGlobalUser(userId)
-  return processUser(appId, user)
+  return processUser(user)
 }

-exports.getGlobalUsers = async (appId = null, users = null) => {
+exports.getGlobalUsers = async (users = null) => {
+  const appId = getAppId()
   const db = getGlobalDB()
   let globalUsers
   if (users) {

@@ -86,11 +89,11 @@ exports.getGlobalUsers = async (appId = null, users = null) => {
   if (!appId) {
     return globalUsers
   }
-  return globalUsers.map(user => exports.updateAppRole(appId, user))
+  return globalUsers.map(user => exports.updateAppRole(user))
 }

-exports.getGlobalUsersFromMetadata = async (appId, users) => {
-  const globalUsers = await exports.getGlobalUsers(appId, users)
+exports.getGlobalUsersFromMetadata = async users => {
+  const globalUsers = await exports.getGlobalUsers(users)
   return users.map(user => {
     const globalUser = globalUsers.find(
       globalUser => globalUser && user._id.includes(globalUser._id)
@@ -1,9 +1,9 @@
 const env = require("../environment")
 const { OBJ_STORE_DIRECTORY } = require("../constants")
 const { sanitizeKey } = require("@budibase/backend-core/objectStore")
-const CouchDB = require("../db")
 const { generateMetadataID } = require("../db/utils")
 const Readable = require("stream").Readable
+const { getAppDB } = require("@budibase/backend-core/context")

 const BB_CDN = "https://cdn.budi.live"

@@ -73,8 +73,8 @@ exports.attachmentsRelativeURL = attachmentKey => {
   )
 }

-exports.updateEntityMetadata = async (appId, type, entityId, updateFn) => {
-  const db = new CouchDB(appId)
+exports.updateEntityMetadata = async (type, entityId, updateFn) => {
+  const db = getAppDB()
   const id = generateMetadataID(type, entityId)
   // read it to see if it exists, we'll overwrite it no matter what
   let rev,

@@ -99,14 +99,14 @@ exports.updateEntityMetadata = async (appId, type, entityId, updateFn) => {
   }
 }

-exports.saveEntityMetadata = async (appId, type, entityId, metadata) => {
-  return exports.updateEntityMetadata(appId, type, entityId, () => {
+exports.saveEntityMetadata = async (type, entityId, metadata) => {
+  return exports.updateEntityMetadata(type, entityId, () => {
     return metadata
   })
 }

-exports.deleteEntityMetadata = async (appId, type, entityId) => {
-  const db = new CouchDB(appId)
+exports.deleteEntityMetadata = async (type, entityId) => {
+  const db = getAppDB()
   const id = generateMetadataID(type, entityId)
   let rev
   try {

@@ -141,16 +141,6 @@ exports.stringToReadStream = string => {
   })
 }

-exports.doesDatabaseExist = async dbName => {
-  try {
-    const db = new CouchDB(dbName, { skip_setup: true })
-    const info = await db.info()
-    return info && !info.error
-  } catch (err) {
-    return false
-  }
-}
-
 exports.formatBytes = bytes => {
   const units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
   const byteIncrements = 1024
@@ -1,9 +1,9 @@
-const CouchDB = require("../../db")
 const { createRoutingView } = require("../../db/views/staticViews")
 const { ViewNames, getQueryIndex, UNICODE_MAX } = require("../../db/utils")
+const { getAppDB } = require("@budibase/backend-core/context")

-exports.getRoutingInfo = async appId => {
-  const db = new CouchDB(appId)
+exports.getRoutingInfo = async () => {
+  const db = getAppDB()
   try {
     const allRouting = await db.query(getQueryIndex(ViewNames.ROUTING), {
       startKey: "",

@@ -14,8 +14,8 @@ exports.getRoutingInfo = async appId => {
     // check if the view doesn't exist, it should for all new instances
     /* istanbul ignore next */
     if (err != null && err.name === "not_found") {
-      await createRoutingView(appId)
-      return exports.getRoutingInfo(appId)
+      await createRoutingView()
+      return exports.getRoutingInfo()
     } else {
       throw err
     }
@@ -7,10 +7,10 @@ const { deleteFiles } = require("../../utilities/fileSystem/utilities")
 const { ObjectStoreBuckets } = require("../../constants")
 const {
   isProdAppID,
-  getDeployedAppID,
+  getProdAppID,
   dbExists,
 } = require("@budibase/backend-core/db")
-const CouchDB = require("../../db")
+const { getAppId } = require("@budibase/backend-core/context")

 const BASE_AUTO_ID = 1

@@ -253,26 +253,20 @@ exports.inputProcessing = (
 /**
 * This function enriches the input rows with anything they are supposed to contain, for example
 * link records or attachment links.
- * @param {string} appId the app in which the request is looking for enriched rows.
 * @param {object} table the table from which these rows came from originally, this is used to determine
 *   the schema of the rows and then enrich.
 * @param {object[]|object} rows the rows which are to be enriched.
 * @param {object} opts used to set some options for the output, such as disabling relationship squashing.
 * @returns {object[]|object} the enriched rows will be returned.
 */
-exports.outputProcessing = async (
-  { appId },
-  table,
-  rows,
-  opts = { squash: true }
-) => {
+exports.outputProcessing = async (table, rows, opts = { squash: true }) => {
   let wasArray = true
   if (!(rows instanceof Array)) {
     rows = [rows]
     wasArray = false
   }
   // attach any linked row information
-  let enriched = await linkRows.attachFullLinkedDocs(appId, table, rows)
+  let enriched = await linkRows.attachFullLinkedDocs(table, rows)

   // process formulas
   enriched = processFormulas(table, enriched, { dynamic: true })

@@ -291,18 +285,13 @@ exports.outputProcessing = async (
     }
   }
   if (opts.squash) {
-    enriched = await linkRows.squashLinksToPrimaryDisplay(
-      appId,
-      table,
-      enriched
-    )
+    enriched = await linkRows.squashLinksToPrimaryDisplay(table, enriched)
   }
   return wasArray ? enriched : enriched[0]
 }

 /**
 * Clean up any attachments that were attached to a row.
- * @param {string} appId The ID of the app from which a row is being deleted.
 * @param {object} table The table from which a row is being removed.
 * @param {any} row optional - the row being removed.
 * @param {any} rows optional - if multiple rows being deleted can do this in bulk.

@@ -311,15 +300,12 @@ exports.outputProcessing = async (
 *   deleted attachment columns.
 * @return {Promise<void>} When all attachments have been removed this will return.
 */
-exports.cleanupAttachments = async (
-  appId,
-  table,
-  { row, rows, oldRow, oldTable }
-) => {
+exports.cleanupAttachments = async (table, { row, rows, oldRow, oldTable }) => {
+  const appId = getAppId()
   if (!isProdAppID(appId)) {
-    const prodAppId = getDeployedAppID(appId)
+    const prodAppId = getProdAppID(appId)
     // if prod exists, then don't allow deleting
-    const exists = await dbExists(CouchDB, prodAppId)
+    const exists = await dbExists(prodAppId)
     if (exists) {
       return
     }
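`outputProcessing` and `cleanupAttachments` show the other half of the refactor: instead of threading `appId` through every call, utilities ask the context for it and derive the production app ID with `getProdAppID`. A short sketch of that guard, assuming the `getAppId`/`isProdAppID`/`getProdAppID`/`dbExists` usage shown in the hunks above; the helper name is illustrative:

const { getAppId } = require("@budibase/backend-core/context")
const { isProdAppID, getProdAppID, dbExists } = require("@budibase/backend-core/db")

// illustrative helper: only do destructive work when no production copy exists
async function safeToCleanUp() {
  const appId = getAppId()
  if (isProdAppID(appId)) {
    return true
  }
  // dev app: skip if the deployed (prod) database is present
  return !(await dbExists(getProdAppID(appId)))
}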
@@ -52,6 +52,7 @@ exports.getUsageQuotaDoc = async db => {
 * Given a specified tenantId this will add to the usage object for the specified property.
 * @param {string} property The property which is to be added to (within the nested usageQuota object).
 * @param {number} usage The amount (this can be negative) to adjust the number by.
+ * @param {object} opts optional - options such as dryRun, to check what update will do.
 * @returns {Promise<void>} When this completes the API key will now be up to date - the quota period may have
 *   also been reset after this call.
 */
@@ -23,6 +23,7 @@ const getAppPairs = appIds => {
 }

 const getAppRows = async appId => {
+  // need to specify the app ID, as this is used for different apps in one call
   const appDb = new CouchDB(appId)
   const response = await appDb.allDocs(
     getRowParams(null, null, {
@@ -1,13 +1,13 @@
-const CouchDB = require("../db")
 const { InternalTables } = require("../db/utils")
 const { getGlobalUser } = require("../utilities/global")
+const { getAppDB } = require("@budibase/backend-core/context")

 exports.getFullUser = async (ctx, userId) => {
-  const global = await getGlobalUser(ctx.appId, userId)
+  const global = await getGlobalUser(userId)
   let metadata
   try {
     // this will throw an error if the db doesn't exist, or there is no appId
-    const db = new CouchDB(ctx.appId)
+    const db = getAppDB()
     metadata = await db.get(userId)
   } catch (err) {
     // it is fine if there is no user metadata, just remove global db info
@@ -1,7 +1,7 @@
 const fetch = require("node-fetch")
 const env = require("../environment")
 const { checkSlashesInUrl } = require("./index")
-const { getDeployedAppID } = require("@budibase/backend-core/db")
+const { getProdAppID } = require("@budibase/backend-core/db")
 const { updateAppRole } = require("./global")
 const { Headers } = require("@budibase/backend-core/constants")
 const { getTenantId, isTenantIdSet } = require("@budibase/backend-core/tenancy")

@@ -70,15 +70,15 @@ exports.getGlobalSelf = async (ctx, appId = null) => {
   }
   let json = await response.json()
   if (appId) {
-    json = updateAppRole(appId, json)
+    json = updateAppRole(json)
   }
   return json
 }

 exports.removeAppFromUserRoles = async (ctx, appId) => {
-  const deployedAppId = getDeployedAppID(appId)
+  const prodAppId = getProdAppID(appId)
   const response = await fetch(
-    checkSlashesInUrl(env.WORKER_URL + `/api/global/roles/${deployedAppId}`),
+    checkSlashesInUrl(env.WORKER_URL + `/api/global/roles/${prodAppId}`),
     request(ctx, {
       method: "DELETE",
     })
@@ -11,7 +11,6 @@ const {
   upload,
   ObjectStoreBuckets,
 } = require("@budibase/backend-core/objectStore")
-const CouchDB = require("../../../db")
 const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy")
 const env = require("../../../environment")
 const { googleCallbackUrl, oidcCallbackUrl } = require("./auth")

@@ -252,7 +251,7 @@ exports.configChecklist = async function (ctx) {
     // TODO: Watch get started video

     // Apps exist
-    const apps = await getAllApps(CouchDB, { idsOnly: true })
+    const apps = await getAllApps({ idsOnly: true })

     // They have set up SMTP
     const smtpConfig = await getScopedFullConfig(db, {
@@ -1,15 +1,15 @@
 const { getAllRoles } = require("@budibase/backend-core/roles")
 const {
   getAllApps,
-  getDeployedAppID,
+  getProdAppID,
   DocumentTypes,
 } = require("@budibase/backend-core/db")
-const CouchDB = require("../../../db")
+const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")

 exports.fetch = async ctx => {
   const tenantId = ctx.user.tenantId
   // always use the dev apps as they'll be most up to date (true)
-  const apps = await getAllApps(CouchDB, { tenantId, all: true })
+  const apps = await getAllApps({ tenantId, all: true })
   const promises = []
   for (let app of apps) {
     // use dev app IDs

@@ -18,7 +18,7 @@ exports.fetch = async ctx => {
   const roles = await Promise.all(promises)
   const response = {}
   for (let app of apps) {
-    const deployedAppId = getDeployedAppID(app.appId)
+    const deployedAppId = getProdAppID(app.appId)
     response[deployedAppId] = {
       roles: roles.shift(),
       name: app.name,

@@ -31,12 +31,14 @@ exports.fetch = async ctx => {

 exports.find = async ctx => {
   const appId = ctx.params.appId
-  const db = new CouchDB(appId)
-  const app = await db.get(DocumentTypes.APP_METADATA)
-  ctx.body = {
-    roles: await getAllRoles(appId),
-    name: app.name,
-    version: app.version,
-    url: app.url,
-  }
+  await doInAppContext(appId, async () => {
+    const db = getAppDB()
+    const app = await db.get(DocumentTypes.APP_METADATA)
+    ctx.body = {
+      roles: await getAllRoles(),
+      name: app.name,
+      version: app.version,
+      url: app.url,
+    }
+  })
 }
File diff suppressed because it is too large