Merge branch 'feature/licensing' into feature/posthog-v2
commit 75d5c0c0b9
@@ -98,10 +98,6 @@ spec:
           value: http://worker-service:{{ .Values.services.worker.port }}
         - name: PLATFORM_URL
           value: {{ .Values.globals.platformUrl | quote }}
-        - name: USE_QUOTAS
-          value: {{ .Values.globals.useQuotas | quote }}
-        - name: EXCLUDE_QUOTAS_TENANTS
-          value: {{ .Values.globals.excludeQuotasTenants | quote }}
        - name: ACCOUNT_PORTAL_URL
          value: {{ .Values.globals.accountPortalUrl | quote }}
        - name: ACCOUNT_PORTAL_API_KEY

@@ -93,8 +93,6 @@ globals:
   logLevel: info
   selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
   multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
-  useQuotas: "0"
-  excludeQuotasTenants: "" # comma seperated list of tenants to exclude from quotas
   accountPortalUrl: ""
   accountPortalApiKey: ""
   cookieDomain: ""
package.json (11 changed lines)
@@ -21,7 +21,6 @@
     "setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
     "bootstrap": "lerna link && lerna bootstrap",
     "build": "lerna run build",
-    "publishdev": "lerna run publishdev",
     "publishnpm": "yarn build && lerna publish --force-publish",
     "release": "lerna publish patch --yes --force-publish",
     "release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop",

@@ -30,10 +29,12 @@
     "nuke:packages": "yarn run restore",
     "nuke:docker": "lerna run --parallel dev:stack:nuke",
     "clean": "lerna clean",
-    "kill-port": "kill-port 4001",
-    "dev": "yarn run kill-port && lerna link && lerna run --parallel dev:builder --concurrency 1",
-    "dev:noserver": "lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/server --ignore @budibase/worker",
-    "dev:server": "lerna run --parallel dev:builder --concurrency 1 --scope @budibase/worker --scope @budibase/server",
+    "kill-builder": "kill-port 3000",
+    "kill-server": "kill-port 4001 4002",
+    "kill-all": "yarn run kill-builder && yarn run kill-server",
+    "dev": "yarn run kill-all && lerna link && lerna run --parallel dev:builder --concurrency 1",
+    "dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/server --ignore @budibase/worker",
+    "dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/worker --scope @budibase/server",
     "test": "lerna run test",
     "lint:eslint": "eslint packages",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
@@ -13,6 +13,7 @@ exports.Cookies = {
 
 exports.Headers = {
   API_KEY: "x-budibase-api-key",
+  LICENSE_KEY: "x-budibase-license-key",
   API_VER: "x-budibase-api-version",
   APP_ID: "x-budibase-app-id",
   TYPE: "x-budibase-type",

@@ -23,6 +23,7 @@ exports.StaticDatabases = {
     docs: {
       apiKeys: "apikeys",
       usageQuota: "usage_quota",
+      licenseInfo: "license_info",
     },
   },
   // contains information about tenancy and so on

@@ -31,6 +31,7 @@ const UNICODE_MAX = "\ufff0"
 exports.ViewNames = {
   USER_BY_EMAIL: "by_email",
   BY_API_KEY: "by_api_key",
+  USER_BY_BUILDERS: "by_builders",
 }
 
 exports.StaticDatabases = StaticDatabases
@@ -434,34 +435,9 @@ async function getScopedConfig(db, params) {
   return configDoc && configDoc.config ? configDoc.config : configDoc
 }
 
-function generateNewUsageQuotaDoc() {
-  return {
-    _id: StaticDatabases.GLOBAL.docs.usageQuota,
-    quotaReset: Date.now() + 2592000000,
-    usageQuota: {
-      automationRuns: 0,
-      rows: 0,
-      storage: 0,
-      apps: 0,
-      users: 0,
-      views: 0,
-      emails: 0,
-    },
-    usageLimits: {
-      automationRuns: 1000,
-      rows: 4000,
-      apps: 4,
-      storage: 1000,
-      users: 10,
-      emails: 50,
-    },
-  }
-}
-
 exports.Replication = Replication
 exports.getScopedConfig = getScopedConfig
 exports.generateConfigID = generateConfigID
 exports.getConfigParams = getConfigParams
 exports.getScopedFullConfig = getScopedFullConfig
-exports.generateNewUsageQuotaDoc = generateNewUsageQuotaDoc
 exports.generateDevInfoID = generateDevInfoID
@@ -56,10 +56,34 @@ exports.createApiKeyView = async () => {
   await db.put(designDoc)
 }
 
+exports.createUserBuildersView = async () => {
+  const db = getGlobalDB()
+  let designDoc
+  try {
+    designDoc = await db.get("_design/database")
+  } catch (err) {
+    // no design doc, make one
+    designDoc = DesignDoc()
+  }
+  const view = {
+    map: `function(doc) {
+      if (doc.builder && doc.builder.global === true) {
+        emit(doc._id, doc._id)
+      }
+    }`,
+  }
+  designDoc.views = {
+    ...designDoc.views,
+    [ViewNames.USER_BY_BUILDERS]: view,
+  }
+  await db.put(designDoc)
+}
+
 exports.queryGlobalView = async (viewName, params, db = null) => {
   const CreateFuncByName = {
     [ViewNames.USER_BY_EMAIL]: exports.createUserEmailView,
     [ViewNames.BY_API_KEY]: exports.createApiKeyView,
+    [ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView,
   }
   // can pass DB in if working with something specific
   if (!db) {
@@ -30,6 +30,7 @@ module.exports = {
   PLATFORM_URL: process.env.PLATFORM_URL,
   POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
   ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
+  TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
   isTest,
   _set(key, value) {
     process.env[key] = value
@@ -0,0 +1,11 @@
+class BudibaseError extends Error {
+  constructor(message, type, code) {
+    super(message)
+    this.type = type
+    this.code = code
+  }
+}
+
+module.exports = {
+  BudibaseError,
+}
@@ -0,0 +1,41 @@
+const licensing = require("./licensing")
+
+const codes = {
+  ...licensing.codes,
+}
+
+const types = {
+  ...licensing.types,
+}
+
+const context = {
+  ...licensing.context,
+}
+
+const getPublicError = err => {
+  let error
+  if (err.code || err.type) {
+    // add generic error information
+    error = {
+      code: err.code,
+      type: err.type,
+    }
+
+    if (err.code && context[err.code]) {
+      error = {
+        ...error,
+        // get any additional context from this error
+        ...context[err.code](err),
+      }
+    }
+  }
+
+  return error
+}
+
+module.exports = {
+  codes,
+  types,
+  UsageLimitError: licensing.UsageLimitError,
+  getPublicError,
+}
@@ -0,0 +1,32 @@
+const { BudibaseError } = require("./base")
+
+const types = {
+  LICENSE_ERROR: "license_error",
+}
+
+const codes = {
+  USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
+}
+
+const context = {
+  [codes.USAGE_LIMIT_EXCEEDED]: err => {
+    return {
+      limitName: err.limitName,
+    }
+  },
+}
+
+class UsageLimitError extends BudibaseError {
+  constructor(message, limitName) {
+    super(message, types.LICENSE_ERROR, codes.USAGE_LIMIT_EXCEEDED)
+    this.limitName = limitName
+    this.status = 400
+  }
+}
+
+module.exports = {
+  types,
+  codes,
+  context,
+  UsageLimitError,
+}
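For context, a rough sketch (not part of this commit) of how these error helpers could be consumed from a Koa route handler; the quota guard itself and the "rows" limit name are hypothetical, only UsageLimitError and getPublicError come from the code above:

    const { errors } = require("@budibase/backend-core")

    async function handler(ctx) {
      try {
        // hypothetical guard - throws when a usage limit is hit
        throw new errors.UsageLimitError("Usage limit exceeded", "rows")
      } catch (err) {
        // getPublicError returns only the safe fields: { code, type, limitName }
        ctx.status = err.status || 500
        ctx.body = { message: err.message, error: errors.getPublicError(err) }
      }
    }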
@@ -0,0 +1,52 @@
+const env = require("../environment")
+const tenancy = require("../tenancy")
+
+/**
+ * Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.
+ * The env var is formatted as:
+ *  tenant1:feature1:feature2,tenant2:feature1
+ */
+const getFeatureFlags = () => {
+  if (!env.TENANT_FEATURE_FLAGS) {
+    return
+  }
+
+  const tenantFeatureFlags = {}
+
+  env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
+    const [tenantId, ...features] = tenantToFeatures.split(":")
+
+    features.forEach(feature => {
+      if (!tenantFeatureFlags[tenantId]) {
+        tenantFeatureFlags[tenantId] = []
+      }
+      tenantFeatureFlags[tenantId].push(feature)
+    })
+  })
+
+  return tenantFeatureFlags
+}
+
+const TENANT_FEATURE_FLAGS = getFeatureFlags()
+
+exports.isEnabled = featureFlag => {
+  const tenantId = tenancy.getTenantId()
+
+  return (
+    TENANT_FEATURE_FLAGS &&
+    TENANT_FEATURE_FLAGS[tenantId] &&
+    TENANT_FEATURE_FLAGS[tenantId].includes(featureFlag)
+  )
+}
+
+exports.getTenantFeatureFlags = tenantId => {
+  if (TENANT_FEATURE_FLAGS && TENANT_FEATURE_FLAGS[tenantId]) {
+    return TENANT_FEATURE_FLAGS[tenantId]
+  }
+
+  return []
+}
+
+exports.FeatureFlag = {
+  LICENSING: "LICENSING",
+}
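A small usage sketch (assumed, not part of this commit) of the new flag helpers; the env var follows the format documented above, and isEnabled only works inside a tenant context because it calls tenancy.getTenantId():

    // TENANT_FEATURE_FLAGS="tenant1:feature1:feature2,tenant2:feature1"
    const { featureFlags } = require("@budibase/backend-core")

    if (featureFlags.isEnabled(featureFlags.FeatureFlag.LICENSING)) {
      // licensing-specific behaviour for the current tenant
    }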
@@ -15,5 +15,9 @@ module.exports = {
   auth: require("../auth"),
   constants: require("../constants"),
   migrations: require("../migrations"),
-  analytics: require("./analytics"),
+  errors: require("./errors"),
+  env: require("./environment"),
+  accounts: require("./cloud/accounts"),
+  tenancy: require("./tenancy"),
+  featureFlags: require("./featureFlags"),
 }
@@ -2,24 +2,27 @@ const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
 
 const { authenticateThirdParty } = require("./third-party-common")
 
-async function authenticate(accessToken, refreshToken, profile, done) {
-  const thirdPartyUser = {
-    provider: profile.provider, // should always be 'google'
-    providerType: "google",
-    userId: profile.id,
-    profile: profile,
-    email: profile._json.email,
-    oauth2: {
-      accessToken: accessToken,
-      refreshToken: refreshToken,
-    },
-  }
+const buildVerifyFn = async saveUserFn => {
+  return (accessToken, refreshToken, profile, done) => {
+    const thirdPartyUser = {
+      provider: profile.provider, // should always be 'google'
+      providerType: "google",
+      userId: profile.id,
+      profile: profile,
+      email: profile._json.email,
+      oauth2: {
+        accessToken: accessToken,
+        refreshToken: refreshToken,
+      },
+    }
 
-  return authenticateThirdParty(
-    thirdPartyUser,
-    true, // require local accounts to exist
-    done
-  )
+    return authenticateThirdParty(
+      thirdPartyUser,
+      true, // require local accounts to exist
+      done,
+      saveUserFn
+    )
+  }
 }
 
 /**

@@ -27,11 +30,7 @@ async function authenticate(accessToken, refreshToken, profile, done) {
 * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
 * @returns Dynamically configured Passport Google Strategy
 */
-exports.strategyFactory = async function (
-  config,
-  callbackUrl,
-  verify = authenticate
-) {
+exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
   try {
     const { clientID, clientSecret } = config
 

@@ -41,6 +40,7 @@ exports.strategyFactory = async function (
     )
   }
 
+  const verify = buildVerifyFn(saveUserFn)
   return new GoogleStrategy(
     {
       clientID: config.clientID,

@@ -55,4 +55,4 @@ exports.strategyFactory = async function (
   }
 }
 // expose for testing
-exports.authenticate = authenticate
+exports.buildVerifyFn = buildVerifyFn
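For reference, a sketch of how the reworked factory might be wired up by the host app; the config object, callback URL, saveUser function and the use of koa-passport are all assumptions about the calling code, not part of this diff:

    const google = require("./google")
    const passport = require("koa-passport")

    async function setupGoogleAuth(config, callbackUrl, saveUser) {
      // strategyFactory now builds its verify callback from the supplied saveUser function
      const strategy = await google.strategyFactory(config, callbackUrl, saveUser)
      passport.use(strategy)
    }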
@@ -2,46 +2,49 @@ const fetch = require("node-fetch")
 const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
 const { authenticateThirdParty } = require("./third-party-common")
 
-/**
- * @param {*} issuer The identity provider base URL
- * @param {*} sub The user ID
- * @param {*} profile The user profile information. Created by passport from the /userinfo response
- * @param {*} jwtClaims The parsed id_token claims
- * @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
- * @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
- * @param {*} idToken The id_token - always a JWT
- * @param {*} params The response body from requesting an access_token
- * @param {*} done The passport callback: err, user, info
- */
-async function authenticate(
-  issuer,
-  sub,
-  profile,
-  jwtClaims,
-  accessToken,
-  refreshToken,
-  idToken,
-  params,
-  done
-) {
-  const thirdPartyUser = {
-    // store the issuer info to enable sync in future
-    provider: issuer,
-    providerType: "oidc",
-    userId: profile.id,
-    profile: profile,
-    email: getEmail(profile, jwtClaims),
-    oauth2: {
-      accessToken: accessToken,
-      refreshToken: refreshToken,
-    },
-  }
-
-  return authenticateThirdParty(
-    thirdPartyUser,
-    false, // don't require local accounts to exist
-    done
-  )
+const buildVerifyFn = saveUserFn => {
+  /**
+   * @param {*} issuer The identity provider base URL
+   * @param {*} sub The user ID
+   * @param {*} profile The user profile information. Created by passport from the /userinfo response
+   * @param {*} jwtClaims The parsed id_token claims
+   * @param {*} accessToken The access_token for contacting the identity provider - may or may not be a JWT
+   * @param {*} refreshToken The refresh_token for obtaining a new access_token - usually not a JWT
+   * @param {*} idToken The id_token - always a JWT
+   * @param {*} params The response body from requesting an access_token
+   * @param {*} done The passport callback: err, user, info
+   */
+  return async (
+    issuer,
+    sub,
+    profile,
+    jwtClaims,
+    accessToken,
+    refreshToken,
+    idToken,
+    params,
+    done
+  ) => {
+    const thirdPartyUser = {
+      // store the issuer info to enable sync in future
+      provider: issuer,
+      providerType: "oidc",
+      userId: profile.id,
+      profile: profile,
+      email: getEmail(profile, jwtClaims),
+      oauth2: {
+        accessToken: accessToken,
+        refreshToken: refreshToken,
+      },
+    }
+
+    return authenticateThirdParty(
+      thirdPartyUser,
+      false, // don't require local accounts to exist
+      done,
+      saveUserFn
+    )
+  }
 }
 
 /**

@@ -86,7 +89,7 @@ function validEmail(value) {
 * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
 * @returns Dynamically configured Passport OIDC Strategy
 */
-exports.strategyFactory = async function (config, callbackUrl) {
+exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
   try {
     const { clientID, clientSecret, configUrl } = config
 

@@ -106,6 +109,7 @@ exports.strategyFactory = async function (config, callbackUrl) {
 
     const body = await response.json()
 
+    const verify = buildVerifyFn(saveUserFn)
     return new OIDCStrategy(
       {
         issuer: body.issuer,

@@ -116,7 +120,7 @@ exports.strategyFactory = async function (config, callbackUrl) {
         clientSecret: clientSecret,
         callbackURL: callbackUrl,
       },
-      authenticate
+      verify
     )
   } catch (err) {
     console.error(err)

@@ -125,4 +129,4 @@ exports.strategyFactory = async function (config, callbackUrl) {
 }
 
 // expose for testing
-exports.authenticate = authenticate
+exports.buildVerifyFn = buildVerifyFn
@@ -58,8 +58,10 @@ describe("google", () => {
 
   it("delegates authentication to third party common", async () => {
     const google = require("../google")
+    const mockSaveUserFn = jest.fn()
+    const authenticate = await google.buildVerifyFn(mockSaveUserFn)
 
-    await google.authenticate(
+    await authenticate(
       data.accessToken,
       data.refreshToken,
       profile,

@@ -69,7 +71,8 @@ describe("google", () => {
       expect(authenticateThirdParty).toHaveBeenCalledWith(
         user,
         true,
-        mockDone)
+        mockDone,
+        mockSaveUserFn)
     })
   })
 })
@@ -83,8 +83,10 @@ describe("oidc", () => {
 
   async function doAuthenticate() {
     const oidc = require("../oidc")
+    const mockSaveUserFn = jest.fn()
+    const authenticate = await oidc.buildVerifyFn(mockSaveUserFn)
 
-    await oidc.authenticate(
+    await authenticate(
       issuer,
       sub,
       profile,
@@ -1,7 +1,6 @@
 const env = require("../../environment")
 const jwt = require("jsonwebtoken")
 const { generateGlobalUserID } = require("../../db/utils")
-const { saveUser } = require("../../utils")
 const { authError } = require("./utils")
 const { newid } = require("../../hashing")
 const { createASession } = require("../../security/sessions")

@@ -16,8 +15,11 @@ exports.authenticateThirdParty = async function (
   thirdPartyUser,
   requireLocalAccount = true,
   done,
-  saveUserFn = saveUser
+  saveUserFn
 ) {
+  if (!saveUserFn) {
+    throw new Error("Save user function must be provided")
+  }
   if (!thirdPartyUser.provider) {
     return authError(done, "third party user provider required")
   }
@@ -17,6 +17,7 @@ exports.Databases = {
   FLAGS: "flags",
   APP_METADATA: "appMetadata",
   QUERY_VARS: "queryVars",
+  LICENSES: "license",
 }
 
 exports.SEPARATOR = SEPARATOR
@@ -146,6 +146,13 @@ exports.getGlobalUserByEmail = async email => {
   })
 }
 
+exports.getBuildersCount = async () => {
+  const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, {
+    include_docs: false,
+  })
+  return builders.length
+}
+
 exports.saveUser = async (
   user,
   tenantId,

@@ -259,4 +266,5 @@ exports.platformLogout = async ({ ctx, userId, keepActiveSession }) => {
     userId,
     sessions.map(({ sessionId }) => sessionId)
   )
+  await userCache.invalidateUser(userId)
 }
@@ -1,8 +1,9 @@
 <script>
   export let wide = false
+  export let maxWidth = "80ch"
 </script>
 
-<div class:wide>
+<div style="--max-width: {maxWidth}" class:wide>
   <slot />
 </div>
 

@@ -12,7 +13,7 @@
     flex-direction: column;
     justify-content: flex-start;
     align-items: stretch;
-    max-width: 80ch;
+    max-width: var(--max-width);
     margin: 0 auto;
     padding: calc(var(--spacing-xl) * 2);
     min-height: calc(100% - var(--spacing-xl) * 4);
@@ -16,11 +16,11 @@
     easing: easing,
   })
 
-  $: if (value) $progress = value
+  $: if (value || value === 0) $progress = value
 </script>
 
 <div
-  class:spectrum-ProgressBar--indeterminate={!value}
+  class:spectrum-ProgressBar--indeterminate={!value && value !== 0}
   class:spectrum-ProgressBar--sideLabel={sideLabel}
   class="spectrum-ProgressBar spectrum-ProgressBar--size{size}"
   value={$progress}

@@ -28,7 +28,7 @@
   aria-valuenow={$progress}
   aria-valuemin="0"
   aria-valuemax="100"
-  style={width ? `width: ${width}px;` : ""}
+  style={width ? `width: ${width};` : ""}
 >
   {#if $$slots}
     <div

@@ -37,7 +37,7 @@
       <slot />
     </div>
   {/if}
-  {#if value}
+  {#if value || value === 0}
     <div
       class="spectrum-FieldLabel spectrum-ProgressBar-percentage spectrum-FieldLabel--size{size}"
     >

@@ -47,7 +47,7 @@
   <div class="spectrum-ProgressBar-track">
     <div
       class="spectrum-ProgressBar-fill"
-      style={value ? `width: ${$progress}%` : ""}
+      style={value || value === 0 ? `width: ${$progress}%` : ""}
     />
   </div>
   <div class="spectrum-ProgressBar-label" hidden="" />
@@ -5,12 +5,14 @@
   export let serif = false
   export let weight = null
   export let textAlign = null
+  export let color = null
 </script>
 
 <p
   style={`
     ${weight ? `font-weight:${weight};` : ""}
     ${textAlign ? `text-align:${textAlign};` : ""}
+    ${color ? `color:${color};` : ""}
   `}
   class="spectrum-Body spectrum-Body--size{size}"
   class:spectrum-Body--serif={serif}
@@ -5,12 +5,13 @@
   export let size = "M"
   export let textAlign
   export let noPadding = false
+  export let weight = "default" // light, heavy, default
 </script>
 
 <h1
   style={textAlign ? `text-align:${textAlign}` : ``}
   class:noPadding
-  class="spectrum-Heading spectrum-Heading--size{size}"
+  class="spectrum-Heading spectrum-Heading--size{size} spectrum-Heading--{weight}"
 >
   <slot />
 </h1>
@@ -14,7 +14,7 @@
       notifications.success("Invitation accepted successfully")
       $goto("../auth/login")
     } catch (error) {
-      notifications.error("Error accepting invitation")
+      notifications.error(error.message)
     }
   }
 </script>
@@ -57,6 +57,10 @@
         title: "Updates",
         href: "/builder/portal/settings/update",
       },
+      {
+        title: "Upgrade",
+        href: "/builder/portal/settings/upgrade",
+      },
     ])
   }
 } else {
@@ -26,7 +26,7 @@
       })
       notifications.success("Successfully created user")
     } catch (error) {
-      notifications.error("Error creating user")
+      notifications.error(error.message)
     }
   }
 </script>
@@ -0,0 +1,151 @@
+<script>
+  import {
+    Layout,
+    Heading,
+    Body,
+    Divider,
+    Link,
+    Button,
+    Input,
+    Label,
+    notifications,
+  } from "@budibase/bbui"
+  import { auth, admin } from "stores/portal"
+  import { redirect } from "@roxi/routify"
+  import { processStringSync } from "@budibase/string-templates"
+  import { API } from "api"
+  import { onMount } from "svelte"
+
+  $: license = $auth.user.license
+  $: upgradeUrl = `${$admin.accountPortalUrl}/portal/upgrade`
+
+  $: activateDisabled = !licenseKey || licenseKeyDisabled
+
+  let licenseInfo
+
+  let licenseKeyDisabled = false
+  let licenseKeyType = "text"
+  let licenseKey = ""
+
+  // Make sure page can't be visited directly in cloud
+  $: {
+    if ($admin.cloud) {
+      $redirect("../../portal")
+    }
+  }
+
+  const activate = async () => {
+    await API.activateLicenseKey({ licenseKey })
+    await auth.getSelf()
+    await setLicenseInfo()
+    notifications.success("Successfully activated")
+  }
+
+  const refresh = async () => {
+    try {
+      await API.refreshLicense()
+      await auth.getSelf()
+      notifications.success("Refreshed license")
+    } catch (err) {
+      console.error(err)
+      notifications.error("Error refreshing license")
+    }
+  }
+
+  // deactivate the license key field if there is a license key set
+  $: {
+    if (licenseInfo?.licenseKey) {
+      licenseKey = "**********************************************"
+      licenseKeyType = "password"
+      licenseKeyDisabled = true
+      activateDisabled = true
+    }
+  }
+
+  const setLicenseInfo = async () => {
+    licenseInfo = await API.getLicenseInfo()
+  }
+
+  onMount(async () => {
+    await setLicenseInfo()
+  })
+</script>
+
+{#if $auth.isAdmin}
+  <Layout noPadding>
+    <Layout gap="XS" noPadding>
+      <Heading size="M">Upgrade</Heading>
+      <Body size="M">
+        {#if license.plan.type === "free"}
+          Upgrade your budibase installation to unlock additional features. To
+          subscribe to a plan visit your <Link size="L" href={upgradeUrl}
+            >Account</Link
+          >.
+        {:else}
+          To manage your plan visit your <Link size="L" href={upgradeUrl}
+            >Account</Link
+          >.
+        {/if}
+      </Body>
+    </Layout>
+    <Divider size="S" />
+    <Layout gap="XS" noPadding>
+      <Heading size="S">Activate</Heading>
+      <Body size="S">Enter your license key below to activate your plan</Body>
+    </Layout>
+    <Layout noPadding>
+      <div class="fields">
+        <div class="field">
+          <Label size="L">License Key</Label>
+          <Input
+            thin
+            bind:value={licenseKey}
+            type={licenseKeyType}
+            disabled={licenseKeyDisabled}
+          />
+        </div>
+      </div>
+      <div>
+        <Button cta on:click={activate} disabled={activateDisabled}
+          >Activate</Button
+        >
+      </div>
+    </Layout>
+    <Divider size="S" />
+    <Layout gap="L" noPadding>
+      <Layout gap="S" noPadding>
+        <Heading size="S">Plan</Heading>
+        <Layout noPadding gap="XXS">
+          <Body size="S">You are currently on the {license.plan.type} plan</Body
+          >
+          <Body size="XS">
+            {processStringSync(
+              "Updated {{ duration time 'millisecond' }} ago",
+              {
+                time:
+                  new Date().getTime() -
+                  new Date(license.refreshedAt).getTime(),
+              }
+            )}
+          </Body>
+        </Layout>
+      </Layout>
+      <div>
+        <Button secondary on:click={refresh}>Refresh</Button>
+      </div>
+    </Layout>
+  </Layout>
+{/if}
+
+<style>
+  .fields {
+    display: grid;
+    grid-gap: var(--spacing-m);
+  }
+  .field {
+    display: grid;
+    grid-template-columns: 100px 1fr;
+    grid-gap: var(--spacing-l);
+    align-items: center;
+  }
+</style>
@@ -1,14 +0,0 @@
-{
-  // Use IntelliSense to learn about possible attributes.
-  // Hover to view descriptions of existing attributes.
-  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
-  "version": "0.2.0",
-  "configurations": [
-    {
-      "type": "node",
-      "request": "launch",
-      "name": "Publish Dev",
-      "program": "${workspaceFolder}/scripts/publishDev.js"
-    }
-  ]
-}
@@ -597,16 +597,6 @@ has@^1.0.3:
   dependencies:
     function-bind "^1.1.1"
 
-htmlparser2@^6.0.0:
-  version "6.1.0"
-  resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7"
-  integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==
-  dependencies:
-    domelementtype "^2.0.1"
-    domhandler "^4.0.0"
-    domutils "^2.5.2"
-    entities "^2.0.0"
-
 icss-replace-symbols@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded"

@@ -671,11 +661,6 @@ is-module@^1.0.0:
   resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591"
   integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=
 
-is-plain-object@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344"
-  integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==
-
 is-reference@^1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-1.2.1.tgz#8b2dac0b371f4bc994fdeaba9eb542d03002d0b7"

@@ -716,11 +701,6 @@ json5@^1.0.1:
   dependencies:
     minimist "^1.2.0"
 
-leaflet@^1.7.1:
-  version "1.7.1"
-  resolved "https://registry.yarnpkg.com/leaflet/-/leaflet-1.7.1.tgz#10d684916edfe1bf41d688a3b97127c0322a2a19"
-  integrity sha512-/xwPEBidtg69Q3HlqPdU3DnrXQOvQU/CCHA1tcDQVzOwm91YMYaILjNp7L4Eaw5Z4sOYdbBz6koWyibppd8Zqw==
-
 lilconfig@^2.0.3:
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.4.tgz#f4507d043d7058b380b6a8f5cb7bcd4b34cee082"

@@ -789,11 +769,6 @@ nanoid@^3.1.30, nanoid@^3.1.32:
   resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.2.0.tgz#62667522da6673971cca916a6d3eff3f415ff80c"
   integrity sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==
 
-nanoid@^3.3.1:
-  version "3.3.1"
-  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.1.tgz#6347a18cac88af88f58af0b3594b723d5e99bb35"
-  integrity sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw==
-
 node-releases@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.1.tgz#3d1d395f204f1f2f29a54358b9fb678765ad2fc5"

@@ -847,11 +822,6 @@ p-timeout@^3.2.0:
   dependencies:
     p-finally "^1.0.0"
 
-parse-srcset@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/parse-srcset/-/parse-srcset-1.0.2.tgz#f2bd221f6cc970a938d88556abc589caaaa2bde1"
-  integrity sha1-8r0iH2zJcKk42IVWq8WJyqqiveE=
-
 path-is-absolute@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"

@@ -1154,15 +1124,6 @@ postcss@^8.2.10:
     picocolors "^1.0.0"
     source-map-js "^1.0.1"
 
-postcss@^8.3.11:
-  version "8.4.12"
-  resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.12.tgz#1e7de78733b28970fa4743f7da6f3763648b1905"
-  integrity sha512-lg6eITwYe9v6Hr5CncVbK70SoioNQIq81nsaG86ev5hAidQvmOeETBqs7jm43K2F5/Ley3ytDtriImV6TpNiSg==
-  dependencies:
-    nanoid "^3.3.1"
-    picocolors "^1.0.0"
-    source-map-js "^1.0.2"
-
 promise.series@^0.2.0:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/promise.series/-/promise.series-0.2.0.tgz#2cc7ebe959fc3a6619c04ab4dbdc9e452d864bbd"

@@ -1308,23 +1269,6 @@ safe-identifier@^0.4.2:
   resolved "https://registry.yarnpkg.com/safe-identifier/-/safe-identifier-0.4.2.tgz#cf6bfca31c2897c588092d1750d30ef501d59fcb"
   integrity sha512-6pNbSMW6OhAi9j+N8V+U715yBQsaWJ7eyEUaOrawX+isg5ZxhUlV1NipNtgaKHmFGiABwt+ZF04Ii+3Xjkg+8w==
 
-sanitize-html@^2.7.0:
-  version "2.7.0"
-  resolved "https://registry.yarnpkg.com/sanitize-html/-/sanitize-html-2.7.0.tgz#e106205b468aca932e2f9baf241f24660d34e279"
-  integrity sha512-jfQelabOn5voO7FAfnQF7v+jsA6z9zC/O4ec0z3E35XPEtHYJT/OdUziVWlKW4irCr2kXaQAyXTXDHWAibg1tA==
-  dependencies:
-    deepmerge "^4.2.2"
-    escape-string-regexp "^4.0.0"
-    htmlparser2 "^6.0.0"
-    is-plain-object "^5.0.0"
-    parse-srcset "^1.0.2"
-    postcss "^8.3.11"
-
-screenfull@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/screenfull/-/screenfull-6.0.1.tgz#3b71e6f06b72d817a8d3be73c45ebe71fa8da1ce"
-  integrity sha512-yzQW+j4zMUBQC51xxWaoDYjxOtl8Kn+xvue3p6v/fv2pIi1jH4AldgVLU8TBfFVgH2x3VXlf3+YiA/AYIPlaew==
-
 serialize-javascript@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa"

@@ -1344,7 +1288,7 @@ slash@^3.0.0:
   resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
   integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
 
-source-map-js@^1.0.1, source-map-js@^1.0.2:
+source-map-js@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c"
   integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==
@@ -22,6 +22,7 @@ import { buildTemplateEndpoints } from "./templates"
 import { buildUserEndpoints } from "./user"
 import { buildSelfEndpoints } from "./self"
 import { buildViewEndpoints } from "./views"
+import { buildLicensingEndpoints } from "./licensing"
 
 const defaultAPIClientConfig = {
   /**

@@ -233,5 +234,6 @@ export const createAPIClient = config => {
     ...buildUserEndpoints(API),
     ...buildViewEndpoints(API),
     ...buildSelfEndpoints(API),
+    ...buildLicensingEndpoints(API),
   }
 }
@@ -0,0 +1,30 @@
+export const buildLicensingEndpoints = API => ({
+  /**
+   * Activates a self hosted license key
+   */
+  activateLicenseKey: async data => {
+    return API.post({
+      url: `/api/global/license/activate`,
+      body: data,
+    })
+  },
+
+  /**
+   * Get the license info - metadata about the license including the
+   * obfuscated license key.
+   */
+  getLicenseInfo: async () => {
+    return API.get({
+      url: "/api/global/license/info",
+    })
+  },
+
+  /**
+   * Refreshes the license cache
+   */
+  refreshLicense: async () => {
+    return API.post({
+      url: "/api/global/license/refresh",
+    })
+  },
+})
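A short sketch of how the builder consumes these endpoints (the upgrade page added in this commit makes the same three calls); the licenseKey value is whatever the admin enters, and the exact shape of the returned license info beyond its obfuscated licenseKey field is assumed:

    import { API } from "api"

    await API.activateLicenseKey({ licenseKey })
    const licenseInfo = await API.getLicenseInfo() // licenseInfo.licenseKey is the obfuscated key
    await API.refreshLicense()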
@@ -1,5 +1,5 @@
 {
-  "watch": ["src", "../backend-core"],
+  "watch": ["src", "../backend-core", "../../../budibase-pro/packages/pro"],
   "ext": "js,ts,json",
   "ignore": ["src/**/*.spec.ts", "src/**/*.spec.js"],
   "exec": "ts-node src/index.ts"
@@ -9,7 +9,7 @@
     "url": "https://github.com/Budibase/budibase.git"
   },
   "scripts": {
-    "build": "rimraf dist/ && tsc && mv dist/src/* dist/ && rmdir dist/src/ && yarn postbuild",
+    "build": "rimraf dist/ && tsc -p tsconfig.build.json && mv dist/src/* dist/ && rmdir dist/src/ && yarn postbuild",
     "postbuild": "copyfiles -u 1 src/**/*.svelte dist/ && copyfiles -u 1 src/**/*.hbs dist/ && copyfiles -u 1 src/**/*.json dist/",
     "test": "jest --coverage --maxWorkers=2",
     "test:watch": "jest --watch",

@@ -80,7 +80,8 @@
     "@google-cloud/firestore": "^5.0.2",
     "@koa/router": "8.0.0",
     "@sendgrid/mail": "7.1.1",
-    "@sentry/node": "^6.0.0",
+    "@sentry/node": "6.17.7",
+    "@types/koa__router": "^8.0.11",
     "airtable": "0.10.1",
     "arangojs": "7.2.0",
     "aws-sdk": "^2.767.0",

@@ -145,14 +146,14 @@
     "@types/apidoc": "^0.50.0",
     "@types/bull": "^3.15.1",
     "@types/google-spreadsheet": "^3.1.5",
-    "@types/jest": "^26.0.23",
+    "@types/jest": "^27.4.1",
     "@types/koa": "^2.13.3",
     "@types/koa-router": "^7.4.2",
     "@types/lodash": "4.14.180",
     "@types/node": "^15.12.4",
     "@types/oracledb": "^5.2.1",
     "@types/redis": "^4.0.11",
-    "@typescript-eslint/parser": "4.28.0",
+    "@typescript-eslint/parser": "5.12.0",
     "apidoc": "^0.50.2",
     "babel-jest": "^27.0.2",
     "copyfiles": "^2.4.1",

@@ -171,7 +172,7 @@
     "swagger-jsdoc": "^6.1.0",
     "ts-jest": "^27.0.3",
     "ts-node": "^10.0.0",
-    "typescript": "^4.3.5",
+    "typescript": "^4.5.5",
     "update-dotenv": "^1.1.1"
   },
   "optionalDependencies": {
@@ -42,6 +42,8 @@ async function init() {
     REDIS_URL: "localhost:6379",
     WORKER_URL: "http://localhost:4002",
     INTERNAL_API_KEY: "budibase",
+    ACCOUNT_PORTAL_URL: "http://localhost:10001",
+    ACCOUNT_PORTAL_API_KEY: "budibase",
     JWT_SECRET: "testsecret",
     REDIS_PASSWORD: "budibase",
     MINIO_ACCESS_KEY: "budibase",
@@ -1,29 +1,29 @@
-const env = require("../../environment")
-const packageJson = require("../../../package.json")
-const {
+import env from "../../environment"
+import packageJson from "../../../package.json"
+import {
   createLinkView,
   createRoutingView,
   createAllSearchIndex,
-} = require("../../db/views/staticViews")
-const {
+} from "../../db/views/staticViews"
+import {
   getTemplateStream,
   createApp,
   deleteApp,
-} = require("../../utilities/fileSystem")
-const {
+} from "../../utilities/fileSystem"
+import {
   generateAppID,
   getLayoutParams,
   getScreenParams,
   generateDevAppID,
   DocumentTypes,
   AppStatus,
-} = require("../../db/utils")
+} from "../../db/utils"
 const {
   BUILTIN_ROLE_IDS,
   AccessController,
 } = require("@budibase/backend-core/roles")
-const { BASE_LAYOUTS } = require("../../constants/layouts")
-const { cloneDeep } = require("lodash/fp")
+import { BASE_LAYOUTS } from "../../constants/layouts"
+import { cloneDeep } from "lodash/fp"
 const { processObject } = require("@budibase/string-templates")
 const {
   getAllApps,
@@ -31,24 +31,27 @@ const {
   getProdAppID,
   Replication,
 } = require("@budibase/backend-core/db")
-const { USERS_TABLE_SCHEMA } = require("../../constants")
-const { removeAppFromUserRoles } = require("../../utilities/workerRequests")
-const { clientLibraryPath, stringToReadStream } = require("../../utilities")
-const { getAllLocks } = require("../../utilities/redis")
-const {
+import { USERS_TABLE_SCHEMA } from "../../constants"
+import { removeAppFromUserRoles } from "../../utilities/workerRequests"
+import { clientLibraryPath, stringToReadStream } from "../../utilities"
+import { getAllLocks } from "../../utilities/redis"
+import {
   updateClientLibrary,
   backupClientLibrary,
   revertClientLibrary,
-} = require("../../utilities/fileSystem/clientLibrary")
+} from "../../utilities/fileSystem/clientLibrary"
 const { getTenantId, isMultiTenant } = require("@budibase/backend-core/tenancy")
-const { syncGlobalUsers } = require("./user")
+import { syncGlobalUsers } from "./user"
 const { app: appCache } = require("@budibase/backend-core/cache")
-const { cleanupAutomations } = require("../../automations/utils")
+import { cleanupAutomations } from "../../automations/utils"
 const {
   getAppDB,
   getProdAppDB,
   updateAppId,
 } = require("@budibase/backend-core/context")
+import { getUniqueRows } from "../../utilities/usageQuota/rows"
+import { quotas } from "@budibase/pro"
+import { errors } from "@budibase/backend-core"
 
 const URL_REGEX_SLASH = /\/|\\/g
 
@@ -61,7 +64,7 @@ async function getLayouts() {
         include_docs: true,
       })
     )
-  ).rows.map(row => row.doc)
+  ).rows.map((row: any) => row.doc)
 }
 
 async function getScreens() {

@@ -72,16 +75,16 @@ async function getScreens() {
         include_docs: true,
       })
     )
-  ).rows.map(row => row.doc)
+  ).rows.map((row: any) => row.doc)
 }
 
-function getUserRoleId(ctx) {
+function getUserRoleId(ctx: any) {
   return !ctx.user.role || !ctx.user.role._id
     ? BUILTIN_ROLE_IDS.PUBLIC
     : ctx.user.role._id
 }
 
-exports.getAppUrl = ctx => {
+export const getAppUrl = (ctx: any) => {
   // construct the url
   let url
   if (ctx.request.body.url) {
@@ -97,29 +100,34 @@ exports.getAppUrl = ctx => {
   return url
 }
 
-const checkAppUrl = (ctx, apps, url, currentAppId) => {
+const checkAppUrl = (ctx: any, apps: any, url: any, currentAppId?: string) => {
   if (currentAppId) {
-    apps = apps.filter(app => app.appId !== currentAppId)
+    apps = apps.filter((app: any) => app.appId !== currentAppId)
   }
-  if (apps.some(app => app.url === url)) {
+  if (apps.some((app: any) => app.url === url)) {
     ctx.throw(400, "App URL is already in use.")
   }
 }
 
-const checkAppName = (ctx, apps, name, currentAppId) => {
+const checkAppName = (
+  ctx: any,
+  apps: any,
+  name: any,
+  currentAppId?: string
+) => {
   // TODO: Replace with Joi
   if (!name) {
     ctx.throw(400, "Name is required")
   }
   if (currentAppId) {
-    apps = apps.filter(app => app.appId !== currentAppId)
+    apps = apps.filter((app: any) => app.appId !== currentAppId)
   }
-  if (apps.some(app => app.name === name)) {
+  if (apps.some((app: any) => app.name === name)) {
     ctx.throw(400, "App name is already in use.")
   }
 }
 
-async function createInstance(template) {
+async function createInstance(template: any) {
   const tenantId = isMultiTenant() ? getTenantId() : null
   const baseAppId = generateAppID(tenantId)
   const appId = generateDevAppID(baseAppId)
@ -160,7 +168,7 @@ async function createInstance(template) {
|
||||||
return { _id: appId }
|
return { _id: appId }
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.fetch = async ctx => {
|
export const fetch = async (ctx: any) => {
|
||||||
const dev = ctx.query && ctx.query.status === AppStatus.DEV
|
const dev = ctx.query && ctx.query.status === AppStatus.DEV
|
||||||
const all = ctx.query && ctx.query.status === AppStatus.ALL
|
const all = ctx.query && ctx.query.status === AppStatus.ALL
|
||||||
const apps = await getAllApps({ dev, all })
|
const apps = await getAllApps({ dev, all })
|
||||||
|
@ -172,7 +180,7 @@ exports.fetch = async ctx => {
|
||||||
if (app.status !== "development") {
|
if (app.status !== "development") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
const lock = locks.find(lock => lock.appId === app.appId)
|
const lock = locks.find((lock: any) => lock.appId === app.appId)
|
||||||
if (lock) {
|
if (lock) {
|
||||||
app.lockedBy = lock.user
|
app.lockedBy = lock.user
|
||||||
} else {
|
} else {
|
||||||
|
@ -185,7 +193,7 @@ exports.fetch = async ctx => {
|
||||||
ctx.body = apps
|
ctx.body = apps
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.fetchAppDefinition = async ctx => {
|
export const fetchAppDefinition = async (ctx: any) => {
|
||||||
const layouts = await getLayouts()
|
const layouts = await getLayouts()
|
||||||
const userRoleId = getUserRoleId(ctx)
|
const userRoleId = getUserRoleId(ctx)
|
||||||
const accessController = new AccessController()
|
const accessController = new AccessController()
|
||||||
|
@ -200,7 +208,7 @@ exports.fetchAppDefinition = async ctx => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.fetchAppPackage = async ctx => {
|
export const fetchAppPackage = async (ctx: any) => {
|
||||||
const db = getAppDB()
|
const db = getAppDB()
|
||||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||||
const layouts = await getLayouts()
|
const layouts = await getLayouts()
|
||||||
|
@ -221,7 +229,7 @@ exports.fetchAppPackage = async ctx => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.create = async ctx => {
|
const performAppCreate = async (ctx: any) => {
|
||||||
const apps = await getAllApps({ dev: true })
|
const apps = await getAllApps({ dev: true })
|
||||||
const name = ctx.request.body.name
|
const name = ctx.request.body.name
|
||||||
checkAppName(ctx, apps, name)
|
checkAppName(ctx, apps, name)
|
||||||
|
@ -229,7 +237,7 @@ exports.create = async ctx => {
|
||||||
checkAppUrl(ctx, apps, url)
|
checkAppUrl(ctx, apps, url)
|
||||||
|
|
||||||
const { useTemplate, templateKey, templateString } = ctx.request.body
|
const { useTemplate, templateKey, templateString } = ctx.request.body
|
||||||
const instanceConfig = {
|
const instanceConfig: any = {
|
||||||
useTemplate,
|
useTemplate,
|
||||||
key: templateKey,
|
key: templateKey,
|
||||||
templateString,
|
templateString,
|
||||||
|
@ -280,13 +288,41 @@ exports.create = async ctx => {
|
||||||
}
|
}
|
||||||
|
|
||||||
await appCache.invalidateAppMetadata(appId, newApplication)
|
await appCache.invalidateAppMetadata(appId, newApplication)
|
||||||
ctx.status = 200
|
return newApplication
|
||||||
|
}
|
||||||
|
|
||||||
|
const appPostCreate = async (ctx: any, appId: string) => {
|
||||||
|
// app import & template creation
|
||||||
|
if (ctx.request.body.useTemplate === "true") {
|
||||||
|
const rows = await getUniqueRows([appId])
|
||||||
|
const rowCount = rows ? rows.length : 0
|
||||||
|
if (rowCount) {
|
||||||
|
try {
|
||||||
|
await quotas.addRows(rowCount)
|
||||||
|
} catch (err: any) {
|
||||||
|
if (err.code && err.code === errors.codes.USAGE_LIMIT_EXCEEDED) {
|
||||||
|
// this import resulted in row usage exceeding the quota
|
||||||
|
// delete the app
|
||||||
|
// skip pre and post steps as no rows have been added to quotas yet
|
||||||
|
ctx.params.appId = appId
|
||||||
|
await destroyApp(ctx)
|
||||||
|
}
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const create = async (ctx: any) => {
|
||||||
|
const newApplication = await quotas.addApp(() => performAppCreate(ctx))
|
||||||
|
await appPostCreate(ctx, newApplication.appId)
|
||||||
ctx.body = newApplication
|
ctx.body = newApplication
|
||||||
|
ctx.status = 200
|
||||||
}
|
}
|
||||||
|
|
||||||
// This endpoint currently operates as a PATCH rather than a PUT
|
// This endpoint currently operates as a PATCH rather than a PUT
|
||||||
// Thus name and url fields are handled only if present
|
// Thus name and url fields are handled only if present
|
||||||
exports.update = async ctx => {
|
export const update = async (ctx: any) => {
|
||||||
const apps = await getAllApps({ dev: true })
|
const apps = await getAllApps({ dev: true })
|
||||||
// validation
|
// validation
|
||||||
const name = ctx.request.body.name
|
const name = ctx.request.body.name
|
||||||
|
@ -304,7 +340,7 @@ exports.update = async ctx => {
|
||||||
ctx.body = data
|
ctx.body = data
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.updateClient = async ctx => {
|
export const updateClient = async (ctx: any) => {
|
||||||
// Get current app version
|
// Get current app version
|
||||||
const db = getAppDB()
|
const db = getAppDB()
|
||||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||||
|
@ -326,7 +362,7 @@ exports.updateClient = async ctx => {
|
||||||
ctx.body = data
|
ctx.body = data
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.revertClient = async ctx => {
|
export const revertClient = async (ctx: any) => {
|
||||||
// Check app can be reverted
|
// Check app can be reverted
|
||||||
const db = getAppDB()
|
const db = getAppDB()
|
||||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||||
|
@ -349,10 +385,15 @@ exports.revertClient = async ctx => {
|
||||||
ctx.body = data
|
ctx.body = data
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.delete = async ctx => {
|
const destroyApp = async (ctx: any) => {
|
||||||
const db = getAppDB()
|
const db = getAppDB()
|
||||||
|
|
||||||
const result = await db.destroy()
|
const result = await db.destroy()
|
||||||
|
if (ctx.query?.unpublish) {
|
||||||
|
await quotas.removePublishedApp()
|
||||||
|
} else {
|
||||||
|
await quotas.removeApp()
|
||||||
|
}
|
||||||
/* istanbul ignore next */
|
/* istanbul ignore next */
|
||||||
if (!env.isTest() && !ctx.query.unpublish) {
|
if (!env.isTest() && !ctx.query.unpublish) {
|
||||||
await deleteApp(ctx.params.appId)
|
await deleteApp(ctx.params.appId)
|
||||||
|
@ -363,12 +404,30 @@ exports.delete = async ctx => {
|
||||||
// make sure the app/role doesn't stick around after the app has been deleted
|
// make sure the app/role doesn't stick around after the app has been deleted
|
||||||
await removeAppFromUserRoles(ctx, ctx.params.appId)
|
await removeAppFromUserRoles(ctx, ctx.params.appId)
|
||||||
await appCache.invalidateAppMetadata(ctx.params.appId)
|
await appCache.invalidateAppMetadata(ctx.params.appId)
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
const preDestroyApp = async (ctx: any) => {
|
||||||
|
const rows = await getUniqueRows([ctx.params.appId])
|
||||||
|
ctx.rowCount = rows.length
|
||||||
|
}
|
||||||
|
|
||||||
|
const postDestroyApp = async (ctx: any) => {
|
||||||
|
const rowCount = ctx.rowCount
|
||||||
|
if (rowCount) {
|
||||||
|
await quotas.removeRows(rowCount)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const destroy = async (ctx: any) => {
|
||||||
|
await preDestroyApp(ctx)
|
||||||
|
const result = await destroyApp(ctx)
|
||||||
|
await postDestroyApp(ctx)
|
||||||
ctx.status = 200
|
ctx.status = 200
|
||||||
ctx.body = result
|
ctx.body = result
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.sync = async (ctx, next) => {
|
export const sync = async (ctx: any, next: any) => {
|
||||||
const appId = ctx.params.appId
|
const appId = ctx.params.appId
|
||||||
if (!isDevAppID(appId)) {
|
if (!isDevAppID(appId)) {
|
||||||
ctx.throw(400, "This action cannot be performed for production apps")
|
ctx.throw(400, "This action cannot be performed for production apps")
|
||||||
|
@ -398,7 +457,7 @@ exports.sync = async (ctx, next) => {
|
||||||
let error
|
let error
|
||||||
try {
|
try {
|
||||||
await replication.replicate({
|
await replication.replicate({
|
||||||
filter: function (doc) {
|
filter: function (doc: any) {
|
||||||
return doc._id !== DocumentTypes.APP_METADATA
|
return doc._id !== DocumentTypes.APP_METADATA
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
@ -418,7 +477,7 @@ exports.sync = async (ctx, next) => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const updateAppPackage = async (appPackage, appId) => {
|
const updateAppPackage = async (appPackage: any, appId: any) => {
|
||||||
const db = getAppDB()
|
const db = getAppDB()
|
||||||
const application = await db.get(DocumentTypes.APP_METADATA)
|
const application = await db.get(DocumentTypes.APP_METADATA)
|
||||||
|
|
||||||
|
@ -437,7 +496,7 @@ const updateAppPackage = async (appPackage, appId) => {
|
||||||
return response
|
return response
|
||||||
}
|
}
|
||||||
|
|
||||||
const createEmptyAppPackage = async (ctx, app) => {
|
const createEmptyAppPackage = async (ctx: any, app: any) => {
|
||||||
const db = getAppDB()
|
const db = getAppDB()
|
||||||
|
|
||||||
let screensAndLayouts = []
|
let screensAndLayouts = []
|
|
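The converted application controller above hands all usage accounting to the quotas module from @budibase/pro, wrapping app creation in quotas.addApp and adjusting counters on delete. The library's internals are not part of this diff, so the following is only a minimal sketch of the wrapper pattern the calls imply: check the limit, run the callback, and bump the counter only if the callback succeeded. The QuotaStore interface, the "apps" key, and the limit value are assumptions for illustration, not the real @budibase/pro API.

interface QuotaStore {
  get(name: string): Promise<number>
  set(name: string, value: number): Promise<void>
}

// Hypothetical helper mirroring the quotas.addApp(fn) usage seen above.
async function addAppUsage<T>(
  store: QuotaStore,
  limit: number,
  fn: () => Promise<T>
): Promise<T> {
  const current = await store.get("apps")
  if (current + 1 > limit) {
    throw new Error("Usage limit exceeded for apps")
  }
  const result = await fn() // create the app first, so a failure leaves the count untouched
  await store.set("apps", current + 1)
  return result
}

Ordering the counter update after the callback is what lets appPostCreate above clean up safely: if the wrapped creation throws, no usage has been recorded yet.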
@@ -1,20 +1,18 @@
-const Deployment = require("./Deployment")
-const {
+import Deployment from "./Deployment"
+import {
   Replication,
   getProdAppID,
   getDevelopmentAppID,
-} = require("@budibase/backend-core/db")
-const { DocumentTypes, getAutomationParams } = require("../../../db/utils")
-const {
-  disableAllCrons,
-  enableCronTrigger,
-} = require("../../../automations/utils")
-const { app: appCache } = require("@budibase/backend-core/cache")
-const {
+} from "@budibase/backend-core/db"
+import { DocumentTypes, getAutomationParams } from "../../../db/utils"
+import { disableAllCrons, enableCronTrigger } from "../../../automations/utils"
+import { app as appCache } from "@budibase/backend-core/cache"
+import {
   getAppId,
   getAppDB,
   getProdAppDB,
-} = require("@budibase/backend-core/context")
+} from "@budibase/backend-core/context"
+import { quotas } from "@budibase/pro"

// the max time we can wait for an invalidation to complete before considering it failed
const MAX_PENDING_TIME_MS = 30 * 60000

@@ -25,9 +23,10 @@ const DeploymentStatus = {
// checks that deployments are in a good state, any pending will be updated
-async function checkAllDeployments(deployments) {
+async function checkAllDeployments(deployments: any) {
   let updated = false
-  for (let deployment of Object.values(deployments.history)) {
+  let deployment: any
+  for (deployment of Object.values(deployments.history)) {
     // check that no deployments have crashed etc and are now stuck
     if (
       deployment.status === DeploymentStatus.PENDING &&

@@ -41,7 +40,7 @@
-async function storeDeploymentHistory(deployment) {
+async function storeDeploymentHistory(deployment: any) {

@@ -70,7 +69,7 @@
-async function initDeployedApp(prodAppId) {
+async function initDeployedApp(prodAppId: any) {

@@ -79,7 +78,7 @@
-  ).rows.map(row => row.doc)
+  ).rows.map((row: any) => row.doc)

@@ -93,16 +92,17 @@
-async function deployApp(deployment) {
+async function deployApp(deployment: any) {
   try {
     const appId = getAppId()
     const devAppId = getDevelopmentAppID(appId)
     const productionAppId = getProdAppID(appId)

-    const replication = new Replication({
+    const config: any = {
       source: devAppId,
       target: productionAppId,
-    })
+    }
+    const replication = new Replication(config)

     console.log("Replication object created")

@@ -119,7 +119,7 @@
-  } catch (err) {
+  } catch (err: any) {
     deployment.setStatus(DeploymentStatus.FAILURE, err.message)
     await storeDeploymentHistory(deployment)

@@ -129,14 +129,11 @@
-exports.fetchDeployments = async function (ctx) {
+export async function fetchDeployments(ctx: any) {
   try {
     const db = getAppDB()
     const deploymentDoc = await db.get(DocumentTypes.DEPLOYMENTS)
-    const { updated, deployments } = await checkAllDeployments(
-      deploymentDoc,
-      ctx.user
-    )
+    const { updated, deployments } = await checkAllDeployments(deploymentDoc)
     if (updated) {
       await db.put(deployments)
     }

@@ -146,7 +143,7 @@
-exports.deploymentProgress = async function (ctx) {
+export async function deploymentProgress(ctx: any) {

@@ -159,7 +156,20 @@
-exports.deployApp = async function (ctx) {
+const isFirstDeploy = async () => {
+  try {
+    const db = getProdAppDB()
+    await db.get(DocumentTypes.APP_METADATA)
+  } catch (e: any) {
+    if (e.status === 404) {
+      return true
+    }
+    throw e
+  }
+  return false
+}
+
+const _deployApp = async function (ctx: any) {
   let deployment = new Deployment()
   console.log("Deployment object created")
   deployment.setStatus(DeploymentStatus.PENDING)

@@ -168,7 +178,14 @@
   console.log("Stored deployment history")
   console.log("Deploying app...")
-  await deployApp(deployment)
+  if (await isFirstDeploy()) {
+    await quotas.addPublishedApp(() => deployApp(deployment))
+  } else {
+    await deployApp(deployment)
+  }
   ctx.body = deployment
 }
+
+export { _deployApp as deployApp }
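The deploy controller above only counts a published app the first time an app is deployed, and it detects that case by reading the production app metadata and treating a 404 as "never published". A compact sketch of the same check against a generic document store follows; the DocStore interface and the metadataId default are assumptions for illustration, the real code uses the CouchDB wrapper from @budibase/backend-core and DocumentTypes.APP_METADATA.

interface DocStore {
  get(id: string): Promise<unknown>
}

async function isFirstDeploy(
  prodDb: DocStore,
  metadataId = "app_metadata"
): Promise<boolean> {
  try {
    await prodDb.get(metadataId)
    return false // metadata exists, the app has been published before
  } catch (err: any) {
    if (err && err.status === 404) {
      return true // no production metadata yet: this is the first publish
    }
    throw err // any other failure should surface to the caller
  }
}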
@@ -1,7 +1,7 @@
 const { getAllApps } = require("@budibase/backend-core/db")
 const { updateAppId } = require("@budibase/backend-core/context")
 import { search as stringSearch } from "./utils"
-import { default as controller } from "../application"
+import * as controller from "../application"
 import { Application } from "../../../definitions/common"

 function fixAppID(app: Application, params: any) {

@@ -59,7 +59,7 @@ export async function destroy(ctx: any, next: any) {
   // get the app before deleting it
   await setResponseApp(ctx)
   const body = ctx.body
-  await controller.delete(ctx)
+  await controller.destroy(ctx)
   // overwrite the body again
   ctx.body = body
   await next()
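The import form changes here because the converted controllers now expose named exports rather than a single default exports object, so the public API layer pulls in the whole namespace and calls the renamed destroy handler. A small standalone illustration of why the namespace form is needed (file names and functions below are purely illustrative):

// maths.ts: named exports only, no default export
export function add(a: number, b: number): number {
  return a + b
}

// caller.ts
import * as maths from "./maths" // a `import { default as maths }` form would fail here
console.log(maths.add(1, 2))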
@@ -1,5 +1,5 @@
 import { search as stringSearch } from "./utils"
-import { default as queryController } from "../query"
+import * as queryController from "../query"

 export async function search(ctx: any, next: any) {
   await queryController.fetch(ctx)
@@ -1,4 +1,4 @@
-import { default as rowController } from "../row"
+import * as rowController from "../row"
 import { addRev } from "./utils"
 import { Row } from "../../../definitions/common"
 import { convertBookmark } from "../../../utilities"
@@ -1,22 +1,19 @@
-const {
-  generateQueryID,
-  getQueryParams,
-  isProdAppID,
-} = require("../../../db/utils")
-const { BaseQueryVerbs } = require("../../../constants")
-const { Thread, ThreadType } = require("../../../threads")
-const { save: saveDatasource } = require("../datasource")
-const { RestImporter } = require("./import")
-const { invalidateDynamicVariables } = require("../../../threads/utils")
-const environment = require("../../../environment")
-const { getAppDB } = require("@budibase/backend-core/context")
+import { generateQueryID, getQueryParams, isProdAppID } from "../../../db/utils"
+import { BaseQueryVerbs } from "../../../constants"
+import { Thread, ThreadType } from "../../../threads"
+import { save as saveDatasource } from "../datasource"
+import { RestImporter } from "./import"
+import { invalidateDynamicVariables } from "../../../threads/utils"
+import { QUERY_THREAD_TIMEOUT } from "../../../environment"
+import { getAppDB } from "@budibase/backend-core/context"
+import { quotas } from "@budibase/pro"

 const Runner = new Thread(ThreadType.QUERY, {
-  timeoutMs: environment.QUERY_THREAD_TIMEOUT || 10000,
+  timeoutMs: QUERY_THREAD_TIMEOUT || 10000,
 })

 // simple function to append "readable" to all read queries
-function enrichQueries(input) {
+function enrichQueries(input: any) {

@@ -27,7 +24,7 @@
-exports.fetch = async function (ctx) {
+export async function fetch(ctx: any) {

@@ -36,10 +33,10 @@
-  ctx.body = enrichQueries(body.rows.map(row => row.doc))
+  ctx.body = enrichQueries(body.rows.map((row: any) => row.doc))

-exports.import = async ctx => {
+const _import = async (ctx: any) => {

@@ -49,7 +46,7 @@
-    const info = await importer.getInfo()
+    const info: any = await importer.getInfo()

@@ -77,8 +74,9 @@
   ctx.status = 200
 }
+export { _import as import }

-exports.save = async function (ctx) {
+export async function save(ctx: any) {

@@ -93,7 +91,7 @@
-exports.find = async function (ctx) {
+export async function find(ctx: any) {

@@ -104,7 +102,7 @@
-exports.preview = async function (ctx) {
+export async function preview(ctx: any) {

@@ -114,16 +112,18 @@
   try {
-    const { rows, keys, info, extra } = await Runner.run({
+    const runFn = () =>
+      Runner.run({
        appId: ctx.appId,
        datasource,
        queryVerb,
        fields,
        parameters,
        transformer,
        queryId,
      })
+    const { rows, keys, info, extra } = await quotas.addQuery(runFn)
     ctx.body = {
       rows,
       schemaFields: [...new Set(keys)],

@@ -135,7 +135,7 @@
-async function execute(ctx, opts = { rowsOnly: false }) {
+async function execute(ctx: any, opts = { rowsOnly: false }) {

@@ -153,16 +153,19 @@
   // call the relevant CRUD method on the integration class
   try {
-    const { rows, pagination, extra } = await Runner.run({
+    const runFn = () =>
+      Runner.run({
        appId: ctx.appId,
        datasource,
        queryVerb: query.queryVerb,
        fields: query.fields,
        pagination: ctx.request.body.pagination,
        parameters: enrichedParameters,
        transformer: query.transformer,
        queryId: ctx.params.queryId,
      })
+    const { rows, pagination, extra } = await quotas.addQuery(runFn)
     if (opts && opts.rowsOnly) {
       ctx.body = rows
     } else {

-exports.executeV1 = async function (ctx) {
+export async function executeV1(ctx: any) {
   return execute(ctx, { rowsOnly: true })
 }

-exports.executeV2 = async function (ctx) {
+export async function executeV2(ctx: any) {
   return execute(ctx, { rowsOnly: false })
 }

-const removeDynamicVariables = async queryId => {
+const removeDynamicVariables = async (queryId: any) => {

@@ -190,19 +193,19 @@
     // delete dynamic variables from the datasource
     datasource.config.dynamicVariables = dynamicVariables.filter(
-      dv => dv.queryId !== queryId
+      (dv: any) => dv.queryId !== queryId
     )
     // invalidate the deleted variables
     const variablesToDelete = dynamicVariables.filter(
-      dv => dv.queryId === queryId
+      (dv: any) => dv.queryId === queryId
     )

-exports.destroy = async function (ctx) {
+export async function destroy(ctx: any) {
   const db = getAppDB()
   await removeDynamicVariables(ctx.params.queryId)
   await db.remove(ctx.params.queryId, ctx.params.revId)
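Both preview and execute above stop calling Runner.run directly and instead hand a thunk (runFn) to quotas.addQuery, so the query is only counted when the metering layer actually invokes it. A minimal sketch of that thunk-wrapping shape, with an in-memory counter and limit that are purely illustrative rather than the real @budibase/pro behaviour:

let queryCount = 0

// Hypothetical stand-in for quotas.addQuery(runFn): run the thunk, then count it.
async function addQueryUsage<T>(runFn: () => Promise<T>, limit = 1000): Promise<T> {
  if (queryCount + 1 > limit) {
    throw new Error("Usage limit exceeded for queries")
  }
  const result = await runFn() // only queries that actually execute are counted
  queryCount += 1
  return result
}

// usage, mirroring the controller: const { rows } = await addQueryUsage(() => runner.run(config))

Passing a thunk rather than an already-started promise keeps the decision of whether and when to run the query inside the metering helper.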
@@ -52,7 +52,7 @@ interface RunConfig {

 module External {
   function buildFilters(
-    id: string | undefined,
+    id: string | undefined | string[],
     filters: SearchFilters,
     table: Table
   ) {
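The id parameter of buildFilters is widened here so callers can pass a single id, a list of ids, or nothing. A tiny illustrative helper (not part of the actual controller) showing the usual way such a parameter is normalised before filtering:

function normaliseIds(id: string | string[] | undefined): string[] {
  if (id === undefined) {
    return [] // no id filter requested
  }
  return Array.isArray(id) ? id : [id] // always hand back an array
}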
@@ -1,15 +1,16 @@
-const internal = require("./internal")
-const external = require("./external")
-const { isExternalTable } = require("../../../integrations/utils")
+import { quotas } from "@budibase/pro"
+import internal from "./internal"
+import external from "./external"
+import { isExternalTable } from "../../../integrations/utils"

-function pickApi(tableId) {
+function pickApi(tableId: any) {
   if (isExternalTable(tableId)) {
     return external
   }
   return internal
 }

-function getTableId(ctx) {
+function getTableId(ctx: any) {
   if (ctx.request.body && ctx.request.body.tableId) {
     return ctx.request.body.tableId
   }

@@ -21,13 +22,13 @@
-exports.patch = async ctx => {
+export async function patch(ctx: any): Promise<any> {
   // if it doesn't have an _id then its save
   if (body && !body._id) {
-    return exports.save(ctx)
+    return save(ctx)
   }

@@ -41,13 +42,13 @@
-exports.save = async function (ctx) {
+const saveRow = async (ctx: any) => {
   // if it has an ID already then its a patch
   if (body && body._id) {
-    return exports.patch(ctx)
+    return patch(ctx)
   }

@@ -60,7 +61,11 @@
-exports.fetchView = async function (ctx) {
+export async function save(ctx: any) {
+  await quotas.addRow(() => saveRow(ctx))
+}
+
+export async function fetchView(ctx: any) {
   const tableId = getTableId(ctx)
   try {
     ctx.body = await pickApi(tableId).fetchView(ctx)

@@ -69,7 +74,7 @@
-exports.fetch = async function (ctx) {
+export async function fetch(ctx: any) {

-exports.find = async function (ctx) {
+export async function find(ctx: any) {

@@ -87,19 +92,21 @@
-exports.destroy = async function (ctx) {
+export async function destroy(ctx: any) {
   let response, row
   if (inputs.rows) {
     let { rows } = await pickApi(tableId).bulkDestroy(ctx)
+    await quotas.removeRows(rows.length)
     response = rows
     for (let row of rows) {
       ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
     }
   } else {
     let resp = await pickApi(tableId).destroy(ctx)
+    await quotas.removeRow()
     response = resp.response
     row = resp.row
     ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)

@@ -110,7 +117,7 @@
-exports.search = async ctx => {
+export async function search(ctx: any) {

@@ -120,7 +127,7 @@
-exports.validate = async function (ctx) {
+export async function validate(ctx: any) {

@@ -129,7 +136,7 @@
-exports.fetchEnrichedRow = async function (ctx) {
+export async function fetchEnrichedRow(ctx: any) {

@@ -138,7 +145,7 @@
-exports.export = async function (ctx) {
+export const exportRows = async (ctx: any) => {
   const tableId = getTableId(ctx)
   try {
     ctx.body = await pickApi(tableId).exportRows(ctx)
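The row controller keeps its original dispatch rule: a body without an _id is treated as a new row and routed to save, a body with an _id is routed to patch, and only the save path is wrapped in the row quota. A compact illustration of that routing rule with illustrative names only:

interface RowBody {
  _id?: string
  [key: string]: unknown
}

// Mirrors the patch/save mutual dispatch above: no _id means create, an _id means update.
function chooseWriteOperation(body: RowBody): "save" | "patch" {
  return body._id ? "patch" : "save"
}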
@@ -1,19 +1,19 @@
-const linkRows = require("../../../db/linkedRows")
-const { getRowParams, generateTableID } = require("../../../db/utils")
-const { FieldTypes } = require("../../../constants")
-const {
+import { updateLinks, EventType } from "../../../db/linkedRows"
+import { getRowParams, generateTableID } from "../../../db/utils"
+import { FieldTypes } from "../../../constants"
+import {
   TableSaveFunctions,
   hasTypeChanged,
   getTable,
   handleDataImport,
-} = require("./utils")
-const usageQuota = require("../../../utilities/usageQuota")
+} from "./utils"
 const { getAppDB } = require("@budibase/backend-core/context")
-const env = require("../../../environment")
-const { cleanupAttachments } = require("../../../utilities/rowProcessor")
-const { runStaticFormulaChecks } = require("./bulkFormula")
+import { isTest } from "../../../environment"
+import { cleanupAttachments } from "../../../utilities/rowProcessor"
+import { runStaticFormulaChecks } from "./bulkFormula"
+import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"

-exports.save = async function (ctx) {
+export async function save(ctx: any) {
   const db = getAppDB()
   const { dataImport, ...rest } = ctx.request.body
   let tableToSave = {

@@ -80,10 +80,8 @@
   // update linked rows
   try {
-    const linkResp = await linkRows.updateLinks({
-      eventType: oldTable
-        ? linkRows.EventType.TABLE_UPDATED
-        : linkRows.EventType.TABLE_SAVE,
+    const linkResp: any = await updateLinks({
+      eventType: oldTable ? EventType.TABLE_UPDATED : EventType.TABLE_SAVE,
       table: tableToSave,
       oldTable: oldTable,
     })

@@ -105,11 +103,11 @@
   tableToSave = await tableSaveFunctions.after(tableToSave)
   // has to run after, make sure it has _id
-  await runStaticFormulaChecks(tableToSave, { oldTable })
+  await runStaticFormulaChecks(tableToSave, { oldTable, deletion: null })
   return tableToSave
 }

-exports.destroy = async function (ctx) {
+export async function destroy(ctx: any) {
   const db = getAppDB()
   const tableToDelete = await db.get(ctx.params.tableId)

@@ -119,12 +117,14 @@
-  await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
-  await usageQuota.update(usageQuota.Properties.ROW, -rows.rows.length)
+  await db.bulkDocs(
+    rows.rows.map((row: any) => ({ ...row.doc, _deleted: true }))
+  )
+  await quotas.removeRows(rows.rows.length)

   // update linked rows
-  await linkRows.updateLinks({
-    eventType: linkRows.EventType.TABLE_DELETE,
+  await updateLinks({
+    eventType: EventType.TABLE_DELETE,
     table: tableToDelete,
   })

@@ -132,10 +132,10 @@
   await db.remove(tableToDelete)

   // remove table search index
-  if (!env.isTest()) {
+  if (!isTest()) {
     const currentIndexes = await db.getIndexes()
     const existingIndex = currentIndexes.indexes.find(
-      existing => existing.name === `search:${ctx.params.tableId}`
+      (existing: any) => existing.name === `search:${ctx.params.tableId}`
     )
     if (existingIndex) {
       await db.deleteIndex(existingIndex)

@@ -143,12 +143,15 @@
   // has to run after, make sure it has _id
-  await runStaticFormulaChecks(tableToDelete, { deletion: true })
+  await runStaticFormulaChecks(tableToDelete, {
+    oldTable: null,
+    deletion: true,
+  })
   await cleanupAttachments(tableToDelete, { rows })
   return tableToDelete
 }

-exports.bulkImport = async function (ctx) {
+export async function bulkImport(ctx: any) {
   const table = await getTable(ctx.params.tableId)
   const { dataImport } = ctx.request.body
   await handleDataImport(ctx.user, table, dataImport)
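Deleting a table above removes every row by writing _deleted: true tombstones in one bulk operation, CouchDB-style, and then decrements the row quota by the number of rows removed. A minimal sketch of the tombstone mapping step (the StoredRow shape is an assumption for illustration):

interface StoredRow {
  _id: string
  _rev: string
  [key: string]: unknown
}

// Each doc keeps its _id and _rev so the database can match the revision being deleted.
function toTombstones(rows: StoredRow[]): Array<StoredRow & { _deleted: true }> {
  return rows.map(row => ({ ...row, _deleted: true as const }))
}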
@@ -1,34 +1,34 @@
-const csvParser = require("../../../utilities/csvParser")
-const {
+import { transform } from "../../../utilities/csvParser"
+import {
   getRowParams,
   generateRowID,
   InternalTables,
   getTableParams,
   BudibaseInternalDB,
-} = require("../../../db/utils")
-const { isEqual } = require("lodash")
-const { AutoFieldSubTypes, FieldTypes } = require("../../../constants")
-const {
+} from "../../../db/utils"
+import { isEqual } from "lodash"
+import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
+import {
   inputProcessing,
   cleanupAttachments,
-} = require("../../../utilities/rowProcessor")
-const {
+} from "../../../utilities/rowProcessor"
+import {
   USERS_TABLE_SCHEMA,
   SwitchableTypes,
   CanSwitchTypes,
-} = require("../../../constants")
-const {
+} from "../../../constants"
+import {
   isExternalTable,
   breakExternalTableId,
   isSQL,
-} = require("../../../integrations/utils")
-const { getViews, saveView } = require("../view/utils")
-const viewTemplate = require("../view/viewBuilder")
-const usageQuota = require("../../../utilities/usageQuota")
+} from "../../../integrations/utils"
+import { getViews, saveView } from "../view/utils"
+import viewTemplate from "../view/viewBuilder"
 const { getAppDB } = require("@budibase/backend-core/context")
-const { cloneDeep } = require("lodash/fp")
+import { cloneDeep } from "lodash/fp"
+import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"

-exports.clearColumns = async (table, columnNames) => {
+export async function clearColumns(table: any, columnNames: any) {

@@ -36,18 +36,18 @@
   return db.bulkDocs(
-    rows.rows.map(({ doc }) => {
-      columnNames.forEach(colName => delete doc[colName])
+    rows.rows.map(({ doc }: any) => {
+      columnNames.forEach((colName: any) => delete doc[colName])
       return doc
     })
   )
 }

-exports.checkForColumnUpdates = async (oldTable, updatedTable) => {
+export async function checkForColumnUpdates(oldTable: any, updatedTable: any) {
   const db = getAppDB()
   let updatedRows = []
   const rename = updatedTable._rename
-  let deletedColumns = []
+  let deletedColumns: any = []

@@ -61,14 +61,14 @@
-  const rawRows = rows.rows.map(({ doc }) => doc)
-  updatedRows = rawRows.map(row => {
+  const rawRows = rows.rows.map(({ doc }: any) => doc)
+  updatedRows = rawRows.map((row: any) => {
     row = cloneDeep(row)
     if (rename) {
       row[rename.updated] = row[rename.old]
       delete row[rename.old]
     } else if (deletedColumns.length !== 0) {
-      deletedColumns.forEach(colName => delete row[colName])
+      deletedColumns.forEach((colName: any) => delete row[colName])
     }
     return row
   })

@@ -76,14 +76,14 @@
   // cleanup any attachments from object storage for deleted attachment columns
   await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
   // Update views
-  await exports.checkForViewUpdates(updatedTable, rename, deletedColumns)
+  await checkForViewUpdates(updatedTable, rename, deletedColumns)

 // makes sure the passed in table isn't going to reset the auto ID
-exports.makeSureTableUpToDate = (table, tableToSave) => {
+export function makeSureTableUpToDate(table: any, tableToSave: any) {

@@ -91,7 +91,9 @@
   // make sure auto IDs are always updated - these are internal
   // so the client may not know they have changed
-  for (let [field, column] of Object.entries(table.schema)) {
+  let field: any
+  let column: any
+  for ([field, column] of Object.entries(table.schema)) {

@@ -103,30 +105,32 @@
-exports.handleDataImport = async (user, table, dataImport) => {
+export async function handleDataImport(user: any, table: any, dataImport: any) {
   const db = getAppDB()
   // Populate the table with rows imported from CSV in a bulk update
-  const data = await csvParser.transform({
+  const data = await transform({
     ...dataImport,
     existingTable: table,
   })

-  let finalData = []
+  let finalData: any = []
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
     row._id = generateRowID(table._id)
     row.tableId = table._id
-    const processed = inputProcessing(user, table, row, {
+    const processed: any = inputProcessing(user, table, row, {
       noAutoRelationships: true,
     })
     table = processed.table
     row = processed.row

-    for (let [fieldName, schema] of Object.entries(table.schema)) {
+    let fieldName: any
+    let schema: any
+    for ([fieldName, schema] of Object.entries(table.schema)) {
       // check whether the options need to be updated for inclusion as part of the data import

@@ -143,17 +147,13 @@
-  await usageQuota.update(usageQuota.Properties.ROW, finalData.length, {
-    dryRun: true,
-  })
-  await db.bulkDocs(finalData)
-  await usageQuota.update(usageQuota.Properties.ROW, finalData.length)
+  await quotas.addRows(finalData.length, () => db.bulkDocs(finalData))
   let response = await db.put(table)
   table._rev = response._rev
   return table
 }

-exports.handleSearchIndexes = async table => {
+export async function handleSearchIndexes(table: any) {

@@ -161,12 +161,12 @@
     const existingIndex = currentIndexes.indexes.find(
-      existing => existing.name === indexName
+      (existing: any) => existing.name === indexName
     )
     if (existingIndex) {
       const currentFields = existingIndex.def.fields.map(
-        field => Object.keys(field)[0]
+        (field: any) => Object.keys(field)[0]
       )

@@ -197,7 +197,7 @@
-exports.checkStaticTables = table => {
+export function checkStaticTables(table: any) {

@@ -211,7 +211,13 @@
 class TableSaveFunctions {
-  constructor({ user, oldTable, dataImport }) {
+  db: any
+  user: any
+  oldTable: any
+  dataImport: any
+  rows: any
+
+  constructor({ user, oldTable, dataImport }: any) {
     this.db = getAppDB()
     this.user = user
     this.oldTable = oldTable

@@ -221,25 +227,25 @@
   // before anything is done
-  async before(table) {
+  async before(table: any) {
     if (this.oldTable) {
-      table = exports.makeSureTableUpToDate(this.oldTable, table)
+      table = makeSureTableUpToDate(this.oldTable, table)
     }
-    table = exports.checkStaticTables(table)
+    table = checkStaticTables(table)
     return table
   }

   // when confirmed valid
-  async mid(table) {
-    let response = await exports.checkForColumnUpdates(this.oldTable, table)
+  async mid(table: any) {
+    let response = await checkForColumnUpdates(this.oldTable, table)
     this.rows = this.rows.concat(response.rows)
     return table
   }

   // after saving
-  async after(table) {
-    table = await exports.handleSearchIndexes(table)
-    table = await exports.handleDataImport(this.user, table, this.dataImport)
+  async after(table: any) {
+    table = await handleSearchIndexes(table)
+    table = await handleDataImport(this.user, table, this.dataImport)
     return table
   }

@@ -248,21 +254,21 @@
-exports.getAllInternalTables = async () => {
+export async function getAllInternalTables() {
   const db = getAppDB()
   const internalTables = await db.allDocs(
     getTableParams(null, {
       include_docs: true,
     })
   )
-  return internalTables.rows.map(tableDoc => ({
+  return internalTables.rows.map((tableDoc: any) => ({
     ...tableDoc.doc,
     type: "internal",
     sourceId: BudibaseInternalDB._id,
   }))
 }

-exports.getAllExternalTables = async datasourceId => {
+export async function getAllExternalTables(datasourceId: any) {

@@ -271,24 +277,28 @@
-exports.getExternalTable = async (datasourceId, tableName) => {
-  const entities = await exports.getAllExternalTables(datasourceId)
+export async function getExternalTable(datasourceId: any, tableName: any) {
+  const entities = await getAllExternalTables(datasourceId)
   return entities[tableName]
 }

-exports.getTable = async tableId => {
+export async function getTable(tableId: any) {
   const db = getAppDB()
   if (isExternalTable(tableId)) {
     let { datasourceId, tableName } = breakExternalTableId(tableId)
     const datasource = await db.get(datasourceId)
-    const table = await exports.getExternalTable(datasourceId, tableName)
+    const table = await getExternalTable(datasourceId, tableName)
     return { ...table, sql: isSQL(datasource) }
   } else {
     return db.get(tableId)
   }
 }

-exports.checkForViewUpdates = async (table, rename, deletedColumns) => {
+export async function checkForViewUpdates(
+  table: any,
+  rename: any,
+  deletedColumns: any
+) {
   const views = await getViews()
   const tableViews = views.filter(view => view.meta.tableId === table._id)

@@ -312,7 +322,7 @@
     // Update filters if required
     if (view.meta.filters) {
-      view.meta.filters.forEach(filter => {
+      view.meta.filters.forEach((filter: any) => {
         if (filter.key === rename.old) {
           filter.key = rename.updated
           needsUpdated = true

@@ -320,7 +330,7 @@
     } else if (deletedColumns) {
-      deletedColumns.forEach(column => {
+      deletedColumns.forEach((column: any) => {
         // Remove calculation statement if required
         if (view.meta.field === column) {
           delete view.meta.field

@@ -338,7 +348,7 @@
       // Remove filters referencing deleted field if required
       if (view.meta.filters && view.meta.filters.length) {
         const initialLength = view.meta.filters.length
-        view.meta.filters = view.meta.filters.filter(filter => {
+        view.meta.filters = view.meta.filters.filter((filter: any) => {
          return filter.key !== column
         })
         if (initialLength !== view.meta.filters.length) {

@@ -360,16 +370,20 @@
-exports.generateForeignKey = (column, relatedTable) => {
+export function generateForeignKey(column: any, relatedTable: any) {
   return `fk_${relatedTable.name}_${column.fieldName}`
 }

-exports.generateJunctionTableName = (column, table, relatedTable) => {
+export function generateJunctionTableName(
+  column: any,
+  table: any,
+  relatedTable: any
+) {
   return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
 }

 exports.foreignKeyStructure = (keyName, meta = null) => {
|
export function foreignKeyStructure(keyName: any, meta = null) {
|
||||||
const structure = {
|
const structure: any = {
|
||||||
type: FieldTypes.NUMBER,
|
type: FieldTypes.NUMBER,
|
||||||
constraints: {},
|
constraints: {},
|
||||||
name: keyName,
|
name: keyName,
|
||||||
|
@ -380,7 +394,7 @@ exports.foreignKeyStructure = (keyName, meta = null) => {
|
||||||
return structure
|
return structure
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.areSwitchableTypes = (type1, type2) => {
|
export function areSwitchableTypes(type1: any, type2: any) {
|
||||||
if (
|
if (
|
||||||
SwitchableTypes.indexOf(type1) === -1 &&
|
SwitchableTypes.indexOf(type1) === -1 &&
|
||||||
SwitchableTypes.indexOf(type2) === -1
|
SwitchableTypes.indexOf(type2) === -1
|
||||||
|
@ -397,21 +411,24 @@ exports.areSwitchableTypes = (type1, type2) => {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.hasTypeChanged = (table, oldTable) => {
|
export function hasTypeChanged(table: any, oldTable: any) {
|
||||||
if (!oldTable) {
|
if (!oldTable) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
for (let [key, field] of Object.entries(oldTable.schema)) {
|
let key: any
|
||||||
|
let field: any
|
||||||
|
for ([key, field] of Object.entries(oldTable.schema)) {
|
||||||
const oldType = field.type
|
const oldType = field.type
|
||||||
if (!table.schema[key]) {
|
if (!table.schema[key]) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
const newType = table.schema[key].type
|
const newType = table.schema[key].type
|
||||||
if (oldType !== newType && !exports.areSwitchableTypes(oldType, newType)) {
|
if (oldType !== newType && !areSwitchableTypes(oldType, newType)) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.TableSaveFunctions = TableSaveFunctions
|
const _TableSaveFunctions = TableSaveFunctions
|
||||||
|
export { _TableSaveFunctions as TableSaveFunctions }
|
|

@@ -5,12 +5,14 @@ const {
buildTenancyMiddleware,
buildAppTenancyMiddleware,
} = require("@budibase/backend-core/auth")
+const { errors } = require("@budibase/backend-core")
const currentApp = require("../middleware/currentapp")
const compress = require("koa-compress")
const zlib = require("zlib")
const { mainRoutes, staticRoutes, publicRoutes } = require("./routes")
const pkg = require("../../package.json")
const env = require("../environment")
+const { middleware: pro } = require("@budibase/pro")

const router = new Router()

@@ -54,6 +56,7 @@ router
.use(currentApp)
// this middleware will try to use the app ID to determine the tenancy
.use(buildAppTenancyMiddleware())
+.use(pro.licensing())
.use(auditLog)

// error handling middleware
@@ -62,10 +65,12 @@ router.use(async (ctx, next) => {
await next()
} catch (err) {
ctx.status = err.status || err.statusCode || 500
+const error = errors.getPublicError(err)
ctx.body = {
message: err.message,
status: ctx.status,
validationErrors: err.validation,
+error,
}
if (env.NODE_ENV !== "jest") {
ctx.log.error(err)
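For reference, a minimal sketch of the error-handling middleware after this change; the surrounding router and env wiring is assumed from the hunk above rather than re-verified, and only the calls shown in this diff are used:

router.use(async (ctx: any, next: any) => {
  try {
    await next()
  } catch (err: any) {
    ctx.status = err.status || err.statusCode || 500
    // expose only the sanitised, public-facing error object to clients
    const error = errors.getPublicError(err)
    ctx.body = {
      message: err.message,
      status: ctx.status,
      validationErrors: err.validation,
      error,
    }
    if (env.NODE_ENV !== "jest") {
      ctx.log.error(err)
    }
  }
})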

@@ -1,14 +1,13 @@
-const Router = require("@koa/router")
+import Router from "@koa/router"
-const controller = require("../controllers/application")
+import * as controller from "../controllers/application"
-const authorized = require("../../middleware/authorized")
+import authorized from "../../middleware/authorized"
-const { BUILDER } = require("@budibase/backend-core/permissions")
+import { BUILDER } from "@budibase/backend-core/permissions"
-const usage = require("../../middleware/usageQuota")

-const router = Router()
+const router = new Router()

router
.post("/api/applications/:appId/sync", authorized(BUILDER), controller.sync)
-.post("/api/applications", authorized(BUILDER), usage, controller.create)
+.post("/api/applications", authorized(BUILDER), controller.create)
.get("/api/applications/:appId/definition", controller.fetchAppDefinition)
.get("/api/applications", controller.fetch)
.get("/api/applications/:appId/appPackage", controller.fetchAppPackage)
@@ -23,11 +22,6 @@ router
authorized(BUILDER),
controller.revertClient
)
-.delete(
-"/api/applications/:appId",
-authorized(BUILDER),
-usage,
-controller.delete
-)
+.delete("/api/applications/:appId", authorized(BUILDER), controller.destroy)

-module.exports = router
+export default router

@@ -1,62 +0,0 @@
-const authRoutes = require("./auth")
-const layoutRoutes = require("./layout")
-const screenRoutes = require("./screen")
-const userRoutes = require("./user")
-const applicationRoutes = require("./application")
-const tableRoutes = require("./table")
-const rowRoutes = require("./row")
-const viewRoutes = require("./view")
-const staticRoutes = require("./static")
-const componentRoutes = require("./component")
-const automationRoutes = require("./automation")
-const webhookRoutes = require("./webhook")
-const roleRoutes = require("./role")
-const deployRoutes = require("./deploy")
-const apiKeysRoutes = require("./apikeys")
-const templatesRoutes = require("./templates")
-const analyticsRoutes = require("./analytics")
-const routingRoutes = require("./routing")
-const integrationRoutes = require("./integration")
-const permissionRoutes = require("./permission")
-const datasourceRoutes = require("./datasource")
-const queryRoutes = require("./query")
-const backupRoutes = require("./backup")
-const metadataRoutes = require("./metadata")
-const devRoutes = require("./dev")
-const cloudRoutes = require("./cloud")
-const migrationRoutes = require("./migrations")
-const publicRoutes = require("./public")
-
-exports.mainRoutes = [
-authRoutes,
-deployRoutes,
-layoutRoutes,
-screenRoutes,
-userRoutes,
-applicationRoutes,
-automationRoutes,
-viewRoutes,
-componentRoutes,
-roleRoutes,
-apiKeysRoutes,
-templatesRoutes,
-analyticsRoutes,
-webhookRoutes,
-routingRoutes,
-integrationRoutes,
-permissionRoutes,
-datasourceRoutes,
-queryRoutes,
-backupRoutes,
-metadataRoutes,
-devRoutes,
-cloudRoutes,
-// these need to be handled last as they still use /api/:tableId
-// this could be breaking as koa may recognise other routes as this
-tableRoutes,
-rowRoutes,
-migrationRoutes,
-]
-
-exports.publicRoutes = publicRoutes
-exports.staticRoutes = staticRoutes

@@ -0,0 +1,60 @@
+import authRoutes from "./auth"
+import layoutRoutes from "./layout"
+import screenRoutes from "./screen"
+import userRoutes from "./user"
+import applicationRoutes from "./application"
+import tableRoutes from "./table"
+import rowRoutes from "./row"
+import viewRoutes from "./view"
+import componentRoutes from "./component"
+import automationRoutes from "./automation"
+import webhookRoutes from "./webhook"
+import roleRoutes from "./role"
+import deployRoutes from "./deploy"
+import apiKeysRoutes from "./apikeys"
+import templatesRoutes from "./templates"
+import analyticsRoutes from "./analytics"
+import routingRoutes from "./routing"
+import integrationRoutes from "./integration"
+import permissionRoutes from "./permission"
+import datasourceRoutes from "./datasource"
+import queryRoutes from "./query"
+import backupRoutes from "./backup"
+import metadataRoutes from "./metadata"
+import devRoutes from "./dev"
+import cloudRoutes from "./cloud"
+import migrationRoutes from "./migrations"
+
+export { default as staticRoutes } from "./static"
+export { default as publicRoutes } from "./public"
+
+export const mainRoutes = [
+authRoutes,
+deployRoutes,
+layoutRoutes,
+screenRoutes,
+userRoutes,
+applicationRoutes,
+automationRoutes,
+viewRoutes,
+componentRoutes,
+roleRoutes,
+apiKeysRoutes,
+templatesRoutes,
+analyticsRoutes,
+webhookRoutes,
+routingRoutes,
+integrationRoutes,
+permissionRoutes,
+datasourceRoutes,
+queryRoutes,
+backupRoutes,
+metadataRoutes,
+devRoutes,
+cloudRoutes,
+// these need to be handled last as they still use /api/:tableId
+// this could be breaking as koa may recognise other routes as this
+tableRoutes,
+rowRoutes,
+migrationRoutes,
+]

@@ -3,7 +3,6 @@ import queryEndpoints from "./queries"
import tableEndpoints from "./tables"
import rowEndpoints from "./rows"
import userEndpoints from "./users"
-import usage from "../../../middleware/usageQuota"
import authorized from "../../../middleware/authorized"
import publicApi from "../../../middleware/publicApi"
import { paramResource, paramSubResource } from "../../../middleware/resourceId"
@@ -114,8 +113,6 @@ function applyRoutes(
// add the authorization middleware, using the correct perm type
addMiddleware(endpoints.read, authorized(permType, PermissionLevels.READ))
addMiddleware(endpoints.write, authorized(permType, PermissionLevels.WRITE))
-// add the usage quota middleware
-addMiddleware(endpoints.write, usage)
// add the output mapper middleware
addMiddleware(endpoints.read, mapperMiddleware, { output: true })
addMiddleware(endpoints.write, mapperMiddleware, { output: true })
@@ -130,4 +127,4 @@ applyRoutes(queryEndpoints, PermissionTypes.QUERY, "queryId")
// needs to be applied last for routing purposes, don't override other endpoints
applyRoutes(rowEndpoints, PermissionTypes.TABLE, "tableId", "rowId")

-module.exports = publicRouter
+export default publicRouter

@@ -1,18 +1,14 @@
-const Router = require("@koa/router")
-const rowController = require("../controllers/row")
-const authorized = require("../../middleware/authorized")
-const usage = require("../../middleware/usageQuota")
-const {
-paramResource,
-paramSubResource,
-} = require("../../middleware/resourceId")
+import Router from "@koa/router"
+import * as rowController from "../controllers/row"
+import authorized from "../../middleware/authorized"
+import { paramResource, paramSubResource } from "../../middleware/resourceId"
const {
PermissionLevels,
PermissionTypes,
} = require("@budibase/backend-core/permissions")
const { internalSearchValidator } = require("./utils/validators")

-const router = Router()
+const router = new Router()

router
/**
@@ -180,7 +176,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
-usage,
rowController.save
)
/**
@@ -195,7 +190,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
-usage,
rowController.patch
)
/**
@@ -248,7 +242,6 @@ router
"/api/:tableId/rows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
-usage,
rowController.destroy
)

@@ -269,8 +262,7 @@ router
"/api/:tableId/rows/exportRows",
paramResource("tableId"),
authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
-usage,
-rowController.export
+rowController.exportRows
)

-module.exports = router
+export default router

@@ -1,19 +1,19 @@
-const Router = require("@koa/router")
+import Router from "@koa/router"
-const controller = require("../controllers/static")
+import * as controller from "../controllers/static"
-const { budibaseTempDir } = require("../../utilities/budibaseDir")
+import { budibaseTempDir } from "../../utilities/budibaseDir"
-const authorized = require("../../middleware/authorized")
+import authorized from "../../middleware/authorized"
-const {
+import {
BUILDER,
PermissionTypes,
PermissionLevels,
-} = require("@budibase/backend-core/permissions")
+} from "@budibase/backend-core/permissions"
-const env = require("../../environment")
+import * as env from "../../environment"
-const { paramResource } = require("../../middleware/resourceId")
+import { paramResource } from "../../middleware/resourceId"

-const router = Router()
+const router = new Router()

/* istanbul ignore next */
-router.param("file", async (file, ctx, next) => {
+router.param("file", async (file: any, ctx: any, next: any) => {
ctx.file = file && file.includes(".") ? file : "index.html"
if (!ctx.file.startsWith("budibase-client")) {
return next()
@@ -52,4 +52,4 @@ router
controller.getSignedUploadURL
)

-module.exports = router
+export default router

@@ -1,31 +1,38 @@
-const rowController = require("../../../controllers/row")
+import * as rowController from "../../../controllers/row"
-const appController = require("../../../controllers/application")
+import * as appController from "../../../controllers/application"
-const { AppStatus } = require("../../../../db/utils")
+import { AppStatus } from "../../../../db/utils"
-const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
+import { BUILTIN_ROLE_IDS } from "@budibase/backend-core/roles"
-const { TENANT_ID } = require("../../../../tests/utilities/structures")
+import { TENANT_ID } from "../../../../tests/utilities/structures"
-const { getAppDB, doInAppContext } = require("@budibase/backend-core/context")
+import { getAppDB, doInAppContext } from "@budibase/backend-core/context"
-const env = require("../../../../environment")
+import * as env from "../../../../environment"

-function Request(appId, params) {
-this.appId = appId
-this.params = params
-this.request = {}
+class Request {
+appId: any
+params: any
+request: any
+body: any
+
+constructor(appId: any, params: any) {
+this.appId = appId
+this.params = params
+this.request = {}
+}
}

-function runRequest(appId, controlFunc, request) {
+function runRequest(appId: any, controlFunc: any, request?: any) {
return doInAppContext(appId, async () => {
return controlFunc(request)
})
}

-exports.getAllTableRows = async config => {
+export const getAllTableRows = async (config: any) => {
const req = new Request(config.appId, { tableId: config.table._id })
await runRequest(config.appId, rowController.fetch, req)
return req.body
}

-exports.clearAllApps = async (tenantId = TENANT_ID) => {
+export const clearAllApps = async (tenantId = TENANT_ID) => {
-const req = { query: { status: AppStatus.DEV }, user: { tenantId } }
+const req: any = { query: { status: AppStatus.DEV }, user: { tenantId } }
await appController.fetch(req)
const apps = req.body
if (!apps || apps.length <= 0) {
@@ -34,11 +41,11 @@ exports.clearAllApps = async (tenantId = TENANT_ID) => {
for (let app of apps) {
const { appId } = app
const req = new Request(null, { appId })
-await runRequest(appId, appController.delete, req)
+await runRequest(appId, appController.destroy, req)
}
}

-exports.clearAllAutomations = async config => {
+export const clearAllAutomations = async (config: any) => {
const automations = await config.getAllAutomations()
for (let auto of automations) {
await doInAppContext(config.appId, async () => {
@@ -47,7 +54,12 @@ exports.clearAllAutomations = async config => {
}
}

-exports.createRequest = (request, method, url, body) => {
+export const createRequest = (
+request: any,
+method: any,
+url: any,
+body: any
+) => {
let req

if (method === "POST") req = request.post(url).send(body)
@@ -59,7 +71,12 @@ exports.createRequest = (request, method, url, body) => {
return req
}

-exports.checkBuilderEndpoint = async ({ config, method, url, body }) => {
+export const checkBuilderEndpoint = async ({
+config,
+method,
+url,
+body,
+}: any) => {
const headers = await config.login({
userId: "us_fail",
builder: false,
@@ -71,14 +88,14 @@ exports.checkBuilderEndpoint = async ({ config, method, url, body }) => {
.expect(403)
}

-exports.checkPermissionsEndpoint = async ({
+export const checkPermissionsEndpoint = async ({
config,
method,
url,
body,
passRole,
failRole,
-}) => {
+}: any) => {
const passHeader = await config.login({
roleId: passRole,
prodApp: true,
@@ -106,11 +123,11 @@ exports.checkPermissionsEndpoint = async ({
.expect(403)
}

-exports.getDB = () => {
+export const getDB = () => {
return getAppDB()
}

-exports.testAutomation = async (config, automation) => {
+export const testAutomation = async (config: any, automation: any) => {
return runRequest(automation.appId, async () => {
return await config.request
.post(`/api/automations/${automation._id}/test`)
@@ -126,7 +143,7 @@ exports.testAutomation = async (config, automation) => {
})
}

-exports.runInProd = async func => {
+export const runInProd = async (func: any) => {
const nodeEnv = env.NODE_ENV
const workerId = env.JEST_WORKER_ID
env._set("NODE_ENV", "PRODUCTION")

@@ -1,6 +1,5 @@
// need to load environment first
import { ExtendableContext } from "koa"

import * as env from "./environment"
const CouchDB = require("./db")
require("@budibase/backend-core").init(CouchDB)
@@ -15,7 +14,7 @@ const automations = require("./automations/index")
const Sentry = require("@sentry/node")
const fileSystem = require("./utilities/fileSystem")
const bullboard = require("./automations/bullboard")
-const redis = require("./utilities/redis")
+import redis from "./utilities/redis"
import * as migrations from "./migrations"
import { analytics } from "@budibase/backend-core"

@@ -1,9 +1,8 @@
-const rowController = require("../../api/controllers/row")
-const automationUtils = require("../automationUtils")
-const usage = require("../../utilities/usageQuota")
-const { buildCtx } = require("./utils")
+import { save } from "../../api/controllers/row"
+import { cleanUpRow, getError } from "../automationUtils"
+import { buildCtx } from "./utils"

-exports.definition = {
+export const definition = {
name: "Create Row",
tagline: "Create a {{inputs.enriched.table.name}} row",
icon: "TableRowAddBottom",
@@ -59,7 +58,7 @@ exports.definition = {
},
}

-exports.run = async function ({ inputs, appId, emitter }) {
+export async function run({ inputs, appId, emitter }: any) {
if (inputs.row == null || inputs.row.tableId == null) {
return {
success: false,
@@ -69,7 +68,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
}
}
// have to clean up the row, remove the table from it
-const ctx = buildCtx(appId, emitter, {
+const ctx: any = buildCtx(appId, emitter, {
body: inputs.row,
params: {
tableId: inputs.row.tableId,
@@ -77,13 +76,8 @@ exports.run = async function ({ inputs, appId, emitter }) {
})

try {
-inputs.row = await automationUtils.cleanUpRow(
-inputs.row.tableId,
-inputs.row
-)
-await usage.update(usage.Properties.ROW, 1, { dryRun: true })
-await rowController.save(ctx)
-await usage.update(usage.Properties.ROW, 1)
+inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
+await save(ctx)
return {
row: inputs.row,
response: ctx.body,
@@ -94,7 +88,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
} catch (err) {
return {
success: false,
-response: automationUtils.getError(err),
+response: getError(err),
}
}
}
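For reference, a hedged sketch of the reworked create-row step body after this change; buildCtx, cleanUpRow, save and getError are the imports introduced above, and any fields of the return object hidden by the hunk are omitted rather than guessed:

export async function run({ inputs, appId, emitter }: any) {
  // fake up a koa-style ctx so the normal row controller can be reused
  const ctx: any = buildCtx(appId, emitter, {
    body: inputs.row,
    params: { tableId: inputs.row.tableId },
  })
  try {
    // strip table metadata from the row, then save through the controller;
    // the inline usage-quota bookkeeping is gone, @budibase/pro now owns quotas
    inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
    await save(ctx)
    return { row: inputs.row, response: ctx.body }
  } catch (err) {
    return { success: false, response: getError(err) }
  }
}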

@@ -1,9 +1,8 @@
-const rowController = require("../../api/controllers/row")
-const usage = require("../../utilities/usageQuota")
-const { buildCtx } = require("./utils")
-const automationUtils = require("../automationUtils")
+import { destroy } from "../../api/controllers/row"
+import { buildCtx } from "./utils"
+import { getError } from "../automationUtils"

-exports.definition = {
+export const definition = {
description: "Delete a row from your database",
icon: "TableRowRemoveCenter",
name: "Delete Row",
@@ -52,7 +51,7 @@ exports.definition = {
},
}

-exports.run = async function ({ inputs, appId, emitter }) {
+export async function run({ inputs, appId, emitter }: any) {
if (inputs.id == null || inputs.revision == null) {
return {
success: false,
@@ -62,7 +61,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
}
}

-let ctx = buildCtx(appId, emitter, {
+let ctx: any = buildCtx(appId, emitter, {
body: {
_id: inputs.id,
_rev: inputs.revision,
@@ -73,8 +72,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
})

try {
-await usage.update(usage.Properties.ROW, -1)
-await rowController.destroy(ctx)
+await destroy(ctx)
return {
response: ctx.body,
row: ctx.row,
@@ -83,7 +81,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
} catch (err) {
return {
success: false,
-response: automationUtils.getError(err),
+response: getError(err),
}
}
}

@@ -1,4 +1,3 @@
-jest.mock("../../utilities/usageQuota")
jest.mock("../../threads/automation")
jest.mock("../../utilities/redis", () => ({
init: jest.fn(),

@@ -1,10 +1,8 @@
-jest.mock("../../utilities/usageQuota")
+import * as setup from "./utilities"

-const usageQuota = require("../../utilities/usageQuota")
-const setup = require("./utilities")

describe("test the create row action", () => {
-let table, row
+let table: any
+let row: any
let config = setup.getConfig()

beforeEach(async () => {
@@ -36,20 +34,11 @@ describe("test the create row action", () => {
row: {
tableId: "invalid",
invalid: "invalid",
-}
+},
})
expect(res.success).toEqual(false)
})

-it("check usage quota attempts", async () => {
-await setup.runInProd(async () => {
-await setup.runStep(setup.actions.CREATE_ROW.stepId, {
-row
-})
-expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
-})
-})

it("should check invalid inputs return an error", async () => {
const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {})
expect(res.success).toEqual(false)

@@ -1,10 +1,9 @@
-jest.mock("../../utilities/usageQuota")

-const usageQuota = require("../../utilities/usageQuota")
const setup = require("./utilities")

describe("test the delete row action", () => {
-let table, row, inputs
+let table: any
+let row: any
+let inputs: any
let config = setup.getConfig()

beforeEach(async () => {
@@ -37,7 +36,6 @@ describe("test the delete row action", () => {
it("check usage quota attempts", async () => {
await setup.runInProd(async () => {
await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
-expect(usageQuota.update).toHaveBeenCalledWith("rows", -1)
})
})

@@ -18,7 +18,6 @@ exports.afterAll = () => {

exports.runInProd = async fn => {
env._set("NODE_ENV", "production")
-env._set("USE_QUOTAS", 1)
let error
try {
await fn()
@@ -26,7 +25,6 @@ exports.runInProd = async fn => {
error = err
}
env._set("NODE_ENV", "jest")
-env._set("USE_QUOTAS", null)
if (error) {
throw error
}

@@ -1,23 +1,29 @@
-const { Thread, ThreadType } = require("../threads")
+import { Thread, ThreadType } from "../threads"
-const { definitions } = require("./triggerInfo")
+import { definitions } from "./triggerInfo"
-const webhooks = require("../api/controllers/webhook")
+import * as webhooks from "../api/controllers/webhook"
-const CouchDB = require("../db")
+import CouchDB from "../db"
-const { queue } = require("./bullboard")
+import { queue } from "./bullboard"
-const newid = require("../db/newid")
+import newid from "../db/newid"
-const { updateEntityMetadata } = require("../utilities")
+import { updateEntityMetadata } from "../utilities"
-const { MetadataTypes, WebhookType } = require("../constants")
+import { MetadataTypes, WebhookType } from "../constants"
-const { getProdAppID } = require("@budibase/backend-core/db")
+import { getProdAppID } from "@budibase/backend-core/db"
-const { cloneDeep } = require("lodash/fp")
+import { cloneDeep } from "lodash/fp"
-const { getAppDB, getAppId } = require("@budibase/backend-core/context")
+import { getAppDB, getAppId } from "@budibase/backend-core/context"
+import { tenancy } from "@budibase/backend-core"
+import { quotas } from "@budibase/pro"

const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION)

-exports.processEvent = async job => {
+export async function processEvent(job: any) {
try {
-// need to actually await these so that an error can be captured properly
-return await Runner.run(job)
+const tenantId = tenancy.getTenantIDFromAppID(job.data.event.appId)
+return await tenancy.doInTenant(tenantId, async () => {
+// need to actually await these so that an error can be captured properly
+const runFn = () => Runner.run(job)
+return quotas.addAutomation(runFn)
+})
} catch (err) {
console.error(
`${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${err}`
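Put together, the updated processEvent flow looks roughly like the sketch below; tenancy (from @budibase/backend-core) and quotas (from @budibase/pro) are used exactly as imported in this hunk, while the job and Runner shapes are assumed from this file rather than re-verified:

export async function processEvent(job: any) {
  try {
    // resolve the tenant from the app that owns the automation, then run the
    // job inside that tenant's context so quota usage is attributed correctly
    const tenantId = tenancy.getTenantIDFromAppID(job.data.event.appId)
    return await tenancy.doInTenant(tenantId, async () => {
      // need to actually await these so that an error can be captured properly
      const runFn = () => Runner.run(job)
      // addAutomation counts the run against the tenant's automation quota
      return quotas.addAutomation(runFn)
    })
  } catch (err) {
    console.error(
      `${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${err}`
    )
  }
}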
@@ -26,11 +32,15 @@ exports.processEvent = async job => {
}
}

-exports.updateTestHistory = async (appId, automation, history) => {
+export async function updateTestHistory(
+appId: any,
+automation: any,
+history: any
+) {
return updateEntityMetadata(
MetadataTypes.AUTOMATION_TEST_HISTORY,
automation._id,
-metadata => {
+(metadata: any) => {
if (metadata && Array.isArray(metadata.history)) {
metadata.history.push(history)
} else {
@@ -43,7 +53,7 @@ exports.updateTestHistory = async (appId, automation, history) => {
)
}

-exports.removeDeprecated = definitions => {
+export function removeDeprecated(definitions: any) {
const base = cloneDeep(definitions)
for (let key of Object.keys(base)) {
if (base[key].deprecated) {
@@ -54,13 +64,15 @@ exports.removeDeprecated = definitions => {
}

// end the repetition and the job itself
-exports.disableAllCrons = async appId => {
+export async function disableAllCrons(appId: any) {
const promises = []
const jobs = await queue.getRepeatableJobs()
for (let job of jobs) {
if (job.key.includes(`${appId}_cron`)) {
promises.push(queue.removeRepeatableByKey(job.key))
-promises.push(queue.removeJobs(job.id))
+if (job.id) {
+promises.push(queue.removeJobs(job.id))
+}
}
}
return Promise.all(promises)
@@ -71,9 +83,9 @@ exports.disableAllCrons = async appId => {
* @param {string} appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} automation The automation object to be updated.
*/
-exports.enableCronTrigger = async (appId, automation) => {
+export async function enableCronTrigger(appId: any, automation: any) {
const trigger = automation ? automation.definition.trigger : null
-function isCronTrigger(auto) {
+function isCronTrigger(auto: any) {
return (
auto &&
auto.definition.trigger &&
@@ -84,7 +96,7 @@ exports.enableCronTrigger = async (appId, automation) => {
if (isCronTrigger(automation)) {
// make a job id rather than letting Bull decide, makes it easier to handle on way out
const jobId = `${appId}_cron_${newid()}`
-const job = await queue.add(
+const job: any = await queue.add(
{
automation,
event: { appId, timestamp: Date.now() },
@@ -112,13 +124,13 @@ exports.enableCronTrigger = async (appId, automation) => {
* @returns {Promise<object|undefined>} After this is complete the new automation object may have been updated and should be
* written to DB (this does not write to DB as it would be wasteful to repeat).
*/
-exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
+export async function checkForWebhooks({ oldAuto, newAuto }: any) {
const appId = getAppId()
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
const newTrigger = newAuto ? newAuto.definition.trigger : null
const triggerChanged =
oldTrigger && newTrigger && oldTrigger.id !== newTrigger.id
-function isWebhookTrigger(auto) {
+function isWebhookTrigger(auto: any) {
return (
auto &&
auto.definition.trigger &&
@@ -154,7 +166,7 @@ exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
(!isWebhookTrigger(oldAuto) || triggerChanged) &&
isWebhookTrigger(newAuto)
) {
-const ctx = {
+const ctx: any = {
appId,
request: {
body: new webhooks.Webhook(
@@ -184,6 +196,6 @@ exports.checkForWebhooks = async ({ oldAuto, newAuto }) => {
* @param appId {string} the app that is being removed.
* @return {Promise<void>} clean is complete if this succeeds.
*/
-exports.cleanupAutomations = async appId => {
+export async function cleanupAutomations(appId: any) {
-await exports.disableAllCrons(appId)
+await disableAllCrons(appId)
}

@@ -38,8 +38,6 @@ module.exports = {
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
-USE_QUOTAS: process.env.USE_QUOTAS,
-EXCLUDE_QUOTAS_TENANTS: process.env.EXCLUDE_QUOTAS_TENANTS,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,

@@ -1,8 +1,8 @@
-const {
+import {
getUserRoleHierarchy,
getRequiredResourceRole,
BUILTIN_ROLE_IDS,
-} = require("@budibase/backend-core/roles")
+} from "@budibase/backend-core/roles"
const {
PermissionTypes,
doesHaveBasePermission,
@@ -12,7 +12,7 @@ const { isWebhookEndpoint } = require("./utils")
const { buildCsrfMiddleware } = require("@budibase/backend-core/auth")
const { getAppId } = require("@budibase/backend-core/context")

-function hasResource(ctx) {
+function hasResource(ctx: any) {
return ctx.resourceId != null
}

@@ -24,7 +24,12 @@ const csrf = buildCsrfMiddleware()
* - Builders can access all resources.
* - Otherwise the user must have the required role.
*/
-const checkAuthorized = async (ctx, resourceRoles, permType, permLevel) => {
+const checkAuthorized = async (
+ctx: any,
+resourceRoles: any,
+permType: any,
+permLevel: any
+) => {
// check if this is a builder api and the user is not a builder
const isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global
const isBuilderApi = permType === PermissionTypes.BUILDER
@@ -39,10 +44,10 @@ const checkAuthorized = async (ctx, resourceRoles, permType, permLevel) => {
}

const checkAuthorizedResource = async (
-ctx,
+ctx: any,
-resourceRoles,
+resourceRoles: any,
-permType,
+permType: any,
-permLevel
+permLevel: any
) => {
// get the user's roles
const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC
@@ -53,7 +58,9 @@ const checkAuthorizedResource = async (
// check if the user has the required role
if (resourceRoles.length > 0) {
// deny access if the user doesn't have the required resource role
-const found = userRoles.find(role => resourceRoles.indexOf(role._id) !== -1)
+const found = userRoles.find(
+(role: any) => resourceRoles.indexOf(role._id) !== -1
+)
if (!found) {
ctx.throw(403, permError)
}
@@ -63,9 +70,8 @@ const checkAuthorizedResource = async (
}
}

-module.exports =
-(permType, permLevel = null) =>
-async (ctx, next) => {
+export = (permType: any, permLevel: any = null) =>
+async (ctx: any, next: any) => {
// webhooks don't need authentication, each webhook unique
// also internal requests (between services) don't need authorized
if (isWebhookEndpoint(ctx) || ctx.internal) {
@@ -81,7 +87,7 @@ module.exports =
await builderMiddleware(ctx, permType)

// get the resource roles
-let resourceRoles = []
+let resourceRoles: any = []
const appId = getAppId()
if (appId && hasResource(ctx)) {
resourceRoles = await getRequiredResourceRole(permLevel, ctx)
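As a usage sketch, assembled from the route files elsewhere in this diff (the import paths are assumptions for illustration), the exported factory is applied per route, either with a builder permission or a resource permission plus level:

import Router from "@koa/router"
import authorized from "../../middleware/authorized"
import * as controller from "../controllers/application"
import * as rowController from "../controllers/row"
import {
  BUILDER,
  PermissionTypes,
  PermissionLevels,
} from "@budibase/backend-core/permissions"

const router = new Router()

router
  // builder-only endpoint: the caller must be a global builder
  .post("/api/applications", authorized(BUILDER), controller.create)
  // resource endpoint: requires WRITE access to the table in :tableId
  .post(
    "/api/:tableId/rows",
    authorized(PermissionTypes.TABLE, PermissionLevels.WRITE),
    rowController.save
  )

export default router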

@@ -1,134 +0,0 @@
-jest.mock("../../db")
-jest.mock("../../utilities/usageQuota")
-jest.mock("@budibase/backend-core/tenancy", () => ({
-getTenantId: () => "testing123"
-}))
-
-const usageQuotaMiddleware = require("../usageQuota")
-const usageQuota = require("../../utilities/usageQuota")
-const CouchDB = require("../../db")
-const env = require("../../environment")
-
-class TestConfiguration {
-constructor() {
-this.throw = jest.fn()
-this.next = jest.fn()
-this.middleware = usageQuotaMiddleware
-this.ctx = {
-throw: this.throw,
-next: this.next,
-appId: "test",
-request: {
-body: {}
-},
-req: {
-method: "POST",
-url: "/applications"
-}
-}
-usageQuota.useQuotas = () => true
-}
-
-executeMiddleware() {
-return this.middleware(this.ctx, this.next)
-}
-
-setProd(bool) {
-if (bool) {
-env.isDev = () => false
-env.isProd = () => true
-this.ctx.user = { tenantId: "test" }
-} else {
-env.isDev = () => true
-env.isProd = () => false
-}
-}
-
-setMethod(method) {
-this.ctx.req.method = method
-}
-
-setUrl(url) {
-this.ctx.req.url = url
-}
-
-setBody(body) {
-this.ctx.request.body = body
-}
-
-setFiles(files) {
-this.ctx.request.files = { file: files }
-}
-}
-
-describe("usageQuota middleware", () => {
-let config
-
-beforeEach(() => {
-config = new TestConfiguration()
-})
-
-it("skips the middleware if there is no usage property or method", async () => {
-await config.executeMiddleware()
-expect(config.next).toHaveBeenCalled()
-})
-
-it("passes through to next middleware if document already exists", async () => {
-config.setProd(true)
-config.setBody({
-_id: "test",
-_rev: "test",
-})
-
-CouchDB.mockImplementationOnce(() => ({
-get: async () => true
-}))
-
-await config.executeMiddleware()
-
-expect(config.next).toHaveBeenCalled()
-})
-
-it("throws if request has _id, but the document no longer exists", async () => {
-config.setBody({
-_id: "123",
-_rev: "test",
-})
-config.setProd(true)
-
-CouchDB.mockImplementationOnce(() => ({
-get: async () => {
-throw new Error()
-}
-}))
-
-await config.executeMiddleware()
-expect(config.throw).toHaveBeenCalledWith(404, `${config.ctx.request.body._id} does not exist`)
-})
-
-it("calculates and persists the correct usage quota for the relevant action", async () => {
-config.setUrl("/rows")
-
-await config.executeMiddleware()
-
-expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
-expect(config.next).toHaveBeenCalled()
-})
-
-// it("calculates the correct file size from a file upload call and adds it to quota", async () => {
-// config.setUrl("/upload")
-// config.setProd(true)
-// config.setFiles([
-// {
-// size: 100
-// },
-// {
-// size: 10000
-// },
-// ])
-// await config.executeMiddleware()
-
-// expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100)
-// expect(config.next).toHaveBeenCalled()
-// })
-})
@@ -1,164 +0,0 @@
-const usageQuota = require("../utilities/usageQuota")
-const { getUniqueRows } = require("../utilities/usageQuota/rows")
-const {
-  isExternalTable,
-  isRowId: isExternalRowId,
-} = require("../integrations/utils")
-const { getAppDB } = require("@budibase/backend-core/context")
-
-// currently only counting new writes and deletes
-const METHOD_MAP = {
-  POST: 1,
-  DELETE: -1,
-}
-
-const DOMAIN_MAP = {
-  rows: usageQuota.Properties.ROW,
-  // upload: usageQuota.Properties.UPLOAD, // doesn't work yet
-  // views: usageQuota.Properties.VIEW, // doesn't work yet
-  // users: usageQuota.Properties.USER, // doesn't work yet
-  applications: usageQuota.Properties.APPS,
-  // this will not be updated by endpoint calls
-  // instead it will be updated by triggerInfo
-  // automationRuns: usageQuota.Properties.AUTOMATION, // doesn't work yet
-}
-
-function getProperty(url) {
-  for (let domain of Object.keys(DOMAIN_MAP)) {
-    if (url.indexOf(domain) !== -1) {
-      return DOMAIN_MAP[domain]
-    }
-  }
-}
-
-module.exports = async (ctx, next) => {
-  if (!usageQuota.useQuotas()) {
-    return next()
-  }
-
-  let usage = METHOD_MAP[ctx.req.method]
-  const property = getProperty(ctx.req.url)
-  if (usage == null || property == null) {
-    return next()
-  }
-  // post request could be a save of a pre-existing entry
-  if (ctx.request.body && ctx.request.body._id && ctx.request.body._rev) {
-    const usageId = ctx.request.body._id
-    try {
-      if (ctx.appId) {
-        const db = getAppDB()
-        await db.get(usageId)
-      }
-      return next()
-    } catch (err) {
-      if (
-        isExternalTable(usageId) ||
-        (ctx.request.body.tableId &&
-          isExternalTable(ctx.request.body.tableId)) ||
-        isExternalRowId(usageId)
-      ) {
-        return next()
-      } else {
-        ctx.throw(404, `${usageId} does not exist`)
-      }
-    }
-  }
-
-  // update usage for uploads to be the total size
-  if (property === usageQuota.Properties.UPLOAD) {
-    const files =
-      ctx.request.files.file.length > 1
-        ? Array.from(ctx.request.files.file)
-        : [ctx.request.files.file]
-    usage = files.map(file => file.size).reduce((total, size) => total + size)
-  }
-  try {
-    await performRequest(ctx, next, property, usage)
-  } catch (err) {
-    ctx.throw(400, err)
-  }
-}
-
-const performRequest = async (ctx, next, property, usage) => {
-  const usageContext = {
-    skipNext: false,
-    skipUsage: false,
-    [usageQuota.Properties.APPS]: {},
-  }
-
-  if (usage === -1) {
-    if (PRE_DELETE[property]) {
-      await PRE_DELETE[property](ctx, usageContext)
-    }
-  } else {
-    if (PRE_CREATE[property]) {
-      await PRE_CREATE[property](ctx, usageContext)
-    }
-  }
-
-  // run the request
-  if (!usageContext.skipNext) {
-    await usageQuota.update(property, usage, { dryRun: true })
-    await next()
-  }
-
-  if (usage === -1) {
-    if (POST_DELETE[property]) {
-      await POST_DELETE[property](ctx, usageContext)
-    }
-  } else {
-    if (POST_CREATE[property]) {
-      await POST_CREATE[property](ctx, usageContext)
-    }
-  }
-
-  // update the usage
-  if (!usageContext.skipUsage) {
-    await usageQuota.update(property, usage)
-  }
-}
-
-const appPreDelete = async (ctx, usageContext) => {
-  if (ctx.query.unpublish) {
-    // don't run usage decrement for unpublish
-    usageContext.skipUsage = true
-    return
-  }
-
-  // store the row count to delete
-  const rows = await getUniqueRows([ctx.appId])
-  if (rows.length) {
-    usageContext[usageQuota.Properties.APPS] = { rowCount: rows.length }
-  }
-}
-
-const appPostDelete = async (ctx, usageContext) => {
-  // delete the app rows from usage
-  const rowCount = usageContext[usageQuota.Properties.APPS].rowCount
-  if (rowCount) {
-    await usageQuota.update(usageQuota.Properties.ROW, -rowCount)
-  }
-}
-
-const appPostCreate = async ctx => {
-  // app import & template creation
-  if (ctx.request.body.useTemplate === "true") {
-    const rows = await getUniqueRows([ctx.response.body.appId])
-    const rowCount = rows ? rows.length : 0
-    await usageQuota.update(usageQuota.Properties.ROW, rowCount)
-  }
-}
-
-const PRE_DELETE = {
-  [usageQuota.Properties.APPS]: appPreDelete,
-}
-
-const POST_DELETE = {
-  [usageQuota.Properties.APPS]: appPostDelete,
-}
-
-const PRE_CREATE = {}
-
-const POST_CREATE = {
-  [usageQuota.Properties.APPS]: appPostCreate,
-}
@@ -0,0 +1,15 @@
+const { createUserBuildersView } = require("@budibase/backend-core/db")
+import * as syncDevelopers from "./usageQuotas/syncDevelopers"
+
+/**
+ * Date:
+ * March 2022
+ *
+ * Description:
+ * Create the builder users view and sync the developer count
+ */
+
+export const run = async (db: any) => {
+  await createUserBuildersView(db)
+  await syncDevelopers.run()
+}
@@ -0,0 +1,13 @@
+import * as syncPublishedApps from "./usageQuotas/syncPublishedApps"
+
+/**
+ * Date:
+ * March 2022
+ *
+ * Description:
+ * Sync the published apps count
+ */
+
+export const run = async (db: any) => {
+  await syncPublishedApps.run()
+}
@@ -1,4 +1,3 @@
-const env = require("../../../environment")
 const TestConfig = require("../../../tests/utilities/TestConfiguration")
 
 const syncApps = jest.fn()
@@ -14,7 +13,6 @@ describe("run", () => {
   beforeEach(async () => {
     await config.init()
-    env._set("USE_QUOTAS", 1)
   })
 
   afterAll(config.end)
@@ -1,8 +1,3 @@
-const { useQuotas } = require("../../../utilities/usageQuota")
-
 export const runQuotaMigration = async (migration: Function) => {
-  if (!useQuotas()) {
-    return
-  }
   await migration()
 }
@@ -1,9 +1,8 @@
-import { getGlobalDB, getTenantId } from "@budibase/backend-core/tenancy"
+import { getTenantId } from "@budibase/backend-core/tenancy"
 import { getAllApps } from "@budibase/backend-core/db"
-import { getUsageQuotaDoc } from "../../../utilities/usageQuota"
+import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
 
 export const run = async () => {
-  const db = getGlobalDB()
   // get app count
   // @ts-ignore
   const devApps = await getAllApps({ dev: true })
@@ -12,7 +11,5 @@ export const run = async () => {
   // sync app count
   const tenantId = getTenantId()
   console.log(`[Tenant: ${tenantId}] Syncing app count: ${appCount}`)
-  const usageDoc = await getUsageQuotaDoc(db)
-  usageDoc.usageQuota.apps = appCount
-  await db.put(usageDoc)
+  await quotas.setUsage(appCount, StaticQuotaName.APPS, QuotaUsageType.STATIC)
 }
@@ -0,0 +1,19 @@
+import { getTenantId } from "@budibase/backend-core/tenancy"
+import { utils } from "@budibase/backend-core"
+import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
+
+export const run = async () => {
+  // get developer count
+  const developerCount = await utils.getBuildersCount()
+
+  // sync developer count
+  const tenantId = getTenantId()
+  console.log(
+    `[Tenant: ${tenantId}] Syncing developer count: ${developerCount}`
+  )
+  await quotas.setUsage(
+    developerCount,
+    StaticQuotaName.DEVELOPERS,
+    QuotaUsageType.STATIC
+  )
+}
@@ -0,0 +1,21 @@
+import { getTenantId } from "@budibase/backend-core/tenancy"
+import { getAllApps } from "@budibase/backend-core/db"
+import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
+
+export const run = async () => {
+  // get app count
+  const opts: any = { dev: false }
+  const prodApps = await getAllApps(opts)
+  const prodAppCount = prodApps ? prodApps.length : 0
+
+  // sync app count
+  const tenantId = getTenantId()
+  console.log(
+    `[Tenant: ${tenantId}] Syncing published app count: ${prodAppCount}`
+  )
+  await quotas.setUsage(
+    prodAppCount,
+    StaticQuotaName.PUBLISHED_APPS,
+    QuotaUsageType.STATIC
+  )
+}
@@ -1,10 +1,9 @@
-import { getGlobalDB, getTenantId } from "@budibase/backend-core/tenancy"
+import { getTenantId } from "@budibase/backend-core/tenancy"
 import { getAllApps } from "@budibase/backend-core/db"
-import { getUsageQuotaDoc } from "../../../utilities/usageQuota"
 import { getUniqueRows } from "../../../utilities/usageQuota/rows"
+import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
 
 export const run = async () => {
-  const db = getGlobalDB()
   // get all rows in all apps
   // @ts-ignore
   const allApps = await getAllApps({ all: true })
@@ -16,7 +15,5 @@ export const run = async () => {
   // sync row count
   const tenantId = getTenantId()
   console.log(`[Tenant: ${tenantId}] Syncing row count: ${rowCount}`)
-  const usageDoc = await getUsageQuotaDoc(db)
-  usageDoc.usageQuota.rows = rowCount
-  await db.put(usageDoc)
+  await quotas.setUsage(rowCount, StaticQuotaName.ROWS, QuotaUsageType.STATIC)
 }
@@ -1,37 +0,0 @@
-const { getGlobalDB } = require("@budibase/backend-core/tenancy")
-const TestConfig = require("../../../../tests/utilities/TestConfiguration")
-const { getUsageQuotaDoc, update, Properties } = require("../../../../utilities/usageQuota")
-const syncApps = require("../syncApps")
-const env = require("../../../../environment")
-
-describe("syncApps", () => {
-  let config = new TestConfig(false)
-
-  beforeEach(async () => {
-    await config.init()
-    env._set("USE_QUOTAS", 1)
-  })
-
-  afterAll(config.end)
-
-  it("runs successfully", async () => {
-    // create the usage quota doc and mock usages
-    const db = getGlobalDB()
-    await getUsageQuotaDoc(db)
-    await update(Properties.APPS, 3)
-
-    let usageDoc = await getUsageQuotaDoc(db)
-    expect(usageDoc.usageQuota.apps).toEqual(3)
-
-    // create an extra app to test the migration
-    await config.createApp("quota-test")
-
-    // migrate
-    await syncApps.run()
-
-    // assert the migration worked
-    usageDoc = await getUsageQuotaDoc(db)
-    expect(usageDoc.usageQuota.apps).toEqual(2)
-  })
-})
@@ -0,0 +1,32 @@
+import TestConfig from "../../../../tests/utilities/TestConfiguration"
+import * as syncApps from "../syncApps"
+import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
+
+describe("syncApps", () => {
+  let config = new TestConfig(false)
+
+  beforeEach(async () => {
+    await config.init()
+  })
+
+  afterAll(config.end)
+
+  it("runs successfully", async () => {
+    // create the usage quota doc and mock usages
+    await quotas.getQuotaUsage()
+    await quotas.setUsage(3, StaticQuotaName.APPS, QuotaUsageType.STATIC)
+
+    let usageDoc = await quotas.getQuotaUsage()
+    expect(usageDoc.usageQuota.apps).toEqual(3)
+
+    // create an extra app to test the migration
+    await config.createApp("quota-test")
+
+    // migrate
+    await syncApps.run()
+
+    // assert the migration worked
+    usageDoc = await quotas.getQuotaUsage()
+    expect(usageDoc.usageQuota.apps).toEqual(2)
+  })
+})
@@ -1,43 +0,0 @@
-const { getGlobalDB } = require("@budibase/backend-core/tenancy")
-const TestConfig = require("../../../../tests/utilities/TestConfiguration")
-const { getUsageQuotaDoc, update, Properties } = require("../../../../utilities/usageQuota")
-const syncRows = require("../syncRows")
-const env = require("../../../../environment")
-
-describe("syncRows", () => {
-  let config = new TestConfig(false)
-
-  beforeEach(async () => {
-    await config.init()
-    env._set("USE_QUOTAS", 1)
-  })
-
-  afterAll(config.end)
-
-  it("runs successfully", async () => {
-    // create the usage quota doc and mock usages
-    const db = getGlobalDB()
-    await getUsageQuotaDoc(db)
-    await update(Properties.ROW, 300)
-
-    let usageDoc = await getUsageQuotaDoc(db)
-    expect(usageDoc.usageQuota.rows).toEqual(300)
-
-    // app 1
-    await config.createTable()
-    await config.createRow()
-    // app 2
-    await config.createApp("second-app")
-    await config.createTable()
-    await config.createRow()
-    await config.createRow()
-
-    // migrate
-    await syncRows.run()
-
-    // assert the migration worked
-    usageDoc = await getUsageQuotaDoc(db)
-    expect(usageDoc.usageQuota.rows).toEqual(3)
-  })
-})
@@ -0,0 +1,38 @@
+import TestConfig from "../../../../tests/utilities/TestConfiguration"
+import * as syncRows from "../syncRows"
+import { quotas, QuotaUsageType, StaticQuotaName } from "@budibase/pro"
+
+describe("syncRows", () => {
+  let config = new TestConfig(false)
+
+  beforeEach(async () => {
+    await config.init()
+  })
+
+  afterAll(config.end)
+
+  it("runs successfully", async () => {
+    // create the usage quota doc and mock usages
+    await quotas.getQuotaUsage()
+    await quotas.setUsage(300, StaticQuotaName.ROWS, QuotaUsageType.STATIC)
+
+    let usageDoc = await quotas.getQuotaUsage()
+    expect(usageDoc.usageQuota.rows).toEqual(300)
+
+    // app 1
+    await config.createTable()
+    await config.createRow()
+    // app 2
+    await config.createApp("second-app")
+    await config.createTable()
+    await config.createRow()
+    await config.createRow()
+
+    // migrate
+    await syncRows.run()
+
+    // assert the migration worked
+    usageDoc = await quotas.getQuotaUsage()
+    expect(usageDoc.usageQuota.rows).toEqual(3)
+  })
+})
@@ -8,6 +8,8 @@ const {
 import * as userEmailViewCasing from "./functions/userEmailViewCasing"
 import * as quota1 from "./functions/quotas1"
 import * as appUrls from "./functions/appUrls"
+import * as developerQuota from "./functions/developerQuota"
+import * as publishedAppsQuota from "./functions/publishedAppsQuota"
 
 export interface Migration {
   type: string
@@ -27,7 +29,7 @@ export interface Migration {
  */
 export interface MigrationOptions {
   tenantIds?: string[]
-  forced?: {
+  force?: {
     [type: string]: string[]
   }
 }
@@ -49,6 +51,16 @@ export const MIGRATIONS: Migration[] = [
     opts: { all: true },
     fn: appUrls.run,
   },
+  {
+    type: MIGRATION_TYPES.GLOBAL,
+    name: "developer_quota",
+    fn: developerQuota.run,
+  },
+  {
+    type: MIGRATION_TYPES.GLOBAL,
+    name: "published_apps_quota",
+    fn: publishedAppsQuota.run,
+  },
 ]
 
 export const migrate = async (options?: MigrationOptions) => {
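A minimal usage sketch for the migrate() export and the renamed force option shown above; the import path, tenant id, and the literal "GLOBAL" key (standing in for the value of MIGRATION_TYPES.GLOBAL) are illustrative assumptions, not part of the commit:

import { migrate } from "./migrations" // hypothetical import path

// Force the two new quota migrations to re-run for a single tenant.
migrate({
  tenantIds: ["default"], // illustrative tenant
  force: {
    GLOBAL: ["developer_quota", "published_apps_quota"],
  },
}).catch(err => console.error("migration failed", err))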
@@ -1,3 +1,7 @@
 declare module "@budibase/backend-core"
 declare module "@budibase/backend-core/tenancy"
 declare module "@budibase/backend-core/db"
+declare module "@budibase/backend-core/context"
+declare module "@budibase/backend-core/cache"
+declare module "@budibase/backend-core/permissions"
+declare module "@budibase/backend-core/roles"
@@ -1,13 +1,11 @@
 require("./utils").threadSetup()
-const env = require("../environment")
 const actions = require("../automations/actions")
 const automationUtils = require("../automations/automationUtils")
 const AutomationEmitter = require("../events/AutomationEmitter")
 const { processObject } = require("@budibase/string-templates")
 const { DEFAULT_TENANT_ID } = require("@budibase/backend-core/constants")
-const { DocumentTypes, isDevAppID } = require("../db/utils")
+const { DocumentTypes } = require("../db/utils")
 const { doInTenant } = require("@budibase/backend-core/tenancy")
-const usage = require("../utilities/usageQuota")
 const { definitions: triggerDefs } = require("../automations/triggerInfo")
 const { doInAppContext, getAppDB } = require("@budibase/backend-core/context")
@@ -120,11 +118,6 @@ class Orchestrator {
         return err
       }
     }
-
-    // Increment quota for automation runs
-    if (!env.SELF_HOSTED && !isDevAppID(this._appId)) {
-      await usage.update(usage.Properties.AUTOMATION, 1)
-    }
     return this.executionOutput
   }
 }
@@ -1,12 +1,12 @@
-const workerFarm = require("worker-farm")
-const env = require("../environment")
+import workerFarm from "worker-farm"
+import * as env from "../environment"
 
-const ThreadType = {
+export const ThreadType = {
   QUERY: "query",
   AUTOMATION: "automation",
 }
 
-function typeToFile(type) {
+function typeToFile(type: any) {
   let filename = null
   switch (type) {
     case ThreadType.QUERY:
@@ -21,8 +21,13 @@ function typeToFile(type) {
   return require.resolve(filename)
 }
 
-class Thread {
-  constructor(type, opts = { timeoutMs: null, count: 1 }) {
+export class Thread {
+  type: any
+  count: any
+  disableThreading: any
+  workers: any
+
+  constructor(type: any, opts: any = { timeoutMs: null, count: 1 }) {
     this.type = type
     this.count = opts.count ? opts.count : 1
     this.disableThreading =
@@ -31,7 +36,7 @@ class Thread {
       this.count === 0 ||
       env.isInThread()
     if (!this.disableThreading) {
      const workerOpts: any = {
        autoStart: true,
        maxConcurrentWorkers: this.count,
      }
@@ -42,7 +47,7 @@ class Thread {
     }
   }
 
-  run(data) {
+  run(data: any) {
     return new Promise((resolve, reject) => {
       let fncToCall
       // if in test then don't use threading
@@ -51,7 +56,7 @@ class Thread {
       } else {
         fncToCall = this.workers
       }
-      fncToCall(data, (err, response) => {
+      fncToCall(data, (err: any, response: any) => {
         if (err) {
           reject(err)
         } else {
@@ -61,6 +66,3 @@ class Thread {
     })
   }
 }
-
-module.exports.Thread = Thread
-module.exports.ThreadType = ThreadType
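A short sketch of driving the now-exported Thread class, based only on the constructor and run(data) signatures above; the import path, timeout, and payload shape are assumptions for illustration:

import { Thread, ThreadType } from "./threads" // hypothetical import path

// One automation worker; the opts shape mirrors the constructor default.
const thread = new Thread(ThreadType.AUTOMATION, { timeoutMs: 120000, count: 1 })

// run() wraps the worker-farm callback in a Promise, so callers can await it.
thread
  .run({ automation: {}, event: {} }) // payload shape is illustrative
  .then(response => console.log("automation finished", response))
  .catch(err => console.error("automation failed", err))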
@@ -66,7 +66,8 @@ class InMemoryQueue
    * @param {object} msg A message to be transported over the queue, this should be
    * a JSON message as this is required by Bull.
    */
-  add(msg) {
+  // eslint-disable-next-line no-unused-vars
+  add(msg, repeat) {
     if (typeof msg !== "object") {
       throw "Queue only supports carrying JSON."
     }
@@ -90,6 +91,11 @@ class InMemoryQueue {
     return []
   }
 
+  // eslint-disable-next-line no-unused-vars
+  removeJobs(pattern) {
+    // no-op
+  }
+
   /**
    * Implemented for tests
    */
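A small sketch of the Bull-style surface the in-memory queue now accepts; the constructor arguments are an assumption for illustration:

const queue = new InMemoryQueue("testQueue") // constructor args assumed

queue.add({ automation: "run" }) // non-object messages throw
queue.add({ automation: "run" }, { repeat: { cron: "* * * * *" } }) // second arg accepted but ignored here
queue.removeJobs("*") // no-op, kept so Bull-style callers don't break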
@@ -1,72 +0,0 @@
-const getTenantId = jest.fn()
-jest.mock("@budibase/backend-core/tenancy", () => ({
-  getTenantId
-}))
-const usageQuota = require("../../usageQuota")
-const env = require("../../../environment")
-
-class TestConfiguration {
-  constructor() {
-    this.enableQuotas()
-  }
-
-  enableQuotas = () => {
-    env.USE_QUOTAS = 1
-  }
-
-  disableQuotas = () => {
-    env.USE_QUOTAS = null
-  }
-
-  setTenantId = (tenantId) => {
-    getTenantId.mockReturnValue(tenantId)
-  }
-
-  setExcludedTenants = (tenants) => {
-    env.EXCLUDE_QUOTAS_TENANTS = tenants
-  }
-
-  reset = () => {
-    this.disableQuotas()
-    this.setExcludedTenants(null)
-  }
-}
-
-describe("usageQuota", () => {
-  let config
-
-  beforeEach(() => {
-    config = new TestConfiguration()
-  })
-
-  afterEach(() => {
-    config.reset()
-  })
-
-  describe("useQuotas", () => {
-    it("works when no settings have been provided", () => {
-      config.reset()
-      expect(usageQuota.useQuotas()).toBe(false)
-    })
-    it("honours USE_QUOTAS setting", () => {
-      config.disableQuotas()
-      expect(usageQuota.useQuotas()).toBe(false)
-
-      config.enableQuotas()
-      expect(usageQuota.useQuotas()).toBe(true)
-    })
-    it("honours EXCLUDE_QUOTAS_TENANTS setting", () => {
-      config.setTenantId("test")
-
-      // tenantId is in the list
-      config.setExcludedTenants("test, test2, test2")
-      expect(usageQuota.useQuotas()).toBe(false)
-      config.setExcludedTenants("test,test2,test2")
-      expect(usageQuota.useQuotas()).toBe(false)
-
-      // tenantId is not in the list
-      config.setTenantId("other")
-      expect(usageQuota.useQuotas()).toBe(true)
-    })
-  })
-})
@@ -1,93 +0,0 @@
-const env = require("../../environment")
-const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy")
-const {
-  StaticDatabases,
-  generateNewUsageQuotaDoc,
-} = require("@budibase/backend-core/db")
-
-exports.useQuotas = () => {
-  // check if quotas are enabled
-  if (env.USE_QUOTAS) {
-    // check if there are any tenants without limits
-    if (env.EXCLUDE_QUOTAS_TENANTS) {
-      const excludedTenants = env.EXCLUDE_QUOTAS_TENANTS.replace(
-        /\s/g,
-        ""
-      ).split(",")
-      const tenantId = getTenantId()
-      if (excludedTenants.includes(tenantId)) {
-        return false
-      }
-    }
-    return true
-  }
-  return false
-}
-
-exports.Properties = {
-  ROW: "rows",
-  UPLOAD: "storage", // doesn't work yet
-  VIEW: "views", // doesn't work yet
-  USER: "users", // doesn't work yet
-  AUTOMATION: "automationRuns", // doesn't work yet
-  APPS: "apps",
-  EMAILS: "emails", // doesn't work yet
-}
-
-exports.getUsageQuotaDoc = async db => {
-  let quota
-  try {
-    quota = await db.get(StaticDatabases.GLOBAL.docs.usageQuota)
-  } catch (err) {
-    // doc doesn't exist. Create it
-    quota = generateNewUsageQuotaDoc()
-    const response = await db.put(quota)
-    quota._rev = response.rev
-  }
-
-  return quota
-}
-
-/**
- * Given a specified tenantId this will add to the usage object for the specified property.
- * @param {string} property The property which is to be added to (within the nested usageQuota object).
- * @param {number} usage The amount (this can be negative) to adjust the number by.
- * @param {object} opts optional - options such as dryRun, to check what update will do.
- * @returns {Promise<void>} When this completes the API key will now be up to date - the quota period may have
- * also been reset after this call.
- */
-exports.update = async (property, usage, opts = { dryRun: false }) => {
-  if (!exports.useQuotas()) {
-    return
-  }
-
-  try {
-    const db = getGlobalDB()
-    const quota = await exports.getUsageQuotaDoc(db)
-
-    // increment the quota
-    quota.usageQuota[property] += usage
-
-    if (
-      quota.usageQuota[property] > quota.usageLimits[property] &&
-      usage > 0 // allow for decrementing usage when the quota is already exceeded
-    ) {
-      throw new Error(
-        `You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.`
-      )
-    }
-
-    if (quota.usageQuota[property] < 0) {
-      // never go negative if the quota has previously been exceeded
-      quota.usageQuota[property] = 0
-    }
-
-    // update the usage quotas
-    if (!opts.dryRun) {
-      await db.put(quota)
-    }
-  } catch (err) {
-    console.error(`Error updating usage quotas for ${property}`, err)
-    throw err
-  }
-}
@@ -0,0 +1,10 @@
+{
+  // Used for building with tsc
+  "extends": "./tsconfig.json",
+  "exclude": [
+    "node_modules",
+    "**/*.json",
+    "**/*.spec.js",
+    "**/*.spec.ts"
+  ]
+}
@@ -19,7 +19,7 @@
   "exclude": [
     "node_modules",
     "**/*.json",
-    "**/*.spec.ts",
-    "**/*.spec.js"
+    "**/*.spec.js",
+    // "**/*.spec.ts" // don't exclude spec.ts files for editor support
   ]
 }

File diff suppressed because it is too large
@@ -43,7 +43,7 @@
     "rollup-plugin-node-globals": "^1.4.0",
     "rollup-plugin-node-resolve": "^5.2.0",
     "rollup-plugin-terser": "^7.0.2",
-    "typescript": "^4.1.3"
+    "typescript": "^4.5.5"
   },
   "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
 }
@@ -4289,10 +4289,10 @@ typeof-article@^0.1.1:
   dependencies:
     kind-of "^3.1.0"
 
-typescript@^4.1.3:
-  version "4.4.4"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.4.tgz#2cd01a1a1f160704d3101fd5a58ff0f9fcb8030c"
-  integrity sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==
+typescript@^4.5.5:
+  version "4.5.5"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3"
+  integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==
 
 uglify-js@^3.1.4:
   version "3.14.3"
@@ -1,5 +1,5 @@
 {
-  "watch": ["src", "../backend-core"],
+  "watch": ["src", "../backend-core", "../../../budibase-pro/packages/pro"],
   "ext": "js,ts,json",
   "ignore": ["src/**/*.spec.ts", "src/**/*.spec.js"],
   "exec": "ts-node src/index.ts"
@@ -37,7 +37,7 @@
     "@budibase/backend-core": "^1.0.91-alpha.17",
     "@budibase/string-templates": "^1.0.91-alpha.17",
     "@koa/router": "^8.0.0",
-    "@sentry/node": "^6.0.0",
+    "@sentry/node": "6.17.7",
     "@techpass/passport-openidconnect": "^0.3.0",
     "aws-sdk": "^2.811.0",
     "bcryptjs": "^2.4.3",
@@ -66,8 +66,9 @@
     "@types/jest": "^26.0.23",
     "@types/koa": "^2.13.3",
     "@types/koa-router": "^7.4.2",
+    "@types/koa__router": "^8.0.11",
     "@types/node": "^15.12.4",
-    "@typescript-eslint/parser": "4.28.0",
+    "@typescript-eslint/parser": "5.12.0",
     "copyfiles": "^2.4.1",
     "eslint": "^6.8.0",
     "jest": "^27.0.5",
@@ -78,7 +79,7 @@
     "supertest": "^6.1.3",
     "ts-jest": "^27.0.3",
     "ts-node": "^10.0.0",
-    "typescript": "4.3.5",
+    "typescript": "4.5.5",
     "update-dotenv": "^1.1.1"
   },
   "jest": {
@@ -21,8 +21,9 @@ const {
   isMultiTenant,
 } = require("@budibase/backend-core/tenancy")
 const env = require("../../../environment")
+import { users } from "@budibase/pro"
 
-const ssoCallbackUrl = async (config, type) => {
+const ssoCallbackUrl = async (config: any, type: any) => {
   // incase there is a callback URL from before
   if (config && config.callbackURL) {
     return config.callbackURL
@@ -42,15 +43,15 @@ const ssoCallbackUrl = async (config, type) => {
   return `${publicConfig.platformUrl}${callbackUrl}`
 }
 
-exports.googleCallbackUrl = async config => {
+export const googleCallbackUrl = async (config: any) => {
   return ssoCallbackUrl(config, "google")
 }
 
-exports.oidcCallbackUrl = async config => {
+export const oidcCallbackUrl = async (config: any) => {
   return ssoCallbackUrl(config, "oidc")
 }
 
-async function authInternal(ctx, user, err = null, info = null) {
+async function authInternal(ctx: any, user: any, err = null, info = null) {
   if (err) {
     console.error("Authentication error", err)
     return ctx.throw(403, info ? info : "Unauthorized")
@@ -71,20 +72,23 @@ async function authInternal(ctx, user, err = null, info = null) {
   }
 }
 
-exports.authenticate = async (ctx, next) => {
-  return passport.authenticate("local", async (err, user, info) => {
-    await authInternal(ctx, user, err, info)
-    ctx.status = 200
-  })(ctx, next)
+export const authenticate = async (ctx: any, next: any) => {
+  return passport.authenticate(
+    "local",
+    async (err: any, user: any, info: any) => {
+      await authInternal(ctx, user, err, info)
+      ctx.status = 200
+    }
+  )(ctx, next)
 }
 
-exports.setInitInfo = ctx => {
+export const setInitInfo = (ctx: any) => {
   const initInfo = ctx.request.body
   setCookie(ctx, initInfo, Cookies.Init)
   ctx.status = 200
 }
 
-exports.getInitInfo = ctx => {
+export const getInitInfo = (ctx: any) => {
   try {
     ctx.body = getCookie(ctx, Cookies.Init) || {}
   } catch (err) {
@@ -96,7 +100,7 @@ exports.getInitInfo = ctx => {
 /**
  * Reset the user password, used as part of a forgotten password flow.
  */
-exports.reset = async ctx => {
+export const reset = async (ctx: any) => {
   const { email } = ctx.request.body
   const configured = await isEmailConfigured()
   if (!configured) {
@@ -126,7 +130,7 @@ exports.reset = async ctx => {
 /**
  * Perform the user password update if the provided reset code is valid.
  */
-exports.resetUpdate = async ctx => {
+export const resetUpdate = async (ctx: any) => {
   const { resetCode, password } = ctx.request.body
   try {
     const { userId } = await checkResetPasswordCode(resetCode)
@@ -142,14 +146,14 @@ exports.resetUpdate = async ctx => {
   }
 }
 
-exports.logout = async ctx => {
+export const logout = async (ctx: any) => {
   if (ctx.user && ctx.user._id) {
     await platformLogout({ ctx, userId: ctx.user._id })
   }
   ctx.body = { message: "User logged out." }
 }
 
-exports.datasourcePreAuth = async (ctx, next) => {
+export const datasourcePreAuth = async (ctx: any, next: any) => {
   const provider = ctx.params.provider
   const middleware = require(`@budibase/backend-core/middleware`)
   const handler = middleware.datasource[provider]
@@ -167,7 +171,7 @@ exports.datasourcePreAuth = async (ctx, next) => {
   return handler.preAuth(passport, ctx, next)
 }
 
-exports.datasourceAuth = async (ctx, next) => {
+export const datasourceAuth = async (ctx: any, next: any) => {
   const authStateCookie = getCookie(ctx, Cookies.DatasourceAuth)
   const provider = authStateCookie.provider
   const middleware = require(`@budibase/backend-core/middleware`)
@@ -179,7 +183,7 @@ exports.datasourceAuth = async (ctx, next) => {
  * The initial call that google authentication makes to take you to the google login screen.
  * On a successful login, you will be redirected to the googleAuth callback route.
  */
-exports.googlePreAuth = async (ctx, next) => {
+export const googlePreAuth = async (ctx: any, next: any) => {
   const db = getGlobalDB()
 
   const config = await core.db.getScopedConfig(db, {
@@ -187,14 +191,14 @@ exports.googlePreAuth = async (ctx, next) => {
     workspace: ctx.query.workspace,
   })
   let callbackUrl = await exports.googleCallbackUrl(config)
-  const strategy = await google.strategyFactory(config, callbackUrl)
+  const strategy = await google.strategyFactory(config, callbackUrl, users.save)
 
   return passport.authenticate(strategy, {
     scope: ["profile", "email"],
   })(ctx, next)
 }
 
-exports.googleAuth = async (ctx, next) => {
+export const googleAuth = async (ctx: any, next: any) => {
   const db = getGlobalDB()
 
   const config = await core.db.getScopedConfig(db, {
@@ -202,12 +206,12 @@ exports.googleAuth = async (ctx, next) => {
     workspace: ctx.query.workspace,
   })
   const callbackUrl = await exports.googleCallbackUrl(config)
-  const strategy = await google.strategyFactory(config, callbackUrl)
+  const strategy = await google.strategyFactory(config, callbackUrl, users.save)
 
   return passport.authenticate(
     strategy,
     { successRedirect: "/", failureRedirect: "/error" },
-    async (err, user, info) => {
+    async (err: any, user: any, info: any) => {
       await authInternal(ctx, user, err, info)
 
       ctx.redirect("/")
@@ -215,24 +219,24 @@
   )(ctx, next)
 }
 
-async function oidcStrategyFactory(ctx, configId) {
+async function oidcStrategyFactory(ctx: any, configId: any) {
   const db = getGlobalDB()
   const config = await core.db.getScopedConfig(db, {
     type: Configs.OIDC,
     group: ctx.query.group,
   })
 
-  const chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
+  const chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
   let callbackUrl = await exports.oidcCallbackUrl(chosenConfig)
 
-  return oidc.strategyFactory(chosenConfig, callbackUrl)
+  return oidc.strategyFactory(chosenConfig, callbackUrl, users.save)
 }
 
 /**
  * The initial call that OIDC authentication makes to take you to the configured OIDC login screen.
  * On a successful login, you will be redirected to the oidcAuth callback route.
  */
-exports.oidcPreAuth = async (ctx, next) => {
+export const oidcPreAuth = async (ctx: any, next: any) => {
   const { configId } = ctx.params
   const strategy = await oidcStrategyFactory(ctx, configId)
 
@@ -244,14 +248,14 @@ exports.oidcPreAuth = async (ctx, next) => {
   })(ctx, next)
 }
 
-exports.oidcAuth = async (ctx, next) => {
+export const oidcAuth = async (ctx: any, next: any) => {
   const configId = getCookie(ctx, Cookies.OIDC_CONFIG)
   const strategy = await oidcStrategyFactory(ctx, configId)
 
   return passport.authenticate(
     strategy,
     { successRedirect: "/", failureRedirect: "/error" },
-    async (err, user, info) => {
+    async (err: any, user: any, info: any) => {
       await authInternal(ctx, user, err, info)
 
       ctx.redirect("/")
@@ -0,0 +1,30 @@
+import { licensing, quotas } from "@budibase/pro"
+
+export const activate = async (ctx: any) => {
+  const { licenseKey } = ctx.request.body
+  if (!licenseKey) {
+    ctx.throw(400, "licenseKey is required")
+  }
+
+  await licensing.activateLicenseKey(licenseKey)
+  ctx.status = 200
+}
+
+export const refresh = async (ctx: any) => {
+  await licensing.cache.refresh()
+  ctx.status = 200
+}
+
+export const getInfo = async (ctx: any) => {
+  const licenseInfo = await licensing.getLicenseInfo()
+  if (licenseInfo) {
+    licenseInfo.licenseKey = "*"
+    ctx.body = licenseInfo
+  }
+  ctx.status = 200
+}
+
+export const getQuotaUsage = async (ctx: any) => {
+  const usage = await quotas.getQuotaUsage()
+  ctx.body = usage
+}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add the attributes that are session based to the current user.
|
||||||
|
*/
|
||||||
|
const addSessionAttributesToUser = ctx => {
|
||||||
|
ctx.body.account = ctx.user.account
|
||||||
|
ctx.body.license = ctx.user.license
|
||||||
|
ctx.body.budibaseAccess = ctx.user.budibaseAccess
|
||||||
|
ctx.body.accountPortalAccess = ctx.user.accountPortalAccess
|
||||||
|
ctx.body.csrfToken = ctx.user.csrfToken
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove the attributes that are session based from the current user,
|
||||||
|
* so that stale values are not written to the db
|
||||||
|
*/
|
||||||
|
const removeSessionAttributesFromUser = ctx => {
|
||||||
|
delete ctx.request.body.csrfToken
|
||||||
|
delete ctx.request.body.account
|
||||||
|
delete ctx.request.body.accountPortalAccess
|
||||||
|
delete ctx.request.body.budibaseAccess
|
||||||
|
delete ctx.request.body.license
|
||||||
|
}
|
||||||
|
|
||||||
exports.getSelf = async ctx => {
|
exports.getSelf = async ctx => {
|
||||||
if (!ctx.user) {
|
if (!ctx.user) {
|
||||||
ctx.throw(403, "User not logged in")
|
ctx.throw(403, "User not logged in")
|
||||||
|
@ -81,11 +104,7 @@ exports.getSelf = async ctx => {
|
||||||
|
|
||||||
// get the main body of the user
|
// get the main body of the user
|
||||||
ctx.body = await getUser(userId)
|
ctx.body = await getUser(userId)
|
||||||
// forward session information not found in db
|
addSessionAttributesToUser(ctx)
|
||||||
ctx.body.account = ctx.user.account
|
|
||||||
ctx.body.budibaseAccess = ctx.user.budibaseAccess
|
|
||||||
ctx.body.accountPortalAccess = ctx.user.accountPortalAccess
|
|
||||||
ctx.body.csrfToken = ctx.user.csrfToken
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.updateSelf = async ctx => {
|
exports.updateSelf = async ctx => {
|
||||||
|
@ -104,8 +123,8 @@ exports.updateSelf = async ctx => {
|
||||||
// don't allow sending up an ID/Rev, always use the existing one
|
// don't allow sending up an ID/Rev, always use the existing one
|
||||||
delete ctx.request.body._id
|
delete ctx.request.body._id
|
||||||
delete ctx.request.body._rev
|
delete ctx.request.body._rev
|
||||||
// don't allow setting the csrf token
|
removeSessionAttributesFromUser(ctx)
|
||||||
delete ctx.request.body.csrfToken
|
|
||||||
const response = await db.put({
|
const response = await db.put({
|
||||||
...user,
|
...user,
|
||||||
...ctx.request.body,
|
...ctx.request.body,
|
||||||
|
|
|
@ -1,15 +1,11 @@
|
||||||
const {
|
const {
|
||||||
getGlobalUserParams,
|
getGlobalUserParams,
|
||||||
StaticDatabases,
|
StaticDatabases,
|
||||||
generateNewUsageQuotaDoc,
|
|
||||||
} = require("@budibase/backend-core/db")
|
} = require("@budibase/backend-core/db")
|
||||||
const {
|
const { getGlobalUserByEmail } = require("@budibase/backend-core/utils")
|
||||||
getGlobalUserByEmail,
|
import { EmailTemplatePurpose } from "../../../constants"
|
||||||
saveUser,
|
import { checkInviteCode } from "../../../utilities/redis"
|
||||||
} = require("@budibase/backend-core/utils")
|
import { sendEmail } from "../../../utilities/email"
|
||||||
const { EmailTemplatePurpose } = require("../../../constants")
|
|
||||||
const { checkInviteCode } = require("../../../utilities/redis")
|
|
||||||
const { sendEmail } = require("../../../utilities/email")
|
|
||||||
const { user: userCache } = require("@budibase/backend-core/cache")
|
const { user: userCache } = require("@budibase/backend-core/cache")
|
||||||
const { invalidateSessions } = require("@budibase/backend-core/sessions")
|
const { invalidateSessions } = require("@budibase/backend-core/sessions")
|
||||||
const accounts = require("@budibase/backend-core/accounts")
|
const accounts = require("@budibase/backend-core/accounts")
|
||||||
|
@ -20,26 +16,28 @@ const {
|
||||||
doesTenantExist,
|
doesTenantExist,
|
||||||
} = require("@budibase/backend-core/tenancy")
|
} = require("@budibase/backend-core/tenancy")
|
||||||
const { removeUserFromInfoDB } = require("@budibase/backend-core/deprovision")
|
const { removeUserFromInfoDB } = require("@budibase/backend-core/deprovision")
|
||||||
const env = require("../../../environment")
|
import env from "../../../environment"
|
||||||
const { syncUserInApps } = require("../../../utilities/appService")
|
import { syncUserInApps } from "../../../utilities/appService"
|
||||||
const { allUsers, getUser } = require("../../utilities")
|
import { quotas, users } from "@budibase/pro"
|
||||||
|
const { errors } = require("@budibase/backend-core")
|
||||||
|
import { allUsers, getUser } from "../../utilities"
|
||||||
|
|
||||||
exports.save = async ctx => {
|
export const save = async (ctx: any) => {
|
||||||
try {
|
try {
|
||||||
const user = await saveUser(ctx.request.body, getTenantId())
|
const user: any = await users.save(ctx.request.body, getTenantId())
|
||||||
// let server know to sync user
|
// let server know to sync user
|
||||||
await syncUserInApps(user._id)
|
await syncUserInApps(user._id)
|
||||||
ctx.body = user
|
ctx.body = user
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
ctx.throw(err.status || 400, err)
|
ctx.throw(err.status || 400, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const parseBooleanParam = param => {
|
const parseBooleanParam = (param: any) => {
|
||||||
return !(param && param === "false")
|
return !(param && param === "false")
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.adminUser = async ctx => {
|
export const adminUser = async (ctx: any) => {
|
||||||
const { email, password, tenantId } = ctx.request.body
|
const { email, password, tenantId } = ctx.request.body
|
||||||
|
|
||||||
// account portal sends a pre-hashed password - honour param to prevent double hashing
|
// account portal sends a pre-hashed password - honour param to prevent double hashing
|
||||||
|
@ -69,10 +67,10 @@ exports.adminUser = async ctx => {
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
// don't worry about errors
|
// don't worry about errors
|
||||||
}
|
}
|
||||||
await db.put(generateNewUsageQuotaDoc())
|
await db.put(quotas.generateNewQuotaUsage())
|
||||||
}
|
}
|
||||||
|
|
||||||
if (response.rows.some(row => row.doc.admin)) {
|
if (response.rows.some((row: any) => row.doc.admin)) {
|
||||||
ctx.throw(
|
ctx.throw(
|
||||||
403,
|
403,
|
||||||
"You cannot initialise once an global user has been created."
|
"You cannot initialise once an global user has been created."
|
||||||
|
@ -93,13 +91,13 @@ exports.adminUser = async ctx => {
|
||||||
tenantId,
|
tenantId,
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
ctx.body = await saveUser(user, tenantId, hashPassword, requirePassword)
|
ctx.body = await users.save(user, tenantId, hashPassword, requirePassword)
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
ctx.throw(err.status || 400, err)
|
ctx.throw(err.status || 400, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.destroy = async ctx => {
|
export const destroy = async (ctx: any) => {
|
||||||
const db = getGlobalDB()
|
const db = getGlobalDB()
|
||||||
const dbUser = await db.get(ctx.params.id)
|
const dbUser = await db.get(ctx.params.id)
|
||||||
|
|
||||||
|
@ -118,6 +116,7 @@ exports.destroy = async ctx => {
|
||||||
|
|
||||||
await removeUserFromInfoDB(dbUser)
|
await removeUserFromInfoDB(dbUser)
|
||||||
await db.remove(dbUser._id, dbUser._rev)
|
await db.remove(dbUser._id, dbUser._rev)
|
||||||
|
await quotas.removeUser(dbUser)
|
||||||
await userCache.invalidateUser(dbUser._id)
|
await userCache.invalidateUser(dbUser._id)
|
||||||
await invalidateSessions(dbUser._id)
|
await invalidateSessions(dbUser._id)
|
||||||
// let server know to sync user
|
// let server know to sync user
|
||||||
|
@ -128,23 +127,23 @@ exports.destroy = async ctx => {
|
||||||
}
|
}
|
||||||
|
|
||||||
// called internally by app server user fetch
|
// called internally by app server user fetch
|
||||||
exports.fetch = async ctx => {
|
export const fetch = async (ctx: any) => {
|
||||||
const users = await allUsers(ctx)
|
const all = await allUsers()
|
||||||
// user hashed password shouldn't ever be returned
|
// user hashed password shouldn't ever be returned
|
||||||
for (let user of users) {
|
for (let user of all) {
|
||||||
if (user) {
|
if (user) {
|
||||||
delete user.password
|
delete user.password
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ctx.body = users
|
ctx.body = all
|
||||||
}
|
}
|
||||||
|
|
||||||
// called internally by app server user find
|
// called internally by app server user find
|
||||||
exports.find = async ctx => {
|
export const find = async (ctx: any) => {
|
||||||
ctx.body = await getUser(ctx.params.id)
|
ctx.body = await getUser(ctx.params.id)
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.tenantUserLookup = async ctx => {
|
export const tenantUserLookup = async (ctx: any) => {
|
||||||
const id = ctx.params.id
|
const id = ctx.params.id
|
||||||
const user = await getTenantUser(id)
|
const user = await getTenantUser(id)
|
||||||
if (user) {
|
if (user) {
|
||||||
|
@ -154,7 +153,7 @@ exports.tenantUserLookup = async ctx => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.invite = async ctx => {
|
export const invite = async (ctx: any) => {
|
||||||
let { email, userInfo } = ctx.request.body
|
let { email, userInfo } = ctx.request.body
|
||||||
const existing = await getGlobalUserByEmail(email)
|
const existing = await getGlobalUserByEmail(email)
|
||||||
if (existing) {
|
if (existing) {
|
||||||
|
@ -164,21 +163,22 @@ exports.invite = async ctx => {
|
||||||
userInfo = {}
|
userInfo = {}
|
||||||
}
|
}
|
||||||
userInfo.tenantId = getTenantId()
|
userInfo.tenantId = getTenantId()
|
||||||
await sendEmail(email, EmailTemplatePurpose.INVITATION, {
|
const opts: any = {
|
||||||
subject: "{{ company }} platform invitation",
|
subject: "{{ company }} platform invitation",
|
||||||
info: userInfo,
|
info: userInfo,
|
||||||
})
|
}
|
||||||
|
await sendEmail(email, EmailTemplatePurpose.INVITATION, opts)
|
||||||
ctx.body = {
|
ctx.body = {
|
||||||
message: "Invitation has been sent.",
|
message: "Invitation has been sent.",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.inviteAccept = async ctx => {
|
export const inviteAccept = async (ctx: any) => {
|
||||||
const { inviteCode, password, firstName, lastName } = ctx.request.body
|
const { inviteCode, password, firstName, lastName } = ctx.request.body
|
||||||
try {
|
try {
|
||||||
// info is an extension of the user object that was stored by global
|
// info is an extension of the user object that was stored by global
|
||||||
const { email, info } = await checkInviteCode(inviteCode)
|
const { email, info }: any = await checkInviteCode(inviteCode)
|
||||||
ctx.body = await saveUser(
|
ctx.body = await users.save(
|
||||||
{
|
{
|
||||||
firstName,
|
firstName,
|
||||||
lastName,
|
lastName,
|
||||||
|
@ -188,7 +188,11 @@ exports.inviteAccept = async ctx => {
|
||||||
},
|
},
|
||||||
info.tenantId
|
info.tenantId
|
||||||
)
|
)
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
|
if (err.code === errors.codes.USAGE_LIMIT_EXCEEDED) {
|
||||||
|
// explicitly re-throw limit exceeded errors
|
||||||
|
ctx.throw(400, err)
|
||||||
|
}
|
||||||
ctx.throw(400, "Unable to create new user, invitation invalid.")
|
ctx.throw(400, "Unable to create new user, invitation invalid.")
|
||||||
}
|
}
|
||||||
}
|
}
|
|
@@ -8,6 +8,8 @@ const {
   buildTenancyMiddleware,
   buildCsrfMiddleware,
 } = require("@budibase/backend-core/auth")
+const { middleware: pro } = require("@budibase/pro")
+const { errors } = require("@budibase/backend-core")

 const PUBLIC_ENDPOINTS = [
   // old deprecated endpoints kept for backwards compat
@@ -98,6 +100,7 @@ router
   .use(buildAuthMiddleware(PUBLIC_ENDPOINTS))
   .use(buildTenancyMiddleware(PUBLIC_ENDPOINTS, NO_TENANCY_ENDPOINTS))
   .use(buildCsrfMiddleware({ noCsrfPatterns: NO_CSRF_ENDPOINTS }))
+  .use(pro.licensing())
   // for now no public access is allowed to worker (bar health check)
   .use((ctx, next) => {
     if (ctx.publicEndpoint) {
@@ -110,16 +113,18 @@ router
   })
   .use(auditLog)

-// error handling middleware
+// error handling middleware - TODO: This could be moved to backend-core
 router.use(async (ctx, next) => {
   try {
     await next()
   } catch (err) {
     ctx.log.error(err)
     ctx.status = err.status || err.statusCode || 500
+    const error = errors.getPublicError(err)
     ctx.body = {
       message: err.message,
       status: ctx.status,
+      error,
     }
   }
 })

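Two things change in the worker's middleware chain here: pro.licensing() is slotted in after the CSRF check, and the catch-all error handler now attaches a sanitised error object alongside the plain message. A rough Koa sketch of that handler shape, with getPublicError stubbed out since the real implementation lives in @budibase/backend-core (its return shape here is an assumption):

```typescript
import Koa from "koa"

// Stand-in for errors.getPublicError: expose only the fields that are safe
// to return to API consumers (the shape here is assumed, not Budibase's).
function getPublicError(err: any) {
  return err?.code ? { code: err.code, type: err.type } : undefined
}

const app = new Koa()

// catch-all error handler: wraps everything registered after it
app.use(async (ctx, next) => {
  try {
    await next()
  } catch (err: any) {
    ctx.status = err.status || err.statusCode || 500
    ctx.body = {
      message: err.message,
      status: ctx.status,
      error: getPublicError(err),
    }
  }
})

// a downstream handler that throws, to show the translation
app.use(ctx => {
  const err: any = new Error("usage limit exceeded")
  err.status = 400
  err.code = "usage_limit_exceeded"
  throw err
})

app.listen(4002) // port is a placeholder
```
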
@@ -0,0 +1,12 @@
+import Router from "@koa/router"
+import * as controller from "../../controllers/global/license"
+
+const router = new Router()
+
+router
+  .post("/api/global/license/activate", controller.activate)
+  .post("/api/global/license/refresh", controller.refresh)
+  .get("/api/global/license/info", controller.getInfo)
+  .get("/api/global/license/usage", controller.getQuotaUsage)
+
+export = router

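The new router only maps URLs onto controller functions, so the endpoints can be exercised with plain HTTP. A hedged client-side sketch follows; only the paths come from the router above, while the base URL, cookie handling, and the licenseKey body field are assumptions to check against the actual controllers:

```typescript
// Requires Node 18+ for the built-in fetch; everything except the paths
// is illustrative.
const BASE = "http://localhost:10000" // placeholder worker/proxy URL

async function getLicenseInfo(cookie: string) {
  const res = await fetch(`${BASE}/api/global/license/info`, {
    headers: { cookie },
  })
  if (!res.ok) throw new Error(`license info failed: ${res.status}`)
  return res.json()
}

async function activateLicense(cookie: string, licenseKey: string) {
  // request body shape is an assumption - check the activate controller
  const res = await fetch(`${BASE}/api/global/license/activate`, {
    method: "POST",
    headers: { cookie, "content-type": "application/json" },
    body: JSON.stringify({ licenseKey }),
  })
  if (!res.ok) throw new Error(`activate failed: ${res.status}`)
  return res.json()
}
```
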
@@ -10,6 +10,7 @@ const environmentRoutes = require("./system/environment")
 const tenantsRoutes = require("./system/tenants")
 const statusRoutes = require("./system/status")
 const selfRoutes = require("./global/self")
+const licenseRoutes = require("./global/license")

 exports.routes = [
   configRoutes,
@@ -24,4 +25,5 @@ exports.routes = [
   environmentRoutes,
   statusRoutes,
   selfRoutes,
+  licenseRoutes,
 ]

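exports.routes stays a flat array of @koa/router instances, with the license router appended at the end. A plausible sketch of how such an array gets mounted on the app; the loop is an assumption about the surrounding server code rather than a quote from it, and the route handlers are stand-ins:

```typescript
import Koa from "koa"
import Router from "@koa/router"

// stand-ins for the required route modules
const statusRoutes = new Router().get("/api/system/status", ctx => {
  ctx.body = { health: "ok" } // placeholder payload
})
const licenseRoutes = new Router().get("/api/global/license/info", ctx => {
  ctx.body = { plan: "free" } // placeholder payload
})

const routes: Router[] = [statusRoutes, licenseRoutes]

const app = new Koa()
for (const router of routes) {
  // .routes() serves the registered paths, .allowedMethods() answers OPTIONS/405
  app.use(router.routes()).use(router.allowedMethods())
}
app.listen(4002) // placeholder port
```
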
@@ -75,7 +75,8 @@ describe("/api/global/auth", () => {
   afterEach(() => {
     expect(strategyFactory).toBeCalledWith(
       chosenConfig,
-      `http://localhost:10000/api/global/auth/${TENANT_ID}/oidc/callback`
+      `http://localhost:10000/api/global/auth/${TENANT_ID}/oidc/callback`,
+      expect.any(Function)
     )
   })

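The updated assertion accepts a third argument, presumably a callback now passed to the OIDC strategy factory; expect.any(Function) matches any function without pinning its identity. A small self-contained Jest illustration of the pattern (all names here are made up for the example):

```typescript
// Jest sketch: assert a mock was called with exact values plus *some* function.
const strategyFactory = jest.fn()

function configureOidc(config: { clientID: string }, callbackUrl: string) {
  strategyFactory(config, callbackUrl, () => {
    /* user-save callback would go here */
  })
}

test("passes a callback through to the strategy factory", () => {
  const config = { clientID: "abc" }
  const url = "http://localhost:10000/api/global/auth/default/oidc/callback"
  configureOidc(config, url)
  expect(strategyFactory).toBeCalledWith(config, url, expect.any(Function))
})
```
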
@@ -544,72 +544,72 @@
     "@nodelib/fs.scandir" "2.1.5"
     fastq "^1.6.0"

-"@sentry/core@6.17.3":
-  version "6.17.3"
-  resolved "https://registry.yarnpkg.com/@sentry/core/-/core-6.17.3.tgz#44375d8e9f4857bb630d7aebaecc97cfe42052df"
-  integrity sha512-h7WgrNL0RVlr8Dceh97ZiXNdmEumDutpoqFijjiX4x72IiC6zSaVD4IsqrdGln+v8iJ3l3lX44HHqzubDub1OQ==
+"@sentry/core@6.17.7":
+  version "6.17.7"
+  resolved "https://registry.yarnpkg.com/@sentry/core/-/core-6.17.7.tgz#f591235c06b1a4e75d748b15c539e071bd3f5cf5"
+  integrity sha512-SRhLkD05lQb4eCt1ed9Dz72DKbRDlM8PJix8eC2oJLtwyFTS0IlJNkIYRrbsSKkJUm0VsKcDkzIHvUAgBBQICw==
   dependencies:
-    "@sentry/hub" "6.17.3"
-    "@sentry/minimal" "6.17.3"
-    "@sentry/types" "6.17.3"
-    "@sentry/utils" "6.17.3"
+    "@sentry/hub" "6.17.7"
+    "@sentry/minimal" "6.17.7"
+    "@sentry/types" "6.17.7"
+    "@sentry/utils" "6.17.7"
     tslib "^1.9.3"

-"@sentry/hub@6.17.3":
-  version "6.17.3"
-  resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-6.17.3.tgz#9c75f0ce486cfed0635f48c875d92f655c1e5710"
-  integrity sha512-TDxv8nRvk45xvfQg6zs8GYzQzgo0EMhI3wjQZLiNfW2rzybKmIwVp2x3O4PAc3WPzwg4bYNgSAkYKVlHmYjRCg==
+"@sentry/hub@6.17.7":
+  version "6.17.7"
+  resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-6.17.7.tgz#5c90d661e263dad7da0e0106f1cb90cf797d93a7"
+  integrity sha512-siGzcg+quGOdjRaBGAz6T3ycwHUsGgvalptSJdf5Q783FVFhU+haPul++zGOYURXOgx0RjYGWqagwO8+jljl3Q==
   dependencies:
-    "@sentry/types" "6.17.3"
-    "@sentry/utils" "6.17.3"
+    "@sentry/types" "6.17.7"
+    "@sentry/utils" "6.17.7"
     tslib "^1.9.3"

-"@sentry/minimal@6.17.3":
-  version "6.17.3"
-  resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-6.17.3.tgz#3e9f0b017f639776c9eaa58694b9be3f03429c78"
-  integrity sha512-zvGGfHNNA92Lqx6P8ZwOUkmRmAiQl0AQFRXl9So1Ayq9bJRnFLJZv4YFVnp2wE4HXYIlfBYb51+GlGB5LIuPmw==
+"@sentry/minimal@6.17.7":
+  version "6.17.7"
+  resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-6.17.7.tgz#f19182047f19b563f40a30d45d2ce9ad7df1ec4e"
+  integrity sha512-+/FGem1uXsXikX9wHPw44nevO7YTVjkkiPjyLsvnWMjv64r4Au5s+NQSFHDaytRm9IlU//+OasCAS5VAwHcYRg==
   dependencies:
-    "@sentry/hub" "6.17.3"
-    "@sentry/types" "6.17.3"
+    "@sentry/hub" "6.17.7"
+    "@sentry/types" "6.17.7"
     tslib "^1.9.3"

-"@sentry/node@^6.0.0":
-  version "6.17.3"
-  resolved "https://registry.yarnpkg.com/@sentry/node/-/node-6.17.3.tgz#517ca6a88ca033320a301bffa097111bbb344f25"
-  integrity sha512-LvpB6bCQTytoOlrcQgR80aeEEBi2Sm1hNf+VvoPT6CW7tKI1/6pMWXaNnRu2dpyWS/j6tooz8rd/3dl1SZoGvg==
+"@sentry/node@6.17.7":
+  version "6.17.7"
+  resolved "https://registry.yarnpkg.com/@sentry/node/-/node-6.17.7.tgz#c142d93328b29312098276e0cb66ec3b9e805a93"
+  integrity sha512-YlBhEv8YYUimHLpwzUP5lXkagbGxGAfjvsbahhvqf7rRl8Fu5XbatAcSDcx7YE4R0Iox94IfZy95kF2NL4Idow==
   dependencies:
-    "@sentry/core" "6.17.3"
-    "@sentry/hub" "6.17.3"
-    "@sentry/tracing" "6.17.3"
-    "@sentry/types" "6.17.3"
-    "@sentry/utils" "6.17.3"
+    "@sentry/core" "6.17.7"
+    "@sentry/hub" "6.17.7"
+    "@sentry/tracing" "6.17.7"
+    "@sentry/types" "6.17.7"
+    "@sentry/utils" "6.17.7"
     cookie "^0.4.1"
     https-proxy-agent "^5.0.0"
     lru_map "^0.3.3"
     tslib "^1.9.3"

-"@sentry/tracing@6.17.3":
-  version "6.17.3"
-  resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-6.17.3.tgz#b3841ad3fb1c7df1e21521da0d99c1496038a970"
-  integrity sha512-GnHugxw5qkWwYmeQbbrswuWpb0bpYqyJr/dO25QQOCwp+cckQrvBYTMC8zGJG10u94O4el0lQaQnNFz9WF3r6g==
+"@sentry/tracing@6.17.7":
+  version "6.17.7"
+  resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-6.17.7.tgz#f4536683b29bb3ac7ddda5ca49494731cec6b619"
+  integrity sha512-QzIDHOjjdi/0LTdrK2LTC27YEOODI473KD8KmMJ+r9PmjDeIjNzz4hJlPwQSnXR3Mu/8foxGJGXsAt3LNmKzlQ==
   dependencies:
-    "@sentry/hub" "6.17.3"
-    "@sentry/minimal" "6.17.3"
-    "@sentry/types" "6.17.3"
-    "@sentry/utils" "6.17.3"
+    "@sentry/hub" "6.17.7"
+    "@sentry/minimal" "6.17.7"
+    "@sentry/types" "6.17.7"
+    "@sentry/utils" "6.17.7"
     tslib "^1.9.3"

-"@sentry/types@6.17.3":
-  version "6.17.3"
-  resolved "https://registry.yarnpkg.com/@sentry/types/-/types-6.17.3.tgz#c5b9bba8111ff26b26c4a056e2a083905e03e7dd"
-  integrity sha512-0AXCjYcfl8Vx26GfyLY4rBQ78Lyt1oND3UozTTMaVXlcKYIjzV+f7TOo5IZx+Kbr6EGUNDLdpA4xfbkWdW/1NA==
+"@sentry/types@6.17.7":
+  version "6.17.7"
+  resolved "https://registry.yarnpkg.com/@sentry/types/-/types-6.17.7.tgz#61946a3f6532b8f21251b264f173b02f9ea2458e"
+  integrity sha512-iBlJDhrSowZKeqvutY0tCkUjrWqkLFsHrbaQ553r1Nx+/4mxHjzVYtEVGMjZAxQUEbkm0TbnQIkkT7ltglNJ9A==

-"@sentry/utils@6.17.3":
-  version "6.17.3"
-  resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-6.17.3.tgz#a3c4c35e18ffb304356288213797c47c2bfdce08"
-  integrity sha512-6/2awDIeHSj0JgiC7DDdV1lxvLmf+/BisWhw09dKvmhVQB3ADvQZbohjUgM+Qam5zE0xmZAfQhvuDwC41W8Wnw==
+"@sentry/utils@6.17.7":
+  version "6.17.7"
+  resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-6.17.7.tgz#0574bf914cc129b5e47041b75bb34dfbe0decbba"
+  integrity sha512-HEEEeKlZtwfQvH0waSKv5FKRFjHkVgkkEiAigXoYGQAlaUIuwRTvZGFnsmBoKMIrA4pARkA00FwwdtMU7ziC8A==
   dependencies:
-    "@sentry/types" "6.17.3"
+    "@sentry/types" "6.17.7"
     tslib "^1.9.3"

 "@sideway/address@^4.1.3":
@@ -898,6 +898,13 @@
     "@types/koa-compose" "*"
     "@types/node" "*"

+"@types/koa__router@^8.0.11":
+  version "8.0.11"
+  resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.11.tgz#d7b37e6db934fc072ea1baa2ab92bc8ac4564f3e"
+  integrity sha512-WXgKWpBsbS14kzmzD9LeFapOIa678h7zvUHxDwXwSx4ETKXhXLVUAToX6jZ/U7EihM7qwyD9W/BZvB0MRu7MTQ==
+  dependencies:
+    "@types/koa" "*"
+
 "@types/mime@^1":
   version "1.3.2"
   resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a"
@@ -967,49 +974,49 @@
   dependencies:
     "@types/yargs-parser" "*"

-"@typescript-eslint/parser@4.28.0":
-  version "4.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.28.0.tgz#2404c16751a28616ef3abab77c8e51d680a12caa"
-  integrity sha512-7x4D22oPY8fDaOCvkuXtYYTQ6mTMmkivwEzS+7iml9F9VkHGbbZ3x4fHRwxAb5KeuSkLqfnYjs46tGx2Nour4A==
+"@typescript-eslint/parser@5.12.0":
+  version "5.12.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.12.0.tgz#0ca669861813df99ce54916f66f524c625ed2434"
+  integrity sha512-MfSwg9JMBojMUoGjUmX+D2stoQj1CBYTCP0qnnVtu9A+YQXVKNtLjasYh+jozOcrb/wau8TCfWOkQTiOAruBog==
   dependencies:
-    "@typescript-eslint/scope-manager" "4.28.0"
-    "@typescript-eslint/types" "4.28.0"
-    "@typescript-eslint/typescript-estree" "4.28.0"
-    debug "^4.3.1"
+    "@typescript-eslint/scope-manager" "5.12.0"
+    "@typescript-eslint/types" "5.12.0"
+    "@typescript-eslint/typescript-estree" "5.12.0"
+    debug "^4.3.2"

-"@typescript-eslint/scope-manager@4.28.0":
-  version "4.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.28.0.tgz#6a3009d2ab64a30fc8a1e257a1a320067f36a0ce"
-  integrity sha512-eCALCeScs5P/EYjwo6se9bdjtrh8ByWjtHzOkC4Tia6QQWtQr3PHovxh3TdYTuFcurkYI4rmFsRFpucADIkseg==
+"@typescript-eslint/scope-manager@5.12.0":
+  version "5.12.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.12.0.tgz#59619e6e5e2b1ce6cb3948b56014d3a24da83f5e"
+  integrity sha512-GAMobtIJI8FGf1sLlUWNUm2IOkIjvn7laFWyRx7CLrv6nLBI7su+B7lbStqVlK5NdLvHRFiJo2HhiDF7Ki01WQ==
   dependencies:
-    "@typescript-eslint/types" "4.28.0"
-    "@typescript-eslint/visitor-keys" "4.28.0"
+    "@typescript-eslint/types" "5.12.0"
+    "@typescript-eslint/visitor-keys" "5.12.0"

-"@typescript-eslint/types@4.28.0":
-  version "4.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.28.0.tgz#a33504e1ce7ac51fc39035f5fe6f15079d4dafb0"
-  integrity sha512-p16xMNKKoiJCVZY5PW/AfILw2xe1LfruTcfAKBj3a+wgNYP5I9ZEKNDOItoRt53p4EiPV6iRSICy8EPanG9ZVA==
+"@typescript-eslint/types@5.12.0":
+  version "5.12.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.12.0.tgz#5b4030a28222ee01e851836562c07769eecda0b8"
+  integrity sha512-JowqbwPf93nvf8fZn5XrPGFBdIK8+yx5UEGs2QFAYFI8IWYfrzz+6zqlurGr2ctShMaJxqwsqmra3WXWjH1nRQ==

-"@typescript-eslint/typescript-estree@4.28.0":
-  version "4.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.28.0.tgz#e66d4e5aa2ede66fec8af434898fe61af10c71cf"
-  integrity sha512-m19UQTRtxMzKAm8QxfKpvh6OwQSXaW1CdZPoCaQuLwAq7VZMNuhJmZR4g5281s2ECt658sldnJfdpSZZaxUGMQ==
+"@typescript-eslint/typescript-estree@5.12.0":
+  version "5.12.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.12.0.tgz#cabf545fd592722f0e2b4104711e63bf89525cd2"
+  integrity sha512-Dd9gVeOqt38QHR0BEA8oRaT65WYqPYbIc5tRFQPkfLquVEFPD1HAtbZT98TLBkEcCkvwDYOAvuSvAD9DnQhMfQ==
   dependencies:
-    "@typescript-eslint/types" "4.28.0"
-    "@typescript-eslint/visitor-keys" "4.28.0"
-    debug "^4.3.1"
-    globby "^11.0.3"
-    is-glob "^4.0.1"
+    "@typescript-eslint/types" "5.12.0"
+    "@typescript-eslint/visitor-keys" "5.12.0"
+    debug "^4.3.2"
+    globby "^11.0.4"
+    is-glob "^4.0.3"
     semver "^7.3.5"
     tsutils "^3.21.0"

-"@typescript-eslint/visitor-keys@4.28.0":
-  version "4.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.28.0.tgz#255c67c966ec294104169a6939d96f91c8a89434"
-  integrity sha512-PjJyTWwrlrvM5jazxYF5ZPs/nl0kHDZMVbuIcbpawVXaDPelp3+S9zpOz5RmVUfS/fD5l5+ZXNKnWhNYjPzCvw==
+"@typescript-eslint/visitor-keys@5.12.0":
+  version "5.12.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.12.0.tgz#1ac9352ed140b07ba144ebf371b743fdf537ec16"
+  integrity sha512-cFwTlgnMV6TgezQynx2c/4/tx9Tufbuo9LPzmWqyRC3QC4qTGkAG1C6pBr0/4I10PAI/FlYunI3vJjIcu+ZHMg==
   dependencies:
-    "@typescript-eslint/types" "4.28.0"
-    eslint-visitor-keys "^2.0.0"
+    "@typescript-eslint/types" "5.12.0"
+    eslint-visitor-keys "^3.0.0"

 abab@^2.0.3, abab@^2.0.5:
   version "2.0.5"
@@ -1876,7 +1883,7 @@ dateformat@^4.5.1:
   resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5"
   integrity sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==

-debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3:
+debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.3:
   version "4.3.3"
   resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.3.tgz#04266e0b70a98d4462e6e288e38259213332b664"
   integrity sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==
@@ -2203,10 +2210,10 @@ eslint-visitor-keys@^1.1.0:
   resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e"
   integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==

-eslint-visitor-keys@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303"
-  integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==
+eslint-visitor-keys@^3.0.0:
+  version "3.3.0"
+  resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826"
+  integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==

 eslint@^6.8.0:
   version "6.8.0"
@@ -2656,7 +2663,7 @@ globals@^12.1.0:
   dependencies:
     type-fest "^0.8.1"

-globby@^11.0.3:
+globby@^11.0.4:
   version "11.1.0"
   resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b"
   integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==
@@ -3017,7 +3024,7 @@ is-generator-function@^1.0.7:
   dependencies:
     has-tostringtag "^1.0.0"

-is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
+is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1:
   version "4.0.3"
   resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
   integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==
@@ -5766,10 +5773,10 @@ typedarray-to-buffer@^3.1.5:
   dependencies:
     is-typedarray "^1.0.0"

-typescript@4.3.5:
-  version "4.3.5"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.3.5.tgz#4d1c37cc16e893973c45a06886b7113234f119f4"
-  integrity sha512-DqQgihaQ9cUrskJo9kIyW/+g0Vxsk8cDtZ52a3NGh0YNTfpUSArXSohyUGnvbPazEPLu398C0UxmKSOrPumUzA==
+typescript@4.5.5:
+  version "4.5.5"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3"
+  integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA==

 uid2@0.0.x:
   version "0.0.4"

Some files were not shown because too many files have changed in this diff.