diff --git a/.github/workflows/release-develop.yml b/.github/workflows/release-develop.yml
index ce41fcc3e6..cf0d6f848c 100644
--- a/.github/workflows/release-develop.yml
+++ b/.github/workflows/release-develop.yml
@@ -7,6 +7,7 @@ on:
 env:
   POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
   POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
   SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index aaee3923ef..7b38a70eb7 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -4,9 +4,16 @@ on:
   push:
     branches:
       - master
+  workflow_dispatch:
+    inputs:
+      release_self_host:
+        description: 'Release to self hosters? (Y/N)'
+        required: true
+        default: 'N'
 
 env:
   POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
   POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
   SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
 
@@ -47,7 +54,19 @@ jobs:
         uses: "WyriHaximus/github-action-get-previous-tag@v1"
 
       - name: Build/release Docker images
-        run: |
+        if: ${{ github.event.inputs.release_self_host != 'Y' }}
+        run: |
+          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
+          yarn build
+          yarn build:docker
+        env:
+          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
+          BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}
+
+      - name: Build/release Docker images (Self Host)
+        if: ${{ github.event.inputs.release_self_host == 'Y' }}
+        run: |
           docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
           yarn build
           yarn build:docker
diff --git a/hosting/kubernetes/budibase/Chart.yaml b/hosting/kubernetes/budibase/Chart.yaml
index b82cb3bab2..d00b228b0e 100644
--- a/hosting/kubernetes/budibase/Chart.yaml
+++ b/hosting/kubernetes/budibase/Chart.yaml
@@ -37,5 +37,5 @@ dependencies:
     condition: services.couchdb.enabled
   - name: ingress-nginx
     version: 3.35.0
-    repository: https://github.com/kubernetes/ingress-nginx
+    repository: https://kubernetes.github.io/ingress-nginx
     condition: services.ingress.nginx
diff --git a/hosting/kubernetes/budibase/.helmignore b/hosting/kubernetes/budibase/templates/.helmignore
similarity index 100%
rename from hosting/kubernetes/budibase/.helmignore
rename to hosting/kubernetes/budibase/templates/.helmignore
diff --git a/hosting/kubernetes/budibase/templates/app-service-deployment.yaml b/hosting/kubernetes/budibase/templates/app-service-deployment.yaml
index b101ab7854..5d9aee2619 100644
--- a/hosting/kubernetes/budibase/templates/app-service-deployment.yaml
+++ b/hosting/kubernetes/budibase/templates/app-service-deployment.yaml
@@ -94,6 +94,8 @@ spec:
           value: {{ .Values.globals.sentryDSN }}
         - name: WORKER_URL
           value: worker-service:{{ .Values.services.worker.port }}
+        - name: COOKIE_DOMAIN
+          value: {{ .Values.globals.cookieDomain | quote }}
         image: budibase/apps
         imagePullPolicy: Always
         name: bbapps
diff --git a/hosting/kubernetes/budibase/templates/worker-service-deployment.yaml b/hosting/kubernetes/budibase/templates/worker-service-deployment.yaml
index 6c165872c8..98a921a8a6 100644
--- a/hosting/kubernetes/budibase/templates/worker-service-deployment.yaml
+++ b/hosting/kubernetes/budibase/templates/worker-service-deployment.yaml
@@ -89,6 +89,8 @@ spec:
           value: {{ .Values.globals.selfHosted | quote }}
         - name: ACCOUNT_PORTAL_URL
           value: {{ .Values.globals.accountPortalUrl | quote }}
+        - name: COOKIE_DOMAIN
+          value: {{ .Values.globals.cookieDomain | quote }}
         image: budibase/worker
         imagePullPolicy: Always
         name: bbworker
diff --git a/hosting/kubernetes/budibase/values.yaml b/hosting/kubernetes/budibase/values.yaml
index 1113842c8b..c9b2549b30 100644
--- a/hosting/kubernetes/budibase/values.yaml
+++ b/hosting/kubernetes/budibase/values.yaml
@@ -90,6 +90,7 @@ globals:
   logLevel: info
   selfHosted: 1
   accountPortalUrL: ""
+  cookieDomain: ""
   createSecrets: true # creates an internal API key, JWT secrets and redis password for you
 
   # if createSecrets is set to false, you can hard-code your secrets here
diff --git a/lerna.json b/lerna.json
index 4f4354e98e..7ebb425d19 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
 {
-  "version": "0.9.125-alpha.17",
+  "version": "0.9.146-alpha.5",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
diff --git a/package.json b/package.json
index f87c3715aa..3df577ca58 100644
--- a/package.json
+++ b/package.json
@@ -42,7 +42,8 @@
     "lint:fix": "yarn run lint:fix:ts && yarn run lint:fix:prettier && yarn run lint:fix:eslint",
     "test:e2e": "lerna run cy:test",
     "test:e2e:ci": "lerna run cy:ci",
-    "build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
+    "build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
+    "build:docker:production": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
     "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
     "release:helm": "./scripts/release_helm_chart.sh",
     "multi:enable": "lerna run multi:enable",
diff --git a/packages/auth/accounts.js b/packages/auth/accounts.js
new file mode 100644
index 0000000000..47ad03456a
--- /dev/null
+++ b/packages/auth/accounts.js
@@ -0,0 +1 @@
+module.exports = require("./src/cloud/accounts")
diff --git a/packages/auth/deprovision.js b/packages/auth/deprovision.js
new file mode 100644
index 0000000000..b4b8dc6110
--- /dev/null
+++ b/packages/auth/deprovision.js
@@ -0,0 +1 @@
+module.exports = require("./src/tenancy/deprovision")
diff --git a/packages/auth/package.json b/packages/auth/package.json
index 6518723cc3..caed9f4b60 100644
--- a/packages/auth/package.json
+++ b/packages/auth/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/auth",
-  "version": "0.9.125-alpha.17",
+  "version": "0.9.146-alpha.5",
   "description": "Authentication middlewares for budibase builder and apps",
   "main": "src/index.js",
   "author": "Budibase",
diff --git a/packages/auth/scripts/jestSetup.js b/packages/auth/scripts/jestSetup.js
index 07648f693f..93dbf3fd5a 100644
--- a/packages/auth/scripts/jestSetup.js
+++ b/packages/auth/scripts/jestSetup.js
@@ -1,5 +1,6 @@
 const env = require("../src/environment")
 
+env._set("SELF_HOSTED", "1")
 env._set("NODE_ENV", "jest")
 env._set("JWT_SECRET", "test-jwtsecret")
 env._set("LOG_LEVEL", "silent")
diff --git a/packages/auth/src/cache/user.js b/packages/auth/src/cache/user.js
index 2b2693ca01..60a2d341a8 100644
--- a/packages/auth/src/cache/user.js
+++ b/packages/auth/src/cache/user.js
@@ -1,5 +1,7 @@
 const redis = require("../redis/authRedis")
 const { getTenantId, lookupTenantId, getGlobalDB } = require("../tenancy")
+const env = require("../environment")
+const accounts = require("../cloud/accounts")
 
 const EXPIRY_SECONDS = 3600
@@ -9,6 +11,15 @@ const EXPIRY_SECONDS = 3600
 const populateFromDB = async (userId, tenantId) => {
   const user = await getGlobalDB(tenantId).get(userId)
   user.budibaseAccess = true
+
+  if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
+    const account = await accounts.getAccount(user.email)
+    if (account) {
+      user.account = account
+      user.accountPortalAccess = true
+    }
+  }
+
   return user
 }
diff --git a/packages/auth/src/cloud/accounts.js b/packages/auth/src/cloud/accounts.js
new file mode 100644
index 0000000000..a102df8920
--- /dev/null
+++ b/packages/auth/src/cloud/accounts.js
@@ -0,0 +1,22 @@
+const API = require("./api")
+const env = require("../environment")
+
+const api = new API(env.ACCOUNT_PORTAL_URL)
+
+// TODO: Authorization
+
+exports.getAccount = async email => {
+  const payload = {
+    email,
+  }
+  const response = await api.post(`/api/accounts/search`, {
+    body: payload,
+  })
+  const json = await response.json()
+
+  if (response.status !== 200) {
+    throw Error(`Error getting account by email ${email}`, json)
+  }
+
+  return json[0]
+}
diff --git a/packages/auth/src/cloud/api.js b/packages/auth/src/cloud/api.js
new file mode 100644
index 0000000000..ffa785d02a
--- /dev/null
+++ b/packages/auth/src/cloud/api.js
@@ -0,0 +1,44 @@
+const fetch = require("node-fetch")
+class API {
+  constructor(host) {
+    this.host = host
+  }
+
+  apiCall =
+    method =>
+    async (url = "", options = {}) => {
+      if (!options.headers) {
+        options.headers = {}
+      }
+
+      if (!options.headers["Content-Type"]) {
+        options.headers = {
+          "Content-Type": "application/json",
+          Accept: "application/json",
+          ...options.headers,
+        }
+      }
+
+      let json = options.headers["Content-Type"] === "application/json"
+
+      const requestOptions = {
+        method: method,
+        body: json ? JSON.stringify(options.body) : options.body,
+        headers: options.headers,
+        // TODO: See if this is necessary
+        credentials: "include",
+      }
+
+      const resp = await fetch(`${this.host}${url}`, requestOptions)
+
+      return resp
+    }
+
+  post = this.apiCall("POST")
+  get = this.apiCall("GET")
+  patch = this.apiCall("PATCH")
+  del = this.apiCall("DELETE")
+  put = this.apiCall("PUT")
+}
+
+module.exports = API
diff --git a/packages/auth/src/db/constants.js b/packages/auth/src/db/constants.js
index 77643ce4c5..ad4f6c9f66 100644
--- a/packages/auth/src/db/constants.js
+++ b/packages/auth/src/db/constants.js
@@ -12,6 +12,7 @@ exports.StaticDatabases = {
     name: "global-info",
     docs: {
       tenants: "tenants",
+      usageQuota: "usage_quota",
     },
   },
 }
diff --git a/packages/auth/src/db/utils.js b/packages/auth/src/db/utils.js
index a1a831523e..09e2ff6314 100644
--- a/packages/auth/src/db/utils.js
+++ b/packages/auth/src/db/utils.js
@@ -368,8 +368,33 @@ async function getScopedConfig(db, params) {
   return configDoc && configDoc.config ? configDoc.config : configDoc
 }
 
+function generateNewUsageQuotaDoc() {
+  return {
+    _id: StaticDatabases.PLATFORM_INFO.docs.usageQuota,
+    quotaReset: Date.now() + 2592000000,
+    usageQuota: {
+      automationRuns: 0,
+      rows: 0,
+      storage: 0,
+      apps: 0,
+      users: 0,
+      views: 0,
+      emails: 0,
+    },
+    usageLimits: {
+      automationRuns: 1000,
+      rows: 4000,
+      apps: 4,
+      storage: 1000,
+      users: 10,
+      emails: 50,
+    },
+  }
+}
+
 exports.Replication = Replication
 exports.getScopedConfig = getScopedConfig
 exports.generateConfigID = generateConfigID
 exports.getConfigParams = getConfigParams
 exports.getScopedFullConfig = getScopedFullConfig
+exports.generateNewUsageQuotaDoc = generateNewUsageQuotaDoc
diff --git a/packages/auth/src/environment.js b/packages/auth/src/environment.js
index 4d1453837c..7f822090d7 100644
--- a/packages/auth/src/environment.js
+++ b/packages/auth/src/environment.js
@@ -16,9 +16,14 @@ module.exports = {
   REDIS_PASSWORD: process.env.REDIS_PASSWORD,
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
   MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
+  AWS_REGION: process.env.AWS_REGION,
   MINIO_URL: process.env.MINIO_URL,
   INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
   MULTI_TENANCY: process.env.MULTI_TENANCY,
+  ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL,
+  DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
+  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
+  COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
   isTest,
   _set(key, value) {
     process.env[key] = value
diff --git a/packages/auth/src/index.js b/packages/auth/src/index.js
index 569456ea10..4aa2c8ab96 100644
--- a/packages/auth/src/index.js
+++ b/packages/auth/src/index.js
@@ -12,6 +12,7 @@ const {
   auditLog,
   tenancy,
   appTenancy,
+  authError,
 } = require("./middleware")
 const { setDB } = require("./db")
 const userCache = require("./cache/user")
@@ -60,6 +61,7 @@ module.exports = {
     buildTenancyMiddleware: tenancy,
     buildAppTenancyMiddleware: appTenancy,
     auditLog,
+    authError,
   },
   cache: {
     user: userCache,
diff --git a/packages/auth/src/middleware/index.js b/packages/auth/src/middleware/index.js
index 059f20af8b..cf8676a2bc 100644
--- a/packages/auth/src/middleware/index.js
+++ b/packages/auth/src/middleware/index.js
@@ -2,6 +2,7 @@ const jwt = require("./passport/jwt")
 const local = require("./passport/local")
 const google = require("./passport/google")
 const oidc = require("./passport/oidc")
+const { authError } = require("./passport/utils")
 const authenticated = require("./authenticated")
 const auditLog = require("./auditLog")
 const tenancy = require("./tenancy")
@@ -16,4 +17,5 @@ module.exports = {
   auditLog,
   tenancy,
   appTenancy,
+  authError,
 }
diff --git a/packages/auth/src/middleware/passport/google.js b/packages/auth/src/middleware/passport/google.js
index 07d6816c0b..cb93844c31 100644
--- a/packages/auth/src/middleware/passport/google.js
+++ b/packages/auth/src/middleware/passport/google.js
@@ -27,7 +27,11 @@ async function authenticate(accessToken, refreshToken, profile, done) {
  * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
 * @returns Dynamically configured Passport Google Strategy
 */
-exports.strategyFactory = async function (config, callbackUrl) {
+exports.strategyFactory = async function (
+  config,
+  callbackUrl,
+  verify = authenticate
+) {
   try {
     const { clientID, clientSecret } = config
@@ -43,7 +47,7 @@
         clientSecret: config.clientSecret,
         callbackURL: callbackUrl,
       },
-      authenticate
+      verify
     )
   } catch (err) {
     console.error(err)
diff --git a/packages/auth/src/middleware/passport/tests/third-party-common.spec.js b/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
index ff38a01fbb..1ace65ba40 100644
--- a/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
+++ b/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
@@ -104,7 +104,7 @@ describe("third party common", () => {
       _id: id,
       email: email,
     }
-    const response = await db.post(dbUser)
+    const response = await db.put(dbUser)
    dbUser._rev = response.rev
   }
diff --git a/packages/auth/src/middleware/passport/third-party-common.js b/packages/auth/src/middleware/passport/third-party-common.js
index 7c03944232..c25aa3e0b0 100644
--- a/packages/auth/src/middleware/passport/third-party-common.js
+++ b/packages/auth/src/middleware/passport/third-party-common.js
@@ -71,7 +71,7 @@ exports.authenticateThirdParty = async function (
     dbUser = await syncUser(dbUser, thirdPartyUser)
   // create or sync the user
-  const response = await db.post(dbUser)
+  const response = await db.put(dbUser)
   dbUser._rev = response.rev
   // authenticate
diff --git a/packages/auth/src/objectStore/index.js b/packages/auth/src/objectStore/index.js
index 81bdd06b62..9f271ad80e 100644
--- a/packages/auth/src/objectStore/index.js
+++ b/packages/auth/src/objectStore/index.js
@@ -73,6 +73,7 @@ exports.ObjectStore = bucket => {
   AWS.config.update({
     accessKeyId: env.MINIO_ACCESS_KEY,
     secretAccessKey: env.MINIO_SECRET_KEY,
+    region: env.AWS_REGION,
   })
   const config = {
     s3ForcePathStyle: true,
diff --git a/packages/auth/src/security/permissions.js b/packages/auth/src/security/permissions.js
index 03fa5fa562..d0308d783e 100644
--- a/packages/auth/src/security/permissions.js
+++ b/packages/auth/src/security/permissions.js
@@ -139,8 +139,7 @@ exports.doesHaveResourcePermission = (
   // set foundSub to not subResourceId, incase there is no subResource
   let foundMain = false,
     foundSub = false
-  for (let [resource, level] of Object.entries(permissions)) {
-    const levels = getAllowedLevels(level)
+  for (let [resource, levels] of Object.entries(permissions)) {
     if (resource === resourceId && levels.indexOf(permLevel) !== -1) {
       foundMain = true
     }
@@ -177,10 +176,6 @@ exports.doesHaveBasePermission = (permType, permLevel, permissionIds) => {
     return false
   }
 
-exports.higherPermission = (perm1, perm2) => {
-  return levelToNumber(perm1) > levelToNumber(perm2) ? perm1 : perm2
-}
-
 exports.isPermissionLevelHigherThanRead = level => {
   return levelToNumber(level) > 1
 }
diff --git a/packages/auth/src/security/roles.js b/packages/auth/src/security/roles.js
index baa8fc40dc..71fbc10132 100644
--- a/packages/auth/src/security/roles.js
+++ b/packages/auth/src/security/roles.js
@@ -1,6 +1,6 @@
 const { getDB } = require("../db")
 const { cloneDeep } = require("lodash/fp")
-const { BUILTIN_PERMISSION_IDS, higherPermission } = require("./permissions")
+const { BUILTIN_PERMISSION_IDS } = require("./permissions")
 const {
   generateRoleID,
   getRoleParams,
@@ -193,8 +193,17 @@ exports.getUserPermissions = async (appId, userRoleId) => {
   const permissions = {}
   for (let role of rolesHierarchy) {
     if (role.permissions) {
-      for (let [resource, level] of Object.entries(role.permissions)) {
-        permissions[resource] = higherPermission(permissions[resource], level)
+      for (let [resource, levels] of Object.entries(role.permissions)) {
+        if (!permissions[resource]) {
+          permissions[resource] = []
+        }
+        const permsSet = new Set(permissions[resource])
+        if (Array.isArray(levels)) {
+          levels.forEach(level => permsSet.add(level))
+        } else {
+          permsSet.add(levels)
+        }
+        permissions[resource] = [...permsSet]
       }
     }
   }
diff --git a/packages/auth/src/security/sessions.js b/packages/auth/src/security/sessions.js
index 328f74c794..83ca9d9bcd 100644
--- a/packages/auth/src/security/sessions.js
+++ b/packages/auth/src/security/sessions.js
@@ -30,6 +30,10 @@ exports.invalidateSessions = async (userId, sessionId = null) => {
     sessions.push({ key: makeSessionID(userId, sessionId) })
   } else {
     sessions = await getSessionsForUser(userId)
+    sessions.forEach(
+      session =>
+        (session.key = makeSessionID(session.userId, session.sessionId))
+    )
   }
   const client = await redis.getSessionClient()
   const promises = []
diff --git a/packages/auth/src/tenancy/context.js b/packages/auth/src/tenancy/context.js
index b1ef5a5807..01d1fdc604 100644
--- a/packages/auth/src/tenancy/context.js
+++ b/packages/auth/src/tenancy/context.js
@@ -53,6 +53,11 @@ exports.setTenantId = (
   // processed later in the chain
   tenantId = user.tenantId || header || tenantId
 
+  // Set the tenantId from the subdomain
+  if (!tenantId) {
+    tenantId = ctx.subdomains && ctx.subdomains[0]
+  }
+
   if (!tenantId && !allowNoTenant) {
     ctx.throw(403, "Tenant id not set")
   }
diff --git a/packages/auth/src/tenancy/deprovision.js b/packages/auth/src/tenancy/deprovision.js
new file mode 100644
index 0000000000..608ca1b84a
--- /dev/null
+++ b/packages/auth/src/tenancy/deprovision.js
@@ -0,0 +1,97 @@
+const { getGlobalUserParams, getAllApps } = require("../db/utils")
+const { getDB, getCouch } = require("../db")
+const { getGlobalDB } = require("./tenancy")
+const { StaticDatabases } = require("../db/constants")
+
+const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
+const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
+
+const removeTenantFromInfoDB = async tenantId => {
+  try {
+    const infoDb = getDB(PLATFORM_INFO_DB)
+    let tenants = await infoDb.get(TENANT_DOC)
+    tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
+
+    await infoDb.put(tenants)
+  } catch (err) {
+    console.error(`Error removing tenant ${tenantId} from info db`, err)
+    throw err
+  }
+}
+
+exports.removeUserFromInfoDB = async dbUser => {
+  const infoDb = getDB(PLATFORM_INFO_DB)
+  const keys = [dbUser._id, dbUser.email]
+  const userDocs = await infoDb.allDocs({
+    keys,
+    include_docs: true,
+  })
+  const toDelete = userDocs.rows.map(row => {
+    return {
+      ...row.doc,
+      _deleted: true,
+    }
+  })
+  await infoDb.bulkDocs(toDelete)
+}
+
+const removeUsersFromInfoDB = async tenantId => {
+  try {
+    const globalDb = getGlobalDB(tenantId)
+    const infoDb = getDB(PLATFORM_INFO_DB)
+    const allUsers = await globalDb.allDocs(
+      getGlobalUserParams(null, {
+        include_docs: true,
+      })
+    )
+    const allEmails = allUsers.rows.map(row => row.doc.email)
+    // get the id docs
+    let keys = allUsers.rows.map(row => row.id)
+    // and the email docs
+    keys = keys.concat(allEmails)
+    // retrieve the docs and delete them
+    const userDocs = await infoDb.allDocs({
+      keys,
+      include_docs: true,
+    })
+    const toDelete = userDocs.rows.map(row => {
+      return {
+        ...row.doc,
+        _deleted: true,
+      }
+    })
+    await infoDb.bulkDocs(toDelete)
+  } catch (err) {
+    console.error(`Error removing tenant ${tenantId} users from info db`, err)
+    throw err
+  }
+}
+
+const removeGlobalDB = async tenantId => {
+  try {
+    const globalDb = getGlobalDB(tenantId)
+    await globalDb.destroy()
+  } catch (err) {
+    console.error(`Error removing tenant ${tenantId} users from info db`, err)
+    throw err
+  }
+}
+
+const removeTenantApps = async tenantId => {
+  try {
+    const apps = await getAllApps(getCouch(), { all: true })
+    const destroyPromises = apps.map(app => getDB(app.appId).destroy())
+    await Promise.allSettled(destroyPromises)
+  } catch (err) {
+    console.error(`Error removing tenant ${tenantId} apps`, err)
+    throw err
+  }
+}
+
+// can't live in tenancy package due to circular dependency on db/utils
+exports.deleteTenant = async tenantId => {
+  await removeTenantFromInfoDB(tenantId)
+  await removeUsersFromInfoDB(tenantId)
+  await removeGlobalDB(tenantId)
+  await removeTenantApps(tenantId)
+}
diff --git a/packages/auth/src/tenancy/tenancy.js b/packages/auth/src/tenancy/tenancy.js
index ebd573496c..668bc010ba 100644
--- a/packages/auth/src/tenancy/tenancy.js
+++ b/packages/auth/src/tenancy/tenancy.js
@@ -73,7 +73,7 @@ exports.tryAddTenant = async (tenantId, userId, email) => {
   await Promise.all(promises)
 }
 
-exports.getGlobalDB = (tenantId = null) => {
+exports.getGlobalDBName = (tenantId = null) => {
   // tenant ID can be set externally, for example user API where
   // new tenants are being created, this may be the case
   if (!tenantId) {
@@ -81,13 +81,16 @@
   }
 
   let dbName
-
   if (tenantId === DEFAULT_TENANT_ID) {
     dbName = StaticDatabases.GLOBAL.name
   } else {
     dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
   }
+  return dbName
+}
+exports.getGlobalDB = (tenantId = null) => {
+  const dbName = exports.getGlobalDBName(tenantId)
   return getDB(dbName)
 }
diff --git a/packages/auth/src/utils.js b/packages/auth/src/utils.js
index 5936948fd7..93b483c6be 100644
--- a/packages/auth/src/utils.js
+++ b/packages/auth/src/utils.js
@@ -4,6 +4,7 @@ const { options } = require("./middleware/passport/jwt")
 const { createUserEmailView } = require("./db/views")
 const { Headers } = require("./constants")
 const { getGlobalDB } = require("./tenancy")
+const environment = require("./environment")
 
 const APP_PREFIX = DocumentTypes.APP + SEPARATOR
 
@@ -66,17 +67,22 @@ exports.getCookie = (ctx, name) => {
  * @param {string|object} value The value of cookie which will be set.
  */
 exports.setCookie = (ctx, value, name = "builder") => {
-  if (!value) {
-    ctx.cookies.set(name)
-  } else {
+  if (value) {
     value = jwt.sign(value, options.secretOrKey)
-    ctx.cookies.set(name, value, {
-      maxAge: Number.MAX_SAFE_INTEGER,
-      path: "/",
-      httpOnly: false,
-      overwrite: true,
-    })
   }
+
+  const config = {
+    maxAge: Number.MAX_SAFE_INTEGER,
+    path: "/",
+    httpOnly: false,
+    overwrite: true,
+  }
+
+  if (environment.COOKIE_DOMAIN) {
+    config.domain = environment.COOKIE_DOMAIN
+  }
+
+  ctx.cookies.set(name, value, config)
 }
 /**
diff --git a/packages/auth/yarn.lock b/packages/auth/yarn.lock
index b6be8ad1e8..35f892669a 100644
--- a/packages/auth/yarn.lock
+++ b/packages/auth/yarn.lock
@@ -4470,9 +4470,9 @@ tmp@^0.0.33:
     os-tmpdir "~1.0.2"
 
 tmpl@1.0.x:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
-  integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
+  version "1.0.5"
+  resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
+  integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
 
 to-fast-properties@^2.0.0:
   version "2.0.0"
diff --git a/packages/bbui/package.json b/packages/bbui/package.json
index e07bb81aba..2bcc2097a9 100644
--- a/packages/bbui/package.json
+++ b/packages/bbui/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "0.9.125-alpha.17",
+  "version": "0.9.146-alpha.5",
   "license": "AGPL-3.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
diff --git a/packages/bbui/src/Modal/ModalContent.svelte b/packages/bbui/src/Modal/ModalContent.svelte
index 678a813a61..09cc4f6c52 100644
--- a/packages/bbui/src/Modal/ModalContent.svelte
+++ b/packages/bbui/src/Modal/ModalContent.svelte
@@ -14,6 +14,7 @@
   export let showConfirmButton = true
   export let showCloseIcon = true
   export let onConfirm = undefined
+  export let onCancel = undefined
   export let disabled = false
   export let showDivider = true
@@ -28,6 +29,14 @@
     }
     loading = false
   }
+
+  async function close() {
+    loading = true
+    if (!onCancel || (await onCancel()) !== false) {
+      cancel()
+    }
+    loading = false
+  }
{#if showCancelButton} - + {/if} {#if showConfirmButton}
+ + diff --git a/packages/builder/src/components/backend/DataTable/Table.svelte b/packages/builder/src/components/backend/DataTable/Table.svelte index fcb17a774d..78c3cc37f8 100644 --- a/packages/builder/src/components/backend/DataTable/Table.svelte +++ b/packages/builder/src/components/backend/DataTable/Table.svelte @@ -1,8 +1,7 @@ -
-
- {#if title} - {title} - {/if} - {#if loading} -
- -
- {/if} + +
+
+ {#if title} + {title} + {/if} + {#if loading} +
+ +
+ {/if} +
+
+ + {#if !isUsersTable && selectedRows.length > 0} + + {/if} +
-
- - {#if !isUsersTable && selectedRows.length > 0} - - {/if} -
-
-{#key tableId} - editColumn(e.detail)} - on:editrow={e => editRow(e.detail)} - on:clickrelationship={e => selectRelationship(e.detail)} - /> -{/key} + {#key tableId} +
+
editColumn(e.detail)} + on:editrow={e => editRow(e.detail)} + on:clickrelationship={e => selectRelationship(e.detail)} + on:sort + /> + + {/key} + - + - + diff --git a/packages/builder/src/components/backend/DataTable/buttons/FilterButton.svelte b/packages/builder/src/components/backend/DataTable/buttons/ViewFilterButton.svelte similarity index 100% rename from packages/builder/src/components/backend/DataTable/buttons/FilterButton.svelte rename to packages/builder/src/components/backend/DataTable/buttons/ViewFilterButton.svelte diff --git a/packages/builder/src/components/backend/DataTable/modals/CalculateModal.svelte b/packages/builder/src/components/backend/DataTable/modals/CalculateModal.svelte index 660a822898..50d44eca88 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CalculateModal.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CalculateModal.svelte @@ -1,7 +1,7 @@ diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte index 1a7d75f28a..011c9bee43 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte @@ -10,6 +10,7 @@ ModalContent, Context, } from "@budibase/bbui" + import { createEventDispatcher } from "svelte" import { cloneDeep } from "lodash/fp" import { tables } from "stores/backend" import { TableNames, UNEDITABLE_USER_FIELDS } from "constants" @@ -30,8 +31,9 @@ const AUTO_TYPE = "auto" const FORMULA_TYPE = FIELDS.FORMULA.type const LINK_TYPE = FIELDS.LINK.type - let fieldDefinitions = cloneDeep(FIELDS) + const dispatch = createEventDispatcher() const { hide } = getContext(Context.Modal) + let fieldDefinitions = cloneDeep(FIELDS) export let field = { type: "string", @@ -81,12 +83,13 @@ if (field.type === AUTO_TYPE) { field = buildAutoColumn($tables.draft.name, field.name, field.subtype) } - tables.saveField({ + await tables.saveField({ originalName, field, primaryDisplay, indexes, }) + dispatch("updatecolumns") } function deleteColumn() { @@ -99,6 +102,7 @@ hide() deletion = false } + dispatch("updatecolumns") } function handleTypeChange(event) { diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditRow.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditRow.svelte index ce52287c99..a60d9ecf31 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditRow.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditRow.svelte @@ -1,4 +1,5 @@ diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditUser.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditUser.svelte index 32f369ce3d..f1de23fb97 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditUser.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditUser.svelte @@ -1,4 +1,5 @@ diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte index 61777c0b7e..2f6ec51233 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte @@ -3,7 +3,7 @@ import { goto } from "@roxi/routify" import { views as viewsStore } from 
"stores/backend" import { tables } from "stores/backend" - import analytics from "analytics" + import analytics, { Events } from "analytics" let name let field @@ -21,7 +21,7 @@ field, }) notifications.success(`View ${name} created`) - analytics.captureEvent("View Created", { name }) + analytics.captureEvent(Events.VIEW.CREATED, { name }) $goto(`../../view/${name}`) } diff --git a/packages/builder/src/components/backend/DataTable/modals/FilterModal.svelte b/packages/builder/src/components/backend/DataTable/modals/FilterModal.svelte index 170bb75142..9c6f4956b0 100644 --- a/packages/builder/src/components/backend/DataTable/modals/FilterModal.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/FilterModal.svelte @@ -11,7 +11,7 @@ Icon, } from "@budibase/bbui" import { tables, views } from "stores/backend" - import analytics from "analytics" + import analytics, { Events } from "analytics" const CONDITIONS = [ { @@ -65,7 +65,7 @@ function saveView() { views.save(view) notifications.success(`View ${view.name} saved.`) - analytics.captureEvent("Added View Filter", { + analytics.captureEvent(Events.VIEW.ADDED_FILTER, { filters: JSON.stringify(view.filters), }) } diff --git a/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte b/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte index 84c737eb67..3c6fa83c01 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte @@ -1,8 +1,9 @@ {#if $database?._id}
- {#each $datasources.list as datasource, idx} + {#each enrichedDataSources as datasource, idx} 0} text={datasource.name} - opened={openDataSources.includes(datasource._id)} - selected={$datasources.selected === datasource._id} + opened={datasource.open} + selected={datasource.selected} withArrow={true} on:click={() => selectDatasource(datasource)} on:iconClick={() => toggleNode(datasource)} @@ -61,22 +121,21 @@ {/if} - {#if openDataSources.includes(datasource._id)} + {#if datasource.open} + {#each $queries.list.filter(query => query.datasourceId === datasource._id) as query} + onClickQuery(query)} + > + + + {/each} {/if} - - {#each $queries.list.filter(query => query.datasourceId === datasource._id) as query} - onClickQuery(query)} - > - - - {/each} {/each}
{/if} diff --git a/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte b/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte index 4541052acf..f67a46d9a1 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte @@ -1,20 +1,28 @@ {#each Object.keys(schema) as configKey} {#if schema[configKey].type === "object"} - +
+ + +
{:else if schema[configKey].type === "boolean"}
@@ -42,4 +50,11 @@ grid-gap: var(--spacing-l); align-items: center; } + + .form-row.ssl { + display: grid; + grid-template-columns: 20% 20%; + grid-gap: var(--spacing-l); + align-items: center; + } diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte index 9cdd893230..0a04d8dc73 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte @@ -1,74 +1,160 @@ - - + + - - - - - + + + + + + + { + chooseNextModal() + }} + > + + All apps need data. You can connect to a data source below, or add data + to your app using Budibase's built-in database. + +
selectIntegration(INTERNAL)} + class="item hoverable" + > +
+ + Budibase DB +
+
+
+ + +
+ Connect to data source +
+
+ {#each Object.entries(integrations).filter(([key]) => key !== INTERNAL) as [integrationType, schema]} +
selectIntegration(integrationType)} + class="item hoverable" + > +
+ + + + {schema.name || IntegrationNames[integrationType]} +
+
+ {/each} +
+
+
+
+ + diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte new file mode 100644 index 0000000000..56fa26ee0a --- /dev/null +++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte @@ -0,0 +1,72 @@ + + + saveDatasource()} + confirmText={integration.plus + ? "Fetch tables from database" + : "Save and continue to query"} + cancelText="Back" + size="M" +> + + Connect your database to Budibase using the config below. + + + + + + + diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/UpdateDatasourceModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/UpdateDatasourceModal.svelte index f93af59a38..28625aa86e 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/modals/UpdateDatasourceModal.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/UpdateDatasourceModal.svelte @@ -2,7 +2,7 @@ import { datasources } from "stores/backend" import { notifications } from "@budibase/bbui" import { Input, ModalContent, Modal } from "@budibase/bbui" - import analytics from "analytics" + import analytics, { Events } from "analytics" let error = "" let modal @@ -35,7 +35,7 @@ } await datasources.save(updatedDatasource) notifications.success(`Datasource ${name} updated successfully.`) - analytics.captureEvent("Datasource Updated", updatedDatasource) + analytics.captureEvent(Events.DATASOURCE.UPDATED, updatedDatasource) hide() } diff --git a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte index b59e5cda5e..dd8876be27 100644 --- a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte +++ b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte @@ -12,7 +12,7 @@ Layout, } from "@budibase/bbui" import TableDataImport from "../TableDataImport.svelte" - import analytics from "analytics" + import analytics, { Events } from "analytics" import screenTemplates from "builderStore/store/screenTemplates" import { buildAutoColumn, getAutoColumnInformation } from "builderStore/utils" import { NEW_ROW_TEMPLATE } from "builderStore/store/screenTemplates/newRowScreen" @@ -67,7 +67,7 @@ // Create table const table = await tables.save(newTable) notifications.success(`Table ${name} created successfully.`) - analytics.captureEvent("Table Created", { name }) + analytics.captureEvent(Events.TABLE.CREATED, { name }) // Create auto screens if (createAutoscreens) { diff --git a/packages/builder/src/components/deploy/DeployModal.svelte b/packages/builder/src/components/deploy/DeployModal.svelte index 4daa16c7c4..3dcf0c27b1 100644 --- a/packages/builder/src/components/deploy/DeployModal.svelte +++ b/packages/builder/src/components/deploy/DeployModal.svelte @@ -2,7 +2,8 @@ import { onMount, onDestroy } from "svelte" import { Button, Modal, notifications, ModalContent } from "@budibase/bbui" import api from "builderStore/api" - import analytics from "analytics" + import analytics, { Events } from "analytics" + import { store } from "builderStore" const DeploymentStatus = { SUCCESS: "SUCCESS", @@ -23,6 +24,9 @@ if (response.status !== 200) { throw new Error(`status ${response.status}`) } else { + analytics.captureEvent(Events.APP.PUBLISHED, { + appId: $store.appId, 
+ }) notifications.success(`Application published successfully`) } } catch (err) { diff --git a/packages/builder/src/components/design/NavigationPanel/NewScreenModal.svelte b/packages/builder/src/components/design/NavigationPanel/NewScreenModal.svelte index ed0c764956..e02f9d87e5 100644 --- a/packages/builder/src/components/design/NavigationPanel/NewScreenModal.svelte +++ b/packages/builder/src/components/design/NavigationPanel/NewScreenModal.svelte @@ -4,7 +4,7 @@ import { roles } from "stores/backend" import { Input, Select, ModalContent, Toggle } from "@budibase/bbui" import getTemplates from "builderStore/store/screenTemplates" - import analytics from "analytics" + import analytics, { Events } from "analytics" const CONTAINER = "@budibase/standard-components/container" @@ -66,7 +66,7 @@ if (templateIndex !== undefined) { const template = templates[templateIndex] - analytics.captureEvent("Screen Created", { + analytics.captureEvent(Events.SCREEN.CREATED, { template: template.id || template.name, }) } diff --git a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/ConditionalUIDrawer.svelte b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/ConditionalUIDrawer.svelte index 638fd44de6..9f0d5086f6 100644 --- a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/ConditionalUIDrawer.svelte +++ b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/ConditionalUIDrawer.svelte @@ -12,7 +12,7 @@ import { dndzone } from "svelte-dnd-action" import { generate } from "shortid" import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte" - import { OperatorOptions, getValidOperatorsForType } from "helpers/lucene" + import { OperatorOptions, getValidOperatorsForType } from "constants/lucene" import { selectedComponent, store } from "builderStore" import { getComponentForSettingType } from "./componentSettings" import PropertyControl from "./PropertyControl.svelte" diff --git a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/RefreshDataProvider.svelte b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/RefreshDataProvider.svelte new file mode 100644 index 0000000000..93ddca8c3f --- /dev/null +++ b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/RefreshDataProvider.svelte @@ -0,0 +1,35 @@ + + +
+ + @@ -133,7 +135,7 @@ /> {:else if ["string", "longform", "number"].includes(filter.type)} - {:else if filter.type === "options" || "array"} + {:else if ["options", "array"].includes(filter.type)} { dispatch("change", tempValue) diff --git a/packages/builder/src/components/integration/KeyValueBuilder.svelte b/packages/builder/src/components/integration/KeyValueBuilder.svelte index 83977fa2a8..04ab56103e 100644 --- a/packages/builder/src/components/integration/KeyValueBuilder.svelte +++ b/packages/builder/src/components/integration/KeyValueBuilder.svelte @@ -4,6 +4,7 @@ export let defaults export let object = defaults || {} export let readOnly + export let noAddButton let fields = Object.entries(object).map(([name, value]) => ({ name, value })) @@ -12,7 +13,7 @@ {} ) - function addEntry() { + export function addEntry() { fields = [...fields, {}] } @@ -32,7 +33,7 @@ {/if} {/each}
-{#if !readOnly} +{#if !readOnly && !noAddButton}
diff --git a/packages/builder/src/components/start/CreateAppModal.svelte b/packages/builder/src/components/start/CreateAppModal.svelte index 4310d3322e..9ce9d746d7 100644 --- a/packages/builder/src/components/start/CreateAppModal.svelte +++ b/packages/builder/src/components/start/CreateAppModal.svelte @@ -12,7 +12,7 @@ import { admin } from "stores/portal" import { string, mixed, object } from "yup" import api, { get, post } from "builderStore/api" - import analytics from "analytics" + import analytics, { Events } from "analytics" import { onMount } from "svelte" import { capitalise } from "helpers" import { goto } from "@roxi/routify" @@ -98,9 +98,9 @@ throw new Error(appJson.message) } - analytics.captureEvent("App Created", { + analytics.captureEvent(Events.APP.CREATED, { name: $values.name, - appId: appJson._id, + appId: appJson.instance._id, template, }) diff --git a/packages/builder/src/components/upgrade/UpgradeModal.svelte b/packages/builder/src/components/upgrade/UpgradeModal.svelte index f73dcf335f..570dcc06a1 100644 --- a/packages/builder/src/components/upgrade/UpgradeModal.svelte +++ b/packages/builder/src/components/upgrade/UpgradeModal.svelte @@ -4,7 +4,7 @@ let upgradeModal const onConfirm = () => { - window.open("https://accounts.budibase.com/install", "_blank") + window.open("https://account.budibase.app/portal/install", "_blank") } diff --git a/packages/builder/src/constants/index.js b/packages/builder/src/constants/index.js index a892eb2129..c0d283b0ea 100644 --- a/packages/builder/src/constants/index.js +++ b/packages/builder/src/constants/index.js @@ -15,6 +15,20 @@ export const AppStatus = { DEPLOYED: "published", } +export const IntegrationNames = { + POSTGRES: "PostgreSQL", + MONGODB: "MongoDB", + COUCHDB: "CouchDB", + S3: "S3", + MYSQL: "MySQL", + REST: "REST", + DYNAMODB: "DynamoDB", + ELASTICSEARCH: "ElasticSearch", + SQL_SERVER: "SQL Server", + AIRTABLE: "Airtable", + ARANGODB: "ArangoDB", +} + // fields on the user table that cannot be edited export const UNEDITABLE_USER_FIELDS = [ "email", diff --git a/packages/builder/src/constants/lucene.js b/packages/builder/src/constants/lucene.js new file mode 100644 index 0000000000..00da0c29bc --- /dev/null +++ b/packages/builder/src/constants/lucene.js @@ -0,0 +1,97 @@ +/** + * Operator options for lucene queries + */ +export const OperatorOptions = { + Equals: { + value: "equal", + label: "Equals", + }, + NotEquals: { + value: "notEqual", + label: "Not equals", + }, + Empty: { + value: "empty", + label: "Is empty", + }, + NotEmpty: { + value: "notEmpty", + label: "Is not empty", + }, + StartsWith: { + value: "string", + label: "Starts with", + }, + Like: { + value: "fuzzy", + label: "Like", + }, + MoreThan: { + value: "rangeLow", + label: "More than", + }, + LessThan: { + value: "rangeHigh", + label: "Less than", + }, + Contains: { + value: "equal", + label: "Contains", + }, + NotContains: { + value: "notEqual", + label: "Does Not Contain", + }, +} + +/** + * Returns the valid operator options for a certain data type + * @param type the data type + */ +export const getValidOperatorsForType = type => { + const Op = OperatorOptions + if (type === "string") { + return [ + Op.Equals, + Op.NotEquals, + Op.StartsWith, + Op.Like, + Op.Empty, + Op.NotEmpty, + ] + } else if (type === "number") { + return [ + Op.Equals, + Op.NotEquals, + Op.MoreThan, + Op.LessThan, + Op.Empty, + Op.NotEmpty, + ] + } else if (type === "options") { + return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty] + } else if (type === "array") { + return 
[Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty] + } else if (type === "boolean") { + return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty] + } else if (type === "longform") { + return [ + Op.Equals, + Op.NotEquals, + Op.StartsWith, + Op.Like, + Op.Empty, + Op.NotEmpty, + ] + } else if (type === "datetime") { + return [ + Op.Equals, + Op.NotEquals, + Op.MoreThan, + Op.LessThan, + Op.Empty, + Op.NotEmpty, + ] + } + return [] +} diff --git a/packages/builder/src/helpers/fetchTableData.js b/packages/builder/src/helpers/fetchTableData.js new file mode 100644 index 0000000000..3623426fbd --- /dev/null +++ b/packages/builder/src/helpers/fetchTableData.js @@ -0,0 +1,206 @@ +// Do not use any aliased imports in common files, as these will be bundled +// by multiple bundlers which may not be able to resolve them +import { writable, derived, get } from "svelte/store" +import * as API from "../builderStore/api" +import { buildLuceneQuery } from "./lucene" + +const defaultOptions = { + tableId: null, + filters: null, + limit: 10, + sortColumn: null, + sortOrder: "ascending", + paginate: true, + schema: null, +} + +export const fetchTableData = opts => { + // Save option set so we can override it later rather than relying on params + let options = { + ...defaultOptions, + ...opts, + } + + // Local non-observable state + let query + let sortType + let lastBookmark + + // Local observable state + const store = writable({ + rows: [], + schema: null, + loading: false, + loaded: false, + bookmarks: [], + pageNumber: 0, + }) + + // Derive certain properties to return + const derivedStore = derived(store, $store => { + return { + ...$store, + hasNextPage: $store.bookmarks[$store.pageNumber + 1] != null, + hasPrevPage: $store.pageNumber > 0, + } + }) + + const fetchPage = async bookmark => { + lastBookmark = bookmark + const { tableId, limit, sortColumn, sortOrder, paginate } = options + store.update($store => ({ ...$store, loading: true })) + const res = await API.post(`/api/${options.tableId}/search`, { + tableId, + query, + limit, + sort: sortColumn, + sortOrder: sortOrder?.toLowerCase() ?? "ascending", + sortType, + paginate, + bookmark, + }) + store.update($store => ({ ...$store, loading: false, loaded: true })) + return await res.json() + } + + // Fetches a fresh set of results from the server + const fetchData = async () => { + const { tableId, schema, sortColumn, filters } = options + + // Ensure table ID exists + if (!tableId) { + return + } + + // Get and enrich schema. + // Ensure there are "name" properties for all fields and that field schema + // are objects + let enrichedSchema = schema + if (!enrichedSchema) { + const definition = await API.get(`/api/tables/${tableId}`) + enrichedSchema = definition?.schema ?? null + } + if (enrichedSchema) { + Object.entries(schema).forEach(([fieldName, fieldSchema]) => { + if (typeof fieldSchema === "string") { + enrichedSchema[fieldName] = { + type: fieldSchema, + name: fieldName, + } + } else { + enrichedSchema[fieldName] = { + ...fieldSchema, + name: fieldName, + } + } + }) + + // Save fixed schema so we can provide it later + options.schema = enrichedSchema + } + + // Ensure schema exists + if (!schema) { + return + } + store.update($store => ({ ...$store, schema })) + + // Work out what sort type to use + if (!sortColumn || !schema[sortColumn]) { + sortType = "string" + } + const type = schema?.[sortColumn]?.type + sortType = type === "number" ? 
"number" : "string" + + // Build the lucene query + query = buildLuceneQuery(filters) + + // Actually fetch data + const page = await fetchPage() + store.update($store => ({ + ...$store, + loading: false, + loaded: true, + pageNumber: 0, + rows: page.rows, + bookmarks: page.hasNextPage ? [null, page.bookmark] : [null], + })) + } + + // Fetches the next page of data + const nextPage = async () => { + const state = get(derivedStore) + if (state.loading || !options.paginate || !state.hasNextPage) { + return + } + + // Fetch next page + const page = await fetchPage(state.bookmarks[state.pageNumber + 1]) + + // Update state + store.update($store => { + let { bookmarks, pageNumber } = $store + if (page.hasNextPage) { + bookmarks[pageNumber + 2] = page.bookmark + } + return { + ...$store, + pageNumber: pageNumber + 1, + rows: page.rows, + bookmarks, + } + }) + } + + // Fetches the previous page of data + const prevPage = async () => { + const state = get(derivedStore) + if (state.loading || !options.paginate || !state.hasPrevPage) { + return + } + + // Fetch previous page + const page = await fetchPage(state.bookmarks[state.pageNumber - 1]) + + // Update state + store.update($store => { + return { + ...$store, + pageNumber: $store.pageNumber - 1, + rows: page.rows, + } + }) + } + + // Resets the data set and updates options + const update = async newOptions => { + if (newOptions) { + options = { + ...options, + ...newOptions, + } + } + await fetchData() + } + + // Loads the same page again + const refresh = async () => { + if (get(store).loading) { + return + } + const page = await fetchPage(lastBookmark) + store.update($store => ({ ...$store, rows: page.rows })) + } + + // Initially fetch data but don't bother waiting for the result + fetchData() + + // Return our derived store which will be updated over time + return { + subscribe: derivedStore.subscribe, + nextPage, + prevPage, + update, + refresh, + } +} diff --git a/packages/builder/src/helpers/lucene.js b/packages/builder/src/helpers/lucene.js index 18692359e4..03baa751cc 100644 --- a/packages/builder/src/helpers/lucene.js +++ b/packages/builder/src/helpers/lucene.js @@ -1,90 +1,179 @@ -export const OperatorOptions = { - Equals: { - value: "equal", - label: "Equals", - }, - NotEquals: { - value: "notEqual", - label: "Not equals", - }, - Empty: { - value: "empty", - label: "Is empty", - }, - NotEmpty: { - value: "notEmpty", - label: "Is not empty", - }, - StartsWith: { - value: "string", - label: "Starts with", - }, - Like: { - value: "fuzzy", - label: "Like", - }, - MoreThan: { - value: "rangeLow", - label: "More than", - }, - LessThan: { - value: "rangeHigh", - label: "Less than", - }, - Contains: { - value: "equal", - label: "Contains", - }, - NotContains: { - value: "notEqual", - label: "Does Not Contain", - }, +/** + * Builds a lucene JSON query from the filter structure generated in the builder + * @param filter the builder filter structure + */ +export const buildLuceneQuery = filter => { + let query = { + string: {}, + fuzzy: {}, + range: {}, + equal: {}, + notEqual: {}, + empty: {}, + notEmpty: {}, + contains: {}, + notContains: {}, + } + if (Array.isArray(filter)) { + filter.forEach(expression => { + let { operator, field, type, value } = expression + // Parse all values into correct types + if (type === "datetime" && value) { + value = new Date(value).toISOString() + } + if (type === "number") { + value = parseFloat(value) + } + if (type === "boolean") { + value = `${value}`?.toLowerCase() === "true" + } + if 
(operator.startsWith("range")) { + if (!query.range[field]) { + query.range[field] = { + low: + type === "number" + ? Number.MIN_SAFE_INTEGER + : "0000-00-00T00:00:00.000Z", + high: + type === "number" + ? Number.MAX_SAFE_INTEGER + : "9999-00-00T00:00:00.000Z", + } + } + if (operator === "rangeLow" && value != null && value !== "") { + query.range[field].low = value + } else if (operator === "rangeHigh" && value != null && value !== "") { + query.range[field].high = value + } + } else if (query[operator]) { + if (type === "boolean") { + // Transform boolean filters to cope with null. + // "equals false" needs to be "not equals true" + // "not equals false" needs to be "equals true" + if (operator === "equal" && value === false) { + query.notEqual[field] = true + } else if (operator === "notEqual" && value === false) { + query.equal[field] = true + } else { + query[operator][field] = value + } + } else { + query[operator][field] = value + } + } + }) + } + + return query } -export const getValidOperatorsForType = type => { - const Op = OperatorOptions - if (type === "string") { - return [ - Op.Equals, - Op.NotEquals, - Op.StartsWith, - Op.Like, - Op.Empty, - Op.NotEmpty, - ] - } else if (type === "number") { - return [ - Op.Equals, - Op.NotEquals, - Op.MoreThan, - Op.LessThan, - Op.Empty, - Op.NotEmpty, - ] - } else if (type === "options") { - return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty] - } else if (type === "array") { - return [Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty] - } else if (type === "boolean") { - return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty] - } else if (type === "longform") { - return [ - Op.Equals, - Op.NotEquals, - Op.StartsWith, - Op.Like, - Op.Empty, - Op.NotEmpty, - ] - } else if (type === "datetime") { - return [ - Op.Equals, - Op.NotEquals, - Op.MoreThan, - Op.LessThan, - Op.Empty, - Op.NotEmpty, - ] +/** + * Performs a client-side lucene search on an array of data + * @param docs the data + * @param query the JSON lucene query + */ +export const luceneQuery = (docs, query) => { + if (!query) { + return docs } - return [] + + // Iterates over a set of filters and evaluates a fail function against a doc + const match = (type, failFn) => doc => { + const filters = Object.entries(query[type] || {}) + for (let i = 0; i < filters.length; i++) { + if (failFn(filters[i][0], filters[i][1], doc)) { + return false + } + } + return true + } + + // Process a string match (fails if the value does not start with the string) + const stringMatch = match("string", (key, value, doc) => { + return !doc[key] || !doc[key].startsWith(value) + }) + + // Process a fuzzy match (treat the same as starts with when running locally) + const fuzzyMatch = match("fuzzy", (key, value, doc) => { + return !doc[key] || !doc[key].startsWith(value) + }) + + // Process a range match + const rangeMatch = match("range", (key, value, doc) => { + return !doc[key] || doc[key] < value.low || doc[key] > value.high + }) + + // Process an equal match (fails if the value is different) + const equalMatch = match("equal", (key, value, doc) => { + return value != null && value !== "" && doc[key] !== value + }) + + // Process a not-equal match (fails if the value is the same) + const notEqualMatch = match("notEqual", (key, value, doc) => { + return value != null && value !== "" && doc[key] === value + }) + + // Process an empty match (fails if the value is not empty) + const emptyMatch = match("empty", (key, value, doc) => { + return doc[key] != null && doc[key] !== "" + }) + + // Process a 
not-empty match (fails is the value is empty) + const notEmptyMatch = match("notEmpty", (key, value, doc) => { + return doc[key] == null || doc[key] === "" + }) + + // Match a document against all criteria + const docMatch = doc => { + return ( + stringMatch(doc) && + fuzzyMatch(doc) && + rangeMatch(doc) && + equalMatch(doc) && + notEqualMatch(doc) && + emptyMatch(doc) && + notEmptyMatch(doc) + ) + } + + // Process all docs + return docs.filter(docMatch) +} + +/** + * Performs a client-side sort from the equivalent server-side lucene sort + * parameters. + * @param docs the data + * @param sort the sort column + * @param sortOrder the sort order ("ascending" or "descending") + * @param sortType the type of sort ("string" or "number") + */ +export const luceneSort = (docs, sort, sortOrder, sortType = "string") => { + if (!sort || !sortOrder || !sortType) { + return docs + } + const parse = sortType === "string" ? x => `${x}` : x => parseFloat(x) + return docs.slice().sort((a, b) => { + const colA = parse(a[sort]) + const colB = parse(b[sort]) + if (sortOrder === "Descending") { + return colA > colB ? -1 : 1 + } else { + return colA > colB ? 1 : -1 + } + }) +} + +/** + * Limits the specified docs to the specified number of rows from the equivalent + * server-side lucene limit parameters. + * @param docs the data + * @param limit the number of docs to limit to + */ +export const luceneLimit = (docs, limit) => { + const numLimit = parseFloat(limit) + if (isNaN(numLimit)) { + return docs + } + return docs.slice(0, numLimit) } diff --git a/packages/builder/src/helpers/warnings.js b/packages/builder/src/helpers/warnings.js new file mode 100644 index 0000000000..ad943a8578 --- /dev/null +++ b/packages/builder/src/helpers/warnings.js @@ -0,0 +1,16 @@ +export const suppressWarnings = warnings => { + if (!warnings?.length) { + return + } + const regex = new RegExp(warnings.map(x => `(${x})`).join("|"), "gi") + const warn = console.warn + console.warn = (...params) => { + const msg = params[0] + if (msg && typeof msg === "string") { + if (msg.match(regex)) { + return + } + } + warn(...params) + } +} diff --git a/packages/builder/src/main.js b/packages/builder/src/main.js index f0fd0af178..bc5ec4f009 100644 --- a/packages/builder/src/main.js +++ b/packages/builder/src/main.js @@ -7,11 +7,19 @@ import "@spectrum-css/vars/dist/spectrum-light.css" import "@spectrum-css/vars/dist/spectrum-lightest.css" import "@spectrum-css/page/dist/index-vars.css" import "./global.css" - +import { suppressWarnings } from "./helpers/warnings" import loadSpectrumIcons from "@budibase/bbui/spectrum-icons-vite.js" +import App from "./App.svelte" + +// Init spectrum icons loadSpectrumIcons() -import App from "./App.svelte" +// Suppress svelte runtime warnings +suppressWarnings([ + "was created with unknown prop", + "was created without expected prop", + "received an unexpected slot", +]) export default new App({ target: document.getElementById("app"), diff --git a/packages/builder/src/pages/builder/_layout.svelte b/packages/builder/src/pages/builder/_layout.svelte index 4b296854b6..f248210962 100644 --- a/packages/builder/src/pages/builder/_layout.svelte +++ b/packages/builder/src/pages/builder/_layout.svelte @@ -4,44 +4,73 @@ import { onMount } from "svelte" let loaded = false + // don't react to these + let cloud = $admin.cloud + let shouldRedirect = !cloud || $admin.disableAccountPortal $: multiTenancyEnabled = $admin.multiTenancy $: hasAdminUser = $admin?.checklist?.adminUser?.checked $: tenantSet = $auth.tenantSet $: 
cloud = $admin.cloud + $: user = $auth.user + + const validateTenantId = async () => { + // set the tenant from the url in the cloud + const tenantId = window.location.host.split(".")[0] + + if (!tenantId.includes("localhost:")) { + // user doesn't have permission to access this tenant - kick them out + if (user?.tenantId !== tenantId) { + await auth.logout() + await auth.setOrganisation(null) + } else { + await auth.setOrganisation(tenantId) + } + } + } onMount(async () => { await auth.checkAuth() await admin.init() + + if (cloud && multiTenancyEnabled) { + await validateTenantId() + } + loaded = true }) $: { // We should never see the org or admin user creation screens in the cloud - if (!cloud) { - const apiReady = $admin.loaded && $auth.loaded - // if tenant is not set go to it - if (loaded && apiReady && multiTenancyEnabled && !tenantSet) { - $redirect("./auth/org") - } - // Force creation of an admin user if one doesn't exist - else if (loaded && apiReady && !hasAdminUser) { - $redirect("./admin") - } - } - } - // Redirect to log in at any time if the user isn't authenticated - $: { + const apiReady = $admin.loaded && $auth.loaded + // if tenant is not set go to it if ( + loaded && + shouldRedirect && + apiReady && + multiTenancyEnabled && + !tenantSet + ) { + $redirect("./auth/org") + } + // Force creation of an admin user if one doesn't exist + else if (loaded && shouldRedirect && apiReady && !hasAdminUser) { + $redirect("./admin") + } + // Redirect to log in at any time if the user isn't authenticated + else if ( loaded && (hasAdminUser || cloud) && !$auth.user && !$isActive("./auth") && - !$isActive("./invite") + !$isActive("./invite") && + !$isActive("./admin") ) { const returnUrl = encodeURIComponent(window.location.pathname) $redirect("./auth?", { returnUrl }) - } else if ($auth?.user?.forceResetPassword) { + } + // check if password reset required for user + else if ($auth.user?.forceResetPassword) { $redirect("./auth/reset") } } diff --git a/packages/builder/src/pages/builder/admin/_components/ImportAppsModal.svelte b/packages/builder/src/pages/builder/admin/_components/ImportAppsModal.svelte new file mode 100644 index 0000000000..633147e910 --- /dev/null +++ b/packages/builder/src/pages/builder/admin/_components/ImportAppsModal.svelte @@ -0,0 +1,50 @@ + + + + Please upload the file that was exported from your Cloud environment to get + started + { + value.file = e.detail?.[0] + }} + /> + diff --git a/packages/builder/src/pages/builder/admin/index.svelte b/packages/builder/src/pages/builder/admin/index.svelte index 4d7e39db81..f3a8d62d30 100644 --- a/packages/builder/src/pages/builder/admin/index.svelte +++ b/packages/builder/src/pages/builder/admin/index.svelte @@ -7,18 +7,22 @@ Input, Body, ActionButton, + Modal, } from "@budibase/bbui" import { goto } from "@roxi/routify" import api from "builderStore/api" import { admin, auth } from "stores/portal" import PasswordRepeatInput from "components/common/users/PasswordRepeatInput.svelte" + import ImportAppsModal from "./_components/ImportAppsModal.svelte" import Logo from "assets/bb-emblem.svg" let adminUser = {} let error + let modal $: tenantId = $auth.tenantId $: multiTenancyEnabled = $admin.multiTenancy + $: cloud = $admin.cloud async function save() { try { @@ -38,6 +42,9 @@ } + + +
@@ -66,6 +73,15 @@ > Change organisation + {:else if !cloud} + { + modal.show() + }} + > + Import from cloud + {/if} diff --git a/packages/builder/src/pages/builder/app/[application]/data/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/data/_layout.svelte index 5202bd45f2..f321a2c422 100644 --- a/packages/builder/src/pages/builder/app/[application]/data/_layout.svelte +++ b/packages/builder/src/pages/builder/app/[application]/data/_layout.svelte @@ -1,12 +1,14 @@ + diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/_layout.svelte index 13f8719594..f48be08fd6 100644 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/_layout.svelte +++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/_layout.svelte @@ -2,7 +2,7 @@ import { params } from "@roxi/routify" import { datasources } from "stores/backend" - if ($params.selectedDatasource) { + if ($params.selectedDatasource && !$params.query) { const datasource = $datasources.list.find( m => m._id === $params.selectedDatasource ) diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/_layout.svelte index d05aa882ad..4fa864ce7a 100644 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/_layout.svelte +++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/_layout.svelte @@ -1,14 +1 @@ - - diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/_layout.svelte new file mode 100644 index 0000000000..ed271aae34 --- /dev/null +++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/_layout.svelte @@ -0,0 +1,7 @@ + + + diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/_layout.svelte deleted file mode 100644 index 14f6303e5f..0000000000 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/_layout.svelte +++ /dev/null @@ -1,13 +0,0 @@ - - - diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/index.svelte deleted file mode 100644 index a68c0dc651..0000000000 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/index.svelte +++ /dev/null @@ -1,16 +0,0 @@ - - -{#if $database?._id && $tables?.selected?.name} - -{:else}Create your first table to start building{/if} - - diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/[selectedRow]/[selectedField]/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/[selectedRow]/[selectedField]/index.svelte deleted file mode 100644 index eddb5ab598..0000000000 --- 
a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/[selectedRow]/[selectedField]/index.svelte +++ /dev/null @@ -1,10 +0,0 @@ - - - diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/[selectedRow]/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/[selectedRow]/index.svelte deleted file mode 100644 index 8e195ddb12..0000000000 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/[selectedRow]/index.svelte +++ /dev/null @@ -1,6 +0,0 @@ - - - diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/index.svelte deleted file mode 100644 index 7d081b6976..0000000000 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/[selectedTable]/relationship/index.svelte +++ /dev/null @@ -1,6 +0,0 @@ - - - diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/_layout.svelte deleted file mode 100644 index f957355c5c..0000000000 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/_layout.svelte +++ /dev/null @@ -1,19 +0,0 @@ - - - diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/index.svelte deleted file mode 100644 index 6d61614145..0000000000 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/table/index.svelte +++ /dev/null @@ -1,21 +0,0 @@ - - -{#if $tables.list.length === 0} - Create your first table to start building -{:else}Select a table to edit{/if} - - diff --git a/packages/builder/src/pages/builder/app/[application]/data/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/index.svelte index 64f95b491d..c27ee7b342 100644 --- a/packages/builder/src/pages/builder/app/[application]/data/index.svelte +++ b/packages/builder/src/pages/builder/app/[application]/data/index.svelte @@ -1,6 +1,22 @@ + diff --git a/packages/builder/src/pages/builder/auth/_layout.svelte b/packages/builder/src/pages/builder/auth/_layout.svelte index ce4e6015da..a76ddfc329 100644 --- a/packages/builder/src/pages/builder/auth/_layout.svelte +++ b/packages/builder/src/pages/builder/auth/_layout.svelte @@ -1,21 +1,23 @@ {#if !$auth.user || $auth.user.forceResetPassword} diff --git a/packages/builder/src/pages/builder/auth/login.svelte b/packages/builder/src/pages/builder/auth/login.svelte index 783e5a4903..f9f2b34578 100644 --- a/packages/builder/src/pages/builder/auth/login.svelte +++ b/packages/builder/src/pages/builder/auth/login.svelte @@ -29,6 +29,7 @@ username, password, }) + if ($auth?.user?.forceResetPassword) { $goto("./reset") } else { diff --git a/packages/builder/src/pages/builder/auth/org.svelte b/packages/builder/src/pages/builder/auth/org.svelte index fea8831935..1e6b58dbe2 100644 --- a/packages/builder/src/pages/builder/auth/org.svelte +++ b/packages/builder/src/pages/builder/auth/org.svelte @@ 
-9,6 +9,7 @@ let tenantId = get(auth).tenantSet ? get(auth).tenantId : "" $: multiTenancyEnabled = $admin.multiTenancy $: cloud = $admin.cloud + $: disableAccountPortal = $admin.disableAccountPortal async function setOrg() { if (tenantId == null || tenantId === "") { @@ -26,7 +27,7 @@ onMount(async () => { await auth.checkQueryString() - if (!multiTenancyEnabled || cloud) { + if (!multiTenancyEnabled || (cloud && !disableAccountPortal)) { $goto("../") } else { admin.unload() diff --git a/packages/builder/src/pages/builder/index.svelte b/packages/builder/src/pages/builder/index.svelte index fba581a046..fcaa7fc55b 100644 --- a/packages/builder/src/pages/builder/index.svelte +++ b/packages/builder/src/pages/builder/index.svelte @@ -5,11 +5,9 @@ auth.checkQueryString() $: { - if (!$auth.user) { - $redirect(`./auth`) - } else if ($auth.user.builder?.global) { + if ($auth.user?.builder?.global) { $redirect(`./portal`) - } else { + } else if ($auth.user) { $redirect(`./apps`) } } diff --git a/packages/builder/src/pages/builder/portal/apps/index.svelte b/packages/builder/src/pages/builder/portal/apps/index.svelte index a18ec6a8bd..995337da0a 100644 --- a/packages/builder/src/pages/builder/portal/apps/index.svelte +++ b/packages/builder/src/pages/builder/portal/apps/index.svelte @@ -15,8 +15,7 @@ } from "@budibase/bbui" import CreateAppModal from "components/start/CreateAppModal.svelte" import UpdateAppModal from "components/start/UpdateAppModal.svelte" - import api, { del } from "builderStore/api" - import analytics from "analytics" + import { del } from "builderStore/api" import { onMount } from "svelte" import { apps, auth, admin } from "stores/portal" import download from "downloadjs" @@ -36,6 +35,7 @@ let unpublishModal let creatingApp = false let loaded = false + let cloud = $admin.cloud $: enrichedApps = enrichApps($apps, $auth.user, sortBy) @@ -66,19 +66,20 @@ } } - const checkKeys = async () => { - const response = await api.get(`/api/keys/`) - const keys = await response.json() - if (keys.userId) { - analytics.identify(keys.userId) - } - } - const initiateAppCreation = () => { creationModal.show() creatingApp = true } + const initiateAppsExport = () => { + try { + download(`/api/cloud/export`) + notifications.success("Apps exported successfully") + } catch (err) { + notifications.error(`Error exporting apps: ${err}`) + } + } + const initiateAppImport = () => { template = { fromFile: true } creationModal.show() @@ -188,7 +189,6 @@ } onMount(async () => { - checkKeys() await apps.load() loaded = true }) @@ -200,6 +200,9 @@
Apps + {#if cloud} + + {/if} diff --git a/packages/builder/src/pages/builder/portal/manage/auth/index.svelte b/packages/builder/src/pages/builder/portal/manage/auth/index.svelte index 48d9da18f9..c2445e14ae 100644 --- a/packages/builder/src/pages/builder/portal/manage/auth/index.svelte +++ b/packages/builder/src/pages/builder/portal/manage/auth/index.svelte @@ -23,6 +23,7 @@ import api from "builderStore/api" import { organisation, auth, admin } from "stores/portal" import { uuid } from "builderStore/uuid" + import analytics, { Events } from "analytics" $: tenantId = $auth.tenantId $: multiTenancyEnabled = $admin.multiTenancy @@ -209,6 +210,7 @@ providers[res.type]._id = res._id }) notifications.success(`Settings saved.`) + analytics.captureEvent(Events.SSO.SAVED) }) .catch(err => { notifications.error(`Failed to update auth settings. ${err}`) diff --git a/packages/builder/src/pages/builder/portal/manage/email/index.svelte b/packages/builder/src/pages/builder/portal/manage/email/index.svelte index 76d98ed545..5a78623b81 100644 --- a/packages/builder/src/pages/builder/portal/manage/email/index.svelte +++ b/packages/builder/src/pages/builder/portal/manage/email/index.svelte @@ -16,6 +16,7 @@ import { email } from "stores/portal" import api from "builderStore/api" import { cloneDeep } from "lodash/fp" + import analytics, { Events } from "analytics" const ConfigTypes = { SMTP: "smtp", @@ -69,6 +70,7 @@ smtpConfig._rev = json._rev smtpConfig._id = json._id notifications.success(`Settings saved.`) + analytics.captureEvent(Events.SMTP.SAVED) } } diff --git a/packages/builder/src/pages/builder/portal/manage/users/_components/AddUserModal.svelte b/packages/builder/src/pages/builder/portal/manage/users/_components/AddUserModal.svelte index 9504f73b68..25a69af1c8 100644 --- a/packages/builder/src/pages/builder/portal/manage/users/_components/AddUserModal.svelte +++ b/packages/builder/src/pages/builder/portal/manage/users/_components/AddUserModal.svelte @@ -10,6 +10,7 @@ } from "@budibase/bbui" import { createValidationStore, emailValidator } from "helpers/validation" import { users } from "stores/portal" + import analytics, { Events } from "analytics" export let disabled @@ -25,6 +26,7 @@ notifications.error(res.message) } else { notifications.success(res.message) + analytics.captureEvent(Events.USER.INVITE, { type: selected }) } } diff --git a/packages/builder/src/pages/builder/portal/settings/organisation.svelte b/packages/builder/src/pages/builder/portal/settings/organisation.svelte index be8b60e6e7..a182103c4e 100644 --- a/packages/builder/src/pages/builder/portal/settings/organisation.svelte +++ b/packages/builder/src/pages/builder/portal/settings/organisation.svelte @@ -7,13 +7,11 @@ Divider, Label, Input, - Toggle, Dropzone, notifications, } from "@budibase/bbui" - import { auth, organisation } from "stores/portal" + import { auth, organisation, admin } from "stores/portal" import { post } from "builderStore/api" - import analytics from "analytics" import { writable } from "svelte/store" import { redirect } from "@roxi/routify" @@ -25,7 +23,6 @@ } const values = writable({ - analytics: !analytics.disabled(), company: $organisation.company, platformUrl: $organisation.platformUrl, logo: $organisation.logoUrl @@ -48,13 +45,6 @@ async function saveConfig() { loading = true - // Set analytics preference - if ($values.analytics) { - analytics.optIn() - } else { - analytics.optOut() - } - // Upload logo if required if ($values.logo && !$values.logo.url) { await uploadLogo($values.logo) @@ -118,34 
+108,22 @@
[organisation.svelte template hunk — the Svelte markup was mangled in this capture. What survives shows the old platform settings block ("Platform" / "Here you can set up general platform settings.") on removed lines, and a new {#if !$admin.cloud} guard introduced on added lines around the re-added platform settings; the {/if} closing that guard appears at the start of the next diff line.]
{/if} diff --git a/packages/builder/src/stores/backend/datasources.js b/packages/builder/src/stores/backend/datasources.js index 5c6ed3f2cb..7c74074e03 100644 --- a/packages/builder/src/stores/backend/datasources.js +++ b/packages/builder/src/stores/backend/datasources.js @@ -1,4 +1,4 @@ -import { writable } from "svelte/store" +import { writable, get } from "svelte/store" import { queries, tables, views } from "./" import api from "../../builderStore/api" @@ -8,7 +8,8 @@ export const INITIAL_DATASOURCE_VALUES = { } export function createDatasourcesStore() { - const { subscribe, update, set } = writable(INITIAL_DATASOURCE_VALUES) + const store = writable(INITIAL_DATASOURCE_VALUES) + const { subscribe, update, set } = store return { subscribe, @@ -21,7 +22,15 @@ export function createDatasourcesStore() { fetch: async () => { const response = await api.get(`/api/datasources`) const json = await response.json() - update(state => ({ ...state, list: json, selected: null })) + + // Clear selected if it no longer exists, otherwise keep it + const selected = get(store).selected + let nextSelected = null + if (selected && json.find(source => source._id === selected)) { + nextSelected = selected + } + + update(state => ({ ...state, list: json, selected: nextSelected })) return json }, select: async datasourceId => { @@ -58,7 +67,7 @@ export function createDatasourcesStore() { }) return json }, - save: async datasource => { + save: async (datasource, fetchSchema = false) => { let response if (datasource._id) { response = await api.put( @@ -66,7 +75,10 @@ export function createDatasourcesStore() { datasource ) } else { - response = await api.post("/api/datasources", datasource) + response = await api.post("/api/datasources", { + datasource: datasource, + fetchSchema, + }) } const json = await response.json() diff --git a/packages/builder/src/stores/backend/queries.js b/packages/builder/src/stores/backend/queries.js index 2eeae29b9d..020a0c9420 100644 --- a/packages/builder/src/stores/backend/queries.js +++ b/packages/builder/src/stores/backend/queries.js @@ -1,5 +1,5 @@ import { writable, get } from "svelte/store" -import { datasources, integrations, tables } from "./" +import { datasources, integrations, tables, views } from "./" import api from "builderStore/api" export function createQueriesStore() { @@ -55,10 +55,9 @@ export function createQueriesStore() { }, select: query => { update(state => ({ ...state, selected: query._id })) - tables.update(state => ({ - ...state, - selected: null, - })) + views.unselect() + tables.unselect() + datasources.unselect() }, unselect: () => { update(state => ({ ...state, selected: null })) diff --git a/packages/builder/src/stores/backend/tables.js b/packages/builder/src/stores/backend/tables.js index e0b614a63e..161877f660 100644 --- a/packages/builder/src/stores/backend/tables.js +++ b/packages/builder/src/stores/backend/tables.js @@ -95,7 +95,13 @@ export function createTablesStore() { selected: {}, })) }, - saveField: ({ originalName, field, primaryDisplay = false, indexes }) => { + saveField: async ({ + originalName, + field, + primaryDisplay = false, + indexes, + }) => { + let promise update(state => { // delete the original if renaming // need to handle if the column had no name, empty string @@ -126,9 +132,12 @@ export function createTablesStore() { ...state.draft.schema, [field.name]: cloneDeep(field), } - save(state.draft) + promise = save(state.draft) return state }) + if (promise) { + await promise + } }, deleteField: field => { update(state => { diff 
--git a/packages/builder/src/stores/backend/views.js b/packages/builder/src/stores/backend/views.js index 0b15d18fa5..14c7bf92a4 100644 --- a/packages/builder/src/stores/backend/views.js +++ b/packages/builder/src/stores/backend/views.js @@ -16,6 +16,7 @@ export function createViewsStore() { ...state, selected: view, })) + tables.unselect() queries.unselect() datasources.unselect() }, diff --git a/packages/builder/src/stores/portal/admin.js b/packages/builder/src/stores/portal/admin.js index 44ff63a082..ebe8294060 100644 --- a/packages/builder/src/stores/portal/admin.js +++ b/packages/builder/src/stores/portal/admin.js @@ -7,6 +7,7 @@ export function createAdminStore() { loaded: false, multiTenancy: false, cloud: false, + disableAccountPortal: false, accountPortalUrl: "", onboardingProgress: 0, checklist: { @@ -47,12 +48,14 @@ export function createAdminStore() { async function getEnvironment() { let multiTenancyEnabled = false let cloud = false + let disableAccountPortal = false let accountPortalUrl = "" try { const response = await api.get(`/api/system/environment`) const json = await response.json() multiTenancyEnabled = json.multiTenancy cloud = json.cloud + disableAccountPortal = json.disableAccountPortal accountPortalUrl = json.accountPortalUrl } catch (err) { // just let it stay disabled @@ -60,6 +63,7 @@ export function createAdminStore() { admin.update(store => { store.multiTenancy = multiTenancyEnabled store.cloud = cloud + store.disableAccountPortal = disableAccountPortal store.accountPortalUrl = accountPortalUrl return store }) diff --git a/packages/builder/src/stores/portal/auth.js b/packages/builder/src/stores/portal/auth.js index fe8f87cfb2..f522095473 100644 --- a/packages/builder/src/stores/portal/auth.js +++ b/packages/builder/src/stores/portal/auth.js @@ -1,6 +1,7 @@ import { derived, writable, get } from "svelte/store" import api from "../../builderStore/api" import { admin } from "stores/portal" +import analytics from "analytics" export function createAuthStore() { const auth = writable({ @@ -49,6 +50,19 @@ export function createAuthStore() { } return store }) + + if (user) { + analytics.activate().then(() => { + analytics.identify(user._id, user) + analytics.showChat({ + email: user.email, + created_at: user.createdAt || Date.now(), + name: user.name, + user_id: user._id, + tenant: user.tenantId, + }) + }) + } } async function setOrganisation(tenantId) { @@ -66,6 +80,7 @@ export function createAuthStore() { return { subscribe: store.subscribe, + setOrganisation: setOrganisation, checkQueryString: async () => { const urlParams = new URLSearchParams(window.location.search) if (urlParams.has("tenantId")) { diff --git a/packages/builder/vite.config.js b/packages/builder/vite.config.js index d8b8dbba1d..12b45e7cf8 100644 --- a/packages/builder/vite.config.js +++ b/packages/builder/vite.config.js @@ -22,6 +22,9 @@ export default ({ mode }) => { isProduction ? 
"production" : "development" ), "process.env.POSTHOG_TOKEN": JSON.stringify(process.env.POSTHOG_TOKEN), + "process.env.INTERCOM_TOKEN": JSON.stringify( + process.env.INTERCOM_TOKEN + ), "process.env.POSTHOG_URL": JSON.stringify(process.env.POSTHOG_URL), "process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN), }), diff --git a/packages/builder/yarn.lock b/packages/builder/yarn.lock index 5257ba0c37..443d00680b 100644 --- a/packages/builder/yarn.lock +++ b/packages/builder/yarn.lock @@ -5582,9 +5582,9 @@ tmp@~0.2.1: rimraf "^3.0.0" tmpl@1.0.x: - version "1.0.4" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" - integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-fast-properties@^2.0.0: version "2.0.0" diff --git a/packages/cli/package.json b/packages/cli/package.json index b956d1d27b..55bc2bb3de 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/cli", - "version": "0.9.125-alpha.17", + "version": "0.9.146-alpha.5", "description": "Budibase CLI, for developers, self hosting and migrations.", "main": "src/index.js", "bin": { diff --git a/packages/client/manifest.json b/packages/client/manifest.json index 7bef9c2e4b..2e64b1fb4c 100644 --- a/packages/client/manifest.json +++ b/packages/client/manifest.json @@ -2389,6 +2389,7 @@ "icon": "Data", "illegalChildren": ["section"], "hasChildren": true, + "actions": ["RefreshDatasource"], "settings": [ { "type": "dataSource", diff --git a/packages/client/package.json b/packages/client/package.json index 217797bf56..aebaf3f903 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/client", - "version": "0.9.125-alpha.17", + "version": "0.9.146-alpha.5", "license": "MPL-2.0", "module": "dist/budibase-client.js", "main": "dist/budibase-client.js", @@ -19,8 +19,9 @@ "dev:builder": "rollup -cw" }, "dependencies": { - "@budibase/bbui": "^0.9.125-alpha.17", - "@budibase/string-templates": "^0.9.125-alpha.17", + "@budibase/bbui": "^0.9.146-alpha.5", + "@budibase/standard-components": "^0.9.139", + "@budibase/string-templates": "^0.9.146-alpha.5", "regexparam": "^1.3.0", "shortid": "^2.2.15", "svelte-spa-router": "^3.0.5" diff --git a/packages/client/rollup.config.js b/packages/client/rollup.config.js index f404f93c4c..a814303069 100644 --- a/packages/client/rollup.config.js +++ b/packages/client/rollup.config.js @@ -58,6 +58,10 @@ export default { find: "sdk", replacement: path.resolve("./src/sdk"), }, + { + find: "builder", + replacement: path.resolve("../builder"), + }, ], }), svelte({ diff --git a/packages/client/src/components/ClientApp.svelte b/packages/client/src/components/ClientApp.svelte index fb9117832f..7d231b3762 100644 --- a/packages/client/src/components/ClientApp.svelte +++ b/packages/client/src/components/ClientApp.svelte @@ -24,7 +24,7 @@ import HoverIndicator from "components/preview/HoverIndicator.svelte" import CustomThemeWrapper from "./CustomThemeWrapper.svelte" import DNDHandler from "components/preview/DNDHandler.svelte" - import ErrorSVG from "../../../builder/assets/error.svg" + import ErrorSVG from "builder/assets/error.svg" // Provide contexts setContext("sdk", SDK) diff --git a/packages/client/src/components/app/DataProvider.svelte 
b/packages/client/src/components/app/DataProvider.svelte index bdc9001445..991c41b77d 100644 --- a/packages/client/src/components/app/DataProvider.svelte +++ b/packages/client/src/components/app/DataProvider.svelte @@ -6,7 +6,7 @@ luceneQuery, luceneSort, luceneLimit, - } from "utils/lucene" + } from "builder/src/helpers/lucene" import Placeholder from "./Placeholder.svelte" export let dataSource diff --git a/packages/client/src/stores/state.js b/packages/client/src/stores/state.js index cb20149de8..ce977c4333 100644 --- a/packages/client/src/stores/state.js +++ b/packages/client/src/stores/state.js @@ -1,5 +1,5 @@ import { writable, get, derived } from "svelte/store" -import { localStorageStore } from "../../../builder/src/builderStore/store/localStorage" +import { localStorageStore } from "builder/src/builderStore/store/localStorage" import { appStore } from "./app" const createStateStore = () => { diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js index aeefe6163c..11aa033c1d 100644 --- a/packages/client/src/utils/buttonActions.js +++ b/packages/client/src/utils/buttonActions.js @@ -88,7 +88,7 @@ const validateFormHandler = async (action, context) => { ) } -const refreshDatasourceHandler = async (action, context) => { +const refreshDataProviderHandler = async (action, context) => { return await executeActionHandler( context, action.parameters.componentId, @@ -139,7 +139,7 @@ const handlerMap = { ["Execute Query"]: queryExecutionHandler, ["Trigger Automation"]: triggerAutomationHandler, ["Validate Form"]: validateFormHandler, - ["Refresh Datasource"]: refreshDatasourceHandler, + ["Refresh Data Provider"]: refreshDataProviderHandler, ["Log Out"]: logoutHandler, ["Clear Form"]: clearFormHandler, ["Close Screen Modal"]: closeScreenModalHandler, diff --git a/packages/client/src/utils/conditions.js b/packages/client/src/utils/conditions.js index 964a63d3fd..2791fa169e 100644 --- a/packages/client/src/utils/conditions.js +++ b/packages/client/src/utils/conditions.js @@ -1,4 +1,4 @@ -import { buildLuceneQuery, luceneQuery } from "./lucene" +import { buildLuceneQuery, luceneQuery } from "builder/src/helpers/lucene" export const getActiveConditions = conditions => { if (!conditions?.length) { diff --git a/packages/client/src/utils/lucene.js b/packages/client/src/utils/lucene.js deleted file mode 100644 index 03baa751cc..0000000000 --- a/packages/client/src/utils/lucene.js +++ /dev/null @@ -1,179 +0,0 @@ -/** - * Builds a lucene JSON query from the filter structure generated in the builder - * @param filter the builder filter structure - */ -export const buildLuceneQuery = filter => { - let query = { - string: {}, - fuzzy: {}, - range: {}, - equal: {}, - notEqual: {}, - empty: {}, - notEmpty: {}, - contains: {}, - notContains: {}, - } - if (Array.isArray(filter)) { - filter.forEach(expression => { - let { operator, field, type, value } = expression - // Parse all values into correct types - if (type === "datetime" && value) { - value = new Date(value).toISOString() - } - if (type === "number") { - value = parseFloat(value) - } - if (type === "boolean") { - value = `${value}`?.toLowerCase() === "true" - } - if (operator.startsWith("range")) { - if (!query.range[field]) { - query.range[field] = { - low: - type === "number" - ? Number.MIN_SAFE_INTEGER - : "0000-00-00T00:00:00.000Z", - high: - type === "number" - ? 
Number.MAX_SAFE_INTEGER - : "9999-00-00T00:00:00.000Z", - } - } - if (operator === "rangeLow" && value != null && value !== "") { - query.range[field].low = value - } else if (operator === "rangeHigh" && value != null && value !== "") { - query.range[field].high = value - } - } else if (query[operator]) { - if (type === "boolean") { - // Transform boolean filters to cope with null. - // "equals false" needs to be "not equals true" - // "not equals false" needs to be "equals true" - if (operator === "equal" && value === false) { - query.notEqual[field] = true - } else if (operator === "notEqual" && value === false) { - query.equal[field] = true - } else { - query[operator][field] = value - } - } else { - query[operator][field] = value - } - } - }) - } - - return query -} - -/** - * Performs a client-side lucene search on an array of data - * @param docs the data - * @param query the JSON lucene query - */ -export const luceneQuery = (docs, query) => { - if (!query) { - return docs - } - - // Iterates over a set of filters and evaluates a fail function against a doc - const match = (type, failFn) => doc => { - const filters = Object.entries(query[type] || {}) - for (let i = 0; i < filters.length; i++) { - if (failFn(filters[i][0], filters[i][1], doc)) { - return false - } - } - return true - } - - // Process a string match (fails if the value does not start with the string) - const stringMatch = match("string", (key, value, doc) => { - return !doc[key] || !doc[key].startsWith(value) - }) - - // Process a fuzzy match (treat the same as starts with when running locally) - const fuzzyMatch = match("fuzzy", (key, value, doc) => { - return !doc[key] || !doc[key].startsWith(value) - }) - - // Process a range match - const rangeMatch = match("range", (key, value, doc) => { - return !doc[key] || doc[key] < value.low || doc[key] > value.high - }) - - // Process an equal match (fails if the value is different) - const equalMatch = match("equal", (key, value, doc) => { - return value != null && value !== "" && doc[key] !== value - }) - - // Process a not-equal match (fails if the value is the same) - const notEqualMatch = match("notEqual", (key, value, doc) => { - return value != null && value !== "" && doc[key] === value - }) - - // Process an empty match (fails if the value is not empty) - const emptyMatch = match("empty", (key, value, doc) => { - return doc[key] != null && doc[key] !== "" - }) - - // Process a not-empty match (fails is the value is empty) - const notEmptyMatch = match("notEmpty", (key, value, doc) => { - return doc[key] == null || doc[key] === "" - }) - - // Match a document against all criteria - const docMatch = doc => { - return ( - stringMatch(doc) && - fuzzyMatch(doc) && - rangeMatch(doc) && - equalMatch(doc) && - notEqualMatch(doc) && - emptyMatch(doc) && - notEmptyMatch(doc) - ) - } - - // Process all docs - return docs.filter(docMatch) -} - -/** - * Performs a client-side sort from the equivalent server-side lucene sort - * parameters. - * @param docs the data - * @param sort the sort column - * @param sortOrder the sort order ("ascending" or "descending") - * @param sortType the type of sort ("string" or "number") - */ -export const luceneSort = (docs, sort, sortOrder, sortType = "string") => { - if (!sort || !sortOrder || !sortType) { - return docs - } - const parse = sortType === "string" ? 
x => `${x}` : x => parseFloat(x) - return docs.slice().sort((a, b) => { - const colA = parse(a[sort]) - const colB = parse(b[sort]) - if (sortOrder === "Descending") { - return colA > colB ? -1 : 1 - } else { - return colA > colB ? 1 : -1 - } - }) -} - -/** - * Limits the specified docs to the specified number of rows from the equivalent - * server-side lucene limit parameters. - * @param docs the data - * @param limit the number of docs to limit to - */ -export const luceneLimit = (docs, limit) => { - const numLimit = parseFloat(limit) - if (isNaN(numLimit)) { - return docs - } - return docs.slice(0, numLimit) -} diff --git a/packages/client/yarn.lock b/packages/client/yarn.lock index 1e2c654b21..bdfb4fb699 100644 --- a/packages/client/yarn.lock +++ b/packages/client/yarn.lock @@ -28,10 +28,59 @@ chalk "^2.0.0" js-tokens "^4.0.0" -"@budibase/bbui@^0.9.125-alpha.17": - version "0.9.133" - resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.133.tgz#91a2fb24abaaf91d2cb1e00eb51c493c1290f9ad" - integrity sha512-xbMmc/hee1QRNW7TrbGUBmLr1hMHXqUDA6rdl9N2PGfHFuFWbqlD8PWYanHmLevVet+CjkuKGPSbBghFK2pQyQ== +"@budibase/bbui@^0.9.139": + version "0.9.142" + resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.142.tgz#7edbda7967c9e5dfc96e5be5231656e5aab8d0e3" + integrity sha512-m2YlqqH87T4RwqD/oGhH6twHIgvFv4oUMEhKpkgLsbxjXVLVD0OOF7WqjpDnSa4khVQaixjdkI/Jiw2qhBUSaA== + dependencies: + "@adobe/spectrum-css-workflow-icons" "^1.2.1" + "@spectrum-css/actionbutton" "^1.0.1" + "@spectrum-css/actiongroup" "^1.0.1" + "@spectrum-css/avatar" "^3.0.2" + "@spectrum-css/button" "^3.0.1" + "@spectrum-css/buttongroup" "^3.0.2" + "@spectrum-css/checkbox" "^3.0.2" + "@spectrum-css/dialog" "^3.0.1" + "@spectrum-css/divider" "^1.0.3" + "@spectrum-css/dropzone" "^3.0.2" + "@spectrum-css/fieldgroup" "^3.0.2" + "@spectrum-css/fieldlabel" "^3.0.1" + "@spectrum-css/icon" "^3.0.1" + "@spectrum-css/illustratedmessage" "^3.0.2" + "@spectrum-css/inputgroup" "^3.0.2" + "@spectrum-css/label" "^2.0.10" + "@spectrum-css/link" "^3.1.1" + "@spectrum-css/menu" "^3.0.1" + "@spectrum-css/modal" "^3.0.1" + "@spectrum-css/pagination" "^3.0.3" + "@spectrum-css/picker" "^1.0.1" + "@spectrum-css/popover" "^3.0.1" + "@spectrum-css/progressbar" "^1.0.2" + "@spectrum-css/progresscircle" "^1.0.2" + "@spectrum-css/radio" "^3.0.2" + "@spectrum-css/search" "^3.0.2" + "@spectrum-css/sidenav" "^3.0.2" + "@spectrum-css/statuslight" "^3.0.2" + "@spectrum-css/stepper" "^3.0.3" + "@spectrum-css/switch" "^1.0.2" + "@spectrum-css/table" "^3.0.1" + "@spectrum-css/tabs" "^3.0.1" + "@spectrum-css/tags" "^3.0.2" + "@spectrum-css/textfield" "^3.0.1" + "@spectrum-css/toast" "^3.0.1" + "@spectrum-css/tooltip" "^3.0.3" + "@spectrum-css/treeview" "^3.0.2" + "@spectrum-css/typography" "^3.0.1" + "@spectrum-css/underlay" "^2.0.9" + "@spectrum-css/vars" "^3.0.1" + dayjs "^1.10.4" + svelte-flatpickr "^3.1.0" + svelte-portal "^1.0.0" + +"@budibase/bbui@^0.9.146-alpha.3": + version "0.9.146" + resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.146.tgz#7689b2c0f148321e62969181e3f6549f03dd3e78" + integrity sha512-Mq0oMyaN18Dg5e0IPtPXSGmu/TS4B74gW+l2ypJDNTzSRm934DOAPghDgkb53rFNZhsovCYjixJZmesUcv2o3g== dependencies: "@adobe/spectrum-css-workflow-icons" "^1.2.1" "@spectrum-css/actionbutton" "^1.0.1" @@ -105,10 +154,28 @@ to-gfm-code-block "^0.1.1" year "^0.2.1" -"@budibase/string-templates@^0.9.125-alpha.17": - version "0.9.133" - resolved 
"https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.133.tgz#221d81e080dc4485dcffa989d16e2bbed39f9055" - integrity sha512-SMHcSPwHYdAqol9YCcMoYawp5/ETr9TqGZCUsL+hUUq+LritPwu/miQ++SVvRTQbOR7Mker0S9LO3H8mwYkW8w== +"@budibase/standard-components@^0.9.139": + version "0.9.139" + resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.139.tgz#cf8e2b759ae863e469e50272b3ca87f2827e66e3" + integrity sha512-Av0u9Eq2jerjhG6Atta+c0mOQGgE5K0QI3cm+8s/3Vki6/PXkO1YL5Alo3BOn9ayQAVZ/xp4rtZPuN/rzRibHw== + dependencies: + "@budibase/bbui" "^0.9.139" + "@spectrum-css/button" "^3.0.3" + "@spectrum-css/card" "^3.0.3" + "@spectrum-css/divider" "^1.0.3" + "@spectrum-css/link" "^3.1.3" + "@spectrum-css/page" "^3.0.1" + "@spectrum-css/typography" "^3.0.2" + "@spectrum-css/vars" "^3.0.1" + apexcharts "^3.22.1" + dayjs "^1.10.5" + svelte-apexcharts "^1.0.2" + svelte-flatpickr "^3.1.0" + +"@budibase/string-templates@^0.9.146-alpha.3": + version "0.9.146" + resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.146.tgz#85249c7a8777a5f0c280af6f6d0e3d3ff0bf20b5" + integrity sha512-4f91SVUaTKseB+j7ycWbP54XiqiFZ6bZvcKgzsg1mLF+VVJ1/ALUsLvCRaj6SlcSHrhhALiGVR1z18KOyBWoKw== dependencies: "@budibase/handlebars-helpers" "^0.11.4" dayjs "^1.10.4" @@ -2169,9 +2236,9 @@ is-extglob@^2.1.1: integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-glob@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" diff --git a/packages/server/package.json b/packages/server/package.json index 57d1391d5d..ac62454ba9 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/server", "email": "hi@budibase.com", - "version": "0.9.125-alpha.17", + "version": "0.9.146-alpha.5", "description": "Budibase Web Server", "main": "src/index.js", "repository": { @@ -23,6 +23,7 @@ "format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write", "lint": "eslint --fix src/", "lint:fix": "yarn run format && yarn run lint", + "initialise": "node scripts/initialise.js", "multi:enable": "node scripts/multiTenancy.js enable", "multi:disable": "node scripts/multiTenancy.js disable", "selfhost:enable": "node scripts/selfhost.js enable", @@ -49,8 +50,7 @@ "!src/automations/tests/**/*", "!src/utilities/fileProcessor.js", "!src/utilities/fileSystem/**/*", - "!src/utilities/redis.js", - "!src/api/controllers/row/internalSearch.js" + "!src/utilities/redis.js" ], "coverageReporters": [ "lcov", @@ -64,9 +64,9 @@ "author": "Budibase", "license": "AGPL-3.0-or-later", "dependencies": { - "@budibase/auth": "^0.9.125-alpha.17", - "@budibase/client": "^0.9.125-alpha.17", - "@budibase/string-templates": "^0.9.125-alpha.17", + "@budibase/auth": "^0.9.146-alpha.5", + "@budibase/client": "^0.9.146-alpha.5", + "@budibase/string-templates": "^0.9.146-alpha.5", "@elastic/elasticsearch": "7.10.0", "@koa/router": "8.0.0", "@sendgrid/mail": "7.1.1", @@ -96,14 +96,16 @@ "koa-session": "5.12.0", "koa-static": "5.0.0", "lodash": "4.17.21", + "memorystream": "^0.3.1", "mongodb": "3.6.3", 
"mssql": "6.2.3", - "mysql": "^2.18.1", + "mysql": "2.18.1", "node-fetch": "2.6.0", "open": "7.3.0", "pg": "8.5.1", "pino-pretty": "4.0.0", "pouchdb": "7.2.1", + "pouchdb-adapter-memory": "^7.2.1", "pouchdb-all-dbs": "1.0.2", "pouchdb-find": "^7.2.2", "pouchdb-replication-stream": "1.2.9", @@ -118,6 +120,7 @@ "devDependencies": { "@babel/core": "^7.14.3", "@babel/preset-env": "^7.14.4", + "@budibase/standard-components": "^0.9.139", "@jest/test-sequencer": "^24.8.0", "@types/bull": "^3.15.1", "@types/jest": "^26.0.23", @@ -132,7 +135,6 @@ "express": "^4.17.1", "jest": "^27.0.5", "nodemon": "^2.0.4", - "pouchdb-adapter-memory": "^7.2.1", "prettier": "^2.3.1", "rimraf": "^3.0.2", "supertest": "^4.0.2", diff --git a/packages/server/scripts/dev/manage.js b/packages/server/scripts/dev/manage.js index 2557f88adf..bd91056f84 100644 --- a/packages/server/scripts/dev/manage.js +++ b/packages/server/scripts/dev/manage.js @@ -37,7 +37,7 @@ async function init() { const envFileJson = { PORT: 4001, MINIO_URL: "http://localhost:10000/", - COUCH_DB_URL: "http://@localhost:10000/db/", + COUCH_DB_URL: "http://budibase:budibase@localhost:10000/db/", REDIS_URL: "localhost:6379", WORKER_URL: "http://localhost:4002", INTERNAL_API_KEY: "budibase", @@ -48,6 +48,7 @@ async function init() { COUCH_DB_PASSWORD: "budibase", COUCH_DB_USER: "budibase", SELF_HOSTED: 1, + DISABLE_ACCOUNT_PORTAL: "", MULTI_TENANCY: "", } let envFile = "" diff --git a/packages/server/scripts/integrations/pg-json/docker-compose.yml b/packages/server/scripts/integrations/pg-json/docker-compose.yml new file mode 100644 index 0000000000..6bc307a86d --- /dev/null +++ b/packages/server/scripts/integrations/pg-json/docker-compose.yml @@ -0,0 +1,28 @@ +version: "3.8" +services: + db: + container_name: postgres-json + image: postgres + restart: always + environment: + POSTGRES_USER: root + POSTGRES_PASSWORD: root + POSTGRES_DB: main + ports: + - "5432:5432" + volumes: + #- pg_data:/var/lib/postgresql/data/ + - ./init.sql:/docker-entrypoint-initdb.d/init.sql + + pgadmin: + container_name: pgadmin-json + image: dpage/pgadmin4 + restart: always + environment: + PGADMIN_DEFAULT_EMAIL: root@root.com + PGADMIN_DEFAULT_PASSWORD: root + ports: + - "5050:80" + +#volumes: +# pg_data: diff --git a/packages/server/scripts/integrations/pg-json/init.sql b/packages/server/scripts/integrations/pg-json/init.sql new file mode 100644 index 0000000000..06a5b4901d --- /dev/null +++ b/packages/server/scripts/integrations/pg-json/init.sql @@ -0,0 +1,22 @@ +SELECT 'CREATE DATABASE main' +WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec +CREATE TABLE jsonTable ( + id character varying(32), + data jsonb, + text text +); + +INSERT INTO jsonTable (id, data) VALUES ('1', '{"id": 1, "age": 1, "name": "Mike", "newline": "this is text with a\n newline in it"}'); + +CREATE VIEW jsonView AS SELECT + x.id, + x.age, + x.name, + x.newline +FROM + jsonTable c, + LATERAL jsonb_to_record(c.data) x (id character varying(32), + age BIGINT, + name TEXT, + newline TEXT + ); diff --git a/packages/server/scripts/integrations/pg-json/reset.sh b/packages/server/scripts/integrations/pg-json/reset.sh new file mode 100755 index 0000000000..32778bd11f --- /dev/null +++ b/packages/server/scripts/integrations/pg-json/reset.sh @@ -0,0 +1,3 @@ +#!/bin/bash +docker-compose down +docker volume prune -f diff --git a/packages/server/scripts/integrations/postgres/docker-compose.yml b/packages/server/scripts/integrations/postgres/docker-compose.yml index e2bba9f38e..4dfcb0e1ad 100644 
--- a/packages/server/scripts/integrations/postgres/docker-compose.yml +++ b/packages/server/scripts/integrations/postgres/docker-compose.yml @@ -15,7 +15,7 @@ services: - ./init.sql:/docker-entrypoint-initdb.d/init.sql pgadmin: - container_name: pgadmin + container_name: pgadmin-pg image: dpage/pgadmin4 restart: always environment: diff --git a/packages/server/scripts/integrations/service-vehicles/docker-compose.yml b/packages/server/scripts/integrations/service-vehicles/docker-compose.yml new file mode 100644 index 0000000000..7473e540db --- /dev/null +++ b/packages/server/scripts/integrations/service-vehicles/docker-compose.yml @@ -0,0 +1,28 @@ +version: "3.8" +services: + db: + container_name: postgres-vehicle + image: postgres + restart: always + environment: + POSTGRES_USER: root + POSTGRES_PASSWORD: root + POSTGRES_DB: main + ports: + - "5432:5432" + volumes: + #- pg_data:/var/lib/postgresql/data/ + - ./init.sql:/docker-entrypoint-initdb.d/init.sql + + pgadmin: + container_name: pgadmin + image: dpage/pgadmin4 + restart: always + environment: + PGADMIN_DEFAULT_EMAIL: root@root.com + PGADMIN_DEFAULT_PASSWORD: root + ports: + - "5050:80" + +#volumes: +# pg_data: diff --git a/packages/server/scripts/integrations/service-vehicles/init.sql b/packages/server/scripts/integrations/service-vehicles/init.sql new file mode 100644 index 0000000000..3e0485313e --- /dev/null +++ b/packages/server/scripts/integrations/service-vehicles/init.sql @@ -0,0 +1,52 @@ +SELECT 'CREATE DATABASE main' +WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec +CREATE TABLE Vehicles ( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ), + Registration text COLLATE pg_catalog."default", + Make text COLLATE pg_catalog."default", + Model text COLLATE pg_catalog."default", + Colour text COLLATE pg_catalog."default", + Year smallint, + CONSTRAINT Vehicles_pkey PRIMARY KEY (id) +); + +CREATE TABLE ServiceLog ( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ), + Description text COLLATE pg_catalog."default", + VehicleId bigint, + ServiceDate timestamp without time zone, + Category text COLLATE pg_catalog."default", + Mileage bigint, + CONSTRAINT ServiceLog_pkey PRIMARY KEY (id), + CONSTRAINT vehicle_foreign_key FOREIGN KEY (VehicleId) + REFERENCES Vehicles (id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +INSERT INTO Vehicles (Registration, Make, Model, Colour, Year) +VALUES ('FAZ 9837','Volkswagen','Polo','White',2002); +INSERT INTO Vehicles (Registration, Make, Model, Colour, Year) +VALUES ('JHI 8827','BMW','M3','Black',2013); +INSERT INTO Vehicles (Registration, Make, Model, Colour, Year) +VALUES ('D903PI','Volvo','XC40','Grey',2014); +INSERT INTO Vehicles (Registration, Make, Model, Colour, Year) +VALUES ('YFI002','Volkswagen','Golf','Dark Blue',2018); +INSERT INTO Vehicles (Registration, Make, Model, Colour, Year) +VALUES ('HGT5677','Skoda','Octavia','Graphite',2009); +INSERT INTO Vehicles (Registration, Make, Model, Colour, Year) +VALUES ('PPF9276','Skoda','Octavia','Graphite',2021); +INSERT INTO Vehicles (Registration, Make, Model, Colour, Year) +VALUES ('J893FT','Toyota','Corolla','Red',2015); +INSERT INTO Vehicles (Registration, Make, Model, Colour, Year) +VALUES ('MJK776','Honda','HR-V','Silver',2015); + + +INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage) +VALUES ('Change front brakes', 1, '2021-05-04', 
'Brakes', 20667); +INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage) +VALUES ('Tyres - full set', 1, '2021-05-04', 'Brakes', 20667); +INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage) +VALUES ('Engine tune up', 2, '2021-07-14', 'Brakes', 50889); +INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage) +VALUES ('Replace transmission', 3, '2021-09-26', 'Transmission', 98002); diff --git a/packages/server/scripts/integrations/service-vehicles/reset.sh b/packages/server/scripts/integrations/service-vehicles/reset.sh new file mode 100755 index 0000000000..32778bd11f --- /dev/null +++ b/packages/server/scripts/integrations/service-vehicles/reset.sh @@ -0,0 +1,3 @@ +#!/bin/bash +docker-compose down +docker volume prune -f diff --git a/packages/server/scripts/jestSetup.js b/packages/server/scripts/jestSetup.js index 0cff339fc2..1f3551bf5f 100644 --- a/packages/server/scripts/jestSetup.js +++ b/packages/server/scripts/jestSetup.js @@ -1,6 +1,7 @@ const { tmpdir } = require("os") const env = require("../src/environment") +env._set("SELF_HOSTED", "1") env._set("NODE_ENV", "jest") env._set("JWT_SECRET", "test-jwtsecret") env._set("CLIENT_ID", "test-client-id") diff --git a/packages/server/src/api/controllers/analytics.js b/packages/server/src/api/controllers/analytics.js index d6e1a9ce5b..eb64bc87b9 100644 --- a/packages/server/src/api/controllers/analytics.js +++ b/packages/server/src/api/controllers/analytics.js @@ -2,6 +2,6 @@ const env = require("../../environment") exports.isEnabled = async function (ctx) { ctx.body = { - enabled: env.ENABLE_ANALYTICS === "true", + enabled: !env.SELF_HOSTED && env.ENABLE_ANALYTICS === "true", } } diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js index da0014c5f8..3a0b0f8ed8 100644 --- a/packages/server/src/api/controllers/application.js +++ b/packages/server/src/api/controllers/application.js @@ -31,7 +31,7 @@ const { getDeployedApps, removeAppFromUserRoles, } = require("../../utilities/workerRequests") -const { clientLibraryPath } = require("../../utilities") +const { clientLibraryPath, stringToReadStream } = require("../../utilities") const { getAllLocks } = require("../../utilities/redis") const { updateClientLibrary, @@ -114,8 +114,13 @@ async function createInstance(template) { // replicate the template data to the instance DB // this is currently very hard to test, downloading and importing template files - /* istanbul ignore next */ - if (template && template.useTemplate === "true") { + if (template && template.templateString) { + const { ok } = await db.load(stringToReadStream(template.templateString)) + if (!ok) { + throw "Error loading database dump from memory." + } + } else if (template && template.useTemplate === "true") { + /* istanbul ignore next */ const { ok } = await db.load(await getTemplateStream(template)) if (!ok) { throw "Error loading database dump from template." 
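Aside, not part of the diff: the templateString path above relies on a stringToReadStream helper from the server utilities whose implementation is not visible in this capture. The following is only a minimal sketch of the idea, assuming Node's built-in stream module and that db.load is the stream loader PouchDB gains from the replication-stream plugin already listed in the server dependencies:

    const { Readable } = require("stream")

    // Sketch only — wrap an in-memory DB dump string as a readable stream so the
    // same db.load(...) call used for template file streams can consume it.
    const stringToReadStream = string =>
      new Readable({
        read() {
          this.push(string) // emit the whole dump as a single chunk
          this.push(null) // signal end of stream
        },
      })

    // hypothetical usage mirroring the controller code above:
    // const { ok } = await db.load(stringToReadStream(template.templateString))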
@@ -191,10 +196,11 @@ exports.fetchAppPackage = async function (ctx) { } exports.create = async function (ctx) { - const { useTemplate, templateKey } = ctx.request.body + const { useTemplate, templateKey, templateString } = ctx.request.body const instanceConfig = { useTemplate, key: templateKey, + templateString, } if (ctx.request.files && ctx.request.files.templateFile) { instanceConfig.file = ctx.request.files.templateFile @@ -230,7 +236,12 @@ exports.create = async function (ctx) { const response = await db.put(newApplication, { force: true }) newApplication._rev = response.rev - await createEmptyAppPackage(ctx, newApplication) + // Only create the default home screens and layout if we aren't importing + // an app + if (useTemplate !== "true") { + await createEmptyAppPackage(ctx, newApplication) + } + /* istanbul ignore next */ if (!env.isTest()) { await createApp(appId) diff --git a/packages/server/src/api/controllers/cloud.js b/packages/server/src/api/controllers/cloud.js new file mode 100644 index 0000000000..aac79bb9dd --- /dev/null +++ b/packages/server/src/api/controllers/cloud.js @@ -0,0 +1,92 @@ +const env = require("../../environment") +const { getAllApps } = require("@budibase/auth/db") +const CouchDB = require("../../db") +const { + exportDB, + sendTempFile, + readFileSync, +} = require("../../utilities/fileSystem") +const { stringToReadStream } = require("../../utilities") +const { getGlobalDBName, getGlobalDB } = require("@budibase/auth/tenancy") +const { create } = require("./application") +const { getDocParams, DocumentTypes, isDevAppID } = require("../../db/utils") + +async function createApp(appName, appImport) { + const ctx = { + request: { + body: { + templateString: appImport, + name: appName, + }, + }, + } + return create(ctx) +} + +exports.exportApps = async ctx => { + if (env.SELF_HOSTED || !env.MULTI_TENANCY) { + ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.") + } + const apps = await getAllApps(CouchDB, { all: true }) + const globalDBString = await exportDB(getGlobalDBName()) + let allDBs = { + global: globalDBString, + } + for (let app of apps) { + // only export the dev apps as they will be the latest, the user can republish the apps + // in their self hosted environment + if (isDevAppID(app._id)) { + allDBs[app.name] = await exportDB(app._id) + } + } + const filename = `cloud-export-${new Date().getTime()}.txt` + ctx.attachment(filename) + ctx.body = sendTempFile(JSON.stringify(allDBs)) +} + +async function getAllDocType(db, docType) { + const response = await db.allDocs( + getDocParams(docType, null, { + include_docs: true, + }) + ) + return response.rows.map(row => row.doc) +} + +exports.importApps = async ctx => { + if (!env.SELF_HOSTED || env.MULTI_TENANCY) { + ctx.throw(400, "Importing only allowed in self hosted environments.") + } + const apps = await getAllApps(CouchDB, { all: true }) + if ( + apps.length !== 0 || + !ctx.request.files || + !ctx.request.files.importFile + ) { + ctx.throw( + 400, + "Import file is required and environment must be fresh to import apps." 
+ ) + } + const importFile = ctx.request.files.importFile + const importString = readFileSync(importFile.path) + const dbs = JSON.parse(importString) + const globalDbImport = dbs.global + // remove from the list of apps + delete dbs.global + const globalDb = getGlobalDB() + // load the global db first + await globalDb.load(stringToReadStream(globalDbImport)) + for (let [appName, appImport] of Object.entries(dbs)) { + await createApp(appName, appImport) + } + // once apps are created clean up the global db + let users = await getAllDocType(globalDb, DocumentTypes.USER) + for (let user of users) { + delete user.tenantId + } + await globalDb.bulkDocs(users) + ctx.body = { + message: "Apps successfully imported.", + } +} diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js index 38b6e68932..2ff7c7f9b8 100644 --- a/packages/server/src/api/controllers/datasource.js +++ b/packages/server/src/api/controllers/datasource.js @@ -41,17 +41,12 @@ exports.fetch = async function (ctx) { exports.buildSchemaFromDb = async function (ctx) { const db = new CouchDB(ctx.appId) - const datasourceId = ctx.params.datasourceId - const datasource = await db.get(datasourceId) + const datasource = await db.get(ctx.params.datasourceId) - const Connector = integrations[datasource.source] + const tables = await buildSchemaHelper(datasource) + datasource.entities = tables - // Connect to the DB and build the schema - const connector = new Connector(datasource.config) - await connector.buildSchema(datasource._id, datasource.entities) - datasource.entities = connector.tables - - const response = await db.post(datasource) + const response = await db.put(datasource) datasource._rev = response.rev ctx.body = datasource @@ -81,15 +76,21 @@ exports.update = async function (ctx) { exports.save = async function (ctx) { const db = new CouchDB(ctx.appId) - const plus = ctx.request.body.plus + const plus = ctx.request.body.datasource.plus + const fetchSchema = ctx.request.body.fetchSchema const datasource = { _id: generateDatasourceID({ plus }), type: plus ? 
DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE, - ...ctx.request.body, + ...ctx.request.body.datasource, } - const response = await db.post(datasource) + if (fetchSchema) { + let tables = await buildSchemaHelper(datasource) + datasource.entities = tables + } + + const response = await db.put(datasource) datasource._rev = response.rev // Drain connection pools when configuration is changed @@ -133,3 +134,14 @@ exports.query = async function (ctx) { ctx.throw(400, err) } } + +const buildSchemaHelper = async datasource => { + const Connector = integrations[datasource.source] + + // Connect to the DB and build the schema + const connector = new Connector(datasource.config) + await connector.buildSchema(datasource._id, datasource.entities) + datasource.entities = connector.tables + + return connector.tables +} diff --git a/packages/server/src/api/controllers/permission.js b/packages/server/src/api/controllers/permission.js index e269f8c41d..6c02663649 100644 --- a/packages/server/src/api/controllers/permission.js +++ b/packages/server/src/api/controllers/permission.js @@ -1,9 +1,4 @@ -const { - getBuiltinPermissions, - PermissionLevels, - isPermissionLevelHigherThanRead, - higherPermission, -} = require("@budibase/auth/permissions") +const { getBuiltinPermissions } = require("@budibase/auth/permissions") const { isBuiltin, getDBRoleID, @@ -16,6 +11,7 @@ const { CURRENTLY_SUPPORTED_LEVELS, getBasePermissions, } = require("../../utilities/security") +const { removeFromArray } = require("../../utilities") const PermissionUpdateType = { REMOVE: "remove", @@ -24,22 +20,6 @@ const PermissionUpdateType = { const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS -// quick function to perform a bit of weird logic, make sure fetch calls -// always say a write role also has read permission -function fetchLevelPerms(permissions, level, roleId) { - if (!permissions) { - permissions = {} - } - permissions[level] = roleId - if ( - isPermissionLevelHigherThanRead(level) && - !permissions[PermissionLevels.READ] - ) { - permissions[PermissionLevels.READ] = roleId - } - return permissions -} - // utility function to stop this repetition - permissions always stored under roles async function getAllDBRoles(db) { const body = await db.allDocs( @@ -74,23 +54,31 @@ async function updatePermissionOnRole( for (let role of dbRoles) { let updated = false const rolePermissions = role.permissions ? role.permissions : {} + // make sure its an array, also handle migrating + if ( + !rolePermissions[resourceId] || + !Array.isArray(rolePermissions[resourceId]) + ) { + rolePermissions[resourceId] = + typeof rolePermissions[resourceId] === "string" + ? 
[rolePermissions[resourceId]] + : [] + } // handle the removal/updating the role which has this permission first // the updating (role._id !== dbRoleId) is required because a resource/level can // only be permitted in a single role (this reduces hierarchy confusion and simplifies // the general UI for this, rather than needing to show everywhere it is used) if ( (role._id !== dbRoleId || remove) && - rolePermissions[resourceId] === level + rolePermissions[resourceId].indexOf(level) !== -1 ) { - delete rolePermissions[resourceId] + removeFromArray(rolePermissions[resourceId], level) updated = true } // handle the adding, we're on the correct role, at it to this if (!remove && role._id === dbRoleId) { - rolePermissions[resourceId] = higherPermission( - rolePermissions[resourceId], - level - ) + const set = new Set(rolePermissions[resourceId]) + rolePermissions[resourceId] = [...set.add(level)] updated = true } // handle the update, add it to bulk docs to perform at end @@ -127,12 +115,11 @@ exports.fetch = async function (ctx) { continue } const roleId = getExternalRoleID(role._id) - for (let [resource, level] of Object.entries(role.permissions)) { - permissions[resource] = fetchLevelPerms( - permissions[resource], - level, - roleId - ) + for (let [resource, levelArr] of Object.entries(role.permissions)) { + const levels = Array.isArray(levelArr) ? [levelArr] : levelArr + const perms = {} + levels.forEach(level => (perms[level] = roleId)) + permissions[resource] = perms } } // apply the base permissions @@ -157,12 +144,13 @@ exports.getResourcePerms = async function (ctx) { for (let level of SUPPORTED_LEVELS) { // update the various roleIds in the resource permissions for (let role of roles) { - if (role.permissions && role.permissions[resourceId] === level) { - permissions = fetchLevelPerms( - permissions, - level, - getExternalRoleID(role._id) - ) + const rolePerms = role.permissions + if ( + rolePerms && + (rolePerms[resourceId] === level || + rolePerms[resourceId].indexOf(level) !== -1) + ) { + permissions[level] = getExternalRoleID(role._id) } } } diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index eced518604..75c3e9b492 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -437,7 +437,11 @@ module External { for (let [colName, { isMany, rows, tableId }] of Object.entries( related )) { - const table = this.getTable(tableId) + const table: Table = this.getTable(tableId) + // if its not the foreign key skip it, nothing to do + if (table.primary && table.primary.indexOf(colName) !== -1) { + continue + } for (let row of rows) { const filters = buildFilters(generateIdForRow(row, table), {}, table) // safety check, if there are no filters on deletion bad things happen @@ -540,6 +544,9 @@ module External { extra: { idFilter: buildFilters(id || generateIdForRow(row, table), {}, table), }, + meta: { + table, + }, } // can't really use response right now const response = await makeExternalQuery(appId, json) diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.js index 2299a20580..76c8188523 100644 --- a/packages/server/src/api/controllers/row/internal.js +++ b/packages/server/src/api/controllers/row/internal.js @@ -10,12 +10,21 @@ const userController = require("../user") const { inputProcessing, outputProcessing, + processAutoColumn, } = 
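A sketch of the permission-format migration introduced above: a role's permission entry for a resource used to be a single level string, and is now an array of levels, with legacy documents normalised on the fly. This is a minimal standalone illustration of that normalisation and the set-based add, assuming a legacy value is either missing, a string, or already an array:

```js
// Normalise a role's permission entry for a resource to an array of levels,
// migrating the legacy string format ("write") to the new array format (["write"]).
function normaliseLevels(rolePermissions, resourceId) {
  const current = rolePermissions[resourceId]
  if (!current || !Array.isArray(current)) {
    rolePermissions[resourceId] = typeof current === "string" ? [current] : []
  }
  return rolePermissions[resourceId]
}

// Adding a level is a set union; removal in the controller uses removeFromArray (a splice).
function addLevel(rolePermissions, resourceId, level) {
  const levels = normaliseLevels(rolePermissions, resourceId)
  rolePermissions[resourceId] = [...new Set([...levels, level])]
}

// Example with a hypothetical legacy role document:
const role = { permissions: { ta_abc123: "write" } }
addLevel(role.permissions, "ta_abc123", "read")
console.log(role.permissions.ta_abc123) // ["write", "read"]
```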
require("../../../utilities/rowProcessor") const { FieldTypes } = require("../../../constants") const { isEqual } = require("lodash") const { validate, findRow } = require("./utils") const { fullSearch, paginatedSearch } = require("./internalSearch") const { getGlobalUsersFromMetadata } = require("../../../utilities/global") +const inMemoryViews = require("../../../db/inMemoryView") +const env = require("../../../environment") +const { + migrateToInMemoryView, + migrateToDesignView, + getFromDesignDoc, + getFromMemoryDoc, +} = require("../view/utils") const CALCULATION_TYPES = { SUM: "sum", @@ -25,17 +34,75 @@ const CALCULATION_TYPES = { async function storeResponse(ctx, db, row, oldTable, table) { row.type = "row" - const response = await db.put(row) // don't worry about rev, tables handle rev/lastID updates + // if another row has been written since processing this will + // handle the auto ID clash if (!isEqual(oldTable, table)) { - await db.put(table) + try { + await db.put(table) + } catch (err) { + if (err.status === 409) { + const updatedTable = await db.get(table._id) + let response = processAutoColumn(null, updatedTable, row, { + reprocessing: true, + }) + await db.put(response.table) + row = response.row + } else { + throw err + } + } } + const response = await db.put(row) row._rev = response.rev // process the row before return, to include relationships row = await outputProcessing(ctx, table, row, { squash: false }) return { row, table } } +// doesn't do the outputProcessing +async function getRawTableData(ctx, db, tableId) { + let rows + if (tableId === InternalTables.USER_METADATA) { + await userController.fetchMetadata(ctx) + rows = ctx.body + } else { + const response = await db.allDocs( + getRowParams(tableId, null, { + include_docs: true, + }) + ) + rows = response.rows.map(row => row.doc) + } + return rows +} + +async function getView(db, viewName) { + let mainGetter = env.SELF_HOSTED ? getFromDesignDoc : getFromMemoryDoc + let secondaryGetter = env.SELF_HOSTED ? getFromMemoryDoc : getFromDesignDoc + let migration = env.SELF_HOSTED ? migrateToDesignView : migrateToInMemoryView + let viewInfo, + migrate = false + try { + viewInfo = await mainGetter(db, viewName) + } catch (err) { + // check if it can be retrieved from design doc (needs migrated) + if (err.status !== 404) { + viewInfo = null + } else { + viewInfo = await secondaryGetter(db, viewName) + migrate = !!viewInfo + } + } + if (migrate) { + await migration(db, viewName) + } + if (!viewInfo) { + throw "View does not exist." + } + return viewInfo +} + exports.patch = async ctx => { const appId = ctx.appId const db = new CouchDB(appId) @@ -139,15 +206,18 @@ exports.fetchView = async ctx => { const db = new CouchDB(appId) const { calculation, group, field } = ctx.query - const designDoc = await db.get("_design/database") - const viewInfo = designDoc.views[viewName] - if (!viewInfo) { - throw "View does not exist." 
+ const viewInfo = await getView(db, viewName) + let response + if (env.SELF_HOSTED) { + response = await db.query(`database/${viewName}`, { + include_docs: !calculation, + group: !!group, + }) + } else { + const tableId = viewInfo.meta.tableId + const data = await getRawTableData(ctx, db, tableId) + response = await inMemoryViews.runView(viewInfo, calculation, group, data) } - const response = await db.query(`database/${viewName}`, { - include_docs: !calculation, - group: !!group, - }) let rows if (!calculation) { @@ -191,19 +261,9 @@ exports.fetch = async ctx => { const appId = ctx.appId const db = new CouchDB(appId) - let rows, - table = await db.get(ctx.params.tableId) - if (ctx.params.tableId === InternalTables.USER_METADATA) { - await userController.fetchMetadata(ctx) - rows = ctx.body - } else { - const response = await db.allDocs( - getRowParams(ctx.params.tableId, null, { - include_docs: true, - }) - ) - rows = response.rows.map(row => row.doc) - } + const tableId = ctx.params.tableId + let table = await db.get(tableId) + let rows = await getRawTableData(ctx, db, tableId) return outputProcessing(ctx, table, rows) } @@ -286,6 +346,11 @@ exports.bulkDestroy = async ctx => { } exports.search = async ctx => { + // Fetch the whole table when running in cypress, as search doesn't work + if (env.isCypress()) { + return { rows: await exports.fetch(ctx) } + } + const appId = ctx.appId const { tableId } = ctx.params const db = new CouchDB(appId) diff --git a/packages/server/src/api/controllers/row/utils.js b/packages/server/src/api/controllers/row/utils.js index cb9a5e166c..ca6c782713 100644 --- a/packages/server/src/api/controllers/row/utils.js +++ b/packages/server/src/api/controllers/row/utils.js @@ -5,6 +5,7 @@ const { InternalTables } = require("../../../db/utils") const userController = require("../user") const { FieldTypes } = require("../../../constants") const { integrations } = require("../../../integrations") +const { processStringSync } = require("@budibase/string-templates") validateJs.extend(validateJs.validators.datetime, { parse: function (value) { @@ -73,6 +74,11 @@ exports.validate = async ({ appId, tableId, row, table }) => { errors[fieldName] = "Field not in list" } }) + } else if (table.schema[fieldName].type === FieldTypes.FORMULA) { + res = validateJs.single( + processStringSync(table.schema[fieldName].formula, row), + constraints + ) } else { res = validateJs.single(row[fieldName], constraints) } diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js index 60b5167f66..c7b72cf1c8 100644 --- a/packages/server/src/api/controllers/table/index.js +++ b/packages/server/src/api/controllers/table/index.js @@ -145,7 +145,7 @@ exports.save = async function (ctx) { if (updatedRows && updatedRows.length !== 0) { await db.bulkDocs(updatedRows) } - const result = await db.post(tableToSave) + const result = await db.put(tableToSave) tableToSave._rev = result.rev tableToSave = await tableSaveFunctions.after(tableToSave) diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js index 154a9ba8f5..d263002da6 100644 --- a/packages/server/src/api/controllers/table/utils.js +++ b/packages/server/src/api/controllers/table/utils.js @@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => { // Populate the table with rows imported from CSV in a bulk update const data = await csvParser.transform(dataImport) + let finalData = [] for (let i = 
0; i < data.length; i++) { let row = data[i] row._id = generateRowID(table._id) row.tableId = table._id - const processed = inputProcessing(user, table, row) + const processed = inputProcessing(user, table, row, { + noAutoRelationships: true, + }) table = processed.table row = processed.row - // make sure link rows are up to date - row = await linkRows.updateLinks({ - appId, - eventType: linkRows.EventType.ROW_SAVE, - row, - tableId: row.tableId, - table, - }) - for (let [fieldName, schema] of Object.entries(table.schema)) { // check whether the options need to be updated for inclusion as part of the data import if ( @@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => { ] } } - data[i] = row + + // make sure link rows are up to date + finalData.push( + linkRows.updateLinks({ + appId, + eventType: linkRows.EventType.ROW_SAVE, + row, + tableId: row.tableId, + table, + }) + ) } - await db.bulkDocs(data) + await db.bulkDocs(await Promise.all(finalData)) let response = await db.put(table) table._rev = response._rev } diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js index 3d0f236fce..ecaee0f32f 100644 --- a/packages/server/src/api/controllers/view/index.js +++ b/packages/server/src/api/controllers/view/index.js @@ -2,127 +2,93 @@ const CouchDB = require("../../../db") const viewTemplate = require("./viewBuilder") const { apiFileReturn } = require("../../../utilities/fileSystem") const exporters = require("./exporters") +const { saveView, getView, getViews, deleteView } = require("./utils") const { fetchView } = require("../row") -const { ViewNames } = require("../../../db/utils") -const controller = { - fetch: async ctx => { - const db = new CouchDB(ctx.appId) - const designDoc = await db.get("_design/database") - const response = [] - - for (let name of Object.keys(designDoc.views)) { - // Only return custom views, not built ins - if (Object.values(ViewNames).indexOf(name) !== -1) { - continue - } - response.push({ - name, - ...designDoc.views[name], - }) - } - - ctx.body = response - }, - save: async ctx => { - const db = new CouchDB(ctx.appId) - const { originalName, ...viewToSave } = ctx.request.body - const designDoc = await db.get("_design/database") - const view = viewTemplate(viewToSave) - - if (!viewToSave.name) { - ctx.throw(400, "Cannot create view without a name") - } - - designDoc.views = { - ...designDoc.views, - [viewToSave.name]: view, - } - - // view has been renamed - if (originalName) { - delete designDoc.views[originalName] - } - - await db.put(designDoc) - - // add views to table document - const table = await db.get(ctx.request.body.tableId) - if (!table.views) table.views = {} - if (!view.meta.schema) { - view.meta.schema = table.schema - } - table.views[viewToSave.name] = view.meta - - if (originalName) { - delete table.views[originalName] - } - - await db.put(table) - - ctx.body = { - ...table.views[viewToSave.name], - name: viewToSave.name, - } - }, - destroy: async ctx => { - const db = new CouchDB(ctx.appId) - const designDoc = await db.get("_design/database") - const viewName = decodeURI(ctx.params.viewName) - const view = designDoc.views[viewName] - delete designDoc.views[viewName] - - await db.put(designDoc) - - const table = await db.get(view.meta.tableId) - delete table.views[viewName] - await db.put(table) - - ctx.body = view - }, - exportView: async ctx => { - const db = new CouchDB(ctx.appId) - const designDoc = await db.get("_design/database") - const 
viewName = decodeURI(ctx.query.view) - - const view = designDoc.views[viewName] - const format = ctx.query.format - if (!format) { - ctx.throw(400, "Format must be specified, either csv or json") - } - - if (view) { - ctx.params.viewName = viewName - // Fetch view rows - ctx.query = { - group: view.meta.groupBy, - calculation: view.meta.calculation, - stats: !!view.meta.field, - field: view.meta.field, - } - } else { - // table all_ view - /* istanbul ignore next */ - ctx.params.viewName = viewName - } - - await fetchView(ctx) - - let schema = view && view.meta && view.meta.schema - if (!schema) { - const tableId = ctx.params.tableId || view.meta.tableId - const table = await db.get(tableId) - schema = table.schema - } - - // Export part - let headers = Object.keys(schema) - const exporter = exporters[format] - const filename = `${viewName}.${format}` - // send down the file - ctx.attachment(filename) - ctx.body = apiFileReturn(exporter(headers, ctx.body)) - }, +exports.fetch = async ctx => { + const db = new CouchDB(ctx.appId) + ctx.body = await getViews(db) } -module.exports = controller +exports.save = async ctx => { + const db = new CouchDB(ctx.appId) + const { originalName, ...viewToSave } = ctx.request.body + const view = viewTemplate(viewToSave) + + if (!viewToSave.name) { + ctx.throw(400, "Cannot create view without a name") + } + + await saveView(db, originalName, viewToSave.name, view) + + // add views to table document + const table = await db.get(ctx.request.body.tableId) + if (!table.views) table.views = {} + if (!view.meta.schema) { + view.meta.schema = table.schema + } + table.views[viewToSave.name] = view.meta + if (originalName) { + delete table.views[originalName] + } + await db.put(table) + + ctx.body = { + ...table.views[viewToSave.name], + name: viewToSave.name, + } +} + +exports.destroy = async ctx => { + const db = new CouchDB(ctx.appId) + const viewName = decodeURI(ctx.params.viewName) + const view = await deleteView(db, viewName) + const table = await db.get(view.meta.tableId) + delete table.views[viewName] + await db.put(table) + + ctx.body = view +} + +exports.exportView = async ctx => { + const db = new CouchDB(ctx.appId) + const viewName = decodeURI(ctx.query.view) + const view = await getView(db, viewName) + + const format = ctx.query.format + if (!format) { + ctx.throw(400, "Format must be specified, either csv or json") + } + + if (view) { + ctx.params.viewName = viewName + // Fetch view rows + ctx.query = { + group: view.meta.groupBy, + calculation: view.meta.calculation, + stats: !!view.meta.field, + field: view.meta.field, + } + } else { + // table all_ view + /* istanbul ignore next */ + ctx.params.viewName = viewName + } + + await fetchView(ctx) + + let schema = view && view.meta && view.meta.schema + if (!schema) { + const tableId = ctx.params.tableId || view.meta.tableId + const table = await db.get(tableId) + schema = table.schema + } + + // Export part + let headers = Object.keys(schema) + const exporter = exporters[format] + const filename = `${viewName}.${format}` + // send down the file + ctx.attachment(filename) + ctx.body = apiFileReturn(exporter(headers, ctx.body)) +} diff --git a/packages/server/src/api/controllers/view/utils.js b/packages/server/src/api/controllers/view/utils.js new file mode 100644 index 0000000000..1f3b980882 --- /dev/null +++ b/packages/server/src/api/controllers/view/utils.js @@ -0,0 +1,136 @@ +const { + ViewNames, + generateMemoryViewID, + getMemoryViewParams, +} = require("../../../db/utils") +const env = 
require("../../../environment") + +exports.getView = async (db, viewName) => { + if (env.SELF_HOSTED) { + const designDoc = await db.get("_design/database") + return designDoc.views[viewName] + } else { + const viewDoc = await db.get(generateMemoryViewID(viewName)) + return viewDoc.view + } +} + +exports.getViews = async db => { + const response = [] + if (env.SELF_HOSTED) { + const designDoc = await db.get("_design/database") + for (let name of Object.keys(designDoc.views)) { + // Only return custom views, not built ins + if (Object.values(ViewNames).indexOf(name) !== -1) { + continue + } + response.push({ + name, + ...designDoc.views[name], + }) + } + } else { + const views = ( + await db.allDocs( + getMemoryViewParams({ + include_docs: true, + }) + ) + ).rows.map(row => row.doc) + for (let viewDoc of views) { + response.push({ + name: viewDoc.name, + ...viewDoc.view, + }) + } + } + return response +} + +exports.saveView = async (db, originalName, viewName, viewTemplate) => { + if (env.SELF_HOSTED) { + const designDoc = await db.get("_design/database") + designDoc.views = { + ...designDoc.views, + [viewName]: viewTemplate, + } + // view has been renamed + if (originalName) { + delete designDoc.views[originalName] + } + await db.put(designDoc) + } else { + const id = generateMemoryViewID(viewName) + const originalId = originalName ? generateMemoryViewID(originalName) : null + const viewDoc = { + _id: id, + view: viewTemplate, + name: viewName, + tableId: viewTemplate.meta.tableId, + } + try { + const old = await db.get(id) + if (originalId) { + const originalDoc = await db.get(originalId) + await db.remove(originalDoc._id, originalDoc._rev) + } + if (old && old._rev) { + viewDoc._rev = old._rev + } + } catch (err) { + // didn't exist, just skip + } + await db.put(viewDoc) + } +} + +exports.deleteView = async (db, viewName) => { + if (env.SELF_HOSTED) { + const designDoc = await db.get("_design/database") + const view = designDoc.views[viewName] + delete designDoc.views[viewName] + await db.put(designDoc) + return view + } else { + const id = generateMemoryViewID(viewName) + const viewDoc = await db.get(id) + await db.remove(viewDoc._id, viewDoc._rev) + return viewDoc.view + } +} + +exports.migrateToInMemoryView = async (db, viewName) => { + // delete the view initially + const designDoc = await db.get("_design/database") + const view = designDoc.views[viewName] + delete designDoc.views[viewName] + await db.put(designDoc) + await exports.saveView(db, null, viewName, view) +} + +exports.migrateToDesignView = async (db, viewName) => { + let view = await db.get(generateMemoryViewID(viewName)) + const designDoc = await db.get("_design/database") + designDoc.views[viewName] = view.view + await db.put(designDoc) + await db.remove(view._id, view._rev) +} + +exports.getFromDesignDoc = async (db, viewName) => { + const designDoc = await db.get("_design/database") + let view = designDoc.views[viewName] + if (view == null) { + throw { status: 404, message: "Unable to get view" } + } + return view +} + +exports.getFromMemoryDoc = async (db, viewName) => { + let view = await db.get(generateMemoryViewID(viewName)) + if (view) { + view = view.view + } else { + throw { status: 404, message: "Unable to get view" } + } + return view +} diff --git a/packages/server/src/api/routes/application.js b/packages/server/src/api/routes/application.js index c1d39acbd5..4d67a0f4f4 100644 --- a/packages/server/src/api/routes/application.js +++ b/packages/server/src/api/routes/application.js @@ -2,11 +2,12 @@ const 
Router = require("@koa/router") const controller = require("../controllers/application") const authorized = require("../../middleware/authorized") const { BUILDER } = require("@budibase/auth/permissions") +const usage = require("../../middleware/usageQuota") const router = Router() router - .post("/api/applications", authorized(BUILDER), controller.create) + .post("/api/applications", authorized(BUILDER), usage, controller.create) .get("/api/applications/:appId/definition", controller.fetchAppDefinition) .get("/api/applications", controller.fetch) .get("/api/applications/:appId/appPackage", controller.fetchAppPackage) @@ -21,6 +22,11 @@ router authorized(BUILDER), controller.revertClient ) - .delete("/api/applications/:appId", authorized(BUILDER), controller.delete) + .delete( + "/api/applications/:appId", + authorized(BUILDER), + usage, + controller.delete + ) module.exports = router diff --git a/packages/server/src/api/routes/cloud.js b/packages/server/src/api/routes/cloud.js new file mode 100644 index 0000000000..214473f43f --- /dev/null +++ b/packages/server/src/api/routes/cloud.js @@ -0,0 +1,13 @@ +const Router = require("@koa/router") +const controller = require("../controllers/cloud") +const authorized = require("../../middleware/authorized") +const { BUILDER } = require("@budibase/auth/permissions") + +const router = Router() + +router + .get("/api/cloud/export", authorized(BUILDER), controller.exportApps) + // has to be public, only run if apps don't exist + .post("/api/cloud/import", controller.importApps) + +module.exports = router diff --git a/packages/server/src/api/routes/index.js b/packages/server/src/api/routes/index.js index 2e1353df98..29d0cd42b4 100644 --- a/packages/server/src/api/routes/index.js +++ b/packages/server/src/api/routes/index.js @@ -24,6 +24,7 @@ const hostingRoutes = require("./hosting") const backupRoutes = require("./backup") const metadataRoutes = require("./metadata") const devRoutes = require("./dev") +const cloudRoutes = require("./cloud") exports.mainRoutes = [ authRoutes, @@ -49,6 +50,7 @@ exports.mainRoutes = [ backupRoutes, metadataRoutes, devRoutes, + cloudRoutes, // these need to be handled last as they still use /api/:tableId // this could be breaking as koa may recognise other routes as this tableRoutes, diff --git a/packages/server/src/api/routes/tests/datasource.spec.js b/packages/server/src/api/routes/tests/datasource.spec.js index 98a99717fd..b6d94f714d 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.js +++ b/packages/server/src/api/routes/tests/datasource.spec.js @@ -94,7 +94,8 @@ describe("/datasources", () => { .expect(200) // this is mock data, can't test it expect(res.body).toBeDefined() - expect(pg.queryMock).toHaveBeenCalledWith(`select "users"."name" as "users.name", "users"."age" as "users.age" from "users" where "users"."name" ilike $1 limit $2`, ["John%", 5000]) + const expSql = `select "users"."name" as "users.name", "users"."age" as "users.age" from (select * from "users" where "users"."name" ilike $1 limit $2) as "users"` + expect(pg.queryMock).toHaveBeenCalledWith(expSql, ["John%", 5000]) }) }) diff --git a/packages/server/src/api/routes/tests/query.spec.js b/packages/server/src/api/routes/tests/query.spec.js index eadd475ed4..716817509b 100644 --- a/packages/server/src/api/routes/tests/query.spec.js +++ b/packages/server/src/api/routes/tests/query.spec.js @@ -1,6 +1,7 @@ // mock out postgres for this jest.mock("pg") +const { findLastKey } = require("lodash/fp") const setup = require("./utilities") const { 
checkBuilderEndpoint } = require("./utilities/TestFunctions") const { basicQuery, basicDatasource } = setup.structures @@ -19,10 +20,10 @@ describe("/queries", () => { }) async function createInvalidIntegration() { - const datasource = await config.createDatasource({ - ...basicDatasource(), + const datasource = await config.createDatasource({datasource: { + ...basicDatasource().datasource, source: "INVALID_INTEGRATION", - }) + }}) const query = await config.createQuery() return { datasource, query } } @@ -183,11 +184,14 @@ describe("/queries", () => { }) it("should fail with invalid integration type", async () => { - const { query } = await createInvalidIntegration() + const { query, datasource } = await createInvalidIntegration() await request .post(`/api/queries/${query._id}`) .send({ + datasourceId: datasource._id, parameters: {}, + fields: {}, + queryVerb: "read", }) .set(config.defaultHeaders()) .expect(400) diff --git a/packages/server/src/api/routes/tests/role.spec.js b/packages/server/src/api/routes/tests/role.spec.js index ad42ef180a..d74a84b2b2 100644 --- a/packages/server/src/api/routes/tests/role.spec.js +++ b/packages/server/src/api/routes/tests/role.spec.js @@ -72,7 +72,7 @@ describe("/roles", () => { .expect(200) expect(res.body.length).toBeGreaterThan(0) const power = res.body.find(role => role._id === BUILTIN_ROLE_IDS.POWER) - expect(power.permissions[table._id]).toEqual("read") + expect(power.permissions[table._id]).toEqual(["read"]) }) }) diff --git a/packages/server/src/api/routes/tests/row.spec.js b/packages/server/src/api/routes/tests/row.spec.js index d089d7775d..01284552c5 100644 --- a/packages/server/src/api/routes/tests/row.spec.js +++ b/packages/server/src/api/routes/tests/row.spec.js @@ -317,7 +317,7 @@ describe("/rows", () => { await request .get(`/api/views/derp`) .set(config.defaultHeaders()) - .expect(400) + .expect(404) }) it("should be able to run on a view", async () => { @@ -394,4 +394,4 @@ describe("/rows", () => { }) }) }) -}) \ No newline at end of file +}) diff --git a/packages/server/src/api/routes/tests/view.spec.js b/packages/server/src/api/routes/tests/view.spec.js index 458da6e023..b1c5f655c6 100644 --- a/packages/server/src/api/routes/tests/view.spec.js +++ b/packages/server/src/api/routes/tests/view.spec.js @@ -205,7 +205,7 @@ describe("/views", () => { }) describe("exportView", () => { - it("should be able to delete a view", async () => { + it("should be able to export a view", async () => { await config.createTable(priceTable()) await config.createRow() const view = await config.createView() diff --git a/packages/server/src/api/routes/user.js b/packages/server/src/api/routes/user.js index b3b486fe45..d171870215 100644 --- a/packages/server/src/api/routes/user.js +++ b/packages/server/src/api/routes/user.js @@ -5,7 +5,6 @@ const { PermissionLevels, PermissionTypes, } = require("@budibase/auth/permissions") -const usage = require("../../middleware/usageQuota") const router = Router() @@ -28,13 +27,11 @@ router .post( "/api/users/metadata/self", authorized(PermissionTypes.USER, PermissionLevels.WRITE), - usage, controller.updateSelfMetadata ) .delete( "/api/users/metadata/:id", authorized(PermissionTypes.USER, PermissionLevels.WRITE), - usage, controller.destroyMetadata ) diff --git a/packages/server/src/api/routes/view.js b/packages/server/src/api/routes/view.js index 7d390805c6..b72fe1ac26 100644 --- a/packages/server/src/api/routes/view.js +++ b/packages/server/src/api/routes/view.js @@ -8,7 +8,6 @@ const { PermissionTypes, PermissionLevels, 
} = require("@budibase/auth/permissions") -const usage = require("../../middleware/usageQuota") const router = Router() @@ -25,9 +24,8 @@ router "/api/views/:viewName", paramResource("viewName"), authorized(BUILDER), - usage, viewController.destroy ) - .post("/api/views", authorized(BUILDER), usage, viewController.save) + .post("/api/views", authorized(BUILDER), viewController.save) module.exports = router diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js index 9706126438..47d0b4eb99 100644 --- a/packages/server/src/automations/steps/createRow.js +++ b/packages/server/src/automations/steps/createRow.js @@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row") const automationUtils = require("../automationUtils") const env = require("../../environment") const usage = require("../../utilities/usageQuota") +const { buildCtx } = require("./utils") exports.definition = { name: "Create Row", @@ -59,7 +60,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs, appId, apiKey, emitter }) { +exports.run = async function ({ inputs, appId, emitter }) { if (inputs.row == null || inputs.row.tableId == null) { return { success: false, @@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) { } } // have to clean up the row, remove the table from it - const ctx = { + const ctx = buildCtx(appId, emitter, { + body: inputs.row, params: { tableId: inputs.row.tableId, }, - request: { - body: inputs.row, - }, - appId, - eventEmitter: emitter, - } + }) try { inputs.row = await automationUtils.cleanUpRow( @@ -86,8 +83,8 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) { inputs.row.tableId, inputs.row ) - if (env.isProd()) { - await usage.update(apiKey, usage.Properties.ROW, 1) + if (env.USE_QUOTAS) { + await usage.update(usage.Properties.ROW, 1) } await rowController.save(ctx) return { diff --git a/packages/server/src/automations/steps/deleteRow.js b/packages/server/src/automations/steps/deleteRow.js index 26623d628b..225f00c5df 100644 --- a/packages/server/src/automations/steps/deleteRow.js +++ b/packages/server/src/automations/steps/deleteRow.js @@ -1,6 +1,7 @@ const rowController = require("../../api/controllers/row") const env = require("../../environment") const usage = require("../../utilities/usageQuota") +const { buildCtx } = require("./utils") exports.definition = { description: "Delete a row from your database", @@ -51,7 +52,7 @@ exports.definition = { }, } -exports.run = async function ({ inputs, appId, apiKey, emitter }) { +exports.run = async function ({ inputs, appId, emitter }) { if (inputs.id == null || inputs.revision == null) { return { success: false, @@ -60,23 +61,20 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) { }, } } - let ctx = { + + let ctx = buildCtx(appId, emitter, { + body: { + _id: inputs.id, + _rev: inputs.revision, + }, params: { tableId: inputs.tableId, }, - request: { - body: { - _id: inputs.id, - _rev: inputs.revision, - }, - }, - appId, - eventEmitter: emitter, - } + }) try { if (env.isProd()) { - await usage.update(apiKey, usage.Properties.ROW, -1) + await usage.update(usage.Properties.ROW, -1) } await rowController.destroy(ctx) return { diff --git a/packages/server/src/automations/steps/queryRows.js b/packages/server/src/automations/steps/queryRows.js index 64b757418e..3c4bb422a0 100644 --- a/packages/server/src/automations/steps/queryRows.js +++ b/packages/server/src/automations/steps/queryRows.js @@ 
-1,6 +1,7 @@ const rowController = require("../../api/controllers/row") const tableController = require("../../api/controllers/table") const { FieldTypes } = require("../../constants") +const { buildCtx } = require("./utils") const SortOrders = { ASCENDING: "ascending", @@ -70,12 +71,11 @@ exports.definition = { } async function getTable(appId, tableId) { - const ctx = { + const ctx = buildCtx(appId, null, { params: { id: tableId, }, - appId, - } + }) await tableController.find(ctx) return ctx.body } @@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) { sortType = fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING } - const ctx = { + const ctx = buildCtx(appId, null, { params: { tableId, }, - request: { - body: { - sortOrder, - sortType, - sort: sortColumn, - query: filters || {}, - limit, - }, + body: { + sortOrder, + sortType, + sort: sortColumn, + query: filters || {}, + limit, }, - appId, - } + }) try { await rowController.search(ctx) return { diff --git a/packages/server/src/automations/steps/sendSmtpEmail.js b/packages/server/src/automations/steps/sendSmtpEmail.js index 9e4b5a6a3c..07a3059215 100644 --- a/packages/server/src/automations/steps/sendSmtpEmail.js +++ b/packages/server/src/automations/steps/sendSmtpEmail.js @@ -53,7 +53,7 @@ exports.run = async function ({ inputs }) { contents = "
<h1>No content</h1>
<h1>No content</h1>
" } try { - let response = await sendSmtpEmail(to, from, subject, contents) + let response = await sendSmtpEmail(to, from, subject, contents, true) return { success: true, response, diff --git a/packages/server/src/automations/steps/updateRow.js b/packages/server/src/automations/steps/updateRow.js index ac5eb16fcd..94f77bc801 100644 --- a/packages/server/src/automations/steps/updateRow.js +++ b/packages/server/src/automations/steps/updateRow.js @@ -1,5 +1,6 @@ const rowController = require("../../api/controllers/row") const automationUtils = require("../automationUtils") +const { buildCtx } = require("./utils") exports.definition = { name: "Update Row", @@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) { } // have to clean up the row, remove the table from it - const ctx = { + const ctx = buildCtx(appId, emitter, { + body: { + ...inputs.row, + _id: inputs.rowId, + }, params: { rowId: inputs.rowId, }, - request: { - body: { - ...inputs.row, - _id: inputs.rowId, - }, - }, - appId, - eventEmitter: emitter, - } + }) try { inputs.row = await automationUtils.cleanUpRowById( diff --git a/packages/server/src/automations/tests/automation.spec.js b/packages/server/src/automations/tests/automation.spec.js index 83b7b81a75..9444995ca1 100644 --- a/packages/server/src/automations/tests/automation.spec.js +++ b/packages/server/src/automations/tests/automation.spec.js @@ -13,8 +13,6 @@ const { makePartial } = require("../../tests/utilities") const { cleanInputValues } = require("../automationUtils") const setup = require("./utilities") -usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" }) - describe("Run through some parts of the automations system", () => { let config = setup.getConfig() diff --git a/packages/server/src/automations/tests/createRow.spec.js b/packages/server/src/automations/tests/createRow.spec.js index 1004711d87..a04fc7aad4 100644 --- a/packages/server/src/automations/tests/createRow.spec.js +++ b/packages/server/src/automations/tests/createRow.spec.js @@ -46,7 +46,7 @@ describe("test the create row action", () => { await setup.runStep(setup.actions.CREATE_ROW.stepId, { row }) - expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1) + expect(usageQuota.update).toHaveBeenCalledWith("rows", 1) }) }) diff --git a/packages/server/src/automations/tests/deleteRow.spec.js b/packages/server/src/automations/tests/deleteRow.spec.js index a3d73d3bf6..21246f22d0 100644 --- a/packages/server/src/automations/tests/deleteRow.spec.js +++ b/packages/server/src/automations/tests/deleteRow.spec.js @@ -37,7 +37,7 @@ describe("test the delete row action", () => { it("check usage quota attempts", async () => { await setup.runInProd(async () => { await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs) - expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1) + expect(usageQuota.update).toHaveBeenCalledWith("rows", -1) }) }) diff --git a/packages/server/src/automations/thread.js b/packages/server/src/automations/thread.js index a3e81a2274..ef12494165 100644 --- a/packages/server/src/automations/thread.js +++ b/packages/server/src/automations/thread.js @@ -4,8 +4,10 @@ const AutomationEmitter = require("../events/AutomationEmitter") const { processObject } = require("@budibase/string-templates") const { DEFAULT_TENANT_ID } = require("@budibase/auth").constants const CouchDB = require("../db") -const { DocumentTypes } = require("../db/utils") +const { DocumentTypes, isDevAppID } = require("../db/utils") const { doInTenant } = 
require("@budibase/auth/tenancy") +const env = require("../environment") +const usage = require("../utilities/usageQuota") const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId @@ -80,7 +82,6 @@ class Orchestrator { return stepFn({ inputs: step.inputs, appId: this._appId, - apiKey: automation.apiKey, emitter: this._emitter, context: this._context, }) @@ -95,6 +96,11 @@ class Orchestrator { return err } } + + // Increment quota for automation runs + if (!env.SELF_HOSTED && !isDevAppID(this._appId)) { + usage.update(usage.Properties.AUTOMATION, 1) + } return this.executionOutput } } diff --git a/packages/server/src/db/inMemoryView.js b/packages/server/src/db/inMemoryView.js new file mode 100644 index 0000000000..892617e068 --- /dev/null +++ b/packages/server/src/db/inMemoryView.js @@ -0,0 +1,48 @@ +const PouchDB = require("pouchdb") +const memory = require("pouchdb-adapter-memory") +const newid = require("./newid") + +PouchDB.plugin(memory) +const Pouch = PouchDB.defaults({ + prefix: undefined, + adapter: "memory", +}) + +exports.runView = async (view, calculation, group, data) => { + // use a different ID each time for the DB, make sure they + // are always unique for each query, don't want overlap + // which could cause 409s + const db = new Pouch(newid()) + // write all the docs to the in memory Pouch (remove revs) + await db.bulkDocs( + data.map(row => ({ + ...row, + _rev: undefined, + })) + ) + let fn = (doc, emit) => emit(doc._id) + eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)")) + const queryFns = { + meta: view.meta, + map: fn, + } + if (view.reduce) { + queryFns.reduce = view.reduce + } + const response = await db.query(queryFns, { + include_docs: !calculation, + group: !!group, + }) + // need to fix the revs to be totally accurate + for (let row of response.rows) { + if (!row._rev || !row._id) { + continue + } + const found = data.find(possible => possible._id === row._id) + if (found) { + row._rev = found._rev + } + } + await db.destroy() + return response +} diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js index 67412e7e89..303cd085c1 100644 --- a/packages/server/src/db/linkedRows/index.js +++ b/packages/server/src/db/linkedRows/index.js @@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) { // create DBs const db = new CouchDB(appId) const linkedRowIds = links.map(link => link.id) - let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map( + const uniqueRowIds = [...new Set(linkedRowIds)] + let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map( row => row.doc ) + // convert the unique db rows back to a full list of linked rows + const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id)) // need to handle users as specific cases let [users, other] = partition(linked, linkRow => linkRow._id.startsWith(USER_METDATA_PREFIX) @@ -112,7 +115,7 @@ exports.updateLinks = async function (args) { let linkController = new LinkController(args) try { if ( - !(await linkController.doesTableHaveLinkedFields()) && + !(await linkController.doesTableHaveLinkedFields(table)) && (oldTable == null || !(await linkController.doesTableHaveLinkedFields(oldTable))) ) { diff --git a/packages/server/src/db/utils.js b/packages/server/src/db/utils.js index ec1c267fa2..17b19bba49 100644 --- a/packages/server/src/db/utils.js +++ b/packages/server/src/db/utils.js @@ -39,6 +39,7 @@ const DocumentTypes = { QUERY: "query", DEPLOYMENTS: "deployments", METADATA: 
"metadata", + MEM_VIEW: "view", } const ViewNames = { @@ -109,6 +110,8 @@ function getDocParams(docType, docId = null, otherProps = {}) { } } +exports.getDocParams = getDocParams + /** * Gets parameters for retrieving tables, this is a utility function for the getDocParams function. */ @@ -348,6 +351,14 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => { return getDocParams(DocumentTypes.METADATA, docId, otherProps) } +exports.generateMemoryViewID = viewName => { + return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}` +} + +exports.getMemoryViewParams = (otherProps = {}) => { + return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps) +} + /** * This can be used with the db.allDocs to get a list of IDs */ diff --git a/packages/server/src/definitions/datasource.ts b/packages/server/src/definitions/datasource.ts index 48fd24e1cf..2daef8eda7 100644 --- a/packages/server/src/definitions/datasource.ts +++ b/packages/server/src/definitions/datasource.ts @@ -1,3 +1,5 @@ +import { Table } from "./common" + export enum Operation { CREATE = "CREATE", READ = "READ", @@ -136,6 +138,9 @@ export interface QueryJson { sort?: SortJson paginate?: PaginationJson body?: object + meta?: { + table?: Table + } extra?: { idFilter?: SearchFilters } diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js index 9e029e440a..f528a78729 100644 --- a/packages/server/src/environment.js +++ b/packages/server/src/environment.js @@ -13,6 +13,10 @@ function isDev() { ) } +function isCypress() { + return process.env.NODE_ENV === "cypress" +} + let LOADED = false if (!LOADED && isDev() && !isTest()) { require("dotenv").config() @@ -40,6 +44,7 @@ module.exports = { NODE_ENV: process.env.NODE_ENV, JEST_WORKER_ID: process.env.JEST_WORKER_ID, BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT, + DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL, // minor SALT_ROUNDS: process.env.SALT_ROUNDS, LOGGER: process.env.LOGGER, @@ -61,8 +66,16 @@ module.exports = { module.exports[key] = value }, isTest, + isCypress, isDev, isProd: () => { return !isDev() }, } + +// convert any strings to numbers if required, like "0" would be true otherwise +for (let [key, value] of Object.entries(module.exports)) { + if (typeof value === "string" && !isNaN(parseInt(value))) { + module.exports[key] = parseInt(value) + } +} diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index b59bac5a5a..c5e9bdb0bb 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -1,7 +1,5 @@ import { Knex, knex } from "knex" const BASE_LIMIT = 5000 -// if requesting a single row then need to up the limit for the sake of joins -const SINGLE_ROW_LIMIT = 100 import { QueryJson, SearchFilters, @@ -146,46 +144,48 @@ function buildCreate( function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery { let { endpoint, resource, filters, sort, paginate, relationships } = json const tableName = endpoint.entityId - let query: KnexQuery = knex(tableName) // select all if not specified if (!resource) { resource = { fields: [] } } + let selectStatement: string | string[] = "*" // handle select if (resource.fields && resource.fields.length > 0) { // select the resources as the format "table.columnName" - this is what is provided // by the resource builder further up - query = query.select(resource.fields.map(field => `${field} as ${field}`)) - } else { - query = query.select("*") + selectStatement 
= resource.fields.map(field => `${field} as ${field}`) + } + let foundLimit = limit || BASE_LIMIT + // handle pagination + let foundOffset: number | null = null + if (paginate && paginate.page && paginate.limit) { + // @ts-ignore + const page = paginate.page <= 1 ? 0 : paginate.page - 1 + const offset = page * paginate.limit + foundLimit = paginate.limit + foundOffset = offset + } else if (paginate && paginate.limit) { + foundLimit = paginate.limit + } + // start building the query + let query: KnexQuery = knex(tableName).limit(foundLimit) + if (foundOffset) { + query = query.offset(foundOffset) } - // handle where - query = addFilters(tableName, query, filters) - // handle join - query = addRelationships(query, tableName, relationships) - // handle sorting if (sort) { for (let [key, value] of Object.entries(sort)) { const direction = value === SortDirection.ASCENDING ? "asc" : "desc" query = query.orderBy(key, direction) } } - let foundLimit = limit || BASE_LIMIT - // handle pagination - if (paginate && paginate.page && paginate.limit) { + query = addFilters(tableName, query, filters) + // @ts-ignore + let preQuery: KnexQuery = knex({ // @ts-ignore - const page = paginate.page <= 1 ? 0 : paginate.page - 1 - const offset = page * paginate.limit - foundLimit = paginate.limit - query = query.offset(offset) - } else if (paginate && paginate.limit) { - foundLimit = paginate.limit - } - if (foundLimit === 1) { - foundLimit = SINGLE_ROW_LIMIT - } - query = query.limit(foundLimit) - return query + [tableName]: query, + }).select(selectStatement) + // handle joins + return addRelationships(preQuery, tableName, relationships) } function buildUpdate( diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index 3ce21675d9..c17cca0745 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -12,7 +12,11 @@ import { getSqlQuery } from "./utils" module MySQLModule { const mysql = require("mysql") const Sql = require("./base/sql") - const { buildExternalTableId, convertType } = require("./utils") + const { + buildExternalTableId, + convertType, + copyExistingPropsOver, + } = require("./utils") const { FieldTypes } = require("../constants") interface MySQLConfig { @@ -104,7 +108,7 @@ module MySQLModule { client: any, query: SqlQuery, connect: boolean = true - ): Promise { + ): Promise { // Node MySQL is callback based, so we must wrap our call in a promise return new Promise((resolve, reject) => { if (connect) { @@ -194,18 +198,7 @@ module MySQLModule { } } - // add the existing relationships from the entities if they exist, to prevent them from being overridden - if (entities && entities[tableName]) { - const existingTableSchema = entities[tableName].schema - for (let key in existingTableSchema) { - if (!existingTableSchema.hasOwnProperty(key)) { - continue - } - if (existingTableSchema[key].type === "link") { - tables[tableName].schema[key] = existingTableSchema[key] - } - } - } + copyExistingPropsOver(tableName, tables, entities) } this.client.end() @@ -249,6 +242,23 @@ module MySQLModule { return internalQuery(this.client, input, false) } + // when creating if an ID has been inserted need to make sure + // the id filter is enriched with it before trying to retrieve the row + checkLookupKeys(results: any, json: QueryJson) { + if (!results?.insertId || !json.meta?.table || !json.meta.table.primary) { + return json + } + const primaryKey = json.meta.table.primary?.[0] + json.extra = { + idFilter: { + equal: { + 
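To make the buildRead rewrite above concrete: filters and the row limit are now applied to an inner query, which is then aliased back to the table name so that joins and column selection run over the already-limited result set. A minimal knex sketch of the same wrapping (table, filter and limit values are illustrative; the printed SQL is roughly what knex generates):

```js
const knex = require("knex")({ client: "pg" })

// Inner query: filters and the row limit are applied against the base table.
const inner = knex("users").where("users.name", "ilike", "John%").limit(5000)

// Outer query: the limited result set is aliased back to the table name, so joins
// and field selection behave as if they were reading the real table.
const outer = knex({ users: inner }).select("*")
// relationship joins would be added to `outer` here

console.log(outer.toString())
// select * from (select * from "users" where "users"."name" ilike 'John%' limit 5000) as "users"
```

This is why the SQL expected in the updated tests changes from a flat `select ... limit` to a `select ... from (select ... limit) as "table"` form.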
[primaryKey]: results.insertId, + }, + }, + } + return json + } + async query(json: QueryJson) { const operation = this._operation(json) this.client.connect() @@ -261,7 +271,7 @@ module MySQLModule { const results = await internalQuery(this.client, input, false) // same as delete, manage returning if (operation === Operation.CREATE || operation === Operation.UPDATE) { - row = this.getReturningRow(json) + row = this.getReturningRow(this.checkLookupKeys(results, json)) } this.client.end() if (operation !== Operation.READ) { diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index dd46652871..332ba8544d 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -12,7 +12,14 @@ module PostgresModule { const { Pool } = require("pg") const Sql = require("./base/sql") const { FieldTypes } = require("../constants") - const { buildExternalTableId, convertType } = require("./utils") + const { + buildExternalTableId, + convertType, + copyExistingPropsOver, + } = require("./utils") + const { escapeDangerousCharacters } = require("../utilities") + + const JSON_REGEX = /'{.*}'::json/s interface PostgresConfig { host: string @@ -84,13 +91,27 @@ module PostgresModule { bigint: FieldTypes.NUMBER, decimal: FieldTypes.NUMBER, smallint: FieldTypes.NUMBER, + real: FieldTypes.NUMBER, + "double precision": FieldTypes.NUMBER, timestamp: FieldTypes.DATETIME, time: FieldTypes.DATETIME, boolean: FieldTypes.BOOLEAN, json: FieldTypes.JSON, + date: FieldTypes.DATETIME, } async function internalQuery(client: any, query: SqlQuery) { + // need to handle a specific issue with json data types in postgres, + // new lines inside the JSON data will break it + if (query && query.sql) { + const matches = query.sql.match(JSON_REGEX) + if (matches && matches.length > 0) { + for (let match of matches) { + const escaped = escapeDangerousCharacters(match) + query.sql = query.sql.replace(match, escaped) + } + } + } try { return await client.query(query.sql, query.bindings || []) } catch (err) { @@ -105,7 +126,7 @@ module PostgresModule { private readonly config: PostgresConfig COLUMNS_SQL = - "select * from information_schema.columns where table_schema = 'public'" + "select * from information_schema.columns where not table_schema = 'information_schema' and not table_schema = 'pg_catalog'" PRIMARY_KEYS_SQL = ` select tc.table_schema, tc.table_name, kc.column_name as primary_key @@ -173,31 +194,30 @@ module PostgresModule { name: tableName, schema: {}, } - - // add the existing relationships from the entities if they exist, to prevent them from being overridden - if (entities && entities[tableName]) { - const existingTableSchema = entities[tableName].schema - for (let key in existingTableSchema) { - if (!existingTableSchema.hasOwnProperty(key)) { - continue - } - if (existingTableSchema[key].type === "link") { - tables[tableName].schema[key] = existingTableSchema[key] - } - } - } } const type: string = convertType(column.data_type, TYPE_MAP) - const isAuto: boolean = + const identity = !!( + column.identity_generation || + column.identity_start || + column.identity_increment + ) + const hasDefault = typeof column.column_default === "string" && column.column_default.startsWith("nextval") + const isGenerated = + column.is_generated && column.is_generated !== "NEVER" + const isAuto: boolean = hasDefault || identity || isGenerated tables[tableName].schema[columnName] = { autocolumn: isAuto, name: columnName, type, } } + + for (let 
tableName of Object.keys(tables)) { + copyExistingPropsOver(tableName, tables, entities) + } this.tables = tables } diff --git a/packages/server/src/integrations/tests/sql.spec.js b/packages/server/src/integrations/tests/sql.spec.js index fa8bcd1d86..64cdda215f 100644 --- a/packages/server/src/integrations/tests/sql.spec.js +++ b/packages/server/src/integrations/tests/sql.spec.js @@ -57,7 +57,7 @@ describe("SQL query builder", () => { const query = sql._query(generateReadJson()) expect(query).toEqual({ bindings: [limit], - sql: `select * from "${TABLE_NAME}" limit $1` + sql: `select * from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"` }) }) @@ -68,7 +68,7 @@ describe("SQL query builder", () => { })) expect(query).toEqual({ bindings: [limit], - sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from "${TABLE_NAME}" limit $1` + sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"` }) }) @@ -82,7 +82,7 @@ describe("SQL query builder", () => { })) expect(query).toEqual({ bindings: ["John%", limit], - sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2` + sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2) as "${TABLE_NAME}"` }) }) @@ -99,7 +99,7 @@ describe("SQL query builder", () => { })) expect(query).toEqual({ bindings: [2, 10, limit], - sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3` + sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3) as "${TABLE_NAME}"` }) }) @@ -115,7 +115,7 @@ describe("SQL query builder", () => { })) expect(query).toEqual({ bindings: [10, "John", limit], - sql: `select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3` + sql: `select * from (select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3) as "${TABLE_NAME}"` }) }) @@ -160,7 +160,7 @@ describe("SQL query builder", () => { const query = new Sql("mssql", 10)._query(generateReadJson()) expect(query).toEqual({ bindings: [10], - sql: `select top (@p0) * from [${TABLE_NAME}]` + sql: `select * from (select top (@p0) * from [${TABLE_NAME}]) as [${TABLE_NAME}]` }) }) @@ -168,7 +168,7 @@ describe("SQL query builder", () => { const query = new Sql("mysql", 10)._query(generateReadJson()) expect(query).toEqual({ bindings: [10], - sql: `select * from \`${TABLE_NAME}\` limit ?` + sql: `select * from (select * from \`${TABLE_NAME}\` limit ?) 
as \`${TABLE_NAME}\`` }) }) }) diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts index 5b247213c0..6e3dc6f684 100644 --- a/packages/server/src/integrations/utils.ts +++ b/packages/server/src/integrations/utils.ts @@ -82,3 +82,25 @@ export function isIsoDateString(str: string) { let d = new Date(str) return d.toISOString() === str } + +// add the existing relationships from the entities if they exist, to prevent them from being overridden +export function copyExistingPropsOver( + tableName: string, + tables: { [key: string]: any }, + entities: { [key: string]: any } +) { + if (entities && entities[tableName]) { + if (entities[tableName].primaryDisplay) { + tables[tableName].primaryDisplay = entities[tableName].primaryDisplay + } + const existingTableSchema = entities[tableName].schema + for (let key in existingTableSchema) { + if (!existingTableSchema.hasOwnProperty(key)) { + continue + } + if (existingTableSchema[key].type === "link") { + tables[tableName].schema[key] = existingTableSchema[key] + } + } + } +} diff --git a/packages/server/src/middleware/tests/usageQuota.spec.js b/packages/server/src/middleware/tests/usageQuota.spec.js index 97d9c7794a..7a64b1a039 100644 --- a/packages/server/src/middleware/tests/usageQuota.spec.js +++ b/packages/server/src/middleware/tests/usageQuota.spec.js @@ -6,6 +6,9 @@ jest.mock("../../environment", () => ({ isDev: () => true, _set: () => {}, })) +jest.mock("@budibase/auth/tenancy", () => ({ + getTenantId: () => "testing123" +})) const usageQuotaMiddleware = require("../usageQuota") const usageQuota = require("../../utilities/usageQuota") @@ -39,7 +42,7 @@ class TestConfiguration { if (bool) { env.isDev = () => false env.isProd = () => true - this.ctx.auth = { apiKey: "test" } + this.ctx.user = { tenantId: "test" } } else { env.isDev = () => true env.isProd = () => false @@ -114,7 +117,7 @@ describe("usageQuota middleware", () => { await config.executeMiddleware() - expect(usageQuota.update).toHaveBeenCalledWith("test", "rows", 1) + expect(usageQuota.update).toHaveBeenCalledWith("rows", 1) expect(config.next).toHaveBeenCalled() }) @@ -131,7 +134,7 @@ describe("usageQuota middleware", () => { ]) await config.executeMiddleware() - expect(usageQuota.update).toHaveBeenCalledWith("test", "storage", 10100) + expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100) expect(config.next).toHaveBeenCalled() }) }) \ No newline at end of file diff --git a/packages/server/src/middleware/usageQuota.js b/packages/server/src/middleware/usageQuota.js index 4647878721..de54c93200 100644 --- a/packages/server/src/middleware/usageQuota.js +++ b/packages/server/src/middleware/usageQuota.js @@ -1,6 +1,10 @@ const CouchDB = require("../db") const usageQuota = require("../utilities/usageQuota") const env = require("../environment") +const { getTenantId } = require("@budibase/auth/tenancy") + +// tenants without limits +const EXCLUDED_TENANTS = ["bb", "default", "bbtest", "bbstaging"] // currently only counting new writes and deletes const METHOD_MAP = { @@ -13,6 +17,7 @@ const DOMAIN_MAP = { upload: usageQuota.Properties.UPLOAD, views: usageQuota.Properties.VIEW, users: usageQuota.Properties.USER, + applications: usageQuota.Properties.APPS, // this will not be updated by endpoint calls // instead it will be updated by triggerInfo automationRuns: usageQuota.Properties.AUTOMATION, @@ -27,8 +32,10 @@ function getProperty(url) { } module.exports = async (ctx, next) => { + const tenantId = getTenantId() + // if in development 
or a self hosted cloud usage quotas should not be executed - if (env.isDev() || env.SELF_HOSTED) { + if (env.isDev() || env.SELF_HOSTED || EXCLUDED_TENANTS.includes(tenantId)) { return next() } @@ -57,9 +64,9 @@ module.exports = async (ctx, next) => { usage = files.map(file => file.size).reduce((total, size) => total + size) } try { - await usageQuota.update(ctx.auth.apiKey, property, usage) + await usageQuota.update(property, usage) return next() } catch (err) { - ctx.throw(403, err) + ctx.throw(400, err) } } diff --git a/packages/server/src/tests/utilities/structures.js b/packages/server/src/tests/utilities/structures.js index e4b2c7e1f0..9c900fec09 100644 --- a/packages/server/src/tests/utilities/structures.js +++ b/packages/server/src/tests/utilities/structures.js @@ -70,10 +70,12 @@ exports.basicRole = () => { exports.basicDatasource = () => { return { - type: "datasource", - name: "Test", - source: "POSTGRES", - config: {}, + datasource: { + type: "datasource", + name: "Test", + source: "POSTGRES", + config: {}, + }, } } diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js index 5226fd66ca..6fee7b4283 100644 --- a/packages/server/src/utilities/fileSystem/index.js +++ b/packages/server/src/utilities/fileSystem/index.js @@ -19,6 +19,7 @@ const { USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX, } = require("../../db/utils") +const MemoryStream = require("memorystream") const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules") @@ -111,24 +112,85 @@ exports.apiFileReturn = contents => { * to the temporary backup file (to return via API if required). */ exports.performBackup = async (appId, backupName) => { - const path = join(budibaseTempDir(), backupName) - const writeStream = fs.createWriteStream(path) - // perform couch dump - const instanceDb = new CouchDB(appId) - await instanceDb.dump(writeStream, { - // filter out anything that has a user metadata structure in its ID + return exports.exportDB(appId, { + exportName: backupName, filter: doc => !( doc._id.includes(USER_METDATA_PREFIX) || doc.includes(LINK_USER_METADATA_PREFIX) ), }) +} + +/** + * exports a DB to either file or a variable (memory). + * @param {string} dbName the DB which is to be exported. + * @param {string} exportName optional - the file name to export to, if not in memory. + * @param {function} filter optional - a filter function to clear out any un-wanted docs. + * @return Either the file stream or the variable (if no export name provided). 
+ */ +exports.exportDB = async ( + dbName, + { exportName, filter } = { exportName: undefined, filter: undefined } +) => { + let stream, + appString = "", + path = null + if (exportName) { + path = join(budibaseTempDir(), exportName) + stream = fs.createWriteStream(path) + } else { + stream = new MemoryStream() + stream.on("data", chunk => { + appString += chunk.toString() + }) + } + // perform couch dump + const instanceDb = new CouchDB(dbName) + await instanceDb.dump(stream, { + filter, + }) + // just in memory, return the final string + if (!exportName) { + return appString + } // write the file to the object store - await streamUpload( - ObjectStoreBuckets.BACKUPS, - join(appId, backupName), - fs.createReadStream(path) - ) + if (env.SELF_HOSTED) { + await streamUpload( + ObjectStoreBuckets.BACKUPS, + join(dbName, exportName), + fs.createReadStream(path) + ) + } + return fs.createReadStream(path) +} + +/** + * Writes the provided contents to a temporary file, which can be used briefly. + * @param {string} fileContents contents which will be written to a temp file. + * @return {string} the path to the temp file. + */ +exports.storeTempFile = fileContents => { + const path = join(budibaseTempDir(), uuid()) + fs.writeFileSync(path, fileContents) + return path +} + +/** + * Utility function for getting a file read stream - a simple in memory buffered read + * stream doesn't work for pouchdb. + */ +exports.stringToFileStream = contents => { + const path = exports.storeTempFile(contents) + return fs.createReadStream(path) +} + +/** + * Creates a temp file and returns it from the API. + * @param {string} fileContents the contents to be returned in file. + */ +exports.sendTempFile = fileContents => { + const path = exports.storeTempFile(fileContents) return fs.createReadStream(path) } diff --git a/packages/server/src/utilities/index.js b/packages/server/src/utilities/index.js index a81f9ddcf5..e568ba063c 100644 --- a/packages/server/src/utilities/index.js +++ b/packages/server/src/utilities/index.js @@ -3,6 +3,7 @@ const { OBJ_STORE_DIRECTORY } = require("../constants") const { sanitizeKey } = require("@budibase/auth/src/objectStore") const CouchDB = require("../db") const { generateMetadataID } = require("../db/utils") +const Readable = require("stream").Readable const BB_CDN = "https://cdn.budi.live" @@ -10,6 +11,14 @@ exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms)) exports.isDev = env.isDev +exports.removeFromArray = (array, element) => { + const index = array.indexOf(element) + if (index !== -1) { + array.splice(index, 1) + } + return array +} + /** * Makes sure that a URL has the correct number of slashes, while maintaining the * http(s):// double slashes. 
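A minimal usage sketch of the new exportDB helper above — the require path and the appId value are illustrative assumptions, but the option names (exportName, filter) and the in-memory vs. file behaviour follow the implementation:

const { exportDB, stringToFileStream } = require("./utilities/fileSystem")

async function demoExport(appId) {
  // No exportName: the dump is buffered through MemoryStream and returned
  // as a plain string.
  const asString = await exportDB(appId, {
    // example filter - drop design docs from the export
    filter: doc => !doc._id.startsWith("_design"),
  })

  // With an exportName: the dump is written to the temp dir (and pushed to
  // the backups bucket when self hosted) and a file read stream is returned.
  const asStream = await exportDB(appId, { exportName: `${appId}.txt` })

  // The in-memory string can be converted back into a stream PouchDB accepts.
  const replayStream = stringToFileStream(asString)
  return { asString, asStream, replayStream }
}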
@@ -106,3 +115,22 @@ exports.deleteEntityMetadata = async (appId, type, entityId) => { await db.remove(id, rev) } } + +exports.escapeDangerousCharacters = string => { + return string + .replace(/[\\]/g, "\\\\") + .replace(/[\b]/g, "\\b") + .replace(/[\f]/g, "\\f") + .replace(/[\n]/g, "\\n") + .replace(/[\r]/g, "\\r") + .replace(/[\t]/g, "\\t") +} + +exports.stringToReadStream = string => { + return new Readable({ + read() { + this.push(string) + this.push(null) + }, + }) +} diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js index bb4ac98bb7..07549dd8a8 100644 --- a/packages/server/src/utilities/rowProcessor/index.js +++ b/packages/server/src/utilities/rowProcessor/index.js @@ -89,10 +89,16 @@ const TYPE_TRANSFORM_MAP = { * @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields. * @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing. * @param {Object} row The row which is to be updated with information for the auto columns. + * @param {Object} opts specific options for function to carry out optional features. * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated * for automatic ID purposes. */ -function processAutoColumn(user, table, row) { +function processAutoColumn( + user, + table, + row, + opts = { reprocessing: false, noAutoRelationships: false } +) { let now = new Date().toISOString() // if a row doesn't have a revision then it doesn't exist yet const creating = !row._rev @@ -102,7 +108,7 @@ function processAutoColumn(user, table, row) { } switch (schema.subtype) { case AutoFieldSubTypes.CREATED_BY: - if (creating) { + if (creating && !opts.reprocessing && !opts.noAutoRelationships) { row[key] = [user.userId] } break @@ -112,7 +118,9 @@ function processAutoColumn(user, table, row) { } break case AutoFieldSubTypes.UPDATED_BY: - row[key] = [user.userId] + if (!opts.reprocessing && !opts.noAutoRelationships) { + row[key] = [user.userId] + } break case AutoFieldSubTypes.UPDATED_AT: row[key] = now @@ -127,6 +135,7 @@ function processAutoColumn(user, table, row) { } return { table, row } } +exports.processAutoColumn = processAutoColumn /** * This will coerce a value to the correct types based on the type transform map @@ -151,9 +160,15 @@ exports.coerce = (row, type) => { * @param {object} user the user which is performing the input. * @param {object} row the row which is being created/updated. * @param {object} table the table which the row is being saved to. + * @param {object} opts some input processing options (like disabling auto-column relationships). * @returns {object} the row which has been prepared to be written to the DB. 
*/ -exports.inputProcessing = (user = {}, table, row) => { +exports.inputProcessing = ( + user = {}, + table, + row, + opts = { noAutoRelationships: false } +) => { let clonedRow = cloneDeep(row) // need to copy the table so it can be differenced on way out const copiedTable = cloneDeep(table) @@ -176,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => { } } // handle auto columns - this returns an object like {table, row} - return processAutoColumn(user, copiedTable, clonedRow) + return processAutoColumn(user, copiedTable, clonedRow, opts) } /** diff --git a/packages/server/src/utilities/usageQuota.js b/packages/server/src/utilities/usageQuota.js index bfe71a4093..80fddb8303 100644 --- a/packages/server/src/utilities/usageQuota.js +++ b/packages/server/src/utilities/usageQuota.js @@ -1,41 +1,9 @@ const env = require("../environment") -const { apiKeyTable } = require("../db/dynamoClient") - -const DEFAULT_USAGE = { - rows: 0, - storage: 0, - views: 0, - automationRuns: 0, - users: 0, -} - -const DEFAULT_PLAN = { - rows: 1000, - // 1 GB - storage: 8589934592, - views: 10, - automationRuns: 100, - users: 10000, -} - -function buildUpdateParams(key, property, usage) { - return { - primary: key, - condition: - "attribute_exists(#quota) AND attribute_exists(#limits) AND #quota.#prop < #limits.#prop AND #quotaReset > :now", - expression: "ADD #quota.#prop :usage", - names: { - "#quota": "usageQuota", - "#prop": property, - "#limits": "usageLimits", - "#quotaReset": "quotaReset", - }, - values: { - ":usage": usage, - ":now": Date.now(), - }, - } -} +const { getGlobalDB } = require("@budibase/auth/tenancy") +const { + StaticDatabases, + generateNewUsageQuotaDoc, +} = require("@budibase/auth/db") function getNewQuotaReset() { return Date.now() + 2592000000 @@ -47,59 +15,59 @@ exports.Properties = { VIEW: "views", USER: "users", AUTOMATION: "automationRuns", + APPS: "apps", + EMAILS: "emails", } -exports.getAPIKey = async appId => { - if (!env.USE_QUOTAS) { - return { apiKey: null } +async function getUsageQuotaDoc(db) { + let quota + try { + quota = await db.get(StaticDatabases.PLATFORM_INFO.docs.usageQuota) + } catch (err) { + // doc doesn't exist. Create it + quota = await db.post(generateNewUsageQuotaDoc()) } - return apiKeyTable.get({ primary: appId }) + + return quota } /** - * Given a specified API key this will add to the usage object for the specified property. - * @param {string} apiKey The API key which is to be updated. + * Given a specified tenantId this will add to the usage object for the specified property. * @param {string} property The property which is to be added to (within the nested usageQuota object). * @param {number} usage The amount (this can be negative) to adjust the number by. * @returns {Promise} When this completes the API key will now be up to date - the quota period may have * also been reset after this call. */ -exports.update = async (apiKey, property, usage) => { +exports.update = async (property, usage) => { if (!env.USE_QUOTAS) { return } + try { - await apiKeyTable.update(buildUpdateParams(apiKey, property, usage)) - } catch (err) { - // conditional check means the condition failed, need to check why - if (err.code === "ConditionalCheckFailedException") { - // get the API key so we can check it - const keyObj = await apiKeyTable.get({ primary: apiKey }) - // the usage quota or usage limits didn't exist - if (keyObj && (keyObj.usageQuota == null || keyObj.usageLimits == null)) { - keyObj.usageQuota = - keyObj.usageQuota == null ? 
DEFAULT_USAGE : keyObj.usageQuota - keyObj.usageLimits = - keyObj.usageLimits == null ? DEFAULT_PLAN : keyObj.usageLimits - keyObj.quotaReset = getNewQuotaReset() - await apiKeyTable.put({ item: keyObj }) - return - } - // we have in fact breached the reset period - else if (keyObj && keyObj.quotaReset <= Date.now()) { - // update the quota reset period and reset the values for all properties - keyObj.quotaReset = getNewQuotaReset() - for (let prop of Object.keys(keyObj.usageQuota)) { - if (prop === property) { - keyObj.usageQuota[prop] = usage > 0 ? usage : 0 - } else { - keyObj.usageQuota[prop] = 0 - } - } - await apiKeyTable.put({ item: keyObj }) - return + const db = getGlobalDB() + const quota = await getUsageQuotaDoc(db) + + // Check if the quota needs reset + if (Date.now() >= quota.quotaReset) { + quota.quotaReset = getNewQuotaReset() + for (let prop of Object.keys(quota.usageQuota)) { + quota.usageQuota[prop] = 0 } } + + // increment the quota + quota.usageQuota[property] += usage + + if (quota.usageQuota[property] >= quota.usageLimits[property]) { + throw new Error( + `You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.` + ) + } + + // update the usage quotas + await db.put(quota) + } catch (err) { + console.error(`Error updating usage quotas for ${property}`, err) throw err } } diff --git a/packages/server/src/utilities/workerRequests.js b/packages/server/src/utilities/workerRequests.js index 377658084f..2ace265ca0 100644 --- a/packages/server/src/utilities/workerRequests.js +++ b/packages/server/src/utilities/workerRequests.js @@ -34,7 +34,7 @@ function request(ctx, request) { exports.request = request // have to pass in the tenant ID as this could be coming from an automation -exports.sendSmtpEmail = async (to, from, subject, contents) => { +exports.sendSmtpEmail = async (to, from, subject, contents, automation) => { // tenant ID will be set in header const response = await fetch( checkSlashesInUrl(env.WORKER_URL + `/api/global/email/send`), @@ -46,6 +46,7 @@ exports.sendSmtpEmail = async (to, from, subject, contents) => { contents, subject, purpose: "custom", + automation, }, }) ) diff --git a/packages/server/yarn.lock b/packages/server/yarn.lock index 6e7e7a868d..938077b90d 100644 --- a/packages/server/yarn.lock +++ b/packages/server/yarn.lock @@ -943,10 +943,10 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/auth@^0.9.125-alpha.17": - version "0.9.133" - resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.133.tgz#280d581820c9069b6bc021f88178c215ee48ad08" - integrity sha512-DL7zIYRXE6xSKE/qbHMf/SX3+bceGxM4xzUmLTk4OHtEOP/vaUJr35tkhznAZF7VpUR9Yh20D6/Zw8z/3sxj/A== +"@budibase/auth@^0.9.146-alpha.3": + version "0.9.146" + resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.146.tgz#920fe02a78ca17903b72ccde307ca3e82b4176ad" + integrity sha512-T7DhI3WIolD0CjO2pRCEZfJBpJce4cmZWTFRIZ8lBnKe/6dxkK9fNrkZDYRhRkMwQbDQXoARADZM1hAfgUsSMg== dependencies: "@techpass/passport-openidconnect" "^0.3.0" aws-sdk "^2.901.0" @@ -966,10 +966,10 @@ uuid "^8.3.2" zlib "^1.0.5" -"@budibase/bbui@^0.9.133": - version "0.9.133" - resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.133.tgz#91a2fb24abaaf91d2cb1e00eb51c493c1290f9ad" - integrity sha512-xbMmc/hee1QRNW7TrbGUBmLr1hMHXqUDA6rdl9N2PGfHFuFWbqlD8PWYanHmLevVet+CjkuKGPSbBghFK2pQyQ== 
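To make the tenant-based quota flow above concrete, here is a rough sketch of the per-tenant usage document that usageQuota.update() now maintains in the tenant's global DB, plus the call the middleware effectively makes — the document id and the limit values are illustrative assumptions; only the field names (quotaReset, usageQuota, usageLimits) come from the code:

// Illustrative shape only - the real doc comes from generateNewUsageQuotaDoc()
// in @budibase/auth.
const exampleQuotaDoc = {
  _id: "usage_quota", // assumed value of StaticDatabases.PLATFORM_INFO.docs.usageQuota
  quotaReset: Date.now() + 2592000000, // 30 days, matching getNewQuotaReset()
  usageQuota: { rows: 120, storage: 1024, views: 2, users: 3, automationRuns: 5, apps: 1, emails: 0 },
  usageLimits: { rows: 1000, storage: 8589934592, views: 10, users: 10000, automationRuns: 100, apps: 5, emails: 50 },
}

// With USE_QUOTAS set and the tenant context already established by the auth
// middleware, recording a new row no longer needs an API key:
const usageQuota = require("./utilities/usageQuota") // path relative to packages/server/src
async function recordRowWrite() {
  // throws once usageQuota.rows reaches usageLimits.rows for this tenant
  await usageQuota.update("rows", 1)
}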
+"@budibase/bbui@^0.9.139": + version "0.9.139" + resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.139.tgz#e6cfc90e8f6c2aa3526fc6a7bef251bccdaf51bb" + integrity sha512-HllzXwfCnxqlV/ifdOR4Got6yrvK2rUFwKUWQIcYU0wk8h6hwYmLehP7HqgBa6l8+bvO1Ep9g+rjP2xJPJG21w== dependencies: "@adobe/spectrum-css-workflow-icons" "^1.2.1" "@spectrum-css/actionbutton" "^1.0.1" @@ -1015,14 +1015,63 @@ svelte-flatpickr "^3.1.0" svelte-portal "^1.0.0" -"@budibase/client@^0.9.125-alpha.17": - version "0.9.133" - resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.9.133.tgz#43748e189e9b92d99d1281ab62bd2c5ebed5dbab" - integrity sha512-JrduL9iVMGalZyIUQ+1UN/dhrOZNRJwXU8B4r/eWhVoJf3f3bCuNfpMoT2LN3HY4ooyu37VehD+J5bdDsvlNPw== +"@budibase/bbui@^0.9.146": + version "0.9.146" + resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.146.tgz#7689b2c0f148321e62969181e3f6549f03dd3e78" + integrity sha512-Mq0oMyaN18Dg5e0IPtPXSGmu/TS4B74gW+l2ypJDNTzSRm934DOAPghDgkb53rFNZhsovCYjixJZmesUcv2o3g== dependencies: - "@budibase/bbui" "^0.9.133" - "@budibase/standard-components" "^0.9.133" - "@budibase/string-templates" "^0.9.133" + "@adobe/spectrum-css-workflow-icons" "^1.2.1" + "@spectrum-css/actionbutton" "^1.0.1" + "@spectrum-css/actiongroup" "^1.0.1" + "@spectrum-css/avatar" "^3.0.2" + "@spectrum-css/button" "^3.0.1" + "@spectrum-css/buttongroup" "^3.0.2" + "@spectrum-css/checkbox" "^3.0.2" + "@spectrum-css/dialog" "^3.0.1" + "@spectrum-css/divider" "^1.0.3" + "@spectrum-css/dropzone" "^3.0.2" + "@spectrum-css/fieldgroup" "^3.0.2" + "@spectrum-css/fieldlabel" "^3.0.1" + "@spectrum-css/icon" "^3.0.1" + "@spectrum-css/illustratedmessage" "^3.0.2" + "@spectrum-css/inputgroup" "^3.0.2" + "@spectrum-css/label" "^2.0.10" + "@spectrum-css/link" "^3.1.1" + "@spectrum-css/menu" "^3.0.1" + "@spectrum-css/modal" "^3.0.1" + "@spectrum-css/pagination" "^3.0.3" + "@spectrum-css/picker" "^1.0.1" + "@spectrum-css/popover" "^3.0.1" + "@spectrum-css/progressbar" "^1.0.2" + "@spectrum-css/progresscircle" "^1.0.2" + "@spectrum-css/radio" "^3.0.2" + "@spectrum-css/search" "^3.0.2" + "@spectrum-css/sidenav" "^3.0.2" + "@spectrum-css/statuslight" "^3.0.2" + "@spectrum-css/stepper" "^3.0.3" + "@spectrum-css/switch" "^1.0.2" + "@spectrum-css/table" "^3.0.1" + "@spectrum-css/tabs" "^3.0.1" + "@spectrum-css/tags" "^3.0.2" + "@spectrum-css/textfield" "^3.0.1" + "@spectrum-css/toast" "^3.0.1" + "@spectrum-css/tooltip" "^3.0.3" + "@spectrum-css/treeview" "^3.0.2" + "@spectrum-css/typography" "^3.0.1" + "@spectrum-css/underlay" "^2.0.9" + "@spectrum-css/vars" "^3.0.1" + dayjs "^1.10.4" + svelte-flatpickr "^3.1.0" + svelte-portal "^1.0.0" + +"@budibase/client@^0.9.146-alpha.3": + version "0.9.146" + resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.9.146.tgz#d3b1bbd67245ab5a3870ccb580b9fc76f0344fd6" + integrity sha512-vd/bMmiQVghFH3Pa9jrGXjYAAKo+lGrwWyfUSdXAb4XP6gCSnMK5BXf8NliNrQzQVmruYT+2rGMsnc+9q4lW1g== + dependencies: + "@budibase/bbui" "^0.9.146" + "@budibase/standard-components" "^0.9.139" + "@budibase/string-templates" "^0.9.146" regexparam "^1.3.0" shortid "^2.2.15" svelte-spa-router "^3.0.5" @@ -1055,12 +1104,12 @@ to-gfm-code-block "^0.1.1" year "^0.2.1" -"@budibase/standard-components@^0.9.133": - version "0.9.133" - resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.133.tgz#789c02b45dc3853b003822c09e18ce7ece4dfa29" - integrity sha512-xcuwTxsqk1J/YmM4YjThO/Fm0eJ+aZWm0kbFgfN+dNN9fuPlsPOLmlVEWeOUPmBa5XfRyDbx6lDYj0PPEK8CvA== 
+"@budibase/standard-components@^0.9.139": + version "0.9.139" + resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.139.tgz#cf8e2b759ae863e469e50272b3ca87f2827e66e3" + integrity sha512-Av0u9Eq2jerjhG6Atta+c0mOQGgE5K0QI3cm+8s/3Vki6/PXkO1YL5Alo3BOn9ayQAVZ/xp4rtZPuN/rzRibHw== dependencies: - "@budibase/bbui" "^0.9.133" + "@budibase/bbui" "^0.9.139" "@spectrum-css/button" "^3.0.3" "@spectrum-css/card" "^3.0.3" "@spectrum-css/divider" "^1.0.3" @@ -1073,10 +1122,10 @@ svelte-apexcharts "^1.0.2" svelte-flatpickr "^3.1.0" -"@budibase/string-templates@^0.9.125-alpha.17", "@budibase/string-templates@^0.9.133": - version "0.9.133" - resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.133.tgz#221d81e080dc4485dcffa989d16e2bbed39f9055" - integrity sha512-SMHcSPwHYdAqol9YCcMoYawp5/ETr9TqGZCUsL+hUUq+LritPwu/miQ++SVvRTQbOR7Mker0S9LO3H8mwYkW8w== +"@budibase/string-templates@^0.9.146", "@budibase/string-templates@^0.9.146-alpha.3": + version "0.9.146" + resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.146.tgz#85249c7a8777a5f0c280af6f6d0e3d3ff0bf20b5" + integrity sha512-4f91SVUaTKseB+j7ycWbP54XiqiFZ6bZvcKgzsg1mLF+VVJ1/ALUsLvCRaj6SlcSHrhhALiGVR1z18KOyBWoKw== dependencies: "@budibase/handlebars-helpers" "^0.11.4" dayjs "^1.10.4" @@ -2921,9 +2970,9 @@ aws-sdk@^2.767.0: xml2js "0.4.19" aws-sdk@^2.901.0: - version "2.989.0" - resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.989.0.tgz#ed3cce6b94856b469784bc3312a0b64438b9fe67" - integrity sha512-sMjvqeF9mEOxXkhOAUjCrBt2iYafclkmaIbgSdjJ+te7zKXeReqrc6P3VgIGUxU8kwmdSro0n1NjrXbzKQJhcw== + version "2.997.0" + resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.997.0.tgz#8598a5dd7bc6b6833a2fc3d737fba89020a79418" + integrity sha512-PiuDmC5hN+FsyLvl7GsZAnS6hQpo1pP+Ax2u8gyL19QlbBLwlhsFQF29vPcYatyv6WUxr51o6uymJdPxQg6uEA== dependencies: buffer "4.9.2" events "1.1.1" @@ -8131,6 +8180,11 @@ memory-pager@^1.0.2: resolved "https://registry.yarnpkg.com/memory-pager/-/memory-pager-1.5.0.tgz#d8751655d22d384682741c972f2c3d6dfa3e66b5" integrity sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg== +memorystream@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" + integrity sha1-htcJCzDORV1j+64S3aUaR93K+bI= + merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" @@ -8334,7 +8388,7 @@ mute-stream@0.0.8: resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== -mysql@^2.18.1: +mysql@2.18.1: version "2.18.1" resolved "https://registry.yarnpkg.com/mysql/-/mysql-2.18.1.tgz#2254143855c5a8c73825e4522baf2ea021766717" integrity sha512-Bca+gk2YWmqp2Uf6k5NFEurwY/0td0cpebAucFpY/3jhrwrVGuxU2uQFCHjU19SJfje0yQvi+rVWdq78hR5lig== @@ -8908,9 +8962,9 @@ passport-oauth1@1.x.x: utils-merge "1.x.x" passport-oauth2@1.x.x: - version "1.6.0" - resolved "https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.6.0.tgz#5f599735e0ea40ea3027643785f81a3a9b4feb50" - integrity sha512-emXPLqLcVEcLFR/QvQXZcwLmfK8e9CqvMgmOFJxcNT3okSFMtUbRRKpY20x5euD+01uHsjjCa07DYboEeLXYiw== + version "1.6.1" + resolved 
"https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.6.1.tgz#c5aee8f849ce8bd436c7f81d904a3cd1666f181b" + integrity sha512-ZbV43Hq9d/SBSYQ22GOiglFsjsD1YY/qdiptA+8ej+9C1dL1TVB+mBE5kDH/D4AJo50+2i8f4bx0vg4/yDDZCQ== dependencies: base64url "3.x.x" oauth "0.9.x" @@ -11110,9 +11164,9 @@ tmp@^0.0.33: os-tmpdir "~1.0.2" tmpl@1.0.x: - version "1.0.4" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" - integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-buffer@^1.1.1: version "1.1.1" diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json index 605348b061..bff798cf57 100644 --- a/packages/string-templates/package.json +++ b/packages/string-templates/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/string-templates", - "version": "0.9.125-alpha.17", + "version": "0.9.146-alpha.5", "description": "Handlebars wrapper for Budibase templating.", "main": "src/index.cjs", "module": "dist/bundle.mjs", diff --git a/packages/string-templates/yarn.lock b/packages/string-templates/yarn.lock index 0188a9ec1d..82f99d7b31 100644 --- a/packages/string-templates/yarn.lock +++ b/packages/string-templates/yarn.lock @@ -4633,9 +4633,9 @@ time-stamp@^1.0.1: integrity sha1-dkpaEa9QVhkhsTPztE5hhofg9cM= tmpl@1.0.x: - version "1.0.4" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" - integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-fast-properties@^2.0.0: version "2.0.0" diff --git a/packages/worker/package.json b/packages/worker/package.json index 2b3b969b06..7255ddb1f6 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/worker", "email": "hi@budibase.com", - "version": "0.9.125-alpha.17", + "version": "0.9.146-alpha.5", "description": "Budibase background service", "main": "src/index.js", "repository": { @@ -25,8 +25,8 @@ "author": "Budibase", "license": "AGPL-3.0-or-later", "dependencies": { - "@budibase/auth": "^0.9.125-alpha.17", - "@budibase/string-templates": "^0.9.125-alpha.17", + "@budibase/auth": "^0.9.146-alpha.5", + "@budibase/string-templates": "^0.9.146-alpha.5", "@koa/router": "^8.0.0", "@techpass/passport-openidconnect": "^0.3.0", "aws-sdk": "^2.811.0", diff --git a/packages/worker/scripts/dev/manage.js b/packages/worker/scripts/dev/manage.js index f9a931110e..4eb29847bb 100644 --- a/packages/worker/scripts/dev/manage.js +++ b/packages/worker/scripts/dev/manage.js @@ -21,7 +21,8 @@ async function init() { COUCH_DB_PASSWORD: "budibase", // empty string is false MULTI_TENANCY: "", - ACCOUNT_PORTAL_URL: "http://localhost:3001", + DISABLE_ACCOUNT_PORTAL: "", + ACCOUNT_PORTAL_URL: "http://localhost:10001", } let envFile = "" Object.keys(envFileJson).forEach(key => { diff --git a/packages/worker/scripts/jestSetup.js b/packages/worker/scripts/jestSetup.js index 374edfb946..89a517279a 100644 --- a/packages/worker/scripts/jestSetup.js +++ b/packages/worker/scripts/jestSetup.js @@ -1,5 +1,6 @@ const env = require("../src/environment") 
+env._set("SELF_HOSTED", "1") env._set("NODE_ENV", "jest") env._set("JWT_SECRET", "test-jwtsecret") env._set("LOG_LEVEL", "silent") diff --git a/packages/worker/src/api/controllers/global/configs.js b/packages/worker/src/api/controllers/global/configs.js index aa83fd695f..c0c300e4db 100644 --- a/packages/worker/src/api/controllers/global/configs.js +++ b/packages/worker/src/api/controllers/global/configs.js @@ -10,6 +10,7 @@ const email = require("../../../utilities/email") const { upload, ObjectStoreBuckets } = require("@budibase/auth").objectStore const CouchDB = require("../../../db") const { getGlobalDB } = require("@budibase/auth/tenancy") +const env = require("../../../environment") exports.save = async function (ctx) { const db = getGlobalDB() @@ -174,7 +175,13 @@ exports.upload = async function (ctx) { const file = ctx.request.files.file const { type, name } = ctx.params - const bucket = ObjectStoreBuckets.GLOBAL + let bucket + if (env.SELF_HOSTED) { + bucket = ObjectStoreBuckets.GLOBAL + } else { + bucket = ObjectStoreBuckets.GLOBAL_CLOUD + } + const key = `${type}/${name}` await upload({ bucket, diff --git a/packages/worker/src/api/controllers/global/email.js b/packages/worker/src/api/controllers/global/email.js index 57b78a6d7a..e194a30862 100644 --- a/packages/worker/src/api/controllers/global/email.js +++ b/packages/worker/src/api/controllers/global/email.js @@ -2,8 +2,16 @@ const { sendEmail } = require("../../../utilities/email") const { getGlobalDB } = require("@budibase/auth/tenancy") exports.sendEmail = async ctx => { - let { workspaceId, email, userId, purpose, contents, from, subject } = - ctx.request.body + let { + workspaceId, + email, + userId, + purpose, + contents, + from, + subject, + automation, + } = ctx.request.body let user if (userId) { const db = getGlobalDB() @@ -15,6 +23,7 @@ exports.sendEmail = async ctx => { contents, from, subject, + automation, }) ctx.body = { ...response, diff --git a/packages/worker/src/api/controllers/global/users.js b/packages/worker/src/api/controllers/global/users.js index 8f754e2922..e43513de5e 100644 --- a/packages/worker/src/api/controllers/global/users.js +++ b/packages/worker/src/api/controllers/global/users.js @@ -1,8 +1,8 @@ const { generateGlobalUserID, getGlobalUserParams, - StaticDatabases, + generateNewUsageQuotaDoc, } = require("@budibase/auth/db") const { hash, getGlobalUserByEmail } = require("@budibase/auth").utils const { UserStatus, EmailTemplatePurpose } = require("../../../constants") @@ -11,6 +11,7 @@ const { sendEmail } = require("../../../utilities/email") const { user: userCache } = require("@budibase/auth/cache") const { invalidateSessions } = require("@budibase/auth/sessions") const CouchDB = require("../../../db") +const accounts = require("@budibase/auth/accounts") const { getGlobalDB, getTenantId, @@ -18,6 +19,8 @@ const { tryAddTenant, updateTenantId, } = require("@budibase/auth/tenancy") +const { removeUserFromInfoDB } = require("@budibase/auth/deprovision") +const env = require("../../../environment") const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name @@ -31,7 +34,12 @@ async function allUsers() { return response.rows.map(row => row.doc) } -async function saveUser(user, tenantId, hashPassword = true) { +async function saveUser( + user, + tenantId, + hashPassword = true, + requirePassword = true +) { if (!tenantId) { throw "No tenancy specified." 
} @@ -43,9 +51,26 @@ async function saveUser(user, tenantId, hashPassword = true) { // make sure another user isn't using the same email let dbUser if (email) { + // check budibase users inside the tenant dbUser = await getGlobalUserByEmail(email) if (dbUser != null && (dbUser._id !== _id || Array.isArray(dbUser))) { - throw "Email address already in use." + throw `Email address ${email} already in use.` + } + + // check budibase users in other tenants + if (env.MULTI_TENANCY) { + dbUser = await getTenantUser(email) + if (dbUser != null && dbUser.tenantId !== tenantId) { + throw `Email address ${email} already in use.` + } + } + + // check root account users in account portal + if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { + const account = await accounts.getAccount(email) + if (account && account.verified && account.tenantId !== tenantId) { + throw `Email address ${email} already in use.` + } } } else { dbUser = await db.get(_id) @@ -57,12 +82,13 @@ async function saveUser(user, tenantId, hashPassword = true) { hashedPassword = hashPassword ? await hash(password) : password } else if (dbUser) { hashedPassword = dbUser.password - } else { + } else if (requirePassword) { throw "Password must be specified." } _id = _id || generateGlobalUserID() user = { + createdAt: Date.now(), ...dbUser, ...user, _id, @@ -106,16 +132,21 @@ exports.save = async ctx => { } } +const parseBooleanParam = param => { + if (param && param === "false") { + return false + } else { + return true + } +} + exports.adminUser = async ctx => { const { email, password, tenantId } = ctx.request.body // account portal sends a pre-hashed password - honour param to prevent double hashing - let hashPassword = ctx.request.query.hashPassword - if (hashPassword && hashPassword == "false") { - hashPassword = false - } else { - hashPassword = true - } + const hashPassword = parseBooleanParam(ctx.request.query.hashPassword) + // account portal sends no password for SSO users + const requirePassword = parseBooleanParam(ctx.request.query.requirePassword) if (await doesTenantExist(tenantId)) { ctx.throw(403, "Organisation already exists.") @@ -128,6 +159,22 @@ exports.adminUser = async ctx => { }) ) + // write usage quotas for cloud + if (!env.SELF_HOSTED) { + // could be a scenario where it exists, make sure its clean + try { + const usageQuota = await db.get( + StaticDatabases.PLATFORM_INFO.docs.usageQuota + ) + if (usageQuota) { + await db.remove(usageQuota._id, usageQuota._rev) + } + } catch (err) { + // don't worry about errors + } + await db.post(generateNewUsageQuotaDoc()) + } + if (response.rows.some(row => row.doc.admin)) { ctx.throw( 403, @@ -138,6 +185,7 @@ exports.adminUser = async ctx => { const user = { email: email, password: password, + createdAt: Date.now(), roles: {}, builder: { global: true, @@ -148,7 +196,7 @@ exports.adminUser = async ctx => { tenantId, } try { - ctx.body = await saveUser(user, tenantId, hashPassword) + ctx.body = await saveUser(user, tenantId, hashPassword, requirePassword) } catch (err) { ctx.throw(err.status || 400, err) } @@ -157,6 +205,7 @@ exports.adminUser = async ctx => { exports.destroy = async ctx => { const db = getGlobalDB() const dbUser = await db.get(ctx.params.id) + await removeUserFromInfoDB(dbUser) await db.remove(dbUser._id, dbUser._rev) await userCache.invalidateUser(dbUser._id) await invalidateSessions(dbUser._id) @@ -249,13 +298,22 @@ exports.find = async ctx => { ctx.body = user } -exports.tenantUserLookup = async ctx => { - const id = ctx.params.id - // lookup, could 
be email or userId, either will return a doc +// lookup, could be email or userId, either will return a doc +const getTenantUser = async identifier => { const db = new CouchDB(PLATFORM_INFO_DB) try { - ctx.body = await db.get(id) + return await db.get(identifier) } catch (err) { + return null + } +} + +exports.tenantUserLookup = async ctx => { + const id = ctx.params.id + const user = await getTenantUser(id) + if (user) { + ctx.body = user + } else { ctx.throw(400, "No tenant user found.") } } diff --git a/packages/worker/src/api/controllers/global/workspaces.js b/packages/worker/src/api/controllers/global/workspaces.js index 95a1ec296d..48a710c92d 100644 --- a/packages/worker/src/api/controllers/global/workspaces.js +++ b/packages/worker/src/api/controllers/global/workspaces.js @@ -11,7 +11,7 @@ exports.save = async function (ctx) { } try { - const response = await db.post(workspaceDoc) + const response = await db.put(workspaceDoc) ctx.body = { _id: response.id, _rev: response.rev, diff --git a/packages/worker/src/api/controllers/system/environment.js b/packages/worker/src/api/controllers/system/environment.js index 305ccd7937..a4022561d4 100644 --- a/packages/worker/src/api/controllers/system/environment.js +++ b/packages/worker/src/api/controllers/system/environment.js @@ -3,7 +3,8 @@ const env = require("../../../environment") exports.fetch = async ctx => { ctx.body = { multiTenancy: !!env.MULTI_TENANCY, - cloud: !(env.SELF_HOSTED === "1"), + cloud: !env.SELF_HOSTED, accountPortalUrl: env.ACCOUNT_PORTAL_URL, + disableAccountPortal: env.DISABLE_ACCOUNT_PORTAL, } } diff --git a/packages/worker/src/api/controllers/system/tenants.js b/packages/worker/src/api/controllers/system/tenants.js index e053216dd9..a96c5e5f9f 100644 --- a/packages/worker/src/api/controllers/system/tenants.js +++ b/packages/worker/src/api/controllers/system/tenants.js @@ -1,5 +1,7 @@ const CouchDB = require("../../../db") const { StaticDatabases } = require("@budibase/auth/db") +const { getTenantId } = require("@budibase/auth/tenancy") +const { deleteTenant } = require("@budibase/auth/deprovision") exports.exists = async ctx => { const tenantId = ctx.request.params @@ -31,3 +33,19 @@ exports.fetch = async ctx => { } ctx.body = tenants } + +exports.delete = async ctx => { + const tenantId = getTenantId() + + if (ctx.params.tenantId !== tenantId) { + ctx.throw(403, "Unauthorized") + } + + try { + await deleteTenant(tenantId) + ctx.status = 204 + } catch (err) { + ctx.log.error(err) + throw err + } +} diff --git a/packages/worker/src/api/routes/global/users.js b/packages/worker/src/api/routes/global/users.js index 9af249260d..1a04944a30 100644 --- a/packages/worker/src/api/routes/global/users.js +++ b/packages/worker/src/api/routes/global/users.js @@ -10,7 +10,7 @@ function buildAdminInitValidation() { return joiValidator.body( Joi.object({ email: Joi.string().required(), - password: Joi.string().required(), + password: Joi.string(), tenantId: Joi.string().required(), }) .required() diff --git a/packages/worker/src/api/routes/system/tenants.js b/packages/worker/src/api/routes/system/tenants.js index 223ba9f26e..49c7509a67 100644 --- a/packages/worker/src/api/routes/system/tenants.js +++ b/packages/worker/src/api/routes/system/tenants.js @@ -7,5 +7,6 @@ const router = Router() router .get("/api/system/tenants/:tenantId/exists", controller.exists) .get("/api/system/tenants", adminOnly, controller.fetch) + .delete("/api/system/tenants/:tenantId", adminOnly, controller.delete) module.exports = router diff --git 
a/packages/worker/src/constants/templates/base.hbs b/packages/worker/src/constants/templates/base.hbs index 960d6faff1..438197b5d2 100644 --- a/packages/worker/src/constants/templates/base.hbs +++ b/packages/worker/src/constants/templates/base.hbs @@ -19,7 +19,7 @@ } a { - color: #3869D4; + color: #3869D4 !important; } a img { @@ -115,8 +115,8 @@ border-bottom: 10px solid #3869D4; border-left: 18px solid #3869D4; display: inline-block; - color: #FFF; - text-decoration: none; + color: #FFF !important; + text-decoration: none !important; border-radius: 3px; box-shadow: 0 2px 3px rgba(0, 0, 0, 0.16); -webkit-text-size-adjust: none; diff --git a/packages/worker/src/environment.js b/packages/worker/src/environment.js index 12113c087c..63115ea836 100644 --- a/packages/worker/src/environment.js +++ b/packages/worker/src/environment.js @@ -18,7 +18,7 @@ if (!LOADED && isDev() && !isTest()) { module.exports = { NODE_ENV: process.env.NODE_ENV, - SELF_HOSTED: process.env.SELF_HOSTED, + SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED), PORT: process.env.PORT, CLUSTER_PORT: process.env.CLUSTER_PORT, MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, @@ -32,7 +32,14 @@ module.exports = { REDIS_PASSWORD: process.env.REDIS_PASSWORD, INTERNAL_API_KEY: process.env.INTERNAL_API_KEY, MULTI_TENANCY: process.env.MULTI_TENANCY, + DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL, ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL, + SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED, + SMTP_USER: process.env.SMTP_USER, + SMTP_PASSWORD: process.env.SMTP_PASSWORD, + SMTP_HOST: process.env.SMTP_HOST, + SMTP_PORT: process.env.SMTP_PORT, + SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS, _set(key, value) { process.env[key] = value module.exports[key] = value diff --git a/packages/worker/src/utilities/email.js b/packages/worker/src/utilities/email.js index d22933ef36..14c836952e 100644 --- a/packages/worker/src/utilities/email.js +++ b/packages/worker/src/utilities/email.js @@ -1,4 +1,5 @@ const nodemailer = require("nodemailer") +const env = require("../environment") const { getScopedConfig } = require("@budibase/auth/db") const { EmailTemplatePurpose, TemplateTypes, Configs } = require("../constants") const { getTemplateByPurpose } = require("../constants/templates") @@ -101,16 +102,35 @@ async function buildEmail(purpose, email, context, { user, contents } = {}) { * Utility function for finding most valid SMTP configuration. * @param {object} db The CouchDB database which is to be looked up within. * @param {string|null} workspaceId If using finer grain control of configs a workspace can be used. + * @param {boolean|null} automation Whether or not the configuration is being fetched for an email automation. 
* @return {Promise} returns the SMTP configuration if it exists */ -async function getSmtpConfiguration(db, workspaceId = null) { +async function getSmtpConfiguration(db, workspaceId = null, automation) { const params = { type: Configs.SMTP, } if (workspaceId) { params.workspace = workspaceId } - return getScopedConfig(db, params) + + const customConfig = await getScopedConfig(db, params) + + if (customConfig) { + return customConfig + } + + // Use an SMTP fallback configuration from env variables + if (!automation && env.SMTP_FALLBACK_ENABLED) { + return { + port: env.SMTP_PORT, + host: env.SMTP_HOST, + secure: false, + auth: { + user: env.SMTP_USER, + pass: env.SMTP_PASSWORD, + }, + } + } } /** @@ -118,8 +138,8 @@ async function getSmtpConfiguration(db, workspaceId = null) { * @return {Promise} returns true if there is a configuration that can be used. */ exports.isEmailConfigured = async (workspaceId = null) => { - // when "testing" simply return true - if (TEST_MODE) { + // when "testing" or smtp fallback is enabled simply return true + if (TEST_MODE || env.SMTP_FALLBACK_ENABLED) { return true } const db = getGlobalDB() @@ -138,16 +158,17 @@ exports.isEmailConfigured = async (workspaceId = null) => { * @param {string|undefined} contents If sending a custom email then can supply contents which will be added to it. * @param {string|undefined} subject A custom subject can be specified if the config one is not desired. * @param {object|undefined} info Pass in a structure of information to be stored alongside the invitation. + * @param {boolean|undefined} automation Set if being sent from an automation, prevents use of the SMTP fallback to avoid spam. * @return {Promise} returns details about the attempt to send email, e.g. if it is successful; based on * nodemailer response. */ exports.sendEmail = async ( email, purpose, - { workspaceId, user, from, contents, subject, info } = {} + { workspaceId, user, from, contents, subject, info, automation } = {} ) => { const db = getGlobalDB() - let config = (await getSmtpConfiguration(db, workspaceId)) || {} + let config = (await getSmtpConfiguration(db, workspaceId, automation)) || {} if (Object.keys(config).length === 0 && !TEST_MODE) { throw "Unable to find SMTP configuration."
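For reference, a sketch of the environment variables that switch on the new SMTP fallback in the worker, and the nodemailer-style transport config that getSmtpConfiguration() assembles from them when no SMTP config doc exists and the email is not coming from an automation (the values are placeholders):

// SMTP_FALLBACK_ENABLED=1
// SMTP_HOST=smtp.example.com
// SMTP_PORT=587
// SMTP_USER=no-reply@example.com
// SMTP_PASSWORD=supersecret
// SMTP_FROM_ADDRESS=no-reply@example.com   (also read into the worker env above)
const fallbackConfig = {
  port: process.env.SMTP_PORT,
  host: process.env.SMTP_HOST,
  secure: false, // the fallback hard-codes an unencrypted connection
  auth: {
    user: process.env.SMTP_USER,
    pass: process.env.SMTP_PASSWORD,
  },
}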
} diff --git a/packages/worker/yarn.lock b/packages/worker/yarn.lock index 59dec93830..ab206dcf18 100644 --- a/packages/worker/yarn.lock +++ b/packages/worker/yarn.lock @@ -287,68 +287,6 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/auth@^0.9.128": - version "0.9.128" - resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.128.tgz#6bb6c716b6647b7e9362e3faf12b191650ea0ad4" - integrity sha512-WCcrtAXilT/4++7PdzyTYgrdVqZcKhUev3NcGrFQf7WbDhkVCuigWbb8Q01KXODjbs0BZC0RshVv/PxrgLbBQA== - dependencies: - "@techpass/passport-openidconnect" "^0.3.0" - aws-sdk "^2.901.0" - bcryptjs "^2.4.3" - cls-hooked "^4.2.2" - ioredis "^4.27.1" - jsonwebtoken "^8.5.1" - koa-passport "^4.1.4" - lodash "^4.17.21" - node-fetch "^2.6.1" - passport-google-auth "^1.0.2" - passport-google-oauth "^2.0.0" - passport-jwt "^4.0.0" - passport-local "^1.0.0" - sanitize-s3-objectkey "^0.0.1" - tar-fs "^2.1.1" - uuid "^8.3.2" - zlib "^1.0.5" - -"@budibase/handlebars-helpers@^0.11.4": - version "0.11.5" - resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.11.5.tgz#e9cc90a44e94ad536992cf10906829b633e94bc5" - integrity sha512-ZxpyNtTHxS8Y+yTicbgWvYDAydooUSjOf3Y+wmTE2d4NpDgO0g0IjepLfZV+KASv9XBr//ylJdjE4hClX9NTFw== - dependencies: - array-sort "^1.0.0" - define-property "^2.0.2" - extend-shallow "^3.0.2" - "falsey" "^1.0.0" - for-in "^1.0.2" - get-object "^0.2.0" - get-value "^3.0.1" - handlebars "^4.7.7" - handlebars-utils "^1.0.6" - has-value "^2.0.2" - helper-date "^1.0.1" - helper-markdown "^1.0.0" - helper-md "^0.2.2" - html-tag "^2.0.0" - is-even "^1.0.0" - is-glob "^4.0.1" - kind-of "^6.0.3" - micromatch "^3.1.5" - relative "^3.0.2" - striptags "^3.1.1" - to-gfm-code-block "^0.1.1" - year "^0.2.1" - -"@budibase/string-templates@^0.9.128": - version "0.9.128" - resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.128.tgz#50ee46dc0d726d481bd5139cd0b38364649a8463" - integrity sha512-4TzmnX2o5S2cts08ukB86El4wYm7cHuV2t6a7yDMGPe1mWeKP1WEtVF6rKhXEdbPTiotW8oYondOlgOP7DT9lA== - dependencies: - "@budibase/handlebars-helpers" "^0.11.4" - dayjs "^1.10.4" - handlebars "^4.7.6" - handlebars-utils "^1.0.6" - lodash "^4.17.20" - "@cnakazawa/watch@^1.0.3": version "1.0.4" resolved "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.4.tgz#f864ae85004d0fcab6f50be9141c4da368d1656a" @@ -941,7 +879,7 @@ anymatch@~3.1.1: normalize-path "^3.0.0" picomatch "^2.0.4" -argparse@^1.0.10, argparse@^1.0.7: +argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== @@ -978,15 +916,6 @@ arr-union@^3.1.0: resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= -array-sort@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-sort/-/array-sort-1.0.0.tgz#e4c05356453f56f53512a7d1d6123f2c54c0a88a" - integrity sha512-ihLeJkonmdiAsD7vpgN3CRcx2J2S0TiYW+IS/5zHBI7mKUq3ySvBdzzBfD236ubDBQFiiyG3SWCPc+msQ9KoYg== - dependencies: - default-compare "^1.0.0" - get-value "^2.0.6" - kind-of "^5.0.2" - array-unique@^0.3.2: version "0.3.2" resolved 
"https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" @@ -1019,20 +948,6 @@ ast-types@0.9.6: resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.9.6.tgz#102c9e9e9005d3e7e3829bf0c4fa24ee862ee9b9" integrity sha1-ECyenpAF0+fjgpvwxPok7oYu6bk= -async-hook-jl@^1.7.6: - version "1.7.6" - resolved "https://registry.yarnpkg.com/async-hook-jl/-/async-hook-jl-1.7.6.tgz#4fd25c2f864dbaf279c610d73bf97b1b28595e68" - integrity sha512-gFaHkFfSxTjvoxDMYqDuGHlcRyUuamF8s+ZTtJdDzqjws4mCt7v0vuV79/E2Wr2/riMQgtG4/yUtXWs1gZ7JMg== - dependencies: - stack-chain "^1.3.7" - -async@~2.1.4: - version "2.1.5" - resolved "https://registry.yarnpkg.com/async/-/async-2.1.5.tgz#e587c68580994ac67fc56ff86d3ac56bdbe810bc" - integrity sha1-5YfGhYCZSsZ/xW/4bTrFa9voELw= - dependencies: - lodash "^4.14.0" - asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -1048,13 +963,6 @@ atomic-sleep@^1.0.0: resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== -autolinker@~0.28.0: - version "0.28.1" - resolved "https://registry.yarnpkg.com/autolinker/-/autolinker-0.28.1.tgz#0652b491881879f0775dace0cdca3233942a4e47" - integrity sha1-BlK0kYgYefB3XazgzcoyM5QqTkc= - dependencies: - gulp-header "^1.7.1" - aws-sdk@^2.811.0: version "2.811.0" resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.811.0.tgz#a7e4040b2ee7d8b825b142ed5179d36dc3f315c4" @@ -1070,21 +978,6 @@ aws-sdk@^2.811.0: uuid "3.3.2" xml2js "0.4.19" -aws-sdk@^2.901.0: - version "2.989.0" - resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.989.0.tgz#ed3cce6b94856b469784bc3312a0b64438b9fe67" - integrity sha512-sMjvqeF9mEOxXkhOAUjCrBt2iYafclkmaIbgSdjJ+te7zKXeReqrc6P3VgIGUxU8kwmdSro0n1NjrXbzKQJhcw== - dependencies: - buffer "4.9.2" - events "1.1.1" - ieee754 "1.1.13" - jmespath "0.15.0" - querystring "0.2.0" - sax "1.2.1" - url "0.10.3" - uuid "3.3.2" - xml2js "0.4.19" - aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" @@ -1211,15 +1104,6 @@ binary-extensions@^2.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== -bl@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - boxen@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/boxen/-/boxen-4.2.0.tgz#e411b62357d6d6d36587c8ac3d5d974daa070e64" @@ -1464,11 +1348,6 @@ chokidar@^3.2.2: optionalDependencies: fsevents "~2.3.1" -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - ci-info@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" @@ -1515,20 +1394,6 @@ 
clone-response@^1.0.2: dependencies: mimic-response "^1.0.0" -cls-hooked@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/cls-hooked/-/cls-hooked-4.2.2.tgz#ad2e9a4092680cdaffeb2d3551da0e225eae1908" - integrity sha512-J4Xj5f5wq/4jAvcdgoGsL3G103BtWpZrMo8NEinRltN+xpTZdI+M38pyQqhuFU/P792xkMFvnKSf+Lm81U1bxw== - dependencies: - async-hook-jl "^1.7.6" - emitter-listener "^1.0.1" - semver "^5.4.1" - -cluster-key-slot@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz#30474b2a981fb12172695833052bc0d01336d10d" - integrity sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw== - co-body@^5.1.1: version "5.2.0" resolved "https://registry.yarnpkg.com/co-body/-/co-body-5.2.0.tgz#5a0a658c46029131e0e3a306f67647302f71c124" @@ -1630,13 +1495,6 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-with-sourcemaps@*: - version "1.1.0" - resolved "https://registry.yarnpkg.com/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz#d4ea93f05ae25790951b99e7b3b09e3908a4082e" - integrity sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg== - dependencies: - source-map "^0.6.1" - configstore@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96" @@ -1756,23 +1614,11 @@ data-urls@^2.0.0: whatwg-mimetype "^2.3.0" whatwg-url "^8.0.0" -date.js@^0.3.1: - version "0.3.3" - resolved "https://registry.yarnpkg.com/date.js/-/date.js-0.3.3.tgz#ef1e92332f507a638795dbb985e951882e50bbda" - integrity sha512-HgigOS3h3k6HnW011nAb43c5xx5rBXk8P2v/WIT9Zv4koIaVXiH2BURguI78VVp+5Qc076T7OR378JViCnZtBw== - dependencies: - debug "~3.1.0" - dateformat@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-3.0.3.tgz#a6e37499a4d9a9cf85ef5872044d62901c9889ae" integrity sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q== -dayjs@^1.10.4: - version "1.10.7" - resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.10.7.tgz#2cf5f91add28116748440866a0a1d26f3a6ce468" - integrity sha512-P6twpd70BcPK34K26uJ1KT3wlhpuOAPoMwJzpsIWUxHZ7wpmbdZL/hQqBDfz7hGurYSa5PhzdhDHtt319hL3ig== - debug@^2.2.0, debug@^2.3.3: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" @@ -1794,13 +1640,6 @@ debug@^4.1.0, debug@^4.1.1: dependencies: ms "2.1.2" -debug@^4.3.1: - version "4.3.2" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" - integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== - dependencies: - ms "2.1.2" - debug@~3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" @@ -1857,13 +1696,6 @@ deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -default-compare@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/default-compare/-/default-compare-1.0.0.tgz#cb61131844ad84d84788fb68fd01681ca7781a2f" - integrity 
sha512-QWfXlM0EkAbqOCbD/6HjdwT19j7WCkMyiRhWilc4H9/5h/RzTF9gv5LYh1+CmDV5d1rki6KAWLtQale0xt20eQ== - dependencies: - kind-of "^5.0.2" - defer-to-connect@^1.0.1: version "1.1.3" resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" @@ -1919,11 +1751,6 @@ delegates@^1.0.0: resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= -denque@^1.1.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/denque/-/denque-1.5.1.tgz#07f670e29c9a78f8faecb2566a1e2c11929c5cbf" - integrity sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw== - depd@^1.1.2, depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" @@ -2011,13 +1838,6 @@ electron-to-chromium@^1.3.719: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.720.tgz#f5d66df8754d993006b7b2ded15ff7738c58bd94" integrity sha512-B6zLTxxaOFP4WZm6DrvgRk8kLFYWNhQ5TrHMC0l5WtkMXhU5UbnvWoTfeEwqOruUSlNMhVLfYak7REX6oC5Yfw== -emitter-listener@^1.0.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/emitter-listener/-/emitter-listener-1.1.2.tgz#56b140e8f6992375b3d7cb2cab1cc7432d9632e8" - integrity sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ== - dependencies: - shimmer "^1.2.0" - emittery@^0.7.1: version "0.7.2" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.7.2.tgz#25595908e13af0f5674ab419396e2fb394cdfa82" @@ -2048,7 +1868,7 @@ encoding-down@^6.3.0: level-codec "^9.0.0" level-errors "^2.0.0" -end-of-stream@^1.1.0, end-of-stream@^1.4.1: +end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== @@ -2062,11 +1882,6 @@ end-stream@~0.1.0: dependencies: write-stream "~0.4.3" -ent@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" - integrity sha1-6WQhkyWiHQX0RGai9obtbOX13R0= - errno@~0.1.1: version "0.1.8" resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" @@ -2308,11 +2123,6 @@ falafel@^1.0.1: isarray "0.0.1" object-keys "^1.0.6" -"falsey@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/falsey/-/falsey-1.0.0.tgz#71bdd775c24edad9f2f5c015ce8be24400bb5d7d" - integrity sha512-zMDNZ/Ipd8MY0+346CPvhzP1AsiVyNfTOayJza4reAIWf72xbkuFUDcJNxSAsQE1b9Bu0wijKb8Ngnh/a7fI5w== - fast-deep-equal@^3.1.1: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" @@ -2439,16 +2249,6 @@ fresh@~0.5.2: resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - -fs-exists-sync@^0.1.0: - version "0.1.0" - resolved 
"https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add" - integrity sha1-mC1ok6+RjnLQjeyehnP/K1qNat0= - fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -2488,14 +2288,6 @@ get-intrinsic@^1.0.2: has "^1.0.3" has-symbols "^1.0.1" -get-object@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/get-object/-/get-object-0.2.0.tgz#d92ff7d5190c64530cda0543dac63a3d47fe8c0c" - integrity sha1-2S/31RkMZFMM2gVD2sY6PUf+jAw= - dependencies: - is-number "^2.0.2" - isobject "^0.2.0" - get-package-type@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" @@ -2520,13 +2312,6 @@ get-value@^2.0.3, get-value@^2.0.6: resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= -get-value@^3.0.0, get-value@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-3.0.1.tgz#5efd2a157f1d6a516d7524e124ac52d0a39ef5a8" - integrity sha512-mKZj9JLQrwMBtj5wxi6MH8Z5eSKaERpAwjg43dPtlGI1ZVEgH/qC7T8/6R2OBSUA+zzHBZgICsVJaEIV2tKTDA== - dependencies: - isobject "^3.0.1" - getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" @@ -2576,32 +2361,6 @@ globals@^11.1.0: resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== -google-auth-library@~0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-0.10.0.tgz#6e15babee85fd1dd14d8d128a295b6838d52136e" - integrity sha1-bhW6vuhf0d0U2NEoopW2g41SE24= - dependencies: - gtoken "^1.2.1" - jws "^3.1.4" - lodash.noop "^3.0.1" - request "^2.74.0" - -google-p12-pem@^0.1.0: - version "0.1.2" - resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-0.1.2.tgz#33c46ab021aa734fa0332b3960a9a3ffcb2f3177" - integrity sha1-M8RqsCGqc0+gMys5YKmj/8svMXc= - dependencies: - node-forge "^0.7.1" - -googleapis@^16.0.0: - version "16.1.0" - resolved "https://registry.yarnpkg.com/googleapis/-/googleapis-16.1.0.tgz#0f19f2d70572d918881a0f626e3b1a2fa8629576" - integrity sha1-Dxny1wVy2RiIGg9ibjsaL6hilXY= - dependencies: - async "~2.1.4" - google-auth-library "~0.10.0" - string-template "~1.0.0" - got@^11.8.1: version "11.8.1" resolved "https://registry.yarnpkg.com/got/-/got-11.8.1.tgz#df04adfaf2e782babb3daabc79139feec2f7e85d" @@ -2651,45 +2410,6 @@ growly@^1.3.0: resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE= -gtoken@^1.2.1: - version "1.2.3" - resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-1.2.3.tgz#5509571b8afd4322e124cf66cf68115284c476d8" - integrity sha512-wQAJflfoqSgMWrSBk9Fg86q+sd6s7y6uJhIvvIPz++RElGlMtEqsdAR2oWwZ/WTEtp7P9xFbJRrT976oRgzJ/w== - dependencies: - google-p12-pem "^0.1.0" - jws "^3.0.0" - mime "^1.4.1" - request "^2.72.0" - -gulp-header@^1.7.1: - version "1.8.12" - resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.12.tgz#ad306be0066599127281c4f8786660e705080a84" - integrity 
sha512-lh9HLdb53sC7XIZOYzTXM4lFuXElv3EVkSDhsd7DoJBj7hm+Ni7D3qYbb+Rr8DuM8nRanBvkVO9d7askreXGnQ== - dependencies: - concat-with-sourcemaps "*" - lodash.template "^4.4.0" - through2 "^2.0.0" - -handlebars-utils@^1.0.2, handlebars-utils@^1.0.4, handlebars-utils@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/handlebars-utils/-/handlebars-utils-1.0.6.tgz#cb9db43362479054782d86ffe10f47abc76357f9" - integrity sha512-d5mmoQXdeEqSKMtQQZ9WkiUcO1E3tPbWxluCK9hVgIDPzQa9WsKo3Lbe/sGflTe7TomHEeZaOgwIkyIr1kfzkw== - dependencies: - kind-of "^6.0.0" - typeof-article "^0.1.1" - -handlebars@^4.7.6, handlebars@^4.7.7: - version "4.7.7" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" - integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== - dependencies: - minimist "^1.2.5" - neo-async "^2.6.0" - source-map "^0.6.1" - wordwrap "^1.0.0" - optionalDependencies: - uglify-js "^3.1.4" - har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" @@ -2736,14 +2456,6 @@ has-value@^1.0.0: has-values "^1.0.0" isobject "^3.0.0" -has-value@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-2.0.2.tgz#d0f12e8780ba8e90e66ad1a21c707fdb67c25658" - integrity sha512-ybKOlcRsK2MqrM3Hmz/lQxXHZ6ejzSPzpNabKB45jb5qDgJvKPa3SdapTsTLwEb9WltgWpOmNax7i+DzNOk4TA== - dependencies: - get-value "^3.0.0" - has-values "^2.0.1" - has-values@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" @@ -2757,13 +2469,6 @@ has-values@^1.0.0: is-number "^3.0.0" kind-of "^4.0.0" -has-values@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-2.0.1.tgz#3876200ff86d8a8546a9264a952c17d5fc17579d" - integrity sha512-+QdH3jOmq9P8GfdjFg0eJudqx1FqU62NQJ4P16rOEHeRdl7ckgwn6uqQjzYE0ZoHVV/e5E2esuJ5Gl5+HUW19w== - dependencies: - kind-of "^6.0.2" - has-yarn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77" @@ -2776,39 +2481,6 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" -helper-date@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/helper-date/-/helper-date-1.0.1.tgz#12fedea3ad8e44a7ca4c4efb0ff4104a5120cffb" - integrity sha512-wU3VOwwTJvGr/w5rZr3cprPHO+hIhlblTJHD6aFBrKLuNbf4lAmkawd2iK3c6NbJEvY7HAmDpqjOFSI5/+Ey2w== - dependencies: - date.js "^0.3.1" - handlebars-utils "^1.0.4" - moment "^2.18.1" - -helper-markdown@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/helper-markdown/-/helper-markdown-1.0.0.tgz#ee7e9fc554675007d37eb90f7853b13ce74f3e10" - integrity sha512-AnDqMS4ejkQK0MXze7pA9TM3pu01ZY+XXsES6gEE0RmCGk5/NIfvTn0NmItfyDOjRAzyo9z6X7YHbHX4PzIvOA== - dependencies: - handlebars-utils "^1.0.2" - highlight.js "^9.12.0" - remarkable "^1.7.1" - -helper-md@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/helper-md/-/helper-md-0.2.2.tgz#c1f59d7e55bbae23362fd8a0e971607aec69d41f" - integrity sha1-wfWdflW7riM2L9ig6XFgeuxp1B8= - dependencies: - ent "^2.2.0" - extend-shallow "^2.0.1" - fs-exists-sync "^0.1.0" - remarkable "^1.6.2" - -highlight.js@^9.12.0: - version "9.18.5" - resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.18.5.tgz#d18a359867f378c138d6819edfc2a8acd5f29825" - integrity 
sha512-a5bFyofd/BHCX52/8i8uJkjr9DYwXIPnM/plwI6W7ezItLGqzt7X2G2nXuYSfsIJdkwwj/g9DG1LkcGJI/dDoA== - hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" @@ -2826,14 +2498,6 @@ html-escaper@^2.0.0: resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== -html-tag@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/html-tag/-/html-tag-2.0.0.tgz#36c3bc8d816fd30b570d5764a497a641640c2fed" - integrity sha512-XxzooSo6oBoxBEUazgjdXj7VwTn/iSTSZzTYKzYY6I916tkaYzypHxy+pbVU1h+0UQ9JlVf5XkNQyxOAiiQO1g== - dependencies: - is-self-closing "^1.0.1" - kind-of "^6.0.0" - http-assert@^1.3.0: version "1.4.1" resolved "https://registry.yarnpkg.com/http-assert/-/http-assert-1.4.1.tgz#c5f725d677aa7e873ef736199b89686cceb37878" @@ -2964,7 +2628,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: +inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -2992,23 +2656,6 @@ inline-process-browser@^1.0.0: falafel "^1.0.1" through2 "^0.6.5" -ioredis@^4.27.1: - version "4.27.9" - resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.27.9.tgz#c27bbade9724f0b8f84c279fb1d567be785ba33d" - integrity sha512-hAwrx9F+OQ0uIvaJefuS3UTqW+ByOLyLIV+j0EH8ClNVxvFyH9Vmb08hCL4yje6mDYT5zMquShhypkd50RRzkg== - dependencies: - cluster-key-slot "^1.1.0" - debug "^4.3.1" - denque "^1.1.0" - lodash.defaults "^4.2.0" - lodash.flatten "^4.4.0" - lodash.isarguments "^3.1.0" - p-map "^2.1.0" - redis-commands "1.7.0" - redis-errors "^1.2.0" - redis-parser "^3.0.0" - standard-as-callback "^2.1.0" - is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" @@ -3096,13 +2743,6 @@ is-docker@^2.0.0: resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== -is-even@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-even/-/is-even-1.0.0.tgz#76b5055fbad8d294a86b6a949015e1c97b717c06" - integrity sha1-drUFX7rY0pSoa2qUkBXhyXtxfAY= - dependencies: - is-odd "^0.1.2" - is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" @@ -3160,13 +2800,6 @@ is-npm@^4.0.0: resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-4.0.0.tgz#c90dd8380696df87a7a6d823c20d0b12bbe3c84d" integrity sha512-96ECIfh9xtDDlPylNPXhzjsykHsMJZ18ASpaWzQyBr4YRTcVjUvzaHayDAES2oU/3KpljhHUjtSRNiDwi0F0ig== -is-number@^2.0.2: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" - integrity sha1-Afy7s5NGOlSPL0ZszhbezknbkI8= - dependencies: - kind-of "^3.0.2" - is-number@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" @@ -3184,13 +2817,6 @@ is-obj@^2.0.0: resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982" integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== -is-odd@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/is-odd/-/is-odd-0.1.2.tgz#bc573b5ce371ef2aad6e6f49799b72bef13978a7" - integrity sha1-vFc7XONx7yqtbm9JeZtyvvE5eKc= - dependencies: - is-number "^3.0.0" - is-path-inside@^3.0.1: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -3208,13 +2834,6 @@ is-potential-custom-element-name@^1.0.0: resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== -is-self-closing@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-self-closing/-/is-self-closing-1.0.1.tgz#5f406b527c7b12610176320338af0fa3896416e4" - integrity sha512-E+60FomW7Blv5GXTlYee2KDrnG6srxF7Xt1SjrhWUGUEsTFIqY/nq2y3DaftCsgUMdh89V07IVfhY9KIJhLezg== - dependencies: - self-closing-tags "^1.0.1" - is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" @@ -3261,7 +2880,7 @@ isarray@0.0.1: resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= -isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: +isarray@1.0.0, isarray@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= @@ -3271,11 +2890,6 @@ isexe@^2.0.0: resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= -isobject@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-0.2.0.tgz#a3432192f39b910b5f02cc989487836ec70aa85e" - integrity sha1-o0MhkvObkQtfAsyYlIeDbscKqF4= - isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" @@ -3820,7 +3434,7 @@ json5@^2.1.2: dependencies: minimist "^1.2.5" -jsonwebtoken@^8.2.0, jsonwebtoken@^8.5.1: +jsonwebtoken@^8.2.0: version "8.5.1" resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== @@ -3875,7 +3489,7 @@ jwa@^1.4.1: ecdsa-sig-formatter "1.0.11" safe-buffer "^5.0.1" -jws@^3.0.0, jws@^3.1.4, jws@^3.2.2: +jws@^3.2.2: version "3.2.2" resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== @@ -3904,7 +3518,7 @@ keyv@^4.0.0: dependencies: json-buffer "3.0.1" -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.1.0, kind-of@^3.2.0: +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" 
integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= @@ -3918,12 +3532,12 @@ kind-of@^4.0.0: dependencies: is-buffer "^1.1.5" -kind-of@^5.0.0, kind-of@^5.0.2: +kind-of@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== -kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: +kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.3" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== @@ -4184,31 +3798,11 @@ locate-path@^5.0.0: dependencies: p-locate "^4.1.0" -lodash._reinterpolate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d" - integrity sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= - -lodash.defaults@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" - integrity sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw= - -lodash.flatten@^4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" - integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8= - lodash.includes@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8= -lodash.isarguments@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" - integrity sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo= - lodash.isboolean@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" @@ -4234,32 +3828,12 @@ lodash.isstring@^4.0.1: resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" integrity sha1-1SfftUVuynzJu5XV2ur4i6VKVFE= -lodash.noop@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/lodash.noop/-/lodash.noop-3.0.1.tgz#38188f4d650a3a474258439b96ec45b32617133c" - integrity sha1-OBiPTWUKOkdCWEObluxFsyYXEzw= - lodash.once@^4.0.0: version "4.1.1" resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= -lodash.template@^4.4.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab" - integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== - dependencies: - lodash._reinterpolate "^3.0.0" - lodash.templatesettings "^4.0.0" - -lodash.templatesettings@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz#e481310f049d3cf6d47e912ad09313b154f0fb33" - integrity sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ== - dependencies: - lodash._reinterpolate "^3.0.0" - -lodash@^4.14.0, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: +lodash@^4.17.19, lodash@^4.7.0: version "4.17.21" resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -4339,7 +3913,7 @@ methods@^1.1.2: resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= -micromatch@^3.1.4, micromatch@^3.1.5: +micromatch@^3.1.4: version "3.1.10" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== @@ -4395,11 +3969,6 @@ mime-types@^2.1.18, mime-types@~2.1.24: dependencies: mime-db "1.44.0" -mime@^1.4.1: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - mime@^2.4.6: version "2.5.2" resolved "https://registry.yarnpkg.com/mime/-/mime-2.5.2.tgz#6e3dc6cc2b9510643830e5f19d5cb753da5eeabe" @@ -4440,11 +4009,6 @@ mixin-deep@^1.2.0: for-in "^1.0.2" is-extendable "^1.0.1" -mkdirp-classic@^0.5.2: - version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" - integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== - mkdirp@^0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" @@ -4452,11 +4016,6 @@ mkdirp@^0.5.0: dependencies: minimist "^1.2.5" -moment@^2.18.1: - version "2.29.1" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" - integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== - mri@1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/mri/-/mri-1.1.4.tgz#7cb1dd1b9b40905f1fac053abe25b6720f44744a" @@ -4509,11 +4068,6 @@ negotiator@0.6.2: resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== -neo-async@^2.6.0: - version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" @@ -4529,11 +4083,6 @@ node-fetch@^2.6.1: resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== -node-forge@^0.7.1: - version "0.7.6" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac" - integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw== - node-gyp-build@~4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" @@ -4760,11 +4309,6 @@ p-locate@^4.1.0: dependencies: p-limit "^2.2.0" 
-p-map@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" - integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== - p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" @@ -4805,14 +4349,6 @@ pascalcase@^0.1.1: resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= -passport-google-auth@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/passport-google-auth/-/passport-google-auth-1.0.2.tgz#8b300b5aa442ef433de1d832ed3112877d0b2938" - integrity sha1-izALWqRC70M94dgy7TESh30LKTg= - dependencies: - googleapis "^16.0.0" - passport-strategy "1.x" - passport-google-oauth1@1.x.x: version "1.0.0" resolved "https://registry.yarnpkg.com/passport-google-oauth1/-/passport-google-oauth1-1.0.0.tgz#af74a803df51ec646f66a44d82282be6f108e0cc" @@ -4870,7 +4406,7 @@ passport-oauth2@1.x.x: uid2 "0.0.x" utils-merge "1.x.x" -passport-strategy@1.x, passport-strategy@1.x.x, passport-strategy@^1.0.0: +passport-strategy@1.x.x, passport-strategy@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" integrity sha1-tVOaqPwiWj0a0XlHbd8ja0QPUuQ= @@ -5160,11 +4696,6 @@ private@^0.1.6, private@~0.1.5: resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - prompts@^2.0.1: version "2.4.1" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.1.tgz#befd3b1195ba052f9fd2fde8a486c4e82ee77f61" @@ -5309,7 +4840,7 @@ readable-stream@1.1.14: isarray "0.0.1" string_decoder "~0.10.x" -"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: +"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -5333,19 +4864,6 @@ readable-stream@~0.0.2: resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-0.0.4.tgz#f32d76e3fb863344a548d79923007173665b3b8d" integrity sha1-8y124/uGM0SlSNeZIwBxc2ZbO40= -readable-stream@~2.3.6: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - readdirp@~3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e" @@ 
-5373,23 +4891,6 @@ recast@^0.11.17: private "~0.1.5" source-map "~0.5.0" -redis-commands@1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.7.0.tgz#15a6fea2d58281e27b1cd1acfb4b293e278c3a89" - integrity sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ== - -redis-errors@^1.0.0, redis-errors@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/redis-errors/-/redis-errors-1.2.0.tgz#eb62d2adb15e4eaf4610c04afe1529384250abad" - integrity sha1-62LSrbFeTq9GEMBK/hUpOEJQq60= - -redis-parser@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/redis-parser/-/redis-parser-3.0.0.tgz#b66d828cdcafe6b4b8a428a7def4c6bcac31c8b4" - integrity sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ= - dependencies: - redis-errors "^1.0.0" - regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" @@ -5412,21 +4913,6 @@ registry-url@^5.0.0: dependencies: rc "^1.2.8" -relative@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/relative/-/relative-3.0.2.tgz#0dcd8ec54a5d35a3c15e104503d65375b5a5367f" - integrity sha1-Dc2OxUpdNaPBXhBFA9ZTdbWlNn8= - dependencies: - isobject "^2.0.0" - -remarkable@^1.6.2, remarkable@^1.7.1: - version "1.7.4" - resolved "https://registry.yarnpkg.com/remarkable/-/remarkable-1.7.4.tgz#19073cb960398c87a7d6546eaa5e50d2022fcd00" - integrity sha512-e6NKUXgX95whv7IgddywbeN/ItCkWbISmc2DiqHJb0wTrqZIexqdco5b8Z3XZoo/48IdNVKM9ZCvTPJ4F5uvhg== - dependencies: - argparse "^1.0.10" - autolinker "~0.28.0" - remove-trailing-separator@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" @@ -5458,7 +4944,7 @@ request-promise-native@^1.0.9: stealthy-require "^1.1.1" tough-cookie "^2.3.3" -request@^2.72.0, request@^2.74.0, request@^2.88.0, request@^2.88.2: +request@^2.88.0, request@^2.88.2: version "2.88.2" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== @@ -5563,7 +5049,7 @@ rsvp@^4.8.4: resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734" integrity sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA== -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@5.1.2, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== @@ -5600,11 +5086,6 @@ sane@^4.0.3: minimist "^1.1.1" walker "~1.0.5" -sanitize-s3-objectkey@^0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/sanitize-s3-objectkey/-/sanitize-s3-objectkey-0.0.1.tgz#efa9887cd45275b40234fb4bb12fc5754fe64e7e" - integrity sha512-ZTk7aqLxy4sD40GWcYWoLfbe05XLmkKvh6vGKe13ADlei24xlezcvjgKy1qRArlaIbIMYaqK7PCalvZtulZlaQ== - sax@1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" @@ -5622,11 +5103,6 @@ saxes@^5.0.1: dependencies: xmlchars "^2.2.0" -self-closing-tags@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/self-closing-tags/-/self-closing-tags-1.0.1.tgz#6c5fa497994bb826b484216916371accee490a5d" - integrity sha512-7t6hNbYMxM+VHXTgJmxwgZgLGktuXtVVD5AivWzNTdJBM4DBjnDKDzkf2SrNjihaArpeJYNjxkELBu1evI4lQA== - semver-diff@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b" @@ -5634,7 +5110,7 @@ semver-diff@^3.1.1: dependencies: semver "^6.3.0" -"semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1: +"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0, semver@^5.7.1: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== @@ -5715,11 +5191,6 @@ shellwords@^0.1.1: resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" integrity sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww== -shimmer@^1.2.0: - version "1.2.1" - resolved "https://registry.yarnpkg.com/shimmer/-/shimmer-1.2.1.tgz#610859f7de327b587efebf501fb43117f9aff337" - integrity sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw== - side-channel@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" @@ -5900,11 +5371,6 @@ sshpk@^1.7.0: safer-buffer "^2.0.2" tweetnacl "~0.14.0" -stack-chain@^1.3.7: - version "1.3.7" - resolved "https://registry.yarnpkg.com/stack-chain/-/stack-chain-1.3.7.tgz#d192c9ff4ea6a22c94c4dd459171e3f00cea1285" - integrity sha1-0ZLJ/06moiyUxN1FkXHj8AzqEoU= - stack-utils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277" @@ -5912,11 +5378,6 @@ stack-utils@^2.0.2: dependencies: escape-string-regexp "^2.0.0" -standard-as-callback@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/standard-as-callback/-/standard-as-callback-2.1.0.tgz#8953fc05359868a77b5b9739a665c5977bb7df45" - integrity sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A== - static-extend@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" @@ -5953,11 +5414,6 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -string-template@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/string-template/-/string-template-1.0.0.tgz#9e9f2233dc00f218718ec379a28a5673ecca8b96" - integrity sha1-np8iM9wA8hhxjsN5oopWc+zKi5Y= - string-width@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -5988,13 +5444,6 @@ string_decoder@~0.10.x: resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - strip-ansi@^5.1.0: version "5.2.0" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" @@ -6034,11 +5483,6 @@ strip-json-comments@~2.0.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= -striptags@^3.1.1: - version "3.2.0" - resolved "https://registry.yarnpkg.com/striptags/-/striptags-3.2.0.tgz#cc74a137db2de8b0b9a370006334161f7dd67052" - integrity sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw== - sublevel-pouchdb@7.2.2: version "7.2.2" resolved "https://registry.yarnpkg.com/sublevel-pouchdb/-/sublevel-pouchdb-7.2.2.tgz#49e46cd37883bf7ff5006d7c5b9bcc7bcc1f422f" @@ -6101,27 +5545,6 @@ symbol-tree@^3.2.4: resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== -tar-fs@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.1.4" - -tar-stream@^2.1.4: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - term-size@^2.1.0: version "2.2.1" resolved "https://registry.yarnpkg.com/term-size/-/term-size-2.2.1.tgz#2a6a54840432c2fb6320fea0f415531e90189f54" @@ -6165,14 +5588,6 @@ through2@^0.6.2, through2@^0.6.5: readable-stream ">=1.0.33-1 <1.1.0-0" xtend ">=4.0.0 <4.1.0-0" -through2@^2.0.0: - version "2.0.5" - resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" - integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== - dependencies: - readable-stream "~2.3.6" - xtend "~4.0.1" - through@~2.3.4: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" @@ -6184,20 +5599,15 @@ tiny-queue@^0.2.0: integrity sha1-JaZ/LG4lOyypQZd7XvdELvl6YEY= tmpl@1.0.x: - version "1.0.4" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" - integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= -to-gfm-code-block@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/to-gfm-code-block/-/to-gfm-code-block-0.1.1.tgz#25d045a5fae553189e9637b590900da732d8aa82" - integrity sha1-JdBFpfrlUxielje1kJANpzLYqoI= - to-object-path@^0.3.0: version "0.3.0" resolved 
"https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" @@ -6330,18 +5740,6 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typeof-article@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/typeof-article/-/typeof-article-0.1.1.tgz#9f07e733c3fbb646ffa9e61c08debacd460e06af" - integrity sha1-nwfnM8P7tkb/qeYcCN66zUYOBq8= - dependencies: - kind-of "^3.1.0" - -uglify-js@^3.1.4: - version "3.14.2" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.14.2.tgz#d7dd6a46ca57214f54a2d0a43cad0f35db82ac99" - integrity sha512-rtPMlmcO4agTUfz10CbgJ1k6UAoXM2gWb3GoMPPZB/+/Ackf8lNWk11K4rYi2D0apgoFRLtQOZhb+/iGNJq26A== - uid2@0.0.x: version "0.0.3" resolved "https://registry.yarnpkg.com/uid2/-/uid2-0.0.3.tgz#483126e11774df2f71b8b639dcd799c376162b82" @@ -6459,7 +5857,7 @@ use@^3.1.0: resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== -util-deprecate@^1.0.1, util-deprecate@~1.0.1: +util-deprecate@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= @@ -6484,7 +5882,7 @@ uuid@^3.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuid@^8.3.0, uuid@^8.3.2: +uuid@^8.3.0: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== @@ -6616,11 +6014,6 @@ word-wrap@~1.2.3: resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== -wordwrap@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= - wrap-ansi@^6.2.0: version "6.2.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" @@ -6692,7 +6085,7 @@ xmlchars@^2.2.0: resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== -"xtend@>=4.0.0 <4.1.0-0", xtend@^4.0.2, xtend@~4.0.0, xtend@~4.0.1: +"xtend@>=4.0.0 <4.1.0-0", xtend@^4.0.2, xtend@~4.0.0: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== @@ -6732,17 +6125,7 @@ yargs@^15.4.1: y18n "^4.0.0" yargs-parser "^18.1.2" -year@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/year/-/year-0.2.1.tgz#4083ae520a318b23ec86037f3000cb892bdf9bb0" - integrity sha1-QIOuUgoxiyPshgN/MADLiSvfm7A= - ylru@^1.2.0: version "1.2.1" resolved "https://registry.yarnpkg.com/ylru/-/ylru-1.2.1.tgz#f576b63341547989c1de7ba288760923b27fe84f" integrity 
sha512-faQrqNMzcPCHGVC2aaOINk13K+aaBDUPjGWl0teOXywElLjyVAB6Oe2jj62jHYtwsU49jXhScYbvPENK+6zAvQ== - -zlib@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/zlib/-/zlib-1.0.5.tgz#6e7c972fc371c645a6afb03ab14769def114fcc0" - integrity sha1-bnyXL8NxxkWmr7A6sUdp3vEU/MA=