diff --git a/README.md b/README.md index 0f4cfe31c2..7d11ea570f 100644 --- a/README.md +++ b/README.md @@ -201,9 +201,6 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
seoulaja

🌍
Maurits Lourens

⚠️ 💻 - -
Rory Powell

🚇 ⚠️ 💻 - diff --git a/charts/budibase/templates/app-service-deployment.yaml b/charts/budibase/templates/app-service-deployment.yaml index 8086c0ab20..d9def8c641 100644 --- a/charts/budibase/templates/app-service-deployment.yaml +++ b/charts/budibase/templates/app-service-deployment.yaml @@ -99,13 +99,17 @@ spec: - name: PLATFORM_URL value: {{ .Values.globals.platformUrl | quote }} - name: USE_QUOTAS - value: "1" + value: {{ .Values.globals.useQuotas | quote }} + - name: EXCLUDE_QUOTAS_TENANTS + value: {{ .Values.globals.excludeQuotasTenants | quote }} - name: ACCOUNT_PORTAL_URL value: {{ .Values.globals.accountPortalUrl | quote }} - name: ACCOUNT_PORTAL_API_KEY value: {{ .Values.globals.accountPortalApiKey | quote }} - name: COOKIE_DOMAIN value: {{ .Values.globals.cookieDomain | quote }} + - name: HTTP_MIGRATIONS + value: {{ .Values.globals.httpMigrations | quote }} image: budibase/apps:{{ .Values.globals.appVersion }} imagePullPolicy: Always name: bbapps diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml index 4666d01c70..bb582f69c4 100644 --- a/charts/budibase/values.yaml +++ b/charts/budibase/values.yaml @@ -93,10 +93,13 @@ globals: logLevel: info selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs + useQuotas: "0" + excludeQuotasTenants: "" # comma separated list of tenants to exclude from quotas accountPortalUrl: "" accountPortalApiKey: "" cookieDomain: "" platformUrl: "" + httpMigrations: "0" createSecrets: true # creates an internal API key, JWT secrets and redis password for you @@ -239,7 +242,8 @@ couchdb: hosts: - chart-example.local path: / - annotations: [] + annotations: + [] # kubernetes.io/ingress.class: nginx # kubernetes.io/tls-acme: "true" tls: diff --git a/lerna.json b/lerna.json index 78af255b1f..a7c955e9de 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "1.0.44-alpha.9", + "version": "1.0.49-alpha.4", "npmClient": "yarn", "packages": [ "packages/*" diff --git a/packages/backend-core/db.js b/packages/backend-core/db.js index a7b38821a7..47854ca9c7 100644 --- a/packages/backend-core/db.js +++ b/packages/backend-core/db.js @@ -1,4 +1,5 @@ module.exports = { ...require("./src/db/utils"), ...require("./src/db/constants"), + ...require("./src/db/views"), } diff --git a/packages/backend-core/migrations.js b/packages/backend-core/migrations.js new file mode 100644 index 0000000000..2de19ebf65 --- /dev/null +++ b/packages/backend-core/migrations.js @@ -0,0 +1 @@ +module.exports = require("./src/migrations") diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index 6069396490..27a55ab020 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/backend-core", - "version": "1.0.44-alpha.9", + "version": "1.0.49-alpha.4", "description": "Budibase backend core libraries used in server and worker", "main": "src/index.js", "author": "Budibase", diff --git a/packages/backend-core/src/auth.js b/packages/backend-core/src/auth.js index 7f66d887ae..f6d53522d5 100644 --- a/packages/backend-core/src/auth.js +++ b/packages/backend-core/src/auth.js @@ -12,6 +12,8 @@ const { tenancy, appTenancy, authError, + csrf, + internalApi, } = require("./middleware") // Strategies @@ -42,4 +44,6 @@ module.exports = { buildAppTenancyMiddleware: appTenancy, auditLog, authError, + buildCsrfMiddleware: csrf, + internalApi, } diff --git 
a/packages/backend-core/src/constants.js b/packages/backend-core/src/constants.js index 8e6b01608e..559dc0e6b2 100644 --- a/packages/backend-core/src/constants.js +++ b/packages/backend-core/src/constants.js @@ -7,8 +7,8 @@ exports.Cookies = { CurrentApp: "budibase:currentapp", Auth: "budibase:auth", Init: "budibase:init", + DatasourceAuth: "budibase:datasourceauth", OIDC_CONFIG: "budibase:oidc:config", - RETURN_URL: "budibase:returnurl", } exports.Headers = { @@ -18,6 +18,7 @@ exports.Headers = { TYPE: "x-budibase-type", TENANT_ID: "x-budibase-tenant-id", TOKEN: "x-budibase-token", + CSRF_TOKEN: "x-csrf-token", } exports.GlobalRoles = { diff --git a/packages/backend-core/src/db/constants.js b/packages/backend-core/src/db/constants.js index ecdaae5bad..2affb09c7c 100644 --- a/packages/backend-core/src/db/constants.js +++ b/packages/backend-core/src/db/constants.js @@ -21,6 +21,7 @@ exports.StaticDatabases = { name: "global-db", docs: { apiKeys: "apikeys", + usageQuota: "usage_quota", }, }, // contains information about tenancy and so on @@ -28,7 +29,6 @@ exports.StaticDatabases = { name: "global-info", docs: { tenants: "tenants", - usageQuota: "usage_quota", }, }, } diff --git a/packages/backend-core/src/db/utils.js b/packages/backend-core/src/db/utils.js index 5830de4721..2bc5462646 100644 --- a/packages/backend-core/src/db/utils.js +++ b/packages/backend-core/src/db/utils.js @@ -450,7 +450,7 @@ async function getScopedConfig(db, params) { function generateNewUsageQuotaDoc() { return { - _id: StaticDatabases.PLATFORM_INFO.docs.usageQuota, + _id: StaticDatabases.GLOBAL.docs.usageQuota, quotaReset: Date.now() + 2592000000, usageQuota: { automationRuns: 0, diff --git a/packages/backend-core/src/index.js b/packages/backend-core/src/index.js index cd3a3f5c97..b0bc524d9b 100644 --- a/packages/backend-core/src/index.js +++ b/packages/backend-core/src/index.js @@ -14,4 +14,5 @@ module.exports = { cache: require("../cache"), auth: require("../auth"), constants: require("../constants"), + migrations: require("../migrations"), } diff --git a/packages/backend-core/src/middleware/authenticated.js b/packages/backend-core/src/middleware/authenticated.js index 87bd4d35ce..4978f7b9dc 100644 --- a/packages/backend-core/src/middleware/authenticated.js +++ b/packages/backend-core/src/middleware/authenticated.js @@ -60,6 +60,7 @@ module.exports = ( } else { user = await getUser(userId, session.tenantId) } + user.csrfToken = session.csrfToken delete user.password authenticated = true } catch (err) { diff --git a/packages/backend-core/src/middleware/csrf.js b/packages/backend-core/src/middleware/csrf.js new file mode 100644 index 0000000000..12bd9473e6 --- /dev/null +++ b/packages/backend-core/src/middleware/csrf.js @@ -0,0 +1,78 @@ +const { Headers } = require("../constants") +const { buildMatcherRegex, matches } = require("./matchers") + +/** + * GET, HEAD and OPTIONS methods are considered safe operations + * + * POST, PUT, PATCH, and DELETE methods, being state changing verbs, + * should have a CSRF token attached to the request + */ +const EXCLUDED_METHODS = ["GET", "HEAD", "OPTIONS"] + +/** + * There are only three content type values that can be used in cross domain requests. + * If any other value is used, e.g. application/json, the browser will first make an OPTIONS + * request which will be protected by CORS. + */ +const INCLUDED_CONTENT_TYPES = [ + "application/x-www-form-urlencoded", + "multipart/form-data", + "text/plain", +] + +/** + * Validate the CSRF token generated against the user session. 
+ * Compare the token with the x-csrf-token header. + * + * If the token is not found within the request or the value provided + * does not match the value within the user session, the request is rejected. + * + * CSRF protection provided using the 'Synchronizer Token Pattern' + * https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern + * + */ +module.exports = (opts = { noCsrfPatterns: [] }) => { + const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns) + return async (ctx, next) => { + // don't apply for excluded paths + const found = matches(ctx, noCsrfOptions) + if (found) { + return next() + } + + // don't apply for the excluded http methods + if (EXCLUDED_METHODS.indexOf(ctx.method) !== -1) { + return next() + } + + // don't apply when the content type isn't supported + let contentType = ctx.get("content-type") + ? ctx.get("content-type").toLowerCase() + : "" + if ( + !INCLUDED_CONTENT_TYPES.filter(type => contentType.includes(type)).length + ) { + return next() + } + + // don't apply csrf when the internal api key has been used + if (ctx.internal) { + return next() + } + + // apply csrf when there is a token in the session (new logins) + // in future there should be a hard requirement that the token is present + const userToken = ctx.user.csrfToken + if (!userToken) { + return next() + } + + // reject if no token in request or mismatch + const requestToken = ctx.get(Headers.CSRF_TOKEN) + if (!requestToken || requestToken !== userToken) { + ctx.throw(403, "Invalid CSRF token") + } + + return next() + } +} diff --git a/packages/backend-core/src/middleware/index.js b/packages/backend-core/src/middleware/index.js index cf8676a2bc..5878479152 100644 --- a/packages/backend-core/src/middleware/index.js +++ b/packages/backend-core/src/middleware/index.js @@ -7,6 +7,9 @@ const authenticated = require("./authenticated") const auditLog = require("./auditLog") const tenancy = require("./tenancy") const appTenancy = require("./appTenancy") +const internalApi = require("./internalApi") +const datasourceGoogle = require("./passport/datasource/google") +const csrf = require("./csrf") module.exports = { google, @@ -18,4 +21,9 @@ module.exports = { tenancy, appTenancy, authError, + internalApi, + datasource: { + google: datasourceGoogle, + }, + csrf, } diff --git a/packages/backend-core/src/middleware/internalApi.js b/packages/backend-core/src/middleware/internalApi.js new file mode 100644 index 0000000000..275d559a9e --- /dev/null +++ b/packages/backend-core/src/middleware/internalApi.js @@ -0,0 +1,14 @@ +const env = require("../environment") +const { Headers } = require("../constants") + +/** + * API Key only endpoint. 
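Illustrative usage (not part of the diff): the CSRF check above and this internal API key check are both plain Koa middleware, so a consuming service could mount them roughly as follows. Only `buildCsrfMiddleware`, `internalApi` and the `x-csrf-token` header come from this change; the import path and the excluded patterns are assumptions.

```js
// Minimal sketch, assuming the auth module is exposed through a root shim
// in the same way the db.js/migrations.js shims elsewhere in this diff are.
const Koa = require("koa")
const {
  buildCsrfMiddleware,
  internalApi,
} = require("@budibase/backend-core/auth")

const app = new Koa()

// Authentication must run first so ctx.user.csrfToken is populated from the
// session; state-changing, form-encoded requests are then rejected with a 403
// unless they carry a matching x-csrf-token header. JSON requests are skipped
// here because a cross-origin JSON POST already triggers a CORS preflight.
app.use(buildCsrfMiddleware({ noCsrfPatterns: ["/api/webhooks"] })) // patterns are hypothetical

// Purely internal routes could be guarded with the API key middleware instead:
// app.use(internalApi)
```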
+ */ +module.exports = async (ctx, next) => { + const apiKey = ctx.request.headers[Headers.API_KEY] + if (apiKey !== env.INTERNAL_API_KEY) { + ctx.throw(403, "Unauthorized") + } + + return next() +} diff --git a/packages/backend-core/src/middleware/passport/datasource/google.js b/packages/backend-core/src/middleware/passport/datasource/google.js new file mode 100644 index 0000000000..bfc2e4a61e --- /dev/null +++ b/packages/backend-core/src/middleware/passport/datasource/google.js @@ -0,0 +1,76 @@ +const { getScopedConfig } = require("../../../db/utils") +const { getGlobalDB } = require("../../../tenancy") +const google = require("../google") +const { Configs, Cookies } = require("../../../constants") +const { clearCookie, getCookie } = require("../../../utils") +const { getDB } = require("../../../db") + +async function preAuth(passport, ctx, next) { + const db = getGlobalDB() + // get the relevant config + const config = await getScopedConfig(db, { + type: Configs.GOOGLE, + workspace: ctx.query.workspace, + }) + const publicConfig = await getScopedConfig(db, { + type: Configs.SETTINGS, + }) + let callbackUrl = `${publicConfig.platformUrl}/api/global/auth/datasource/google/callback` + const strategy = await google.strategyFactory(config, callbackUrl) + + if (!ctx.query.appId || !ctx.query.datasourceId) { + ctx.throw(400, "appId and datasourceId query params not present.") + } + + return passport.authenticate(strategy, { + scope: ["profile", "email", "https://www.googleapis.com/auth/spreadsheets"], + accessType: "offline", + prompt: "consent", + })(ctx, next) +} + +async function postAuth(passport, ctx, next) { + const db = getGlobalDB() + + const config = await getScopedConfig(db, { + type: Configs.GOOGLE, + workspace: ctx.query.workspace, + }) + + const publicConfig = await getScopedConfig(db, { + type: Configs.SETTINGS, + }) + + let callbackUrl = `${publicConfig.platformUrl}/api/global/auth/datasource/google/callback` + const strategy = await google.strategyFactory( + config, + callbackUrl, + (accessToken, refreshToken, profile, done) => { + clearCookie(ctx, Cookies.DatasourceAuth) + done(null, { accessToken, refreshToken }) + } + ) + + const authStateCookie = getCookie(ctx, Cookies.DatasourceAuth) + + return passport.authenticate( + strategy, + { successRedirect: "/", failureRedirect: "/error" }, + async (err, tokens) => { + // update the DB for the datasource with all the user info + const db = getDB(authStateCookie.appId) + const datasource = await db.get(authStateCookie.datasourceId) + if (!datasource.config) { + datasource.config = {} + } + datasource.config.auth = { type: "google", ...tokens } + await db.put(datasource) + ctx.redirect( + `/builder/app/${authStateCookie.appId}/data/datasource/${authStateCookie.datasourceId}` + ) + } + )(ctx, next) +} + +exports.preAuth = preAuth +exports.postAuth = postAuth diff --git a/packages/backend-core/src/migrations/index.js b/packages/backend-core/src/migrations/index.js index 7492e94511..e2ed75d407 100644 --- a/packages/backend-core/src/migrations/index.js +++ b/packages/backend-core/src/migrations/index.js @@ -1,18 +1,17 @@ +const { DEFAULT_TENANT_ID } = require("../constants") const { DocumentTypes } = require("../db/constants") -const { getGlobalDB } = require("../tenancy") +const { getAllApps } = require("../db/utils") +const environment = require("../environment") +const { + doInTenant, + getTenantIds, + getGlobalDBName, + getTenantId, +} = require("../tenancy") -exports.MIGRATION_DBS = { - GLOBAL_DB: "GLOBAL_DB", -} - 
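Illustrative wiring for the preAuth/postAuth pair added above (not part of the diff). The two paths are taken from URLs that appear elsewhere in this change, in GoogleButton.svelte and in the callback URL built inside postAuth; the @koa/router and koa-passport instances are assumptions.

```js
// Sketch only, written as if from inside backend-core; the router and
// passport wiring shown here is assumed, not part of the diff.
const Router = require("@koa/router")
const passport = require("koa-passport")
const google = require("./middleware/passport/datasource/google")

const router = new Router()

// Starts the consent flow; appId and datasourceId are required query params
// (postAuth later reads them back out of the budibase:datasourceauth cookie).
router.get("/api/global/auth/:tenantId/datasource/google", (ctx, next) =>
  google.preAuth(passport, ctx, next)
)

// Google redirects back here; the granted tokens are written onto the
// datasource's config.auth and the user is redirected into the builder.
router.get("/api/global/auth/datasource/google/callback", (ctx, next) =>
  google.postAuth(passport, ctx, next)
)
```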
-exports.MIGRATIONS = { - USER_EMAIL_VIEW_CASING: "user_email_view_casing", -} - -const DB_LOOKUP = { - [exports.MIGRATION_DBS.GLOBAL_DB]: [ - exports.MIGRATIONS.USER_EMAIL_VIEW_CASING, - ], +exports.MIGRATION_TYPES = { + GLOBAL: "global", // run once, recorded in global db, global db is provided as an argument + APP: "app", // run per app, recorded in each app db, app db is provided as an argument } exports.getMigrationsDoc = async db => { @@ -26,36 +25,90 @@ exports.getMigrationsDoc = async db => { } } -exports.migrateIfRequired = async (migrationDb, migrationName, migrateFn) => { - try { - let db - if (migrationDb === exports.MIGRATION_DBS.GLOBAL_DB) { - db = getGlobalDB() - } else { - throw new Error(`Unrecognised migration db [${migrationDb}]`) - } +const runMigration = async (CouchDB, migration, options = {}) => { + const tenantId = getTenantId() + const migrationType = migration.type + const migrationName = migration.name - if (!DB_LOOKUP[migrationDb].includes(migrationName)) { - throw new Error( - `Unrecognised migration name [${migrationName}] for db [${migrationDb}]` + // get the db to store the migration in + let dbNames + if (migrationType === exports.MIGRATION_TYPES.GLOBAL) { + dbNames = [getGlobalDBName()] + } else if (migrationType === exports.MIGRATION_TYPES.APP) { + const apps = await getAllApps(CouchDB, migration.opts) + dbNames = apps.map(app => app.appId) + } else { + throw new Error( + `[Tenant: ${tenantId}] Unrecognised migration type [${migrationType}]` + ) + } + + // run the migration against each db + for (const dbName of dbNames) { + const db = new CouchDB(dbName) + try { + const doc = await exports.getMigrationsDoc(db) + + // exit if the migration has been performed already + if (doc[migrationName]) { + if ( + options.force && + options.force[migrationType] && + options.force[migrationType].includes(migrationName) + ) { + console.log( + `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Forcing` + ) + } else { + // the migration has already been performed + continue + } + } + + console.log( + `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Running` ) + // run the migration with tenant context + await migration.fn(db) + console.log( + `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Complete` + ) + + // mark as complete + doc[migrationName] = Date.now() + await db.put(doc) + } catch (err) { + console.error( + `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Error: `, + err + ) + throw err } - - const doc = await exports.getMigrationsDoc(db) - // exit if the migration has been performed - if (doc[migrationName]) { - return - } - - console.log(`Performing migration: ${migrationName}`) - await migrateFn() - console.log(`Migration complete: ${migrationName}`) - - // mark as complete - doc[migrationName] = Date.now() - await db.put(doc) - } catch (err) { - console.error(`Error performing migration: ${migrationName}: `, err) - throw err } } + +exports.runMigrations = async (CouchDB, migrations, options = {}) => { + console.log("Running migrations") + let tenantIds + if (environment.MULTI_TENANCY) { + if (!options.tenantIds || !options.tenantIds.length) { + // run for all tenants + tenantIds = await getTenantIds() + } + } else { + // single tenancy + tenantIds = [DEFAULT_TENANT_ID] + } + + // for all tenants + for (const tenantId of tenantIds) { + // for all migrations + for (const migration of migrations) { + // run the migration + await doInTenant(tenantId, () => + runMigration(CouchDB, 
migration, options) + ) + } + } + console.log("Migrations complete") +} diff --git a/packages/backend-core/src/migrations/tests/__snapshots__/index.spec.js.snap b/packages/backend-core/src/migrations/tests/__snapshots__/index.spec.js.snap index e9a18eadde..222c3b1228 100644 --- a/packages/backend-core/src/migrations/tests/__snapshots__/index.spec.js.snap +++ b/packages/backend-core/src/migrations/tests/__snapshots__/index.spec.js.snap @@ -3,7 +3,7 @@ exports[`migrations should match snapshot 1`] = ` Object { "_id": "migrations", - "_rev": "1-af6c272fe081efafecd2ea49a8fcbb40", - "user_email_view_casing": 1487076708000, + "_rev": "1-6277abc4e3db950221768e5a2618a059", + "test": 1487076708000, } `; diff --git a/packages/backend-core/src/migrations/tests/index.spec.js b/packages/backend-core/src/migrations/tests/index.spec.js index 0ed16fc184..12a2e54cb3 100644 --- a/packages/backend-core/src/migrations/tests/index.spec.js +++ b/packages/backend-core/src/migrations/tests/index.spec.js @@ -1,7 +1,7 @@ require("../../tests/utilities/dbConfig") -const { migrateIfRequired, MIGRATION_DBS, MIGRATIONS, getMigrationsDoc } = require("../index") -const database = require("../../db") +const { runMigrations, getMigrationsDoc } = require("../index") +const CouchDB = require("../../db").getCouch() const { StaticDatabases, } = require("../../db/utils") @@ -13,8 +13,14 @@ describe("migrations", () => { const migrationFunction = jest.fn() + const MIGRATIONS = [{ + type: "global", + name: "test", + fn: migrationFunction + }] + beforeEach(() => { - db = database.getDB(StaticDatabases.GLOBAL.name) + db = new CouchDB(StaticDatabases.GLOBAL.name) }) afterEach(async () => { @@ -22,39 +28,29 @@ describe("migrations", () => { await db.destroy() }) - const validMigration = () => { - return migrateIfRequired(MIGRATION_DBS.GLOBAL_DB, MIGRATIONS.USER_EMAIL_VIEW_CASING, migrationFunction) + const migrate = () => { + return runMigrations(CouchDB, MIGRATIONS) } it("should run a new migration", async () => { - await validMigration() + await migrate() expect(migrationFunction).toHaveBeenCalled() + const doc = await getMigrationsDoc(db) + expect(doc.test).toBeDefined() }) it("should match snapshot", async () => { - await validMigration() + await migrate() const doc = await getMigrationsDoc(db) expect(doc).toMatchSnapshot() }) it("should skip a previously run migration", async () => { - await validMigration() - await validMigration() + await migrate() + const previousMigrationTime = await getMigrationsDoc(db).test + await migrate() + const currentMigrationTime = await getMigrationsDoc(db).test expect(migrationFunction).toHaveBeenCalledTimes(1) + expect(currentMigrationTime).toBe(previousMigrationTime) }) - - it("should reject an unknown migration name", async () => { - expect(async () => { - await migrateIfRequired(MIGRATION_DBS.GLOBAL_DB, "bogus_name", migrationFunction) - }).rejects.toThrow() - expect(migrationFunction).not.toHaveBeenCalled() - }) - - it("should reject an unknown database name", async () => { - expect(async () => { - await migrateIfRequired("bogus_db", MIGRATIONS.USER_EMAIL_VIEW_CASING, migrationFunction) - }).rejects.toThrow() - expect(migrationFunction).not.toHaveBeenCalled() - }) - }) \ No newline at end of file diff --git a/packages/backend-core/src/security/sessions.js b/packages/backend-core/src/security/sessions.js index ad21627bd9..bbe6be299d 100644 --- a/packages/backend-core/src/security/sessions.js +++ b/packages/backend-core/src/security/sessions.js @@ -1,4 +1,5 @@ const redis = 
require("../redis/authRedis") +const { v4: uuidv4 } = require("uuid") // a week in seconds const EXPIRY_SECONDS = 86400 * 7 @@ -16,6 +17,9 @@ function makeSessionID(userId, sessionId) { exports.createASession = async (userId, session) => { const client = await redis.getSessionClient() const sessionId = session.sessionId + if (!session.csrfToken) { + session.csrfToken = uuidv4() + } session = { createdAt: new Date().toISOString(), lastAccessedAt: new Date().toISOString(), diff --git a/packages/backend-core/src/tenancy/tenancy.js b/packages/backend-core/src/tenancy/tenancy.js index 2cd05ea925..de597eac01 100644 --- a/packages/backend-core/src/tenancy/tenancy.js +++ b/packages/backend-core/src/tenancy/tenancy.js @@ -148,3 +148,15 @@ exports.isUserInAppTenant = (appId, user = null) => { const tenantId = exports.getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID return tenantId === userTenantId } + +exports.getTenantIds = async () => { + const db = getDB(PLATFORM_INFO_DB) + let tenants + try { + tenants = await db.get(TENANT_DOC) + } catch (err) { + // if theres an error the doc doesn't exist, no tenants exist + return [] + } + return (tenants && tenants.tenantIds) || [] +} diff --git a/packages/backend-core/src/utils.js b/packages/backend-core/src/utils.js index 85dd32946f..6c71c51b9d 100644 --- a/packages/backend-core/src/utils.js +++ b/packages/backend-core/src/utils.js @@ -20,9 +20,6 @@ const { hash } = require("./hashing") const userCache = require("./cache/user") const env = require("./environment") const { getUserSessions, invalidateSessions } = require("./security/sessions") -const { migrateIfRequired } = require("./migrations") -const { USER_EMAIL_VIEW_CASING } = require("./migrations").MIGRATIONS -const { GLOBAL_DB } = require("./migrations").MIGRATION_DBS const APP_PREFIX = DocumentTypes.APP + SEPARATOR @@ -96,12 +93,7 @@ exports.getCookie = (ctx, name) => { * @param {string|object} value The value of cookie which will be set. * @param {object} opts options like whether to sign. 
*/ -exports.setCookie = ( - ctx, - value, - name = "builder", - opts = { sign: true, requestDomain: false } -) => { +exports.setCookie = (ctx, value, name = "builder", opts = { sign: true }) => { if (value && opts && opts.sign) { value = jwt.sign(value, options.secretOrKey) } @@ -113,7 +105,7 @@ exports.setCookie = ( overwrite: true, } - if (environment.COOKIE_DOMAIN && !opts.requestDomain) { + if (environment.COOKIE_DOMAIN) { config.domain = environment.COOKIE_DOMAIN } @@ -149,11 +141,6 @@ exports.getGlobalUserByEmail = async email => { } const db = getGlobalDB() - await migrateIfRequired(GLOBAL_DB, USER_EMAIL_VIEW_CASING, async () => { - // re-create the view with latest changes - await createUserEmailView(db) - }) - try { let users = ( await db.query(`database/${ViewNames.USER_BY_EMAIL}`, { diff --git a/packages/backend-core/yarn.lock b/packages/backend-core/yarn.lock index f28f2f932f..fc70e3d6a1 100644 --- a/packages/backend-core/yarn.lock +++ b/packages/backend-core/yarn.lock @@ -3410,9 +3410,9 @@ node-fetch@2.6.0: integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA== node-fetch@^2.6.1: - version "2.6.6" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.6.tgz#1751a7c01834e8e1697758732e9efb6eeadfaf89" - integrity sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA== + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" diff --git a/packages/bbui/package.json b/packages/bbui/package.json index c1d3f4957f..8ae8c7983b 100644 --- a/packages/bbui/package.json +++ b/packages/bbui/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/bbui", "description": "A UI solution used in the different Budibase projects.", - "version": "1.0.44-alpha.9", + "version": "1.0.49-alpha.4", "license": "MPL-2.0", "svelte": "src/index.js", "module": "dist/bbui.es.js", diff --git a/packages/bbui/src/Form/Core/Dropzone.svelte b/packages/bbui/src/Form/Core/Dropzone.svelte index f7fed78b70..6b8022a36c 100644 --- a/packages/bbui/src/Form/Core/Dropzone.svelte +++ b/packages/bbui/src/Form/Core/Dropzone.svelte @@ -147,7 +147,9 @@ preview {:else}
-
{selectedImage.extension}
+
+ {selectedImage.name || "Unknown file"} +
Preview not supported
{/if} @@ -359,18 +361,21 @@ white-space: nowrap; width: 0; margin-right: 10px; + user-select: all; } .placeholder { display: flex; flex-direction: column; justify-content: center; align-items: center; + text-align: center; } .extension { color: var(--spectrum-global-color-gray-600); text-transform: uppercase; font-weight: 600; margin-bottom: 5px; + user-select: all; } .nav { diff --git a/packages/bbui/src/Form/Field.svelte b/packages/bbui/src/Form/Field.svelte index a2e41072a5..5033c28f7d 100644 --- a/packages/bbui/src/Form/Field.svelte +++ b/packages/bbui/src/Form/Field.svelte @@ -6,11 +6,12 @@ export let label = null export let labelPosition = "above" export let error = null + export let tooltip = ""
{#if label} - + {/if}
diff --git a/packages/bbui/src/Form/FieldLabel.svelte b/packages/bbui/src/Form/FieldLabel.svelte index b070df8cae..3606d77c7b 100644 --- a/packages/bbui/src/Form/FieldLabel.svelte +++ b/packages/bbui/src/Form/FieldLabel.svelte @@ -1,19 +1,24 @@ - + + + diff --git a/packages/bbui/src/Tooltip/TooltipWrapper.svelte b/packages/bbui/src/Tooltip/TooltipWrapper.svelte new file mode 100644 index 0000000000..c587dec1dc --- /dev/null +++ b/packages/bbui/src/Tooltip/TooltipWrapper.svelte @@ -0,0 +1,60 @@ + + +
+ + {#if tooltip} +
+
(showTooltip = true)} + on:mouseleave={() => (showTooltip = false)} + > + +
+ {#if showTooltip} +
+ +
+ {/if} +
+ {/if} +
+ + diff --git a/packages/builder/package.json b/packages/builder/package.json index 960a320c53..4bacc69112 100644 --- a/packages/builder/package.json +++ b/packages/builder/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/builder", - "version": "1.0.44-alpha.9", + "version": "1.0.49-alpha.4", "license": "GPL-3.0", "private": true, "scripts": { @@ -65,10 +65,10 @@ } }, "dependencies": { - "@budibase/bbui": "^1.0.44-alpha.9", - "@budibase/client": "^1.0.44-alpha.9", + "@budibase/bbui": "^1.0.49-alpha.4", + "@budibase/client": "^1.0.49-alpha.4", "@budibase/colorpicker": "1.1.2", - "@budibase/string-templates": "^1.0.44-alpha.9", + "@budibase/string-templates": "^1.0.49-alpha.4", "@sentry/browser": "5.19.1", "@spectrum-css/page": "^3.0.1", "@spectrum-css/vars": "^3.0.1", diff --git a/packages/builder/src/builderStore/api.js b/packages/builder/src/builderStore/api.js index 897d3a74db..a932799701 100644 --- a/packages/builder/src/builderStore/api.js +++ b/packages/builder/src/builderStore/api.js @@ -1,11 +1,20 @@ import { store } from "./index" import { get as svelteGet } from "svelte/store" import { removeCookie, Cookies } from "./cookies" +import { auth } from "stores/portal" const apiCall = method => async (url, body, headers = { "Content-Type": "application/json" }) => { headers["x-budibase-app-id"] = svelteGet(store).appId + headers["x-budibase-api-version"] = "1" + + // add csrf token if authenticated + const user = svelteGet(auth).user + if (user && user.csrfToken) { + headers["x-csrf-token"] = user.csrfToken + } + const json = headers["Content-Type"] === "application/json" const resp = await fetch(url, { method: method, diff --git a/packages/builder/src/builderStore/index.js b/packages/builder/src/builderStore/index.js index 23704556ad..5181e756c6 100644 --- a/packages/builder/src/builderStore/index.js +++ b/packages/builder/src/builderStore/index.js @@ -1,6 +1,5 @@ import { getFrontendStore } from "./store/frontend" import { getAutomationStore } from "./store/automation" -import { getHostingStore } from "./store/hosting" import { getThemeStore } from "./store/theme" import { derived, writable } from "svelte/store" import { FrontendTypes, LAYOUT_NAMES } from "../constants" @@ -9,7 +8,6 @@ import { findComponent } from "./componentUtils" export const store = getFrontendStore() export const automationStore = getAutomationStore() export const themeStore = getThemeStore() -export const hostingStore = getHostingStore() export const currentAsset = derived(store, $store => { const type = $store.currentFrontEndType diff --git a/packages/builder/src/builderStore/store/frontend.js b/packages/builder/src/builderStore/store/frontend.js index fdfe450edf..0d740e08e0 100644 --- a/packages/builder/src/builderStore/store/frontend.js +++ b/packages/builder/src/builderStore/store/frontend.js @@ -2,7 +2,6 @@ import { get, writable } from "svelte/store" import { cloneDeep } from "lodash/fp" import { allScreens, - hostingStore, currentAsset, mainLayout, selectedComponent, @@ -100,7 +99,6 @@ export const getFrontendStore = () => { version: application.version, revertableVersion: application.revertableVersion, })) - await hostingStore.actions.fetch() // Initialise backend stores const [_integrations] = await Promise.all([ diff --git a/packages/builder/src/builderStore/store/hosting.js b/packages/builder/src/builderStore/store/hosting.js deleted file mode 100644 index fb174c2663..0000000000 --- a/packages/builder/src/builderStore/store/hosting.js +++ /dev/null @@ -1,34 +0,0 @@ -import { writable } from 
"svelte/store" -import api, { get } from "../api" - -const INITIAL_HOSTING_UI_STATE = { - appUrl: "", - deployedApps: {}, - deployedAppNames: [], - deployedAppUrls: [], -} - -export const getHostingStore = () => { - const store = writable({ ...INITIAL_HOSTING_UI_STATE }) - store.actions = { - fetch: async () => { - const response = await api.get("/api/hosting/urls") - const urls = await response.json() - store.update(state => { - state.appUrl = urls.app - return state - }) - }, - fetchDeployedApps: async () => { - let deployments = await (await get("/api/hosting/apps")).json() - store.update(state => { - state.deployedApps = deployments - state.deployedAppNames = Object.values(deployments).map(app => app.name) - state.deployedAppUrls = Object.values(deployments).map(app => app.url) - return state - }) - return deployments - }, - } - return store -} diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte index 1fa5c6e073..0d73f3d36d 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte @@ -22,8 +22,10 @@ RelationshipTypes, ALLOWABLE_STRING_OPTIONS, ALLOWABLE_NUMBER_OPTIONS, + ALLOWABLE_JSON_OPTIONS, ALLOWABLE_STRING_TYPES, ALLOWABLE_NUMBER_TYPES, + ALLOWABLE_JSON_TYPES, SWITCHABLE_TYPES, } from "constants/backend" import { getAutoColumnInformation, buildAutoColumn } from "builderStore/utils" @@ -150,6 +152,7 @@ delete field.subtype delete field.tableId delete field.relationshipType + delete field.formulaType // Add in defaults and initial definition const definition = fieldDefinitions[event.detail?.toUpperCase()] @@ -161,6 +164,9 @@ if (field.type === LINK_TYPE) { field.relationshipType = RelationshipTypes.MANY_TO_MANY } + if (field.type === FORMULA_TYPE) { + field.formulaType = "dynamic" + } } function onChangeRequired(e) { @@ -241,6 +247,11 @@ ALLOWABLE_NUMBER_TYPES.indexOf(field.type) !== -1 ) { return ALLOWABLE_NUMBER_OPTIONS + } else if ( + originalName && + ALLOWABLE_JSON_TYPES.indexOf(field.type) !== -1 + ) { + return ALLOWABLE_JSON_OPTIONS } else if (!external) { return [ ...Object.values(fieldDefinitions), @@ -431,8 +442,22 @@ error={errors.relatedName} /> {:else if field.type === FORMULA_TYPE} + {#if !table.sql} + (field.subtype = e.detail)} options={Object.entries(getAutoColumnInformation())} diff --git a/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/PlusConfigForm.svelte b/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/PlusConfigForm.svelte index 819fb32e45..8805505c8d 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/PlusConfigForm.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/TableIntegrationMenu/PlusConfigForm.svelte @@ -188,7 +188,7 @@ {:else} No tables found. {/if} -{#if plusTables?.length !== 0} +{#if plusTables?.length !== 0 && integration.relationships}
Relationships diff --git a/packages/builder/src/components/backend/DatasourceNavigator/_components/GoogleButton.svelte b/packages/builder/src/components/backend/DatasourceNavigator/_components/GoogleButton.svelte new file mode 100644 index 0000000000..091e332832 --- /dev/null +++ b/packages/builder/src/components/backend/DatasourceNavigator/_components/GoogleButton.svelte @@ -0,0 +1,47 @@ + + + { + let ds = datasource + if (!ds) { + ds = await preAuthStep() + } + window.open( + `/api/global/auth/${tenantId}/datasource/google?datasourceId=${datasource._id}&appId=${$store.appId}`, + "_blank" + ) + }} +> +
+ google icon +

Sign in with Google

+
+
+ + diff --git a/packages/builder/src/components/backend/DatasourceNavigator/icons/GoogleSheets.svelte b/packages/builder/src/components/backend/DatasourceNavigator/icons/GoogleSheets.svelte new file mode 100644 index 0000000000..0d376e4400 --- /dev/null +++ b/packages/builder/src/components/backend/DatasourceNavigator/icons/GoogleSheets.svelte @@ -0,0 +1,184 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/packages/builder/src/components/backend/DatasourceNavigator/icons/index.js b/packages/builder/src/components/backend/DatasourceNavigator/icons/index.js index 56ae03dcc3..350fccf73f 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/icons/index.js +++ b/packages/builder/src/components/backend/DatasourceNavigator/icons/index.js @@ -11,6 +11,7 @@ import ArangoDB from "./ArangoDB.svelte" import Rest from "./Rest.svelte" import Budibase from "./Budibase.svelte" import Oracle from "./Oracle.svelte" +import GoogleSheets from "./GoogleSheets.svelte" export default { BUDIBASE: Budibase, @@ -26,4 +27,5 @@ export default { ARANGODB: ArangoDB, REST: Rest, ORACLE: Oracle, + GOOGLE_SHEETS: GoogleSheets, } diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte index dc5831b905..71df33b967 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte @@ -6,6 +6,7 @@ import { IntegrationNames, IntegrationTypes } from "constants/backend" import CreateTableModal from "components/backend/TableNavigator/modals/CreateTableModal.svelte" import DatasourceConfigModal from "components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte" + import GoogleDatasourceConfigModal from "components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte" import { createRestDatasource } from "builderStore/datasource" import { goto } from "@roxi/routify" import ImportRestQueriesModal from "./ImportRestQueriesModal.svelte" @@ -38,6 +39,7 @@ plus: selected.plus, config, schema: selected.datasource, + auth: selected.auth, } checkShowImport() } @@ -79,7 +81,11 @@ - + {#if integration?.auth?.type === "google"} + + {:else} + + {/if} diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte index da8c0515b7..97168358cf 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte @@ -51,13 +51,9 @@ >Connect your database to Budibase using the config below. 
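For context on where the Google flow above lands (illustrative, not part of the diff): once postAuth has run, the granted tokens live under the datasource's config.auth, so a Google Sheets datasource document ends up shaped roughly like this. The document ID and token values are placeholders.

```js
// Placeholder values only; the config.auth shape comes from postAuth.
const exampleDatasource = {
  _id: "datasource_1a2b3c4d5e6f", // hypothetical document ID
  source: "GOOGLE_SHEETS",
  config: {
    auth: {
      type: "google",
      accessToken: "<granted access token>",
      refreshToken: "<granted refresh token>",
    },
  },
}
```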
- - - diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte new file mode 100644 index 0000000000..7d03dafeb9 --- /dev/null +++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte @@ -0,0 +1,29 @@ + + + modal.show()} + cancelText="Back" + size="L" +> + + Authenticate with your google account to use the {IntegrationNames[ + datasource.type + ]} integration. + + save(datasource, true)} /> + diff --git a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte index 997864e165..a3b7ca81a6 100644 --- a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte +++ b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte @@ -53,16 +53,23 @@ } // Create table - const table = await tables.save(newTable) - notifications.success(`Table ${name} created successfully.`) - analytics.captureEvent(Events.TABLE.CREATED, { name }) + let table + try { + table = await tables.save(newTable) + notifications.success(`Table ${name} created successfully.`) + analytics.captureEvent(Events.TABLE.CREATED, { name }) - // Navigate to new table - const currentUrl = $url() - const path = currentUrl.endsWith("data") - ? `./table/${table._id}` - : `../../table/${table._id}` - $goto(path) + // Navigate to new table + const currentUrl = $url() + const path = currentUrl.endsWith("data") + ? `./table/${table._id}` + : `../../table/${table._id}` + $goto(path) + } catch (e) { + notifications.error(e) + // reload in case the table was created + await tables.fetch() + } } diff --git a/packages/builder/src/components/deploy/DeploymentHistory.svelte b/packages/builder/src/components/deploy/DeploymentHistory.svelte index f6bbcef4d4..36c2433c27 100644 --- a/packages/builder/src/components/deploy/DeploymentHistory.svelte +++ b/packages/builder/src/components/deploy/DeploymentHistory.svelte @@ -6,7 +6,7 @@ import api from "builderStore/api" import { notifications } from "@budibase/bbui" import CreateWebhookDeploymentModal from "./CreateWebhookDeploymentModal.svelte" - import { store, hostingStore } from "builderStore" + import { store } from "builderStore" const DeploymentStatus = { SUCCESS: "SUCCESS", @@ -37,7 +37,7 @@ let poll let deployments = [] let urlComponent = $store.url || `/${appId}` - let deploymentUrl = `${$hostingStore.appUrl}${urlComponent}` + let deploymentUrl = `${urlComponent}` const formatDate = (date, format) => Intl.DateTimeFormat("en-GB", DATE_OPTIONS[format]).format(date) diff --git a/packages/builder/src/components/design/AppPreview/AppThemeSelect.svelte b/packages/builder/src/components/design/AppPreview/AppThemeSelect.svelte index ec51688219..a1a5a7a242 100644 --- a/packages/builder/src/components/design/AppPreview/AppThemeSelect.svelte +++ b/packages/builder/src/components/design/AppPreview/AppThemeSelect.svelte @@ -1,6 +1,7 @@
@@ -27,7 +39,7 @@ value={$store.theme} options={themeOptions} placeholder={null} - on:change={e => store.actions.theme.save(e.detail)} + on:change={e => onChangeTheme(e.detail)} />
diff --git a/packages/builder/src/components/design/AppPreview/ThemeEditor.svelte b/packages/builder/src/components/design/AppPreview/ThemeEditor.svelte index 7b36de4fa8..14747191c2 100644 --- a/packages/builder/src/components/design/AppPreview/ThemeEditor.svelte +++ b/packages/builder/src/components/design/AppPreview/ThemeEditor.svelte @@ -19,7 +19,7 @@ primaryColor: "var(--spectrum-global-color-blue-600)", primaryColorHover: "var(--spectrum-global-color-blue-500)", buttonBorderRadius: "16px", - navBackground: "var(--spectrum-global-color-gray-100)", + navBackground: "var(--spectrum-global-color-gray-50)", navTextColor: "var(--spectrum-global-color-gray-800)", } @@ -52,7 +52,14 @@ } const resetTheme = () => { - store.actions.customTheme.save(null) + const theme = get(store).theme + store.actions.customTheme.save({ + ...defaultTheme, + navBackground: + theme === "spectrum--light" + ? "var(--spectrum-global-color-gray-50)" + : "var(--spectrum-global-color-gray-100)", + }) } diff --git a/packages/builder/src/components/design/AppPreview/componentStructure.json b/packages/builder/src/components/design/AppPreview/componentStructure.json index 5385a8852b..0b888ba10b 100644 --- a/packages/builder/src/components/design/AppPreview/componentStructure.json +++ b/packages/builder/src/components/design/AppPreview/componentStructure.json @@ -44,7 +44,8 @@ "relationshipfield", "daterangepicker", "multifieldselect", - "jsonfield" + "jsonfield", + "s3upload" ] }, { diff --git a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/ButtonActionEditor/actions/ExecuteQuery.svelte b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/ButtonActionEditor/actions/ExecuteQuery.svelte index 88c7e87054..8c438e4b22 100644 --- a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/ButtonActionEditor/actions/ExecuteQuery.svelte +++ b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/ButtonActionEditor/actions/ExecuteQuery.svelte @@ -55,8 +55,8 @@
+ import { Select, Label } from "@budibase/bbui" + import { currentAsset } from "builderStore" + import { findAllMatchingComponents } from "builderStore/componentUtils" + + export let parameters + + $: components = findAllMatchingComponents($currentAsset.props, component => + component._component.endsWith("s3upload") + ) + + +
+ + {:else if ["options", "array"].includes(filter.type)} + import { Select } from "@budibase/bbui" + import { datasources } from "stores/backend" + + export let value = null + + $: dataSources = $datasources.list + .filter(ds => ds.source === "S3" && !ds.config?.endpoint) + .map(ds => ({ + label: ds.name, + value: ds._id, + })) + + + onBindingChange(binding.name, evt.detail)} value={runtimeToReadableBinding( - bindableOptions, + bindings, customParams?.[binding.name] )} - {bindableOptions} + {bindings} /> {:else} deleteQueryBinding(idx)} /> diff --git a/packages/builder/src/components/integration/QueryViewer.svelte b/packages/builder/src/components/integration/QueryViewer.svelte index f14d1d2b88..c6ae7c4ce8 100644 --- a/packages/builder/src/components/integration/QueryViewer.svelte +++ b/packages/builder/src/components/integration/QueryViewer.svelte @@ -120,7 +120,7 @@ config={integrationInfo.extra} /> {/if} - + {/if}
{#if shouldShowQueryConfig} diff --git a/packages/builder/src/components/start/CreateAppModal.svelte b/packages/builder/src/components/start/CreateAppModal.svelte index 60065b6eef..3efd0231aa 100644 --- a/packages/builder/src/components/start/CreateAppModal.svelte +++ b/packages/builder/src/components/start/CreateAppModal.svelte @@ -1,100 +1,46 @@ {#if template?.fromFile} { $values.file = e.detail?.[0] - $touched.file = true + $validation.touched.file = true }} /> {/if} ($touched.name = true)} + error={$validation.touched.name && $validation.errors.name} + on:blur={() => ($validation.touched.name = true)} label="Name" placeholder={$auth.user.firstName - ? `${$auth.user.firstName}'s app` + ? `${$auth.user.firstName}s app` : "My app"} /> + ($validation.touched.url = true)} + label="URL" + placeholder={$values.name + ? "/" + encodeURIComponent($values.name).toLowerCase() + : "/"} + /> diff --git a/packages/builder/src/components/start/UpdateAppModal.svelte b/packages/builder/src/components/start/UpdateAppModal.svelte index 432b13c7c3..7549876fc0 100644 --- a/packages/builder/src/components/start/UpdateAppModal.svelte +++ b/packages/builder/src/components/start/UpdateAppModal.svelte @@ -1,120 +1,75 @@ - - - Update the name of your app. - ($touched.name = true)} - on:change={() => (dirty = true)} - label="Name" - /> - - + + Update the name of your app. + ($validation.touched.name = true)} + label="Name" + /> + ($validation.touched.url = true)} + label="URL" + placeholder={$values.name + ? "/" + encodeURIComponent($values.name).toLowerCase() + : "/"} + /> + diff --git a/packages/builder/src/constants/backend/index.js b/packages/builder/src/constants/backend/index.js index c1eea5b0ef..d07c245b21 100644 --- a/packages/builder/src/constants/backend/index.js +++ b/packages/builder/src/constants/backend/index.js @@ -148,20 +148,23 @@ export const RelationshipTypes = { } export const ALLOWABLE_STRING_OPTIONS = [FIELDS.STRING, FIELDS.OPTIONS] - export const ALLOWABLE_STRING_TYPES = ALLOWABLE_STRING_OPTIONS.map( opt => opt.type ) export const ALLOWABLE_NUMBER_OPTIONS = [FIELDS.NUMBER, FIELDS.BOOLEAN] - export const ALLOWABLE_NUMBER_TYPES = ALLOWABLE_NUMBER_OPTIONS.map( opt => opt.type ) -export const SWITCHABLE_TYPES = ALLOWABLE_NUMBER_TYPES.concat( - ALLOWABLE_STRING_TYPES -) +export const ALLOWABLE_JSON_OPTIONS = [FIELDS.JSON, FIELDS.ARRAY] +export const ALLOWABLE_JSON_TYPES = ALLOWABLE_JSON_OPTIONS.map(opt => opt.type) + +export const SWITCHABLE_TYPES = [ + ...ALLOWABLE_STRING_TYPES, + ...ALLOWABLE_NUMBER_TYPES, + ...ALLOWABLE_JSON_TYPES, +] export const IntegrationTypes = { POSTGRES: "POSTGRES", @@ -177,6 +180,7 @@ export const IntegrationTypes = { ARANGODB: "ARANGODB", ORACLE: "ORACLE", INTERNAL: "INTERNAL", + GOOGLE_SHEETS: "GOOGLE_SHEETS", } export const IntegrationNames = { @@ -193,6 +197,7 @@ export const IntegrationNames = { [IntegrationTypes.ARANGODB]: "ArangoDB", [IntegrationTypes.ORACLE]: "Oracle", [IntegrationTypes.INTERNAL]: "Internal", + [IntegrationTypes.GOOGLE_SHEETS]: "Google Sheets", } export const SchemaTypeOptions = [ diff --git a/packages/builder/src/constants/index.js b/packages/builder/src/constants/index.js index 04f12672e8..abeaadc718 100644 --- a/packages/builder/src/constants/index.js +++ b/packages/builder/src/constants/index.js @@ -15,6 +15,22 @@ export const AppStatus = { DEPLOYED: "published", } +export const IntegrationNames = { + POSTGRES: "PostgreSQL", + MONGODB: "MongoDB", + COUCHDB: "CouchDB", + S3: "S3", + MYSQL: "MySQL", + REST: "REST", + DYNAMODB: "DynamoDB", + 
ELASTICSEARCH: "ElasticSearch", + SQL_SERVER: "SQL Server", + AIRTABLE: "Airtable", + ARANGODB: "ArangoDB", + ORACLE: "Oracle", + GOOGLE_SHEETS: "Google Sheets", +} + // fields on the user table that cannot be edited export const UNEDITABLE_USER_FIELDS = [ "email", @@ -36,4 +52,7 @@ export const LAYOUT_NAMES = { export const BUDIBASE_INTERNAL_DB = "bb_internal" +// one or more word characters and whitespace export const APP_NAME_REGEX = /^[\w\s]+$/ +// zero or more non-whitespace characters +export const APP_URL_REGEX = /^\S*$/ diff --git a/packages/builder/src/constants/lucene.js b/packages/builder/src/constants/lucene.js index 132790739c..8a6bf57b5f 100644 --- a/packages/builder/src/constants/lucene.js +++ b/packages/builder/src/constants/lucene.js @@ -59,24 +59,26 @@ export const NoEmptyFilterStrings = [ */ export const getValidOperatorsForType = type => { const Op = OperatorOptions + const stringOps = [ + Op.Equals, + Op.NotEquals, + Op.StartsWith, + Op.Like, + Op.Empty, + Op.NotEmpty, + ] + const numOps = [ + Op.Equals, + Op.NotEquals, + Op.MoreThan, + Op.LessThan, + Op.Empty, + Op.NotEmpty, + ] if (type === "string") { - return [ - Op.Equals, - Op.NotEquals, - Op.StartsWith, - Op.Like, - Op.Empty, - Op.NotEmpty, - ] + return stringOps } else if (type === "number") { - return [ - Op.Equals, - Op.NotEquals, - Op.MoreThan, - Op.LessThan, - Op.Empty, - Op.NotEmpty, - ] + return numOps } else if (type === "options") { return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty] } else if (type === "array") { @@ -84,23 +86,11 @@ export const getValidOperatorsForType = type => { } else if (type === "boolean") { return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty] } else if (type === "longform") { - return [ - Op.Equals, - Op.NotEquals, - Op.StartsWith, - Op.Like, - Op.Empty, - Op.NotEmpty, - ] + return stringOps } else if (type === "datetime") { - return [ - Op.Equals, - Op.NotEquals, - Op.MoreThan, - Op.LessThan, - Op.Empty, - Op.NotEmpty, - ] + return numOps + } else if (type === "formula") { + return stringOps.concat([Op.MoreThan, Op.LessThan]) } return [] } diff --git a/packages/builder/src/helpers/searchFields.js b/packages/builder/src/helpers/searchFields.js index 650e04a680..a9c837d570 100644 --- a/packages/builder/src/helpers/searchFields.js +++ b/packages/builder/src/helpers/searchFields.js @@ -27,5 +27,8 @@ export function getFields(fields, { allowLinks } = { allowLinks: true }) { filteredFields = filteredFields.concat(getTableFields(linkField)) } } - return filteredFields + const staticFormulaFields = fields.filter( + field => field.type === "formula" && field.formulaType === "static" + ) + return filteredFields.concat(staticFormulaFields) } diff --git a/packages/builder/src/helpers/validation/validation.js b/packages/builder/src/helpers/validation/validation.js index 8d80d720a1..db5dfe4430 100644 --- a/packages/builder/src/helpers/validation/validation.js +++ b/packages/builder/src/helpers/validation/validation.js @@ -1,5 +1,7 @@ import { writable, derived } from "svelte/store" +// DEPRECATED - Use the yup based validators for future validation + export function createValidationStore(initialValue, ...validators) { let touched = false diff --git a/packages/builder/src/helpers/validation/validators.js b/packages/builder/src/helpers/validation/validators.js index 036487fd50..f842f11313 100644 --- a/packages/builder/src/helpers/validation/validators.js +++ b/packages/builder/src/helpers/validation/validators.js @@ -1,3 +1,5 @@ +// TODO: Convert to yup based validators + export 
function emailValidator(value) { return ( (value && diff --git a/packages/builder/src/helpers/validation/yup/app.js b/packages/builder/src/helpers/validation/yup/app.js new file mode 100644 index 0000000000..de0f86446c --- /dev/null +++ b/packages/builder/src/helpers/validation/yup/app.js @@ -0,0 +1,83 @@ +import { string, mixed } from "yup" +import { APP_NAME_REGEX, APP_URL_REGEX } from "constants" + +export const name = (validation, { apps, currentApp } = { apps: [] }) => { + validation.addValidator( + "name", + string() + .trim() + .required("Your application must have a name") + .matches( + APP_NAME_REGEX, + "App name must be letters, numbers and spaces only" + ) + .test( + "non-existing-app-name", + "Another app with the same name already exists", + value => { + if (!value) { + // exit early, above validator will fail + return true + } + if (currentApp) { + // filter out the current app if present + apps = apps.filter(app => app.appId !== currentApp.appId) + } + return !apps + .map(app => app.name) + .some(appName => appName.toLowerCase() === value.toLowerCase()) + } + ) + ) +} + +export const url = (validation, { apps, currentApp } = { apps: [] }) => { + validation.addValidator( + "url", + string() + .nullable() + .matches(APP_URL_REGEX, "App URL must not contain spaces") + .test( + "non-existing-app-url", + "Another app with the same URL already exists", + value => { + // url is nullable + if (!value) { + return true + } + if (currentApp) { + // filter out the current app if present + apps = apps.filter(app => app.appId !== currentApp.appId) + } + return !apps + .map(app => app.url) + .some(appUrl => appUrl?.toLowerCase() === value.toLowerCase()) + } + ) + .test("valid-url", "Not a valid URL", value => { + // url is nullable + if (!value) { + return true + } + // make it clear that this is a url path and cannot be a full url + return ( + value.startsWith("/") && + !value.includes("http") && + !value.includes("www") && + !value.includes(".") && + value.length > 1 // just '/' is not valid + ) + }) + ) +} + +export const file = (validation, { template } = {}) => { + const templateToUse = + template && Object.keys(template).length === 0 ? null : template + validation.addValidator( + "file", + templateToUse?.fromFile + ? 
mixed().required("Please choose a file to import") + : null + ) +} diff --git a/packages/builder/src/helpers/validation/yup/index.js b/packages/builder/src/helpers/validation/yup/index.js new file mode 100644 index 0000000000..6783ad7e58 --- /dev/null +++ b/packages/builder/src/helpers/validation/yup/index.js @@ -0,0 +1,66 @@ +import { capitalise } from "helpers" +import { object } from "yup" +import { writable, get } from "svelte/store" +import { notifications } from "@budibase/bbui" + +export const createValidationStore = () => { + const DEFAULT = { + errors: {}, + touched: {}, + valid: false, + } + + const validator = {} + const validation = writable(DEFAULT) + + const addValidator = (propertyName, propertyValidator) => { + if (!propertyValidator || !propertyName) { + return + } + validator[propertyName] = propertyValidator + } + + const check = async values => { + const obj = object().shape(validator) + // clear the previous errors + const properties = Object.keys(validator) + properties.forEach(property => (get(validation).errors[property] = null)) + + let validationError = false + try { + await obj.validate(values, { abortEarly: false }) + } catch (error) { + if (!error.inner) { + notifications.error("Unexpected validation error", error) + validationError = true + } else { + error.inner.forEach(err => { + validation.update(store => { + store.errors[err.path] = capitalise(err.message) + return store + }) + }) + } + } + + let valid + if (properties.length && !validationError) { + valid = await obj.isValid(values) + } else { + // don't say valid until validators have been loaded + valid = false + } + + validation.update(store => { + store.valid = valid + return store + }) + } + + return { + subscribe: validation.subscribe, + set: validation.set, + check, + addValidator, + } +} diff --git a/packages/builder/src/pages/builder/_layout.svelte b/packages/builder/src/pages/builder/_layout.svelte index 12a544096a..1d41af15e7 100644 --- a/packages/builder/src/pages/builder/_layout.svelte +++ b/packages/builder/src/pages/builder/_layout.svelte @@ -61,7 +61,7 @@ await auth.setInitInfo({ init_template: $params["?template"] }) } - await auth.checkAuth() + await auth.getSelf() await admin.init() if (useAccountPortal && multiTenancyEnabled) { diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/index.svelte index 98b7859305..bb4cc6e1fb 100644 --- a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/index.svelte +++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/index.svelte @@ -19,8 +19,8 @@ import { IntegrationTypes } from "constants/backend" import { isEqual } from "lodash" import { cloneDeep } from "lodash/fp" - import ImportRestQueriesModal from "components/backend/DatasourceNavigator/modals/ImportRestQueriesModal.svelte" + let importQueriesModal let changed diff --git a/packages/builder/src/pages/builder/apps/index.svelte b/packages/builder/src/pages/builder/apps/index.svelte index aafc28cd92..c98e749e45 100644 --- a/packages/builder/src/pages/builder/apps/index.svelte +++ b/packages/builder/src/pages/builder/apps/index.svelte @@ -12,7 +12,7 @@ Modal, } from "@budibase/bbui" import { onMount } from "svelte" - import { apps, organisation, auth, admin } from "stores/portal" + import { apps, organisation, auth } from "stores/portal" import { goto } from 
"@roxi/routify" import { AppStatus } from "constants" import { gradient } from "actions" @@ -34,7 +34,6 @@ const publishedAppsOnly = app => app.status === AppStatus.DEPLOYED $: publishedApps = $apps.filter(publishedAppsOnly) - $: isCloud = $admin.cloud $: userApps = $auth.user?.builder?.global ? publishedApps : publishedApps.filter(app => @@ -42,7 +41,11 @@ ) function getUrl(app) { - return !isCloud ? `/app/${encodeURIComponent(app.name)}` : `/${app.prodId}` + if (app.url) { + return `/app${app.url}` + } else { + return `/${app.prodId}` + } } diff --git a/packages/builder/src/pages/builder/auth/reset.svelte b/packages/builder/src/pages/builder/auth/reset.svelte index e38a5d8b24..5e5b615d73 100644 --- a/packages/builder/src/pages/builder/auth/reset.svelte +++ b/packages/builder/src/pages/builder/auth/reset.svelte @@ -31,6 +31,7 @@ } onMount(async () => { + await auth.getSelf() await organisation.init() }) diff --git a/packages/builder/src/pages/builder/portal/apps/index.svelte b/packages/builder/src/pages/builder/portal/apps/index.svelte index 047c60e979..bf783fdb86 100644 --- a/packages/builder/src/pages/builder/portal/apps/index.svelte +++ b/packages/builder/src/pages/builder/portal/apps/index.svelte @@ -49,7 +49,6 @@ $: filteredApps = enrichedApps.filter(app => app?.name?.toLowerCase().includes(searchTerm.toLowerCase()) ) - $: isCloud = $admin.cloud const enrichApps = (apps, user, sortBy) => { const enrichedApps = apps.map(app => ({ @@ -80,7 +79,7 @@ } const initiateAppCreation = () => { - template = {} + template = null creationModal.show() creatingApp = true } @@ -162,12 +161,10 @@ } const viewApp = app => { - if (!isCloud && app.deployed) { - // special case to use the short form name if self hosted - window.open(`/app/${encodeURIComponent(app.name)}`) + if (app.url) { + window.open(`/app${app.url}`) } else { - const id = app.deployed ? app.prodId : app.devId - window.open(`/${id}`, "_blank") + window.open(`/${app.prodId}`) } } @@ -340,6 +337,14 @@ }} class="template-card" > + + +
+ + + + + {selectedApp?.name}? -
{ const safePath = column.name.split(".").map(safe).join(".") + const stringType = column.type === "string" || column.type === "formula" enrichedFilter.push({ field: column.name, - operator: column.type === "string" ? "string" : "equal", - type: column.type === "string" ? "string" : "number", + operator: stringType ? "string" : "equal", + type: stringType ? "string" : "number", valueType: "Binding", value: `{{ ${safe(formId)}.${safePath} }}`, }) diff --git a/packages/client/src/components/app/dynamic-filter/FilterModal.svelte b/packages/client/src/components/app/dynamic-filter/FilterModal.svelte index f303c69aaf..16d5bb0ee5 100644 --- a/packages/client/src/components/app/dynamic-filter/FilterModal.svelte +++ b/packages/client/src/components/app/dynamic-filter/FilterModal.svelte @@ -19,10 +19,14 @@ export let schemaFields export let filters = [] - const BannedTypes = ["link", "attachment", "formula", "json"] + const BannedTypes = ["link", "attachment", "json"] $: fieldOptions = (schemaFields ?? []) - .filter(field => !BannedTypes.includes(field.type)) + .filter( + field => + !BannedTypes.includes(field.type) || + (field.type === "formula" && field.formulaType === "static") + ) .map(field => field.name) const addFilter = () => { @@ -114,7 +118,7 @@ on:change={e => onOperatorChange(filter, e.detail)} placeholder={null} /> - {#if ["string", "longform", "number"].includes(filter.type)} + {#if ["string", "longform", "number", "formula"].includes(filter.type)} {:else if ["options", "array"].includes(filter.type)} - {:else if fieldSchema?.type && fieldSchema?.type !== type && type !== "options"} + {:else if schemaType && schemaType !== type && type !== "options"} diff --git a/packages/client/src/components/app/forms/S3Upload.svelte b/packages/client/src/components/app/forms/S3Upload.svelte new file mode 100644 index 0000000000..c35fd7f194 --- /dev/null +++ b/packages/client/src/components/app/forms/S3Upload.svelte @@ -0,0 +1,143 @@ + + + +
+ {#if fieldState} + { + fieldApi.setValue(e.detail) + }} + {processFiles} + {handleFileTooLarge} + maximum={1} + fileSizeLimit={MaxFileSize} + /> + {/if} + {#if loading} +
+
+ +
+ {/if} +
+ + + diff --git a/packages/client/src/components/app/forms/index.js b/packages/client/src/components/app/forms/index.js index ab1f7d20ed..0ff82cea94 100644 --- a/packages/client/src/components/app/forms/index.js +++ b/packages/client/src/components/app/forms/index.js @@ -12,3 +12,4 @@ export { default as relationshipfield } from "./RelationshipField.svelte" export { default as passwordfield } from "./PasswordField.svelte" export { default as formstep } from "./FormStep.svelte" export { default as jsonfield } from "./JSONField.svelte" +export { default as s3upload } from "./S3Upload.svelte" diff --git a/packages/client/src/sdk.js b/packages/client/src/sdk.js index 1c73361dc8..9803730541 100644 --- a/packages/client/src/sdk.js +++ b/packages/client/src/sdk.js @@ -5,6 +5,7 @@ import { routeStore, screenStore, builderStore, + uploadStore, } from "stores" import { styleable } from "utils/styleable" import { linkable } from "utils/linkable" @@ -20,6 +21,7 @@ export default { routeStore, screenStore, builderStore, + uploadStore, styleable, linkable, getAction, diff --git a/packages/client/src/stores/index.js b/packages/client/src/stores/index.js index 02c848120c..9f6e5f6f50 100644 --- a/packages/client/src/stores/index.js +++ b/packages/client/src/stores/index.js @@ -9,6 +9,7 @@ export { confirmationStore } from "./confirmation" export { peekStore } from "./peek" export { stateStore } from "./state" export { themeStore } from "./theme" +export { uploadStore } from "./uploads.js" // Context stores are layered and duplicated, so it is not a singleton export { createContextStore } from "./context" diff --git a/packages/client/src/stores/uploads.js b/packages/client/src/stores/uploads.js new file mode 100644 index 0000000000..b41e8411b8 --- /dev/null +++ b/packages/client/src/stores/uploads.js @@ -0,0 +1,42 @@ +import { writable, get } from "svelte/store" + +export const createUploadStore = () => { + const store = writable([]) + + // Registers a new file upload component + const registerFileUpload = (componentId, callback) => { + if (!componentId || !callback) { + return + } + + store.update(state => { + state.push({ + componentId, + callback, + }) + return state + }) + } + + // Unregisters a file upload component + const unregisterFileUpload = componentId => { + store.update(state => state.filter(c => c.componentId !== componentId)) + } + + // Processes a file upload for a given component ID + const processFileUpload = async componentId => { + if (!componentId) { + return + } + + const component = get(store).find(c => c.componentId === componentId) + return await component?.callback() + } + + return { + subscribe: store.subscribe, + actions: { registerFileUpload, unregisterFileUpload, processFileUpload }, + } +} + +export const uploadStore = createUploadStore() diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js index 2ef324d23c..560aaa59c4 100644 --- a/packages/client/src/utils/buttonActions.js +++ b/packages/client/src/utils/buttonActions.js @@ -5,6 +5,7 @@ import { confirmationStore, authStore, stateStore, + uploadStore, } from "stores" import { saveRow, deleteRow, executeQuery, triggerAutomation } from "api" import { ActionTypes } from "constants" @@ -169,6 +170,17 @@ const updateStateHandler = action => { } } +const s3UploadHandler = async action => { + const { componentId } = action.parameters + if (!componentId) { + return + } + const res = await uploadStore.actions.processFileUpload(componentId) + return { + publicUrl: res?.publicUrl, + } +} 
+ const handlerMap = { ["Save Row"]: saveRowHandler, ["Duplicate Row"]: duplicateRowHandler, @@ -183,6 +195,7 @@ const handlerMap = { ["Close Screen Modal"]: closeScreenModalHandler, ["Change Form Step"]: changeFormStepHandler, ["Update State"]: updateStateHandler, + ["Upload File to S3"]: s3UploadHandler, } const confirmTextMap = { diff --git a/packages/client/src/utils/fetch/DataFetch.js b/packages/client/src/utils/fetch/DataFetch.js index 884e12feb1..7fe53bb8af 100644 --- a/packages/client/src/utils/fetch/DataFetch.js +++ b/packages/client/src/utils/fetch/DataFetch.js @@ -110,12 +110,6 @@ export default class DataFetch { */ async getInitialData() { const { datasource, filter, sortColumn, paginate } = this.options - const tableId = datasource?.tableId - - // Ensure table ID exists - if (!tableId) { - return - } // Fetch datasource definition and determine feature flags const definition = await this.constructor.getDefinition(datasource) diff --git a/packages/client/src/utils/fetch/QueryFetch.js b/packages/client/src/utils/fetch/QueryFetch.js index f9052886f3..76aca2a855 100644 --- a/packages/client/src/utils/fetch/QueryFetch.js +++ b/packages/client/src/utils/fetch/QueryFetch.js @@ -16,7 +16,12 @@ export default class QueryFetch extends DataFetch { if (!datasource?._id) { return null } - return await fetchQueryDefinition(datasource._id) + const definition = await fetchQueryDefinition(datasource._id) + // The server strips the "fields" attribute from the query definition for security reasons, but pagination needs it, so restore it from the datasource. + if (!definition.fields) { + definition.fields = datasource.fields + } + return definition } async getData() { diff --git a/packages/client/yarn.lock b/packages/client/yarn.lock index f61cc01f61..7a6c780a87 100644 --- a/packages/client/yarn.lock +++ b/packages/client/yarn.lock @@ -115,6 +115,15 @@ magic-string "^0.25.7" resolve "^1.17.0" +"@rollup/plugin-inject@^4.0.0": + version "4.0.4" + resolved "https://registry.yarnpkg.com/@rollup/plugin-inject/-/plugin-inject-4.0.4.tgz#fbeee66e9a700782c4f65c8b0edbafe58678fbc2" + integrity sha512-4pbcU4J/nS+zuHk+c+OL3WtmEQhqxlZ9uqfjQMQDOHOPld7PsCd8k5LWs8h5wjwJN7MgnAn768F2sDxEP4eNFQ== + dependencies: + "@rollup/pluginutils" "^3.1.0" + estree-walker "^2.0.1" + magic-string "^0.25.7" + "@rollup/plugin-node-resolve@^11.2.1": version "11.2.1" resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" @@ -387,13 +396,6 @@ abab@^2.0.3, abab@^2.0.5: resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== -abstract-leveldown@~0.12.0, abstract-leveldown@~0.12.1: - version "0.12.4" - resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-0.12.4.tgz#29e18e632e60e4e221d5810247852a63d7b2e410" - integrity sha1-KeGOYy5g5OIh1YECR4UqY9ey5BA= - dependencies: - xtend "~3.0.0" - acorn-globals@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" @@ -407,11 +409,6 @@ acorn-walk@^7.1.1: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== -acorn@^5.7.3: - version "5.7.4" - resolved
"https://registry.yarnpkg.com/acorn/-/acorn-5.7.4.tgz#3e8d8a9947d0599a1796d10225d7432f4a4acf5e" - integrity sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg== - acorn@^7.1.1: version "7.4.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" @@ -460,16 +457,6 @@ apexcharts@^3.19.2, apexcharts@^3.22.1: svg.resize.js "^1.4.3" svg.select.js "^3.0.1" -asn1.js@^5.2.0: - version "5.4.1" - resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07" - integrity sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA== - dependencies: - bn.js "^4.0.0" - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - safer-buffer "^2.1.0" - asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -485,23 +472,6 @@ big.js@^5.2.2: resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== -bl@~0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/bl/-/bl-0.8.2.tgz#c9b6bca08d1bc2ea00fc8afb4f1a5fd1e1c66e4e" - integrity sha1-yba8oI0bwuoA/Ir7Txpf0eHGbk4= - dependencies: - readable-stream "~1.0.26" - -bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9: - version "4.12.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" - integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== - -bn.js@^5.0.0, bn.js@^5.1.1: - version "5.2.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.0.tgz#358860674396c6997771a9d051fcc1b57d4ae002" - integrity sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw== - boolbase@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" @@ -515,79 +485,11 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -brorand@^1.0.1, brorand@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" - integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= - browser-process-hrtime@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserify-aes@^1.0.0, browserify-aes@^1.0.4: - version "1.2.0" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" - integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== - dependencies: - buffer-xor "^1.0.3" - cipher-base "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.3" - inherits "^2.0.1" - safe-buffer "^5.0.1" - -browserify-cipher@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" - integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== - dependencies: - browserify-aes "^1.0.4" - browserify-des "^1.0.0" - evp_bytestokey "^1.0.0" - 
-browserify-des@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" - integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== - dependencies: - cipher-base "^1.0.1" - des.js "^1.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - -browserify-fs@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browserify-fs/-/browserify-fs-1.0.0.tgz#f075aa8a729d4d1716d066620e386fcc1311a96f" - integrity sha1-8HWqinKdTRcW0GZiDjhvzBMRqW8= - dependencies: - level-filesystem "^1.0.1" - level-js "^2.1.3" - levelup "^0.18.2" - -browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: - version "4.1.0" - resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.1.0.tgz#b2fd06b5b75ae297f7ce2dc651f918f5be158c8d" - integrity sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog== - dependencies: - bn.js "^5.0.0" - randombytes "^2.0.1" - -browserify-sign@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.1.tgz#eaf4add46dd54be3bb3b36c0cf15abbeba7956c3" - integrity sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg== - dependencies: - bn.js "^5.1.1" - browserify-rsa "^4.0.1" - create-hash "^1.2.0" - create-hmac "^1.1.7" - elliptic "^6.5.3" - inherits "^2.0.4" - parse-asn1 "^5.1.5" - readable-stream "^3.6.0" - safe-buffer "^5.2.0" - browserslist@^4.0.0, browserslist@^4.16.0, browserslist@^4.16.6: version "4.18.1" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.18.1.tgz#60d3920f25b6860eb917c6c7b185576f4d8b017f" @@ -599,21 +501,11 @@ browserslist@^4.0.0, browserslist@^4.16.0, browserslist@^4.16.6: node-releases "^2.0.1" picocolors "^1.0.0" -buffer-es6@^4.9.2, buffer-es6@^4.9.3: - version "4.9.3" - resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" - integrity sha1-8mNHuC33b9N+GLy1KIxJcM/VxAQ= - buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== -buffer-xor@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" - integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= - builtin-modules@^3.1.0: version "3.2.0" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" @@ -651,19 +543,6 @@ chalk@^4.1.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" - integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -clone@~0.1.9: - version "0.1.19" - resolved "https://registry.yarnpkg.com/clone/-/clone-0.1.19.tgz#613fb68639b26a494ac53253e15b1a6bd88ada85" - integrity sha1-YT+2hjmyaklKxTJT4Vsaa9iK2oU= - color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" @@ -720,16 +599,6 @@ concat-map@0.0.1: resolved 
"https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-stream@^1.4.4: - version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" - integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== - dependencies: - buffer-from "^1.0.0" - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" - concat-with-sourcemaps@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz#d4ea93f05ae25790951b99e7b3b09e3908a4082e" @@ -737,59 +606,6 @@ concat-with-sourcemaps@^1.1.0: dependencies: source-map "^0.6.1" -core-util-is@~1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" - integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== - -create-ecdh@^4.0.0: - version "4.0.4" - resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e" - integrity sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A== - dependencies: - bn.js "^4.1.0" - elliptic "^6.5.3" - -create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" - integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== - dependencies: - cipher-base "^1.0.1" - inherits "^2.0.1" - md5.js "^1.3.4" - ripemd160 "^2.0.1" - sha.js "^2.4.0" - -create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" - integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== - dependencies: - cipher-base "^1.0.3" - create-hash "^1.1.0" - inherits "^2.0.1" - ripemd160 "^2.0.0" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -crypto-browserify@^3.11.0: - version "3.12.0" - resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" - integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== - dependencies: - browserify-cipher "^1.0.0" - browserify-sign "^4.0.0" - create-ecdh "^4.0.0" - create-hash "^1.1.0" - create-hmac "^1.1.0" - diffie-hellman "^5.0.0" - inherits "^2.0.1" - pbkdf2 "^3.0.3" - public-encrypt "^4.0.0" - randombytes "^2.0.0" - randomfill "^1.0.3" - css-declaration-sorter@^6.0.3: version "6.1.3" resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.1.3.tgz#e9852e4cf940ba79f509d9425b137d1f94438dc2" @@ -936,35 +752,11 @@ deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -deferred-leveldown@~0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-0.2.0.tgz#2cef1f111e1c57870d8bbb8af2650e587cd2f5b4" - integrity sha1-LO8fER4cV4cNi7uK8mUOWHzS9bQ= - dependencies: - abstract-leveldown "~0.12.1" 
- delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= -des.js@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" - integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== - dependencies: - inherits "^2.0.1" - minimalistic-assert "^1.0.0" - -diffie-hellman@^5.0.0: - version "5.0.3" - resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" - integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== - dependencies: - bn.js "^4.1.0" - miller-rabin "^4.0.0" - randombytes "^2.0.0" - dom-serializer@^1.0.1: version "1.3.2" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.3.2.tgz#6206437d32ceefaec7161803230c7a20bc1b4d91" @@ -1007,19 +799,6 @@ electron-to-chromium@^1.3.896: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.900.tgz#5be2c5818a2a012c511b4b43e87b6ab7a296d4f5" integrity sha512-SuXbQD8D4EjsaBaJJxySHbC+zq8JrFfxtb4GIr4E9n1BcROyMcRrJCYQNpJ9N+Wjf5mFp7Wp0OHykd14JNEzzQ== -elliptic@^6.5.3: - version "6.5.4" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" - integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== - dependencies: - bn.js "^4.11.9" - brorand "^1.1.0" - hash.js "^1.0.0" - hmac-drbg "^1.0.1" - inherits "^2.0.4" - minimalistic-assert "^1.0.1" - minimalistic-crypto-utils "^1.0.1" - emojis-list@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" @@ -1030,13 +809,6 @@ entities@^2.0.0: resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== -errno@^0.1.1, errno@~0.1.1: - version "0.1.8" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" - integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== - dependencies: - prr "~1.0.1" - escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" @@ -1074,11 +846,6 @@ estree-walker@^0.2.1: resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.2.1.tgz#bdafe8095383d8414d5dc2ecf4c9173b6db9412e" integrity sha1-va/oCVOD2EFNXcLs9MkXO225QS4= -estree-walker@^0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.5.2.tgz#d3850be7529c9580d815600b53126515e146dd39" - integrity sha512-XpCnW/AE10ws/kDAs37cngSkvgIR8aN3G0MS85m7dUpuK2EREo9VJ00uvw6Dg/hXEpfsE1I1TvJOJr+Z+TL+ig== - estree-walker@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.6.1.tgz#53049143f40c6eb918b23671d1fe3219f3a1b362" @@ -1104,14 +871,6 @@ eventemitter3@^4.0.4: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== 
-evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" - integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== - dependencies: - md5.js "^1.3.4" - safe-buffer "^5.1.1" - fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" @@ -1122,11 +881,6 @@ flatpickr@^4.5.2: resolved "https://registry.yarnpkg.com/flatpickr/-/flatpickr-4.6.9.tgz#9a13383e8a6814bda5d232eae3fcdccb97dc1499" integrity sha512-F0azNNi8foVWKSF+8X+ZJzz8r9sE1G4hl06RyceIaLvyltKvDl6vqk9Lm/6AUUCi5HWaIjiUbk7UpeE/fOXOpw== -foreach@~2.0.1: - version "2.0.5" - resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" - integrity sha1-C+4AUBiusmDQo6865ljdATbsG5k= - form-data@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" @@ -1160,13 +914,6 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== -fwd-stream@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/fwd-stream/-/fwd-stream-1.0.4.tgz#ed281cabed46feecf921ee32dc4c50b372ac7cfa" - integrity sha1-7Sgcq+1G/uz5Ie4y3ExQs3KsfPo= - dependencies: - readable-stream "~1.0.26-4" - generic-names@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/generic-names/-/generic-names-2.0.1.tgz#f8a378ead2ccaa7a34f0317b05554832ae41b872" @@ -1208,32 +955,6 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" -hash-base@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" - integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== - dependencies: - inherits "^2.0.4" - readable-stream "^3.6.0" - safe-buffer "^5.2.0" - -hash.js@^1.0.0, hash.js@^1.0.3: - version "1.1.7" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" - integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== - dependencies: - inherits "^2.0.3" - minimalistic-assert "^1.0.1" - -hmac-drbg@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" - integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= - dependencies: - hash.js "^1.0.3" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.1" - html-encoding-sniffer@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" @@ -1275,11 +996,6 @@ icss-utils@^5.0.0: resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== -idb-wrapper@^1.5.0: - version "1.7.2" - resolved "https://registry.yarnpkg.com/idb-wrapper/-/idb-wrapper-1.7.2.tgz#8251afd5e77fe95568b1c16152eb44b396767ea2" - integrity 
sha512-zfNREywMuf0NzDo9mVsL0yegjsirJxHpKHvWcyRozIqQy89g0a3U+oBPOCN4cc0oCiOuYgZHimzaW/R46G1Mpg== - import-cwd@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-3.0.0.tgz#20845547718015126ea9b3676b7592fb8bd4cf92" @@ -1294,11 +1010,6 @@ import-from@^3.0.0: dependencies: resolve-from "^5.0.0" -indexof@~0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" - integrity sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10= - inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" @@ -1307,7 +1018,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: +inherits@2: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -1329,11 +1040,6 @@ is-module@^1.0.0: resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= -is-object@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/is-object/-/is-object-0.1.2.tgz#00efbc08816c33cfc4ac8251d132e10dc65098d7" - integrity sha1-AO+8CIFsM8/ErIJR0TLhDcZQmNc= - is-potential-custom-element-name@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" @@ -1351,26 +1057,6 @@ is-resolvable@^1.1.0: resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== -is@~0.2.6: - version "0.2.7" - resolved "https://registry.yarnpkg.com/is/-/is-0.2.7.tgz#3b34a2c48f359972f35042849193ae7264b63562" - integrity sha1-OzSixI81mXLzUEKEkZOucmS2NWI= - -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= - -isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= - -isbuffer@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/isbuffer/-/isbuffer-0.0.0.tgz#38c146d9df528b8bf9b0701c3d43cf12df3fc39b" - integrity sha1-OMFG2d9Si4v5sHAcPUPPEt8/w5s= - jest-worker@^26.2.1: version "26.6.2" resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" @@ -1432,91 +1118,6 @@ jsonfile@^4.0.0: optionalDependencies: graceful-fs "^4.1.6" -level-blobs@^0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/level-blobs/-/level-blobs-0.1.7.tgz#9ab9b97bb99f1edbf9f78a3433e21ed56386bdaf" - integrity sha1-mrm5e7mfHtv594o0M+Ie1WOGva8= - dependencies: - level-peek "1.0.6" - once "^1.3.0" - readable-stream "^1.0.26-4" - -level-filesystem@^1.0.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/level-filesystem/-/level-filesystem-1.2.0.tgz#a00aca9919c4a4dfafdca6a8108d225aadff63b3" - integrity sha1-oArKmRnEpN+v3KaoEI0iWq3/Y7M= - dependencies: - concat-stream "^1.4.4" - errno "^0.1.1" - fwd-stream "^1.0.4" - 
level-blobs "^0.1.7" - level-peek "^1.0.6" - level-sublevel "^5.2.0" - octal "^1.0.0" - once "^1.3.0" - xtend "^2.2.0" - -level-fix-range@2.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/level-fix-range/-/level-fix-range-2.0.0.tgz#c417d62159442151a19d9a2367868f1724c2d548" - integrity sha1-xBfWIVlEIVGhnZojZ4aPFyTC1Ug= - dependencies: - clone "~0.1.9" - -level-fix-range@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/level-fix-range/-/level-fix-range-1.0.2.tgz#bf15b915ae36d8470c821e883ddf79cd16420828" - integrity sha1-vxW5Fa422EcMgh6IPd95zRZCCCg= - -"level-hooks@>=4.4.0 <5": - version "4.5.0" - resolved "https://registry.yarnpkg.com/level-hooks/-/level-hooks-4.5.0.tgz#1b9ae61922930f3305d1a61fc4d83c8102c0dd93" - integrity sha1-G5rmGSKTDzMF0aYfxNg8gQLA3ZM= - dependencies: - string-range "~1.2" - -level-js@^2.1.3: - version "2.2.4" - resolved "https://registry.yarnpkg.com/level-js/-/level-js-2.2.4.tgz#bc055f4180635d4489b561c9486fa370e8c11697" - integrity sha1-vAVfQYBjXUSJtWHJSG+jcOjBFpc= - dependencies: - abstract-leveldown "~0.12.0" - idb-wrapper "^1.5.0" - isbuffer "~0.0.0" - ltgt "^2.1.2" - typedarray-to-buffer "~1.0.0" - xtend "~2.1.2" - -level-peek@1.0.6, level-peek@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/level-peek/-/level-peek-1.0.6.tgz#bec51c72a82ee464d336434c7c876c3fcbcce77f" - integrity sha1-vsUccqgu5GTTNkNMfIdsP8vM538= - dependencies: - level-fix-range "~1.0.2" - -level-sublevel@^5.2.0: - version "5.2.3" - resolved "https://registry.yarnpkg.com/level-sublevel/-/level-sublevel-5.2.3.tgz#744c12c72d2e72be78dde3b9b5cd84d62191413a" - integrity sha1-dEwSxy0ucr543eO5tc2E1iGRQTo= - dependencies: - level-fix-range "2.0" - level-hooks ">=4.4.0 <5" - string-range "~1.2.1" - xtend "~2.0.4" - -levelup@^0.18.2: - version "0.18.6" - resolved "https://registry.yarnpkg.com/levelup/-/levelup-0.18.6.tgz#e6a01cb089616c8ecc0291c2a9bd3f0c44e3e5eb" - integrity sha1-5qAcsIlhbI7MApHCqb0/DETj5es= - dependencies: - bl "~0.8.1" - deferred-leveldown "~0.2.0" - errno "~0.1.1" - prr "~0.0.0" - readable-stream "~1.0.26" - semver "~2.3.1" - xtend "~3.0.0" - levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" @@ -1559,18 +1160,6 @@ lodash@^4.7.0: resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== -ltgt@^2.1.2: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.1.tgz#f35ca91c493f7b73da0e07495304f17b31f87ee5" - integrity sha1-81ypHEk/e3PaDgdJUwTxezH4fuU= - -magic-string@^0.22.5: - version "0.22.5" - resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.22.5.tgz#8e9cf5afddf44385c1da5bc2a6a0dbd10b03657e" - integrity sha512-oreip9rJZkzvA8Qzk9HFs8fZGF/u7H/gtrE8EN6RjKJ9kh2HlC+yQ2QezifqTZfGyiuAV0dRv5a+y/8gBb1m9w== - dependencies: - vlq "^0.2.2" - magic-string@^0.25.7: version "0.25.7" resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051" @@ -1578,15 +1167,6 @@ magic-string@^0.25.7: dependencies: sourcemap-codec "^1.4.4" -md5.js@^1.3.4: - version "1.3.5" - resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" - integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" 
- safe-buffer "^5.1.2" - mdn-data@2.0.14: version "2.0.14" resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" @@ -1597,14 +1177,6 @@ merge-stream@^2.0.0: resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== -miller-rabin@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" - integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== - dependencies: - bn.js "^4.0.0" - brorand "^1.0.1" - mime-db@1.51.0: version "1.51.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c" @@ -1617,16 +1189,6 @@ mime-types@^2.1.12: dependencies: mime-db "1.51.0" -minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimalistic-crypto-utils@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" - integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= - minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" @@ -1676,25 +1238,6 @@ nwsapi@^2.2.0: resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== -object-keys@~0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.2.0.tgz#cddec02998b091be42bf1035ae32e49f1cb6ea67" - integrity sha1-zd7AKZiwkb5CvxA1rjLknxy26mc= - dependencies: - foreach "~2.0.1" - indexof "~0.0.1" - is "~0.2.6" - -object-keys@~0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.4.0.tgz#28a6aae7428dd2c3a92f3d95f21335dd204e0336" - integrity sha1-KKaq50KN0sOpLz2V8hM13SBOAzY= - -octal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/octal/-/octal-1.0.0.tgz#63e7162a68efbeb9e213588d58e989d1e5c4530b" - integrity sha1-Y+cWKmjvvrniE1iNWOmJ0eXEUws= - once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" @@ -1734,17 +1277,6 @@ p-timeout@^3.2.0: dependencies: p-finally "^1.0.0" -parse-asn1@^5.0.0, parse-asn1@^5.1.5: - version "5.1.6" - resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.6.tgz#385080a3ec13cb62a62d39409cb3e88844cdaed4" - integrity sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw== - dependencies: - asn1.js "^5.2.0" - browserify-aes "^1.0.0" - evp_bytestokey "^1.0.0" - pbkdf2 "^3.0.3" - safe-buffer "^5.1.1" - parse5@6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" @@ -1760,17 +1292,6 @@ path-parse@^1.0.6: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity 
sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -pbkdf2@^3.0.3: - version "3.1.2" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075" - integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== - dependencies: - create-hash "^1.1.2" - create-hmac "^1.1.4" - ripemd160 "^2.0.1" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - picocolors@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" @@ -2068,110 +1589,28 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= -process-es6@^0.11.2, process-es6@^0.11.6: - version "0.11.6" - resolved "https://registry.yarnpkg.com/process-es6/-/process-es6-0.11.6.tgz#c6bb389f9a951f82bd4eb169600105bd2ff9c778" - integrity sha1-xrs4n5qVH4K9TrFpYAEFvS/5x3g= - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - promise.series@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/promise.series/-/promise.series-0.2.0.tgz#2cc7ebe959fc3a6619c04ab4dbdc9e452d864bbd" integrity sha1-LMfr6Vn8OmYZwEq029yeRS2GS70= -prr@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a" - integrity sha1-GoS4WQgyVQFBGFPQCB7j+obikmo= - -prr@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" - integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= - psl@^1.1.33: version "1.8.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== -public-encrypt@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" - integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== - dependencies: - bn.js "^4.1.0" - browserify-rsa "^4.0.0" - create-hash "^1.1.0" - parse-asn1 "^5.0.0" - randombytes "^2.0.1" - safe-buffer "^5.1.2" - punycode@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== -randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0: +randombytes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" -randomfill@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" - integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== - dependencies: - randombytes "^2.0.5" - safe-buffer "^5.1.0" - 
-readable-stream@^1.0.26-4: - version "1.1.14" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" - integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk= - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.1" - isarray "0.0.1" - string_decoder "~0.10.x" - -readable-stream@^2.2.2: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" - integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -readable-stream@~1.0.26, readable-stream@~1.0.26-4: - version "1.0.34" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" - integrity sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw= - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.1" - isarray "0.0.1" - string_decoder "~0.10.x" - regexparam@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-2.0.0.tgz#059476767d5f5f87f735fc7922d133fd1a118c8c" @@ -2200,14 +1639,6 @@ resolve@^1.17.0, resolve@^1.19.0: is-core-module "^2.2.0" path-parse "^1.0.6" -ripemd160@^2.0.0, ripemd160@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" - integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - rollup-plugin-json@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/rollup-plugin-json/-/rollup-plugin-json-4.0.0.tgz#a18da0a4b30bf5ca1ee76ddb1422afbb84ae2b9e" @@ -2215,27 +1646,12 @@ rollup-plugin-json@^4.0.0: dependencies: rollup-pluginutils "^2.5.0" -rollup-plugin-node-builtins@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/rollup-plugin-node-builtins/-/rollup-plugin-node-builtins-2.1.2.tgz#24a1fed4a43257b6b64371d8abc6ce1ab14597e9" - integrity sha1-JKH+1KQyV7a2Q3HYq8bOGrFFl+k= +rollup-plugin-polyfill-node@^0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-polyfill-node/-/rollup-plugin-polyfill-node-0.8.0.tgz#859c070822f5e38d221e5b4238cb34aa894c2b19" + integrity sha512-C4UeKedOmOBkB3FgR+z/v9kzRwV1Q/H8xWs1u1+CNe4XOV6hINfOrcO+TredKxYvopCmr+WKUSNsFUnD1RLHgQ== dependencies: - browserify-fs "^1.0.0" - buffer-es6 "^4.9.2" - crypto-browserify "^3.11.0" - process-es6 "^0.11.2" - -rollup-plugin-node-globals@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/rollup-plugin-node-globals/-/rollup-plugin-node-globals-1.4.0.tgz#5e1f24a9bb97c0ef51249f625e16c7e61b7c020b" - integrity sha512-xRkB+W/m1KLIzPUmG0ofvR+CPNcvuCuNdjVBVS7ALKSxr3EDhnzNceGkGi1m8MToSli13AzKFYH4ie9w3I5L3g== - dependencies: - acorn "^5.7.3" - buffer-es6 "^4.9.3" - estree-walker "^0.5.2" - magic-string "^0.22.5" - process-es6 "^0.11.6" - rollup-pluginutils "^2.3.1" + "@rollup/plugin-inject" 
"^4.0.0" rollup-plugin-postcss@^4.0.0: version "4.0.1" @@ -2289,7 +1705,7 @@ rollup-pluginutils@^1.3.1: estree-walker "^0.2.1" minimatch "^3.0.2" -rollup-pluginutils@^2.3.1, rollup-pluginutils@^2.5.0, rollup-pluginutils@^2.8.2: +rollup-pluginutils@^2.5.0, rollup-pluginutils@^2.8.2: version "2.8.2" resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz#72f2af0748b592364dbd3389e600e5a9444a351e" integrity sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ== @@ -2303,22 +1719,17 @@ rollup@^2.44.0: optionalDependencies: fsevents "~2.3.2" -safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@^5.1.0: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@^5.2.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - safe-identifier@^0.4.2: version "0.4.2" resolved "https://registry.yarnpkg.com/safe-identifier/-/safe-identifier-0.4.2.tgz#cf6bfca31c2897c588092d1750d30ef501d59fcb" integrity sha512-6pNbSMW6OhAi9j+N8V+U715yBQsaWJ7eyEUaOrawX+isg5ZxhUlV1NipNtgaKHmFGiABwt+ZF04Ii+3Xjkg+8w== -"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.1.0: +"safer-buffer@>= 2.1.2 < 3": version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== @@ -2330,11 +1741,6 @@ saxes@^5.0.1: dependencies: xmlchars "^2.2.0" -semver@~2.3.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-2.3.2.tgz#b9848f25d6cf36333073ec9ef8856d42f1233e52" - integrity sha1-uYSPJdbPNjMwc+ye+IVtQvEjPlI= - serialize-javascript@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" @@ -2342,14 +1748,6 @@ serialize-javascript@^4.0.0: dependencies: randombytes "^2.1.0" -sha.js@^2.4.0, sha.js@^2.4.8: - version "2.4.11" - resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" - integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - shortid@^2.2.15: version "2.2.16" resolved "https://registry.yarnpkg.com/shortid/-/shortid-2.2.16.tgz#b742b8f0cb96406fd391c76bfc18a67a57fe5608" @@ -2400,23 +1798,6 @@ string-hash@^1.1.1: resolved "https://registry.yarnpkg.com/string-hash/-/string-hash-1.1.3.tgz#e8aafc0ac1855b4666929ed7dd1275df5d6c811b" integrity sha1-6Kr8CsGFW0Zmkp7X3RJ1311sgRs= -string-range@~1.2, string-range@~1.2.1: - version "1.2.2" - resolved "https://registry.yarnpkg.com/string-range/-/string-range-1.2.2.tgz#a893ed347e72299bc83befbbf2a692a8d239d5dd" - integrity sha1-qJPtNH5yKZvIO++78qaSqNI51d0= - -string_decoder@^1.1.1, string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity 
sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -string_decoder@~0.10.x: - version "0.10.31" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" - integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= - style-inject@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/style-inject/-/style-inject-0.3.0.tgz#d21c477affec91811cc82355832a700d22bf8dd3" @@ -2585,31 +1966,16 @@ type-check@~0.3.2: dependencies: prelude-ls "~1.1.2" -typedarray-to-buffer@~1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-1.0.4.tgz#9bb8ba0e841fb3f4cf1fe7c245e9f3fa8a5fe99c" - integrity sha1-m7i6DoQfs/TPH+fCRenz+opf6Zw= - -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= - universalify@^0.1.0, universalify@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== -util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: +util-deprecate@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= -vlq@^0.2.2: - version "0.2.3" - resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" - integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow== - w3c-hr-time@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" @@ -2680,31 +2046,6 @@ xmlchars@^2.2.0: resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== -xtend@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-2.2.0.tgz#eef6b1f198c1c8deafad8b1765a04dad4a01c5a9" - integrity sha1-7vax8ZjByN6vrYsXZaBNrUoBxak= - -xtend@~2.0.4: - version "2.0.6" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-2.0.6.tgz#5ea657a6dba447069c2e59c58a1138cb0c5e6cee" - integrity sha1-XqZXptukRwacLlnFihE4ywxebO4= - dependencies: - is-object "~0.1.2" - object-keys "~0.2.0" - -xtend@~2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-2.1.2.tgz#6efecc2a4dad8e6962c4901b337ce7ba87b5d28b" - integrity sha1-bv7MKk2tjmlixJAbM3znuoe10os= - dependencies: - object-keys "~0.4.0" - -xtend@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-3.0.0.tgz#5cce7407baf642cba7becda568111c493f59665a" - integrity sha1-XM50B7r2Qsunvs2laBEcST9ZZlo= - yaml@^1.10.2: version "1.10.2" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" diff --git a/packages/server/nodemon.json b/packages/server/nodemon.json index e3a38c0b7d..3c0f052aa0 100644 --- a/packages/server/nodemon.json +++ b/packages/server/nodemon.json @@ -1,5 +1,5 @@ { - "watch": ["src", "../auth"], + "watch": ["src", "../backend-core"], "ext": "js,ts,json", "ignore": ["src/**/*.spec.ts", 
"src/**/*.spec.js"], "exec": "ts-node src/index.ts" diff --git a/packages/server/package.json b/packages/server/package.json index a444c5d092..13898325ec 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/server", "email": "hi@budibase.com", - "version": "1.0.44-alpha.9", + "version": "1.0.49-alpha.4", "description": "Budibase Web Server", "main": "src/index.ts", "repository": { @@ -70,9 +70,9 @@ "license": "GPL-3.0", "dependencies": { "@apidevtools/swagger-parser": "^10.0.3", - "@budibase/backend-core": "^1.0.44-alpha.9", - "@budibase/client": "^1.0.44-alpha.9", - "@budibase/string-templates": "^1.0.44-alpha.9", + "@budibase/backend-core": "^1.0.49-alpha.4", + "@budibase/client": "^1.0.49-alpha.4", + "@budibase/string-templates": "^1.0.49-alpha.4", "@bull-board/api": "^3.7.0", "@bull-board/koa": "^3.7.0", "@elastic/elasticsearch": "7.10.0", @@ -92,6 +92,8 @@ "fix-path": "3.0.0", "form-data": "^4.0.0", "fs-extra": "8.1.0", + "google-auth-library": "^7.11.0", + "google-spreadsheet": "^3.2.0", "jimp": "0.16.1", "joi": "17.2.1", "js-yaml": "^4.1.0", @@ -110,7 +112,7 @@ "mongodb": "3.6.3", "mssql": "6.2.3", "mysql2": "^2.3.1", - "node-fetch": "2.6.0", + "node-fetch": "2.6.7", "open": "^8.4.0", "pg": "8.5.1", "pino-pretty": "4.0.0", @@ -139,6 +141,7 @@ "@jest/test-sequencer": "^24.8.0", "@types/apidoc": "^0.50.0", "@types/bull": "^3.15.1", + "@types/google-spreadsheet": "^3.1.5", "@types/jest": "^26.0.23", "@types/koa": "^2.13.3", "@types/koa-router": "^7.4.2", diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js index 48e2b19a9d..eb1f7bc5e6 100644 --- a/packages/server/src/api/controllers/application.js +++ b/packages/server/src/api/controllers/application.js @@ -33,10 +33,7 @@ const { Replication, } = require("@budibase/backend-core/db") const { USERS_TABLE_SCHEMA } = require("../../constants") -const { - getDeployedApps, - removeAppFromUserRoles, -} = require("../../utilities/workerRequests") +const { removeAppFromUserRoles } = require("../../utilities/workerRequests") const { clientLibraryPath, stringToReadStream } = require("../../utilities") const { getAllLocks } = require("../../utilities/redis") const { @@ -78,31 +75,44 @@ function getUserRoleId(ctx) { : ctx.user.role._id } -async function getAppUrlIfNotInUse(ctx) { +exports.getAppUrl = ctx => { + // construct the url let url if (ctx.request.body.url) { + // if the url is provided, use that url = encodeURI(ctx.request.body.url) } else if (ctx.request.body.name) { + // otherwise use the name url = encodeURI(`${ctx.request.body.name}`) } if (url) { url = `/${url.replace(URL_REGEX_SLASH, "")}`.toLowerCase() } - if (!env.SELF_HOSTED) { - return url - } - const deployedApps = await getDeployedApps() - if ( - url && - deployedApps[url] != null && - ctx.params != null && - deployedApps[url].appId !== ctx.params.appId - ) { - ctx.throw(400, "App name/URL is already in use.") - } return url } +const checkAppUrl = (ctx, apps, url, currentAppId) => { + if (currentAppId) { + apps = apps.filter(app => app.appId !== currentAppId) + } + if (apps.some(app => app.url === url)) { + ctx.throw(400, "App URL is already in use.") + } +} + +const checkAppName = (ctx, apps, name, currentAppId) => { + // TODO: Replace with Joi + if (!name) { + ctx.throw(400, "Name is required") + } + if (currentAppId) { + apps = apps.filter(app => app.appId !== currentAppId) + } + if (apps.some(app => app.name === name)) { + ctx.throw(400, "App name is 
already in use.") + } +} + async function createInstance(template) { const tenantId = isMultiTenant() ? getTenantId() : null const baseAppId = generateAppID(tenantId) @@ -206,6 +216,12 @@ exports.fetchAppPackage = async ctx => { } exports.create = async ctx => { + const apps = await getAllApps(CouchDB, { dev: true }) + const name = ctx.request.body.name + checkAppName(ctx, apps, name) + const url = exports.getAppUrl(ctx) + checkAppUrl(ctx, apps, url) + const { useTemplate, templateKey, templateString } = ctx.request.body const instanceConfig = { useTemplate, @@ -218,7 +234,6 @@ exports.create = async ctx => { const instance = await createInstance(instanceConfig) const appId = instance._id - const url = await getAppUrlIfNotInUse(ctx) const db = new CouchDB(appId) let _rev try { @@ -235,7 +250,7 @@ exports.create = async ctx => { type: "app", version: packageJson.version, componentLibraries: ["@budibase/standard-components"], - name: ctx.request.body.name, + name: name, url: url, template: ctx.request.body.template, instance: instance, @@ -262,8 +277,22 @@ exports.create = async ctx => { ctx.body = newApplication } +// This endpoint currently operates as a PATCH rather than a PUT +// Thus name and url fields are handled only if present exports.update = async ctx => { - const data = await updateAppPackage(ctx, ctx.request.body, ctx.params.appId) + const apps = await getAllApps(CouchDB, { dev: true }) + // validation + const name = ctx.request.body.name + if (name) { + checkAppName(ctx, apps, name, ctx.params.appId) + } + const url = await exports.getAppUrl(ctx) + if (url) { + checkAppUrl(ctx, apps, url, ctx.params.appId) + ctx.request.body.url = url + } + + const data = await updateAppPackage(ctx.request.body, ctx.params.appId) ctx.status = 200 ctx.body = data } @@ -285,7 +314,7 @@ exports.updateClient = async ctx => { version: packageJson.version, revertableVersion: currentVersion, } - const data = await updateAppPackage(ctx, appPackageUpdates, ctx.params.appId) + const data = await updateAppPackage(appPackageUpdates, ctx.params.appId) ctx.status = 200 ctx.body = data } @@ -308,7 +337,7 @@ exports.revertClient = async ctx => { version: application.revertableVersion, revertableVersion: null, } - const data = await updateAppPackage(ctx, appPackageUpdates, ctx.params.appId) + const data = await updateAppPackage(appPackageUpdates, ctx.params.appId) ctx.status = 200 ctx.body = data } @@ -381,12 +410,11 @@ exports.sync = async (ctx, next) => { } } -const updateAppPackage = async (ctx, appPackage, appId) => { - const url = await getAppUrlIfNotInUse(ctx) +const updateAppPackage = async (appPackage, appId) => { const db = new CouchDB(appId) const application = await db.get(DocumentTypes.APP_METADATA) - const newAppPackage = { ...application, ...appPackage, url } + const newAppPackage = { ...application, ...appPackage } if (appPackage._rev !== application._rev) { newAppPackage._rev = application._rev } diff --git a/packages/server/src/api/controllers/auth.js b/packages/server/src/api/controllers/auth.js index 3f680225af..f1b665c069 100644 --- a/packages/server/src/api/controllers/auth.js +++ b/packages/server/src/api/controllers/auth.js @@ -16,6 +16,8 @@ exports.fetchSelf = async ctx => { const user = await getFullUser(ctx, userId) // this shouldn't be returned by the app self delete user.roles + // forward the csrf token from the session + user.csrfToken = ctx.user.csrfToken if (appId) { const db = new CouchDB(appId) @@ -24,6 +26,8 @@ exports.fetchSelf = async ctx => { try { const userTable = await 
db.get(InternalTables.USER_METADATA) const metadata = await db.get(userId) + // make sure there is never a stale csrf token + delete metadata.csrfToken // specifically needs to make sure is enriched ctx.body = await outputProcessing(ctx, userTable, { ...user, diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js index f08b622c3e..5ab3c0a865 100644 --- a/packages/server/src/api/controllers/datasource.js +++ b/packages/server/src/api/controllers/datasource.js @@ -38,6 +38,13 @@ exports.fetch = async function (ctx) { ) ).rows.map(row => row.doc) + for (let datasource of datasources) { + if (datasource.config && datasource.config.auth) { + // strip secrets from response so they don't show in the network request + delete datasource.config.auth + } + } + ctx.body = [bbInternalDb, ...datasources] } @@ -94,8 +101,13 @@ exports.update = async function (ctx) { const db = new CouchDB(ctx.appId) const datasourceId = ctx.params.datasourceId let datasource = await db.get(datasourceId) + const auth = datasource.config.auth await invalidateVariables(datasource, ctx.request.body) datasource = { ...datasource, ...ctx.request.body } + if (auth && !ctx.request.body.auth) { + // don't strip auth config from DB + datasource.config.auth = auth + } const response = await db.put(datasource) datasource._rev = response.rev diff --git a/packages/server/src/api/controllers/hosting.js b/packages/server/src/api/controllers/hosting.js deleted file mode 100644 index 0360643942..0000000000 --- a/packages/server/src/api/controllers/hosting.js +++ /dev/null @@ -1,22 +0,0 @@ -const CouchDB = require("../../db") -const { getDeployedApps } = require("../../utilities/workerRequests") -const { getScopedConfig } = require("@budibase/backend-core/db") -const { Configs } = require("@budibase/backend-core/constants") -const { checkSlashesInUrl } = require("../../utilities") - -exports.fetchUrls = async ctx => { - const appId = ctx.appId - const db = new CouchDB(appId) - const settings = await getScopedConfig(db, { type: Configs.SETTINGS }) - let appUrl = "http://localhost:10000/app" - if (settings && settings["platformUrl"]) { - appUrl = checkSlashesInUrl(`${settings["platformUrl"]}/app`) - } - ctx.body = { - app: appUrl, - } -} - -exports.getDeployedApps = async ctx => { - ctx.body = await getDeployedApps() -} diff --git a/packages/server/src/api/controllers/migrations.js b/packages/server/src/api/controllers/migrations.js new file mode 100644 index 0000000000..6a890349c3 --- /dev/null +++ b/packages/server/src/api/controllers/migrations.js @@ -0,0 +1,13 @@ +const { migrate, MIGRATIONS } = require("../../migrations") + +exports.migrate = async ctx => { + const options = ctx.request.body + // don't await as can take a while, just return + migrate(options) + ctx.status = 200 +} + +exports.fetchDefinitions = async ctx => { + ctx.body = MIGRATIONS + ctx.status = 200 +} diff --git a/packages/server/src/api/controllers/query/index.js b/packages/server/src/api/controllers/query/index.js index 21db1eebbf..9cf7612e8a 100644 --- a/packages/server/src/api/controllers/query/index.js +++ b/packages/server/src/api/controllers/query/index.js @@ -141,6 +141,16 @@ async function execute(ctx, opts = { rowsOnly: false }) { const query = await db.get(ctx.params.queryId) const datasource = await db.get(query.datasourceId) + const enrichedParameters = ctx.request.body.parameters || {} + // make sure parameters are fully enriched with defaults + if (query && query.parameters) { + for (let 
parameter of query.parameters) { + if (!enrichedParameters[parameter.name]) { + enrichedParameters[parameter.name] = parameter.default + } + } + } + // call the relevant CRUD method on the integration class try { const { rows, pagination, extra } = await Runner.run({ @@ -149,7 +159,7 @@ async function execute(ctx, opts = { rowsOnly: false }) { queryVerb: query.queryVerb, fields: query.fields, pagination: ctx.request.body.pagination, - parameters: ctx.request.body.parameters, + parameters: enrichedParameters, transformer: query.transformer, queryId: ctx.params.queryId, }) @@ -178,8 +188,9 @@ const removeDynamicVariables = async (db, queryId) => { if (dynamicVariables) { // delete dynamic variables from the datasource - const newVariables = dynamicVariables.filter(dv => dv.queryId !== queryId) - datasource.config.dynamicVariables = newVariables + datasource.config.dynamicVariables = dynamicVariables.filter( + dv => dv.queryId !== queryId + ) await db.put(datasource) // invalidate the deleted variables diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index af199561dc..0bffd134c1 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -525,7 +525,7 @@ module External { const linkTable = this.getTable(tableId) // @ts-ignore const linkPrimary = linkTable.primary[0] - const rows = related[key].rows || [] + const rows = related[key]?.rows || [] const found = rows.find( (row: { [key: string]: any }) => row[linkPrimary] === relationship.id || diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.js index 75caaf2fda..0e9c2e651d 100644 --- a/packages/server/src/api/controllers/row/internal.js +++ b/packages/server/src/api/controllers/row/internal.js @@ -1,8 +1,8 @@ const CouchDB = require("../../../db") const linkRows = require("../../../db/linkedRows") const { - getRowParams, generateRowID, + getRowParams, DocumentTypes, InternalTables, } = require("../../../db/utils") @@ -10,11 +10,9 @@ const userController = require("../user") const { inputProcessing, outputProcessing, - processAutoColumn, cleanupAttachments, } = require("../../../utilities/rowProcessor") const { FieldTypes } = require("../../../constants") -const { isEqual } = require("lodash") const { validate, findRow } = require("./utils") const { fullSearch, paginatedSearch } = require("./internalSearch") const { getGlobalUsersFromMetadata } = require("../../../utilities/global") @@ -27,6 +25,7 @@ const { getFromMemoryDoc, } = require("../view/utils") const { cloneDeep } = require("lodash/fp") +const { finaliseRow, updateRelatedFormula } = require("./staticFormula") const CALCULATION_TYPES = { SUM: "sum", @@ -34,51 +33,6 @@ const CALCULATION_TYPES = { STATS: "stats", } -async function storeResponse(ctx, db, row, oldTable, table) { - row.type = "row" - // don't worry about rev, tables handle rev/lastID updates - // if another row has been written since processing this will - // handle the auto ID clash - if (!isEqual(oldTable, table)) { - try { - await db.put(table) - } catch (err) { - if (err.status === 409) { - const updatedTable = await db.get(table._id) - let response = processAutoColumn(null, updatedTable, row, { - reprocessing: true, - }) - await db.put(response.table) - row = response.row - } else { - throw err - } - } - } - const response = await db.put(row) - row._rev = response.rev - // process the row before 
return, to include relationships - row = await outputProcessing(ctx, table, row, { squash: false }) - return { row, table } -} - -// doesn't do the outputProcessing -async function getRawTableData(ctx, db, tableId) { - let rows - if (tableId === InternalTables.USER_METADATA) { - await userController.fetchMetadata(ctx) - rows = ctx.body - } else { - const response = await db.allDocs( - getRowParams(tableId, null, { - include_docs: true, - }) - ) - rows = response.rows.map(row => row.doc) - } - return rows -} - async function getView(db, viewName) { let mainGetter = env.SELF_HOSTED ? getFromDesignDoc : getFromMemoryDoc let secondaryGetter = env.SELF_HOSTED ? getFromMemoryDoc : getFromDesignDoc @@ -105,6 +59,22 @@ async function getView(db, viewName) { return viewInfo } +async function getRawTableData(ctx, db, tableId) { + let rows + if (tableId === InternalTables.USER_METADATA) { + await userController.fetchMetadata(ctx) + rows = ctx.body + } else { + const response = await db.allDocs( + getRowParams(tableId, null, { + include_docs: true, + }) + ) + rows = response.rows.map(row => row.doc) + } + return rows +} + exports.patch = async ctx => { const appId = ctx.appId const db = new CouchDB(appId) @@ -162,7 +132,10 @@ exports.patch = async ctx => { return { row: ctx.body, table } } - return storeResponse(ctx, db, row, dbTable, table) + return finaliseRow(ctx.appId, table, row, { + oldTable: dbTable, + updateFormula: true, + }) } exports.save = async function (ctx) { @@ -196,7 +169,10 @@ exports.save = async function (ctx) { table, }) - return storeResponse(ctx, db, row, dbTable, table) + return finaliseRow(ctx.appId, table, row, { + oldTable: dbTable, + updateFormula: true, + }) } exports.fetchView = async ctx => { @@ -302,6 +278,8 @@ exports.destroy = async function (ctx) { }) // remove any attachments that were on the row from object storage await cleanupAttachments(appId, table, { row }) + // remove any static formula + await updateRelatedFormula(appId, table, row) let response if (ctx.params.tableId === InternalTables.USER_METADATA) { @@ -350,6 +328,7 @@ exports.bulkDestroy = async ctx => { } // remove any attachments that were on the rows from object storage await cleanupAttachments(appId, table, { rows }) + await updateRelatedFormula(appId, table, rows) await Promise.all(updates) return { response: { ok: true }, rows } } diff --git a/packages/server/src/api/controllers/row/internalSearch.js b/packages/server/src/api/controllers/row/internalSearch.js index 3a2586331a..a185386b7a 100644 --- a/packages/server/src/api/controllers/row/internalSearch.js +++ b/packages/server/src/api/controllers/row/internalSearch.js @@ -37,22 +37,30 @@ class QueryBuilder { } setLimit(limit) { - this.limit = limit + if (limit != null) { + this.limit = limit + } return this } setSort(sort) { - this.sort = sort + if (sort != null) { + this.sort = sort + } return this } setSortOrder(sortOrder) { - this.sortOrder = sortOrder + if (sortOrder != null) { + this.sortOrder = sortOrder + } return this } setSortType(sortType) { - this.sortType = sortType + if (sortType != null) { + this.sortType = sortType + } return this } diff --git a/packages/server/src/api/controllers/row/staticFormula.js b/packages/server/src/api/controllers/row/staticFormula.js new file mode 100644 index 0000000000..fc0edd1cb4 --- /dev/null +++ b/packages/server/src/api/controllers/row/staticFormula.js @@ -0,0 +1,157 @@ +const CouchDB = require("../../../db") +const { getRowParams } = require("../../../db/utils") +const { + outputProcessing, + 
processAutoColumn, + processFormulas, +} = require("../../../utilities/rowProcessor") +const { FieldTypes, FormulaTypes } = require("../../../constants") +const { isEqual } = require("lodash") +const { cloneDeep } = require("lodash/fp") + +/** + * This function runs through a list of enriched rows, looks at the rows which + * are related and then checks if they need the state of their formulas + * updated. + * NOTE: this will only affect static formulas. + */ +exports.updateRelatedFormula = async (appId, table, enrichedRows) => { + const db = new CouchDB(appId) + // no formula to update, we're done + if (!table.relatedFormula) { + return + } + let promises = [] + for (let enrichedRow of Array.isArray(enrichedRows) + ? enrichedRows + : [enrichedRows]) { + // the related rows by tableId + let relatedRows = {} + for (let [key, field] of Object.entries(enrichedRow)) { + const columnDefinition = table.schema[key] + if (columnDefinition && columnDefinition.type === FieldTypes.LINK) { + const relatedTableId = columnDefinition.tableId + if (!relatedRows[relatedTableId]) { + relatedRows[relatedTableId] = [] + } + relatedRows[relatedTableId] = relatedRows[relatedTableId].concat(field) + } + } + for (let tableId of table.relatedFormula) { + let relatedTable + try { + // no rows to update, skip + if (!relatedRows[tableId] || relatedRows[tableId].length === 0) { + continue + } + relatedTable = await db.get(tableId) + } catch (err) { + // no error scenario, table doesn't seem to exist anymore, ignore + } + for (let column of Object.values(relatedTable.schema)) { + // needs to be updated in related rows + if ( + column.type === FieldTypes.FORMULA && + column.formulaType === FormulaTypes.STATIC + ) { + // re-enrich rows for all the related, don't update the related formula for them + promises = promises.concat( + relatedRows[tableId].map(related => + exports.finaliseRow(appId, relatedTable, related, { + updateFormula: false, + }) + ) + ) + } + } + } + } + await Promise.all(promises) +} + +exports.updateAllFormulasInTable = async (appId, table) => { + const db = new CouchDB(appId) + // start by getting the raw rows (which will be written back to DB after update) + let rows = ( + await db.allDocs( + getRowParams(table._id, null, { + include_docs: true, + }) + ) + ).rows.map(row => row.doc) + // now enrich the rows, note the clone so that we have the base state of the + // rows so that we don't write any of the enriched information back + let enrichedRows = await outputProcessing({ appId }, table, cloneDeep(rows), { + squash: false, + }) + const updatedRows = [] + for (let row of rows) { + // find the enriched row, if found process the formulas + const enrichedRow = enrichedRows.find(enriched => enriched._id === row._id) + if (enrichedRow) { + const processed = processFormulas(table, cloneDeep(row), { + dynamic: false, + contextRows: enrichedRow, + }) + // values have changed, need to add to bulk docs to update + if (!isEqual(processed, row)) { + updatedRows.push(processed) + } + } + } + await db.bulkDocs(updatedRows) +} + +/** + * This function runs at the end of the save/patch functions of the row controller; all this + * really does is enrich the row, handle any static formula processing, then return the enriched + * row. The reason we need to return the enriched row is that the automation row created trigger + * expects the row to be totally enriched/contain all relationships.
+ */ +exports.finaliseRow = async ( + appId, + table, + row, + { oldTable, updateFormula } = { updateFormula: true } +) => { + const db = new CouchDB(appId) + row.type = "row" + // process the row before return, to include relationships + let enrichedRow = await outputProcessing({ appId }, table, cloneDeep(row), { + squash: false, + }) + // use enriched row to generate formulas for saving, specifically only use as context + row = processFormulas(table, row, { + dynamic: false, + contextRows: enrichedRow, + }) + + // don't worry about rev, tables handle rev/lastID updates + // if another row has been written since processing this will + // handle the auto ID clash + if (oldTable && !isEqual(oldTable, table)) { + try { + await db.put(table) + } catch (err) { + if (err.status === 409) { + const updatedTable = await db.get(table._id) + let response = processAutoColumn(null, updatedTable, row, { + reprocessing: true, + }) + await db.put(response.table) + row = response.row + } else { + throw err + } + } + } + const response = await db.put(row) + // for response, calculate the formulas for the enriched row + enrichedRow._rev = response.rev + enrichedRow = await processFormulas(table, enrichedRow, { dynamic: false }) + // this updates the related formulas in other rows based on the relations to this row + if (updateFormula) { + await exports.updateRelatedFormula(appId, table, enrichedRow) + } + return { row: enrichedRow, table } +} diff --git a/packages/server/src/api/controllers/row/utils.js b/packages/server/src/api/controllers/row/utils.js index 3fb0cc78d8..51bc03eba4 100644 --- a/packages/server/src/api/controllers/row/utils.js +++ b/packages/server/src/api/controllers/row/utils.js @@ -52,10 +52,7 @@ exports.validate = async ({ appId, tableId, row, table }) => { const constraints = cloneDeep(table.schema[fieldName].constraints) const type = table.schema[fieldName].type // special case for options, need to always allow unselected (null) - if ( - (type === FieldTypes.OPTIONS || type === FieldTypes.ARRAY) && - constraints.inclusion - ) { + if (type === FieldTypes.OPTIONS && constraints.inclusion) { constraints.inclusion.push(null) } let res diff --git a/packages/server/src/api/controllers/static/index.js b/packages/server/src/api/controllers/static/index.js index 51fc2df3e0..11bb14e282 100644 --- a/packages/server/src/api/controllers/static/index.js +++ b/packages/server/src/api/controllers/static/index.js @@ -5,7 +5,7 @@ const { resolve, join } = require("../../../utilities/centralPath") const uuid = require("uuid") const { ObjectStoreBuckets } = require("../../../constants") const { processString } = require("@budibase/string-templates") -const { getDeployedApps } = require("../../../utilities/workerRequests") +const { getAllApps } = require("@budibase/backend-core/db") const CouchDB = require("../../../db") const { loadHandlebarsFile, @@ -17,6 +17,8 @@ const { clientLibraryPath } = require("../../../utilities") const { upload } = require("../../../utilities/fileSystem") const { attachmentsRelativeURL } = require("../../../utilities") const { DocumentTypes } = require("../../../db/utils") +const AWS = require("aws-sdk") +const AWS_REGION = env.AWS_REGION ? 
env.AWS_REGION : "eu-west-1" async function prepareUpload({ s3Key, bucket, metadata, file }) { const response = await upload({ @@ -37,12 +39,18 @@ async function prepareUpload({ s3Key, bucket, metadata, file }) { } } -async function checkForSelfHostedURL(ctx) { - // the "appId" component of the URL may actually be a specific self hosted URL +async function getAppIdFromUrl(ctx) { + // the "appId" component of the URL can be the id or the custom url let possibleAppUrl = `/${encodeURI(ctx.params.appId).toLowerCase()}` - const apps = await getDeployedApps() - if (apps[possibleAppUrl] && apps[possibleAppUrl].appId) { - return apps[possibleAppUrl].appId + + // search prod apps for a url that matches, exclude dev where id is always used + const apps = await getAllApps(CouchDB, { dev: false }) + const app = apps.filter( + a => a.url && a.url.toLowerCase() === possibleAppUrl + )[0] + + if (app && app.appId) { + return app.appId } else { return ctx.params.appId } @@ -75,10 +83,7 @@ exports.uploadFile = async function (ctx) { } exports.serveApp = async function (ctx) { - let appId = ctx.params.appId - if (env.SELF_HOSTED) { - appId = await checkForSelfHostedURL(ctx) - } + let appId = await getAppIdFromUrl(ctx) const App = require("./templates/BudibaseApp.svelte").default const db = new CouchDB(appId, { skip_setup: true }) const appInfo = await db.get(DocumentTypes.APP_METADATA) @@ -104,3 +109,51 @@ exports.serveClientLibrary = async function (ctx) { root: join(NODE_MODULES_PATH, "@budibase", "client", "dist"), }) } + +exports.getSignedUploadURL = async function (ctx) { + const database = new CouchDB(ctx.appId) + + // Ensure datasource is valid + let datasource + try { + const { datasourceId } = ctx.params + datasource = await database.get(datasourceId) + if (!datasource) { + ctx.throw(400, "The specified datasource could not be found") + } + } catch (error) { + ctx.throw(400, "The specified datasource could not be found") + } + + // Ensure we aren't using a custom endpoint + if (datasource?.config?.endpoint) { + ctx.throw(400, "S3 datasources with custom endpoints are not supported") + } + + // Determine type of datasource and generate signed URL + let signedUrl + let publicUrl + if (datasource.source === "S3") { + const { bucket, key } = ctx.request.body || {} + if (!bucket || !key) { + ctx.throw(400, "bucket and key values are required") + return + } + try { + const s3 = new AWS.S3({ + region: AWS_REGION, + accessKeyId: datasource?.config?.accessKeyId, + secretAccessKey: datasource?.config?.secretAccessKey, + apiVersion: "2006-03-01", + signatureVersion: "v4", + }) + const params = { Bucket: bucket, Key: key } + signedUrl = s3.getSignedUrl("putObject", params) + publicUrl = `https://${bucket}.s3.${AWS_REGION}.amazonaws.com/${key}` + } catch (error) { + ctx.throw(400, error) + } + } + + ctx.body = { signedUrl, publicUrl } +} diff --git a/packages/server/src/api/controllers/table/bulkFormula.js b/packages/server/src/api/controllers/table/bulkFormula.js new file mode 100644 index 0000000000..1866d8e650 --- /dev/null +++ b/packages/server/src/api/controllers/table/bulkFormula.js @@ -0,0 +1,183 @@ +const CouchDB = require("../../../db") +const { FieldTypes, FormulaTypes } = require("../../../constants") +const { getAllInternalTables, clearColumns } = require("./utils") +const { doesContainStrings } = require("@budibase/string-templates") +const { cloneDeep } = require("lodash/fp") +const { isEqual, uniq } = require("lodash") +const { updateAllFormulasInTable } = require("../row/staticFormula") + 
+function isStaticFormula(column) { + return ( + column.type === FieldTypes.FORMULA && + column.formulaType === FormulaTypes.STATIC + ) +} + +/** + * This retrieves the formula columns from a table schema that use a specified column name + * in the formula. + */ +function getFormulaThatUseColumn(table, columnNames) { + let formula = [] + columnNames = Array.isArray(columnNames) ? columnNames : [columnNames] + for (let column of Object.values(table.schema)) { + // not a static formula, or doesn't contain a relationship + if (!isStaticFormula(column)) { + continue + } + if (!doesContainStrings(column.formula, columnNames)) { + continue + } + formula.push(column.name) + } + return formula +} + +/** + * This function checks, when a related table, column or related column is deleted, whether any + * tables need to have the formula column removed. + */ +async function checkIfFormulaNeedsCleared( + appId, + table, + { oldTable, deletion } +) { + const db = new CouchDB(appId) + // start by retrieving all tables, remove the current table from the list + const tables = (await getAllInternalTables(appId)).filter( + tbl => tbl._id !== table._id + ) + const schemaToUse = oldTable ? oldTable.schema : table.schema + let removedColumns = Object.values(schemaToUse).filter( + column => deletion || !table.schema[column.name] + ) + // remove any formula columns that used related columns + for (let removed of removedColumns) { + let tableToUse = table + // if relationship, get the related table + if (removed.type === FieldTypes.LINK) { + tableToUse = tables.find(table => table._id === removed.tableId) + } + const columnsToDelete = getFormulaThatUseColumn(tableToUse, removed.name) + if (columnsToDelete.length > 0) { + await clearColumns(appId, table, columnsToDelete) + } + // need a special case, where a column has been removed from this table, but was used + // in a different, related table's formula + if (!table.relatedFormula) { + continue + } + for (let relatedTableId of table.relatedFormula) { + const relatedColumns = Object.values(table.schema).filter( + column => column.tableId === relatedTableId + ) + const relatedTable = tables.find(table => table._id === relatedTableId) + // look to see if the column was used in a relationship formula, + // relationships won't be used for this + if (relatedTable && relatedColumns && removed.type !== FieldTypes.LINK) { + let relatedFormulaToRemove = [] + for (let column of relatedColumns) { + relatedFormulaToRemove = relatedFormulaToRemove.concat( + getFormulaThatUseColumn(relatedTable, [ + column.fieldName, + removed.name, + ]) + ) + } + if (relatedFormulaToRemove.length > 0) { + await clearColumns(appId, relatedTable, uniq(relatedFormulaToRemove)) + } + } + } + } +} + +/** + * This function adds a note to related tables that they are + * used in a static formula - so that the link controller + * can manage hydrating related rows' formula fields. This is + * specifically only for static formulas.
+ */ +async function updateRelatedFormulaLinksOnTables( + appId, + table, + { deletion } = { deletion: false } +) { + const db = new CouchDB(appId) + // start by retrieving all tables, remove the current table from the list + const tables = (await getAllInternalTables(appId)).filter( + tbl => tbl._id !== table._id + ) + // clone the tables, so we can compare at end + const initialTables = cloneDeep(tables) + // first find the related column names + const relatedColumns = Object.values(table.schema).filter( + col => col.type === FieldTypes.LINK + ) + // we start by removing the formula field from all tables + for (let otherTable of tables) { + if (!otherTable.relatedFormula) { + continue + } + const index = otherTable.relatedFormula.indexOf(table._id) + if (index !== -1) { + otherTable.relatedFormula.splice(index, 1) + } + } + // if deleting, just remove the table IDs, don't try to add + if (!deletion) { + for (let relatedCol of relatedColumns) { + let columns = getFormulaThatUseColumn(table, relatedCol.name) + if (!columns || columns.length === 0) { + continue + } + const relatedTable = tables.find( + related => related._id === relatedCol.tableId + ) + // check if the table is already in the list of related formula, if it isn't, then add it + if ( + relatedTable && + (!relatedTable.relatedFormula || + !relatedTable.relatedFormula.includes(table._id)) + ) { + relatedTable.relatedFormula = relatedTable.relatedFormula + ? [...relatedTable.relatedFormula, table._id] + : [table._id] + } + } + } + // now we just need to compare all the tables and see if any need to be saved + for (let initial of initialTables) { + const found = tables.find(tbl => initial._id === tbl._id) + if (found && !isEqual(initial, found)) { + await db.put(found) + } + } +} + +async function checkIfFormulaUpdated(appId, table, { oldTable }) { + // look to see if any formula values have changed + const shouldUpdate = Object.values(table.schema).find( + column => + isStaticFormula(column) && + (!oldTable || + !oldTable.schema[column.name] || + !isEqual(oldTable.schema[column.name], column)) + ) + // if a static formula column has updated, then need to run the update + if (shouldUpdate != null) { + await updateAllFormulasInTable(appId, table) + } +} + +exports.runStaticFormulaChecks = async ( + appId, + table, + { oldTable, deletion } +) => { + await updateRelatedFormulaLinksOnTables(appId, table, { deletion }) + await checkIfFormulaNeedsCleared(appId, table, { oldTable, deletion }) + if (!deletion) { + await checkIfFormulaUpdated(appId, table, { oldTable }) + } +} diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js index 20dc10017d..2f6bfd0cb3 100644 --- a/packages/server/src/api/controllers/table/index.js +++ b/packages/server/src/api/controllers/table/index.js @@ -3,12 +3,8 @@ const internal = require("./internal") const external = require("./external") const csvParser = require("../../../utilities/csvParser") const { isExternalTable, isSQL } = require("../../../integrations/utils") -const { - getTableParams, - getDatasourceParams, - BudibaseInternalDB, -} = require("../../../db/utils") -const { getTable } = require("./utils") +const { getDatasourceParams } = require("../../../db/utils") +const { getTable, getAllInternalTables } = require("./utils") function pickApi({ tableId, table }) { if (table && !tableId) { @@ -26,17 +22,7 @@ function pickApi({ tableId, table }) { exports.fetch = async function (ctx) { const db = new CouchDB(ctx.appId) - const internalTables =
await db.allDocs( - getTableParams(null, { - include_docs: true, - }) - ) - - const internal = internalTables.rows.map(tableDoc => ({ - ...tableDoc.doc, - type: "internal", - sourceId: BudibaseInternalDB._id, - })) + const internal = await getAllInternalTables(ctx.appId) const externalTables = await db.allDocs( getDatasourceParams("plus", { diff --git a/packages/server/src/api/controllers/table/internal.js b/packages/server/src/api/controllers/table/internal.js index 10a5c9746a..f38a114c25 100644 --- a/packages/server/src/api/controllers/table/internal.js +++ b/packages/server/src/api/controllers/table/internal.js @@ -8,6 +8,9 @@ const { getTable, handleDataImport, } = require("./utils") +const usageQuota = require("../../../utilities/usageQuota") +const { cleanupAttachments } = require("../../../utilities/rowProcessor") +const { runStaticFormulaChecks } = require("./bulkFormula") exports.save = async function (ctx) { const appId = ctx.appId @@ -103,7 +106,8 @@ exports.save = async function (ctx) { tableToSave._rev = result.rev tableToSave = await tableSaveFunctions.after(tableToSave) - + // has to run after, make sure it has _id + await runStaticFormulaChecks(appId, tableToSave, { oldTable }) return tableToSave } @@ -119,6 +123,7 @@ exports.destroy = async function (ctx) { }) ) await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true }))) + await usageQuota.update(usageQuota.Properties.ROW, -rows.rows.length) // update linked rows await linkRows.updateLinks({ @@ -139,6 +144,9 @@ exports.destroy = async function (ctx) { await db.deleteIndex(existingIndex) } + // has to run after, make sure it has _id + await runStaticFormulaChecks(appId, tableToDelete, { deletion: true }) + await cleanupAttachments(appId, tableToDelete, { rows }) return tableToDelete } diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js index 459566ce91..f1907666c9 100644 --- a/packages/server/src/api/controllers/table/utils.js +++ b/packages/server/src/api/controllers/table/utils.js @@ -4,11 +4,20 @@ const { getRowParams, generateRowID, InternalTables, + getTableParams, + BudibaseInternalDB, } = require("../../../db/utils") -const { isEqual } = require("lodash/fp") +const { isEqual } = require("lodash") const { AutoFieldSubTypes, FieldTypes } = require("../../../constants") -const { inputProcessing } = require("../../../utilities/rowProcessor") -const { USERS_TABLE_SCHEMA, SwitchableTypes } = require("../../../constants") +const { + inputProcessing, + cleanupAttachments, +} = require("../../../utilities/rowProcessor") +const { + USERS_TABLE_SCHEMA, + SwitchableTypes, + CanSwitchTypes, +} = require("../../../constants") const { isExternalTable, breakExternalTableId, @@ -16,8 +25,25 @@ const { } = require("../../../integrations/utils") const { getViews, saveView } = require("../view/utils") const viewTemplate = require("../view/viewBuilder") +const usageQuota = require("../../../utilities/usageQuota") +const { cloneDeep } = require("lodash/fp") -exports.checkForColumnUpdates = async (db, oldTable, updatedTable) => { +exports.clearColumns = async (appId, table, columnNames) => { + const db = new CouchDB(appId) + const rows = await db.allDocs( + getRowParams(table._id, null, { + include_docs: true, + }) + ) + return db.bulkDocs( + rows.rows.map(({ doc }) => { + columnNames.forEach(colName => delete doc[colName]) + return doc + }) + ) +} + +exports.checkForColumnUpdates = async (appId, db, oldTable, updatedTable) => { let updatedRows = [] const rename 
= updatedTable._rename let deletedColumns = [] @@ -34,16 +60,20 @@ exports.checkForColumnUpdates = async (db, oldTable, updatedTable) => { include_docs: true, }) ) - updatedRows = rows.rows.map(({ doc }) => { + const rawRows = rows.rows.map(({ doc }) => doc) + updatedRows = rawRows.map(row => { + row = cloneDeep(row) if (rename) { - doc[rename.updated] = doc[rename.old] - delete doc[rename.old] + row[rename.updated] = row[rename.old] + delete row[rename.old] } else if (deletedColumns.length !== 0) { - deletedColumns.forEach(colName => delete doc[colName]) + deletedColumns.forEach(colName => delete row[colName]) } - return doc + return row }) + // cleanup any attachments from object storage for deleted attachment columns + await cleanupAttachments(appId, updatedTable, { oldTable, rows: rawRows }) // Update views await exports.checkForViewUpdates(db, updatedTable, rename, deletedColumns) delete updatedTable._rename @@ -112,7 +142,11 @@ exports.handleDataImport = async (appId, user, table, dataImport) => { finalData.push(row) } + await usageQuota.update(usageQuota.Properties.ROW, finalData.length, { + dryRun: true, + }) await db.bulkDocs(finalData) + await usageQuota.update(usageQuota.Properties.ROW, finalData.length) let response = await db.put(table) table._rev = response._rev return table @@ -200,6 +234,7 @@ class TableSaveFunctions { // when confirmed valid async mid(table) { let response = await exports.checkForColumnUpdates( + this.appId, this.db, this.oldTable, table @@ -225,6 +260,20 @@ class TableSaveFunctions { } } +exports.getAllInternalTables = async appId => { + const db = new CouchDB(appId) + const internalTables = await db.allDocs( + getTableParams(null, { + include_docs: true, + }) + ) + return internalTables.rows.map(tableDoc => ({ + ...tableDoc.doc, + type: "internal", + sourceId: BudibaseInternalDB._id, + })) +} + exports.getAllExternalTables = async (appId, datasourceId) => { const db = new CouchDB(appId) const datasource = await db.get(datasourceId) @@ -343,6 +392,23 @@ exports.foreignKeyStructure = (keyName, meta = null) => { return structure } +exports.areSwitchableTypes = (type1, type2) => { + if ( + SwitchableTypes.indexOf(type1) === -1 && + SwitchableTypes.indexOf(type2) === -1 + ) { + return false + } + for (let option of CanSwitchTypes) { + const index1 = option.indexOf(type1), + index2 = option.indexOf(type2) + if (index1 !== -1 && index2 !== -1 && index1 !== index2) { + return true + } + } + return false +} + exports.hasTypeChanged = (table, oldTable) => { if (!oldTable) { return false @@ -353,7 +419,7 @@ exports.hasTypeChanged = (table, oldTable) => { continue } const newType = table.schema[key].type - if (oldType !== newType && SwitchableTypes.indexOf(oldType) === -1) { + if (oldType !== newType && !exports.areSwitchableTypes(oldType, newType)) { return true } } diff --git a/packages/server/src/api/controllers/user.js b/packages/server/src/api/controllers/user.js index d87afc4309..1bd8bd6a12 100644 --- a/packages/server/src/api/controllers/user.js +++ b/packages/server/src/api/controllers/user.js @@ -167,6 +167,8 @@ exports.updateSelfMetadata = async function (ctx) { ctx.request.body._id = ctx.user._id // make sure no stale rev delete ctx.request.body._rev + // make sure no csrf token + delete ctx.request.body.csrfToken await exports.updateMetadata(ctx) } diff --git a/packages/server/src/api/routes/hosting.js b/packages/server/src/api/routes/hosting.js deleted file mode 100644 index 5af2d3c622..0000000000 --- a/packages/server/src/api/routes/hosting.js +++ 
/dev/null @@ -1,13 +0,0 @@ -const Router = require("@koa/router") -const controller = require("../controllers/hosting") -const authorized = require("../../middleware/authorized") -const { BUILDER } = require("@budibase/backend-core/permissions") - -const router = Router() - -router - .get("/api/hosting/urls", authorized(BUILDER), controller.fetchUrls) - // this isn't risky, doesn't return anything about apps other than names and URLs - .get("/api/hosting/apps", controller.getDeployedApps) - -module.exports = router diff --git a/packages/server/src/api/routes/index.js b/packages/server/src/api/routes/index.js index 29d0cd42b4..8ded7104b0 100644 --- a/packages/server/src/api/routes/index.js +++ b/packages/server/src/api/routes/index.js @@ -20,11 +20,11 @@ const integrationRoutes = require("./integration") const permissionRoutes = require("./permission") const datasourceRoutes = require("./datasource") const queryRoutes = require("./query") -const hostingRoutes = require("./hosting") const backupRoutes = require("./backup") const metadataRoutes = require("./metadata") const devRoutes = require("./dev") const cloudRoutes = require("./cloud") +const migrationRoutes = require("./migrations") exports.mainRoutes = [ authRoutes, @@ -46,7 +46,6 @@ exports.mainRoutes = [ permissionRoutes, datasourceRoutes, queryRoutes, - hostingRoutes, backupRoutes, metadataRoutes, devRoutes, @@ -55,6 +54,7 @@ exports.mainRoutes = [ // this could be breaking as koa may recognise other routes as this tableRoutes, rowRoutes, + migrationRoutes, ] exports.staticRoutes = staticRoutes diff --git a/packages/server/src/api/routes/migrations.js b/packages/server/src/api/routes/migrations.js new file mode 100644 index 0000000000..01e573edb3 --- /dev/null +++ b/packages/server/src/api/routes/migrations.js @@ -0,0 +1,14 @@ +const Router = require("@koa/router") +const migrationsController = require("../controllers/migrations") +const router = Router() +const { internalApi } = require("@budibase/backend-core/auth") + +router + .post("/api/migrations/run", internalApi, migrationsController.migrate) + .get( + "/api/migrations/definitions", + internalApi, + migrationsController.fetchDefinitions + ) + +module.exports = router diff --git a/packages/server/src/api/routes/static.js b/packages/server/src/api/routes/static.js index b40d5fe4e9..8a1e529a59 100644 --- a/packages/server/src/api/routes/static.js +++ b/packages/server/src/api/routes/static.js @@ -46,5 +46,10 @@ router ) // TODO: this likely needs to be secured in some way .get("/:appId/:path*", controller.serveApp) + .post( + "/api/attachments/:datasourceId/url", + authorized(PermissionTypes.TABLE, PermissionLevels.READ), + controller.getSignedUploadURL + ) module.exports = router diff --git a/packages/server/src/api/routes/tests/application.spec.js b/packages/server/src/api/routes/tests/application.spec.js index ce1cb80435..d2273a31b8 100644 --- a/packages/server/src/api/routes/tests/application.spec.js +++ b/packages/server/src/api/routes/tests/application.spec.js @@ -53,8 +53,8 @@ describe("/applications", () => { describe("fetch", () => { it("lists all applications", async () => { - await config.createApp(request, "app1") - await config.createApp(request, "app2") + await config.createApp("app1") + await config.createApp("app2") const res = await request .get(`/api/applications?status=${AppStatus.DEV}`) diff --git a/packages/server/src/api/routes/tests/auth.spec.js b/packages/server/src/api/routes/tests/auth.spec.js index c50780a8d5..fa26eb83ac 100644 --- 
a/packages/server/src/api/routes/tests/auth.spec.js +++ b/packages/server/src/api/routes/tests/auth.spec.js @@ -13,10 +13,9 @@ describe("/authenticate", () => { describe("fetch self", () => { it("should be able to fetch self", async () => { - const headers = await config.login() const res = await request .get(`/api/self`) - .set(headers) + .set(config.defaultHeaders()) .expect("Content-Type", /json/) .expect(200) expect(res.body._id).toEqual(generateUserMetadataID("us_uuid1")) diff --git a/packages/server/src/api/routes/tests/hosting.spec.js b/packages/server/src/api/routes/tests/hosting.spec.js deleted file mode 100644 index 241a0daf06..0000000000 --- a/packages/server/src/api/routes/tests/hosting.spec.js +++ /dev/null @@ -1,36 +0,0 @@ -// mock out node fetch for this -jest.mock("node-fetch") - -const { checkBuilderEndpoint } = require("./utilities/TestFunctions") -const setup = require("./utilities") - -describe("/hosting", () => { - let request = setup.getRequest() - let config = setup.getConfig() - let app - - afterAll(setup.afterAll) - - beforeEach(async () => { - app = await config.init() - }) - - describe("fetchUrls", () => { - it("should be able to fetch current app URLs", async () => { - const res = await request - .get(`/api/hosting/urls`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.app).toEqual(`http://localhost:10000/app`) - }) - - it("should apply authorization to endpoint", async () => { - await checkBuilderEndpoint({ - config, - method: "GET", - url: `/api/hosting/urls`, - }) - }) - }) -}) \ No newline at end of file diff --git a/packages/server/src/api/routes/tests/static.spec.js b/packages/server/src/api/routes/tests/static.spec.js new file mode 100644 index 0000000000..2ba9a6e8c8 --- /dev/null +++ b/packages/server/src/api/routes/tests/static.spec.js @@ -0,0 +1,98 @@ +jest.mock("node-fetch") +jest.mock("aws-sdk", () => ({ + config: { + update: jest.fn(), + }, + DynamoDB: { + DocumentClient: jest.fn(), + }, + S3: jest.fn(() => ({ + getSignedUrl: jest.fn(() => { + return "my-url" + }), + })), +})) + +const setup = require("./utilities") + +describe("/attachments", () => { + let request = setup.getRequest() + let config = setup.getConfig() + let app + + afterAll(setup.afterAll) + + beforeEach(async () => { + app = await config.init() + }) + + describe("generateSignedUrls", () => { + let datasource + + beforeEach(async () => { + datasource = await config.createDatasource({ + datasource: { + type: "datasource", + name: "Test", + source: "S3", + config: {}, + }, + }) + }) + + it("should be able to generate a signed upload URL", async () => { + const bucket = "foo" + const key = "bar" + const res = await request + .post(`/api/attachments/${datasource._id}/url`) + .send({ bucket, key }) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(200) + expect(res.body.signedUrl).toEqual("my-url") + expect(res.body.publicUrl).toEqual( + `https://${bucket}.s3.eu-west-1.amazonaws.com/${key}` + ) + }) + + it("should handle an invalid datasource ID", async () => { + const res = await request + .post(`/api/attachments/foo/url`) + .send({ + bucket: "foo", + key: "bar", + }) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(400) + expect(res.body.message).toEqual( + "The specified datasource could not be found" + ) + }) + + it("should require a bucket parameter", async () => { + const res = await request + .post(`/api/attachments/${datasource._id}/url`) + .send({ + bucket: undefined, + key: 
"bar", + }) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(400) + expect(res.body.message).toEqual("bucket and key values are required") + }) + + it("should require a key parameter", async () => { + const res = await request + .post(`/api/attachments/${datasource._id}/url`) + .send({ + bucket: "foo", + }) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(400) + expect(res.body.message).toEqual("bucket and key values are required") + }) + }) +}) diff --git a/packages/server/src/app.ts b/packages/server/src/app.ts index 060169a777..0c0ef68ad9 100644 --- a/packages/server/src/app.ts +++ b/packages/server/src/app.ts @@ -1,7 +1,7 @@ // need to load environment first import { ExtendableContext } from "koa" -const env = require("./environment") +import * as env from "./environment" const CouchDB = require("./db") require("@budibase/backend-core").init(CouchDB) const Koa = require("koa") @@ -16,6 +16,7 @@ const Sentry = require("@sentry/node") const fileSystem = require("./utilities/fileSystem") const bullboard = require("./automations/bullboard") const redis = require("./utilities/redis") +import * as migrations from "./migrations" const app = new Koa() @@ -84,13 +85,25 @@ module.exports = server.listen(env.PORT || 0, async () => { await automations.init() }) -process.on("uncaughtException", err => { - console.error(err) +const shutdown = () => { server.close() server.destroy() +} + +process.on("uncaughtException", err => { + console.error(err) + shutdown() }) process.on("SIGTERM", () => { - server.close() - server.destroy() + shutdown() }) + +// run migrations on startup if not done via http +// not recommended in a clustered environment +if (!env.HTTP_MIGRATIONS) { + migrations.migrate().catch(err => { + console.error("Error performing migrations. 
Exiting.\n", err) + shutdown() + }) +} diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js index 8e5b44cc06..1937121062 100644 --- a/packages/server/src/automations/steps/createRow.js +++ b/packages/server/src/automations/steps/createRow.js @@ -1,6 +1,5 @@ const rowController = require("../../api/controllers/row") const automationUtils = require("../automationUtils") -const env = require("../../environment") const usage = require("../../utilities/usageQuota") const { buildCtx } = require("./utils") @@ -83,10 +82,9 @@ exports.run = async function ({ inputs, appId, emitter }) { inputs.row.tableId, inputs.row ) - if (env.USE_QUOTAS) { - await usage.update(usage.Properties.ROW, 1) - } + await usage.update(usage.Properties.ROW, 1, { dryRun: true }) await rowController.save(ctx) + await usage.update(usage.Properties.ROW, 1) return { row: inputs.row, response: ctx.body, diff --git a/packages/server/src/automations/steps/deleteRow.js b/packages/server/src/automations/steps/deleteRow.js index c7bee577a5..e41e5ad263 100644 --- a/packages/server/src/automations/steps/deleteRow.js +++ b/packages/server/src/automations/steps/deleteRow.js @@ -1,5 +1,4 @@ const rowController = require("../../api/controllers/row") -const env = require("../../environment") const usage = require("../../utilities/usageQuota") const { buildCtx } = require("./utils") const automationUtils = require("../automationUtils") @@ -74,9 +73,7 @@ exports.run = async function ({ inputs, appId, emitter }) { }) try { - if (env.isProd()) { - await usage.update(usage.Properties.ROW, -1) - } + await usage.update(usage.Properties.ROW, -1) await rowController.destroy(ctx) return { response: ctx.body, diff --git a/packages/server/src/constants/index.js b/packages/server/src/constants/index.js index b63b71d2c2..16a98e5c58 100644 --- a/packages/server/src/constants/index.js +++ b/packages/server/src/constants/index.js @@ -45,19 +45,27 @@ exports.FieldTypes = { INTERNAL: "internal", } -exports.SwitchableTypes = [ - exports.FieldTypes.STRING, - exports.FieldTypes.OPTIONS, - exports.FieldTypes.NUMBER, - exports.FieldTypes.BOOLEAN, +exports.CanSwitchTypes = [ + [exports.FieldTypes.JSON, exports.FieldTypes.ARRAY], + [exports.FieldTypes.STRING, exports.FieldTypes.OPTIONS], + [exports.FieldTypes.BOOLEAN, exports.FieldTypes.NUMBER], ] +exports.SwitchableTypes = exports.CanSwitchTypes.reduce((prev, current) => + prev ? 
prev.concat(current) : current +) + exports.RelationshipTypes = { ONE_TO_MANY: "one-to-many", MANY_TO_ONE: "many-to-one", MANY_TO_MANY: "many-to-many", } +exports.FormulaTypes = { + STATIC: "static", + DYNAMIC: "dynamic", +} + exports.AuthTypes = { APP: "app", BUILDER: "builder", @@ -75,6 +83,10 @@ exports.DataSourceOperation = { DELETE_TABLE: "DELETE_TABLE", } +exports.DatasourceAuthTypes = { + GOOGLE: "google", +} + exports.SortDirection = { ASCENDING: "ASCENDING", DESCENDING: "DESCENDING", diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js index 6835719e5f..eab287aa33 100644 --- a/packages/server/src/db/linkedRows/index.js +++ b/packages/server/src/db/linkedRows/index.js @@ -72,7 +72,7 @@ async function getLinksForRows(appId, rows) { ) } -async function getFullLinkedDocs(ctx, appId, links) { +async function getFullLinkedDocs(appId, links) { // create DBs const db = new CouchDB(appId) const linkedRowIds = links.map(link => link.id) @@ -146,13 +146,12 @@ exports.updateLinks = async function (args) { /** * Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row. * This is required for formula fields, this may only be utilised internally (for now). - * @param {object} ctx The request which is looking for rows. + * @param {string} appId The ID of the app which this request is in the context of. * @param {object} table The table from which the rows originated. * @param {array} rows The rows which are to be enriched. * @return {Promise<*>} returns the rows with all of the enriched relationships on it. */ -exports.attachFullLinkedDocs = async (ctx, table, rows) => { - const appId = ctx.appId +exports.attachFullLinkedDocs = async (appId, table, rows) => { const linkedTableIds = getLinkedTableIDs(table) if (linkedTableIds.length === 0) { return rows @@ -166,7 +165,7 @@ exports.attachFullLinkedDocs = async (ctx, table, rows) => { // clear any existing links that could be dupe'd rows = clearRelationshipFields(table, rows) // now get the docs and combine into the rows - let linked = await getFullLinkedDocs(ctx, appId, links) + let linked = await getFullLinkedDocs(appId, links) const linkedTables = [] for (let row of rows) { for (let link of links.filter(link => link.thisId === row._id)) { diff --git a/packages/server/src/definitions/common.ts b/packages/server/src/definitions/common.ts index 472471855c..16885973f5 100644 --- a/packages/server/src/definitions/common.ts +++ b/packages/server/src/definitions/common.ts @@ -17,6 +17,8 @@ export interface FieldSchema { autocolumn?: boolean throughFrom?: string throughTo?: string + formula?: string + formulaType?: string main?: boolean meta?: { toTable: string @@ -46,6 +48,7 @@ export interface Table extends Base { schema: TableSchema primaryDisplay?: string sourceId?: string + relatedFormula?: string[] constrained?: string[] } diff --git a/packages/server/src/definitions/datasource.ts b/packages/server/src/definitions/datasource.ts index ee71a7b08c..efac92334e 100644 --- a/packages/server/src/definitions/datasource.ts +++ b/packages/server/src/definitions/datasource.ts @@ -47,6 +47,7 @@ export enum SourceNames { ARANGODB = "ARANGODB", REST = "REST", ORACLE = "ORACLE", + GOOGLE_SHEETS = "GOOGLE_SHEETS", } export enum IncludeRelationships { @@ -86,6 +87,8 @@ export interface ExtraQueryConfig { export interface Integration { docs: string plus?: boolean + auth?: { type: string } + relationships?: boolean description: string friendlyName: string datasource: {} 
diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js index a92e113851..99343937d9 100644 --- a/packages/server/src/environment.js +++ b/packages/server/src/environment.js @@ -38,10 +38,12 @@ module.exports = { MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY, USE_QUOTAS: process.env.USE_QUOTAS, + EXCLUDE_QUOTAS_TENANTS: process.env.EXCLUDE_QUOTAS_TENANTS, REDIS_URL: process.env.REDIS_URL, REDIS_PASSWORD: process.env.REDIS_PASSWORD, INTERNAL_API_KEY: process.env.INTERNAL_API_KEY, MULTI_TENANCY: process.env.MULTI_TENANCY, + HTTP_MIGRATIONS: process.env.HTTP_MIGRATIONS, // environment NODE_ENV: process.env.NODE_ENV, JEST_WORKER_ID: process.env.JEST_WORKER_ID, diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts new file mode 100644 index 0000000000..5f76e5b548 --- /dev/null +++ b/packages/server/src/integrations/googlesheets.ts @@ -0,0 +1,353 @@ +import { + DatasourceFieldTypes, + Integration, + QueryJson, + QueryTypes, +} from "../definitions/datasource" +import { OAuth2Client } from "google-auth-library" +import { DatasourcePlus } from "./base/datasourcePlus" +import { Row, Table, TableSchema } from "../definitions/common" +import { buildExternalTableId } from "./utils" +import { DataSourceOperation, FieldTypes } from "../constants" +import { GoogleSpreadsheet } from "google-spreadsheet" +import { table } from "console" + +module GoogleSheetsModule { + const { getGlobalDB } = require("@budibase/backend-core/tenancy") + const { getScopedConfig } = require("@budibase/backend-core/db") + const { Configs } = require("@budibase/backend-core/constants") + + interface GoogleSheetsConfig { + spreadsheetId: string + auth: OAuthClientConfig + } + + interface OAuthClientConfig { + appId: string + accessToken: string + refreshToken: string + } + + const SCHEMA: Integration = { + plus: true, + auth: { + type: "google", + }, + relationships: false, + docs: "https://developers.google.com/sheets/api/quickstart/nodejs", + description: + "Create and collaborate on online spreadsheets in real-time and from any device. 
", + friendlyName: "Google Sheets", + datasource: { + spreadsheetId: { + type: DatasourceFieldTypes.STRING, + required: true, + }, + }, + query: { + create: { + type: QueryTypes.FIELDS, + fields: { + sheet: { + type: DatasourceFieldTypes.STRING, + required: true, + }, + row: { + type: QueryTypes.JSON, + required: true, + }, + }, + }, + read: { + type: QueryTypes.FIELDS, + fields: { + sheet: { + type: DatasourceFieldTypes.STRING, + required: true, + }, + }, + }, + update: { + type: QueryTypes.FIELDS, + fields: { + sheet: { + type: DatasourceFieldTypes.STRING, + required: true, + }, + rowIndex: { + type: DatasourceFieldTypes.STRING, + required: true, + }, + row: { + type: QueryTypes.JSON, + required: true, + }, + }, + }, + delete: { + type: QueryTypes.FIELDS, + fields: { + sheet: { + type: DatasourceFieldTypes.STRING, + required: true, + }, + rowIndex: { + type: DatasourceFieldTypes.NUMBER, + required: true, + }, + }, + }, + }, + } + + class GoogleSheetsIntegration implements DatasourcePlus { + private readonly config: GoogleSheetsConfig + private client: any + public tables: Record = {} + public schemaErrors: Record = {} + + constructor(config: GoogleSheetsConfig) { + this.config = config + const spreadsheetId = this.cleanSpreadsheetUrl(this.config.spreadsheetId) + this.client = new GoogleSpreadsheet(spreadsheetId) + } + + /** + * Pull the spreadsheet ID out from a valid google sheets URL + * @param spreadsheetId - the URL or standard spreadsheetId of the google sheet + * @returns spreadsheet Id of the google sheet + */ + cleanSpreadsheetUrl(spreadsheetId: string) { + if (!spreadsheetId) { + throw new Error( + "You must set a spreadsheet ID in your configuration to fetch tables." + ) + } + const parts = spreadsheetId.split("/") + return parts.length > 5 ? 
parts[5] : spreadsheetId + } + + async connect() { + try { + // Initialise oAuth client + const db = getGlobalDB() + const googleConfig = await getScopedConfig(db, { + type: Configs.GOOGLE, + }) + const oauthClient = new OAuth2Client({ + clientId: googleConfig.clientID, + clientSecret: googleConfig.clientSecret, + }) + oauthClient.credentials.access_token = this.config.auth.accessToken + oauthClient.credentials.refresh_token = this.config.auth.refreshToken + this.client.useOAuth2Client(oauthClient) + await this.client.loadInfo() + } catch (err) { + console.error("Error connecting to google sheets", err) + throw err + } + } + + async buildSchema(datasourceId: string) { + await this.connect() + const sheets = await this.client.sheetsByIndex + const tables: Record = {} + for (let sheet of sheets) { + // must fetch rows to determine schema + await sheet.getRows() + // build schema + const schema: TableSchema = {} + + // build schema from headers + for (let header of sheet.headerValues) { + schema[header] = { + name: header, + type: FieldTypes.STRING, + } + } + + // create tables + tables[sheet.title] = { + _id: buildExternalTableId(datasourceId, sheet.title), + name: sheet.title, + primary: ["rowNumber"], + schema, + } + } + + this.tables = tables + } + + async query(json: QueryJson) { + const sheet = json.endpoint.entityId + + const handlers = { + [DataSourceOperation.CREATE]: () => + this.create({ sheet, row: json.body }), + [DataSourceOperation.READ]: () => this.read({ sheet }), + [DataSourceOperation.UPDATE]: () => + this.update({ + // exclude the header row and zero index + rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2, + sheet, + row: json.body, + }), + [DataSourceOperation.DELETE]: () => + this.delete({ + // exclude the header row and zero index + rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2, + sheet, + }), + [DataSourceOperation.CREATE_TABLE]: () => + this.createTable(json?.table?.name), + [DataSourceOperation.UPDATE_TABLE]: () => this.updateTable(json.table), + [DataSourceOperation.DELETE_TABLE]: () => + this.deleteTable(json?.table?.name), + } + + const internalQueryMethod = handlers[json.endpoint.operation] + + return await internalQueryMethod() + } + + buildRowObject(headers: string[], values: string[], rowNumber: number) { + const rowObject: { rowNumber: number; [key: string]: any } = { rowNumber } + for (let i = 0; i < headers.length; i++) { + rowObject._id = rowNumber + rowObject[headers[i]] = values[i] + } + return rowObject + } + + async createTable(name?: string) { + try { + await this.connect() + const sheet = await this.client.addSheet({ title: name }) + return sheet + } catch (err) { + console.error("Error creating new table in google sheets", err) + throw err + } + } + + async updateTable(table?: any) { + try { + await this.connect() + const sheet = await this.client.sheetsByTitle[table.name] + await sheet.loadHeaderRow() + + if (table._rename) { + const headers = [] + for (let header of sheet.headerValues) { + if (header === table._rename.old) { + headers.push(table._rename.updated) + } else { + headers.push(header) + } + } + await sheet.setHeaderRow(headers) + } else { + let newField = Object.keys(table.schema).find( + key => !sheet.headerValues.includes(key) + ) + await sheet.setHeaderRow([...sheet.headerValues, newField]) + } + } catch (err) { + console.error("Error updating table in google sheets", err) + throw err + } + } + + async deleteTable(sheet: any) { + try { + await this.connect() + const sheetToDelete = await this.client.sheetsByTitle[sheet] + 
return await sheetToDelete.delete() + } catch (err) { + console.error("Error deleting table in google sheets", err) + throw err + } + } + + async create(query: { sheet: string; row: any }) { + try { + await this.connect() + const sheet = await this.client.sheetsByTitle[query.sheet] + const rowToInsert = + typeof query.row === "string" ? JSON.parse(query.row) : query.row + const row = await sheet.addRow(rowToInsert) + return [ + this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber), + ] + } catch (err) { + console.error("Error writing to google sheets", err) + throw err + } + } + + async read(query: { sheet: string }) { + try { + await this.connect() + const sheet = await this.client.sheetsByTitle[query.sheet] + const rows = await sheet.getRows() + const headerValues = sheet.headerValues + const response = [] + for (let row of rows) { + response.push( + this.buildRowObject(headerValues, row._rawData, row._rowNumber) + ) + } + return response + } catch (err) { + console.error("Error reading from google sheets", err) + throw err + } + } + + async update(query: { sheet: string; rowIndex: number; row: any }) { + try { + await this.connect() + const sheet = await this.client.sheetsByTitle[query.sheet] + const rows = await sheet.getRows() + const row = rows[query.rowIndex] + if (row) { + const updateValues = query.row + for (let key in updateValues) { + row[key] = updateValues[key] + } + await row.save() + return [ + this.buildRowObject( + sheet.headerValues, + row._rawData, + row._rowNumber + ), + ] + } else { + throw new Error("Row does not exist.") + } + } catch (err) { + console.error("Error reading from google sheets", err) + throw err + } + } + + async delete(query: { sheet: string; rowIndex: number }) { + await this.connect() + const sheet = await this.client.sheetsByTitle[query.sheet] + const rows = await sheet.getRows() + const row = rows[query.rowIndex] + if (row) { + await row.delete() + return [{ deleted: query.rowIndex }] + } else { + throw new Error("Row does not exist.") + } + } + } + + module.exports = { + schema: SCHEMA, + integration: GoogleSheetsIntegration, + } +} diff --git a/packages/server/src/integrations/index.ts b/packages/server/src/integrations/index.ts index 8f2f083fc5..4679e658b6 100644 --- a/packages/server/src/integrations/index.ts +++ b/packages/server/src/integrations/index.ts @@ -9,6 +9,7 @@ const airtable = require("./airtable") const mysql = require("./mysql") const arangodb = require("./arangodb") const rest = require("./rest") +const googlesheets = require("./googlesheets") const { SourceNames } = require("../definitions/datasource") const DEFINITIONS = { @@ -23,6 +24,7 @@ const DEFINITIONS = { [SourceNames.MYSQL]: mysql.schema, [SourceNames.ARANGODB]: arangodb.schema, [SourceNames.REST]: rest.schema, + [SourceNames.GOOGLE_SHEETS]: googlesheets.schema, } const INTEGRATIONS = { @@ -37,6 +39,7 @@ const INTEGRATIONS = { [SourceNames.MYSQL]: mysql.integration, [SourceNames.ARANGODB]: arangodb.integration, [SourceNames.REST]: rest.integration, + [SourceNames.GOOGLE_SHEETS]: googlesheets.integration, } // optionally add oracle integration if the oracle binary can be installed diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts index b9e643e26a..8fe8fedcc8 100644 --- a/packages/server/src/integrations/utils.ts +++ b/packages/server/src/integrations/utils.ts @@ -178,6 +178,7 @@ function shouldCopySpecialColumn( ) { return ( column.type === FieldTypes.OPTIONS || + column.type === FieldTypes.ARRAY || 
((!fetchedColumn || fetchedColumn.type === FieldTypes.NUMBER) && column.type === FieldTypes.BOOLEAN) ) diff --git a/packages/server/src/middleware/authorized.js b/packages/server/src/middleware/authorized.js index b463895a80..7125ec3246 100644 --- a/packages/server/src/middleware/authorized.js +++ b/packages/server/src/middleware/authorized.js @@ -9,11 +9,59 @@ const { } = require("@budibase/backend-core/permissions") const builderMiddleware = require("./builder") const { isWebhookEndpoint } = require("./utils") +const { buildCsrfMiddleware } = require("@budibase/backend-core/auth") function hasResource(ctx) { return ctx.resourceId != null } +const csrf = buildCsrfMiddleware() + +/** + * Apply authorization to the requested resource: + * - If this is a builder resource the user must be a builder. + * - Builders can access all resources. + * - Otherwise the user must have the required role. + */ +const checkAuthorized = async (ctx, resourceRoles, permType, permLevel) => { + // check if this is a builder api and the user is not a builder + const isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global + const isBuilderApi = permType === PermissionTypes.BUILDER + if (isBuilderApi && !isBuilder) { + return ctx.throw(403, "Not Authorized") + } + + // check for resource authorization + if (!isBuilder) { + await checkAuthorizedResource(ctx, resourceRoles, permType, permLevel) + } +} + +const checkAuthorizedResource = async ( + ctx, + resourceRoles, + permType, + permLevel +) => { + // get the user's roles + const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC + const userRoles = await getUserRoleHierarchy(ctx.appId, roleId, { + idOnly: false, + }) + const permError = "User does not have permission" + // check if the user has the required role + if (resourceRoles.length > 0) { + // deny access if the user doesn't have the required resource role + const found = userRoles.find(role => resourceRoles.indexOf(role._id) !== -1) + if (!found) { + ctx.throw(403, permError) + } + // fallback to the base permissions when no resource roles are found + } else if (!doesHaveBasePermission(permType, permLevel, userRoles)) { + ctx.throw(403, permError) + } +} + module.exports = (permType, permLevel = null) => async (ctx, next) => { @@ -31,40 +79,26 @@ module.exports = // to find API endpoints which are builder focused await builderMiddleware(ctx, permType) - const isAuthed = ctx.isAuthenticated - // builders for now have permission to do anything - let isBuilder = ctx.user && ctx.user.builder && ctx.user.builder.global - const isBuilderApi = permType === PermissionTypes.BUILDER - if (isBuilder) { + // get the resource roles + let resourceRoles = [] + if (ctx.appId && hasResource(ctx)) { + resourceRoles = await getRequiredResourceRole(ctx.appId, permLevel, ctx) + } + + // if the resource is public, proceed + const isPublicResource = resourceRoles.includes(BUILTIN_ROLE_IDS.PUBLIC) + if (isPublicResource) { return next() - } else if (isBuilderApi && !isBuilder) { - return ctx.throw(403, "Not Authorized") } - // need to check this first, in-case public access, don't check authed until last - const roleId = ctx.roleId || BUILTIN_ROLE_IDS.PUBLIC - const hierarchy = await getUserRoleHierarchy(ctx.appId, roleId, { - idOnly: false, - }) - const permError = "User does not have permission" - let possibleRoleIds = [] - if (hasResource(ctx)) { - possibleRoleIds = await getRequiredResourceRole(ctx.appId, permLevel, ctx) - } - // check if we found a role, if not fallback to base permissions - if 
(possibleRoleIds.length > 0) { - const found = hierarchy.find( - role => possibleRoleIds.indexOf(role._id) !== -1 - ) - return found ? next() : ctx.throw(403, permError) - } else if (!doesHaveBasePermission(permType, permLevel, hierarchy)) { - ctx.throw(403, permError) + // check authenticated + if (!ctx.isAuthenticated) { + return ctx.throw(403, "Session not authenticated") } - // if they are not authed, then anything using the authorized middleware will fail - if (!isAuthed) { - ctx.throw(403, "Session not authenticated") - } + // check authorized + await checkAuthorized(ctx, resourceRoles, permType, permLevel) - return next() + // csrf protection + return csrf(ctx, next) } diff --git a/packages/server/src/middleware/currentapp.js b/packages/server/src/middleware/currentapp.js index e11aefdf1c..69f80c895b 100644 --- a/packages/server/src/middleware/currentapp.js +++ b/packages/server/src/middleware/currentapp.js @@ -47,15 +47,6 @@ module.exports = async (ctx, next) => { (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) ) { clearCookie(ctx, Cookies.CurrentApp) - // have to set the return url on the server side as client side is not available - setCookie(ctx, ctx.url, Cookies.RETURN_URL, { - // don't sign so the browser can easily read - sign: false, - // use the request domain to match how ui handles the return url cookie. - // it's important we don't use the shared domain here as the builder - // can't delete from it without awareness of the domain. - requestDomain: true, - }) return ctx.redirect("/") } diff --git a/packages/server/src/middleware/tests/authorized.spec.js b/packages/server/src/middleware/tests/authorized.spec.js index 9775965b5a..04ef6e2b07 100644 --- a/packages/server/src/middleware/tests/authorized.spec.js +++ b/packages/server/src/middleware/tests/authorized.spec.js @@ -17,6 +17,7 @@ class TestConfiguration { this.middleware = authorizedMiddleware(role) this.next = jest.fn() this.throw = jest.fn() + this.headers = {} this.ctx = { headers: {}, request: { @@ -25,7 +26,8 @@ class TestConfiguration { appId: "", auth: {}, next: this.next, - throw: this.throw + throw: this.throw, + get: (name) => this.headers[name], } } @@ -46,7 +48,7 @@ class TestConfiguration { } setAuthenticated(isAuthed) { - this.ctx.auth = { authenticated: isAuthed } + this.ctx.isAuthenticated = isAuthed } setRequestUrl(url) { @@ -107,7 +109,7 @@ describe("Authorization middleware", () => { expect(config.next).toHaveBeenCalled() }) - it("throws if the user has only builder permissions", async () => { + it("throws if the user does not have builder permissions", async () => { config.setEnvironment(false) config.setMiddlewareRequiredPermission(PermissionTypes.BUILDER) config.setUser({ @@ -133,7 +135,7 @@ describe("Authorization middleware", () => { expect(config.next).toHaveBeenCalled() }) - it("throws if the user session is not authenticated after permission checks", async () => { + it("throws if the user session is not authenticated", async () => { config.setUser({ role: { _id: "" diff --git a/packages/server/src/middleware/tests/usageQuota.spec.js b/packages/server/src/middleware/tests/usageQuota.spec.js index ae47d19207..1282615a50 100644 --- a/packages/server/src/middleware/tests/usageQuota.spec.js +++ b/packages/server/src/middleware/tests/usageQuota.spec.js @@ -1,11 +1,5 @@ jest.mock("../../db") jest.mock("../../utilities/usageQuota") -jest.mock("../../environment", () => ({ - isTest: () => true, - isProd: () => false, - isDev: () => true, - _set: () => {}, -})) 
jest.mock("@budibase/backend-core/tenancy", () => ({ getTenantId: () => "testing123" })) @@ -29,9 +23,10 @@ class TestConfiguration { }, req: { method: "POST", - url: "/rows" + url: "/applications" } } + usageQuota.useQuotas = () => true } executeMiddleware() { @@ -113,7 +108,6 @@ describe("usageQuota middleware", () => { it("calculates and persists the correct usage quota for the relevant action", async () => { config.setUrl("/rows") - config.setProd(true) await config.executeMiddleware() @@ -121,20 +115,20 @@ describe("usageQuota middleware", () => { expect(config.next).toHaveBeenCalled() }) - it("calculates the correct file size from a file upload call and adds it to quota", async () => { - config.setUrl("/upload") - config.setProd(true) - config.setFiles([ - { - size: 100 - }, - { - size: 10000 - }, - ]) - await config.executeMiddleware() + // it("calculates the correct file size from a file upload call and adds it to quota", async () => { + // config.setUrl("/upload") + // config.setProd(true) + // config.setFiles([ + // { + // size: 100 + // }, + // { + // size: 10000 + // }, + // ]) + // await config.executeMiddleware() - expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100) - expect(config.next).toHaveBeenCalled() - }) + // expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100) + // expect(config.next).toHaveBeenCalled() + // }) }) \ No newline at end of file diff --git a/packages/server/src/middleware/usageQuota.js b/packages/server/src/middleware/usageQuota.js index bb5fa4de3f..2cd0836113 100644 --- a/packages/server/src/middleware/usageQuota.js +++ b/packages/server/src/middleware/usageQuota.js @@ -1,15 +1,11 @@ const CouchDB = require("../db") const usageQuota = require("../utilities/usageQuota") -const env = require("../environment") -const { getTenantId } = require("@budibase/backend-core/tenancy") +const { getUniqueRows } = require("../utilities/usageQuota/rows") const { isExternalTable, isRowId: isExternalRowId, } = require("../integrations/utils") -// tenants without limits -const EXCLUDED_TENANTS = ["bb", "default", "bbtest", "bbstaging"] - // currently only counting new writes and deletes const METHOD_MAP = { POST: 1, @@ -18,13 +14,13 @@ const METHOD_MAP = { const DOMAIN_MAP = { rows: usageQuota.Properties.ROW, - upload: usageQuota.Properties.UPLOAD, - views: usageQuota.Properties.VIEW, - users: usageQuota.Properties.USER, + // upload: usageQuota.Properties.UPLOAD, // doesn't work yet + // views: usageQuota.Properties.VIEW, // doesn't work yet + // users: usageQuota.Properties.USER, // doesn't work yet applications: usageQuota.Properties.APPS, // this will not be updated by endpoint calls // instead it will be updated by triggerInfo - automationRuns: usageQuota.Properties.AUTOMATION, + // automationRuns: usageQuota.Properties.AUTOMATION, // doesn't work yet } function getProperty(url) { @@ -36,10 +32,7 @@ function getProperty(url) { } module.exports = async (ctx, next) => { - const tenantId = getTenantId() - - // if in development or a self hosted cloud usage quotas should not be executed - if (env.isDev() || env.SELF_HOSTED || EXCLUDED_TENANTS.includes(tenantId)) { + if (!usageQuota.useQuotas()) { return next() } @@ -80,9 +73,92 @@ module.exports = async (ctx, next) => { usage = files.map(file => file.size).reduce((total, size) => total + size) } try { - await usageQuota.update(property, usage) - return next() + await performRequest(ctx, next, property, usage) } catch (err) { ctx.throw(400, err) } } + +const performRequest = async (ctx, next, 
property, usage) => { + const usageContext = { + skipNext: false, + skipUsage: false, + [usageQuota.Properties.APPS]: {}, + } + + if (usage === -1) { + if (PRE_DELETE[property]) { + await PRE_DELETE[property](ctx, usageContext) + } + } else { + if (PRE_CREATE[property]) { + await PRE_CREATE[property](ctx, usageContext) + } + } + + // run the request + if (!usageContext.skipNext) { + await usageQuota.update(property, usage, { dryRun: true }) + await next() + } + + if (usage === -1) { + if (POST_DELETE[property]) { + await POST_DELETE[property](ctx, usageContext) + } + } else { + if (POST_CREATE[property]) { + await POST_CREATE[property](ctx, usageContext) + } + } + + // update the usage + if (!usageContext.skipUsage) { + await usageQuota.update(property, usage) + } +} + +const appPreDelete = async (ctx, usageContext) => { + if (ctx.query.unpublish) { + // don't run usage decrement for unpublish + usageContext.skipUsage = true + return + } + + // store the row count to delete + const rows = await getUniqueRows([ctx.appId]) + if (rows.length) { + usageContext[usageQuota.Properties.APPS] = { rowCount: rows.length } + } +} + +const appPostDelete = async (ctx, usageContext) => { + // delete the app rows from usage + const rowCount = usageContext[usageQuota.Properties.APPS].rowCount + if (rowCount) { + await usageQuota.update(usageQuota.Properties.ROW, -rowCount) + } +} + +const appPostCreate = async ctx => { + // app import & template creation + if (ctx.request.body.useTemplate === "true") { + const rows = await getUniqueRows([ctx.response.body.appId]) + const rowCount = rows ? rows.length : 0 + await usageQuota.update(usageQuota.Properties.ROW, rowCount) + } +} + +const PRE_DELETE = { + [usageQuota.Properties.APPS]: appPreDelete, +} + +const POST_DELETE = { + [usageQuota.Properties.APPS]: appPostDelete, +} + +const PRE_CREATE = {} + +const POST_CREATE = { + [usageQuota.Properties.APPS]: appPostCreate, +} diff --git a/packages/server/src/migrations/functions/appUrls.ts b/packages/server/src/migrations/functions/appUrls.ts new file mode 100644 index 0000000000..1446fcafc0 --- /dev/null +++ b/packages/server/src/migrations/functions/appUrls.ts @@ -0,0 +1,34 @@ +const { DocumentTypes } = require("@budibase/backend-core/db") +import { getAppUrl } from "../../api/controllers/application" + +/** + * Date: + * January 2022 + * + * Description: + * Add the url to the app metadata if it doesn't exist + */ +export const run = async (appDb: any) => { + let metadata + try { + metadata = await appDb.get(DocumentTypes.APP_METADATA) + } catch (e) { + // sometimes the metadata document doesn't exist + // exit early instead of failing the migration + console.error("Error retrieving app metadata. 
Skipping", e) + return + } + + if (!metadata.url) { + const context = { + request: { + body: { + name: metadata.name, + }, + }, + } + metadata.url = getAppUrl(context) + console.log(`Adding url to app: ${metadata.url}`) + await appDb.put(metadata) + } +} diff --git a/packages/server/src/migrations/functions/quotas1.ts b/packages/server/src/migrations/functions/quotas1.ts new file mode 100644 index 0000000000..500aa68f51 --- /dev/null +++ b/packages/server/src/migrations/functions/quotas1.ts @@ -0,0 +1,20 @@ +import { runQuotaMigration } from "./usageQuotas" +import * as syncApps from "./usageQuotas/syncApps" +import * as syncRows from "./usageQuotas/syncRows" + +/** + * Date: + * January 2022 + * + * Description: + * Synchronise the app and row quotas to the state of the db after it was + * discovered that the quota resets were still in place and the row quotas + * weren't being decremented correctly. + */ + +export const run = async () => { + await runQuotaMigration(async () => { + await syncApps.run() + await syncRows.run() + }) +} diff --git a/packages/server/src/migrations/functions/tests/appUrls.spec.js b/packages/server/src/migrations/functions/tests/appUrls.spec.js new file mode 100644 index 0000000000..d3f080dfd4 --- /dev/null +++ b/packages/server/src/migrations/functions/tests/appUrls.spec.js @@ -0,0 +1,29 @@ +const { DocumentTypes } = require("@budibase/backend-core/db") +const env = require("../../../environment") +const TestConfig = require("../../../tests/utilities/TestConfiguration") + +const migration = require("../appUrls") + +describe("run", () => { + let config = new TestConfig(false) + const CouchDB = config.getCouch() + + beforeEach(async () => { + await config.init() + }) + + afterAll(config.end) + + it("runs successfully", async () => { + const app = await config.createApp("testApp") + const appDb = new CouchDB(app.appId) + let metadata = await appDb.get(DocumentTypes.APP_METADATA) + delete metadata.url + await appDb.put(metadata) + + await migration.run(appDb) + + metadata = await appDb.get(DocumentTypes.APP_METADATA) + expect(metadata.url).toEqual("/testapp") + }) +}) diff --git a/packages/server/src/migrations/functions/tests/quotas1.spec.js b/packages/server/src/migrations/functions/tests/quotas1.spec.js new file mode 100644 index 0000000000..df8703e9a0 --- /dev/null +++ b/packages/server/src/migrations/functions/tests/quotas1.spec.js @@ -0,0 +1,27 @@ +const env = require("../../../environment") +const TestConfig = require("../../../tests/utilities/TestConfiguration") + +const syncApps = jest.fn() +const syncRows = jest.fn() + +jest.mock("../usageQuotas/syncApps", () => ({ run: syncApps }) ) +jest.mock("../usageQuotas/syncRows", () => ({ run: syncRows }) ) + +const migration = require("../quotas1") + +describe("run", () => { + let config = new TestConfig(false) + + beforeEach(async () => { + await config.init() + env._set("USE_QUOTAS", 1) + }) + + afterAll(config.end) + + it("runs ", async () => { + await migration.run() + expect(syncApps).toHaveBeenCalledTimes(1) + expect(syncRows).toHaveBeenCalledTimes(1) + }) +}) diff --git a/packages/server/src/migrations/functions/tests/userEmailViewCasing.spec.js b/packages/server/src/migrations/functions/tests/userEmailViewCasing.spec.js new file mode 100644 index 0000000000..c0d7823cbf --- /dev/null +++ b/packages/server/src/migrations/functions/tests/userEmailViewCasing.spec.js @@ -0,0 +1,25 @@ +const TestConfig = require("../../../tests/utilities/TestConfiguration") +const { getGlobalDB } = 
require("@budibase/backend-core/tenancy") + +// mock email view creation +const coreDb = require("@budibase/backend-core/db") +const createUserEmailView = jest.fn() +coreDb.createUserEmailView = createUserEmailView + +const migration = require("../userEmailViewCasing") + +describe("run", () => { + let config = new TestConfig(false) + const globalDb = getGlobalDB() + + beforeEach(async () => { + await config.init() + }) + + afterAll(config.end) + + it("runs successfully", async () => { + await migration.run(globalDb) + expect(createUserEmailView).toHaveBeenCalledTimes(1) + }) +}) diff --git a/packages/server/src/migrations/functions/usageQuotas/index.ts b/packages/server/src/migrations/functions/usageQuotas/index.ts new file mode 100644 index 0000000000..16c4bf1d89 --- /dev/null +++ b/packages/server/src/migrations/functions/usageQuotas/index.ts @@ -0,0 +1,8 @@ +const { useQuotas } = require("../../../utilities/usageQuota") + +export const runQuotaMigration = async (migration: Function) => { + if (!useQuotas()) { + return + } + await migration() +} diff --git a/packages/server/src/migrations/functions/usageQuotas/syncApps.ts b/packages/server/src/migrations/functions/usageQuotas/syncApps.ts new file mode 100644 index 0000000000..0fba4f0f7f --- /dev/null +++ b/packages/server/src/migrations/functions/usageQuotas/syncApps.ts @@ -0,0 +1,18 @@ +const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") +const { getAllApps } = require("@budibase/backend-core/db") +import CouchDB from "../../../db" +import { getUsageQuotaDoc } from "../../../utilities/usageQuota" + +export const run = async () => { + const db = getGlobalDB() + // get app count + const devApps = await getAllApps(CouchDB, { dev: true }) + const appCount = devApps ? devApps.length : 0 + + // sync app count + const tenantId = getTenantId() + console.log(`[Tenant: ${tenantId}] Syncing app count: ${appCount}`) + const usageDoc = await getUsageQuotaDoc(db) + usageDoc.usageQuota.apps = appCount + await db.put(usageDoc) +} diff --git a/packages/server/src/migrations/functions/usageQuotas/syncRows.ts b/packages/server/src/migrations/functions/usageQuotas/syncRows.ts new file mode 100644 index 0000000000..58767d0c0a --- /dev/null +++ b/packages/server/src/migrations/functions/usageQuotas/syncRows.ts @@ -0,0 +1,21 @@ +const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") +const { getAllApps } = require("@budibase/backend-core/db") +import CouchDB from "../../../db" +import { getUsageQuotaDoc } from "../../../utilities/usageQuota" +import { getUniqueRows } from "../../../utilities/usageQuota/rows" + +export const run = async () => { + const db = getGlobalDB() + // get all rows in all apps + const allApps = await getAllApps(CouchDB, { all: true }) + const appIds = allApps ? allApps.map((app: { appId: any }) => app.appId) : [] + const rows = await getUniqueRows(appIds) + const rowCount = rows ? 
rows.length : 0 + + // sync row count + const tenantId = getTenantId() + console.log(`[Tenant: ${tenantId}] Syncing row count: ${rowCount}`) + const usageDoc = await getUsageQuotaDoc(db) + usageDoc.usageQuota.rows = rowCount + await db.put(usageDoc) +} diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.js b/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.js new file mode 100644 index 0000000000..7c74cbfe9a --- /dev/null +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.js @@ -0,0 +1,37 @@ +const { getGlobalDB } = require("@budibase/backend-core/tenancy") +const TestConfig = require("../../../../tests/utilities/TestConfiguration") +const { getUsageQuotaDoc, update, Properties } = require("../../../../utilities/usageQuota") +const syncApps = require("../syncApps") +const env = require("../../../../environment") + +describe("syncApps", () => { + let config = new TestConfig(false) + + beforeEach(async () => { + await config.init() + env._set("USE_QUOTAS", 1) + }) + + afterAll(config.end) + + it("runs successfully", async () => { + // create the usage quota doc and mock usages + const db = getGlobalDB() + await getUsageQuotaDoc(db) + await update(Properties.APPS, 3) + + let usageDoc = await getUsageQuotaDoc(db) + expect(usageDoc.usageQuota.apps).toEqual(3) + + // create an extra app to test the migration + await config.createApp("quota-test") + + // migrate + await syncApps.run() + + // assert the migration worked + usageDoc = await getUsageQuotaDoc(db) + expect(usageDoc.usageQuota.apps).toEqual(2) + }) +}) + diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.js b/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.js new file mode 100644 index 0000000000..034d0eb067 --- /dev/null +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.js @@ -0,0 +1,43 @@ +const { getGlobalDB } = require("@budibase/backend-core/tenancy") +const TestConfig = require("../../../../tests/utilities/TestConfiguration") +const { getUsageQuotaDoc, update, Properties } = require("../../../../utilities/usageQuota") +const syncRows = require("../syncRows") +const env = require("../../../../environment") + +describe("syncRows", () => { + let config = new TestConfig(false) + + beforeEach(async () => { + await config.init() + env._set("USE_QUOTAS", 1) + }) + + afterAll(config.end) + + it("runs successfully", async () => { + // create the usage quota doc and mock usages + const db = getGlobalDB() + await getUsageQuotaDoc(db) + await update(Properties.ROW, 300) + + let usageDoc = await getUsageQuotaDoc(db) + expect(usageDoc.usageQuota.rows).toEqual(300) + + // app 1 + await config.createTable() + await config.createRow() + // app 2 + await config.createApp("second-app") + await config.createTable() + await config.createRow() + await config.createRow() + + // migrate + await syncRows.run() + + // assert the migration worked + usageDoc = await getUsageQuotaDoc(db) + expect(usageDoc.usageQuota.rows).toEqual(3) + }) +}) + diff --git a/packages/server/src/migrations/functions/userEmailViewCasing.ts b/packages/server/src/migrations/functions/userEmailViewCasing.ts new file mode 100644 index 0000000000..16f55655ab --- /dev/null +++ b/packages/server/src/migrations/functions/userEmailViewCasing.ts @@ -0,0 +1,13 @@ +const { createUserEmailView } = require("@budibase/backend-core/db") + +/** + * Date: + * October 2021 + * + * Description: + * Recreate the user email view 
to include latest changes i.e. lower casing the email address + */ + +export const run = async (db: any) => { + await createUserEmailView(db) +} diff --git a/packages/server/src/migrations/index.ts b/packages/server/src/migrations/index.ts new file mode 100644 index 0000000000..966041e0c9 --- /dev/null +++ b/packages/server/src/migrations/index.ts @@ -0,0 +1,56 @@ +import CouchDB from "../db" +const { + MIGRATION_TYPES, + runMigrations, +} = require("@budibase/backend-core/migrations") + +// migration functions +import * as userEmailViewCasing from "./functions/userEmailViewCasing" +import * as quota1 from "./functions/quotas1" +import * as appUrls from "./functions/appUrls" + +export interface Migration { + type: string + name: string + opts?: object + fn: Function +} + +/** + * e.g. + * { + * tenantIds: ['bb'], + * force: { + * global: ['quota_1'] + * } + * } + */ +export interface MigrationOptions { + tenantIds?: string[] + forced?: { + [type: string]: string[] + } +} + +export const MIGRATIONS: Migration[] = [ + { + type: MIGRATION_TYPES.GLOBAL, + name: "user_email_view_casing", + fn: userEmailViewCasing.run, + }, + { + type: MIGRATION_TYPES.GLOBAL, + name: "quotas_1", + fn: quota1.run, + }, + { + type: MIGRATION_TYPES.APP, + name: "app_urls", + opts: { all: true }, + fn: appUrls.run, + }, +] + +export const migrate = async (options?: MigrationOptions) => { + await runMigrations(CouchDB, MIGRATIONS, options) +} diff --git a/packages/server/src/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.js index 06d81593a1..68aa68dc66 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.js +++ b/packages/server/src/tests/utilities/TestConfiguration.js @@ -22,10 +22,12 @@ const { getGlobalDB } = require("@budibase/backend-core/tenancy") const { createASession } = require("@budibase/backend-core/sessions") const { user: userCache } = require("@budibase/backend-core/cache") const CouchDB = require("../../db") +const newid = require("../../db/newid") core.init(CouchDB) const GLOBAL_USER_ID = "us_uuid1" const EMAIL = "babs@babs.com" +const CSRF_TOKEN = "e3727778-7af0-4226-b5eb-f43cbe60a306" class TestConfiguration { constructor(openServer = true) { @@ -48,6 +50,10 @@ class TestConfiguration { return this.appId } + getCouch() { + return CouchDB + } + async _req(config, params, controlFunc) { const request = {} // fake cookies, we don't need them @@ -85,7 +91,11 @@ class TestConfiguration { roles: roles || {}, tenantId: TENANT_ID, } - await createASession(id, { sessionId: "sessionid", tenantId: TENANT_ID }) + await createASession(id, { + sessionId: "sessionid", + tenantId: TENANT_ID, + csrfToken: CSRF_TOKEN, + }) if (builder) { user.builder = { global: true } } else { @@ -98,7 +108,8 @@ class TestConfiguration { } } - async init(appName = "test_application") { + // use a new id as the name to avoid name collisions + async init(appName = newid()) { await this.globalUser() return this.createApp(appName) } @@ -131,6 +142,7 @@ class TestConfiguration { `${Cookies.Auth}=${authToken}`, `${Cookies.CurrentApp}=${appToken}`, ], + [Headers.CSRF_TOKEN]: CSRF_TOKEN, } if (this.appId) { headers[Headers.APP_ID] = this.appId @@ -424,10 +436,6 @@ class TestConfiguration { roles: { [this.prodAppId]: roleId }, }) } - await createASession(userId, { - sessionId: "sessionid", - tenantId: TENANT_ID, - }) // have to fake this const auth = { userId, diff --git a/packages/server/src/threads/query.js b/packages/server/src/threads/query.js index 35164ac642..ff3e101d48 
100644 --- a/packages/server/src/threads/query.js +++ b/packages/server/src/threads/query.js @@ -78,6 +78,11 @@ class QueryRunner { return this.execute() } + // check for undefined response + if (!rows) { + rows = [] + } + // needs to an array for next step if (!Array.isArray(rows)) { rows = [rows] diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js index 4f5d72c179..dc56312d63 100644 --- a/packages/server/src/utilities/rowProcessor/index.js +++ b/packages/server/src/utilities/rowProcessor/index.js @@ -180,6 +180,8 @@ function processAutoColumn( } exports.processAutoColumn = processAutoColumn +exports.processFormulas = processFormulas + /** * This will coerce a value to the correct types based on the type transform map * @param {object} row The value to coerce @@ -229,11 +231,12 @@ exports.inputProcessing = ( } continue } - // specific case to delete formula values if they get saved - // type coercion cannot completely remove the field, so have to do it here + // remove any formula values, they are to be generated if (field.type === FieldTypes.FORMULA) { delete clonedRow[key] - } else { + } + // otherwise coerce what is there to correct types + else { clonedRow[key] = exports.coerce(value, field.type) } } @@ -250,7 +253,7 @@ exports.inputProcessing = ( /** * This function enriches the input rows with anything they are supposed to contain, for example * link records or attachment links. - * @param {object} ctx the request which is looking for enriched rows. + * @param {string} appId the app in which the request is looking for enriched rows. * @param {object} table the table from which these rows came from originally, this is used to determine * the schema of the rows and then enrich. * @param {object[]|object} rows the rows which are to be enriched. @@ -258,22 +261,21 @@ exports.inputProcessing = ( * @returns {object[]|object} the enriched rows will be returned. */ exports.outputProcessing = async ( - ctx, + { appId }, table, rows, opts = { squash: true } ) => { - const appId = ctx.appId let wasArray = true if (!(rows instanceof Array)) { rows = [rows] wasArray = false } // attach any linked row information - let enriched = await linkRows.attachFullLinkedDocs(ctx, table, rows) + let enriched = await linkRows.attachFullLinkedDocs(appId, table, rows) // process formulas - enriched = processFormulas(table, enriched) + enriched = processFormulas(table, enriched, { dynamic: true }) // update the attachments URL depending on hosting for (let [property, column] of Object.entries(table.schema)) { @@ -305,9 +307,15 @@ exports.outputProcessing = async ( * @param {any} row optional - the row being removed. * @param {any} rows optional - if multiple rows being deleted can do this in bulk. * @param {any} oldRow optional - if updating a row this will determine the difference. + * @param {any} oldTable optional - if updating a table, can supply the old table to look for + * deleted attachment columns. * @return {Promise} When all attachments have been removed this will return. 
*/ -exports.cleanupAttachments = async (appId, table, { row, rows, oldRow }) => { +exports.cleanupAttachments = async ( + appId, + table, + { row, rows, oldRow, oldTable } +) => { if (!isProdAppID(appId)) { const prodAppId = getDeployedAppID(appId) // if prod exists, then don't allow deleting @@ -322,12 +330,16 @@ exports.cleanupAttachments = async (appId, table, { row, rows, oldRow }) => { files = files.concat(row[key].map(attachment => attachment.key)) } } - for (let [key, schema] of Object.entries(table.schema)) { + const schemaToUse = oldTable ? oldTable.schema : table.schema + for (let [key, schema] of Object.entries(schemaToUse)) { if (schema.type !== FieldTypes.ATTACHMENT) { continue } - // if updating, need to manage the differences - if (oldRow && row) { + // old table had this column, new table doesn't - delete it + if (oldTable && !table.schema[key]) { + rows.forEach(row => addFiles(row, key)) + } else if (oldRow && row) { + // if updating, need to manage the differences files = files.concat(getRemovedAttachmentKeys(oldRow, row, key)) } else if (row) { addFiles(row, key) diff --git a/packages/server/src/utilities/rowProcessor/utils.js b/packages/server/src/utilities/rowProcessor/utils.js index 8cbecfe6c5..95b7828084 100644 --- a/packages/server/src/utilities/rowProcessor/utils.js +++ b/packages/server/src/utilities/rowProcessor/utils.js @@ -1,23 +1,39 @@ -const { FieldTypes } = require("../../constants") +const { FieldTypes, FormulaTypes } = require("../../constants") const { processStringSync } = require("@budibase/string-templates") /** * Looks through the rows provided and finds formulas - which it then processes. */ -exports.processFormulas = (table, rows) => { +exports.processFormulas = ( + table, + rows, + { dynamic, contextRows } = { dynamic: true } +) => { const single = !Array.isArray(rows) if (single) { rows = [rows] + contextRows = contextRows ? [contextRows] : contextRows } for (let [column, schema] of Object.entries(table.schema)) { - if (schema.type !== FieldTypes.FORMULA) { + const isStatic = schema.formulaType === FormulaTypes.STATIC + if ( + schema.type !== FieldTypes.FORMULA || + (dynamic && isStatic) || + (!dynamic && !isStatic) + ) { continue } // iterate through rows and process formula - rows = rows.map(row => ({ - ...row, - [column]: processStringSync(schema.formula, row), - })) + for (let i = 0; i < rows.length; i++) { + if (schema.formula) { + let row = rows[i] + let context = contextRows ? contextRows[i] : row + rows[i] = { + ...row, + [column]: processStringSync(schema.formula, context), + } + } + } } return single ? 
rows[0] : rows } diff --git a/packages/server/src/utilities/tests/usageQuota/usageQuota.spec.js b/packages/server/src/utilities/tests/usageQuota/usageQuota.spec.js new file mode 100644 index 0000000000..dcd7578f59 --- /dev/null +++ b/packages/server/src/utilities/tests/usageQuota/usageQuota.spec.js @@ -0,0 +1,72 @@ +const getTenantId = jest.fn() +jest.mock("@budibase/backend-core/tenancy", () => ({ + getTenantId +})) +const usageQuota = require("../../usageQuota") +const env = require("../../../environment") + +class TestConfiguration { + constructor() { + this.enableQuotas() + } + + enableQuotas = () => { + env.USE_QUOTAS = 1 + } + + disableQuotas = () => { + env.USE_QUOTAS = null + } + + setTenantId = (tenantId) => { + getTenantId.mockReturnValue(tenantId) + } + + setExcludedTenants = (tenants) => { + env.EXCLUDE_QUOTAS_TENANTS = tenants + } + + reset = () => { + this.disableQuotas() + this.setExcludedTenants(null) + } +} + +describe("usageQuota", () => { + let config + + beforeEach(() => { + config = new TestConfiguration() + }) + + afterEach(() => { + config.reset() + }) + + describe("useQuotas", () => { + it("works when no settings have been provided", () => { + config.reset() + expect(usageQuota.useQuotas()).toBe(false) + }) + it("honours USE_QUOTAS setting", () => { + config.disableQuotas() + expect(usageQuota.useQuotas()).toBe(false) + + config.enableQuotas() + expect(usageQuota.useQuotas()).toBe(true) + }) + it("honours EXCLUDE_QUOTAS_TENANTS setting", () => { + config.setTenantId("test") + + // tenantId is in the list + config.setExcludedTenants("test, test2, test2") + expect(usageQuota.useQuotas()).toBe(false) + config.setExcludedTenants("test,test2,test2") + expect(usageQuota.useQuotas()).toBe(false) + + // tenantId is not in the list + config.setTenantId("other") + expect(usageQuota.useQuotas()).toBe(true) + }) + }) +}) \ No newline at end of file diff --git a/packages/server/src/utilities/usageQuota.js b/packages/server/src/utilities/usageQuota.js deleted file mode 100644 index 73fe83a170..0000000000 --- a/packages/server/src/utilities/usageQuota.js +++ /dev/null @@ -1,73 +0,0 @@ -const env = require("../environment") -const { getGlobalDB } = require("@budibase/backend-core/tenancy") -const { - StaticDatabases, - generateNewUsageQuotaDoc, -} = require("@budibase/backend-core/db") - -function getNewQuotaReset() { - return Date.now() + 2592000000 -} - -exports.Properties = { - ROW: "rows", - UPLOAD: "storage", - VIEW: "views", - USER: "users", - AUTOMATION: "automationRuns", - APPS: "apps", - EMAILS: "emails", -} - -async function getUsageQuotaDoc(db) { - let quota - try { - quota = await db.get(StaticDatabases.PLATFORM_INFO.docs.usageQuota) - } catch (err) { - // doc doesn't exist. Create it - quota = await db.post(generateNewUsageQuotaDoc()) - } - - return quota -} - -/** - * Given a specified tenantId this will add to the usage object for the specified property. - * @param {string} property The property which is to be added to (within the nested usageQuota object). - * @param {number} usage The amount (this can be negative) to adjust the number by. - * @returns {Promise} When this completes the API key will now be up to date - the quota period may have - * also been reset after this call. 
- */ -exports.update = async (property, usage) => { - if (!env.USE_QUOTAS) { - return - } - - try { - const db = getGlobalDB() - const quota = await getUsageQuotaDoc(db) - - // Check if the quota needs reset - if (Date.now() >= quota.quotaReset) { - quota.quotaReset = getNewQuotaReset() - for (let prop of Object.keys(quota.usageQuota)) { - quota.usageQuota[prop] = 0 - } - } - - // increment the quota - quota.usageQuota[property] += usage - - if (quota.usageQuota[property] > quota.usageLimits[property]) { - throw new Error( - `You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.` - ) - } - - // update the usage quotas - await db.put(quota) - } catch (err) { - console.error(`Error updating usage quotas for ${property}`, err) - throw err - } -} diff --git a/packages/server/src/utilities/usageQuota/index.js b/packages/server/src/utilities/usageQuota/index.js new file mode 100644 index 0000000000..b0ff310aa3 --- /dev/null +++ b/packages/server/src/utilities/usageQuota/index.js @@ -0,0 +1,92 @@ +const env = require("../../environment") +const { getGlobalDB, getTenantId } = require("@budibase/backend-core/tenancy") +const { + StaticDatabases, + generateNewUsageQuotaDoc, +} = require("@budibase/backend-core/db") + +exports.useQuotas = () => { + // check if quotas are enabled + if (env.USE_QUOTAS) { + // check if there are any tenants without limits + if (env.EXCLUDE_QUOTAS_TENANTS) { + const excludedTenants = env.EXCLUDE_QUOTAS_TENANTS.replace( + /\s/g, + "" + ).split(",") + const tenantId = getTenantId() + if (excludedTenants.includes(tenantId)) { + return false + } + } + return true + } + return false +} + +exports.Properties = { + ROW: "rows", + UPLOAD: "storage", // doesn't work yet + VIEW: "views", // doesn't work yet + USER: "users", // doesn't work yet + AUTOMATION: "automationRuns", // doesn't work yet + APPS: "apps", + EMAILS: "emails", // doesn't work yet +} + +exports.getUsageQuotaDoc = async db => { + let quota + try { + quota = await db.get(StaticDatabases.GLOBAL.docs.usageQuota) + } catch (err) { + // doc doesn't exist. Create it + quota = generateNewUsageQuotaDoc() + const response = await db.put(quota) + quota._rev = response.rev + } + + return quota +} + +/** + * Given a specified tenantId this will add to the usage object for the specified property. + * @param {string} property The property which is to be added to (within the nested usageQuota object). + * @param {number} usage The amount (this can be negative) to adjust the number by. + * @returns {Promise} When this completes the API key will now be up to date - the quota period may have + * also been reset after this call. 
+ */ +exports.update = async (property, usage, opts = { dryRun: false }) => { + if (!exports.useQuotas()) { + return + } + + try { + const db = getGlobalDB() + const quota = await exports.getUsageQuotaDoc(db) + + // increment the quota + quota.usageQuota[property] += usage + + if ( + quota.usageQuota[property] > quota.usageLimits[property] && + usage > 0 // allow for decrementing usage when the quota is already exceeded + ) { + throw new Error( + `You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.` + ) + } + + if (quota.usageQuota[property] < 0) { + // never go negative if the quota has previously been exceeded + quota.usageQuota[property] = 0 + } + + // update the usage quotas + if (!opts.dryRun) { + await db.put(quota) + } + } catch (err) { + console.error(`Error updating usage quotas for ${property}`, err) + throw err + } +} diff --git a/packages/server/src/utilities/usageQuota/rows.js b/packages/server/src/utilities/usageQuota/rows.js new file mode 100644 index 0000000000..67ad07410d --- /dev/null +++ b/packages/server/src/utilities/usageQuota/rows.js @@ -0,0 +1,74 @@ +const { getRowParams, USER_METDATA_PREFIX } = require("../../db/utils") +const CouchDB = require("../../db") +const { isDevAppID, getDevelopmentAppID } = require("@budibase/backend-core/db") + +const ROW_EXCLUSIONS = [USER_METDATA_PREFIX] + +const getAppPairs = appIds => { + // collect the app ids into dev / prod pairs + // keyed by the dev app id + const pairs = {} + for (let appId of appIds) { + const devId = getDevelopmentAppID(appId) + if (!pairs[devId]) { + pairs[devId] = {} + } + if (isDevAppID(appId)) { + pairs[devId].devId = appId + } else { + pairs[devId].prodId = appId + } + } + return pairs +} + +const getAppRows = async appId => { + const appDb = new CouchDB(appId) + const response = await appDb.allDocs( + getRowParams(null, null, { + include_docs: false, + }) + ) + return response.rows + .map(r => r.id) + .filter(id => { + for (let exclusion of ROW_EXCLUSIONS) { + if (id.startsWith(exclusion)) { + return false + } + } + return true + }) +} + +/** + * Return a set of all rows in the given app ids. + * The returned rows will be unique on a per dev/prod app basis. + * Rows duplicates may exist across apps due to data import so they are not filtered out. 
+ */ +exports.getUniqueRows = async appIds => { + let uniqueRows = [] + const pairs = getAppPairs(appIds) + + for (let pair of Object.values(pairs)) { + let appRows = [] + for (let appId of [pair.devId, pair.prodId]) { + if (!appId) { + continue + } + try { + appRows.push(await getAppRows(appId)) + } catch (e) { + console.error(e) + // don't error out if we can't count the app rows, just continue + } + } + + // ensure uniqueness on a per app pair basis + // this can't be done on all rows because app import results in + // duplicate row ids across apps + uniqueRows = uniqueRows.concat(...new Set(appRows)) + } + + return uniqueRows +} diff --git a/packages/server/src/utilities/usageQuota/usageQuoteReset.js b/packages/server/src/utilities/usageQuota/usageQuoteReset.js new file mode 100644 index 0000000000..ff5a1aa00e --- /dev/null +++ b/packages/server/src/utilities/usageQuota/usageQuoteReset.js @@ -0,0 +1,18 @@ +// UNUSED CODE +// Preserved for future use + +/* eslint-disable no-unused-vars */ + +function getNewQuotaReset() { + return Date.now() + 2592000000 +} + +function resetQuotasIfRequired(quota) { + // Check if the quota needs reset + if (Date.now() >= quota.quotaReset) { + quota.quotaReset = getNewQuotaReset() + for (let prop of Object.keys(quota.usageQuota)) { + quota.usageQuota[prop] = 0 + } + } +} diff --git a/packages/server/src/utilities/workerRequests.js b/packages/server/src/utilities/workerRequests.js index 92dffc8d3f..5e46f1678f 100644 --- a/packages/server/src/utilities/workerRequests.js +++ b/packages/server/src/utilities/workerRequests.js @@ -58,29 +58,6 @@ exports.sendSmtpEmail = async (to, from, subject, contents, automation) => { return response.json() } -exports.getDeployedApps = async () => { - try { - const response = await fetch( - checkSlashesInUrl(env.WORKER_URL + `/api/apps`), - request(null, { - method: "GET", - }) - ) - const json = await response.json() - const apps = {} - for (let [key, value] of Object.entries(json)) { - if (value.url) { - value.url = value.url.toLowerCase() - apps[key] = value - } - } - return apps - } catch (err) { - // error, cannot determine deployed apps, don't stop app creation - sort this later - return {} - } -} - exports.getGlobalSelf = async (ctx, appId = null) => { const endpoint = `/api/global/users/self` const response = await fetch( diff --git a/packages/server/yarn.lock b/packages/server/yarn.lock index 2f64782a81..df4e50c48f 100644 --- a/packages/server/yarn.lock +++ b/packages/server/yarn.lock @@ -983,10 +983,10 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/backend-core@^1.0.27-alpha.13": - version "1.0.27-alpha.13" - resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.27-alpha.13.tgz#89f46e081eb7b342f483fd0eccd72c42b2b2fa6c" - integrity sha512-NiasBvZ5wTpvANG9AjuO34DHMTqWQWSpabLcgwBY0tNG4ekh+wvSCPjCcUvN/bBpOzrVMQ8C4hmS4pvv342BhQ== +"@budibase/backend-core@^1.0.46-alpha.5": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.47.tgz#af1e501e20f8a648a40fe7d336b89e65f058c803" + integrity sha512-nj+MC2j6WEH+6LEJhs+zMbnm4BRGCaX7kXvlyq7EXA9h6QOxrNkB/PNFqEumkMJGjorkZAQ/qe8MUEjcE26QBw== dependencies: "@techpass/passport-openidconnect" "^0.3.0" aws-sdk "^2.901.0" @@ -1056,10 +1056,10 @@ svelte-flatpickr "^3.2.3" svelte-portal "^1.0.0" -"@budibase/bbui@^1.0.35": 
- version "1.0.35" - resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.35.tgz#a51886886772257d31e2c6346dbec46fe0c9fd85" - integrity sha512-8qeAzTujtO7uvhj+dMiyW4BTkQ7dC4xF1CNIwyuTnDwIeFDlXYgNb09VVRs3+nWcX2e2eC53EUs1RnLUoSlTsw== +"@budibase/bbui@^1.0.47": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.47.tgz#df2848b89f881fe603e7156855d6a6c31d4f58bf" + integrity sha512-RRm/BgK5aSx2/vGjMGljw240/48Ksc3/h4yB1nhQj8Xx3fKhlGnWDvWNy+sakvA6+fJvEXuti8RoxHtQ6lXmqA== dependencies: "@adobe/spectrum-css-workflow-icons" "^1.2.1" "@spectrum-css/actionbutton" "^1.0.1" @@ -1106,14 +1106,14 @@ svelte-flatpickr "^3.2.3" svelte-portal "^1.0.0" -"@budibase/client@^1.0.27-alpha.13": - version "1.0.35" - resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.35.tgz#b832e7e7e35032fb35fe5492fbb721db1da15394" - integrity sha512-maL3V29PQb9VjgnPZq44GSDZCuamAGp01bheUeJxEeskjQqZUdf8QC7Frf1mT+ZjgKJf3gU6qtFOxmWRbVzVbw== +"@budibase/client@^1.0.46-alpha.5": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.47.tgz#ce9e2fbd300e5dc389ea29a3a3347897f096c824" + integrity sha512-jB/al8v+nY/VLc6sH5Jt9JzWONVo+24/cI95iXlZSV5xwiKIVGj4+2F5QjKZ0c9Gm7SrrfP2T571N+4XaXNCGg== dependencies: - "@budibase/bbui" "^1.0.35" + "@budibase/bbui" "^1.0.47" "@budibase/standard-components" "^0.9.139" - "@budibase/string-templates" "^1.0.35" + "@budibase/string-templates" "^1.0.47" regexparam "^1.3.0" shortid "^2.2.15" svelte-spa-router "^3.0.5" @@ -1163,10 +1163,10 @@ svelte-apexcharts "^1.0.2" svelte-flatpickr "^3.1.0" -"@budibase/string-templates@^1.0.27-alpha.13", "@budibase/string-templates@^1.0.35": - version "1.0.35" - resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.35.tgz#a888f1e9327bb36416336a91a95a43cb34e6a42d" - integrity sha512-8HxSv0ru+cgSmphqtOm1pmBM8rc0TRC/6RQGzQefmFFQFfm/SBLAVLLWRmZxAOYTxt4mittGWeL4y05FqEuocg== +"@budibase/string-templates@^1.0.46-alpha.5", "@budibase/string-templates@^1.0.47": + version "1.0.47" + resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.47.tgz#626b9fc4542c7b36a0ae24e820d25a704c527bec" + integrity sha512-87BUfOPr8FGKH8Pt88jhKNGT9PcOmkLRCeen4xi1dI113pAQznBO9vgV+cXOChUBBEQka9Rrt85LMJXidiwVgg== dependencies: "@budibase/handlebars-helpers" "^0.11.7" dayjs "^1.10.4" @@ -2417,6 +2417,11 @@ dependencies: "@types/node" "*" +"@types/google-spreadsheet@^3.1.5": + version "3.1.5" + resolved "https://registry.yarnpkg.com/@types/google-spreadsheet/-/google-spreadsheet-3.1.5.tgz#2bdc6f9f5372551e0506cb6ef3f562adcf44fc2e" + integrity sha512-7N+mDtZ1pmya2RRFPPl4KYc2TRgiqCNBLUZfyrKfER+u751JgCO+C24/LzF70UmUm/zhHUbzRZ5mtfaxekQ1ZQ== + "@types/graceful-fs@^4.1.2": version "4.1.5" resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" @@ -3193,6 +3198,11 @@ array-unique@^0.3.2: resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= +arrify@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" + integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== + asap@^2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" @@ -3495,7 +3505,7 @@ base62@^1.1.0: resolved 
"https://registry.yarnpkg.com/base62/-/base62-1.2.8.tgz#1264cb0fb848d875792877479dbe8bae6bae3428" integrity sha512-V6YHUbjLxN1ymqNLb1DPHoU1CpfdL7d2YTIp5W3U4hhoG4hhxNmsFDs66M9EXxBiSEke5Bt5dwdfMwwZF70iLA== -base64-js@^1.0.2, base64-js@^1.3.1: +base64-js@^1.0.2, base64-js@^1.3.0, base64-js@^1.3.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== @@ -3535,6 +3545,11 @@ big.js@^5.2.2: resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== +bignumber.js@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.0.1.tgz#8d7ba124c882bfd8e43260c67475518d0689e4e5" + integrity sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA== + binary-extensions@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" @@ -4826,7 +4841,7 @@ ecc-jsbn@~0.1.1: jsbn "~0.1.0" safer-buffer "^2.1.0" -ecdsa-sig-formatter@1.0.11: +ecdsa-sig-formatter@1.0.11, ecdsa-sig-formatter@^1.0.11: version "1.0.11" resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== @@ -5490,7 +5505,7 @@ extend-shallow@^3.0.0, extend-shallow@^3.0.2: assign-symbols "^1.0.0" is-extendable "^1.0.1" -extend@^3.0.0, extend@~3.0.2: +extend@^3.0.0, extend@^3.0.2, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== @@ -5569,6 +5584,11 @@ fast-safe-stringify@^2.0.7, fast-safe-stringify@^2.0.8: resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== +fast-text-encoding@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz#ec02ac8e01ab8a319af182dae2681213cfe9ce53" + integrity sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig== + fast-url-parser@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d" @@ -5919,6 +5939,25 @@ functional-red-black-tree@^1.0.1: resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= +gaxios@^4.0.0: + version "4.3.2" + resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-4.3.2.tgz#845827c2dc25a0213c8ab4155c7a28910f5be83f" + integrity sha512-T+ap6GM6UZ0c4E6yb1y/hy2UB6hTrqhglp3XfmU9qbLCGRYhLVV5aRPpC4EmoG8N8zOnkYCgoBz+ScvGAARY6Q== + dependencies: + abort-controller "^3.0.0" + extend "^3.0.2" + https-proxy-agent "^5.0.0" + is-stream "^2.0.0" + node-fetch "^2.6.1" + +gcp-metadata@^4.2.0: + version "4.3.1" + resolved 
"https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-4.3.1.tgz#fb205fe6a90fef2fd9c85e6ba06e5559ee1eefa9" + integrity sha512-x850LS5N7V1F3UcV7PoupzGsyD6iVwTVvsh3tbXfkctZnBnjW5yu5z1/3k3SehF7TyoTIe78rJs02GMMy+LF+A== + dependencies: + gaxios "^4.0.0" + json-bigint "^1.0.0" + generate-function@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.3.1.tgz#f069617690c10c868e73b8465746764f97c3479f" @@ -6116,6 +6155,36 @@ globby@^11.0.3: merge2 "^1.3.0" slash "^3.0.0" +google-auth-library@^6.1.3: + version "6.1.6" + resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-6.1.6.tgz#deacdcdb883d9ed6bac78bb5d79a078877fdf572" + integrity sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ== + dependencies: + arrify "^2.0.0" + base64-js "^1.3.0" + ecdsa-sig-formatter "^1.0.11" + fast-text-encoding "^1.0.0" + gaxios "^4.0.0" + gcp-metadata "^4.2.0" + gtoken "^5.0.4" + jws "^4.0.0" + lru-cache "^6.0.0" + +google-auth-library@^7.11.0: + version "7.11.0" + resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-7.11.0.tgz#b63699c65037310a424128a854ba7e736704cbdb" + integrity sha512-3S5jn2quRumvh9F/Ubf7GFrIq71HZ5a6vqosgdIu105kkk0WtSqc2jGCRqtWWOLRS8SX3AHACMOEDxhyWAQIcg== + dependencies: + arrify "^2.0.0" + base64-js "^1.3.0" + ecdsa-sig-formatter "^1.0.11" + fast-text-encoding "^1.0.0" + gaxios "^4.0.0" + gcp-metadata "^4.2.0" + gtoken "^5.0.4" + jws "^4.0.0" + lru-cache "^6.0.0" + google-auth-library@~0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-0.10.0.tgz#6e15babee85fd1dd14d8d128a295b6838d52136e" @@ -6133,6 +6202,22 @@ google-p12-pem@^0.1.0: dependencies: node-forge "^0.7.1" +google-p12-pem@^3.0.3: + version "3.1.2" + resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-3.1.2.tgz#c3d61c2da8e10843ff830fdb0d2059046238c1d4" + integrity sha512-tjf3IQIt7tWCDsa0ofDQ1qqSCNzahXDxdAGJDbruWqu3eCg5CKLYKN+hi0s6lfvzYZ1GDVr+oDF9OOWlDSdf0A== + dependencies: + node-forge "^0.10.0" + +google-spreadsheet@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/google-spreadsheet/-/google-spreadsheet-3.2.0.tgz#ce8aa75c15705aa950ad52b091a6fc4d33dcb329" + integrity sha512-z7XMaqb+26rdo8p51r5O03u8aPLAPzn5YhOXYJPcf2hdMVr0dUbIARgdkRdmGiBeoV/QoU/7VNhq1MMCLZv3kQ== + dependencies: + axios "^0.21.4" + google-auth-library "^6.1.3" + lodash "^4.17.21" + googleapis@^16.0.0: version "16.1.0" resolved "https://registry.yarnpkg.com/googleapis/-/googleapis-16.1.0.tgz#0f19f2d70572d918881a0f626e3b1a2fa8629576" @@ -6197,6 +6282,15 @@ gtoken@^1.2.1: mime "^1.4.1" request "^2.72.0" +gtoken@^5.0.4: + version "5.3.1" + resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-5.3.1.tgz#c1c2598a826f2b5df7c6bb53d7be6cf6d50c3c78" + integrity sha512-yqOREjzLHcbzz1UrQoxhBtpk8KjrVhuqPE7od1K2uhyxG2BHjKZetlbLw/SPZak/QqTIQW+addS+EcjqQsZbwQ== + dependencies: + gaxios "^4.0.0" + google-p12-pem "^3.0.3" + jws "^4.0.0" + gulp-header@^1.7.1: version "1.8.12" resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.12.tgz#ad306be0066599127281c4f8786660e705080a84" @@ -8084,6 +8178,13 @@ jsesc@~0.5.0: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= +json-bigint@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-bigint/-/json-bigint-1.0.0.tgz#ae547823ac0cad8398667f8cd9ef4730f5b01ff1" + integrity 
sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ== + dependencies: + bignumber.js "^9.0.0" + json-buffer@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" @@ -8197,6 +8298,15 @@ jwa@^1.4.1: ecdsa-sig-formatter "1.0.11" safe-buffer "^5.0.1" +jwa@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/jwa/-/jwa-2.0.0.tgz#a7e9c3f29dae94027ebcaf49975c9345593410fc" + integrity sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA== + dependencies: + buffer-equal-constant-time "1.0.1" + ecdsa-sig-formatter "1.0.11" + safe-buffer "^5.0.1" + jws@3.x.x, jws@^3.0.0, jws@^3.1.4, jws@^3.2.2: version "3.2.2" resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" @@ -8205,6 +8315,14 @@ jws@3.x.x, jws@^3.0.0, jws@^3.1.4, jws@^3.2.2: jwa "^1.4.1" safe-buffer "^5.0.1" +jws@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jws/-/jws-4.0.0.tgz#2d4e8cf6a318ffaa12615e9dec7e86e6c97310f4" + integrity sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg== + dependencies: + jwa "^2.0.0" + safe-buffer "^5.0.1" + keygrip@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/keygrip/-/keygrip-1.0.3.tgz#399d709f0aed2bab0a059e0cdd3a5023a053e1dc" @@ -9313,18 +9431,23 @@ node-fetch@2.4.1: resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.4.1.tgz#b2e38f1117b8acbedbe0524f041fb3177188255d" integrity sha512-P9UbpFK87NyqBZzUuDBDz4f6Yiys8xm8j7ACDbi6usvFm6KItklQUKjeoqTrYS/S1k6I8oaOC2YLLDr/gg26Mw== -node-fetch@2.6.0, node-fetch@^2.6.0: +node-fetch@2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd" integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA== -node-fetch@^2.6.1: - version "2.6.6" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.6.tgz#1751a7c01834e8e1697758732e9efb6eeadfaf89" - integrity sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA== +node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" +node-forge@^0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" + integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== + node-forge@^0.7.1: version "0.7.6" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac" diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json index 278d60cabe..941162ce93 100644 --- a/packages/string-templates/package.json +++ b/packages/string-templates/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/string-templates", - "version": "1.0.44-alpha.9", + "version": "1.0.49-alpha.4", "description": "Handlebars wrapper for Budibase templating.", "main": "src/index.cjs", "module": "dist/bundle.mjs", diff --git a/packages/string-templates/src/helpers/javascript.js 
b/packages/string-templates/src/helpers/javascript.js index 9231283e89..0173be0b54 100644 --- a/packages/string-templates/src/helpers/javascript.js +++ b/packages/string-templates/src/helpers/javascript.js @@ -1,5 +1,6 @@ const { atob } = require("../utilities") const { cloneDeep } = require("lodash/fp") +const { LITERAL_MARKER } = require("../helpers/constants") // The method of executing JS scripts depends on the bundle being built. // This setter is used in the entrypoint (either index.cjs or index.mjs). @@ -46,8 +47,9 @@ module.exports.processJS = (handlebars, context) => { $: path => getContextValue(path, cloneDeep(context)), } - // Create a sandbox with out context and run the JS - return runJS(js, sandboxContext) + // Create a sandbox with our context and run the JS + const res = { data: runJS(js, sandboxContext) } + return `{{${LITERAL_MARKER} js_result-${JSON.stringify(res)}}}` } catch (error) { return "Error while executing JS" } diff --git a/packages/string-templates/src/index.cjs b/packages/string-templates/src/index.cjs index 5d05f0f57f..bc9a410813 100644 --- a/packages/string-templates/src/index.cjs +++ b/packages/string-templates/src/index.cjs @@ -15,6 +15,8 @@ module.exports.processStringSync = templates.processStringSync module.exports.processObjectSync = templates.processObjectSync module.exports.processString = templates.processString module.exports.processObject = templates.processObject +module.exports.doesContainStrings = templates.doesContainStrings +module.exports.doesContainString = templates.doesContainString /** * Use vm2 to run JS scripts in a node env diff --git a/packages/string-templates/src/index.js b/packages/string-templates/src/index.js index 820b8da290..616981995d 100644 --- a/packages/string-templates/src/index.js +++ b/packages/string-templates/src/index.js @@ -3,6 +3,7 @@ const { registerAll } = require("./helpers/index") const processors = require("./processors") const { atob, btoa } = require("./utilities") const manifest = require("../manifest.json") +const { FIND_HBS_REGEX } = require("./utilities") const hbsInstance = handlebars.create() registerAll(hbsInstance) @@ -26,7 +27,7 @@ function testObject(object) { * @param {object|array} object The input structure which is to be recursed, it is important to note that * if the structure contains any cycles then this will fail. * @param {object} context The context that handlebars should fill data from. - * @param {object} opts optional - specify some options for processing. + * @param {object|null} opts optional - specify some options for processing. * @returns {Promise} The structure input, as fully updated as possible. */ module.exports.processObject = async (object, context, opts) => { @@ -57,7 +58,7 @@ module.exports.processObject = async (object, context, opts) => { * then nothing will occur. * @param {string} string The template string which is the filled from the context object. * @param {object} context An object of information which will be used to enrich the string. - * @param {object} opts optional - specify some options for processing. + * @param {object|null} opts optional - specify some options for processing. * @returns {Promise} The enriched string, all templates should have been replaced if they can be. 
*/ module.exports.processString = async (string, context, opts) => { @@ -71,7 +72,7 @@ module.exports.processString = async (string, context, opts) => { * @param {object|array} object The input structure which is to be recursed, it is important to note that * if the structure contains any cycles then this will fail. * @param {object} context The context that handlebars should fill data from. - * @param {object} opts optional - specify some options for processing. + * @param {object|null} opts optional - specify some options for processing. * @returns {object|array} The structure input, as fully updated as possible. */ module.exports.processObjectSync = (object, context, opts) => { @@ -92,7 +93,7 @@ module.exports.processObjectSync = (object, context, opts) => { * then nothing will occur. This is a pure sync call and therefore does not have the full functionality of the async call. * @param {string} string The template string which is the filled from the context object. * @param {object} context An object of information which will be used to enrich the string. - * @param {object} opts optional - specify some options for processing. + * @param {object|null} opts optional - specify some options for processing. * @returns {string} The enriched string, all templates should have been replaced if they can be. */ module.exports.processStringSync = (string, context, opts) => { @@ -112,9 +113,10 @@ module.exports.processStringSync = (string, context, opts) => { const template = instance.compile(string, { strict: false, }) + const now = Math.floor(Date.now() / 1000) * 1000 return processors.postprocess( template({ - now: new Date().toISOString(), + now: new Date(now).toISOString(), ...context, }) ) @@ -221,3 +223,47 @@ module.exports.decodeJSBinding = handlebars => { } return atob(match[1]) } + +/** + * Same as the doesContainString function, but will check for all the strings + * before confirming the template contains them. + * @param {string} template The template string to search. + * @param {string[]} strings The strings to look for. + * @returns {boolean} Will return true if all strings are found in an HBS statement. + */ +module.exports.doesContainStrings = (template, strings) => { + let regexp = new RegExp(FIND_HBS_REGEX) + let matches = template.match(regexp) + if (matches == null) { + return false + } + for (let match of matches) { + let hbs = match + if (exports.isJSBinding(match)) { + hbs = exports.decodeJSBinding(match) + } + let allFound = true + for (let string of strings) { + if (!hbs.includes(string)) { + allFound = false + } + } + if (allFound) { + return true + } + } + return false +} + +/** + * This function looks in the supplied template for handlebars instances; if they contain + * JS, the JS will be decoded and then the supplied string will be looked for. For example, + * if the template was "Hello, your name is {{ related }}" this function would return true + * for the string "related" but not for "name", as it is not within the handlebars statement. + * @param {string} template A template string to search for handlebars instances. + * @param {string} string The word or sentence to search for. + * @returns {boolean} Returns true if the string is found, false if not. 
+ */ +module.exports.doesContainString = (template, string) => { + return exports.doesContainStrings(template, [string]) +} diff --git a/packages/string-templates/src/index.mjs b/packages/string-templates/src/index.mjs index 446e71ef88..a592ae26d5 100644 --- a/packages/string-templates/src/index.mjs +++ b/packages/string-templates/src/index.mjs @@ -15,6 +15,8 @@ export const processStringSync = templates.processStringSync export const processObjectSync = templates.processObjectSync export const processString = templates.processString export const processObject = templates.processObject +export const doesContainStrings = templates.doesContainStrings +export const doesContainString = templates.doesContainString /** * Use polyfilled vm to run JS scripts in a browser Env diff --git a/packages/string-templates/src/processors/postprocessor.js b/packages/string-templates/src/processors/postprocessor.js index 4d1c84013a..7fc3f663fe 100644 --- a/packages/string-templates/src/processors/postprocessor.js +++ b/packages/string-templates/src/processors/postprocessor.js @@ -36,6 +36,11 @@ module.exports.processors = [ return value === "true" case "object": return JSON.parse(value) + case "js_result": + // We use the literal helper to process the result of JS expressions + // as we want to be able to return any types. + // We wrap the value in an object to be able to use undefined properly. + return JSON.parse(value).data } return value }), diff --git a/packages/string-templates/test/basic.spec.js b/packages/string-templates/test/basic.spec.js index 2fd6505410..490c0aa514 100644 --- a/packages/string-templates/test/basic.spec.js +++ b/packages/string-templates/test/basic.spec.js @@ -4,6 +4,8 @@ const { isValid, makePropSafe, getManifest, + encodeJSBinding, + doesContainString, } = require("../src/index.cjs") describe("Test that the string processing works correctly", () => { @@ -157,3 +159,20 @@ describe("check full stops that are safe", () => { expect(output).toEqual("1") }) }) + +describe("check does contain string function", () => { + it("should work for a simple case", () => { + const hbs = "hello {{ name }}" + expect(doesContainString(hbs, "name")).toEqual(true) + }) + + it("should reject a case where its in the string, but not the handlebars", () => { + const hbs = "hello {{ name }}" + expect(doesContainString(hbs, "hello")).toEqual(false) + }) + + it("should handle if its in javascript", () => { + const js = encodeJSBinding(`return $("foo")`) + expect(doesContainString(js, "foo")).toEqual(true) + }) +}) diff --git a/packages/string-templates/test/javascript.spec.js b/packages/string-templates/test/javascript.spec.js index 05cc80331a..5363f37e02 100644 --- a/packages/string-templates/test/javascript.spec.js +++ b/packages/string-templates/test/javascript.spec.js @@ -7,7 +7,7 @@ const processJS = (js, context) => { describe("Test the JavaScript helper", () => { it("should execute a simple expression", () => { const output = processJS(`return 1 + 2`) - expect(output).toBe("3") + expect(output).toBe(3) }) it("should be able to use primitive bindings", () => { @@ -50,6 +50,52 @@ describe("Test the JavaScript helper", () => { expect(output).toBe("shazbat") }) + it("should be able to return an object", () => { + const output = processJS(`return $("foo")`, { + foo: { + bar: { + baz: "shazbat", + }, + }, + }) + expect(output.bar.baz).toBe("shazbat") + }) + + it("should be able to return an array", () => { + const output = processJS(`return $("foo")`, { + foo: ["a", "b", "c"], + }) + 
expect(output[2]).toBe("c") + }) + + it("should be able to return null", () => { + const output = processJS(`return $("foo")`, { + foo: null, + }) + expect(output).toBe(null) + }) + + it("should be able to return undefined", () => { + const output = processJS(`return $("foo")`, { + foo: undefined, + }) + expect(output).toBe(undefined) + }) + + it("should be able to return 0", () => { + const output = processJS(`return $("foo")`, { + foo: 0, + }) + expect(output).toBe(0) + }) + + it("should be able to return an empty string", () => { + const output = processJS(`return $("foo")`, { + foo: "", + }) + expect(output).toBe("") + }) + it("should be able to use a deep array binding", () => { const output = processJS(`return $("foo.0.bar")`, { foo: [ diff --git a/packages/worker/nodemon.json b/packages/worker/nodemon.json index 8a9e288584..02edece53a 100644 --- a/packages/worker/nodemon.json +++ b/packages/worker/nodemon.json @@ -1,5 +1,5 @@ { - "watch": ["src", "../auth"], + "watch": ["src", "../backend-core"], "ext": "js,ts,json", "ignore": ["src/**/*.spec.ts", "src/**/*.spec.js"], "exec": "ts-node src/index.ts" diff --git a/packages/worker/package.json b/packages/worker/package.json index 5e0a1b69cf..6951d00f3c 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/worker", "email": "hi@budibase.com", - "version": "1.0.44-alpha.9", + "version": "1.0.49-alpha.4", "description": "Budibase background service", "main": "src/index.ts", "repository": { @@ -34,8 +34,8 @@ "author": "Budibase", "license": "GPL-3.0", "dependencies": { - "@budibase/backend-core": "^1.0.44-alpha.9", - "@budibase/string-templates": "^1.0.44-alpha.9", + "@budibase/backend-core": "^1.0.49-alpha.4", + "@budibase/string-templates": "^1.0.49-alpha.4", "@koa/router": "^8.0.0", "@sentry/node": "^6.0.0", "@techpass/passport-openidconnect": "^0.3.0", diff --git a/packages/worker/src/api/controllers/app.js b/packages/worker/src/api/controllers/app.js deleted file mode 100644 index c08603e57e..0000000000 --- a/packages/worker/src/api/controllers/app.js +++ /dev/null @@ -1,30 +0,0 @@ -const { - getAllApps, - getDeployedAppID, - isProdAppID, -} = require("@budibase/backend-core/db") -const CouchDB = require("../../db") - -const URL_REGEX_SLASH = /\/|\\/g - -exports.getApps = async ctx => { - const apps = await getAllApps(CouchDB, { all: true }) - const body = {} - for (let app of apps) { - let url = app.url || encodeURI(`${app.name}`) - url = `/${url.replace(URL_REGEX_SLASH, "")}` - const appId = app.appId, - isProd = isProdAppID(app.appId) - if (!body[url]) { - body[url] = { - appId: getDeployedAppID(appId), - name: app.name, - url, - deployed: isProd, - } - } else { - body[url].deployed = isProd || body[url].deployed - } - } - ctx.body = body -} diff --git a/packages/worker/src/api/controllers/global/auth.js b/packages/worker/src/api/controllers/global/auth.js index 44ee99aee7..7b0e50c099 100644 --- a/packages/worker/src/api/controllers/global/auth.js +++ b/packages/worker/src/api/controllers/global/auth.js @@ -74,10 +74,7 @@ async function authInternal(ctx, user, err = null, info = null) { exports.authenticate = async (ctx, next) => { return passport.authenticate("local", async (err, user, info) => { await authInternal(ctx, user, err, info) - - delete user.token - - ctx.body = { user } + ctx.status = 200 })(ctx, next) } @@ -147,6 +144,32 @@ exports.logout = async ctx => { ctx.body = { message: "User logged out." 
} } +exports.datasourcePreAuth = async (ctx, next) => { + const provider = ctx.params.provider + const middleware = require(`@budibase/backend-core/middleware`) + const handler = middleware.datasource[provider] + + setCookie( + ctx, + { + provider, + appId: ctx.query.appId, + datasourceId: ctx.query.datasourceId, + }, + Cookies.DatasourceAuth + ) + + return handler.preAuth(passport, ctx, next) +} + +exports.datasourceAuth = async (ctx, next) => { + const authStateCookie = getCookie(ctx, Cookies.DatasourceAuth) + const provider = authStateCookie.provider + const middleware = require(`@budibase/backend-core/middleware`) + const handler = middleware.datasource[provider] + return handler.postAuth(passport, ctx, next) +} + /** * The initial call that google authentication makes to take you to the google login screen. * On a successful login, you will be redirected to the googleAuth callback route. diff --git a/packages/worker/src/api/controllers/global/users.js b/packages/worker/src/api/controllers/global/users.js index 152bc9f5ae..f2d89e103a 100644 --- a/packages/worker/src/api/controllers/global/users.js +++ b/packages/worker/src/api/controllers/global/users.js @@ -73,16 +73,14 @@ exports.adminUser = async ctx => { if (!env.SELF_HOSTED) { // could be a scenario where it exists, make sure its clean try { - const usageQuota = await db.get( - StaticDatabases.PLATFORM_INFO.docs.usageQuota - ) + const usageQuota = await db.get(StaticDatabases.GLOBAL.docs.usageQuota) if (usageQuota) { await db.remove(usageQuota._id, usageQuota._rev) } } catch (err) { // don't worry about errors } - await db.post(generateNewUsageQuotaDoc()) + await db.put(generateNewUsageQuotaDoc()) } if (response.rows.some(row => row.doc.admin)) { @@ -174,6 +172,7 @@ exports.getSelf = async ctx => { ctx.body.account = ctx.user.account ctx.body.budibaseAccess = ctx.user.budibaseAccess ctx.body.accountPortalAccess = ctx.user.accountPortalAccess + ctx.body.csrfToken = ctx.user.csrfToken } exports.updateSelf = async ctx => { @@ -192,6 +191,8 @@ exports.updateSelf = async ctx => { // don't allow sending up an ID/Rev, always use the existing one delete ctx.request.body._id delete ctx.request.body._rev + // don't allow setting the csrf token + delete ctx.request.body.csrfToken const response = await db.put({ ...user, ...ctx.request.body, diff --git a/packages/worker/src/api/index.js b/packages/worker/src/api/index.js index a83b39e6cf..607d8283f9 100644 --- a/packages/worker/src/api/index.js +++ b/packages/worker/src/api/index.js @@ -6,6 +6,7 @@ const { buildAuthMiddleware, auditLog, buildTenancyMiddleware, + buildCsrfMiddleware, } = require("@budibase/backend-core/auth") const PUBLIC_ENDPOINTS = [ @@ -68,6 +69,10 @@ const NO_TENANCY_ENDPOINTS = [ }, ] +// most public endpoints are gets, but some are posts +// add them all to be safe +const NO_CSRF_ENDPOINTS = [...PUBLIC_ENDPOINTS] + const router = new Router() router .use( @@ -85,6 +90,7 @@ router .use("/health", ctx => (ctx.status = 200)) .use(buildAuthMiddleware(PUBLIC_ENDPOINTS)) .use(buildTenancyMiddleware(PUBLIC_ENDPOINTS, NO_TENANCY_ENDPOINTS)) + .use(buildCsrfMiddleware({ noCsrfPatterns: NO_CSRF_ENDPOINTS })) // for now no public access is allowed to worker (bar health check) .use((ctx, next) => { if (ctx.publicEndpoint) { diff --git a/packages/worker/src/api/routes/app.js b/packages/worker/src/api/routes/app.js deleted file mode 100644 index 86004cb674..0000000000 --- a/packages/worker/src/api/routes/app.js +++ /dev/null @@ -1,8 +0,0 @@ -const Router = require("@koa/router") 
-const controller = require("../controllers/app") - -const router = Router() - -router.get("/api/apps", controller.getApps) - -module.exports = router diff --git a/packages/worker/src/api/routes/global/auth.js b/packages/worker/src/api/routes/global/auth.js index 20c615b85a..373bf5736a 100644 --- a/packages/worker/src/api/routes/global/auth.js +++ b/packages/worker/src/api/routes/global/auth.js @@ -63,8 +63,17 @@ router updateTenant, authController.googlePreAuth ) + .get( + "/api/global/auth/:tenantId/datasource/:provider", + updateTenant, + authController.datasourcePreAuth + ) // single tenancy endpoint .get("/api/global/auth/google/callback", authController.googleAuth) + .get( + "/api/global/auth/datasource/:provider/callback", + authController.datasourceAuth + ) // multi-tenancy endpoint .get( "/api/global/auth/:tenantId/google/callback", diff --git a/packages/worker/src/api/routes/index.js b/packages/worker/src/api/routes/index.js index 3d2d770408..8c0cb02e6f 100644 --- a/packages/worker/src/api/routes/index.js +++ b/packages/worker/src/api/routes/index.js @@ -8,14 +8,12 @@ const roleRoutes = require("./global/roles") const sessionRoutes = require("./global/sessions") const environmentRoutes = require("./system/environment") const tenantsRoutes = require("./system/tenants") -const appRoutes = require("./app") exports.routes = [ configRoutes, userRoutes, workspaceRoutes, authRoutes, - appRoutes, templateRoutes, tenantsRoutes, emailRoutes, diff --git a/packages/worker/src/api/routes/tests/utilities/TestConfiguration.js b/packages/worker/src/api/routes/tests/utilities/TestConfiguration.js index 34ce01263d..6b6c0e24b3 100644 --- a/packages/worker/src/api/routes/tests/utilities/TestConfiguration.js +++ b/packages/worker/src/api/routes/tests/utilities/TestConfiguration.js @@ -2,12 +2,12 @@ const env = require("../../../../environment") const controllers = require("./controllers") const supertest = require("supertest") const { jwt } = require("@budibase/backend-core/auth") -const { Cookies } = require("@budibase/backend-core/constants") +const { Cookies, Headers } = require("@budibase/backend-core/constants") const { Configs, LOGO_URL } = require("../../../../constants") const { getGlobalUserByEmail } = require("@budibase/backend-core/utils") const { createASession } = require("@budibase/backend-core/sessions") const { newid } = require("@budibase/backend-core/src/hashing") -const { TENANT_ID } = require("./structures") +const { TENANT_ID, CSRF_TOKEN } = require("./structures") const core = require("@budibase/backend-core") const CouchDB = require("../../../../db") const { doInTenant } = require("@budibase/backend-core/tenancy") @@ -72,6 +72,7 @@ class TestConfiguration { await createASession("us_uuid1", { sessionId: "sessionid", tenantId: TENANT_ID, + csrfToken: CSRF_TOKEN, }) } @@ -98,6 +99,7 @@ class TestConfiguration { return { Accept: "application/json", ...this.cookieHeader([`${Cookies.Auth}=${authToken}`]), + [Headers.CSRF_TOKEN]: CSRF_TOKEN, } } diff --git a/packages/worker/src/api/routes/tests/utilities/structures.js b/packages/worker/src/api/routes/tests/utilities/structures.js index 16701ac3d7..45f1f0077c 100644 --- a/packages/worker/src/api/routes/tests/utilities/structures.js +++ b/packages/worker/src/api/routes/tests/utilities/structures.js @@ -1 +1,2 @@ exports.TENANT_ID = "default" +exports.CSRF_TOKEN = "e3727778-7af0-4226-b5eb-f43cbe60a306" diff --git a/packages/worker/yarn.lock b/packages/worker/yarn.lock index c83954cda7..48df51e68f 100644 --- a/packages/worker/yarn.lock 
+++ b/packages/worker/yarn.lock @@ -15,16 +15,16 @@ integrity sha512-m7OkX0IdKLKPpBlJtF561YJal5y/jyI5fNfWbPxh2D/nbzzGI4qRyrD8xO2jB24u7l+5I2a43scCG2IrfjC50Q== "@babel/core@^7.1.0", "@babel/core@^7.12.3", "@babel/core@^7.7.2", "@babel/core@^7.8.0": - version "7.16.10" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.16.10.tgz#ebd034f8e7ac2b6bfcdaa83a161141a646f74b50" - integrity sha512-pbiIdZbCiMx/MM6toR+OfXarYix3uz0oVsnNtfdAGTcCTu3w/JGF8JhirevXLBJUu0WguSZI12qpKnx7EeMyLA== + version "7.16.12" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.16.12.tgz#5edc53c1b71e54881315923ae2aedea2522bb784" + integrity sha512-dK5PtG1uiN2ikk++5OzSYsitZKny4wOCD0nrO4TqnW4BVBTQ2NGS3NgilvT/TEyxTST7LNyWV/T4tXDoD3fOgg== dependencies: "@babel/code-frame" "^7.16.7" "@babel/generator" "^7.16.8" "@babel/helper-compilation-targets" "^7.16.7" "@babel/helper-module-transforms" "^7.16.7" "@babel/helpers" "^7.16.7" - "@babel/parser" "^7.16.10" + "@babel/parser" "^7.16.12" "@babel/template" "^7.16.7" "@babel/traverse" "^7.16.10" "@babel/types" "^7.16.8" @@ -152,10 +152,10 @@ chalk "^2.0.0" js-tokens "^4.0.0" -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.10", "@babel/parser@^7.16.7": - version "7.16.10" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.16.10.tgz#aba1b1cb9696a24a19f59c41af9cf17d1c716a88" - integrity sha512-Sm/S9Or6nN8uiFsQU1yodyDW3MWXQhFeqzMPM+t8MJjM+pLsnFVxFZzkpXKvUXh+Gz9cbMoYYs484+Jw/NTEFQ== +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.10", "@babel/parser@^7.16.12", "@babel/parser@^7.16.7": + version "7.16.12" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.16.12.tgz#9474794f9a650cf5e2f892444227f98e28cdf8b6" + integrity sha512-VfaV15po8RiZssrkPweyvbGVSe4x2y+aciFCgn0n0/SJMR22cwofRV1mtnJQYcSB1wUTaA/X1LnA3es66MCO5A== "@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" @@ -286,10 +286,10 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/backend-core@^1.0.44-alpha.6": - version "1.0.44" - resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.44.tgz#4ea6139ea304310b8972ba6a0ba70d24eb5a065c" - integrity sha512-sa9CJxMYkqMKPCwsfvhQMMz3s9vFPnNoWBOp221P8Y3D0vXN5MeH1+fWNrZAd0D7azwI1hqJdImCn7YEg0A81g== +"@budibase/backend-core@^1.0.49-alpha.4": + version "1.0.49-alpha.4" + resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.49-alpha.4.tgz#c9620f95a06e77f665b2a64c32eeb1f355841a73" + integrity sha512-j9+GXXZXvtShX1jMnkRH43eJjjqdAqbmlR5zmguw6TI2Ft7sjB9FZ+/NK07X58Uvc0sVFizw/n6iKGQOUwWDdg== dependencies: "@techpass/passport-openidconnect" "^0.3.0" aws-sdk "^2.901.0" @@ -311,9 +311,9 @@ zlib "^1.0.5" "@budibase/handlebars-helpers@^0.11.7": - version "0.11.7" - resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.11.7.tgz#8e5f9843d7dd10503e9f608555a96ccf4d836c46" - integrity sha512-PvGHAv22cWSFExs1kc0WglwsmCEUEOqWvSp6JCFZwtc3qAAr5yMfLK8WGVQ63ALvyzWZiyxF+yrlzeeaohCMJw== + version "0.11.8" + resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.11.8.tgz#6953d29673a8c5c407e096c0a84890465c7ce841" + integrity sha512-ggWJUt0GqsHFAEup5tlWlcrmYML57nKhpNGGLzVsqXVYN8eVmf3xluYmmMe7fDYhQH0leSprrdEXmsdFQF3HAQ== dependencies: array-sort "^1.0.0" define-property "^2.0.2" @@ -324,8 +324,6 @@ handlebars 
"^4.7.7" handlebars-utils "^1.0.6" has-value "^2.0.2" - helper-date "^1.0.1" - helper-markdown "^1.0.0" helper-md "^0.2.2" html-tag "^2.0.0" is-even "^1.0.0" @@ -337,10 +335,10 @@ to-gfm-code-block "^0.1.1" year "^0.2.1" -"@budibase/string-templates@^1.0.44-alpha.6": - version "1.0.44" - resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.44.tgz#4fd1f013fcc500adbfd645ebc655063f185ba994" - integrity sha512-+hbml5/yfpFx0QHPJsn7ERCSOmFI8XvXM9XjAO18eCKGxak9bIjtYnkbo8zjWU25vTwLgV3qLXRpEsbRidDu7A== +"@budibase/string-templates@^1.0.49-alpha.4": + version "1.0.49-alpha.4" + resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.49-alpha.4.tgz#cb3c138fe734436d404314f0af691a8f6fbb7e8f" + integrity sha512-1v2marwEfziTGihtr2PHDo2rMGRB3WWxC2CCobjSFN0sPy1PlRHoF3QZVutMqoh0C4gVZ50rww1vw8xj2rNgvQ== dependencies: "@budibase/handlebars-helpers" "^0.11.7" dayjs "^1.10.4" @@ -607,72 +605,72 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" -"@sentry/core@6.16.1": - version "6.16.1" - resolved "https://registry.yarnpkg.com/@sentry/core/-/core-6.16.1.tgz#d9f7a75f641acaddf21b6aafa7a32e142f68f17c" - integrity sha512-UFI0264CPUc5cR1zJH+S2UPOANpm6dLJOnsvnIGTjsrwzR0h8Hdl6rC2R/GPq+WNbnipo9hkiIwDlqbqvIU5vw== +"@sentry/core@6.17.3": + version "6.17.3" + resolved "https://registry.yarnpkg.com/@sentry/core/-/core-6.17.3.tgz#44375d8e9f4857bb630d7aebaecc97cfe42052df" + integrity sha512-h7WgrNL0RVlr8Dceh97ZiXNdmEumDutpoqFijjiX4x72IiC6zSaVD4IsqrdGln+v8iJ3l3lX44HHqzubDub1OQ== dependencies: - "@sentry/hub" "6.16.1" - "@sentry/minimal" "6.16.1" - "@sentry/types" "6.16.1" - "@sentry/utils" "6.16.1" + "@sentry/hub" "6.17.3" + "@sentry/minimal" "6.17.3" + "@sentry/types" "6.17.3" + "@sentry/utils" "6.17.3" tslib "^1.9.3" -"@sentry/hub@6.16.1": - version "6.16.1" - resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-6.16.1.tgz#526e19db51f4412da8634734044c605b936a7b80" - integrity sha512-4PGtg6AfpqMkreTpL7ymDeQ/U1uXv03bKUuFdtsSTn/FRf9TLS4JB0KuTZCxfp1IRgAA+iFg6B784dDkT8R9eg== +"@sentry/hub@6.17.3": + version "6.17.3" + resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-6.17.3.tgz#9c75f0ce486cfed0635f48c875d92f655c1e5710" + integrity sha512-TDxv8nRvk45xvfQg6zs8GYzQzgo0EMhI3wjQZLiNfW2rzybKmIwVp2x3O4PAc3WPzwg4bYNgSAkYKVlHmYjRCg== dependencies: - "@sentry/types" "6.16.1" - "@sentry/utils" "6.16.1" + "@sentry/types" "6.17.3" + "@sentry/utils" "6.17.3" tslib "^1.9.3" -"@sentry/minimal@6.16.1": - version "6.16.1" - resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-6.16.1.tgz#6a9506a92623d2ff1fc17d60989688323326772e" - integrity sha512-dq+mI1EQIvUM+zJtGCVgH3/B3Sbx4hKlGf2Usovm9KoqWYA+QpfVBholYDe/H2RXgO7LFEefDLvOdHDkqeJoyA== +"@sentry/minimal@6.17.3": + version "6.17.3" + resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-6.17.3.tgz#3e9f0b017f639776c9eaa58694b9be3f03429c78" + integrity sha512-zvGGfHNNA92Lqx6P8ZwOUkmRmAiQl0AQFRXl9So1Ayq9bJRnFLJZv4YFVnp2wE4HXYIlfBYb51+GlGB5LIuPmw== dependencies: - "@sentry/hub" "6.16.1" - "@sentry/types" "6.16.1" + "@sentry/hub" "6.17.3" + "@sentry/types" "6.17.3" tslib "^1.9.3" "@sentry/node@^6.0.0": - version "6.16.1" - resolved "https://registry.yarnpkg.com/@sentry/node/-/node-6.16.1.tgz#d92916da3e95d23e1ada274e97d6bf369e74ac51" - integrity sha512-SeDDoug2kUxeF1D7JGPa3h5EXxKtmA01mITBPYx5xbJ0sMksnv5I5bC1SJ8arRRzq6+W1C4IEeDBQtrVCk6ixA== + version "6.17.3" + resolved "https://registry.yarnpkg.com/@sentry/node/-/node-6.17.3.tgz#517ca6a88ca033320a301bffa097111bbb344f25" + integrity 
sha512-LvpB6bCQTytoOlrcQgR80aeEEBi2Sm1hNf+VvoPT6CW7tKI1/6pMWXaNnRu2dpyWS/j6tooz8rd/3dl1SZoGvg== dependencies: - "@sentry/core" "6.16.1" - "@sentry/hub" "6.16.1" - "@sentry/tracing" "6.16.1" - "@sentry/types" "6.16.1" - "@sentry/utils" "6.16.1" + "@sentry/core" "6.17.3" + "@sentry/hub" "6.17.3" + "@sentry/tracing" "6.17.3" + "@sentry/types" "6.17.3" + "@sentry/utils" "6.17.3" cookie "^0.4.1" https-proxy-agent "^5.0.0" lru_map "^0.3.3" tslib "^1.9.3" -"@sentry/tracing@6.16.1": - version "6.16.1" - resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-6.16.1.tgz#32fba3e07748e9a955055afd559a65996acb7d71" - integrity sha512-MPSbqXX59P+OEeST+U2V/8Hu/8QjpTUxTNeNyTHWIbbchdcMMjDbXTS3etCgajZR6Ro+DHElOz5cdSxH6IBGlA== +"@sentry/tracing@6.17.3": + version "6.17.3" + resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-6.17.3.tgz#b3841ad3fb1c7df1e21521da0d99c1496038a970" + integrity sha512-GnHugxw5qkWwYmeQbbrswuWpb0bpYqyJr/dO25QQOCwp+cckQrvBYTMC8zGJG10u94O4el0lQaQnNFz9WF3r6g== dependencies: - "@sentry/hub" "6.16.1" - "@sentry/minimal" "6.16.1" - "@sentry/types" "6.16.1" - "@sentry/utils" "6.16.1" + "@sentry/hub" "6.17.3" + "@sentry/minimal" "6.17.3" + "@sentry/types" "6.17.3" + "@sentry/utils" "6.17.3" tslib "^1.9.3" -"@sentry/types@6.16.1": - version "6.16.1" - resolved "https://registry.yarnpkg.com/@sentry/types/-/types-6.16.1.tgz#4917607115b30315757c2cf84f80bac5100b8ac0" - integrity sha512-Wh354g30UsJ5kYJbercektGX4ZMc9MHU++1NjeN2bTMnbofEcpUDWIiKeulZEY65IC1iU+1zRQQgtYO+/hgCUQ== +"@sentry/types@6.17.3": + version "6.17.3" + resolved "https://registry.yarnpkg.com/@sentry/types/-/types-6.17.3.tgz#c5b9bba8111ff26b26c4a056e2a083905e03e7dd" + integrity sha512-0AXCjYcfl8Vx26GfyLY4rBQ78Lyt1oND3UozTTMaVXlcKYIjzV+f7TOo5IZx+Kbr6EGUNDLdpA4xfbkWdW/1NA== -"@sentry/utils@6.16.1": - version "6.16.1" - resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-6.16.1.tgz#1b9e14c2831b6e8b816f7021b9876133bf2be008" - integrity sha512-7ngq/i4R8JZitJo9Sl8PDnjSbDehOxgr1vsoMmerIsyRZ651C/8B+jVkMhaAPgSdyJ0AlE3O7DKKTP1FXFw9qw== +"@sentry/utils@6.17.3": + version "6.17.3" + resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-6.17.3.tgz#a3c4c35e18ffb304356288213797c47c2bfdce08" + integrity sha512-6/2awDIeHSj0JgiC7DDdV1lxvLmf+/BisWhw09dKvmhVQB3ADvQZbohjUgM+Qam5zE0xmZAfQhvuDwC41W8Wnw== dependencies: - "@sentry/types" "6.16.1" + "@sentry/types" "6.17.3" tslib "^1.9.3" "@sideway/address@^4.1.3": @@ -698,9 +696,9 @@ integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== "@sindresorhus/is@^4.0.0": - version "4.3.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.3.0.tgz#344fd9bf808a84567ba563d00cc54b2f428dbab1" - integrity sha512-wwOvh0eO3PiTEivGJWiZ+b946SlMSb4pe+y+Ur/4S87cwo09pYi+FWHHnbrM3W9W7cBYKDqQXcrFYjYUCOJUEQ== + version "4.4.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.4.0.tgz#e277e5bdbdf7cb1e20d320f02f5e2ed113cd3185" + integrity sha512-QppPM/8l3Mawvh4rn9CNEYIU9bxpXUCRMaX9yUpvBk1nMKusLKpfXGDEKExKaPhLzcn3lzil7pR6rnJ11HgeRQ== "@sinonjs/commons@^1.7.0": version "1.8.3" @@ -967,9 +965,9 @@ integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== "@types/node@*": - version "17.0.10" - resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.10.tgz#616f16e9d3a2a3d618136b1be244315d95bd7cab" - integrity sha512-S/3xB4KzyFxYGCppyDt68yzBU9ysL88lSdIah4D6cptdcltc4NCPCAMc0+PCpg/lLIyC7IPvj2Z52OJWeIUkog== + version "17.0.13" + resolved 
"https://registry.yarnpkg.com/@types/node/-/node-17.0.13.tgz#5ed7ed7c662948335fcad6c412bb42d99ea754e3" + integrity sha512-Y86MAxASe25hNzlDbsviXl8jQHb0RDvKt4c40ZJQ1Don0AAL0STLZSs4N+6gLEO55pedy7r2cLwS+ZDxPm/2Bw== "@types/node@^15.12.4": version "15.14.9" @@ -1369,14 +1367,14 @@ autolinker@~0.28.0: gulp-header "^1.7.1" aws-sdk@^2.811.0, aws-sdk@^2.901.0: - version "2.1060.0" - resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1060.0.tgz#3b09be037571e4046d1076138e90e6184148cb81" - integrity sha512-c734/CZiVSsuVnEkx/7dodI8ndgOnxiCTwwlEFlMxdAZfLLJplteFwi6c/J2GZQktvz2ysV/HVtNKcJkasYJzw== + version "2.1066.0" + resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1066.0.tgz#2a9b00d983f3c740a7adda18d4e9a5c34d4d3887" + integrity sha512-9BZPdJgIvau8Jf2l3PxInNqQd733uKLqGGDywMV71duxNTLgdBZe2zvCkbgl22+ldC8R2LVMdS64DzchfQIxHg== dependencies: buffer "4.9.2" events "1.1.1" ieee754 "1.1.13" - jmespath "0.15.0" + jmespath "0.16.0" querystring "0.2.0" sax "1.2.1" url "0.10.3" @@ -1620,11 +1618,16 @@ buffer@^5.1.0, buffer@^5.5.0, buffer@^5.6.0: base64-js "^1.3.1" ieee754 "^1.1.13" -bytes@3.1.1, bytes@^3.0.0: +bytes@3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.1.tgz#3f018291cb4cbad9accb6e6970bca9c8889e879a" integrity sha512-dWe4nWO/ruEOY7HkUJ5gFt1DCFV9zPRoJr8pV0/ASQermOZjtq8jMjOprC0Kd10GLN+l7xaUPvxzJFWtxGu8Fg== +bytes@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" @@ -1708,9 +1711,9 @@ camelcase@^6.2.0: integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== caniuse-lite@^1.0.30001286: - version "1.0.30001300" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001300.tgz#11ab6c57d3eb6f964cba950401fd00a146786468" - integrity sha512-cVjiJHWGcNlJi8TZVKNMnvMid3Z3TTdDHmLDzlOdIiZq138Exvo0G+G0wTdVYolxKb4AYwC+38pxodiInVtJSA== + version "1.0.30001304" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001304.tgz#38af55ed3fc8220cb13e35e6e7309c8c65a05559" + integrity sha512-bdsfZd6K6ap87AGqSHJP/s1V+U6Z5lyrcbBu3ovbCCf8cSYpwTtGrCBObMpJqwxfTbLW6YTIdbb1jEeTelcpYQ== caseless@~0.12.0: version "0.12.0" @@ -2091,13 +2094,6 @@ data-urls@^2.0.0: whatwg-mimetype "^2.3.0" whatwg-url "^8.0.0" -date.js@^0.3.1: - version "0.3.3" - resolved "https://registry.yarnpkg.com/date.js/-/date.js-0.3.3.tgz#ef1e92332f507a638795dbb985e951882e50bbda" - integrity sha512-HgigOS3h3k6HnW011nAb43c5xx5rBXk8P2v/WIT9Zv4koIaVXiH2BURguI78VVp+5Qc076T7OR378JViCnZtBw== - dependencies: - debug "~3.1.0" - dateformat@^4.5.1: version "4.6.3" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-4.6.3.tgz#556fa6497e5217fedb78821424f8a1c22fa3f4b5" @@ -2129,13 +2125,6 @@ debug@^3.1.0, debug@^3.2.7: dependencies: ms "^2.1.1" -debug@~3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" - integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== - dependencies: - ms "2.0.0" - decimal.js@^10.2.1: version "10.3.1" resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" @@ -2263,9 +2252,9 @@ 
depd@~1.1.2: integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= destroy@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" - integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= + version "1.1.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.1.0.tgz#b77ae22e472d85437141319d32ae40b344dff38a" + integrity sha512-R5QZrOXxSs0JDUIU/VANvRJlQVMts9C0L76HToQdPdlftfZCE7W6dyH0G4GZ5UW9fRqUOhAoCE2aGekuu+3HjQ== detect-newline@^3.0.0: version "3.1.0" @@ -2367,9 +2356,9 @@ ee-first@1.1.1: integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= electron-to-chromium@^1.4.17: - version "1.4.49" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.49.tgz#5b6a3dc032590beef4be485a4b0b3fe7d0e3dfd7" - integrity sha512-k/0t1TRfonHIp8TJKfjBu2cKj8MqYTiEpOhci+q7CVEE5xnCQnx1pTa+V8b/sdhe4S3PR4p4iceEQWhGrKQORQ== + version "1.4.58" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.58.tgz#cd980b08338210b591c25492857a518fe286b1d4" + integrity sha512-7LXwnKyqcEaMFVXOer+2JPfFs1D+ej7yRRrfZoIH1YlLQZ81OvBNwSCBBLtExVkoMQQgOWwO0FbZVge6U/8rhQ== emitter-listener@^1.0.1: version "1.1.2" @@ -2739,9 +2728,9 @@ fast-levenshtein@~2.0.6: integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= fast-redact@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.0.2.tgz#c940ba7162dde3aeeefc522926ae8c5231412904" - integrity sha512-YN+CYfCVRVMUZOUPeinHNKgytM1wPI/C/UCLEi56EsY2dwwvI00kIJHJoI7pMVqGoMew8SMZ2SSfHKHULHXDsg== + version "3.1.0" + resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.1.0.tgz#37c26cda9cab70bc04393f7ba1feb2d176da6c6b" + integrity sha512-dir8LOnvialLxiXDPESMDHGp82CHi6ZEYTVkcvdn5d7psdv9ZkkButXrOeXST4aqreIRR+N7CYlsrwFuorurVg== fast-safe-stringify@^2.0.7, fast-safe-stringify@^2.0.8, fast-safe-stringify@^2.1.1: version "2.1.1" @@ -3143,7 +3132,7 @@ gulp-header@^1.7.1: lodash.template "^4.4.0" through2 "^2.0.0" -handlebars-utils@^1.0.2, handlebars-utils@^1.0.4, handlebars-utils@^1.0.6: +handlebars-utils@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/handlebars-utils/-/handlebars-utils-1.0.6.tgz#cb9db43362479054782d86ffe10f47abc76357f9" integrity sha512-d5mmoQXdeEqSKMtQQZ9WkiUcO1E3tPbWxluCK9hVgIDPzQa9WsKo3Lbe/sGflTe7TomHEeZaOgwIkyIr1kfzkw== @@ -3256,24 +3245,6 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" -helper-date@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/helper-date/-/helper-date-1.0.1.tgz#12fedea3ad8e44a7ca4c4efb0ff4104a5120cffb" - integrity sha512-wU3VOwwTJvGr/w5rZr3cprPHO+hIhlblTJHD6aFBrKLuNbf4lAmkawd2iK3c6NbJEvY7HAmDpqjOFSI5/+Ey2w== - dependencies: - date.js "^0.3.1" - handlebars-utils "^1.0.4" - moment "^2.18.1" - -helper-markdown@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/helper-markdown/-/helper-markdown-1.0.0.tgz#ee7e9fc554675007d37eb90f7853b13ce74f3e10" - integrity sha512-AnDqMS4ejkQK0MXze7pA9TM3pu01ZY+XXsES6gEE0RmCGk5/NIfvTn0NmItfyDOjRAzyo9z6X7YHbHX4PzIvOA== - dependencies: - handlebars-utils "^1.0.2" - highlight.js "^9.12.0" - remarkable "^1.7.1" - helper-md@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/helper-md/-/helper-md-0.2.2.tgz#c1f59d7e55bbae23362fd8a0e971607aec69d41f" @@ -3289,11 +3260,6 @@ hexoid@1.0.0: resolved "https://registry.yarnpkg.com/hexoid/-/hexoid-1.0.0.tgz#ad10c6573fb907de23d9ec63a711267d9dc9bc18" integrity sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g== 
-highlight.js@^9.12.0: - version "9.18.5" - resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.18.5.tgz#d18a359867f378c138d6819edfc2a8acd5f29825" - integrity sha512-a5bFyofd/BHCX52/8i8uJkjr9DYwXIPnM/plwI6W7ezItLGqzt7X2G2nXuYSfsIJdkwwj/g9DG1LkcGJI/dDoA== - html-encoding-sniffer@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" @@ -3562,7 +3528,7 @@ is-class-hotfix@~0.0.6: resolved "https://registry.yarnpkg.com/is-class-hotfix/-/is-class-hotfix-0.0.6.tgz#a527d31fb23279281dde5f385c77b5de70a72435" integrity sha512-0n+pzCC6ICtVr/WXnN2f03TK/3BfXY7me4cjCAqT8TYXEl0+JBRoqBo94JJHXcyDSLUeWbNX8Fvy5g5RJdAstQ== -is-core-module@^2.8.0: +is-core-module@^2.8.1: version "2.8.1" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.1.tgz#f59fdfca701d5879d0a6b100a40aa1560ce27211" integrity sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA== @@ -4249,15 +4215,20 @@ jest@^27.0.5: import-local "^3.0.2" jest-cli "^27.4.7" -jmespath@0.15.0, jmespath@^0.15.0: +jmespath@0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.16.0.tgz#b15b0a85dfd4d930d43e69ed605943c802785076" + integrity sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw== + +jmespath@^0.15.0: version "0.15.0" resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217" integrity sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc= joi@^17.4.0: - version "17.5.0" - resolved "https://registry.yarnpkg.com/joi/-/joi-17.5.0.tgz#7e66d0004b5045d971cf416a55fb61d33ac6e011" - integrity sha512-R7hR50COp7StzLnDi4ywOXHrBrgNXuUUfJWIR5lPY5Bm/pOD3jZaTwpluUXVLRWcoWZxkrHBBJ5hLxgnlehbdw== + version "17.6.0" + resolved "https://registry.yarnpkg.com/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2" + integrity sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw== dependencies: "@hapi/hoek" "^9.0.0" "@hapi/topo" "^5.0.0" @@ -4979,11 +4950,6 @@ mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -moment@^2.18.1: - version "2.29.1" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" - integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== - mri@1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/mri/-/mri-1.1.4.tgz#7cb1dd1b9b40905f1fac053abe25b6720f44744a" @@ -5484,9 +5450,9 @@ pino@^6.13.0: sonic-boom "^1.0.2" pirates@^4.0.4: - version "4.0.4" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.4.tgz#07df81e61028e402735cdd49db701e4885b4e6e6" - integrity sha512-ZIrVPH+A52Dw84R0L3/VS9Op04PuQ2SEoJL6bkshmiTic/HldyW9Tf7oH5mhJZBK7NmDx27vSMrYEXPXclpDKw== + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== pkg-dir@^4.2.0: version "4.2.0" @@ -5927,7 +5893,7 @@ relative@^3.0.2: dependencies: isobject "^2.0.0" -remarkable@^1.6.2, remarkable@^1.7.1: +remarkable@^1.6.2: version "1.7.4" resolved 
"https://registry.yarnpkg.com/remarkable/-/remarkable-1.7.4.tgz#19073cb960398c87a7d6546eaa5e50d2022fcd00" integrity sha512-e6NKUXgX95whv7IgddywbeN/ItCkWbISmc2DiqHJb0wTrqZIexqdco5b8Z3XZoo/48IdNVKM9ZCvTPJ4F5uvhg== @@ -6017,11 +5983,11 @@ resolve.exports@^1.1.0: integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== resolve@^1.20.0: - version "1.21.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.21.0.tgz#b51adc97f3472e6a5cf4444d34bc9d6b9037591f" - integrity sha512-3wCbTpk5WJlyE4mSOtDLhqQmGFi0/TD9VPwmiolnk8U0wRgMEktqCXd3vy5buTO3tljvalNvKrjHEfrd2WpEKA== + version "1.22.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198" + integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== dependencies: - is-core-module "^2.8.0" + is-core-module "^2.8.1" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" @@ -6881,9 +6847,9 @@ typescript@4.3.5: integrity sha512-DqQgihaQ9cUrskJo9kIyW/+g0Vxsk8cDtZ52a3NGh0YNTfpUSArXSohyUGnvbPazEPLu398C0UxmKSOrPumUzA== uglify-js@^3.1.4: - version "3.14.5" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.14.5.tgz#cdabb7d4954231d80cb4a927654c4655e51f4859" - integrity sha512-qZukoSxOG0urUTvjc2ERMTcAy+BiFh3weWAkeurLwjrCba73poHmG3E36XEjd/JGukMzwTL7uCxZiAexj8ppvQ== + version "3.15.0" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.15.0.tgz#2d6a689d94783cab43975721977a13c2afec28f1" + integrity sha512-x+xdeDWq7FiORDvyIJ0q/waWd4PhjBNOm5dQUOq2AKC0IEjxOS66Ha9tctiVDGcRQuh69K7fgU5oRuTK4cysSg== uid2@0.0.x: version "0.0.4"