From d3c4c99e167354c682b2585834ca656c6c910772 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 24 Nov 2022 18:48:51 +0000 Subject: [PATCH] Complete conversion of backend-core to TypeScript. --- packages/backend-core/accounts.js | 1 - packages/backend-core/auth.js | 1 - packages/backend-core/cache.js | 9 - packages/backend-core/constants.js | 1 - packages/backend-core/context.js | 24 - packages/backend-core/db.js | 1 - packages/backend-core/deprovision.js | 1 - packages/backend-core/encryption.js | 1 - packages/backend-core/logging.js | 1 - packages/backend-core/middleware.js | 1 - packages/backend-core/migrations.js | 1 - packages/backend-core/objectStore.js | 4 - packages/backend-core/permissions.js | 1 - packages/backend-core/plugins.js | 3 - packages/backend-core/redis.js | 5 - packages/backend-core/roles.js | 1 - packages/backend-core/sessions.js | 1 - .../cache/{appMetadata.js => appMetadata.ts} | 24 +- packages/backend-core/src/cache/base/index.ts | 8 +- packages/backend-core/src/cache/generic.js | 30 -- packages/backend-core/src/cache/generic.ts | 30 ++ packages/backend-core/src/cache/index.ts | 4 + .../src/cache/{user.js => user.ts} | 23 +- packages/backend-core/src/cloud/api.js | 42 -- packages/backend-core/src/cloud/api.ts | 55 +++ packages/backend-core/src/constants.js | 44 -- packages/backend-core/src/context/identity.ts | 2 +- packages/backend-core/src/db/utils.ts | 2 +- packages/backend-core/src/events/analytics.ts | 4 +- packages/backend-core/src/events/backfill.ts | 14 +- .../backend-core/src/events/identification.ts | 8 +- .../events/processors/posthog/rateLimiting.ts | 4 +- .../posthog/tests/PosthogProcessor.spec.ts | 6 +- .../src/featureFlags/{index.js => index.ts} | 22 +- .../src/{helpers.js => helpers.ts} | 2 +- packages/backend-core/src/index.ts | 18 +- packages/backend-core/src/installation.ts | 12 +- .../middleware/{adminOnly.js => adminOnly.ts} | 4 +- .../backend-core/src/middleware/auditLog.js | 4 - .../backend-core/src/middleware/auditLog.ts | 6 + .../src/middleware/authenticated.ts | 12 +- .../{builderOnly.js => builderOnly.ts} | 4 +- .../{builderOrAdmin.js => builderOrAdmin.ts} | 4 +- .../src/middleware/{csrf.js => csrf.ts} | 11 +- packages/backend-core/src/middleware/index.ts | 30 +- .../{internalApi.js => internalApi.ts} | 7 +- .../{joi-validator.js => joi-validator.ts} | 17 +- .../datasource/{google.js => google.ts} | 44 +- .../passport/{google.js => google.ts} | 38 +- .../src/middleware/passport/jwt.js | 18 - .../src/middleware/passport/jwt.ts | 19 + .../passport/{local.js => local.ts} | 34 +- .../middleware/passport/{oidc.js => oidc.ts} | 71 +-- .../passport/tests/third-party-common.spec.js | 2 +- ...-party-common.js => third-party-common.ts} | 42 +- .../passport/{utils.js => utils.ts} | 16 +- .../backend-core/src/objectStore/index.ts | 428 +----------------- .../src/objectStore/objectStore.ts | 426 +++++++++++++++++ .../src/objectStore/{utils.js => utils.ts} | 13 +- packages/backend-core/src/pino.js | 11 - packages/backend-core/src/pino.ts | 13 + packages/backend-core/src/pkg/cache.ts | 13 - packages/backend-core/src/pkg/context.ts | 26 -- packages/backend-core/src/pkg/objectStore.ts | 4 - packages/backend-core/src/pkg/redis.ts | 13 - packages/backend-core/src/pkg/utils.ts | 4 - .../src/plugin/{utils.js => utils.ts} | 16 +- .../backend-core/src/queue/inMemoryQueue.ts | 2 +- packages/backend-core/src/redis/index.ts | 284 +----------- packages/backend-core/src/redis/init.js | 69 --- packages/backend-core/src/redis/init.ts | 72 +++
packages/backend-core/src/redis/redis.ts | 279 ++++++++++++ .../src/redis/{utils.js => utils.ts} | 84 ++-- packages/backend-core/src/security/apiKeys.js | 1 - .../security/{encryption.js => encryption.ts} | 14 +- .../src/{hashing.js => utils/hashing.ts} | 10 +- packages/backend-core/src/utils/index.ts | 2 + .../backend-core/src/{ => utils}/utils.ts | 16 +- packages/backend-core/tenancy.js | 1 - packages/backend-core/utils.js | 4 - .../server/src/api/controllers/application.ts | 2 +- packages/types/src/documents/global/config.ts | 20 +- packages/types/src/documents/global/user.ts | 37 +- packages/types/src/sdk/context.ts | 2 +- .../src/api/controllers/global/configs.ts | 8 +- .../src/api/controllers/global/users.ts | 2 +- .../src/api/controllers/system/restore.ts | 2 +- 87 files changed, 1318 insertions(+), 1354 deletions(-) delete mode 100644 packages/backend-core/accounts.js delete mode 100644 packages/backend-core/auth.js delete mode 100644 packages/backend-core/cache.js delete mode 100644 packages/backend-core/constants.js delete mode 100644 packages/backend-core/context.js delete mode 100644 packages/backend-core/db.js delete mode 100644 packages/backend-core/deprovision.js delete mode 100644 packages/backend-core/encryption.js delete mode 100644 packages/backend-core/logging.js delete mode 100644 packages/backend-core/middleware.js delete mode 100644 packages/backend-core/migrations.js delete mode 100644 packages/backend-core/objectStore.js delete mode 100644 packages/backend-core/permissions.js delete mode 100644 packages/backend-core/plugins.js delete mode 100644 packages/backend-core/redis.js delete mode 100644 packages/backend-core/roles.js delete mode 100644 packages/backend-core/sessions.js rename packages/backend-core/src/cache/{appMetadata.js => appMetadata.ts} (79%) delete mode 100644 packages/backend-core/src/cache/generic.js create mode 100644 packages/backend-core/src/cache/generic.ts create mode 100644 packages/backend-core/src/cache/index.ts rename packages/backend-core/src/cache/{user.js => user.ts} (68%) delete mode 100644 packages/backend-core/src/cloud/api.js create mode 100644 packages/backend-core/src/cloud/api.ts delete mode 100644 packages/backend-core/src/constants.js rename packages/backend-core/src/featureFlags/{index.js => index.ts} (70%) rename packages/backend-core/src/{helpers.js => helpers.ts} (85%) rename packages/backend-core/src/middleware/{adminOnly.js => adminOnly.ts} (63%) delete mode 100644 packages/backend-core/src/middleware/auditLog.js create mode 100644 packages/backend-core/src/middleware/auditLog.ts rename packages/backend-core/src/middleware/{builderOnly.js => builderOnly.ts} (64%) rename packages/backend-core/src/middleware/{builderOrAdmin.js => builderOrAdmin.ts} (71%) rename packages/backend-core/src/middleware/{csrf.js => csrf.ts} (89%) rename packages/backend-core/src/middleware/{internalApi.js => internalApi.ts} (53%) rename packages/backend-core/src/middleware/{joi-validator.js => joi-validator.ts} (61%) rename packages/backend-core/src/middleware/passport/datasource/{google.js => google.ts} (71%) rename packages/backend-core/src/middleware/passport/{google.js => google.ts} (63%) delete mode 100644 packages/backend-core/src/middleware/passport/jwt.js create mode 100644 packages/backend-core/src/middleware/passport/jwt.ts rename packages/backend-core/src/middleware/passport/{local.js => local.ts} (73%) rename packages/backend-core/src/middleware/passport/{oidc.js => oidc.ts} (73%) rename 
packages/backend-core/src/middleware/passport/{third-party-common.js => third-party-common.ts} (77%) rename packages/backend-core/src/middleware/passport/{utils.js => utils.ts} (64%) create mode 100644 packages/backend-core/src/objectStore/objectStore.ts rename packages/backend-core/src/objectStore/{utils.js => utils.ts} (71%) delete mode 100644 packages/backend-core/src/pino.js create mode 100644 packages/backend-core/src/pino.ts delete mode 100644 packages/backend-core/src/pkg/cache.ts delete mode 100644 packages/backend-core/src/pkg/context.ts delete mode 100644 packages/backend-core/src/pkg/objectStore.ts delete mode 100644 packages/backend-core/src/pkg/redis.ts delete mode 100644 packages/backend-core/src/pkg/utils.ts rename packages/backend-core/src/plugin/{utils.js => utils.ts} (89%) delete mode 100644 packages/backend-core/src/redis/init.js create mode 100644 packages/backend-core/src/redis/init.ts create mode 100644 packages/backend-core/src/redis/redis.ts rename packages/backend-core/src/redis/{utils.js => utils.ts} (68%) delete mode 100644 packages/backend-core/src/security/apiKeys.js rename packages/backend-core/src/security/{encryption.js => encryption.ts} (73%) rename packages/backend-core/src/{hashing.js => utils/hashing.ts} (59%) create mode 100644 packages/backend-core/src/utils/index.ts rename packages/backend-core/src/{ => utils}/utils.ts (94%) delete mode 100644 packages/backend-core/tenancy.js delete mode 100644 packages/backend-core/utils.js diff --git a/packages/backend-core/accounts.js b/packages/backend-core/accounts.js deleted file mode 100644 index 47ad03456a..0000000000 --- a/packages/backend-core/accounts.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/cloud/accounts") diff --git a/packages/backend-core/auth.js b/packages/backend-core/auth.js deleted file mode 100644 index bbfe3d41dd..0000000000 --- a/packages/backend-core/auth.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/auth") diff --git a/packages/backend-core/cache.js b/packages/backend-core/cache.js deleted file mode 100644 index c8bd3c9b6f..0000000000 --- a/packages/backend-core/cache.js +++ /dev/null @@ -1,9 +0,0 @@ -const generic = require("./src/cache/generic") - -module.exports = { - user: require("./src/cache/user"), - app: require("./src/cache/appMetadata"), - writethrough: require("./src/cache/writethrough"), - ...generic, - cache: generic, -} diff --git a/packages/backend-core/constants.js b/packages/backend-core/constants.js deleted file mode 100644 index 4abb7703db..0000000000 --- a/packages/backend-core/constants.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/constants") diff --git a/packages/backend-core/context.js b/packages/backend-core/context.js deleted file mode 100644 index c6fa87a337..0000000000 --- a/packages/backend-core/context.js +++ /dev/null @@ -1,24 +0,0 @@ -const { - getAppDB, - getDevAppDB, - getProdAppDB, - getAppId, - updateAppId, - doInAppContext, - doInTenant, - doInContext, -} = require("./src/context") - -const identity = require("./src/context/identity") - -module.exports = { - getAppDB, - getDevAppDB, - getProdAppDB, - getAppId, - updateAppId, - doInAppContext, - doInTenant, - identity, - doInContext, -} diff --git a/packages/backend-core/db.js b/packages/backend-core/db.js deleted file mode 100644 index f7004972d5..0000000000 --- a/packages/backend-core/db.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/db") diff --git a/packages/backend-core/deprovision.js b/packages/backend-core/deprovision.js 
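Every top-level .js file removed in this first batch is a one-line CommonJS shim re-exporting a module under src/, and each is superseded by a TypeScript barrel module plus a consolidated entry point. A minimal before/after sketch of the consumer-side change (the import path and call are illustrative, assuming callers move to the package entry point rather than deep requires; "app_dev_123" is a made-up app ID):

    // Before (root shim, deleted above):
    //   const cache = require("@budibase/backend-core/cache")

    // After (assumed consumer pattern; src/index.ts re-exports the TS modules):
    import { cache } from "@budibase/backend-core"

    // e.g. read cached app metadata through the new cache barrel
    const metadata = await cache.app.getAppMetadata("app_dev_123")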
deleted file mode 100644 index 672da214ff..0000000000 --- a/packages/backend-core/deprovision.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/context/deprovision") diff --git a/packages/backend-core/encryption.js b/packages/backend-core/encryption.js deleted file mode 100644 index 4ccb6e3a99..0000000000 --- a/packages/backend-core/encryption.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/security/encryption") diff --git a/packages/backend-core/logging.js b/packages/backend-core/logging.js deleted file mode 100644 index da40fe3100..0000000000 --- a/packages/backend-core/logging.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/logging") diff --git a/packages/backend-core/middleware.js b/packages/backend-core/middleware.js deleted file mode 100644 index 30fec96239..0000000000 --- a/packages/backend-core/middleware.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/middleware") diff --git a/packages/backend-core/migrations.js b/packages/backend-core/migrations.js deleted file mode 100644 index 2de19ebf65..0000000000 --- a/packages/backend-core/migrations.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/migrations") diff --git a/packages/backend-core/objectStore.js b/packages/backend-core/objectStore.js deleted file mode 100644 index 3ee433f224..0000000000 --- a/packages/backend-core/objectStore.js +++ /dev/null @@ -1,4 +0,0 @@ -module.exports = { - ...require("./src/objectStore"), - ...require("./src/objectStore/utils"), -} diff --git a/packages/backend-core/permissions.js b/packages/backend-core/permissions.js deleted file mode 100644 index 42f37c9c7e..0000000000 --- a/packages/backend-core/permissions.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/security/permissions") diff --git a/packages/backend-core/plugins.js b/packages/backend-core/plugins.js deleted file mode 100644 index 018e214dcb..0000000000 --- a/packages/backend-core/plugins.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - ...require("./src/plugin"), -} diff --git a/packages/backend-core/redis.js b/packages/backend-core/redis.js deleted file mode 100644 index 1f7a48540a..0000000000 --- a/packages/backend-core/redis.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - Client: require("./src/redis"), - utils: require("./src/redis/utils"), - clients: require("./src/redis/init"), -} diff --git a/packages/backend-core/roles.js b/packages/backend-core/roles.js deleted file mode 100644 index 158bcdb6b8..0000000000 --- a/packages/backend-core/roles.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/security/roles") diff --git a/packages/backend-core/sessions.js b/packages/backend-core/sessions.js deleted file mode 100644 index c07efa2380..0000000000 --- a/packages/backend-core/sessions.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/security/sessions") diff --git a/packages/backend-core/src/cache/appMetadata.js b/packages/backend-core/src/cache/appMetadata.ts similarity index 79% rename from packages/backend-core/src/cache/appMetadata.js rename to packages/backend-core/src/cache/appMetadata.ts index a7ff0d2fc1..d24c4a3140 100644 --- a/packages/backend-core/src/cache/appMetadata.js +++ b/packages/backend-core/src/cache/appMetadata.ts @@ -1,6 +1,6 @@ -const redis = require("../redis/init") -const { doWithDB } = require("../db") -const { DocumentType } = require("../db/constants") +import { getAppClient } from "../redis/init" +import { doWithDB, DocumentType } from "../db" +import { Database } from 
"@budibase/types" const AppState = { INVALID: "invalid", @@ -10,17 +10,17 @@ const EXPIRY_SECONDS = 3600 /** * The default populate app metadata function */ -const populateFromDB = async appId => { +async function populateFromDB(appId: string) { return doWithDB( appId, - db => { + (db: Database) => { return db.get(DocumentType.APP_METADATA) }, { skip_setup: true } ) } -const isInvalid = metadata => { +function isInvalid(metadata?: { state: string }) { return !metadata || metadata.state === AppState.INVALID } @@ -31,15 +31,15 @@ const isInvalid = metadata => { * @param {string} appId the id of the app to get metadata from. * @returns {object} the app metadata. */ -exports.getAppMetadata = async appId => { - const client = await redis.getAppClient() +export async function getAppMetadata(appId: string) { + const client = await getAppClient() // try cache let metadata = await client.get(appId) if (!metadata) { - let expiry = EXPIRY_SECONDS + let expiry: number | undefined = EXPIRY_SECONDS try { metadata = await populateFromDB(appId) - } catch (err) { + } catch (err: any) { // app DB left around, but no metadata, it is invalid if (err && err.status === 404) { metadata = { state: AppState.INVALID } @@ -74,11 +74,11 @@ exports.getAppMetadata = async appId => { * @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with. * @return {Promise} will respond with success when cache is updated. */ -exports.invalidateAppMetadata = async (appId, newMetadata = null) => { +export async function invalidateAppMetadata(appId: string, newMetadata?: any) { if (!appId) { throw "Cannot invalidate if no app ID provided." } - const client = await redis.getAppClient() + const client = await getAppClient() await client.delete(appId) if (newMetadata) { await client.store(appId, newMetadata, EXPIRY_SECONDS) diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts index f3216531f4..ab620a900e 100644 --- a/packages/backend-core/src/cache/base/index.ts +++ b/packages/backend-core/src/cache/base/index.ts @@ -1,6 +1,6 @@ import { getTenantId } from "../../context" -import redis from "../../redis/init" -import RedisWrapper from "../../redis" +import * as redis from "../../redis/init" +import { Client } from "../../redis" function generateTenantKey(key: string) { const tenantId = getTenantId() @@ -8,9 +8,9 @@ function generateTenantKey(key: string) { } export = class BaseCache { - client: RedisWrapper | undefined + client: Client | undefined - constructor(client: RedisWrapper | undefined = undefined) { + constructor(client: Client | undefined = undefined) { this.client = client } diff --git a/packages/backend-core/src/cache/generic.js b/packages/backend-core/src/cache/generic.js deleted file mode 100644 index 26ef0c6bb0..0000000000 --- a/packages/backend-core/src/cache/generic.js +++ /dev/null @@ -1,30 +0,0 @@ -const BaseCache = require("./base") - -const GENERIC = new BaseCache() - -exports.CacheKeys = { - CHECKLIST: "checklist", - INSTALLATION: "installation", - ANALYTICS_ENABLED: "analyticsEnabled", - UNIQUE_TENANT_ID: "uniqueTenantId", - EVENTS: "events", - BACKFILL_METADATA: "backfillMetadata", - EVENTS_RATE_LIMIT: "eventsRateLimit", -} - -exports.TTL = { - ONE_MINUTE: 600, - ONE_HOUR: 3600, - ONE_DAY: 86400, -} - -function performExport(funcName) { - return (...args) => GENERIC[funcName](...args) -} - -exports.keys = performExport("keys") -exports.get = performExport("get") -exports.store = performExport("store") -exports.delete = 
performExport("delete") -exports.withCache = performExport("withCache") -exports.bustCache = performExport("bustCache") diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts new file mode 100644 index 0000000000..d8a54e4a3f --- /dev/null +++ b/packages/backend-core/src/cache/generic.ts @@ -0,0 +1,30 @@ +const BaseCache = require("./base") + +const GENERIC = new BaseCache() + +export enum CacheKey { + CHECKLIST = "checklist", + INSTALLATION = "installation", + ANALYTICS_ENABLED = "analyticsEnabled", + UNIQUE_TENANT_ID = "uniqueTenantId", + EVENTS = "events", + BACKFILL_METADATA = "backfillMetadata", + EVENTS_RATE_LIMIT = "eventsRateLimit", +} + +export enum TTL { + ONE_MINUTE = 600, + ONE_HOUR = 3600, + ONE_DAY = 86400, +} + +function performExport(funcName: string) { + return (...args: any) => GENERIC[funcName](...args) +} + +export const keys = performExport("keys") +export const get = performExport("get") +export const store = performExport("store") +export const destroy = performExport("delete") +export const withCache = performExport("withCache") +export const bustCache = performExport("bustCache") diff --git a/packages/backend-core/src/cache/index.ts b/packages/backend-core/src/cache/index.ts new file mode 100644 index 0000000000..1b057aabd1 --- /dev/null +++ b/packages/backend-core/src/cache/index.ts @@ -0,0 +1,4 @@ +export * as generic from "./generic" +export * as user from "./user" +export * as app from "./appMetadata" +export * as writethrough from "./writethrough" diff --git a/packages/backend-core/src/cache/user.js b/packages/backend-core/src/cache/user.ts similarity index 68% rename from packages/backend-core/src/cache/user.js rename to packages/backend-core/src/cache/user.ts index 130da1915e..a128465cd6 100644 --- a/packages/backend-core/src/cache/user.js +++ b/packages/backend-core/src/cache/user.ts @@ -1,15 +1,16 @@ -const redis = require("../redis/init") -const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy") -const env = require("../environment") -const accounts = require("../cloud/accounts") +import * as redis from "../redis/init" +import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy" +import env from "../environment" +import * as accounts from "../cloud/accounts" +import { Database } from "@budibase/types" const EXPIRY_SECONDS = 3600 /** * The default populate user function */ -const populateFromDB = async (userId, tenantId) => { - const user = await doWithGlobalDB(tenantId, db => db.get(userId)) +async function populateFromDB(userId: string, tenantId: string) { + const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId)) user.budibaseAccess = true if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { const account = await accounts.getAccount(user.email) @@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => { * @param {*} populateUser function to provide the user for re-caching. 
default to couch db * @returns */ -exports.getUser = async (userId, tenantId = null, populateUser = null) => { +export async function getUser( + userId: string, + tenantId?: string, + populateUser?: any +) { if (!populateUser) { populateUser = populateFromDB } @@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => { let user = await client.get(userId) if (!user) { user = await populateUser(userId, tenantId) - client.store(userId, user, EXPIRY_SECONDS) + await client.store(userId, user, EXPIRY_SECONDS) } if (user && !user.tenantId && tenantId) { // make sure the tenant ID is always correct/set @@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => { return user } -exports.invalidateUser = async userId => { +export async function invalidateUser(userId: string) { const client = await redis.getUserClient() await client.delete(userId) } diff --git a/packages/backend-core/src/cloud/api.js b/packages/backend-core/src/cloud/api.js deleted file mode 100644 index d4d4b6c8bb..0000000000 --- a/packages/backend-core/src/cloud/api.js +++ /dev/null @@ -1,42 +0,0 @@ -const fetch = require("node-fetch") -class API { - constructor(host) { - this.host = host - } - - apiCall = - method => - async (url = "", options = {}) => { - if (!options.headers) { - options.headers = {} - } - - if (!options.headers["Content-Type"]) { - options.headers = { - "Content-Type": "application/json", - Accept: "application/json", - ...options.headers, - } - } - - let json = options.headers["Content-Type"] === "application/json" - - const requestOptions = { - method: method, - body: json ? JSON.stringify(options.body) : options.body, - headers: options.headers, - // TODO: See if this is necessary - credentials: "include", - } - - return await fetch(`${this.host}${url}`, requestOptions) - } - - post = this.apiCall("POST") - get = this.apiCall("GET") - patch = this.apiCall("PATCH") - del = this.apiCall("DELETE") - put = this.apiCall("PUT") -} - -module.exports = API diff --git a/packages/backend-core/src/cloud/api.ts b/packages/backend-core/src/cloud/api.ts new file mode 100644 index 0000000000..287c447271 --- /dev/null +++ b/packages/backend-core/src/cloud/api.ts @@ -0,0 +1,55 @@ +import fetch from "node-fetch" + +export = class API { + host: string + + constructor(host: string) { + this.host = host + } + + async apiCall(method: string, url: string, options?: any) { + if (!options.headers) { + options.headers = {} + } + + if (!options.headers["Content-Type"]) { + options.headers = { + "Content-Type": "application/json", + Accept: "application/json", + ...options.headers, + } + } + + let json = options.headers["Content-Type"] === "application/json" + + const requestOptions = { + method: method, + body: json ? 
JSON.stringify(options.body) : options.body, + headers: options.headers, + // TODO: See if this is necessary + credentials: "include", + } + + return await fetch(`${this.host}${url}`, requestOptions) + } + + async post(url: string, options?: any) { + return this.apiCall("POST", url, options) + } + + async get(url: string, options?: any) { + return this.apiCall("GET", url, options) + } + + async patch(url: string, options?: any) { + return this.apiCall("PATCH", url, options) + } + + async del(url: string, options?: any) { + return this.apiCall("DELETE", url, options) + } + + async put(url: string, options?: any) { + return this.apiCall("PUT", url, options) + } +} diff --git a/packages/backend-core/src/constants.js b/packages/backend-core/src/constants.js deleted file mode 100644 index 7fda17f6f2..0000000000 --- a/packages/backend-core/src/constants.js +++ /dev/null @@ -1,44 +0,0 @@ -exports.UserStatus = { - ACTIVE: "active", - INACTIVE: "inactive", -} - -exports.Cookie = { - CurrentApp: "budibase:currentapp", - Auth: "budibase:auth", - Init: "budibase:init", - ACCOUNT_RETURN_URL: "budibase:account:returnurl", - DatasourceAuth: "budibase:datasourceauth", - OIDC_CONFIG: "budibase:oidc:config", -} - -exports.Header = { - API_KEY: "x-budibase-api-key", - LICENSE_KEY: "x-budibase-license-key", - API_VER: "x-budibase-api-version", - APP_ID: "x-budibase-app-id", - TYPE: "x-budibase-type", - PREVIEW_ROLE: "x-budibase-role", - TENANT_ID: "x-budibase-tenant-id", - TOKEN: "x-budibase-token", - CSRF_TOKEN: "x-csrf-token", -} - -exports.GlobalRoles = { - OWNER: "owner", - ADMIN: "admin", - BUILDER: "builder", - WORKSPACE_MANAGER: "workspace_manager", -} - -exports.Config = { - SETTINGS: "settings", - ACCOUNT: "account", - SMTP: "smtp", - GOOGLE: "google", - OIDC: "oidc", - OIDC_LOGOS: "logos_oidc", -} - -exports.MAX_VALID_DATE = new Date(2147483647000) -exports.DEFAULT_TENANT_ID = "default" diff --git a/packages/backend-core/src/context/identity.ts b/packages/backend-core/src/context/identity.ts index 37e1ecf40a..2ae0372a8d 100644 --- a/packages/backend-core/src/context/identity.ts +++ b/packages/backend-core/src/context/identity.ts @@ -18,7 +18,7 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => { } export const doInUserContext = (user: User, task: any) => { - const userContext: UserContext = { + const userContext: any = { ...user, _id: user._id as string, type: IdentityType.USER, diff --git a/packages/backend-core/src/db/utils.ts b/packages/backend-core/src/db/utils.ts index 04feafa008..558dde1e3b 100644 --- a/packages/backend-core/src/db/utils.ts +++ b/packages/backend-core/src/db/utils.ts @@ -1,4 +1,4 @@ -import { newid } from "../hashing" +import { newid } from "../utils" import { DEFAULT_TENANT_ID, Config } from "../constants" import env from "../environment" import { diff --git a/packages/backend-core/src/events/analytics.ts b/packages/backend-core/src/events/analytics.ts index 228805ef82..90cacc902b 100644 --- a/packages/backend-core/src/events/analytics.ts +++ b/packages/backend-core/src/events/analytics.ts @@ -2,7 +2,7 @@ import env from "../environment" import tenancy from "../tenancy" import * as dbUtils from "../db/utils" import { Config } from "../constants" -import { withCache, TTL, CacheKeys } from "../cache/generic" +import { withCache, TTL, CacheKey } from "../cache/generic" export const enabled = async () => { // cloud - always use the environment variable @@ -13,7 +13,7 @@ export const enabled = async () => { // self host - prefer the settings doc // use 
cache as events have high throughput const enabledInDB = await withCache( - CacheKeys.ANALYTICS_ENABLED, + CacheKey.ANALYTICS_ENABLED, TTL.ONE_DAY, async () => { const settings = await getSettingsDoc() diff --git a/packages/backend-core/src/events/backfill.ts b/packages/backend-core/src/events/backfill.ts index e4577c5ab4..c8025a8e4e 100644 --- a/packages/backend-core/src/events/backfill.ts +++ b/packages/backend-core/src/events/backfill.ts @@ -21,7 +21,7 @@ import { AppCreatedEvent, } from "@budibase/types" import * as context from "../context" -import { CacheKeys } from "../cache/generic" +import { CacheKey } from "../cache/generic" import * as cache from "../cache/generic" // LIFECYCLE @@ -48,18 +48,18 @@ export const end = async () => { // CRUD const getBackfillMetadata = async (): Promise => { - return cache.get(CacheKeys.BACKFILL_METADATA) + return cache.get(CacheKey.BACKFILL_METADATA) } const saveBackfillMetadata = async ( backfill: BackfillMetadata ): Promise => { // no TTL - deleted by backfill - return cache.store(CacheKeys.BACKFILL_METADATA, backfill) + return cache.store(CacheKey.BACKFILL_METADATA, backfill) } const deleteBackfillMetadata = async (): Promise => { - await cache.delete(CacheKeys.BACKFILL_METADATA) + await cache.destroy(CacheKey.BACKFILL_METADATA) } const clearEvents = async () => { @@ -70,7 +70,7 @@ const clearEvents = async () => { for (const key of keys) { // delete each key // don't use tenancy, already in the key - await cache.delete(key, { useTenancy: false }) + await cache.destroy(key, { useTenancy: false }) } } @@ -167,7 +167,7 @@ const getEventKey = (event?: Event, properties?: any) => { const tenantId = context.getTenantId() if (event) { - eventKey = `${CacheKeys.EVENTS}:${tenantId}:${event}` + eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}` // use some properties to make the key more unique const custom = CUSTOM_PROPERTY_SUFFIX[event] @@ -176,7 +176,7 @@ const getEventKey = (event?: Event, properties?: any) => { eventKey = `${eventKey}:${suffix}` } } else { - eventKey = `${CacheKeys.EVENTS}:${tenantId}:*` + eventKey = `${CacheKey.EVENTS}:${tenantId}:*` } return eventKey diff --git a/packages/backend-core/src/events/identification.ts b/packages/backend-core/src/events/identification.ts index 0b4b043837..b93bd44968 100644 --- a/packages/backend-core/src/events/identification.ts +++ b/packages/backend-core/src/events/identification.ts @@ -20,9 +20,9 @@ import { import { processors } from "./processors" import * as dbUtils from "../db/utils" import { Config } from "../constants" -import * as hashing from "../hashing" +import { newid } from "../utils" import * as installation from "../installation" -import { withCache, TTL, CacheKeys } from "../cache/generic" +import { withCache, TTL, CacheKey } from "../cache/generic" const pkg = require("../../package.json") @@ -270,7 +270,7 @@ const getEventTenantId = async (tenantId: string): Promise => { const getUniqueTenantId = async (tenantId: string): Promise => { // make sure this tenantId always matches the tenantId in context return context.doInTenant(tenantId, () => { - return withCache(CacheKeys.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => { + return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => { const db = context.getGlobalDB() const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, { type: Config.SETTINGS, @@ -280,7 +280,7 @@ const getUniqueTenantId = async (tenantId: string): Promise => { if (config.config.uniqueTenantId) { return config.config.uniqueTenantId } else { - 
uniqueTenantId = `${hashing.newid()}_${tenantId}` + uniqueTenantId = `${newid()}_${tenantId}` config.config.uniqueTenantId = uniqueTenantId await db.put(config) return uniqueTenantId diff --git a/packages/backend-core/src/events/processors/posthog/rateLimiting.ts b/packages/backend-core/src/events/processors/posthog/rateLimiting.ts index 9c7b7876d6..89da10defa 100644 --- a/packages/backend-core/src/events/processors/posthog/rateLimiting.ts +++ b/packages/backend-core/src/events/processors/posthog/rateLimiting.ts @@ -1,5 +1,5 @@ import { Event } from "@budibase/types" -import { CacheKeys, TTL } from "../../../cache/generic" +import { CacheKey, TTL } from "../../../cache/generic" import * as cache from "../../../cache/generic" import * as context from "../../../context" @@ -74,7 +74,7 @@ export const limited = async (event: Event): Promise => { } const eventKey = (event: RateLimitedEvent) => { - let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}` + let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}` if (isPerApp(event)) { key = key + ":" + context.getAppId() } diff --git a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts index c9c4ceffe3..349a0427ac 100644 --- a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts +++ b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts @@ -3,7 +3,7 @@ import PosthogProcessor from "../PosthogProcessor" import { Event, IdentityType, Hosting } from "@budibase/types" const tk = require("timekeeper") import * as cache from "../../../../cache/generic" -import { CacheKeys } from "../../../../cache/generic" +import { CacheKey } from "../../../../cache/generic" import * as context from "../../../../context" const newIdentity = () => { @@ -19,7 +19,7 @@ describe("PosthogProcessor", () => { beforeEach(async () => { jest.clearAllMocks() await cache.bustCache( - `${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` + `${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` ) }) @@ -89,7 +89,7 @@ describe("PosthogProcessor", () => { await processor.processEvent(Event.SERVED_BUILDER, identity, properties) await cache.bustCache( - `${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` + `${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}` ) tk.freeze(new Date(2022, 0, 1, 14, 0)) diff --git a/packages/backend-core/src/featureFlags/index.js b/packages/backend-core/src/featureFlags/index.ts similarity index 70% rename from packages/backend-core/src/featureFlags/index.js rename to packages/backend-core/src/featureFlags/index.ts index 8a8162d0ba..0d59e5129e 100644 --- a/packages/backend-core/src/featureFlags/index.js +++ b/packages/backend-core/src/featureFlags/index.ts @@ -1,17 +1,17 @@ -const env = require("../environment") -const tenancy = require("../tenancy") +import env from "../environment" +import tenancy from "../tenancy" /** * Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant. 
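A quick usage sketch of the converted flag helpers, assuming TENANT_FEATURE_FLAGS is set in the comma/colon format the doc comment goes on to describe (the env value and tenant IDs here are made up):

    // TENANT_FEATURE_FLAGS="tenant1:LICENSING:GOOGLE_SHEETS,tenant2:USER_GROUPS"
    import { getTenantFeatureFlags, isEnabled, TenantFeatureFlag } from "../featureFlags"

    const flags = getTenantFeatureFlags("tenant1") // ["LICENSING", "GOOGLE_SHEETS"]

    // isEnabled resolves the tenant ID from the current tenancy context:
    const licensed = isEnabled(TenantFeatureFlag.LICENSING)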
* The env var is formatted as: * tenant1:feature1:feature2,tenant2:feature1 */ -const getFeatureFlags = () => { +function getFeatureFlags() { if (!env.TENANT_FEATURE_FLAGS) { return } - const tenantFeatureFlags = {} + const tenantFeatureFlags: Record = {} env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => { const [tenantId, ...features] = tenantToFeatures.split(":") @@ -29,13 +29,13 @@ const getFeatureFlags = () => { const TENANT_FEATURE_FLAGS = getFeatureFlags() -exports.isEnabled = featureFlag => { +export function isEnabled(featureFlag: string) { const tenantId = tenancy.getTenantId() - const flags = exports.getTenantFeatureFlags(tenantId) + const flags = getTenantFeatureFlags(tenantId) return flags.includes(featureFlag) } -exports.getTenantFeatureFlags = tenantId => { +export function getTenantFeatureFlags(tenantId: string) { const flags = [] if (TENANT_FEATURE_FLAGS) { @@ -53,8 +53,8 @@ exports.getTenantFeatureFlags = tenantId => { return flags } -exports.TenantFeatureFlag = { - LICENSING: "LICENSING", - GOOGLE_SHEETS: "GOOGLE_SHEETS", - USER_GROUPS: "USER_GROUPS", +export enum TenantFeatureFlag { + LICENSING = "LICENSING", + GOOGLE_SHEETS = "GOOGLE_SHEETS", + USER_GROUPS = "USER_GROUPS", } diff --git a/packages/backend-core/src/helpers.js b/packages/backend-core/src/helpers.ts similarity index 85% rename from packages/backend-core/src/helpers.js rename to packages/backend-core/src/helpers.ts index b402a82cf3..e1e065bd4e 100644 --- a/packages/backend-core/src/helpers.js +++ b/packages/backend-core/src/helpers.ts @@ -4,6 +4,6 @@ * @param {string} url The URL to test and remove any extra double slashes. * @return {string} The updated url. */ -exports.checkSlashesInUrl = url => { +export function checkSlashesInUrl(url: string) { return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2") } diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts index c68c8f0927..ace9bc5034 100644 --- a/packages/backend-core/src/index.ts +++ b/packages/backend-core/src/index.ts @@ -9,26 +9,24 @@ import * as accounts from "./cloud/accounts" import * as installation from "./installation" import env from "./environment" import tenancy from "./tenancy" -import featureFlags from "./featureFlags" +import * as featureFlags from "./featureFlags" import * as sessions from "./security/sessions" import * as deprovisioning from "./context/deprovision" import auth from "./auth" import * as constants from "./constants" import * as dbConstants from "./db/constants" import * as logging from "./logging" -import pino from "./pino" +import * as pino from "./pino" import * as middleware from "./middleware" import plugins from "./plugin" -import encryption from "./security/encryption" +import * as encryption from "./security/encryption" import * as queue from "./queue" import * as db from "./db" - -// mimic the outer package exports -import * as objectStore from "./pkg/objectStore" -import * as utils from "./pkg/utils" -import redis from "./pkg/redis" -import cache from "./pkg/cache" -import context from "./pkg/context" +import * as context from "./context" +import * as cache from "./cache" +import * as objectStore from "./objectStore" +import * as redis from "./redis" +import * as utils from "./utils" const init = (opts: any = {}) => { db.init(opts.db) diff --git a/packages/backend-core/src/installation.ts b/packages/backend-core/src/installation.ts index da9b6c5b76..4e78a508a5 100644 --- a/packages/backend-core/src/installation.ts +++ b/packages/backend-core/src/installation.ts @@ -1,16 
+1,16 @@ -import * as hashing from "./hashing" +import { newid } from "./utils" import * as events from "./events" -import { StaticDatabases } from "./db/constants" +import { StaticDatabases } from "./db" import { doWithDB } from "./db" import { Installation, IdentityType } from "@budibase/types" import * as context from "./context" import semver from "semver" -import { bustCache, withCache, TTL, CacheKeys } from "./cache/generic" +import { bustCache, withCache, TTL, CacheKey } from "./cache/generic" const pkg = require("../package.json") export const getInstall = async (): Promise => { - return withCache(CacheKeys.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, { + return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, { useTenancy: false, }) } @@ -28,7 +28,7 @@ const getInstallFromDB = async (): Promise => { if (e.status === 404) { install = { _id: StaticDatabases.PLATFORM_INFO.docs.install, - installId: hashing.newid(), + installId: newid(), version: pkg.version, } const resp = await platformDb.put(install) @@ -50,7 +50,7 @@ const updateVersion = async (version: string): Promise => { const install = await getInstall() install.version = version await platformDb.put(install) - await bustCache(CacheKeys.INSTALLATION) + await bustCache(CacheKey.INSTALLATION) } ) } catch (e: any) { diff --git a/packages/backend-core/src/middleware/adminOnly.js b/packages/backend-core/src/middleware/adminOnly.ts similarity index 63% rename from packages/backend-core/src/middleware/adminOnly.js rename to packages/backend-core/src/middleware/adminOnly.ts index 4bfdf83848..30fdf2907b 100644 --- a/packages/backend-core/src/middleware/adminOnly.js +++ b/packages/backend-core/src/middleware/adminOnly.ts @@ -1,4 +1,6 @@ -module.exports = async (ctx, next) => { +import { BBContext } from "@budibase/types" + +export = async (ctx: BBContext, next: any) => { if ( !ctx.internal && (!ctx.user || !ctx.user.admin || !ctx.user.admin.global) diff --git a/packages/backend-core/src/middleware/auditLog.js b/packages/backend-core/src/middleware/auditLog.js deleted file mode 100644 index c9063ae2e0..0000000000 --- a/packages/backend-core/src/middleware/auditLog.js +++ /dev/null @@ -1,4 +0,0 @@ -module.exports = async (ctx, next) => { - // Placeholder for audit log middleware - return next() -} diff --git a/packages/backend-core/src/middleware/auditLog.ts b/packages/backend-core/src/middleware/auditLog.ts new file mode 100644 index 0000000000..4f4e395856 --- /dev/null +++ b/packages/backend-core/src/middleware/auditLog.ts @@ -0,0 +1,6 @@ +import { BBContext } from "@budibase/types" + +export = async (ctx: BBContext, next: any) => { + // Placeholder for audit log middleware + return next() +} diff --git a/packages/backend-core/src/middleware/authenticated.ts b/packages/backend-core/src/middleware/authenticated.ts index 8a1e52f414..9a582abde2 100644 --- a/packages/backend-core/src/middleware/authenticated.ts +++ b/packages/backend-core/src/middleware/authenticated.ts @@ -6,10 +6,12 @@ import { buildMatcherRegex, matches } from "./matchers" import { SEPARATOR, queryGlobalView, ViewName } from "../db" import { getGlobalDB, doInTenant } from "../tenancy" import { decrypt } from "../security/encryption" -const identity = require("../context/identity") -const env = require("../environment") +import * as identity from "../context/identity" +import env from "../environment" -const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000 +const ONE_MINUTE = env.SESSION_UPDATE_PERIOD + ? 
parseInt(env.SESSION_UPDATE_PERIOD) + : 60 * 1000 interface FinaliseOpts { authenticated?: boolean @@ -40,13 +42,13 @@ async function checkApiKey(apiKey: string, populateUser?: Function) { return doInTenant(tenantId, async () => { const db = getGlobalDB() // api key is encrypted in the database - const userId = await queryGlobalView( + const userId = (await queryGlobalView( ViewName.BY_API_KEY, { key: apiKey, }, db - ) + )) as string if (userId) { return { valid: true, diff --git a/packages/backend-core/src/middleware/builderOnly.js b/packages/backend-core/src/middleware/builderOnly.ts similarity index 64% rename from packages/backend-core/src/middleware/builderOnly.js rename to packages/backend-core/src/middleware/builderOnly.ts index 2128626db4..e13882d7f6 100644 --- a/packages/backend-core/src/middleware/builderOnly.js +++ b/packages/backend-core/src/middleware/builderOnly.ts @@ -1,4 +1,6 @@ -module.exports = async (ctx, next) => { +import { BBContext } from "@budibase/types" + +export = async (ctx: BBContext, next: any) => { if ( !ctx.internal && (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) diff --git a/packages/backend-core/src/middleware/builderOrAdmin.js b/packages/backend-core/src/middleware/builderOrAdmin.ts similarity index 71% rename from packages/backend-core/src/middleware/builderOrAdmin.js rename to packages/backend-core/src/middleware/builderOrAdmin.ts index 6440766298..26664695f8 100644 --- a/packages/backend-core/src/middleware/builderOrAdmin.js +++ b/packages/backend-core/src/middleware/builderOrAdmin.ts @@ -1,4 +1,6 @@ -module.exports = async (ctx, next) => { +import { BBContext } from "@budibase/types" + +export = async (ctx: BBContext, next: any) => { if ( !ctx.internal && (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) && diff --git a/packages/backend-core/src/middleware/csrf.js b/packages/backend-core/src/middleware/csrf.ts similarity index 89% rename from packages/backend-core/src/middleware/csrf.js rename to packages/backend-core/src/middleware/csrf.ts index 1557740cd6..d19dbd8aed 100644 --- a/packages/backend-core/src/middleware/csrf.js +++ b/packages/backend-core/src/middleware/csrf.ts @@ -1,5 +1,6 @@ -const { Header } = require("../constants") -const { buildMatcherRegex, matches } = require("./matchers") +import { Header } from "../constants" +import { buildMatcherRegex, matches } from "./matchers" +import { BBContext } from "@budibase/types" /** * GET, HEAD and OPTIONS methods are considered safe operations @@ -31,9 +32,9 @@ const INCLUDED_CONTENT_TYPES = [ * https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern * */ -module.exports = (opts = { noCsrfPatterns: [] }) => { +export = (opts = { noCsrfPatterns: [] }) => { const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns) - return async (ctx, next) => { + return async (ctx: BBContext, next: any) => { // don't apply for excluded paths const found = matches(ctx, noCsrfOptions) if (found) { @@ -62,7 +63,7 @@ module.exports = (opts = { noCsrfPatterns: [] }) => { // apply csrf when there is a token in the session (new logins) // in future there should be a hard requirement that the token is present - const userToken = ctx.user.csrfToken + const userToken = ctx.user?.csrfToken if (!userToken) { return next() } diff --git a/packages/backend-core/src/middleware/index.ts b/packages/backend-core/src/middleware/index.ts index 998c231b3d..2b332f5c49 100644 --- a/packages/backend-core/src/middleware/index.ts +++ 
b/packages/backend-core/src/middleware/index.ts @@ -1,18 +1,18 @@ -const jwt = require("./passport/jwt") -const local = require("./passport/local") -const google = require("./passport/google") -const oidc = require("./passport/oidc") -const { authError, ssoCallbackUrl } = require("./passport/utils") -const authenticated = require("./authenticated") -const auditLog = require("./auditLog") -const tenancy = require("./tenancy") -const internalApi = require("./internalApi") -const datasourceGoogle = require("./passport/datasource/google") -const csrf = require("./csrf") -const adminOnly = require("./adminOnly") -const builderOrAdmin = require("./builderOrAdmin") -const builderOnly = require("./builderOnly") -const joiValidator = require("./joi-validator") +import * as jwt from "./passport/jwt" +import * as local from "./passport/local" +import * as google from "./passport/google" +import * as oidc from "./passport/oidc" +import { authError, ssoCallbackUrl } from "./passport/utils" +import authenticated from "./authenticated" +import auditLog from "./auditLog" +import tenancy from "./tenancy" +import internalApi from "./internalApi" +import * as datasourceGoogle from "./passport/datasource/google" +import csrf from "./csrf" +import adminOnly from "./adminOnly" +import builderOrAdmin from "./builderOrAdmin" +import builderOnly from "./builderOnly" +import * as joiValidator from "./joi-validator" const pkg = { google, diff --git a/packages/backend-core/src/middleware/internalApi.js b/packages/backend-core/src/middleware/internalApi.ts similarity index 53% rename from packages/backend-core/src/middleware/internalApi.js rename to packages/backend-core/src/middleware/internalApi.ts index 05833842ce..f4f08ec2dd 100644 --- a/packages/backend-core/src/middleware/internalApi.js +++ b/packages/backend-core/src/middleware/internalApi.ts @@ -1,10 +1,11 @@ -const env = require("../environment") -const { Header } = require("../constants") +import env from "../environment" +import { Header } from "../constants" +import { BBContext } from "@budibase/types" /** * API Key only endpoint. */ -module.exports = async (ctx, next) => { +export = async (ctx: BBContext, next: any) => { const apiKey = ctx.request.headers[Header.API_KEY] if (apiKey !== env.INTERNAL_API_KEY) { ctx.throw(403, "Unauthorized") diff --git a/packages/backend-core/src/middleware/joi-validator.js b/packages/backend-core/src/middleware/joi-validator.ts similarity index 61% rename from packages/backend-core/src/middleware/joi-validator.js rename to packages/backend-core/src/middleware/joi-validator.ts index 6812dbdd54..d2b86fb4bc 100644 --- a/packages/backend-core/src/middleware/joi-validator.js +++ b/packages/backend-core/src/middleware/joi-validator.ts @@ -1,16 +1,19 @@ -const Joi = require("joi") +import Joi, { ObjectSchema } from "joi" +import { BBContext } from "@budibase/types" -function validate(schema, property) { +function validate(schema: Joi.ObjectSchema, property: string) { // Return a Koa middleware function - return (ctx, next) => { + return (ctx: BBContext, next: any) => { if (!schema) { return next() } let params = null + // @ts-ignore + let reqProp = ctx.request?.[property] if (ctx[property] != null) { params = ctx[property] - } else if (ctx.request[property] != null) { - params = ctx.request[property] + } else if (reqProp != null) { + params = reqProp } // not all schemas have the append property e.g. 
array schemas @@ -30,10 +33,10 @@ function validate(schema, property) { } } -module.exports.body = schema => { +export function body(schema: Joi.ObjectSchema) { return validate(schema, "body") } -module.exports.params = schema => { +export function params(schema: Joi.ObjectSchema) { return validate(schema, "params") } diff --git a/packages/backend-core/src/middleware/passport/datasource/google.js b/packages/backend-core/src/middleware/passport/datasource/google.ts similarity index 71% rename from packages/backend-core/src/middleware/passport/datasource/google.js rename to packages/backend-core/src/middleware/passport/datasource/google.ts index 7cfd7f55f6..65620d7aa3 100644 --- a/packages/backend-core/src/middleware/passport/datasource/google.js +++ b/packages/backend-core/src/middleware/passport/datasource/google.ts @@ -1,11 +1,15 @@ -const google = require("../google") +import * as google from "../google" +import { Cookie, Config } from "../../../constants" +import { clearCookie, getCookie } from "../../../utils" +import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db" +import environment from "../../../environment" +import { getGlobalDB } from "../../../tenancy" +import { BBContext, Database, SSOProfile } from "@budibase/types" const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy -const { Cookie, Config } = require("../../../constants") -const { clearCookie, getCookie } = require("../../../utils") -const { getScopedConfig, getPlatformUrl } = require("../../../db/utils") -const { doWithDB } = require("../../../db") -const environment = require("../../../environment") -const { getGlobalDB } = require("../../../tenancy") + +type Passport = { + authenticate: any +} async function fetchGoogleCreds() { // try and get the config from the tenant @@ -22,7 +26,11 @@ async function fetchGoogleCreds() { ) } -async function preAuth(passport, ctx, next) { +export async function preAuth( + passport: Passport, + ctx: BBContext, + next: Function +) { // get the relevant config const googleConfig = await fetchGoogleCreds() const platformUrl = await getPlatformUrl({ tenantAware: false }) @@ -41,7 +49,11 @@ async function preAuth(passport, ctx, next) { })(ctx, next) } -async function postAuth(passport, ctx, next) { +export async function postAuth( + passport: Passport, + ctx: BBContext, + next: Function +) { // get the relevant config const config = await fetchGoogleCreds() const platformUrl = await getPlatformUrl({ tenantAware: false }) @@ -56,15 +68,20 @@ async function postAuth(passport, ctx, next) { clientSecret: config.clientSecret, callbackURL: callbackUrl, }, - (accessToken, refreshToken, profile, done) => { + ( + accessToken: string, + refreshToken: string, + profile: SSOProfile, + done: Function + ) => { clearCookie(ctx, Cookie.DatasourceAuth) done(null, { accessToken, refreshToken }) } ), { successRedirect: "/", failureRedirect: "/error" }, - async (err, tokens) => { + async (err: any, tokens: string[]) => { // update the DB for the datasource with all the user info - await doWithDB(authStateCookie.appId, async db => { + await doWithDB(authStateCookie.appId, async (db: Database) => { const datasource = await db.get(authStateCookie.datasourceId) if (!datasource.config) { datasource.config = {} @@ -78,6 +95,3 @@ async function postAuth(passport, ctx, next) { } )(ctx, next) } - -exports.preAuth = preAuth -exports.postAuth = postAuth diff --git a/packages/backend-core/src/middleware/passport/google.js b/packages/backend-core/src/middleware/passport/google.ts 
similarity index 63% rename from packages/backend-core/src/middleware/passport/google.js rename to packages/backend-core/src/middleware/passport/google.ts index 7eb1215c1f..deba849233 100644 --- a/packages/backend-core/src/middleware/passport/google.js +++ b/packages/backend-core/src/middleware/passport/google.ts @@ -1,10 +1,15 @@ +import { ssoCallbackUrl } from "./utils" +import { authenticateThirdParty } from "./third-party-common" +import { ConfigType, GoogleConfig, Database, SSOProfile } from "@budibase/types" const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy -const { ssoCallbackUrl } = require("./utils") -const { authenticateThirdParty } = require("./third-party-common") -const { Config } = require("../../../constants") -const buildVerifyFn = saveUserFn => { - return (accessToken, refreshToken, profile, done) => { +export function buildVerifyFn(saveUserFn?: Function) { + return ( + accessToken: string, + refreshToken: string, + profile: SSOProfile, + done: Function + ) => { const thirdPartyUser = { provider: profile.provider, // should always be 'google' providerType: "google", @@ -31,7 +36,11 @@ const buildVerifyFn = saveUserFn => { * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport. * @returns Dynamically configured Passport Google Strategy */ -exports.strategyFactory = async function (config, callbackUrl, saveUserFn) { +export async function strategyFactory( + config: GoogleConfig["config"], + callbackUrl: string, + saveUserFn?: Function +) { try { const { clientID, clientSecret } = config @@ -50,18 +59,15 @@ exports.strategyFactory = async function (config, callbackUrl, saveUserFn) { }, verify ) - } catch (err) { + } catch (err: any) { console.error(err) - throw new Error( - `Error constructing google authentication strategy: ${err}`, - err - ) + throw new Error(`Error constructing google authentication strategy: ${err}`) } } -exports.getCallbackUrl = async function (db, config) { - return ssoCallbackUrl(db, config, Config.GOOGLE) +export async function getCallbackUrl( + db: Database, + config: { callbackURL?: string } +) { + return ssoCallbackUrl(db, config, ConfigType.GOOGLE) } - -// expose for testing -exports.buildVerifyFn = buildVerifyFn diff --git a/packages/backend-core/src/middleware/passport/jwt.js b/packages/backend-core/src/middleware/passport/jwt.js deleted file mode 100644 index 36316264b0..0000000000 --- a/packages/backend-core/src/middleware/passport/jwt.js +++ /dev/null @@ -1,18 +0,0 @@ -const { Cookie } = require("../../constants") -const env = require("../../environment") -const { authError } = require("./utils") - -exports.options = { - secretOrKey: env.JWT_SECRET, - jwtFromRequest: function (ctx) { - return ctx.cookies.get(Cookie.Auth) - }, -} - -exports.authenticate = async function (jwt, done) { - try { - return done(null, jwt) - } catch (err) { - return authError(done, "JWT invalid", err) - } -} diff --git a/packages/backend-core/src/middleware/passport/jwt.ts b/packages/backend-core/src/middleware/passport/jwt.ts new file mode 100644 index 0000000000..95dc8f2656 --- /dev/null +++ b/packages/backend-core/src/middleware/passport/jwt.ts @@ -0,0 +1,19 @@ +import { Cookie } from "../../constants" +import env from "../../environment" +import { authError } from "./utils" +import { BBContext } from "@budibase/types" + +export const options = { + secretOrKey: env.JWT_SECRET, + jwtFromRequest: function (ctx: BBContext) { + return ctx.cookies.get(Cookie.Auth) + }, +} + +export 
async function authenticate(jwt: Function, done: Function) { + try { + return done(null, jwt) + } catch (err) { + return authError(done, "JWT invalid", err) + } +} diff --git a/packages/backend-core/src/middleware/passport/local.js b/packages/backend-core/src/middleware/passport/local.ts similarity index 73% rename from packages/backend-core/src/middleware/passport/local.js rename to packages/backend-core/src/middleware/passport/local.ts index b955d29102..8b85d3734c 100644 --- a/packages/backend-core/src/middleware/passport/local.js +++ b/packages/backend-core/src/middleware/passport/local.ts @@ -1,18 +1,18 @@ +import { UserStatus } from "../../constants" +import { compare, newid } from "../../utils" +import env from "../../environment" +import * as users from "../../users" +import { authError } from "./utils" +import { createASession } from "../../security/sessions" +import { getTenantId } from "../../tenancy" +import { BBContext } from "@budibase/types" const jwt = require("jsonwebtoken") -const { UserStatus } = require("../../constants") -const { compare } = require("../../hashing") -const env = require("../../environment") -const users = require("../../users") -const { authError } = require("./utils") -const { newid } = require("../../hashing") -const { createASession } = require("../../security/sessions") -const { getTenantId } = require("../../tenancy") const INVALID_ERR = "Invalid credentials" const SSO_NO_PASSWORD = "SSO user does not have a password set" const EXPIRED = "This account has expired. Please reset your password" -exports.options = { +export const options = { passReqToCallback: true, } @@ -24,7 +24,12 @@ exports.options = { * @param {*} done callback from passport to return user information and errors * @returns The authenticated user, or errors if they occur */ -exports.authenticate = async function (ctx, email, password, done) { +export async function authenticate( + ctx: BBContext, + email: string, + password: string, + done: Function +) { if (!email) return authError(done, "Email Required") if (!password) return authError(done, "Password Required") @@ -56,9 +61,9 @@ exports.authenticate = async function (ctx, email, password, done) { const sessionId = newid() const tenantId = getTenantId() - await createASession(dbUser._id, { sessionId, tenantId }) + await createASession(dbUser._id!, { sessionId, tenantId }) - dbUser.token = jwt.sign( + const token = jwt.sign( { userId: dbUser._id, sessionId, @@ -69,7 +74,10 @@ exports.authenticate = async function (ctx, email, password, done) { // Remove users password in payload delete dbUser.password - return done(null, dbUser) + return done(null, { + ...dbUser, + token, + }) } else { return authError(done, INVALID_ERR) } diff --git a/packages/backend-core/src/middleware/passport/oidc.js b/packages/backend-core/src/middleware/passport/oidc.ts similarity index 73% rename from packages/backend-core/src/middleware/passport/oidc.js rename to packages/backend-core/src/middleware/passport/oidc.ts index 55a7033e40..0131737a2c 100644 --- a/packages/backend-core/src/middleware/passport/oidc.js +++ b/packages/backend-core/src/middleware/passport/oidc.ts @@ -1,10 +1,22 @@ -const fetch = require("node-fetch") +import fetch from "node-fetch" +import { authenticateThirdParty } from "./third-party-common" +import { ssoCallbackUrl } from "./utils" +import { + Config, + ConfigType, + OIDCInnerCfg, + Database, + SSOProfile, + ThirdPartyUser, +} from "@budibase/types" const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy -const 
{ authenticateThirdParty } = require("./third-party-common") -const { ssoCallbackUrl } = require("./utils") -const { Config } = require("../../../constants") -const buildVerifyFn = saveUserFn => { +type JwtClaims = { + preferred_username: string + email: string +} + +export function buildVerifyFn(saveUserFn?: Function) { /** * @param {*} issuer The identity provider base URL * @param {*} sub The user ID @@ -17,17 +29,17 @@ const buildVerifyFn = saveUserFn => { * @param {*} done The passport callback: err, user, info */ return async ( - issuer, - sub, - profile, - jwtClaims, - accessToken, - refreshToken, - idToken, - params, - done + issuer: string, + sub: string, + profile: SSOProfile, + jwtClaims: JwtClaims, + accessToken: string, + refreshToken: string, + idToken: string, + params: any, + done: Function ) => { - const thirdPartyUser = { + const thirdPartyUser: ThirdPartyUser = { // store the issuer info to enable sync in future provider: issuer, providerType: "oidc", @@ -53,7 +65,7 @@ const buildVerifyFn = saveUserFn => { * @param {*} profile The structured profile created by passport using the user info endpoint * @param {*} jwtClaims The claims returned in the id token */ -function getEmail(profile, jwtClaims) { +function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) { // profile not guaranteed to contain email e.g. github connected azure ad account if (profile._json.email) { return profile._json.email @@ -77,7 +89,7 @@ function getEmail(profile, jwtClaims) { ) } -function validEmail(value) { +function validEmail(value: string) { return ( value && !!value.match( @@ -91,19 +103,22 @@ function validEmail(value) { * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport. * @returns Dynamically configured Passport OIDC Strategy */ -exports.strategyFactory = async function (config, saveUserFn) { +export async function strategyFactory(config: Config, saveUserFn?: Function) { try { const verify = buildVerifyFn(saveUserFn) const strategy = new OIDCStrategy(config, verify) strategy.name = "oidc" return strategy - } catch (err) { + } catch (err: any) { console.error(err) - throw new Error("Error constructing OIDC authentication strategy", err) + throw new Error(`Error constructing OIDC authentication strategy - ${err}`) } } -exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) { +export async function fetchStrategyConfig( + enrichedConfig: OIDCInnerCfg, + callbackUrl?: string +) { try { const { clientID, clientSecret, configUrl } = enrichedConfig @@ -135,13 +150,15 @@ exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) { } } catch (err) { console.error(err) - throw new Error("Error constructing OIDC authentication configuration", err) + throw new Error( + `Error constructing OIDC authentication configuration - ${err}` + ) } } -exports.getCallbackUrl = async function (db, config) { - return ssoCallbackUrl(db, config, Config.OIDC) +export async function getCallbackUrl( + db: Database, + config: { callbackURL?: string } +) { + return ssoCallbackUrl(db, config, ConfigType.OIDC) } - -// expose for testing -exports.buildVerifyFn = buildVerifyFn diff --git a/packages/backend-core/src/middleware/passport/tests/third-party-common.spec.js b/packages/backend-core/src/middleware/passport/tests/third-party-common.spec.js index 9799045ffc..d377d602f1 100644 --- a/packages/backend-core/src/middleware/passport/tests/third-party-common.spec.js +++ 
b/packages/backend-core/src/middleware/passport/tests/third-party-common.spec.js @@ -4,7 +4,7 @@ const { data } = require("./utilities/mock-data") const { DEFAULT_TENANT_ID } = require("../../../constants") const { generateGlobalUserID } = require("../../../db/utils") -const { newid } = require("../../../hashing") +const { newid } = require("../../../utils") const { doWithGlobalDB, doInTenant } = require("../../../tenancy") const done = jest.fn() diff --git a/packages/backend-core/src/middleware/passport/third-party-common.js b/packages/backend-core/src/middleware/passport/third-party-common.ts similarity index 77% rename from packages/backend-core/src/middleware/passport/third-party-common.js rename to packages/backend-core/src/middleware/passport/third-party-common.ts index 1c5891fce7..8798ce5298 100644 --- a/packages/backend-core/src/middleware/passport/third-party-common.js +++ b/packages/backend-core/src/middleware/passport/third-party-common.ts @@ -1,21 +1,22 @@ -const env = require("../../environment") +import env from "../../environment" +import { generateGlobalUserID } from "../../db" +import { authError } from "./utils" +import { newid } from "../../utils" +import { createASession } from "../../security/sessions" +import * as users from "../../users" +import { getGlobalDB, getTenantId } from "../../tenancy" +import fetch from "node-fetch" +import { ThirdPartyUser } from "@budibase/types" const jwt = require("jsonwebtoken") -const { generateGlobalUserID } = require("../../db/utils") -const { authError } = require("./utils") -const { newid } = require("../../hashing") -const { createASession } = require("../../security/sessions") -const users = require("../../users") -const { getGlobalDB, getTenantId } = require("../../tenancy") -const fetch = require("node-fetch") /** * Common authentication logic for third parties. e.g. OAuth, OIDC. 
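* @example * // hypothetical verify-callback wiring (sketch only, using the names defined below): * // await authenticateThirdParty(thirdPartyUser, true, done, saveUserFn)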
*/ -exports.authenticateThirdParty = async function ( - thirdPartyUser, - requireLocalAccount = true, - done, - saveUserFn +export async function authenticateThirdParty( + thirdPartyUser: ThirdPartyUser, + requireLocalAccount: boolean = true, + done: Function, + saveUserFn?: Function ) { if (!saveUserFn) { throw new Error("Save user function must be provided") @@ -39,7 +40,7 @@ exports.authenticateThirdParty = async function ( // try to load by id try { dbUser = await db.get(userId) - } catch (err) { + } catch (err: any) { // abort when not 404 error if (!err.status || err.status !== 404) { return authError( @@ -81,7 +82,7 @@ exports.authenticateThirdParty = async function ( // create or sync the user try { await saveUserFn(dbUser, false, false) - } catch (err) { + } catch (err: any) { return authError(done, err) } @@ -104,13 +105,16 @@ exports.authenticateThirdParty = async function ( return done(null, dbUser) } -async function syncProfilePicture(user, thirdPartyUser) { - const pictureUrl = thirdPartyUser.profile._json.picture +async function syncProfilePicture( + user: ThirdPartyUser, + thirdPartyUser: ThirdPartyUser +) { + const pictureUrl = thirdPartyUser.profile?._json.picture if (pictureUrl) { const response = await fetch(pictureUrl) if (response.status === 200) { - const type = response.headers.get("content-type") + const type = response.headers.get("content-type") as string if (type.startsWith("image/")) { user.pictureUrl = pictureUrl } @@ -123,7 +127,7 @@ async function syncProfilePicture(user, thirdPartyUser) { /** * @returns a user that has been sync'd with third party information */ -async function syncUser(user, thirdPartyUser) { +async function syncUser(user: ThirdPartyUser, thirdPartyUser: ThirdPartyUser) { // provider user.provider = thirdPartyUser.provider user.providerType = thirdPartyUser.providerType diff --git a/packages/backend-core/src/middleware/passport/utils.js b/packages/backend-core/src/middleware/passport/utils.ts similarity index 64% rename from packages/backend-core/src/middleware/passport/utils.js rename to packages/backend-core/src/middleware/passport/utils.ts index ab199b9f2f..79e1717d18 100644 --- a/packages/backend-core/src/middleware/passport/utils.js +++ b/packages/backend-core/src/middleware/passport/utils.ts @@ -1,6 +1,6 @@ -const { isMultiTenant, getTenantId } = require("../../tenancy") -const { getScopedConfig } = require("../../db/utils") -const { Config } = require("../../constants") +import { isMultiTenant, getTenantId } from "../../tenancy" +import { getScopedConfig } from "../../db" +import { ConfigType, Database, Config } from "@budibase/types" /** * Utility to handle authentication errors. 
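* @example * // sketch: return authError(done, "Invalid credentials")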
@@ -10,7 +10,7 @@ const { Config } = require("../../constants") * @param {*} err (Optional) error that will be logged */ -exports.authError = function (done, message, err = null) { +export function authError(done: Function, message: string, err?: any) { return done( err, null, // never return a user ) } @@ -18,13 +18,17 @@ -exports.ssoCallbackUrl = async (db, config, type) => { +export async function ssoCallbackUrl( + db: Database, + config: { callbackURL?: string }, + type: ConfigType +) { // in case there is a callback URL from before if (config && config.callbackURL) { return config.callbackURL } const publicConfig = await getScopedConfig(db, { - type: Config.SETTINGS, + type: ConfigType.SETTINGS, }) let callbackUrl = `/api/global/auth` diff --git a/packages/backend-core/src/objectStore/index.ts b/packages/backend-core/src/objectStore/index.ts index a1193c0303..2971834f0e 100644 --- a/packages/backend-core/src/objectStore/index.ts +++ b/packages/backend-core/src/objectStore/index.ts @@ -1,426 +1,2 @@ -const sanitize = require("sanitize-s3-objectkey") -import AWS from "aws-sdk" -import stream from "stream" -import fetch from "node-fetch" -import tar from "tar-fs" -const zlib = require("zlib") -import { promisify } from "util" -import { join } from "path" -import fs from "fs" -import env from "../environment" -import { budibaseTempDir, ObjectStoreBuckets } from "./utils" -import { v4 } from "uuid" -import { APP_PREFIX, APP_DEV_PREFIX } from "../db/utils" - -const streamPipeline = promisify(stream.pipeline) -// use this as a temporary store of buckets that are being created -const STATE = { - bucketCreationPromises: {}, -} - -type ListParams = { - ContinuationToken?: string -} - -type UploadParams = { - bucket: string - filename: string - path: string - type?: string - // can be undefined, we will remove it - metadata?: { - [key: string]: string | undefined - } -} - -const CONTENT_TYPE_MAP: any = { - txt: "text/plain", - html: "text/html", - css: "text/css", - js: "application/javascript", - json: "application/json", - gz: "application/gzip", -} -const STRING_CONTENT_TYPES = [ - CONTENT_TYPE_MAP.html, - CONTENT_TYPE_MAP.css, - CONTENT_TYPE_MAP.js, - CONTENT_TYPE_MAP.json, -] - -// does normal sanitization and then swaps dev apps to apps -export function sanitizeKey(input: string) { - return sanitize(sanitizeBucket(input)).replace(/\\/g, "/") -} - -// simply handles the dev app to app conversion -export function sanitizeBucket(input: string) { - return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX) -} - -function publicPolicy(bucketName: string) { - return { - Version: "2012-10-17", - Statement: [ - { - Effect: "Allow", - Principal: { - AWS: ["*"], - }, - Action: "s3:GetObject", - Resource: [`arn:aws:s3:::${bucketName}/*`], - }, - ], - } -} - -const PUBLIC_BUCKETS = [ - ObjectStoreBuckets.APPS, - ObjectStoreBuckets.GLOBAL, - ObjectStoreBuckets.PLUGINS, -] - -/** - * Gets a connection to the object store using the S3 SDK. - * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from. - * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
- * @constructor - */ -export const ObjectStore = (bucket: string) => { - const config: any = { - s3ForcePathStyle: true, - signatureVersion: "v4", - apiVersion: "2006-03-01", - accessKeyId: env.MINIO_ACCESS_KEY, - secretAccessKey: env.MINIO_SECRET_KEY, - region: env.AWS_REGION, - } - if (bucket) { - config.params = { - Bucket: sanitizeBucket(bucket), - } - } - if (env.MINIO_URL) { - config.endpoint = env.MINIO_URL - } - return new AWS.S3(config) -} - -/** - * Given an object store and a bucket name this will make sure the bucket exists, - * if it does not exist then it will create it. - */ -export const makeSureBucketExists = async (client: any, bucketName: string) => { - bucketName = sanitizeBucket(bucketName) - try { - await client - .headBucket({ - Bucket: bucketName, - }) - .promise() - } catch (err: any) { - const promises: any = STATE.bucketCreationPromises - const doesntExist = err.statusCode === 404, - noAccess = err.statusCode === 403 - if (promises[bucketName]) { - await promises[bucketName] - } else if (doesntExist || noAccess) { - if (doesntExist) { - // bucket doesn't exist create it - promises[bucketName] = client - .createBucket({ - Bucket: bucketName, - }) - .promise() - await promises[bucketName] - delete promises[bucketName] - } - // public buckets are quite hidden in the system, make sure - // no bucket is set accidentally - if (PUBLIC_BUCKETS.includes(bucketName)) { - await client - .putBucketPolicy({ - Bucket: bucketName, - Policy: JSON.stringify(publicPolicy(bucketName)), - }) - .promise() - } - } else { - throw new Error("Unable to write to object store bucket.") - } - } -} - -/** - * Uploads the contents of a file given the required parameters, useful when - * temp files in use (for example file uploaded as an attachment). - */ -export const upload = async ({ - bucket: bucketName, - filename, - path, - type, - metadata, -}: UploadParams) => { - const extension = filename.split(".").pop() - const fileBytes = fs.readFileSync(path) - - const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) - - let contentType = type - if (!contentType) { - contentType = extension - ? CONTENT_TYPE_MAP[extension.toLowerCase()] - : CONTENT_TYPE_MAP.txt - } - const config: any = { - // windows file paths need to be converted to forward slashes for s3 - Key: sanitizeKey(filename), - Body: fileBytes, - ContentType: contentType, - } - if (metadata && typeof metadata === "object") { - // remove any nullish keys from the metadata object, as these may be considered invalid - for (let key of Object.keys(metadata)) { - if (!metadata[key] || typeof metadata[key] !== "string") { - delete metadata[key] - } - } - config.Metadata = metadata - } - return objectStore.upload(config).promise() -} - -/** - * Similar to the upload function but can be used to send a file stream - * through to the object store. 
- */ -export const streamUpload = async ( - bucketName: string, - filename: string, - stream: any, - extra = {} -) => { - const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) - - // Set content type for certain known extensions - if (filename?.endsWith(".js")) { - extra = { - ...extra, - ContentType: "application/javascript", - } - } else if (filename?.endsWith(".svg")) { - extra = { - ...extra, - ContentType: "image", - } - } - - const params = { - Bucket: sanitizeBucket(bucketName), - Key: sanitizeKey(filename), - Body: stream, - ...extra, - } - return objectStore.upload(params).promise() -} - -/** - * retrieves the contents of a file from the object store, if it is a known content type it - * will be converted, otherwise it will be returned as a buffer stream. - */ -export const retrieve = async (bucketName: string, filepath: string) => { - const objectStore = ObjectStore(bucketName) - const params = { - Bucket: sanitizeBucket(bucketName), - Key: sanitizeKey(filepath), - } - const response: any = await objectStore.getObject(params).promise() - // currently these are all strings - if (STRING_CONTENT_TYPES.includes(response.ContentType)) { - return response.Body.toString("utf8") - } else { - return response.Body - } -} - -export const listAllObjects = async (bucketName: string, path: string) => { - const objectStore = ObjectStore(bucketName) - const list = (params: ListParams = {}) => { - return objectStore - .listObjectsV2({ - ...params, - Bucket: sanitizeBucket(bucketName), - Prefix: sanitizeKey(path), - }) - .promise() - } - let isTruncated = false, - token, - objects: AWS.S3.Types.Object[] = [] - do { - let params: ListParams = {} - if (token) { - params.ContinuationToken = token - } - const response = await list(params) - if (response.Contents) { - objects = objects.concat(response.Contents) - } - isTruncated = !!response.IsTruncated - } while (isTruncated) - return objects -} - -/** - * Same as retrieval function but puts to a temporary file. - */ -export const retrieveToTmp = async (bucketName: string, filepath: string) => { - bucketName = sanitizeBucket(bucketName) - filepath = sanitizeKey(filepath) - const data = await retrieve(bucketName, filepath) - const outputPath = join(budibaseTempDir(), v4()) - fs.writeFileSync(outputPath, data) - return outputPath -} - -export const retrieveDirectory = async (bucketName: string, path: string) => { - let writePath = join(budibaseTempDir(), v4()) - fs.mkdirSync(writePath) - const objects = await listAllObjects(bucketName, path) - let fullObjects = await Promise.all( - objects.map(obj => retrieve(bucketName, obj.Key!)) - ) - let count = 0 - for (let obj of objects) { - const filename = obj.Key! - const data = fullObjects[count++] - const possiblePath = filename.split("/") - if (possiblePath.length > 1) { - const dirs = possiblePath.slice(0, possiblePath.length - 1) - fs.mkdirSync(join(writePath, ...dirs), { recursive: true }) - } - fs.writeFileSync(join(writePath, ...possiblePath), data) - } - return writePath -} - -/** - * Delete a single file. 
- */ -export const deleteFile = async (bucketName: string, filepath: string) => { - const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) - const params = { - Bucket: bucketName, - Key: filepath, - } - return objectStore.deleteObject(params) -} - -export const deleteFiles = async (bucketName: string, filepaths: string[]) => { - const objectStore = ObjectStore(bucketName) - await makeSureBucketExists(objectStore, bucketName) - const params = { - Bucket: bucketName, - Delete: { - Objects: filepaths.map((path: any) => ({ Key: path })), - }, - } - return objectStore.deleteObjects(params).promise() -} - -/** - * Delete a path, including everything within. - */ -export const deleteFolder = async ( - bucketName: string, - folder: string -): Promise => { - bucketName = sanitizeBucket(bucketName) - folder = sanitizeKey(folder) - const client = ObjectStore(bucketName) - const listParams = { - Bucket: bucketName, - Prefix: folder, - } - - let response: any = await client.listObjects(listParams).promise() - if (response.Contents.length === 0) { - return - } - const deleteParams: any = { - Bucket: bucketName, - Delete: { - Objects: [], - }, - } - - response.Contents.forEach((content: any) => { - deleteParams.Delete.Objects.push({ Key: content.Key }) - }) - - response = await client.deleteObjects(deleteParams).promise() - // can only empty 1000 items at once - if (response.Deleted.length === 1000) { - return deleteFolder(bucketName, folder) - } -} - -export const uploadDirectory = async ( - bucketName: string, - localPath: string, - bucketPath: string -) => { - bucketName = sanitizeBucket(bucketName) - let uploads = [] - const files = fs.readdirSync(localPath, { withFileTypes: true }) - for (let file of files) { - const path = sanitizeKey(join(bucketPath, file.name)) - const local = join(localPath, file.name) - if (file.isDirectory()) { - uploads.push(uploadDirectory(bucketName, local, path)) - } else { - uploads.push(streamUpload(bucketName, path, fs.createReadStream(local))) - } - } - await Promise.all(uploads) - return files -} - -export const downloadTarballDirect = async ( - url: string, - path: string, - headers = {} -) => { - path = sanitizeKey(path) - const response = await fetch(url, { headers }) - if (!response.ok) { - throw new Error(`unexpected response ${response.statusText}`) - } - - await streamPipeline(response.body, zlib.Unzip(), tar.extract(path)) -} - -export const downloadTarball = async ( - url: string, - bucketName: string, - path: string -) => { - bucketName = sanitizeBucket(bucketName) - path = sanitizeKey(path) - const response = await fetch(url) - if (!response.ok) { - throw new Error(`unexpected response ${response.statusText}`) - } - - const tmpPath = join(budibaseTempDir(), path) - await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath)) - if (!env.isTest() && env.SELF_HOSTED) { - await uploadDirectory(bucketName, tmpPath, path) - } - // return the temporary path incase there is a use for it - return tmpPath -} +export * from "./objectStore" +export * from "./utils" diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts new file mode 100644 index 0000000000..2ae8848c53 --- /dev/null +++ b/packages/backend-core/src/objectStore/objectStore.ts @@ -0,0 +1,426 @@ +const sanitize = require("sanitize-s3-objectkey") +import AWS from "aws-sdk" +import stream from "stream" +import fetch from "node-fetch" +import tar from "tar-fs" +const zlib = require("zlib") 
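+// reviewer note: index.ts above is now a pure barrel, so existing imports keep resolving - e.g. (hypothetical sketch): import { upload, retrieve, ObjectStoreBuckets } from "../objectStore"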
+import { promisify } from "util" +import { join } from "path" +import fs from "fs" +import env from "../environment" +import { budibaseTempDir, ObjectStoreBuckets } from "./utils" +import { v4 } from "uuid" +import { APP_PREFIX, APP_DEV_PREFIX } from "../db" + +const streamPipeline = promisify(stream.pipeline) +// use this as a temporary store of buckets that are being created +const STATE = { + bucketCreationPromises: {}, +} + +type ListParams = { + ContinuationToken?: string +} + +type UploadParams = { + bucket: string + filename: string + path: string + type?: string + // can be undefined, we will remove it + metadata?: { + [key: string]: string | undefined + } +} + +const CONTENT_TYPE_MAP: any = { + txt: "text/plain", + html: "text/html", + css: "text/css", + js: "application/javascript", + json: "application/json", + gz: "application/gzip", +} +const STRING_CONTENT_TYPES = [ + CONTENT_TYPE_MAP.html, + CONTENT_TYPE_MAP.css, + CONTENT_TYPE_MAP.js, + CONTENT_TYPE_MAP.json, +] + +// does normal sanitization and then swaps dev apps to apps +export function sanitizeKey(input: string) { + return sanitize(sanitizeBucket(input)).replace(/\\/g, "/") +} + +// simply handles the dev app to app conversion +export function sanitizeBucket(input: string) { + return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX) +} + +function publicPolicy(bucketName: string) { + return { + Version: "2012-10-17", + Statement: [ + { + Effect: "Allow", + Principal: { + AWS: ["*"], + }, + Action: "s3:GetObject", + Resource: [`arn:aws:s3:::${bucketName}/*`], + }, + ], + } +} + +const PUBLIC_BUCKETS = [ + ObjectStoreBuckets.APPS, + ObjectStoreBuckets.GLOBAL, + ObjectStoreBuckets.PLUGINS, +] + +/** + * Gets a connection to the object store using the S3 SDK. + * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from. + * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage. + * @constructor + */ +export const ObjectStore = (bucket: string) => { + const config: any = { + s3ForcePathStyle: true, + signatureVersion: "v4", + apiVersion: "2006-03-01", + accessKeyId: env.MINIO_ACCESS_KEY, + secretAccessKey: env.MINIO_SECRET_KEY, + region: env.AWS_REGION, + } + if (bucket) { + config.params = { + Bucket: sanitizeBucket(bucket), + } + } + if (env.MINIO_URL) { + config.endpoint = env.MINIO_URL + } + return new AWS.S3(config) +} + +/** + * Given an object store and a bucket name this will make sure the bucket exists, + * if it does not exist then it will create it. 
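+ * @example + * // hypothetical usage (sketch): + * // await makeSureBucketExists(ObjectStore("apps"), "apps")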
+ */ +export const makeSureBucketExists = async (client: any, bucketName: string) => { + bucketName = sanitizeBucket(bucketName) + try { + await client + .headBucket({ + Bucket: bucketName, + }) + .promise() + } catch (err: any) { + const promises: any = STATE.bucketCreationPromises + const doesntExist = err.statusCode === 404, + noAccess = err.statusCode === 403 + if (promises[bucketName]) { + await promises[bucketName] + } else if (doesntExist || noAccess) { + if (doesntExist) { + // bucket doesn't exist, create it + promises[bucketName] = client + .createBucket({ + Bucket: bucketName, + }) + .promise() + await promises[bucketName] + delete promises[bucketName] + } + // public buckets are quite hidden in the system, make sure + // no bucket is set accidentally + if (PUBLIC_BUCKETS.includes(bucketName)) { + await client + .putBucketPolicy({ + Bucket: bucketName, + Policy: JSON.stringify(publicPolicy(bucketName)), + }) + .promise() + } + } else { + throw new Error("Unable to write to object store bucket.") + } + } +} + +/** + * Uploads the contents of a file given the required parameters, useful when + * temp files are in use (for example a file uploaded as an attachment). + */ +export const upload = async ({ + bucket: bucketName, + filename, + path, + type, + metadata, +}: UploadParams) => { + const extension = filename.split(".").pop() + const fileBytes = fs.readFileSync(path) + + const objectStore = ObjectStore(bucketName) + await makeSureBucketExists(objectStore, bucketName) + + let contentType = type + if (!contentType) { + contentType = extension + ? CONTENT_TYPE_MAP[extension.toLowerCase()] + : CONTENT_TYPE_MAP.txt + } + const config: any = { + // windows file paths need to be converted to forward slashes for s3 + Key: sanitizeKey(filename), + Body: fileBytes, + ContentType: contentType, + } + if (metadata && typeof metadata === "object") { + // remove any nullish keys from the metadata object, as these may be considered invalid + for (let key of Object.keys(metadata)) { + if (!metadata[key] || typeof metadata[key] !== "string") { + delete metadata[key] + } + } + config.Metadata = metadata + } + return objectStore.upload(config).promise() +} + +/** + * Similar to the upload function but can be used to send a file stream + * through to the object store. + */ +export const streamUpload = async ( + bucketName: string, + filename: string, + stream: any, + extra = {} +) => { + const objectStore = ObjectStore(bucketName) + await makeSureBucketExists(objectStore, bucketName) + + // Set content type for certain known extensions + if (filename?.endsWith(".js")) { + extra = { + ...extra, + ContentType: "application/javascript", + } + } else if (filename?.endsWith(".svg")) { + extra = { + ...extra, + ContentType: "image/svg+xml", // svg needs its full MIME type + } + } + + const params = { + Bucket: sanitizeBucket(bucketName), + Key: sanitizeKey(filename), + Body: stream, + ...extra, + } + return objectStore.upload(params).promise() +} + +/** + * Retrieves the contents of a file from the object store, if it is a known content type it + * will be converted, otherwise it will be returned as a buffer stream.
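+ * @example + * // hypothetical usage (sketch): + * // const json = await retrieve(ObjectStoreBuckets.APPS, "manifest.json")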
+ */ +export const retrieve = async (bucketName: string, filepath: string) => { + const objectStore = ObjectStore(bucketName) + const params = { + Bucket: sanitizeBucket(bucketName), + Key: sanitizeKey(filepath), + } + const response: any = await objectStore.getObject(params).promise() + // currently these are all strings + if (STRING_CONTENT_TYPES.includes(response.ContentType)) { + return response.Body.toString("utf8") + } else { + return response.Body + } +} + +export const listAllObjects = async (bucketName: string, path: string) => { + const objectStore = ObjectStore(bucketName) + const list = (params: ListParams = {}) => { + return objectStore + .listObjectsV2({ + ...params, + Bucket: sanitizeBucket(bucketName), + Prefix: sanitizeKey(path), + }) + .promise() + } + let isTruncated = false, + token, + objects: AWS.S3.Types.Object[] = [] + do { + let params: ListParams = {} + if (token) { + params.ContinuationToken = token + } + const response = await list(params) + if (response.Contents) { + objects = objects.concat(response.Contents) + } + token = response.NextContinuationToken + isTruncated = !!response.IsTruncated + } while (isTruncated) + return objects +} + +/** + * Same as retrieval function but puts to a temporary file. + */ +export const retrieveToTmp = async (bucketName: string, filepath: string) => { + bucketName = sanitizeBucket(bucketName) + filepath = sanitizeKey(filepath) + const data = await retrieve(bucketName, filepath) + const outputPath = join(budibaseTempDir(), v4()) + fs.writeFileSync(outputPath, data) + return outputPath +} + +export const retrieveDirectory = async (bucketName: string, path: string) => { + let writePath = join(budibaseTempDir(), v4()) + fs.mkdirSync(writePath) + const objects = await listAllObjects(bucketName, path) + let fullObjects = await Promise.all( + objects.map(obj => retrieve(bucketName, obj.Key!)) + ) + let count = 0 + for (let obj of objects) { + const filename = obj.Key! + const data = fullObjects[count++] + const possiblePath = filename.split("/") + if (possiblePath.length > 1) { + const dirs = possiblePath.slice(0, possiblePath.length - 1) + fs.mkdirSync(join(writePath, ...dirs), { recursive: true }) + } + fs.writeFileSync(join(writePath, ...possiblePath), data) + } + return writePath +} + +/** + * Delete a single file. + */ +export const deleteFile = async (bucketName: string, filepath: string) => { + const objectStore = ObjectStore(bucketName) + await makeSureBucketExists(objectStore, bucketName) + const params = { + Bucket: bucketName, + Key: filepath, + } + return objectStore.deleteObject(params).promise() +} + +export const deleteFiles = async (bucketName: string, filepaths: string[]) => { + const objectStore = ObjectStore(bucketName) + await makeSureBucketExists(objectStore, bucketName) + const params = { + Bucket: bucketName, + Delete: { + Objects: filepaths.map((path: any) => ({ Key: path })), + }, + } + return objectStore.deleteObjects(params).promise() +} + +/** + * Delete a path, including everything within.
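+ * @example + * // hypothetical usage (sketch) - removes every object under the prefix: + * // await deleteFolder(ObjectStoreBuckets.APPS, "old-folder/")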
+ */ +export const deleteFolder = async ( + bucketName: string, + folder: string +): Promise<any> => { + bucketName = sanitizeBucket(bucketName) + folder = sanitizeKey(folder) + const client = ObjectStore(bucketName) + const listParams = { + Bucket: bucketName, + Prefix: folder, + } + + let response: any = await client.listObjects(listParams).promise() + if (response.Contents.length === 0) { + return + } + const deleteParams: any = { + Bucket: bucketName, + Delete: { + Objects: [], + }, + } + + response.Contents.forEach((content: any) => { + deleteParams.Delete.Objects.push({ Key: content.Key }) + }) + + response = await client.deleteObjects(deleteParams).promise() + // can only empty 1000 items at once + if (response.Deleted.length === 1000) { + return deleteFolder(bucketName, folder) + } +} + +export const uploadDirectory = async ( + bucketName: string, + localPath: string, + bucketPath: string +) => { + bucketName = sanitizeBucket(bucketName) + let uploads = [] + const files = fs.readdirSync(localPath, { withFileTypes: true }) + for (let file of files) { + const path = sanitizeKey(join(bucketPath, file.name)) + const local = join(localPath, file.name) + if (file.isDirectory()) { + uploads.push(uploadDirectory(bucketName, local, path)) + } else { + uploads.push(streamUpload(bucketName, path, fs.createReadStream(local))) + } + } + await Promise.all(uploads) + return files +} + +export const downloadTarballDirect = async ( + url: string, + path: string, + headers = {} +) => { + path = sanitizeKey(path) + const response = await fetch(url, { headers }) + if (!response.ok) { + throw new Error(`unexpected response ${response.statusText}`) + } + + await streamPipeline(response.body, zlib.Unzip(), tar.extract(path)) +} + +export const downloadTarball = async ( + url: string, + bucketName: string, + path: string +) => { + bucketName = sanitizeBucket(bucketName) + path = sanitizeKey(path) + const response = await fetch(url) + if (!response.ok) { + throw new Error(`unexpected response ${response.statusText}`) + } + + const tmpPath = join(budibaseTempDir(), path) + await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath)) + if (!env.isTest() && env.SELF_HOSTED) { + await uploadDirectory(bucketName, tmpPath, path) + } + // return the temporary path in case there is a use for it + return tmpPath +} diff --git a/packages/backend-core/src/objectStore/utils.js b/packages/backend-core/src/objectStore/utils.ts similarity index 71% rename from packages/backend-core/src/objectStore/utils.js rename to packages/backend-core/src/objectStore/utils.ts index 2d4faf55d1..f3c9e93943 100644 --- a/packages/backend-core/src/objectStore/utils.js +++ b/packages/backend-core/src/objectStore/utils.ts @@ -1,14 +1,15 @@ -const { join } = require("path") -const { tmpdir } = require("os") -const fs = require("fs") -const env = require("../environment") +import { join } from "path" +import { tmpdir } from "os" +import fs from "fs" +import env from "../environment" /**************************************************** * NOTE: When adding a new bucket - make * sure that S3 usages (like budibase-infra) * * have been updated to have a unique bucket name.
* ****************************************************/ -exports.ObjectStoreBuckets = { +// can't be an enum - only numbers can be used for computed types +export const ObjectStoreBuckets = { BACKUPS: env.BACKUPS_BUCKET_NAME, APPS: env.APPS_BUCKET_NAME, TEMPLATES: env.TEMPLATES_BUCKET_NAME, @@ -22,6 +23,6 @@ if (!fs.existsSync(bbTmp)) { fs.mkdirSync(bbTmp) } -exports.budibaseTempDir = function () { +export function budibaseTempDir() { return bbTmp } diff --git a/packages/backend-core/src/pino.js b/packages/backend-core/src/pino.js deleted file mode 100644 index 69962b3841..0000000000 --- a/packages/backend-core/src/pino.js +++ /dev/null @@ -1,11 +0,0 @@ -const env = require("./environment") - -exports.pinoSettings = () => ({ - prettyPrint: { - levelFirst: true, - }, - level: env.LOG_LEVEL || "error", - autoLogging: { - ignore: req => req.url.includes("/health"), - }, -}) diff --git a/packages/backend-core/src/pino.ts b/packages/backend-core/src/pino.ts new file mode 100644 index 0000000000..4140f428e1 --- /dev/null +++ b/packages/backend-core/src/pino.ts @@ -0,0 +1,13 @@ +import env from "./environment" + +export function pinoSettings() { + return { + prettyPrint: { + levelFirst: true, + }, + level: env.LOG_LEVEL || "error", + autoLogging: { + ignore: (req: { url: string }) => req.url.includes("/health"), + }, + } +} diff --git a/packages/backend-core/src/pkg/cache.ts b/packages/backend-core/src/pkg/cache.ts deleted file mode 100644 index c40a686260..0000000000 --- a/packages/backend-core/src/pkg/cache.ts +++ /dev/null @@ -1,13 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -import * as generic from "../cache/generic" -import * as user from "../cache/user" -import * as app from "../cache/appMetadata" -import * as writethrough from "../cache/writethrough" - -export = { - app, - user, - writethrough, - ...generic, -} diff --git a/packages/backend-core/src/pkg/context.ts b/packages/backend-core/src/pkg/context.ts deleted file mode 100644 index 4915cc6e41..0000000000 --- a/packages/backend-core/src/pkg/context.ts +++ /dev/null @@ -1,26 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -import { - getAppDB, - getDevAppDB, - getProdAppDB, - getAppId, - updateAppId, - doInAppContext, - doInTenant, - doInContext, -} from "../context" - -import * as identity from "../context/identity" - -export = { - getAppDB, - getDevAppDB, - getProdAppDB, - getAppId, - updateAppId, - doInAppContext, - doInTenant, - doInContext, - identity, -} diff --git a/packages/backend-core/src/pkg/objectStore.ts b/packages/backend-core/src/pkg/objectStore.ts deleted file mode 100644 index 0447c6b3c2..0000000000 --- a/packages/backend-core/src/pkg/objectStore.ts +++ /dev/null @@ -1,4 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -export * from "../objectStore" -export * from "../objectStore/utils" diff --git a/packages/backend-core/src/pkg/redis.ts b/packages/backend-core/src/pkg/redis.ts deleted file mode 100644 index 297c2b54f4..0000000000 --- a/packages/backend-core/src/pkg/redis.ts +++ /dev/null @@ -1,13 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -import Client from "../redis" -import utils from "../redis/utils" -import clients from "../redis/init" -import * as 
redlock from "../redis/redlock" - -export = { - Client, - utils, - clients, - redlock, -} diff --git a/packages/backend-core/src/pkg/utils.ts b/packages/backend-core/src/pkg/utils.ts deleted file mode 100644 index 5272046524..0000000000 --- a/packages/backend-core/src/pkg/utils.ts +++ /dev/null @@ -1,4 +0,0 @@ -// Mimic the outer package export for usage in index.ts -// The outer exports can't be used as they now reference dist directly -export * from "../utils" -export * from "../hashing" diff --git a/packages/backend-core/src/plugin/utils.js b/packages/backend-core/src/plugin/utils.ts similarity index 89% rename from packages/backend-core/src/plugin/utils.js rename to packages/backend-core/src/plugin/utils.ts index b943747483..7b62248bb5 100644 --- a/packages/backend-core/src/plugin/utils.js +++ b/packages/backend-core/src/plugin/utils.ts @@ -1,9 +1,5 @@ -const { - DatasourceFieldType, - QueryType, - PluginType, -} = require("@budibase/types") -const joi = require("joi") +import { DatasourceFieldType, QueryType, PluginType } from "@budibase/types" +import joi from "joi" const DATASOURCE_TYPES = [ "Relational", @@ -14,14 +10,14 @@ const DATASOURCE_TYPES = [ "API", ] -function runJoi(validator, schema) { +function runJoi(validator: joi.Schema, schema: any) { const { error } = validator.validate(schema) if (error) { throw error } } -function validateComponent(schema) { +function validateComponent(schema: any) { const validator = joi.object({ type: joi.string().allow("component").required(), metadata: joi.object().unknown(true).required(), @@ -37,7 +33,7 @@ function validateComponent(schema) { runJoi(validator, schema) } -function validateDatasource(schema) { +function validateDatasource(schema: any) { const fieldValidator = joi.object({ type: joi .string() @@ -86,7 +82,7 @@ function validateDatasource(schema) { runJoi(validator, schema) } -exports.validate = schema => { +export function validate(schema: any) { switch (schema?.type) { case PluginType.COMPONENT: validateComponent(schema) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index eb054766d7..acfff1c7b8 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,5 @@ import events from "events" -import { timeout } from "../../utils" +import { timeout } from "../utils" /** * Bull works with a Job wrapper around all messages that contains a lot more information about diff --git a/packages/backend-core/src/redis/index.ts b/packages/backend-core/src/redis/index.ts index 8a15320ff3..ea4379f048 100644 --- a/packages/backend-core/src/redis/index.ts +++ b/packages/backend-core/src/redis/index.ts @@ -1,278 +1,6 @@ -import RedisWrapper from "../redis" -const env = require("../environment") -// ioredis mock is all in memory -const Redis = env.isTest() ? 
require("ioredis-mock") : require("ioredis") -const { - addDbPrefix, - removeDbPrefix, - getRedisOptions, - SEPARATOR, - SelectableDatabases, -} = require("./utils") - -const RETRY_PERIOD_MS = 2000 -const STARTUP_TIMEOUT_MS = 5000 -const CLUSTERED = false -const DEFAULT_SELECT_DB = SelectableDatabases.DEFAULT - -// for testing just generate the client once -let CLOSED = false -let CLIENTS: { [key: number]: any } = {} -// if in test always connected -let CONNECTED = env.isTest() - -function pickClient(selectDb: number): any { - return CLIENTS[selectDb] -} - -function connectionError( - selectDb: number, - timeout: NodeJS.Timeout, - err: Error | string -) { - // manually shut down, ignore errors - if (CLOSED) { - return - } - pickClient(selectDb).disconnect() - CLOSED = true - // always clear this on error - clearTimeout(timeout) - CONNECTED = false - console.error("Redis connection failed - " + err) - setTimeout(() => { - init() - }, RETRY_PERIOD_MS) -} - -/** - * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise - * will return the ioredis client which will be ready to use. - */ -function init(selectDb = DEFAULT_SELECT_DB) { - let timeout: NodeJS.Timeout - CLOSED = false - let client = pickClient(selectDb) - // already connected, ignore - if (client && CONNECTED) { - return - } - // testing uses a single in memory client - if (env.isTest()) { - CLIENTS[selectDb] = new Redis(getRedisOptions()) - } - // start the timer - only allowed 5 seconds to connect - timeout = setTimeout(() => { - if (!CONNECTED) { - connectionError( - selectDb, - timeout, - "Did not successfully connect in timeout" - ) - } - }, STARTUP_TIMEOUT_MS) - - // disconnect any lingering client - if (client) { - client.disconnect() - } - const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED) - - if (CLUSTERED) { - client = new Redis.Cluster([{ host, port }], opts) - } else if (redisProtocolUrl) { - client = new Redis(redisProtocolUrl) - } else { - client = new Redis(opts) - } - // attach handlers - client.on("end", (err: Error) => { - connectionError(selectDb, timeout, err) - }) - client.on("error", (err: Error) => { - connectionError(selectDb, timeout, err) - }) - client.on("connect", () => { - clearTimeout(timeout) - CONNECTED = true - }) - CLIENTS[selectDb] = client -} - -function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) { - return new Promise(resolve => { - if (pickClient(selectDb) == null) { - init() - } else if (CONNECTED) { - resolve("") - return - } - // check if the connection is ready - const interval = setInterval(() => { - if (CONNECTED) { - clearInterval(interval) - resolve("") - } - }, 500) - }) -} - -/** - * Utility function, takes a redis stream and converts it to a promisified response - - * this can only be done with redis streams because they will have an end. - * @param stream A redis stream, specifically as this type of stream will have an end. - * @param client The client to use for further lookups. 
- * @return {Promise} The final output of the stream - */ -function promisifyStream(stream: any, client: RedisWrapper) { - return new Promise((resolve, reject) => { - const outputKeys = new Set() - stream.on("data", (keys: string[]) => { - keys.forEach(key => { - outputKeys.add(key) - }) - }) - stream.on("error", (err: Error) => { - reject(err) - }) - stream.on("end", async () => { - const keysArray: string[] = Array.from(outputKeys) as string[] - try { - let getPromises = [] - for (let key of keysArray) { - getPromises.push(client.get(key)) - } - const jsonArray = await Promise.all(getPromises) - resolve( - keysArray.map(key => ({ - key: removeDbPrefix(key), - value: JSON.parse(jsonArray.shift()), - })) - ) - } catch (err) { - reject(err) - } - }) - }) -} - -export = class RedisWrapper { - _db: string - _select: number - - constructor(db: string, selectDb: number | null = null) { - this._db = db - this._select = selectDb || DEFAULT_SELECT_DB - } - - getClient() { - return pickClient(this._select) - } - - async init() { - CLOSED = false - init(this._select) - await waitForConnection(this._select) - return this - } - - async finish() { - CLOSED = true - this.getClient().disconnect() - } - - async scan(key = ""): Promise { - const db = this._db - key = `${db}${SEPARATOR}${key}` - let stream - if (CLUSTERED) { - let node = this.getClient().nodes("master") - stream = node[0].scanStream({ match: key + "*", count: 100 }) - } else { - stream = this.getClient().scanStream({ match: key + "*", count: 100 }) - } - return promisifyStream(stream, this.getClient()) - } - - async keys(pattern: string) { - const db = this._db - return this.getClient().keys(addDbPrefix(db, pattern)) - } - - async get(key: string) { - const db = this._db - let response = await this.getClient().get(addDbPrefix(db, key)) - // overwrite the prefixed key - if (response != null && response.key) { - response.key = key - } - // if its not an object just return the response - try { - return JSON.parse(response) - } catch (err) { - return response - } - } - - async bulkGet(keys: string[]) { - const db = this._db - if (keys.length === 0) { - return {} - } - const prefixedKeys = keys.map(key => addDbPrefix(db, key)) - let response = await this.getClient().mget(prefixedKeys) - if (Array.isArray(response)) { - let final: any = {} - let count = 0 - for (let result of response) { - if (result) { - let parsed - try { - parsed = JSON.parse(result) - } catch (err) { - parsed = result - } - final[keys[count]] = parsed - } - count++ - } - return final - } else { - throw new Error(`Invalid response: ${response}`) - } - } - - async store(key: string, value: any, expirySeconds: number | null = null) { - const db = this._db - if (typeof value === "object") { - value = JSON.stringify(value) - } - const prefixedKey = addDbPrefix(db, key) - await this.getClient().set(prefixedKey, value) - if (expirySeconds) { - await this.getClient().expire(prefixedKey, expirySeconds) - } - } - - async getTTL(key: string) { - const db = this._db - const prefixedKey = addDbPrefix(db, key) - return this.getClient().ttl(prefixedKey) - } - - async setExpiry(key: string, expirySeconds: number | null) { - const db = this._db - const prefixedKey = addDbPrefix(db, key) - await this.getClient().expire(prefixedKey, expirySeconds) - } - - async delete(key: string) { - const db = this._db - await this.getClient().del(addDbPrefix(db, key)) - } - - async clear() { - let items = await this.scan() - await Promise.all(items.map((obj: any) => this.delete(obj.key))) - } -} +// Mimic 
the outer package export for usage in index.ts +// The outer exports can't be used as they now reference dist directly +export { default as Client } from "./redis" +export * as utils from "./utils" +export * as clients from "./init" +export * as redlock from "./redlock" diff --git a/packages/backend-core/src/redis/init.js b/packages/backend-core/src/redis/init.js deleted file mode 100644 index 3150ef2c1c..0000000000 --- a/packages/backend-core/src/redis/init.js +++ /dev/null @@ -1,69 +0,0 @@ -const Client = require("./index") -const utils = require("./utils") - -let userClient, - sessionClient, - appClient, - cacheClient, - writethroughClient, - lockClient - -async function init() { - userClient = await new Client(utils.Databases.USER_CACHE).init() - sessionClient = await new Client(utils.Databases.SESSIONS).init() - appClient = await new Client(utils.Databases.APP_METADATA).init() - cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init() - lockClient = await new Client(utils.Databases.LOCKS).init() - writethroughClient = await new Client( - utils.Databases.WRITE_THROUGH, - utils.SelectableDatabases.WRITE_THROUGH - ).init() -} - -process.on("exit", async () => { - if (userClient) await userClient.finish() - if (sessionClient) await sessionClient.finish() - if (appClient) await appClient.finish() - if (cacheClient) await cacheClient.finish() - if (writethroughClient) await writethroughClient.finish() - if (lockClient) await lockClient.finish() -}) - -module.exports = { - getUserClient: async () => { - if (!userClient) { - await init() - } - return userClient - }, - getSessionClient: async () => { - if (!sessionClient) { - await init() - } - return sessionClient - }, - getAppClient: async () => { - if (!appClient) { - await init() - } - return appClient - }, - getCacheClient: async () => { - if (!cacheClient) { - await init() - } - return cacheClient - }, - getWritethroughClient: async () => { - if (!writethroughClient) { - await init() - } - return writethroughClient - }, - getLockClient: async () => { - if (!lockClient) { - await init() - } - return lockClient - }, -} diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts new file mode 100644 index 0000000000..00329ffb84 --- /dev/null +++ b/packages/backend-core/src/redis/init.ts @@ -0,0 +1,72 @@ +import Client from "./redis" +import * as utils from "./utils" + +let userClient: Client, + sessionClient: Client, + appClient: Client, + cacheClient: Client, + writethroughClient: Client, + lockClient: Client + +async function init() { + userClient = await new Client(utils.Databases.USER_CACHE).init() + sessionClient = await new Client(utils.Databases.SESSIONS).init() + appClient = await new Client(utils.Databases.APP_METADATA).init() + cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init() + lockClient = await new Client(utils.Databases.LOCKS).init() + writethroughClient = await new Client( + utils.Databases.WRITE_THROUGH, + utils.SelectableDatabase.WRITE_THROUGH + ).init() +} + +process.on("exit", async () => { + if (userClient) await userClient.finish() + if (sessionClient) await sessionClient.finish() + if (appClient) await appClient.finish() + if (cacheClient) await cacheClient.finish() + if (writethroughClient) await writethroughClient.finish() + if (lockClient) await lockClient.finish() +}) + +export async function getUserClient() { + if (!userClient) { + await init() + } + return userClient +} + +export async function getSessionClient() { + if (!sessionClient) { + await 
init() + } + return sessionClient +} + +export async function getAppClient() { + if (!appClient) { + await init() + } + return appClient +} + +export async function getCacheClient() { + if (!cacheClient) { + await init() + } + return cacheClient +} + +export async function getWritethroughClient() { + if (!writethroughClient) { + await init() + } + return writethroughClient +} + +export async function getLockClient() { + if (!lockClient) { + await init() + } + return lockClient +} diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts new file mode 100644 index 0000000000..58734fc4f1 --- /dev/null +++ b/packages/backend-core/src/redis/redis.ts @@ -0,0 +1,279 @@ +import env from "../environment" +// ioredis mock is all in memory +const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis") +import { + addDbPrefix, + removeDbPrefix, + getRedisOptions, + SEPARATOR, + SelectableDatabase, +} from "./utils" + +const RETRY_PERIOD_MS = 2000 +const STARTUP_TIMEOUT_MS = 5000 +const CLUSTERED = false +const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT + +// for testing just generate the client once +let CLOSED = false +let CLIENTS: { [key: number]: any } = {} +// if in test always connected +let CONNECTED = env.isTest() + +function pickClient(selectDb: number): any { + return CLIENTS[selectDb] +} + +function connectionError( + selectDb: number, + timeout: NodeJS.Timeout, + err: Error | string +) { + // manually shut down, ignore errors + if (CLOSED) { + return + } + pickClient(selectDb).disconnect() + CLOSED = true + // always clear this on error + clearTimeout(timeout) + CONNECTED = false + console.error("Redis connection failed - " + err) + setTimeout(() => { + init() + }, RETRY_PERIOD_MS) +} + +/** + * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise + * will return the ioredis client which will be ready to use. 
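+ * @example + * // invoked internally by RedisWrapper.init() below (sketch): + * // init(SelectableDatabase.WRITE_THROUGH)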
+ */ +function init(selectDb = DEFAULT_SELECT_DB) { + let timeout: NodeJS.Timeout + CLOSED = false + let client = pickClient(selectDb) + // already connected, ignore + if (client && CONNECTED) { + return + } + // testing uses a single in memory client + if (env.isTest()) { + CLIENTS[selectDb] = new Redis(getRedisOptions()) + } + // start the timer - only allowed 5 seconds to connect + timeout = setTimeout(() => { + if (!CONNECTED) { + connectionError( + selectDb, + timeout, + "Did not successfully connect in timeout" + ) + } + }, STARTUP_TIMEOUT_MS) + + // disconnect any lingering client + if (client) { + client.disconnect() + } + const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED) + + if (CLUSTERED) { + client = new Redis.Cluster([{ host, port }], opts) + } else if (redisProtocolUrl) { + client = new Redis(redisProtocolUrl) + } else { + client = new Redis(opts) + } + // attach handlers + client.on("end", (err: Error) => { + connectionError(selectDb, timeout, err) + }) + client.on("error", (err: Error) => { + connectionError(selectDb, timeout, err) + }) + client.on("connect", () => { + clearTimeout(timeout) + CONNECTED = true + }) + CLIENTS[selectDb] = client +} + +function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) { + return new Promise(resolve => { + if (pickClient(selectDb) == null) { + init() + } else if (CONNECTED) { + resolve("") + return + } + // check if the connection is ready + const interval = setInterval(() => { + if (CONNECTED) { + clearInterval(interval) + resolve("") + } + }, 500) + }) +} + +/** + * Utility function, takes a redis stream and converts it to a promisified response - + * this can only be done with redis streams because they will have an end. + * @param stream A redis stream, specifically as this type of stream will have an end. + * @param client The client to use for further lookups. 
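+ * @example + * // as consumed by scan() below (sketch): + * // const results = await promisifyStream(client.scanStream({ match: "db-*", count: 100 }), client)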
+ * @return {Promise} The final output of the stream + */ +function promisifyStream(stream: any, client: RedisWrapper) { + return new Promise((resolve, reject) => { + const outputKeys = new Set() + stream.on("data", (keys: string[]) => { + keys.forEach(key => { + outputKeys.add(key) + }) + }) + stream.on("error", (err: Error) => { + reject(err) + }) + stream.on("end", async () => { + const keysArray: string[] = Array.from(outputKeys) as string[] + try { + let getPromises = [] + for (let key of keysArray) { + getPromises.push(client.get(key)) + } + const jsonArray = await Promise.all(getPromises) + resolve( + keysArray.map(key => ({ + key: removeDbPrefix(key), + value: JSON.parse(jsonArray.shift()), + })) + ) + } catch (err) { + reject(err) + } + }) + }) +} + +class RedisWrapper { + _db: string + _select: number + + constructor(db: string, selectDb: number | null = null) { + this._db = db + this._select = selectDb || DEFAULT_SELECT_DB + } + + getClient() { + return pickClient(this._select) + } + + async init() { + CLOSED = false + init(this._select) + await waitForConnection(this._select) + return this + } + + async finish() { + CLOSED = true + this.getClient().disconnect() + } + + async scan(key = ""): Promise<any> { + const db = this._db + key = `${db}${SEPARATOR}${key}` + let stream + if (CLUSTERED) { + let node = this.getClient().nodes("master") + stream = node[0].scanStream({ match: key + "*", count: 100 }) + } else { + stream = this.getClient().scanStream({ match: key + "*", count: 100 }) + } + return promisifyStream(stream, this.getClient()) + } + + async keys(pattern: string) { + const db = this._db + return this.getClient().keys(addDbPrefix(db, pattern)) + } + + async get(key: string) { + const db = this._db + let response = await this.getClient().get(addDbPrefix(db, key)) + // overwrite the prefixed key + if (response != null && response.key) { + response.key = key + } + // if it's not an object just return the response + try { + return JSON.parse(response) + } catch (err) { + return response + } + } + + async bulkGet(keys: string[]) { + const db = this._db + if (keys.length === 0) { + return {} + } + const prefixedKeys = keys.map(key => addDbPrefix(db, key)) + let response = await this.getClient().mget(prefixedKeys) + if (Array.isArray(response)) { + let final: any = {} + let count = 0 + for (let result of response) { + if (result) { + let parsed + try { + parsed = JSON.parse(result) + } catch (err) { + parsed = result + } + final[keys[count]] = parsed + } + count++ + } + return final + } else { + throw new Error(`Invalid response: ${response}`) + } + } + + async store(key: string, value: any, expirySeconds: number | null = null) { + const db = this._db + if (typeof value === "object") { + value = JSON.stringify(value) + } + const prefixedKey = addDbPrefix(db, key) + await this.getClient().set(prefixedKey, value) + if (expirySeconds) { + await this.getClient().expire(prefixedKey, expirySeconds) + } + } + + async getTTL(key: string) { + const db = this._db + const prefixedKey = addDbPrefix(db, key) + return this.getClient().ttl(prefixedKey) + } + + async setExpiry(key: string, expirySeconds: number | null) { + const db = this._db + const prefixedKey = addDbPrefix(db, key) + await this.getClient().expire(prefixedKey, expirySeconds) + } + + async delete(key: string) { + const db = this._db + await this.getClient().del(addDbPrefix(db, key)) + } + + async clear() { + let items = await this.scan() + await Promise.all(items.map((obj: any) => this.delete(obj.key))) + } +} + +export = 
RedisWrapper diff --git a/packages/backend-core/src/redis/utils.js b/packages/backend-core/src/redis/utils.ts similarity index 68% rename from packages/backend-core/src/redis/utils.js rename to packages/backend-core/src/redis/utils.ts index af719197b5..4c556ebd54 100644 --- a/packages/backend-core/src/redis/utils.js +++ b/packages/backend-core/src/redis/utils.ts @@ -1,10 +1,10 @@ -const env = require("../environment") +import env from "../environment" const SLOT_REFRESH_MS = 2000 const CONNECT_TIMEOUT_MS = 10000 -const SEPARATOR = "-" const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD +export const SEPARATOR = "-" /** * These Redis databases help us to segment up a Redis keyspace by prepending the @@ -12,23 +12,23 @@ const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD * can be split up a bit; allowing us to use scans on small databases to find some particular * keys within. * If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out - * of the default keyspace and use a separate one - the SelectableDatabases can be used for this. + * of the default keyspace and use a separate one - the SelectableDatabase can be used for this. */ -exports.Databases = { - PW_RESETS: "pwReset", - VERIFICATIONS: "verification", - INVITATIONS: "invitation", - DEV_LOCKS: "devLocks", - DEBOUNCE: "debounce", - SESSIONS: "session", - USER_CACHE: "users", - FLAGS: "flags", - APP_METADATA: "appMetadata", - QUERY_VARS: "queryVars", - LICENSES: "license", - GENERIC_CACHE: "data_cache", - WRITE_THROUGH: "writeThrough", - LOCKS: "locks", +export enum Databases { + PW_RESETS = "pwReset", + VERIFICATIONS = "verification", + INVITATIONS = "invitation", + DEV_LOCKS = "devLocks", + DEBOUNCE = "debounce", + SESSIONS = "session", + USER_CACHE = "users", + FLAGS = "flags", + APP_METADATA = "appMetadata", + QUERY_VARS = "queryVars", + LICENSES = "license", + GENERIC_CACHE = "data_cache", + WRITE_THROUGH = "writeThrough", + LOCKS = "locks", } /** @@ -40,30 +40,28 @@ exports.Databases = { * but if you need to walk through all values in a database periodically then a separate selectable * keyspace should be used. */ -exports.SelectableDatabases = { - DEFAULT: 0, - WRITE_THROUGH: 1, - UNUSED_1: 2, - UNUSED_2: 3, - UNUSED_3: 4, - UNUSED_4: 5, - UNUSED_5: 6, - UNUSED_6: 7, - UNUSED_7: 8, - UNUSED_8: 9, - UNUSED_9: 10, - UNUSED_10: 11, - UNUSED_11: 12, - UNUSED_12: 13, - UNUSED_13: 14, - UNUSED_14: 15, +export enum SelectableDatabase { + DEFAULT = 0, + WRITE_THROUGH = 1, + UNUSED_1 = 2, + UNUSED_2 = 3, + UNUSED_3 = 4, + UNUSED_4 = 5, + UNUSED_5 = 6, + UNUSED_6 = 7, + UNUSED_7 = 8, + UNUSED_8 = 9, + UNUSED_9 = 10, + UNUSED_10 = 11, + UNUSED_11 = 12, + UNUSED_12 = 13, + UNUSED_13 = 14, + UNUSED_14 = 15, } -exports.SEPARATOR = SEPARATOR - -exports.getRedisOptions = (clustered = false) => { +export function getRedisOptions(clustered = false) { let password = REDIS_PASSWORD - let url = REDIS_URL.split("//") + let url: string[] | string = REDIS_URL.split("//") // get rid of the protocol url = url.length > 1 ? 
   // check for a password etc
@@ -84,7 +82,7 @@ exports.getRedisOptions = (clustered = false) => {
     redisProtocolUrl = REDIS_URL
   }
 
-  const opts = {
+  const opts: any = {
     connectTimeout: CONNECT_TIMEOUT_MS,
   }
   if (clustered) {
@@ -92,7 +90,7 @@
     opts.redisOptions.tls = {}
     opts.redisOptions.password = password
     opts.slotsRefreshTimeout = SLOT_REFRESH_MS
-    opts.dnsLookup = (address, callback) => callback(null, address)
+    opts.dnsLookup = (address: string, callback: any) => callback(null, address)
   } else {
     opts.host = host
     opts.port = port
@@ -101,14 +99,14 @@
   return { opts, host, port, redisProtocolUrl }
 }
 
-exports.addDbPrefix = (db, key) => {
+export function addDbPrefix(db: string, key: string) {
   if (key.includes(db)) {
     return key
   }
   return `${db}${SEPARATOR}${key}`
 }
 
-exports.removeDbPrefix = key => {
+export function removeDbPrefix(key: string) {
   let parts = key.split(SEPARATOR)
   if (parts.length >= 2) {
     parts.shift()
diff --git a/packages/backend-core/src/security/apiKeys.js b/packages/backend-core/src/security/apiKeys.js
deleted file mode 100644
index e90418abb8..0000000000
--- a/packages/backend-core/src/security/apiKeys.js
+++ /dev/null
@@ -1 +0,0 @@
-exports.lookupApiKey = async () => {}
diff --git a/packages/backend-core/src/security/encryption.js b/packages/backend-core/src/security/encryption.ts
similarity index 73%
rename from packages/backend-core/src/security/encryption.js
rename to packages/backend-core/src/security/encryption.ts
index c31f597652..a9006f302d 100644
--- a/packages/backend-core/src/security/encryption.js
+++ b/packages/backend-core/src/security/encryption.ts
@@ -1,5 +1,5 @@
-const crypto = require("crypto")
-const env = require("../environment")
+import crypto from "crypto"
+import env from "../environment"
 
 const ALGO = "aes-256-ctr"
 const SECRET = env.JWT_SECRET
@@ -8,13 +8,13 @@ const ITERATIONS = 10000
 const RANDOM_BYTES = 16
 const STRETCH_LENGTH = 32
 
-function stretchString(string, salt) {
+function stretchString(string: string, salt: Buffer) {
   return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
 }
 
-exports.encrypt = input => {
+export function encrypt(input: string) {
   const salt = crypto.randomBytes(RANDOM_BYTES)
-  const stretched = stretchString(SECRET, salt)
+  const stretched = stretchString(SECRET!, salt)
   const cipher = crypto.createCipheriv(ALGO, stretched, salt)
   const base = cipher.update(input)
   const final = cipher.final()
@@ -22,10 +22,10 @@
   return `${salt.toString("hex")}${SEPARATOR}${encrypted}`
 }
 
-exports.decrypt = input => {
+export function decrypt(input: string) {
   const [salt, encrypted] = input.split(SEPARATOR)
   const saltBuffer = Buffer.from(salt, "hex")
-  const stretched = stretchString(SECRET, saltBuffer)
+  const stretched = stretchString(SECRET!, saltBuffer)
   const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)
   const base = decipher.update(Buffer.from(encrypted, "hex"))
   const final = decipher.final()
diff --git a/packages/backend-core/src/hashing.js b/packages/backend-core/src/utils/hashing.ts
similarity index 59%
rename from packages/backend-core/src/hashing.js
rename to packages/backend-core/src/utils/hashing.ts
index 7524e66043..d9277ee3e7 100644
--- a/packages/backend-core/src/hashing.js
+++ b/packages/backend-core/src/utils/hashing.ts
@@ -1,18 +1,18 @@
-const env = require("./environment")
+import env from "../environment"
 const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
require("bcryptjs") : require("bcrypt") -const { v4 } = require("uuid") +import { v4 } from "uuid" const SALT_ROUNDS = env.SALT_ROUNDS || 10 -exports.hash = async data => { +export async function hash(data: string) { const salt = await bcrypt.genSalt(SALT_ROUNDS) return bcrypt.hash(data, salt) } -exports.compare = async (data, encrypted) => { +export async function compare(data: string, encrypted: string) { return bcrypt.compare(data, encrypted) } -exports.newid = function () { +export function newid() { return v4().replace(/-/g, "") } diff --git a/packages/backend-core/src/utils/index.ts b/packages/backend-core/src/utils/index.ts new file mode 100644 index 0000000000..8e663bce52 --- /dev/null +++ b/packages/backend-core/src/utils/index.ts @@ -0,0 +1,2 @@ +export * from "./hashing" +export * from "./utils" diff --git a/packages/backend-core/src/utils.ts b/packages/backend-core/src/utils/utils.ts similarity index 94% rename from packages/backend-core/src/utils.ts rename to packages/backend-core/src/utils/utils.ts index c04d6196b3..d830164414 100644 --- a/packages/backend-core/src/utils.ts +++ b/packages/backend-core/src/utils/utils.ts @@ -4,14 +4,14 @@ import { ViewName, getAllApps, queryGlobalView, -} from "./db" -import { options } from "./middleware/passport/jwt" -import { Header, Cookie, MAX_VALID_DATE } from "./constants" -import env from "./environment" -import userCache from "./cache/user" -import { getSessionsForUser, invalidateSessions } from "./security/sessions" -import * as events from "./events" -import tenancy from "./tenancy" +} from "../db" +import { options } from "../middleware/passport/jwt" +import { Header, Cookie, MAX_VALID_DATE } from "../constants" +import env from "../environment" +import * as userCache from "../cache/user" +import { getSessionsForUser, invalidateSessions } from "../security/sessions" +import * as events from "../events" +import tenancy from "../tenancy" import { App, BBContext, diff --git a/packages/backend-core/tenancy.js b/packages/backend-core/tenancy.js deleted file mode 100644 index 9ca808b74e..0000000000 --- a/packages/backend-core/tenancy.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./src/tenancy") diff --git a/packages/backend-core/utils.js b/packages/backend-core/utils.js deleted file mode 100644 index 2ef920e103..0000000000 --- a/packages/backend-core/utils.js +++ /dev/null @@ -1,4 +0,0 @@ -module.exports = { - ...require("./src/utils"), - ...require("./src/hashing"), -} diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts index 83d7bb0b8d..752353893f 100644 --- a/packages/server/src/api/controllers/application.ts +++ b/packages/server/src/api/controllers/application.ts @@ -390,7 +390,7 @@ const appPostCreate = async (ctx: any, app: App) => { export const create = async (ctx: any) => { const newApplication = await quotas.addApp(() => performAppCreate(ctx)) await appPostCreate(ctx, newApplication) - await cache.bustCache(cache.CacheKeys.CHECKLIST) + await cache.bustCache(cache.CacheKey.CHECKLIST) ctx.body = newApplication ctx.status = 200 } diff --git a/packages/types/src/documents/global/config.ts b/packages/types/src/documents/global/config.ts index 7718b62733..5f050e0616 100644 --- a/packages/types/src/documents/global/config.ts +++ b/packages/types/src/documents/global/config.ts @@ -31,17 +31,19 @@ export interface GoogleConfig extends Config { } } +export interface OIDCInnerCfg { + configUrl: string + clientID: string + clientSecret: string + logo: string + 
+  name: string
+  uuid: string
+  activated: boolean
+}
+
 export interface OIDCConfig extends Config {
   config: {
-    configs: {
-      configUrl: string
-      clientID: string
-      clientSecret: string
-      logo: string
-      name: string
-      uuid: string
-      activated: boolean
-    }[]
+    configs: OIDCInnerCfg[]
   }
 }
diff --git a/packages/types/src/documents/global/user.ts b/packages/types/src/documents/global/user.ts
index 9a1fb472f0..1778d6e7c6 100644
--- a/packages/types/src/documents/global/user.ts
+++ b/packages/types/src/documents/global/user.ts
@@ -1,8 +1,37 @@
 import { Document } from "../document"
 
-export interface User extends Document {
+export interface SSOProfile {
+  id: string
+  name?: {
+    givenName?: string
+    familyName?: string
+  }
+  _json: {
+    email: string
+    picture: string
+  }
+  provider?: string
+}
+
+export interface ThirdPartyUser extends Document {
+  thirdPartyProfile?: SSOProfile["_json"]
+  firstName?: string
+  lastName?: string
+  pictureUrl?: string
+  profile?: SSOProfile
+  oauth2?: any
+  provider?: string
+  providerType?: string
+  email: string
+  userId?: string
+  forceResetPassword?: boolean
+}
+
+export interface User extends ThirdPartyUser {
   tenantId: string
   email: string
+  userId?: string
+  forceResetPassword?: boolean
   roles: UserRoles
   builder?: {
     global: boolean
@@ -10,14 +39,14 @@
   admin?: {
     global: boolean
   }
-  providerType?: string
   password?: string
   status?: string
   createdAt?: number // override the default createdAt behaviour - users sdk historically set this to Date.now()
   userGroups?: string[]
-  forceResetPassword?: boolean
   dayPassRecordedAt?: string
-  userId?: string
+  account?: {
+    authType: string
+  }
 }
 
 export interface UserRoles {
diff --git a/packages/types/src/sdk/context.ts b/packages/types/src/sdk/context.ts
index 940ac5a0c3..b3403df8af 100644
--- a/packages/types/src/sdk/context.ts
+++ b/packages/types/src/sdk/context.ts
@@ -1,5 +1,5 @@
 import { User, Account } from "../documents"
-import { IdentityType } from "./events/identification"
+import { IdentityType } from "./events"
 
 export interface BaseContext {
   _id: string
diff --git a/packages/worker/src/api/controllers/global/configs.ts b/packages/worker/src/api/controllers/global/configs.ts
index a8ecd78a00..236ab6bfa6 100644
--- a/packages/worker/src/api/controllers/global/configs.ts
+++ b/packages/worker/src/api/controllers/global/configs.ts
@@ -170,8 +170,8 @@ export async function save(ctx: BBContext) {
 
   try {
     const response = await db.put(ctx.request.body)
-    await cache.bustCache(cache.CacheKeys.CHECKLIST)
-    await cache.bustCache(cache.CacheKeys.ANALYTICS_ENABLED)
+    await cache.bustCache(cache.CacheKey.CHECKLIST)
+    await cache.bustCache(cache.CacheKey.ANALYTICS_ENABLED)
 
     for (const fn of eventFns) {
       await fn()
@@ -371,7 +371,7 @@ export async function destroy(ctx: BBContext) {
   const { id, rev } = ctx.params
   try {
     await db.remove(id, rev)
-    await cache.delete(cache.CacheKeys.CHECKLIST)
+    await cache.delete(cache.CacheKey.CHECKLIST)
     ctx.body = { message: "Config deleted successfully" }
   } catch (err: any) {
     ctx.throw(err.status, err)
@@ -384,7 +384,7 @@ export async function configChecklist(ctx: BBContext) {
 
   try {
     ctx.body = await cache.withCache(
-      cache.CacheKeys.CHECKLIST,
+      cache.CacheKey.CHECKLIST,
       env.CHECKLIST_CACHE_TTL,
       async () => {
         let apps = []
diff --git a/packages/worker/src/api/controllers/global/users.ts b/packages/worker/src/api/controllers/global/users.ts
index 7edb1b710a..e913ccee88 100644
--- a/packages/worker/src/api/controllers/global/users.ts
+++ b/packages/worker/src/api/controllers/global/users.ts
@@ -104,7 +104,7 @@ export const adminUser = async (ctx: any) => {
   try {
     // always bust checklist beforehand, if an error occurs but can proceed, don't get
     // stuck in a cycle
-    await cache.bustCache(cache.CacheKeys.CHECKLIST)
+    await cache.bustCache(cache.CacheKey.CHECKLIST)
     const finalUser = await sdk.users.save(user, {
       hashPassword,
       requirePassword,
diff --git a/packages/worker/src/api/controllers/system/restore.ts b/packages/worker/src/api/controllers/system/restore.ts
index 96a7c61cb4..def6d58a5d 100644
--- a/packages/worker/src/api/controllers/system/restore.ts
+++ b/packages/worker/src/api/controllers/system/restore.ts
@@ -6,7 +6,7 @@ export async function systemRestored(ctx: BBContext) {
   if (!env.SELF_HOSTED) {
     ctx.throw(405, "This operation is not allowed in cloud.")
   }
-  await cache.bustCache(cache.CacheKeys.CHECKLIST)
+  await cache.bustCache(cache.CacheKey.CHECKLIST)
   ctx.body = {
     message: "System prepared after restore.",
   }
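
The CacheKeys-to-CacheKey change in these last hunks is the same object-literal-to-enum conversion applied earlier to Databases and SelectableDatabase. A minimal sketch of the resulting consumer pattern follows; it is not part of the patch. The withCache and bustCache calls and the CacheKey.CHECKLIST key mirror the hunks above, while the import shape, buildChecklist helper, and TTL value are assumptions for illustration.

  import { cache } from "@budibase/backend-core"

  // Hypothetical checklist builder — stands in for the worker's real one
  async function buildChecklist(): Promise<Record<string, any>> {
    return { apps: 0, smtpConfigured: false }
  }

  export async function getChecklist() {
    // withCache returns the cached value when present, otherwise runs the
    // callback and stores its result under CacheKey.CHECKLIST for the TTL
    return cache.withCache(cache.CacheKey.CHECKLIST, 300, buildChecklist)
  }

  export async function onConfigSaved() {
    // writes that can change the checklist bust the cached entry, as the
    // save/destroy/adminUser/systemRestored handlers do in the hunks above
    await cache.bustCache(cache.CacheKey.CHECKLIST)
  }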