Complete conversion of backend-core to TypeScript.
parent bfaef13768
commit d3c4c99e16
@@ -1 +0,0 @@
-module.exports = require("./src/cloud/accounts")
@@ -1 +0,0 @@
-module.exports = require("./src/auth")
@@ -1,9 +0,0 @@
-const generic = require("./src/cache/generic")
-
-module.exports = {
-  user: require("./src/cache/user"),
-  app: require("./src/cache/appMetadata"),
-  writethrough: require("./src/cache/writethrough"),
-  ...generic,
-  cache: generic,
-}
@@ -1 +0,0 @@
-module.exports = require("./src/constants")
@@ -1,24 +0,0 @@
-const {
-  getAppDB,
-  getDevAppDB,
-  getProdAppDB,
-  getAppId,
-  updateAppId,
-  doInAppContext,
-  doInTenant,
-  doInContext,
-} = require("./src/context")
-
-const identity = require("./src/context/identity")
-
-module.exports = {
-  getAppDB,
-  getDevAppDB,
-  getProdAppDB,
-  getAppId,
-  updateAppId,
-  doInAppContext,
-  doInTenant,
-  identity,
-  doInContext,
-}
@@ -1 +0,0 @@
-module.exports = require("./src/db")
@@ -1 +0,0 @@
-module.exports = require("./src/context/deprovision")
@@ -1 +0,0 @@
-module.exports = require("./src/security/encryption")
@@ -1 +0,0 @@
-module.exports = require("./src/logging")
@@ -1 +0,0 @@
-module.exports = require("./src/middleware")
@@ -1 +0,0 @@
-module.exports = require("./src/migrations")
@@ -1,4 +0,0 @@
-module.exports = {
-  ...require("./src/objectStore"),
-  ...require("./src/objectStore/utils"),
-}
@@ -1 +0,0 @@
-module.exports = require("./src/security/permissions")
@@ -1,3 +0,0 @@
-module.exports = {
-  ...require("./src/plugin"),
-}
@@ -1,5 +0,0 @@
-module.exports = {
-  Client: require("./src/redis"),
-  utils: require("./src/redis/utils"),
-  clients: require("./src/redis/init"),
-}
@@ -1 +0,0 @@
-module.exports = require("./src/security/roles")
@@ -1 +0,0 @@
-module.exports = require("./src/security/sessions")
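Note: every file above is a one-line CommonJS shim re-exporting a module from src/; this commit deletes them all. As a minimal sketch (an assumption for illustration only — the commit itself replaces the shims via the src/index.ts changes shown further down), the same public surface can be provided by a single TypeScript entry point:

    // hypothetical consolidated entry point replacing the per-file shims
    export * as accounts from "./src/cloud/accounts"
    export * as auth from "./src/auth"
    export * as constants from "./src/constants"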
@@ -1,6 +1,6 @@
-const redis = require("../redis/init")
-const { doWithDB } = require("../db")
-const { DocumentType } = require("../db/constants")
+import { getAppClient } from "../redis/init"
+import { doWithDB, DocumentType } from "../db"
+import { Database } from "@budibase/types"
 
 const AppState = {
   INVALID: "invalid",
@@ -10,17 +10,17 @@ const EXPIRY_SECONDS = 3600
 /**
  * The default populate app metadata function
  */
-const populateFromDB = async appId => {
+async function populateFromDB(appId: string) {
   return doWithDB(
     appId,
-    db => {
+    (db: Database) => {
      return db.get(DocumentType.APP_METADATA)
     },
     { skip_setup: true }
   )
 }
 
-const isInvalid = metadata => {
+function isInvalid(metadata?: { state: string }) {
   return !metadata || metadata.state === AppState.INVALID
 }
 
@@ -31,15 +31,15 @@ const isInvalid = metadata => {
  * @param {string} appId the id of the app to get metadata from.
  * @returns {object} the app metadata.
  */
-exports.getAppMetadata = async appId => {
-  const client = await redis.getAppClient()
+export async function getAppMetadata(appId: string) {
+  const client = await getAppClient()
   // try cache
   let metadata = await client.get(appId)
   if (!metadata) {
-    let expiry = EXPIRY_SECONDS
+    let expiry: number | undefined = EXPIRY_SECONDS
     try {
       metadata = await populateFromDB(appId)
-    } catch (err) {
+    } catch (err: any) {
       // app DB left around, but no metadata, it is invalid
       if (err && err.status === 404) {
         metadata = { state: AppState.INVALID }
@@ -74,11 +74,11 @@ exports.getAppMetadata = async appId => {
  * @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
  * @return {Promise<void>} will respond with success when cache is updated.
  */
-exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
+export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
   if (!appId) {
     throw "Cannot invalidate if no app ID provided."
   }
-  const client = await redis.getAppClient()
+  const client = await getAppClient()
   await client.delete(appId)
   if (newMetadata) {
     await client.store(appId, newMetadata, EXPIRY_SECONDS)
@@ -1,6 +1,6 @@
 import { getTenantId } from "../../context"
-import redis from "../../redis/init"
-import RedisWrapper from "../../redis"
+import * as redis from "../../redis/init"
+import { Client } from "../../redis"
 
 function generateTenantKey(key: string) {
   const tenantId = getTenantId()
@@ -8,9 +8,9 @@ function generateTenantKey(key: string) {
 }
 
 export = class BaseCache {
-  client: RedisWrapper | undefined
+  client: Client | undefined
 
-  constructor(client: RedisWrapper | undefined = undefined) {
+  constructor(client: Client | undefined = undefined) {
     this.client = client
   }
 
@@ -1,30 +0,0 @@
-const BaseCache = require("./base")
-
-const GENERIC = new BaseCache()
-
-exports.CacheKeys = {
-  CHECKLIST: "checklist",
-  INSTALLATION: "installation",
-  ANALYTICS_ENABLED: "analyticsEnabled",
-  UNIQUE_TENANT_ID: "uniqueTenantId",
-  EVENTS: "events",
-  BACKFILL_METADATA: "backfillMetadata",
-  EVENTS_RATE_LIMIT: "eventsRateLimit",
-}
-
-exports.TTL = {
-  ONE_MINUTE: 600,
-  ONE_HOUR: 3600,
-  ONE_DAY: 86400,
-}
-
-function performExport(funcName) {
-  return (...args) => GENERIC[funcName](...args)
-}
-
-exports.keys = performExport("keys")
-exports.get = performExport("get")
-exports.store = performExport("store")
-exports.delete = performExport("delete")
-exports.withCache = performExport("withCache")
-exports.bustCache = performExport("bustCache")
@@ -0,0 +1,30 @@
+const BaseCache = require("./base")
+
+const GENERIC = new BaseCache()
+
+export enum CacheKey {
+  CHECKLIST = "checklist",
+  INSTALLATION = "installation",
+  ANALYTICS_ENABLED = "analyticsEnabled",
+  UNIQUE_TENANT_ID = "uniqueTenantId",
+  EVENTS = "events",
+  BACKFILL_METADATA = "backfillMetadata",
+  EVENTS_RATE_LIMIT = "eventsRateLimit",
+}
+
+export enum TTL {
+  ONE_MINUTE = 600,
+  ONE_HOUR = 3600,
+  ONE_DAY = 86400,
+}
+
+function performExport(funcName: string) {
+  return (...args: any) => GENERIC[funcName](...args)
+}
+
+export const keys = performExport("keys")
+export const get = performExport("get")
+export const store = performExport("store")
+export const destroy = performExport("delete")
+export const withCache = performExport("withCache")
+export const bustCache = performExport("bustCache")
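Note: the Redis operation is still "delete" (performExport("delete")), but the named export becomes destroy because delete is a reserved word and cannot be declared as a const binding. A minimal illustration (not part of the commit):

    // export const delete = performExport("delete")  // SyntaxError: reserved word
    // callers therefore migrate from cache.delete(...) to:
    import * as cache from "../cache/generic"
    await cache.destroy("someKey")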
@@ -0,0 +1,4 @@
+export * as generic from "./generic"
+export * as user from "./user"
+export * as app from "./appMetadata"
+export * as writethrough from "./writethrough"
@@ -1,15 +1,16 @@
-const redis = require("../redis/init")
-const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
-const env = require("../environment")
-const accounts = require("../cloud/accounts")
+import * as redis from "../redis/init"
+import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy"
+import env from "../environment"
+import * as accounts from "../cloud/accounts"
+import { Database } from "@budibase/types"
 
 const EXPIRY_SECONDS = 3600
 
 /**
  * The default populate user function
  */
-const populateFromDB = async (userId, tenantId) => {
-  const user = await doWithGlobalDB(tenantId, db => db.get(userId))
+async function populateFromDB(userId: string, tenantId: string) {
+  const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId))
   user.budibaseAccess = true
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
     const account = await accounts.getAccount(user.email)
@@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => {
  * @param {*} populateUser function to provide the user for re-caching. default to couch db
  * @returns
  */
-exports.getUser = async (userId, tenantId = null, populateUser = null) => {
+export async function getUser(
+  userId: string,
+  tenantId?: string,
+  populateUser?: any
+) {
   if (!populateUser) {
     populateUser = populateFromDB
   }
@@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
   let user = await client.get(userId)
   if (!user) {
     user = await populateUser(userId, tenantId)
-    client.store(userId, user, EXPIRY_SECONDS)
+    await client.store(userId, user, EXPIRY_SECONDS)
   }
   if (user && !user.tenantId && tenantId) {
     // make sure the tenant ID is always correct/set
@@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
   return user
 }
 
-exports.invalidateUser = async userId => {
+export async function invalidateUser(userId: string) {
   const client = await redis.getUserClient()
   await client.delete(userId)
 }
@@ -1,42 +0,0 @@
-const fetch = require("node-fetch")
-class API {
-  constructor(host) {
-    this.host = host
-  }
-
-  apiCall =
-    method =>
-    async (url = "", options = {}) => {
-      if (!options.headers) {
-        options.headers = {}
-      }
-
-      if (!options.headers["Content-Type"]) {
-        options.headers = {
-          "Content-Type": "application/json",
-          Accept: "application/json",
-          ...options.headers,
-        }
-      }
-
-      let json = options.headers["Content-Type"] === "application/json"
-
-      const requestOptions = {
-        method: method,
-        body: json ? JSON.stringify(options.body) : options.body,
-        headers: options.headers,
-        // TODO: See if this is necessary
-        credentials: "include",
-      }
-
-      return await fetch(`${this.host}${url}`, requestOptions)
-    }
-
-  post = this.apiCall("POST")
-  get = this.apiCall("GET")
-  patch = this.apiCall("PATCH")
-  del = this.apiCall("DELETE")
-  put = this.apiCall("PUT")
-}
-
-module.exports = API
@@ -0,0 +1,55 @@
+import fetch from "node-fetch"
+
+export = class API {
+  host: string
+
+  constructor(host: string) {
+    this.host = host
+  }
+
+  async apiCall(method: string, url: string, options?: any) {
+    if (!options.headers) {
+      options.headers = {}
+    }
+
+    if (!options.headers["Content-Type"]) {
+      options.headers = {
+        "Content-Type": "application/json",
+        Accept: "application/json",
+        ...options.headers,
+      }
+    }
+
+    let json = options.headers["Content-Type"] === "application/json"
+
+    const requestOptions = {
+      method: method,
+      body: json ? JSON.stringify(options.body) : options.body,
+      headers: options.headers,
+      // TODO: See if this is necessary
+      credentials: "include",
+    }
+
+    return await fetch(`${this.host}${url}`, requestOptions)
+  }
+
+  async post(url: string, options?: any) {
+    return this.apiCall("POST", url, options)
+  }
+
+  async get(url: string, options?: any) {
+    return this.apiCall("GET", url, options)
+  }
+
+  async patch(url: string, options?: any) {
+    return this.apiCall("PATCH", url, options)
+  }
+
+  async del(url: string, options?: any) {
+    return this.apiCall("DELETE", url, options)
+  }
+
+  async put(url: string, options?: any) {
+    return this.apiCall("PUT", url, options)
+  }
+}
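Note: the new signature marks options as optional (options?: any) while the body still dereferences options.headers, so a call such as api.get("/health") with no second argument would throw at runtime; the old curried JS version defaulted it (async (url = "", options = {}) => ...). A defensive default would restore that behaviour — a hedged sketch, not what the commit ships:

    // assumption: defaulting the parameter restores the old `options = {}` behaviour
    async function apiCall(method: string, url: string, options: any = {}) {
      if (!options.headers) {
        options.headers = {} // safe even when the caller omitted options
      }
      return { method, url, headers: options.headers } // placeholder body for illustration
    }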
@@ -1,44 +0,0 @@
-exports.UserStatus = {
-  ACTIVE: "active",
-  INACTIVE: "inactive",
-}
-
-exports.Cookie = {
-  CurrentApp: "budibase:currentapp",
-  Auth: "budibase:auth",
-  Init: "budibase:init",
-  ACCOUNT_RETURN_URL: "budibase:account:returnurl",
-  DatasourceAuth: "budibase:datasourceauth",
-  OIDC_CONFIG: "budibase:oidc:config",
-}
-
-exports.Header = {
-  API_KEY: "x-budibase-api-key",
-  LICENSE_KEY: "x-budibase-license-key",
-  API_VER: "x-budibase-api-version",
-  APP_ID: "x-budibase-app-id",
-  TYPE: "x-budibase-type",
-  PREVIEW_ROLE: "x-budibase-role",
-  TENANT_ID: "x-budibase-tenant-id",
-  TOKEN: "x-budibase-token",
-  CSRF_TOKEN: "x-csrf-token",
-}
-
-exports.GlobalRoles = {
-  OWNER: "owner",
-  ADMIN: "admin",
-  BUILDER: "builder",
-  WORKSPACE_MANAGER: "workspace_manager",
-}
-
-exports.Config = {
-  SETTINGS: "settings",
-  ACCOUNT: "account",
-  SMTP: "smtp",
-  GOOGLE: "google",
-  OIDC: "oidc",
-  OIDC_LOGOS: "logos_oidc",
-}
-
-exports.MAX_VALID_DATE = new Date(2147483647000)
-exports.DEFAULT_TENANT_ID = "default"
@@ -18,7 +18,7 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
 }
 
 export const doInUserContext = (user: User, task: any) => {
-  const userContext: UserContext = {
+  const userContext: any = {
     ...user,
     _id: user._id as string,
     type: IdentityType.USER,
@@ -1,4 +1,4 @@
-import { newid } from "../hashing"
+import { newid } from "../utils"
 import { DEFAULT_TENANT_ID, Config } from "../constants"
 import env from "../environment"
 import {
@@ -2,7 +2,7 @@ import env from "../environment"
 import tenancy from "../tenancy"
 import * as dbUtils from "../db/utils"
 import { Config } from "../constants"
-import { withCache, TTL, CacheKeys } from "../cache/generic"
+import { withCache, TTL, CacheKey } from "../cache/generic"
 
 export const enabled = async () => {
   // cloud - always use the environment variable
@@ -13,7 +13,7 @@ export const enabled = async () => {
   // self host - prefer the settings doc
   // use cache as events have high throughput
   const enabledInDB = await withCache(
-    CacheKeys.ANALYTICS_ENABLED,
+    CacheKey.ANALYTICS_ENABLED,
     TTL.ONE_DAY,
     async () => {
       const settings = await getSettingsDoc()
@@ -21,7 +21,7 @@ import {
   AppCreatedEvent,
 } from "@budibase/types"
 import * as context from "../context"
-import { CacheKeys } from "../cache/generic"
+import { CacheKey } from "../cache/generic"
 import * as cache from "../cache/generic"
 
 // LIFECYCLE
@@ -48,18 +48,18 @@ export const end = async () => {
 // CRUD
 
 const getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {
-  return cache.get(CacheKeys.BACKFILL_METADATA)
+  return cache.get(CacheKey.BACKFILL_METADATA)
 }
 
 const saveBackfillMetadata = async (
   backfill: BackfillMetadata
 ): Promise<void> => {
   // no TTL - deleted by backfill
-  return cache.store(CacheKeys.BACKFILL_METADATA, backfill)
+  return cache.store(CacheKey.BACKFILL_METADATA, backfill)
 }
 
 const deleteBackfillMetadata = async (): Promise<void> => {
-  await cache.delete(CacheKeys.BACKFILL_METADATA)
+  await cache.destroy(CacheKey.BACKFILL_METADATA)
 }
 
 const clearEvents = async () => {
@@ -70,7 +70,7 @@ const clearEvents = async () => {
   for (const key of keys) {
     // delete each key
     // don't use tenancy, already in the key
-    await cache.delete(key, { useTenancy: false })
+    await cache.destroy(key, { useTenancy: false })
   }
 }
 
@@ -167,7 +167,7 @@ const getEventKey = (event?: Event, properties?: any) => {
 
   const tenantId = context.getTenantId()
   if (event) {
-    eventKey = `${CacheKeys.EVENTS}:${tenantId}:${event}`
+    eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}`
 
     // use some properties to make the key more unique
    const custom = CUSTOM_PROPERTY_SUFFIX[event]
@@ -176,7 +176,7 @@ const getEventKey = (event?: Event, properties?: any) => {
       eventKey = `${eventKey}:${suffix}`
     }
   } else {
-    eventKey = `${CacheKeys.EVENTS}:${tenantId}:*`
+    eventKey = `${CacheKey.EVENTS}:${tenantId}:*`
   }
 
   return eventKey
@@ -20,9 +20,9 @@ import {
 import { processors } from "./processors"
 import * as dbUtils from "../db/utils"
 import { Config } from "../constants"
-import * as hashing from "../hashing"
+import { newid } from "../utils"
 import * as installation from "../installation"
-import { withCache, TTL, CacheKeys } from "../cache/generic"
+import { withCache, TTL, CacheKey } from "../cache/generic"
 
 const pkg = require("../../package.json")
 
@@ -270,7 +270,7 @@ const getEventTenantId = async (tenantId: string): Promise<string> => {
 const getUniqueTenantId = async (tenantId: string): Promise<string> => {
   // make sure this tenantId always matches the tenantId in context
   return context.doInTenant(tenantId, () => {
-    return withCache(CacheKeys.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
+    return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
       const db = context.getGlobalDB()
       const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
         type: Config.SETTINGS,
@@ -280,7 +280,7 @@ const getUniqueTenantId = async (tenantId: string): Promise<string> => {
       if (config.config.uniqueTenantId) {
         return config.config.uniqueTenantId
       } else {
-        uniqueTenantId = `${hashing.newid()}_${tenantId}`
+        uniqueTenantId = `${newid()}_${tenantId}`
         config.config.uniqueTenantId = uniqueTenantId
         await db.put(config)
         return uniqueTenantId
@@ -1,5 +1,5 @@
 import { Event } from "@budibase/types"
-import { CacheKeys, TTL } from "../../../cache/generic"
+import { CacheKey, TTL } from "../../../cache/generic"
 import * as cache from "../../../cache/generic"
 import * as context from "../../../context"
 
@@ -74,7 +74,7 @@ export const limited = async (event: Event): Promise<boolean> => {
 }
 
 const eventKey = (event: RateLimitedEvent) => {
-  let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}`
+  let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}`
   if (isPerApp(event)) {
     key = key + ":" + context.getAppId()
   }
@@ -3,7 +3,7 @@ import PosthogProcessor from "../PosthogProcessor"
 import { Event, IdentityType, Hosting } from "@budibase/types"
 const tk = require("timekeeper")
 import * as cache from "../../../../cache/generic"
-import { CacheKeys } from "../../../../cache/generic"
+import { CacheKey } from "../../../../cache/generic"
 import * as context from "../../../../context"
 
 const newIdentity = () => {
@@ -19,7 +19,7 @@ describe("PosthogProcessor", () => {
   beforeEach(async () => {
     jest.clearAllMocks()
     await cache.bustCache(
-      `${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
+      `${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
     )
   })
 
@@ -89,7 +89,7 @@ describe("PosthogProcessor", () => {
     await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
 
     await cache.bustCache(
-      `${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
+      `${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
     )
 
     tk.freeze(new Date(2022, 0, 1, 14, 0))
@@ -1,17 +1,17 @@
-const env = require("../environment")
-const tenancy = require("../tenancy")
+import env from "../environment"
+import tenancy from "../tenancy"
 
 /**
  * Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.
  * The env var is formatted as:
  *  tenant1:feature1:feature2,tenant2:feature1
  */
-const getFeatureFlags = () => {
+function getFeatureFlags() {
   if (!env.TENANT_FEATURE_FLAGS) {
     return
   }
 
-  const tenantFeatureFlags = {}
+  const tenantFeatureFlags: Record<string, string[]> = {}
 
   env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
     const [tenantId, ...features] = tenantToFeatures.split(":")
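Note: a worked example of the parser above (illustrative, runs standalone): with TENANT_FEATURE_FLAGS set to "tenant1:feature1:feature2,tenant2:feature1",

    const raw = "tenant1:feature1:feature2,tenant2:feature1"
    const tenantFeatureFlags: Record<string, string[]> = {}
    raw.split(",").forEach(tenantToFeatures => {
      const [tenantId, ...features] = tenantToFeatures.split(":")
      tenantFeatureFlags[tenantId] = features
    })
    console.log(tenantFeatureFlags)
    // { tenant1: ["feature1", "feature2"], tenant2: ["feature1"] }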
@@ -29,13 +29,13 @@ const getFeatureFlags = () => {
 
 const TENANT_FEATURE_FLAGS = getFeatureFlags()
 
-exports.isEnabled = featureFlag => {
+export function isEnabled(featureFlag: string) {
   const tenantId = tenancy.getTenantId()
-  const flags = exports.getTenantFeatureFlags(tenantId)
+  const flags = getTenantFeatureFlags(tenantId)
   return flags.includes(featureFlag)
 }
 
-exports.getTenantFeatureFlags = tenantId => {
+export function getTenantFeatureFlags(tenantId: string) {
   const flags = []
 
   if (TENANT_FEATURE_FLAGS) {
@@ -53,8 +53,8 @@ exports.getTenantFeatureFlags = tenantId => {
   return flags
 }
 
-exports.TenantFeatureFlag = {
-  LICENSING: "LICENSING",
-  GOOGLE_SHEETS: "GOOGLE_SHEETS",
-  USER_GROUPS: "USER_GROUPS",
+export enum TenantFeatureFlag {
+  LICENSING = "LICENSING",
+  GOOGLE_SHEETS = "GOOGLE_SHEETS",
+  USER_GROUPS = "USER_GROUPS",
 }
@@ -4,6 +4,6 @@
  * @param {string} url The URL to test and remove any extra double slashes.
  * @return {string} The updated url.
  */
-exports.checkSlashesInUrl = url => {
+export function checkSlashesInUrl(url: string) {
   return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
 }
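Note: in the regex above, the alternation either captures a protocol prefix into $1 or a run of slashes into $2 (a whole run of / matches at once, with a single / captured), so repeated slashes collapse while "https://" is preserved. A quick check (illustrative):

    function checkSlashesInUrl(url: string) {
      return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
    }
    console.log(checkSlashesInUrl("http://example.com//api///global"))
    // -> "http://example.com/api/global"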
@@ -9,26 +9,24 @@ import * as accounts from "./cloud/accounts"
 import * as installation from "./installation"
 import env from "./environment"
 import tenancy from "./tenancy"
-import featureFlags from "./featureFlags"
+import * as featureFlags from "./featureFlags"
 import * as sessions from "./security/sessions"
 import * as deprovisioning from "./context/deprovision"
 import auth from "./auth"
 import * as constants from "./constants"
 import * as dbConstants from "./db/constants"
 import * as logging from "./logging"
-import pino from "./pino"
+import * as pino from "./pino"
 import * as middleware from "./middleware"
 import plugins from "./plugin"
-import encryption from "./security/encryption"
+import * as encryption from "./security/encryption"
 import * as queue from "./queue"
 import * as db from "./db"
-
-// mimic the outer package exports
-import * as objectStore from "./pkg/objectStore"
-import * as utils from "./pkg/utils"
-import redis from "./pkg/redis"
-import cache from "./pkg/cache"
-import context from "./pkg/context"
+import * as context from "./context"
+import * as cache from "./cache"
+import * as objectStore from "./objectStore"
+import * as redis from "./redis"
+import * as utils from "./utils"
 
 const init = (opts: any = {}) => {
   db.init(opts.db)
@@ -1,16 +1,16 @@
-import * as hashing from "./hashing"
+import { newid } from "./utils"
 import * as events from "./events"
-import { StaticDatabases } from "./db/constants"
+import { StaticDatabases } from "./db"
 import { doWithDB } from "./db"
 import { Installation, IdentityType } from "@budibase/types"
 import * as context from "./context"
 import semver from "semver"
-import { bustCache, withCache, TTL, CacheKeys } from "./cache/generic"
+import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
 
 const pkg = require("../package.json")
 
 export const getInstall = async (): Promise<Installation> => {
-  return withCache(CacheKeys.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
+  return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
     useTenancy: false,
   })
 }
@@ -28,7 +28,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
     if (e.status === 404) {
       install = {
         _id: StaticDatabases.PLATFORM_INFO.docs.install,
-        installId: hashing.newid(),
+        installId: newid(),
         version: pkg.version,
       }
       const resp = await platformDb.put(install)
@@ -50,7 +50,7 @@ const updateVersion = async (version: string): Promise<boolean> => {
       const install = await getInstall()
       install.version = version
       await platformDb.put(install)
-      await bustCache(CacheKeys.INSTALLATION)
+      await bustCache(CacheKey.INSTALLATION)
     }
   )
 } catch (e: any) {
@@ -1,4 +1,6 @@
-module.exports = async (ctx, next) => {
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
   if (
     !ctx.internal &&
     (!ctx.user || !ctx.user.admin || !ctx.user.admin.global)
@@ -1,4 +0,0 @@
-module.exports = async (ctx, next) => {
-  // Placeholder for audit log middleware
-  return next()
-}
@@ -0,0 +1,6 @@
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
+  // Placeholder for audit log middleware
+  return next()
+}
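Note: the export = form used here (and in the other converted middleware) compiles to a CommonJS module.exports = assignment, so existing require("./auditLog") call sites keep working unchanged. From TypeScript the matching import form is (a minimal illustration, not from the commit):

    import auditLog = require("./auditLog")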
@@ -6,10 +6,12 @@ import { buildMatcherRegex, matches } from "./matchers"
 import { SEPARATOR, queryGlobalView, ViewName } from "../db"
 import { getGlobalDB, doInTenant } from "../tenancy"
 import { decrypt } from "../security/encryption"
-const identity = require("../context/identity")
-const env = require("../environment")
+import * as identity from "../context/identity"
+import env from "../environment"
 
-const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000
+const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
+  ? parseInt(env.SESSION_UPDATE_PERIOD)
+  : 60 * 1000
 
 interface FinaliseOpts {
   authenticated?: boolean
@@ -40,13 +42,13 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
   return doInTenant(tenantId, async () => {
     const db = getGlobalDB()
     // api key is encrypted in the database
-    const userId = await queryGlobalView(
+    const userId = (await queryGlobalView(
       ViewName.BY_API_KEY,
       {
         key: apiKey,
       },
       db
-    )
+    )) as string
     if (userId) {
       return {
         valid: true,
@@ -1,4 +1,6 @@
-module.exports = async (ctx, next) => {
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
   if (
     !ctx.internal &&
     (!ctx.user || !ctx.user.builder || !ctx.user.builder.global)
@@ -1,4 +1,6 @@
-module.exports = async (ctx, next) => {
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
   if (
     !ctx.internal &&
     (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) &&
@@ -1,5 +1,6 @@
-const { Header } = require("../constants")
-const { buildMatcherRegex, matches } = require("./matchers")
+import { Header } from "../constants"
+import { buildMatcherRegex, matches } from "./matchers"
+import { BBContext } from "@budibase/types"
 
 /**
  * GET, HEAD and OPTIONS methods are considered safe operations
@@ -31,9 +32,9 @@ const INCLUDED_CONTENT_TYPES = [
  * https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern
  *
  */
-module.exports = (opts = { noCsrfPatterns: [] }) => {
+export = (opts = { noCsrfPatterns: [] }) => {
   const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
-  return async (ctx, next) => {
+  return async (ctx: BBContext, next: any) => {
     // don't apply for excluded paths
     const found = matches(ctx, noCsrfOptions)
     if (found) {
@@ -62,7 +63,7 @@ module.exports = (opts = { noCsrfPatterns: [] }) => {
 
     // apply csrf when there is a token in the session (new logins)
     // in future there should be a hard requirement that the token is present
-    const userToken = ctx.user.csrfToken
+    const userToken = ctx.user?.csrfToken
     if (!userToken) {
       return next()
     }
@@ -1,18 +1,18 @@
-const jwt = require("./passport/jwt")
-const local = require("./passport/local")
-const google = require("./passport/google")
-const oidc = require("./passport/oidc")
-const { authError, ssoCallbackUrl } = require("./passport/utils")
-const authenticated = require("./authenticated")
-const auditLog = require("./auditLog")
-const tenancy = require("./tenancy")
-const internalApi = require("./internalApi")
-const datasourceGoogle = require("./passport/datasource/google")
-const csrf = require("./csrf")
-const adminOnly = require("./adminOnly")
-const builderOrAdmin = require("./builderOrAdmin")
-const builderOnly = require("./builderOnly")
-const joiValidator = require("./joi-validator")
+import * as jwt from "./passport/jwt"
+import * as local from "./passport/local"
+import * as google from "./passport/google"
+import * as oidc from "./passport/oidc"
+import { authError, ssoCallbackUrl } from "./passport/utils"
+import authenticated from "./authenticated"
+import auditLog from "./auditLog"
+import tenancy from "./tenancy"
+import internalApi from "./internalApi"
+import * as datasourceGoogle from "./passport/datasource/google"
+import csrf from "./csrf"
+import adminOnly from "./adminOnly"
+import builderOrAdmin from "./builderOrAdmin"
+import builderOnly from "./builderOnly"
+import * as joiValidator from "./joi-validator"
 
 const pkg = {
   google,
@@ -1,10 +1,11 @@
-const env = require("../environment")
-const { Header } = require("../constants")
+import env from "../environment"
+import { Header } from "../constants"
+import { BBContext } from "@budibase/types"
 
 /**
  * API Key only endpoint.
  */
-module.exports = async (ctx, next) => {
+export = async (ctx: BBContext, next: any) => {
   const apiKey = ctx.request.headers[Header.API_KEY]
   if (apiKey !== env.INTERNAL_API_KEY) {
     ctx.throw(403, "Unauthorized")
@@ -1,16 +1,19 @@
-const Joi = require("joi")
+import Joi, { ObjectSchema } from "joi"
+import { BBContext } from "@budibase/types"
 
-function validate(schema, property) {
+function validate(schema: Joi.ObjectSchema, property: string) {
   // Return a Koa middleware function
-  return (ctx, next) => {
+  return (ctx: BBContext, next: any) => {
     if (!schema) {
       return next()
     }
     let params = null
+    // @ts-ignore
+    let reqProp = ctx.request?.[property]
     if (ctx[property] != null) {
       params = ctx[property]
-    } else if (ctx.request[property] != null) {
-      params = ctx.request[property]
+    } else if (reqProp != null) {
+      params = reqProp
     }
 
     // not all schemas have the append property e.g. array schemas
@@ -30,10 +33,10 @@ function validate(schema, property) {
   }
 }
 
-module.exports.body = schema => {
+export function body(schema: Joi.ObjectSchema) {
   return validate(schema, "body")
 }
 
-module.exports.params = schema => {
+export function params(schema: Joi.ObjectSchema) {
   return validate(schema, "params")
 }
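Note: a usage sketch for the converted validator (the route, schema and controller names are hypothetical, not from the commit):

    import Joi from "joi"
    import { body } from "./joi-validator"

    // hypothetical Koa route: validate the request body before the controller runs
    router.post(
      "/api/users",
      body(Joi.object({ email: Joi.string().email().required() })),
      saveUserController
    )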
@@ -1,11 +1,15 @@
-const google = require("../google")
+import * as google from "../google"
+import { Cookie, Config } from "../../../constants"
+import { clearCookie, getCookie } from "../../../utils"
+import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db"
+import environment from "../../../environment"
+import { getGlobalDB } from "../../../tenancy"
+import { BBContext, Database, SSOProfile } from "@budibase/types"
 const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
-const { Cookie, Config } = require("../../../constants")
-const { clearCookie, getCookie } = require("../../../utils")
-const { getScopedConfig, getPlatformUrl } = require("../../../db/utils")
-const { doWithDB } = require("../../../db")
-const environment = require("../../../environment")
-const { getGlobalDB } = require("../../../tenancy")
+
+type Passport = {
+  authenticate: any
+}
 
 async function fetchGoogleCreds() {
   // try and get the config from the tenant
@@ -22,7 +26,11 @@ async function fetchGoogleCreds() {
   )
 }
 
-async function preAuth(passport, ctx, next) {
+export async function preAuth(
+  passport: Passport,
+  ctx: BBContext,
+  next: Function
+) {
   // get the relevant config
   const googleConfig = await fetchGoogleCreds()
   const platformUrl = await getPlatformUrl({ tenantAware: false })
@@ -41,7 +49,11 @@ async function preAuth(passport, ctx, next) {
   })(ctx, next)
 }
 
-async function postAuth(passport, ctx, next) {
+export async function postAuth(
+  passport: Passport,
+  ctx: BBContext,
+  next: Function
+) {
   // get the relevant config
   const config = await fetchGoogleCreds()
   const platformUrl = await getPlatformUrl({ tenantAware: false })
@@ -56,15 +68,20 @@ async function postAuth(passport, ctx, next) {
         clientSecret: config.clientSecret,
         callbackURL: callbackUrl,
       },
-      (accessToken, refreshToken, profile, done) => {
+      (
+        accessToken: string,
+        refreshToken: string,
+        profile: SSOProfile,
+        done: Function
+      ) => {
         clearCookie(ctx, Cookie.DatasourceAuth)
         done(null, { accessToken, refreshToken })
       }
     ),
     { successRedirect: "/", failureRedirect: "/error" },
-    async (err, tokens) => {
+    async (err: any, tokens: string[]) => {
       // update the DB for the datasource with all the user info
-      await doWithDB(authStateCookie.appId, async db => {
+      await doWithDB(authStateCookie.appId, async (db: Database) => {
         const datasource = await db.get(authStateCookie.datasourceId)
         if (!datasource.config) {
           datasource.config = {}
@@ -78,6 +95,3 @@ async function postAuth(passport, ctx, next) {
     }
   )(ctx, next)
 }
-
-exports.preAuth = preAuth
-exports.postAuth = postAuth
@@ -1,10 +1,15 @@
+import { ssoCallbackUrl } from "./utils"
+import { authenticateThirdParty } from "./third-party-common"
+import { ConfigType, GoogleConfig, Database, SSOProfile } from "@budibase/types"
 const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
-const { ssoCallbackUrl } = require("./utils")
-const { authenticateThirdParty } = require("./third-party-common")
-const { Config } = require("../../../constants")
 
-const buildVerifyFn = saveUserFn => {
-  return (accessToken, refreshToken, profile, done) => {
+export function buildVerifyFn(saveUserFn?: Function) {
+  return (
+    accessToken: string,
+    refreshToken: string,
+    profile: SSOProfile,
+    done: Function
+  ) => {
     const thirdPartyUser = {
       provider: profile.provider, // should always be 'google'
       providerType: "google",
@@ -31,7 +36,11 @@ const buildVerifyFn = saveUserFn => {
  * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
  * @returns Dynamically configured Passport Google Strategy
 */
-exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
+export async function strategyFactory(
+  config: GoogleConfig["config"],
+  callbackUrl: string,
+  saveUserFn?: Function
+) {
   try {
     const { clientID, clientSecret } = config
 
@@ -50,18 +59,15 @@ exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
       },
       verify
     )
-  } catch (err) {
+  } catch (err: any) {
     console.error(err)
-    throw new Error(
-      `Error constructing google authentication strategy: ${err}`,
-      err
-    )
+    throw new Error(`Error constructing google authentication strategy: ${err}`)
   }
 }
 
-exports.getCallbackUrl = async function (db, config) {
-  return ssoCallbackUrl(db, config, Config.GOOGLE)
+export async function getCallbackUrl(
+  db: Database,
+  config: { callbackURL?: string }
+) {
+  return ssoCallbackUrl(db, config, ConfigType.GOOGLE)
 }
-
-// expose for testing
-exports.buildVerifyFn = buildVerifyFn
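Note: the two-argument new Error(message, err) removed here never attached err — before ES2022 the Error constructor accepts only a message, and a second positional argument is silently ignored. Interpolating the error into the message, as the new code does, is the compatible fix:

    // ignored pre-ES2022:           new Error("msg", err)
    // ES2022 cause option:          new Error("msg", { cause: err })
    // what the commit does instead: new Error(`msg: ${err}`)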
@@ -1,18 +0,0 @@
-const { Cookie } = require("../../constants")
-const env = require("../../environment")
-const { authError } = require("./utils")
-
-exports.options = {
-  secretOrKey: env.JWT_SECRET,
-  jwtFromRequest: function (ctx) {
-    return ctx.cookies.get(Cookie.Auth)
-  },
-}
-
-exports.authenticate = async function (jwt, done) {
-  try {
-    return done(null, jwt)
-  } catch (err) {
-    return authError(done, "JWT invalid", err)
-  }
-}
@@ -0,0 +1,19 @@
+import { Cookie } from "../../constants"
+import env from "../../environment"
+import { authError } from "./utils"
+import { BBContext } from "@budibase/types"
+
+export const options = {
+  secretOrKey: env.JWT_SECRET,
+  jwtFromRequest: function (ctx: BBContext) {
+    return ctx.cookies.get(Cookie.Auth)
+  },
+}
+
+export async function authenticate(jwt: Function, done: Function) {
+  try {
+    return done(null, jwt)
+  } catch (err) {
+    return authError(done, "JWT invalid", err)
+  }
+}
@@ -1,18 +1,18 @@
+import { UserStatus } from "../../constants"
+import { compare, newid } from "../../utils"
+import env from "../../environment"
+import * as users from "../../users"
+import { authError } from "./utils"
+import { createASession } from "../../security/sessions"
+import { getTenantId } from "../../tenancy"
+import { BBContext } from "@budibase/types"
 const jwt = require("jsonwebtoken")
-const { UserStatus } = require("../../constants")
-const { compare } = require("../../hashing")
-const env = require("../../environment")
-const users = require("../../users")
-const { authError } = require("./utils")
-const { newid } = require("../../hashing")
-const { createASession } = require("../../security/sessions")
-const { getTenantId } = require("../../tenancy")
 
 const INVALID_ERR = "Invalid credentials"
 const SSO_NO_PASSWORD = "SSO user does not have a password set"
 const EXPIRED = "This account has expired. Please reset your password"
 
-exports.options = {
+export const options = {
   passReqToCallback: true,
 }
 
@@ -24,7 +24,12 @@ exports.options = {
  * @param {*} done callback from passport to return user information and errors
  * @returns The authenticated user, or errors if they occur
 */
-exports.authenticate = async function (ctx, email, password, done) {
+export async function authenticate(
+  ctx: BBContext,
+  email: string,
+  password: string,
+  done: Function
+) {
   if (!email) return authError(done, "Email Required")
   if (!password) return authError(done, "Password Required")
 
@@ -56,9 +61,9 @@ exports.authenticate = async function (ctx, email, password, done) {
     const sessionId = newid()
     const tenantId = getTenantId()
 
-    await createASession(dbUser._id, { sessionId, tenantId })
+    await createASession(dbUser._id!, { sessionId, tenantId })
 
-    dbUser.token = jwt.sign(
+    const token = jwt.sign(
       {
         userId: dbUser._id,
         sessionId,
@@ -69,7 +74,10 @@ exports.authenticate = async function (ctx, email, password, done) {
     // Remove users password in payload
     delete dbUser.password
 
-    return done(null, dbUser)
+    return done(null, {
+      ...dbUser,
+      token,
+    })
   } else {
     return authError(done, INVALID_ERR)
   }
@@ -1,10 +1,22 @@
-const fetch = require("node-fetch")
+import fetch from "node-fetch"
+import { authenticateThirdParty } from "./third-party-common"
+import { ssoCallbackUrl } from "./utils"
+import {
+  Config,
+  ConfigType,
+  OIDCInnerCfg,
+  Database,
+  SSOProfile,
+  ThirdPartyUser,
+} from "@budibase/types"
 const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
-const { authenticateThirdParty } = require("./third-party-common")
-const { ssoCallbackUrl } = require("./utils")
-const { Config } = require("../../../constants")
 
-const buildVerifyFn = saveUserFn => {
+type JwtClaims = {
+  preferred_username: string
+  email: string
+}
+
+export function buildVerifyFn(saveUserFn?: Function) {
   /**
    * @param {*} issuer The identity provider base URL
    * @param {*} sub The user ID
@@ -17,17 +29,17 @@ const buildVerifyFn = saveUserFn => {
    * @param {*} done The passport callback: err, user, info
    */
   return async (
-    issuer,
-    sub,
-    profile,
-    jwtClaims,
-    accessToken,
-    refreshToken,
-    idToken,
-    params,
-    done
+    issuer: string,
+    sub: string,
+    profile: SSOProfile,
+    jwtClaims: JwtClaims,
+    accessToken: string,
+    refreshToken: string,
+    idToken: string,
+    params: any,
+    done: Function
   ) => {
-    const thirdPartyUser = {
+    const thirdPartyUser: ThirdPartyUser = {
      // store the issuer info to enable sync in future
      provider: issuer,
      providerType: "oidc",
@@ -53,7 +65,7 @@ const buildVerifyFn = saveUserFn => {
  * @param {*} profile The structured profile created by passport using the user info endpoint
  * @param {*} jwtClaims The claims returned in the id token
 */
-function getEmail(profile, jwtClaims) {
+function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
   // profile not guaranteed to contain email e.g. github connected azure ad account
   if (profile._json.email) {
     return profile._json.email
@@ -77,7 +89,7 @@ function getEmail(profile, jwtClaims) {
   )
 }
 
-function validEmail(value) {
+function validEmail(value: string) {
   return (
     value &&
     !!value.match(
@@ -91,19 +103,22 @@ function validEmail(value) {
  * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
  * @returns Dynamically configured Passport OIDC Strategy
 */
-exports.strategyFactory = async function (config, saveUserFn) {
+export async function strategyFactory(config: Config, saveUserFn?: Function) {
   try {
     const verify = buildVerifyFn(saveUserFn)
     const strategy = new OIDCStrategy(config, verify)
     strategy.name = "oidc"
     return strategy
-  } catch (err) {
+  } catch (err: any) {
     console.error(err)
-    throw new Error("Error constructing OIDC authentication strategy", err)
+    throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
   }
 }
 
-exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
+export async function fetchStrategyConfig(
+  enrichedConfig: OIDCInnerCfg,
+  callbackUrl?: string
+) {
   try {
     const { clientID, clientSecret, configUrl } = enrichedConfig
 
@@ -135,13 +150,15 @@ exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
     }
   } catch (err) {
     console.error(err)
-    throw new Error("Error constructing OIDC authentication configuration", err)
+    throw new Error(
+      `Error constructing OIDC authentication configuration - ${err}`
+    )
   }
 }
 
-exports.getCallbackUrl = async function (db, config) {
-  return ssoCallbackUrl(db, config, Config.OIDC)
+export async function getCallbackUrl(
+  db: Database,
+  config: { callbackURL?: string }
+) {
+  return ssoCallbackUrl(db, config, ConfigType.OIDC)
 }
-
-// expose for testing
-exports.buildVerifyFn = buildVerifyFn
@@ -4,7 +4,7 @@ const { data } = require("./utilities/mock-data")
 const { DEFAULT_TENANT_ID } = require("../../../constants")
 
 const { generateGlobalUserID } = require("../../../db/utils")
-const { newid } = require("../../../hashing")
+const { newid } = require("../../../utils")
 const { doWithGlobalDB, doInTenant } = require("../../../tenancy")
 
 const done = jest.fn()
@@ -1,21 +1,22 @@
-const env = require("../../environment")
+import env from "../../environment"
+import { generateGlobalUserID } from "../../db"
+import { authError } from "./utils"
+import { newid } from "../../utils"
+import { createASession } from "../../security/sessions"
+import * as users from "../../users"
+import { getGlobalDB, getTenantId } from "../../tenancy"
+import fetch from "node-fetch"
+import { ThirdPartyUser } from "@budibase/types"
 const jwt = require("jsonwebtoken")
-const { generateGlobalUserID } = require("../../db/utils")
-const { authError } = require("./utils")
-const { newid } = require("../../hashing")
-const { createASession } = require("../../security/sessions")
-const users = require("../../users")
-const { getGlobalDB, getTenantId } = require("../../tenancy")
-const fetch = require("node-fetch")
 
 /**
  * Common authentication logic for third parties. e.g. OAuth, OIDC.
 */
-exports.authenticateThirdParty = async function (
-  thirdPartyUser,
-  requireLocalAccount = true,
-  done,
-  saveUserFn
+export async function authenticateThirdParty(
+  thirdPartyUser: ThirdPartyUser,
+  requireLocalAccount: boolean = true,
+  done: Function,
+  saveUserFn?: Function
 ) {
   if (!saveUserFn) {
     throw new Error("Save user function must be provided")
@@ -39,7 +40,7 @@ exports.authenticateThirdParty = async function (
   // try to load by id
   try {
     dbUser = await db.get(userId)
-  } catch (err) {
+  } catch (err: any) {
     // abort when not 404 error
     if (!err.status || err.status !== 404) {
       return authError(
@@ -81,7 +82,7 @@ exports.authenticateThirdParty = async function (
   // create or sync the user
   try {
     await saveUserFn(dbUser, false, false)
-  } catch (err) {
+  } catch (err: any) {
     return authError(done, err)
   }
 
@@ -104,13 +105,16 @@ exports.authenticateThirdParty = async function (
   return done(null, dbUser)
 }
 
-async function syncProfilePicture(user, thirdPartyUser) {
-  const pictureUrl = thirdPartyUser.profile._json.picture
+async function syncProfilePicture(
+  user: ThirdPartyUser,
+  thirdPartyUser: ThirdPartyUser
+) {
+  const pictureUrl = thirdPartyUser.profile?._json.picture
   if (pictureUrl) {
     const response = await fetch(pictureUrl)
 
     if (response.status === 200) {
-      const type = response.headers.get("content-type")
+      const type = response.headers.get("content-type") as string
       if (type.startsWith("image/")) {
         user.pictureUrl = pictureUrl
       }
@@ -123,7 +127,7 @@ async function syncProfilePicture(user, thirdPartyUser) {
 /**
  * @returns a user that has been sync'd with third party information
 */
-async function syncUser(user, thirdPartyUser) {
+async function syncUser(user: ThirdPartyUser, thirdPartyUser: ThirdPartyUser) {
   // provider
   user.provider = thirdPartyUser.provider
   user.providerType = thirdPartyUser.providerType
@@ -1,6 +1,6 @@
-const { isMultiTenant, getTenantId } = require("../../tenancy")
-const { getScopedConfig } = require("../../db/utils")
-const { Config } = require("../../constants")
+import { isMultiTenant, getTenantId } from "../../tenancy"
+import { getScopedConfig } from "../../db"
+import { ConfigType, Database, Config } from "@budibase/types"
 
 /**
  * Utility to handle authentication errors.
@@ -10,7 +10,7 @@ const { Config } = require("../../constants")
  * @param {*} err (Optional) error that will be logged
 */
 
-exports.authError = function (done, message, err = null) {
+export function authError(done: Function, message: string, err?: any) {
   return done(
     err,
     null, // never return a user
@@ -18,13 +18,17 @@ exports.authError = function (done, message, err = null) {
   )
 }
 
-exports.ssoCallbackUrl = async (db, config, type) => {
+export async function ssoCallbackUrl(
+  db: Database,
+  config: { callbackURL?: string },
+  type: ConfigType
+) {
   // incase there is a callback URL from before
   if (config && config.callbackURL) {
     return config.callbackURL
   }
   const publicConfig = await getScopedConfig(db, {
-    type: Config.SETTINGS,
+    type: ConfigType.SETTINGS,
   })
 
   let callbackUrl = `/api/global/auth`
@@ -1,426 +1,2 @@
-const sanitize = require("sanitize-s3-objectkey")
-import AWS from "aws-sdk"
-import stream from "stream"
-import fetch from "node-fetch"
-import tar from "tar-fs"
-const zlib = require("zlib")
-import { promisify } from "util"
-import { join } from "path"
-import fs from "fs"
-import env from "../environment"
-import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
-import { v4 } from "uuid"
-import { APP_PREFIX, APP_DEV_PREFIX } from "../db/utils"
-
-const streamPipeline = promisify(stream.pipeline)
-// use this as a temporary store of buckets that are being created
-const STATE = {
-  bucketCreationPromises: {},
-}
-
-type ListParams = {
-  ContinuationToken?: string
-}
-
-type UploadParams = {
-  bucket: string
-  filename: string
-  path: string
-  type?: string
-  // can be undefined, we will remove it
-  metadata?: {
-    [key: string]: string | undefined
-  }
-}
-
-const CONTENT_TYPE_MAP: any = {
-  txt: "text/plain",
-  html: "text/html",
-  css: "text/css",
-  js: "application/javascript",
-  json: "application/json",
-  gz: "application/gzip",
-}
-const STRING_CONTENT_TYPES = [
-  CONTENT_TYPE_MAP.html,
-  CONTENT_TYPE_MAP.css,
-  CONTENT_TYPE_MAP.js,
-  CONTENT_TYPE_MAP.json,
-]
-
-// does normal sanitization and then swaps dev apps to apps
-export function sanitizeKey(input: string) {
-  return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
-}
-
-// simply handles the dev app to app conversion
-export function sanitizeBucket(input: string) {
-  return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
-}
-
-function publicPolicy(bucketName: string) {
-  return {
-    Version: "2012-10-17",
-    Statement: [
-      {
-        Effect: "Allow",
-        Principal: {
-          AWS: ["*"],
-        },
-        Action: "s3:GetObject",
-        Resource: [`arn:aws:s3:::${bucketName}/*`],
-      },
-    ],
-  }
-}
-
-const PUBLIC_BUCKETS = [
-  ObjectStoreBuckets.APPS,
-  ObjectStoreBuckets.GLOBAL,
-  ObjectStoreBuckets.PLUGINS,
-]
-
-/**
- * Gets a connection to the object store using the S3 SDK.
- * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
- * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
- * @constructor
- */
-export const ObjectStore = (bucket: string) => {
-  const config: any = {
-    s3ForcePathStyle: true,
-    signatureVersion: "v4",
-    apiVersion: "2006-03-01",
-    accessKeyId: env.MINIO_ACCESS_KEY,
-    secretAccessKey: env.MINIO_SECRET_KEY,
-    region: env.AWS_REGION,
-  }
-  if (bucket) {
-    config.params = {
-      Bucket: sanitizeBucket(bucket),
-    }
-  }
-  if (env.MINIO_URL) {
-    config.endpoint = env.MINIO_URL
-  }
-  return new AWS.S3(config)
-}
-
-/**
- * Given an object store and a bucket name this will make sure the bucket exists,
- * if it does not exist then it will create it.
- */
-export const makeSureBucketExists = async (client: any, bucketName: string) => {
-  bucketName = sanitizeBucket(bucketName)
-  try {
-    await client
-      .headBucket({
-        Bucket: bucketName,
-      })
-      .promise()
-  } catch (err: any) {
-    const promises: any = STATE.bucketCreationPromises
-    const doesntExist = err.statusCode === 404,
-      noAccess = err.statusCode === 403
-    if (promises[bucketName]) {
-      await promises[bucketName]
-    } else if (doesntExist || noAccess) {
-      if (doesntExist) {
-        // bucket doesn't exist create it
-        promises[bucketName] = client
-          .createBucket({
-            Bucket: bucketName,
-          })
-          .promise()
-        await promises[bucketName]
-        delete promises[bucketName]
-      }
-      // public buckets are quite hidden in the system, make sure
-      // no bucket is set accidentally
-      if (PUBLIC_BUCKETS.includes(bucketName)) {
-        await client
-          .putBucketPolicy({
-            Bucket: bucketName,
-            Policy: JSON.stringify(publicPolicy(bucketName)),
-          })
-          .promise()
-      }
-    } else {
-      throw new Error("Unable to write to object store bucket.")
-    }
-  }
-}
-
-/**
- * Uploads the contents of a file given the required parameters, useful when
- * temp files in use (for example file uploaded as an attachment).
- */
-export const upload = async ({
-  bucket: bucketName,
-  filename,
-  path,
-  type,
-  metadata,
-}: UploadParams) => {
-  const extension = filename.split(".").pop()
-  const fileBytes = fs.readFileSync(path)
-
-  const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
-
-  let contentType = type
-  if (!contentType) {
-    contentType = extension
-      ? CONTENT_TYPE_MAP[extension.toLowerCase()]
-      : CONTENT_TYPE_MAP.txt
-  }
-  const config: any = {
-    // windows file paths need to be converted to forward slashes for s3
-    Key: sanitizeKey(filename),
-    Body: fileBytes,
-    ContentType: contentType,
-  }
-  if (metadata && typeof metadata === "object") {
-    // remove any nullish keys from the metadata object, as these may be considered invalid
-    for (let key of Object.keys(metadata)) {
-      if (!metadata[key] || typeof metadata[key] !== "string") {
-        delete metadata[key]
-      }
-    }
-    config.Metadata = metadata
-  }
-  return objectStore.upload(config).promise()
-}
-
-/**
- * Similar to the upload function but can be used to send a file stream
- * through to the object store.
- */
-export const streamUpload = async (
-  bucketName: string,
-  filename: string,
-  stream: any,
-  extra = {}
-) => {
-  const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
-
-  // Set content type for certain known extensions
-  if (filename?.endsWith(".js")) {
-    extra = {
-      ...extra,
-      ContentType: "application/javascript",
-    }
-  } else if (filename?.endsWith(".svg")) {
-    extra = {
-      ...extra,
-      ContentType: "image",
-    }
-  }
-
-  const params = {
-    Bucket: sanitizeBucket(bucketName),
-    Key: sanitizeKey(filename),
-    Body: stream,
-    ...extra,
-  }
-  return objectStore.upload(params).promise()
-}
-
-/**
- * retrieves the contents of a file from the object store, if it is a known content type it
- * will be converted, otherwise it will be returned as a buffer stream.
- */
-export const retrieve = async (bucketName: string, filepath: string) => {
-  const objectStore = ObjectStore(bucketName)
-  const params = {
-    Bucket: sanitizeBucket(bucketName),
-    Key: sanitizeKey(filepath),
-  }
-  const response: any = await objectStore.getObject(params).promise()
-  // currently these are all strings
-  if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
-    return response.Body.toString("utf8")
-  } else {
-    return response.Body
-  }
-}
-
-export const listAllObjects = async (bucketName: string, path: string) => {
-  const objectStore = ObjectStore(bucketName)
-  const list = (params: ListParams = {}) => {
-    return objectStore
-      .listObjectsV2({
-        ...params,
-        Bucket: sanitizeBucket(bucketName),
-        Prefix: sanitizeKey(path),
-      })
-      .promise()
-  }
-  let isTruncated = false,
-    token,
-    objects: AWS.S3.Types.Object[] = []
-  do {
-    let params: ListParams = {}
-    if (token) {
-      params.ContinuationToken = token
-    }
-    const response = await list(params)
-    if (response.Contents) {
-      objects = objects.concat(response.Contents)
-    }
-    isTruncated = !!response.IsTruncated
-  } while (isTruncated)
-  return objects
-}
-
-/**
- * Same as retrieval function but puts to a temporary file.
- */
-export const retrieveToTmp = async (bucketName: string, filepath: string) => {
-  bucketName = sanitizeBucket(bucketName)
-  filepath = sanitizeKey(filepath)
-  const data = await retrieve(bucketName, filepath)
|
||||
const outputPath = join(budibaseTempDir(), v4())
|
||||
fs.writeFileSync(outputPath, data)
|
||||
return outputPath
|
||||
}
|
||||
|
||||
export const retrieveDirectory = async (bucketName: string, path: string) => {
|
||||
let writePath = join(budibaseTempDir(), v4())
|
||||
fs.mkdirSync(writePath)
|
||||
const objects = await listAllObjects(bucketName, path)
|
||||
let fullObjects = await Promise.all(
|
||||
objects.map(obj => retrieve(bucketName, obj.Key!))
|
||||
)
|
||||
let count = 0
|
||||
for (let obj of objects) {
|
||||
const filename = obj.Key!
|
||||
const data = fullObjects[count++]
|
||||
const possiblePath = filename.split("/")
|
||||
if (possiblePath.length > 1) {
|
||||
const dirs = possiblePath.slice(0, possiblePath.length - 1)
|
||||
fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
|
||||
}
|
||||
fs.writeFileSync(join(writePath, ...possiblePath), data)
|
||||
}
|
||||
return writePath
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a single file.
|
||||
*/
|
||||
export const deleteFile = async (bucketName: string, filepath: string) => {
|
||||
const objectStore = ObjectStore(bucketName)
|
||||
await makeSureBucketExists(objectStore, bucketName)
|
||||
const params = {
|
||||
Bucket: bucketName,
|
||||
Key: filepath,
|
||||
}
|
||||
return objectStore.deleteObject(params)
|
||||
}
|
||||
|
||||
export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
|
||||
const objectStore = ObjectStore(bucketName)
|
||||
await makeSureBucketExists(objectStore, bucketName)
|
||||
const params = {
|
||||
Bucket: bucketName,
|
||||
Delete: {
|
||||
Objects: filepaths.map((path: any) => ({ Key: path })),
|
||||
},
|
||||
}
|
||||
return objectStore.deleteObjects(params).promise()
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a path, including everything within.
|
||||
*/
|
||||
export const deleteFolder = async (
|
||||
bucketName: string,
|
||||
folder: string
|
||||
): Promise<any> => {
|
||||
bucketName = sanitizeBucket(bucketName)
|
||||
folder = sanitizeKey(folder)
|
||||
const client = ObjectStore(bucketName)
|
||||
const listParams = {
|
||||
Bucket: bucketName,
|
||||
Prefix: folder,
|
||||
}
|
||||
|
||||
let response: any = await client.listObjects(listParams).promise()
|
||||
if (response.Contents.length === 0) {
|
||||
return
|
||||
}
|
||||
const deleteParams: any = {
|
||||
Bucket: bucketName,
|
||||
Delete: {
|
||||
Objects: [],
|
||||
},
|
||||
}
|
||||
|
||||
response.Contents.forEach((content: any) => {
|
||||
deleteParams.Delete.Objects.push({ Key: content.Key })
|
||||
})
|
||||
|
||||
response = await client.deleteObjects(deleteParams).promise()
|
||||
// can only empty 1000 items at once
|
||||
if (response.Deleted.length === 1000) {
|
||||
return deleteFolder(bucketName, folder)
|
||||
}
|
||||
}
|
||||
|
||||
export const uploadDirectory = async (
|
||||
bucketName: string,
|
||||
localPath: string,
|
||||
bucketPath: string
|
||||
) => {
|
||||
bucketName = sanitizeBucket(bucketName)
|
||||
let uploads = []
|
||||
const files = fs.readdirSync(localPath, { withFileTypes: true })
|
||||
for (let file of files) {
|
||||
const path = sanitizeKey(join(bucketPath, file.name))
|
||||
const local = join(localPath, file.name)
|
||||
if (file.isDirectory()) {
|
||||
uploads.push(uploadDirectory(bucketName, local, path))
|
||||
} else {
|
||||
uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
|
||||
}
|
||||
}
|
||||
await Promise.all(uploads)
|
||||
return files
|
||||
}
|
||||
|
||||
export const downloadTarballDirect = async (
|
||||
url: string,
|
||||
path: string,
|
||||
headers = {}
|
||||
) => {
|
||||
path = sanitizeKey(path)
|
||||
const response = await fetch(url, { headers })
|
||||
if (!response.ok) {
|
||||
throw new Error(`unexpected response ${response.statusText}`)
|
||||
}
|
||||
|
||||
await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
|
||||
}
|
||||
|
||||
export const downloadTarball = async (
|
||||
url: string,
|
||||
bucketName: string,
|
||||
path: string
|
||||
) => {
|
||||
bucketName = sanitizeBucket(bucketName)
|
||||
path = sanitizeKey(path)
|
||||
const response = await fetch(url)
|
||||
if (!response.ok) {
|
||||
throw new Error(`unexpected response ${response.statusText}`)
|
||||
}
|
||||
|
||||
const tmpPath = join(budibaseTempDir(), path)
|
||||
await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
|
||||
if (!env.isTest() && env.SELF_HOSTED) {
|
||||
await uploadDirectory(bucketName, tmpPath, path)
|
||||
}
|
||||
// return the temporary path incase there is a use for it
|
||||
return tmpPath
|
||||
}
|
||||
export * from "./objectStore"
|
||||
export * from "./utils"
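
For orientation, a minimal usage sketch of the converted object store API (the bucket choice, file names, and import path are illustrative, not part of this commit):

import { ObjectStoreBuckets, upload, retrieve } from "@budibase/backend-core/objectStore"

async function storeAttachment() {
  // upload a temp file as an attachment, then read it back
  await upload({
    bucket: ObjectStoreBuckets.APPS,
    filename: "app_123/attachments/logo.png",
    path: "/tmp/logo.png",
    type: "image/png",
  })
  return retrieve(ObjectStoreBuckets.APPS, "app_123/attachments/logo.png")
}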
@ -1,14 +1,15 @@
const { join } = require("path")
const { tmpdir } = require("os")
const fs = require("fs")
const env = require("../environment")
import { join } from "path"
import { tmpdir } from "os"
import fs from "fs"
import env from "../environment"

/****************************************************
 * NOTE: When adding a new bucket - make sure that  *
 * S3 usages (like budibase-infra) have been        *
 * updated to have a unique bucket name.            *
 ****************************************************/
exports.ObjectStoreBuckets = {
// can't be an enum - only numbers can be used for computed types
export const ObjectStoreBuckets = {
  BACKUPS: env.BACKUPS_BUCKET_NAME,
  APPS: env.APPS_BUCKET_NAME,
  TEMPLATES: env.TEMPLATES_BUCKET_NAME,

@ -22,6 +23,6 @@ if (!fs.existsSync(bbTmp)) {
  fs.mkdirSync(bbTmp)
}

exports.budibaseTempDir = function () {
export function budibaseTempDir() {
  return bbTmp
}
@ -1,11 +0,0 @@
const env = require("./environment")

exports.pinoSettings = () => ({
  prettyPrint: {
    levelFirst: true,
  },
  level: env.LOG_LEVEL || "error",
  autoLogging: {
    ignore: req => req.url.includes("/health"),
  },
})

@ -0,0 +1,13 @@
import env from "./environment"

export function pinoSettings() {
  return {
    prettyPrint: {
      levelFirst: true,
    },
    level: env.LOG_LEVEL || "error",
    autoLogging: {
      ignore: (req: { url: string }) => req.url.includes("/health"),
    },
  }
}
@ -1,13 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
import * as generic from "../cache/generic"
import * as user from "../cache/user"
import * as app from "../cache/appMetadata"
import * as writethrough from "../cache/writethrough"

export = {
  app,
  user,
  writethrough,
  ...generic,
}

@ -1,26 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
import {
  getAppDB,
  getDevAppDB,
  getProdAppDB,
  getAppId,
  updateAppId,
  doInAppContext,
  doInTenant,
  doInContext,
} from "../context"

import * as identity from "../context/identity"

export = {
  getAppDB,
  getDevAppDB,
  getProdAppDB,
  getAppId,
  updateAppId,
  doInAppContext,
  doInTenant,
  doInContext,
  identity,
}

@ -1,4 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
export * from "../objectStore"
export * from "../objectStore/utils"

@ -1,13 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
import Client from "../redis"
import utils from "../redis/utils"
import clients from "../redis/init"
import * as redlock from "../redis/redlock"

export = {
  Client,
  utils,
  clients,
  redlock,
}

@ -1,4 +0,0 @@
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
export * from "../utils"
export * from "../hashing"
@ -1,9 +1,5 @@
const {
  DatasourceFieldType,
  QueryType,
  PluginType,
} = require("@budibase/types")
const joi = require("joi")
import { DatasourceFieldType, QueryType, PluginType } from "@budibase/types"
import joi from "joi"

const DATASOURCE_TYPES = [
  "Relational",

@ -14,14 +10,14 @@ const DATASOURCE_TYPES = [
  "API",
]

function runJoi(validator, schema) {
function runJoi(validator: joi.Schema, schema: any) {
  const { error } = validator.validate(schema)
  if (error) {
    throw error
  }
}

function validateComponent(schema) {
function validateComponent(schema: any) {
  const validator = joi.object({
    type: joi.string().allow("component").required(),
    metadata: joi.object().unknown(true).required(),

@ -37,7 +33,7 @@ function validateComponent(schema) {
  runJoi(validator, schema)
}

function validateDatasource(schema) {
function validateDatasource(schema: any) {
  const fieldValidator = joi.object({
    type: joi
      .string()

@ -86,7 +82,7 @@ function validateDatasource(schema) {
  runJoi(validator, schema)
}

exports.validate = schema => {
export function validate(schema: any) {
  switch (schema?.type) {
    case PluginType.COMPONENT:
      validateComponent(schema)

@ -1,5 +1,5 @@
import events from "events"
import { timeout } from "../../utils"
import { timeout } from "../utils"

/**
 * Bull works with a Job wrapper around all messages that contains a lot more information about
@ -1,278 +1,6 @@
import RedisWrapper from "../redis"
const env = require("../environment")
// ioredis mock is all in memory
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
const {
  addDbPrefix,
  removeDbPrefix,
  getRedisOptions,
  SEPARATOR,
  SelectableDatabases,
} = require("./utils")

const RETRY_PERIOD_MS = 2000
const STARTUP_TIMEOUT_MS = 5000
const CLUSTERED = false
const DEFAULT_SELECT_DB = SelectableDatabases.DEFAULT

// for testing just generate the client once
let CLOSED = false
let CLIENTS: { [key: number]: any } = {}
// if in test always connected
let CONNECTED = env.isTest()

function pickClient(selectDb: number): any {
  return CLIENTS[selectDb]
}

function connectionError(
  selectDb: number,
  timeout: NodeJS.Timeout,
  err: Error | string
) {
  // manually shut down, ignore errors
  if (CLOSED) {
    return
  }
  pickClient(selectDb).disconnect()
  CLOSED = true
  // always clear this on error
  clearTimeout(timeout)
  CONNECTED = false
  console.error("Redis connection failed - " + err)
  setTimeout(() => {
    init()
  }, RETRY_PERIOD_MS)
}

/**
 * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise
 * will return the ioredis client which will be ready to use.
 */
function init(selectDb = DEFAULT_SELECT_DB) {
  let timeout: NodeJS.Timeout
  CLOSED = false
  let client = pickClient(selectDb)
  // already connected, ignore
  if (client && CONNECTED) {
    return
  }
  // testing uses a single in memory client
  if (env.isTest()) {
    CLIENTS[selectDb] = new Redis(getRedisOptions())
  }
  // start the timer - only allowed 5 seconds to connect
  timeout = setTimeout(() => {
    if (!CONNECTED) {
      connectionError(
        selectDb,
        timeout,
        "Did not successfully connect in timeout"
      )
    }
  }, STARTUP_TIMEOUT_MS)

  // disconnect any lingering client
  if (client) {
    client.disconnect()
  }
  const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)

  if (CLUSTERED) {
    client = new Redis.Cluster([{ host, port }], opts)
  } else if (redisProtocolUrl) {
    client = new Redis(redisProtocolUrl)
  } else {
    client = new Redis(opts)
  }
  // attach handlers
  client.on("end", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("error", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("connect", () => {
    clearTimeout(timeout)
    CONNECTED = true
  })
  CLIENTS[selectDb] = client
}

function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
  return new Promise(resolve => {
    if (pickClient(selectDb) == null) {
      init()
    } else if (CONNECTED) {
      resolve("")
      return
    }
    // check if the connection is ready
    const interval = setInterval(() => {
      if (CONNECTED) {
        clearInterval(interval)
        resolve("")
      }
    }, 500)
  })
}

/**
 * Utility function, takes a redis stream and converts it to a promisified response -
 * this can only be done with redis streams because they will have an end.
 * @param stream A redis stream, specifically as this type of stream will have an end.
 * @param client The client to use for further lookups.
 * @return {Promise<object>} The final output of the stream
 */
function promisifyStream(stream: any, client: RedisWrapper) {
  return new Promise((resolve, reject) => {
    const outputKeys = new Set()
    stream.on("data", (keys: string[]) => {
      keys.forEach(key => {
        outputKeys.add(key)
      })
    })
    stream.on("error", (err: Error) => {
      reject(err)
    })
    stream.on("end", async () => {
      const keysArray: string[] = Array.from(outputKeys) as string[]
      try {
        let getPromises = []
        for (let key of keysArray) {
          getPromises.push(client.get(key))
        }
        const jsonArray = await Promise.all(getPromises)
        resolve(
          keysArray.map(key => ({
            key: removeDbPrefix(key),
            value: JSON.parse(jsonArray.shift()),
          }))
        )
      } catch (err) {
        reject(err)
      }
    })
  })
}

export = class RedisWrapper {
  _db: string
  _select: number

  constructor(db: string, selectDb: number | null = null) {
    this._db = db
    this._select = selectDb || DEFAULT_SELECT_DB
  }

  getClient() {
    return pickClient(this._select)
  }

  async init() {
    CLOSED = false
    init(this._select)
    await waitForConnection(this._select)
    return this
  }

  async finish() {
    CLOSED = true
    this.getClient().disconnect()
  }

  async scan(key = ""): Promise<any> {
    const db = this._db
    key = `${db}${SEPARATOR}${key}`
    let stream
    if (CLUSTERED) {
      let node = this.getClient().nodes("master")
      stream = node[0].scanStream({ match: key + "*", count: 100 })
    } else {
      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
    }
    return promisifyStream(stream, this.getClient())
  }

  async keys(pattern: string) {
    const db = this._db
    return this.getClient().keys(addDbPrefix(db, pattern))
  }

  async get(key: string) {
    const db = this._db
    let response = await this.getClient().get(addDbPrefix(db, key))
    // overwrite the prefixed key
    if (response != null && response.key) {
      response.key = key
    }
    // if it's not an object just return the response
    try {
      return JSON.parse(response)
    } catch (err) {
      return response
    }
  }

  async bulkGet(keys: string[]) {
    const db = this._db
    if (keys.length === 0) {
      return {}
    }
    const prefixedKeys = keys.map(key => addDbPrefix(db, key))
    let response = await this.getClient().mget(prefixedKeys)
    if (Array.isArray(response)) {
      let final: any = {}
      let count = 0
      for (let result of response) {
        if (result) {
          let parsed
          try {
            parsed = JSON.parse(result)
          } catch (err) {
            parsed = result
          }
          final[keys[count]] = parsed
        }
        count++
      }
      return final
    } else {
      throw new Error(`Invalid response: ${response}`)
    }
  }

  async store(key: string, value: any, expirySeconds: number | null = null) {
    const db = this._db
    if (typeof value === "object") {
      value = JSON.stringify(value)
    }
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().set(prefixedKey, value)
    if (expirySeconds) {
      await this.getClient().expire(prefixedKey, expirySeconds)
    }
  }

  async getTTL(key: string) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    return this.getClient().ttl(prefixedKey)
  }

  async setExpiry(key: string, expirySeconds: number | null) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().expire(prefixedKey, expirySeconds)
  }

  async delete(key: string) {
    const db = this._db
    await this.getClient().del(addDbPrefix(db, key))
  }

  async clear() {
    let items = await this.scan()
    await Promise.all(items.map((obj: any) => this.delete(obj.key)))
  }
}
// Mimic the outer package export for usage in index.ts
// The outer exports can't be used as they now reference dist directly
export { default as Client } from "./redis"
export * as utils from "./utils"
export * as clients from "./init"
export * as redlock from "./redlock"
@ -1,69 +0,0 @@
const Client = require("./index")
const utils = require("./utils")

let userClient,
  sessionClient,
  appClient,
  cacheClient,
  writethroughClient,
  lockClient

async function init() {
  userClient = await new Client(utils.Databases.USER_CACHE).init()
  sessionClient = await new Client(utils.Databases.SESSIONS).init()
  appClient = await new Client(utils.Databases.APP_METADATA).init()
  cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
  lockClient = await new Client(utils.Databases.LOCKS).init()
  writethroughClient = await new Client(
    utils.Databases.WRITE_THROUGH,
    utils.SelectableDatabases.WRITE_THROUGH
  ).init()
}

process.on("exit", async () => {
  if (userClient) await userClient.finish()
  if (sessionClient) await sessionClient.finish()
  if (appClient) await appClient.finish()
  if (cacheClient) await cacheClient.finish()
  if (writethroughClient) await writethroughClient.finish()
  if (lockClient) await lockClient.finish()
})

module.exports = {
  getUserClient: async () => {
    if (!userClient) {
      await init()
    }
    return userClient
  },
  getSessionClient: async () => {
    if (!sessionClient) {
      await init()
    }
    return sessionClient
  },
  getAppClient: async () => {
    if (!appClient) {
      await init()
    }
    return appClient
  },
  getCacheClient: async () => {
    if (!cacheClient) {
      await init()
    }
    return cacheClient
  },
  getWritethroughClient: async () => {
    if (!writethroughClient) {
      await init()
    }
    return writethroughClient
  },
  getLockClient: async () => {
    if (!lockClient) {
      await init()
    }
    return lockClient
  },
}
@ -0,0 +1,72 @@
import Client from "./redis"
import * as utils from "./utils"

let userClient: Client,
  sessionClient: Client,
  appClient: Client,
  cacheClient: Client,
  writethroughClient: Client,
  lockClient: Client

async function init() {
  userClient = await new Client(utils.Databases.USER_CACHE).init()
  sessionClient = await new Client(utils.Databases.SESSIONS).init()
  appClient = await new Client(utils.Databases.APP_METADATA).init()
  cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
  lockClient = await new Client(utils.Databases.LOCKS).init()
  writethroughClient = await new Client(
    utils.Databases.WRITE_THROUGH,
    utils.SelectableDatabase.WRITE_THROUGH
  ).init()
}

process.on("exit", async () => {
  if (userClient) await userClient.finish()
  if (sessionClient) await sessionClient.finish()
  if (appClient) await appClient.finish()
  if (cacheClient) await cacheClient.finish()
  if (writethroughClient) await writethroughClient.finish()
  if (lockClient) await lockClient.finish()
})

export async function getUserClient() {
  if (!userClient) {
    await init()
  }
  return userClient
}

export async function getSessionClient() {
  if (!sessionClient) {
    await init()
  }
  return sessionClient
}

export async function getAppClient() {
  if (!appClient) {
    await init()
  }
  return appClient
}

export async function getCacheClient() {
  if (!cacheClient) {
    await init()
  }
  return cacheClient
}

export async function getWritethroughClient() {
  if (!writethroughClient) {
    await init()
  }
  return writethroughClient
}

export async function getLockClient() {
  if (!lockClient) {
    await init()
  }
  return lockClient
}
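
These accessors lazily initialise every client on first use, so callers never construct one directly. A short sketch of a caller (key, value, and relative import path are illustrative):

import { getCacheClient } from "../redis/init"

async function cacheExample() {
  const client = await getCacheClient()
  // stored under the "data_cache" keyspace with a one hour expiry
  await client.store("some-key", { cached: true }, 3600)
  return client.get("some-key")
}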

@ -0,0 +1,279 @@
import env from "../environment"
// ioredis mock is all in memory
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
import {
  addDbPrefix,
  removeDbPrefix,
  getRedisOptions,
  SEPARATOR,
  SelectableDatabase,
} from "./utils"

const RETRY_PERIOD_MS = 2000
const STARTUP_TIMEOUT_MS = 5000
const CLUSTERED = false
const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT

// for testing just generate the client once
let CLOSED = false
let CLIENTS: { [key: number]: any } = {}
// if in test always connected
let CONNECTED = env.isTest()

function pickClient(selectDb: number): any {
  return CLIENTS[selectDb]
}

function connectionError(
  selectDb: number,
  timeout: NodeJS.Timeout,
  err: Error | string
) {
  // manually shut down, ignore errors
  if (CLOSED) {
    return
  }
  pickClient(selectDb).disconnect()
  CLOSED = true
  // always clear this on error
  clearTimeout(timeout)
  CONNECTED = false
  console.error("Redis connection failed - " + err)
  setTimeout(() => {
    init()
  }, RETRY_PERIOD_MS)
}

/**
 * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise
 * will return the ioredis client which will be ready to use.
 */
function init(selectDb = DEFAULT_SELECT_DB) {
  let timeout: NodeJS.Timeout
  CLOSED = false
  let client = pickClient(selectDb)
  // already connected, ignore
  if (client && CONNECTED) {
    return
  }
  // testing uses a single in memory client
  if (env.isTest()) {
    CLIENTS[selectDb] = new Redis(getRedisOptions())
  }
  // start the timer - only allowed 5 seconds to connect
  timeout = setTimeout(() => {
    if (!CONNECTED) {
      connectionError(
        selectDb,
        timeout,
        "Did not successfully connect in timeout"
      )
    }
  }, STARTUP_TIMEOUT_MS)

  // disconnect any lingering client
  if (client) {
    client.disconnect()
  }
  const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)

  if (CLUSTERED) {
    client = new Redis.Cluster([{ host, port }], opts)
  } else if (redisProtocolUrl) {
    client = new Redis(redisProtocolUrl)
  } else {
    client = new Redis(opts)
  }
  // attach handlers
  client.on("end", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("error", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("connect", () => {
    clearTimeout(timeout)
    CONNECTED = true
  })
  CLIENTS[selectDb] = client
}

function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
  return new Promise(resolve => {
    if (pickClient(selectDb) == null) {
      init()
    } else if (CONNECTED) {
      resolve("")
      return
    }
    // check if the connection is ready
    const interval = setInterval(() => {
      if (CONNECTED) {
        clearInterval(interval)
        resolve("")
      }
    }, 500)
  })
}

/**
 * Utility function, takes a redis stream and converts it to a promisified response -
 * this can only be done with redis streams because they will have an end.
 * @param stream A redis stream, specifically as this type of stream will have an end.
 * @param client The client to use for further lookups.
 * @return {Promise<object>} The final output of the stream
 */
function promisifyStream(stream: any, client: RedisWrapper) {
  return new Promise((resolve, reject) => {
    const outputKeys = new Set()
    stream.on("data", (keys: string[]) => {
      keys.forEach(key => {
        outputKeys.add(key)
      })
    })
    stream.on("error", (err: Error) => {
      reject(err)
    })
    stream.on("end", async () => {
      const keysArray: string[] = Array.from(outputKeys) as string[]
      try {
        let getPromises = []
        for (let key of keysArray) {
          getPromises.push(client.get(key))
        }
        const jsonArray = await Promise.all(getPromises)
        resolve(
          keysArray.map(key => ({
            key: removeDbPrefix(key),
            value: JSON.parse(jsonArray.shift()),
          }))
        )
      } catch (err) {
        reject(err)
      }
    })
  })
}

class RedisWrapper {
  _db: string
  _select: number

  constructor(db: string, selectDb: number | null = null) {
    this._db = db
    this._select = selectDb || DEFAULT_SELECT_DB
  }

  getClient() {
    return pickClient(this._select)
  }

  async init() {
    CLOSED = false
    init(this._select)
    await waitForConnection(this._select)
    return this
  }

  async finish() {
    CLOSED = true
    this.getClient().disconnect()
  }

  async scan(key = ""): Promise<any> {
    const db = this._db
    key = `${db}${SEPARATOR}${key}`
    let stream
    if (CLUSTERED) {
      let node = this.getClient().nodes("master")
      stream = node[0].scanStream({ match: key + "*", count: 100 })
    } else {
      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
    }
    return promisifyStream(stream, this.getClient())
  }

  async keys(pattern: string) {
    const db = this._db
    return this.getClient().keys(addDbPrefix(db, pattern))
  }

  async get(key: string) {
    const db = this._db
    let response = await this.getClient().get(addDbPrefix(db, key))
    // overwrite the prefixed key
    if (response != null && response.key) {
      response.key = key
    }
    // if it's not an object just return the response
    try {
      return JSON.parse(response)
    } catch (err) {
      return response
    }
  }

  async bulkGet(keys: string[]) {
    const db = this._db
    if (keys.length === 0) {
      return {}
    }
    const prefixedKeys = keys.map(key => addDbPrefix(db, key))
    let response = await this.getClient().mget(prefixedKeys)
    if (Array.isArray(response)) {
      let final: any = {}
      let count = 0
      for (let result of response) {
        if (result) {
          let parsed
          try {
            parsed = JSON.parse(result)
          } catch (err) {
            parsed = result
          }
          final[keys[count]] = parsed
        }
        count++
      }
      return final
    } else {
      throw new Error(`Invalid response: ${response}`)
    }
  }

  async store(key: string, value: any, expirySeconds: number | null = null) {
    const db = this._db
    if (typeof value === "object") {
      value = JSON.stringify(value)
    }
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().set(prefixedKey, value)
    if (expirySeconds) {
      await this.getClient().expire(prefixedKey, expirySeconds)
    }
  }

  async getTTL(key: string) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    return this.getClient().ttl(prefixedKey)
  }

  async setExpiry(key: string, expirySeconds: number | null) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().expire(prefixedKey, expirySeconds)
  }

  async delete(key: string) {
    const db = this._db
    await this.getClient().del(addDbPrefix(db, key))
  }

  async clear() {
    let items = await this.scan()
    await Promise.all(items.map((obj: any) => this.delete(obj.key)))
  }
}

export = RedisWrapper
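
A brief sketch of how the wrapper is consumed (database choice and key are illustrative; init() blocks until the shared connection is ready):

import Client from "./redis"
import * as utils from "./utils"

async function sessionExample() {
  const client = await new Client(utils.Databases.SESSIONS).init()
  // stored under the "session" keyspace, JSON stringified, with a one hour TTL
  await client.store("user123", { userId: "user123" }, 3600)
  const session = await client.get("user123")
  await client.finish()
  return session
}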

@ -1,10 +1,10 @@
const env = require("../environment")
import env from "../environment"

const SLOT_REFRESH_MS = 2000
const CONNECT_TIMEOUT_MS = 10000
const SEPARATOR = "-"
const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL
const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
export const SEPARATOR = "-"

/**
 * These Redis databases help us to segment up a Redis keyspace by prepending the

@ -12,23 +12,23 @@ const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
 * can be split up a bit; allowing us to use scans on small databases to find some particular
 * keys within.
 * If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out
 * of the default keyspace and use a separate one - the SelectableDatabases can be used for this.
 * of the default keyspace and use a separate one - the SelectableDatabase can be used for this.
 */
exports.Databases = {
  PW_RESETS: "pwReset",
  VERIFICATIONS: "verification",
  INVITATIONS: "invitation",
  DEV_LOCKS: "devLocks",
  DEBOUNCE: "debounce",
  SESSIONS: "session",
  USER_CACHE: "users",
  FLAGS: "flags",
  APP_METADATA: "appMetadata",
  QUERY_VARS: "queryVars",
  LICENSES: "license",
  GENERIC_CACHE: "data_cache",
  WRITE_THROUGH: "writeThrough",
  LOCKS: "locks",
export enum Databases {
  PW_RESETS = "pwReset",
  VERIFICATIONS = "verification",
  INVITATIONS = "invitation",
  DEV_LOCKS = "devLocks",
  DEBOUNCE = "debounce",
  SESSIONS = "session",
  USER_CACHE = "users",
  FLAGS = "flags",
  APP_METADATA = "appMetadata",
  QUERY_VARS = "queryVars",
  LICENSES = "license",
  GENERIC_CACHE = "data_cache",
  WRITE_THROUGH = "writeThrough",
  LOCKS = "locks",
}

/**

@ -40,30 +40,28 @@ exports.Databases = {
 * but if you need to walk through all values in a database periodically then a separate selectable
 * keyspace should be used.
 */
exports.SelectableDatabases = {
  DEFAULT: 0,
  WRITE_THROUGH: 1,
  UNUSED_1: 2,
  UNUSED_2: 3,
  UNUSED_3: 4,
  UNUSED_4: 5,
  UNUSED_5: 6,
  UNUSED_6: 7,
  UNUSED_7: 8,
  UNUSED_8: 9,
  UNUSED_9: 10,
  UNUSED_10: 11,
  UNUSED_11: 12,
  UNUSED_12: 13,
  UNUSED_13: 14,
  UNUSED_14: 15,
export enum SelectableDatabase {
  DEFAULT = 0,
  WRITE_THROUGH = 1,
  UNUSED_1 = 2,
  UNUSED_2 = 3,
  UNUSED_3 = 4,
  UNUSED_4 = 5,
  UNUSED_5 = 6,
  UNUSED_6 = 7,
  UNUSED_7 = 8,
  UNUSED_8 = 9,
  UNUSED_9 = 10,
  UNUSED_10 = 11,
  UNUSED_11 = 12,
  UNUSED_12 = 13,
  UNUSED_13 = 14,
  UNUSED_14 = 15,
}

exports.SEPARATOR = SEPARATOR

exports.getRedisOptions = (clustered = false) => {
export function getRedisOptions(clustered = false) {
  let password = REDIS_PASSWORD
  let url = REDIS_URL.split("//")
  let url: string[] | string = REDIS_URL.split("//")
  // get rid of the protocol
  url = url.length > 1 ? url[1] : url[0]
  // check for a password etc

@ -84,7 +82,7 @@ exports.getRedisOptions = (clustered = false) => {
    redisProtocolUrl = REDIS_URL
  }

  const opts = {
  const opts: any = {
    connectTimeout: CONNECT_TIMEOUT_MS,
  }
  if (clustered) {

@ -92,7 +90,7 @@ exports.getRedisOptions = (clustered = false) => {
    opts.redisOptions.tls = {}
    opts.redisOptions.password = password
    opts.slotsRefreshTimeout = SLOT_REFRESH_MS
    opts.dnsLookup = (address, callback) => callback(null, address)
    opts.dnsLookup = (address: string, callback: any) => callback(null, address)
  } else {
    opts.host = host
    opts.port = port

@ -101,14 +99,14 @@ exports.getRedisOptions = (clustered = false) => {
  return { opts, host, port, redisProtocolUrl }
}

exports.addDbPrefix = (db, key) => {
export function addDbPrefix(db: string, key: string) {
  if (key.includes(db)) {
    return key
  }
  return `${db}${SEPARATOR}${key}`
}

exports.removeDbPrefix = key => {
export function removeDbPrefix(key: string) {
  let parts = key.split(SEPARATOR)
  if (parts.length >= 2) {
    parts.shift()
@ -1 +0,0 @@
exports.lookupApiKey = async () => {}

@ -1,5 +1,5 @@
const crypto = require("crypto")
const env = require("../environment")
import crypto from "crypto"
import env from "../environment"

const ALGO = "aes-256-ctr"
const SECRET = env.JWT_SECRET

@ -8,13 +8,13 @@ const ITERATIONS = 10000
const RANDOM_BYTES = 16
const STRETCH_LENGTH = 32

function stretchString(string, salt) {
function stretchString(string: string, salt: Buffer) {
  return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
}

exports.encrypt = input => {
export function encrypt(input: string) {
  const salt = crypto.randomBytes(RANDOM_BYTES)
  const stretched = stretchString(SECRET, salt)
  const stretched = stretchString(SECRET!, salt)
  const cipher = crypto.createCipheriv(ALGO, stretched, salt)
  const base = cipher.update(input)
  const final = cipher.final()

@ -22,10 +22,10 @@ exports.encrypt = input => {
  return `${salt.toString("hex")}${SEPARATOR}${encrypted}`
}

exports.decrypt = input => {
export function decrypt(input: string) {
  const [salt, encrypted] = input.split(SEPARATOR)
  const saltBuffer = Buffer.from(salt, "hex")
  const stretched = stretchString(SECRET, saltBuffer)
  const stretched = stretchString(SECRET!, saltBuffer)
  const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)
  const base = decipher.update(Buffer.from(encrypted, "hex"))
  const final = decipher.final()

@ -1,18 +1,18 @@
const env = require("./environment")
import env from "../environment"
const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
const { v4 } = require("uuid")
import { v4 } from "uuid"

const SALT_ROUNDS = env.SALT_ROUNDS || 10

exports.hash = async data => {
export async function hash(data: string) {
  const salt = await bcrypt.genSalt(SALT_ROUNDS)
  return bcrypt.hash(data, salt)
}

exports.compare = async (data, encrypted) => {
export async function compare(data: string, encrypted: string) {
  return bcrypt.compare(data, encrypted)
}

exports.newid = function () {
export function newid() {
  return v4().replace(/-/g, "")
}
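
A short sketch of the round trip through these helpers (the password value is illustrative):

import { hash, compare, newid } from "./hashing"

async function createCredentials(password: string) {
  const userId = newid() // uuid v4 with the dashes stripped
  const hashed = await hash(password) // bcrypt with a SALT_ROUNDS salt
  const matches = await compare(password, hashed) // true
  return { userId, hashed, matches }
}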

@ -0,0 +1,2 @@
export * from "./hashing"
export * from "./utils"

@ -4,14 +4,14 @@ import {
  ViewName,
  getAllApps,
  queryGlobalView,
} from "./db"
import { options } from "./middleware/passport/jwt"
import { Header, Cookie, MAX_VALID_DATE } from "./constants"
import env from "./environment"
import userCache from "./cache/user"
import { getSessionsForUser, invalidateSessions } from "./security/sessions"
import * as events from "./events"
import tenancy from "./tenancy"
} from "../db"
import { options } from "../middleware/passport/jwt"
import { Header, Cookie, MAX_VALID_DATE } from "../constants"
import env from "../environment"
import * as userCache from "../cache/user"
import { getSessionsForUser, invalidateSessions } from "../security/sessions"
import * as events from "../events"
import tenancy from "../tenancy"
import {
  App,
  BBContext,
@ -1 +0,0 @@
module.exports = require("./src/tenancy")

@ -1,4 +0,0 @@
module.exports = {
  ...require("./src/utils"),
  ...require("./src/hashing"),
}
@ -390,7 +390,7 @@ const appPostCreate = async (ctx: any, app: App) => {
export const create = async (ctx: any) => {
  const newApplication = await quotas.addApp(() => performAppCreate(ctx))
  await appPostCreate(ctx, newApplication)
  await cache.bustCache(cache.CacheKeys.CHECKLIST)
  await cache.bustCache(cache.CacheKey.CHECKLIST)
  ctx.body = newApplication
  ctx.status = 200
}
@ -31,17 +31,19 @@ export interface GoogleConfig extends Config {
  }
}

export interface OIDCInnerCfg {
  configUrl: string
  clientID: string
  clientSecret: string
  logo: string
  name: string
  uuid: string
  activated: boolean
}

export interface OIDCConfig extends Config {
  config: {
    configs: {
      configUrl: string
      clientID: string
      clientSecret: string
      logo: string
      name: string
      uuid: string
      activated: boolean
    }[]
    configs: OIDCInnerCfg[]
  }
}
|
||||
|
||||
|
|
|
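Naming the previously inline entry type as OIDCInnerCfg makes it reusable outside OIDCConfig. A hypothetical value showing the shape (all field contents invented):

const provider: OIDCInnerCfg = {
  configUrl: "https://idp.example.com/.well-known/openid-configuration",
  clientID: "example-client-id",
  clientSecret: "example-client-secret",
  logo: "oidc-logo.png",
  name: "Example IdP",
  uuid: "0f1e2d3c-0000-0000-0000-000000000000",
  activated: true,
}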
@@ -1,8 +1,37 @@
 import { Document } from "../document"

-export interface User extends Document {
+export interface SSOProfile {
+  id: string
+  name?: {
+    givenName?: string
+    familyName?: string
+  }
+  _json: {
+    email: string
+    picture: string
+  }
+  provider?: string
+}
+
+export interface ThirdPartyUser extends Document {
+  thirdPartyProfile?: SSOProfile["_json"]
+  firstName?: string
+  lastName?: string
+  pictureUrl?: string
+  profile?: SSOProfile
+  oauth2?: any
+  provider?: string
+  providerType?: string
+  email: string
+  userId?: string
+  forceResetPassword?: boolean
+}
+
+export interface User extends ThirdPartyUser {
   tenantId: string
   email: string
   userId?: string
   forceResetPassword?: boolean
   roles: UserRoles
   builder?: {
     global: boolean
@@ -10,14 +39,14 @@ export interface User extends Document {
   admin?: {
     global: boolean
   }
-  providerType?: string
   password?: string
   status?: string
   createdAt?: number // override the default createdAt behaviour - users sdk historically set this to Date.now()
   userGroups?: string[]
-  forceResetPassword?: boolean
   dayPassRecordedAt?: string
-  userId?: string
   account?: {
     authType: string
   }
 }

 export interface UserRoles {
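The old monolithic User is now split into SSOProfile, ThirdPartyUser, and User, so SSO code can operate on the narrower shapes. A hedged sketch of mapping a passport-style profile onto ThirdPartyUser (values invented; this assumes Document's own fields such as _id are optional):

const profile: SSOProfile = {
  id: "google-oauth2|12345",
  name: { givenName: "Ada", familyName: "Lovelace" },
  _json: { email: "ada@example.com", picture: "https://example.com/ada.png" },
  provider: "google",
}

const ssoUser: ThirdPartyUser = {
  email: profile._json.email,
  firstName: profile.name?.givenName,
  lastName: profile.name?.familyName,
  pictureUrl: profile._json.picture,
  thirdPartyProfile: profile._json,
  profile,
  provider: profile.provider,
  providerType: "google",
}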
@@ -1,5 +1,5 @@
 import { User, Account } from "../documents"
-import { IdentityType } from "./events/identification"
+import { IdentityType } from "./events"

 export interface BaseContext {
   _id: string
@@ -170,8 +170,8 @@ export async function save(ctx: BBContext) {

   try {
     const response = await db.put(ctx.request.body)
-    await cache.bustCache(cache.CacheKeys.CHECKLIST)
-    await cache.bustCache(cache.CacheKeys.ANALYTICS_ENABLED)
+    await cache.bustCache(cache.CacheKey.CHECKLIST)
+    await cache.bustCache(cache.CacheKey.ANALYTICS_ENABLED)

     for (const fn of eventFns) {
       await fn()
@@ -371,7 +371,7 @@ export async function destroy(ctx: BBContext) {
   const { id, rev } = ctx.params
   try {
     await db.remove(id, rev)
-    await cache.delete(cache.CacheKeys.CHECKLIST)
+    await cache.delete(cache.CacheKey.CHECKLIST)
     ctx.body = { message: "Config deleted successfully" }
   } catch (err: any) {
     ctx.throw(err.status, err)
@@ -384,7 +384,7 @@ export async function configChecklist(ctx: BBContext) {

   try {
     ctx.body = await cache.withCache(
-      cache.CacheKeys.CHECKLIST,
+      cache.CacheKey.CHECKLIST,
       env.CHECKLIST_CACHE_TTL,
       async () => {
         let apps = []
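configChecklist relies on read-through semantics: withCache returns the cached value when present, otherwise runs the loader and stores the result under the key with a TTL, while bustCache evicts the key so the next read repopulates it. A minimal in-memory sketch of those assumed semantics (the real helpers are presumably Redis-backed, given the redis clients elsewhere in this package):

const store = new Map<string, { value: any; expires: number }>()

export async function withCache<T>(
  key: string,
  ttlSeconds: number,
  fn: () => Promise<T>
): Promise<T> {
  const hit = store.get(key)
  if (hit && hit.expires > Date.now()) {
    return hit.value // cache hit: skip the loader entirely
  }
  const value = await fn()
  store.set(key, { value, expires: Date.now() + ttlSeconds * 1000 })
  return value
}

export async function bustCache(key: string) {
  store.delete(key) // evict so the next withCache call repopulates
}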
@@ -104,7 +104,7 @@ export const adminUser = async (ctx: any) => {
   try {
     // always bust checklist beforehand, if an error occurs but can proceed, don't get
     // stuck in a cycle
-    await cache.bustCache(cache.CacheKeys.CHECKLIST)
+    await cache.bustCache(cache.CacheKey.CHECKLIST)
     const finalUser = await sdk.users.save(user, {
       hashPassword,
       requirePassword,
@@ -6,7 +6,7 @@ export async function systemRestored(ctx: BBContext) {
   if (!env.SELF_HOSTED) {
     ctx.throw(405, "This operation is not allowed in cloud.")
   }
-  await cache.bustCache(cache.CacheKeys.CHECKLIST)
+  await cache.bustCache(cache.CacheKey.CHECKLIST)
   ctx.body = {
     message: "System prepared after restore.",
   }