Complete conversion of backend-core to Typescript.
parent 1b408e60f2
commit a38d617138
@@ -1 +0,0 @@
-module.exports = require("./src/cloud/accounts")

@@ -1 +0,0 @@
-module.exports = require("./src/auth")

@@ -1,9 +0,0 @@
-const generic = require("./src/cache/generic")
-
-module.exports = {
-  user: require("./src/cache/user"),
-  app: require("./src/cache/appMetadata"),
-  writethrough: require("./src/cache/writethrough"),
-  ...generic,
-  cache: generic,
-}

@@ -1 +0,0 @@
-module.exports = require("./src/constants")

@@ -1,24 +0,0 @@
-const {
-  getAppDB,
-  getDevAppDB,
-  getProdAppDB,
-  getAppId,
-  updateAppId,
-  doInAppContext,
-  doInTenant,
-  doInContext,
-} = require("./src/context")
-
-const identity = require("./src/context/identity")
-
-module.exports = {
-  getAppDB,
-  getDevAppDB,
-  getProdAppDB,
-  getAppId,
-  updateAppId,
-  doInAppContext,
-  doInTenant,
-  identity,
-  doInContext,
-}

@@ -1 +0,0 @@
-module.exports = require("./src/db")

@@ -1 +0,0 @@
-module.exports = require("./src/context/deprovision")

@@ -1 +0,0 @@
-module.exports = require("./src/security/encryption")

@@ -1 +0,0 @@
-module.exports = require("./src/logging")

@@ -1 +0,0 @@
-module.exports = require("./src/middleware")

@@ -1 +0,0 @@
-module.exports = require("./src/migrations")

@@ -1,4 +0,0 @@
-module.exports = {
-  ...require("./src/objectStore"),
-  ...require("./src/objectStore/utils"),
-}

@@ -1 +0,0 @@
-module.exports = require("./src/security/permissions")

@@ -1,3 +0,0 @@
-module.exports = {
-  ...require("./src/plugin"),
-}

@@ -1,5 +0,0 @@
-module.exports = {
-  Client: require("./src/redis"),
-  utils: require("./src/redis/utils"),
-  clients: require("./src/redis/init"),
-}

@@ -1 +0,0 @@
-module.exports = require("./src/security/roles")

@@ -1 +0,0 @@
-module.exports = require("./src/security/sessions")
@@ -1,6 +1,6 @@
-const redis = require("../redis/init")
-const { doWithDB } = require("../db")
-const { DocumentType } = require("../db/constants")
+import { getAppClient } from "../redis/init"
+import { doWithDB, DocumentType } from "../db"
+import { Database } from "@budibase/types"
 
 const AppState = {
   INVALID: "invalid",
@@ -10,17 +10,17 @@ const EXPIRY_SECONDS = 3600
 /**
  * The default populate app metadata function
  */
-const populateFromDB = async appId => {
+async function populateFromDB(appId: string) {
   return doWithDB(
     appId,
-    db => {
+    (db: Database) => {
       return db.get(DocumentType.APP_METADATA)
     },
     { skip_setup: true }
   )
 }
 
-const isInvalid = metadata => {
+function isInvalid(metadata?: { state: string }) {
   return !metadata || metadata.state === AppState.INVALID
 }
 
@@ -31,15 +31,15 @@ const isInvalid = metadata => {
  * @param {string} appId the id of the app to get metadata from.
  * @returns {object} the app metadata.
  */
-exports.getAppMetadata = async appId => {
-  const client = await redis.getAppClient()
+export async function getAppMetadata(appId: string) {
+  const client = await getAppClient()
   // try cache
   let metadata = await client.get(appId)
   if (!metadata) {
-    let expiry = EXPIRY_SECONDS
+    let expiry: number | undefined = EXPIRY_SECONDS
     try {
       metadata = await populateFromDB(appId)
-    } catch (err) {
+    } catch (err: any) {
       // app DB left around, but no metadata, it is invalid
       if (err && err.status === 404) {
         metadata = { state: AppState.INVALID }
@@ -74,11 +74,11 @@ exports.getAppMetadata = async appId => {
  * @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
  * @return {Promise<void>} will respond with success when cache is updated.
  */
-exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
+export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
   if (!appId) {
     throw "Cannot invalidate if no app ID provided."
   }
-  const client = await redis.getAppClient()
+  const client = await getAppClient()
   await client.delete(appId)
   if (newMetadata) {
     await client.store(appId, newMetadata, EXPIRY_SECONDS)
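Callers keep the same surface, now typed. A minimal sketch of consuming the converted module — the import path and the extra `updatedAt` field are illustrative, not part of this commit:

```ts
import { getAppMetadata, invalidateAppMetadata } from "../cache/appMetadata"

async function refreshApp(appId: string) {
  // first call populates the Redis cache from CouchDB; later calls hit Redis
  const metadata = await getAppMetadata(appId)
  if (metadata.state === "invalid") {
    return // app DB exists but its metadata document is gone
  }
  // after writing new metadata, replace the cached copy rather than just deleting it
  await invalidateAppMetadata(appId, { ...metadata, updatedAt: Date.now() })
}
```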
@@ -1,6 +1,6 @@
 import { getTenantId } from "../../context"
-import redis from "../../redis/init"
-import RedisWrapper from "../../redis"
+import * as redis from "../../redis/init"
+import { Client } from "../../redis"
 
 function generateTenantKey(key: string) {
   const tenantId = getTenantId()
@@ -8,9 +8,9 @@ function generateTenantKey(key: string) {
 }
 
 export = class BaseCache {
-  client: RedisWrapper | undefined
+  client: Client | undefined
 
-  constructor(client: RedisWrapper | undefined = undefined) {
+  constructor(client: Client | undefined = undefined) {
     this.client = client
   }
 
@@ -1,30 +0,0 @@
-const BaseCache = require("./base")
-
-const GENERIC = new BaseCache()
-
-exports.CacheKeys = {
-  CHECKLIST: "checklist",
-  INSTALLATION: "installation",
-  ANALYTICS_ENABLED: "analyticsEnabled",
-  UNIQUE_TENANT_ID: "uniqueTenantId",
-  EVENTS: "events",
-  BACKFILL_METADATA: "backfillMetadata",
-  EVENTS_RATE_LIMIT: "eventsRateLimit",
-}
-
-exports.TTL = {
-  ONE_MINUTE: 600,
-  ONE_HOUR: 3600,
-  ONE_DAY: 86400,
-}
-
-function performExport(funcName) {
-  return (...args) => GENERIC[funcName](...args)
-}
-
-exports.keys = performExport("keys")
-exports.get = performExport("get")
-exports.store = performExport("store")
-exports.delete = performExport("delete")
-exports.withCache = performExport("withCache")
-exports.bustCache = performExport("bustCache")
@@ -0,0 +1,30 @@
+const BaseCache = require("./base")
+
+const GENERIC = new BaseCache()
+
+export enum CacheKey {
+  CHECKLIST = "checklist",
+  INSTALLATION = "installation",
+  ANALYTICS_ENABLED = "analyticsEnabled",
+  UNIQUE_TENANT_ID = "uniqueTenantId",
+  EVENTS = "events",
+  BACKFILL_METADATA = "backfillMetadata",
+  EVENTS_RATE_LIMIT = "eventsRateLimit",
+}
+
+export enum TTL {
+  ONE_MINUTE = 600,
+  ONE_HOUR = 3600,
+  ONE_DAY = 86400,
+}
+
+function performExport(funcName: string) {
+  return (...args: any) => GENERIC[funcName](...args)
+}
+
+export const keys = performExport("keys")
+export const get = performExport("get")
+export const store = performExport("store")
+export const destroy = performExport("delete")
+export const withCache = performExport("withCache")
+export const bustCache = performExport("bustCache")
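Converting `CacheKeys`/`TTL` from plain objects to enums lets the compiler check keys and TTLs, while `performExport` keeps the old function surface (`delete` becomes `destroy`, since `delete` is a reserved word and cannot be a named export). A rough consumption sketch — the `fetchChecklist` loader is hypothetical:

```ts
import { withCache, bustCache, CacheKey, TTL } from "../cache/generic"

// hypothetical loader - any async function producing the value to cache
async function fetchChecklist() {
  return { apps: true, smtp: false, adminUser: true }
}

async function getChecklist() {
  // returns the cached value on a hit, otherwise runs the loader and stores it
  return withCache(CacheKey.CHECKLIST, TTL.ONE_HOUR, fetchChecklist)
}

async function onSettingsChanged() {
  await bustCache(CacheKey.CHECKLIST)
}
```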
@@ -0,0 +1,4 @@
+export * as generic from "./generic"
+export * as user from "./user"
+export * as app from "./appMetadata"
+export * as writethrough from "./writethrough"
@@ -1,15 +1,16 @@
-const redis = require("../redis/init")
-const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
-const env = require("../environment")
-const accounts = require("../cloud/accounts")
+import * as redis from "../redis/init"
+import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy"
+import env from "../environment"
+import * as accounts from "../cloud/accounts"
+import { Database } from "@budibase/types"
 
 const EXPIRY_SECONDS = 3600
 
 /**
  * The default populate user function
  */
-const populateFromDB = async (userId, tenantId) => {
-  const user = await doWithGlobalDB(tenantId, db => db.get(userId))
+async function populateFromDB(userId: string, tenantId: string) {
+  const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId))
   user.budibaseAccess = true
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
     const account = await accounts.getAccount(user.email)
@@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => {
  * @param {*} populateUser function to provide the user for re-caching. default to couch db
  * @returns
  */
-exports.getUser = async (userId, tenantId = null, populateUser = null) => {
+export async function getUser(
+  userId: string,
+  tenantId?: string,
+  populateUser?: any
+) {
   if (!populateUser) {
     populateUser = populateFromDB
   }
@@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
   let user = await client.get(userId)
   if (!user) {
     user = await populateUser(userId, tenantId)
-    client.store(userId, user, EXPIRY_SECONDS)
+    await client.store(userId, user, EXPIRY_SECONDS)
   }
   if (user && !user.tenantId && tenantId) {
     // make sure the tenant ID is always correct/set
@@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
   return user
 }
 
-exports.invalidateUser = async userId => {
+export async function invalidateUser(userId: string) {
   const client = await redis.getUserClient()
   await client.delete(userId)
 }
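`getUser`'s optional arguments survive as `?` parameters. A sketch of supplying a custom populate function, e.g. to avoid CouchDB in tests — the stub user is made up:

```ts
import { getUser, invalidateUser } from "../cache/user"

// any (userId, tenantId) => Promise<user> function can replace the CouchDB lookup
async function populateStub(userId: string, tenantId?: string) {
  return { _id: userId, tenantId, email: "test@example.com" }
}

async function example() {
  const user = await getUser("us_123", "default", populateStub)
  await invalidateUser(user._id) // evict the cached copy again
}
```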
@@ -1,42 +0,0 @@
-const fetch = require("node-fetch")
-class API {
-  constructor(host) {
-    this.host = host
-  }
-
-  apiCall =
-    method =>
-    async (url = "", options = {}) => {
-      if (!options.headers) {
-        options.headers = {}
-      }
-
-      if (!options.headers["Content-Type"]) {
-        options.headers = {
-          "Content-Type": "application/json",
-          Accept: "application/json",
-          ...options.headers,
-        }
-      }
-
-      let json = options.headers["Content-Type"] === "application/json"
-
-      const requestOptions = {
-        method: method,
-        body: json ? JSON.stringify(options.body) : options.body,
-        headers: options.headers,
-        // TODO: See if this is necessary
-        credentials: "include",
-      }
-
-      return await fetch(`${this.host}${url}`, requestOptions)
-    }
-
-  post = this.apiCall("POST")
-  get = this.apiCall("GET")
-  patch = this.apiCall("PATCH")
-  del = this.apiCall("DELETE")
-  put = this.apiCall("PUT")
-}
-
-module.exports = API
@@ -0,0 +1,55 @@
+import fetch from "node-fetch"
+
+export = class API {
+  host: string
+
+  constructor(host: string) {
+    this.host = host
+  }
+
+  async apiCall(method: string, url: string, options?: any) {
+    if (!options.headers) {
+      options.headers = {}
+    }
+
+    if (!options.headers["Content-Type"]) {
+      options.headers = {
+        "Content-Type": "application/json",
+        Accept: "application/json",
+        ...options.headers,
+      }
+    }
+
+    let json = options.headers["Content-Type"] === "application/json"
+
+    const requestOptions = {
+      method: method,
+      body: json ? JSON.stringify(options.body) : options.body,
+      headers: options.headers,
+      // TODO: See if this is necessary
+      credentials: "include",
+    }
+
+    return await fetch(`${this.host}${url}`, requestOptions)
+  }
+
+  async post(url: string, options?: any) {
+    return this.apiCall("POST", url, options)
+  }
+
+  async get(url: string, options?: any) {
+    return this.apiCall("GET", url, options)
+  }
+
+  async patch(url: string, options?: any) {
+    return this.apiCall("PATCH", url, options)
+  }
+
+  async del(url: string, options?: any) {
+    return this.apiCall("DELETE", url, options)
+  }
+
+  async put(url: string, options?: any) {
+    return this.apiCall("PUT", url, options)
+  }
+}
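The arrow-function `apiCall` factory becomes ordinary async methods, but calls read the same. A usage sketch — host, route and body are placeholders:

```ts
import API = require("./api") // path is illustrative

const api = new API("http://localhost:10000")

async function createApp() {
  // body is JSON-stringified because Content-Type defaults to application/json
  const response = await api.post("/api/applications", {
    body: { name: "test app" },
  })
  return response.json()
}
```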
@@ -1,44 +0,0 @@
-exports.UserStatus = {
-  ACTIVE: "active",
-  INACTIVE: "inactive",
-}
-
-exports.Cookie = {
-  CurrentApp: "budibase:currentapp",
-  Auth: "budibase:auth",
-  Init: "budibase:init",
-  ACCOUNT_RETURN_URL: "budibase:account:returnurl",
-  DatasourceAuth: "budibase:datasourceauth",
-  OIDC_CONFIG: "budibase:oidc:config",
-}
-
-exports.Header = {
-  API_KEY: "x-budibase-api-key",
-  LICENSE_KEY: "x-budibase-license-key",
-  API_VER: "x-budibase-api-version",
-  APP_ID: "x-budibase-app-id",
-  TYPE: "x-budibase-type",
-  PREVIEW_ROLE: "x-budibase-role",
-  TENANT_ID: "x-budibase-tenant-id",
-  TOKEN: "x-budibase-token",
-  CSRF_TOKEN: "x-csrf-token",
-}
-
-exports.GlobalRoles = {
-  OWNER: "owner",
-  ADMIN: "admin",
-  BUILDER: "builder",
-  WORKSPACE_MANAGER: "workspace_manager",
-}
-
-exports.Config = {
-  SETTINGS: "settings",
-  ACCOUNT: "account",
-  SMTP: "smtp",
-  GOOGLE: "google",
-  OIDC: "oidc",
-  OIDC_LOGOS: "logos_oidc",
-}
-
-exports.MAX_VALID_DATE = new Date(2147483647000)
-exports.DEFAULT_TENANT_ID = "default"
@@ -18,7 +18,7 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
 }
 
 export const doInUserContext = (user: User, task: any) => {
-  const userContext: UserContext = {
+  const userContext: any = {
     ...user,
     _id: user._id as string,
     type: IdentityType.USER,
@@ -1,4 +1,4 @@
-import { newid } from "../hashing"
+import { newid } from "../utils"
 import { DEFAULT_TENANT_ID, Config } from "../constants"
 import env from "../environment"
 import {
@ -2,7 +2,7 @@ import env from "../environment"
|
||||||
import tenancy from "../tenancy"
|
import tenancy from "../tenancy"
|
||||||
import * as dbUtils from "../db/utils"
|
import * as dbUtils from "../db/utils"
|
||||||
import { Config } from "../constants"
|
import { Config } from "../constants"
|
||||||
import { withCache, TTL, CacheKeys } from "../cache/generic"
|
import { withCache, TTL, CacheKey } from "../cache/generic"
|
||||||
|
|
||||||
export const enabled = async () => {
|
export const enabled = async () => {
|
||||||
// cloud - always use the environment variable
|
// cloud - always use the environment variable
|
||||||
|
@@ -13,7 +13,7 @@ export const enabled = async () => {
   // self host - prefer the settings doc
   // use cache as events have high throughput
   const enabledInDB = await withCache(
-    CacheKeys.ANALYTICS_ENABLED,
+    CacheKey.ANALYTICS_ENABLED,
     TTL.ONE_DAY,
     async () => {
       const settings = await getSettingsDoc()
@@ -21,7 +21,7 @@ import {
   AppCreatedEvent,
 } from "@budibase/types"
 import * as context from "../context"
-import { CacheKeys } from "../cache/generic"
+import { CacheKey } from "../cache/generic"
 import * as cache from "../cache/generic"
 
 // LIFECYCLE
@@ -48,18 +48,18 @@ export const end = async () => {
 // CRUD
 
 const getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {
-  return cache.get(CacheKeys.BACKFILL_METADATA)
+  return cache.get(CacheKey.BACKFILL_METADATA)
 }
 
 const saveBackfillMetadata = async (
   backfill: BackfillMetadata
 ): Promise<void> => {
   // no TTL - deleted by backfill
-  return cache.store(CacheKeys.BACKFILL_METADATA, backfill)
+  return cache.store(CacheKey.BACKFILL_METADATA, backfill)
 }
 
 const deleteBackfillMetadata = async (): Promise<void> => {
-  await cache.delete(CacheKeys.BACKFILL_METADATA)
+  await cache.destroy(CacheKey.BACKFILL_METADATA)
 }
 
 const clearEvents = async () => {
@@ -70,7 +70,7 @@ const clearEvents = async () => {
   for (const key of keys) {
     // delete each key
     // don't use tenancy, already in the key
-    await cache.delete(key, { useTenancy: false })
+    await cache.destroy(key, { useTenancy: false })
   }
 }
 
@@ -167,7 +167,7 @@ const getEventKey = (event?: Event, properties?: any) => {
 
   const tenantId = context.getTenantId()
   if (event) {
-    eventKey = `${CacheKeys.EVENTS}:${tenantId}:${event}`
+    eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}`
 
     // use some properties to make the key more unique
     const custom = CUSTOM_PROPERTY_SUFFIX[event]
@@ -176,7 +176,7 @@ const getEventKey = (event?: Event, properties?: any) => {
       eventKey = `${eventKey}:${suffix}`
     }
   } else {
-    eventKey = `${CacheKeys.EVENTS}:${tenantId}:*`
+    eventKey = `${CacheKey.EVENTS}:${tenantId}:*`
   }
 
   return eventKey
@@ -20,9 +20,9 @@ import {
 import { processors } from "./processors"
 import * as dbUtils from "../db/utils"
 import { Config } from "../constants"
-import * as hashing from "../hashing"
+import { newid } from "../utils"
 import * as installation from "../installation"
-import { withCache, TTL, CacheKeys } from "../cache/generic"
+import { withCache, TTL, CacheKey } from "../cache/generic"
 
 const pkg = require("../../package.json")
 
@ -270,7 +270,7 @@ const getEventTenantId = async (tenantId: string): Promise<string> => {
|
||||||
const getUniqueTenantId = async (tenantId: string): Promise<string> => {
|
const getUniqueTenantId = async (tenantId: string): Promise<string> => {
|
||||||
// make sure this tenantId always matches the tenantId in context
|
// make sure this tenantId always matches the tenantId in context
|
||||||
return context.doInTenant(tenantId, () => {
|
return context.doInTenant(tenantId, () => {
|
||||||
return withCache(CacheKeys.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
|
return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
|
||||||
const db = context.getGlobalDB()
|
const db = context.getGlobalDB()
|
||||||
const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
|
const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
|
||||||
type: Config.SETTINGS,
|
type: Config.SETTINGS,
|
||||||
|
@ -280,7 +280,7 @@ const getUniqueTenantId = async (tenantId: string): Promise<string> => {
|
||||||
if (config.config.uniqueTenantId) {
|
if (config.config.uniqueTenantId) {
|
||||||
return config.config.uniqueTenantId
|
return config.config.uniqueTenantId
|
||||||
} else {
|
} else {
|
||||||
uniqueTenantId = `${hashing.newid()}_${tenantId}`
|
uniqueTenantId = `${newid()}_${tenantId}`
|
||||||
config.config.uniqueTenantId = uniqueTenantId
|
config.config.uniqueTenantId = uniqueTenantId
|
||||||
await db.put(config)
|
await db.put(config)
|
||||||
return uniqueTenantId
|
return uniqueTenantId
|
||||||
|
|
|
@@ -1,5 +1,5 @@
 import { Event } from "@budibase/types"
-import { CacheKeys, TTL } from "../../../cache/generic"
+import { CacheKey, TTL } from "../../../cache/generic"
 import * as cache from "../../../cache/generic"
 import * as context from "../../../context"
 
@ -74,7 +74,7 @@ export const limited = async (event: Event): Promise<boolean> => {
|
||||||
}
|
}
|
||||||
|
|
||||||
const eventKey = (event: RateLimitedEvent) => {
|
const eventKey = (event: RateLimitedEvent) => {
|
||||||
let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}`
|
let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}`
|
||||||
if (isPerApp(event)) {
|
if (isPerApp(event)) {
|
||||||
key = key + ":" + context.getAppId()
|
key = key + ":" + context.getAppId()
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,7 +3,7 @@ import PosthogProcessor from "../PosthogProcessor"
|
||||||
import { Event, IdentityType, Hosting } from "@budibase/types"
|
import { Event, IdentityType, Hosting } from "@budibase/types"
|
||||||
const tk = require("timekeeper")
|
const tk = require("timekeeper")
|
||||||
import * as cache from "../../../../cache/generic"
|
import * as cache from "../../../../cache/generic"
|
||||||
import { CacheKeys } from "../../../../cache/generic"
|
import { CacheKey } from "../../../../cache/generic"
|
||||||
import * as context from "../../../../context"
|
import * as context from "../../../../context"
|
||||||
|
|
||||||
const newIdentity = () => {
|
const newIdentity = () => {
|
||||||
|
@ -19,7 +19,7 @@ describe("PosthogProcessor", () => {
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
jest.clearAllMocks()
|
jest.clearAllMocks()
|
||||||
await cache.bustCache(
|
await cache.bustCache(
|
||||||
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
`${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -89,7 +89,7 @@ describe("PosthogProcessor", () => {
|
||||||
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
|
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
|
||||||
|
|
||||||
await cache.bustCache(
|
await cache.bustCache(
|
||||||
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
`${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
||||||
)
|
)
|
||||||
|
|
||||||
tk.freeze(new Date(2022, 0, 1, 14, 0))
|
tk.freeze(new Date(2022, 0, 1, 14, 0))
|
||||||
|
|
|
@@ -1,17 +1,17 @@
-const env = require("../environment")
-const tenancy = require("../tenancy")
+import env from "../environment"
+import tenancy from "../tenancy"
 
 /**
  * Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.
  * The env var is formatted as:
  * tenant1:feature1:feature2,tenant2:feature1
  */
-const getFeatureFlags = () => {
+function getFeatureFlags() {
   if (!env.TENANT_FEATURE_FLAGS) {
     return
   }
 
-  const tenantFeatureFlags = {}
+  const tenantFeatureFlags: Record<string, string[]> = {}
 
   env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
     const [tenantId, ...features] = tenantToFeatures.split(":")
@@ -29,13 +29,13 @@ const getFeatureFlags = () => {
 
 const TENANT_FEATURE_FLAGS = getFeatureFlags()
 
-exports.isEnabled = featureFlag => {
+export function isEnabled(featureFlag: string) {
   const tenantId = tenancy.getTenantId()
-  const flags = exports.getTenantFeatureFlags(tenantId)
+  const flags = getTenantFeatureFlags(tenantId)
   return flags.includes(featureFlag)
 }
 
-exports.getTenantFeatureFlags = tenantId => {
+export function getTenantFeatureFlags(tenantId: string) {
   const flags = []
 
   if (TENANT_FEATURE_FLAGS) {
@@ -53,8 +53,8 @@ exports.getTenantFeatureFlags = tenantId => {
   return flags
 }
 
-exports.TenantFeatureFlag = {
-  LICENSING: "LICENSING",
-  GOOGLE_SHEETS: "GOOGLE_SHEETS",
-  USER_GROUPS: "USER_GROUPS",
+export enum TenantFeatureFlag {
+  LICENSING = "LICENSING",
+  GOOGLE_SHEETS = "GOOGLE_SHEETS",
+  USER_GROUPS = "USER_GROUPS",
 }
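Only the export style changes here; flags are still parsed from the `TENANT_FEATURE_FLAGS` env var described in the comment above. A sketch tying the enum to that format — tenant IDs and flag mix are examples:

```ts
// TENANT_FEATURE_FLAGS="default:LICENSING:USER_GROUPS,acme:GOOGLE_SHEETS"
import { isEnabled, getTenantFeatureFlags, TenantFeatureFlag } from "../featureFlags"

// isEnabled resolves the tenant from the current tenancy context
if (isEnabled(TenantFeatureFlag.USER_GROUPS)) {
  // user-groups-only code path
}

// or inspect a specific tenant's flags directly
const flags = getTenantFeatureFlags("acme") // ["GOOGLE_SHEETS"]
```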
@@ -4,6 +4,6 @@
  * @param {string} url The URL to test and remove any extra double slashes.
  * @return {string} The updated url.
  */
-exports.checkSlashesInUrl = url => {
+export function checkSlashesInUrl(url: string) {
   return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
 }
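For reference, the regex keeps the protocol's `//` (the first capture group wins there) and collapses any other run of slashes to one:

```ts
import { checkSlashesInUrl } from "../helpers"

checkSlashesInUrl("http://localhost:10000//api///applications")
// => "http://localhost:10000/api/applications"
```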
@ -9,26 +9,24 @@ import * as accounts from "./cloud/accounts"
|
||||||
import * as installation from "./installation"
|
import * as installation from "./installation"
|
||||||
import env from "./environment"
|
import env from "./environment"
|
||||||
import tenancy from "./tenancy"
|
import tenancy from "./tenancy"
|
||||||
import featureFlags from "./featureFlags"
|
import * as featureFlags from "./featureFlags"
|
||||||
import * as sessions from "./security/sessions"
|
import * as sessions from "./security/sessions"
|
||||||
import * as deprovisioning from "./context/deprovision"
|
import * as deprovisioning from "./context/deprovision"
|
||||||
import auth from "./auth"
|
import auth from "./auth"
|
||||||
import * as constants from "./constants"
|
import * as constants from "./constants"
|
||||||
import * as dbConstants from "./db/constants"
|
import * as dbConstants from "./db/constants"
|
||||||
import * as logging from "./logging"
|
import * as logging from "./logging"
|
||||||
import pino from "./pino"
|
import * as pino from "./pino"
|
||||||
import * as middleware from "./middleware"
|
import * as middleware from "./middleware"
|
||||||
import plugins from "./plugin"
|
import plugins from "./plugin"
|
||||||
import encryption from "./security/encryption"
|
import * as encryption from "./security/encryption"
|
||||||
import * as queue from "./queue"
|
import * as queue from "./queue"
|
||||||
import * as db from "./db"
|
import * as db from "./db"
|
||||||
|
import * as context from "./context"
|
||||||
// mimic the outer package exports
|
import * as cache from "./cache"
|
||||||
import * as objectStore from "./pkg/objectStore"
|
import * as objectStore from "./objectStore"
|
||||||
import * as utils from "./pkg/utils"
|
import * as redis from "./redis"
|
||||||
import redis from "./pkg/redis"
|
import * as utils from "./utils"
|
||||||
import cache from "./pkg/cache"
|
|
||||||
import context from "./pkg/context"
|
|
||||||
|
|
||||||
const init = (opts: any = {}) => {
|
const init = (opts: any = {}) => {
|
||||||
db.init(opts.db)
|
db.init(opts.db)
|
||||||
|
|
|
@@ -1,16 +1,16 @@
-import * as hashing from "./hashing"
+import { newid } from "./utils"
 import * as events from "./events"
-import { StaticDatabases } from "./db/constants"
+import { StaticDatabases } from "./db"
 import { doWithDB } from "./db"
 import { Installation, IdentityType } from "@budibase/types"
 import * as context from "./context"
 import semver from "semver"
-import { bustCache, withCache, TTL, CacheKeys } from "./cache/generic"
+import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
 
 const pkg = require("../package.json")
 
 export const getInstall = async (): Promise<Installation> => {
-  return withCache(CacheKeys.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
+  return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
     useTenancy: false,
   })
 }
@ -28,7 +28,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
|
||||||
if (e.status === 404) {
|
if (e.status === 404) {
|
||||||
install = {
|
install = {
|
||||||
_id: StaticDatabases.PLATFORM_INFO.docs.install,
|
_id: StaticDatabases.PLATFORM_INFO.docs.install,
|
||||||
installId: hashing.newid(),
|
installId: newid(),
|
||||||
version: pkg.version,
|
version: pkg.version,
|
||||||
}
|
}
|
||||||
const resp = await platformDb.put(install)
|
const resp = await platformDb.put(install)
|
||||||
|
@ -50,7 +50,7 @@ const updateVersion = async (version: string): Promise<boolean> => {
|
||||||
const install = await getInstall()
|
const install = await getInstall()
|
||||||
install.version = version
|
install.version = version
|
||||||
await platformDb.put(install)
|
await platformDb.put(install)
|
||||||
await bustCache(CacheKeys.INSTALLATION)
|
await bustCache(CacheKey.INSTALLATION)
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
} catch (e: any) {
|
} catch (e: any) {
|
||||||
|
|
|
@@ -1,4 +1,6 @@
-module.exports = async (ctx, next) => {
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
   if (
     !ctx.internal &&
     (!ctx.user || !ctx.user.admin || !ctx.user.admin.global)
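`export =` keeps the middleware's CommonJS shape, so existing Koa wiring is untouched. A hedged mounting sketch — the router library and route are placeholders:

```ts
import Router from "@koa/router" // illustrative - any Koa-compatible router
import adminOnly from "../middleware/adminOnly"

const router = new Router()
// rejects requests that are neither internal nor from a global admin
router.post("/api/global/users", adminOnly, async ctx => {
  ctx.body = { created: true }
})
```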
@@ -1,4 +0,0 @@
-module.exports = async (ctx, next) => {
-  // Placeholder for audit log middleware
-  return next()
-}
@@ -0,0 +1,6 @@
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
+  // Placeholder for audit log middleware
+  return next()
+}
@ -6,10 +6,12 @@ import { buildMatcherRegex, matches } from "./matchers"
|
||||||
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
|
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
|
||||||
import { getGlobalDB, doInTenant } from "../tenancy"
|
import { getGlobalDB, doInTenant } from "../tenancy"
|
||||||
import { decrypt } from "../security/encryption"
|
import { decrypt } from "../security/encryption"
|
||||||
const identity = require("../context/identity")
|
import * as identity from "../context/identity"
|
||||||
const env = require("../environment")
|
import env from "../environment"
|
||||||
|
|
||||||
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000
|
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
|
||||||
|
? parseInt(env.SESSION_UPDATE_PERIOD)
|
||||||
|
: 60 * 1000
|
||||||
|
|
||||||
interface FinaliseOpts {
|
interface FinaliseOpts {
|
||||||
authenticated?: boolean
|
authenticated?: boolean
|
||||||
|
@@ -40,13 +42,13 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
   return doInTenant(tenantId, async () => {
     const db = getGlobalDB()
     // api key is encrypted in the database
-    const userId = await queryGlobalView(
+    const userId = (await queryGlobalView(
       ViewName.BY_API_KEY,
       {
         key: apiKey,
       },
       db
-    )
+    )) as string
     if (userId) {
       return {
         valid: true,
@@ -1,4 +1,6 @@
-module.exports = async (ctx, next) => {
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
   if (
     !ctx.internal &&
     (!ctx.user || !ctx.user.builder || !ctx.user.builder.global)
@@ -1,4 +1,6 @@
-module.exports = async (ctx, next) => {
+import { BBContext } from "@budibase/types"
+
+export = async (ctx: BBContext, next: any) => {
   if (
     !ctx.internal &&
     (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) &&
@@ -1,5 +1,6 @@
-const { Header } = require("../constants")
-const { buildMatcherRegex, matches } = require("./matchers")
+import { Header } from "../constants"
+import { buildMatcherRegex, matches } from "./matchers"
+import { BBContext } from "@budibase/types"
 
 /**
  * GET, HEAD and OPTIONS methods are considered safe operations
@@ -31,9 +32,9 @@ const INCLUDED_CONTENT_TYPES = [
  * https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern
  *
  */
-module.exports = (opts = { noCsrfPatterns: [] }) => {
+export = (opts = { noCsrfPatterns: [] }) => {
   const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
-  return async (ctx, next) => {
+  return async (ctx: BBContext, next: any) => {
     // don't apply for excluded paths
     const found = matches(ctx, noCsrfOptions)
     if (found) {
@@ -62,7 +63,7 @@ module.exports = (opts = { noCsrfPatterns: [] }) => {
 
     // apply csrf when there is a token in the session (new logins)
     // in future there should be a hard requirement that the token is present
-    const userToken = ctx.user.csrfToken
+    const userToken = ctx.user?.csrfToken
     if (!userToken) {
       return next()
     }
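The CSRF factory keeps its options shape through `export =`. A sketch of excluding routes from the token check — the pattern shape follows what `buildMatcherRegex` accepts, and the route is an example:

```ts
import csrf from "../middleware/csrf"

// requests matching these patterns skip the CSRF check entirely
const csrfMiddleware = csrf({
  noCsrfPatterns: ["/api/webhooks/trigger"],
})
// app.use(csrfMiddleware)
```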
@@ -1,18 +1,18 @@
-const jwt = require("./passport/jwt")
-const local = require("./passport/local")
-const google = require("./passport/google")
-const oidc = require("./passport/oidc")
-const { authError, ssoCallbackUrl } = require("./passport/utils")
-const authenticated = require("./authenticated")
-const auditLog = require("./auditLog")
-const tenancy = require("./tenancy")
-const internalApi = require("./internalApi")
-const datasourceGoogle = require("./passport/datasource/google")
-const csrf = require("./csrf")
-const adminOnly = require("./adminOnly")
-const builderOrAdmin = require("./builderOrAdmin")
-const builderOnly = require("./builderOnly")
-const joiValidator = require("./joi-validator")
+import * as jwt from "./passport/jwt"
+import * as local from "./passport/local"
+import * as google from "./passport/google"
+import * as oidc from "./passport/oidc"
+import { authError, ssoCallbackUrl } from "./passport/utils"
+import authenticated from "./authenticated"
+import auditLog from "./auditLog"
+import tenancy from "./tenancy"
+import internalApi from "./internalApi"
+import * as datasourceGoogle from "./passport/datasource/google"
+import csrf from "./csrf"
+import adminOnly from "./adminOnly"
+import builderOrAdmin from "./builderOrAdmin"
+import builderOnly from "./builderOnly"
+import * as joiValidator from "./joi-validator"
 
 const pkg = {
   google,
@@ -1,10 +1,11 @@
-const env = require("../environment")
-const { Header } = require("../constants")
+import env from "../environment"
+import { Header } from "../constants"
+import { BBContext } from "@budibase/types"
 
 /**
  * API Key only endpoint.
  */
-module.exports = async (ctx, next) => {
+export = async (ctx: BBContext, next: any) => {
   const apiKey = ctx.request.headers[Header.API_KEY]
   if (apiKey !== env.INTERNAL_API_KEY) {
     ctx.throw(403, "Unauthorized")
@@ -1,16 +1,19 @@
-const Joi = require("joi")
+import Joi, { ObjectSchema } from "joi"
+import { BBContext } from "@budibase/types"
 
-function validate(schema, property) {
+function validate(schema: Joi.ObjectSchema, property: string) {
   // Return a Koa middleware function
-  return (ctx, next) => {
+  return (ctx: BBContext, next: any) => {
     if (!schema) {
       return next()
     }
     let params = null
+    // @ts-ignore
+    let reqProp = ctx.request?.[property]
     if (ctx[property] != null) {
       params = ctx[property]
-    } else if (ctx.request[property] != null) {
-      params = ctx.request[property]
+    } else if (reqProp != null) {
+      params = reqProp
     }
 
     // not all schemas have the append property e.g. array schemas
@@ -30,10 +33,10 @@ function validate(schema, property) {
   }
 }
 
-module.exports.body = schema => {
+export function body(schema: Joi.ObjectSchema) {
   return validate(schema, "body")
 }
 
-module.exports.params = schema => {
+export function params(schema: Joi.ObjectSchema) {
   return validate(schema, "params")
 }
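`body` and `params` keep their one-argument call shape, now typed against `Joi.ObjectSchema`. A usage sketch — the schema and route are examples:

```ts
import Joi from "joi"
import * as joiValidator from "../middleware/joi-validator"

const userSchema = Joi.object({
  email: Joi.string().email().required(),
  admin: Joi.boolean(),
})

// validates ctx.request.body before the handler runs:
// router.post("/api/global/users", joiValidator.body(userSchema), controller.save)
```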
@@ -1,11 +1,15 @@
-const google = require("../google")
+import * as google from "../google"
+import { Cookie, Config } from "../../../constants"
+import { clearCookie, getCookie } from "../../../utils"
+import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db"
+import environment from "../../../environment"
+import { getGlobalDB } from "../../../tenancy"
+import { BBContext, Database, SSOProfile } from "@budibase/types"
 const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
-const { Cookie, Config } = require("../../../constants")
-const { clearCookie, getCookie } = require("../../../utils")
-const { getScopedConfig, getPlatformUrl } = require("../../../db/utils")
-const { doWithDB } = require("../../../db")
-const environment = require("../../../environment")
-const { getGlobalDB } = require("../../../tenancy")
+
+type Passport = {
+  authenticate: any
+}
 
 async function fetchGoogleCreds() {
   // try and get the config from the tenant
@@ -22,7 +26,11 @@ async function fetchGoogleCreds() {
   )
 }
 
-async function preAuth(passport, ctx, next) {
+export async function preAuth(
+  passport: Passport,
+  ctx: BBContext,
+  next: Function
+) {
   // get the relevant config
   const googleConfig = await fetchGoogleCreds()
   const platformUrl = await getPlatformUrl({ tenantAware: false })
@@ -41,7 +49,11 @@ async function preAuth(passport, ctx, next) {
   })(ctx, next)
 }
 
-async function postAuth(passport, ctx, next) {
+export async function postAuth(
+  passport: Passport,
+  ctx: BBContext,
+  next: Function
+) {
   // get the relevant config
   const config = await fetchGoogleCreds()
   const platformUrl = await getPlatformUrl({ tenantAware: false })
@@ -56,15 +68,20 @@ async function postAuth(passport, ctx, next) {
         clientSecret: config.clientSecret,
         callbackURL: callbackUrl,
       },
-      (accessToken, refreshToken, profile, done) => {
+      (
+        accessToken: string,
+        refreshToken: string,
+        profile: SSOProfile,
+        done: Function
+      ) => {
         clearCookie(ctx, Cookie.DatasourceAuth)
         done(null, { accessToken, refreshToken })
       }
     ),
     { successRedirect: "/", failureRedirect: "/error" },
-    async (err, tokens) => {
+    async (err: any, tokens: string[]) => {
       // update the DB for the datasource with all the user info
-      await doWithDB(authStateCookie.appId, async db => {
+      await doWithDB(authStateCookie.appId, async (db: Database) => {
         const datasource = await db.get(authStateCookie.datasourceId)
         if (!datasource.config) {
           datasource.config = {}
@@ -78,6 +95,3 @@ async function postAuth(passport, ctx, next) {
     }
   )(ctx, next)
 }
-
-exports.preAuth = preAuth
-exports.postAuth = postAuth
@@ -1,10 +1,15 @@
+import { ssoCallbackUrl } from "./utils"
+import { authenticateThirdParty } from "./third-party-common"
+import { ConfigType, GoogleConfig, Database, SSOProfile } from "@budibase/types"
 const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
-const { ssoCallbackUrl } = require("./utils")
-const { authenticateThirdParty } = require("./third-party-common")
-const { Config } = require("../../../constants")
 
-const buildVerifyFn = saveUserFn => {
-  return (accessToken, refreshToken, profile, done) => {
+export function buildVerifyFn(saveUserFn?: Function) {
+  return (
+    accessToken: string,
+    refreshToken: string,
+    profile: SSOProfile,
+    done: Function
+  ) => {
     const thirdPartyUser = {
       provider: profile.provider, // should always be 'google'
       providerType: "google",
@@ -31,7 +36,11 @@ const buildVerifyFn = saveUserFn => {
  * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
  * @returns Dynamically configured Passport Google Strategy
  */
-exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
+export async function strategyFactory(
+  config: GoogleConfig["config"],
+  callbackUrl: string,
+  saveUserFn?: Function
+) {
   try {
     const { clientID, clientSecret } = config
 
@@ -50,18 +59,15 @@ exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
       },
       verify
     )
-  } catch (err) {
+  } catch (err: any) {
     console.error(err)
-    throw new Error(
-      `Error constructing google authentication strategy: ${err}`,
-      err
-    )
+    throw new Error(`Error constructing google authentication strategy: ${err}`)
   }
 }
 
-exports.getCallbackUrl = async function (db, config) {
-  return ssoCallbackUrl(db, config, Config.GOOGLE)
+export async function getCallbackUrl(
+  db: Database,
+  config: { callbackURL?: string }
+) {
+  return ssoCallbackUrl(db, config, ConfigType.GOOGLE)
 }
-
-// expose for testing
-exports.buildVerifyFn = buildVerifyFn
@@ -1,18 +0,0 @@
-const { Cookie } = require("../../constants")
-const env = require("../../environment")
-const { authError } = require("./utils")
-
-exports.options = {
-  secretOrKey: env.JWT_SECRET,
-  jwtFromRequest: function (ctx) {
-    return ctx.cookies.get(Cookie.Auth)
-  },
-}
-
-exports.authenticate = async function (jwt, done) {
-  try {
-    return done(null, jwt)
-  } catch (err) {
-    return authError(done, "JWT invalid", err)
-  }
-}
@@ -0,0 +1,19 @@
+import { Cookie } from "../../constants"
+import env from "../../environment"
+import { authError } from "./utils"
+import { BBContext } from "@budibase/types"
+
+export const options = {
+  secretOrKey: env.JWT_SECRET,
+  jwtFromRequest: function (ctx: BBContext) {
+    return ctx.cookies.get(Cookie.Auth)
+  },
+}
+
+export async function authenticate(jwt: Function, done: Function) {
+  try {
+    return done(null, jwt)
+  } catch (err) {
+    return authError(done, "JWT invalid", err)
+  }
+}
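`options` and `authenticate` still line up with a passport JWT strategy constructor. A hedged wiring sketch — the `passport-jwt` import reflects typical usage, not this diff:

```ts
import passport from "koa-passport"
import * as jwt from "./passport/jwt"
const JwtStrategy = require("passport-jwt").Strategy

// jwtFromRequest reads the token straight from the Budibase auth cookie
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
```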
@@ -1,18 +1,18 @@
+import { UserStatus } from "../../constants"
+import { compare, newid } from "../../utils"
+import env from "../../environment"
+import * as users from "../../users"
+import { authError } from "./utils"
+import { createASession } from "../../security/sessions"
+import { getTenantId } from "../../tenancy"
+import { BBContext } from "@budibase/types"
 const jwt = require("jsonwebtoken")
-const { UserStatus } = require("../../constants")
-const { compare } = require("../../hashing")
-const env = require("../../environment")
-const users = require("../../users")
-const { authError } = require("./utils")
-const { newid } = require("../../hashing")
-const { createASession } = require("../../security/sessions")
-const { getTenantId } = require("../../tenancy")
 
 const INVALID_ERR = "Invalid credentials"
 const SSO_NO_PASSWORD = "SSO user does not have a password set"
 const EXPIRED = "This account has expired. Please reset your password"
 
-exports.options = {
+export const options = {
   passReqToCallback: true,
 }
 
@@ -24,7 +24,12 @@ exports.options = {
  * @param {*} done callback from passport to return user information and errors
  * @returns The authenticated user, or errors if they occur
  */
-exports.authenticate = async function (ctx, email, password, done) {
+export async function authenticate(
+  ctx: BBContext,
+  email: string,
+  password: string,
+  done: Function
+) {
   if (!email) return authError(done, "Email Required")
   if (!password) return authError(done, "Password Required")
 
@@ -56,9 +61,9 @@ exports.authenticate = async function (ctx, email, password, done) {
     const sessionId = newid()
     const tenantId = getTenantId()
 
-    await createASession(dbUser._id, { sessionId, tenantId })
+    await createASession(dbUser._id!, { sessionId, tenantId })
 
-    dbUser.token = jwt.sign(
+    const token = jwt.sign(
       {
         userId: dbUser._id,
         sessionId,
@@ -69,7 +74,10 @@ exports.authenticate = async function (ctx, email, password, done) {
     // Remove users password in payload
     delete dbUser.password
 
-    return done(null, dbUser)
+    return done(null, {
+      ...dbUser,
+      token,
+    })
   } else {
     return authError(done, INVALID_ERR)
   }
@@ -1,10 +1,22 @@
-const fetch = require("node-fetch")
+import fetch from "node-fetch"
+import { authenticateThirdParty } from "./third-party-common"
+import { ssoCallbackUrl } from "./utils"
+import {
+  Config,
+  ConfigType,
+  OIDCInnerCfg,
+  Database,
+  SSOProfile,
+  ThirdPartyUser,
+} from "@budibase/types"
 const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
-const { authenticateThirdParty } = require("./third-party-common")
-const { ssoCallbackUrl } = require("./utils")
-const { Config } = require("../../../constants")
 
-const buildVerifyFn = saveUserFn => {
+type JwtClaims = {
+  preferred_username: string
+  email: string
+}
+
+export function buildVerifyFn(saveUserFn?: Function) {
   /**
    * @param {*} issuer The identity provider base URL
    * @param {*} sub The user ID
@@ -17,17 +29,17 @@ const buildVerifyFn = saveUserFn => {
    * @param {*} done The passport callback: err, user, info
    */
   return async (
-    issuer,
-    sub,
-    profile,
-    jwtClaims,
-    accessToken,
-    refreshToken,
-    idToken,
-    params,
-    done
+    issuer: string,
+    sub: string,
+    profile: SSOProfile,
+    jwtClaims: JwtClaims,
+    accessToken: string,
+    refreshToken: string,
+    idToken: string,
+    params: any,
+    done: Function
   ) => {
-    const thirdPartyUser = {
+    const thirdPartyUser: ThirdPartyUser = {
       // store the issuer info to enable sync in future
       provider: issuer,
       providerType: "oidc",
@ -53,7 +65,7 @@ const buildVerifyFn = saveUserFn => {
|
||||||
* @param {*} profile The structured profile created by passport using the user info endpoint
|
* @param {*} profile The structured profile created by passport using the user info endpoint
|
||||||
* @param {*} jwtClaims The claims returned in the id token
|
* @param {*} jwtClaims The claims returned in the id token
|
||||||
*/
|
*/
|
||||||
function getEmail(profile, jwtClaims) {
|
function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
|
||||||
// profile not guaranteed to contain email e.g. github connected azure ad account
|
// profile not guaranteed to contain email e.g. github connected azure ad account
|
||||||
if (profile._json.email) {
|
if (profile._json.email) {
|
||||||
return profile._json.email
|
return profile._json.email
|
||||||
|
@ -77,7 +89,7 @@ function getEmail(profile, jwtClaims) {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
function validEmail(value) {
|
function validEmail(value: string) {
|
||||||
return (
|
return (
|
||||||
value &&
|
value &&
|
||||||
!!value.match(
|
!!value.match(
|
||||||
|
@ -91,19 +103,22 @@ function validEmail(value) {
|
||||||
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
||||||
* @returns Dynamically configured Passport OIDC Strategy
|
* @returns Dynamically configured Passport OIDC Strategy
|
||||||
*/
|
*/
|
||||||
exports.strategyFactory = async function (config, saveUserFn) {
|
export async function strategyFactory(config: Config, saveUserFn?: Function) {
|
||||||
try {
|
try {
|
||||||
const verify = buildVerifyFn(saveUserFn)
|
const verify = buildVerifyFn(saveUserFn)
|
||||||
const strategy = new OIDCStrategy(config, verify)
|
const strategy = new OIDCStrategy(config, verify)
|
||||||
strategy.name = "oidc"
|
strategy.name = "oidc"
|
||||||
return strategy
|
return strategy
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
console.error(err)
|
console.error(err)
|
||||||
throw new Error("Error constructing OIDC authentication strategy", err)
|
throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
|
export async function fetchStrategyConfig(
|
||||||
|
enrichedConfig: OIDCInnerCfg,
|
||||||
|
callbackUrl?: string
|
||||||
|
) {
|
||||||
try {
|
try {
|
||||||
const { clientID, clientSecret, configUrl } = enrichedConfig
|
const { clientID, clientSecret, configUrl } = enrichedConfig
|
||||||
|
|
||||||
|
@ -135,13 +150,15 @@ exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error(err)
|
console.error(err)
|
||||||
throw new Error("Error constructing OIDC authentication configuration", err)
|
throw new Error(
|
||||||
|
`Error constructing OIDC authentication configuration - ${err}`
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getCallbackUrl = async function (db, config) {
|
export async function getCallbackUrl(
|
||||||
return ssoCallbackUrl(db, config, Config.OIDC)
|
db: Database,
|
||||||
|
config: { callbackURL?: string }
|
||||||
|
) {
|
||||||
|
return ssoCallbackUrl(db, config, ConfigType.OIDC)
|
||||||
}
|
}
|
||||||
|
|
||||||
// expose for testing
|
|
||||||
exports.buildVerifyFn = buildVerifyFn
|
|
|
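Since the OIDC settings are stored per tenant in CouchDB rather than in environment variables, the strategy can only be constructed at request time. A rough sketch of how the two exports could compose, assuming `fetchStrategyConfig` produces the options object that `strategyFactory` expects (the middleware shape, scopes and save function are illustrative):

import passport from "koa-passport"
import { strategyFactory, fetchStrategyConfig } from "./oidc" // assumed path

async function oidcMiddleware(ctx: any, next: any, enrichedConfig: any, saveUser: Function) {
  // build passport strategy options from the tenant's stored OIDC config
  const config = await fetchStrategyConfig(enrichedConfig, ctx.query.callbackUrl)
  // register a freshly built strategy under the "oidc" name
  passport.use(await strategyFactory(config, saveUser))
  return passport.authenticate("oidc", { scope: ["profile", "email"] })(ctx, next)
}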
@@ -4,7 +4,7 @@ const { data } = require("./utilities/mock-data")
 const { DEFAULT_TENANT_ID } = require("../../../constants")

 const { generateGlobalUserID } = require("../../../db/utils")
-const { newid } = require("../../../hashing")
+const { newid } = require("../../../utils")
 const { doWithGlobalDB, doInTenant } = require("../../../tenancy")

 const done = jest.fn()

@@ -1,21 +1,22 @@
-const env = require("../../environment")
+import env from "../../environment"
+import { generateGlobalUserID } from "../../db"
+import { authError } from "./utils"
+import { newid } from "../../utils"
+import { createASession } from "../../security/sessions"
+import * as users from "../../users"
+import { getGlobalDB, getTenantId } from "../../tenancy"
+import fetch from "node-fetch"
+import { ThirdPartyUser } from "@budibase/types"
 const jwt = require("jsonwebtoken")
-const { generateGlobalUserID } = require("../../db/utils")
-const { authError } = require("./utils")
-const { newid } = require("../../hashing")
-const { createASession } = require("../../security/sessions")
-const users = require("../../users")
-const { getGlobalDB, getTenantId } = require("../../tenancy")
-const fetch = require("node-fetch")

 /**
  * Common authentication logic for third parties. e.g. OAuth, OIDC.
  */
-exports.authenticateThirdParty = async function (
-  thirdPartyUser,
-  requireLocalAccount = true,
-  done,
-  saveUserFn
+export async function authenticateThirdParty(
+  thirdPartyUser: ThirdPartyUser,
+  requireLocalAccount: boolean = true,
+  done: Function,
+  saveUserFn?: Function
 ) {
   if (!saveUserFn) {
     throw new Error("Save user function must be provided")
@@ -39,7 +40,7 @@ exports.authenticateThirdParty = async function (
   // try to load by id
   try {
     dbUser = await db.get(userId)
-  } catch (err) {
+  } catch (err: any) {
     // abort when not 404 error
     if (!err.status || err.status !== 404) {
       return authError(
@@ -81,7 +82,7 @@ exports.authenticateThirdParty = async function (
   // create or sync the user
   try {
     await saveUserFn(dbUser, false, false)
-  } catch (err) {
+  } catch (err: any) {
     return authError(done, err)
   }

@@ -104,13 +105,16 @@ exports.authenticateThirdParty = async function (
   return done(null, dbUser)
 }

-async function syncProfilePicture(user, thirdPartyUser) {
-  const pictureUrl = thirdPartyUser.profile._json.picture
+async function syncProfilePicture(
+  user: ThirdPartyUser,
+  thirdPartyUser: ThirdPartyUser
+) {
+  const pictureUrl = thirdPartyUser.profile?._json.picture
   if (pictureUrl) {
     const response = await fetch(pictureUrl)

     if (response.status === 200) {
-      const type = response.headers.get("content-type")
+      const type = response.headers.get("content-type") as string
       if (type.startsWith("image/")) {
         user.pictureUrl = pictureUrl
       }
@@ -123,7 +127,7 @@ async function syncProfilePicture(user, thirdPartyUser) {
 /**
  * @returns a user that has been sync'd with third party information
  */
-async function syncUser(user, thirdPartyUser) {
+async function syncUser(user: ThirdPartyUser, thirdPartyUser: ThirdPartyUser) {
   // provider
   user.provider = thirdPartyUser.provider
   user.providerType = thirdPartyUser.providerType
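The third-party flow above is shared by the OIDC and Google strategies: load (or create) the matching local user, sync profile data, persist via `saveUserFn`, then hand the user back through passport's `done`. A direct-call sketch with illustrative values (field names mirror the oidc verify function shown earlier; the `userId` and `profile` shapes are assumptions):

import { authenticateThirdParty } from "./third-party-common" // assumed path

const thirdPartyUser: any = {
  provider: "https://login.example.com", // issuer
  providerType: "oidc",
  userId: "abc123", // id assigned by the identity provider
  email: "user@example.com",
  profile: { _json: { picture: "https://example.com/me.png" } },
}

await authenticateThirdParty(
  thirdPartyUser,
  false, // don't require a pre-existing local account
  (err: any, user: any) => console.log(err || user), // passport-style done
  async (user: any) => user // saveUserFn - omitting it throws immediately
)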
@@ -1,6 +1,6 @@
-const { isMultiTenant, getTenantId } = require("../../tenancy")
-const { getScopedConfig } = require("../../db/utils")
-const { Config } = require("../../constants")
+import { isMultiTenant, getTenantId } from "../../tenancy"
+import { getScopedConfig } from "../../db"
+import { ConfigType, Database, Config } from "@budibase/types"

 /**
  * Utility to handle authentication errors.
@@ -10,7 +10,7 @@ const { Config } = require("../../constants")
  * @param {*} err (Optional) error that will be logged
  */

-exports.authError = function (done, message, err = null) {
+export function authError(done: Function, message: string, err?: any) {
   return done(
     err,
     null, // never return a user
@@ -18,13 +18,17 @@ exports.authError = function (done, message, err = null) {
   )
 }

-exports.ssoCallbackUrl = async (db, config, type) => {
+export async function ssoCallbackUrl(
+  db: Database,
+  config: { callbackURL?: string },
+  type: ConfigType
+) {
   // incase there is a callback URL from before
   if (config && config.callbackURL) {
     return config.callbackURL
   }
   const publicConfig = await getScopedConfig(db, {
-    type: Config.SETTINGS,
+    type: ConfigType.SETTINGS,
   })

   let callbackUrl = `/api/global/auth`
@@ -1,426 +1,2 @@
-const sanitize = require("sanitize-s3-objectkey")
-import AWS from "aws-sdk"
-import stream from "stream"
-import fetch from "node-fetch"
-import tar from "tar-fs"
-const zlib = require("zlib")
-import { promisify } from "util"
-import { join } from "path"
-import fs from "fs"
-import env from "../environment"
-import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
-import { v4 } from "uuid"
-import { APP_PREFIX, APP_DEV_PREFIX } from "../db/utils"
-
-const streamPipeline = promisify(stream.pipeline)
-// use this as a temporary store of buckets that are being created
-const STATE = {
-  bucketCreationPromises: {},
-}
-
-type ListParams = {
-  ContinuationToken?: string
-}
-
-type UploadParams = {
-  bucket: string
-  filename: string
-  path: string
-  type?: string
-  // can be undefined, we will remove it
-  metadata?: {
-    [key: string]: string | undefined
-  }
-}
-
-const CONTENT_TYPE_MAP: any = {
-  txt: "text/plain",
-  html: "text/html",
-  css: "text/css",
-  js: "application/javascript",
-  json: "application/json",
-  gz: "application/gzip",
-}
-const STRING_CONTENT_TYPES = [
-  CONTENT_TYPE_MAP.html,
-  CONTENT_TYPE_MAP.css,
-  CONTENT_TYPE_MAP.js,
-  CONTENT_TYPE_MAP.json,
-]
-
-// does normal sanitization and then swaps dev apps to apps
-export function sanitizeKey(input: string) {
-  return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
-}
-
-// simply handles the dev app to app conversion
-export function sanitizeBucket(input: string) {
-  return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
-}
-
-function publicPolicy(bucketName: string) {
-  return {
-    Version: "2012-10-17",
-    Statement: [
-      {
-        Effect: "Allow",
-        Principal: {
-          AWS: ["*"],
-        },
-        Action: "s3:GetObject",
-        Resource: [`arn:aws:s3:::${bucketName}/*`],
-      },
-    ],
-  }
-}
-
-const PUBLIC_BUCKETS = [
-  ObjectStoreBuckets.APPS,
-  ObjectStoreBuckets.GLOBAL,
-  ObjectStoreBuckets.PLUGINS,
-]
-
-/**
- * Gets a connection to the object store using the S3 SDK.
- * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
- * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
- * @constructor
- */
-export const ObjectStore = (bucket: string) => {
-  const config: any = {
-    s3ForcePathStyle: true,
-    signatureVersion: "v4",
-    apiVersion: "2006-03-01",
-    accessKeyId: env.MINIO_ACCESS_KEY,
-    secretAccessKey: env.MINIO_SECRET_KEY,
-    region: env.AWS_REGION,
-  }
-  if (bucket) {
-    config.params = {
-      Bucket: sanitizeBucket(bucket),
-    }
-  }
-  if (env.MINIO_URL) {
-    config.endpoint = env.MINIO_URL
-  }
-  return new AWS.S3(config)
-}
-
-/**
- * Given an object store and a bucket name this will make sure the bucket exists,
- * if it does not exist then it will create it.
- */
-export const makeSureBucketExists = async (client: any, bucketName: string) => {
-  bucketName = sanitizeBucket(bucketName)
-  try {
-    await client
-      .headBucket({
-        Bucket: bucketName,
-      })
-      .promise()
-  } catch (err: any) {
-    const promises: any = STATE.bucketCreationPromises
-    const doesntExist = err.statusCode === 404,
-      noAccess = err.statusCode === 403
-    if (promises[bucketName]) {
-      await promises[bucketName]
-    } else if (doesntExist || noAccess) {
-      if (doesntExist) {
-        // bucket doesn't exist create it
-        promises[bucketName] = client
-          .createBucket({
-            Bucket: bucketName,
-          })
-          .promise()
-        await promises[bucketName]
-        delete promises[bucketName]
-      }
-      // public buckets are quite hidden in the system, make sure
-      // no bucket is set accidentally
-      if (PUBLIC_BUCKETS.includes(bucketName)) {
-        await client
-          .putBucketPolicy({
-            Bucket: bucketName,
-            Policy: JSON.stringify(publicPolicy(bucketName)),
-          })
-          .promise()
-      }
-    } else {
-      throw new Error("Unable to write to object store bucket.")
-    }
-  }
-}
-
-/**
- * Uploads the contents of a file given the required parameters, useful when
- * temp files in use (for example file uploaded as an attachment).
- */
-export const upload = async ({
-  bucket: bucketName,
-  filename,
-  path,
-  type,
-  metadata,
-}: UploadParams) => {
-  const extension = filename.split(".").pop()
-  const fileBytes = fs.readFileSync(path)
-
-  const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
-
-  let contentType = type
-  if (!contentType) {
-    contentType = extension
-      ? CONTENT_TYPE_MAP[extension.toLowerCase()]
-      : CONTENT_TYPE_MAP.txt
-  }
-  const config: any = {
-    // windows file paths need to be converted to forward slashes for s3
-    Key: sanitizeKey(filename),
-    Body: fileBytes,
-    ContentType: contentType,
-  }
-  if (metadata && typeof metadata === "object") {
-    // remove any nullish keys from the metadata object, as these may be considered invalid
-    for (let key of Object.keys(metadata)) {
-      if (!metadata[key] || typeof metadata[key] !== "string") {
-        delete metadata[key]
-      }
-    }
-    config.Metadata = metadata
-  }
-  return objectStore.upload(config).promise()
-}
-
-/**
- * Similar to the upload function but can be used to send a file stream
- * through to the object store.
- */
-export const streamUpload = async (
-  bucketName: string,
-  filename: string,
-  stream: any,
-  extra = {}
-) => {
-  const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
-
-  // Set content type for certain known extensions
-  if (filename?.endsWith(".js")) {
-    extra = {
-      ...extra,
-      ContentType: "application/javascript",
-    }
-  } else if (filename?.endsWith(".svg")) {
-    extra = {
-      ...extra,
-      ContentType: "image",
-    }
-  }
-
-  const params = {
-    Bucket: sanitizeBucket(bucketName),
-    Key: sanitizeKey(filename),
-    Body: stream,
-    ...extra,
-  }
-  return objectStore.upload(params).promise()
-}
-
-/**
- * retrieves the contents of a file from the object store, if it is a known content type it
- * will be converted, otherwise it will be returned as a buffer stream.
- */
-export const retrieve = async (bucketName: string, filepath: string) => {
-  const objectStore = ObjectStore(bucketName)
-  const params = {
-    Bucket: sanitizeBucket(bucketName),
-    Key: sanitizeKey(filepath),
-  }
-  const response: any = await objectStore.getObject(params).promise()
-  // currently these are all strings
-  if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
-    return response.Body.toString("utf8")
-  } else {
-    return response.Body
-  }
-}
-
-export const listAllObjects = async (bucketName: string, path: string) => {
-  const objectStore = ObjectStore(bucketName)
-  const list = (params: ListParams = {}) => {
-    return objectStore
-      .listObjectsV2({
-        ...params,
-        Bucket: sanitizeBucket(bucketName),
-        Prefix: sanitizeKey(path),
-      })
-      .promise()
-  }
-  let isTruncated = false,
-    token,
-    objects: AWS.S3.Types.Object[] = []
-  do {
-    let params: ListParams = {}
-    if (token) {
-      params.ContinuationToken = token
-    }
-    const response = await list(params)
-    if (response.Contents) {
-      objects = objects.concat(response.Contents)
-    }
-    isTruncated = !!response.IsTruncated
-  } while (isTruncated)
-  return objects
-}
-
-/**
- * Same as retrieval function but puts to a temporary file.
- */
-export const retrieveToTmp = async (bucketName: string, filepath: string) => {
-  bucketName = sanitizeBucket(bucketName)
-  filepath = sanitizeKey(filepath)
-  const data = await retrieve(bucketName, filepath)
-  const outputPath = join(budibaseTempDir(), v4())
-  fs.writeFileSync(outputPath, data)
-  return outputPath
-}
-
-export const retrieveDirectory = async (bucketName: string, path: string) => {
-  let writePath = join(budibaseTempDir(), v4())
-  fs.mkdirSync(writePath)
-  const objects = await listAllObjects(bucketName, path)
-  let fullObjects = await Promise.all(
-    objects.map(obj => retrieve(bucketName, obj.Key!))
-  )
-  let count = 0
-  for (let obj of objects) {
-    const filename = obj.Key!
-    const data = fullObjects[count++]
-    const possiblePath = filename.split("/")
-    if (possiblePath.length > 1) {
-      const dirs = possiblePath.slice(0, possiblePath.length - 1)
-      fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
-    }
-    fs.writeFileSync(join(writePath, ...possiblePath), data)
-  }
-  return writePath
-}
-
-/**
- * Delete a single file.
- */
-export const deleteFile = async (bucketName: string, filepath: string) => {
-  const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
-  const params = {
-    Bucket: bucketName,
-    Key: filepath,
-  }
-  return objectStore.deleteObject(params)
-}
-
-export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
-  const objectStore = ObjectStore(bucketName)
-  await makeSureBucketExists(objectStore, bucketName)
-  const params = {
-    Bucket: bucketName,
-    Delete: {
-      Objects: filepaths.map((path: any) => ({ Key: path })),
-    },
-  }
-  return objectStore.deleteObjects(params).promise()
-}
-
-/**
- * Delete a path, including everything within.
- */
-export const deleteFolder = async (
-  bucketName: string,
-  folder: string
-): Promise<any> => {
-  bucketName = sanitizeBucket(bucketName)
-  folder = sanitizeKey(folder)
-  const client = ObjectStore(bucketName)
-  const listParams = {
-    Bucket: bucketName,
-    Prefix: folder,
-  }
-
-  let response: any = await client.listObjects(listParams).promise()
-  if (response.Contents.length === 0) {
-    return
-  }
-  const deleteParams: any = {
-    Bucket: bucketName,
-    Delete: {
-      Objects: [],
-    },
-  }
-
-  response.Contents.forEach((content: any) => {
-    deleteParams.Delete.Objects.push({ Key: content.Key })
-  })
-
-  response = await client.deleteObjects(deleteParams).promise()
-  // can only empty 1000 items at once
-  if (response.Deleted.length === 1000) {
-    return deleteFolder(bucketName, folder)
-  }
-}
-
-export const uploadDirectory = async (
-  bucketName: string,
-  localPath: string,
-  bucketPath: string
-) => {
-  bucketName = sanitizeBucket(bucketName)
-  let uploads = []
-  const files = fs.readdirSync(localPath, { withFileTypes: true })
-  for (let file of files) {
-    const path = sanitizeKey(join(bucketPath, file.name))
-    const local = join(localPath, file.name)
-    if (file.isDirectory()) {
-      uploads.push(uploadDirectory(bucketName, local, path))
-    } else {
-      uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
-    }
-  }
-  await Promise.all(uploads)
-  return files
-}
-
-export const downloadTarballDirect = async (
-  url: string,
-  path: string,
-  headers = {}
-) => {
-  path = sanitizeKey(path)
-  const response = await fetch(url, { headers })
-  if (!response.ok) {
-    throw new Error(`unexpected response ${response.statusText}`)
-  }
-
-  await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
-}
-
-export const downloadTarball = async (
-  url: string,
-  bucketName: string,
-  path: string
-) => {
-  bucketName = sanitizeBucket(bucketName)
-  path = sanitizeKey(path)
-  const response = await fetch(url)
-  if (!response.ok) {
-    throw new Error(`unexpected response ${response.statusText}`)
-  }
-
-  const tmpPath = join(budibaseTempDir(), path)
-  await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
-  if (!env.isTest() && env.SELF_HOSTED) {
-    await uploadDirectory(bucketName, tmpPath, path)
-  }
-  // return the temporary path incase there is a use for it
-  return tmpPath
-}
+export * from "./objectStore"
+export * from "./utils"
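The 400-odd lines above are not deleted outright: the directory index collapses to a two-line barrel, and the implementation moves to objectStore.ts (re-added in full below, with one import path updated). Existing deep imports keep resolving through the barrel; an illustrative consumer:

// unchanged before and after the move - the barrel re-exports everything
import { upload, retrieve, ObjectStoreBuckets } from "../objectStore" // path illustrative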
@@ -0,0 +1,426 @@
+const sanitize = require("sanitize-s3-objectkey")
+import AWS from "aws-sdk"
+import stream from "stream"
+import fetch from "node-fetch"
+import tar from "tar-fs"
+const zlib = require("zlib")
+import { promisify } from "util"
+import { join } from "path"
+import fs from "fs"
+import env from "../environment"
+import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
+import { v4 } from "uuid"
+import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
+
+const streamPipeline = promisify(stream.pipeline)
+// use this as a temporary store of buckets that are being created
+const STATE = {
+  bucketCreationPromises: {},
+}
+
+type ListParams = {
+  ContinuationToken?: string
+}
+
+type UploadParams = {
+  bucket: string
+  filename: string
+  path: string
+  type?: string
+  // can be undefined, we will remove it
+  metadata?: {
+    [key: string]: string | undefined
+  }
+}
+
+const CONTENT_TYPE_MAP: any = {
+  txt: "text/plain",
+  html: "text/html",
+  css: "text/css",
+  js: "application/javascript",
+  json: "application/json",
+  gz: "application/gzip",
+}
+const STRING_CONTENT_TYPES = [
+  CONTENT_TYPE_MAP.html,
+  CONTENT_TYPE_MAP.css,
+  CONTENT_TYPE_MAP.js,
+  CONTENT_TYPE_MAP.json,
+]
+
+// does normal sanitization and then swaps dev apps to apps
+export function sanitizeKey(input: string) {
+  return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
+}
+
+// simply handles the dev app to app conversion
+export function sanitizeBucket(input: string) {
+  return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
+}
+
+function publicPolicy(bucketName: string) {
+  return {
+    Version: "2012-10-17",
+    Statement: [
+      {
+        Effect: "Allow",
+        Principal: {
+          AWS: ["*"],
+        },
+        Action: "s3:GetObject",
+        Resource: [`arn:aws:s3:::${bucketName}/*`],
+      },
+    ],
+  }
+}
+
+const PUBLIC_BUCKETS = [
+  ObjectStoreBuckets.APPS,
+  ObjectStoreBuckets.GLOBAL,
+  ObjectStoreBuckets.PLUGINS,
+]
+
+/**
+ * Gets a connection to the object store using the S3 SDK.
+ * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
+ * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
+ * @constructor
+ */
+export const ObjectStore = (bucket: string) => {
+  const config: any = {
+    s3ForcePathStyle: true,
+    signatureVersion: "v4",
+    apiVersion: "2006-03-01",
+    accessKeyId: env.MINIO_ACCESS_KEY,
+    secretAccessKey: env.MINIO_SECRET_KEY,
+    region: env.AWS_REGION,
+  }
+  if (bucket) {
+    config.params = {
+      Bucket: sanitizeBucket(bucket),
+    }
+  }
+  if (env.MINIO_URL) {
+    config.endpoint = env.MINIO_URL
+  }
+  return new AWS.S3(config)
+}
+
+/**
+ * Given an object store and a bucket name this will make sure the bucket exists,
+ * if it does not exist then it will create it.
+ */
+export const makeSureBucketExists = async (client: any, bucketName: string) => {
+  bucketName = sanitizeBucket(bucketName)
+  try {
+    await client
+      .headBucket({
+        Bucket: bucketName,
+      })
+      .promise()
+  } catch (err: any) {
+    const promises: any = STATE.bucketCreationPromises
+    const doesntExist = err.statusCode === 404,
+      noAccess = err.statusCode === 403
+    if (promises[bucketName]) {
+      await promises[bucketName]
+    } else if (doesntExist || noAccess) {
+      if (doesntExist) {
+        // bucket doesn't exist create it
+        promises[bucketName] = client
+          .createBucket({
+            Bucket: bucketName,
+          })
+          .promise()
+        await promises[bucketName]
+        delete promises[bucketName]
+      }
+      // public buckets are quite hidden in the system, make sure
+      // no bucket is set accidentally
+      if (PUBLIC_BUCKETS.includes(bucketName)) {
+        await client
+          .putBucketPolicy({
+            Bucket: bucketName,
+            Policy: JSON.stringify(publicPolicy(bucketName)),
+          })
+          .promise()
+      }
+    } else {
+      throw new Error("Unable to write to object store bucket.")
+    }
+  }
+}
+
+/**
+ * Uploads the contents of a file given the required parameters, useful when
+ * temp files in use (for example file uploaded as an attachment).
+ */
+export const upload = async ({
+  bucket: bucketName,
+  filename,
+  path,
+  type,
+  metadata,
+}: UploadParams) => {
+  const extension = filename.split(".").pop()
+  const fileBytes = fs.readFileSync(path)
+
+  const objectStore = ObjectStore(bucketName)
+  await makeSureBucketExists(objectStore, bucketName)
+
+  let contentType = type
+  if (!contentType) {
+    contentType = extension
+      ? CONTENT_TYPE_MAP[extension.toLowerCase()]
+      : CONTENT_TYPE_MAP.txt
+  }
+  const config: any = {
+    // windows file paths need to be converted to forward slashes for s3
+    Key: sanitizeKey(filename),
+    Body: fileBytes,
+    ContentType: contentType,
+  }
+  if (metadata && typeof metadata === "object") {
+    // remove any nullish keys from the metadata object, as these may be considered invalid
+    for (let key of Object.keys(metadata)) {
+      if (!metadata[key] || typeof metadata[key] !== "string") {
+        delete metadata[key]
+      }
+    }
+    config.Metadata = metadata
+  }
+  return objectStore.upload(config).promise()
+}
+
+/**
+ * Similar to the upload function but can be used to send a file stream
+ * through to the object store.
+ */
+export const streamUpload = async (
+  bucketName: string,
+  filename: string,
+  stream: any,
+  extra = {}
+) => {
+  const objectStore = ObjectStore(bucketName)
+  await makeSureBucketExists(objectStore, bucketName)
+
+  // Set content type for certain known extensions
+  if (filename?.endsWith(".js")) {
+    extra = {
+      ...extra,
+      ContentType: "application/javascript",
+    }
+  } else if (filename?.endsWith(".svg")) {
+    extra = {
+      ...extra,
+      ContentType: "image",
+    }
+  }
+
+  const params = {
+    Bucket: sanitizeBucket(bucketName),
+    Key: sanitizeKey(filename),
+    Body: stream,
+    ...extra,
+  }
+  return objectStore.upload(params).promise()
+}
+
+/**
+ * retrieves the contents of a file from the object store, if it is a known content type it
+ * will be converted, otherwise it will be returned as a buffer stream.
+ */
+export const retrieve = async (bucketName: string, filepath: string) => {
+  const objectStore = ObjectStore(bucketName)
+  const params = {
+    Bucket: sanitizeBucket(bucketName),
+    Key: sanitizeKey(filepath),
+  }
+  const response: any = await objectStore.getObject(params).promise()
+  // currently these are all strings
+  if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
+    return response.Body.toString("utf8")
+  } else {
+    return response.Body
+  }
+}
+
+export const listAllObjects = async (bucketName: string, path: string) => {
+  const objectStore = ObjectStore(bucketName)
+  const list = (params: ListParams = {}) => {
+    return objectStore
+      .listObjectsV2({
+        ...params,
+        Bucket: sanitizeBucket(bucketName),
+        Prefix: sanitizeKey(path),
+      })
+      .promise()
+  }
+  let isTruncated = false,
+    token,
+    objects: AWS.S3.Types.Object[] = []
+  do {
+    let params: ListParams = {}
+    if (token) {
+      params.ContinuationToken = token
+    }
+    const response = await list(params)
+    if (response.Contents) {
+      objects = objects.concat(response.Contents)
+    }
+    isTruncated = !!response.IsTruncated
+  } while (isTruncated)
+  return objects
+}
+
+/**
+ * Same as retrieval function but puts to a temporary file.
+ */
+export const retrieveToTmp = async (bucketName: string, filepath: string) => {
+  bucketName = sanitizeBucket(bucketName)
+  filepath = sanitizeKey(filepath)
+  const data = await retrieve(bucketName, filepath)
+  const outputPath = join(budibaseTempDir(), v4())
+  fs.writeFileSync(outputPath, data)
+  return outputPath
+}
+
+export const retrieveDirectory = async (bucketName: string, path: string) => {
+  let writePath = join(budibaseTempDir(), v4())
+  fs.mkdirSync(writePath)
+  const objects = await listAllObjects(bucketName, path)
+  let fullObjects = await Promise.all(
+    objects.map(obj => retrieve(bucketName, obj.Key!))
+  )
+  let count = 0
+  for (let obj of objects) {
+    const filename = obj.Key!
+    const data = fullObjects[count++]
+    const possiblePath = filename.split("/")
+    if (possiblePath.length > 1) {
+      const dirs = possiblePath.slice(0, possiblePath.length - 1)
+      fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
+    }
+    fs.writeFileSync(join(writePath, ...possiblePath), data)
+  }
+  return writePath
+}
+
+/**
+ * Delete a single file.
+ */
+export const deleteFile = async (bucketName: string, filepath: string) => {
+  const objectStore = ObjectStore(bucketName)
+  await makeSureBucketExists(objectStore, bucketName)
+  const params = {
+    Bucket: bucketName,
+    Key: filepath,
+  }
+  return objectStore.deleteObject(params)
+}
+
+export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
+  const objectStore = ObjectStore(bucketName)
+  await makeSureBucketExists(objectStore, bucketName)
+  const params = {
+    Bucket: bucketName,
+    Delete: {
+      Objects: filepaths.map((path: any) => ({ Key: path })),
+    },
+  }
+  return objectStore.deleteObjects(params).promise()
+}
+
+/**
+ * Delete a path, including everything within.
+ */
+export const deleteFolder = async (
+  bucketName: string,
+  folder: string
+): Promise<any> => {
+  bucketName = sanitizeBucket(bucketName)
+  folder = sanitizeKey(folder)
+  const client = ObjectStore(bucketName)
+  const listParams = {
+    Bucket: bucketName,
+    Prefix: folder,
+  }
+
+  let response: any = await client.listObjects(listParams).promise()
+  if (response.Contents.length === 0) {
+    return
+  }
+  const deleteParams: any = {
+    Bucket: bucketName,
+    Delete: {
+      Objects: [],
+    },
+  }
+
+  response.Contents.forEach((content: any) => {
+    deleteParams.Delete.Objects.push({ Key: content.Key })
+  })
+
+  response = await client.deleteObjects(deleteParams).promise()
+  // can only empty 1000 items at once
+  if (response.Deleted.length === 1000) {
+    return deleteFolder(bucketName, folder)
+  }
+}
+
+export const uploadDirectory = async (
+  bucketName: string,
+  localPath: string,
+  bucketPath: string
+) => {
+  bucketName = sanitizeBucket(bucketName)
+  let uploads = []
+  const files = fs.readdirSync(localPath, { withFileTypes: true })
+  for (let file of files) {
+    const path = sanitizeKey(join(bucketPath, file.name))
+    const local = join(localPath, file.name)
+    if (file.isDirectory()) {
+      uploads.push(uploadDirectory(bucketName, local, path))
+    } else {
+      uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
+    }
+  }
+  await Promise.all(uploads)
+  return files
+}
+
+export const downloadTarballDirect = async (
+  url: string,
+  path: string,
+  headers = {}
+) => {
+  path = sanitizeKey(path)
+  const response = await fetch(url, { headers })
+  if (!response.ok) {
+    throw new Error(`unexpected response ${response.statusText}`)
+  }
+
+  await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
+}
+
+export const downloadTarball = async (
+  url: string,
+  bucketName: string,
+  path: string
+) => {
+  bucketName = sanitizeBucket(bucketName)
+  path = sanitizeKey(path)
+  const response = await fetch(url)
+  if (!response.ok) {
+    throw new Error(`unexpected response ${response.statusText}`)
+  }
+
+  const tmpPath = join(budibaseTempDir(), path)
+  await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
+  if (!env.isTest() && env.SELF_HOSTED) {
+    await uploadDirectory(bucketName, tmpPath, path)
+  }
+  // return the temporary path incase there is a use for it
+  return tmpPath
+}
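A short usage sketch of the relocated API, assuming the MinIO/S3 environment variables are set (bucket and key names are illustrative):

import { upload, listAllObjects, retrieveToTmp } from "./objectStore"

async function example() {
  // content type is inferred from the extension via CONTENT_TYPE_MAP
  await upload({
    bucket: "my-bucket",
    filename: "exports/data.json",
    path: "/tmp/data.json",
  })
  // pagination via ContinuationToken is handled inside listAllObjects
  const objects = await listAllObjects("my-bucket", "exports/")
  // downloads to a uuid-named file under the budibase temp dir
  const localPath = await retrieveToTmp("my-bucket", "exports/data.json")
  console.log(objects.length, localPath)
}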
@@ -1,14 +1,15 @@
-const { join } = require("path")
-const { tmpdir } = require("os")
-const fs = require("fs")
-const env = require("../environment")
+import { join } from "path"
+import { tmpdir } from "os"
+import fs from "fs"
+import env from "../environment"

 /****************************************************
  * NOTE: When adding a new bucket - name            *
  * sure that S3 usages (like budibase-infra)        *
  * have been updated to have a unique bucket name.  *
  ****************************************************/
-exports.ObjectStoreBuckets = {
+// can't be an enum - only numbers can be used for computed types
+export const ObjectStoreBuckets = {
   BACKUPS: env.BACKUPS_BUCKET_NAME,
   APPS: env.APPS_BUCKET_NAME,
   TEMPLATES: env.TEMPLATES_BUCKET_NAME,
@@ -22,6 +23,6 @@ if (!fs.existsSync(bbTmp)) {
   fs.mkdirSync(bbTmp)
 }

-exports.budibaseTempDir = function () {
+export function budibaseTempDir() {
   return bbTmp
 }
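The new comment reflects a real TypeScript restriction: only numeric enum members may be computed, so values read from `env` at runtime cannot back a string enum. A plain `const` object keeps the same dot-lookup call sites; a minimal illustration:

// enum Buckets { BACKUPS = env.BACKUPS_BUCKET_NAME } // error: computed values
// are not permitted in an enum with string valued members
const Buckets = {
  BACKUPS: process.env.BACKUPS_BUCKET_NAME, // runtime value is fine in an object
}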
@@ -1,11 +0,0 @@
-const env = require("./environment")
-
-exports.pinoSettings = () => ({
-  prettyPrint: {
-    levelFirst: true,
-  },
-  level: env.LOG_LEVEL || "error",
-  autoLogging: {
-    ignore: req => req.url.includes("/health"),
-  },
-})

@@ -0,0 +1,13 @@
+import env from "./environment"
+
+export function pinoSettings() {
+  return {
+    prettyPrint: {
+      levelFirst: true,
+    },
+    level: env.LOG_LEVEL || "error",
+    autoLogging: {
+      ignore: (req: { url: string }) => req.url.includes("/health"),
+    },
+  }
+}
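A sketch of how these settings are typically consumed, assuming the pino-based koa middleware (this wiring is not part of the commit itself):

import Koa from "koa"
import logger from "koa-pino-logger"
import { pinoSettings } from "./pino" // assumed module path

const app = new Koa()
// /health requests are dropped by the autoLogging.ignore predicate above
app.use(logger(pinoSettings()))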
@@ -1,13 +0,0 @@
-// Mimic the outer package export for usage in index.ts
-// The outer exports can't be used as they now reference dist directly
-import * as generic from "../cache/generic"
-import * as user from "../cache/user"
-import * as app from "../cache/appMetadata"
-import * as writethrough from "../cache/writethrough"
-
-export = {
-  app,
-  user,
-  writethrough,
-  ...generic,
-}

@@ -1,26 +0,0 @@
-// Mimic the outer package export for usage in index.ts
-// The outer exports can't be used as they now reference dist directly
-import {
-  getAppDB,
-  getDevAppDB,
-  getProdAppDB,
-  getAppId,
-  updateAppId,
-  doInAppContext,
-  doInTenant,
-  doInContext,
-} from "../context"
-
-import * as identity from "../context/identity"
-
-export = {
-  getAppDB,
-  getDevAppDB,
-  getProdAppDB,
-  getAppId,
-  updateAppId,
-  doInAppContext,
-  doInTenant,
-  doInContext,
-  identity,
-}

@@ -1,4 +0,0 @@
-// Mimic the outer package export for usage in index.ts
-// The outer exports can't be used as they now reference dist directly
-export * from "../objectStore"
-export * from "../objectStore/utils"

@@ -1,13 +0,0 @@
-// Mimic the outer package export for usage in index.ts
-// The outer exports can't be used as they now reference dist directly
-import Client from "../redis"
-import utils from "../redis/utils"
-import clients from "../redis/init"
-import * as redlock from "../redis/redlock"
-
-export = {
-  Client,
-  utils,
-  clients,
-  redlock,
-}

@@ -1,4 +0,0 @@
-// Mimic the outer package export for usage in index.ts
-// The outer exports can't be used as they now reference dist directly
-export * from "../utils"
-export * from "../hashing"
@@ -1,9 +1,5 @@
-const {
-  DatasourceFieldType,
-  QueryType,
-  PluginType,
-} = require("@budibase/types")
-const joi = require("joi")
+import { DatasourceFieldType, QueryType, PluginType } from "@budibase/types"
+import joi from "joi"

 const DATASOURCE_TYPES = [
   "Relational",
@@ -14,14 +10,14 @@ const DATASOURCE_TYPES = [
   "API",
 ]

-function runJoi(validator, schema) {
+function runJoi(validator: joi.Schema, schema: any) {
   const { error } = validator.validate(schema)
   if (error) {
     throw error
   }
 }

-function validateComponent(schema) {
+function validateComponent(schema: any) {
   const validator = joi.object({
     type: joi.string().allow("component").required(),
     metadata: joi.object().unknown(true).required(),
@@ -37,7 +33,7 @@ function validateComponent(schema) {
   runJoi(validator, schema)
 }

-function validateDatasource(schema) {
+function validateDatasource(schema: any) {
   const fieldValidator = joi.object({
     type: joi
       .string()
@@ -86,7 +82,7 @@ function validateDatasource(schema) {
   runJoi(validator, schema)
 }

-exports.validate = schema => {
+export function validate(schema: any) {
   switch (schema?.type) {
     case PluginType.COMPONENT:
       validateComponent(schema)
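Note that `runJoi` throws the joi `ValidationError` rather than returning it, so callers of the exported `validate` wrap it in try/catch. A sketch (the schema object is illustrative and omits fields the full validator checks):

import { validate } from "./plugins" // assumed module path

try {
  validate({
    type: "component",
    metadata: {}, // required; unknown keys are allowed
    // ...remaining fields required by the validator omitted here
  })
} catch (err) {
  console.error(err) // joi ValidationError
}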
@@ -1,5 +1,5 @@
 import events from "events"
-import { timeout } from "../../utils"
+import { timeout } from "../utils"

 /**
  * Bull works with a Job wrapper around all messages that contains a lot more information about
@ -1,278 +1,6 @@
|
||||||
import RedisWrapper from "../redis"
|
// Mimic the outer package export for usage in index.ts
|
||||||
const env = require("../environment")
|
// The outer exports can't be used as they now reference dist directly
|
||||||
// ioredis mock is all in memory
|
export { default as Client } from "./redis"
|
||||||
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
|
export * as utils from "./utils"
|
||||||
const {
|
export * as clients from "./init"
|
||||||
addDbPrefix,
|
export * as redlock from "./redlock"
|
||||||
removeDbPrefix,
|
|
||||||
getRedisOptions,
|
|
||||||
SEPARATOR,
|
|
||||||
SelectableDatabases,
|
|
||||||
} = require("./utils")
|
|
||||||
|
|
||||||
const RETRY_PERIOD_MS = 2000
|
|
||||||
const STARTUP_TIMEOUT_MS = 5000
|
|
||||||
const CLUSTERED = false
|
|
||||||
const DEFAULT_SELECT_DB = SelectableDatabases.DEFAULT
|
|
||||||
|
|
||||||
// for testing just generate the client once
|
|
||||||
let CLOSED = false
|
|
||||||
let CLIENTS: { [key: number]: any } = {}
|
|
||||||
// if in test always connected
|
|
||||||
let CONNECTED = env.isTest()
|
|
||||||
|
|
||||||
function pickClient(selectDb: number): any {
|
|
||||||
return CLIENTS[selectDb]
|
|
||||||
}
|
|
||||||
|
|
||||||
function connectionError(
|
|
||||||
selectDb: number,
|
|
||||||
timeout: NodeJS.Timeout,
|
|
||||||
err: Error | string
|
|
||||||
) {
|
|
||||||
// manually shut down, ignore errors
|
|
||||||
if (CLOSED) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
pickClient(selectDb).disconnect()
|
|
||||||
CLOSED = true
|
|
||||||
// always clear this on error
|
|
||||||
clearTimeout(timeout)
|
|
||||||
CONNECTED = false
|
|
||||||
console.error("Redis connection failed - " + err)
|
|
||||||
setTimeout(() => {
|
|
||||||
init()
|
|
||||||
}, RETRY_PERIOD_MS)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise
|
|
||||||
* will return the ioredis client which will be ready to use.
|
|
||||||
*/
|
|
||||||
function init(selectDb = DEFAULT_SELECT_DB) {
|
|
||||||
let timeout: NodeJS.Timeout
|
|
||||||
CLOSED = false
|
|
||||||
let client = pickClient(selectDb)
|
|
||||||
// already connected, ignore
|
|
||||||
if (client && CONNECTED) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// testing uses a single in memory client
|
|
||||||
if (env.isTest()) {
|
|
||||||
CLIENTS[selectDb] = new Redis(getRedisOptions())
|
|
||||||
}
|
|
||||||
// start the timer - only allowed 5 seconds to connect
|
|
||||||
timeout = setTimeout(() => {
|
|
||||||
if (!CONNECTED) {
|
|
||||||
connectionError(
|
|
||||||
selectDb,
|
|
||||||
timeout,
|
|
||||||
"Did not successfully connect in timeout"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}, STARTUP_TIMEOUT_MS)
|
|
||||||
|
|
||||||
// disconnect any lingering client
|
|
||||||
if (client) {
|
|
||||||
client.disconnect()
|
|
||||||
}
|
|
||||||
const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)
|
|
||||||
|
|
||||||
if (CLUSTERED) {
|
|
||||||
client = new Redis.Cluster([{ host, port }], opts)
|
|
||||||
} else if (redisProtocolUrl) {
|
|
||||||
client = new Redis(redisProtocolUrl)
|
|
||||||
} else {
|
|
||||||
client = new Redis(opts)
|
|
||||||
}
|
|
||||||
// attach handlers
|
|
||||||
client.on("end", (err: Error) => {
|
|
||||||
connectionError(selectDb, timeout, err)
|
|
||||||
})
|
|
||||||
client.on("error", (err: Error) => {
|
|
||||||
connectionError(selectDb, timeout, err)
|
|
||||||
})
|
|
||||||
client.on("connect", () => {
|
|
||||||
clearTimeout(timeout)
|
|
||||||
CONNECTED = true
|
|
||||||
})
|
|
||||||
CLIENTS[selectDb] = client
|
|
||||||
}
|
|
||||||
|
|
||||||
function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
|
|
||||||
return new Promise(resolve => {
|
|
||||||
if (pickClient(selectDb) == null) {
|
|
||||||
init()
|
|
||||||
} else if (CONNECTED) {
|
|
||||||
resolve("")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// check if the connection is ready
|
|
||||||
const interval = setInterval(() => {
|
|
||||||
if (CONNECTED) {
|
|
||||||
clearInterval(interval)
|
|
||||||
resolve("")
|
|
||||||
}
|
|
||||||
}, 500)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Utility function, takes a redis stream and converts it to a promisified response -
|
|
||||||
* this can only be done with redis streams because they will have an end.
|
|
||||||
* @param stream A redis stream, specifically as this type of stream will have an end.
|
|
||||||
* @param client The client to use for further lookups.
|
|
||||||
* @return {Promise<object>} The final output of the stream
|
|
||||||
*/
|
|
||||||
function promisifyStream(stream: any, client: RedisWrapper) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const outputKeys = new Set()
|
|
||||||
stream.on("data", (keys: string[]) => {
|
|
||||||
keys.forEach(key => {
|
|
||||||
outputKeys.add(key)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
stream.on("error", (err: Error) => {
|
|
||||||
reject(err)
|
|
||||||
})
|
|
||||||
stream.on("end", async () => {
|
|
||||||
const keysArray: string[] = Array.from(outputKeys) as string[]
|
|
||||||
try {
|
|
||||||
let getPromises = []
|
|
||||||
for (let key of keysArray) {
|
|
||||||
getPromises.push(client.get(key))
|
|
||||||
}
|
|
||||||
const jsonArray = await Promise.all(getPromises)
|
|
||||||
resolve(
|
|
||||||
keysArray.map(key => ({
|
|
||||||
            key: removeDbPrefix(key),
            value: JSON.parse(jsonArray.shift()),
          }))
        )
      } catch (err) {
        reject(err)
      }
    })
  })
}

export = class RedisWrapper {
  _db: string
  _select: number

  constructor(db: string, selectDb: number | null = null) {
    this._db = db
    this._select = selectDb || DEFAULT_SELECT_DB
  }

  getClient() {
    return pickClient(this._select)
  }

  async init() {
    CLOSED = false
    init(this._select)
    await waitForConnection(this._select)
    return this
  }

  async finish() {
    CLOSED = true
    this.getClient().disconnect()
  }

  async scan(key = ""): Promise<any> {
    const db = this._db
    key = `${db}${SEPARATOR}${key}`
    let stream
    if (CLUSTERED) {
      let node = this.getClient().nodes("master")
      stream = node[0].scanStream({ match: key + "*", count: 100 })
    } else {
      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
    }
    return promisifyStream(stream, this.getClient())
  }

  async keys(pattern: string) {
    const db = this._db
    return this.getClient().keys(addDbPrefix(db, pattern))
  }

  async get(key: string) {
    const db = this._db
    let response = await this.getClient().get(addDbPrefix(db, key))
    // overwrite the prefixed key
    if (response != null && response.key) {
      response.key = key
    }
    // if it's not an object just return the response
    try {
      return JSON.parse(response)
    } catch (err) {
      return response
    }
  }

  async bulkGet(keys: string[]) {
    const db = this._db
    if (keys.length === 0) {
      return {}
    }
    const prefixedKeys = keys.map(key => addDbPrefix(db, key))
    let response = await this.getClient().mget(prefixedKeys)
    if (Array.isArray(response)) {
      let final: any = {}
      let count = 0
      for (let result of response) {
        if (result) {
          let parsed
          try {
            parsed = JSON.parse(result)
          } catch (err) {
            parsed = result
          }
          final[keys[count]] = parsed
        }
        count++
      }
      return final
    } else {
      throw new Error(`Invalid response: ${response}`)
    }
  }

  async store(key: string, value: any, expirySeconds: number | null = null) {
    const db = this._db
    if (typeof value === "object") {
      value = JSON.stringify(value)
    }
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().set(prefixedKey, value)
    if (expirySeconds) {
      await this.getClient().expire(prefixedKey, expirySeconds)
    }
  }

  async getTTL(key: string) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    return this.getClient().ttl(prefixedKey)
  }

  async setExpiry(key: string, expirySeconds: number | null) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().expire(prefixedKey, expirySeconds)
  }

  async delete(key: string) {
    const db = this._db
    await this.getClient().del(addDbPrefix(db, key))
  }

  async clear() {
    let items = await this.scan()
    await Promise.all(items.map((obj: any) => this.delete(obj.key)))
  }
}
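For context, a minimal usage sketch of the wrapper above. This is not part of the commit; it assumes the module resolves as "./index" (as the old init.js required it) and the key name and TTL are illustrative.

// Sketch only, not part of the commit.
import Client from "./index"
import { Databases } from "./utils"

async function example() {
  // keys are transparently prefixed, here with "data_cache-"
  const client = await new Client(Databases.GENERIC_CACHE).init()
  await client.store("greeting", { hello: "world" }, 60) // 60s expiry
  const value = await client.get("greeting") // -> { hello: "world" }
  await client.finish()
}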
@@ -1,69 +0,0 @@
const Client = require("./index")
const utils = require("./utils")

let userClient,
  sessionClient,
  appClient,
  cacheClient,
  writethroughClient,
  lockClient

async function init() {
  userClient = await new Client(utils.Databases.USER_CACHE).init()
  sessionClient = await new Client(utils.Databases.SESSIONS).init()
  appClient = await new Client(utils.Databases.APP_METADATA).init()
  cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
  lockClient = await new Client(utils.Databases.LOCKS).init()
  writethroughClient = await new Client(
    utils.Databases.WRITE_THROUGH,
    utils.SelectableDatabases.WRITE_THROUGH
  ).init()
}

process.on("exit", async () => {
  if (userClient) await userClient.finish()
  if (sessionClient) await sessionClient.finish()
  if (appClient) await appClient.finish()
  if (cacheClient) await cacheClient.finish()
  if (writethroughClient) await writethroughClient.finish()
  if (lockClient) await lockClient.finish()
})

module.exports = {
  getUserClient: async () => {
    if (!userClient) {
      await init()
    }
    return userClient
  },
  getSessionClient: async () => {
    if (!sessionClient) {
      await init()
    }
    return sessionClient
  },
  getAppClient: async () => {
    if (!appClient) {
      await init()
    }
    return appClient
  },
  getCacheClient: async () => {
    if (!cacheClient) {
      await init()
    }
    return cacheClient
  },
  getWritethroughClient: async () => {
    if (!writethroughClient) {
      await init()
    }
    return writethroughClient
  },
  getLockClient: async () => {
    if (!lockClient) {
      await init()
    }
    return lockClient
  },
}
@@ -0,0 +1,72 @@
import Client from "./redis"
import * as utils from "./utils"

let userClient: Client,
  sessionClient: Client,
  appClient: Client,
  cacheClient: Client,
  writethroughClient: Client,
  lockClient: Client

async function init() {
  userClient = await new Client(utils.Databases.USER_CACHE).init()
  sessionClient = await new Client(utils.Databases.SESSIONS).init()
  appClient = await new Client(utils.Databases.APP_METADATA).init()
  cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
  lockClient = await new Client(utils.Databases.LOCKS).init()
  writethroughClient = await new Client(
    utils.Databases.WRITE_THROUGH,
    utils.SelectableDatabase.WRITE_THROUGH
  ).init()
}

process.on("exit", async () => {
  if (userClient) await userClient.finish()
  if (sessionClient) await sessionClient.finish()
  if (appClient) await appClient.finish()
  if (cacheClient) await cacheClient.finish()
  if (writethroughClient) await writethroughClient.finish()
  if (lockClient) await lockClient.finish()
})

export async function getUserClient() {
  if (!userClient) {
    await init()
  }
  return userClient
}

export async function getSessionClient() {
  if (!sessionClient) {
    await init()
  }
  return sessionClient
}

export async function getAppClient() {
  if (!appClient) {
    await init()
  }
  return appClient
}

export async function getCacheClient() {
  if (!cacheClient) {
    await init()
  }
  return cacheClient
}

export async function getWritethroughClient() {
  if (!writethroughClient) {
    await init()
  }
  return writethroughClient
}

export async function getLockClient() {
  if (!lockClient) {
    await init()
  }
  return lockClient
}
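A hypothetical caller of the new module, not part of the commit; the function name and TTL are illustrative. The point is that the getters initialise all clients lazily on first use.

// Sketch only, not part of the commit.
import { getAppClient } from "./init"

async function cacheAppMetadata(appId: string, metadata: any) {
  // the first call runs init() and waits for the Redis connection
  const client = await getAppClient()
  await client.store(appId, metadata, 3600) // cache for an hour
}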
@@ -0,0 +1,279 @@
import env from "../environment"
// ioredis mock is all in memory
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
import {
  addDbPrefix,
  removeDbPrefix,
  getRedisOptions,
  SEPARATOR,
  SelectableDatabase,
} from "./utils"

const RETRY_PERIOD_MS = 2000
const STARTUP_TIMEOUT_MS = 5000
const CLUSTERED = false
const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT

// for testing just generate the client once
let CLOSED = false
let CLIENTS: { [key: number]: any } = {}
// if in test always connected
let CONNECTED = env.isTest()

function pickClient(selectDb: number): any {
  return CLIENTS[selectDb]
}

function connectionError(
  selectDb: number,
  timeout: NodeJS.Timeout,
  err: Error | string
) {
  // manually shut down, ignore errors
  if (CLOSED) {
    return
  }
  pickClient(selectDb).disconnect()
  CLOSED = true
  // always clear this on error
  clearTimeout(timeout)
  CONNECTED = false
  console.error("Redis connection failed - " + err)
  setTimeout(() => {
    init()
  }, RETRY_PERIOD_MS)
}

/**
 * Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise
 * will return the ioredis client which will be ready to use.
 */
function init(selectDb = DEFAULT_SELECT_DB) {
  let timeout: NodeJS.Timeout
  CLOSED = false
  let client = pickClient(selectDb)
  // already connected, ignore
  if (client && CONNECTED) {
    return
  }
  // testing uses a single in memory client
  if (env.isTest()) {
    CLIENTS[selectDb] = new Redis(getRedisOptions())
  }
  // start the timer - only allowed 5 seconds to connect
  timeout = setTimeout(() => {
    if (!CONNECTED) {
      connectionError(
        selectDb,
        timeout,
        "Did not successfully connect in timeout"
      )
    }
  }, STARTUP_TIMEOUT_MS)

  // disconnect any lingering client
  if (client) {
    client.disconnect()
  }
  const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)

  if (CLUSTERED) {
    client = new Redis.Cluster([{ host, port }], opts)
  } else if (redisProtocolUrl) {
    client = new Redis(redisProtocolUrl)
  } else {
    client = new Redis(opts)
  }
  // attach handlers
  client.on("end", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("error", (err: Error) => {
    connectionError(selectDb, timeout, err)
  })
  client.on("connect", () => {
    clearTimeout(timeout)
    CONNECTED = true
  })
  CLIENTS[selectDb] = client
}

function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
  return new Promise(resolve => {
    if (pickClient(selectDb) == null) {
      init()
    } else if (CONNECTED) {
      resolve("")
      return
    }
    // check if the connection is ready
    const interval = setInterval(() => {
      if (CONNECTED) {
        clearInterval(interval)
        resolve("")
      }
    }, 500)
  })
}

/**
 * Utility function, takes a redis stream and converts it to a promisified response -
 * this can only be done with redis streams because they will have an end.
 * @param stream A redis stream, specifically as this type of stream will have an end.
 * @param client The client to use for further lookups.
 * @return {Promise<object>} The final output of the stream
 */
function promisifyStream(stream: any, client: RedisWrapper) {
  return new Promise((resolve, reject) => {
    const outputKeys = new Set()
    stream.on("data", (keys: string[]) => {
      keys.forEach(key => {
        outputKeys.add(key)
      })
    })
    stream.on("error", (err: Error) => {
      reject(err)
    })
    stream.on("end", async () => {
      const keysArray: string[] = Array.from(outputKeys) as string[]
      try {
        let getPromises = []
        for (let key of keysArray) {
          getPromises.push(client.get(key))
        }
        const jsonArray = await Promise.all(getPromises)
        resolve(
          keysArray.map(key => ({
            key: removeDbPrefix(key),
            value: JSON.parse(jsonArray.shift()),
          }))
        )
      } catch (err) {
        reject(err)
      }
    })
  })
}

class RedisWrapper {
  _db: string
  _select: number

  constructor(db: string, selectDb: number | null = null) {
    this._db = db
    this._select = selectDb || DEFAULT_SELECT_DB
  }

  getClient() {
    return pickClient(this._select)
  }

  async init() {
    CLOSED = false
    init(this._select)
    await waitForConnection(this._select)
    return this
  }

  async finish() {
    CLOSED = true
    this.getClient().disconnect()
  }

  async scan(key = ""): Promise<any> {
    const db = this._db
    key = `${db}${SEPARATOR}${key}`
    let stream
    if (CLUSTERED) {
      let node = this.getClient().nodes("master")
      stream = node[0].scanStream({ match: key + "*", count: 100 })
    } else {
      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
    }
    return promisifyStream(stream, this.getClient())
  }

  async keys(pattern: string) {
    const db = this._db
    return this.getClient().keys(addDbPrefix(db, pattern))
  }

  async get(key: string) {
    const db = this._db
    let response = await this.getClient().get(addDbPrefix(db, key))
    // overwrite the prefixed key
    if (response != null && response.key) {
      response.key = key
    }
    // if it's not an object just return the response
    try {
      return JSON.parse(response)
    } catch (err) {
      return response
    }
  }

  async bulkGet(keys: string[]) {
    const db = this._db
    if (keys.length === 0) {
      return {}
    }
    const prefixedKeys = keys.map(key => addDbPrefix(db, key))
    let response = await this.getClient().mget(prefixedKeys)
    if (Array.isArray(response)) {
      let final: any = {}
      let count = 0
      for (let result of response) {
        if (result) {
          let parsed
          try {
            parsed = JSON.parse(result)
          } catch (err) {
            parsed = result
          }
          final[keys[count]] = parsed
        }
        count++
      }
      return final
    } else {
      throw new Error(`Invalid response: ${response}`)
    }
  }

  async store(key: string, value: any, expirySeconds: number | null = null) {
    const db = this._db
    if (typeof value === "object") {
      value = JSON.stringify(value)
    }
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().set(prefixedKey, value)
    if (expirySeconds) {
      await this.getClient().expire(prefixedKey, expirySeconds)
    }
  }

  async getTTL(key: string) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    return this.getClient().ttl(prefixedKey)
  }

  async setExpiry(key: string, expirySeconds: number | null) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().expire(prefixedKey, expirySeconds)
  }

  async delete(key: string) {
    const db = this._db
    await this.getClient().del(addDbPrefix(db, key))
  }

  async clear() {
    let items = await this.scan()
    await Promise.all(items.map((obj: any) => this.delete(obj.key)))
  }
}

export = RedisWrapper
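A sketch of what scan() resolves to, not part of the commit and assuming a stored session exists. The stream collects matching keys, then promisifyStream fetches each value and strips the database prefix, per the code above.

// Sketch only, not part of the commit.
import Client from "./redis"
import { Databases } from "./utils"

async function listSessions() {
  const client = await new Client(Databases.SESSIONS).init()
  await client.store("abc123", { userId: "us_1" })
  // resolves to e.g. [{ key: "abc123", value: { userId: "us_1" } }]
  return client.scan()
}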
@@ -1,10 +1,10 @@
-const env = require("../environment")
+import env from "../environment"
 
 const SLOT_REFRESH_MS = 2000
 const CONNECT_TIMEOUT_MS = 10000
-const SEPARATOR = "-"
 const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL
 const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
+export const SEPARATOR = "-"
 
 /**
  * These Redis databases help us to segment up a Redis keyspace by prepending the
@@ -12,23 +12,23 @@ const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
  * can be split up a bit; allowing us to use scans on small databases to find some particular
  * keys within.
  * If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out
- * of the default keyspace and use a separate one - the SelectableDatabases can be used for this.
+ * of the default keyspace and use a separate one - the SelectableDatabase can be used for this.
  */
-exports.Databases = {
-  PW_RESETS: "pwReset",
-  VERIFICATIONS: "verification",
-  INVITATIONS: "invitation",
-  DEV_LOCKS: "devLocks",
-  DEBOUNCE: "debounce",
-  SESSIONS: "session",
-  USER_CACHE: "users",
-  FLAGS: "flags",
-  APP_METADATA: "appMetadata",
-  QUERY_VARS: "queryVars",
-  LICENSES: "license",
-  GENERIC_CACHE: "data_cache",
-  WRITE_THROUGH: "writeThrough",
-  LOCKS: "locks",
-}
+export enum Databases {
+  PW_RESETS = "pwReset",
+  VERIFICATIONS = "verification",
+  INVITATIONS = "invitation",
+  DEV_LOCKS = "devLocks",
+  DEBOUNCE = "debounce",
+  SESSIONS = "session",
+  USER_CACHE = "users",
+  FLAGS = "flags",
+  APP_METADATA = "appMetadata",
+  QUERY_VARS = "queryVars",
+  LICENSES = "license",
+  GENERIC_CACHE = "data_cache",
+  WRITE_THROUGH = "writeThrough",
+  LOCKS = "locks",
+}
@@ -40,30 +40,28 @@ exports.Databases = {
  * but if you need to walk through all values in a database periodically then a separate selectable
  * keyspace should be used.
  */
-exports.SelectableDatabases = {
-  DEFAULT: 0,
-  WRITE_THROUGH: 1,
-  UNUSED_1: 2,
-  UNUSED_2: 3,
-  UNUSED_3: 4,
-  UNUSED_4: 5,
-  UNUSED_5: 6,
-  UNUSED_6: 7,
-  UNUSED_7: 8,
-  UNUSED_8: 9,
-  UNUSED_9: 10,
-  UNUSED_10: 11,
-  UNUSED_11: 12,
-  UNUSED_12: 13,
-  UNUSED_13: 14,
-  UNUSED_14: 15,
-}
+export enum SelectableDatabase {
+  DEFAULT = 0,
+  WRITE_THROUGH = 1,
+  UNUSED_1 = 2,
+  UNUSED_2 = 3,
+  UNUSED_3 = 4,
+  UNUSED_4 = 5,
+  UNUSED_5 = 6,
+  UNUSED_6 = 7,
+  UNUSED_7 = 8,
+  UNUSED_8 = 9,
+  UNUSED_9 = 10,
+  UNUSED_10 = 11,
+  UNUSED_11 = 12,
+  UNUSED_12 = 13,
+  UNUSED_13 = 14,
+  UNUSED_14 = 15,
+}
 
-exports.SEPARATOR = SEPARATOR
-
-exports.getRedisOptions = (clustered = false) => {
+export function getRedisOptions(clustered = false) {
   let password = REDIS_PASSWORD
-  let url = REDIS_URL.split("//")
+  let url: string[] | string = REDIS_URL.split("//")
   // get rid of the protocol
   url = url.length > 1 ? url[1] : url[0]
   // check for a password etc
@@ -84,7 +82,7 @@ exports.getRedisOptions = (clustered = false) => {
     redisProtocolUrl = REDIS_URL
   }
 
-  const opts = {
+  const opts: any = {
     connectTimeout: CONNECT_TIMEOUT_MS,
   }
   if (clustered) {
@@ -92,7 +90,7 @@ exports.getRedisOptions = (clustered = false) => {
     opts.redisOptions.tls = {}
     opts.redisOptions.password = password
    opts.slotsRefreshTimeout = SLOT_REFRESH_MS
-    opts.dnsLookup = (address, callback) => callback(null, address)
+    opts.dnsLookup = (address: string, callback: any) => callback(null, address)
   } else {
     opts.host = host
     opts.port = port
@@ -101,14 +99,14 @@ exports.getRedisOptions = (clustered = false) => {
   return { opts, host, port, redisProtocolUrl }
 }
 
-exports.addDbPrefix = (db, key) => {
+export function addDbPrefix(db: string, key: string) {
   if (key.includes(db)) {
     return key
   }
   return `${db}${SEPARATOR}${key}`
 }
 
-exports.removeDbPrefix = key => {
+export function removeDbPrefix(key: string) {
   let parts = key.split(SEPARATOR)
   if (parts.length >= 2) {
     parts.shift()
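Illustrative behaviour of the prefix helpers. The tail of removeDbPrefix is cut off by the hunk above, so the rejoin of the remaining parts is an assumption; the key values are made up.

// Sketch only, not part of the commit.
import { addDbPrefix, removeDbPrefix, Databases } from "./utils"

addDbPrefix(Databases.USER_CACHE, "john@doe.com")
// -> "users-john@doe.com"
addDbPrefix(Databases.USER_CACHE, "users-john@doe.com")
// -> unchanged, the key already contains the db name
removeDbPrefix("users-john@doe.com")
// -> "john@doe.com" (assuming the remaining parts are rejoined on "-")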
@@ -1 +0,0 @@
exports.lookupApiKey = async () => {}
@@ -1,5 +1,5 @@
-const crypto = require("crypto")
-const env = require("../environment")
+import crypto from "crypto"
+import env from "../environment"
 
 const ALGO = "aes-256-ctr"
 const SECRET = env.JWT_SECRET
@@ -8,13 +8,13 @@ const ITERATIONS = 10000
 const RANDOM_BYTES = 16
 const STRETCH_LENGTH = 32
 
-function stretchString(string, salt) {
+function stretchString(string: string, salt: Buffer) {
   return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
 }
 
-exports.encrypt = input => {
+export function encrypt(input: string) {
   const salt = crypto.randomBytes(RANDOM_BYTES)
-  const stretched = stretchString(SECRET, salt)
+  const stretched = stretchString(SECRET!, salt)
   const cipher = crypto.createCipheriv(ALGO, stretched, salt)
   const base = cipher.update(input)
   const final = cipher.final()
@@ -22,10 +22,10 @@ exports.encrypt = input => {
   return `${salt.toString("hex")}${SEPARATOR}${encrypted}`
 }
 
-exports.decrypt = input => {
+export function decrypt(input: string) {
   const [salt, encrypted] = input.split(SEPARATOR)
   const saltBuffer = Buffer.from(salt, "hex")
-  const stretched = stretchString(SECRET, saltBuffer)
+  const stretched = stretchString(SECRET!, saltBuffer)
   const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)
   const base = decipher.update(Buffer.from(encrypted, "hex"))
   const final = decipher.final()
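A round-trip sketch of the converted encryption module, not part of the commit. It assumes env.JWT_SECRET is set (the non-null SECRET! assertions above rely on that) and that the output is the salt and ciphertext joined by the module's separator.

// Sketch only, not part of the commit.
import { encrypt, decrypt } from "./encryption"

const token = encrypt("super-secret value")
// token looks like "<16-byte salt hex><separator><ciphertext hex>"
decrypt(token) // -> "super-secret value"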
@@ -1,18 +1,18 @@
-const env = require("./environment")
+import env from "../environment"
 const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
-const { v4 } = require("uuid")
+import { v4 } from "uuid"
 
 const SALT_ROUNDS = env.SALT_ROUNDS || 10
 
-exports.hash = async data => {
+export async function hash(data: string) {
   const salt = await bcrypt.genSalt(SALT_ROUNDS)
   return bcrypt.hash(data, salt)
 }
 
-exports.compare = async (data, encrypted) => {
+export async function compare(data: string, encrypted: string) {
   return bcrypt.compare(data, encrypted)
 }
 
-exports.newid = function () {
+export function newid() {
  return v4().replace(/-/g, "")
 }
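Usage sketch for the hashing helpers, not part of the commit; the password values are illustrative.

// Sketch only, not part of the commit.
import { hash, compare, newid } from "./hashing"

async function example() {
  const stored = await hash("hunter2") // bcrypt with SALT_ROUNDS rounds
  await compare("hunter2", stored) // -> true
  await compare("wrong", stored) // -> false
  newid() // -> a uuid v4 with the dashes stripped, 32 hex chars
}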
@@ -0,0 +1,2 @@
export * from "./hashing"
export * from "./utils"
@@ -4,14 +4,14 @@ import {
   ViewName,
   getAllApps,
   queryGlobalView,
-} from "./db"
-import { options } from "./middleware/passport/jwt"
-import { Header, Cookie, MAX_VALID_DATE } from "./constants"
-import env from "./environment"
-import userCache from "./cache/user"
-import { getSessionsForUser, invalidateSessions } from "./security/sessions"
-import * as events from "./events"
-import tenancy from "./tenancy"
+} from "../db"
+import { options } from "../middleware/passport/jwt"
+import { Header, Cookie, MAX_VALID_DATE } from "../constants"
+import env from "../environment"
+import * as userCache from "../cache/user"
+import { getSessionsForUser, invalidateSessions } from "../security/sessions"
+import * as events from "../events"
+import tenancy from "../tenancy"
 import {
   App,
   BBContext,
@@ -1 +0,0 @@
module.exports = require("./src/tenancy")
@@ -1,4 +0,0 @@
module.exports = {
  ...require("./src/utils"),
  ...require("./src/hashing"),
}
@@ -390,7 +390,7 @@ const appPostCreate = async (ctx: any, app: App) => {
 export const create = async (ctx: any) => {
   const newApplication = await quotas.addApp(() => performAppCreate(ctx))
   await appPostCreate(ctx, newApplication)
-  await cache.bustCache(cache.CacheKeys.CHECKLIST)
+  await cache.bustCache(cache.CacheKey.CHECKLIST)
   ctx.body = newApplication
   ctx.status = 200
 }
@@ -31,9 +31,7 @@ export interface GoogleConfig extends Config {
   }
 }
 
-export interface OIDCConfig extends Config {
-  config: {
-    configs: {
-      configUrl: string
-      clientID: string
-      clientSecret: string
+export interface OIDCInnerCfg {
+  configUrl: string
+  clientID: string
+  clientSecret: string
@@ -41,7 +39,11 @@ export interface OIDCConfig extends Config {
-      name: string
-      uuid: string
-      activated: boolean
-    }[]
+  name: string
+  uuid: string
+  activated: boolean
+}
+
+export interface OIDCConfig extends Config {
+  config: {
+    configs: OIDCInnerCfg[]
   }
 }
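A hypothetical literal matching the reshaped types, to show why the nested config block was extracted into OIDCInnerCfg; all values are placeholders.

// Sketch only, not part of the commit.
const innerCfg: OIDCInnerCfg = {
  configUrl: "https://idp.example.com/.well-known/openid-configuration",
  clientID: "budibase",
  clientSecret: "<secret>",
  name: "Example IdP",
  uuid: "00000000-0000-0000-0000-000000000000",
  activated: true,
}
// OIDCConfig now simply wraps an array of these:
// config: { configs: [innerCfg] }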
@@ -1,8 +1,37 @@
 import { Document } from "../document"
 
-export interface User extends Document {
+export interface SSOProfile {
+  id: string
+  name?: {
+    givenName?: string
+    familyName?: string
+  }
+  _json: {
+    email: string
+    picture: string
+  }
+  provider?: string
+}
+
+export interface ThirdPartyUser extends Document {
+  thirdPartyProfile?: SSOProfile["_json"]
+  firstName?: string
+  lastName?: string
+  pictureUrl?: string
+  profile?: SSOProfile
+  oauth2?: any
+  provider?: string
+  providerType?: string
+  email: string
+  userId?: string
+  forceResetPassword?: boolean
+}
+
+export interface User extends ThirdPartyUser {
   tenantId: string
   email: string
+  userId?: string
+  forceResetPassword?: boolean
   roles: UserRoles
   builder?: {
     global: boolean
@@ -10,14 +39,14 @@ export interface User extends Document {
   admin?: {
     global: boolean
   }
-  providerType?: string
   password?: string
   status?: string
   createdAt?: number // override the default createdAt behaviour - users sdk historically set this to Date.now()
   userGroups?: string[]
-  forceResetPassword?: boolean
   dayPassRecordedAt?: string
-  userId?: string
+  account?: {
+    authType: string
+  }
 }
 
 export interface UserRoles {
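An illustrative object satisfying the reworked User interface, showing the fields now inherited from ThirdPartyUser; values are made up and the Document/UserRoles fields are elided via Partial.

// Sketch only, not part of the commit.
const user: Partial<User> = {
  tenantId: "default",
  email: "john@doe.com",
  // now inherited from ThirdPartyUser rather than declared on User:
  provider: "google",
  providerType: "google",
  forceResetPassword: false,
  account: { authType: "password" },
}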
@@ -1,5 +1,5 @@
 import { User, Account } from "../documents"
-import { IdentityType } from "./events/identification"
+import { IdentityType } from "./events"
 
 export interface BaseContext {
   _id: string
@@ -170,8 +170,8 @@ export async function save(ctx: BBContext) {
 
   try {
     const response = await db.put(ctx.request.body)
-    await cache.bustCache(cache.CacheKeys.CHECKLIST)
-    await cache.bustCache(cache.CacheKeys.ANALYTICS_ENABLED)
+    await cache.bustCache(cache.CacheKey.CHECKLIST)
+    await cache.bustCache(cache.CacheKey.ANALYTICS_ENABLED)
 
     for (const fn of eventFns) {
       await fn()
@@ -371,7 +371,7 @@ export async function destroy(ctx: BBContext) {
   const { id, rev } = ctx.params
   try {
     await db.remove(id, rev)
-    await cache.delete(cache.CacheKeys.CHECKLIST)
+    await cache.delete(cache.CacheKey.CHECKLIST)
     ctx.body = { message: "Config deleted successfully" }
   } catch (err: any) {
     ctx.throw(err.status, err)
@@ -384,7 +384,7 @@ export async function configChecklist(ctx: BBContext) {
 
   try {
     ctx.body = await cache.withCache(
-      cache.CacheKeys.CHECKLIST,
+      cache.CacheKey.CHECKLIST,
       env.CHECKLIST_CACHE_TTL,
       async () => {
         let apps = []
@@ -104,7 +104,7 @@ export const adminUser = async (ctx: any) => {
   try {
     // always bust checklist beforehand, if an error occurs but can proceed, don't get
     // stuck in a cycle
-    await cache.bustCache(cache.CacheKeys.CHECKLIST)
+    await cache.bustCache(cache.CacheKey.CHECKLIST)
     const finalUser = await sdk.users.save(user, {
       hashPassword,
       requirePassword,
@@ -6,7 +6,7 @@ export async function systemRestored(ctx: BBContext) {
   if (!env.SELF_HOSTED) {
     ctx.throw(405, "This operation is not allowed in cloud.")
   }
-  await cache.bustCache(cache.CacheKeys.CHECKLIST)
+  await cache.bustCache(cache.CacheKey.CHECKLIST)
   ctx.body = {
     message: "System prepared after restore.",
   }