Merge branch 'develop' of github.com:Budibase/budibase into side-panel
This commit is contained in:
commit
8567ae70af
|
@ -1,5 +1,5 @@
|
||||||
{
|
{
|
||||||
"version": "2.1.32-alpha.11",
|
"version": "2.1.40-alpha.1",
|
||||||
"npmClient": "yarn",
|
"npmClient": "yarn",
|
||||||
"packages": [
|
"packages": [
|
||||||
"packages/*"
|
"packages/*"
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
"private": true,
|
"private": true,
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@rollup/plugin-json": "^4.0.2",
|
"@rollup/plugin-json": "^4.0.2",
|
||||||
"@typescript-eslint/parser": "4.28.0",
|
"@typescript-eslint/parser": "5.45.0",
|
||||||
"babel-eslint": "^10.0.3",
|
"babel-eslint": "^10.0.3",
|
||||||
"eslint": "^7.28.0",
|
"eslint": "^7.28.0",
|
||||||
"eslint-plugin-cypress": "^2.11.3",
|
"eslint-plugin-cypress": "^2.11.3",
|
||||||
|
@ -87,4 +87,4 @@
|
||||||
"install:pro": "bash scripts/pro/install.sh",
|
"install:pro": "bash scripts/pro/install.sh",
|
||||||
"dep:clean": "yarn clean && yarn bootstrap"
|
"dep:clean": "yarn clean && yarn bootstrap"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/cloud/accounts")
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/auth")
|
|
|
@ -1,9 +0,0 @@
|
||||||
const generic = require("./src/cache/generic")
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
user: require("./src/cache/user"),
|
|
||||||
app: require("./src/cache/appMetadata"),
|
|
||||||
writethrough: require("./src/cache/writethrough"),
|
|
||||||
...generic,
|
|
||||||
cache: generic,
|
|
||||||
}
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/constants")
|
|
|
@ -1,24 +0,0 @@
|
||||||
const {
|
|
||||||
getAppDB,
|
|
||||||
getDevAppDB,
|
|
||||||
getProdAppDB,
|
|
||||||
getAppId,
|
|
||||||
updateAppId,
|
|
||||||
doInAppContext,
|
|
||||||
doInTenant,
|
|
||||||
doInContext,
|
|
||||||
} = require("./src/context")
|
|
||||||
|
|
||||||
const identity = require("./src/context/identity")
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
getAppDB,
|
|
||||||
getDevAppDB,
|
|
||||||
getProdAppDB,
|
|
||||||
getAppId,
|
|
||||||
updateAppId,
|
|
||||||
doInAppContext,
|
|
||||||
doInTenant,
|
|
||||||
identity,
|
|
||||||
doInContext,
|
|
||||||
}
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/db")
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/context/deprovision")
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/security/encryption")
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/logging")
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/middleware")
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/migrations")
|
|
|
@ -1,4 +0,0 @@
|
||||||
module.exports = {
|
|
||||||
...require("./src/objectStore"),
|
|
||||||
...require("./src/objectStore/utils"),
|
|
||||||
}
|
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/backend-core",
|
"name": "@budibase/backend-core",
|
||||||
"version": "2.1.32-alpha.11",
|
"version": "2.1.40-alpha.1",
|
||||||
"description": "Budibase backend core libraries used in server and worker",
|
"description": "Budibase backend core libraries used in server and worker",
|
||||||
"main": "dist/src/index.js",
|
"main": "dist/src/index.js",
|
||||||
"types": "dist/src/index.d.ts",
|
"types": "dist/src/index.d.ts",
|
||||||
|
@ -20,7 +20,7 @@
|
||||||
"test:watch": "jest --watchAll"
|
"test:watch": "jest --watchAll"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@budibase/types": "2.1.32-alpha.11",
|
"@budibase/types": "2.1.40-alpha.1",
|
||||||
"@shopify/jest-koa-mocks": "5.0.1",
|
"@shopify/jest-koa-mocks": "5.0.1",
|
||||||
"@techpass/passport-openidconnect": "0.3.2",
|
"@techpass/passport-openidconnect": "0.3.2",
|
||||||
"aws-sdk": "2.1030.0",
|
"aws-sdk": "2.1030.0",
|
||||||
|
@ -37,7 +37,6 @@
|
||||||
"lodash.isarguments": "3.1.0",
|
"lodash.isarguments": "3.1.0",
|
||||||
"nano": "^10.1.0",
|
"nano": "^10.1.0",
|
||||||
"node-fetch": "2.6.7",
|
"node-fetch": "2.6.7",
|
||||||
"passport-google-auth": "1.0.2",
|
|
||||||
"passport-google-oauth": "2.0.0",
|
"passport-google-oauth": "2.0.0",
|
||||||
"passport-jwt": "4.0.0",
|
"passport-jwt": "4.0.0",
|
||||||
"passport-local": "1.0.0",
|
"passport-local": "1.0.0",
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/security/permissions")
|
|
|
@ -1,3 +0,0 @@
|
||||||
module.exports = {
|
|
||||||
...require("./src/plugin"),
|
|
||||||
}
|
|
|
@ -1,5 +0,0 @@
|
||||||
module.exports = {
|
|
||||||
Client: require("./src/redis"),
|
|
||||||
utils: require("./src/redis/utils"),
|
|
||||||
clients: require("./src/redis/init"),
|
|
||||||
}
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/security/roles")
|
|
|
@ -1 +0,0 @@
|
||||||
module.exports = require("./src/security/sessions")
|
|
|
@ -1,16 +1,14 @@
|
||||||
const passport = require("koa-passport")
|
const _passport = require("koa-passport")
|
||||||
const LocalStrategy = require("passport-local").Strategy
|
const LocalStrategy = require("passport-local").Strategy
|
||||||
const JwtStrategy = require("passport-jwt").Strategy
|
const JwtStrategy = require("passport-jwt").Strategy
|
||||||
import { getGlobalDB } from "./tenancy"
|
import { getGlobalDB } from "../tenancy"
|
||||||
const refresh = require("passport-oauth2-refresh")
|
const refresh = require("passport-oauth2-refresh")
|
||||||
import { Config } from "./constants"
|
import { Config } from "../constants"
|
||||||
import { getScopedConfig } from "./db/utils"
|
import { getScopedConfig } from "../db"
|
||||||
import {
|
import {
|
||||||
jwt,
|
jwt as jwtPassport,
|
||||||
local,
|
local,
|
||||||
authenticated,
|
authenticated,
|
||||||
google,
|
|
||||||
oidc,
|
|
||||||
auditLog,
|
auditLog,
|
||||||
tenancy,
|
tenancy,
|
||||||
authError,
|
authError,
|
||||||
|
@ -21,22 +19,41 @@ import {
|
||||||
builderOnly,
|
builderOnly,
|
||||||
builderOrAdmin,
|
builderOrAdmin,
|
||||||
joiValidator,
|
joiValidator,
|
||||||
} from "./middleware"
|
oidc,
|
||||||
import { invalidateUser } from "./cache/user"
|
google,
|
||||||
|
} from "../middleware"
|
||||||
|
import { invalidateUser } from "../cache/user"
|
||||||
import { User } from "@budibase/types"
|
import { User } from "@budibase/types"
|
||||||
import { logAlert } from "./logging"
|
import { logAlert } from "../logging"
|
||||||
|
export {
|
||||||
|
auditLog,
|
||||||
|
authError,
|
||||||
|
internalApi,
|
||||||
|
ssoCallbackUrl,
|
||||||
|
adminOnly,
|
||||||
|
builderOnly,
|
||||||
|
builderOrAdmin,
|
||||||
|
joiValidator,
|
||||||
|
google,
|
||||||
|
oidc,
|
||||||
|
} from "../middleware"
|
||||||
|
export const buildAuthMiddleware = authenticated
|
||||||
|
export const buildTenancyMiddleware = tenancy
|
||||||
|
export const buildCsrfMiddleware = csrf
|
||||||
|
export const passport = _passport
|
||||||
|
export const jwt = require("jsonwebtoken")
|
||||||
|
|
||||||
// Strategies
|
// Strategies
|
||||||
passport.use(new LocalStrategy(local.options, local.authenticate))
|
_passport.use(new LocalStrategy(local.options, local.authenticate))
|
||||||
if (jwt.options.secretOrKey) {
|
if (jwtPassport.options.secretOrKey) {
|
||||||
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
|
_passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate))
|
||||||
} else {
|
} else {
|
||||||
logAlert("No JWT Secret supplied, cannot configure JWT strategy")
|
logAlert("No JWT Secret supplied, cannot configure JWT strategy")
|
||||||
}
|
}
|
||||||
|
|
||||||
passport.serializeUser((user: User, done: any) => done(null, user))
|
_passport.serializeUser((user: User, done: any) => done(null, user))
|
||||||
|
|
||||||
passport.deserializeUser(async (user: User, done: any) => {
|
_passport.deserializeUser(async (user: User, done: any) => {
|
||||||
const db = getGlobalDB()
|
const db = getGlobalDB()
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
@ -115,7 +132,7 @@ async function refreshGoogleAccessToken(
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async function refreshOAuthToken(
|
export async function refreshOAuthToken(
|
||||||
refreshToken: string,
|
refreshToken: string,
|
||||||
configType: string,
|
configType: string,
|
||||||
configId: string
|
configId: string
|
||||||
|
@ -152,7 +169,7 @@ async function refreshOAuthToken(
|
||||||
return refreshResponse
|
return refreshResponse
|
||||||
}
|
}
|
||||||
|
|
||||||
async function updateUserOAuth(userId: string, oAuthConfig: any) {
|
export async function updateUserOAuth(userId: string, oAuthConfig: any) {
|
||||||
const details = {
|
const details = {
|
||||||
accessToken: oAuthConfig.accessToken,
|
accessToken: oAuthConfig.accessToken,
|
||||||
refreshToken: oAuthConfig.refreshToken,
|
refreshToken: oAuthConfig.refreshToken,
|
||||||
|
@ -179,23 +196,3 @@ async function updateUserOAuth(userId: string, oAuthConfig: any) {
|
||||||
console.error("Could not update OAuth details for current user", e)
|
console.error("Could not update OAuth details for current user", e)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export = {
|
|
||||||
buildAuthMiddleware: authenticated,
|
|
||||||
passport,
|
|
||||||
google,
|
|
||||||
oidc,
|
|
||||||
jwt: require("jsonwebtoken"),
|
|
||||||
buildTenancyMiddleware: tenancy,
|
|
||||||
auditLog,
|
|
||||||
authError,
|
|
||||||
buildCsrfMiddleware: csrf,
|
|
||||||
internalApi,
|
|
||||||
refreshOAuthToken,
|
|
||||||
updateUserOAuth,
|
|
||||||
ssoCallbackUrl,
|
|
||||||
adminOnly,
|
|
||||||
builderOnly,
|
|
||||||
builderOrAdmin,
|
|
||||||
joiValidator,
|
|
||||||
}
|
|
|
@ -0,0 +1 @@
|
||||||
|
export * from "./auth"
|
|
@ -1,6 +1,6 @@
|
||||||
const redis = require("../redis/init")
|
import { getAppClient } from "../redis/init"
|
||||||
const { doWithDB } = require("../db")
|
import { doWithDB, DocumentType } from "../db"
|
||||||
const { DocumentType } = require("../db/constants")
|
import { Database } from "@budibase/types"
|
||||||
|
|
||||||
const AppState = {
|
const AppState = {
|
||||||
INVALID: "invalid",
|
INVALID: "invalid",
|
||||||
|
@ -10,17 +10,17 @@ const EXPIRY_SECONDS = 3600
|
||||||
/**
|
/**
|
||||||
* The default populate app metadata function
|
* The default populate app metadata function
|
||||||
*/
|
*/
|
||||||
const populateFromDB = async appId => {
|
async function populateFromDB(appId: string) {
|
||||||
return doWithDB(
|
return doWithDB(
|
||||||
appId,
|
appId,
|
||||||
db => {
|
(db: Database) => {
|
||||||
return db.get(DocumentType.APP_METADATA)
|
return db.get(DocumentType.APP_METADATA)
|
||||||
},
|
},
|
||||||
{ skip_setup: true }
|
{ skip_setup: true }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const isInvalid = metadata => {
|
function isInvalid(metadata?: { state: string }) {
|
||||||
return !metadata || metadata.state === AppState.INVALID
|
return !metadata || metadata.state === AppState.INVALID
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -31,15 +31,15 @@ const isInvalid = metadata => {
|
||||||
* @param {string} appId the id of the app to get metadata from.
|
* @param {string} appId the id of the app to get metadata from.
|
||||||
* @returns {object} the app metadata.
|
* @returns {object} the app metadata.
|
||||||
*/
|
*/
|
||||||
exports.getAppMetadata = async appId => {
|
export async function getAppMetadata(appId: string) {
|
||||||
const client = await redis.getAppClient()
|
const client = await getAppClient()
|
||||||
// try cache
|
// try cache
|
||||||
let metadata = await client.get(appId)
|
let metadata = await client.get(appId)
|
||||||
if (!metadata) {
|
if (!metadata) {
|
||||||
let expiry = EXPIRY_SECONDS
|
let expiry: number | undefined = EXPIRY_SECONDS
|
||||||
try {
|
try {
|
||||||
metadata = await populateFromDB(appId)
|
metadata = await populateFromDB(appId)
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
// app DB left around, but no metadata, it is invalid
|
// app DB left around, but no metadata, it is invalid
|
||||||
if (err && err.status === 404) {
|
if (err && err.status === 404) {
|
||||||
metadata = { state: AppState.INVALID }
|
metadata = { state: AppState.INVALID }
|
||||||
|
@ -74,11 +74,11 @@ exports.getAppMetadata = async appId => {
|
||||||
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
|
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
|
||||||
* @return {Promise<void>} will respond with success when cache is updated.
|
* @return {Promise<void>} will respond with success when cache is updated.
|
||||||
*/
|
*/
|
||||||
exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
|
export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
|
||||||
if (!appId) {
|
if (!appId) {
|
||||||
throw "Cannot invalidate if no app ID provided."
|
throw "Cannot invalidate if no app ID provided."
|
||||||
}
|
}
|
||||||
const client = await redis.getAppClient()
|
const client = await getAppClient()
|
||||||
await client.delete(appId)
|
await client.delete(appId)
|
||||||
if (newMetadata) {
|
if (newMetadata) {
|
||||||
await client.store(appId, newMetadata, EXPIRY_SECONDS)
|
await client.store(appId, newMetadata, EXPIRY_SECONDS)
|
|
@ -1,6 +1,6 @@
|
||||||
import { getTenantId } from "../../context"
|
import { getTenantId } from "../../context"
|
||||||
import redis from "../../redis/init"
|
import * as redis from "../../redis/init"
|
||||||
import RedisWrapper from "../../redis"
|
import { Client } from "../../redis"
|
||||||
|
|
||||||
function generateTenantKey(key: string) {
|
function generateTenantKey(key: string) {
|
||||||
const tenantId = getTenantId()
|
const tenantId = getTenantId()
|
||||||
|
@ -8,9 +8,9 @@ function generateTenantKey(key: string) {
|
||||||
}
|
}
|
||||||
|
|
||||||
export = class BaseCache {
|
export = class BaseCache {
|
||||||
client: RedisWrapper | undefined
|
client: Client | undefined
|
||||||
|
|
||||||
constructor(client: RedisWrapper | undefined = undefined) {
|
constructor(client: Client | undefined = undefined) {
|
||||||
this.client = client
|
this.client = client
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,30 +0,0 @@
|
||||||
const BaseCache = require("./base")
|
|
||||||
|
|
||||||
const GENERIC = new BaseCache()
|
|
||||||
|
|
||||||
exports.CacheKeys = {
|
|
||||||
CHECKLIST: "checklist",
|
|
||||||
INSTALLATION: "installation",
|
|
||||||
ANALYTICS_ENABLED: "analyticsEnabled",
|
|
||||||
UNIQUE_TENANT_ID: "uniqueTenantId",
|
|
||||||
EVENTS: "events",
|
|
||||||
BACKFILL_METADATA: "backfillMetadata",
|
|
||||||
EVENTS_RATE_LIMIT: "eventsRateLimit",
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.TTL = {
|
|
||||||
ONE_MINUTE: 600,
|
|
||||||
ONE_HOUR: 3600,
|
|
||||||
ONE_DAY: 86400,
|
|
||||||
}
|
|
||||||
|
|
||||||
function performExport(funcName) {
|
|
||||||
return (...args) => GENERIC[funcName](...args)
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.keys = performExport("keys")
|
|
||||||
exports.get = performExport("get")
|
|
||||||
exports.store = performExport("store")
|
|
||||||
exports.delete = performExport("delete")
|
|
||||||
exports.withCache = performExport("withCache")
|
|
||||||
exports.bustCache = performExport("bustCache")
|
|
|
@ -0,0 +1,30 @@
|
||||||
|
const BaseCache = require("./base")
|
||||||
|
|
||||||
|
const GENERIC = new BaseCache()
|
||||||
|
|
||||||
|
export enum CacheKey {
|
||||||
|
CHECKLIST = "checklist",
|
||||||
|
INSTALLATION = "installation",
|
||||||
|
ANALYTICS_ENABLED = "analyticsEnabled",
|
||||||
|
UNIQUE_TENANT_ID = "uniqueTenantId",
|
||||||
|
EVENTS = "events",
|
||||||
|
BACKFILL_METADATA = "backfillMetadata",
|
||||||
|
EVENTS_RATE_LIMIT = "eventsRateLimit",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum TTL {
|
||||||
|
ONE_MINUTE = 600,
|
||||||
|
ONE_HOUR = 3600,
|
||||||
|
ONE_DAY = 86400,
|
||||||
|
}
|
||||||
|
|
||||||
|
function performExport(funcName: string) {
|
||||||
|
return (...args: any) => GENERIC[funcName](...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const keys = performExport("keys")
|
||||||
|
export const get = performExport("get")
|
||||||
|
export const store = performExport("store")
|
||||||
|
export const destroy = performExport("delete")
|
||||||
|
export const withCache = performExport("withCache")
|
||||||
|
export const bustCache = performExport("bustCache")
|
|
@ -0,0 +1,5 @@
|
||||||
|
export * as generic from "./generic"
|
||||||
|
export * as user from "./user"
|
||||||
|
export * as app from "./appMetadata"
|
||||||
|
export * as writethrough from "./writethrough"
|
||||||
|
export * from "./generic"
|
|
@ -1,15 +1,16 @@
|
||||||
const redis = require("../redis/init")
|
import * as redis from "../redis/init"
|
||||||
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
|
import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy"
|
||||||
const env = require("../environment")
|
import env from "../environment"
|
||||||
const accounts = require("../cloud/accounts")
|
import * as accounts from "../cloud/accounts"
|
||||||
|
import { Database } from "@budibase/types"
|
||||||
|
|
||||||
const EXPIRY_SECONDS = 3600
|
const EXPIRY_SECONDS = 3600
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The default populate user function
|
* The default populate user function
|
||||||
*/
|
*/
|
||||||
const populateFromDB = async (userId, tenantId) => {
|
async function populateFromDB(userId: string, tenantId: string) {
|
||||||
const user = await doWithGlobalDB(tenantId, db => db.get(userId))
|
const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId))
|
||||||
user.budibaseAccess = true
|
user.budibaseAccess = true
|
||||||
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
|
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
|
||||||
const account = await accounts.getAccount(user.email)
|
const account = await accounts.getAccount(user.email)
|
||||||
|
@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => {
|
||||||
* @param {*} populateUser function to provide the user for re-caching. default to couch db
|
* @param {*} populateUser function to provide the user for re-caching. default to couch db
|
||||||
* @returns
|
* @returns
|
||||||
*/
|
*/
|
||||||
exports.getUser = async (userId, tenantId = null, populateUser = null) => {
|
export async function getUser(
|
||||||
|
userId: string,
|
||||||
|
tenantId?: string,
|
||||||
|
populateUser?: any
|
||||||
|
) {
|
||||||
if (!populateUser) {
|
if (!populateUser) {
|
||||||
populateUser = populateFromDB
|
populateUser = populateFromDB
|
||||||
}
|
}
|
||||||
|
@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
|
||||||
let user = await client.get(userId)
|
let user = await client.get(userId)
|
||||||
if (!user) {
|
if (!user) {
|
||||||
user = await populateUser(userId, tenantId)
|
user = await populateUser(userId, tenantId)
|
||||||
client.store(userId, user, EXPIRY_SECONDS)
|
await client.store(userId, user, EXPIRY_SECONDS)
|
||||||
}
|
}
|
||||||
if (user && !user.tenantId && tenantId) {
|
if (user && !user.tenantId && tenantId) {
|
||||||
// make sure the tenant ID is always correct/set
|
// make sure the tenant ID is always correct/set
|
||||||
|
@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
|
||||||
return user
|
return user
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.invalidateUser = async userId => {
|
export async function invalidateUser(userId: string) {
|
||||||
const client = await redis.getUserClient()
|
const client = await redis.getUserClient()
|
||||||
await client.delete(userId)
|
await client.delete(userId)
|
||||||
}
|
}
|
|
@ -1,42 +0,0 @@
|
||||||
const fetch = require("node-fetch")
|
|
||||||
class API {
|
|
||||||
constructor(host) {
|
|
||||||
this.host = host
|
|
||||||
}
|
|
||||||
|
|
||||||
apiCall =
|
|
||||||
method =>
|
|
||||||
async (url = "", options = {}) => {
|
|
||||||
if (!options.headers) {
|
|
||||||
options.headers = {}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!options.headers["Content-Type"]) {
|
|
||||||
options.headers = {
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
Accept: "application/json",
|
|
||||||
...options.headers,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let json = options.headers["Content-Type"] === "application/json"
|
|
||||||
|
|
||||||
const requestOptions = {
|
|
||||||
method: method,
|
|
||||||
body: json ? JSON.stringify(options.body) : options.body,
|
|
||||||
headers: options.headers,
|
|
||||||
// TODO: See if this is necessary
|
|
||||||
credentials: "include",
|
|
||||||
}
|
|
||||||
|
|
||||||
return await fetch(`${this.host}${url}`, requestOptions)
|
|
||||||
}
|
|
||||||
|
|
||||||
post = this.apiCall("POST")
|
|
||||||
get = this.apiCall("GET")
|
|
||||||
patch = this.apiCall("PATCH")
|
|
||||||
del = this.apiCall("DELETE")
|
|
||||||
put = this.apiCall("PUT")
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = API
|
|
|
@ -0,0 +1,55 @@
|
||||||
|
import fetch from "node-fetch"
|
||||||
|
|
||||||
|
export = class API {
|
||||||
|
host: string
|
||||||
|
|
||||||
|
constructor(host: string) {
|
||||||
|
this.host = host
|
||||||
|
}
|
||||||
|
|
||||||
|
async apiCall(method: string, url: string, options?: any) {
|
||||||
|
if (!options.headers) {
|
||||||
|
options.headers = {}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.headers["Content-Type"]) {
|
||||||
|
options.headers = {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
Accept: "application/json",
|
||||||
|
...options.headers,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let json = options.headers["Content-Type"] === "application/json"
|
||||||
|
|
||||||
|
const requestOptions = {
|
||||||
|
method: method,
|
||||||
|
body: json ? JSON.stringify(options.body) : options.body,
|
||||||
|
headers: options.headers,
|
||||||
|
// TODO: See if this is necessary
|
||||||
|
credentials: "include",
|
||||||
|
}
|
||||||
|
|
||||||
|
return await fetch(`${this.host}${url}`, requestOptions)
|
||||||
|
}
|
||||||
|
|
||||||
|
async post(url: string, options?: any) {
|
||||||
|
return this.apiCall("POST", url, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(url: string, options?: any) {
|
||||||
|
return this.apiCall("GET", url, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
async patch(url: string, options?: any) {
|
||||||
|
return this.apiCall("PATCH", url, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
async del(url: string, options?: any) {
|
||||||
|
return this.apiCall("DELETE", url, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
async put(url: string, options?: any) {
|
||||||
|
return this.apiCall("PUT", url, options)
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,44 +0,0 @@
|
||||||
exports.UserStatus = {
|
|
||||||
ACTIVE: "active",
|
|
||||||
INACTIVE: "inactive",
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.Cookie = {
|
|
||||||
CurrentApp: "budibase:currentapp",
|
|
||||||
Auth: "budibase:auth",
|
|
||||||
Init: "budibase:init",
|
|
||||||
ACCOUNT_RETURN_URL: "budibase:account:returnurl",
|
|
||||||
DatasourceAuth: "budibase:datasourceauth",
|
|
||||||
OIDC_CONFIG: "budibase:oidc:config",
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.Header = {
|
|
||||||
API_KEY: "x-budibase-api-key",
|
|
||||||
LICENSE_KEY: "x-budibase-license-key",
|
|
||||||
API_VER: "x-budibase-api-version",
|
|
||||||
APP_ID: "x-budibase-app-id",
|
|
||||||
TYPE: "x-budibase-type",
|
|
||||||
PREVIEW_ROLE: "x-budibase-role",
|
|
||||||
TENANT_ID: "x-budibase-tenant-id",
|
|
||||||
TOKEN: "x-budibase-token",
|
|
||||||
CSRF_TOKEN: "x-csrf-token",
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.GlobalRoles = {
|
|
||||||
OWNER: "owner",
|
|
||||||
ADMIN: "admin",
|
|
||||||
BUILDER: "builder",
|
|
||||||
WORKSPACE_MANAGER: "workspace_manager",
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.Config = {
|
|
||||||
SETTINGS: "settings",
|
|
||||||
ACCOUNT: "account",
|
|
||||||
SMTP: "smtp",
|
|
||||||
GOOGLE: "google",
|
|
||||||
OIDC: "oidc",
|
|
||||||
OIDC_LOGOS: "logos_oidc",
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.MAX_VALID_DATE = new Date(2147483647000)
|
|
||||||
exports.DEFAULT_TENANT_ID = "default"
|
|
|
@ -0,0 +1,2 @@
|
||||||
|
export * from "./db"
|
||||||
|
export * from "./misc"
|
|
@ -1,18 +1,17 @@
|
||||||
import { AsyncLocalStorage } from "async_hooks"
|
import { AsyncLocalStorage } from "async_hooks"
|
||||||
import { ContextMap } from "./constants"
|
|
||||||
|
|
||||||
export default class Context {
|
export default class Context {
|
||||||
static storage = new AsyncLocalStorage<ContextMap>()
|
static storage = new AsyncLocalStorage<Record<string, any>>()
|
||||||
|
|
||||||
static run(context: ContextMap, func: any) {
|
static run(context: Record<string, any>, func: any) {
|
||||||
return Context.storage.run(context, () => func())
|
return Context.storage.run(context, () => func())
|
||||||
}
|
}
|
||||||
|
|
||||||
static get(): ContextMap {
|
static get(): Record<string, any> {
|
||||||
return Context.storage.getStore() as ContextMap
|
return Context.storage.getStore() as Record<string, any>
|
||||||
}
|
}
|
||||||
|
|
||||||
static set(context: ContextMap) {
|
static set(context: Record<string, any>) {
|
||||||
Context.storage.enterWith(context)
|
Context.storage.enterWith(context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
import { IdentityContext } from "@budibase/types"
|
|
||||||
|
|
||||||
export type ContextMap = {
|
|
||||||
tenantId?: string
|
|
||||||
appId?: string
|
|
||||||
identity?: IdentityContext
|
|
||||||
}
|
|
|
@ -2,23 +2,22 @@ import {
|
||||||
IdentityContext,
|
IdentityContext,
|
||||||
IdentityType,
|
IdentityType,
|
||||||
User,
|
User,
|
||||||
UserContext,
|
|
||||||
isCloudAccount,
|
isCloudAccount,
|
||||||
Account,
|
Account,
|
||||||
AccountUserContext,
|
AccountUserContext,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import * as context from "."
|
import * as context from "."
|
||||||
|
|
||||||
export const getIdentity = (): IdentityContext | undefined => {
|
export function getIdentity(): IdentityContext | undefined {
|
||||||
return context.getIdentity()
|
return context.getIdentity()
|
||||||
}
|
}
|
||||||
|
|
||||||
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
|
export function doInIdentityContext(identity: IdentityContext, task: any) {
|
||||||
return context.doInIdentityContext(identity, task)
|
return context.doInIdentityContext(identity, task)
|
||||||
}
|
}
|
||||||
|
|
||||||
export const doInUserContext = (user: User, task: any) => {
|
export function doInUserContext(user: User, task: any) {
|
||||||
const userContext: UserContext = {
|
const userContext: any = {
|
||||||
...user,
|
...user,
|
||||||
_id: user._id as string,
|
_id: user._id as string,
|
||||||
type: IdentityType.USER,
|
type: IdentityType.USER,
|
||||||
|
@ -26,7 +25,7 @@ export const doInUserContext = (user: User, task: any) => {
|
||||||
return doInIdentityContext(userContext, task)
|
return doInIdentityContext(userContext, task)
|
||||||
}
|
}
|
||||||
|
|
||||||
export const doInAccountContext = (account: Account, task: any) => {
|
export function doInAccountContext(account: Account, task: any) {
|
||||||
const _id = getAccountUserId(account)
|
const _id = getAccountUserId(account)
|
||||||
const tenantId = account.tenantId
|
const tenantId = account.tenantId
|
||||||
const accountContext: AccountUserContext = {
|
const accountContext: AccountUserContext = {
|
||||||
|
@ -38,12 +37,12 @@ export const doInAccountContext = (account: Account, task: any) => {
|
||||||
return doInIdentityContext(accountContext, task)
|
return doInIdentityContext(accountContext, task)
|
||||||
}
|
}
|
||||||
|
|
||||||
export const getAccountUserId = (account: Account) => {
|
export function getAccountUserId(account: Account) {
|
||||||
let userId: string
|
let userId: string
|
||||||
if (isCloudAccount(account)) {
|
if (isCloudAccount(account)) {
|
||||||
userId = account.budibaseUserId
|
userId = account.budibaseUserId
|
||||||
} else {
|
} else {
|
||||||
// use account id as user id for self hosting
|
// use account id as user id for self-hosting
|
||||||
userId = account.accountId
|
userId = account.accountId
|
||||||
}
|
}
|
||||||
return userId
|
return userId
|
||||||
|
|
|
@ -1,223 +1,3 @@
|
||||||
import env from "../environment"
|
export { DEFAULT_TENANT_ID } from "../constants"
|
||||||
import {
|
export * as identity from "./identity"
|
||||||
SEPARATOR,
|
export * from "./mainContext"
|
||||||
DocumentType,
|
|
||||||
getDevelopmentAppID,
|
|
||||||
getProdAppID,
|
|
||||||
baseGlobalDBName,
|
|
||||||
getDB,
|
|
||||||
} from "../db"
|
|
||||||
import Context from "./Context"
|
|
||||||
import { IdentityContext, Database } from "@budibase/types"
|
|
||||||
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
|
|
||||||
import { ContextMap } from "./constants"
|
|
||||||
export const DEFAULT_TENANT_ID = _DEFAULT_TENANT_ID
|
|
||||||
|
|
||||||
// some test cases call functions directly, need to
|
|
||||||
// store an app ID to pretend there is a context
|
|
||||||
let TEST_APP_ID: string | null = null
|
|
||||||
|
|
||||||
export function isMultiTenant() {
|
|
||||||
return env.MULTI_TENANCY
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isTenantIdSet() {
|
|
||||||
const context = Context.get()
|
|
||||||
return !!context?.tenantId
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isTenancyEnabled() {
|
|
||||||
return env.MULTI_TENANCY
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Given an app ID this will attempt to retrieve the tenant ID from it.
|
|
||||||
* @return {null|string} The tenant ID found within the app ID.
|
|
||||||
*/
|
|
||||||
export function getTenantIDFromAppID(appId: string) {
|
|
||||||
if (!appId) {
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
if (!isMultiTenant()) {
|
|
||||||
return DEFAULT_TENANT_ID
|
|
||||||
}
|
|
||||||
const split = appId.split(SEPARATOR)
|
|
||||||
const hasDev = split[1] === DocumentType.DEV
|
|
||||||
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
if (hasDev) {
|
|
||||||
return split[2]
|
|
||||||
} else {
|
|
||||||
return split[1]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function updateContext(updates: ContextMap) {
|
|
||||||
let context: ContextMap
|
|
||||||
try {
|
|
||||||
context = Context.get()
|
|
||||||
} catch (err) {
|
|
||||||
// no context, start empty
|
|
||||||
context = {}
|
|
||||||
}
|
|
||||||
context = {
|
|
||||||
...context,
|
|
||||||
...updates,
|
|
||||||
}
|
|
||||||
return context
|
|
||||||
}
|
|
||||||
|
|
||||||
async function newContext(updates: ContextMap, task: any) {
|
|
||||||
// see if there already is a context setup
|
|
||||||
let context: ContextMap = updateContext(updates)
|
|
||||||
return Context.run(context, task)
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function doInContext(appId: string, task: any): Promise<any> {
|
|
||||||
const tenantId = getTenantIDFromAppID(appId)
|
|
||||||
return newContext(
|
|
||||||
{
|
|
||||||
tenantId,
|
|
||||||
appId,
|
|
||||||
},
|
|
||||||
task
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function doInTenant(
|
|
||||||
tenantId: string | null,
|
|
||||||
task: any
|
|
||||||
): Promise<any> {
|
|
||||||
// make sure default always selected in single tenancy
|
|
||||||
if (!env.MULTI_TENANCY) {
|
|
||||||
tenantId = tenantId || DEFAULT_TENANT_ID
|
|
||||||
}
|
|
||||||
|
|
||||||
const updates = tenantId ? { tenantId } : {}
|
|
||||||
return newContext(updates, task)
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function doInAppContext(appId: string, task: any): Promise<any> {
|
|
||||||
if (!appId) {
|
|
||||||
throw new Error("appId is required")
|
|
||||||
}
|
|
||||||
|
|
||||||
const tenantId = getTenantIDFromAppID(appId)
|
|
||||||
const updates: ContextMap = { appId }
|
|
||||||
if (tenantId) {
|
|
||||||
updates.tenantId = tenantId
|
|
||||||
}
|
|
||||||
return newContext(updates, task)
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function doInIdentityContext(
|
|
||||||
identity: IdentityContext,
|
|
||||||
task: any
|
|
||||||
): Promise<any> {
|
|
||||||
if (!identity) {
|
|
||||||
throw new Error("identity is required")
|
|
||||||
}
|
|
||||||
|
|
||||||
const context: ContextMap = {
|
|
||||||
identity,
|
|
||||||
}
|
|
||||||
if (identity.tenantId) {
|
|
||||||
context.tenantId = identity.tenantId
|
|
||||||
}
|
|
||||||
return newContext(context, task)
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getIdentity(): IdentityContext | undefined {
|
|
||||||
try {
|
|
||||||
const context = Context.get()
|
|
||||||
return context?.identity
|
|
||||||
} catch (e) {
|
|
||||||
// do nothing - identity is not in context
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getTenantId(): string {
|
|
||||||
if (!isMultiTenant()) {
|
|
||||||
return DEFAULT_TENANT_ID
|
|
||||||
}
|
|
||||||
const context = Context.get()
|
|
||||||
const tenantId = context?.tenantId
|
|
||||||
if (!tenantId) {
|
|
||||||
throw new Error("Tenant id not found")
|
|
||||||
}
|
|
||||||
return tenantId
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getAppId(): string | undefined {
|
|
||||||
const context = Context.get()
|
|
||||||
const foundId = context?.appId
|
|
||||||
if (!foundId && env.isTest() && TEST_APP_ID) {
|
|
||||||
return TEST_APP_ID
|
|
||||||
} else {
|
|
||||||
return foundId
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function updateTenantId(tenantId?: string) {
|
|
||||||
let context: ContextMap = updateContext({
|
|
||||||
tenantId,
|
|
||||||
})
|
|
||||||
Context.set(context)
|
|
||||||
}
|
|
||||||
|
|
||||||
export function updateAppId(appId: string) {
|
|
||||||
let context: ContextMap = updateContext({
|
|
||||||
appId,
|
|
||||||
})
|
|
||||||
try {
|
|
||||||
Context.set(context)
|
|
||||||
} catch (err) {
|
|
||||||
if (env.isTest()) {
|
|
||||||
TEST_APP_ID = appId
|
|
||||||
} else {
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getGlobalDB(): Database {
|
|
||||||
const context = Context.get()
|
|
||||||
if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
|
|
||||||
throw new Error("Global DB not found")
|
|
||||||
}
|
|
||||||
return getDB(baseGlobalDBName(context?.tenantId))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Gets the app database based on whatever the request
|
|
||||||
* contained, dev or prod.
|
|
||||||
*/
|
|
||||||
export function getAppDB(opts?: any): Database {
|
|
||||||
const appId = getAppId()
|
|
||||||
return getDB(appId, opts)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This specifically gets the prod app ID, if the request
|
|
||||||
* contained a development app ID, this will get the prod one.
|
|
||||||
*/
|
|
||||||
export function getProdAppDB(opts?: any): Database {
|
|
||||||
const appId = getAppId()
|
|
||||||
if (!appId) {
|
|
||||||
throw new Error("Unable to retrieve prod DB - no app ID.")
|
|
||||||
}
|
|
||||||
return getDB(getProdAppID(appId), opts)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This specifically gets the dev app ID, if the request
|
|
||||||
* contained a prod app ID, this will get the dev one.
|
|
||||||
*/
|
|
||||||
export function getDevAppDB(opts?: any): Database {
|
|
||||||
const appId = getAppId()
|
|
||||||
if (!appId) {
|
|
||||||
throw new Error("Unable to retrieve dev DB - no app ID.")
|
|
||||||
}
|
|
||||||
return getDB(getDevelopmentAppID(appId), opts)
|
|
||||||
}
|
|
||||||
|
|
|
@ -0,0 +1,245 @@
|
||||||
|
// some test cases call functions directly, need to
|
||||||
|
// store an app ID to pretend there is a context
|
||||||
|
import env from "../environment"
|
||||||
|
import Context from "./Context"
|
||||||
|
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
|
||||||
|
import { getDB } from "../db/db"
|
||||||
|
import {
|
||||||
|
DocumentType,
|
||||||
|
SEPARATOR,
|
||||||
|
StaticDatabases,
|
||||||
|
DEFAULT_TENANT_ID,
|
||||||
|
} from "../constants"
|
||||||
|
import { Database, IdentityContext } from "@budibase/types"
|
||||||
|
|
||||||
|
// State carried through async-local storage for the duration of a request:
// which tenant and app the work belongs to, and the acting identity.
export type ContextMap = {
  tenantId?: string
  appId?: string
  identity?: IdentityContext
}

// Fallback app ID for tests that call functions directly without a running
// context — written by updateAppId when Context.set fails under env.isTest(),
// read back by getAppId.
let TEST_APP_ID: string | null = null
|
||||||
|
|
||||||
|
export function getGlobalDBName(tenantId?: string) {
|
||||||
|
// tenant ID can be set externally, for example user API where
|
||||||
|
// new tenants are being created, this may be the case
|
||||||
|
if (!tenantId) {
|
||||||
|
tenantId = getTenantId()
|
||||||
|
}
|
||||||
|
return baseGlobalDBName(tenantId)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function baseGlobalDBName(tenantId: string | undefined | null) {
|
||||||
|
let dbName
|
||||||
|
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
|
||||||
|
dbName = StaticDatabases.GLOBAL.name
|
||||||
|
} else {
|
||||||
|
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
|
||||||
|
}
|
||||||
|
return dbName
|
||||||
|
}
|
||||||
|
|
||||||
|
// Whether the installation runs in multi-tenant mode (driven by the
// MULTI_TENANCY environment flag).
export function isMultiTenant() {
  return env.MULTI_TENANCY
}
|
||||||
|
|
||||||
|
export function isTenantIdSet() {
|
||||||
|
const context = Context.get()
|
||||||
|
return !!context?.tenantId
|
||||||
|
}
|
||||||
|
|
||||||
|
// Alias-style check for the MULTI_TENANCY flag; kept separate from
// isMultiTenant for API compatibility.
export function isTenancyEnabled() {
  return env.MULTI_TENANCY
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given an app ID this will attempt to retrieve the tenant ID from it.
|
||||||
|
* @return {null|string} The tenant ID found within the app ID.
|
||||||
|
*/
|
||||||
|
export function getTenantIDFromAppID(appId: string) {
|
||||||
|
if (!appId) {
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
if (!isMultiTenant()) {
|
||||||
|
return DEFAULT_TENANT_ID
|
||||||
|
}
|
||||||
|
const split = appId.split(SEPARATOR)
|
||||||
|
const hasDev = split[1] === DocumentType.DEV
|
||||||
|
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
if (hasDev) {
|
||||||
|
return split[2]
|
||||||
|
} else {
|
||||||
|
return split[1]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateContext(updates: ContextMap) {
|
||||||
|
let context: ContextMap
|
||||||
|
try {
|
||||||
|
context = Context.get()
|
||||||
|
} catch (err) {
|
||||||
|
// no context, start empty
|
||||||
|
context = {}
|
||||||
|
}
|
||||||
|
context = {
|
||||||
|
...context,
|
||||||
|
...updates,
|
||||||
|
}
|
||||||
|
return context
|
||||||
|
}
|
||||||
|
|
||||||
|
async function newContext(updates: ContextMap, task: any) {
|
||||||
|
// see if there already is a context setup
|
||||||
|
let context: ContextMap = updateContext(updates)
|
||||||
|
return Context.run(context, task)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function doInContext(appId: string, task: any): Promise<any> {
|
||||||
|
const tenantId = getTenantIDFromAppID(appId)
|
||||||
|
return newContext(
|
||||||
|
{
|
||||||
|
tenantId,
|
||||||
|
appId,
|
||||||
|
},
|
||||||
|
task
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function doInTenant(
|
||||||
|
tenantId: string | null,
|
||||||
|
task: any
|
||||||
|
): Promise<any> {
|
||||||
|
// make sure default always selected in single tenancy
|
||||||
|
if (!env.MULTI_TENANCY) {
|
||||||
|
tenantId = tenantId || DEFAULT_TENANT_ID
|
||||||
|
}
|
||||||
|
|
||||||
|
const updates = tenantId ? { tenantId } : {}
|
||||||
|
return newContext(updates, task)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function doInAppContext(appId: string, task: any): Promise<any> {
|
||||||
|
if (!appId) {
|
||||||
|
throw new Error("appId is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
const tenantId = getTenantIDFromAppID(appId)
|
||||||
|
const updates: ContextMap = { appId }
|
||||||
|
if (tenantId) {
|
||||||
|
updates.tenantId = tenantId
|
||||||
|
}
|
||||||
|
return newContext(updates, task)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function doInIdentityContext(
|
||||||
|
identity: IdentityContext,
|
||||||
|
task: any
|
||||||
|
): Promise<any> {
|
||||||
|
if (!identity) {
|
||||||
|
throw new Error("identity is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
const context: ContextMap = {
|
||||||
|
identity,
|
||||||
|
}
|
||||||
|
if (identity.tenantId) {
|
||||||
|
context.tenantId = identity.tenantId
|
||||||
|
}
|
||||||
|
return newContext(context, task)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getIdentity(): IdentityContext | undefined {
|
||||||
|
try {
|
||||||
|
const context = Context.get()
|
||||||
|
return context?.identity
|
||||||
|
} catch (e) {
|
||||||
|
// do nothing - identity is not in context
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getTenantId(): string {
|
||||||
|
if (!isMultiTenant()) {
|
||||||
|
return DEFAULT_TENANT_ID
|
||||||
|
}
|
||||||
|
const context = Context.get()
|
||||||
|
const tenantId = context?.tenantId
|
||||||
|
if (!tenantId) {
|
||||||
|
throw new Error("Tenant id not found")
|
||||||
|
}
|
||||||
|
return tenantId
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getAppId(): string | undefined {
|
||||||
|
const context = Context.get()
|
||||||
|
const foundId = context?.appId
|
||||||
|
if (!foundId && env.isTest() && TEST_APP_ID) {
|
||||||
|
return TEST_APP_ID
|
||||||
|
} else {
|
||||||
|
return foundId
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function updateTenantId(tenantId?: string) {
|
||||||
|
let context: ContextMap = updateContext({
|
||||||
|
tenantId,
|
||||||
|
})
|
||||||
|
Context.set(context)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function updateAppId(appId: string) {
|
||||||
|
let context: ContextMap = updateContext({
|
||||||
|
appId,
|
||||||
|
})
|
||||||
|
try {
|
||||||
|
Context.set(context)
|
||||||
|
} catch (err) {
|
||||||
|
if (env.isTest()) {
|
||||||
|
TEST_APP_ID = appId
|
||||||
|
} else {
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getGlobalDB(): Database {
|
||||||
|
const context = Context.get()
|
||||||
|
if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
|
||||||
|
throw new Error("Global DB not found")
|
||||||
|
}
|
||||||
|
return getDB(baseGlobalDBName(context?.tenantId))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the app database based on whatever the request
|
||||||
|
* contained, dev or prod.
|
||||||
|
*/
|
||||||
|
export function getAppDB(opts?: any): Database {
|
||||||
|
const appId = getAppId()
|
||||||
|
return getDB(appId, opts)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This specifically gets the prod app ID, if the request
|
||||||
|
* contained a development app ID, this will get the prod one.
|
||||||
|
*/
|
||||||
|
export function getProdAppDB(opts?: any): Database {
|
||||||
|
const appId = getAppId()
|
||||||
|
if (!appId) {
|
||||||
|
throw new Error("Unable to retrieve prod DB - no app ID.")
|
||||||
|
}
|
||||||
|
return getDB(getProdAppID(appId), opts)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This specifically gets the dev app ID, if the request
|
||||||
|
* contained a prod app ID, this will get the dev one.
|
||||||
|
*/
|
||||||
|
export function getDevAppDB(opts?: any): Database {
|
||||||
|
const appId = getAppId()
|
||||||
|
if (!appId) {
|
||||||
|
throw new Error("Unable to retrieve dev DB - no app ID.")
|
||||||
|
}
|
||||||
|
return getDB(getDevelopmentAppID(appId), opts)
|
||||||
|
}
|
|
@ -1,5 +1,5 @@
|
||||||
import { getPouchDB, closePouchDB } from "./couch/pouchDB"
|
import { getPouchDB, closePouchDB } from "./couch"
|
||||||
import { DocumentType } from "./constants"
|
import { DocumentType } from "../constants"
|
||||||
|
|
||||||
class Replication {
|
class Replication {
|
||||||
source: any
|
source: any
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
import { APP_DEV_PREFIX, APP_PREFIX } from "./constants"
|
import { APP_DEV_PREFIX, APP_PREFIX } from "../constants"
|
||||||
import { App } from "@budibase/types"
|
import { App } from "@budibase/types"
|
||||||
const NO_APP_ERROR = "No app provided"
|
const NO_APP_ERROR = "No app provided"
|
||||||
|
|
||||||
|
|
|
@ -2,6 +2,8 @@ export * from "./couch"
|
||||||
export * from "./db"
|
export * from "./db"
|
||||||
export * from "./utils"
|
export * from "./utils"
|
||||||
export * from "./views"
|
export * from "./views"
|
||||||
export * from "./constants"
|
|
||||||
export * from "./conversions"
|
export * from "./conversions"
|
||||||
export * from "./tenancy"
|
export { default as Replication } from "./Replication"
|
||||||
|
// exports to support old export structure
|
||||||
|
export * from "../constants/db"
|
||||||
|
export { getGlobalDBName, baseGlobalDBName } from "../context"
|
||||||
|
|
|
@ -1,22 +0,0 @@
|
||||||
import { DEFAULT_TENANT_ID } from "../constants"
|
|
||||||
import { StaticDatabases, SEPARATOR } from "./constants"
|
|
||||||
import { getTenantId } from "../context"
|
|
||||||
|
|
||||||
export const getGlobalDBName = (tenantId?: string) => {
|
|
||||||
// tenant ID can be set externally, for example user API where
|
|
||||||
// new tenants are being created, this may be the case
|
|
||||||
if (!tenantId) {
|
|
||||||
tenantId = getTenantId()
|
|
||||||
}
|
|
||||||
return baseGlobalDBName(tenantId)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const baseGlobalDBName = (tenantId: string | undefined | null) => {
|
|
||||||
let dbName
|
|
||||||
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
|
|
||||||
dbName = StaticDatabases.GLOBAL.name
|
|
||||||
} else {
|
|
||||||
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
|
|
||||||
}
|
|
||||||
return dbName
|
|
||||||
}
|
|
|
@ -1,10 +1,12 @@
|
||||||
require("../../../tests")
|
require("../../../tests")
|
||||||
const {
|
const {
|
||||||
generateAppID,
|
|
||||||
getDevelopmentAppID,
|
getDevelopmentAppID,
|
||||||
getProdAppID,
|
getProdAppID,
|
||||||
isDevAppID,
|
isDevAppID,
|
||||||
isProdAppID,
|
isProdAppID,
|
||||||
|
} = require("../conversions")
|
||||||
|
const {
|
||||||
|
generateAppID,
|
||||||
getPlatformUrl,
|
getPlatformUrl,
|
||||||
getScopedConfig
|
getScopedConfig
|
||||||
} = require("../utils")
|
} = require("../utils")
|
||||||
|
|
|
@ -1,26 +1,20 @@
|
||||||
import { newid } from "../hashing"
|
import { newid } from "../newid"
|
||||||
import { DEFAULT_TENANT_ID, Config } from "../constants"
|
|
||||||
import env from "../environment"
|
import env from "../environment"
|
||||||
import {
|
import {
|
||||||
|
DEFAULT_TENANT_ID,
|
||||||
SEPARATOR,
|
SEPARATOR,
|
||||||
DocumentType,
|
DocumentType,
|
||||||
UNICODE_MAX,
|
UNICODE_MAX,
|
||||||
ViewName,
|
ViewName,
|
||||||
InternalTable,
|
InternalTable,
|
||||||
} from "./constants"
|
APP_PREFIX,
|
||||||
import { getTenantId, getGlobalDB } from "../context"
|
} from "../constants"
|
||||||
import { getGlobalDBName } from "./tenancy"
|
import { getTenantId, getGlobalDB, getGlobalDBName } from "../context"
|
||||||
import { doWithDB, allDbs, directCouchAllDbs } from "./db"
|
import { doWithDB, allDbs, directCouchAllDbs } from "./db"
|
||||||
import { getAppMetadata } from "../cache/appMetadata"
|
import { getAppMetadata } from "../cache/appMetadata"
|
||||||
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
|
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
|
||||||
import { APP_PREFIX } from "./constants"
|
|
||||||
import * as events from "../events"
|
import * as events from "../events"
|
||||||
import { App, Database } from "@budibase/types"
|
import { App, Database, ConfigType } from "@budibase/types"
|
||||||
|
|
||||||
export * from "./constants"
|
|
||||||
export * from "./conversions"
|
|
||||||
export { default as Replication } from "./Replication"
|
|
||||||
export * from "./tenancy"
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Generates a new app ID.
|
* Generates a new app ID.
|
||||||
|
@ -494,7 +488,7 @@ export const getScopedFullConfig = async function (
|
||||||
)[0]
|
)[0]
|
||||||
|
|
||||||
// custom logic for settings doc
|
// custom logic for settings doc
|
||||||
if (type === Config.SETTINGS) {
|
if (type === ConfigType.SETTINGS) {
|
||||||
if (scopedConfig && scopedConfig.doc) {
|
if (scopedConfig && scopedConfig.doc) {
|
||||||
// overrides affected by environment variables
|
// overrides affected by environment variables
|
||||||
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
|
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
|
||||||
|
@ -533,7 +527,7 @@ export const getPlatformUrl = async (opts = { tenantAware: true }) => {
|
||||||
// get the doc directly instead of with getScopedConfig to prevent loop
|
// get the doc directly instead of with getScopedConfig to prevent loop
|
||||||
let settings
|
let settings
|
||||||
try {
|
try {
|
||||||
settings = await db.get(generateConfigID({ type: Config.SETTINGS }))
|
settings = await db.get(generateConfigID({ type: ConfigType.SETTINGS }))
|
||||||
} catch (e: any) {
|
} catch (e: any) {
|
||||||
if (e.status !== 404) {
|
if (e.status !== 404) {
|
||||||
throw e
|
throw e
|
||||||
|
|
|
@ -1,6 +1,11 @@
|
||||||
import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
|
import {
|
||||||
|
DocumentType,
|
||||||
|
ViewName,
|
||||||
|
DeprecatedViews,
|
||||||
|
SEPARATOR,
|
||||||
|
StaticDatabases,
|
||||||
|
} from "../constants"
|
||||||
import { getGlobalDB } from "../context"
|
import { getGlobalDB } from "../context"
|
||||||
import { StaticDatabases } from "./constants"
|
|
||||||
import { doWithDB } from "./"
|
import { doWithDB } from "./"
|
||||||
import { Database, DatabaseQueryOpts } from "@budibase/types"
|
import { Database, DatabaseQueryOpts } from "@budibase/types"
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,7 @@ const DefaultBucketName = {
|
||||||
PLUGINS: "plugins",
|
PLUGINS: "plugins",
|
||||||
}
|
}
|
||||||
|
|
||||||
const env = {
|
const environment = {
|
||||||
isTest,
|
isTest,
|
||||||
isDev,
|
isDev,
|
||||||
JS_BCRYPT: process.env.JS_BCRYPT,
|
JS_BCRYPT: process.env.JS_BCRYPT,
|
||||||
|
@ -75,17 +75,18 @@ const env = {
|
||||||
process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
|
process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
|
||||||
_set(key: any, value: any) {
|
_set(key: any, value: any) {
|
||||||
process.env[key] = value
|
process.env[key] = value
|
||||||
module.exports[key] = value
|
// @ts-ignore
|
||||||
|
environment[key] = value
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
// clean up any environment variable edge cases
|
// clean up any environment variable edge cases
|
||||||
for (let [key, value] of Object.entries(env)) {
|
for (let [key, value] of Object.entries(environment)) {
|
||||||
// handle the edge case of "0" to disable an environment variable
|
// handle the edge case of "0" to disable an environment variable
|
||||||
if (value === "0") {
|
if (value === "0") {
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
env[key] = 0
|
environment[key] = 0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export = env
|
export = environment
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
import env from "../environment"
|
import env from "../environment"
|
||||||
import tenancy from "../tenancy"
|
import * as tenancy from "../tenancy"
|
||||||
import * as dbUtils from "../db/utils"
|
import * as dbUtils from "../db/utils"
|
||||||
import { Config } from "../constants"
|
import { Config } from "../constants"
|
||||||
import { withCache, TTL, CacheKeys } from "../cache/generic"
|
import { withCache, TTL, CacheKey } from "../cache"
|
||||||
|
|
||||||
export const enabled = async () => {
|
export const enabled = async () => {
|
||||||
// cloud - always use the environment variable
|
// cloud - always use the environment variable
|
||||||
|
@ -13,7 +13,7 @@ export const enabled = async () => {
|
||||||
// self host - prefer the settings doc
|
// self host - prefer the settings doc
|
||||||
// use cache as events have high throughput
|
// use cache as events have high throughput
|
||||||
const enabledInDB = await withCache(
|
const enabledInDB = await withCache(
|
||||||
CacheKeys.ANALYTICS_ENABLED,
|
CacheKey.ANALYTICS_ENABLED,
|
||||||
TTL.ONE_DAY,
|
TTL.ONE_DAY,
|
||||||
async () => {
|
async () => {
|
||||||
const settings = await getSettingsDoc()
|
const settings = await getSettingsDoc()
|
||||||
|
|
|
@ -21,7 +21,7 @@ import {
|
||||||
AppCreatedEvent,
|
AppCreatedEvent,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import * as context from "../context"
|
import * as context from "../context"
|
||||||
import { CacheKeys } from "../cache/generic"
|
import { CacheKey } from "../cache/generic"
|
||||||
import * as cache from "../cache/generic"
|
import * as cache from "../cache/generic"
|
||||||
|
|
||||||
// LIFECYCLE
|
// LIFECYCLE
|
||||||
|
@ -48,18 +48,18 @@ export const end = async () => {
|
||||||
// CRUD
|
// CRUD
|
||||||
|
|
||||||
const getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {
|
const getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {
|
||||||
return cache.get(CacheKeys.BACKFILL_METADATA)
|
return cache.get(CacheKey.BACKFILL_METADATA)
|
||||||
}
|
}
|
||||||
|
|
||||||
const saveBackfillMetadata = async (
|
const saveBackfillMetadata = async (
|
||||||
backfill: BackfillMetadata
|
backfill: BackfillMetadata
|
||||||
): Promise<void> => {
|
): Promise<void> => {
|
||||||
// no TTL - deleted by backfill
|
// no TTL - deleted by backfill
|
||||||
return cache.store(CacheKeys.BACKFILL_METADATA, backfill)
|
return cache.store(CacheKey.BACKFILL_METADATA, backfill)
|
||||||
}
|
}
|
||||||
|
|
||||||
const deleteBackfillMetadata = async (): Promise<void> => {
|
const deleteBackfillMetadata = async (): Promise<void> => {
|
||||||
await cache.delete(CacheKeys.BACKFILL_METADATA)
|
await cache.destroy(CacheKey.BACKFILL_METADATA)
|
||||||
}
|
}
|
||||||
|
|
||||||
const clearEvents = async () => {
|
const clearEvents = async () => {
|
||||||
|
@ -70,7 +70,7 @@ const clearEvents = async () => {
|
||||||
for (const key of keys) {
|
for (const key of keys) {
|
||||||
// delete each key
|
// delete each key
|
||||||
// don't use tenancy, already in the key
|
// don't use tenancy, already in the key
|
||||||
await cache.delete(key, { useTenancy: false })
|
await cache.destroy(key, { useTenancy: false })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -167,7 +167,7 @@ const getEventKey = (event?: Event, properties?: any) => {
|
||||||
|
|
||||||
const tenantId = context.getTenantId()
|
const tenantId = context.getTenantId()
|
||||||
if (event) {
|
if (event) {
|
||||||
eventKey = `${CacheKeys.EVENTS}:${tenantId}:${event}`
|
eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}`
|
||||||
|
|
||||||
// use some properties to make the key more unique
|
// use some properties to make the key more unique
|
||||||
const custom = CUSTOM_PROPERTY_SUFFIX[event]
|
const custom = CUSTOM_PROPERTY_SUFFIX[event]
|
||||||
|
@ -176,7 +176,7 @@ const getEventKey = (event?: Event, properties?: any) => {
|
||||||
eventKey = `${eventKey}:${suffix}`
|
eventKey = `${eventKey}:${suffix}`
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
eventKey = `${CacheKeys.EVENTS}:${tenantId}:*`
|
eventKey = `${CacheKey.EVENTS}:${tenantId}:*`
|
||||||
}
|
}
|
||||||
|
|
||||||
return eventKey
|
return eventKey
|
||||||
|
|
|
@ -20,9 +20,9 @@ import {
|
||||||
import { processors } from "./processors"
|
import { processors } from "./processors"
|
||||||
import * as dbUtils from "../db/utils"
|
import * as dbUtils from "../db/utils"
|
||||||
import { Config } from "../constants"
|
import { Config } from "../constants"
|
||||||
import * as hashing from "../hashing"
|
import { newid } from "../utils"
|
||||||
import * as installation from "../installation"
|
import * as installation from "../installation"
|
||||||
import { withCache, TTL, CacheKeys } from "../cache/generic"
|
import { withCache, TTL, CacheKey } from "../cache/generic"
|
||||||
|
|
||||||
const pkg = require("../../package.json")
|
const pkg = require("../../package.json")
|
||||||
|
|
||||||
|
@ -270,7 +270,7 @@ const getEventTenantId = async (tenantId: string): Promise<string> => {
|
||||||
const getUniqueTenantId = async (tenantId: string): Promise<string> => {
|
const getUniqueTenantId = async (tenantId: string): Promise<string> => {
|
||||||
// make sure this tenantId always matches the tenantId in context
|
// make sure this tenantId always matches the tenantId in context
|
||||||
return context.doInTenant(tenantId, () => {
|
return context.doInTenant(tenantId, () => {
|
||||||
return withCache(CacheKeys.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
|
return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
|
||||||
const db = context.getGlobalDB()
|
const db = context.getGlobalDB()
|
||||||
const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
|
const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
|
||||||
type: Config.SETTINGS,
|
type: Config.SETTINGS,
|
||||||
|
@ -280,7 +280,7 @@ const getUniqueTenantId = async (tenantId: string): Promise<string> => {
|
||||||
if (config.config.uniqueTenantId) {
|
if (config.config.uniqueTenantId) {
|
||||||
return config.config.uniqueTenantId
|
return config.config.uniqueTenantId
|
||||||
} else {
|
} else {
|
||||||
uniqueTenantId = `${hashing.newid()}_${tenantId}`
|
uniqueTenantId = `${newid()}_${tenantId}`
|
||||||
config.config.uniqueTenantId = uniqueTenantId
|
config.config.uniqueTenantId = uniqueTenantId
|
||||||
await db.put(config)
|
await db.put(config)
|
||||||
return uniqueTenantId
|
return uniqueTenantId
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import { Event } from "@budibase/types"
|
import { Event } from "@budibase/types"
|
||||||
import { CacheKeys, TTL } from "../../../cache/generic"
|
import { CacheKey, TTL } from "../../../cache/generic"
|
||||||
import * as cache from "../../../cache/generic"
|
import * as cache from "../../../cache/generic"
|
||||||
import * as context from "../../../context"
|
import * as context from "../../../context"
|
||||||
|
|
||||||
|
@ -74,7 +74,7 @@ export const limited = async (event: Event): Promise<boolean> => {
|
||||||
}
|
}
|
||||||
|
|
||||||
const eventKey = (event: RateLimitedEvent) => {
|
const eventKey = (event: RateLimitedEvent) => {
|
||||||
let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}`
|
let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}`
|
||||||
if (isPerApp(event)) {
|
if (isPerApp(event)) {
|
||||||
key = key + ":" + context.getAppId()
|
key = key + ":" + context.getAppId()
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,7 +3,7 @@ import PosthogProcessor from "../PosthogProcessor"
|
||||||
import { Event, IdentityType, Hosting } from "@budibase/types"
|
import { Event, IdentityType, Hosting } from "@budibase/types"
|
||||||
const tk = require("timekeeper")
|
const tk = require("timekeeper")
|
||||||
import * as cache from "../../../../cache/generic"
|
import * as cache from "../../../../cache/generic"
|
||||||
import { CacheKeys } from "../../../../cache/generic"
|
import { CacheKey } from "../../../../cache/generic"
|
||||||
import * as context from "../../../../context"
|
import * as context from "../../../../context"
|
||||||
|
|
||||||
const newIdentity = () => {
|
const newIdentity = () => {
|
||||||
|
@ -19,7 +19,7 @@ describe("PosthogProcessor", () => {
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
jest.clearAllMocks()
|
jest.clearAllMocks()
|
||||||
await cache.bustCache(
|
await cache.bustCache(
|
||||||
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
`${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -89,7 +89,7 @@ describe("PosthogProcessor", () => {
|
||||||
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
|
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
|
||||||
|
|
||||||
await cache.bustCache(
|
await cache.bustCache(
|
||||||
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
`${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
||||||
)
|
)
|
||||||
|
|
||||||
tk.freeze(new Date(2022, 0, 1, 14, 0))
|
tk.freeze(new Date(2022, 0, 1, 14, 0))
|
||||||
|
|
|
@ -72,7 +72,7 @@ export async function stepCreated(
|
||||||
automationId: automation._id as string,
|
automationId: automation._id as string,
|
||||||
triggerId: automation.definition?.trigger?.id,
|
triggerId: automation.definition?.trigger?.id,
|
||||||
triggerType: automation.definition?.trigger?.stepId,
|
triggerType: automation.definition?.trigger?.stepId,
|
||||||
stepId: step.id,
|
stepId: step.id!,
|
||||||
stepType: step.stepId,
|
stepType: step.stepId,
|
||||||
}
|
}
|
||||||
await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)
|
await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)
|
||||||
|
@ -87,7 +87,7 @@ export async function stepDeleted(
|
||||||
automationId: automation._id as string,
|
automationId: automation._id as string,
|
||||||
triggerId: automation.definition?.trigger?.id,
|
triggerId: automation.definition?.trigger?.id,
|
||||||
triggerType: automation.definition?.trigger?.stepId,
|
triggerType: automation.definition?.trigger?.stepId,
|
||||||
stepId: step.id,
|
stepId: step.id!,
|
||||||
stepType: step.stepId,
|
stepType: step.stepId,
|
||||||
}
|
}
|
||||||
await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)
|
await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)
|
||||||
|
|
|
@ -1,17 +1,17 @@
|
||||||
const env = require("../environment")
|
import env from "../environment"
|
||||||
const tenancy = require("../tenancy")
|
import * as tenancy from "../tenancy"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.
|
* Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.
|
||||||
* The env var is formatted as:
|
* The env var is formatted as:
|
||||||
* tenant1:feature1:feature2,tenant2:feature1
|
* tenant1:feature1:feature2,tenant2:feature1
|
||||||
*/
|
*/
|
||||||
const getFeatureFlags = () => {
|
function getFeatureFlags() {
|
||||||
if (!env.TENANT_FEATURE_FLAGS) {
|
if (!env.TENANT_FEATURE_FLAGS) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
const tenantFeatureFlags = {}
|
const tenantFeatureFlags: Record<string, string[]> = {}
|
||||||
|
|
||||||
env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
|
env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
|
||||||
const [tenantId, ...features] = tenantToFeatures.split(":")
|
const [tenantId, ...features] = tenantToFeatures.split(":")
|
||||||
|
@ -29,13 +29,13 @@ const getFeatureFlags = () => {
|
||||||
|
|
||||||
const TENANT_FEATURE_FLAGS = getFeatureFlags()
|
const TENANT_FEATURE_FLAGS = getFeatureFlags()
|
||||||
|
|
||||||
exports.isEnabled = featureFlag => {
|
export function isEnabled(featureFlag: string) {
|
||||||
const tenantId = tenancy.getTenantId()
|
const tenantId = tenancy.getTenantId()
|
||||||
const flags = exports.getTenantFeatureFlags(tenantId)
|
const flags = getTenantFeatureFlags(tenantId)
|
||||||
return flags.includes(featureFlag)
|
return flags.includes(featureFlag)
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getTenantFeatureFlags = tenantId => {
|
export function getTenantFeatureFlags(tenantId: string) {
|
||||||
const flags = []
|
const flags = []
|
||||||
|
|
||||||
if (TENANT_FEATURE_FLAGS) {
|
if (TENANT_FEATURE_FLAGS) {
|
||||||
|
@ -53,8 +53,8 @@ exports.getTenantFeatureFlags = tenantId => {
|
||||||
return flags
|
return flags
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.TenantFeatureFlag = {
|
export enum TenantFeatureFlag {
|
||||||
LICENSING: "LICENSING",
|
LICENSING = "LICENSING",
|
||||||
GOOGLE_SHEETS: "GOOGLE_SHEETS",
|
GOOGLE_SHEETS = "GOOGLE_SHEETS",
|
||||||
USER_GROUPS: "USER_GROUPS",
|
USER_GROUPS = "USER_GROUPS",
|
||||||
}
|
}
|
|
@ -4,6 +4,6 @@
|
||||||
* @param {string} url The URL to test and remove any extra double slashes.
|
* @param {string} url The URL to test and remove any extra double slashes.
|
||||||
* @return {string} The updated url.
|
* @return {string} The updated url.
|
||||||
*/
|
*/
|
||||||
exports.checkSlashesInUrl = url => {
|
export function checkSlashesInUrl(url: string) {
|
||||||
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
|
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
|
||||||
}
|
}
|
|
@ -8,27 +8,24 @@ import * as permissions from "./security/permissions"
|
||||||
import * as accounts from "./cloud/accounts"
|
import * as accounts from "./cloud/accounts"
|
||||||
import * as installation from "./installation"
|
import * as installation from "./installation"
|
||||||
import env from "./environment"
|
import env from "./environment"
|
||||||
import tenancy from "./tenancy"
|
import * as tenancy from "./tenancy"
|
||||||
import featureFlags from "./featureFlags"
|
import * as featureFlags from "./featureFlags"
|
||||||
import * as sessions from "./security/sessions"
|
import * as sessions from "./security/sessions"
|
||||||
import * as deprovisioning from "./context/deprovision"
|
import * as deprovisioning from "./context/deprovision"
|
||||||
import auth from "./auth"
|
import * as auth from "./auth"
|
||||||
import * as constants from "./constants"
|
import * as constants from "./constants"
|
||||||
import * as dbConstants from "./db/constants"
|
|
||||||
import * as logging from "./logging"
|
import * as logging from "./logging"
|
||||||
import pino from "./pino"
|
import * as pino from "./pino"
|
||||||
import * as middleware from "./middleware"
|
import * as middleware from "./middleware"
|
||||||
import plugins from "./plugin"
|
import * as plugins from "./plugin"
|
||||||
import encryption from "./security/encryption"
|
import * as encryption from "./security/encryption"
|
||||||
import * as queue from "./queue"
|
import * as queue from "./queue"
|
||||||
import * as db from "./db"
|
import * as db from "./db"
|
||||||
|
import * as context from "./context"
|
||||||
// mimic the outer package exports
|
import * as cache from "./cache"
|
||||||
import * as objectStore from "./pkg/objectStore"
|
import * as objectStore from "./objectStore"
|
||||||
import * as utils from "./pkg/utils"
|
import * as redis from "./redis"
|
||||||
import redis from "./pkg/redis"
|
import * as utils from "./utils"
|
||||||
import cache from "./pkg/cache"
|
|
||||||
import context from "./pkg/context"
|
|
||||||
|
|
||||||
const init = (opts: any = {}) => {
|
const init = (opts: any = {}) => {
|
||||||
db.init(opts.db)
|
db.init(opts.db)
|
||||||
|
@ -37,7 +34,7 @@ const init = (opts: any = {}) => {
|
||||||
const core = {
|
const core = {
|
||||||
init,
|
init,
|
||||||
db,
|
db,
|
||||||
...dbConstants,
|
...constants,
|
||||||
redis,
|
redis,
|
||||||
locks: redis.redlock,
|
locks: redis.redlock,
|
||||||
objectStore,
|
objectStore,
|
||||||
|
@ -46,7 +43,6 @@ const core = {
|
||||||
cache,
|
cache,
|
||||||
auth,
|
auth,
|
||||||
constants,
|
constants,
|
||||||
...constants,
|
|
||||||
migrations,
|
migrations,
|
||||||
env,
|
env,
|
||||||
accounts,
|
accounts,
|
||||||
|
|
|
@ -1,16 +1,16 @@
|
||||||
import * as hashing from "./hashing"
|
import { newid } from "./utils"
|
||||||
import * as events from "./events"
|
import * as events from "./events"
|
||||||
import { StaticDatabases } from "./db/constants"
|
import { StaticDatabases } from "./db"
|
||||||
import { doWithDB } from "./db"
|
import { doWithDB } from "./db"
|
||||||
import { Installation, IdentityType } from "@budibase/types"
|
import { Installation, IdentityType } from "@budibase/types"
|
||||||
import * as context from "./context"
|
import * as context from "./context"
|
||||||
import semver from "semver"
|
import semver from "semver"
|
||||||
import { bustCache, withCache, TTL, CacheKeys } from "./cache/generic"
|
import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
|
||||||
|
|
||||||
const pkg = require("../package.json")
|
const pkg = require("../package.json")
|
||||||
|
|
||||||
export const getInstall = async (): Promise<Installation> => {
|
export const getInstall = async (): Promise<Installation> => {
|
||||||
return withCache(CacheKeys.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
|
return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
|
||||||
useTenancy: false,
|
useTenancy: false,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -28,7 +28,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
|
||||||
if (e.status === 404) {
|
if (e.status === 404) {
|
||||||
install = {
|
install = {
|
||||||
_id: StaticDatabases.PLATFORM_INFO.docs.install,
|
_id: StaticDatabases.PLATFORM_INFO.docs.install,
|
||||||
installId: hashing.newid(),
|
installId: newid(),
|
||||||
version: pkg.version,
|
version: pkg.version,
|
||||||
}
|
}
|
||||||
const resp = await platformDb.put(install)
|
const resp = await platformDb.put(install)
|
||||||
|
@ -50,7 +50,7 @@ const updateVersion = async (version: string): Promise<boolean> => {
|
||||||
const install = await getInstall()
|
const install = await getInstall()
|
||||||
install.version = version
|
install.version = version
|
||||||
await platformDb.put(install)
|
await platformDb.put(install)
|
||||||
await bustCache(CacheKeys.INSTALLATION)
|
await bustCache(CacheKey.INSTALLATION)
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
} catch (e: any) {
|
} catch (e: any) {
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
module.exports = async (ctx, next) => {
|
import { BBContext } from "@budibase/types"
|
||||||
|
|
||||||
|
export = async (ctx: BBContext, next: any) => {
|
||||||
if (
|
if (
|
||||||
!ctx.internal &&
|
!ctx.internal &&
|
||||||
(!ctx.user || !ctx.user.admin || !ctx.user.admin.global)
|
(!ctx.user || !ctx.user.admin || !ctx.user.admin.global)
|
|
@ -1,4 +0,0 @@
|
||||||
module.exports = async (ctx, next) => {
|
|
||||||
// Placeholder for audit log middleware
|
|
||||||
return next()
|
|
||||||
}
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
import { BBContext } from "@budibase/types"
|
||||||
|
|
||||||
|
export = async (ctx: BBContext | any, next: any) => {
|
||||||
|
// Placeholder for audit log middleware
|
||||||
|
return next()
|
||||||
|
}
|
|
@ -6,10 +6,13 @@ import { buildMatcherRegex, matches } from "./matchers"
|
||||||
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
|
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
|
||||||
import { getGlobalDB, doInTenant } from "../tenancy"
|
import { getGlobalDB, doInTenant } from "../tenancy"
|
||||||
import { decrypt } from "../security/encryption"
|
import { decrypt } from "../security/encryption"
|
||||||
const identity = require("../context/identity")
|
import * as identity from "../context/identity"
|
||||||
const env = require("../environment")
|
import env from "../environment"
|
||||||
|
import { BBContext, EndpointMatcher } from "@budibase/types"
|
||||||
|
|
||||||
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000
|
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
|
||||||
|
? parseInt(env.SESSION_UPDATE_PERIOD)
|
||||||
|
: 60 * 1000
|
||||||
|
|
||||||
interface FinaliseOpts {
|
interface FinaliseOpts {
|
||||||
authenticated?: boolean
|
authenticated?: boolean
|
||||||
|
@ -40,13 +43,13 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
|
||||||
return doInTenant(tenantId, async () => {
|
return doInTenant(tenantId, async () => {
|
||||||
const db = getGlobalDB()
|
const db = getGlobalDB()
|
||||||
// api key is encrypted in the database
|
// api key is encrypted in the database
|
||||||
const userId = await queryGlobalView(
|
const userId = (await queryGlobalView(
|
||||||
ViewName.BY_API_KEY,
|
ViewName.BY_API_KEY,
|
||||||
{
|
{
|
||||||
key: apiKey,
|
key: apiKey,
|
||||||
},
|
},
|
||||||
db
|
db
|
||||||
)
|
)) as string
|
||||||
if (userId) {
|
if (userId) {
|
||||||
return {
|
return {
|
||||||
valid: true,
|
valid: true,
|
||||||
|
@ -63,14 +66,14 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
|
||||||
* The tenancy modules should not be used here and it should be assumed that the tenancy context
|
* The tenancy modules should not be used here and it should be assumed that the tenancy context
|
||||||
* has not yet been populated.
|
* has not yet been populated.
|
||||||
*/
|
*/
|
||||||
export = (
|
export = function (
|
||||||
noAuthPatterns = [],
|
noAuthPatterns: EndpointMatcher[] = [],
|
||||||
opts: { publicAllowed: boolean; populateUser?: Function } = {
|
opts: { publicAllowed?: boolean; populateUser?: Function } = {
|
||||||
publicAllowed: false,
|
publicAllowed: false,
|
||||||
}
|
}
|
||||||
) => {
|
) {
|
||||||
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
|
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
|
||||||
return async (ctx: any, next: any) => {
|
return async (ctx: BBContext | any, next: any) => {
|
||||||
let publicEndpoint = false
|
let publicEndpoint = false
|
||||||
const version = ctx.request.headers[Header.API_VER]
|
const version = ctx.request.headers[Header.API_VER]
|
||||||
// the path is not authenticated
|
// the path is not authenticated
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
module.exports = async (ctx, next) => {
|
import { BBContext } from "@budibase/types"
|
||||||
|
|
||||||
|
export = async (ctx: BBContext, next: any) => {
|
||||||
if (
|
if (
|
||||||
!ctx.internal &&
|
!ctx.internal &&
|
||||||
(!ctx.user || !ctx.user.builder || !ctx.user.builder.global)
|
(!ctx.user || !ctx.user.builder || !ctx.user.builder.global)
|
|
@ -1,4 +1,6 @@
|
||||||
module.exports = async (ctx, next) => {
|
import { BBContext } from "@budibase/types"
|
||||||
|
|
||||||
|
export = async (ctx: BBContext, next: any) => {
|
||||||
if (
|
if (
|
||||||
!ctx.internal &&
|
!ctx.internal &&
|
||||||
(!ctx.user || !ctx.user.builder || !ctx.user.builder.global) &&
|
(!ctx.user || !ctx.user.builder || !ctx.user.builder.global) &&
|
|
@ -1,5 +1,6 @@
|
||||||
const { Header } = require("../constants")
|
import { Header } from "../constants"
|
||||||
const { buildMatcherRegex, matches } = require("./matchers")
|
import { buildMatcherRegex, matches } from "./matchers"
|
||||||
|
import { BBContext, EndpointMatcher } from "@budibase/types"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* GET, HEAD and OPTIONS methods are considered safe operations
|
* GET, HEAD and OPTIONS methods are considered safe operations
|
||||||
|
@ -31,9 +32,11 @@ const INCLUDED_CONTENT_TYPES = [
|
||||||
* https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern
|
* https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#synchronizer-token-pattern
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
module.exports = (opts = { noCsrfPatterns: [] }) => {
|
export = function (
|
||||||
|
opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] }
|
||||||
|
) {
|
||||||
const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
|
const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
|
||||||
return async (ctx, next) => {
|
return async (ctx: BBContext | any, next: any) => {
|
||||||
// don't apply for excluded paths
|
// don't apply for excluded paths
|
||||||
const found = matches(ctx, noCsrfOptions)
|
const found = matches(ctx, noCsrfOptions)
|
||||||
if (found) {
|
if (found) {
|
||||||
|
@ -62,7 +65,7 @@ module.exports = (opts = { noCsrfPatterns: [] }) => {
|
||||||
|
|
||||||
// apply csrf when there is a token in the session (new logins)
|
// apply csrf when there is a token in the session (new logins)
|
||||||
// in future there should be a hard requirement that the token is present
|
// in future there should be a hard requirement that the token is present
|
||||||
const userToken = ctx.user.csrfToken
|
const userToken = ctx.user?.csrfToken
|
||||||
if (!userToken) {
|
if (!userToken) {
|
||||||
return next()
|
return next()
|
||||||
}
|
}
|
|
@ -1,18 +1,18 @@
|
||||||
const jwt = require("./passport/jwt")
|
import * as jwt from "./passport/jwt"
|
||||||
const local = require("./passport/local")
|
import * as local from "./passport/local"
|
||||||
const google = require("./passport/google")
|
import * as google from "./passport/google"
|
||||||
const oidc = require("./passport/oidc")
|
import * as oidc from "./passport/oidc"
|
||||||
const { authError, ssoCallbackUrl } = require("./passport/utils")
|
import { authError, ssoCallbackUrl } from "./passport/utils"
|
||||||
const authenticated = require("./authenticated")
|
import authenticated from "./authenticated"
|
||||||
const auditLog = require("./auditLog")
|
import auditLog from "./auditLog"
|
||||||
const tenancy = require("./tenancy")
|
import tenancy from "./tenancy"
|
||||||
const internalApi = require("./internalApi")
|
import internalApi from "./internalApi"
|
||||||
const datasourceGoogle = require("./passport/datasource/google")
|
import * as datasourceGoogle from "./passport/datasource/google"
|
||||||
const csrf = require("./csrf")
|
import csrf from "./csrf"
|
||||||
const adminOnly = require("./adminOnly")
|
import adminOnly from "./adminOnly"
|
||||||
const builderOrAdmin = require("./builderOrAdmin")
|
import builderOrAdmin from "./builderOrAdmin"
|
||||||
const builderOnly = require("./builderOnly")
|
import builderOnly from "./builderOnly"
|
||||||
const joiValidator = require("./joi-validator")
|
import * as joiValidator from "./joi-validator"
|
||||||
|
|
||||||
const pkg = {
|
const pkg = {
|
||||||
google,
|
google,
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
const env = require("../environment")
|
import env from "../environment"
|
||||||
const { Header } = require("../constants")
|
import { Header } from "../constants"
|
||||||
|
import { BBContext } from "@budibase/types"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* API Key only endpoint.
|
* API Key only endpoint.
|
||||||
*/
|
*/
|
||||||
module.exports = async (ctx, next) => {
|
export = async (ctx: BBContext, next: any) => {
|
||||||
const apiKey = ctx.request.headers[Header.API_KEY]
|
const apiKey = ctx.request.headers[Header.API_KEY]
|
||||||
if (apiKey !== env.INTERNAL_API_KEY) {
|
if (apiKey !== env.INTERNAL_API_KEY) {
|
||||||
ctx.throw(403, "Unauthorized")
|
ctx.throw(403, "Unauthorized")
|
|
@ -1,21 +1,27 @@
|
||||||
const Joi = require("joi")
|
import Joi, { ObjectSchema } from "joi"
|
||||||
|
import { BBContext } from "@budibase/types"
|
||||||
|
|
||||||
function validate(schema, property) {
|
function validate(
|
||||||
|
schema: Joi.ObjectSchema | Joi.ArraySchema,
|
||||||
|
property: string
|
||||||
|
) {
|
||||||
// Return a Koa middleware function
|
// Return a Koa middleware function
|
||||||
return (ctx, next) => {
|
return (ctx: BBContext, next: any) => {
|
||||||
if (!schema) {
|
if (!schema) {
|
||||||
return next()
|
return next()
|
||||||
}
|
}
|
||||||
let params = null
|
let params = null
|
||||||
|
// @ts-ignore
|
||||||
|
let reqProp = ctx.request?.[property]
|
||||||
if (ctx[property] != null) {
|
if (ctx[property] != null) {
|
||||||
params = ctx[property]
|
params = ctx[property]
|
||||||
} else if (ctx.request[property] != null) {
|
} else if (reqProp != null) {
|
||||||
params = ctx.request[property]
|
params = reqProp
|
||||||
}
|
}
|
||||||
|
|
||||||
// not all schemas have the append property e.g. array schemas
|
// not all schemas have the append property e.g. array schemas
|
||||||
if (schema.append) {
|
if ((schema as Joi.ObjectSchema).append) {
|
||||||
schema = schema.append({
|
schema = (schema as Joi.ObjectSchema).append({
|
||||||
createdAt: Joi.any().optional(),
|
createdAt: Joi.any().optional(),
|
||||||
updatedAt: Joi.any().optional(),
|
updatedAt: Joi.any().optional(),
|
||||||
})
|
})
|
||||||
|
@ -30,10 +36,10 @@ function validate(schema, property) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports.body = schema => {
|
export function body(schema: Joi.ObjectSchema | Joi.ArraySchema) {
|
||||||
return validate(schema, "body")
|
return validate(schema, "body")
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports.params = schema => {
|
export function params(schema: Joi.ObjectSchema | Joi.ArraySchema) {
|
||||||
return validate(schema, "params")
|
return validate(schema, "params")
|
||||||
}
|
}
|
|
@ -1,11 +1,15 @@
|
||||||
const google = require("../google")
|
import * as google from "../google"
|
||||||
|
import { Cookie, Config } from "../../../constants"
|
||||||
|
import { clearCookie, getCookie } from "../../../utils"
|
||||||
|
import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db"
|
||||||
|
import environment from "../../../environment"
|
||||||
|
import { getGlobalDB } from "../../../tenancy"
|
||||||
|
import { BBContext, Database, SSOProfile } from "@budibase/types"
|
||||||
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
|
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
|
||||||
const { Cookie, Config } = require("../../../constants")
|
|
||||||
const { clearCookie, getCookie } = require("../../../utils")
|
type Passport = {
|
||||||
const { getScopedConfig, getPlatformUrl } = require("../../../db/utils")
|
authenticate: any
|
||||||
const { doWithDB } = require("../../../db")
|
}
|
||||||
const environment = require("../../../environment")
|
|
||||||
const { getGlobalDB } = require("../../../tenancy")
|
|
||||||
|
|
||||||
async function fetchGoogleCreds() {
|
async function fetchGoogleCreds() {
|
||||||
// try and get the config from the tenant
|
// try and get the config from the tenant
|
||||||
|
@ -22,7 +26,11 @@ async function fetchGoogleCreds() {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
async function preAuth(passport, ctx, next) {
|
export async function preAuth(
|
||||||
|
passport: Passport,
|
||||||
|
ctx: BBContext,
|
||||||
|
next: Function
|
||||||
|
) {
|
||||||
// get the relevant config
|
// get the relevant config
|
||||||
const googleConfig = await fetchGoogleCreds()
|
const googleConfig = await fetchGoogleCreds()
|
||||||
const platformUrl = await getPlatformUrl({ tenantAware: false })
|
const platformUrl = await getPlatformUrl({ tenantAware: false })
|
||||||
|
@ -41,7 +49,11 @@ async function preAuth(passport, ctx, next) {
|
||||||
})(ctx, next)
|
})(ctx, next)
|
||||||
}
|
}
|
||||||
|
|
||||||
async function postAuth(passport, ctx, next) {
|
export async function postAuth(
|
||||||
|
passport: Passport,
|
||||||
|
ctx: BBContext,
|
||||||
|
next: Function
|
||||||
|
) {
|
||||||
// get the relevant config
|
// get the relevant config
|
||||||
const config = await fetchGoogleCreds()
|
const config = await fetchGoogleCreds()
|
||||||
const platformUrl = await getPlatformUrl({ tenantAware: false })
|
const platformUrl = await getPlatformUrl({ tenantAware: false })
|
||||||
|
@ -56,15 +68,20 @@ async function postAuth(passport, ctx, next) {
|
||||||
clientSecret: config.clientSecret,
|
clientSecret: config.clientSecret,
|
||||||
callbackURL: callbackUrl,
|
callbackURL: callbackUrl,
|
||||||
},
|
},
|
||||||
(accessToken, refreshToken, profile, done) => {
|
(
|
||||||
|
accessToken: string,
|
||||||
|
refreshToken: string,
|
||||||
|
profile: SSOProfile,
|
||||||
|
done: Function
|
||||||
|
) => {
|
||||||
clearCookie(ctx, Cookie.DatasourceAuth)
|
clearCookie(ctx, Cookie.DatasourceAuth)
|
||||||
done(null, { accessToken, refreshToken })
|
done(null, { accessToken, refreshToken })
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
{ successRedirect: "/", failureRedirect: "/error" },
|
{ successRedirect: "/", failureRedirect: "/error" },
|
||||||
async (err, tokens) => {
|
async (err: any, tokens: string[]) => {
|
||||||
// update the DB for the datasource with all the user info
|
// update the DB for the datasource with all the user info
|
||||||
await doWithDB(authStateCookie.appId, async db => {
|
await doWithDB(authStateCookie.appId, async (db: Database) => {
|
||||||
const datasource = await db.get(authStateCookie.datasourceId)
|
const datasource = await db.get(authStateCookie.datasourceId)
|
||||||
if (!datasource.config) {
|
if (!datasource.config) {
|
||||||
datasource.config = {}
|
datasource.config = {}
|
||||||
|
@ -78,6 +95,3 @@ async function postAuth(passport, ctx, next) {
|
||||||
}
|
}
|
||||||
)(ctx, next)
|
)(ctx, next)
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.preAuth = preAuth
|
|
||||||
exports.postAuth = postAuth
|
|
|
@ -1,10 +1,15 @@
|
||||||
|
import { ssoCallbackUrl } from "./utils"
|
||||||
|
import { authenticateThirdParty } from "./third-party-common"
|
||||||
|
import { ConfigType, GoogleConfig, Database, SSOProfile } from "@budibase/types"
|
||||||
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
|
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
|
||||||
const { ssoCallbackUrl } = require("./utils")
|
|
||||||
const { authenticateThirdParty } = require("./third-party-common")
|
|
||||||
const { Config } = require("../../../constants")
|
|
||||||
|
|
||||||
const buildVerifyFn = saveUserFn => {
|
export function buildVerifyFn(saveUserFn?: Function) {
|
||||||
return (accessToken, refreshToken, profile, done) => {
|
return (
|
||||||
|
accessToken: string,
|
||||||
|
refreshToken: string,
|
||||||
|
profile: SSOProfile,
|
||||||
|
done: Function
|
||||||
|
) => {
|
||||||
const thirdPartyUser = {
|
const thirdPartyUser = {
|
||||||
provider: profile.provider, // should always be 'google'
|
provider: profile.provider, // should always be 'google'
|
||||||
providerType: "google",
|
providerType: "google",
|
||||||
|
@ -31,7 +36,11 @@ const buildVerifyFn = saveUserFn => {
|
||||||
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
||||||
* @returns Dynamically configured Passport Google Strategy
|
* @returns Dynamically configured Passport Google Strategy
|
||||||
*/
|
*/
|
||||||
exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
|
export async function strategyFactory(
|
||||||
|
config: GoogleConfig["config"],
|
||||||
|
callbackUrl: string,
|
||||||
|
saveUserFn?: Function
|
||||||
|
) {
|
||||||
try {
|
try {
|
||||||
const { clientID, clientSecret } = config
|
const { clientID, clientSecret } = config
|
||||||
|
|
||||||
|
@ -50,18 +59,15 @@ exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
|
||||||
},
|
},
|
||||||
verify
|
verify
|
||||||
)
|
)
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
console.error(err)
|
console.error(err)
|
||||||
throw new Error(
|
throw new Error(`Error constructing google authentication strategy: ${err}`)
|
||||||
`Error constructing google authentication strategy: ${err}`,
|
|
||||||
err
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getCallbackUrl = async function (db, config) {
|
export async function getCallbackUrl(
|
||||||
return ssoCallbackUrl(db, config, Config.GOOGLE)
|
db: Database,
|
||||||
|
config: { callbackURL?: string }
|
||||||
|
) {
|
||||||
|
return ssoCallbackUrl(db, config, ConfigType.GOOGLE)
|
||||||
}
|
}
|
||||||
|
|
||||||
// expose for testing
|
|
||||||
exports.buildVerifyFn = buildVerifyFn
|
|
|
@ -1,18 +0,0 @@
|
||||||
const { Cookie } = require("../../constants")
|
|
||||||
const env = require("../../environment")
|
|
||||||
const { authError } = require("./utils")
|
|
||||||
|
|
||||||
exports.options = {
|
|
||||||
secretOrKey: env.JWT_SECRET,
|
|
||||||
jwtFromRequest: function (ctx) {
|
|
||||||
return ctx.cookies.get(Cookie.Auth)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.authenticate = async function (jwt, done) {
|
|
||||||
try {
|
|
||||||
return done(null, jwt)
|
|
||||||
} catch (err) {
|
|
||||||
return authError(done, "JWT invalid", err)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -0,0 +1,19 @@
|
||||||
|
import { Cookie } from "../../constants"
|
||||||
|
import env from "../../environment"
|
||||||
|
import { authError } from "./utils"
|
||||||
|
import { BBContext } from "@budibase/types"
|
||||||
|
|
||||||
|
export const options = {
|
||||||
|
secretOrKey: env.JWT_SECRET,
|
||||||
|
jwtFromRequest: function (ctx: BBContext) {
|
||||||
|
return ctx.cookies.get(Cookie.Auth)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function authenticate(jwt: Function, done: Function) {
|
||||||
|
try {
|
||||||
|
return done(null, jwt)
|
||||||
|
} catch (err) {
|
||||||
|
return authError(done, "JWT invalid", err)
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,18 +1,18 @@
|
||||||
|
import { UserStatus } from "../../constants"
|
||||||
|
import { compare, newid } from "../../utils"
|
||||||
|
import env from "../../environment"
|
||||||
|
import * as users from "../../users"
|
||||||
|
import { authError } from "./utils"
|
||||||
|
import { createASession } from "../../security/sessions"
|
||||||
|
import { getTenantId } from "../../tenancy"
|
||||||
|
import { BBContext } from "@budibase/types"
|
||||||
const jwt = require("jsonwebtoken")
|
const jwt = require("jsonwebtoken")
|
||||||
const { UserStatus } = require("../../constants")
|
|
||||||
const { compare } = require("../../hashing")
|
|
||||||
const env = require("../../environment")
|
|
||||||
const users = require("../../users")
|
|
||||||
const { authError } = require("./utils")
|
|
||||||
const { newid } = require("../../hashing")
|
|
||||||
const { createASession } = require("../../security/sessions")
|
|
||||||
const { getTenantId } = require("../../tenancy")
|
|
||||||
|
|
||||||
const INVALID_ERR = "Invalid credentials"
|
const INVALID_ERR = "Invalid credentials"
|
||||||
const SSO_NO_PASSWORD = "SSO user does not have a password set"
|
const SSO_NO_PASSWORD = "SSO user does not have a password set"
|
||||||
const EXPIRED = "This account has expired. Please reset your password"
|
const EXPIRED = "This account has expired. Please reset your password"
|
||||||
|
|
||||||
exports.options = {
|
export const options = {
|
||||||
passReqToCallback: true,
|
passReqToCallback: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -24,7 +24,12 @@ exports.options = {
|
||||||
* @param {*} done callback from passport to return user information and errors
|
* @param {*} done callback from passport to return user information and errors
|
||||||
* @returns The authenticated user, or errors if they occur
|
* @returns The authenticated user, or errors if they occur
|
||||||
*/
|
*/
|
||||||
exports.authenticate = async function (ctx, email, password, done) {
|
export async function authenticate(
|
||||||
|
ctx: BBContext,
|
||||||
|
email: string,
|
||||||
|
password: string,
|
||||||
|
done: Function
|
||||||
|
) {
|
||||||
if (!email) return authError(done, "Email Required")
|
if (!email) return authError(done, "Email Required")
|
||||||
if (!password) return authError(done, "Password Required")
|
if (!password) return authError(done, "Password Required")
|
||||||
|
|
||||||
|
@ -56,9 +61,9 @@ exports.authenticate = async function (ctx, email, password, done) {
|
||||||
const sessionId = newid()
|
const sessionId = newid()
|
||||||
const tenantId = getTenantId()
|
const tenantId = getTenantId()
|
||||||
|
|
||||||
await createASession(dbUser._id, { sessionId, tenantId })
|
await createASession(dbUser._id!, { sessionId, tenantId })
|
||||||
|
|
||||||
dbUser.token = jwt.sign(
|
const token = jwt.sign(
|
||||||
{
|
{
|
||||||
userId: dbUser._id,
|
userId: dbUser._id,
|
||||||
sessionId,
|
sessionId,
|
||||||
|
@ -69,7 +74,10 @@ exports.authenticate = async function (ctx, email, password, done) {
|
||||||
// Remove users password in payload
|
// Remove users password in payload
|
||||||
delete dbUser.password
|
delete dbUser.password
|
||||||
|
|
||||||
return done(null, dbUser)
|
return done(null, {
|
||||||
|
...dbUser,
|
||||||
|
token,
|
||||||
|
})
|
||||||
} else {
|
} else {
|
||||||
return authError(done, INVALID_ERR)
|
return authError(done, INVALID_ERR)
|
||||||
}
|
}
|
|
@ -1,10 +1,23 @@
|
||||||
const fetch = require("node-fetch")
|
import fetch from "node-fetch"
|
||||||
|
import { authenticateThirdParty } from "./third-party-common"
|
||||||
|
import { ssoCallbackUrl } from "./utils"
|
||||||
|
import {
|
||||||
|
Config,
|
||||||
|
ConfigType,
|
||||||
|
OIDCInnerCfg,
|
||||||
|
Database,
|
||||||
|
SSOProfile,
|
||||||
|
ThirdPartyUser,
|
||||||
|
OIDCConfiguration,
|
||||||
|
} from "@budibase/types"
|
||||||
const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
|
const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
|
||||||
const { authenticateThirdParty } = require("./third-party-common")
|
|
||||||
const { ssoCallbackUrl } = require("./utils")
|
|
||||||
const { Config } = require("../../../constants")
|
|
||||||
|
|
||||||
const buildVerifyFn = saveUserFn => {
|
type JwtClaims = {
|
||||||
|
preferred_username: string
|
||||||
|
email: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildVerifyFn(saveUserFn?: Function) {
|
||||||
/**
|
/**
|
||||||
* @param {*} issuer The identity provider base URL
|
* @param {*} issuer The identity provider base URL
|
||||||
* @param {*} sub The user ID
|
* @param {*} sub The user ID
|
||||||
|
@ -17,17 +30,17 @@ const buildVerifyFn = saveUserFn => {
|
||||||
* @param {*} done The passport callback: err, user, info
|
* @param {*} done The passport callback: err, user, info
|
||||||
*/
|
*/
|
||||||
return async (
|
return async (
|
||||||
issuer,
|
issuer: string,
|
||||||
sub,
|
sub: string,
|
||||||
profile,
|
profile: SSOProfile,
|
||||||
jwtClaims,
|
jwtClaims: JwtClaims,
|
||||||
accessToken,
|
accessToken: string,
|
||||||
refreshToken,
|
refreshToken: string,
|
||||||
idToken,
|
idToken: string,
|
||||||
params,
|
params: any,
|
||||||
done
|
done: Function
|
||||||
) => {
|
) => {
|
||||||
const thirdPartyUser = {
|
const thirdPartyUser: ThirdPartyUser = {
|
||||||
// store the issuer info to enable sync in future
|
// store the issuer info to enable sync in future
|
||||||
provider: issuer,
|
provider: issuer,
|
||||||
providerType: "oidc",
|
providerType: "oidc",
|
||||||
|
@ -53,7 +66,7 @@ const buildVerifyFn = saveUserFn => {
|
||||||
* @param {*} profile The structured profile created by passport using the user info endpoint
|
* @param {*} profile The structured profile created by passport using the user info endpoint
|
||||||
* @param {*} jwtClaims The claims returned in the id token
|
* @param {*} jwtClaims The claims returned in the id token
|
||||||
*/
|
*/
|
||||||
function getEmail(profile, jwtClaims) {
|
function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
|
||||||
// profile not guaranteed to contain email e.g. github connected azure ad account
|
// profile not guaranteed to contain email e.g. github connected azure ad account
|
||||||
if (profile._json.email) {
|
if (profile._json.email) {
|
||||||
return profile._json.email
|
return profile._json.email
|
||||||
|
@ -77,7 +90,7 @@ function getEmail(profile, jwtClaims) {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
function validEmail(value) {
|
function validEmail(value: string) {
|
||||||
return (
|
return (
|
||||||
value &&
|
value &&
|
||||||
!!value.match(
|
!!value.match(
|
||||||
|
@ -91,19 +104,25 @@ function validEmail(value) {
|
||||||
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
||||||
* @returns Dynamically configured Passport OIDC Strategy
|
* @returns Dynamically configured Passport OIDC Strategy
|
||||||
*/
|
*/
|
||||||
exports.strategyFactory = async function (config, saveUserFn) {
|
export async function strategyFactory(
|
||||||
|
config: OIDCConfiguration,
|
||||||
|
saveUserFn?: Function
|
||||||
|
) {
|
||||||
try {
|
try {
|
||||||
const verify = buildVerifyFn(saveUserFn)
|
const verify = buildVerifyFn(saveUserFn)
|
||||||
const strategy = new OIDCStrategy(config, verify)
|
const strategy = new OIDCStrategy(config, verify)
|
||||||
strategy.name = "oidc"
|
strategy.name = "oidc"
|
||||||
return strategy
|
return strategy
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
console.error(err)
|
console.error(err)
|
||||||
throw new Error("Error constructing OIDC authentication strategy", err)
|
throw new Error(`Error constructing OIDC authentication strategy - ${err}`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
|
export async function fetchStrategyConfig(
|
||||||
|
enrichedConfig: OIDCInnerCfg,
|
||||||
|
callbackUrl?: string
|
||||||
|
): Promise<OIDCConfiguration> {
|
||||||
try {
|
try {
|
||||||
const { clientID, clientSecret, configUrl } = enrichedConfig
|
const { clientID, clientSecret, configUrl } = enrichedConfig
|
||||||
|
|
||||||
|
@ -135,13 +154,15 @@ exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error(err)
|
console.error(err)
|
||||||
throw new Error("Error constructing OIDC authentication configuration", err)
|
throw new Error(
|
||||||
|
`Error constructing OIDC authentication configuration - ${err}`
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getCallbackUrl = async function (db, config) {
|
export async function getCallbackUrl(
|
||||||
return ssoCallbackUrl(db, config, Config.OIDC)
|
db: Database,
|
||||||
|
config: { callbackURL?: string }
|
||||||
|
) {
|
||||||
|
return ssoCallbackUrl(db, config, ConfigType.OIDC)
|
||||||
}
|
}
|
||||||
|
|
||||||
// expose for testing
|
|
||||||
exports.buildVerifyFn = buildVerifyFn
|
|
|
@ -4,7 +4,7 @@ const { data } = require("./utilities/mock-data")
|
||||||
const { DEFAULT_TENANT_ID } = require("../../../constants")
|
const { DEFAULT_TENANT_ID } = require("../../../constants")
|
||||||
|
|
||||||
const { generateGlobalUserID } = require("../../../db/utils")
|
const { generateGlobalUserID } = require("../../../db/utils")
|
||||||
const { newid } = require("../../../hashing")
|
const { newid } = require("../../../utils")
|
||||||
const { doWithGlobalDB, doInTenant } = require("../../../tenancy")
|
const { doWithGlobalDB, doInTenant } = require("../../../tenancy")
|
||||||
|
|
||||||
const done = jest.fn()
|
const done = jest.fn()
|
||||||
|
|
|
@ -1,21 +1,22 @@
|
||||||
const env = require("../../environment")
|
import env from "../../environment"
|
||||||
|
import { generateGlobalUserID } from "../../db"
|
||||||
|
import { authError } from "./utils"
|
||||||
|
import { newid } from "../../utils"
|
||||||
|
import { createASession } from "../../security/sessions"
|
||||||
|
import * as users from "../../users"
|
||||||
|
import { getGlobalDB, getTenantId } from "../../tenancy"
|
||||||
|
import fetch from "node-fetch"
|
||||||
|
import { ThirdPartyUser } from "@budibase/types"
|
||||||
const jwt = require("jsonwebtoken")
|
const jwt = require("jsonwebtoken")
|
||||||
const { generateGlobalUserID } = require("../../db/utils")
|
|
||||||
const { authError } = require("./utils")
|
|
||||||
const { newid } = require("../../hashing")
|
|
||||||
const { createASession } = require("../../security/sessions")
|
|
||||||
const users = require("../../users")
|
|
||||||
const { getGlobalDB, getTenantId } = require("../../tenancy")
|
|
||||||
const fetch = require("node-fetch")
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Common authentication logic for third parties. e.g. OAuth, OIDC.
|
* Common authentication logic for third parties. e.g. OAuth, OIDC.
|
||||||
*/
|
*/
|
||||||
exports.authenticateThirdParty = async function (
|
export async function authenticateThirdParty(
|
||||||
thirdPartyUser,
|
thirdPartyUser: ThirdPartyUser,
|
||||||
requireLocalAccount = true,
|
requireLocalAccount: boolean = true,
|
||||||
done,
|
done: Function,
|
||||||
saveUserFn
|
saveUserFn?: Function
|
||||||
) {
|
) {
|
||||||
if (!saveUserFn) {
|
if (!saveUserFn) {
|
||||||
throw new Error("Save user function must be provided")
|
throw new Error("Save user function must be provided")
|
||||||
|
@ -39,7 +40,7 @@ exports.authenticateThirdParty = async function (
|
||||||
// try to load by id
|
// try to load by id
|
||||||
try {
|
try {
|
||||||
dbUser = await db.get(userId)
|
dbUser = await db.get(userId)
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
// abort when not 404 error
|
// abort when not 404 error
|
||||||
if (!err.status || err.status !== 404) {
|
if (!err.status || err.status !== 404) {
|
||||||
return authError(
|
return authError(
|
||||||
|
@ -81,7 +82,7 @@ exports.authenticateThirdParty = async function (
|
||||||
// create or sync the user
|
// create or sync the user
|
||||||
try {
|
try {
|
||||||
await saveUserFn(dbUser, false, false)
|
await saveUserFn(dbUser, false, false)
|
||||||
} catch (err) {
|
} catch (err: any) {
|
||||||
return authError(done, err)
|
return authError(done, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -104,13 +105,16 @@ exports.authenticateThirdParty = async function (
|
||||||
return done(null, dbUser)
|
return done(null, dbUser)
|
||||||
}
|
}
|
||||||
|
|
||||||
async function syncProfilePicture(user, thirdPartyUser) {
|
async function syncProfilePicture(
|
||||||
const pictureUrl = thirdPartyUser.profile._json.picture
|
user: ThirdPartyUser,
|
||||||
|
thirdPartyUser: ThirdPartyUser
|
||||||
|
) {
|
||||||
|
const pictureUrl = thirdPartyUser.profile?._json.picture
|
||||||
if (pictureUrl) {
|
if (pictureUrl) {
|
||||||
const response = await fetch(pictureUrl)
|
const response = await fetch(pictureUrl)
|
||||||
|
|
||||||
if (response.status === 200) {
|
if (response.status === 200) {
|
||||||
const type = response.headers.get("content-type")
|
const type = response.headers.get("content-type") as string
|
||||||
if (type.startsWith("image/")) {
|
if (type.startsWith("image/")) {
|
||||||
user.pictureUrl = pictureUrl
|
user.pictureUrl = pictureUrl
|
||||||
}
|
}
|
||||||
|
@ -123,7 +127,7 @@ async function syncProfilePicture(user, thirdPartyUser) {
|
||||||
/**
|
/**
|
||||||
* @returns a user that has been sync'd with third party information
|
* @returns a user that has been sync'd with third party information
|
||||||
*/
|
*/
|
||||||
async function syncUser(user, thirdPartyUser) {
|
async function syncUser(user: ThirdPartyUser, thirdPartyUser: ThirdPartyUser) {
|
||||||
// provider
|
// provider
|
||||||
user.provider = thirdPartyUser.provider
|
user.provider = thirdPartyUser.provider
|
||||||
user.providerType = thirdPartyUser.providerType
|
user.providerType = thirdPartyUser.providerType
|
|
@ -1,6 +1,6 @@
|
||||||
const { isMultiTenant, getTenantId } = require("../../tenancy")
|
import { isMultiTenant, getTenantId } from "../../tenancy"
|
||||||
const { getScopedConfig } = require("../../db/utils")
|
import { getScopedConfig } from "../../db"
|
||||||
const { Config } = require("../../constants")
|
import { ConfigType, Database, Config } from "@budibase/types"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Utility to handle authentication errors.
|
* Utility to handle authentication errors.
|
||||||
|
@ -10,7 +10,7 @@ const { Config } = require("../../constants")
|
||||||
* @param {*} err (Optional) error that will be logged
|
* @param {*} err (Optional) error that will be logged
|
||||||
*/
|
*/
|
||||||
|
|
||||||
exports.authError = function (done, message, err = null) {
|
export function authError(done: Function, message: string, err?: any) {
|
||||||
return done(
|
return done(
|
||||||
err,
|
err,
|
||||||
null, // never return a user
|
null, // never return a user
|
||||||
|
@ -18,13 +18,17 @@ exports.authError = function (done, message, err = null) {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.ssoCallbackUrl = async (db, config, type) => {
|
export async function ssoCallbackUrl(
|
||||||
|
db: Database,
|
||||||
|
config?: { callbackURL?: string },
|
||||||
|
type?: ConfigType
|
||||||
|
) {
|
||||||
// incase there is a callback URL from before
|
// incase there is a callback URL from before
|
||||||
if (config && config.callbackURL) {
|
if (config && config.callbackURL) {
|
||||||
return config.callbackURL
|
return config.callbackURL
|
||||||
}
|
}
|
||||||
const publicConfig = await getScopedConfig(db, {
|
const publicConfig = await getScopedConfig(db, {
|
||||||
type: Config.SETTINGS,
|
type: ConfigType.SETTINGS,
|
||||||
})
|
})
|
||||||
|
|
||||||
let callbackUrl = `/api/global/auth`
|
let callbackUrl = `/api/global/auth`
|
|
@ -8,15 +8,15 @@ import {
|
||||||
TenantResolutionStrategy,
|
TenantResolutionStrategy,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
|
|
||||||
const tenancy = (
|
export = function (
|
||||||
allowQueryStringPatterns: EndpointMatcher[],
|
allowQueryStringPatterns: EndpointMatcher[],
|
||||||
noTenancyPatterns: EndpointMatcher[],
|
noTenancyPatterns: EndpointMatcher[],
|
||||||
opts = { noTenancyRequired: false }
|
opts: { noTenancyRequired?: boolean } = { noTenancyRequired: false }
|
||||||
) => {
|
) {
|
||||||
const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
|
const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
|
||||||
const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
|
const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
|
||||||
|
|
||||||
return async function (ctx: BBContext, next: any) {
|
return async function (ctx: BBContext | any, next: any) {
|
||||||
const allowNoTenant =
|
const allowNoTenant =
|
||||||
opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
|
opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
|
||||||
const tenantOpts: GetTenantIdOptions = {
|
const tenantOpts: GetTenantIdOptions = {
|
||||||
|
@ -33,5 +33,3 @@ const tenancy = (
|
||||||
return doInTenant(tenantId, next)
|
return doInTenant(tenantId, next)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export = tenancy
|
|
||||||
|
|
|
@ -3,7 +3,7 @@ const { runMigrations, getMigrationsDoc } = require("../index")
|
||||||
const { getDB } = require("../../db")
|
const { getDB } = require("../../db")
|
||||||
const {
|
const {
|
||||||
StaticDatabases,
|
StaticDatabases,
|
||||||
} = require("../../db/utils")
|
} = require("../../constants")
|
||||||
|
|
||||||
let db
|
let db
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
import { v4 } from "uuid"
|
||||||
|
|
||||||
|
export function newid() {
|
||||||
|
return v4().replace(/-/g, "")
|
||||||
|
}
|
|
@ -1,426 +1,2 @@
|
||||||
const sanitize = require("sanitize-s3-objectkey")
|
export * from "./objectStore"
|
||||||
import AWS from "aws-sdk"
|
export * from "./utils"
|
||||||
import stream from "stream"
|
|
||||||
import fetch from "node-fetch"
|
|
||||||
import tar from "tar-fs"
|
|
||||||
const zlib = require("zlib")
|
|
||||||
import { promisify } from "util"
|
|
||||||
import { join } from "path"
|
|
||||||
import fs from "fs"
|
|
||||||
import env from "../environment"
|
|
||||||
import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
|
|
||||||
import { v4 } from "uuid"
|
|
||||||
import { APP_PREFIX, APP_DEV_PREFIX } from "../db/utils"
|
|
||||||
|
|
||||||
const streamPipeline = promisify(stream.pipeline)
|
|
||||||
// use this as a temporary store of buckets that are being created
|
|
||||||
const STATE = {
|
|
||||||
bucketCreationPromises: {},
|
|
||||||
}
|
|
||||||
|
|
||||||
type ListParams = {
|
|
||||||
ContinuationToken?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
type UploadParams = {
|
|
||||||
bucket: string
|
|
||||||
filename: string
|
|
||||||
path: string
|
|
||||||
type?: string
|
|
||||||
// can be undefined, we will remove it
|
|
||||||
metadata?: {
|
|
||||||
[key: string]: string | undefined
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const CONTENT_TYPE_MAP: any = {
|
|
||||||
txt: "text/plain",
|
|
||||||
html: "text/html",
|
|
||||||
css: "text/css",
|
|
||||||
js: "application/javascript",
|
|
||||||
json: "application/json",
|
|
||||||
gz: "application/gzip",
|
|
||||||
}
|
|
||||||
const STRING_CONTENT_TYPES = [
|
|
||||||
CONTENT_TYPE_MAP.html,
|
|
||||||
CONTENT_TYPE_MAP.css,
|
|
||||||
CONTENT_TYPE_MAP.js,
|
|
||||||
CONTENT_TYPE_MAP.json,
|
|
||||||
]
|
|
||||||
|
|
||||||
// does normal sanitization and then swaps dev apps to apps
|
|
||||||
export function sanitizeKey(input: string) {
|
|
||||||
return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
|
|
||||||
}
|
|
||||||
|
|
||||||
// simply handles the dev app to app conversion
|
|
||||||
export function sanitizeBucket(input: string) {
|
|
||||||
return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
|
|
||||||
}
|
|
||||||
|
|
||||||
function publicPolicy(bucketName: string) {
|
|
||||||
return {
|
|
||||||
Version: "2012-10-17",
|
|
||||||
Statement: [
|
|
||||||
{
|
|
||||||
Effect: "Allow",
|
|
||||||
Principal: {
|
|
||||||
AWS: ["*"],
|
|
||||||
},
|
|
||||||
Action: "s3:GetObject",
|
|
||||||
Resource: [`arn:aws:s3:::${bucketName}/*`],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const PUBLIC_BUCKETS = [
|
|
||||||
ObjectStoreBuckets.APPS,
|
|
||||||
ObjectStoreBuckets.GLOBAL,
|
|
||||||
ObjectStoreBuckets.PLUGINS,
|
|
||||||
]
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Gets a connection to the object store using the S3 SDK.
|
|
||||||
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
|
|
||||||
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
|
|
||||||
* @constructor
|
|
||||||
*/
|
|
||||||
export const ObjectStore = (bucket: string) => {
|
|
||||||
const config: any = {
|
|
||||||
s3ForcePathStyle: true,
|
|
||||||
signatureVersion: "v4",
|
|
||||||
apiVersion: "2006-03-01",
|
|
||||||
accessKeyId: env.MINIO_ACCESS_KEY,
|
|
||||||
secretAccessKey: env.MINIO_SECRET_KEY,
|
|
||||||
region: env.AWS_REGION,
|
|
||||||
}
|
|
||||||
if (bucket) {
|
|
||||||
config.params = {
|
|
||||||
Bucket: sanitizeBucket(bucket),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (env.MINIO_URL) {
|
|
||||||
config.endpoint = env.MINIO_URL
|
|
||||||
}
|
|
||||||
return new AWS.S3(config)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Given an object store and a bucket name this will make sure the bucket exists,
|
|
||||||
* if it does not exist then it will create it.
|
|
||||||
*/
|
|
||||||
export const makeSureBucketExists = async (client: any, bucketName: string) => {
|
|
||||||
bucketName = sanitizeBucket(bucketName)
|
|
||||||
try {
|
|
||||||
await client
|
|
||||||
.headBucket({
|
|
||||||
Bucket: bucketName,
|
|
||||||
})
|
|
||||||
.promise()
|
|
||||||
} catch (err: any) {
|
|
||||||
const promises: any = STATE.bucketCreationPromises
|
|
||||||
const doesntExist = err.statusCode === 404,
|
|
||||||
noAccess = err.statusCode === 403
|
|
||||||
if (promises[bucketName]) {
|
|
||||||
await promises[bucketName]
|
|
||||||
} else if (doesntExist || noAccess) {
|
|
||||||
if (doesntExist) {
|
|
||||||
// bucket doesn't exist create it
|
|
||||||
promises[bucketName] = client
|
|
||||||
.createBucket({
|
|
||||||
Bucket: bucketName,
|
|
||||||
})
|
|
||||||
.promise()
|
|
||||||
await promises[bucketName]
|
|
||||||
delete promises[bucketName]
|
|
||||||
}
|
|
||||||
// public buckets are quite hidden in the system, make sure
|
|
||||||
// no bucket is set accidentally
|
|
||||||
if (PUBLIC_BUCKETS.includes(bucketName)) {
|
|
||||||
await client
|
|
||||||
.putBucketPolicy({
|
|
||||||
Bucket: bucketName,
|
|
||||||
Policy: JSON.stringify(publicPolicy(bucketName)),
|
|
||||||
})
|
|
||||||
.promise()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
throw new Error("Unable to write to object store bucket.")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Uploads the contents of a file given the required parameters, useful when
|
|
||||||
* temp files in use (for example file uploaded as an attachment).
|
|
||||||
*/
|
|
||||||
export const upload = async ({
|
|
||||||
bucket: bucketName,
|
|
||||||
filename,
|
|
||||||
path,
|
|
||||||
type,
|
|
||||||
metadata,
|
|
||||||
}: UploadParams) => {
|
|
||||||
const extension = filename.split(".").pop()
|
|
||||||
const fileBytes = fs.readFileSync(path)
|
|
||||||
|
|
||||||
const objectStore = ObjectStore(bucketName)
|
|
||||||
await makeSureBucketExists(objectStore, bucketName)
|
|
||||||
|
|
||||||
let contentType = type
|
|
||||||
if (!contentType) {
|
|
||||||
contentType = extension
|
|
||||||
? CONTENT_TYPE_MAP[extension.toLowerCase()]
|
|
||||||
: CONTENT_TYPE_MAP.txt
|
|
||||||
}
|
|
||||||
const config: any = {
|
|
||||||
// windows file paths need to be converted to forward slashes for s3
|
|
||||||
Key: sanitizeKey(filename),
|
|
||||||
Body: fileBytes,
|
|
||||||
ContentType: contentType,
|
|
||||||
}
|
|
||||||
if (metadata && typeof metadata === "object") {
|
|
||||||
// remove any nullish keys from the metadata object, as these may be considered invalid
|
|
||||||
for (let key of Object.keys(metadata)) {
|
|
||||||
if (!metadata[key] || typeof metadata[key] !== "string") {
|
|
||||||
delete metadata[key]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
config.Metadata = metadata
|
|
||||||
}
|
|
||||||
return objectStore.upload(config).promise()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Similar to the upload function but can be used to send a file stream
|
|
||||||
* through to the object store.
|
|
||||||
*/
|
|
||||||
export const streamUpload = async (
|
|
||||||
bucketName: string,
|
|
||||||
filename: string,
|
|
||||||
stream: any,
|
|
||||||
extra = {}
|
|
||||||
) => {
|
|
||||||
const objectStore = ObjectStore(bucketName)
|
|
||||||
await makeSureBucketExists(objectStore, bucketName)
|
|
||||||
|
|
||||||
// Set content type for certain known extensions
|
|
||||||
if (filename?.endsWith(".js")) {
|
|
||||||
extra = {
|
|
||||||
...extra,
|
|
||||||
ContentType: "application/javascript",
|
|
||||||
}
|
|
||||||
} else if (filename?.endsWith(".svg")) {
|
|
||||||
extra = {
|
|
||||||
...extra,
|
|
||||||
ContentType: "image",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const params = {
|
|
||||||
Bucket: sanitizeBucket(bucketName),
|
|
||||||
Key: sanitizeKey(filename),
|
|
||||||
Body: stream,
|
|
||||||
...extra,
|
|
||||||
}
|
|
||||||
return objectStore.upload(params).promise()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* retrieves the contents of a file from the object store, if it is a known content type it
|
|
||||||
* will be converted, otherwise it will be returned as a buffer stream.
|
|
||||||
*/
|
|
||||||
export const retrieve = async (bucketName: string, filepath: string) => {
|
|
||||||
const objectStore = ObjectStore(bucketName)
|
|
||||||
const params = {
|
|
||||||
Bucket: sanitizeBucket(bucketName),
|
|
||||||
Key: sanitizeKey(filepath),
|
|
||||||
}
|
|
||||||
const response: any = await objectStore.getObject(params).promise()
|
|
||||||
// currently these are all strings
|
|
||||||
if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
|
|
||||||
return response.Body.toString("utf8")
|
|
||||||
} else {
|
|
||||||
return response.Body
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const listAllObjects = async (bucketName: string, path: string) => {
|
|
||||||
const objectStore = ObjectStore(bucketName)
|
|
||||||
const list = (params: ListParams = {}) => {
|
|
||||||
return objectStore
|
|
||||||
.listObjectsV2({
|
|
||||||
...params,
|
|
||||||
Bucket: sanitizeBucket(bucketName),
|
|
||||||
Prefix: sanitizeKey(path),
|
|
||||||
})
|
|
||||||
.promise()
|
|
||||||
}
|
|
||||||
let isTruncated = false,
|
|
||||||
token,
|
|
||||||
objects: AWS.S3.Types.Object[] = []
|
|
||||||
do {
|
|
||||||
let params: ListParams = {}
|
|
||||||
if (token) {
|
|
||||||
params.ContinuationToken = token
|
|
||||||
}
|
|
||||||
const response = await list(params)
|
|
||||||
if (response.Contents) {
|
|
||||||
objects = objects.concat(response.Contents)
|
|
||||||
}
|
|
||||||
isTruncated = !!response.IsTruncated
|
|
||||||
} while (isTruncated)
|
|
||||||
return objects
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Same as retrieval function but puts to a temporary file.
|
|
||||||
*/
|
|
||||||
export const retrieveToTmp = async (bucketName: string, filepath: string) => {
|
|
||||||
bucketName = sanitizeBucket(bucketName)
|
|
||||||
filepath = sanitizeKey(filepath)
|
|
||||||
const data = await retrieve(bucketName, filepath)
|
|
||||||
const outputPath = join(budibaseTempDir(), v4())
|
|
||||||
fs.writeFileSync(outputPath, data)
|
|
||||||
return outputPath
|
|
||||||
}
|
|
||||||
|
|
||||||
export const retrieveDirectory = async (bucketName: string, path: string) => {
|
|
||||||
let writePath = join(budibaseTempDir(), v4())
|
|
||||||
fs.mkdirSync(writePath)
|
|
||||||
const objects = await listAllObjects(bucketName, path)
|
|
||||||
let fullObjects = await Promise.all(
|
|
||||||
objects.map(obj => retrieve(bucketName, obj.Key!))
|
|
||||||
)
|
|
||||||
let count = 0
|
|
||||||
for (let obj of objects) {
|
|
||||||
const filename = obj.Key!
|
|
||||||
const data = fullObjects[count++]
|
|
||||||
const possiblePath = filename.split("/")
|
|
||||||
if (possiblePath.length > 1) {
|
|
||||||
const dirs = possiblePath.slice(0, possiblePath.length - 1)
|
|
||||||
fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
|
|
||||||
}
|
|
||||||
fs.writeFileSync(join(writePath, ...possiblePath), data)
|
|
||||||
}
|
|
||||||
return writePath
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Delete a single file.
|
|
||||||
*/
|
|
||||||
export const deleteFile = async (bucketName: string, filepath: string) => {
|
|
||||||
const objectStore = ObjectStore(bucketName)
|
|
||||||
await makeSureBucketExists(objectStore, bucketName)
|
|
||||||
const params = {
|
|
||||||
Bucket: bucketName,
|
|
||||||
Key: filepath,
|
|
||||||
}
|
|
||||||
return objectStore.deleteObject(params)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
|
|
||||||
const objectStore = ObjectStore(bucketName)
|
|
||||||
await makeSureBucketExists(objectStore, bucketName)
|
|
||||||
const params = {
|
|
||||||
Bucket: bucketName,
|
|
||||||
Delete: {
|
|
||||||
Objects: filepaths.map((path: any) => ({ Key: path })),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
return objectStore.deleteObjects(params).promise()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Delete a path, including everything within.
|
|
||||||
*/
|
|
||||||
export const deleteFolder = async (
|
|
||||||
bucketName: string,
|
|
||||||
folder: string
|
|
||||||
): Promise<any> => {
|
|
||||||
bucketName = sanitizeBucket(bucketName)
|
|
||||||
folder = sanitizeKey(folder)
|
|
||||||
const client = ObjectStore(bucketName)
|
|
||||||
const listParams = {
|
|
||||||
Bucket: bucketName,
|
|
||||||
Prefix: folder,
|
|
||||||
}
|
|
||||||
|
|
||||||
let response: any = await client.listObjects(listParams).promise()
|
|
||||||
if (response.Contents.length === 0) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const deleteParams: any = {
|
|
||||||
Bucket: bucketName,
|
|
||||||
Delete: {
|
|
||||||
Objects: [],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
response.Contents.forEach((content: any) => {
|
|
||||||
deleteParams.Delete.Objects.push({ Key: content.Key })
|
|
||||||
})
|
|
||||||
|
|
||||||
response = await client.deleteObjects(deleteParams).promise()
|
|
||||||
// can only empty 1000 items at once
|
|
||||||
if (response.Deleted.length === 1000) {
|
|
||||||
return deleteFolder(bucketName, folder)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const uploadDirectory = async (
|
|
||||||
bucketName: string,
|
|
||||||
localPath: string,
|
|
||||||
bucketPath: string
|
|
||||||
) => {
|
|
||||||
bucketName = sanitizeBucket(bucketName)
|
|
||||||
let uploads = []
|
|
||||||
const files = fs.readdirSync(localPath, { withFileTypes: true })
|
|
||||||
for (let file of files) {
|
|
||||||
const path = sanitizeKey(join(bucketPath, file.name))
|
|
||||||
const local = join(localPath, file.name)
|
|
||||||
if (file.isDirectory()) {
|
|
||||||
uploads.push(uploadDirectory(bucketName, local, path))
|
|
||||||
} else {
|
|
||||||
uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
await Promise.all(uploads)
|
|
||||||
return files
|
|
||||||
}
|
|
||||||
|
|
||||||
export const downloadTarballDirect = async (
|
|
||||||
url: string,
|
|
||||||
path: string,
|
|
||||||
headers = {}
|
|
||||||
) => {
|
|
||||||
path = sanitizeKey(path)
|
|
||||||
const response = await fetch(url, { headers })
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`unexpected response ${response.statusText}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
|
|
||||||
}
|
|
||||||
|
|
||||||
export const downloadTarball = async (
|
|
||||||
url: string,
|
|
||||||
bucketName: string,
|
|
||||||
path: string
|
|
||||||
) => {
|
|
||||||
bucketName = sanitizeBucket(bucketName)
|
|
||||||
path = sanitizeKey(path)
|
|
||||||
const response = await fetch(url)
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`unexpected response ${response.statusText}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const tmpPath = join(budibaseTempDir(), path)
|
|
||||||
await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
|
|
||||||
if (!env.isTest() && env.SELF_HOSTED) {
|
|
||||||
await uploadDirectory(bucketName, tmpPath, path)
|
|
||||||
}
|
|
||||||
// return the temporary path incase there is a use for it
|
|
||||||
return tmpPath
|
|
||||||
}
|
|
||||||
|
|
|
@ -0,0 +1,426 @@
|
||||||
|
const sanitize = require("sanitize-s3-objectkey")
|
||||||
|
import AWS from "aws-sdk"
|
||||||
|
import stream from "stream"
|
||||||
|
import fetch from "node-fetch"
|
||||||
|
import tar from "tar-fs"
|
||||||
|
const zlib = require("zlib")
|
||||||
|
import { promisify } from "util"
|
||||||
|
import { join } from "path"
|
||||||
|
import fs from "fs"
|
||||||
|
import env from "../environment"
|
||||||
|
import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
|
||||||
|
import { v4 } from "uuid"
|
||||||
|
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
|
||||||
|
|
||||||
|
const streamPipeline = promisify(stream.pipeline)
|
||||||
|
// use this as a temporary store of buckets that are being created
|
||||||
|
const STATE = {
|
||||||
|
bucketCreationPromises: {},
|
||||||
|
}
|
||||||
|
|
||||||
|
type ListParams = {
|
||||||
|
ContinuationToken?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
type UploadParams = {
|
||||||
|
bucket: string
|
||||||
|
filename: string
|
||||||
|
path: string
|
||||||
|
type?: string
|
||||||
|
// can be undefined, we will remove it
|
||||||
|
metadata?: {
|
||||||
|
[key: string]: string | undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const CONTENT_TYPE_MAP: any = {
|
||||||
|
txt: "text/plain",
|
||||||
|
html: "text/html",
|
||||||
|
css: "text/css",
|
||||||
|
js: "application/javascript",
|
||||||
|
json: "application/json",
|
||||||
|
gz: "application/gzip",
|
||||||
|
}
|
||||||
|
const STRING_CONTENT_TYPES = [
|
||||||
|
CONTENT_TYPE_MAP.html,
|
||||||
|
CONTENT_TYPE_MAP.css,
|
||||||
|
CONTENT_TYPE_MAP.js,
|
||||||
|
CONTENT_TYPE_MAP.json,
|
||||||
|
]
|
||||||
|
|
||||||
|
// does normal sanitization and then swaps dev apps to apps
|
||||||
|
export function sanitizeKey(input: string) {
|
||||||
|
return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
|
||||||
|
}
|
||||||
|
|
||||||
|
// simply handles the dev app to app conversion
|
||||||
|
export function sanitizeBucket(input: string) {
|
||||||
|
return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
|
||||||
|
}
|
||||||
|
|
||||||
|
function publicPolicy(bucketName: string) {
|
||||||
|
return {
|
||||||
|
Version: "2012-10-17",
|
||||||
|
Statement: [
|
||||||
|
{
|
||||||
|
Effect: "Allow",
|
||||||
|
Principal: {
|
||||||
|
AWS: ["*"],
|
||||||
|
},
|
||||||
|
Action: "s3:GetObject",
|
||||||
|
Resource: [`arn:aws:s3:::${bucketName}/*`],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const PUBLIC_BUCKETS = [
|
||||||
|
ObjectStoreBuckets.APPS,
|
||||||
|
ObjectStoreBuckets.GLOBAL,
|
||||||
|
ObjectStoreBuckets.PLUGINS,
|
||||||
|
]
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets a connection to the object store using the S3 SDK.
|
||||||
|
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
|
||||||
|
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
export const ObjectStore = (bucket: string) => {
|
||||||
|
const config: any = {
|
||||||
|
s3ForcePathStyle: true,
|
||||||
|
signatureVersion: "v4",
|
||||||
|
apiVersion: "2006-03-01",
|
||||||
|
accessKeyId: env.MINIO_ACCESS_KEY,
|
||||||
|
secretAccessKey: env.MINIO_SECRET_KEY,
|
||||||
|
region: env.AWS_REGION,
|
||||||
|
}
|
||||||
|
if (bucket) {
|
||||||
|
config.params = {
|
||||||
|
Bucket: sanitizeBucket(bucket),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (env.MINIO_URL) {
|
||||||
|
config.endpoint = env.MINIO_URL
|
||||||
|
}
|
||||||
|
return new AWS.S3(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given an object store and a bucket name this will make sure the bucket exists,
|
||||||
|
* if it does not exist then it will create it.
|
||||||
|
*/
|
||||||
|
export const makeSureBucketExists = async (client: any, bucketName: string) => {
|
||||||
|
bucketName = sanitizeBucket(bucketName)
|
||||||
|
try {
|
||||||
|
await client
|
||||||
|
.headBucket({
|
||||||
|
Bucket: bucketName,
|
||||||
|
})
|
||||||
|
.promise()
|
||||||
|
} catch (err: any) {
|
||||||
|
const promises: any = STATE.bucketCreationPromises
|
||||||
|
const doesntExist = err.statusCode === 404,
|
||||||
|
noAccess = err.statusCode === 403
|
||||||
|
if (promises[bucketName]) {
|
||||||
|
await promises[bucketName]
|
||||||
|
} else if (doesntExist || noAccess) {
|
||||||
|
if (doesntExist) {
|
||||||
|
// bucket doesn't exist create it
|
||||||
|
promises[bucketName] = client
|
||||||
|
.createBucket({
|
||||||
|
Bucket: bucketName,
|
||||||
|
})
|
||||||
|
.promise()
|
||||||
|
await promises[bucketName]
|
||||||
|
delete promises[bucketName]
|
||||||
|
}
|
||||||
|
// public buckets are quite hidden in the system, make sure
|
||||||
|
// no bucket is set accidentally
|
||||||
|
if (PUBLIC_BUCKETS.includes(bucketName)) {
|
||||||
|
await client
|
||||||
|
.putBucketPolicy({
|
||||||
|
Bucket: bucketName,
|
||||||
|
Policy: JSON.stringify(publicPolicy(bucketName)),
|
||||||
|
})
|
||||||
|
.promise()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error("Unable to write to object store bucket.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Uploads the contents of a file given the required parameters, useful when
|
||||||
|
* temp files in use (for example file uploaded as an attachment).
|
||||||
|
*/
|
||||||
|
export const upload = async ({
|
||||||
|
bucket: bucketName,
|
||||||
|
filename,
|
||||||
|
path,
|
||||||
|
type,
|
||||||
|
metadata,
|
||||||
|
}: UploadParams) => {
|
||||||
|
const extension = filename.split(".").pop()
|
||||||
|
const fileBytes = fs.readFileSync(path)
|
||||||
|
|
||||||
|
const objectStore = ObjectStore(bucketName)
|
||||||
|
await makeSureBucketExists(objectStore, bucketName)
|
||||||
|
|
||||||
|
let contentType = type
|
||||||
|
if (!contentType) {
|
||||||
|
contentType = extension
|
||||||
|
? CONTENT_TYPE_MAP[extension.toLowerCase()]
|
||||||
|
: CONTENT_TYPE_MAP.txt
|
||||||
|
}
|
||||||
|
const config: any = {
|
||||||
|
// windows file paths need to be converted to forward slashes for s3
|
||||||
|
Key: sanitizeKey(filename),
|
||||||
|
Body: fileBytes,
|
||||||
|
ContentType: contentType,
|
||||||
|
}
|
||||||
|
if (metadata && typeof metadata === "object") {
|
||||||
|
// remove any nullish keys from the metadata object, as these may be considered invalid
|
||||||
|
for (let key of Object.keys(metadata)) {
|
||||||
|
if (!metadata[key] || typeof metadata[key] !== "string") {
|
||||||
|
delete metadata[key]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
config.Metadata = metadata
|
||||||
|
}
|
||||||
|
return objectStore.upload(config).promise()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Similar to the upload function but can be used to send a file stream
|
||||||
|
* through to the object store.
|
||||||
|
*/
|
||||||
|
export const streamUpload = async (
|
||||||
|
bucketName: string,
|
||||||
|
filename: string,
|
||||||
|
stream: any,
|
||||||
|
extra = {}
|
||||||
|
) => {
|
||||||
|
const objectStore = ObjectStore(bucketName)
|
||||||
|
await makeSureBucketExists(objectStore, bucketName)
|
||||||
|
|
||||||
|
// Set content type for certain known extensions
|
||||||
|
if (filename?.endsWith(".js")) {
|
||||||
|
extra = {
|
||||||
|
...extra,
|
||||||
|
ContentType: "application/javascript",
|
||||||
|
}
|
||||||
|
} else if (filename?.endsWith(".svg")) {
|
||||||
|
extra = {
|
||||||
|
...extra,
|
||||||
|
ContentType: "image",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const params = {
|
||||||
|
Bucket: sanitizeBucket(bucketName),
|
||||||
|
Key: sanitizeKey(filename),
|
||||||
|
Body: stream,
|
||||||
|
...extra,
|
||||||
|
}
|
||||||
|
return objectStore.upload(params).promise()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* retrieves the contents of a file from the object store, if it is a known content type it
|
||||||
|
* will be converted, otherwise it will be returned as a buffer stream.
|
||||||
|
*/
|
||||||
|
export const retrieve = async (bucketName: string, filepath: string) => {
|
||||||
|
const objectStore = ObjectStore(bucketName)
|
||||||
|
const params = {
|
||||||
|
Bucket: sanitizeBucket(bucketName),
|
||||||
|
Key: sanitizeKey(filepath),
|
||||||
|
}
|
||||||
|
const response: any = await objectStore.getObject(params).promise()
|
||||||
|
// currently these are all strings
|
||||||
|
if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
|
||||||
|
return response.Body.toString("utf8")
|
||||||
|
} else {
|
||||||
|
return response.Body
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const listAllObjects = async (bucketName: string, path: string) => {
|
||||||
|
const objectStore = ObjectStore(bucketName)
|
||||||
|
const list = (params: ListParams = {}) => {
|
||||||
|
return objectStore
|
||||||
|
.listObjectsV2({
|
||||||
|
...params,
|
||||||
|
Bucket: sanitizeBucket(bucketName),
|
||||||
|
Prefix: sanitizeKey(path),
|
||||||
|
})
|
||||||
|
.promise()
|
||||||
|
}
|
||||||
|
let isTruncated = false,
|
||||||
|
token,
|
||||||
|
objects: AWS.S3.Types.Object[] = []
|
||||||
|
do {
|
||||||
|
let params: ListParams = {}
|
||||||
|
if (token) {
|
||||||
|
params.ContinuationToken = token
|
||||||
|
}
|
||||||
|
const response = await list(params)
|
||||||
|
if (response.Contents) {
|
||||||
|
objects = objects.concat(response.Contents)
|
||||||
|
}
|
||||||
|
isTruncated = !!response.IsTruncated
|
||||||
|
} while (isTruncated)
|
||||||
|
return objects
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Same as retrieval function but puts to a temporary file.
|
||||||
|
*/
|
||||||
|
export const retrieveToTmp = async (bucketName: string, filepath: string) => {
|
||||||
|
bucketName = sanitizeBucket(bucketName)
|
||||||
|
filepath = sanitizeKey(filepath)
|
||||||
|
const data = await retrieve(bucketName, filepath)
|
||||||
|
const outputPath = join(budibaseTempDir(), v4())
|
||||||
|
fs.writeFileSync(outputPath, data)
|
||||||
|
return outputPath
|
||||||
|
}
|
||||||
|
|
||||||
|
export const retrieveDirectory = async (bucketName: string, path: string) => {
|
||||||
|
let writePath = join(budibaseTempDir(), v4())
|
||||||
|
fs.mkdirSync(writePath)
|
||||||
|
const objects = await listAllObjects(bucketName, path)
|
||||||
|
let fullObjects = await Promise.all(
|
||||||
|
objects.map(obj => retrieve(bucketName, obj.Key!))
|
||||||
|
)
|
||||||
|
let count = 0
|
||||||
|
for (let obj of objects) {
|
||||||
|
const filename = obj.Key!
|
||||||
|
const data = fullObjects[count++]
|
||||||
|
const possiblePath = filename.split("/")
|
||||||
|
if (possiblePath.length > 1) {
|
||||||
|
const dirs = possiblePath.slice(0, possiblePath.length - 1)
|
||||||
|
fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
|
||||||
|
}
|
||||||
|
fs.writeFileSync(join(writePath, ...possiblePath), data)
|
||||||
|
}
|
||||||
|
return writePath
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a single file.
|
||||||
|
*/
|
||||||
|
export const deleteFile = async (bucketName: string, filepath: string) => {
|
||||||
|
const objectStore = ObjectStore(bucketName)
|
||||||
|
await makeSureBucketExists(objectStore, bucketName)
|
||||||
|
const params = {
|
||||||
|
Bucket: bucketName,
|
||||||
|
Key: filepath,
|
||||||
|
}
|
||||||
|
return objectStore.deleteObject(params)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
|
||||||
|
const objectStore = ObjectStore(bucketName)
|
||||||
|
await makeSureBucketExists(objectStore, bucketName)
|
||||||
|
const params = {
|
||||||
|
Bucket: bucketName,
|
||||||
|
Delete: {
|
||||||
|
Objects: filepaths.map((path: any) => ({ Key: path })),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return objectStore.deleteObjects(params).promise()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a path, including everything within.
|
||||||
|
*/
|
||||||
|
export const deleteFolder = async (
|
||||||
|
bucketName: string,
|
||||||
|
folder: string
|
||||||
|
): Promise<any> => {
|
||||||
|
bucketName = sanitizeBucket(bucketName)
|
||||||
|
folder = sanitizeKey(folder)
|
||||||
|
const client = ObjectStore(bucketName)
|
||||||
|
const listParams = {
|
||||||
|
Bucket: bucketName,
|
||||||
|
Prefix: folder,
|
||||||
|
}
|
||||||
|
|
||||||
|
let response: any = await client.listObjects(listParams).promise()
|
||||||
|
if (response.Contents.length === 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const deleteParams: any = {
|
||||||
|
Bucket: bucketName,
|
||||||
|
Delete: {
|
||||||
|
Objects: [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Contents.forEach((content: any) => {
|
||||||
|
deleteParams.Delete.Objects.push({ Key: content.Key })
|
||||||
|
})
|
||||||
|
|
||||||
|
response = await client.deleteObjects(deleteParams).promise()
|
||||||
|
// can only empty 1000 items at once
|
||||||
|
if (response.Deleted.length === 1000) {
|
||||||
|
return deleteFolder(bucketName, folder)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const uploadDirectory = async (
|
||||||
|
bucketName: string,
|
||||||
|
localPath: string,
|
||||||
|
bucketPath: string
|
||||||
|
) => {
|
||||||
|
bucketName = sanitizeBucket(bucketName)
|
||||||
|
let uploads = []
|
||||||
|
const files = fs.readdirSync(localPath, { withFileTypes: true })
|
||||||
|
for (let file of files) {
|
||||||
|
const path = sanitizeKey(join(bucketPath, file.name))
|
||||||
|
const local = join(localPath, file.name)
|
||||||
|
if (file.isDirectory()) {
|
||||||
|
uploads.push(uploadDirectory(bucketName, local, path))
|
||||||
|
} else {
|
||||||
|
uploads.push(streamUpload(bucketName, path, fs.createReadStream(local)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await Promise.all(uploads)
|
||||||
|
return files
|
||||||
|
}
|
||||||
|
|
||||||
|
export const downloadTarballDirect = async (
|
||||||
|
url: string,
|
||||||
|
path: string,
|
||||||
|
headers = {}
|
||||||
|
) => {
|
||||||
|
path = sanitizeKey(path)
|
||||||
|
const response = await fetch(url, { headers })
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`unexpected response ${response.statusText}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
|
||||||
|
}
|
||||||
|
|
||||||
|
export const downloadTarball = async (
|
||||||
|
url: string,
|
||||||
|
bucketName: string,
|
||||||
|
path: string
|
||||||
|
) => {
|
||||||
|
bucketName = sanitizeBucket(bucketName)
|
||||||
|
path = sanitizeKey(path)
|
||||||
|
const response = await fetch(url)
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`unexpected response ${response.statusText}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const tmpPath = join(budibaseTempDir(), path)
|
||||||
|
await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
|
||||||
|
if (!env.isTest() && env.SELF_HOSTED) {
|
||||||
|
await uploadDirectory(bucketName, tmpPath, path)
|
||||||
|
}
|
||||||
|
// return the temporary path incase there is a use for it
|
||||||
|
return tmpPath
|
||||||
|
}
|
|
@ -1,14 +1,15 @@
|
||||||
const { join } = require("path")
|
import { join } from "path"
|
||||||
const { tmpdir } = require("os")
|
import { tmpdir } from "os"
|
||||||
const fs = require("fs")
|
import fs from "fs"
|
||||||
const env = require("../environment")
|
import env from "../environment"
|
||||||
|
|
||||||
/****************************************************
|
/****************************************************
|
||||||
* NOTE: When adding a new bucket - name *
|
* NOTE: When adding a new bucket - name *
|
||||||
* sure that S3 usages (like budibase-infra) *
|
* sure that S3 usages (like budibase-infra) *
|
||||||
* have been updated to have a unique bucket name. *
|
* have been updated to have a unique bucket name. *
|
||||||
****************************************************/
|
****************************************************/
|
||||||
exports.ObjectStoreBuckets = {
|
// can't be an enum - only numbers can be used for computed types
|
||||||
|
export const ObjectStoreBuckets = {
|
||||||
BACKUPS: env.BACKUPS_BUCKET_NAME,
|
BACKUPS: env.BACKUPS_BUCKET_NAME,
|
||||||
APPS: env.APPS_BUCKET_NAME,
|
APPS: env.APPS_BUCKET_NAME,
|
||||||
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
|
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
|
||||||
|
@ -22,6 +23,6 @@ if (!fs.existsSync(bbTmp)) {
|
||||||
fs.mkdirSync(bbTmp)
|
fs.mkdirSync(bbTmp)
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.budibaseTempDir = function () {
|
export function budibaseTempDir() {
|
||||||
return bbTmp
|
return bbTmp
|
||||||
}
|
}
|
|
@ -1,11 +0,0 @@
|
||||||
const env = require("./environment")
|
|
||||||
|
|
||||||
exports.pinoSettings = () => ({
|
|
||||||
prettyPrint: {
|
|
||||||
levelFirst: true,
|
|
||||||
},
|
|
||||||
level: env.LOG_LEVEL || "error",
|
|
||||||
autoLogging: {
|
|
||||||
ignore: req => req.url.includes("/health"),
|
|
||||||
},
|
|
||||||
})
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
import env from "./environment"
|
||||||
|
|
||||||
|
export function pinoSettings() {
|
||||||
|
return {
|
||||||
|
prettyPrint: {
|
||||||
|
levelFirst: true,
|
||||||
|
},
|
||||||
|
level: env.LOG_LEVEL || "error",
|
||||||
|
autoLogging: {
|
||||||
|
ignore: (req: { url: string }) => req.url.includes("/health"),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,13 +0,0 @@
|
||||||
// Mimic the outer package export for usage in index.ts
|
|
||||||
// The outer exports can't be used as they now reference dist directly
|
|
||||||
import * as generic from "../cache/generic"
|
|
||||||
import * as user from "../cache/user"
|
|
||||||
import * as app from "../cache/appMetadata"
|
|
||||||
import * as writethrough from "../cache/writethrough"
|
|
||||||
|
|
||||||
export = {
|
|
||||||
app,
|
|
||||||
user,
|
|
||||||
writethrough,
|
|
||||||
...generic,
|
|
||||||
}
|
|
|
@ -1,26 +0,0 @@
|
||||||
// Mimic the outer package export for usage in index.ts
|
|
||||||
// The outer exports can't be used as they now reference dist directly
|
|
||||||
import {
|
|
||||||
getAppDB,
|
|
||||||
getDevAppDB,
|
|
||||||
getProdAppDB,
|
|
||||||
getAppId,
|
|
||||||
updateAppId,
|
|
||||||
doInAppContext,
|
|
||||||
doInTenant,
|
|
||||||
doInContext,
|
|
||||||
} from "../context"
|
|
||||||
|
|
||||||
import * as identity from "../context/identity"
|
|
||||||
|
|
||||||
export = {
|
|
||||||
getAppDB,
|
|
||||||
getDevAppDB,
|
|
||||||
getProdAppDB,
|
|
||||||
getAppId,
|
|
||||||
updateAppId,
|
|
||||||
doInAppContext,
|
|
||||||
doInTenant,
|
|
||||||
doInContext,
|
|
||||||
identity,
|
|
||||||
}
|
|
|
@ -1,4 +0,0 @@
|
||||||
// Mimic the outer package export for usage in index.ts
|
|
||||||
// The outer exports can't be used as they now reference dist directly
|
|
||||||
export * from "../objectStore"
|
|
||||||
export * from "../objectStore/utils"
|
|
|
@ -1,13 +0,0 @@
|
||||||
// Mimic the outer package export for usage in index.ts
|
|
||||||
// The outer exports can't be used as they now reference dist directly
|
|
||||||
import Client from "../redis"
|
|
||||||
import utils from "../redis/utils"
|
|
||||||
import clients from "../redis/init"
|
|
||||||
import * as redlock from "../redis/redlock"
|
|
||||||
|
|
||||||
export = {
|
|
||||||
Client,
|
|
||||||
utils,
|
|
||||||
clients,
|
|
||||||
redlock,
|
|
||||||
}
|
|
|
@ -1,4 +0,0 @@
|
||||||
// Mimic the outer package export for usage in index.ts
|
|
||||||
// The outer exports can't be used as they now reference dist directly
|
|
||||||
export * from "../utils"
|
|
||||||
export * from "../hashing"
|
|
|
@ -1,7 +1 @@
|
||||||
import * as utils from "./utils"
|
export * from "./utils"
|
||||||
|
|
||||||
const pkg = {
|
|
||||||
...utils,
|
|
||||||
}
|
|
||||||
|
|
||||||
export = pkg
|
|
||||||
|
|
|
@ -1,9 +1,5 @@
|
||||||
const {
|
import { DatasourceFieldType, QueryType, PluginType } from "@budibase/types"
|
||||||
DatasourceFieldType,
|
import joi from "joi"
|
||||||
QueryType,
|
|
||||||
PluginType,
|
|
||||||
} = require("@budibase/types")
|
|
||||||
const joi = require("joi")
|
|
||||||
|
|
||||||
const DATASOURCE_TYPES = [
|
const DATASOURCE_TYPES = [
|
||||||
"Relational",
|
"Relational",
|
||||||
|
@ -14,14 +10,14 @@ const DATASOURCE_TYPES = [
|
||||||
"API",
|
"API",
|
||||||
]
|
]
|
||||||
|
|
||||||
function runJoi(validator, schema) {
|
function runJoi(validator: joi.Schema, schema: any) {
|
||||||
const { error } = validator.validate(schema)
|
const { error } = validator.validate(schema)
|
||||||
if (error) {
|
if (error) {
|
||||||
throw error
|
throw error
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function validateComponent(schema) {
|
function validateComponent(schema: any) {
|
||||||
const validator = joi.object({
|
const validator = joi.object({
|
||||||
type: joi.string().allow("component").required(),
|
type: joi.string().allow("component").required(),
|
||||||
metadata: joi.object().unknown(true).required(),
|
metadata: joi.object().unknown(true).required(),
|
||||||
|
@ -37,7 +33,7 @@ function validateComponent(schema) {
|
||||||
runJoi(validator, schema)
|
runJoi(validator, schema)
|
||||||
}
|
}
|
||||||
|
|
||||||
function validateDatasource(schema) {
|
function validateDatasource(schema: any) {
|
||||||
const fieldValidator = joi.object({
|
const fieldValidator = joi.object({
|
||||||
type: joi
|
type: joi
|
||||||
.string()
|
.string()
|
||||||
|
@ -86,7 +82,7 @@ function validateDatasource(schema) {
|
||||||
runJoi(validator, schema)
|
runJoi(validator, schema)
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.validate = schema => {
|
export function validate(schema: any) {
|
||||||
switch (schema?.type) {
|
switch (schema?.type) {
|
||||||
case PluginType.COMPONENT:
|
case PluginType.COMPONENT:
|
||||||
validateComponent(schema)
|
validateComponent(schema)
|
|
@ -1,5 +1,5 @@
|
||||||
import events from "events"
|
import events from "events"
|
||||||
import { timeout } from "../../utils"
|
import { timeout } from "../utils"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Bull works with a Job wrapper around all messages that contains a lot more information about
|
* Bull works with a Job wrapper around all messages that contains a lot more information about
|
||||||
|
|
|
@ -39,7 +39,7 @@ export function createQueue<T>(
|
||||||
return queue
|
return queue
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.shutdown = async () => {
|
export async function shutdown() {
|
||||||
if (QUEUES.length) {
|
if (QUEUES.length) {
|
||||||
clearInterval(cleanupInterval)
|
clearInterval(cleanupInterval)
|
||||||
for (let queue of QUEUES) {
|
for (let queue of QUEUES) {
|
||||||
|
|
|
@ -1,278 +1,6 @@
|
||||||
import RedisWrapper from "../redis"
|
// Mimic the outer package export for usage in index.ts
|
||||||
const env = require("../environment")
|
// The outer exports can't be used as they now reference dist directly
|
||||||
// ioredis mock is all in memory
|
export { default as Client } from "./redis"
|
||||||
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
|
export * as utils from "./utils"
|
||||||
const {
|
export * as clients from "./init"
|
||||||
addDbPrefix,
|
export * as redlock from "./redlock"
|
||||||
removeDbPrefix,
|
|
||||||
getRedisOptions,
|
|
||||||
SEPARATOR,
|
|
||||||
SelectableDatabases,
|
|
||||||
} = require("./utils")
|
|
||||||
|
|
||||||
const RETRY_PERIOD_MS = 2000
|
|
||||||
const STARTUP_TIMEOUT_MS = 5000
|
|
||||||
const CLUSTERED = false
|
|
||||||
const DEFAULT_SELECT_DB = SelectableDatabases.DEFAULT
|
|
||||||
|
|
||||||
// for testing just generate the client once
|
|
||||||
let CLOSED = false
|
|
||||||
let CLIENTS: { [key: number]: any } = {}
|
|
||||||
// if in test always connected
|
|
||||||
let CONNECTED = env.isTest()
|
|
||||||
|
|
||||||
function pickClient(selectDb: number): any {
|
|
||||||
return CLIENTS[selectDb]
|
|
||||||
}
|
|
||||||
|
|
||||||
function connectionError(
|
|
||||||
selectDb: number,
|
|
||||||
timeout: NodeJS.Timeout,
|
|
||||||
err: Error | string
|
|
||||||
) {
|
|
||||||
// manually shut down, ignore errors
|
|
||||||
if (CLOSED) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
pickClient(selectDb).disconnect()
|
|
||||||
CLOSED = true
|
|
||||||
// always clear this on error
|
|
||||||
clearTimeout(timeout)
|
|
||||||
CONNECTED = false
|
|
||||||
console.error("Redis connection failed - " + err)
|
|
||||||
setTimeout(() => {
|
|
||||||
init()
|
|
||||||
}, RETRY_PERIOD_MS)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise
|
|
||||||
* will return the ioredis client which will be ready to use.
|
|
||||||
*/
|
|
||||||
function init(selectDb = DEFAULT_SELECT_DB) {
|
|
||||||
let timeout: NodeJS.Timeout
|
|
||||||
CLOSED = false
|
|
||||||
let client = pickClient(selectDb)
|
|
||||||
// already connected, ignore
|
|
||||||
if (client && CONNECTED) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// testing uses a single in memory client
|
|
||||||
if (env.isTest()) {
|
|
||||||
CLIENTS[selectDb] = new Redis(getRedisOptions())
|
|
||||||
}
|
|
||||||
// start the timer - only allowed 5 seconds to connect
|
|
||||||
timeout = setTimeout(() => {
|
|
||||||
if (!CONNECTED) {
|
|
||||||
connectionError(
|
|
||||||
selectDb,
|
|
||||||
timeout,
|
|
||||||
"Did not successfully connect in timeout"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}, STARTUP_TIMEOUT_MS)
|
|
||||||
|
|
||||||
// disconnect any lingering client
|
|
||||||
if (client) {
|
|
||||||
client.disconnect()
|
|
||||||
}
|
|
||||||
const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)
|
|
||||||
|
|
||||||
if (CLUSTERED) {
|
|
||||||
client = new Redis.Cluster([{ host, port }], opts)
|
|
||||||
} else if (redisProtocolUrl) {
|
|
||||||
client = new Redis(redisProtocolUrl)
|
|
||||||
} else {
|
|
||||||
client = new Redis(opts)
|
|
||||||
}
|
|
||||||
// attach handlers
|
|
||||||
client.on("end", (err: Error) => {
|
|
||||||
connectionError(selectDb, timeout, err)
|
|
||||||
})
|
|
||||||
client.on("error", (err: Error) => {
|
|
||||||
connectionError(selectDb, timeout, err)
|
|
||||||
})
|
|
||||||
client.on("connect", () => {
|
|
||||||
clearTimeout(timeout)
|
|
||||||
CONNECTED = true
|
|
||||||
})
|
|
||||||
CLIENTS[selectDb] = client
|
|
||||||
}
|
|
||||||
|
|
||||||
function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
|
|
||||||
return new Promise(resolve => {
|
|
||||||
if (pickClient(selectDb) == null) {
|
|
||||||
init()
|
|
||||||
} else if (CONNECTED) {
|
|
||||||
resolve("")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// check if the connection is ready
|
|
||||||
const interval = setInterval(() => {
|
|
||||||
if (CONNECTED) {
|
|
||||||
clearInterval(interval)
|
|
||||||
resolve("")
|
|
||||||
}
|
|
||||||
}, 500)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Utility function, takes a redis stream and converts it to a promisified response -
|
|
||||||
* this can only be done with redis streams because they will have an end.
|
|
||||||
* @param stream A redis stream, specifically as this type of stream will have an end.
|
|
||||||
* @param client The client to use for further lookups.
|
|
||||||
* @return {Promise<object>} The final output of the stream
|
|
||||||
*/
|
|
||||||
function promisifyStream(stream: any, client: RedisWrapper) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const outputKeys = new Set()
|
|
||||||
stream.on("data", (keys: string[]) => {
|
|
||||||
keys.forEach(key => {
|
|
||||||
outputKeys.add(key)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
stream.on("error", (err: Error) => {
|
|
||||||
reject(err)
|
|
||||||
})
|
|
||||||
stream.on("end", async () => {
|
|
||||||
const keysArray: string[] = Array.from(outputKeys) as string[]
|
|
||||||
try {
|
|
||||||
let getPromises = []
|
|
||||||
for (let key of keysArray) {
|
|
||||||
getPromises.push(client.get(key))
|
|
||||||
}
|
|
||||||
const jsonArray = await Promise.all(getPromises)
|
|
||||||
resolve(
|
|
||||||
keysArray.map(key => ({
|
|
||||||
key: removeDbPrefix(key),
|
|
||||||
value: JSON.parse(jsonArray.shift()),
|
|
||||||
}))
|
|
||||||
)
|
|
||||||
} catch (err) {
|
|
||||||
reject(err)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
export = class RedisWrapper {
|
|
||||||
_db: string
|
|
||||||
_select: number
|
|
||||||
|
|
||||||
constructor(db: string, selectDb: number | null = null) {
|
|
||||||
this._db = db
|
|
||||||
this._select = selectDb || DEFAULT_SELECT_DB
|
|
||||||
}
|
|
||||||
|
|
||||||
getClient() {
|
|
||||||
return pickClient(this._select)
|
|
||||||
}
|
|
||||||
|
|
||||||
async init() {
|
|
||||||
CLOSED = false
|
|
||||||
init(this._select)
|
|
||||||
await waitForConnection(this._select)
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
async finish() {
|
|
||||||
CLOSED = true
|
|
||||||
this.getClient().disconnect()
|
|
||||||
}
|
|
||||||
|
|
||||||
async scan(key = ""): Promise<any> {
|
|
||||||
const db = this._db
|
|
||||||
key = `${db}${SEPARATOR}${key}`
|
|
||||||
let stream
|
|
||||||
if (CLUSTERED) {
|
|
||||||
let node = this.getClient().nodes("master")
|
|
||||||
stream = node[0].scanStream({ match: key + "*", count: 100 })
|
|
||||||
} else {
|
|
||||||
stream = this.getClient().scanStream({ match: key + "*", count: 100 })
|
|
||||||
}
|
|
||||||
return promisifyStream(stream, this.getClient())
|
|
||||||
}
|
|
||||||
|
|
||||||
async keys(pattern: string) {
|
|
||||||
const db = this._db
|
|
||||||
return this.getClient().keys(addDbPrefix(db, pattern))
|
|
||||||
}
|
|
||||||
|
|
||||||
async get(key: string) {
|
|
||||||
const db = this._db
|
|
||||||
let response = await this.getClient().get(addDbPrefix(db, key))
|
|
||||||
// overwrite the prefixed key
|
|
||||||
if (response != null && response.key) {
|
|
||||||
response.key = key
|
|
||||||
}
|
|
||||||
// if its not an object just return the response
|
|
||||||
try {
|
|
||||||
return JSON.parse(response)
|
|
||||||
} catch (err) {
|
|
||||||
return response
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async bulkGet(keys: string[]) {
|
|
||||||
const db = this._db
|
|
||||||
if (keys.length === 0) {
|
|
||||||
return {}
|
|
||||||
}
|
|
||||||
const prefixedKeys = keys.map(key => addDbPrefix(db, key))
|
|
||||||
let response = await this.getClient().mget(prefixedKeys)
|
|
||||||
if (Array.isArray(response)) {
|
|
||||||
let final: any = {}
|
|
||||||
let count = 0
|
|
||||||
for (let result of response) {
|
|
||||||
if (result) {
|
|
||||||
let parsed
|
|
||||||
try {
|
|
||||||
parsed = JSON.parse(result)
|
|
||||||
} catch (err) {
|
|
||||||
parsed = result
|
|
||||||
}
|
|
||||||
final[keys[count]] = parsed
|
|
||||||
}
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
return final
|
|
||||||
} else {
|
|
||||||
throw new Error(`Invalid response: ${response}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async store(key: string, value: any, expirySeconds: number | null = null) {
|
|
||||||
const db = this._db
|
|
||||||
if (typeof value === "object") {
|
|
||||||
value = JSON.stringify(value)
|
|
||||||
}
|
|
||||||
const prefixedKey = addDbPrefix(db, key)
|
|
||||||
await this.getClient().set(prefixedKey, value)
|
|
||||||
if (expirySeconds) {
|
|
||||||
await this.getClient().expire(prefixedKey, expirySeconds)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async getTTL(key: string) {
|
|
||||||
const db = this._db
|
|
||||||
const prefixedKey = addDbPrefix(db, key)
|
|
||||||
return this.getClient().ttl(prefixedKey)
|
|
||||||
}
|
|
||||||
|
|
||||||
async setExpiry(key: string, expirySeconds: number | null) {
|
|
||||||
const db = this._db
|
|
||||||
const prefixedKey = addDbPrefix(db, key)
|
|
||||||
await this.getClient().expire(prefixedKey, expirySeconds)
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(key: string) {
|
|
||||||
const db = this._db
|
|
||||||
await this.getClient().del(addDbPrefix(db, key))
|
|
||||||
}
|
|
||||||
|
|
||||||
async clear() {
|
|
||||||
let items = await this.scan()
|
|
||||||
await Promise.all(items.map((obj: any) => this.delete(obj.key)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
|
@ -1,69 +0,0 @@
|
||||||
const Client = require("./index")
|
|
||||||
const utils = require("./utils")
|
|
||||||
|
|
||||||
let userClient,
|
|
||||||
sessionClient,
|
|
||||||
appClient,
|
|
||||||
cacheClient,
|
|
||||||
writethroughClient,
|
|
||||||
lockClient
|
|
||||||
|
|
||||||
async function init() {
|
|
||||||
userClient = await new Client(utils.Databases.USER_CACHE).init()
|
|
||||||
sessionClient = await new Client(utils.Databases.SESSIONS).init()
|
|
||||||
appClient = await new Client(utils.Databases.APP_METADATA).init()
|
|
||||||
cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
|
|
||||||
lockClient = await new Client(utils.Databases.LOCKS).init()
|
|
||||||
writethroughClient = await new Client(
|
|
||||||
utils.Databases.WRITE_THROUGH,
|
|
||||||
utils.SelectableDatabases.WRITE_THROUGH
|
|
||||||
).init()
|
|
||||||
}
|
|
||||||
|
|
||||||
process.on("exit", async () => {
|
|
||||||
if (userClient) await userClient.finish()
|
|
||||||
if (sessionClient) await sessionClient.finish()
|
|
||||||
if (appClient) await appClient.finish()
|
|
||||||
if (cacheClient) await cacheClient.finish()
|
|
||||||
if (writethroughClient) await writethroughClient.finish()
|
|
||||||
if (lockClient) await lockClient.finish()
|
|
||||||
})
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
getUserClient: async () => {
|
|
||||||
if (!userClient) {
|
|
||||||
await init()
|
|
||||||
}
|
|
||||||
return userClient
|
|
||||||
},
|
|
||||||
getSessionClient: async () => {
|
|
||||||
if (!sessionClient) {
|
|
||||||
await init()
|
|
||||||
}
|
|
||||||
return sessionClient
|
|
||||||
},
|
|
||||||
getAppClient: async () => {
|
|
||||||
if (!appClient) {
|
|
||||||
await init()
|
|
||||||
}
|
|
||||||
return appClient
|
|
||||||
},
|
|
||||||
getCacheClient: async () => {
|
|
||||||
if (!cacheClient) {
|
|
||||||
await init()
|
|
||||||
}
|
|
||||||
return cacheClient
|
|
||||||
},
|
|
||||||
getWritethroughClient: async () => {
|
|
||||||
if (!writethroughClient) {
|
|
||||||
await init()
|
|
||||||
}
|
|
||||||
return writethroughClient
|
|
||||||
},
|
|
||||||
getLockClient: async () => {
|
|
||||||
if (!lockClient) {
|
|
||||||
await init()
|
|
||||||
}
|
|
||||||
return lockClient
|
|
||||||
},
|
|
||||||
}
|
|
|
@ -0,0 +1,72 @@
|
||||||
|
import Client from "./redis"
|
||||||
|
import * as utils from "./utils"
|
||||||
|
|
||||||
|
let userClient: Client,
|
||||||
|
sessionClient: Client,
|
||||||
|
appClient: Client,
|
||||||
|
cacheClient: Client,
|
||||||
|
writethroughClient: Client,
|
||||||
|
lockClient: Client
|
||||||
|
|
||||||
|
async function init() {
|
||||||
|
userClient = await new Client(utils.Databases.USER_CACHE).init()
|
||||||
|
sessionClient = await new Client(utils.Databases.SESSIONS).init()
|
||||||
|
appClient = await new Client(utils.Databases.APP_METADATA).init()
|
||||||
|
cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
|
||||||
|
lockClient = await new Client(utils.Databases.LOCKS).init()
|
||||||
|
writethroughClient = await new Client(
|
||||||
|
utils.Databases.WRITE_THROUGH,
|
||||||
|
utils.SelectableDatabase.WRITE_THROUGH
|
||||||
|
).init()
|
||||||
|
}
|
||||||
|
|
||||||
|
process.on("exit", async () => {
|
||||||
|
if (userClient) await userClient.finish()
|
||||||
|
if (sessionClient) await sessionClient.finish()
|
||||||
|
if (appClient) await appClient.finish()
|
||||||
|
if (cacheClient) await cacheClient.finish()
|
||||||
|
if (writethroughClient) await writethroughClient.finish()
|
||||||
|
if (lockClient) await lockClient.finish()
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function getUserClient() {
|
||||||
|
if (!userClient) {
|
||||||
|
await init()
|
||||||
|
}
|
||||||
|
return userClient
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getSessionClient() {
|
||||||
|
if (!sessionClient) {
|
||||||
|
await init()
|
||||||
|
}
|
||||||
|
return sessionClient
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getAppClient() {
|
||||||
|
if (!appClient) {
|
||||||
|
await init()
|
||||||
|
}
|
||||||
|
return appClient
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getCacheClient() {
|
||||||
|
if (!cacheClient) {
|
||||||
|
await init()
|
||||||
|
}
|
||||||
|
return cacheClient
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getWritethroughClient() {
|
||||||
|
if (!writethroughClient) {
|
||||||
|
await init()
|
||||||
|
}
|
||||||
|
return writethroughClient
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getLockClient() {
|
||||||
|
if (!lockClient) {
|
||||||
|
await init()
|
||||||
|
}
|
||||||
|
return lockClient
|
||||||
|
}
|
|
@ -0,0 +1,279 @@
|
||||||
|
import env from "../environment"
|
||||||
|
// ioredis mock is all in memory
|
||||||
|
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
|
||||||
|
import {
|
||||||
|
addDbPrefix,
|
||||||
|
removeDbPrefix,
|
||||||
|
getRedisOptions,
|
||||||
|
SEPARATOR,
|
||||||
|
SelectableDatabase,
|
||||||
|
} from "./utils"
|
||||||
|
|
||||||
|
const RETRY_PERIOD_MS = 2000
|
||||||
|
const STARTUP_TIMEOUT_MS = 5000
|
||||||
|
const CLUSTERED = false
|
||||||
|
const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
|
||||||
|
|
||||||
|
// for testing just generate the client once
|
||||||
|
let CLOSED = false
|
||||||
|
let CLIENTS: { [key: number]: any } = {}
|
||||||
|
// if in test always connected
|
||||||
|
let CONNECTED = env.isTest()
|
||||||
|
|
||||||
|
function pickClient(selectDb: number): any {
|
||||||
|
return CLIENTS[selectDb]
|
||||||
|
}
|
||||||
|
|
||||||
|
function connectionError(
|
||||||
|
selectDb: number,
|
||||||
|
timeout: NodeJS.Timeout,
|
||||||
|
err: Error | string
|
||||||
|
) {
|
||||||
|
// manually shut down, ignore errors
|
||||||
|
if (CLOSED) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
pickClient(selectDb).disconnect()
|
||||||
|
CLOSED = true
|
||||||
|
// always clear this on error
|
||||||
|
clearTimeout(timeout)
|
||||||
|
CONNECTED = false
|
||||||
|
console.error("Redis connection failed - " + err)
|
||||||
|
setTimeout(() => {
|
||||||
|
init()
|
||||||
|
}, RETRY_PERIOD_MS)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise
|
||||||
|
* will return the ioredis client which will be ready to use.
|
||||||
|
*/
|
||||||
|
function init(selectDb = DEFAULT_SELECT_DB) {
|
||||||
|
let timeout: NodeJS.Timeout
|
||||||
|
CLOSED = false
|
||||||
|
let client = pickClient(selectDb)
|
||||||
|
// already connected, ignore
|
||||||
|
if (client && CONNECTED) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// testing uses a single in memory client
|
||||||
|
if (env.isTest()) {
|
||||||
|
CLIENTS[selectDb] = new Redis(getRedisOptions())
|
||||||
|
}
|
||||||
|
// start the timer - only allowed 5 seconds to connect
|
||||||
|
timeout = setTimeout(() => {
|
||||||
|
if (!CONNECTED) {
|
||||||
|
connectionError(
|
||||||
|
selectDb,
|
||||||
|
timeout,
|
||||||
|
"Did not successfully connect in timeout"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}, STARTUP_TIMEOUT_MS)
|
||||||
|
|
||||||
|
// disconnect any lingering client
|
||||||
|
if (client) {
|
||||||
|
client.disconnect()
|
||||||
|
}
|
||||||
|
const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)
|
||||||
|
|
||||||
|
if (CLUSTERED) {
|
||||||
|
client = new Redis.Cluster([{ host, port }], opts)
|
||||||
|
} else if (redisProtocolUrl) {
|
||||||
|
client = new Redis(redisProtocolUrl)
|
||||||
|
} else {
|
||||||
|
client = new Redis(opts)
|
||||||
|
}
|
||||||
|
// attach handlers
|
||||||
|
client.on("end", (err: Error) => {
|
||||||
|
connectionError(selectDb, timeout, err)
|
||||||
|
})
|
||||||
|
client.on("error", (err: Error) => {
|
||||||
|
connectionError(selectDb, timeout, err)
|
||||||
|
})
|
||||||
|
client.on("connect", () => {
|
||||||
|
clearTimeout(timeout)
|
||||||
|
CONNECTED = true
|
||||||
|
})
|
||||||
|
CLIENTS[selectDb] = client
|
||||||
|
}
|
||||||
|
|
||||||
|
function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
|
||||||
|
return new Promise(resolve => {
|
||||||
|
if (pickClient(selectDb) == null) {
|
||||||
|
init()
|
||||||
|
} else if (CONNECTED) {
|
||||||
|
resolve("")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// check if the connection is ready
|
||||||
|
const interval = setInterval(() => {
|
||||||
|
if (CONNECTED) {
|
||||||
|
clearInterval(interval)
|
||||||
|
resolve("")
|
||||||
|
}
|
||||||
|
}, 500)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Utility function, takes a redis stream and converts it to a promisified response -
|
||||||
|
* this can only be done with redis streams because they will have an end.
|
||||||
|
* @param stream A redis stream, specifically as this type of stream will have an end.
|
||||||
|
* @param client The client to use for further lookups.
|
||||||
|
* @return {Promise<object>} The final output of the stream
|
||||||
|
*/
|
||||||
|
function promisifyStream(stream: any, client: RedisWrapper) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const outputKeys = new Set()
|
||||||
|
stream.on("data", (keys: string[]) => {
|
||||||
|
keys.forEach(key => {
|
||||||
|
outputKeys.add(key)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
stream.on("error", (err: Error) => {
|
||||||
|
reject(err)
|
||||||
|
})
|
||||||
|
stream.on("end", async () => {
|
||||||
|
const keysArray: string[] = Array.from(outputKeys) as string[]
|
||||||
|
try {
|
||||||
|
let getPromises = []
|
||||||
|
for (let key of keysArray) {
|
||||||
|
getPromises.push(client.get(key))
|
||||||
|
}
|
||||||
|
const jsonArray = await Promise.all(getPromises)
|
||||||
|
resolve(
|
||||||
|
keysArray.map(key => ({
|
||||||
|
key: removeDbPrefix(key),
|
||||||
|
value: JSON.parse(jsonArray.shift()),
|
||||||
|
}))
|
||||||
|
)
|
||||||
|
} catch (err) {
|
||||||
|
reject(err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
class RedisWrapper {
|
||||||
|
_db: string
|
||||||
|
_select: number
|
||||||
|
|
||||||
|
constructor(db: string, selectDb: number | null = null) {
|
||||||
|
this._db = db
|
||||||
|
this._select = selectDb || DEFAULT_SELECT_DB
|
||||||
|
}
|
||||||
|
|
||||||
|
getClient() {
|
||||||
|
return pickClient(this._select)
|
||||||
|
}
|
||||||
|
|
||||||
|
async init() {
|
||||||
|
CLOSED = false
|
||||||
|
init(this._select)
|
||||||
|
await waitForConnection(this._select)
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
async finish() {
|
||||||
|
CLOSED = true
|
||||||
|
this.getClient().disconnect()
|
||||||
|
}
|
||||||
|
|
||||||
|
async scan(key = ""): Promise<any> {
|
||||||
|
const db = this._db
|
||||||
|
key = `${db}${SEPARATOR}${key}`
|
||||||
|
let stream
|
||||||
|
if (CLUSTERED) {
|
||||||
|
let node = this.getClient().nodes("master")
|
||||||
|
stream = node[0].scanStream({ match: key + "*", count: 100 })
|
||||||
|
} else {
|
||||||
|
stream = this.getClient().scanStream({ match: key + "*", count: 100 })
|
||||||
|
}
|
||||||
|
return promisifyStream(stream, this.getClient())
|
||||||
|
}
|
||||||
|
|
||||||
|
async keys(pattern: string) {
|
||||||
|
const db = this._db
|
||||||
|
return this.getClient().keys(addDbPrefix(db, pattern))
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(key: string) {
|
||||||
|
const db = this._db
|
||||||
|
let response = await this.getClient().get(addDbPrefix(db, key))
|
||||||
|
// overwrite the prefixed key
|
||||||
|
if (response != null && response.key) {
|
||||||
|
response.key = key
|
||||||
|
}
|
||||||
|
// if its not an object just return the response
|
||||||
|
try {
|
||||||
|
return JSON.parse(response)
|
||||||
|
} catch (err) {
|
||||||
|
return response
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async bulkGet(keys: string[]) {
|
||||||
|
const db = this._db
|
||||||
|
if (keys.length === 0) {
|
||||||
|
return {}
|
||||||
|
}
|
||||||
|
const prefixedKeys = keys.map(key => addDbPrefix(db, key))
|
||||||
|
let response = await this.getClient().mget(prefixedKeys)
|
||||||
|
if (Array.isArray(response)) {
|
||||||
|
let final: any = {}
|
||||||
|
let count = 0
|
||||||
|
for (let result of response) {
|
||||||
|
if (result) {
|
||||||
|
let parsed
|
||||||
|
try {
|
||||||
|
parsed = JSON.parse(result)
|
||||||
|
} catch (err) {
|
||||||
|
parsed = result
|
||||||
|
}
|
||||||
|
final[keys[count]] = parsed
|
||||||
|
}
|
||||||
|
count++
|
||||||
|
}
|
||||||
|
return final
|
||||||
|
} else {
|
||||||
|
throw new Error(`Invalid response: ${response}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async store(key: string, value: any, expirySeconds: number | null = null) {
|
||||||
|
const db = this._db
|
||||||
|
if (typeof value === "object") {
|
||||||
|
value = JSON.stringify(value)
|
||||||
|
}
|
||||||
|
const prefixedKey = addDbPrefix(db, key)
|
||||||
|
await this.getClient().set(prefixedKey, value)
|
||||||
|
if (expirySeconds) {
|
||||||
|
await this.getClient().expire(prefixedKey, expirySeconds)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getTTL(key: string) {
|
||||||
|
const db = this._db
|
||||||
|
const prefixedKey = addDbPrefix(db, key)
|
||||||
|
return this.getClient().ttl(prefixedKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
async setExpiry(key: string, expirySeconds: number | null) {
|
||||||
|
const db = this._db
|
||||||
|
const prefixedKey = addDbPrefix(db, key)
|
||||||
|
await this.getClient().expire(prefixedKey, expirySeconds)
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(key: string) {
|
||||||
|
const db = this._db
|
||||||
|
await this.getClient().del(addDbPrefix(db, key))
|
||||||
|
}
|
||||||
|
|
||||||
|
async clear() {
|
||||||
|
let items = await this.scan()
|
||||||
|
await Promise.all(items.map((obj: any) => this.delete(obj.key)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export = RedisWrapper
|
|
@ -1,10 +1,10 @@
|
||||||
const env = require("../environment")
|
import env from "../environment"
|
||||||
|
|
||||||
const SLOT_REFRESH_MS = 2000
|
const SLOT_REFRESH_MS = 2000
|
||||||
const CONNECT_TIMEOUT_MS = 10000
|
const CONNECT_TIMEOUT_MS = 10000
|
||||||
const SEPARATOR = "-"
|
|
||||||
const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL
|
const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL
|
||||||
const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
|
const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
|
||||||
|
export const SEPARATOR = "-"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* These Redis databases help us to segment up a Redis keyspace by prepending the
|
* These Redis databases help us to segment up a Redis keyspace by prepending the
|
||||||
|
@ -12,23 +12,23 @@ const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
|
||||||
* can be split up a bit; allowing us to use scans on small databases to find some particular
|
* can be split up a bit; allowing us to use scans on small databases to find some particular
|
||||||
* keys within.
|
* keys within.
|
||||||
* If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out
|
* If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out
|
||||||
* of the default keyspace and use a separate one - the SelectableDatabases can be used for this.
|
* of the default keyspace and use a separate one - the SelectableDatabase can be used for this.
|
||||||
*/
|
*/
|
||||||
exports.Databases = {
|
export enum Databases {
|
||||||
PW_RESETS: "pwReset",
|
PW_RESETS = "pwReset",
|
||||||
VERIFICATIONS: "verification",
|
VERIFICATIONS = "verification",
|
||||||
INVITATIONS: "invitation",
|
INVITATIONS = "invitation",
|
||||||
DEV_LOCKS: "devLocks",
|
DEV_LOCKS = "devLocks",
|
||||||
DEBOUNCE: "debounce",
|
DEBOUNCE = "debounce",
|
||||||
SESSIONS: "session",
|
SESSIONS = "session",
|
||||||
USER_CACHE: "users",
|
USER_CACHE = "users",
|
||||||
FLAGS: "flags",
|
FLAGS = "flags",
|
||||||
APP_METADATA: "appMetadata",
|
APP_METADATA = "appMetadata",
|
||||||
QUERY_VARS: "queryVars",
|
QUERY_VARS = "queryVars",
|
||||||
LICENSES: "license",
|
LICENSES = "license",
|
||||||
GENERIC_CACHE: "data_cache",
|
GENERIC_CACHE = "data_cache",
|
||||||
WRITE_THROUGH: "writeThrough",
|
WRITE_THROUGH = "writeThrough",
|
||||||
LOCKS: "locks",
|
LOCKS = "locks",
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -40,30 +40,28 @@ exports.Databases = {
|
||||||
* but if you need to walk through all values in a database periodically then a separate selectable
|
* but if you need to walk through all values in a database periodically then a separate selectable
|
||||||
* keyspace should be used.
|
* keyspace should be used.
|
||||||
*/
|
*/
|
||||||
exports.SelectableDatabases = {
|
export enum SelectableDatabase {
|
||||||
DEFAULT: 0,
|
DEFAULT = 0,
|
||||||
WRITE_THROUGH: 1,
|
WRITE_THROUGH = 1,
|
||||||
UNUSED_1: 2,
|
UNUSED_1 = 2,
|
||||||
UNUSED_2: 3,
|
UNUSED_2 = 3,
|
||||||
UNUSED_3: 4,
|
UNUSED_3 = 4,
|
||||||
UNUSED_4: 5,
|
UNUSED_4 = 5,
|
||||||
UNUSED_5: 6,
|
UNUSED_5 = 6,
|
||||||
UNUSED_6: 7,
|
UNUSED_6 = 7,
|
||||||
UNUSED_7: 8,
|
UNUSED_7 = 8,
|
||||||
UNUSED_8: 9,
|
UNUSED_8 = 9,
|
||||||
UNUSED_9: 10,
|
UNUSED_9 = 10,
|
||||||
UNUSED_10: 11,
|
UNUSED_10 = 11,
|
||||||
UNUSED_11: 12,
|
UNUSED_11 = 12,
|
||||||
UNUSED_12: 13,
|
UNUSED_12 = 13,
|
||||||
UNUSED_13: 14,
|
UNUSED_13 = 14,
|
||||||
UNUSED_14: 15,
|
UNUSED_14 = 15,
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.SEPARATOR = SEPARATOR
|
export function getRedisOptions(clustered = false) {
|
||||||
|
|
||||||
exports.getRedisOptions = (clustered = false) => {
|
|
||||||
let password = REDIS_PASSWORD
|
let password = REDIS_PASSWORD
|
||||||
let url = REDIS_URL.split("//")
|
let url: string[] | string = REDIS_URL.split("//")
|
||||||
// get rid of the protocol
|
// get rid of the protocol
|
||||||
url = url.length > 1 ? url[1] : url[0]
|
url = url.length > 1 ? url[1] : url[0]
|
||||||
// check for a password etc
|
// check for a password etc
|
||||||
|
@ -84,7 +82,7 @@ exports.getRedisOptions = (clustered = false) => {
|
||||||
redisProtocolUrl = REDIS_URL
|
redisProtocolUrl = REDIS_URL
|
||||||
}
|
}
|
||||||
|
|
||||||
const opts = {
|
const opts: any = {
|
||||||
connectTimeout: CONNECT_TIMEOUT_MS,
|
connectTimeout: CONNECT_TIMEOUT_MS,
|
||||||
}
|
}
|
||||||
if (clustered) {
|
if (clustered) {
|
||||||
|
@ -92,7 +90,7 @@ exports.getRedisOptions = (clustered = false) => {
|
||||||
opts.redisOptions.tls = {}
|
opts.redisOptions.tls = {}
|
||||||
opts.redisOptions.password = password
|
opts.redisOptions.password = password
|
||||||
opts.slotsRefreshTimeout = SLOT_REFRESH_MS
|
opts.slotsRefreshTimeout = SLOT_REFRESH_MS
|
||||||
opts.dnsLookup = (address, callback) => callback(null, address)
|
opts.dnsLookup = (address: string, callback: any) => callback(null, address)
|
||||||
} else {
|
} else {
|
||||||
opts.host = host
|
opts.host = host
|
||||||
opts.port = port
|
opts.port = port
|
||||||
|
@ -101,14 +99,14 @@ exports.getRedisOptions = (clustered = false) => {
|
||||||
return { opts, host, port, redisProtocolUrl }
|
return { opts, host, port, redisProtocolUrl }
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.addDbPrefix = (db, key) => {
|
export function addDbPrefix(db: string, key: string) {
|
||||||
if (key.includes(db)) {
|
if (key.includes(db)) {
|
||||||
return key
|
return key
|
||||||
}
|
}
|
||||||
return `${db}${SEPARATOR}${key}`
|
return `${db}${SEPARATOR}${key}`
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.removeDbPrefix = key => {
|
export function removeDbPrefix(key: string) {
|
||||||
let parts = key.split(SEPARATOR)
|
let parts = key.split(SEPARATOR)
|
||||||
if (parts.length >= 2) {
|
if (parts.length >= 2) {
|
||||||
parts.shift()
|
parts.shift()
|
|
@ -1 +0,0 @@
|
||||||
exports.lookupApiKey = async () => {}
|
|
|
@ -1,5 +1,5 @@
|
||||||
const crypto = require("crypto")
|
import crypto from "crypto"
|
||||||
const env = require("../environment")
|
import env from "../environment"
|
||||||
|
|
||||||
const ALGO = "aes-256-ctr"
|
const ALGO = "aes-256-ctr"
|
||||||
const SECRET = env.JWT_SECRET
|
const SECRET = env.JWT_SECRET
|
||||||
|
@ -8,13 +8,13 @@ const ITERATIONS = 10000
|
||||||
const RANDOM_BYTES = 16
|
const RANDOM_BYTES = 16
|
||||||
const STRETCH_LENGTH = 32
|
const STRETCH_LENGTH = 32
|
||||||
|
|
||||||
function stretchString(string, salt) {
|
function stretchString(string: string, salt: Buffer) {
|
||||||
return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
|
return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.encrypt = input => {
|
export function encrypt(input: string) {
|
||||||
const salt = crypto.randomBytes(RANDOM_BYTES)
|
const salt = crypto.randomBytes(RANDOM_BYTES)
|
||||||
const stretched = stretchString(SECRET, salt)
|
const stretched = stretchString(SECRET!, salt)
|
||||||
const cipher = crypto.createCipheriv(ALGO, stretched, salt)
|
const cipher = crypto.createCipheriv(ALGO, stretched, salt)
|
||||||
const base = cipher.update(input)
|
const base = cipher.update(input)
|
||||||
const final = cipher.final()
|
const final = cipher.final()
|
||||||
|
@ -22,10 +22,10 @@ exports.encrypt = input => {
|
||||||
return `${salt.toString("hex")}${SEPARATOR}${encrypted}`
|
return `${salt.toString("hex")}${SEPARATOR}${encrypted}`
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.decrypt = input => {
|
export function decrypt(input: string) {
|
||||||
const [salt, encrypted] = input.split(SEPARATOR)
|
const [salt, encrypted] = input.split(SEPARATOR)
|
||||||
const saltBuffer = Buffer.from(salt, "hex")
|
const saltBuffer = Buffer.from(salt, "hex")
|
||||||
const stretched = stretchString(SECRET, saltBuffer)
|
const stretched = stretchString(SECRET!, saltBuffer)
|
||||||
const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)
|
const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)
|
||||||
const base = decipher.update(Buffer.from(encrypted, "hex"))
|
const base = decipher.update(Buffer.from(encrypted, "hex"))
|
||||||
const final = decipher.final()
|
const final = decipher.final()
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue