Updating the worker package with the various parts of the server that needed to be moved over.

mike12345567 2021-03-29 17:31:41 +01:00
parent 76f330fc36
commit 9423128369
12 changed files with 159 additions and 105 deletions

View File

@@ -21,6 +21,7 @@
   "dependencies": {
     "@koa/router": "^8.0.0",
     "aws-sdk": "^2.811.0",
+    "bcryptjs": "^2.4.3",
     "dotenv": "^8.2.0",
     "got": "^11.8.1",
     "joi": "^17.2.1",
@@ -33,7 +34,7 @@
     "koa-static": "^5.0.0",
     "node-fetch": "^2.6.1",
     "pino-pretty": "^4.0.0",
-    "pouchdb": "^7.2.2",
+    "pouchdb": "^7.2.1",
     "pouchdb-all-dbs": "^1.0.2",
     "server-destroy": "^1.0.1"
   },

View File

@@ -0,0 +1,79 @@
const CouchDB = require("../../../db")
const { StaticDatabases, generateUserID, getUserParams } = require("../../../db/utils")
const { hash } = require("./utils")
const { UserStatus } = require("../../../constants")
const USER_DB = StaticDatabases.USER.name
exports.userSave = async ctx => {
const db = new CouchDB(USER_DB)
const { email, password, _id } = ctx.request.body
const hashedPassword = password ? await hash(password) : null
let user = {
...ctx.request.body,
_id: generateUserID(email),
password: hashedPassword,
}, dbUser
// in case the user existed already
if (_id) {
dbUser = await db.get(_id)
}
// add the active status to a user if it's not provided
if (user.status == null) {
user.status = UserStatus.ACTIVE
}
try {
const response = await db.post({
password: hashedPassword || dbUser.password,
...user,
})
ctx.body = {
_id: response.id,
_rev: response.rev,
email,
}
} catch (err) {
if (err.status === 409) {
ctx.throw(400, "User exists already")
} else {
ctx.throw(err.status, err)
}
}
}
exports.userDelete = async ctx => {
const db = new CouchDB(USER_DB)
await db.destroy(generateUserID(ctx.params.email))
ctx.body = {
message: `User ${ctx.params.email} deleted.`,
}
}
// called internally by app server user fetch
exports.userFetch = async ctx => {
const db = new CouchDB(USER_DB)
const users = (
await db.allDocs(
getUserParams(null, {
include_docs: true,
})
)
).rows.map(row => row.doc)
// the user's hashed password should never be returned
for (let user of users) {
if (user) {
delete user.password
}
}
ctx.body = users
}
// called internally by app server user find
exports.userFind = async ctx => {
const db = new CouchDB(USER_DB)
const user = await db.get(generateUserID(ctx.params.email))
if (user) {
delete user.password
}
ctx.body = user
}

View File

@@ -0,0 +1,13 @@
const bcrypt = require("bcryptjs")
const env = require("../environment")
const SALT_ROUNDS = env.SALT_ROUNDS || 10
exports.hash = async data => {
const salt = await bcrypt.genSalt(SALT_ROUNDS)
return bcrypt.hash(data, salt)
}
exports.compare = async (data, encrypted) => {
return bcrypt.compare(data, encrypted)
}

View File

@@ -1,92 +0,0 @@
const env = require("../../environment")
const got = require("got")
const AWS = require("aws-sdk")
const APP_BUCKET = "app-assets"
// this doesn't matter in self host
const REGION = "eu-west-1"
const PUBLIC_READ_POLICY = {
Version: "2012-10-17",
Statement: [
{
Effect: "Allow",
Principal: {
AWS: ["*"],
},
Action: "s3:GetObject",
Resource: [`arn:aws:s3:::${APP_BUCKET}/*`],
},
],
}
async function getCouchSession() {
// fetch session token for the api user
const session = await got.post(`${env.COUCH_DB_URL}/_session`, {
responseType: "json",
credentials: "include",
json: {
username: env.COUCH_DB_USERNAME,
password: env.COUCH_DB_PASSWORD,
},
})
const cookie = session.headers["set-cookie"][0]
// Get the session cookie value only
return cookie.split(";")[0]
}
async function getMinioSession() {
AWS.config.update({
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
})
// make sure the bucket exists
const objClient = new AWS.S3({
endpoint: env.MINIO_URL,
region: REGION,
s3ForcePathStyle: true, // needed with minio?
params: {
Bucket: APP_BUCKET,
},
})
// make sure the bucket exists
try {
await objClient
.headBucket({
Bucket: APP_BUCKET,
})
.promise()
} catch (err) {
// bucket doesn't exist create it
if (err.statusCode === 404) {
await objClient
.createBucket({
Bucket: APP_BUCKET,
})
.promise()
} else {
throw err
}
}
// always make sure policy is correct
await objClient
.putBucketPolicy({
Bucket: APP_BUCKET,
Policy: JSON.stringify(PUBLIC_READ_POLICY),
})
.promise()
// Ideally want to send back some pre-signed URLs for files that are to be uploaded
return {
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
}
}
exports.deploy = async ctx => {
ctx.body = {
couchDbSession: await getCouchSession(),
bucket: APP_BUCKET,
objectStoreSession: await getMinioSession(),
}
}

View File

@@ -0,0 +1,12 @@
const Router = require("@koa/router")
const controller = require("../../controllers/admin")
const authorized = require("../../../middleware/authorized")
const router = Router()
router.post("/api/admin/users", authorized, controller.userSave)
.delete("/api/admin/users/:email", authorized, controller.userDelete)
.get("/api/admin/users", authorized, controller.userFetch)
.get("/api/admin/users/:email", authorized, controller.userFind)
module.exports = router
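
As a rough usage sketch, the new endpoints can be exercised with got (already a dependency); the base URL and example values below are assumptions, and the body fields match what userSave reads from ctx.request.body.

const got = require("got")
const base = "http://localhost:4002" // hypothetical worker URL

async function demo() {
  // create (or update) a global user record
  await got.post(`${base}/api/admin/users`, {
    json: { email: "test@example.com", password: "hunter2" },
    responseType: "json",
  })
  // list all users - the controller strips hashed passwords
  const { body: users } = await got(`${base}/api/admin/users`, { responseType: "json" })
  // look a single user up by email
  const { body: user } = await got(`${base}/api/admin/users/test@example.com`, { responseType: "json" })
  return { users, user }
}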

View File

@@ -1,9 +0,0 @@
const Router = require("@koa/router")
const controller = require("../controllers/deploy")
const checkKey = require("../../middleware/check-key")
const router = Router()
router.post("/api/deploy", checkKey, controller.deploy)
module.exports = router

View File

@@ -1,4 +1,4 @@
-const deployRoutes = require("./deploy")
+const adminRoutes = require("./admin")
 const appRoutes = require("./app")
 
-exports.routes = [deployRoutes, appRoutes]
+exports.routes = [adminRoutes, appRoutes]

View File

@@ -0,0 +1,4 @@
exports.UserStatus = {
ACTIVE: "active",
INACTIVE: "inactive",
}

View File

@@ -0,0 +1,32 @@
exports.StaticDatabases = {
USER: {
name: "user-db",
}
}
const DocumentTypes = {
USER: "us"
}
const UNICODE_MAX = "\ufff0"
const SEPARATOR = "_"
/**
* Generates a new user ID based on the passed in email.
* @param {string} email The email which the ID is going to be built up of.
* @returns {string} The new user ID which the user doc can be stored under.
*/
exports.generateUserID = email => {
return `${DocumentTypes.USER}${SEPARATOR}${email}`
}
/**
* Gets parameters for retrieving users; this is a utility function for the getDocParams function.
*/
exports.getUserParams = (email = "", otherProps = {}) => {
return {
...otherProps,
startkey: `${DocumentTypes.USER}${SEPARATOR}${email}`,
endkey: `${DocumentTypes.USER}${SEPARATOR}${email}${UNICODE_MAX}`,
}
}
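
A quick illustration of how these helpers combine (output values are examples, and the require path is an assumption): generateUserID prefixes the email with the user document type, and getUserParams builds the startkey/endkey range that allDocs uses to return only user documents. Note the email default only applies when the argument is undefined.

const { generateUserID, getUserParams } = require("./utils") // assumed path

generateUserID("foo@bar.com")
// => "us_foo@bar.com"

getUserParams("foo@bar.com")
// => { startkey: "us_foo@bar.com", endkey: "us_foo@bar.com\ufff0" }

getUserParams(undefined, { include_docs: true })
// => { include_docs: true, startkey: "us_", endkey: "us_\ufff0" }
//    i.e. every doc whose _id starts with the "us_" prefix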

View File

@@ -16,6 +16,8 @@ module.exports = {
   MINIO_URL: process.env.MINIO_URL,
   COUCH_DB_URL: process.env.COUCH_DB_URL,
   LOG_LEVEL: process.env.LOG_LEVEL,
+  JWT_SECRET: process.env.JWT_SECRET,
+  SALT_ROUNDS: process.env.SALT_ROUNDS,
   /* TODO: to remove - once deployment removed */
   SELF_HOST_KEY: process.env.SELF_HOST_KEY,
   COUCH_DB_USERNAME: process.env.COUCH_DB_USERNAME,

View File

@@ -0,0 +1,7 @@
/**
* Check the user token, used when creating admin resources, like for example
* a global user record.
*/
module.exports = async (ctx, next) => {
next()
}
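
The middleware is currently a pass-through stub. Purely as a sketch of where this might go (not the actual implementation), a token check could use the JWT_SECRET added to environment.js with a JWT library such as jsonwebtoken, which is not yet a dependency here:

const jwt = require("jsonwebtoken") // assumed dependency
const env = require("../environment")

module.exports = async (ctx, next) => {
  const header = ctx.headers.authorization
  if (!header || !header.startsWith("Bearer ")) {
    ctx.throw(401, "No user token provided")
  }
  try {
    // make the decoded user available to downstream controllers
    ctx.user = jwt.verify(header.split(" ")[1], env.JWT_SECRET)
  } catch (err) {
    ctx.throw(401, "Invalid user token")
  }
  return next()
}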

View File

@@ -285,6 +285,11 @@ base64-js@^1.0.2, base64-js@^1.3.1:
   resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
   integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
 
+bcryptjs@^2.4.3:
+  version "2.4.3"
+  resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb"
+  integrity sha1-mrVie5PmBiH/fNrF2pczAn3x0Ms=
+
 binary-extensions@^2.0.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
@@ -1813,7 +1818,7 @@ pouchdb-promise@5.4.3:
   dependencies:
     lie "3.0.4"
 
-pouchdb@^7.2.2:
+pouchdb@^7.2.1:
   version "7.2.2"
   resolved "https://registry.yarnpkg.com/pouchdb/-/pouchdb-7.2.2.tgz#fcae82862db527e4cf7576ed8549d1384961f364"
   integrity sha512-5gf5nw5XH/2H/DJj8b0YkvG9fhA/4Jt6kL0Y8QjtztVjb1y4J19Rg4rG+fUbXu96gsUrlyIvZ3XfM0b4mogGmw==