Merge branch 'master' of github.com:Budibase/budibase into endpoint-renaming

commit 3f0d205846
@@ -1,5 +1,5 @@
 <script>
-  import { Button } from "@budibase/bbui"
+  import { Button, Spacer } from "@budibase/bbui"
   import { store } from "builderStore"
   import { notifier } from "builderStore/store/notifications"
   import api from "builderStore/api"
@@ -51,6 +51,7 @@
   <Button secondary medium on:click={deployApp}>
     Deploy App
     {#if loading}
+      <Spacer extraSmall />
       <Spinner size="10" />
     {/if}
   </Button>
@@ -0,0 +1,6 @@
+node_modules
+npm-debug.log
+Dockerfile
+.dockerignore
+.git
+.gitignore
@@ -55,7 +55,8 @@ exports.authenticate = async ctx => {
   }
   // if in cloud add the user api key
   if (environment.CLOUD) {
-    payload.apiKey = getAPIKey(ctx.user.appId)
+    const { apiKey } = await getAPIKey(ctx.user.appId)
+    payload.apiKey = apiKey
   }

   const token = jwt.sign(payload, ctx.config.jwtSecret, {
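The await matters because an unawaited promise JSON-serialises to an empty object when the JWT payload is signed; a self-contained demonstration (getAPIKeyStub and its resolved shape are assumptions standing in for the real getAPIKey):

// Minimal demonstration of the bug the await fixes: an unawaited async result
// JSON-serialises as an empty object, so the old payload.apiKey was never the key.
async function getAPIKeyStub() {
  return { apiKey: "ak_example" } // assumed resolved shape
}

async function demo() {
  const broken = { apiKey: getAPIKeyStub() } // missing await
  const fixed = { apiKey: (await getAPIKeyStub()).apiKey }
  console.log(JSON.stringify(broken)) // {"apiKey":{}}
  console.log(JSON.stringify(fixed))  // {"apiKey":"ak_example"}
}

demo()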
@@ -4,6 +4,7 @@ const AWS = require("aws-sdk")
 const fetch = require("node-fetch")
 const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
 const PouchDB = require("../../../db")
+const environment = require("../../../environment")

 async function invalidateCDN(cfDistribution, appId) {
   const cf = new AWS.CloudFront({})
@@ -22,11 +23,46 @@ async function invalidateCDN(cfDistribution, appId) {
     .promise()
 }

-exports.fetchTemporaryCredentials = async function() {
+exports.updateDeploymentQuota = async function(quota) {
+  const DEPLOYMENT_SUCCESS_URL =
+    environment.DEPLOYMENT_CREDENTIALS_URL + "deploy/success"
+
+  const response = await fetch(DEPLOYMENT_SUCCESS_URL, {
+    method: "POST",
+    body: JSON.stringify({
+      apiKey: process.env.BUDIBASE_API_KEY,
+      quota,
+    }),
+    headers: {
+      "Content-Type": "application/json",
+      Accept: "application/json",
+    },
+  })
+
+  if (response.status !== 200) {
+    throw new Error(`Error updating deployment quota for API Key`)
+  }
+
+  const json = await response.json()
+
+  return json
+}
+
+/**
+ * Verifies the user's API key and
+ * verifies that the deployment fits within the user's quota.
+ * @param {String} instanceId - instanceId being deployed
+ * @param {String} appId - appId being deployed
+ * @param {Object} quota - current quota being changed by this deployment
+ */
+exports.verifyDeployment = async function({ instanceId, appId, quota }) {
   const response = await fetch(process.env.DEPLOYMENT_CREDENTIALS_URL, {
     method: "POST",
     body: JSON.stringify({
       apiKey: process.env.BUDIBASE_API_KEY,
+      instanceId,
+      appId,
+      quota,
     }),
   })

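Taken together with the deploy controller changes further down, these two calls bracket a deployment: verifyDeployment is asked whether the proposed usage fits the plan, and updateDeploymentQuota reports the accepted usage back afterwards. A hedged sketch of that flow (the wrapper function is illustrative; the credentials.errors and credentials.quota fields are taken from the controller below):

// Sketch only, assuming the response shapes used by the deploy controller below.
const { verifyDeployment, updateDeploymentQuota } = require("./aws")

async function deployWithQuotaCheck({ instanceId, appId, quota }) {
  // ask the central deployment service whether this deploy fits the user's plan
  const credentials = await verifyDeployment({ instanceId, appId, quota })
  if (credentials.errors) {
    throw new Error(credentials.errors)
  }

  // ...upload assets / replicate CouchDB using the returned credentials...

  // report the post-deploy usage so the central quota stays in sync
  await updateDeploymentQuota(credentials.quota)
}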
@@ -133,30 +169,33 @@ exports.uploadAppAssets = async function({

   // Upload file attachments
   const db = new PouchDB(instanceId)
-  const fileUploads = await db.get("_local/fileuploads")
-  if (fileUploads) {
-    for (let file of fileUploads.uploads) {
-      if (file.uploaded) continue
-
-      const attachmentUpload = prepareUploadForS3({
-        file,
-        s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
-        s3,
-        metadata: { accountId },
-      })
-
-      uploads.push(attachmentUpload)
-
-      // mark file as uploaded
-      file.uploaded = true
-    }
-
-    db.put(fileUploads)
+  let fileUploads
+  try {
+    fileUploads = await db.get("_local/fileuploads")
+  } catch (err) {
+    fileUploads = { _id: "_local/fileuploads", uploads: [] }
   }

+  for (let file of fileUploads.uploads) {
+    if (file.uploaded) continue
+
+    const attachmentUpload = prepareUploadForS3({
+      file,
+      s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
+      s3,
+      metadata: { accountId },
+    })
+
+    uploads.push(attachmentUpload)
+
+    // mark file as uploaded
+    file.uploaded = true
+  }
+
+  db.put(fileUploads)
+
   try {
     await Promise.all(uploads)
-    // TODO: update dynamoDB with a synopsis of the app deployment for historical purposes
     await invalidateCDN(cfDistribution, appId)
   } catch (err) {
     console.error("Error uploading budibase app assets to s3", err)
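The try/catch above replaces the old truthiness check because PouchDB's db.get rejects with a 404 error when a document is missing rather than resolving to undefined, so `if (fileUploads)` never handled that case. A small sketch of the get-or-default pattern (the helper name is illustrative):

// getLocalDoc is an illustrative helper, not part of the commit.
async function getLocalDoc(db, id, defaults) {
  try {
    return await db.get(id)
  } catch (err) {
    // PouchDB throws (status 404) when the doc does not exist yet
    return { _id: id, ...defaults }
  }
}

// usage mirroring the diff:
// const fileUploads = await getLocalDoc(db, "_local/fileuploads", { uploads: [] })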
@@ -1,6 +1,11 @@
 const CouchDB = require("pouchdb")
 const PouchDB = require("../../../db")
-const { uploadAppAssets, fetchTemporaryCredentials } = require("./aws")
+const {
+  uploadAppAssets,
+  verifyDeployment,
+  updateDeploymentQuota,
+} = require("./aws")
+const { DocumentTypes, SEPARATOR, UNICODE_MAX } = require("../../../db/utils")

 function replicate(local, remote) {
   return new Promise((resolve, reject) => {
@@ -31,13 +36,49 @@ async function replicateCouch({ instanceId, clientId, credentials }) {
   await Promise.all(replications)
 }

+async function getCurrentInstanceQuota(instanceId) {
+  const db = new PouchDB(instanceId)
+
+  const rows = await db.allDocs({
+    startkey: DocumentTypes.ROW + SEPARATOR,
+    endkey: DocumentTypes.ROW + SEPARATOR + UNICODE_MAX,
+  })
+
+  const users = await db.allDocs({
+    startkey: DocumentTypes.USER + SEPARATOR,
+    endkey: DocumentTypes.USER + SEPARATOR + UNICODE_MAX,
+  })
+
+  const existingRows = rows.rows.length
+  const existingUsers = users.rows.length
+
+  const designDoc = await db.get("_design/database")
+
+  return {
+    rows: existingRows,
+    users: existingUsers,
+    views: Object.keys(designDoc.views).length,
+  }
+}
+
 exports.deployApp = async function(ctx) {
   try {
     const clientAppLookupDB = new PouchDB("client_app_lookup")
     const { clientId } = await clientAppLookupDB.get(ctx.user.appId)

+    const instanceQuota = await getCurrentInstanceQuota(ctx.user.instanceId)
+    const credentials = await verifyDeployment({
+      instanceId: ctx.user.instanceId,
+      appId: ctx.user.appId,
+      quota: instanceQuota,
+    })
+
     ctx.log.info(`Uploading assets for appID ${ctx.user.appId} assets to s3..`)
-    const credentials = await fetchTemporaryCredentials()
+
+    if (credentials.errors) {
+      ctx.throw(500, credentials.errors)
+      return
+    }

     await uploadAppAssets({
       clientId,
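getCurrentInstanceQuota counts documents with the prefix range trick enabled by the new UNICODE_MAX export: every _id that starts with `type + SEPARATOR` sorts between that prefix and the prefix plus a high-sorting sentinel. A sketch of the pattern (the countDocs helper is illustrative; the concrete DocumentTypes values are assumptions):

// Illustrative helper -- not part of the commit.
const { DocumentTypes, SEPARATOR, UNICODE_MAX } = require("../../../db/utils")

async function countDocs(db, docType) {
  // allDocs with startkey/endkey returns only docs whose _id starts with
  // `${docType}${SEPARATOR}`; UNICODE_MAX closes the range at the top end
  const result = await db.allDocs({
    startkey: docType + SEPARATOR,
    endkey: docType + SEPARATOR + UNICODE_MAX,
  })
  return result.rows.length
}

// e.g. const existingRows = await countDocs(db, DocumentTypes.ROW)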
@@ -54,6 +95,8 @@ exports.deployApp = async function(ctx) {
       credentials: credentials.couchDbCreds,
     })

+    await updateDeploymentQuota(credentials.quota)
+
     ctx.body = {
       status: "SUCCESS",
       completed: Date.now(),
@@ -153,7 +153,7 @@ exports.serveApp = async function(ctx) {

   // only set the appId cookie for /appId .. we COULD check for valid appIds
   // but would like to avoid that DB hit
-  const looksLikeAppId = /^[0-9a-f]{32}$/.test(appId)
+  const looksLikeAppId = /^(app_)?[0-9a-f]{32}$/.test(appId)
   if (looksLikeAppId && !ctx.isAuthenticated) {
     const anonUser = {
       userId: "ANON",
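The widened pattern only adds an optional app_ prefix in front of the 32-character hex ID, for example:

// Quick check of the updated pattern (example IDs below are made up)
const looksLikeAppId = id => /^(app_)?[0-9a-f]{32}$/.test(id)

looksLikeAppId("b7c1d0e2f3a4b5c6d7e8f9a0b1c2d3e4")     // true (old behaviour)
looksLikeAppId("app_b7c1d0e2f3a4b5c6d7e8f9a0b1c2d3e4") // true (newly accepted)
looksLikeAppId("not-an-app-id")                        // false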
@@ -82,7 +82,7 @@ exports.updateLinks = async function({
  * then an array will be output, object input -> object output.
  */
 exports.attachLinkInfo = async (instanceId, rows) => {
-  // handle a single record as well as multiple
+  // handle a single row as well as multiple
   let wasArray = true
   if (!(rows instanceof Array)) {
     rows = [rows]
@@ -15,6 +15,7 @@ const DocumentTypes = {

 exports.DocumentTypes = DocumentTypes
 exports.SEPARATOR = SEPARATOR
+exports.UNICODE_MAX = UNICODE_MAX

 /**
  * If creating DB allDocs/query params with only a single top level ID this can be used, this
@@ -14,4 +14,5 @@ module.exports = {
   CLOUD: process.env.CLOUD,
   DYNAMO_ENDPOINT: process.env.DYNAMO_ENDPOINT,
   AWS_REGION: process.env.AWS_REGION,
+  DEPLOYMENT_CREDENTIALS_URL: process.env.DEPLOYMENT_CREDENTIALS_URL,
 }
@@ -45,7 +45,8 @@ module.exports = async (ctx, next) => {

   try {
     const jwtPayload = jwt.verify(appToken, ctx.config.jwtSecret)
+    ctx.apiKey = jwtPayload.apiKey
     ctx.user = {
       ...jwtPayload,
       accessLevel: await getAccessLevel(
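Exposing the key on ctx presumably lets later middleware read it without decoding the JWT again; a hypothetical consumer, reusing the usageQuota.update(apiKey, property, usage) signature from the module below (the require path is an assumption):

// Hypothetical downstream middleware -- not part of the commit.
const usageQuota = require("../utilities/usageQuota") // assumed path

module.exports = (property, usage) => async (ctx, next) => {
  if (ctx.apiKey) {
    await usageQuota.update(ctx.apiKey, property, usage)
  }
  await next()
}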
@@ -22,7 +22,7 @@ function buildUpdateParams(key, property, usage) {
   return {
     primary: key,
     condition:
-      "#quota.#prop < #limits.#prop AND #quotaReset > :now AND attribute_exists(#quota) AND attribute_exists(#limits)",
+      "attribute_exists(#quota) AND attribute_exists(#limits) AND #quota.#prop < #limits.#prop AND #quotaReset > :now",
     expression: "ADD #quota.#prop :usage",
     names: {
       "#quota": "usageQuota",
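buildUpdateParams describes a conditional DynamoDB update; a rough sketch of the equivalent DocumentClient call, with the table name, key attribute, and the tail of the names map treated as assumptions (only "#quota": "usageQuota" is visible in the hunk):

// Rough sketch only -- written directly against the AWS SDK v2 DocumentClient.
const AWS = require("aws-sdk")
const docClient = new AWS.DynamoDB.DocumentClient()

async function addUsage(apiKey, property, usage) {
  await docClient
    .update({
      TableName: "api-keys", // assumed
      Key: { pk: apiKey }, // assumed key attribute name
      UpdateExpression: "ADD #quota.#prop :usage",
      ConditionExpression:
        "attribute_exists(#quota) AND attribute_exists(#limits) AND #quota.#prop < #limits.#prop AND #quotaReset > :now",
      ExpressionAttributeNames: {
        "#quota": "usageQuota",
        "#prop": property, // assumed to follow the visible "#quota" mapping
        "#limits": "usageLimits",
        "#quotaReset": "quotaReset",
      },
      ExpressionAttributeValues: { ":usage": usage, ":now": Date.now() },
    })
    .promise()
  // a failed ConditionExpression rejects with err.code === "ConditionalCheckFailedException",
  // which is exactly what the catch block below inspects
}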
@@ -69,13 +69,16 @@ exports.update = async (apiKey, property, usage) => {
   try {
     await apiKeyTable.update(buildUpdateParams(apiKey, property, usage))
   } catch (err) {
+    // conditional check means the condition failed, need to check why
     if (err.code === "ConditionalCheckFailedException") {
       // get the API key so we can check it
       const keyObj = await apiKeyTable.get({ primary: apiKey })
       // the usage quota or usage limits didn't exist
       if (keyObj && (keyObj.usageQuota == null || keyObj.usageLimits == null)) {
-        keyObj.usageQuota = DEFAULT_USAGE
-        keyObj.usageLimits = DEFAULT_PLAN
+        keyObj.usageQuota =
+          keyObj.usageQuota == null ? DEFAULT_USAGE : keyObj.usageQuota
+        keyObj.usageLimits =
+          keyObj.usageLimits == null ? DEFAULT_PLAN : keyObj.usageLimits
         keyObj.quotaReset = getNewQuotaReset()
         await apiKeyTable.put({ item: keyObj })
         return
@@ -94,7 +97,7 @@ exports.update = async (apiKey, property, usage) => {
         await apiKeyTable.put({ item: keyObj })
         return
       }
-      throw "Resource limits have been reached"
     }
+    throw err
   }
 }
File diff suppressed because it is too large