Some work to avoid contacting Lambda or using quotas when deploying locally.
parent 5c4fb1da63
commit 4fed10ccdf
@@ -1,12 +1,7 @@
-const fs = require("fs")
-const { join } = require("../../../utilities/centralPath")
-const AwsDeploy = require("aws-sdk")
+const AWS = require("aws-sdk")
 const fetch = require("node-fetch")
-const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
-const PouchDB = require("../../../db")
 const env = require("../../../environment")
-const { prepareUpload } = require("./utils")
-const { walkDir } = require("../../../utilities")
+const { deployToObjectStore } = require("./utils")
 
 /**
  * Verifies the users API key and
@@ -37,7 +32,7 @@ exports.preDeployment = async function(deployment) {
 
   // set credentials here, means any time we're verified we're ready to go
   if (json.credentials) {
-    AwsDeploy.config.update({
+    AWS.config.update({
      accessKeyId: json.credentials.AccessKeyId,
      secretAccessKey: json.credentials.SecretAccessKey,
      sessionToken: json.credentials.SessionToken,
@@ -80,65 +75,11 @@ exports.postDeployment = async function(deployment) {
 exports.deploy = async function(deployment) {
   const appId = deployment.getAppId()
   const { bucket, accountId } = deployment.getVerification()
-  const s3 = new AwsDeploy.S3({
+  const metadata = { accountId }
+  const s3Client = new AWS.S3({
     params: {
       Bucket: bucket,
     },
   })
-
-  const appAssetsPath = join(budibaseAppsDir(), appId, "public")
-
-  const appPages = fs.readdirSync(appAssetsPath)
-
-  let uploads = []
-
-  for (let page of appPages) {
-    // Upload HTML, CSS and JS for each page of the web app
-    walkDir(join(appAssetsPath, page), function(filePath) {
-      const appAssetUpload = prepareUpload({
-        file: {
-          path: filePath,
-          name: [...filePath.split("/")].pop(),
-        },
-        s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
-        s3,
-        metadata: { accountId },
-      })
-      uploads.push(appAssetUpload)
-    })
-  }
-
-  // Upload file attachments
-  const db = new PouchDB(appId)
-  let fileUploads
-  try {
-    fileUploads = await db.get("_local/fileuploads")
-  } catch (err) {
-    fileUploads = { _id: "_local/fileuploads", uploads: [] }
-  }
-
-  for (let file of fileUploads.uploads) {
-    if (file.uploaded) continue
-
-    const attachmentUpload = prepareUpload({
-      file,
-      s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
-      s3,
-      metadata: { accountId },
-    })
-
-    uploads.push(attachmentUpload)
-
-    // mark file as uploaded
-    file.uploaded = true
-  }
-
-  db.put(fileUploads)
-
-  try {
-    return await Promise.all(uploads)
-  } catch (err) {
-    console.error("Error uploading budibase app assets to s3", err)
-    throw err
-  }
+  await deployToObjectStore(appId, s3Client, metadata)
 }
@@ -1,5 +1,31 @@
-exports.preDeployment = async function(deployment) {}
+const env = require("../../../environment")
+const AWS = require("aws-sdk")
+const { deployToObjectStore } = require("./utils")
 
-exports.postDeployment = async function(deployment) {}
+const APP_BUCKET = "app-assets"
 
-exports.deploy = async function(deployment) {}
+exports.preDeployment = async function() {
+  AWS.config.update({
+    accessKeyId: env.MINIO_ACCESS_KEY,
+    secretAccessKey: env.MINIO_SECRET_KEY,
+  })
+}
+
+exports.postDeployment = async function() {
+  // we don't actively need to do anything after deployment in self hosting
+}
+
+exports.deploy = async function(deployment) {
+  const appId = deployment.getAppId()
+  var objClient = new AWS.S3({
+    endpoint: "http://localhost:9000",
+    s3ForcePathStyle: true, // needed with minio?
+    signatureVersion: "v4",
+    params: {
+      Bucket: APP_BUCKET,
+    },
+  })
+  // no metadata, aws has account ID in metadata
+  const metadata = {}
+  await deployToObjectStore(appId, objClient, metadata)
+}
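A minimal usage sketch of the new self-hosted path, assuming MinIO is running on localhost:9000; the module name, app ID, and credential values below are illustrative assumptions, and the deployment object shape is inferred from the getAppId() call in the diff above:

// Hypothetical sketch only: module path, app ID and MinIO credentials are assumptions.
const selfDeploy = require("./selfDeploy")

async function deployLocally() {
  // MINIO_ACCESS_KEY / MINIO_SECRET_KEY must be present in the environment
  // before the environment module is loaded (see the environment.js hunk below).
  await selfDeploy.preDeployment() // points the AWS SDK at the MinIO credentials
  await selfDeploy.deploy({ getAppId: () => "app_123" }) // uploads assets to the app-assets bucket
  await selfDeploy.postDeployment() // no-op when self hosting
}

deployLocally().catch(console.error)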
@@ -1,5 +1,9 @@
 const fs = require("fs")
 const sanitize = require("sanitize-s3-objectkey")
+const { walkDir } = require("../../../utilities")
+const { join } = require("../../../utilities/centralPath")
+const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
+const PouchDB = require("../../../db")
 
 const CONTENT_TYPE_MAP = {
   html: "text/html",
@@ -7,11 +11,11 @@ const CONTENT_TYPE_MAP = {
   js: "application/javascript",
 }
 
-exports.prepareUpload = async function({ s3Key, metadata, s3, file }) {
+exports.prepareUpload = async function({ s3Key, metadata, client, file }) {
   const extension = [...file.name.split(".")].pop()
   const fileBytes = fs.readFileSync(file.path)
 
-  const upload = await s3
+  const upload = await client
     .upload({
       // windows file paths need to be converted to forward slashes for s3
       Key: sanitize(s3Key).replace(/\\/g, "/"),
@@ -29,3 +33,61 @@ exports.prepareUpload = async function({ s3Key, metadata, s3, file }) {
     key: upload.Key,
   }
 }
+
+exports.deployToObjectStore = async function(appId, objectClient, metadata) {
+  const appAssetsPath = join(budibaseAppsDir(), appId, "public")
+
+  const appPages = fs.readdirSync(appAssetsPath)
+
+  let uploads = []
+
+  for (let page of appPages) {
+    // Upload HTML, CSS and JS for each page of the web app
+    walkDir(join(appAssetsPath, page), function(filePath) {
+      const appAssetUpload = exports.prepareUpload({
+        file: {
+          path: filePath,
+          name: [...filePath.split("/")].pop(),
+        },
+        s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
+        client: objectClient,
+        metadata,
+      })
+      uploads.push(appAssetUpload)
+    })
+  }
+
+  // Upload file attachments
+  const db = new PouchDB(appId)
+  let fileUploads
+  try {
+    fileUploads = await db.get("_local/fileuploads")
+  } catch (err) {
+    fileUploads = { _id: "_local/fileuploads", uploads: [] }
+  }
+
+  for (let file of fileUploads.uploads) {
+    if (file.uploaded) continue
+
+    const attachmentUpload = exports.prepareUpload({
+      file,
+      s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
+      client: objectClient,
+      metadata,
+    })
+
+    uploads.push(attachmentUpload)
+
+    // mark file as uploaded
+    file.uploaded = true
+  }
+
+  db.put(fileUploads)
+
+  try {
+    return await Promise.all(uploads)
+  } catch (err) {
+    console.error("Error uploading budibase app assets to s3", err)
+    throw err
+  }
+}
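The extracted helper is client-agnostic: both the AWS and self-hosted modules now construct their own S3 client and pass it in. A hedged sketch of calling it directly, assuming any S3-compatible client with a default Bucket param; the bucket name and accountId value are illustrative only:

// Sketch under assumptions, not part of the commit.
const AWS = require("aws-sdk")
const { deployToObjectStore } = require("./utils")

async function pushAssets(appId) {
  const client = new AWS.S3({
    params: { Bucket: "app-assets" }, // default bucket applied to every upload
  })
  // metadata flows through prepareUpload and is attached to each uploaded object
  await deployToObjectStore(appId, client, { accountId: "example-account" })
}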
@@ -36,6 +36,8 @@ module.exports = {
   ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
   DEPLOYMENT_DB_URL: process.env.DEPLOYMENT_DB_URL,
   LOCAL_TEMPLATES: process.env.LOCAL_TEMPLATES,
+  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
+  MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
   _set(key, value) {
     process.env[key] = value
     module.exports[key] = value
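Because the environment module reads process.env when it is first loaded, the existing _set helper shown above can also supply the MinIO keys at runtime (for example from a test setup). A small sketch, assuming the same relative require path as the deploy modules; the key values are illustrative, not defaults shipped by this commit:

// Illustrative values only.
const env = require("../../../environment")

env._set("MINIO_ACCESS_KEY", "minioadmin")
env._set("MINIO_SECRET_KEY", "minioadmin")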