Removing more useless deployment code.

This commit is contained in:
mike12345567 2021-05-11 17:53:54 +01:00
parent 4827ee1484
commit 2f5ecf6e5f
4 changed files with 22 additions and 166 deletions

View File

@@ -1,6 +1,5 @@
const PouchDB = require("../../../db")
const Deployment = require("./Deployment")
const deploymentService = require("./selfDeploy")
// the max time we can wait for an invalidation to complete before considering it failed
const MAX_PENDING_TIME_MS = 30 * 60000
const DeploymentStatus = {
@@ -56,16 +55,8 @@ async function storeLocalDeploymentHistory(deployment) {
}

async function deployApp(deployment) {
  const appId = deployment.getAppId()
  try {
    console.log(`Uploading assets for appID ${appId}..`)
    await deploymentService.deploy(deployment)
    // replicate the DB to the main couchDB cluster
    console.log("Replicating local PouchDB to CouchDB..")
    await deploymentService.replicateDb(deployment)
    // TODO: DB replication was here but wasn't accurate to new system
    deployment.setStatus(DeploymentStatus.SUCCESS)
    await storeLocalDeploymentHistory(deployment)
  } catch (err) {

View File

@@ -1,19 +0,0 @@
const { deployToObjectStore, performReplication } = require("./utils")

exports.deploy = async function (deployment) {
  const appId = deployment.getAppId()
  const verification = deployment.getVerification()
  // no metadata, aws has account ID in metadata
  const metadata = {}
  await deployToObjectStore(appId, verification.bucket, metadata)
}

exports.replicateDb = async function (deployment) {
  const appId = deployment.getAppId()
  const verification = deployment.getVerification()
  return performReplication(
    appId,
    verification.couchDbSession,
    await getCouchUrl()
  )
}

View File

@@ -1,136 +0,0 @@
const { join } = require("../../../utilities/centralPath")
const fs = require("fs")
const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
const fetch = require("node-fetch")
const PouchDB = require("../../../db")
const CouchDB = require("pouchdb")
const { upload } = require("../../../utilities/fileSystem")
const { attachmentsRelativeURL } = require("../../../utilities")

// TODO: everything in this file is to be removed

function walkDir(dirPath, callback) {
  for (let filename of fs.readdirSync(dirPath)) {
    const filePath = `${dirPath}/${filename}`
    const stat = fs.lstatSync(filePath)
    if (stat.isFile()) {
      callback(filePath)
    } else {
      walkDir(filePath, callback)
    }
  }
}

exports.fetchCredentials = async function (url, body) {
  const response = await fetch(url, {
    method: "POST",
    body: JSON.stringify(body),
    headers: { "Content-Type": "application/json" },
  })
  const json = await response.json()
  if (json.errors) {
    throw new Error(json.errors)
  }
  if (response.status !== 200) {
    throw new Error(
      `Error fetching temporary credentials: ${JSON.stringify(json)}`
    )
  }
  return json
}

exports.prepareUpload = async function ({ s3Key, bucket, metadata, file }) {
  const response = await upload({
    bucket,
    metadata,
    filename: s3Key,
    path: file.path,
    type: file.type,
  })
  // don't store a URL, work this out on the way out as the URL could change
  return {
    size: file.size,
    name: file.name,
    url: attachmentsRelativeURL(response.Key),
    extension: [...file.name.split(".")].pop(),
    key: response.Key,
  }
}

exports.deployToObjectStore = async function (appId, bucket, metadata) {
  const appAssetsPath = join(budibaseAppsDir(), appId, "public")
  let uploads = []
  // Upload HTML, CSS and JS for each page of the web app
  walkDir(appAssetsPath, function (filePath) {
    const filePathParts = filePath.split("/")
    const appAssetUpload = exports.prepareUpload({
      bucket,
      file: {
        path: filePath,
        name: filePathParts.pop(),
      },
      s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
      metadata,
    })
    uploads.push(appAssetUpload)
  })
  // Upload file attachments
  const db = new PouchDB(appId)
  let fileUploads
  try {
    fileUploads = await db.get("_local/fileuploads")
  } catch (err) {
    fileUploads = { _id: "_local/fileuploads", uploads: [] }
  }
  for (let file of fileUploads.uploads) {
    if (file.uploaded) continue
    const attachmentUpload = exports.prepareUpload({
      file,
      s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
      bucket,
      metadata,
    })
    uploads.push(attachmentUpload)
    // mark file as uploaded
    file.uploaded = true
  }
  db.put(fileUploads)
  try {
    return await Promise.all(uploads)
  } catch (err) {
    console.error("Error uploading budibase app assets to s3", err)
    throw err
  }
}

exports.performReplication = (appId, session, dbUrl) => {
  return new Promise((resolve, reject) => {
    const local = new PouchDB(appId)
    const remote = new CouchDB(`${dbUrl}/${appId}`, {
      fetch: function (url, opts) {
        opts.headers.set("Cookie", `${session};`)
        return CouchDB.fetch(url, opts)
      },
    })
    const replication = local.sync(remote)
    replication.on("complete", () => resolve())
    replication.on("error", err => reject(err))
  })
}

View File

@@ -5,7 +5,6 @@ const { resolve, join } = require("../../../utilities/centralPath")
const fetch = require("node-fetch")
const uuid = require("uuid")
const { ObjectStoreBuckets } = require("../../../constants")
const { prepareUpload } = require("../deploy/utils")
const { processString } = require("@budibase/string-templates")
const { budibaseTempDir } = require("../../../utilities/budibaseDir")
const { getDeployedApps } = require("../../../utilities/workerRequests")
@@ -17,6 +16,27 @@ const {
} = require("../../../utilities/fileSystem")
const env = require("../../../environment")
const { objectStoreUrl, clientLibraryPath } = require("../../../utilities")
const { upload } = require("../../../utilities/fileSystem")
const { attachmentsRelativeURL } = require("../../../utilities")
async function prepareUpload({ s3Key, bucket, metadata, file }) {
  const response = await upload({
    bucket,
    metadata,
    filename: s3Key,
    path: file.path,
    type: file.type,
  })
  // don't store a URL, work this out on the way out as the URL could change
  return {
    size: file.size,
    name: file.name,
    url: attachmentsRelativeURL(response.Key),
    extension: [...file.name.split(".")].pop(),
    key: response.Key,
  }
}
async function checkForSelfHostedURL(ctx) {
  // the "appId" component of the URL may actually be a specific self hosted URL