diff --git a/hosting/docker-compose.yaml b/hosting/docker-compose.yaml
index 78408c85e6..86269837c2 100644
--- a/hosting/docker-compose.yaml
+++ b/hosting/docker-compose.yaml
@@ -11,13 +11,18 @@ services:
- "${APP_PORT}:4002"
environment:
SELF_HOSTED: 1
- CLOUD: 1
COUCH_DB_URL: http://${COUCH_DB_USER}:${COUCH_DB_PASSWORD}@couchdb-service:5984
WORKER_URL: http://worker-service:4003
+ MINIO_URL: http://minio-service:9000
+ MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
+ MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
HOSTING_KEY: ${HOSTING_KEY}
BUDIBASE_ENVIRONMENT: ${BUDIBASE_ENVIRONMENT}
PORT: 4002
JWT_SECRET: ${JWT_SECRET}
+ LOG_LEVEL: info
+ SENTRY_DSN: https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
+ ENABLE_ANALYTICS: true
depends_on:
- worker-service
@@ -28,7 +33,7 @@ services:
ports:
- "${WORKER_PORT}:4003"
environment:
- SELF_HOSTED: 1,
+ SELF_HOSTED: 1
PORT: 4003
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
diff --git a/packages/builder/src/components/backend/DatasourceNavigator/popovers/EditDatasourcePopover.svelte b/packages/builder/src/components/backend/DatasourceNavigator/popovers/EditDatasourcePopover.svelte
index 7570cd6c5e..8c2ea880ae 100644
--- a/packages/builder/src/components/backend/DatasourceNavigator/popovers/EditDatasourcePopover.svelte
+++ b/packages/builder/src/components/backend/DatasourceNavigator/popovers/EditDatasourcePopover.svelte
@@ -27,7 +27,7 @@
notifier.success("Datasource deleted")
// navigate to first index page if the source you are deleting is selected
if (wasSelectedSource === datasource._id) {
- $goto('./datasource')
+ $goto("./datasource")
}
hideEditor()
}
diff --git a/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte b/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte
index 60886b5be1..0a186375d8 100644
--- a/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte
+++ b/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte
@@ -37,13 +37,13 @@
}
async function deleteTable() {
- const wasSelectedTable = $backendUiStore.selectedTable
+ const wasSelectedTable = $backendUiStore.selectedTable
await backendUiStore.actions.tables.delete(table)
store.actions.screens.delete(templateScreens)
await backendUiStore.actions.tables.fetch()
notifier.success("Table deleted")
if (wasSelectedTable._id === table._id) {
- $goto('./table')
+ $goto("./table")
}
hideEditor()
}
diff --git a/packages/server/.env.template b/packages/server/.env.template
deleted file mode 100644
index b2ff5be3f4..0000000000
--- a/packages/server/.env.template
+++ /dev/null
@@ -1,17 +0,0 @@
-# url of couch db, including username and password
-# http://admin:password@localhost:5984
-COUCH_DB_URL={{couchDbUrl}}
-
-# identifies a client database - i.e. group of apps
-CLIENT_ID={{clientId}}
-
-# used to create cookie hashes
-JWT_SECRET={{cookieKey1}}
-
-# error level for koa-pino
-LOG_LEVEL=info
-
-DEPLOYMENT_CREDENTIALS_URL="https://dt4mpwwap8.execute-api.eu-west-1.amazonaws.com/prod/"
-DEPLOYMENT_DB_URL="https://couchdb.budi.live:5984"
-SENTRY_DSN=https://a34ae347621946bf8acded18e5b7d4b8@o420233.ingest.sentry.io/5338131
-ENABLE_ANALYTICS="true"
diff --git a/packages/server/.gitignore b/packages/server/.gitignore
index b42fc06f06..22397018be 100644
--- a/packages/server/.gitignore
+++ b/packages/server/.gitignore
@@ -1,7 +1,6 @@
node_modules/
myapps/
.env
-dev.env
/builder/*
!/builder/assets/
!/builder/pickr.min.js
diff --git a/packages/server/Dockerfile b/packages/server/Dockerfile
index d75fe1f5d0..c13022c2d6 100644
--- a/packages/server/Dockerfile
+++ b/packages/server/Dockerfile
@@ -2,7 +2,6 @@ FROM node:12-alpine
WORKDIR /app
-ENV CLOUD=1
ENV PORT=4001
ENV COUCH_DB_URL=https://couchdb.budi.live:5984
ENV BUDIBASE_ENVIRONMENT=PRODUCTION
diff --git a/packages/server/package.json b/packages/server/package.json
index c9150f11c9..36cd215423 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -39,7 +39,7 @@
"dev:stack:up": "node scripts/dev/manage.js up",
"dev:stack:down": "node scripts/dev/manage.js down",
"dev:stack:nuke": "node scripts/dev/manage.js nuke",
- "dev:builder": "npm run dev:stack:up && env-cmd -f dev.env nodemon src/index.js",
+ "dev:builder": "npm run dev:stack:up && nodemon src/index.js",
"electron": "electron src/electron.js",
"build:electron": "electron-builder --dir",
"publish:electron": "electron-builder -mwl --publish always",
@@ -65,7 +65,8 @@
"!src/db/tests/**/*",
"!src/tests/**/*",
"!src/automations/tests/**/*",
- "!src/utilities/fileProcessor.js"
+ "!src/utilities/fileProcessor.js",
+ "!src/utilities/fileSystem/**/*"
],
"coverageReporters": [
"lcov",
@@ -140,7 +141,6 @@
"electron": "10.1.3",
"electron-builder": "^22.9.1",
"electron-builder-notarize": "^1.1.2",
- "env-cmd": "^10.1.0",
"eslint": "^6.8.0",
"jest": "^24.8.0",
"nodemon": "^2.0.4",
diff --git a/packages/server/scripts/dev/manage.js b/packages/server/scripts/dev/manage.js
index ca67bd3420..af4f67d23a 100644
--- a/packages/server/scripts/dev/manage.js
+++ b/packages/server/scripts/dev/manage.js
@@ -1,6 +1,7 @@
#!/usr/bin/env node
const compose = require("docker-compose")
const path = require("path")
+const fs = require("fs")
// This script wraps docker-compose allowing you to manage your dev infrastructure with simple commands.
const CONFIG = {
@@ -15,8 +16,33 @@ const Commands = {
Nuke: "nuke",
}
+async function init() {
+ const envFilePath = path.join(process.cwd(), ".env")
+ if (fs.existsSync(envFilePath)) {
+ return
+ }
+ const envFileJson = {
+ PORT: 4001,
+ MINIO_URL: "http://localhost:10000/",
+ COUCH_DB_URL: "http://budibase:budibase@localhost:10000/db/",
+ WORKER_URL: "http://localhost:4002",
+ JWT_SECRET: "testsecret",
+ MINIO_ACCESS_KEY: "budibase",
+ MINIO_SECRET_KEY: "budibase",
+ COUCH_DB_PASSWORD: "budibase",
+ COUCH_DB_USER: "budibase",
+ SELF_HOSTED: 1,
+ }
+ let envFile = ""
+ Object.keys(envFileJson).forEach(key => {
+ envFile += `${key}=${envFileJson[key]}\n`
+ })
+ fs.writeFileSync(envFilePath, envFile)
+}
+
async function up() {
console.log("Spinning up your budibase dev environment... 🔧✨")
+ await init()
await compose.upAll(CONFIG)
}
diff --git a/packages/server/scripts/exportAppTemplate.js b/packages/server/scripts/exportAppTemplate.js
index e896917d5b..c6d738225c 100755
--- a/packages/server/scripts/exportAppTemplate.js
+++ b/packages/server/scripts/exportAppTemplate.js
@@ -1,6 +1,9 @@
#!/usr/bin/env node
-const { exportTemplateFromApp } = require("../src/utilities/templates")
const yargs = require("yargs")
+const fs = require("fs")
+const { join } = require("path")
+const CouchDB = require("../src/db")
+const { budibaseAppsDir } = require("../src/utilities/budibaseDir")
// Script to export a chosen budibase app into a package
// Usage: ./scripts/exportAppTemplate.js export --name=Funky --appId=appId
@@ -22,18 +25,22 @@ yargs
},
},
async args => {
+ const name = args.name,
+ appId = args.appId
console.log("Exporting app..")
- if (args.name == null || args.appId == null) {
+ if (name == null || appId == null) {
console.error(
"Unable to export without a name and app ID being specified, check help for more info."
)
return
}
- const exportPath = await exportTemplateFromApp({
- templateName: args.name,
- appId: args.appId,
- })
- console.log(`Template ${args.name} exported to ${exportPath}`)
+ const exportPath = join(budibaseAppsDir(), "templates", "app", name, "db")
+ fs.ensureDirSync(exportPath)
+ const writeStream = fs.createWriteStream(join(exportPath, "dump.text"))
+ // perform couch dump
+ const instanceDb = new CouchDB(appId)
+ await instanceDb.dump(writeStream, {})
+ console.log(`Template ${name} exported to ${exportPath}`)
}
)
.help()
diff --git a/packages/server/src/api/controllers/apikeys.js b/packages/server/src/api/controllers/apikeys.js
index 96754f17cc..1c8caba1cb 100644
--- a/packages/server/src/api/controllers/apikeys.js
+++ b/packages/server/src/api/controllers/apikeys.js
@@ -1,56 +1,32 @@
-const fs = require("fs")
-const { join } = require("../../utilities/centralPath")
-const readline = require("readline")
-const { budibaseAppsDir } = require("../../utilities/budibaseDir")
-const env = require("../../environment")
-const ENV_FILE_PATH = "/.env"
+const builderDB = require("../../db/builder")
exports.fetch = async function(ctx) {
- ctx.status = 200
- ctx.body = {
- budibase: env.BUDIBASE_API_KEY,
- userId: env.USERID_API_KEY,
+ try {
+ const mainDoc = await builderDB.getBuilderMainDoc()
+ ctx.body = mainDoc.apiKeys ? mainDoc.apiKeys : {}
+ } catch (err) {
+ /* istanbul ignore next */
+ ctx.throw(400, err)
}
}
exports.update = async function(ctx) {
- const key = `${ctx.params.key.toUpperCase()}_API_KEY`
+ const key = ctx.params.key
const value = ctx.request.body.value
- // set environment variables
- env._set(key, value)
-
- // Write to file
- await updateValues([key, value])
-
- ctx.status = 200
- ctx.message = `Updated ${ctx.params.key} API key succesfully.`
- ctx.body = { [ctx.params.key]: ctx.request.body.value }
-}
-
-async function updateValues([key, value]) {
- let newContent = ""
- let keyExists = false
- let envPath = join(budibaseAppsDir(), ENV_FILE_PATH)
- const readInterface = readline.createInterface({
- input: fs.createReadStream(envPath),
- output: process.stdout,
- console: false,
- })
- readInterface.on("line", function(line) {
- // Mutate lines and change API Key
- if (line.startsWith(key)) {
- line = `${key}=${value}`
- keyExists = true
+ try {
+ const mainDoc = await builderDB.getBuilderMainDoc()
+ if (mainDoc.apiKeys == null) {
+ mainDoc.apiKeys = {}
}
- newContent = `${newContent}\n${line}`
- })
- readInterface.on("close", function() {
- // Write file here
- if (!keyExists) {
- // Add API Key if it doesn't exist in the file at all
- newContent = `${newContent}\n${key}=${value}`
+ mainDoc.apiKeys[key] = value
+ const resp = await builderDB.setBuilderMainDoc(mainDoc)
+ ctx.body = {
+ _id: resp.id,
+ _rev: resp.rev,
}
- fs.writeFileSync(envPath, newContent)
- })
+ } catch (err) {
+ /* istanbul ignore next */
+ ctx.throw(400, err)
+ }
}
diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js
index 89e233ac5c..8f4cfaf24d 100644
--- a/packages/server/src/api/controllers/application.js
+++ b/packages/server/src/api/controllers/application.js
@@ -1,15 +1,14 @@
const CouchDB = require("../../db")
-const compileStaticAssets = require("../../utilities/builder/compileStaticAssets")
const env = require("../../environment")
-const { existsSync } = require("fs-extra")
-const { budibaseAppsDir } = require("../../utilities/budibaseDir")
const setBuilderToken = require("../../utilities/builder/setBuilderToken")
-const fs = require("fs-extra")
-const { join, resolve } = require("../../utilities/centralPath")
const packageJson = require("../../../package.json")
const { createLinkView } = require("../../db/linkedRows")
const { createRoutingView } = require("../../utilities/routing")
-const { getTemplateStream } = require("../../utilities/fileSystem")
+const {
+ getTemplateStream,
+ createApp,
+ deleteApp,
+} = require("../../utilities/fileSystem")
const {
generateAppID,
getLayoutParams,
@@ -20,9 +19,6 @@ const {
BUILTIN_ROLE_IDS,
AccessController,
} = require("../../utilities/security/roles")
-const {
- downloadExtractComponentLibraries,
-} = require("../../utilities/createAppPackage")
const { BASE_LAYOUTS } = require("../../constants/layouts")
const {
createHomeScreen,
@@ -32,11 +28,7 @@ const { cloneDeep } = require("lodash/fp")
const { processObject } = require("@budibase/string-templates")
const { getAllApps } = require("../../utilities")
const { USERS_TABLE_SCHEMA } = require("../../constants")
-const {
- getDeployedApps,
- getHostingInfo,
- HostingTypes,
-} = require("../../utilities/builder/hosting")
+const { getDeployedApps } = require("../../utilities/builder/hosting")
const URL_REGEX_SLASH = /\/|\\/g
@@ -75,8 +67,7 @@ async function getAppUrlIfNotInUse(ctx) {
url = encodeURI(`${ctx.request.body.name}`)
}
url = `/${url.replace(URL_REGEX_SLASH, "")}`.toLowerCase()
- const hostingInfo = await getHostingInfo()
- if (hostingInfo.type === HostingTypes.CLOUD) {
+ if (!env.SELF_HOSTED) {
return url
}
const deployedApps = await getDeployedApps()
@@ -181,10 +172,10 @@ exports.create = async function(ctx) {
const instanceDb = new CouchDB(appId)
await instanceDb.put(newApplication)
- const newAppFolder = await createEmptyAppPackage(ctx, newApplication)
+ await createEmptyAppPackage(ctx, newApplication)
/* istanbul ignore next */
if (env.NODE_ENV !== "jest") {
- await downloadExtractComponentLibraries(newAppFolder)
+ await createApp(appId)
}
await setBuilderToken(ctx, appId, version)
@@ -214,10 +205,9 @@ exports.delete = async function(ctx) {
const app = await db.get(ctx.params.appId)
const result = await db.destroy()
- // remove top level directory
- await fs.rmdir(join(budibaseAppsDir(), ctx.params.appId), {
- recursive: true,
- })
+ if (env.NODE_ENV !== "jest") {
+ await deleteApp(ctx.params.appId)
+ }
ctx.status = 200
ctx.message = `Application ${app.name} deleted successfully.`
@@ -225,17 +215,8 @@ exports.delete = async function(ctx) {
}
const createEmptyAppPackage = async (ctx, app) => {
- const appsFolder = budibaseAppsDir()
- const newAppFolder = resolve(appsFolder, app._id)
-
const db = new CouchDB(app._id)
- if (existsSync(newAppFolder)) {
- ctx.throw(400, "App folder already exists for this application")
- }
-
- fs.mkdirpSync(newAppFolder)
-
let screensAndLayouts = []
for (let layout of BASE_LAYOUTS) {
const cloned = cloneDeep(layout)
@@ -251,6 +232,4 @@ const createEmptyAppPackage = async (ctx, app) => {
screensAndLayouts.push(loginScreen)
await db.bulkDocs(screensAndLayouts)
- await compileStaticAssets(app._id)
- return newAppFolder
}
diff --git a/packages/server/src/api/controllers/auth.js b/packages/server/src/api/controllers/auth.js
index e5c0f9a029..fc486bcb50 100644
--- a/packages/server/src/api/controllers/auth.js
+++ b/packages/server/src/api/controllers/auth.js
@@ -45,9 +45,9 @@ exports.authenticate = async ctx => {
roleId: dbUser.roleId,
version: app.version,
}
- // if in cloud add the user api key, unless self hosted
+ // if in prod add the user api key, unless self hosted
/* istanbul ignore next */
- if (env.CLOUD && !env.SELF_HOSTED) {
+ if (env.isProd() && !env.SELF_HOSTED) {
const { apiKey } = await getAPIKey(ctx.user.appId)
payload.apiKey = apiKey
}
diff --git a/packages/server/src/api/controllers/backup.js b/packages/server/src/api/controllers/backup.js
index a83f96165b..02be10bbec 100644
--- a/packages/server/src/api/controllers/backup.js
+++ b/packages/server/src/api/controllers/backup.js
@@ -1,28 +1,10 @@
-const { performDump } = require("../../utilities/templates")
-const path = require("path")
-const os = require("os")
-const fs = require("fs-extra")
+const { performBackup } = require("../../utilities/fileSystem")
exports.exportAppDump = async function(ctx) {
const { appId } = ctx.query
-
const appname = decodeURI(ctx.query.appname)
-
- const backupsDir = path.join(os.homedir(), ".budibase", "backups")
- fs.ensureDirSync(backupsDir)
-
const backupIdentifier = `${appname}Backup${new Date().getTime()}.txt`
- await performDump({
- dir: backupsDir,
- appId,
- name: backupIdentifier,
- })
-
- ctx.status = 200
-
- const backupFile = path.join(backupsDir, backupIdentifier)
-
ctx.attachment(backupIdentifier)
- ctx.body = fs.createReadStream(backupFile)
+ ctx.body = await performBackup(appId, backupIdentifier)
}
diff --git a/packages/server/src/api/controllers/component.js b/packages/server/src/api/controllers/component.js
index 092d154817..35a91fe4eb 100644
--- a/packages/server/src/api/controllers/component.js
+++ b/packages/server/src/api/controllers/component.js
@@ -1,44 +1,37 @@
const CouchDB = require("../../db")
-const { resolve, join } = require("../../utilities/centralPath")
-const {
- budibaseTempDir,
- budibaseAppsDir,
-} = require("../../utilities/budibaseDir")
+const { join } = require("../../utilities/centralPath")
+const { budibaseTempDir } = require("../../utilities/budibaseDir")
+const fileSystem = require("../../utilities/fileSystem")
+const env = require("../../environment")
exports.fetchAppComponentDefinitions = async function(ctx) {
const appId = ctx.params.appId || ctx.appId
const db = new CouchDB(appId)
const app = await db.get(appId)
- ctx.body = app.componentLibraries.reduce((acc, componentLibrary) => {
- let appDirectory = resolve(budibaseAppsDir(), appId, "node_modules")
-
- if (ctx.isDev) {
- appDirectory = budibaseTempDir()
- }
-
- const componentJson = require(join(
- appDirectory,
- componentLibrary,
- ctx.isDev ? "" : "package",
- "manifest.json"
- ))
-
- const result = {}
-
- // map over the components.json and add the library identifier as a key
- // button -> @budibase/standard-components/button
- for (let key of Object.keys(componentJson)) {
- const fullComponentName = `${componentLibrary}/${key}`.toLowerCase()
- result[fullComponentName] = {
+ let componentManifests = await Promise.all(
+ app.componentLibraries.map(async library => {
+ let manifest
+ if (env.isDev()) {
+ manifest = require(join(budibaseTempDir(), library, "manifest.json"))
+ } else {
+ manifest = await fileSystem.getComponentLibraryManifest(appId, library)
+ }
+ return {
+ manifest,
+ library,
+ }
+ })
+ )
+ const definitions = {}
+ for (let { manifest, library } of componentManifests) {
+ for (let key of Object.keys(manifest)) {
+ const fullComponentName = `${library}/${key}`.toLowerCase()
+ definitions[fullComponentName] = {
component: fullComponentName,
- ...componentJson[key],
+ ...manifest[key],
}
}
-
- return {
- ...acc,
- ...result,
- }
- }, {})
+ }
+ ctx.body = definitions
}
diff --git a/packages/server/src/api/controllers/deploy/awsDeploy.js b/packages/server/src/api/controllers/deploy/awsDeploy.js
index 18c9279515..2d34bc1b04 100644
--- a/packages/server/src/api/controllers/deploy/awsDeploy.js
+++ b/packages/server/src/api/controllers/deploy/awsDeploy.js
@@ -66,12 +66,7 @@ exports.deploy = async function(deployment) {
const appId = deployment.getAppId()
const { bucket, accountId } = deployment.getVerification()
const metadata = { accountId }
- const s3Client = new AWS.S3({
- params: {
- Bucket: bucket,
- },
- })
- await deployToObjectStore(appId, s3Client, metadata)
+ await deployToObjectStore(appId, bucket, metadata)
}
exports.replicateDb = async function(deployment) {
diff --git a/packages/server/src/api/controllers/deploy/selfDeploy.js b/packages/server/src/api/controllers/deploy/selfDeploy.js
index 81fa72cae5..444e7cd873 100644
--- a/packages/server/src/api/controllers/deploy/selfDeploy.js
+++ b/packages/server/src/api/controllers/deploy/selfDeploy.js
@@ -7,7 +7,6 @@ const {
const {
getWorkerUrl,
getCouchUrl,
- getMinioUrl,
getSelfHostKey,
} = require("../../../utilities/builder/hosting")
@@ -45,17 +44,9 @@ exports.postDeployment = async function() {
exports.deploy = async function(deployment) {
const appId = deployment.getAppId()
const verification = deployment.getVerification()
- const objClient = new AWS.S3({
- endpoint: await getMinioUrl(),
- s3ForcePathStyle: true, // needed with minio?
- signatureVersion: "v4",
- params: {
- Bucket: verification.bucket,
- },
- })
// no metadata, aws has account ID in metadata
const metadata = {}
- await deployToObjectStore(appId, objClient, metadata)
+ await deployToObjectStore(appId, verification.bucket, metadata)
}
exports.replicateDb = async function(deployment) {
diff --git a/packages/server/src/api/controllers/deploy/utils.js b/packages/server/src/api/controllers/deploy/utils.js
index 3536a6f630..de608acfb1 100644
--- a/packages/server/src/api/controllers/deploy/utils.js
+++ b/packages/server/src/api/controllers/deploy/utils.js
@@ -1,16 +1,24 @@
-const fs = require("fs")
-const sanitize = require("sanitize-s3-objectkey")
-const { walkDir } = require("../../../utilities")
const { join } = require("../../../utilities/centralPath")
+const fs = require("fs")
const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
const fetch = require("node-fetch")
const PouchDB = require("../../../db")
const CouchDB = require("pouchdb")
+const { upload } = require("../../../utilities/fileSystem")
-const CONTENT_TYPE_MAP = {
- html: "text/html",
- css: "text/css",
- js: "application/javascript",
+// TODO: everything in this file is to be removed
+
+function walkDir(dirPath, callback) {
+ for (let filename of fs.readdirSync(dirPath)) {
+    const filePath = `${dirPath}/${filename}`
+ const stat = fs.lstatSync(filePath)
+
+ if (stat.isFile()) {
+ callback(filePath)
+ } else {
+ walkDir(filePath, callback)
+ }
+ }
}
exports.fetchCredentials = async function(url, body) {
@@ -34,30 +42,25 @@ exports.fetchCredentials = async function(url, body) {
return json
}
-exports.prepareUpload = async function({ s3Key, metadata, client, file }) {
- const extension = [...file.name.split(".")].pop()
- const fileBytes = fs.readFileSync(file.path)
-
- const upload = await client
- .upload({
- // windows file paths need to be converted to forward slashes for s3
- Key: sanitize(s3Key).replace(/\\/g, "/"),
- Body: fileBytes,
- ContentType: file.type || CONTENT_TYPE_MAP[extension.toLowerCase()],
- Metadata: metadata,
- })
- .promise()
+exports.prepareUpload = async function({ s3Key, bucket, metadata, file }) {
+ const response = await upload({
+ bucket,
+ metadata,
+ filename: s3Key,
+ path: file.path,
+ type: file.type,
+ })
return {
size: file.size,
name: file.name,
- extension,
- url: upload.Location,
- key: upload.Key,
+ extension: [...file.name.split(".")].pop(),
+ url: response.Location,
+ key: response.Key,
}
}
-exports.deployToObjectStore = async function(appId, objectClient, metadata) {
+exports.deployToObjectStore = async function(appId, bucket, metadata) {
const appAssetsPath = join(budibaseAppsDir(), appId, "public")
let uploads = []
@@ -66,12 +69,12 @@ exports.deployToObjectStore = async function(appId, objectClient, metadata) {
walkDir(appAssetsPath, function(filePath) {
const filePathParts = filePath.split("/")
const appAssetUpload = exports.prepareUpload({
+ bucket,
file: {
path: filePath,
name: filePathParts.pop(),
},
s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
- client: objectClient,
metadata,
})
uploads.push(appAssetUpload)
@@ -92,7 +95,7 @@ exports.deployToObjectStore = async function(appId, objectClient, metadata) {
const attachmentUpload = exports.prepareUpload({
file,
s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
- client: objectClient,
+ bucket,
metadata,
})
diff --git a/packages/server/src/api/controllers/hosting.js b/packages/server/src/api/controllers/hosting.js
index 1d1884eb52..4b070cf75b 100644
--- a/packages/server/src/api/controllers/hosting.js
+++ b/packages/server/src/api/controllers/hosting.js
@@ -1,11 +1,11 @@
const CouchDB = require("../../db")
-const { BUILDER_CONFIG_DB, HOSTING_DOC } = require("../../constants")
const {
getHostingInfo,
getDeployedApps,
HostingTypes,
getAppUrl,
} = require("../../utilities/builder/hosting")
+const { StaticDatabases } = require("../../db/utils")
exports.fetchInfo = async ctx => {
ctx.body = {
@@ -14,17 +14,17 @@ exports.fetchInfo = async ctx => {
}
exports.save = async ctx => {
- const db = new CouchDB(BUILDER_CONFIG_DB)
+ const db = new CouchDB(StaticDatabases.BUILDER_HOSTING.name)
const { type } = ctx.request.body
if (type === HostingTypes.CLOUD && ctx.request.body._rev) {
ctx.body = await db.remove({
...ctx.request.body,
- _id: HOSTING_DOC,
+ _id: StaticDatabases.BUILDER_HOSTING.baseDoc,
})
} else {
ctx.body = await db.put({
...ctx.request.body,
- _id: HOSTING_DOC,
+ _id: StaticDatabases.BUILDER_HOSTING.baseDoc,
})
}
}
diff --git a/packages/server/src/api/controllers/query.js b/packages/server/src/api/controllers/query.js
index a2badb0d0d..b9b7c85427 100644
--- a/packages/server/src/api/controllers/query.js
+++ b/packages/server/src/api/controllers/query.js
@@ -93,7 +93,7 @@ exports.find = async function(ctx) {
const db = new CouchDB(ctx.user.appId)
const query = enrichQueries(await db.get(ctx.params.queryId))
// remove properties that could be dangerous in real app
- if (env.CLOUD) {
+ if (env.isProd()) {
delete query.fields
delete query.parameters
delete query.schema
diff --git a/packages/server/src/api/controllers/static/index.js b/packages/server/src/api/controllers/static/index.js
index 24cee67146..1da98d1940 100644
--- a/packages/server/src/api/controllers/static/index.js
+++ b/packages/server/src/api/controllers/static/index.js
@@ -3,9 +3,7 @@ require("svelte/register")
const send = require("koa-send")
const { resolve, join } = require("../../../utilities/centralPath")
const fetch = require("node-fetch")
-const fs = require("fs-extra")
const uuid = require("uuid")
-const AWS = require("aws-sdk")
const { prepareUpload } = require("../deploy/utils")
const { processString } = require("@budibase/string-templates")
const {
@@ -15,9 +13,10 @@ const {
const { getDeployedApps } = require("../../../utilities/builder/hosting")
const CouchDB = require("../../../db")
const setBuilderToken = require("../../../utilities/builder/setBuilderToken")
-const fileProcessor = require("../../../utilities/fileProcessor")
+const { loadHandlebarsFile } = require("../../../utilities/fileSystem")
const env = require("../../../environment")
const { OBJ_STORE_DIRECTORY } = require("../../../constants")
+const fileProcessor = require("../../../utilities/fileSystem/processor")
function objectStoreUrl() {
if (env.SELF_HOSTED) {
@@ -51,94 +50,30 @@ exports.serveBuilder = async function(ctx) {
}
exports.uploadFile = async function(ctx) {
- let files
- files =
+ let files =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
- const attachmentsPath = resolve(
- budibaseAppsDir(),
- ctx.user.appId,
- "attachments"
- )
-
- if (env.CLOUD) {
- // remote upload
- const s3 = new AWS.S3({
- params: {
- Bucket: "prod-budi-app-assets",
- },
- })
-
- const uploads = files.map(file => {
- const fileExtension = [...file.name.split(".")].pop()
- const processedFileName = `${uuid.v4()}.${fileExtension}`
-
- return prepareUpload({
- file,
- s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
- s3,
- })
- })
-
- ctx.body = await Promise.all(uploads)
- return
- }
-
- ctx.body = await processLocalFileUploads({
- files,
- outputPath: attachmentsPath,
- appId: ctx.user.appId,
- })
-}
-
-async function processLocalFileUploads({ files, outputPath, appId }) {
- // create attachments dir if it doesnt exist
- !fs.existsSync(outputPath) && fs.mkdirSync(outputPath, { recursive: true })
-
- const filesToProcess = files.map(file => {
+ const uploads = files.map(async file => {
const fileExtension = [...file.name.split(".")].pop()
// filenames converted to UUIDs so they are unique
const processedFileName = `${uuid.v4()}.${fileExtension}`
- return {
- name: file.name,
- path: file.path,
- size: file.size,
- type: file.type,
- processedFileName,
+ // need to handle image processing
+ await fileProcessor.process({
+ ...file,
extension: fileExtension,
- outputPath: join(outputPath, processedFileName),
- url: join("/attachments", processedFileName),
- }
+ })
+
+ return prepareUpload({
+ file,
+ s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
+ bucket: "prod-budi-app-assets",
+ })
})
- const fileProcessOperations = filesToProcess.map(fileProcessor.process)
-
- const processedFiles = await Promise.all(fileProcessOperations)
-
- let pendingFileUploads
- // local document used to track which files need to be uploaded
- // db.get throws an error if the document doesn't exist
- // need to use a promise to default
- const db = new CouchDB(appId)
- await db
- .get("_local/fileuploads")
- .then(data => {
- pendingFileUploads = data
- })
- .catch(() => {
- pendingFileUploads = { _id: "_local/fileuploads", uploads: [] }
- })
-
- pendingFileUploads.uploads = [
- ...processedFiles,
- ...pendingFileUploads.uploads,
- ]
- await db.put(pendingFileUploads)
-
- return processedFiles
+ ctx.body = await Promise.all(uploads)
}
exports.serveApp = async function(ctx) {
@@ -152,12 +87,12 @@ exports.serveApp = async function(ctx) {
const { head, html, css } = App.render({
title: appInfo.name,
- production: env.CLOUD,
+ production: env.isProd(),
appId,
objectStoreUrl: objectStoreUrl(),
})
- const appHbs = fs.readFileSync(`${__dirname}/templates/app.hbs`, "utf8")
+ const appHbs = loadHandlebarsFile(`${__dirname}/templates/app.hbs`)
ctx.body = await processString(appHbs, {
head,
body: html,
@@ -171,7 +106,7 @@ exports.serveAttachment = async function(ctx) {
const attachmentsPath = resolve(budibaseAppsDir(), appId, "attachments")
// Serve from object store
- if (env.CLOUD) {
+ if (env.isProd()) {
const S3_URL = join(objectStoreUrl(), appId, "attachments", ctx.file)
const response = await fetch(S3_URL)
const body = await response.text()
@@ -202,15 +137,13 @@ exports.serveComponentLibrary = async function(ctx) {
"dist"
)
- if (ctx.isDev) {
+ if (env.isDev()) {
componentLibraryPath = join(
budibaseTempDir(),
decodeURI(ctx.query.library),
"dist"
)
- }
-
- if (env.CLOUD) {
+ } else {
let componentLib = "componentlibrary"
if (ctx.user.version) {
componentLib += `-${ctx.user.version}`
diff --git a/packages/server/src/api/controllers/templates.js b/packages/server/src/api/controllers/templates.js
index c3cfa28706..4d55bc5957 100644
--- a/packages/server/src/api/controllers/templates.js
+++ b/packages/server/src/api/controllers/templates.js
@@ -1,10 +1,5 @@
const fetch = require("node-fetch")
-const {
- downloadTemplate,
- exportTemplateFromApp,
- getLocalTemplates,
-} = require("../../utilities/templates")
-const env = require("../../environment")
+const { downloadTemplate } = require("../../utilities/fileSystem")
// development flag, can be used to test against templates exported locally
const DEFAULT_TEMPLATES_BUCKET =
@@ -12,16 +7,11 @@ const DEFAULT_TEMPLATES_BUCKET =
exports.fetch = async function(ctx) {
const { type = "app" } = ctx.query
-
- if (env.LOCAL_TEMPLATES) {
- ctx.body = Object.values(getLocalTemplates()[type])
- } else {
- const response = await fetch(
- `https://${DEFAULT_TEMPLATES_BUCKET}/manifest.json`
- )
- const json = await response.json()
- ctx.body = Object.values(json.templates[type])
- }
+ const response = await fetch(
+ `https://${DEFAULT_TEMPLATES_BUCKET}/manifest.json`
+ )
+ const json = await response.json()
+ ctx.body = Object.values(json.templates[type])
}
// can't currently test this, have to ignore from coverage
@@ -29,26 +19,9 @@ exports.fetch = async function(ctx) {
exports.downloadTemplate = async function(ctx) {
const { type, name } = ctx.params
- if (!env.LOCAL_TEMPLATES) {
- await downloadTemplate(type, name)
- }
+ await downloadTemplate(type, name)
ctx.body = {
message: `template ${type}:${name} downloaded successfully.`,
}
}
-
-exports.exportTemplateFromApp = async function(ctx) {
- const { appId } = ctx.user
- const { templateName } = ctx.request.body
-
- await exportTemplateFromApp({
- appId,
- templateName,
- })
-
- ctx.status = 200
- ctx.body = {
- message: `Created template: ${templateName}`,
- }
-}
diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js
index f482f3f2a6..0f6f008a1b 100644
--- a/packages/server/src/api/controllers/view/index.js
+++ b/packages/server/src/api/controllers/view/index.js
@@ -1,8 +1,6 @@
const CouchDB = require("../../../db")
const viewTemplate = require("./viewBuilder")
-const fs = require("fs")
-const { join } = require("../../../utilities/centralPath")
-const os = require("os")
+const { apiFileReturn } = require("../../../utilities/fileSystem")
const exporters = require("./exporters")
const { fetchView } = require("../row")
const { ViewNames } = require("../../../db/utils")
@@ -120,12 +118,10 @@ const controller = {
// Export part
let headers = Object.keys(schema)
const exporter = exporters[format]
- const exportedFile = exporter(headers, ctx.body)
const filename = `${viewName}.${format}`
- fs.writeFileSync(join(os.tmpdir(), filename), exportedFile)
-
+ // send down the file
ctx.attachment(filename)
- ctx.body = fs.createReadStream(join(os.tmpdir(), filename))
+ ctx.body = apiFileReturn(exporter(headers, ctx.body))
},
}
diff --git a/packages/server/src/api/index.js b/packages/server/src/api/index.js
index 7628fa2077..9315c2aaf0 100644
--- a/packages/server/src/api/index.js
+++ b/packages/server/src/api/index.js
@@ -3,7 +3,6 @@ const authenticated = require("../middleware/authenticated")
const compress = require("koa-compress")
const zlib = require("zlib")
const { budibaseAppsDir } = require("../utilities/budibaseDir")
-const { isDev } = require("../utilities")
const { mainRoutes, authRoutes, staticRoutes } = require("./routes")
const pkg = require("../../package.json")
@@ -29,7 +28,6 @@ router
jwtSecret: env.JWT_SECRET,
useAppRootPath: true,
}
- ctx.isDev = isDev()
await next()
})
.use("/health", ctx => (ctx.status = 200))
@@ -68,8 +66,6 @@ for (let route of mainRoutes) {
router.use(staticRoutes.routes())
router.use(staticRoutes.allowedMethods())
-if (!env.SELF_HOSTED && !env.CLOUD) {
- router.redirect("/", "/_builder")
-}
+router.redirect("/", "/_builder")
module.exports = router
diff --git a/packages/server/src/api/routes/static.js b/packages/server/src/api/routes/static.js
index 5088002df3..d5d57f6d76 100644
--- a/packages/server/src/api/routes/static.js
+++ b/packages/server/src/api/routes/static.js
@@ -4,6 +4,7 @@ const { budibaseTempDir } = require("../../utilities/budibaseDir")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("../../utilities/security/permissions")
const usage = require("../../middleware/usageQuota")
+const env = require("../../environment")
const router = Router()
@@ -12,7 +13,7 @@ router.param("file", async (file, ctx, next) => {
ctx.file = file && file.includes(".") ? file : "index.html"
// Serving the client library from your local dir in dev
- if (ctx.isDev && ctx.file.startsWith("budibase-client")) {
+ if (env.isDev() && ctx.file.startsWith("budibase-client")) {
ctx.devPath = budibaseTempDir()
}
diff --git a/packages/server/src/api/routes/templates.js b/packages/server/src/api/routes/templates.js
index 05882a22ea..6a427e8383 100644
--- a/packages/server/src/api/routes/templates.js
+++ b/packages/server/src/api/routes/templates.js
@@ -12,6 +12,5 @@ router
authorized(BUILDER),
controller.downloadTemplate
)
- .post("/api/templates", authorized(BUILDER), controller.exportTemplateFromApp)
module.exports = router
diff --git a/packages/server/src/api/routes/tests/apikeys.spec.js b/packages/server/src/api/routes/tests/apikeys.spec.js
index dbee57c8b0..24402a8794 100644
--- a/packages/server/src/api/routes/tests/apikeys.spec.js
+++ b/packages/server/src/api/routes/tests/apikeys.spec.js
@@ -1,8 +1,5 @@
const setup = require("./utilities")
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
-const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
-const fs = require("fs")
-const path = require("path")
describe("/api/keys", () => {
let request = setup.getRequest()
@@ -16,12 +13,14 @@ describe("/api/keys", () => {
describe("fetch", () => {
it("should allow fetching", async () => {
- const res = await request
- .get(`/api/keys`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body).toBeDefined()
+ await setup.switchToSelfHosted(async () => {
+ const res = await request
+ .get(`/api/keys`)
+ .set(config.defaultHeaders())
+ .expect("Content-Type", /json/)
+ .expect(200)
+ expect(res.body).toBeDefined()
+ })
})
it("should check authorization for builder", async () => {
@@ -35,17 +34,18 @@ describe("/api/keys", () => {
describe("update", () => {
it("should allow updating a value", async () => {
- fs.writeFileSync(path.join(budibaseAppsDir(), ".env"), "TEST_API_KEY=thing")
- const res = await request
- .put(`/api/keys/TEST`)
- .send({
- value: "test"
- })
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body["TEST"]).toEqual("test")
- expect(process.env.TEST_API_KEY).toEqual("test")
+ await setup.switchToSelfHosted(async () => {
+ const res = await request
+ .put(`/api/keys/TEST`)
+ .send({
+ value: "test"
+ })
+ .set(config.defaultHeaders())
+ .expect("Content-Type", /json/)
+ .expect(200)
+ expect(res.body._id).toBeDefined()
+ expect(res.body._rev).toBeDefined()
+ })
})
it("should check authorization for builder", async () => {
diff --git a/packages/server/src/api/routes/tests/auth.spec.js b/packages/server/src/api/routes/tests/auth.spec.js
index 0eb0b6d851..13695d596d 100644
--- a/packages/server/src/api/routes/tests/auth.spec.js
+++ b/packages/server/src/api/routes/tests/auth.spec.js
@@ -1,4 +1,3 @@
-const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
describe("/authenticate", () => {
diff --git a/packages/server/src/api/routes/tests/backup.spec.js b/packages/server/src/api/routes/tests/backup.spec.js
index d603990294..4e586bfd08 100644
--- a/packages/server/src/api/routes/tests/backup.spec.js
+++ b/packages/server/src/api/routes/tests/backup.spec.js
@@ -1,6 +1,8 @@
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
+jest.mock("../../../utilities/fileSystem/utilities")
+
describe("/backups", () => {
let request = setup.getRequest()
let config = setup.getConfig()
@@ -14,7 +16,7 @@ describe("/backups", () => {
describe("exportAppDump", () => {
it("should be able to export app", async () => {
const res = await request
- .get(`/api/backups/export?appId=${config.getAppId()}`)
+ .get(`/api/backups/export?appId=${config.getAppId()}&appname=test`)
.set(config.defaultHeaders())
.expect(200)
expect(res.text).toBeDefined()
diff --git a/packages/server/src/api/routes/tests/cloud.spec.js b/packages/server/src/api/routes/tests/cloud.spec.js
deleted file mode 100644
index 3cb65ed819..0000000000
--- a/packages/server/src/api/routes/tests/cloud.spec.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const setup = require("./utilities")
-
-describe("test things in the Cloud/Self hosted", () => {
- describe("test self hosted static page", () => {
- it("should be able to load the static page", async () => {
- await setup.switchToCloudForFunction(async () => {
- let request = setup.getRequest()
- let config = setup.getConfig()
- await config.init()
- const res = await request.get(`/`).expect(200)
-      expect(res.text.includes("Budibase self hosting️")).toEqual(true)
- setup.afterAll()
- })
- })
- })
-})
diff --git a/packages/server/src/api/routes/tests/component.spec.js b/packages/server/src/api/routes/tests/component.spec.js
index cabf9f8223..a485939ae4 100644
--- a/packages/server/src/api/routes/tests/component.spec.js
+++ b/packages/server/src/api/routes/tests/component.spec.js
@@ -1,8 +1,15 @@
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
-const fs = require("fs")
-const { resolve, join } = require("path")
-const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
+
+jest.mock("../../../utilities/fileSystem/utilities", () => ({
+ ...jest.requireActual("../../../utilities/fileSystem/utilities"),
+ retrieve: () => {
+ const { join } = require("path")
+ const library = join("@budibase", "standard-components")
+ const path = require.resolve(library).split(join("dist", "index.js"))[0] + "manifest.json"
+ return JSON.stringify(require(path))
+ }
+}))
describe("/component", () => {
let request = setup.getRequest()
@@ -14,23 +21,8 @@ describe("/component", () => {
await config.init()
})
- function mock() {
- const manifestFile = "manifest.json"
- const appId = config.getAppId()
- const libraries = [join("@budibase", "standard-components")]
- for (let library of libraries) {
- let appDirectory = resolve(budibaseAppsDir(), appId, "node_modules", library, "package")
- fs.mkdirSync(appDirectory, { recursive: true })
-
- const file = require.resolve(library).split(join("dist", "index.js"))[0] + manifestFile
- fs.copyFileSync(file, join(appDirectory, manifestFile))
- }
- }
-
describe("fetch definitions", () => {
it("should be able to fetch definitions", async () => {
- // have to "mock" the files required
- mock()
const res = await request
.get(`/${config.getAppId()}/components/definitions`)
.set(config.defaultHeaders())
diff --git a/packages/server/src/api/routes/tests/hosting.spec.js b/packages/server/src/api/routes/tests/hosting.spec.js
index 2da5b11778..99a44640bc 100644
--- a/packages/server/src/api/routes/tests/hosting.spec.js
+++ b/packages/server/src/api/routes/tests/hosting.spec.js
@@ -107,17 +107,16 @@ describe("/hosting", () => {
})
describe("getDeployedApps", () => {
- it("should get apps when in builder", async () => {
- const res = await request
+ it("should fail when not self hosted", async () => {
+ await request
.get(`/api/hosting/apps`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
- .expect(200)
- expect(res.body.app1).toEqual({url: "/app1"})
+ .expect(400)
})
it("should get apps when in cloud", async () => {
- await setup.switchToCloudForFunction(async () => {
+ await setup.switchToSelfHosted(async () => {
const res = await request
.get(`/api/hosting/apps`)
.set(config.defaultHeaders())
diff --git a/packages/server/src/api/routes/tests/query.spec.js b/packages/server/src/api/routes/tests/query.spec.js
index 87938c6a37..2755c0230e 100644
--- a/packages/server/src/api/routes/tests/query.spec.js
+++ b/packages/server/src/api/routes/tests/query.spec.js
@@ -89,7 +89,7 @@ describe("/queries", () => {
})
it("should find a query in cloud", async () => {
- await setup.switchToCloudForFunction(async () => {
+ await setup.switchToSelfHosted(async () => {
const query = await config.createQuery()
const res = await request
.get(`/api/queries/${query._id}`)
diff --git a/packages/server/src/api/routes/tests/row.spec.js b/packages/server/src/api/routes/tests/row.spec.js
index 652a17366d..c79f648c51 100644
--- a/packages/server/src/api/routes/tests/row.spec.js
+++ b/packages/server/src/api/routes/tests/row.spec.js
@@ -410,7 +410,7 @@ describe("/rows", () => {
tableId: table._id,
})
// the environment needs configured for this
- await setup.switchToCloudForFunction(async () => {
+ await setup.switchToSelfHosted(async () => {
const enriched = await outputProcessing(config.getAppId(), table, [row])
expect(enriched[0].attachment[0].url).toBe(`/app-assets/assets/${config.getAppId()}/test/thing`)
})
diff --git a/packages/server/src/api/routes/tests/templates.spec.js b/packages/server/src/api/routes/tests/templates.spec.js
index f0d26bc7db..30e337e855 100644
--- a/packages/server/src/api/routes/tests/templates.spec.js
+++ b/packages/server/src/api/routes/tests/templates.spec.js
@@ -1,7 +1,4 @@
const setup = require("./utilities")
-const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
-const fs = require("fs")
-const { join } = require("path")
describe("/templates", () => {
let request = setup.getRequest()
@@ -24,26 +21,4 @@ describe("/templates", () => {
expect(Array.isArray(res.body)).toEqual(true)
})
})
-
- describe("export", () => {
- it("should be able to export the basic app", async () => {
- const res = await request
- .post(`/api/templates`)
- .send({
- templateName: "test",
- })
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body.message).toEqual("Created template: test")
- const dir = join(
- budibaseAppsDir(),
- "templates",
- "app",
- "test",
- "db"
- )
- expect(fs.existsSync(dir)).toEqual(true)
- })
- })
})
\ No newline at end of file
diff --git a/packages/server/src/api/routes/tests/utilities/index.js b/packages/server/src/api/routes/tests/utilities/index.js
index ed5c98cc48..3bd3886a31 100644
--- a/packages/server/src/api/routes/tests/utilities/index.js
+++ b/packages/server/src/api/routes/tests/utilities/index.js
@@ -35,18 +35,18 @@ exports.getConfig = () => {
return config
}
-exports.switchToCloudForFunction = async func => {
+exports.switchToSelfHosted = async func => {
// self hosted stops any attempts to Dynamo
- env.CLOUD = true
- env.SELF_HOSTED = true
+ env._set("NODE_ENV", "production")
+ env._set("SELF_HOSTED", true)
let error
try {
await func()
} catch (err) {
error = err
}
- env.CLOUD = false
- env.SELF_HOSTED = false
+ env._set("NODE_ENV", "jest")
+ env._set("SELF_HOSTED", false)
// don't throw error until after reset
if (error) {
throw error
diff --git a/packages/server/src/app.js b/packages/server/src/app.js
index 8bbea00474..e5e9b77084 100644
--- a/packages/server/src/app.js
+++ b/packages/server/src/app.js
@@ -9,6 +9,7 @@ const env = require("./environment")
const eventEmitter = require("./events")
const automations = require("./automations/index")
const Sentry = require("@sentry/node")
+const fileSystem = require("./utilities/fileSystem")
const app = new Koa()
@@ -65,6 +66,7 @@ module.exports = server.listen(env.PORT || 0, async () => {
console.log(`Budibase running on ${JSON.stringify(server.address())}`)
env._set("PORT", server.address().port)
eventEmitter.emitPort(env.PORT)
+ fileSystem.init()
await automations.init()
})
diff --git a/packages/server/src/automations/actions.js b/packages/server/src/automations/actions.js
index ee57f5a109..9675568808 100644
--- a/packages/server/src/automations/actions.js
+++ b/packages/server/src/automations/actions.js
@@ -5,17 +5,12 @@ const deleteRow = require("./steps/deleteRow")
const createUser = require("./steps/createUser")
const outgoingWebhook = require("./steps/outgoingWebhook")
const env = require("../environment")
-const download = require("download")
-const fetch = require("node-fetch")
-const { join } = require("../utilities/centralPath")
-const os = require("os")
-const fs = require("fs")
const Sentry = require("@sentry/node")
+const {
+ automationInit,
+ getExternalAutomationStep,
+} = require("../utilities/fileSystem")
-const DEFAULT_BUCKET =
- "https://prod-budi-automations.s3-eu-west-1.amazonaws.com"
-const DEFAULT_DIRECTORY = ".budibase-automations"
-const AUTOMATION_MANIFEST = "manifest.json"
const BUILTIN_ACTIONS = {
SEND_EMAIL: sendEmail.run,
CREATE_ROW: createRow.run,
@@ -33,8 +28,6 @@ const BUILTIN_DEFINITIONS = {
OUTGOING_WEBHOOK: outgoingWebhook.definition,
}
-let AUTOMATION_BUCKET = env.AUTOMATION_BUCKET
-let AUTOMATION_DIRECTORY = env.AUTOMATION_DIRECTORY
let MANIFEST = null
/* istanbul ignore next */
@@ -42,22 +35,13 @@ function buildBundleName(pkgName, version) {
return `${pkgName}@${version}.min.js`
}
-/* istanbul ignore next */
-async function downloadPackage(name, version, bundleName) {
- await download(
- `${AUTOMATION_BUCKET}/${name}/${version}/${bundleName}`,
- AUTOMATION_DIRECTORY
- )
- return require(join(AUTOMATION_DIRECTORY, bundleName))
-}
-
/* istanbul ignore next */
module.exports.getAction = async function(actionName) {
if (BUILTIN_ACTIONS[actionName] != null) {
return BUILTIN_ACTIONS[actionName]
}
// worker pools means that a worker may not have manifest
- if (env.CLOUD && MANIFEST == null) {
+ if (env.isProd() && MANIFEST == null) {
MANIFEST = await module.exports.init()
}
// env setup to get async packages
@@ -66,28 +50,12 @@ module.exports.getAction = async function(actionName) {
}
const pkg = MANIFEST.packages[actionName]
const bundleName = buildBundleName(pkg.stepId, pkg.version)
- try {
- return require(join(AUTOMATION_DIRECTORY, bundleName))
- } catch (err) {
- return downloadPackage(pkg.stepId, pkg.version, bundleName)
- }
+ return getExternalAutomationStep(pkg.stepId, pkg.version, bundleName)
}
module.exports.init = async function() {
- // set defaults
- if (!AUTOMATION_DIRECTORY) {
- AUTOMATION_DIRECTORY = join(os.homedir(), DEFAULT_DIRECTORY)
- }
- if (!AUTOMATION_BUCKET) {
- AUTOMATION_BUCKET = DEFAULT_BUCKET
- }
- if (!fs.existsSync(AUTOMATION_DIRECTORY)) {
- fs.mkdirSync(AUTOMATION_DIRECTORY, { recursive: true })
- }
- // env setup to get async packages
try {
- let response = await fetch(`${AUTOMATION_BUCKET}/${AUTOMATION_MANIFEST}`)
- MANIFEST = await response.json()
+ MANIFEST = await automationInit()
module.exports.DEFINITIONS =
MANIFEST && MANIFEST.packages
? Object.assign(MANIFEST.packages, BUILTIN_DEFINITIONS)
diff --git a/packages/server/src/automations/index.js b/packages/server/src/automations/index.js
index 9aba399133..d67227e6ac 100644
--- a/packages/server/src/automations/index.js
+++ b/packages/server/src/automations/index.js
@@ -34,10 +34,10 @@ module.exports.init = async function() {
await actions.init()
triggers.automationQueue.process(async job => {
try {
- if (env.CLOUD && job.data.automation && !env.SELF_HOSTED) {
+ if (env.USE_QUOTAS) {
job.data.automation.apiKey = await updateQuota(job.data.automation)
}
- if (env.BUDIBASE_ENVIRONMENT === "PRODUCTION") {
+ if (env.isProd()) {
await runWorker(job)
} else {
await singleThread(job)
diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js
index ef136e1131..aa910dbb42 100644
--- a/packages/server/src/automations/steps/createRow.js
+++ b/packages/server/src/automations/steps/createRow.js
@@ -85,7 +85,7 @@ module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
inputs.row.tableId,
inputs.row
)
- if (env.CLOUD) {
+ if (env.isProd()) {
await usage.update(apiKey, usage.Properties.ROW, 1)
}
await rowController.save(ctx)
diff --git a/packages/server/src/automations/steps/createUser.js b/packages/server/src/automations/steps/createUser.js
index 8496967105..147a3f7868 100644
--- a/packages/server/src/automations/steps/createUser.js
+++ b/packages/server/src/automations/steps/createUser.js
@@ -72,7 +72,7 @@ module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
}
try {
- if (env.CLOUD) {
+ if (env.isProd()) {
await usage.update(apiKey, usage.Properties.USER, 1)
}
await userController.create(ctx)
diff --git a/packages/server/src/automations/steps/deleteRow.js b/packages/server/src/automations/steps/deleteRow.js
index ea4d60a04e..57555ddaad 100644
--- a/packages/server/src/automations/steps/deleteRow.js
+++ b/packages/server/src/automations/steps/deleteRow.js
@@ -70,7 +70,7 @@ module.exports.run = async function({ inputs, appId, apiKey, emitter }) {
}
try {
- if (env.CLOUD) {
+ if (env.isProd()) {
await usage.update(apiKey, usage.Properties.ROW, -1)
}
await rowController.destroy(ctx)
diff --git a/packages/server/src/automations/tests/automation.spec.js b/packages/server/src/automations/tests/automation.spec.js
index f4d3b4c865..2e9bb16e55 100644
--- a/packages/server/src/automations/tests/automation.spec.js
+++ b/packages/server/src/automations/tests/automation.spec.js
@@ -47,27 +47,23 @@ describe("Run through some parts of the automations system", () => {
expect(thread).toHaveBeenCalled()
})
- it("should be able to init in cloud", async () => {
- env.CLOUD = true
- env.BUDIBASE_ENVIRONMENT = "PRODUCTION"
- await triggers.externalTrigger(basicAutomation(), { a: 1 })
- await wait(100)
- // haven't added a mock implementation so getAPIKey of usageQuota just returns undefined
- expect(usageQuota.update).toHaveBeenCalledWith("test", "automationRuns", 1)
- expect(workerJob).toBeDefined()
- env.BUDIBASE_ENVIRONMENT = "JEST"
- env.CLOUD = false
+ it("should be able to init in prod", async () => {
+ await setup.runInProd(async () => {
+ await triggers.externalTrigger(basicAutomation(), { a: 1 })
+ await wait(100)
+ // haven't added a mock implementation so getAPIKey of usageQuota just returns undefined
+ expect(usageQuota.update).toHaveBeenCalledWith("test", "automationRuns", 1)
+ expect(workerJob).toBeDefined()
+ })
})
it("try error scenario", async () => {
- env.CLOUD = true
- env.BUDIBASE_ENVIRONMENT = "PRODUCTION"
- // the second call will throw an error
- await triggers.externalTrigger(basicAutomation(), { a: 1 })
- await wait(100)
- expect(console.error).toHaveBeenCalled()
- env.BUDIBASE_ENVIRONMENT = "JEST"
- env.CLOUD = false
+ await setup.runInProd(async () => {
+ // the second call will throw an error
+ await triggers.externalTrigger(basicAutomation(), { a: 1 })
+ await wait(100)
+ expect(console.error).toHaveBeenCalled()
+ })
})
it("should be able to check triggering row filling", async () => {
diff --git a/packages/server/src/automations/tests/createRow.spec.js b/packages/server/src/automations/tests/createRow.spec.js
index 0be2803e47..c01d630bed 100644
--- a/packages/server/src/automations/tests/createRow.spec.js
+++ b/packages/server/src/automations/tests/createRow.spec.js
@@ -42,12 +42,12 @@ describe("test the create row action", () => {
})
it("check usage quota attempts", async () => {
- env.CLOUD = true
- await setup.runStep(setup.actions.CREATE_ROW.stepId, {
- row
+ await setup.runInProd(async () => {
+ await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+ row
+ })
+ expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
})
- expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
- env.CLOUD = false
})
it("should check invalid inputs return an error", async () => {
diff --git a/packages/server/src/automations/tests/createUser.spec.js b/packages/server/src/automations/tests/createUser.spec.js
index 5f65e260a9..f188c31aa4 100644
--- a/packages/server/src/automations/tests/createUser.spec.js
+++ b/packages/server/src/automations/tests/createUser.spec.js
@@ -35,9 +35,9 @@ describe("test the create user action", () => {
})
it("check usage quota attempts", async () => {
- env.CLOUD = true
- await setup.runStep(setup.actions.CREATE_USER.stepId, user)
- expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "users", 1)
- env.CLOUD = false
+ await setup.runInProd(async () => {
+ await setup.runStep(setup.actions.CREATE_USER.stepId, user)
+ expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "users", 1)
+ })
})
})
diff --git a/packages/server/src/automations/tests/deleteRow.spec.js b/packages/server/src/automations/tests/deleteRow.spec.js
index 0d5ff47ed8..2a300cbd8c 100644
--- a/packages/server/src/automations/tests/deleteRow.spec.js
+++ b/packages/server/src/automations/tests/deleteRow.spec.js
@@ -36,10 +36,10 @@ describe("test the delete row action", () => {
})
it("check usage quota attempts", async () => {
- env.CLOUD = true
- await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
- expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
- env.CLOUD = false
+ await setup.runInProd(async () => {
+ await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
+ expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
+ })
})
it("should check invalid inputs return an error", async () => {
diff --git a/packages/server/src/automations/tests/utilities/index.js b/packages/server/src/automations/tests/utilities/index.js
index ad149d6bde..ab9de55430 100644
--- a/packages/server/src/automations/tests/utilities/index.js
+++ b/packages/server/src/automations/tests/utilities/index.js
@@ -2,6 +2,7 @@ const TestConfig = require("../../../tests/utilities/TestConfiguration")
const actions = require("../../actions")
const logic = require("../../logic")
const emitter = require("../../../events/index")
+const env = require("../../../environment")
let config
@@ -16,6 +17,22 @@ exports.afterAll = () => {
config.end()
}
+exports.runInProd = async fn => {
+ env._set("NODE_ENV", "production")
+ env._set("USE_QUOTAS", 1)
+ let error
+ try {
+ await fn()
+ } catch (err) {
+ error = err
+ }
+ env._set("NODE_ENV", "jest")
+ env._set("USE_QUOTAS", null)
+ if (error) {
+ throw error
+ }
+}
+
exports.runStep = async function runStep(stepId, inputs) {
let step
if (
diff --git a/packages/server/src/constants/index.js b/packages/server/src/constants/index.js
index 46fb5cb649..379cfb8aa5 100644
--- a/packages/server/src/constants/index.js
+++ b/packages/server/src/constants/index.js
@@ -80,8 +80,6 @@ exports.AutoFieldSubTypes = {
AUTO_ID: "autoID",
}
-exports.BUILDER_CONFIG_DB = "builder-config-db"
-exports.HOSTING_DOC = "hosting-doc"
exports.OBJ_STORE_DIRECTORY = "/app-assets/assets"
exports.BaseQueryVerbs = {
CREATE: "create",
@@ -89,3 +87,9 @@ exports.BaseQueryVerbs = {
UPDATE: "update",
DELETE: "delete",
}
+
+exports.ObjectStoreBuckets = {
+ BACKUPS: "backups",
+ APPS: "prod-budi-app-assets",
+ TEMPLATES: "templates",
+}
diff --git a/packages/server/src/db/builder.js b/packages/server/src/db/builder.js
new file mode 100644
index 0000000000..d2bbcd404b
--- /dev/null
+++ b/packages/server/src/db/builder.js
@@ -0,0 +1,38 @@
+const CouchDB = require("./index")
+const { StaticDatabases } = require("./utils")
+const env = require("../environment")
+
+const SELF_HOST_ERR = "Unable to access builder DB/doc - not self hosted."
+const BUILDER_DB = StaticDatabases.BUILDER
+
+/**
+ * This is the builder database, right now this is a single, static database
+ * that is present across the whole system and determines some core functionality
+ * for the builder (e.g. storage of API keys). This has been limited to self hosting
+ * as it doesn't make as much sense against the currently design Cloud system.
+ */
+
+exports.getBuilderMainDoc = async () => {
+ if (!env.SELF_HOSTED) {
+ throw SELF_HOST_ERR
+ }
+ const db = new CouchDB(BUILDER_DB.name)
+ try {
+ return await db.get(BUILDER_DB.baseDoc)
+ } catch (err) {
+ // doesn't exist yet, nothing to get
+ return {
+ _id: BUILDER_DB.baseDoc,
+ }
+ }
+}
+
+exports.setBuilderMainDoc = async doc => {
+ if (!env.SELF_HOSTED) {
+ throw SELF_HOST_ERR
+ }
+ // make sure to override the ID
+ doc._id = BUILDER_DB.baseDoc
+ const db = new CouchDB(BUILDER_DB.name)
+ return db.put(doc)
+}
diff --git a/packages/server/src/db/client.js b/packages/server/src/db/client.js
index 3645573e40..3e3a4f50fe 100644
--- a/packages/server/src/db/client.js
+++ b/packages/server/src/db/client.js
@@ -5,7 +5,6 @@ const find = require("pouchdb-find")
const env = require("../environment")
const COUCH_DB_URL = env.COUCH_DB_URL || "http://localhost:10000/db/"
-const isInMemory = env.NODE_ENV === "jest"
PouchDB.plugin(replicationStream.plugin)
PouchDB.plugin(find)
@@ -13,10 +12,10 @@ PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
let POUCH_DB_DEFAULTS = {
prefix: COUCH_DB_URL,
- skip_setup: !!env.CLOUD,
+ skip_setup: env.isProd(),
}
-if (isInMemory) {
+if (env.isTest()) {
PouchDB.plugin(require("pouchdb-adapter-memory"))
POUCH_DB_DEFAULTS = {
prefix: undefined,
diff --git a/packages/server/src/db/dynamoClient.js b/packages/server/src/db/dynamoClient.js
index fcba726f84..19924b1a7e 100644
--- a/packages/server/src/db/dynamoClient.js
+++ b/packages/server/src/db/dynamoClient.js
@@ -1,4 +1,4 @@
-let _ = require("lodash")
+let { merge } = require("lodash")
let env = require("../environment")
const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
@@ -38,7 +38,7 @@ class Table {
params.Key[this._sort] = sort
}
if (otherProps) {
- params = _.merge(params, otherProps)
+ params = merge(params, otherProps)
}
let response = await docClient.get(params).promise()
return response.Item
@@ -77,7 +77,7 @@ class Table {
params.ConditionExpression += "attribute_exists(#PRIMARY)"
}
if (otherProps) {
- params = _.merge(params, otherProps)
+ params = merge(params, otherProps)
}
return docClient.update(params).promise()
}
@@ -94,7 +94,7 @@ class Table {
Item: item,
}
if (otherProps) {
- params = _.merge(params, otherProps)
+ params = merge(params, otherProps)
}
return docClient.put(params).promise()
}
@@ -119,7 +119,7 @@ exports.init = endpoint => {
exports.apiKeyTable = new Table(TableInfo.API_KEYS)
exports.userTable = new Table(TableInfo.USERS)
-if (env.CLOUD) {
+if (env.isProd()) {
exports.init(`https://dynamodb.${AWS_REGION}.amazonaws.com`)
} else {
env._set("AWS_ACCESS_KEY_ID", "KEY_ID")
diff --git a/packages/server/src/db/utils.js b/packages/server/src/db/utils.js
index 2d0722d83a..e480d4f554 100644
--- a/packages/server/src/db/utils.js
+++ b/packages/server/src/db/utils.js
@@ -3,6 +3,18 @@ const newid = require("./newid")
const UNICODE_MAX = "\ufff0"
const SEPARATOR = "_"
+const StaticDatabases = {
+ BUILDER: {
+ name: "builder-db",
+ baseDoc: "builder-doc",
+ },
+ // TODO: needs removed
+ BUILDER_HOSTING: {
+ name: "builder-config-db",
+ baseDoc: "hosting-doc",
+ },
+}
+
const DocumentTypes = {
TABLE: "ta",
ROW: "ro",
@@ -25,6 +37,7 @@ const ViewNames = {
USERS: "ta_users",
}
+exports.StaticDatabases = StaticDatabases
exports.ViewNames = ViewNames
exports.DocumentTypes = DocumentTypes
exports.SEPARATOR = SEPARATOR
diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js
index 4faaabe6ab..dc15bc8a9a 100644
--- a/packages/server/src/environment.js
+++ b/packages/server/src/environment.js
@@ -1,45 +1,64 @@
-const { resolve, join } = require("./utilities/centralPath")
-const { homedir } = require("os")
-const { app } = require("electron")
+function isTest() {
+ return (
+ process.env.NODE_ENV === "jest" ||
+ process.env.NODE_ENV === "cypress" ||
+ process.env.JEST_WORKER_ID != null
+ )
+}
+
+function isDev() {
+ return (
+ process.env.NODE_ENV !== "production" &&
+ process.env.BUDIBASE_ENVIRONMENT !== "production"
+ )
+}
let LOADED = false
-
-if (!LOADED) {
- const homeDir = app ? app.getPath("home") : homedir()
- const budibaseDir = join(homeDir, ".budibase")
- process.env.BUDIBASE_DIR = budibaseDir
- require("dotenv").config({ path: resolve(budibaseDir, ".env") })
+if (!LOADED && isDev() && !isTest()) {
+ require("dotenv").config()
LOADED = true
}
module.exports = {
- CLIENT_ID: process.env.CLIENT_ID,
- NODE_ENV: process.env.NODE_ENV,
- JWT_SECRET: process.env.JWT_SECRET,
- BUDIBASE_DIR: process.env.BUDIBASE_DIR,
+ // important
PORT: process.env.PORT,
+ JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL,
+ MINIO_URL: process.env.MINIO_URL,
+ WORKER_URL: process.env.WORKER_URL,
+ SELF_HOSTED: process.env.SELF_HOSTED,
+ AWS_REGION: process.env.AWS_REGION,
+ ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
+ MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
+ MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
+ USE_QUOTAS: process.env.USE_QUOTAS,
+ // environment
+ NODE_ENV: process.env.NODE_ENV,
+ JEST_WORKER_ID: process.env.JEST_WORKER_ID,
+ BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
+ // minor
SALT_ROUNDS: process.env.SALT_ROUNDS,
LOGGER: process.env.LOGGER,
LOG_LEVEL: process.env.LOG_LEVEL,
AUTOMATION_DIRECTORY: process.env.AUTOMATION_DIRECTORY,
AUTOMATION_BUCKET: process.env.AUTOMATION_BUCKET,
- BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
SENDGRID_API_KEY: process.env.SENDGRID_API_KEY,
- CLOUD: process.env.CLOUD,
- SELF_HOSTED: process.env.SELF_HOSTED,
- WORKER_URL: process.env.WORKER_URL,
- HOSTING_KEY: process.env.HOSTING_KEY,
DYNAMO_ENDPOINT: process.env.DYNAMO_ENDPOINT,
- AWS_REGION: process.env.AWS_REGION,
- DEPLOYMENT_CREDENTIALS_URL: process.env.DEPLOYMENT_CREDENTIALS_URL,
+ // old - to remove
+ CLIENT_ID: process.env.CLIENT_ID,
+ BUDIBASE_DIR: process.env.BUDIBASE_DIR,
+ DEPLOYMENT_DB_URL: process.env.DEPLOYMENT_DB_URL,
BUDIBASE_API_KEY: process.env.BUDIBASE_API_KEY,
USERID_API_KEY: process.env.USERID_API_KEY,
- ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
- DEPLOYMENT_DB_URL: process.env.DEPLOYMENT_DB_URL,
- LOCAL_TEMPLATES: process.env.LOCAL_TEMPLATES,
+ DEPLOYMENT_CREDENTIALS_URL: process.env.DEPLOYMENT_CREDENTIALS_URL,
+ HOSTING_KEY: process.env.HOSTING_KEY,
_set(key, value) {
process.env[key] = value
module.exports[key] = value
},
+ isTest,
+ isDev,
+ isProd: () => {
+ return !isDev()
+ },
}
diff --git a/packages/server/src/middleware/authorized.js b/packages/server/src/middleware/authorized.js
index 2a1caef2a2..564896080e 100644
--- a/packages/server/src/middleware/authorized.js
+++ b/packages/server/src/middleware/authorized.js
@@ -13,19 +13,12 @@ const { AuthTypes } = require("../constants")
const ADMIN_ROLES = [BUILTIN_ROLE_IDS.ADMIN, BUILTIN_ROLE_IDS.BUILDER]
-const LOCAL_PASS = new RegExp(["webhooks/trigger"].join("|"))
-
function hasResource(ctx) {
return ctx.resourceId != null
}
module.exports = (permType, permLevel = null) => async (ctx, next) => {
- // webhooks can pass locally
- if (!env.CLOUD && LOCAL_PASS.test(ctx.request.url)) {
- return next()
- }
-
- if (env.CLOUD && ctx.headers["x-api-key"] && ctx.headers["x-instanceid"]) {
+ if (env.isProd() && ctx.headers["x-api-key"] && ctx.headers["x-instanceid"]) {
// api key header passed by external webhook
if (await isAPIKeyValid(ctx.headers["x-api-key"])) {
ctx.auth = {
@@ -41,20 +34,23 @@ module.exports = (permType, permLevel = null) => async (ctx, next) => {
return ctx.throw(403, "API key invalid")
}
- // don't expose builder endpoints in the cloud
- if (env.CLOUD && permType === PermissionTypes.BUILDER) return
-
if (!ctx.user) {
return ctx.throw(403, "No user info found")
}
const role = ctx.user.role
+ const isBuilder = role._id === BUILTIN_ROLE_IDS.BUILDER
+ const isAdmin = ADMIN_ROLES.includes(role._id)
+ const isAuthed = ctx.auth.authenticated
+
+ if (permType === PermissionTypes.BUILDER && isBuilder) {
+ return next()
+ }
+
const { basePermissions, permissions } = await getUserPermissions(
ctx.appId,
role._id
)
- const isAdmin = ADMIN_ROLES.includes(role._id)
- const isAuthed = ctx.auth.authenticated
// this may need to change in the future, right now only admins
// can have access to builder features, this is hard coded into
diff --git a/packages/server/src/middleware/selfhost.js b/packages/server/src/middleware/selfhost.js
index 1c96cee33c..1e7117c83d 100644
--- a/packages/server/src/middleware/selfhost.js
+++ b/packages/server/src/middleware/selfhost.js
@@ -1,14 +1,8 @@
const env = require("../environment")
-const hosting = require("../utilities/builder/hosting")
// if added as a middleware will stop requests unless builder is in self host mode
// or cloud is in self host
module.exports = async (ctx, next) => {
- if (env.CLOUD && env.SELF_HOSTED) {
- await next()
- return
- }
- const hostingInfo = await hosting.getHostingInfo()
- if (hostingInfo.type === hosting.HostingTypes.SELF) {
+ if (env.SELF_HOSTED) {
await next()
return
}
diff --git a/packages/server/src/middleware/tests/authorized.spec.js b/packages/server/src/middleware/tests/authorized.spec.js
index d3e5e52d2d..7968a8a939 100644
--- a/packages/server/src/middleware/tests/authorized.spec.js
+++ b/packages/server/src/middleware/tests/authorized.spec.js
@@ -3,8 +3,15 @@ const env = require("../../environment")
const apiKey = require("../../utilities/security/apikey")
const { AuthTypes } = require("../../constants")
const { PermissionTypes, PermissionLevels } = require("../../utilities/security/permissions")
-const { Test } = require("supertest")
-jest.mock("../../environment")
+jest.mock("../../environment", () => ({
+ prod: false,
+ isTest: () => true,
+ isProd: () => this.prod,
+ _set: (key, value) => {
+ this.prod = value === "production"
+ }
+ })
+)
jest.mock("../../utilities/security/apikey")
class TestConfiguration {
@@ -47,8 +54,8 @@ class TestConfiguration {
this.ctx.request.url = url
}
- setCloudEnv(isCloud) {
- env.CLOUD = isCloud
+ setEnvironment(isProd) {
+ env._set("NODE_ENV", isProd ? "production" : "jest")
}
setRequestHeaders(headers) {
@@ -71,12 +78,6 @@ describe("Authorization middleware", () => {
beforeEach(() => {
config = new TestConfiguration()
- })
-
- it("passes the middleware for local webhooks", async () => {
- config.setRequestUrl("https://something/webhooks/trigger")
- await config.executeMiddleware()
- expect(config.next).toHaveBeenCalled()
})
describe("external web hook call", () => {
@@ -85,7 +86,7 @@ describe("Authorization middleware", () => {
beforeEach(() => {
config = new TestConfiguration()
- config.setCloudEnv(true)
+ config.setEnvironment(true)
config.setRequestHeaders({
"x-api-key": "abc123",
"x-instanceid": "instance123",
@@ -121,7 +122,7 @@ describe("Authorization middleware", () => {
beforeEach(() => {
config = new TestConfiguration()
- config.setCloudEnv(true)
+ config.setEnvironment(true)
config.setAuthenticated(true)
})
@@ -144,7 +145,7 @@ describe("Authorization middleware", () => {
})
it("throws if the user has only builder permissions", async () => {
- config.setCloudEnv(false)
+ config.setEnvironment(false)
config.setMiddlewareRequiredPermission(PermissionTypes.BUILDER)
config.setUser({
role: {
diff --git a/packages/server/src/middleware/tests/selfhost.spec.js b/packages/server/src/middleware/tests/selfhost.spec.js
index 061da17f9c..6ce61c60ef 100644
--- a/packages/server/src/middleware/tests/selfhost.spec.js
+++ b/packages/server/src/middleware/tests/selfhost.spec.js
@@ -1,6 +1,5 @@
-const selfHostMiddleware = require("../selfhost");
+const selfHostMiddleware = require("../selfhost")
const env = require("../../environment")
-const hosting = require("../../utilities/builder/hosting");
jest.mock("../../environment")
jest.mock("../../utilities/builder/hosting")
@@ -20,16 +19,6 @@ class TestConfiguration {
return this.middleware(this.ctx, this.next)
}
- setCloudHosted() {
- env.CLOUD = 1
- env.SELF_HOSTED = 0
- }
-
- setSelfHosted() {
- env.CLOUD = 0
- env.SELF_HOSTED = 1
- }
-
afterEach() {
jest.clearAllMocks()
}
@@ -46,30 +35,10 @@ describe("Self host middleware", () => {
config.afterEach()
})
- it("calls next() when CLOUD and SELF_HOSTED env vars are set", async () => {
- env.CLOUD = 1
+ it("calls next() when SELF_HOSTED env var is set", async () => {
env.SELF_HOSTED = 1
await config.executeMiddleware()
expect(config.next).toHaveBeenCalled()
})
-
- it("throws when hostingInfo type is cloud", async () => {
- config.setSelfHosted()
-
- hosting.getHostingInfo.mockImplementationOnce(() => ({ type: hosting.HostingTypes.CLOUD }))
-
- await config.executeMiddleware()
- expect(config.throw).toHaveBeenCalledWith(400, "Endpoint unavailable in cloud hosting.")
- expect(config.next).not.toHaveBeenCalled()
- })
-
- it("calls the self hosting middleware to pass through to next() when the hostingInfo type is self", async () => {
- config.setSelfHosted()
-
- hosting.getHostingInfo.mockImplementationOnce(() => ({ type: hosting.HostingTypes.SELF }))
-
- await config.executeMiddleware()
- expect(config.next).toHaveBeenCalled()
- })
})
diff --git a/packages/server/src/middleware/tests/usageQuota.spec.js b/packages/server/src/middleware/tests/usageQuota.spec.js
index 395f14c1ed..9ab17ef992 100644
--- a/packages/server/src/middleware/tests/usageQuota.spec.js
+++ b/packages/server/src/middleware/tests/usageQuota.spec.js
@@ -5,7 +5,12 @@ const env = require("../../environment")
jest.mock("../../db")
jest.mock("../../utilities/usageQuota")
-jest.mock("../../environment")
+jest.mock("../../environment", () => ({
+ isTest: () => true,
+ isProd: () => false,
+ isDev: () => true,
+ _set: () => {},
+}))
class TestConfiguration {
constructor() {
@@ -32,12 +37,14 @@ class TestConfiguration {
return this.middleware(this.ctx, this.next)
}
- cloudHosted(bool) {
+ setProd(bool) {
if (bool) {
- env.CLOUD = 1
+ env.isDev = () => false
+ env.isProd = () => true
this.ctx.auth = { apiKey: "test" }
} else {
- env.CLOUD = 0
+ env.isDev = () => true
+ env.isProd = () => false
}
}
@@ -102,7 +109,7 @@ describe("usageQuota middleware", () => {
it("calculates and persists the correct usage quota for the relevant action", async () => {
config.setUrl("/rows")
- config.cloudHosted(true)
+ config.setProd(true)
await config.executeMiddleware()
@@ -112,7 +119,7 @@ describe("usageQuota middleware", () => {
it("calculates the correct file size from a file upload call and adds it to quota", async () => {
config.setUrl("/upload")
- config.cloudHosted(true)
+ config.setProd(true)
config.setFiles([
{
size: 100
diff --git a/packages/server/src/middleware/usageQuota.js b/packages/server/src/middleware/usageQuota.js
index 1b809868be..1bc829fbcf 100644
--- a/packages/server/src/middleware/usageQuota.js
+++ b/packages/server/src/middleware/usageQuota.js
@@ -44,8 +44,8 @@ module.exports = async (ctx, next) => {
}
}
- // if running in builder or a self hosted cloud usage quotas should not be executed
- if (!env.CLOUD || env.SELF_HOSTED) {
+ // if in development or a self hosted cloud usage quotas should not be executed
+ if (env.isDev() || env.SELF_HOSTED) {
return next()
}
// update usage for uploads to be the total size
diff --git a/packages/server/src/tests/utilities/TestConfiguration.js b/packages/server/src/tests/utilities/TestConfiguration.js
index a12d596534..a9723c8671 100644
--- a/packages/server/src/tests/utilities/TestConfiguration.js
+++ b/packages/server/src/tests/utilities/TestConfiguration.js
@@ -14,9 +14,6 @@ const {
} = require("./structures")
const controllers = require("./controllers")
const supertest = require("supertest")
-const fs = require("fs")
-const { budibaseAppsDir } = require("../../utilities/budibaseDir")
-const { join } = require("path")
const EMAIL = "babs@babs.com"
const PASSWORD = "babs_password"
@@ -66,13 +63,6 @@ class TestConfiguration {
if (this.server) {
this.server.close()
}
- const appDir = budibaseAppsDir()
- const files = fs.readdirSync(appDir)
- for (let file of files) {
- if (this.allApps.some(app => file.includes(app._id))) {
- fs.rmdirSync(join(appDir, file), { recursive: true })
- }
- }
}
defaultHeaders() {
@@ -81,9 +71,11 @@ class TestConfiguration {
roleId: BUILTIN_ROLE_IDS.BUILDER,
}
const builderToken = jwt.sign(builderUser, env.JWT_SECRET)
+ // can be "production" for test case
+ const type = env.isProd() ? "cloud" : "local"
const headers = {
Accept: "application/json",
- Cookie: [`budibase:builder:local=${builderToken}`],
+ Cookie: [`budibase:builder:${type}=${builderToken}`],
}
if (this.appId) {
headers["x-budibase-app-id"] = this.appId
diff --git a/packages/server/src/utilities/builder/compileStaticAssets.js b/packages/server/src/utilities/builder/compileStaticAssets.js
deleted file mode 100644
index 0389c920ee..0000000000
--- a/packages/server/src/utilities/builder/compileStaticAssets.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const { ensureDir, constants, copyFile } = require("fs-extra")
-const { join } = require("../centralPath")
-const { budibaseAppsDir } = require("../budibaseDir")
-
-/**
- * Compile all the non-db static web assets that are required for the running of
- * a budibase application. This includes the JSON structure of the DOM and
- * the client library, a script responsible for reading the JSON structure
- * and rendering the application.
- * @param {string} appId id of the application we want to compile static assets for
- */
-module.exports = async appId => {
- const publicPath = join(budibaseAppsDir(), appId, "public")
- await ensureDir(publicPath)
- await copyClientLib(publicPath)
-}
-
-/**
- * Copy the budibase client library and sourcemap from NPM to /public/.
- * The client library is then served as a static asset when the budibase application
- * is running in preview or prod
- * @param {String} publicPath - path to write the client library to
- */
-const copyClientLib = async publicPath => {
- const sourcepath = require.resolve("@budibase/client")
- const destPath = join(publicPath, "budibase-client.js")
-
- await copyFile(sourcepath, destPath, constants.COPYFILE_FICLONE)
-
- await copyFile(
- sourcepath + ".map",
- destPath + ".map",
- constants.COPYFILE_FICLONE
- )
-}
diff --git a/packages/server/src/utilities/builder/hosting.js b/packages/server/src/utilities/builder/hosting.js
index c265c26dd0..eeaf220c64 100644
--- a/packages/server/src/utilities/builder/hosting.js
+++ b/packages/server/src/utilities/builder/hosting.js
@@ -1,5 +1,5 @@
const CouchDB = require("../../db")
-const { BUILDER_CONFIG_DB, HOSTING_DOC } = require("../../constants")
+const { StaticDatabases } = require("../../db/utils")
const fetch = require("node-fetch")
const env = require("../../environment")
@@ -23,16 +23,16 @@ exports.HostingTypes = {
}
exports.getHostingInfo = async () => {
- const db = new CouchDB(BUILDER_CONFIG_DB)
+ const db = new CouchDB(StaticDatabases.BUILDER_HOSTING.name)
let doc
try {
- doc = await db.get(HOSTING_DOC)
+ doc = await db.get(StaticDatabases.BUILDER_HOSTING.baseDoc)
} catch (err) {
// don't write this doc, want to be able to update these default props
// for our servers with a new release without needing to worry about state of
// PouchDB in peoples installations
doc = {
- _id: HOSTING_DOC,
+ _id: StaticDatabases.BUILDER_HOSTING.baseDoc,
type: exports.HostingTypes.CLOUD,
hostingUrl: PROD_HOSTING_URL,
selfHostKey: "",
@@ -85,15 +85,11 @@ exports.getTemplatesUrl = async (appId, type, name) => {
}
exports.getDeployedApps = async () => {
- const hostingInfo = await exports.getHostingInfo()
- if (
- (!env.CLOUD && hostingInfo.type === exports.HostingTypes.CLOUD) ||
- (env.CLOUD && !env.SELF_HOSTED)
- ) {
+ if (!env.SELF_HOSTED) {
throw "Can only check apps for self hosted environments"
}
- const workerUrl = !env.CLOUD ? await exports.getWorkerUrl() : env.WORKER_URL
- const hostingKey = !env.CLOUD ? hostingInfo.selfHostKey : env.HOSTING_KEY
+ const workerUrl = env.WORKER_URL
+ const hostingKey = env.HOSTING_KEY
try {
const response = await fetch(`${workerUrl}/api/apps`, {
method: "GET",
diff --git a/packages/server/src/utilities/createAppPackage.js b/packages/server/src/utilities/createAppPackage.js
deleted file mode 100644
index 9500554227..0000000000
--- a/packages/server/src/utilities/createAppPackage.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const stream = require("stream")
-const fetch = require("node-fetch")
-const tar = require("tar-fs")
-const zlib = require("zlib")
-const { promisify } = require("util")
-const packageJson = require("../../package.json")
-
-const streamPipeline = promisify(stream.pipeline)
-
-// can't really test this due to the downloading nature of it, wouldn't be a great test case
-/* istanbul ignore next */
-exports.downloadExtractComponentLibraries = async appFolder => {
- const LIBRARIES = ["standard-components"]
-
- // Need to download tarballs directly from NPM as our users may not have node on their machine
- for (let lib of LIBRARIES) {
- // download tarball
- const registryUrl = `https://registry.npmjs.org/@budibase/${lib}/-/${lib}-${packageJson.version}.tgz`
- const response = await fetch(registryUrl)
- if (!response.ok)
- throw new Error(`unexpected response ${response.statusText}`)
-
- await streamPipeline(
- response.body,
- zlib.Unzip(),
- tar.extract(`${appFolder}/node_modules/@budibase/${lib}`)
- )
- }
-}
diff --git a/packages/server/src/utilities/fileSystem.js b/packages/server/src/utilities/fileSystem.js
deleted file mode 100644
index b55c4110fd..0000000000
--- a/packages/server/src/utilities/fileSystem.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const { budibaseTempDir } = require("./budibaseDir")
-const { isDev } = require("./index")
-const fs = require("fs")
-const { join } = require("path")
-const { downloadTemplate } = require("./templates")
-
-/**
- * The single stack system (Cloud and Builder) should not make use of the file system where possible,
- * this file handles all of the file access for the system with the intention of limiting it all to one
- * place. Keeping all of this logic in one place means that when we need to do file system access (like
- * downloading a package or opening a temporary file) in can be done in way that we can confirm it shouldn't
- * be done through an object store instead.
- */
-
-/**
- * Checks if the system is currently in development mode and if it is makes sure
- * everything required to function is ready.
- */
-exports.checkDevelopmentEnvironment = () => {
- if (isDev() && !fs.existsSync(budibaseTempDir())) {
- console.error(
- "Please run a build before attempting to run server independently to fill 'tmp' directory."
- )
- process.exit(-1)
- }
-}
-
-/**
- * This function manages temporary template files which are stored by Koa.
- * @param {Object} template The template object retrieved from the Koa context object.
- * @returns {Object} Returns an fs read stream which can be loaded into the database.
- */
-exports.getTemplateStream = async template => {
- if (template.file) {
- return fs.createReadStream(template.file.path)
- } else {
- const templatePath = await downloadTemplate(...template.key.split("/"))
- return fs.createReadStream(join(templatePath, "db", "dump.txt"))
- }
-}
diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js
new file mode 100644
index 0000000000..a64a24a9fc
--- /dev/null
+++ b/packages/server/src/utilities/fileSystem/index.js
@@ -0,0 +1,204 @@
+const { budibaseTempDir } = require("../budibaseDir")
+const { isDev } = require("../index")
+const fs = require("fs")
+const { join } = require("path")
+const uuid = require("uuid/v4")
+const CouchDB = require("../../db")
+const { ObjectStoreBuckets } = require("../../constants")
+const {
+ upload,
+ retrieve,
+ streamUpload,
+ deleteFolder,
+ downloadTarball,
+} = require("./utilities")
+const { downloadLibraries, newAppPublicPath } = require("./newApp")
+const download = require("download")
+const env = require("../../environment")
+const { homedir } = require("os")
+const fetch = require("node-fetch")
+
+const DEFAULT_AUTOMATION_BUCKET =
+ "https://prod-budi-automations.s3-eu-west-1.amazonaws.com"
+const DEFAULT_AUTOMATION_DIRECTORY = ".budibase-automations"
+
+/**
+ * The single stack system (Cloud and Builder) should not make use of the file system where possible,
+ * this file handles all of the file access for the system with the intention of limiting it all to one
+ * place. Keeping all of this logic in one place means that when we need to do file system access (like
+ * downloading a package or opening a temporary file) it can be done in a way that we can confirm it shouldn't
+ * be done through an object store instead.
+ */
+
+/**
+ * Upon first startup of instance there may not be everything we need in tmp directory, set it up.
+ */
+exports.init = () => {
+ const tempDir = budibaseTempDir()
+ if (!fs.existsSync(tempDir)) {
+ fs.mkdirSync(tempDir)
+ }
+}
+
+/**
+ * Checks if the system is currently in development mode and if it is makes sure
+ * everything required to function is ready.
+ */
+exports.checkDevelopmentEnvironment = () => {
+ if (!isDev()) {
+ return
+ }
+ let error
+ if (!fs.existsSync(budibaseTempDir())) {
+ error =
+ "Please run a build before attempting to run server independently to fill 'tmp' directory."
+ }
+ if (!fs.existsSync(join(process.cwd(), ".env"))) {
+ error = "Must run via yarn once to generate environment."
+ }
+ if (error) {
+ console.error(error)
+ process.exit(-1)
+ }
+}
+
+/**
+ * This function manages temporary template files which are stored by Koa.
+ * @param {Object} template The template object retrieved from the Koa context object.
+ * @returns {Object} Returns an fs read stream which can be loaded into the database.
+ */
+exports.getTemplateStream = async template => {
+ if (template.file) {
+ return fs.createReadStream(template.file.path)
+ } else {
+ const tmpPath = await exports.downloadTemplate(...template.key.split("/"))
+ return fs.createReadStream(join(tmpPath, "db", "dump.txt"))
+ }
+}
+
+/**
+ * Used to retrieve a handlebars file from the system which will be used as a template.
+ * This is allowable as the template handlebars files should be static and identical across
+ * the cluster.
+ * @param {string} path The path to the handlebars file which is to be loaded.
+ * @returns {string} The loaded handlebars file as a string - loaded as utf8.
+ */
+exports.loadHandlebarsFile = path => {
+ return fs.readFileSync(path, "utf8")
+}
+
+/**
+ * When returning a file from the API we need to write the file to the system temporarily so we
+ * can create a read stream to send.
+ * @param {string} contents the contents of the file which is to be returned from the API.
+ * @return {Object} the read stream which can be put into the koa context body.
+ */
+exports.apiFileReturn = contents => {
+ const path = join(budibaseTempDir(), uuid())
+ fs.writeFileSync(path, contents)
+ return fs.createReadStream(path)
+}
+
+/**
+ * Takes a copy of the database state for an app to the object store.
+ * @param {string} appId The ID of the app which is to be backed up.
+ * @param {string} backupName The name of the backup located in the object store.
+ * @return The backup has been completed when this promise completes and returns a file stream
+ * to the temporary backup file (to return via API if required).
+ */
+exports.performBackup = async (appId, backupName) => {
+ const path = join(budibaseTempDir(), backupName)
+ const writeStream = fs.createWriteStream(path)
+ // perform couch dump
+ const instanceDb = new CouchDB(appId)
+ await instanceDb.dump(writeStream, {})
+ // write the file to the object store
+ await streamUpload(
+ ObjectStoreBuckets.BACKUPS,
+ join(appId, backupName),
+ fs.createReadStream(path)
+ )
+ return fs.createReadStream(path)
+}
+
+/**
+ * Downloads required libraries and creates a new path in the object store.
+ * @param {string} appId The ID of the app which is being created.
+ * @return {Promise} once promise completes app resources should be ready in object store.
+ */
+exports.createApp = async appId => {
+ await downloadLibraries(appId)
+ await newAppPublicPath(appId)
+}
+
+/**
+ * Removes all of the assets created for an app in the object store.
+ * @param {string} appId The ID of the app which is being deleted.
+ * @return {Promise} once promise completes the app resources will be removed from object store.
+ */
+exports.deleteApp = async appId => {
+ await deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
+}
+
+/**
+ * Retrieves a template and pipes it to minio as well as making it available temporarily.
+ * @param {string} type The type of template which is to be retrieved.
+ * @param name
+ * @return {Promise<*>}
+ */
+exports.downloadTemplate = async (type, name) => {
+ const DEFAULT_TEMPLATES_BUCKET =
+ "prod-budi-templates.s3-eu-west-1.amazonaws.com"
+ const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
+ return downloadTarball(templateUrl, ObjectStoreBuckets.TEMPLATES, type)
+}
+
+/**
+ * Retrieves component libraries from object store (or tmp symlink if in local)
+ */
+exports.getComponentLibraryManifest = async (appId, library) => {
+ const path = join(appId, "node_modules", library, "package", "manifest.json")
+ let resp = await retrieve(ObjectStoreBuckets.APPS, path)
+ if (typeof resp !== "string") {
+ resp = resp.toString("utf8")
+ }
+ return JSON.parse(resp)
+}
+
+exports.automationInit = async () => {
+ const directory =
+ env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
+ const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
+ if (!fs.existsSync(directory)) {
+ fs.mkdirSync(directory, { recursive: true })
+ }
+ // env setup to get async packages
+ let response = await fetch(`${bucket}/manifest.json`)
+ return response.json()
+}
+
+exports.getExternalAutomationStep = async (name, version, bundleName) => {
+ const directory =
+ env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
+ const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
+ try {
+ return require(join(directory, bundleName))
+ } catch (err) {
+ await download(`${bucket}/${name}/${version}/${bundleName}`, directory)
+ return require(join(directory, bundleName))
+ }
+}
+
+/**
+ * All file reads come through here just to make sure all of them make sense
+ * allows a centralised location to check logic is all good.
+ */
+exports.readFileSync = (filepath, options = "utf8") => {
+ return fs.readFileSync(filepath, options)
+}
+
+/**
+ * Full function definition for below can be found in the utilities.
+ */
+exports.upload = upload
+exports.retrieve = retrieve
diff --git a/packages/server/src/utilities/fileSystem/newApp.js b/packages/server/src/utilities/fileSystem/newApp.js
new file mode 100644
index 0000000000..ba3d13afed
--- /dev/null
+++ b/packages/server/src/utilities/fileSystem/newApp.js
@@ -0,0 +1,34 @@
+const packageJson = require("../../../package.json")
+const { join } = require("path")
+const { ObjectStoreBuckets } = require("../../constants")
+const { streamUpload, downloadTarball } = require("./utilities")
+const fs = require("fs")
+
+const BUCKET_NAME = ObjectStoreBuckets.APPS
+
+// can't really test this due to the downloading nature of it, wouldn't be a great test case
+/* istanbul ignore next */
+exports.downloadLibraries = async appId => {
+ const LIBRARIES = ["standard-components"]
+
+ // Need to download tarballs directly from NPM as our users may not have node on their machine
+ for (let lib of LIBRARIES) {
+ // download tarball
+ const registryUrl = `https://registry.npmjs.org/@budibase/${lib}/-/${lib}-${packageJson.version}.tgz`
+ const path = join(appId, "node_modules", "@budibase", lib)
+ await downloadTarball(registryUrl, BUCKET_NAME, path)
+ }
+}
+
+exports.newAppPublicPath = async appId => {
+ const path = join(appId, "public")
+ const sourcepath = require.resolve("@budibase/client")
+ const destPath = join(path, "budibase-client.js")
+
+ await streamUpload(BUCKET_NAME, destPath, fs.createReadStream(sourcepath))
+ await streamUpload(
+ BUCKET_NAME,
+ destPath + ".map",
+ fs.createReadStream(sourcepath + ".map")
+ )
+}
diff --git a/packages/server/src/utilities/fileProcessor.js b/packages/server/src/utilities/fileSystem/processor.js
similarity index 62%
rename from packages/server/src/utilities/fileProcessor.js
rename to packages/server/src/utilities/fileSystem/processor.js
index 15132b2d49..3778b50168 100644
--- a/packages/server/src/utilities/fileProcessor.js
+++ b/packages/server/src/utilities/fileSystem/processor.js
@@ -1,25 +1,20 @@
-const fs = require("fs")
const jimp = require("jimp")
-const fsPromises = fs.promises
const FORMATS = {
IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"],
}
function processImage(file) {
+ // this will overwrite the temp file
return jimp.read(file.path).then(img => {
- return img.resize(300, jimp.AUTO).write(file.outputPath)
+ return img.resize(300, jimp.AUTO).write(file.path)
})
}
async function process(file) {
if (FORMATS.IMAGES.includes(file.extension.toLowerCase())) {
await processImage(file)
- return file
}
-
- // No processing required
- await fsPromises.copyFile(file.path, file.outputPath)
return file
}
diff --git a/packages/server/src/utilities/fileSystem/utilities.js b/packages/server/src/utilities/fileSystem/utilities.js
new file mode 100644
index 0000000000..db515106c0
--- /dev/null
+++ b/packages/server/src/utilities/fileSystem/utilities.js
@@ -0,0 +1,221 @@
+const sanitize = require("sanitize-s3-objectkey")
+const AWS = require("aws-sdk")
+const stream = require("stream")
+const fetch = require("node-fetch")
+const tar = require("tar-fs")
+const zlib = require("zlib")
+const { promisify } = require("util")
+const { join } = require("path")
+const fs = require("fs")
+const { budibaseTempDir } = require("../budibaseDir")
+const env = require("../../environment")
+const { ObjectStoreBuckets } = require("../../constants")
+
+const streamPipeline = promisify(stream.pipeline)
+
+const CONTENT_TYPE_MAP = {
+ html: "text/html",
+ css: "text/css",
+ js: "application/javascript",
+}
+const STRING_CONTENT_TYPES = [
+ CONTENT_TYPE_MAP.html,
+ CONTENT_TYPE_MAP.css,
+ CONTENT_TYPE_MAP.js,
+]
+
+function publicPolicy(bucketName) {
+ return {
+ Version: "2012-10-17",
+ Statement: [
+ {
+ Effect: "Allow",
+ Principal: {
+ AWS: ["*"],
+ },
+ Action: "s3:GetObject",
+ Resource: [`arn:aws:s3:::${bucketName}/*`],
+ },
+ ],
+ }
+}
+
+const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS]
+
+/**
+ * Gets a connection to the object store using the S3 SDK.
+ * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
+ * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
+ * @constructor
+ */
+exports.ObjectStore = bucket => {
+ if (env.SELF_HOSTED) {
+ AWS.config.update({
+ accessKeyId: env.MINIO_ACCESS_KEY,
+ secretAccessKey: env.MINIO_SECRET_KEY,
+ })
+ }
+ const config = {
+ s3ForcePathStyle: true,
+ signatureVersion: "v4",
+ params: {
+ Bucket: bucket,
+ },
+ }
+ if (env.MINIO_URL) {
+ config.endpoint = env.MINIO_URL
+ }
+ return new AWS.S3(config)
+}
+
+/**
+ * Given an object store and a bucket name this will make sure the bucket exists,
+ * if it does not exist then it will create it.
+ */
+exports.makeSureBucketExists = async (client, bucketName) => {
+ try {
+ await client
+ .headBucket({
+ Bucket: bucketName,
+ })
+ .promise()
+ } catch (err) {
+ // bucket doesn't exist create it
+ if (err.statusCode === 404) {
+ await client
+ .createBucket({
+ Bucket: bucketName,
+ })
+ .promise()
+ // public buckets are quite hidden in the system, make sure
+ // no bucket is set accidentally
+ if (PUBLIC_BUCKETS.includes(bucketName)) {
+ await client
+ .putBucketPolicy({
+ Bucket: bucketName,
+ Policy: JSON.stringify(publicPolicy(bucketName)),
+ })
+ .promise()
+ }
+ } else {
+ throw err
+ }
+ }
+}
+
+/**
+ * Uploads the contents of a file given the required parameters, useful when
+ * temp files in use (for example file uploaded as an attachment).
+ */
+exports.upload = async ({ bucket, filename, path, type, metadata }) => {
+ const extension = [...filename.split(".")].pop()
+ const fileBytes = fs.readFileSync(path)
+
+ const objectStore = exports.ObjectStore(bucket)
+ await exports.makeSureBucketExists(objectStore, bucket)
+
+ const config = {
+ // windows file paths need to be converted to forward slashes for s3
+ Key: sanitize(filename).replace(/\\/g, "/"),
+ Body: fileBytes,
+ ContentType: type || CONTENT_TYPE_MAP[extension.toLowerCase()],
+ }
+ if (metadata) {
+ config.Metadata = metadata
+ }
+ return objectStore.upload(config).promise()
+}
+
+/**
+ * Similar to the upload function but can be used to send a file stream
+ * through to the object store.
+ */
+exports.streamUpload = async (bucket, filename, stream) => {
+ const objectStore = exports.ObjectStore(bucket)
+ await exports.makeSureBucketExists(objectStore, bucket)
+
+ const params = {
+ Bucket: bucket,
+ Key: sanitize(filename).replace(/\\/g, "/"),
+ Body: stream,
+ }
+ return objectStore.upload(params).promise()
+}
+
+/**
+ * retrieves the contents of a file from the object store, if it is a known content type it
+ * will be converted, otherwise it will be returned as a buffer stream.
+ */
+exports.retrieve = async (bucket, filename) => {
+ const objectStore = exports.ObjectStore(bucket)
+ const params = {
+ Bucket: bucket,
+ Key: sanitize(filename).replace(/\\/g, "/"),
+ }
+ const response = await objectStore.getObject(params).promise()
+ // currently these are all strings
+ if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
+ return response.Body.toString("utf8")
+ } else {
+ return response.Body
+ }
+}
+
+exports.deleteFolder = async (bucket, folder) => {
+ const client = exports.ObjectStore(bucket)
+ const listParams = {
+ Bucket: bucket,
+ Prefix: folder,
+ }
+
+ let response = await client.listObjects(listParams).promise()
+ if (response.Contents.length === 0) {
+ return
+ }
+ const deleteParams = {
+ Bucket: bucket,
+ Delete: {
+ Objects: [],
+ },
+ }
+
+ response.Contents.forEach(content => {
+ deleteParams.Delete.Objects.push({ Key: content.Key })
+ })
+
+ response = await client.deleteObjects(deleteParams).promise()
+ // can only empty 1000 items at once
+ if (response.Deleted.length === 1000) {
+ return exports.deleteFolder(bucket, folder)
+ }
+}
+
+exports.uploadDirectory = async (bucket, localPath, bucketPath) => {
+ let uploads = []
+ const files = fs.readdirSync(localPath, { withFileTypes: true })
+ for (let file of files) {
+ const path = join(bucketPath, file.name)
+ const local = join(localPath, file.name)
+ if (file.isDirectory()) {
+ uploads.push(exports.uploadDirectory(bucket, local, path))
+ } else {
+ uploads.push(
+ exports.streamUpload(bucket, path, fs.createReadStream(local))
+ )
+ }
+ }
+ await Promise.all(uploads)
+}
+
+exports.downloadTarball = async (url, bucket, path) => {
+ const response = await fetch(url)
+ if (!response.ok) {
+ throw new Error(`unexpected response ${response.statusText}`)
+ }
+
+ const tmpPath = join(budibaseTempDir(), path)
+ await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
+ await exports.uploadDirectory(bucket, tmpPath, path)
+ // return the temporary path in case there is a use for it
+ return tmpPath
+}
diff --git a/packages/server/src/utilities/index.js b/packages/server/src/utilities/index.js
index 7420884d44..7d6794b1b3 100644
--- a/packages/server/src/utilities/index.js
+++ b/packages/server/src/utilities/index.js
@@ -1,6 +1,5 @@
const env = require("../environment")
const { DocumentTypes, SEPARATOR } = require("../db/utils")
-const fs = require("fs")
const CouchDB = require("../db")
const APP_PREFIX = DocumentTypes.APP + SEPARATOR
@@ -13,14 +12,7 @@ function confirmAppId(possibleAppId) {
exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))
-exports.isDev = () => {
- return (
- !env.CLOUD &&
- env.NODE_ENV !== "production" &&
- env.NODE_ENV !== "jest" &&
- env.NODE_ENV !== "cypress"
- )
-}
+exports.isDev = env.isDev
/**
* Given a request tries to find the appId, which can be located in various places
@@ -28,10 +20,18 @@ exports.isDev = () => {
* @returns {string|undefined} If an appId was found it will be returned.
*/
exports.getAppId = ctx => {
- let appId = confirmAppId(ctx.headers["x-budibase-app-id"])
- if (!appId) {
- appId = confirmAppId(env.CLOUD ? ctx.subdomains[1] : ctx.params.appId)
+ const options = [ctx.headers["x-budibase-app-id"], ctx.params.appId]
+ if (ctx.subdomains) {
+ options.push(ctx.subdomains[1])
}
+ let appId
+ for (let option of options) {
+ appId = confirmAppId(option)
+ if (appId) {
+ break
+ }
+ }
+
// look in body if can't find it in subdomain
if (!appId && ctx.request.body && ctx.request.body.appId) {
appId = confirmAppId(ctx.request.body.appId)
@@ -51,7 +51,7 @@ exports.getAppId = ctx => {
* @returns {string} The name of the token trying to find
*/
exports.getCookieName = (name = "builder") => {
- let environment = env.CLOUD ? "cloud" : "local"
+ let environment = env.isProd() ? "cloud" : "local"
return `budibase:${name}:${environment}`
}
@@ -89,24 +89,6 @@ exports.isClient = ctx => {
return ctx.headers["x-budibase-type"] === "client"
}
-/**
- * Recursively walk a directory tree and execute a callback on all files.
- * @param {String} dirPath - Directory to traverse
- * @param {Function} callback - callback to execute on files
- */
-exports.walkDir = (dirPath, callback) => {
- for (let filename of fs.readdirSync(dirPath)) {
- const filePath = `${dirPath}/${filename}`
- const stat = fs.lstatSync(filePath)
-
- if (stat.isFile()) {
- callback(filePath)
- } else {
- exports.walkDir(filePath, callback)
- }
- }
-}
-
exports.getLogoUrl = () => {
return "https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg"
}
diff --git a/packages/server/src/utilities/rowProcessor.js b/packages/server/src/utilities/rowProcessor.js
index eddd597459..97e2a2880c 100644
--- a/packages/server/src/utilities/rowProcessor.js
+++ b/packages/server/src/utilities/rowProcessor.js
@@ -180,7 +180,7 @@ exports.outputProcessing = async (appId, table, rows) => {
rows
)
// update the attachments URL depending on hosting
- if (env.CLOUD && env.SELF_HOSTED) {
+ if (env.isProd() && env.SELF_HOSTED) {
for (let [property, column] of Object.entries(table.schema)) {
if (column.type === FieldTypes.ATTACHMENT) {
for (let row of outputRows) {
diff --git a/packages/server/src/utilities/templates.js b/packages/server/src/utilities/templates.js
deleted file mode 100644
index c3d89477df..0000000000
--- a/packages/server/src/utilities/templates.js
+++ /dev/null
@@ -1,81 +0,0 @@
-const fs = require("fs-extra")
-const { join } = require("./centralPath")
-const os = require("os")
-const fetch = require("node-fetch")
-const stream = require("stream")
-const tar = require("tar-fs")
-const zlib = require("zlib")
-const { promisify } = require("util")
-const streamPipeline = promisify(stream.pipeline)
-const { budibaseAppsDir } = require("./budibaseDir")
-const env = require("../environment")
-const CouchDB = require("../db")
-
-const DEFAULT_TEMPLATES_BUCKET =
- "prod-budi-templates.s3-eu-west-1.amazonaws.com"
-
-exports.getLocalTemplates = function() {
- const templatesDir = join(os.homedir(), ".budibase", "templates", "app")
- const templateObj = { app: {} }
- fs.ensureDirSync(templatesDir)
- const templateNames = fs.readdirSync(templatesDir)
- for (let name of templateNames) {
- templateObj.app[name] = {
- name,
- category: "local",
- description: "local template",
- type: "app",
- key: `app/${name}`,
- }
- }
- return templateObj
-}
-
-// can't really test this, downloading is just not something we should do in a behavioural test
-/* istanbul ignore next */
-exports.downloadTemplate = async function(type, name) {
- const dirName = join(budibaseAppsDir(), "templates", type, name)
- if (env.LOCAL_TEMPLATES) {
- return dirName
- }
- const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
- const response = await fetch(templateUrl)
-
- if (!response.ok) {
- throw new Error(
- `Error downloading template ${type}:${name}: ${response.statusText}`
- )
- }
-
- // stream the response, unzip and extract
- await streamPipeline(
- response.body,
- zlib.Unzip(),
- tar.extract(join(budibaseAppsDir(), "templates", type))
- )
-
- return dirName
-}
-
-async function performDump({ dir, appId, name = "dump.txt" }) {
- const writeStream = fs.createWriteStream(join(dir, name))
- // perform couch dump
- const instanceDb = new CouchDB(appId)
- await instanceDb.dump(writeStream, {})
-}
-
-exports.performDump = performDump
-
-exports.exportTemplateFromApp = async function({ templateName, appId }) {
- // Copy frontend files
- const templatesDir = join(
- budibaseAppsDir(),
- "templates",
- "app",
- templateName,
- "db"
- )
- fs.ensureDirSync(templatesDir)
- await performDump({ dir: templatesDir, appId })
- return templatesDir
-}
diff --git a/packages/server/src/utilities/tests/csvParser.spec.js b/packages/server/src/utilities/tests/csvParser.spec.js
index 48e275fdd1..76ea9a7eb3 100644
--- a/packages/server/src/utilities/tests/csvParser.spec.js
+++ b/packages/server/src/utilities/tests/csvParser.spec.js
@@ -1,4 +1,4 @@
-const fs = require("fs")
+const { readFileSync } = require("../fileSystem")
const csvParser = require("../csvParser")
const CSV_PATH = __dirname + "/test.csv"
@@ -33,7 +33,7 @@ const SCHEMAS = {
}
describe("CSV Parser", () => {
- const csvString = fs.readFileSync(CSV_PATH, "utf8")
+ const csvString = readFileSync(CSV_PATH, "utf8")
describe("parsing", () => {
it("returns status and types for a valid CSV transformation", async () => {
diff --git a/packages/server/src/utilities/usageQuota.js b/packages/server/src/utilities/usageQuota.js
index d809d9e673..d042d290d5 100644
--- a/packages/server/src/utilities/usageQuota.js
+++ b/packages/server/src/utilities/usageQuota.js
@@ -50,7 +50,7 @@ exports.Properties = {
}
exports.getAPIKey = async appId => {
- if (env.SELF_HOSTED) {
+ if (!env.USE_QUOTAS) {
return { apiKey: null }
}
return apiKeyTable.get({ primary: appId })
@@ -65,8 +65,7 @@ exports.getAPIKey = async appId => {
* also been reset after this call.
*/
exports.update = async (apiKey, property, usage) => {
- // don't try validate in builder
- if (!env.CLOUD || env.SELF_HOSTED) {
+ if (!env.USE_QUOTAS) {
return
}
try {