Main work of the file system refactor is now complete and ready for fuller testing - most test cases are passing, but they still need a more thorough look to make sure everything makes sense.
This commit is contained in:
parent 00344b3b39
commit 34918013cb
@@ -27,7 +27,7 @@
  notifier.success("Datasource deleted")
  // navigate to first index page if the source you are deleting is selected
  if (wasSelectedSource === datasource._id) {
    $goto('./datasource')
    $goto("./datasource")
  }
  hideEditor()
}
@@ -43,7 +43,7 @@
  await backendUiStore.actions.tables.fetch()
  notifier.success("Table deleted")
  if (wasSelectedTable._id === table._id) {
    $goto('./table')
    $goto("./table")
  }
  hideEditor()
}
@@ -210,7 +210,9 @@ exports.delete = async function(ctx) {
  const app = await db.get(ctx.params.appId)
  const result = await db.destroy()

  if (env.NODE_ENV !== "jest") {
    await deleteApp(ctx.params.appId)
  }

  ctx.status = 200
  ctx.message = `Application ${app.name} deleted successfully.`
@@ -1,44 +1,41 @@
const CouchDB = require("../../db")
const { resolve, join } = require("../../utilities/centralPath")
const {
  budibaseTempDir,
  budibaseAppsDir,
} = require("../../utilities/budibaseDir")
const { join } = require("../../utilities/centralPath")
const { budibaseTempDir } = require("../../utilities/budibaseDir")
const fileSystem = require("../../utilities/fileSystem")

exports.fetchAppComponentDefinitions = async function(ctx) {
  const appId = ctx.params.appId || ctx.appId
  const db = new CouchDB(appId)
  const app = await db.get(appId)

  ctx.body = app.componentLibraries.reduce((acc, componentLibrary) => {
    let appDirectory = resolve(budibaseAppsDir(), appId, "node_modules")

  let componentManifests = await Promise.all(
    app.componentLibraries.map(async library => {
      let manifest
      if (ctx.isDev) {
        appDirectory = budibaseTempDir()
      }

      const componentJson = require(join(
        appDirectory,
        componentLibrary,
        manifest = require(join(
          budibaseTempDir(),
          library,
          ctx.isDev ? "" : "package",
          "manifest.json"
        ))

      const result = {}

      // map over the components.json and add the library identifier as a key
      // button -> @budibase/standard-components/button
      for (let key of Object.keys(componentJson)) {
        const fullComponentName = `${componentLibrary}/${key}`.toLowerCase()
        result[fullComponentName] = {
          component: fullComponentName,
          ...componentJson[key],
      } else {
        manifest = await fileSystem.getComponentLibraryManifest(appId, library)
      }
    }

      return {
        ...acc,
        ...result,
        manifest,
        library,
      }
  }, {})
    })
  )
  const definitions = {}
  for (let { manifest, library } of componentManifests) {
    for (let key of Object.keys(manifest)) {
      const fullComponentName = `${library}/${key}`.toLowerCase()
      definitions[fullComponentName] = {
        component: fullComponentName,
        ...manifest[key],
      }
    }
  }
  ctx.body = definitions
}
@@ -1,11 +1,26 @@
const { walkDir } = require("../../../utilities")
const { join } = require("../../../utilities/centralPath")
const fs = require("fs")
const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
const fetch = require("node-fetch")
const PouchDB = require("../../../db")
const CouchDB = require("pouchdb")
const { upload } = require("../../../utilities/fileSystem")

// TODO: everything in this file is to be removed

function walkDir(dirPath, callback) {
  for (let filename of fs.readdirSync(dirPath)) {
    const filePath = `${dirPath}/${filename}`
    const stat = fs.lstatSync(filePath)

    if (stat.isFile()) {
      callback(filePath)
    } else {
      walkDir(filePath, callback)
    }
  }
}

exports.fetchCredentials = async function(url, body) {
  const response = await fetch(url, {
    method: "POST",
@@ -1,4 +1,3 @@
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")

describe("/authenticate", () => {
@@ -1,8 +1,15 @@
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
const fs = require("fs")
const { resolve, join } = require("path")
const { budibaseAppsDir } = require("../../../utilities/budibaseDir")

jest.mock("../../../utilities/fileSystem/utilities", () => ({
  ...jest.requireActual("../../../utilities/fileSystem/utilities"),
  retrieve: () => {
    const { join } = require("path")
    const library = join("@budibase", "standard-components")
    const path = require.resolve(library).split(join("dist", "index.js"))[0] + "manifest.json"
    return JSON.stringify(require(path))
  }
}))

describe("/component", () => {
  let request = setup.getRequest()
@@ -14,23 +21,8 @@ describe("/component", () => {
    await config.init()
  })

  function mock() {
    const manifestFile = "manifest.json"
    const appId = config.getAppId()
    const libraries = [join("@budibase", "standard-components")]
    for (let library of libraries) {
      let appDirectory = resolve(budibaseAppsDir(), appId, "node_modules", library, "package")
      fs.mkdirSync(appDirectory, { recursive: true })

      const file = require.resolve(library).split(join("dist", "index.js"))[0] + manifestFile
      fs.copyFileSync(file, join(appDirectory, manifestFile))
    }
  }

  describe("fetch definitions", () => {
    it("should be able to fetch definitions", async () => {
      // have to "mock" the files required
      mock()
      const res = await request
        .get(`/${config.getAppId()}/components/definitions`)
        .set(config.defaultHeaders())
@@ -5,17 +5,12 @@ const deleteRow = require("./steps/deleteRow")
const createUser = require("./steps/createUser")
const outgoingWebhook = require("./steps/outgoingWebhook")
const env = require("../environment")
const download = require("download")
const fetch = require("node-fetch")
const { join } = require("../utilities/centralPath")
const os = require("os")
const fs = require("fs")
const Sentry = require("@sentry/node")
const {
  automationInit,
  getExternalAutomationStep,
} = require("../utilities/fileSystem")

const DEFAULT_BUCKET =
  "https://prod-budi-automations.s3-eu-west-1.amazonaws.com"
const DEFAULT_DIRECTORY = ".budibase-automations"
const AUTOMATION_MANIFEST = "manifest.json"
const BUILTIN_ACTIONS = {
  SEND_EMAIL: sendEmail.run,
  CREATE_ROW: createRow.run,
@@ -33,8 +28,6 @@ const BUILTIN_DEFINITIONS = {
  OUTGOING_WEBHOOK: outgoingWebhook.definition,
}

let AUTOMATION_BUCKET = env.AUTOMATION_BUCKET
let AUTOMATION_DIRECTORY = env.AUTOMATION_DIRECTORY
let MANIFEST = null

/* istanbul ignore next */
@@ -42,15 +35,6 @@ function buildBundleName(pkgName, version) {
  return `${pkgName}@${version}.min.js`
}

/* istanbul ignore next */
async function downloadPackage(name, version, bundleName) {
  await download(
    `${AUTOMATION_BUCKET}/${name}/${version}/${bundleName}`,
    AUTOMATION_DIRECTORY
  )
  return require(join(AUTOMATION_DIRECTORY, bundleName))
}

/* istanbul ignore next */
module.exports.getAction = async function(actionName) {
  if (BUILTIN_ACTIONS[actionName] != null) {
@@ -66,28 +50,12 @@ module.exports.getAction = async function(actionName) {
  }
  const pkg = MANIFEST.packages[actionName]
  const bundleName = buildBundleName(pkg.stepId, pkg.version)
  try {
    return require(join(AUTOMATION_DIRECTORY, bundleName))
  } catch (err) {
    return downloadPackage(pkg.stepId, pkg.version, bundleName)
  }
  return getExternalAutomationStep(pkg.stepId, pkg.version, bundleName)
}

module.exports.init = async function() {
  // set defaults
  if (!AUTOMATION_DIRECTORY) {
    AUTOMATION_DIRECTORY = join(os.homedir(), DEFAULT_DIRECTORY)
  }
  if (!AUTOMATION_BUCKET) {
    AUTOMATION_BUCKET = DEFAULT_BUCKET
  }
  if (!fs.existsSync(AUTOMATION_DIRECTORY)) {
    fs.mkdirSync(AUTOMATION_DIRECTORY, { recursive: true })
  }
  // env setup to get async packages
  try {
    let response = await fetch(`${AUTOMATION_BUCKET}/${AUTOMATION_MANIFEST}`)
    MANIFEST = await response.json()
  MANIFEST = await automationInit()
  module.exports.DEFINITIONS =
    MANIFEST && MANIFEST.packages
      ? Object.assign(MANIFEST.packages, BUILTIN_DEFINITIONS)
@@ -14,9 +14,6 @@ const {
} = require("./structures")
const controllers = require("./controllers")
const supertest = require("supertest")
const fs = require("fs")
const { budibaseAppsDir } = require("../../utilities/budibaseDir")
const { join } = require("path")

const EMAIL = "babs@babs.com"
const PASSWORD = "babs_password"
@@ -66,13 +63,6 @@ class TestConfiguration {
    if (this.server) {
      this.server.close()
    }
    const appDir = budibaseAppsDir()
    const files = fs.readdirSync(appDir)
    for (let file of files) {
      if (this.allApps.some(app => file.includes(app._id))) {
        fs.rmdirSync(join(appDir, file), { recursive: true })
      }
    }
  }

  defaultHeaders() {
@@ -5,8 +5,21 @@ const { join } = require("path")
const uuid = require("uuid/v4")
const CouchDB = require("../../db")
const { ObjectStoreBuckets } = require("../../constants")
const { upload, streamUpload, deleteFolder, downloadTarball } = require("./utilities")
const {
  upload,
  retrieve,
  streamUpload,
  deleteFolder,
  downloadTarball,
} = require("./utilities")
const { downloadLibraries, newAppPublicPath } = require("./newApp")
const download = require("download")
const env = require("../../environment")
const { homedir } = require("os")

const DEFAULT_AUTOMATION_BUCKET =
  "https://prod-budi-automations.s3-eu-west-1.amazonaws.com"
const DEFAULT_AUTOMATION_DIRECTORY = ".budibase-automations"

/**
 * The single stack system (Cloud and Builder) should not make use of the file system where possible,
@@ -21,10 +34,19 @@ const { downloadLibraries, newAppPublicPath } = require("./newApp")
 * everything required to function is ready.
 */
exports.checkDevelopmentEnvironment = () => {
  if (isDev() && !fs.existsSync(budibaseTempDir())) {
    console.error(
  if (!isDev()) {
    return
  }
  let error
  if (!fs.existsSync(budibaseTempDir())) {
    error =
      "Please run a build before attempting to run server independently to fill 'tmp' directory."
    )
  }
  if (!fs.existsSync(join(process.cwd(), ".env"))) {
    error = "Must run via yarn once to generate environment."
  }
  if (error) {
    console.error(error)
    process.exit(-1)
  }
}
@@ -66,6 +88,13 @@ exports.apiFileReturn = contents => {
  return fs.createReadStream(path)
}

/**
 * Takes a copy of the database state for an app to the object store.
 * @param {string} appId The ID of the app which is to be backed up.
 * @param {string} backupName The name of the backup located in the object store.
 * @return The backup has been completed when this promise completes and returns a file stream
 * to the temporary backup file (to return via API if required).
 */
exports.performBackup = async (appId, backupName) => {
  const path = join(budibaseTempDir(), backupName)
  const writeStream = fs.createWriteStream(path)
@@ -81,15 +110,31 @@ exports.performBackup = async (appId, backupName) => {
  return fs.createReadStream(path)
}

/**
 * Downloads required libraries and creates a new path in the object store.
 * @param {string} appId The ID of the app which is being created.
 * @return {Promise<void>} once promise completes app resources should be ready in object store.
 */
exports.createApp = async appId => {
  await downloadLibraries(appId)
  await newAppPublicPath(appId)
}

/**
 * Removes all of the assets created for an app in the object store.
 * @param {string} appId The ID of the app which is being deleted.
 * @return {Promise<void>} once promise completes the app resources will be removed from object store.
 */
exports.deleteApp = async appId => {
  await deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
}

/**
 * Retrieves a template and pipes it to minio as well as making it available temporarily.
 * @param {string} type The type of template which is to be retrieved.
 * @param name
 * @return {Promise<*>}
 */
exports.downloadTemplate = async (type, name) => {
  const DEFAULT_TEMPLATES_BUCKET =
    "prod-budi-templates.s3-eu-west-1.amazonaws.com"
@@ -97,6 +142,44 @@ exports.downloadTemplate = async (type, name) => {
  return downloadTarball(templateUrl, ObjectStoreBuckets.TEMPLATES, type)
}

/**
 * Retrieves component libraries from object store (or tmp symlink if in local)
 */
exports.getComponentLibraryManifest = async (appId, library) => {
  const path = join(appId, "node_modules", library, "package", "manifest.json")
  let resp = await retrieve(ObjectStoreBuckets.APPS, path)
  if (typeof resp !== "string") {
    resp = resp.toString("utf8")
  }
  return JSON.parse(resp)
}

exports.automationInit = async () => {
  const directory =
    env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
  const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
  if (!fs.existsSync(directory)) {
    fs.mkdirSync(directory, { recursive: true })
  }
  // env setup to get async packages
  let response = await fetch(`${bucket}/manifest.json`)
  return response.json()
}

exports.getExternalAutomationStep = async (name, version, bundleName) => {
  const directory = env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
  const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
  try {
    return require(join(directory, bundleName))
  } catch (err) {
    await download(
      `${bucket}/${name}/${version}/${bundleName}`,
      directory
    )
    return require(join(directory, bundleName))
  }
}

/**
 * All file reads come through here just to make sure all of them make sense
 * allows a centralised location to check logic is all good.
@@ -106,6 +189,7 @@ exports.readFileSync = (filepath, options = "utf8") => {
}

/**
 * Full function definition provided in the utilities.
 * Full function definition for below can be found in the utilities.
 */
exports.upload = upload
exports.retrieve = retrieve
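For orientation, a minimal usage sketch of the new fileSystem helpers added above. Only the exported function names and signatures come from the diff; the require path, app ID and library name below are illustrative assumptions, not part of the commit.

// illustrative sketch - require path, appId and library name are assumptions
const fileSystem = require("./utilities/fileSystem")

async function exampleUsage() {
  // pull a component library manifest for an app out of the object store
  const manifest = await fileSystem.getComponentLibraryManifest(
    "app_123456", // hypothetical appId
    "@budibase/standard-components"
  )
  // fetch the automation manifest (from env bucket/directory or the defaults)
  const automationManifest = await fileSystem.automationInit()
  return { manifest, automationManifest }
}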
@@ -10,6 +10,7 @@ const { streamUpload } = require("./utilities")
const fs = require("fs")
const { budibaseTempDir } = require("../budibaseDir")
const env = require("../../environment")
const { ObjectStoreBuckets } = require("../../constants")

const streamPipeline = promisify(stream.pipeline)
@@ -18,6 +19,29 @@ const CONTENT_TYPE_MAP = {
  css: "text/css",
  js: "application/javascript",
}
const STRING_CONTENT_TYPES = [
  CONTENT_TYPE_MAP.html,
  CONTENT_TYPE_MAP.css,
  CONTENT_TYPE_MAP.js,
]

function publicPolicy(bucketName) {
  return {
    Version: "2012-10-17",
    Statement: [
      {
        Effect: "Allow",
        Principal: {
          AWS: ["*"],
        },
        Action: "s3:GetObject",
        Resource: [`arn:aws:s3:::${bucketName}/*`],
      },
    ],
  }
}

const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS]

/**
 * Gets a connection to the object store using the S3 SDK.
@@ -26,17 +50,23 @@ const CONTENT_TYPE_MAP = {
 * @constructor
 */
exports.ObjectStore = bucket => {
  return new AWS.S3({
    // TODO: need to deal with endpoint properly
    endpoint: env.MINIO_URL,
  const config = {
    s3ForcePathStyle: true, // needed with minio?
    signatureVersion: "v4",
    params: {
      Bucket: bucket,
    },
  })
  }
  if (env.MINIO_URL) {
    config.endpoint = env.MINIO_URL
  }
  return new AWS.S3(config)
}

/**
 * Given an object store and a bucket name this will make sure the bucket exists,
 * if it does not exist then it will create it.
 */
exports.makeSureBucketExists = async (client, bucketName) => {
  try {
    await client
@@ -52,6 +82,16 @@ exports.makeSureBucketExists = async (client, bucketName) => {
        Bucket: bucketName,
      })
      .promise()
    // public buckets are quite hidden in the system, make sure
    // no bucket is set accidentally
    if (PUBLIC_BUCKETS.includes(bucketName)) {
      await client
        .putBucketPolicy({
          Bucket: bucketName,
          Policy: JSON.stringify(publicPolicy(bucketName)),
        })
        .promise()
    }
  } else {
    throw err
  }
@@ -61,13 +101,6 @@ exports.makeSureBucketExists = async (client, bucketName) => {
/**
 * Uploads the contents of a file given the required parameters, useful when
 * temp files in use (for example file uploaded as an attachment).
 * @param {string} bucket The name of the bucket to be uploaded to.
 * @param {string} filename The name/path of the file in the object store.
 * @param {string} path The path to the file (ideally a temporary file).
 * @param {string} type If the content type is known can be specified.
 * @param {object} metadata If there is metadata for the object it can be passed as well.
 * @return {Promise<ManagedUpload.SendData>} The file has been uploaded to the object store successfully when
 * promise completes.
 */
exports.upload = async ({ bucket, filename, path, type, metadata }) => {
  const extension = [...filename.split(".")].pop()
@@ -86,6 +119,10 @@ exports.upload = async ({ bucket, filename, path, type, metadata }) => {
  return objectStore.upload(config).promise()
}

/**
 * Similar to the upload function but can be used to send a file stream
 * through to the object store.
 */
exports.streamUpload = async (bucket, filename, stream) => {
  const objectStore = exports.ObjectStore(bucket)
  await exports.makeSureBucketExists(objectStore, bucket)
@@ -98,6 +135,25 @@ exports.streamUpload = async (bucket, filename, stream) => {
  return objectStore.upload(params).promise()
}

/**
 * retrieves the contents of a file from the object store, if it is a known content type it
 * will be converted, otherwise it will be returned as a buffer stream.
 */
exports.retrieve = async (bucket, filename) => {
  const objectStore = exports.ObjectStore(bucket)
  const params = {
    Bucket: bucket,
    Key: sanitize(filename).replace(/\\/g, "/"),
  }
  const response = await objectStore.getObject(params).promise()
  // currently these are all strings
  if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
    return response.Body.toString("utf8")
  } else {
    return response.Body
  }
}

exports.deleteFolder = async (bucket, folder) => {
  const client = exports.ObjectStore(bucket)
  const listParams = {
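A minimal sketch of how the new upload/retrieve pair could be exercised, assuming only the exports shown above; the require paths, object key, local temp path and the assumption that text/html is among the string content types are illustrative, not taken from the commit.

// illustrative sketch - paths and keys are assumptions, not from the commit
const { upload, retrieve } = require("./utilities")
const { ObjectStoreBuckets } = require("../../constants")

async function roundTrip() {
  // push a local HTML file into the apps bucket
  await upload({
    bucket: ObjectStoreBuckets.APPS,
    filename: "app_123456/index.html", // hypothetical object key
    path: "/tmp/index.html", // hypothetical local temp file
    type: "text/html",
  })
  // if text/html is registered as a string content type this comes back as a
  // utf8 string, otherwise a buffer is returned
  return retrieve(ObjectStoreBuckets.APPS, "app_123456/index.html")
}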
@@ -1,6 +1,5 @@
const env = require("../environment")
const { DocumentTypes, SEPARATOR } = require("../db/utils")
const fs = require("fs")
const CouchDB = require("../db")

const APP_PREFIX = DocumentTypes.APP + SEPARATOR
@@ -82,24 +81,6 @@ exports.isClient = ctx => {
  return ctx.headers["x-budibase-type"] === "client"
}

/**
 * Recursively walk a directory tree and execute a callback on all files.
 * @param {String} dirPath - Directory to traverse
 * @param {Function} callback - callback to execute on files
 */
exports.walkDir = (dirPath, callback) => {
  for (let filename of fs.readdirSync(dirPath)) {
    const filePath = `${dirPath}/${filename}`
    const stat = fs.lstatSync(filePath)

    if (stat.isFile()) {
      callback(filePath)
    } else {
      exports.walkDir(filePath, callback)
    }
  }
}

exports.getLogoUrl = () => {
  return "https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg"
}