Merge pull request #1461 from Budibase/feature/logo-api
Configuration file upload API
Commit: fdf39520e3
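For context, the endpoint this PR adds is POST /api/admin/configs/upload/:type/:name, which accepts a single multipart field named "file" (see the admin configs controller and routes further down in this diff). A minimal client-side sketch, assuming node-fetch v2 plus the form-data package and a locally running worker; the host, port, auth handling and the "settings" config type used here are illustrative assumptions, not part of this diff:

// Hypothetical usage of the new config upload API (connection details are placeholders).
const fetch = require("node-fetch")
const FormData = require("form-data")
const fs = require("fs")

async function uploadLogo() {
  const form = new FormData()
  // the controller reads ctx.request.files.file, so the field must be named "file"
  form.append("file", fs.createReadStream("./logo.png"))

  const res = await fetch(
    "http://localhost:4002/api/admin/configs/upload/settings/logo",
    { method: "POST", body: form, headers: form.getHeaders() }
  )
  // expected response shape per the controller: { message: "...", url: "/global/settings/logo.png" }
  console.log(await res.json())
}

uploadLogo()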
@@ -38,6 +38,9 @@ static_resources:
  route:
    cluster: server-dev

  # the below three cases are needed to make sure
  # all traffic prefixed for the builder is passed through
  # correctly.
  - match: { path: "/" }
    route:
      cluster: builder-dev

@@ -6,15 +6,20 @@
  "author": "Budibase",
  "license": "AGPL-3.0",
  "dependencies": {
    "aws-sdk": "^2.901.0",
    "bcryptjs": "^2.4.3",
    "ioredis": "^4.27.1",
    "jsonwebtoken": "^8.5.1",
    "koa-passport": "^4.1.4",
    "node-fetch": "^2.6.1",
    "passport-google-auth": "^1.0.2",
    "passport-google-oauth": "^2.0.0",
    "passport-jwt": "^4.0.0",
    "passport-local": "^1.0.0",
    "uuid": "^8.3.2"
    "sanitize-s3-objectkey": "^0.0.1",
    "tar-fs": "^2.1.1",
    "uuid": "^8.3.2",
    "zlib": "^1.0.5"
  },
  "devDependencies": {
    "ioredis-mock": "^5.5.5"

@@ -12,5 +12,8 @@ module.exports = {
  SALT_ROUNDS: process.env.SALT_ROUNDS,
  REDIS_URL: process.env.REDIS_URL,
  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
  MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
  MINIO_URL: process.env.MINIO_URL,
  isTest,
}

@@ -32,6 +32,10 @@ module.exports = {
    Client: require("./redis"),
    utils: require("./redis/utils"),
  },
  objectStore: {
    ...require("./objectStore"),
    ...require("./objectStore/utils"),
  },
  utils: {
    ...require("./utils"),
    ...require("./hashing"),

@@ -0,0 +1,240 @@
const sanitize = require("sanitize-s3-objectkey")
const AWS = require("aws-sdk")
const stream = require("stream")
const fetch = require("node-fetch")
const tar = require("tar-fs")
const zlib = require("zlib")
const { promisify } = require("util")
const { join } = require("path")
const fs = require("fs")
const env = require("../environment")
const { budibaseTempDir, ObjectStoreBuckets } = require("./utils")
const { v4 } = require("uuid")

const streamPipeline = promisify(stream.pipeline)
// use this as a temporary store of buckets that are being created
const STATE = {
  bucketCreationPromises: {},
}

const CONTENT_TYPE_MAP = {
  html: "text/html",
  css: "text/css",
  js: "application/javascript",
}
const STRING_CONTENT_TYPES = [
  CONTENT_TYPE_MAP.html,
  CONTENT_TYPE_MAP.css,
  CONTENT_TYPE_MAP.js,
]

function publicPolicy(bucketName) {
  return {
    Version: "2012-10-17",
    Statement: [
      {
        Effect: "Allow",
        Principal: {
          AWS: ["*"],
        },
        Action: "s3:GetObject",
        Resource: [`arn:aws:s3:::${bucketName}/*`],
      },
    ],
  }
}

const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]

/**
 * Gets a connection to the object store using the S3 SDK.
 * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
 * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
 * @constructor
 */
exports.ObjectStore = bucket => {
  AWS.config.update({
    accessKeyId: env.MINIO_ACCESS_KEY,
    secretAccessKey: env.MINIO_SECRET_KEY,
  })
  const config = {
    s3ForcePathStyle: true,
    signatureVersion: "v4",
    params: {
      Bucket: bucket,
    },
  }
  if (env.MINIO_URL) {
    config.endpoint = env.MINIO_URL
  }
  return new AWS.S3(config)
}

/**
 * Given an object store and a bucket name this will make sure the bucket exists,
 * if it does not exist then it will create it.
 */
exports.makeSureBucketExists = async (client, bucketName) => {
  try {
    await client
      .headBucket({
        Bucket: bucketName,
      })
      .promise()
  } catch (err) {
    const promises = STATE.bucketCreationPromises
    if (promises[bucketName]) {
      await promises[bucketName]
    } else if (err.statusCode === 404) {
      // bucket doesn't exist create it
      promises[bucketName] = client
        .createBucket({
          Bucket: bucketName,
        })
        .promise()
      await promises[bucketName]
      delete promises[bucketName]
      // public buckets are quite hidden in the system, make sure
      // no bucket is set accidentally
      if (PUBLIC_BUCKETS.includes(bucketName)) {
        await client
          .putBucketPolicy({
            Bucket: bucketName,
            Policy: JSON.stringify(publicPolicy(bucketName)),
          })
          .promise()
      }
    } else {
      throw err
    }
  }
}

/**
 * Uploads the contents of a file given the required parameters, useful when
 * temp files in use (for example file uploaded as an attachment).
 */
exports.upload = async ({ bucket, filename, path, type, metadata }) => {
  const extension = [...filename.split(".")].pop()
  const fileBytes = fs.readFileSync(path)

  const objectStore = exports.ObjectStore(bucket)
  await exports.makeSureBucketExists(objectStore, bucket)

  const config = {
    // windows file paths need to be converted to forward slashes for s3
    Key: sanitize(filename).replace(/\\/g, "/"),
    Body: fileBytes,
    ContentType: type || CONTENT_TYPE_MAP[extension.toLowerCase()],
  }
  if (metadata) {
    config.Metadata = metadata
  }
  return objectStore.upload(config).promise()
}

/**
 * Similar to the upload function but can be used to send a file stream
 * through to the object store.
 */
exports.streamUpload = async (bucket, filename, stream) => {
  const objectStore = exports.ObjectStore(bucket)
  await exports.makeSureBucketExists(objectStore, bucket)

  const params = {
    Bucket: bucket,
    Key: sanitize(filename).replace(/\\/g, "/"),
    Body: stream,
  }
  return objectStore.upload(params).promise()
}

/**
 * retrieves the contents of a file from the object store, if it is a known content type it
 * will be converted, otherwise it will be returned as a buffer stream.
 */
exports.retrieve = async (bucket, filepath) => {
  const objectStore = exports.ObjectStore(bucket)
  const params = {
    Bucket: bucket,
    Key: sanitize(filepath).replace(/\\/g, "/"),
  }
  const response = await objectStore.getObject(params).promise()
  // currently these are all strings
  if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
    return response.Body.toString("utf8")
  } else {
    return response.Body
  }
}

/**
 * Same as retrieval function but puts to a temporary file.
 */
exports.retrieveToTmp = async (bucket, filepath) => {
  const data = await exports.retrieve(bucket, filepath)
  const outputPath = join(budibaseTempDir(), v4())
  fs.writeFileSync(outputPath, data)
  return outputPath
}

exports.deleteFolder = async (bucket, folder) => {
  const client = exports.ObjectStore(bucket)
  const listParams = {
    Bucket: bucket,
    Prefix: folder,
  }

  let response = await client.listObjects(listParams).promise()
  if (response.Contents.length === 0) {
    return
  }
  const deleteParams = {
    Bucket: bucket,
    Delete: {
      Objects: [],
    },
  }

  response.Contents.forEach(content => {
    deleteParams.Delete.Objects.push({ Key: content.Key })
  })

  response = await client.deleteObjects(deleteParams).promise()
  // can only empty 1000 items at once
  if (response.Deleted.length === 1000) {
    return exports.deleteFolder(bucket, folder)
  }
}

exports.uploadDirectory = async (bucket, localPath, bucketPath) => {
  let uploads = []
  const files = fs.readdirSync(localPath, { withFileTypes: true })
  for (let file of files) {
    const path = join(bucketPath, file.name)
    const local = join(localPath, file.name)
    if (file.isDirectory()) {
      uploads.push(exports.uploadDirectory(bucket, local, path))
    } else {
      uploads.push(
        exports.streamUpload(bucket, path, fs.createReadStream(local))
      )
    }
  }
  await Promise.all(uploads)
}

exports.downloadTarball = async (url, bucket, path) => {
  const response = await fetch(url)
  if (!response.ok) {
    throw new Error(`unexpected response ${response.statusText}`)
  }

  const tmpPath = join(budibaseTempDir(), path)
  await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
  if (!env.isTest()) {
    await exports.uploadDirectory(bucket, tmpPath, path)
  }
  // return the temporary path incase there is a use for it
  return tmpPath
}

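A brief usage sketch of the helpers this new module exports (consumed elsewhere in this PR via require("@budibase/auth").objectStore). Bucket and key names below are illustrative only, and the MINIO_* environment variables are assumed to be configured:

// Illustrative only: shows the intended call pattern of the exported helpers.
const { upload, retrieve, retrieveToTmp } = require("@budibase/auth").objectStore

async function example() {
  // writes ./logo.png into the "global" bucket, creating the bucket (and its
  // public-read policy, since "global" is in PUBLIC_BUCKETS) on first use
  await upload({
    bucket: "global",
    filename: "settings/logo.png",
    path: "./logo.png",
    type: "image/png",
  })

  // known text types (html/css/js) come back as strings, everything else as a Buffer
  const body = await retrieve("global", "settings/logo.png")

  // or write the object to a uuid-named file under the Budibase temp dir and get its path
  const tmpFile = await retrieveToTmp("global", "settings/logo.png")
  return { body, tmpFile }
}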
@@ -0,0 +1,13 @@
const { join } = require("path")
const { tmpdir } = require("os")

exports.ObjectStoreBuckets = {
  BACKUPS: "backups",
  APPS: "prod-budi-app-assets",
  TEMPLATES: "templates",
  GLOBAL: "global",
}

exports.budibaseTempDir = function () {
  return join(tmpdir(), ".budibase")
}

@@ -36,6 +36,21 @@ asynckit@^0.4.0:
  resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
  integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=

aws-sdk@^2.901.0:
  version "2.901.0"
  resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.901.0.tgz#96b387778cf2b3537383fba04994e815f1fab4d4"
  integrity sha512-prmUjg4mjguamnwaXMdm/g1xtnT9515cjSaV/MhBsMUVzhe66EX7dLwiA7Jo8qUlwFMyCVIcp/2T+6KwJ9sQgQ==
  dependencies:
    buffer "4.9.2"
    events "1.1.1"
    ieee754 "1.1.13"
    jmespath "0.15.0"
    querystring "0.2.0"
    sax "1.2.1"
    url "0.10.3"
    uuid "3.3.2"
    xml2js "0.4.19"

aws-sign2@~0.7.0:
  version "0.7.0"
  resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
@@ -51,6 +66,11 @@ balanced-match@^1.0.0:
  resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
  integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==

base64-js@^1.0.2, base64-js@^1.3.1:
  version "1.5.1"
  resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
  integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==

base64url@3.x.x:
  version "3.0.1"
  resolved "https://registry.yarnpkg.com/base64url/-/base64url-3.0.1.tgz#6399d572e2bc3f90a9a8b22d5dbb0a32d33f788d"
@@ -68,6 +88,15 @@ bcryptjs@^2.4.3:
  resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb"
  integrity sha1-mrVie5PmBiH/fNrF2pczAn3x0Ms=

bl@^4.0.3:
  version "4.1.0"
  resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a"
  integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==
  dependencies:
    buffer "^5.5.0"
    inherits "^2.0.4"
    readable-stream "^3.4.0"

brace-expansion@^1.1.7:
  version "1.1.11"
  resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
@@ -81,11 +110,33 @@ buffer-equal-constant-time@1.0.1:
  resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
  integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=

buffer@4.9.2:
  version "4.9.2"
  resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8"
  integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==
  dependencies:
    base64-js "^1.0.2"
    ieee754 "^1.1.4"
    isarray "^1.0.0"

buffer@^5.5.0:
  version "5.7.1"
  resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0"
  integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==
  dependencies:
    base64-js "^1.3.1"
    ieee754 "^1.1.13"

caseless@~0.12.0:
  version "0.12.0"
  resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
  integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=

chownr@^1.1.1:
  version "1.1.4"
  resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
  integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==

cluster-key-slot@^1.1.0:
  version "1.1.0"
  resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz#30474b2a981fb12172695833052bc0d01336d10d"
@@ -147,6 +198,18 @@ ecdsa-sig-formatter@1.0.11:
  dependencies:
    safe-buffer "^5.0.1"

end-of-stream@^1.1.0, end-of-stream@^1.4.1:
  version "1.4.4"
  resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
  integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
  dependencies:
    once "^1.4.0"

events@1.1.1:
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
  integrity sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=

extend@~3.0.2:
  version "3.0.2"
  resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
@@ -200,6 +263,11 @@ form-data@~2.3.2:
    combined-stream "^1.0.6"
    mime-types "^2.1.12"

fs-constants@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
  integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==

getpass@^0.1.1:
  version "0.1.7"
  resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa"
@@ -265,6 +333,21 @@ http-signature@~1.2.0:
    jsprim "^1.2.2"
    sshpk "^1.7.0"

ieee754@1.1.13:
  version "1.1.13"
  resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
  integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==

ieee754@^1.1.13, ieee754@^1.1.4:
  version "1.2.1"
  resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
  integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==

inherits@^2.0.3, inherits@^2.0.4:
  version "2.0.4"
  resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
  integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==

ioredis-mock@^5.5.5:
  version "5.5.5"
  resolved "https://registry.yarnpkg.com/ioredis-mock/-/ioredis-mock-5.5.5.tgz#dec9fedd238c6ab9f56c026fc366533144f8a256"
@@ -297,11 +380,21 @@ is-typedarray@~1.0.0:
  resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
  integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=

isarray@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
  integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=

isstream@~0.1.2:
  version "0.1.2"
  resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
  integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=

jmespath@0.15.0:
  version "0.15.0"
  resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217"
  integrity sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=

jsbn@~0.1.0:
  version "0.1.1"
  resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
@@ -451,6 +544,11 @@ minimatch@^3.0.4:
  dependencies:
    brace-expansion "^1.1.7"

mkdirp-classic@^0.5.2:
  version "0.5.3"
  resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
  integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==

ms@2.1.2:
  version "2.1.2"
  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
@@ -461,6 +559,11 @@ ms@^2.1.1:
  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
  integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==

node-fetch@^2.6.1:
  version "2.6.1"
  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
  integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==

node-forge@^0.7.1:
  version "0.7.6"
  resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac"
@@ -476,6 +579,13 @@ oauth@0.9.x:
  resolved "https://registry.yarnpkg.com/oauth/-/oauth-0.9.15.tgz#bd1fefaf686c96b75475aed5196412ff60cfb9c1"
  integrity sha1-vR/vr2hslrdUda7VGWQS/2DPucE=

once@^1.3.1, once@^1.4.0:
  version "1.4.0"
  resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
  integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
  dependencies:
    wrappy "1"

os-tmpdir@~1.0.2:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
@@ -579,6 +689,19 @@ psl@^1.1.28:
  resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24"
  integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==

pump@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
  integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
  dependencies:
    end-of-stream "^1.1.0"
    once "^1.3.1"

punycode@1.3.2:
  version "1.3.2"
  resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d"
  integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=

punycode@^2.1.0, punycode@^2.1.1:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
@@ -589,6 +712,20 @@ qs@~6.5.2:
  resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36"
  integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==

querystring@0.2.0:
  version "0.2.0"
  resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
  integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=

readable-stream@^3.1.1, readable-stream@^3.4.0:
  version "3.6.0"
  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
  integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
  dependencies:
    inherits "^2.0.3"
    string_decoder "^1.1.1"
    util-deprecate "^1.0.1"

readline-sync@^1.4.9:
  version "1.4.10"
  resolved "https://registry.yarnpkg.com/readline-sync/-/readline-sync-1.4.10.tgz#41df7fbb4b6312d673011594145705bf56d8873b"
@@ -637,7 +774,7 @@ request@^2.72.0, request@^2.74.0:
    tunnel-agent "^0.6.0"
    uuid "^3.3.2"

safe-buffer@^5.0.1, safe-buffer@^5.1.2:
safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0:
  version "5.2.1"
  resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
  integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
@@ -647,6 +784,21 @@ safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
  resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
  integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==

sanitize-s3-objectkey@^0.0.1:
  version "0.0.1"
  resolved "https://registry.yarnpkg.com/sanitize-s3-objectkey/-/sanitize-s3-objectkey-0.0.1.tgz#efa9887cd45275b40234fb4bb12fc5754fe64e7e"
  integrity sha512-ZTk7aqLxy4sD40GWcYWoLfbe05XLmkKvh6vGKe13ADlei24xlezcvjgKy1qRArlaIbIMYaqK7PCalvZtulZlaQ==

sax@1.2.1:
  version "1.2.1"
  resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a"
  integrity sha1-e45lYZCyKOgaZq6nSEgNgozS03o=

sax@>=0.6.0:
  version "1.2.4"
  resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
  integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==

semver@^5.6.0:
  version "5.7.1"
  resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
@@ -682,6 +834,34 @@ string-template@~1.0.0:
  resolved "https://registry.yarnpkg.com/string-template/-/string-template-1.0.0.tgz#9e9f2233dc00f218718ec379a28a5673ecca8b96"
  integrity sha1-np8iM9wA8hhxjsN5oopWc+zKi5Y=

string_decoder@^1.1.1:
  version "1.3.0"
  resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
  integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
  dependencies:
    safe-buffer "~5.2.0"

tar-fs@^2.1.1:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784"
  integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==
  dependencies:
    chownr "^1.1.1"
    mkdirp-classic "^0.5.2"
    pump "^3.0.0"
    tar-stream "^2.1.4"

tar-stream@^2.1.4:
  version "2.2.0"
  resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287"
  integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==
  dependencies:
    bl "^4.0.3"
    end-of-stream "^1.4.1"
    fs-constants "^1.0.0"
    inherits "^2.0.3"
    readable-stream "^3.1.1"

tmp@^0.0.33:
  version "0.0.33"
  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"
@@ -721,11 +901,29 @@ uri-js@^4.2.2:
  dependencies:
    punycode "^2.1.0"

url@0.10.3:
  version "0.10.3"
  resolved "https://registry.yarnpkg.com/url/-/url-0.10.3.tgz#021e4d9c7705f21bbf37d03ceb58767402774c64"
  integrity sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=
  dependencies:
    punycode "1.3.2"
    querystring "0.2.0"

util-deprecate@^1.0.1:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
  integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=

utils-merge@1.x.x:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
  integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=

uuid@3.3.2:
  version "3.3.2"
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
  integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==

uuid@^3.3.2:
  version "3.4.0"
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
@@ -744,3 +942,26 @@ verror@1.10.0:
    assert-plus "^1.0.0"
    core-util-is "1.0.2"
    extsprintf "^1.2.0"

wrappy@1:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
  integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=

xml2js@0.4.19:
  version "0.4.19"
  resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
  integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==
  dependencies:
    sax ">=0.6.0"
    xmlbuilder "~9.0.1"

xmlbuilder@~9.0.1:
  version "9.0.7"
  resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
  integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=

zlib@^1.0.5:
  version "1.0.5"
  resolved "https://registry.yarnpkg.com/zlib/-/zlib-1.0.5.tgz#6e7c972fc371c645a6afb03ab14769def114fcc0"
  integrity sha1-bnyXL8NxxkWmr7A6sUdp3vEU/MA=

File diff suppressed because it is too large
@@ -90,16 +90,12 @@
    "arangojs": "7.2.0",
    "aws-sdk": "^2.767.0",
    "bcryptjs": "2.4.3",
    "bull": "^3.22.0",
    "bull-board": "^1.5.1",
    "bull": "^3.22.4",
    "bull-board": "^2.0.1",
    "chmodr": "1.2.0",
    "csvtojson": "2.0.10",
    "dotenv": "8.2.0",
    "download": "8.0.0",
    "electron-is-dev": "1.2.0",
    "electron-unhandled": "3.0.2",
    "electron-updater": "4.3.1",
    "electron-util": "0.14.2",
    "fix-path": "3.0.0",
    "fs-extra": "8.1.0",
    "jimp": "0.16.1",
@@ -125,10 +121,8 @@
    "pouchdb-all-dbs": "1.0.2",
    "pouchdb-find": "^7.2.2",
    "pouchdb-replication-stream": "1.2.9",
    "sanitize-s3-objectkey": "0.0.1",
    "server-destroy": "1.0.1",
    "svelte": "3.30.0",
    "tar-fs": "2.1.0",
    "to-json-schema": "0.2.5",
    "uuid": "3.3.2",
    "validate.js": "0.13.1",
@@ -140,10 +134,8 @@
    "@budibase/standard-components": "^0.8.16",
    "@jest/test-sequencer": "^24.8.0",
    "docker-compose": "^0.23.6",
    "electron": "10.1.3",
    "electron-builder": "^22.9.1",
    "electron-builder-notarize": "^1.1.2",
    "eslint": "^6.8.0",
    "express": "^4.17.1",
    "jest": "^24.8.0",
    "nodemon": "^2.0.4",
    "pouchdb-adapter-memory": "^7.2.1",

@@ -4,6 +4,7 @@ const send = require("koa-send")
const { resolve, join } = require("../../../utilities/centralPath")
const fetch = require("node-fetch")
const uuid = require("uuid")
const { ObjectStoreBuckets } = require("../../../constants")
const { prepareUpload } = require("../deploy/utils")
const { processString } = require("@budibase/string-templates")
const { budibaseTempDir } = require("../../../utilities/budibaseDir")
@@ -15,7 +16,6 @@ const {
  TOP_LEVEL_PATH,
} = require("../../../utilities/fileSystem")
const env = require("../../../environment")
// const fileProcessor = require("../../../utilities/fileSystem/processor")
const { objectStoreUrl, clientLibraryPath } = require("../../../utilities")

async function checkForSelfHostedURL(ctx) {
@@ -48,17 +48,10 @@ exports.uploadFile = async function (ctx) {
    // filenames converted to UUIDs so they are unique
    const processedFileName = `${uuid.v4()}.${fileExtension}`

    // need to handle image processing
    // TODO either offer this as an option, or don't do it at all
    // await fileProcessor.process({
    //   ...file,
    //   extension: fileExtension,
    // })

    return prepareUpload({
      file,
      s3Key: `assets/${ctx.appId}/attachments/${processedFileName}`,
      bucket: "prod-budi-app-assets",
      bucket: ObjectStoreBuckets.APPS,
    })
  })

@@ -7,13 +7,6 @@ const { mainRoutes, staticRoutes } = require("./routes")
const pkg = require("../../package.json")
const env = require("../environment")

if (!env.isTest()) {
  const bullboard = require("bull-board")
  const expressApp = require("express")()

  expressApp.use("/bulladmin", bullboard.router)
}

const router = new Router()

router

@@ -4,7 +4,6 @@ const CouchDB = require("./db")
require("@budibase/auth").init(CouchDB)
const Koa = require("koa")
const destroyable = require("server-destroy")
const electron = require("electron")
const koaBody = require("koa-body")
const logger = require("koa-pino-logger")
const http = require("http")
@@ -13,6 +12,7 @@ const eventEmitter = require("./events")
const automations = require("./automations/index")
const Sentry = require("@sentry/node")
const fileSystem = require("./utilities/fileSystem")
const bullboard = require("./automations/bullboard")

const app = new Koa()

@@ -36,13 +36,26 @@ app.use(
  })
)

if (!env.isTest()) {
  const bullApp = bullboard.init()
  app.use(async (ctx, next) => {
    if (ctx.path.startsWith(bullboard.pathPrefix)) {
      ctx.status = 200
      ctx.respond = false
      bullApp(ctx.req, ctx.res)
    } else {
      await next()
    }
  })
}

app.context.eventEmitter = eventEmitter
app.context.auth = {}

// api routes
app.use(api.routes())

if (electron.app && electron.app.isPackaged) {
if (env.isProd()) {
  env._set("NODE_ENV", "production")
  Sentry.init()

@@ -0,0 +1,20 @@
const { createBullBoard } = require("bull-board")
const { BullAdapter } = require("bull-board/bullAdapter")
const { getQueues } = require("./triggers")
const express = require("express")

exports.pathPrefix = "/bulladmin"

exports.init = () => {
  const expressApp = express()
  // Set up queues for bull board admin
  const queues = getQueues()
  const adapters = []
  for (let queue of queues) {
    adapters.push(new BullAdapter(queue))
  }
  const { router } = createBullBoard(adapters)

  expressApp.use(exports.pathPrefix, router)
  return expressApp
}

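Worth noting: init() returns a standard Express app, which is itself a (req, res) request handler, so it can be handed raw Node requests directly; that is how the Koa server in this PR bridges /bulladmin traffic to it (see the middleware added to the server's index.js earlier in this diff). A tiny illustrative sketch of the same idea running standalone (the port is a placeholder, not part of this diff):

// Sketch only: serving the bull board admin app on its own HTTP server.
const http = require("http")
const bullboard = require("./automations/bullboard")

const bullApp = bullboard.init()
// requests to /bulladmin/* are handled by the bull-board router mounted in init()
http.createServer(bullApp).listen(9090)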
@@ -4,7 +4,6 @@ const env = require("../environment")
const Queue = env.isTest()
  ? require("../utilities/queue/inMemoryQueue")
  : require("bull")
const { setQueues, BullAdapter } = require("bull-board")
const { getAutomationParams } = require("../db/utils")
const { coerce } = require("../utilities/rowProcessor")
const { utils } = require("@budibase/auth").redis
@@ -12,9 +11,6 @@ const { utils } = require("@budibase/auth").redis
const { opts } = utils.getRedisOptions()
let automationQueue = new Queue("automationQueue", { redis: opts })

// Set up queues for bull board admin
setQueues([new BullAdapter(automationQueue)])

const FAKE_STRING = "TEST"
const FAKE_BOOL = false
const FAKE_NUMBER = 1
@@ -310,6 +306,9 @@ module.exports.externalTrigger = async function (automation, params) {
  automationQueue.add({ automation, event: params })
}

module.exports.getQueues = () => {
  return [automationQueue]
}
module.exports.fillRowOutput = fillRowOutput
module.exports.automationQueue = automationQueue

@@ -1,5 +1,6 @@
const { BUILTIN_ROLE_IDS } = require("../utilities/security/roles")
const { UserStatus } = require("@budibase/auth").constants
const { ObjectStoreBuckets } = require("@budibase/auth").objectStore

exports.LOGO_URL =
  "https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg"
@@ -89,8 +90,5 @@ exports.BaseQueryVerbs = {
  DELETE: "delete",
}

exports.ObjectStoreBuckets = {
  BACKUPS: "backups",
  APPS: "prod-budi-app-assets",
  TEMPLATES: "templates",
}
// pass through the list from the auth/core lib
exports.ObjectStoreBuckets = ObjectStoreBuckets

@@ -1,11 +1,10 @@
const { join } = require("./centralPath")
const { homedir, tmpdir } = require("os")
const { homedir } = require("os")
const env = require("../environment")
const { budibaseTempDir } = require("@budibase/auth").objectStore

module.exports.budibaseAppsDir = function () {
  return env.BUDIBASE_DIR || join(homedir(), ".budibase")
}

module.exports.budibaseTempDir = function () {
  return join(tmpdir(), ".budibase")
}
module.exports.budibaseTempDir = budibaseTempDir

@@ -1,243 +1,30 @@
const sanitize = require("sanitize-s3-objectkey")
const AWS = require("aws-sdk")
const stream = require("stream")
const fetch = require("node-fetch")
const tar = require("tar-fs")
const zlib = require("zlib")
const { promisify } = require("util")
const { join } = require("path")
const fs = require("fs")
const { budibaseTempDir } = require("../budibaseDir")
const env = require("../../environment")
const { ObjectStoreBuckets } = require("../../constants")
const uuid = require("uuid/v4")
const {
  ObjectStore,
  makeSureBucketExists,
  upload,
  streamUpload,
  retrieve,
  retrieveToTmp,
  deleteFolder,
  uploadDirectory,
  downloadTarball,
} = require("@budibase/auth").objectStore

const streamPipeline = promisify(stream.pipeline)
// use this as a temporary store of buckets that are being created
const STATE = {
  bucketCreationPromises: {},
}
/***********************************
 *              NOTE               *
 * This file purely exists so that *
 * the object store functionality  *
 * can easily be mocked out of     *
 * the server without mocking the  *
 * entire core library.            *
 ***********************************/

const CONTENT_TYPE_MAP = {
  html: "text/html",
  css: "text/css",
  js: "application/javascript",
}
const STRING_CONTENT_TYPES = [
  CONTENT_TYPE_MAP.html,
  CONTENT_TYPE_MAP.css,
  CONTENT_TYPE_MAP.js,
]

function publicPolicy(bucketName) {
  return {
    Version: "2012-10-17",
    Statement: [
      {
        Effect: "Allow",
        Principal: {
          AWS: ["*"],
        },
        Action: "s3:GetObject",
        Resource: [`arn:aws:s3:::${bucketName}/*`],
      },
    ],
  }
}

const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS]

/**
 * Gets a connection to the object store using the S3 SDK.
 * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
 * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
 * @constructor
 */
exports.ObjectStore = bucket => {
  if (env.SELF_HOSTED) {
    AWS.config.update({
      accessKeyId: env.MINIO_ACCESS_KEY,
      secretAccessKey: env.MINIO_SECRET_KEY,
    })
  }
  const config = {
    s3ForcePathStyle: true,
    signatureVersion: "v4",
    params: {
      Bucket: bucket,
    },
  }
  if (env.MINIO_URL) {
    config.endpoint = env.MINIO_URL
  }
  return new AWS.S3(config)
}

/**
 * Given an object store and a bucket name this will make sure the bucket exists,
 * if it does not exist then it will create it.
 */
exports.makeSureBucketExists = async (client, bucketName) => {
  try {
    await client
      .headBucket({
        Bucket: bucketName,
      })
      .promise()
  } catch (err) {
    const promises = STATE.bucketCreationPromises
    if (promises[bucketName]) {
      await promises[bucketName]
    } else if (err.statusCode === 404) {
      // bucket doesn't exist create it
      promises[bucketName] = client
        .createBucket({
          Bucket: bucketName,
        })
        .promise()
      await promises[bucketName]
      delete promises[bucketName]
      // public buckets are quite hidden in the system, make sure
      // no bucket is set accidentally
      if (PUBLIC_BUCKETS.includes(bucketName)) {
        await client
          .putBucketPolicy({
            Bucket: bucketName,
            Policy: JSON.stringify(publicPolicy(bucketName)),
          })
          .promise()
      }
    } else {
      throw err
    }
  }
}

/**
 * Uploads the contents of a file given the required parameters, useful when
 * temp files in use (for example file uploaded as an attachment).
 */
exports.upload = async ({ bucket, filename, path, type, metadata }) => {
  const extension = [...filename.split(".")].pop()
  const fileBytes = fs.readFileSync(path)

  const objectStore = exports.ObjectStore(bucket)
  await exports.makeSureBucketExists(objectStore, bucket)

  const config = {
    // windows file paths need to be converted to forward slashes for s3
    Key: sanitize(filename).replace(/\\/g, "/"),
    Body: fileBytes,
    ContentType: type || CONTENT_TYPE_MAP[extension.toLowerCase()],
  }
  if (metadata) {
    config.Metadata = metadata
  }
  return objectStore.upload(config).promise()
}

/**
 * Similar to the upload function but can be used to send a file stream
 * through to the object store.
 */
exports.streamUpload = async (bucket, filename, stream) => {
  const objectStore = exports.ObjectStore(bucket)
  await exports.makeSureBucketExists(objectStore, bucket)

  const params = {
    Bucket: bucket,
    Key: sanitize(filename).replace(/\\/g, "/"),
    Body: stream,
  }
  return objectStore.upload(params).promise()
}

/**
 * retrieves the contents of a file from the object store, if it is a known content type it
 * will be converted, otherwise it will be returned as a buffer stream.
 */
exports.retrieve = async (bucket, filepath) => {
  const objectStore = exports.ObjectStore(bucket)
  const params = {
    Bucket: bucket,
    Key: sanitize(filepath).replace(/\\/g, "/"),
  }
  const response = await objectStore.getObject(params).promise()
  // currently these are all strings
  if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
    return response.Body.toString("utf8")
  } else {
    return response.Body
  }
}

/**
 * Same as retrieval function but puts to a temporary file.
 */
exports.retrieveToTmp = async (bucket, filepath) => {
  const data = await exports.retrieve(bucket, filepath)
  const outputPath = join(budibaseTempDir(), uuid())
  fs.writeFileSync(outputPath, data)
  return outputPath
}

exports.deleteFolder = async (bucket, folder) => {
  const client = exports.ObjectStore(bucket)
  const listParams = {
    Bucket: bucket,
    Prefix: folder,
  }

  let response = await client.listObjects(listParams).promise()
  if (response.Contents.length === 0) {
    return
  }
  const deleteParams = {
    Bucket: bucket,
    Delete: {
      Objects: [],
    },
  }

  response.Contents.forEach(content => {
    deleteParams.Delete.Objects.push({ Key: content.Key })
  })

  response = await client.deleteObjects(deleteParams).promise()
  // can only empty 1000 items at once
  if (response.Deleted.length === 1000) {
    return exports.deleteFolder(bucket, folder)
  }
}

exports.uploadDirectory = async (bucket, localPath, bucketPath) => {
  let uploads = []
  const files = fs.readdirSync(localPath, { withFileTypes: true })
  for (let file of files) {
    const path = join(bucketPath, file.name)
    const local = join(localPath, file.name)
    if (file.isDirectory()) {
      uploads.push(exports.uploadDirectory(bucket, local, path))
    } else {
      uploads.push(
        exports.streamUpload(bucket, path, fs.createReadStream(local))
      )
    }
  }
  await Promise.all(uploads)
}

exports.downloadTarball = async (url, bucket, path) => {
  const response = await fetch(url)
  if (!response.ok) {
    throw new Error(`unexpected response ${response.statusText}`)
  }

  const tmpPath = join(budibaseTempDir(), path)
  await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
  if (!env.isTest()) {
    await exports.uploadDirectory(bucket, tmpPath, path)
  }
  // return the temporary path incase there is a use for it
  return tmpPath
}
exports.ObjectStore = ObjectStore
exports.makeSureBucketExists = makeSureBucketExists
exports.upload = upload
exports.streamUpload = streamUpload
exports.retrieve = retrieve
exports.retrieveToTmp = retrieveToTmp
exports.deleteFolder = deleteFolder
exports.uploadDirectory = uploadDirectory
exports.downloadTarball = downloadTarball

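As the NOTE block in this file explains, the server keeps this thin wrapper so the object store can be mocked in tests without mocking the whole core library. A hedged sketch of what such a mock could look like with jest (which the server package already uses); the require path and mock shape here are assumptions for illustration, not part of this diff:

// Hypothetical jest mock of the server-side object store wrapper (path is a placeholder).
jest.mock("../utilities/fileSystem/utilities", () => ({
  ObjectStore: jest.fn(),
  makeSureBucketExists: jest.fn(),
  upload: jest.fn(),
  streamUpload: jest.fn(),
  retrieve: jest.fn(async () => "mock file contents"),
  retrieveToTmp: jest.fn(),
  deleteFolder: jest.fn(),
  uploadDirectory: jest.fn(),
  downloadTarball: jest.fn(),
}))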
File diff suppressed because it is too large
@@ -10,6 +10,7 @@ const fetch = require("node-fetch")
const { Configs } = require("../../../constants")
const email = require("../../../utilities/email")
const env = require("../../../environment")
const { upload, ObjectStoreBuckets } = require("@budibase/auth").objectStore

const APP_PREFIX = "app_"

@@ -94,6 +95,46 @@ exports.find = async function (ctx) {
  }
}

exports.upload = async function (ctx) {
  if (ctx.request.files == null || ctx.request.files.file.length > 1) {
    ctx.throw(400, "One file must be uploaded.")
  }
  const file = ctx.request.files.file
  const { type, name } = ctx.params

  const fileExtension = [...file.name.split(".")].pop()
  // filenames converted to UUIDs so they are unique
  const processedFileName = `${name}.${fileExtension}`

  const bucket = ObjectStoreBuckets.GLOBAL
  const key = `${type}/${processedFileName}`
  await upload({
    bucket,
    filename: key,
    path: file.path,
    type: file.type,
  })

  // add to configuration structure
  // TODO: right now this only does a global level
  const db = new CouchDB(GLOBAL_DB)
  let config = await getScopedFullConfig(db, { type })
  if (!config) {
    config = {
      _id: generateConfigID({ type }),
    }
  }
  const url = `/${bucket}/${key}`
  config[`${name}Url`] = url
  // write back to db with url updated
  await db.put(config)

  ctx.body = {
    message: "File has been uploaded and url stored to config.",
    url,
  }
}

exports.destroy = async function (ctx) {
  const db = new CouchDB(GLOBAL_DB)
  const { id, rev } = ctx.params

@@ -2,7 +2,7 @@ const Router = require("@koa/router")
const controller = require("../../controllers/admin/configs")
const joiValidator = require("../../../middleware/joi-validator")
const Joi = require("joi")
const { Configs } = require("../../../constants")
const { Configs, ConfigUploads } = require("../../../constants")

const router = Router()

@@ -61,6 +61,14 @@ function buildConfigSaveValidation() {
  )
}

function buildUploadValidation() {
  // prettier-ignore
  return joiValidator.params(Joi.object({
    type: Joi.string().valid(...Object.values(Configs)).required(),
    name: Joi.string().valid(...Object.values(ConfigUploads)).required(),
  }).required())
}

function buildConfigGetValidation() {
  // prettier-ignore
  return joiValidator.params(Joi.object({
@@ -79,5 +87,10 @@ router
    controller.fetch
  )
  .get("/api/admin/configs/:type", buildConfigGetValidation(), controller.find)
  .post(
    "/api/admin/configs/upload/:type/:name",
    buildUploadValidation(),
    controller.upload
  )

module.exports = router

@@ -17,6 +17,10 @@ exports.Configs = {
  GOOGLE: "google",
}

exports.ConfigUploads = {
  LOGO: "logo",
}

const TemplateTypes = {
  EMAIL: "email",
}

@@ -21,7 +21,8 @@ exports.getSettingsTemplateContext = async (purpose, code = null) => {
  }
  const URL = settings.platformUrl
  const context = {
    [TemplateBindings.LOGO_URL]: settings.logoUrl || LOGO_URL,
    [TemplateBindings.LOGO_URL]:
      checkSlashesInUrl(`${URL}/${settings.logoUrl}`) || LOGO_URL,
    [TemplateBindings.PLATFORM_URL]: URL,
    [TemplateBindings.COMPANY]: settings.company || BASE_COMPANY,
    [TemplateBindings.DOCS_URL]:
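To illustrate the change above: the email template's logo binding is now built from the platform URL plus the stored logoUrl (which the new upload endpoint sets to something like /global/settings/logo.png), with LOGO_URL as the fallback. A small worked example, assuming checkSlashesInUrl simply collapses duplicate slashes (its implementation is not shown in this diff):

// Assumed behaviour, for illustration only.
const URL = "https://budibase.example.com"
const settings = { logoUrl: "/global/settings/logo.png" }

// `${URL}/${settings.logoUrl}`   -> "https://budibase.example.com//global/settings/logo.png"
// checkSlashesInUrl(...)         -> "https://budibase.example.com/global/settings/logo.png"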