Moving some test cases to not use the file system and re-introducing the image processor.
This commit is contained in: parent 19897de535 / commit 14586cd124
@@ -17,6 +17,7 @@ const setBuilderToken = require("../../../utilities/builder/setBuilderToken")
 const { loadHandlebarsFile } = require("../../../utilities/fileSystem")
 const env = require("../../../environment")
 const { OBJ_STORE_DIRECTORY } = require("../../../constants")
+const fileProcessor = require("../../../utilities/fileSystem/processor")
 
 function objectStoreUrl() {
   if (env.SELF_HOSTED) {
@@ -50,8 +51,7 @@ exports.serveBuilder = async function(ctx) {
 }
 
 exports.uploadFile = async function(ctx) {
-  let files
-  files =
+  let files =
     ctx.request.files.file.length > 1
      ? Array.from(ctx.request.files.file)
      : [ctx.request.files.file]
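For context on the `let files =` change above: `ctx.request.files.file` holds either a single file object or an array-like collection, depending on how many files were posted, so the ternary normalises it to an array. A minimal sketch of the same pattern, with hypothetical input shapes (the multipart middleware's behaviour is an assumption, not shown in this diff):

// Normalise "one or many" input into an array, as the controller does.
function toFileArray(fileField) {
  // length > 1 implies an array-like of uploads; a single file object
  // has no numeric length, so the comparison is false and it gets wrapped
  return fileField.length > 1 ? Array.from(fileField) : [fileField]
}

console.log(toFileArray({ name: "a.png" }))                      // [ { name: 'a.png' } ]
console.log(toFileArray([{ name: "a.png" }, { name: "b.jpg" }])) // both files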
@@ -62,10 +62,17 @@ exports.uploadFile = async function(ctx) {
     },
   })
 
-  const uploads = files.map(file => {
+  const uploads = files.map(async file => {
     const fileExtension = [...file.name.split(".")].pop()
+    // filenames converted to UUIDs so they are unique
     const processedFileName = `${uuid.v4()}.${fileExtension}`
 
+    // need to handle image processing
+    await fileProcessor.process({
+      ...file,
+      extension: fileExtension,
+    })
+
     return prepareUpload({
       file,
       s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
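One behavioural note on the hunk above worth spelling out: switching the callback to `async file => {...}` means `uploads` becomes an array of promises rather than finished upload descriptors, so the unchanged code consuming it must await them (the variable name suggests something like `Promise.all(uploads)` downstream, though that line is outside this diff). A self-contained sketch of the pattern:

// Mapping with an async callback returns promises immediately;
// the results only become usable once every promise is awaited.
async function processAll(items) {
  const pending = items.map(async item => {
    // each callback starts right away and runs concurrently
    return item * 2
  })
  // without this await, `pending` is just an array of unresolved promises
  return Promise.all(pending)
}

processAll([1, 2, 3]).then(console.log) // [ 2, 4, 6 ]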
@@ -96,3 +96,11 @@ exports.downloadTemplate = async (type, name) => {
   const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
   return downloadTarball(templateUrl, ObjectStoreBuckets.TEMPLATES, type)
 }
+
+/**
+ * All file reads come through here just to make sure all of them make sense
+ * allows a centralised location to check logic is all good.
+ */
+exports.readFileSync = (filepath, options = "utf8") => {
+  return fs.readFileSync(filepath, options)
+}
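The `readFileSync` wrapper above is what lets the test changes further down drop their direct `fs` dependency: once every read goes through one module, tests can stub that module instead of touching the real disk. A hedged sketch of how such a stub might look, assuming a jest setup (the module path and CSV content here are illustrative only):

// Replace the whole fileSystem module with an in-memory fake.
jest.mock("../fileSystem", () => ({
  readFileSync: jest.fn(() => "name,age\nAlice,30\n"),
}))

const { readFileSync } = require("../fileSystem")

test("reads go through the wrapper, never the disk", () => {
  const csv = readFileSync("/any/path.csv", "utf8")
  expect(csv).toContain("Alice")
  expect(readFileSync).toHaveBeenCalledWith("/any/path.csv", "utf8")
})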
@@ -1,25 +1,20 @@
-const fs = require("fs")
 const jimp = require("jimp")
-const fsPromises = fs.promises
 
 const FORMATS = {
   IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"],
 }
 
 function processImage(file) {
+  // this will overwrite the temp file
   return jimp.read(file.path).then(img => {
-    return img.resize(300, jimp.AUTO).write(file.outputPath)
+    return img.resize(300, jimp.AUTO).write(file.path)
   })
 }
 
 async function process(file) {
   if (FORMATS.IMAGES.includes(file.extension.toLowerCase())) {
     await processImage(file)
-    return file
   }
-
-  // No processing required
-  await fsPromises.copyFile(file.path, file.outputPath)
   return file
 }
 
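To make the processor's contract concrete: after this change it resizes supported images in place, overwriting the temp file, and is a no-op for every other extension, always resolving to the file it was given. A minimal usage sketch, assuming the module exports `process` (the controller above calls `fileProcessor.process`, but the export line falls outside this hunk; the paths are hypothetical):

const fileProcessor = require("./processor")

async function example() {
  // Image extension: jimp resizes the temp file in place to 300px wide.
  await fileProcessor.process({ path: "/tmp/upload-abc123", extension: "PNG" })

  // Non-image extension: returned untouched, no filesystem work at all.
  const file = await fileProcessor.process({ path: "/tmp/doc", extension: "pdf" })
  console.log(file.path) // "/tmp/doc"
}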
@@ -1,17 +0,0 @@
-const fs = require("fs-extra")
-const { join } = require("./centralPath")
-const os = require("os")
-const fetch = require("node-fetch")
-const stream = require("stream")
-const tar = require("tar-fs")
-const zlib = require("zlib")
-const { promisify } = require("util")
-const streamPipeline = promisify(stream.pipeline)
-const { budibaseAppsDir } = require("./budibaseDir")
-const env = require("../environment")
-const { downloadTemplate } = require("./fileSystem")
-
-
-// can't really test this, downloading is just not something we should do in a behavioural test
-/* istanbul ignore next */
-exports.downloadTemplate = downloadTemplate
@@ -1,4 +1,4 @@
-const fs = require("fs")
+const { readFileSync } = require("../fileSystem")
 const csvParser = require("../csvParser")
 
 const CSV_PATH = __dirname + "/test.csv"
@@ -33,7 +33,7 @@ const SCHEMAS = {
 }
 
 describe("CSV Parser", () => {
-  const csvString = fs.readFileSync(CSV_PATH, "utf8")
+  const csvString = readFileSync(CSV_PATH, "utf8")
 
   describe("parsing", () => {
     it("returns status and types for a valid CSV transformation", async () => {