diff --git a/.idea/codeStyles/codeStyleConfig.xml b/.idea/codeStyles/codeStyleConfig.xml
new file mode 100644
index 0000000..a55e7a1
--- /dev/null
+++ b/.idea/codeStyles/codeStyleConfig.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/Cargo.lock b/Cargo.lock
index 1bca635..31354b0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -199,9 +199,12 @@ version = "1.0.0-beta.0"
dependencies = [
"clap",
"filetime",
+ "human_bytes",
"image",
+ "imagesize",
"indicatif",
"infer",
+ "kamadak-exif 0.6.1",
"libcaesium",
"rayon",
"tempfile",
@@ -527,6 +530,12 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+[[package]]
+name = "human_bytes"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e"
+
[[package]]
name = "image"
version = "0.25.5"
@@ -573,6 +582,12 @@ dependencies = [
"thread_local",
]
+[[package]]
+name = "imagesize"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edcd27d72f2f071c64249075f42e205ff93c9a4c5f6c6da53e79ed9f9832c285"
+
[[package]]
name = "img-parts"
version = "0.3.1"
@@ -683,6 +698,15 @@ dependencies = [
"mutate_once",
]
+[[package]]
+name = "kamadak-exif"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1130d80c7374efad55a117d715a3af9368f0fa7a2c54573afc15a188cd984837"
+dependencies = [
+ "mutate_once",
+]
+
[[package]]
name = "lazy_static"
version = "1.5.0"
@@ -713,7 +737,7 @@ dependencies = [
"imagequant",
"img-parts",
"infer",
- "kamadak-exif",
+ "kamadak-exif 0.5.5",
"libc",
"lodepng",
"mozjpeg-sys",
diff --git a/Cargo.toml b/Cargo.toml
index bb6fbac..b035b3b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,9 +11,10 @@ walkdir = "2.5"
infer = "0.16"
rayon = "1.10"
#rand = "0.8"
-#human_bytes = { version = "0.4", default-features = false }
+human_bytes = { version = "0.4", default-features = false }
+kamadak-exif = "0.6"
filetime = "0.2"
-#imagesize = "0.13"
+imagesize = "0.13"
libcaesium = "0.17.0"
clap = { version = "4.5", features = ["derive"] }
diff --git a/rustfmt.toml b/rustfmt.toml
new file mode 100644
index 0000000..cc66a4b
--- /dev/null
+++ b/rustfmt.toml
@@ -0,0 +1,6 @@
+comment_width = 120
+format_code_in_doc_comments = true
+imports_granularity = "Crate"
+imports_layout = "Vertical"
+wrap_comments = true
+max_width = 120
\ No newline at end of file
diff --git a/src/main.rs b/src/main.rs
index 8a4bfd0..e5977ef 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,5 +1,5 @@
use crate::options::{CommandLineArgs, OverwritePolicy};
-use crate::scan_files::scan_files;
+use crate::scan_files::{get_file_mime_type, scan_files};
use caesium::compress_in_memory;
use caesium::parameters::CSParameters;
use clap::Parser;
@@ -7,16 +7,18 @@ use filetime::{set_file_times, FileTime};
use indicatif::{ParallelProgressIterator, ProgressBar, ProgressDrawTarget, ProgressStyle};
use rayon::iter::IntoParallelRefIterator;
use rayon::iter::ParallelIterator;
+use std::error::Error;
use std::fs::File;
-use std::io::{Read, Write};
+use std::io::{BufReader, Read, Write};
use std::num::NonZero;
use std::path::{Path, PathBuf};
use std::time::Duration;
use std::{fs, io};
+use human_bytes::human_bytes;
+mod logger;
mod options;
mod scan_files;
-mod logger;
enum CompressionStatus {
Success,
@@ -44,8 +46,11 @@ fn main() {
.get(),
);
let verbose = if quiet { 0 } else { args.verbose };
- let compression_parameters = build_compression_parameters(&args);
- let (base_path, input_files) = scan_files(args.files, args.recursive, quiet);
+ let needs_resize = args.resize.width.is_some()
+ || args.resize.height.is_some()
+ || args.resize.long_edge.is_some()
+ || args.resize.short_edge.is_some();
+ let (base_path, input_files) = scan_files(&args.files, args.recursive, quiet);
rayon::ThreadPoolBuilder::new()
.num_threads(threads_number)
@@ -68,14 +73,14 @@ fn main() {
message: String::new(),
};
- let original_file_size = match input_file.metadata() {
- Ok(m) => m.len(),
+ let input_file_metadata = match input_file.metadata() {
+ Ok(m) => m,
Err(_) => {
compression_result.message = "Error reading file metadata".to_string();
return compression_result;
}
};
-
+ let original_file_size = input_file_metadata.len();
compression_result.original_size = original_file_size;
let output_directory = if args.output_destination.same_folder_as_input {
@@ -115,16 +120,17 @@ fn main() {
return compression_result;
};
- let compressed_image = match compress_in_memory(
- read_file_to_vec(input_file).unwrap(),
- &compression_parameters,
- ) {
- Ok(v) => v,
- Err(e) => {
- compression_result.message = format!("Error compressing file: {}", e);
- return compression_result;
- }
- };
+ let compression_parameters = build_compression_parameters(&args, input_file, needs_resize);
+
+ let compressed_image =
+ match compress_in_memory(read_file_to_vec(input_file).unwrap(), &compression_parameters) {
+ //TODO: handle error
+ Ok(v) => v,
+ Err(e) => {
+ compression_result.message = format!("Error compressing file: {}", e);
+ return compression_result;
+ }
+ };
compression_result.output_path = output_full_path.display().to_string();
let output_file_size = compressed_image.len() as u64;
@@ -133,8 +139,7 @@ fn main() {
OverwritePolicy::None => {
compression_result.status = CompressionStatus::Skipped;
compression_result.compressed_size = original_file_size;
- compression_result.message =
- "File already exists, skipped due overwrite policy".to_string();
+ compression_result.message = "File already exists, skipped due overwrite policy".to_string();
return compression_result;
}
OverwritePolicy::Bigger => {
@@ -166,17 +171,9 @@ fn main() {
};
if args.keep_dates {
- let output_file_metadata = match output_file.metadata() {
- Ok(m) => m,
- Err(_) => {
- compression_result.message =
- "Error reading output file metadata".to_string();
- return compression_result;
- }
- };
let (last_modification_time, last_access_time) = (
- FileTime::from_last_modification_time(&output_file_metadata),
- FileTime::from_last_access_time(&output_file_metadata),
+ FileTime::from_last_modification_time(&input_file_metadata),
+ FileTime::from_last_access_time(&input_file_metadata),
);
match preserve_dates(&output_full_path, last_modification_time, last_access_time) {
Ok(_) => {}
@@ -192,8 +189,40 @@ fn main() {
compression_result
})
.collect();
+
+ write_recap_message(&compression_results, verbose);
+}
- let recap_message = format!("Processed {} files", compression_results.len());
+fn write_recap_message(compression_results: &[CompressionResult], verbose: u8) {
+ let mut total_original_size = 0;
+ let mut total_compressed_size = 0;
+ let total_files = compression_results.len();
+ let mut total_success = 0;
+ let mut total_skipped = 0;
+ let mut total_errors = 0;
+
+ for result in compression_results.iter() {
+ total_original_size += result.original_size;
+ total_compressed_size += result.compressed_size;
+ match result.status {
+ CompressionStatus::Skipped => total_skipped += 1,
+ CompressionStatus::Error => total_errors += 1,
+ _ => total_success += 1
+ }
+ }
+
+ let total_saved = total_original_size as f64 - total_compressed_size as f64;
+ let total_saved_percent = total_saved / total_original_size as f64 * 100.0;
+
+ if verbose > 0 {
+ println!("Total files: {}", total_files);
+ println!("Total success: {}", total_success);
+ println!("Total skipped: {}", total_skipped);
+ println!("Total errors: {}", total_errors);
+ println!("Total original size: {}", human_bytes(total_original_size as f64));
+ println!("Total compressed size: {}", human_bytes(total_compressed_size as f64));
+ println!("Total saved: {:.2} bytes ({:.2}%)", human_bytes(total_saved), total_saved_percent);
+ }
}
fn get_parallelism_count(requested_threads: u32, available_threads: usize) -> usize {
@@ -204,7 +233,7 @@ fn get_parallelism_count(requested_threads: u32, available_threads: usize) -> us
}
}
-fn build_compression_parameters(args: &CommandLineArgs) -> CSParameters {
+fn build_compression_parameters(args: &CommandLineArgs, input_file: &Path, needs_resize: bool) -> CSParameters {
let mut parameters = CSParameters::new();
let quality = args.compression.quality.unwrap_or(80) as u32;
@@ -218,9 +247,46 @@ fn build_compression_parameters(args: &CommandLineArgs) -> CSParameters {
parameters.png.optimization_level = args.png_opt_level;
parameters.png.force_zopfli = args.zopfli;
+ if needs_resize {
+ let mime_type = get_file_mime_type(input_file);
+ build_resize_parameters(args, &mut parameters, input_file, mime_type).unwrap();
+ //TODO
+ }
+
parameters
}
+fn build_resize_parameters(
+ args: &CommandLineArgs,
+ parameters: &mut CSParameters,
+ input_file_path: &Path,
+    mime_type: Option<String>,
+) -> Result<(), Box<dyn Error>> {
+ let (width, height) = get_real_resolution(input_file_path, mime_type, args.exif)?;
+
+ if args.resize.width.is_some() {
+ parameters.width = args.resize.width.unwrap_or(0);
+ } else if args.resize.height.is_some() {
+ parameters.height = args.resize.height.unwrap_or(0);
+ } else if args.resize.long_edge.is_some() {
+ let long_edge = args.resize.long_edge.unwrap_or(0);
+ if width > height {
+ parameters.width = long_edge;
+ } else {
+ parameters.height = long_edge;
+ }
+ } else if args.resize.short_edge.is_some() {
+ let short_edge = args.resize.short_edge.unwrap_or(0);
+ if width < height {
+ parameters.width = short_edge;
+ } else {
+ parameters.height = short_edge;
+ }
+ }
+
+ Ok(())
+}
+
fn compute_output_full_path(
output_directory: PathBuf,
input_file_path: PathBuf,
@@ -228,14 +294,8 @@ fn compute_output_full_path(
keep_structure: bool,
suffix: &str,
) -> Option<PathBuf> {
- let extension = input_file_path
- .extension()
- .unwrap_or_default()
- .to_os_string();
- let base_name = input_file_path
- .file_stem()
- .unwrap_or_default()
- .to_os_string();
+ let extension = input_file_path.extension().unwrap_or_default().to_os_string();
+ let base_name = input_file_path.file_stem().unwrap_or_default().to_os_string();
let mut output_file_name = base_name;
output_file_name.push(suffix);
if !extension.is_empty() {
@@ -269,14 +329,36 @@ fn read_file_to_vec(file_path: &PathBuf) -> io::Result<Vec<u8>> {
Ok(buffer)
}
-fn preserve_dates(
- output_file: &PathBuf,
- input_atime: FileTime,
- input_mtime: FileTime,
-) -> io::Result<()> {
+fn preserve_dates(output_file: &PathBuf, input_atime: FileTime, input_mtime: FileTime) -> io::Result<()> {
set_file_times(output_file, input_atime, input_mtime)
}
+fn get_real_resolution(
+ file: &Path,
+    mime_type: Option<String>,
+ keep_metadata: bool,
+) -> Result<(usize, usize), Box<dyn Error>> {
+ let resolution = imagesize::size(file)?;
+ let mut orientation = 1;
+ let mime = mime_type.unwrap_or("".to_string());
+ if mime == "image/jpeg" && keep_metadata {
+ let f = File::open(file)?;
+ if let Ok(e) = exif::Reader::new().read_from_container(&mut BufReader::new(&f)) {
+ let exif_field = match e.get_field(exif::Tag::Orientation, exif::In::PRIMARY) {
+ Some(f) => f,
+ None => return Ok((resolution.width, resolution.height)),
+ };
+ orientation = exif_field.value.get_uint(0).unwrap_or(1);
+ };
+ }
+ let (width, height) = match orientation {
+ 5..=8 => (resolution.height, resolution.width),
+ _ => (resolution.width, resolution.height),
+ };
+
+ Ok((width, height))
+}
+
fn setup_progress_bar(len: usize, verbose: u8) -> ProgressBar {
let progress_bar = ProgressBar::new(len as u64);
if verbose == 0 {
@@ -294,7 +376,7 @@ fn setup_progress_bar(len: usize, verbose: u8) -> ProgressBar {
progress_bar
}
-#[cfg(test)]
+#[cfg(test)]
mod tests {
use super::*;
use std::path::Path;
diff --git a/src/options.rs b/src/options.rs
index a022679..c7e0cc2 100644
--- a/src/options.rs
+++ b/src/options.rs
@@ -16,6 +16,9 @@ pub enum OverwritePolicy {
pub struct CommandLineArgs {
#[command(flatten)]
pub compression: Compression,
+
+ #[command(flatten)]
+ pub resize: Resize,
#[command(flatten)]
pub output_destination: OutputDestination,
@@ -87,6 +90,26 @@ pub struct Compression {
pub max_size: Option<usize>,
}
+#[derive(Args, Debug)]
+#[group(required = false, multiple = true)]
+pub struct Resize {
+ /// width of the output image, if height is not set will preserve aspect ratio
+ #[arg(long, conflicts_with_all = &["long_edge", "short_edge"])]
+    pub width: Option<u32>,
+
+ /// height of the output image, if width is not set will preserve aspect ratio
+ #[arg(long, conflicts_with_all = &["long_edge", "short_edge"])]
+    pub height: Option<u32>,
+
+ /// sets the size of the longest edge of the image
+ #[arg(long, conflicts_with_all = &["width", "height", "short_edge"])]
+    pub long_edge: Option<u32>,
+
+ /// sets the size of the shortest edge of the image
+ #[arg(long, conflicts_with_all = &["width", "height", "long_edge"])]
+    pub short_edge: Option<u32>,
+}
+
#[derive(Args, Debug)]
#[group(required = true, multiple = false)]
pub struct OutputDestination {
diff --git a/src/scan_files.rs b/src/scan_files.rs
index 8ed563a..927df64 100644
--- a/src/scan_files.rs
+++ b/src/scan_files.rs
@@ -6,15 +6,21 @@ use indicatif::ProgressStyle;
use walkdir::WalkDir;
fn is_filetype_supported(path: &Path) -> bool {
+ match get_file_mime_type(path) {
+ Some(mime_type) => {
+ matches!(mime_type.as_str(), "image/jpeg" | "image/png" | "image/webp" | "image/gif")
+ }
+ None => false,
+ }
+}
+
+pub fn get_file_mime_type(path: &Path) -> Option {
match infer::get_from_path(path) {
Ok(v) => match v {
- None => false,
- Some(ft) => matches!(
- ft.mime_type(),
- "image/jpeg" | "image/png" | "image/gif" | "image/webp"
- ),
+ None => None,
+ Some(ft) => Some(ft.mime_type().to_string()),
},
- Err(_) => false,
+ Err(_) => None,
}
}
@@ -22,7 +28,7 @@ fn is_valid(entry: &Path) -> bool {
entry.exists() && entry.is_file() && is_filetype_supported(entry)
}
-pub fn scan_files(args: Vec<PathBuf>, recursive: bool, quiet: bool) -> (PathBuf, Vec<PathBuf>) {
+pub fn scan_files(args: &Vec<PathBuf>, recursive: bool, quiet: bool) -> (PathBuf, Vec<PathBuf>) {
if args.is_empty() {
return (PathBuf::new(), vec![]);
}