Basic working example (no resize, no convert)

This commit is contained in:
Matteo Paonessa 2024-12-13 20:31:17 +01:00
parent 236de28cb7
commit 559e2a48a6
9 changed files with 1392 additions and 893 deletions

2
.gitignore vendored
View File

@ -79,3 +79,5 @@ fabric.properties
# Android studio 3.1+ serialized cache file # Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser .idea/caches/build_file_checksums.ser
/tmp

View File

@ -8,6 +8,7 @@
<sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" /> <sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/target" /> <excludeFolder url="file://$MODULE_DIR$/target" />
<excludeFolder url="file://$MODULE_DIR$/cmake-build-debug/CMakeFiles" /> <excludeFolder url="file://$MODULE_DIR$/cmake-build-debug/CMakeFiles" />
<excludeFolder url="file://$MODULE_DIR$/tmp" />
</content> </content>
<orderEntry type="inheritedJdk" /> <orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />

950
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,19 +1,22 @@
[package] [package]
name = "caesiumclt" name = "caesiumclt"
version = "0.22.0" version = "1.0.0-beta.0"
authors = ["Matteo Paonessa <matteo.paonessa@gmail.com>"] authors = ["Matteo Paonessa <matteo.paonessa@gmail.com>"]
edition = "2021" edition = "2021"
[dependencies] [dependencies]
structopt = "0.3" indicatif = {version= "0.17", features = ["rayon"]}
indicatif = "0.17"
walkdir = "2.5" walkdir = "2.5"
num_cpus = "1.16"
infer = "0.16" infer = "0.16"
rayon = "1.10" rayon = "1.10"
rand = "0.8" #rand = "0.8"
human_bytes = { version = "0.4", default-features = false } #human_bytes = { version = "0.4", default-features = false }
filetime = "0.2" filetime = "0.2"
imagesize = "0.13" #imagesize = "0.13"
libcaesium = "0.17.0" libcaesium = "0.17.0"
clap = { version = "4.5", features = ["derive"] }
[dev-dependencies]
tempfile = "3.14"
image = "0.25"

View File

@ -1,66 +0,0 @@
## Caesium 命令行工具
###### caesium-clt - v0.21.0
###### 依赖
* [Rust](https://www.rust-lang.org/tools/install)
----------
###### 编译
`cargo build --release`
----------
###### 命令行参数
- `-q, --quality [value]` {Required}
设置图像质量,值越高,图像越好。当值为 0 将 _无损_ 压缩图片,不会修改原图,但压缩得很少。
可选范围为 [0, 100] ,有损压缩的常见值为 `80`
- `-e, --exif`
压缩过程中保留 JPEG 元数据信息,文件大小将会略高。
- `-o, --output [value]` {Required}
压缩文件的输出文件夹的路径,如果和输入文件夹相同将覆盖原文件。
- `-R, --recursive`
如果输入是文件夹caesiumclt 将会递归扫描每个子文件夹以搜索图像。
请注意,这最终可能会有大量要压缩的文件,应谨慎使用。
- `-S, --keep-structure`
如果输入是文件夹,并且设置了 `-R` 选项caesiumclt 将保持原始文件夹结构地压缩所有文件。
- `-O, --overwrite`
设置覆盖策略:`all` 将覆盖任何现有文件,`prompt` 将在每次覆盖前询问,`bigger` 将仅覆盖更大的文件,而 `none` 将静默跳过现有文件。
- `-d, --dry-run`
如果设置了此选项,则不会压缩任何文件,而只是模拟整个过程。
用于检查是否所有文件都将会被正确处理。
- `-Q, --quiet`
抑制所有的输出,但 libcaesium 库的输出仍将被输出。
- `-h, --help`
显示命令行参数的摘要,就像您正在阅读的这个。
- `-v, --version`
打印当前的 caesiumclt 版本。
----------
###### 使用示例
将位于 `home` 目录中的 `image1.jpg` 无损压缩到名为 `output` 的文件夹中:
```
$ caesiumclt -q 0 -o ~/output/ ~/image.jpg
```
将位于 `home` 目录中的 `image1.jpg` 压缩到名为 `output` 的文件夹中,且有损压缩和质量设置为 `80`
```
$ caesiumclt -q 80 -o ~/output/ ~/image.jpg
```
将位于 `home` 目录中的 `image1.jpg` 无损压缩到名为 `output` 的文件夹中,且保留 EXIF 元数据:
```
$ caesiumclt -q 0 -e -o ~/output/ ~/image.jpg
```
将位于 `home` 目录中的 `Pictures` 文件夹和其子文件夹无损压缩到名为 `output` 的文件夹中:
```
$ caesiumclt -q 0 -R -o ~/output/ ~/Pictures
```
将位于 `home` 目录中的 `Pictures` 文件夹和其子文件夹无损压缩到名为 `output` 的文件夹中,且保留输入文件夹的结构:
```
$ caesiumclt -q 0 -RS -o ~/output/ ~/Pictures
```

View File

@ -1,369 +1,375 @@
use std::fs; use crate::options::{CommandLineArgs, OverwritePolicy};
use std::path::Path; use crate::scan_files::scan_files;
use std::sync::{Arc, Mutex}; use caesium::compress_in_memory;
use caesium::parameters::CSParameters; use caesium::parameters::CSParameters;
use caesium::SupportedFileTypes; use clap::Parser;
use filetime::{FileTime, set_file_times}; use filetime::{set_file_times, FileTime};
use human_bytes::human_bytes; use indicatif::{ParallelProgressIterator, ProgressBar, ProgressDrawTarget, ProgressStyle};
use indicatif::ProgressBar; use rayon::iter::IntoParallelRefIterator;
use indicatif::ProgressDrawTarget; use rayon::iter::ParallelIterator;
use indicatif::ProgressStyle; use std::fs::File;
use rand::{Rng, thread_rng}; use std::io::{Read, Write};
use rand::distributions::Alphanumeric; use std::num::NonZero;
use rayon::prelude::*; use std::path::{Path, PathBuf};
use std::time::Duration;
use std::{fs, io};
use crate::logger::ErrorLevel::{Error, Log, Notice, Warning};
use crate::logger::log;
use crate::options::OverwritePolicy;
mod scanfiles;
mod options; mod options;
mod scan_files;
mod logger; mod logger;
struct CompressionResult { enum CompressionStatus {
pub path: String, Success,
pub output_path: String, Skipped,
pub original_size: u64, Error,
pub compressed_size: u64,
pub error: String,
pub result: bool,
} }
struct OutputFormat { struct CompressionResult {
pub file_type: SupportedFileTypes, original_path: String,
pub extension: String, output_path: String,
original_size: u64,
compressed_size: u64,
status: CompressionStatus,
message: String,
} }
fn main() { fn main() {
let opt = options::get_opts(); let args = CommandLineArgs::parse();
let mut verbose = opt.verbose;
let args = opt.files;
let dry_run = opt.dry_run;
let output_format = map_output_format(opt.output_format);
let convert = output_format.file_type != SupportedFileTypes::Unkn;
let keep_dates = opt.keep_dates;
let png_optimization_level = opt.png_opt_level.clamp(0, 6);
let lossless = opt.lossless;
let suffix = opt.suffix;
let compress_by_size = opt.max_size.is_some(); let quiet = args.quiet || args.verbose == 0;
let threads_number = get_parallelism_count(
args.threads,
std::thread::available_parallelism()
.unwrap_or(NonZero::new(1).unwrap())
.get(),
);
let verbose = if quiet { 0 } else { args.verbose };
let compression_parameters = build_compression_parameters(&args);
let (base_path, input_files) = scan_files(args.files, args.recursive, quiet);
if opt.quiet { rayon::ThreadPoolBuilder::new()
verbose = 0; .num_threads(threads_number)
} .build_global()
let cpus = if opt.threads > 0 { .unwrap_or_default();
std::cmp::min(num_cpus::get(), opt.threads as usize)
} else {
num_cpus::get()
};
rayon::ThreadPoolBuilder::new().num_threads(cpus).build_global().unwrap_or_default();
let (base_path, files) = scanfiles::scanfiles(args, opt.recursive);
let same_folder_as_input = opt.same_folder_as_input; let total_files = input_files.len();
let output_dir = if same_folder_as_input {
base_path.clone()
} else {
opt.output.unwrap()
};
if dry_run {
log("Running in dry run mode", 0, Notice, verbose);
} else {
match fs::create_dir_all(output_dir.clone()) {
Ok(_) => {}
Err(_) => log("Cannot create output path. Check your permissions.", 201, Error, verbose)
}
}
let mut compression_parameters = CSParameters::new();
if opt.quality.is_some() {
let quality = opt.quality.unwrap_or(80);
compression_parameters.jpeg.quality = quality;
compression_parameters.png.quality = quality;
compression_parameters.gif.quality = quality;
compression_parameters.webp.quality = quality;
} else if lossless {
compression_parameters.optimize = true;
compression_parameters.png.force_zopfli = opt.zopfli;
}
compression_parameters.keep_metadata = opt.exif;
compression_parameters.png.optimization_level = png_optimization_level;
if opt.width.is_some() {
compression_parameters.width = opt.width.unwrap_or(0);
}
if opt.height.is_some() {
compression_parameters.height = opt.height.unwrap_or(0);
}
let overwrite_policy = opt.overwrite;
let keep_structure = opt.keep_structure;
if opt.zopfli {
log("Using zopfli may take a very long time, especially with large images!", 0, Notice, verbose);
}
let progress_bar = setup_progress_bar(files.len() as u64, verbose);
progress_bar.set_message("Compressing...");
let results = Arc::new(Mutex::new(Vec::new()));
files.par_iter().for_each(|input_file| {
let mut local_compression_parameters = compression_parameters;
let input_file_metadata = fs::metadata(input_file);
let (input_size, input_mtime, input_atime) = match input_file_metadata {
Ok(s) => (s.len(), FileTime::from_last_modification_time(&s), FileTime::from_last_access_time(&s)),
Err(e) => {
let error_message = format!("Cannot get file size for {}, Error: {}", input_file.display(), e);
log(error_message.as_str(), 202, Warning, verbose);
(0, FileTime::now(), FileTime::now())
}
};
let progress_bar = setup_progress_bar(total_files, verbose);
let compression_results: Vec<CompressionResult> = input_files
.par_iter()
.progress_with(progress_bar)
.map(|input_file| {
let mut compression_result = CompressionResult { let mut compression_result = CompressionResult {
path: input_file.display().to_string(), original_path: input_file.display().to_string(),
output_path: "".to_string(), output_path: String::new(),
original_size: input_size, original_size: 0,
compressed_size: 0, compressed_size: 0,
error: "Unknown".to_string(), status: CompressionStatus::Error,
result: false, message: String::new(),
}; };
let mut filename = if keep_structure { let original_file_size = match input_file.metadata() {
input_file.strip_prefix(base_path.clone()).unwrap_or_else(|_| Path::new("")).as_os_str() Ok(m) => m.len(),
} else { Err(_) => {
input_file.file_name().unwrap_or_default() compression_result.message = "Error reading file metadata".to_string();
return compression_result;
}
}; };
let mut basename = Path::new(filename).file_stem().unwrap_or_default().to_os_string();
if !suffix.is_empty() { compression_result.original_size = original_file_size;
basename.push(suffix.clone());
if let Some(ext) = input_file.extension() {
basename.push(".");
basename.push(ext);
}
filename = basename.as_os_str();
}
if filename.is_empty() { let output_directory = if args.output_destination.same_folder_as_input {
compression_result.error = "Cannot retrieve filename for {}. Skipping.".to_string(); match input_file.parent() {
results.lock().unwrap().push(compression_result); Some(p) => p,
return;
}
let filename_str = match filename.to_str() {
None => { None => {
compression_result.error = "Cannot convert filename for {}. Skipping.".to_string(); compression_result.message = "Error getting parent directory".to_string();
results.lock().unwrap().push(compression_result); return compression_result;
return;
} }
Some(fs) => fs
};
let random_suffix: String = (&mut thread_rng()).sample_iter(Alphanumeric)
.take(8)
.map(char::from)
.collect();
let random_suffixed_name = format!("{}.{}", filename_str, random_suffix);
let mut final_output_full_path = output_dir.clone().join(filename);
if convert {
final_output_full_path.set_extension(output_format.extension.clone());
} }
} else {
let output_full_path = output_dir.clone().join(random_suffixed_name); match args.output_destination.output.as_ref() {
let output_full_dir = output_full_path.parent().unwrap_or_else(|| Path::new("/")); Some(p) => p,
compression_result.output_path = final_output_full_path.display().to_string();
if !output_full_dir.exists() {
match fs::create_dir_all(output_full_dir) {
Ok(_) => {}
Err(e) => {
compression_result.error = format!("Cannot create output directory. Error: {}.", e);
results.lock().unwrap().push(compression_result);
return;
}
};
}
if !matches!(overwrite_policy, OverwritePolicy::All) && final_output_full_path.exists() {
if let OverwritePolicy::None = overwrite_policy { return; }
}
let input_full_path = input_file.to_str().unwrap();
let output_full_path_str = match output_full_path.to_str() {
None => { None => {
compression_result.error = "Cannot convert output_full_path. Skipping.".to_string(); compression_result.message = "Error getting output directory".to_string();
return; return compression_result;
}
} }
Some(ofp) => ofp
}; };
if opt.long_edge.is_some() || opt.short_edge.is_some() { let output_full_path = match compute_output_full_path(
let size = imagesize::size(input_full_path).unwrap(); output_directory.to_path_buf(),
input_file.to_path_buf(),
if size.width > size.height { base_path.to_path_buf(),
if opt.long_edge.is_some() { args.keep_structure,
local_compression_parameters.width = opt.long_edge.unwrap_or(0); args.suffix.as_ref().unwrap_or(&String::new()).as_ref(),
} else if opt.short_edge.is_some() { ) {
local_compression_parameters.height = opt.short_edge.unwrap_or(0); Some(p) => p,
None => {
compression_result.message = "Error computing output path".to_string();
return compression_result;
} }
} else if opt.long_edge.is_some() {
local_compression_parameters.height = opt.long_edge.unwrap_or(0);
} else if opt.short_edge.is_some() {
local_compression_parameters.width = opt.short_edge.unwrap_or(0);
}
}
if !dry_run {
let result = if convert {
caesium::convert(input_full_path.to_string(), output_full_path_str.to_string(), &local_compression_parameters, output_format.file_type)
} else if compress_by_size {
caesium::compress_to_size(input_full_path.to_string(), output_full_path_str.to_string(), &mut local_compression_parameters, opt.max_size.unwrap() as usize, true)
} else {
caesium::compress(input_full_path.to_string(), output_full_path_str.to_string(), &local_compression_parameters)
}; };
match result { if args.dry_run {
Ok(_) => { compression_result.status = CompressionStatus::Success;
compression_result.result = true; return compression_result;
let output_size = match fs::metadata(output_full_path.clone()) {
Ok(s) => s.len(),
Err(_) => 0
}; };
let mut final_output_size = output_size;
if matches!(overwrite_policy, OverwritePolicy::Bigger) && final_output_full_path.exists() { let compressed_image = match compress_in_memory(
let existing_file_size = match fs::metadata(final_output_full_path.clone()) { read_file_to_vec(input_file).unwrap(),
Ok(s) => s.len(), &compression_parameters,
Err(_) => 0 ) {
}; Ok(v) => v,
if output_size >= existing_file_size {
match fs::remove_file(output_full_path) {
Ok(_) => {}
Err(e) => { Err(e) => {
compression_result.error = format!("Cannot remove existing file. Error: {}.", e); compression_result.message = format!("Error compressing file: {}", e);
compression_result.result = false; return compression_result;
} }
}; };
final_output_size = existing_file_size; compression_result.output_path = output_full_path.display().to_string();
} else { let output_file_size = compressed_image.len() as u64;
match fs::rename(output_full_path, final_output_full_path.clone()) {
Ok(_) => {} if output_full_path.exists() {
Err(e) => { match args.overwrite {
compression_result.error = format!("Cannot rename existing file. Error: {}.", e); OverwritePolicy::None => {
compression_result.result = false; compression_result.status = CompressionStatus::Skipped;
compression_result.compressed_size = original_file_size;
compression_result.message =
"File already exists, skipped due overwrite policy".to_string();
return compression_result;
}
OverwritePolicy::Bigger => {
if output_file_size >= original_file_size {
compression_result.status = CompressionStatus::Skipped;
compression_result.compressed_size = original_file_size;
compression_result.message =
"File already exists, skipped due overwrite policy".to_string();
return compression_result;
}
}
_ => {}
}
}
let mut output_file = match File::create(&output_full_path) {
Ok(f) => f,
Err(_) => {
compression_result.message = "Error creating output file".to_string();
return compression_result;
} }
}; };
} match output_file.write_all(&compressed_image) {
} else {
match fs::rename(output_full_path, final_output_full_path.clone()) {
Ok(_) => {}
Err(e) => {
compression_result.error = format!("Cannot rename existing file. Error: {}.", e);
compression_result.result = false;
}
};
}
compression_result.compressed_size = final_output_size;
if compression_result.result && keep_dates {
match set_file_times(final_output_full_path, input_atime, input_mtime) {
Ok(_) => {} Ok(_) => {}
Err(_) => { Err(_) => {
compression_result.error = "Cannot set original file dates.".into(); compression_result.message = "Error writing output file".to_string();
return compression_result;
} }
} };
}
results.lock().unwrap().push(compression_result);
}
Err(e) => {
compression_result.error = e.to_string();
results.lock().unwrap().push(compression_result);
}
}
} else {
results.lock().unwrap().push(compression_result)
}
progress_bar.inc(1);
});
if args.keep_dates {
progress_bar.finish_with_message("Compression completed!"); let output_file_metadata = match output_file.metadata() {
Ok(m) => m,
let mut total_original_size = 0.0; Err(_) => {
let mut total_compressed_size = 0.0; compression_result.message =
let mut total_errors: u32 = 0; "Error reading output file metadata".to_string();
let mut total_compressed_files = 0; return compression_result;
}
results.lock().unwrap().iter().for_each(|result| { };
if result.result { let (last_modification_time, last_access_time) = (
total_compressed_size += result.compressed_size as f64; FileTime::from_last_modification_time(&output_file_metadata),
if verbose > 1 { FileTime::from_last_access_time(&output_file_metadata),
let message = format!("{} -> {}\n{} -> {} [{:.2}%]",
result.path,
result.output_path,
human_bytes(result.original_size as f64),
human_bytes(result.compressed_size as f64),
(result.compressed_size as f64 - result.original_size as f64) * 100.0 / result.original_size as f64
); );
log(message.as_str(), 0, Log, verbose); match preserve_dates(&output_full_path, last_modification_time, last_access_time) {
} Ok(_) => {}
total_compressed_files += 1; Err(_) => {
} else { compression_result.message = "Error preserving file dates".to_string();
total_compressed_size += result.original_size as f64; return compression_result;
if !dry_run { }
total_errors += 1;
log(format!("File {} was not compressed. Reason: {}", result.path, result.error).as_str(), 210, Warning, verbose);
} }
} }
total_original_size += result.original_size as f64;
});
let recap_message = format!("\nCompressed {} files ({} errors)\n{} -> {} [{:.2}% | -{}]", compression_result.status = CompressionStatus::Success;
total_compressed_files, compression_result.compressed_size = output_file_size;
total_errors, compression_result
human_bytes(total_original_size), })
human_bytes(total_compressed_size), .collect();
(total_compressed_size - total_original_size) * 100.0 / total_original_size,
human_bytes(total_original_size - total_compressed_size) //TODO can be positive
);
log(recap_message.as_str(), 0, Log, verbose); let recap_message = format!("Processed {} files", compression_results.len());
} }
fn setup_progress_bar(len: u64, verbose: u8) -> ProgressBar { fn get_parallelism_count(requested_threads: u32, available_threads: usize) -> usize {
let progress_bar = ProgressBar::new(len); if requested_threads > 0 {
progress_bar.set_draw_target(ProgressDrawTarget::stdout()); std::cmp::min(available_threads, requested_threads as usize)
progress_bar.set_style(ProgressStyle::default_bar() } else {
.template("[{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len}\n{msg}") available_threads
.unwrap() }
.progress_chars("#>-")); }
if verbose == 0 { fn build_compression_parameters(args: &CommandLineArgs) -> CSParameters {
progress_bar.set_draw_target(ProgressDrawTarget::hidden()); let mut parameters = CSParameters::new();
let quality = args.compression.quality.unwrap_or(80) as u32;
parameters.jpeg.quality = quality;
parameters.png.quality = quality;
parameters.webp.quality = quality;
parameters.gif.quality = quality;
parameters.keep_metadata = args.exif;
parameters.png.optimization_level = args.png_opt_level;
parameters.png.force_zopfli = args.zopfli;
parameters
}
fn compute_output_full_path(
output_directory: PathBuf,
input_file_path: PathBuf,
base_directory: PathBuf,
keep_structure: bool,
suffix: &str,
) -> Option<PathBuf> {
let extension = input_file_path
.extension()
.unwrap_or_default()
.to_os_string();
let base_name = input_file_path
.file_stem()
.unwrap_or_default()
.to_os_string();
let mut output_file_name = base_name;
output_file_name.push(suffix);
if !extension.is_empty() {
output_file_name.push(".");
output_file_name.push(extension);
} }
if keep_structure {
let parent = match input_file_path.parent()?.canonicalize() {
Ok(p) => p,
Err(_) => return None,
};
let output_path_prefix = match parent.strip_prefix(base_directory) {
Ok(p) => p,
Err(_) => return None,
};
let full_output_directory = output_directory.join(output_path_prefix);
fs::create_dir_all(&full_output_directory).ok()?; // TODO I don't like that the creation is done inside this function because the name is a bit obscure
Some(full_output_directory.join(output_file_name))
} else {
fs::create_dir_all(&output_directory).ok()?; // TODO I don't like that the creation is done inside this function because the name is a bit obscure
Some(output_directory.join(output_file_name))
}
}
fn read_file_to_vec(file_path: &PathBuf) -> io::Result<Vec<u8>> {
let mut file = File::open(file_path)?;
let mut buffer = Vec::new();
file.read_to_end(&mut buffer)?;
Ok(buffer)
}
fn preserve_dates(
output_file: &PathBuf,
input_atime: FileTime,
input_mtime: FileTime,
) -> io::Result<()> {
set_file_times(output_file, input_atime, input_mtime)
}
fn setup_progress_bar(len: usize, verbose: u8) -> ProgressBar {
let progress_bar = ProgressBar::new(len as u64);
if verbose == 0 {
progress_bar.set_draw_target(ProgressDrawTarget::hidden());
} else {
progress_bar.set_draw_target(ProgressDrawTarget::stdout());
}
progress_bar.set_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len}\n{msg}")
.unwrap() //TODO: handle error
.progress_chars("#>-"),
);
progress_bar.enable_steady_tick(Duration::new(1, 0));
progress_bar progress_bar
} }
fn map_output_format(format: String) -> OutputFormat { #[cfg(test)]
match format.to_lowercase().as_str() { mod tests {
"jpg|jpeg" => OutputFormat { use super::*;
file_type: SupportedFileTypes::Jpeg, use std::path::Path;
extension: format,
}, #[test]
"png" => OutputFormat { fn test_get_parallelism_count() {
file_type: SupportedFileTypes::Png, let result = get_parallelism_count(4, 4);
extension: format, assert_eq!(result, 4);
},
"webp" => OutputFormat { let result = get_parallelism_count(2, 8);
file_type: SupportedFileTypes::WebP, assert_eq!(result, 2);
extension: format,
}, let result = get_parallelism_count(0, 8);
"tiff|tif" => OutputFormat { assert_eq!(result, 8);
file_type: SupportedFileTypes::Tiff,
extension: format, let result = get_parallelism_count(1, 8);
}, assert_eq!(result, 1);
_ => OutputFormat {
file_type: SupportedFileTypes::Unkn, let result = get_parallelism_count(8, 2);
extension: "".to_string(), assert_eq!(result, 2);
},
let result = get_parallelism_count(0, 0);
assert_eq!(result, 0);
}
#[test]
fn test_compute_output_full_path() {
let output_directory = PathBuf::from("/output");
let base_directory = PathBuf::from("/base");
// Test case 1: keep_structure = true
let input_file_path = PathBuf::from("/base/folder/test.jpg");
let result = compute_output_full_path(
output_directory.clone(),
input_file_path.clone(),
base_directory.clone(),
true,
"_suffix",
)
.unwrap();
assert_eq!(result, Path::new("/output/folder/test_suffix.jpg"));
// Test case 2: keep_structure = false
let result = compute_output_full_path(
output_directory.clone(),
input_file_path.clone(),
base_directory.clone(),
false,
"_suffix",
)
.unwrap();
assert_eq!(result, Path::new("/output/test_suffix.jpg"));
// Test case 3: input file without extension
let input_file_path = PathBuf::from("/base/folder/test");
let result = compute_output_full_path(
output_directory.clone(),
input_file_path.clone(),
base_directory.clone(),
false,
"_suffix",
)
.unwrap();
assert_eq!(result, Path::new("/output/test_suffix"));
// Test case 4: input file with different base directory
let input_file_path = PathBuf::from("/different_base/folder/test.jpg");
let result = compute_output_full_path(
output_directory.clone(),
input_file_path.clone(),
base_directory.clone(),
false,
"_suffix",
)
.unwrap();
assert_eq!(result, Path::new("/output/test_suffix.jpg"));
} }
} }

View File

@ -1,128 +1,100 @@
use std::path::PathBuf; use std::path::PathBuf;
use structopt::clap::arg_enum; use clap::{Args, Parser, ValueEnum};
use structopt::StructOpt;
use crate::logger::log; #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum, Debug)]
use crate::logger::ErrorLevel::Error; pub enum OverwritePolicy {
/// Always overwrite
arg_enum! {
#[derive(Debug, Clone, Copy)]
pub enum OverwritePolicy {
All, All,
/// Never overwrite
None, None,
/// Overwrite only if the file to be overwritten is bigger
Bigger Bigger
}
} }
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
pub struct CommandLineArgs {
#[command(flatten)]
pub compression: Compression,
#[derive(StructOpt)] #[command(flatten)]
#[structopt(name = "", about = "CaesiumCLT - Command Line Tools for image compression")] pub output_destination: OutputDestination,
pub struct Opt {
/// sets output file quality between [0-100], 0 for optimization
#[structopt(short = "q", long, conflicts_with_all(&["lossless", "max-size"]), required_unless="lossless", required_unless="max-size")]
pub quality: Option<u32>,
/// set the expected maximum output size in bytes /// select level for PNG optimization, between [0-6]
#[structopt(long = "max-size", conflicts_with_all(&["quality", "lossless"]))] #[arg(long, default_value = "3")]
pub max_size: Option<u32>, pub png_opt_level: u8,
/// perform lossless compression /// use zopfli when optimizing PNG files (it may take a very long time to complete)
#[structopt(short = "l", long = "lossless", conflicts_with_all(&["quality", "max-size"]))] #[arg(long)]
pub lossless: bool, pub zopfli: bool,
/// keeps EXIF info during compression /// keeps EXIF info during compression
#[structopt(short = "e", long)] #[arg(short, long)]
pub exif: bool, pub exif: bool,
/// width of the output image, if height is not set will preserve aspect ratio /// keep original file date information
#[structopt(long, conflicts_with_all(&["long-edge", "short-edge"]))] #[arg(long)]
pub width: Option<u32>, pub keep_dates: bool,
/// height of the output image, if width is not set will preserve aspect ratio /// add a suffix to the output filename
#[structopt(long, conflicts_with_all(&["long-edge", "short-edge"]))] #[arg(long)]
pub height: Option<u32>, pub suffix: Option<String>,
/// sets the size of the longest edge of the image
#[structopt(long = "long-edge", conflicts_with_all(&["width", "height", "short-edge"]))]
pub long_edge: Option<u32>,
/// sets the size of the shortest edge of the image
#[structopt(long = "short-edge", conflicts_with_all(&["width", "height", "long-edge"]))]
pub short_edge: Option<u32>,
/// output folder
#[structopt(short = "o", long, conflicts_with = "same-folder-as-input", parse(from_os_str))]
pub output: Option<PathBuf>,
/// if input is a folder, scan subfolders too /// if input is a folder, scan subfolders too
#[structopt(short = "R", long)] #[arg(short = 'R', long)]
pub recursive: bool, pub recursive: bool,
/// keep the folder structure, can be used only with -R /// keep the folder structure, can be used only with -R
#[structopt(short = "S", long)] #[arg(short = 'S', long)]
pub keep_structure: bool, pub keep_structure: bool,
/// overwrite policy /// do not write output files
#[structopt(short = "O", long, default_value = "all")] #[arg(long, short, default_value = "false")]
pub overwrite: OverwritePolicy,
/// do not compress files but just show output paths
#[structopt(long = "dry-run", short = "d", long)]
pub dry_run: bool, pub dry_run: bool,
/// suppress all output
#[structopt(short = "Q", long)]
pub quiet: bool,
/// specify the number of parallel jobs (max is the number of processors available) /// specify the number of parallel jobs (max is the number of processors available)
#[structopt(long, default_value = "0")] #[arg(long, default_value = "0")]
pub threads: u32, pub threads: u32,
/// use zopfli when optimizing PNG files (it may take a very long time to complete) /// suppress all output
#[structopt(long)] #[arg(short = 'Q', long, group = "verbosity")]
pub zopfli: bool, pub quiet: bool,
/// overwrite policy
#[arg(short = 'O', long, value_enum, default_value = "all")]
pub overwrite: OverwritePolicy,
/// select how much output you want to see, 0 is equal to -Q, --quiet /// select how much output you want to see, 0 is equal to -Q, --quiet
#[structopt(long, default_value = "1")] #[arg(long, default_value = "1", group = "verbosity")]
pub verbose: u8, pub verbose: u8,
/// convert the image to the selected format (jpg, png, webp, tiff) pub files: Vec<String>,
#[structopt(long = "output-format", default_value = "none")] }
pub output_format: String,
/// keep original file date information #[derive(Args, Debug)]
#[structopt(long = "keep-dates")] #[group(required = true, multiple = false)]
pub keep_dates: bool, pub struct Compression {
/// sets output file quality between [0-100]
#[arg(short, long)]
pub quality: Option<u8>,
/// select level for PNG optimization, between [0-6] /// perform lossless compression
#[structopt(long = "png-opt-level", default_value = "3")] #[arg(long, default_value = "false")]
pub png_opt_level: u8, pub lossless: bool,
/// sets the output folder to be the same as the input folder. Overwrites original files /// set the expected maximum output size in bytes
#[structopt(long = "same-folder-as-input", conflicts_with = "output")] #[arg(long)]
pub max_size: Option<u8>,
}
#[derive(Args, Debug)]
#[group(required = true, multiple = false)]
pub struct OutputDestination {
/// output folder
#[arg(short = 'o', long, group = "output_destination")]
pub output: Option<PathBuf>,
/// sets the output folder to be the same as the input folder, overwrites original files
#[arg(long, default_value = "false", group = "output_destination")]
pub same_folder_as_input: bool, pub same_folder_as_input: bool,
/// add a suffix to the output filename
#[structopt(long = "suffix", default_value = "none")]
pub suffix: String,
/// Files to process
#[structopt(name = "FILE", parse(from_os_str))]
pub files: Vec<PathBuf>,
}
pub fn get_opts() -> Opt {
let opt = Opt::from_args();
validate_opts(&opt);
opt
}
fn validate_opts(opt: &Opt) {
let args = &opt.files;
let verbose = opt.verbose;
if args.is_empty() {
log("Please provide at least one file or folder.", 101, Error, verbose);
}
} }

273
src/scan_files.rs Normal file
View File

@ -0,0 +1,273 @@
use std::path::{Path, PathBuf};
use std::time::Duration;
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressIterator};
use indicatif::ProgressStyle;
use walkdir::WalkDir;
/// Returns true when the file at `path` is detected (by content sniffing via
/// `infer`) as one of the supported image formats: JPEG, PNG, GIF or WebP.
/// Any probe failure (unreadable file, unknown type) counts as unsupported.
fn is_filetype_supported(path: &Path) -> bool {
    let file_type = match infer::get_from_path(path) {
        Ok(Some(ft)) => ft,
        // Unknown type or I/O error: not a file we can compress.
        _ => return false,
    };
    matches!(
        file_type.mime_type(),
        "image/jpeg" | "image/png" | "image/gif" | "image/webp"
    )
}
/// A candidate path is worth processing only if it currently exists, is a
/// regular file (not a directory/symlink target mismatch), and its content
/// is one of the supported image formats.
fn is_valid(entry: &Path) -> bool {
    if !entry.exists() {
        return false;
    }
    if !entry.is_file() {
        return false;
    }
    is_filetype_supported(entry)
}
/// Expands the command-line inputs into the concrete list of image files to
/// compress.
///
/// * `args` — raw paths from the CLI; directories are walked (one level deep,
///   or fully when `recursive` is set), plain files are taken as-is.
/// * `quiet` — hides the "Collecting files..." spinner.
///
/// Returns the common base folder of all accepted files together with their
/// canonicalized paths. With no input arguments, both are empty.
pub fn scan_files(args: Vec<String>, recursive: bool, quiet: bool) -> (PathBuf, Vec<PathBuf>) {
    if args.is_empty() {
        return (PathBuf::new(), vec![]);
    }

    let mut collected: Vec<PathBuf> = Vec::new();
    let mut common_base = PathBuf::new();
    let spinner = init_progress_bar(quiet);

    for raw_arg in args.into_iter().progress_with(spinner) {
        let candidate = PathBuf::from(raw_arg);
        if candidate.exists() && candidate.is_dir() {
            // Directories are expanded; depth 1 means "no recursion".
            let walker = if recursive {
                WalkDir::new(candidate)
            } else {
                WalkDir::new(candidate).max_depth(1)
            };
            for entry in walker.into_iter().filter_map(Result::ok) {
                let file_path = entry.into_path();
                if is_valid(&file_path) {
                    common_base = canonicalize_and_push(&file_path, common_base, &mut collected);
                }
            }
        } else if is_valid(&candidate) {
            common_base = canonicalize_and_push(&candidate, common_base, &mut collected);
        }
    }

    (common_base, collected)
}
/// Canonicalizes `path`, appends it to `files`, and returns the (possibly
/// shortened) common base folder. Paths that fail to canonicalize — e.g.
/// removed between scan and here — are silently skipped and the base folder
/// is returned unchanged.
fn canonicalize_and_push(path: &Path, base_path: PathBuf, files: &mut Vec<PathBuf>) -> PathBuf {
    match path.canonicalize() {
        Ok(absolute) => {
            let updated_base = compute_base_folder(&base_path, &absolute);
            files.push(absolute);
            updated_base
        }
        Err(_) => base_path,
    }
}
/// Computes the longest common ancestor folder of `base_folder` and `new_path`.
///
/// - An empty `base_folder` (first file seen) yields `new_path`'s parent.
/// - A rootless/root `base_folder` can never shrink further and is returned as-is.
/// - Otherwise the result is the component-wise common prefix of the two paths
///   (using `new_path`'s parent when it is an existing file), falling back to
///   `/` when nothing but the root is shared.
fn compute_base_folder(base_folder: &Path, new_path: &Path) -> PathBuf {
    if base_folder.as_os_str().is_empty() && new_path.parent().is_some() {
        return new_path.parent().unwrap().to_path_buf();
    }
    if base_folder.parent().is_none() {
        return base_folder.to_path_buf();
    }

    // Compare folders: for an existing file, use its containing directory.
    // (`Path::new("/")` avoids the needless temporary `PathBuf` allocation.)
    let new_folder = if new_path.is_file() {
        new_path.parent().unwrap_or(Path::new("/")).to_path_buf()
    } else {
        new_path.to_path_buf()
    };

    // Longest common prefix, component by component. `zip` stops at the
    // shorter path and avoids the O(n²) `iter().nth(i)` lookup per component.
    let mut common = PathBuf::new();
    for (base_component, new_component) in base_folder.iter().zip(new_folder.iter()) {
        if base_component != new_component {
            break;
        }
        common.push(base_component);
    }

    // Only the root (or nothing) in common: normalize to "/".
    if common.parent().is_none() {
        return PathBuf::from("/");
    }
    common
}
/// Builds the "Collecting files..." spinner used while scanning inputs.
///
/// When `quiet` is set the spinner's draw target is hidden so nothing is
/// printed. The custom template falls back to the default spinner style if it
/// fails to parse.
fn init_progress_bar(quiet: bool) -> ProgressBar {
    let spinner = ProgressBar::new_spinner();
    if quiet {
        spinner.set_draw_target(ProgressDrawTarget::hidden());
        return spinner;
    }

    let style = match ProgressStyle::default_spinner()
        .tick_strings(&["", "", "", "", "", "", "", "", "", ""])
        .template("{spinner:.cyan} {msg}")
    {
        Ok(parsed) => parsed,
        Err(_) => ProgressStyle::default_spinner(),
    };

    spinner.set_message("Collecting files...");
    spinner.enable_steady_tick(Duration::from_millis(100));
    spinner.set_style(style);
    spinner
}
#[cfg(test)]
mod tests {
    use super::*;
    use image::RgbImage;
    use std::io::{Cursor, Write};
    use std::path::Path;
    use tempfile::NamedTempFile;

    /// Encodes a 1x1 RGB image in `format` and writes it to a fresh temp file.
    /// Centralizes the fixture setup the detection tests previously repeated inline.
    fn temp_image_file(format: image::ImageFormat) -> NamedTempFile {
        let mut temp_file = NamedTempFile::new().unwrap();
        let mut bytes: Vec<u8> = Vec::new();
        RgbImage::new(1, 1)
            .write_to(&mut Cursor::new(&mut bytes), format)
            .unwrap();
        temp_file.write_all(bytes.as_slice()).unwrap();
        temp_file
    }

    #[test]
    fn test_is_filetype_supported() {
        // JPEG/PNG/WebP/GIF must be detected as supported via magic bytes.
        let supported_file_types = [
            image::ImageFormat::Jpeg,
            image::ImageFormat::Png,
            image::ImageFormat::WebP,
            image::ImageFormat::Gif,
        ];
        for format in supported_file_types {
            let temp_file = temp_image_file(format);
            assert!(is_filetype_supported(temp_file.path()));
        }

        // TIFF and AVIF are real images but outside the supported set.
        let unsupported_file_types = [image::ImageFormat::Tiff, image::ImageFormat::Avif];
        for format in unsupported_file_types {
            let temp_file = temp_image_file(format);
            assert!(!is_filetype_supported(temp_file.path()));
        }
    }

    #[test]
    fn test_is_valid() {
        // A real JPEG file is valid; its parent directory and a nonexistent
        // child path are not.
        let temp_file = temp_image_file(image::ImageFormat::Jpeg);
        assert!(is_valid(temp_file.path()));
        assert!(!is_valid(temp_file.path().parent().unwrap()));
        assert!(!is_valid(temp_file.path().join("test").as_path()));

        // An existing file with an unsupported format is rejected.
        let temp_file = temp_image_file(image::ImageFormat::Avif);
        assert!(!is_valid(temp_file.path()));
    }

    #[test]
    fn test_compute_base_folder_with_files() {
        let base_folder = Path::new("/base/folder");
        let new_path = Path::new("/base/folder/subfolder/file.jpg");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/base/folder"));

        let base_folder = Path::new("/base/folder/subfolder/another/folder");
        let new_path = Path::new("/base/folder/subfolder/file.jpg");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/base/folder/subfolder"));

        let base_folder = Path::new("/base/folder/subfolder/another/folder");
        let new_path = Path::new("/file.jpg");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/"));

        let base_folder = Path::new("/");
        let new_path = Path::new("/base/folder/subfolder/file.jpg");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/"));

        let base_folder = Path::new("/");
        let new_path = Path::new("/file.jpg");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/"));

        // Empty base folder: the first file seen defines the base.
        let base_folder = PathBuf::new();
        let new_path = Path::new("/temp/file.jpg");
        let result = compute_base_folder(&base_folder, new_path);
        assert_eq!(result, Path::new("/temp"));
    }

    #[test]
    fn test_compute_base_folder_with_folders() {
        let base_folder = Path::new("/base/folder");
        let new_path = Path::new("/base/folder/subfolder");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/base/folder"));

        let base_folder = Path::new("/base/folder/subfolder/another/folder");
        let new_path = Path::new("/base/folder/subfolder");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/base/folder/subfolder"));

        let base_folder = Path::new("/base/folder/subfolder/another/folder");
        let new_path = Path::new("/");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/"));

        let base_folder = Path::new("/");
        let new_path = Path::new("/base/folder/subfolder");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/"));

        let base_folder = Path::new("/");
        let new_path = Path::new("/");
        let result = compute_base_folder(base_folder, new_path);
        assert_eq!(result, Path::new("/"));
    }
}

View File

@ -1,110 +0,0 @@
use std::path::{Path, PathBuf};
use std::time::Duration;
use indicatif::ProgressBar;
use indicatif::ProgressStyle;
use walkdir::WalkDir;
/// Returns `true` when the file's magic bytes identify it as JPEG, PNG, GIF or WebP.
pub fn is_filetype_supported(path: &Path) -> bool {
    // Paths that are not valid UTF-8 cannot be converted to &str; treat them
    // as unsupported rather than panicking.
    let file_path = match path.to_str() {
        None => return false,
        Some(p) => p
    };
    // Unreadable files (I/O error) and unrecognized content both yield false.
    match infer::get_from_path(file_path) {
        Ok(v) => match v {
            None => false,
            Some(ft) => matches!(ft.mime_type(), "image/jpeg" | "image/png" | "image/gif" | "image/webp"),
        },
        Err(_) => false
    }
}
/// True when `entry` is an existing regular file of a supported image type.
fn is_valid(entry: &Path) -> bool {
    entry.exists() && entry.is_file() && is_filetype_supported(entry)
}
/// Expands the given paths (files or directories) into the list of supported
/// image files, canonicalizing each and folding it into a common base folder.
///
/// Returns `(base_folder, files)`. The spinner ticks once per input argument.
pub fn scanfiles(args: Vec<PathBuf>, recursive: bool) -> (PathBuf, Vec<PathBuf>) {
    let mut files: Vec<PathBuf> = vec![];
    let mut base_path = PathBuf::new();
    let progress_bar = init_progress_bar();
    for input in args.into_iter() {
        if input.exists() && input.is_dir() {
            let mut walk_dir = WalkDir::new(input);
            if !recursive {
                // Depth 1 restricts the walk to the directory's direct children.
                walk_dir = walk_dir.max_depth(1);
            }
            for entry in walk_dir.into_iter().filter_map(|e| e.ok()) {
                let path = entry.into_path();
                if is_valid(&path) {
                    base_path = canonicalize_and_push(&path, base_path, &mut files);
                }
            }
        } else if is_valid(&input) {
            base_path = canonicalize_and_push(&input, base_path, &mut files);
        }
        progress_bar.tick();
    }
    progress_bar.finish_and_clear();
    (base_path, files)
}
/// Canonicalizes `path`; on success records it in `files` and updates the
/// running common base folder. Canonicalization failures are silently skipped.
fn canonicalize_and_push(path: &Path, mut base_path: PathBuf, files: &mut Vec<PathBuf>) -> PathBuf {
    if let Ok(ap) = path.canonicalize() {
        base_path = compute_base_folder(&base_path, &ap);
        files.push(ap);
    }
    base_path
}
/// Computes the common ancestor folder of `base_folder` and `new_path`.
///
/// A rootless `base_folder` (empty or "/") is replaced by `new_path`'s folder;
/// otherwise the result is the component-wise common prefix, falling back to
/// "/" when only the root is shared.
fn compute_base_folder(base_folder: &Path, new_path: &Path) -> PathBuf {
    // No usable base yet: adopt the new path (or its parent, for files).
    if base_folder.parent().is_none() {
        return if new_path.is_dir() {
            new_path.to_path_buf()
        } else {
            new_path.parent().unwrap_or(&*PathBuf::from("/")).to_path_buf()
        };
    }
    let mut folder = PathBuf::new();
    let mut new_path_folder = new_path.to_path_buf();
    // For existing files, compare against the containing directory.
    if new_path.is_file() {
        new_path_folder = new_path.parent().unwrap_or(&*PathBuf::from("/")).to_path_buf();
    }
    // Accumulate the longest common component prefix of the two paths.
    for (i, component) in base_folder.iter().enumerate() {
        if let Some(new_path_component) = new_path_folder.iter().nth(i) {
            if new_path_component == component {
                folder.push(component);
            } else {
                break;
            }
        } else {
            break;
        }
    }
    // Only the root (or nothing) in common: normalize to "/".
    if folder.parent().is_none() {
        return PathBuf::from("/");
    }
    folder
}
/// Builds the "Collecting files..." spinner shown while scanning inputs.
/// Falls back to the default spinner style if the custom template fails to parse.
fn init_progress_bar() -> ProgressBar {
    let progress_bar = ProgressBar::new_spinner();
    let style = ProgressStyle::default_spinner()
        .tick_strings(&["", "", "", "", "", "", "", "", "", ""])
        .template("{spinner:.cyan} {msg}").unwrap_or_else(|_| ProgressStyle::default_spinner());
    progress_bar.set_message("Collecting files...");
    progress_bar.enable_steady_tick(Duration::from_millis(80));
    progress_bar.set_style(style);
    progress_bar
}