Refactoring main to be more lean

This commit is contained in:
Matteo Paonessa 2024-12-17 20:20:52 +01:00
parent f37acd3b9b
commit 3e24bae549
7 changed files with 739 additions and 624 deletions

View File

@ -1,122 +0,0 @@
# .github/workflows/deploy.yml
name: Build Release
on:
push:
# branches:
# - 'main'
tags:
- "[0-9]+.[0-9]+.[0-9]+"
workflow_dispatch:
repository_dispatch:
types: [ webhook ]
permissions:
contents: write
jobs:
build-and-upload:
name: Build and upload
runs-on: ${{ matrix.os }}
strategy:
matrix:
# You can add more, for any target you'd like!
include:
- build: linux x86
os: ubuntu-latest
target: x86_64-unknown-linux-musl
- build: linux arm64
os: ubuntu-latest
target: aarch64-unknown-linux-musl
- build: macos x86
os: macos-latest
target: x86_64-apple-darwin
- build: macos arm64
os: macos-latest
target: aarch64-apple-darwin
- build: win x86
os: windows-latest
target: x86_64-pc-windows-msvc
# - build: win arm64
# os: windows-latest
# target: aarch64-pc-windows-msvc
# error: failed to run custom build command for `mozjpeg-sys v*`
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Get the release version from the tag
shell: bash
run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
- name: Install Rust
# Or @nightly if you want
uses: dtolnay/rust-toolchain@stable
# Arguments to pass in
with:
# Make Rust compile to our target (defined in the matrix)
targets: ${{ matrix.target }}
- name: Build
uses: clechasseur/rs-cargo@v2
with:
use-cross: true
command: build
args: --verbose --release --target ${{ matrix.target }}
- name: Build archive
shell: bash
run: |
# Replace with the name of your binary
binary_name="caesiumclt"
dirname="$binary_name-${{ env.VERSION }}-${{ matrix.target }}"
mkdir "$dirname"
if [ "${{ matrix.os }}" = "windows-latest" ]; then
mv "target/${{ matrix.target }}/release/$binary_name.exe" "$dirname"
else
mv "target/${{ matrix.target }}/release/$binary_name" "$dirname"
fi
if [ "${{ matrix.os }}" = "windows-latest" ]; then
7z a "$dirname.zip" "$dirname"
echo "ASSET=$dirname.zip" >> $GITHUB_ENV
else
tar -czf "$dirname.tar.gz" "$dirname"
echo "ASSET=$dirname.tar.gz" >> $GITHUB_ENV
fi
# https://github.com/softprops/action-gh-release?tab=readme-ov-file#-customizing
- name: Release
uses: softprops/action-gh-release@v2
with:
files: |
${{ env.ASSET }}
# body_path: ''
body: "|Arch|Filename|\n
|:--: |:--:|\n
|MacOS ARM| caesiumclt-v*-aarch64-apple-darwin.tar.gz|\n
|MacOS x86_64| caesiumclt-v*-x86_64-apple-darwin.tar.gz|\n
|Linux ARM| caesiumclt-v*-aarch64-unknown-linux-musl.tar.gz|\n
|Linux x86_64| caesiumclt-v*-x86_64-unknown-linux-musl.tar.gz|\n
|Windows x86_64| caesiumclt-v*-x86_64-pc-windows-msvc.zip|\n"
- name: Upload Artifact 🚀
uses: actions/upload-artifact@v4
with:
name: ${{ env.ASSET }}
path: ${{ env.ASSET }}
- name: Upload binaries to release ☕
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ${{ env.ASSET }}
asset_name: ${{ env.ASSET }}
tag: ${{ github.ref }}
overwrite: true
body: "Generated by Github Actions"

24
.github/workflows/test.yml vendored Normal file
View File

@ -0,0 +1,24 @@
name: Test
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
env:
CARGO_TERM_COLOR: always
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
steps:
- uses: actions/checkout@v4
- name: Build
run: cargo build --verbose --release
- name: Run tests
run: cargo test --verbose --release

137
README.md
View File

@ -1,5 +1,5 @@
## Caesium CommandLineTools
###### caesium-clt - v0.21.0
###### caesium-clt - v1.0.0-beta.0
###### REQUIREMENTS
* [Rust](https://www.rust-lang.org/tools/install)
@ -10,51 +10,114 @@
----------
###### COMMAND LINE ARGUMENTS
###### USAGE
```
USAGE:
caesiumclt.exe [FLAGS] [OPTIONS] --quality <quality> [FILE]...
Usage: caesiumclt [OPTIONS] <--quality <QUALITY>|--lossless|--max-size <MAX_SIZE>> <--output <OUTPUT>|--same-folder-as-input> [FILES]...
FLAGS:
-d, --dry-run do not compress files but just show output paths
-e, --exif keeps EXIF info during compression
-h, --help Prints help information
--keep-dates keep original file date information
-S, --keep-structure keep the folder structure, can be used only with -R
-l, --lossless perform lossless compression
-Q, --quiet suppress all output
-R, --recursive if input is a folder, scan subfolders too
--same-folder-as-input sets the output folder to be the same as the input folder. Overwrites original files
-V, --version Prints version information
--zopfli use zopfli when optimizing PNG files (it may take a very long time to complete)
Arguments:
[FILES]...
OPTIONS:
--height <height> height of the output image, if width is not set will preserve aspect ratio
--long-edge <long-edge> sets the size of the longest edge of the image
--max-size <max-size> set the expected maximum output size in bytes
-o, --output <output> output folder
--output-format <output-format> convert the image to the selected format (jpg, png, webp, tiff) [default:
none]
-O, --overwrite <overwrite> overwrite policy [default: all]
--png-opt-level <png-opt-level> select level for PNG optimization, between [0-6] [default: 3]
-q, --quality <quality> sets output file quality between [0-100], 0 for optimization
--short-edge <short-edge> sets the size of the shortest edge of the image
--suffix <suffix> convert the image to the selected format (jpg, png, webp, tiff) [default:
none]
--threads <threads> specify the number of parallel jobs (max is the number of processors
available) [default: 0]
--verbose <verbose> select how much output you want to see, 0 is equal to -Q, --quiet [default:
1]
--width <width> width of the output image, if height is not set will preserve aspect ratio
ARGS:
<FILE>... Files to process
Options:
-q, --quality <QUALITY>
sets output file quality between [0-100]
--lossless
perform lossless compression
--max-size <MAX_SIZE>
set the expected maximum output size in bytes
--width <WIDTH>
width of the output image, if height is not set will preserve aspect ratio
--height <HEIGHT>
height of the output image, if width is not set will preserve aspect ratio
--long-edge <LONG_EDGE>
sets the size of the longest edge of the image
--short-edge <SHORT_EDGE>
sets the size of the shortest edge of the image
-o, --output <OUTPUT>
output folder
--same-folder-as-input
sets the output folder to be the same as the input folder, overwrites original files
--format <FORMAT>
convert to the selected output format, or keep the original
[default: original]
[possible values: jpeg, png, webp, tiff, original]
--png-opt-level <PNG_OPT_LEVEL>
select level for PNG optimization, between [0-6]
[default: 3]
--zopfli
use zopfli when optimizing PNG files (it may take a very long time to complete)
-e, --exif
keeps EXIF info during compression
--keep-dates
keep original file date information
--suffix <SUFFIX>
add a suffix to the output filename
-R, --recursive
if input is a folder, scan subfolders too
-S, --keep-structure
keep the folder structure, can be used only with -R
-d, --dry-run
do not write output files
--threads <THREADS>
specify the number of parallel jobs (max is the number of processors available)
[default: 1]
-O, --overwrite <OVERWRITE>
overwrite policy
[default: all]
Possible values:
- all: Always overwrite
- never: Never overwrite
- bigger: Overwrite only if the file to be overwritten is bigger
-Q, --quiet
suppress all output
--verbose <VERBOSE>
select how much output you want to see
[default: progress]
Possible values:
- quiet: Suppress all output
- progress: Show only progress and final results
- warnings-and-errors: Show also skipped and error messages
- all: Print all
-h, --help
Print help (see a summary with '-h')
-V, --version
Print version
```
----------
###### USAGE EXAMPLES
###### EXAMPLES
Losslessly compress ```image1.jpg```, located in the ```home``` directory, into a folder called ```output```
```

578
src/compressor.rs Normal file
View File

@ -0,0 +1,578 @@
use crate::options::{CommandLineArgs, OutputFormat, OverwritePolicy};
use crate::scan_files::get_file_mime_type;
use crate::CompressionStatus;
use caesium::parameters::CSParameters;
use caesium::{compress_in_memory, compress_to_size_in_memory, convert_in_memory, SupportedFileTypes};
use filetime::{set_file_times, FileTime};
use indicatif::{ParallelProgressIterator, ProgressBar};
use rayon::iter::ParallelIterator;
use rayon::prelude::IntoParallelRefIterator;
use std::error::Error;
use std::ffi::OsString;
use std::fs::File;
use std::io::{BufReader, Read, Write};
use std::path::{absolute, Path, PathBuf};
use std::{fs, io};
/// Outcome of processing a single input file.
pub struct CompressionResult {
    /// Path of the original input file, as given.
    pub original_path: String,
    /// Path the compressed file was (or would be) written to; empty when an
    /// error occurred before the output path could be computed.
    pub output_path: String,
    /// Size of the input file in bytes (0 if its metadata could not be read).
    pub original_size: u64,
    /// Size of the produced output in bytes; set to `original_size` when the
    /// file was skipped by the overwrite policy.
    pub compressed_size: u64,
    /// Success / Skipped / Error for this file.
    pub status: CompressionStatus,
    /// Human-readable error or skip reason; empty on success.
    pub message: String,
}
/// Compresses all `input_files` in parallel according to `args`, advancing
/// `progress_bar` as files complete. Returns one `CompressionResult` per input.
///
/// `base_path` is the common root of the inputs; it is used to mirror the
/// folder structure under the output directory when `--keep-structure` is set.
pub fn perform_compression(
    input_files: &Vec<PathBuf>,
    args: &CommandLineArgs,
    base_path: &PathBuf,
    progress_bar: ProgressBar,
) -> Vec<CompressionResult> {
    // Resize parameters are only computed when any resize option is present,
    // because they require probing the input image's real resolution.
    let needs_resize = args.resize.width.is_some()
        || args.resize.height.is_some()
        || args.resize.long_edge.is_some()
        || args.resize.short_edge.is_some();

    input_files
        .par_iter()
        .progress_with(progress_bar)
        .map(|input_file| {
            // Start pessimistic: every early return below reports Error with a
            // message; the happy path overwrites status at the end.
            let mut compression_result = CompressionResult {
                original_path: input_file.display().to_string(),
                output_path: String::new(),
                original_size: 0,
                compressed_size: 0,
                status: CompressionStatus::Error,
                message: String::new(),
            };

            let input_file_metadata = match input_file.metadata() {
                Ok(m) => m,
                Err(_) => {
                    compression_result.message = "Error reading file metadata".to_string();
                    return compression_result;
                }
            };
            let original_file_size = input_file_metadata.len();
            compression_result.original_size = original_file_size;

            // Destination directory: either the input's own folder or the -o target.
            let output_directory = if args.output_destination.same_folder_as_input {
                match input_file.parent() {
                    Some(p) => p,
                    None => {
                        compression_result.message = "Error getting parent directory".to_string();
                        return compression_result;
                    }
                }
            } else {
                match args.output_destination.output.as_ref() {
                    Some(p) => p,
                    None => {
                        compression_result.message = "Error getting output directory".to_string();
                        return compression_result;
                    }
                }
            };

            let (output_directory, filename) = match compute_output_full_path(
                output_directory,
                input_file,
                base_path,
                args.keep_structure,
                args.suffix.as_ref().unwrap_or(&String::new()).as_ref(),
                args.format,
            ) {
                Some(p) => p,
                None => {
                    compression_result.message = "Error computing output path".to_string();
                    return compression_result;
                }
            };
            if !output_directory.exists() && fs::create_dir_all(&output_directory).is_err() {
                compression_result.message = "Error creating output directory".to_string();
                return compression_result;
            }
            let output_full_path = output_directory.join(filename);

            // Dry run: stop before reading or writing any image data.
            if args.dry_run {
                compression_result.status = CompressionStatus::Success;
                return compression_result;
            };

            let mut compression_parameters = build_compression_parameters(args, input_file, needs_resize);
            let input_file_buffer = match read_file_to_vec(input_file) {
                Ok(b) => b,
                Err(_) => {
                    compression_result.message = "Error reading input file".to_string();
                    return compression_result;
                }
            };

            // --max-size takes precedence; otherwise convert when a different
            // output format was requested, else plain in-memory compression.
            let compression = if args.compression.max_size.is_some() {
                compress_to_size_in_memory(
                    input_file_buffer,
                    &mut compression_parameters,
                    args.compression.max_size.unwrap() as usize,
                    true,
                )
            } else if args.format != OutputFormat::Original {
                convert_in_memory(
                    input_file_buffer,
                    &compression_parameters,
                    map_supported_formats(args.format),
                )
            } else {
                compress_in_memory(input_file_buffer, &compression_parameters)
            };
            let compressed_image = match compression {
                Ok(v) => v,
                Err(e) => {
                    compression_result.message = format!("Error compressing file: {}", e);
                    return compression_result;
                }
            };
            compression_result.output_path = output_full_path.display().to_string();
            let output_file_size = compressed_image.len() as u64;

            if output_full_path.exists() {
                // Overwrite policy: Never always skips an existing destination;
                // Bigger skips unless the compressed result is strictly smaller
                // than the original input.
                match args.overwrite {
                    OverwritePolicy::Never | OverwritePolicy::Bigger => {
                        if (matches!(args.overwrite, OverwritePolicy::Bigger) && output_file_size >= original_file_size)
                            || matches!(args.overwrite, OverwritePolicy::Never)
                        {
                            compression_result.status = CompressionStatus::Skipped;
                            compression_result.compressed_size = original_file_size;
                            compression_result.message =
                                "File already exists, skipped due overwrite policy".to_string();
                            return compression_result;
                        }
                    }
                    OverwritePolicy::All => {}
                }
            }

            let mut output_file = match File::create(&output_full_path) {
                Ok(f) => f,
                Err(_) => {
                    compression_result.message = "Error creating output file".to_string();
                    return compression_result;
                }
            };
            if output_file.write_all(&compressed_image).is_err() {
                compression_result.message = "Error writing output file".to_string();
                return compression_result;
            };

            if args.keep_dates {
                let (last_modification_time, last_access_time) = (
                    FileTime::from_last_modification_time(&input_file_metadata),
                    FileTime::from_last_access_time(&input_file_metadata),
                );
                // BUGFIX: `preserve_dates` expects (atime, mtime) — the arguments
                // were previously passed as (mtime, atime), swapping the restored
                // access and modification timestamps on the output file.
                if preserve_dates(&output_full_path, last_access_time, last_modification_time).is_err() {
                    compression_result.message = "Error preserving file dates".to_string();
                    return compression_result;
                }
            }

            compression_result.status = CompressionStatus::Success;
            compression_result.compressed_size = output_file_size;
            compression_result
        })
        .collect()
}
/// Builds libcaesium compression parameters from the parsed CLI arguments.
///
/// The single `--quality` value (default 80 when absent) is applied to every
/// codec; EXIF retention, PNG optimization level and zopfli come straight from
/// the corresponding flags. Resize parameters are only computed when a resize
/// option was requested (`needs_resize`), since that requires probing the
/// input image's real resolution on disk.
fn build_compression_parameters(args: &CommandLineArgs, input_file: &Path, needs_resize: bool) -> CSParameters {
    let mut parameters = CSParameters::new();
    let quality = args.compression.quality.unwrap_or(80) as u32;
    parameters.jpeg.quality = quality;
    parameters.png.quality = quality;
    parameters.webp.quality = quality;
    parameters.gif.quality = quality;
    parameters.keep_metadata = args.exif;
    parameters.png.optimization_level = args.png_opt_level;
    parameters.png.force_zopfli = args.zopfli;
    if needs_resize {
        let mime_type = get_file_mime_type(input_file);
        // NOTE(review): this unwrap panics if the resolution probe fails (e.g.
        // unreadable or corrupt image); the TODO should surface the error to the
        // caller instead of aborting the worker thread.
        build_resize_parameters(args, &mut parameters, input_file, mime_type).unwrap();
        //TODO
    }
    parameters
}
/// Computes the destination directory and file name for one input file.
///
/// The file name is `<stem><suffix>.<ext>`, where the extension is derived
/// from `format` (or kept from the input for `OutputFormat::Original`; omitted
/// entirely when the input has no extension). With `keep_structure`, the
/// input's path relative to `base_directory` is mirrored under
/// `output_directory`.
///
/// Returns `None` when the input has no parent directory, cannot be made
/// absolute, or does not live under `base_directory`.
fn compute_output_full_path(
    output_directory: &Path,
    input_file_path: &Path,
    base_directory: &Path, // &Path instead of &PathBuf: callers coerce transparently
    keep_structure: bool,
    suffix: &str,
    format: OutputFormat,
) -> Option<(PathBuf, OsString)> {
    let extension: OsString = match format {
        OutputFormat::Jpeg => "jpg".into(),
        OutputFormat::Png => "png".into(),
        OutputFormat::Webp => "webp".into(),
        OutputFormat::Tiff => "tiff".into(),
        OutputFormat::Original => input_file_path.extension().unwrap_or_default().to_os_string(),
    };

    let mut output_file_name = input_file_path.file_stem().unwrap_or_default().to_os_string();
    output_file_name.push(suffix);
    if !extension.is_empty() {
        output_file_name.push(".");
        output_file_name.push(extension);
    }

    if keep_structure {
        // Mirror the input's directory layout (relative to base) under the output dir.
        let parent = absolute(input_file_path.parent()?).ok()?;
        let output_path_prefix = parent.strip_prefix(base_directory).ok()?;
        Some((output_directory.join(output_path_prefix), output_file_name))
    } else {
        Some((output_directory.to_path_buf(), output_file_name))
    }
}
/// Fills the resize fields of `parameters` from the CLI resize options.
///
/// Explicit `--width`/`--height` win over edge-based sizing. BUGFIX: width and
/// height were previously `else if` alternatives, so passing both silently
/// ignored `--height`; per the documented CLI semantics ("if height is not set
/// will preserve aspect ratio") both are now honored when both are given.
/// `--long-edge`/`--short-edge` set whichever dimension matches the input's
/// real (EXIF-orientation-aware) resolution.
///
/// # Errors
/// Returns an error when the input image's resolution cannot be read.
fn build_resize_parameters(
    args: &CommandLineArgs,
    parameters: &mut CSParameters,
    input_file_path: &Path,
    mime_type: Option<String>,
) -> Result<(), Box<dyn Error>> {
    let (width, height) = get_real_resolution(input_file_path, mime_type, args.exif)?;
    if args.resize.width.is_some() || args.resize.height.is_some() {
        if let Some(w) = args.resize.width {
            parameters.width = w;
        }
        if let Some(h) = args.resize.height {
            parameters.height = h;
        }
    } else if let Some(long_edge) = args.resize.long_edge {
        if width > height {
            parameters.width = long_edge;
        } else {
            parameters.height = long_edge;
        }
    } else if let Some(short_edge) = args.resize.short_edge {
        if width < height {
            parameters.width = short_edge;
        } else {
            parameters.height = short_edge;
        }
    }
    Ok(())
}
/// Returns the display resolution (width, height) of an image file.
///
/// For JPEGs, when metadata is kept, the EXIF orientation tag is consulted:
/// orientations 5..=8 encode a 90°/270° rotation, so the stored width and
/// height are swapped before being returned. Any failure to read EXIF data
/// falls back to the stored dimensions.
fn get_real_resolution(
    file: &Path,
    mime_type: Option<String>,
    keep_metadata: bool,
) -> Result<(usize, usize), Box<dyn Error>> {
    let resolution = imagesize::size(file)?;
    let mut orientation = 1;

    // Only JPEGs carry an EXIF orientation we honor, and only if metadata is kept.
    let is_jpeg = mime_type.as_deref() == Some("image/jpeg");
    if is_jpeg && keep_metadata {
        let f = File::open(file)?;
        if let Ok(exif_data) = exif::Reader::new().read_from_container(&mut BufReader::new(&f)) {
            if let Some(field) = exif_data.get_field(exif::Tag::Orientation, exif::In::PRIMARY) {
                orientation = field.value.get_uint(0).unwrap_or(1);
            }
        }
    }

    Ok(match orientation {
        // Rotated orientations: width and height are transposed.
        5..=8 => (resolution.height, resolution.width),
        _ => (resolution.width, resolution.height),
    })
}
/// Applies the given access/modification timestamps to the output file.
///
/// `filetime::set_file_times` takes `(path, atime, mtime)` — callers must pass
/// the access time first and the modification time second, matching the
/// parameter names here.
fn preserve_dates(output_file: &PathBuf, input_atime: FileTime, input_mtime: FileTime) -> io::Result<()> {
    set_file_times(output_file, input_atime, input_mtime)
}
/// Maps the CLI output format to the corresponding libcaesium file type.
///
/// `Original` maps to `Unkn` because conversion is never invoked for it (the
/// caller only converts when `args.format != OutputFormat::Original`).
fn map_supported_formats(format: OutputFormat) -> SupportedFileTypes {
    match format {
        OutputFormat::Jpeg => SupportedFileTypes::Jpeg,
        OutputFormat::Png => SupportedFileTypes::Png,
        OutputFormat::Webp => SupportedFileTypes::WebP,
        OutputFormat::Tiff => SupportedFileTypes::Tiff,
        // Exhaustive match (was a `_` catch-all): adding a new OutputFormat
        // variant now forces a compile-time decision here instead of silently
        // falling through to Unkn.
        OutputFormat::Original => SupportedFileTypes::Unkn,
    }
}
/// Reads the entire file at `file_path` into a byte vector.
///
/// Uses `fs::read`, which pre-sizes the buffer from the file's metadata —
/// avoiding the grow-and-copy cycles of a manual open + `read_to_end` into an
/// empty `Vec`.
///
/// # Errors
/// Propagates any I/O error from opening or reading the file.
fn read_file_to_vec(file_path: &PathBuf) -> io::Result<Vec<u8>> {
    fs::read(file_path)
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;

    /// Runs `compute_output_full_path` with the fixed "_suffix" suffix used by
    /// every case below and asserts on the resulting (directory, file name) pair.
    /// Consolidates the eight previously copy-pasted call+assert stanzas.
    fn assert_case(
        output_dir: &str,
        input_file: &str,
        base_dir: &str,
        keep_structure: bool,
        format: OutputFormat,
        expected_dir: &str,
        expected_name: &str,
    ) {
        let result = compute_output_full_path(
            Path::new(output_dir),
            Path::new(input_file),
            &PathBuf::from(base_dir),
            keep_structure,
            "_suffix",
            format,
        )
        .unwrap();
        assert_eq!(result, (Path::new(expected_dir).to_path_buf(), expected_name.into()));
    }

    #[cfg(not(target_os = "windows"))]
    #[test]
    fn test_compute_output_full_path() {
        // Case 1: keep_structure = true — path below base is mirrored under output.
        assert_case(
            "/output", "/base/folder/test.jpg", "/base", true,
            OutputFormat::Original, "/output/folder", "test_suffix.jpg",
        );
        // Case 2: keep_structure = false — flat output directory.
        assert_case(
            "/output", "/base/folder/test.jpg", "/base", false,
            OutputFormat::Original, "/output", "test_suffix.jpg",
        );
        // Case 3: input file without extension — no trailing dot is appended.
        assert_case(
            "/output", "/base/folder/test", "/base", false,
            OutputFormat::Original, "/output", "test_suffix",
        );
        // Case 4: input outside the base directory (irrelevant when not keeping structure).
        assert_case(
            "/output", "/different_base/folder/test.jpg", "/base", false,
            OutputFormat::Original, "/output", "test_suffix.jpg",
        );
        // Cases 5-8: explicit output formats determine the extension.
        assert_case(
            "/output", "/different_base/folder/test.jpg", "/base", false,
            OutputFormat::Jpeg, "/output", "test_suffix.jpg",
        );
        assert_case(
            "/output", "/different_base/folder/test.jpg", "/base", false,
            OutputFormat::Png, "/output", "test_suffix.png",
        );
        assert_case(
            "/output", "/different_base/folder/test.jpg", "/base", false,
            OutputFormat::Webp, "/output", "test_suffix.webp",
        );
        assert_case(
            "/output", "/different_base/folder/test.jpg", "/base", false,
            OutputFormat::Tiff, "/output", "test_suffix.tiff",
        );
    }

    #[cfg(target_os = "windows")]
    #[test]
    fn test_compute_output_full_path() {
        // Case 1: keep_structure = true — path below base is mirrored under output.
        assert_case(
            r"C:\output", r"C:\base\folder\test.jpg", r"C:\base", true,
            OutputFormat::Original, r"C:\output\folder", "test_suffix.jpg",
        );
        // Case 2: keep_structure = false — flat output directory.
        assert_case(
            r"C:\output", r"C:\base\folder\test.jpg", r"C:\base", false,
            OutputFormat::Original, r"C:\output", "test_suffix.jpg",
        );
        // Case 3: input file without extension — no trailing dot is appended.
        assert_case(
            r"C:\output", r"C:\base\folder\test", r"C:\base", false,
            OutputFormat::Original, r"C:\output", "test_suffix",
        );
        // Case 4: input outside the base directory (irrelevant when not keeping structure).
        assert_case(
            r"C:\output", r"C:\different_base\folder\test.jpg", r"C:\base", false,
            OutputFormat::Original, r"C:\output", "test_suffix.jpg",
        );
        // Cases 5-8: explicit output formats determine the extension.
        assert_case(
            r"C:\output", r"C:\different_base\folder\test.jpg", r"C:\base", false,
            OutputFormat::Jpeg, r"C:\output", "test_suffix.jpg",
        );
        assert_case(
            r"C:\output", r"C:\different_base\folder\test.jpg", r"C:\base", false,
            OutputFormat::Png, r"C:\output", "test_suffix.png",
        );
        assert_case(
            r"C:\output", r"C:\different_base\folder\test.jpg", r"C:\base", false,
            OutputFormat::Webp, r"C:\output", "test_suffix.webp",
        );
        assert_case(
            r"C:\output", r"C:\different_base\folder\test.jpg", r"C:\base", false,
            OutputFormat::Tiff, r"C:\output", "test_suffix.tiff",
        );
    }
}

View File

@ -1,25 +1,16 @@
use crate::options::{CommandLineArgs, OutputFormat, OverwritePolicy};
use crate::scan_files::{get_file_mime_type, scan_files};
use caesium::parameters::CSParameters;
use caesium::{compress_in_memory, compress_to_size_in_memory, convert_in_memory, SupportedFileTypes};
use crate::compressor::{perform_compression, CompressionResult};
use crate::options::VerboseLevel::{All, Progress, Quiet};
use crate::options::{CommandLineArgs, VerboseLevel};
use crate::scan_files::scan_files;
use clap::Parser;
use filetime::{set_file_times, FileTime};
use human_bytes::human_bytes;
use indicatif::{ParallelProgressIterator, ProgressBar, ProgressDrawTarget, ProgressStyle};
use rayon::iter::IntoParallelRefIterator;
use rayon::iter::ParallelIterator;
use std::error::Error;
use std::fs::File;
use std::io::{BufReader, Read, Write};
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
use std::num::NonZero;
use std::path::{absolute, Path, PathBuf};
use std::time::Duration;
use std::{fs, io};
use std::ffi::OsString;
mod compressor;
mod options;
mod scan_files;
#[derive(Debug)]
enum CompressionStatus {
Success,
@ -27,205 +18,32 @@ enum CompressionStatus {
Error,
}
struct CompressionResult {
original_path: String,
output_path: String,
original_size: u64,
compressed_size: u64,
status: CompressionStatus,
message: String,
}
fn main() {
let args = CommandLineArgs::parse();
let quiet = args.quiet || args.verbose == 0;
let threads_number = get_parallelism_count(
args.threads,
std::thread::available_parallelism()
.unwrap_or(NonZero::new(1).unwrap())
.get(),
);
let verbose = if quiet { 0 } else { args.verbose };
let needs_resize = args.resize.width.is_some()
|| args.resize.height.is_some()
|| args.resize.long_edge.is_some()
|| args.resize.short_edge.is_some();
let (base_path, input_files) = scan_files(&args.files, args.recursive, quiet);
rayon::ThreadPoolBuilder::new()
.num_threads(threads_number)
.build_global()
.unwrap_or_default();
let quiet = args.quiet || args.verbose == Quiet;
let verbose = if quiet { Quiet } else { args.verbose };
let (base_path, input_files) = scan_files(&args.files, args.recursive, quiet);
let total_files = input_files.len();
let progress_bar = setup_progress_bar(total_files, verbose);
let compression_results: Vec<CompressionResult> = input_files
.par_iter()
.progress_with(progress_bar)
.map(|input_file| {
let mut compression_result = CompressionResult {
original_path: input_file.display().to_string(),
output_path: String::new(),
original_size: 0,
compressed_size: 0,
status: CompressionStatus::Error,
message: String::new(),
};
let input_file_metadata = match input_file.metadata() {
Ok(m) => m,
Err(_) => {
compression_result.message = "Error reading file metadata".to_string();
return compression_result;
}
};
let original_file_size = input_file_metadata.len();
compression_result.original_size = original_file_size;
let output_directory = if args.output_destination.same_folder_as_input {
match input_file.parent() {
Some(p) => p,
None => {
compression_result.message = "Error getting parent directory".to_string();
return compression_result;
}
}
} else {
match args.output_destination.output.as_ref() {
Some(p) => p,
None => {
compression_result.message = "Error getting output directory".to_string();
return compression_result;
}
}
};
let (output_directory, filename) = match compute_output_full_path(
output_directory.to_path_buf(),
input_file.to_path_buf(),
base_path.to_path_buf(),
args.keep_structure,
args.suffix.as_ref().unwrap_or(&String::new()).as_ref(),
) {
Some(p) => p,
None => {
compression_result.message = "Error computing output path".to_string();
return compression_result;
}
};
if !output_directory.exists() {
match fs::create_dir_all(&output_directory) {
Ok(_) => {}
Err(_) => {
compression_result.message = "Error creating output directory".to_string();
return compression_result;
}
}
}
let output_full_path = output_directory.join(filename);
if args.dry_run {
compression_result.status = CompressionStatus::Success;
return compression_result;
};
let mut compression_parameters = build_compression_parameters(&args, input_file, needs_resize);
let input_file_buffer = match read_file_to_vec(input_file) {
Ok(b) => b,
Err(_) => {
compression_result.message = "Error reading input file".to_string();
return compression_result;
}
};
let compression = if args.compression.max_size.is_some() {
compress_to_size_in_memory(
input_file_buffer,
&mut compression_parameters,
args.compression.max_size.unwrap() as usize,
true,
)
} else if args.format != OutputFormat::Original {
convert_in_memory(
input_file_buffer,
&compression_parameters,
map_supported_formats(args.format),
)
} else {
compress_in_memory(input_file_buffer, &compression_parameters)
};
let compressed_image = match compression {
Ok(v) => v,
Err(e) => {
compression_result.message = format!("Error compressing file: {}", e);
return compression_result;
}
};
compression_result.output_path = output_full_path.display().to_string();
let output_file_size = compressed_image.len() as u64;
if output_full_path.exists() {
match args.overwrite {
OverwritePolicy::Never => {
compression_result.status = CompressionStatus::Skipped;
compression_result.compressed_size = original_file_size;
compression_result.message = "File already exists, skipped due overwrite policy".to_string();
return compression_result;
}
OverwritePolicy::Bigger => {
if output_file_size >= original_file_size {
compression_result.status = CompressionStatus::Skipped;
compression_result.compressed_size = original_file_size;
compression_result.message =
"File already exists, skipped due overwrite policy".to_string();
return compression_result;
}
}
_ => {}
}
}
let mut output_file = match File::create(&output_full_path) {
Ok(f) => f,
Err(_) => {
compression_result.message = "Error creating output file".to_string();
return compression_result;
}
};
match output_file.write_all(&compressed_image) {
Ok(_) => {}
Err(_) => {
compression_result.message = "Error writing output file".to_string();
return compression_result;
}
};
if args.keep_dates {
let (last_modification_time, last_access_time) = (
FileTime::from_last_modification_time(&input_file_metadata),
FileTime::from_last_access_time(&input_file_metadata),
);
match preserve_dates(&output_full_path, last_modification_time, last_access_time) {
Ok(_) => {}
Err(_) => {
compression_result.message = "Error preserving file dates".to_string();
return compression_result;
}
}
}
compression_result.status = CompressionStatus::Success;
compression_result.compressed_size = output_file_size;
compression_result
})
.collect();
let compression_results = perform_compression(&input_files, &args, &base_path, progress_bar);
write_recap_message(&compression_results, verbose);
}
fn write_recap_message(compression_results: &[CompressionResult], verbose: u8) {
fn write_recap_message(compression_results: &[CompressionResult], verbose: VerboseLevel) {
let mut total_original_size = 0;
let mut total_compressed_size = 0;
let total_files = compression_results.len();
@ -242,8 +60,8 @@ fn write_recap_message(compression_results: &[CompressionResult], verbose: u8) {
_ => total_success += 1,
}
if verbose > 1 {
if verbose < 3 && matches!(result.status, CompressionStatus::Success) {
if verbose > Progress {
if verbose < All && matches!(result.status, CompressionStatus::Success) {
continue;
}
println!(
@ -266,8 +84,9 @@ fn write_recap_message(compression_results: &[CompressionResult], verbose: u8) {
let total_saved = total_original_size as f64 - total_compressed_size as f64;
let total_saved_percent = total_saved / total_original_size as f64 * 100.0;
if verbose > 0 {
println!("Total files: {}\nSuccess: {}\nSkipped: {}\nErrors: {}\nOriginal size: {}\nCompressed size: {}\nSaved: {} ({:.2}%)",
if verbose > Quiet {
println!(
"Compressed {} files ({} success, {} skipped, {} errors)\n{} -> {} [{} | ({:.2}%])",
total_files,
total_success,
total_skipped,
@ -279,16 +98,6 @@ fn write_recap_message(compression_results: &[CompressionResult], verbose: u8) {
);
}
}
fn map_supported_formats(format: OutputFormat) -> SupportedFileTypes {
match format {
OutputFormat::Jpeg => SupportedFileTypes::Jpeg,
OutputFormat::Png => SupportedFileTypes::Png,
OutputFormat::Webp => SupportedFileTypes::WebP,
OutputFormat::Tiff => SupportedFileTypes::Tiff,
_ => SupportedFileTypes::Unkn,
}
}
fn get_parallelism_count(requested_threads: u32, available_threads: usize) -> usize {
if requested_threads > 0 {
std::cmp::min(available_threads, requested_threads as usize)
@ -297,133 +106,9 @@ fn get_parallelism_count(requested_threads: u32, available_threads: usize) -> us
}
}
fn build_compression_parameters(args: &CommandLineArgs, input_file: &Path, needs_resize: bool) -> CSParameters {
let mut parameters = CSParameters::new();
let quality = args.compression.quality.unwrap_or(80) as u32;
parameters.jpeg.quality = quality;
parameters.png.quality = quality;
parameters.webp.quality = quality;
parameters.gif.quality = quality;
parameters.keep_metadata = args.exif;
parameters.png.optimization_level = args.png_opt_level;
parameters.png.force_zopfli = args.zopfli;
if needs_resize {
let mime_type = get_file_mime_type(input_file);
build_resize_parameters(args, &mut parameters, input_file, mime_type).unwrap();
//TODO
}
parameters
}
/// Fills the resize-related fields of `parameters` from the CLI arguments.
///
/// Exactly one resize strategy is honored, in priority order: explicit width,
/// explicit height, long edge, short edge. Long/short edge are mapped onto
/// width or height based on the image's real (EXIF-orientation-aware)
/// resolution; the codec keeps the aspect ratio for the unset dimension.
///
/// # Errors
/// Returns an error when the input file's resolution cannot be determined.
fn build_resize_parameters(
    args: &CommandLineArgs,
    parameters: &mut CSParameters,
    input_file_path: &Path,
    mime_type: Option<String>,
) -> Result<(), Box<dyn Error>> {
    let (width, height) = get_real_resolution(input_file_path, mime_type, args.exif)?;

    if let Some(w) = args.resize.width {
        parameters.width = w;
    } else if let Some(h) = args.resize.height {
        parameters.height = h;
    } else if let Some(long_edge) = args.resize.long_edge {
        // The long edge constrains whichever dimension is larger.
        if width > height {
            parameters.width = long_edge;
        } else {
            parameters.height = long_edge;
        }
    } else if let Some(short_edge) = args.resize.short_edge {
        // The short edge constrains whichever dimension is smaller.
        if width < height {
            parameters.width = short_edge;
        } else {
            parameters.height = short_edge;
        }
    }

    Ok(())
}
/// Computes where a compressed file should be written.
///
/// The returned pair is `(output directory, file name)`, where the file name
/// is `<stem><suffix>[.<extension>]`. With `keep_structure`, the input file's
/// path relative to `base_directory` is re-rooted under `output_directory`;
/// otherwise all outputs land flat in `output_directory`.
///
/// Returns `None` when the input path has no parent, cannot be made absolute,
/// or does not live under `base_directory`.
fn compute_output_full_path(
    output_directory: PathBuf,
    input_file_path: PathBuf,
    base_directory: PathBuf,
    keep_structure: bool,
    suffix: &str,
) -> Option<(PathBuf, OsString)> {
    // Assemble "<stem><suffix>[.<ext>]"; a missing extension adds no dot.
    let mut output_file_name = input_file_path.file_stem().unwrap_or_default().to_os_string();
    output_file_name.push(suffix);
    let extension = input_file_path.extension().unwrap_or_default();
    if !extension.is_empty() {
        output_file_name.push(".");
        output_file_name.push(extension);
    }

    if !keep_structure {
        return Some((output_directory, output_file_name));
    }

    // Mirror the input's directory layout below the output directory.
    let absolute_parent = absolute(input_file_path.parent()?).ok()?;
    let relative_dir = absolute_parent.strip_prefix(base_directory).ok()?;
    Some((output_directory.join(relative_dir), output_file_name))
}
/// Reads the whole file at `file_path` into memory.
///
/// Delegates to `std::fs::read`, which pre-sizes the buffer from the file's
/// metadata instead of growing it incrementally like a manual `read_to_end`.
///
/// # Errors
/// Returns any I/O error from opening or reading the file.
fn read_file_to_vec(file_path: &PathBuf) -> io::Result<Vec<u8>> {
    std::fs::read(file_path)
}
/// Copies the input file's access and modification timestamps onto the
/// compressed output, so timestamp-based tooling (backups, sync) does not
/// treat the compressed file as newly created. Thin wrapper over
/// `filetime::set_file_times`; propagates its I/O error on failure.
fn preserve_dates(output_file: &PathBuf, input_atime: FileTime, input_mtime: FileTime) -> io::Result<()> {
    set_file_times(output_file, input_atime, input_mtime)
}
/// Returns the display resolution `(width, height)` of an image file,
/// accounting for EXIF orientation when relevant.
///
/// The raw pixel dimensions come from `imagesize`. For JPEGs — and only when
/// metadata is being kept — the EXIF orientation tag is consulted:
/// orientations 5..=8 encode 90°/270° rotations, so width and height are
/// swapped. Any failure to read EXIF data falls back to orientation 1
/// (no swap).
///
/// # Errors
/// Returns an error when the image size cannot be probed or the file cannot
/// be opened for EXIF reading.
fn get_real_resolution(
    file: &Path,
    mime_type: Option<String>,
    keep_metadata: bool,
) -> Result<(usize, usize), Box<dyn Error>> {
    let resolution = imagesize::size(file)?;

    let mut orientation = 1;
    if keep_metadata && mime_type.as_deref() == Some("image/jpeg") {
        let f = File::open(file)?;
        if let Ok(exif_data) = exif::Reader::new().read_from_container(&mut BufReader::new(&f)) {
            if let Some(field) = exif_data.get_field(exif::Tag::Orientation, exif::In::PRIMARY) {
                orientation = field.value.get_uint(0).unwrap_or(1);
            }
        }
    }

    // Orientations 5..=8 rotate the image by a quarter turn: swap the axes.
    let (width, height) = match orientation {
        5..=8 => (resolution.height, resolution.width),
        _ => (resolution.width, resolution.height),
    };
    Ok((width, height))
}
fn setup_progress_bar(len: usize, verbose: u8) -> ProgressBar {
fn setup_progress_bar(len: usize, verbose: VerboseLevel) -> ProgressBar {
let progress_bar = ProgressBar::new(len as u64);
if verbose == 0 {
if verbose == Quiet {
progress_bar.set_draw_target(ProgressDrawTarget::hidden());
} else {
progress_bar.set_draw_target(ProgressDrawTarget::stdout());
@ -441,7 +126,6 @@ fn setup_progress_bar(len: usize, verbose: u8) -> ProgressBar {
#[cfg(test)]
mod tests {
use super::*;
use std::path::Path;
#[test]
fn test_get_parallelism_count() {
@ -463,112 +147,4 @@ mod tests {
let result = get_parallelism_count(0, 0);
assert_eq!(result, 0);
}
#[cfg(not(target_os = "windows"))]
#[test]
fn test_compute_output_full_path() {
    let output_directory = PathBuf::from("/output");
    let base_directory = PathBuf::from("/base");

    // (input path, keep_structure, expected directory, expected file name)
    let cases = [
        // keep_structure = true: folder layout mirrored under /output
        ("/base/folder/test.jpg", true, "/output/folder", "test_suffix.jpg"),
        // keep_structure = false: flat output
        ("/base/folder/test.jpg", false, "/output", "test_suffix.jpg"),
        // input file without extension: no trailing dot
        ("/base/folder/test", false, "/output", "test_suffix"),
        // input outside the base directory still works when flat
        ("/different_base/folder/test.jpg", false, "/output", "test_suffix.jpg"),
    ];

    for (input, keep_structure, expected_dir, expected_name) in cases {
        let result = compute_output_full_path(
            output_directory.clone(),
            PathBuf::from(input),
            base_directory.clone(),
            keep_structure,
            "_suffix",
        )
        .unwrap();
        assert_eq!(result, (Path::new(expected_dir).to_path_buf(), expected_name.into()));
    }
}
#[cfg(target_os = "windows")]
#[test]
fn test_compute_output_full_path() {
    let output_directory = PathBuf::from(r"C:\output");
    let base_directory = PathBuf::from(r"C:\base");

    // (input path, keep_structure, expected directory, expected file name)
    let cases = [
        // keep_structure = true: folder layout mirrored under C:\output
        (r"C:\base\folder\test.jpg", true, r"C:\output\folder", "test_suffix.jpg"),
        // keep_structure = false: flat output
        (r"C:\base\folder\test.jpg", false, r"C:\output", "test_suffix.jpg"),
        // input file without extension: no trailing dot
        (r"C:\base\folder\test", false, r"C:\output", "test_suffix"),
        // input outside the base directory still works when flat
        (r"C:\different_base\folder\test.jpg", false, r"C:\output", "test_suffix.jpg"),
    ];

    for (input, keep_structure, expected_dir, expected_name) in cases {
        let result = compute_output_full_path(
            output_directory.clone(),
            PathBuf::from(input),
            base_directory.clone(),
            keep_structure,
            "_suffix",
        )
        .unwrap();
        assert_eq!(result, (Path::new(expected_dir).to_path_buf(), expected_name.into()));
    }
}
}

View File

@ -20,6 +20,18 @@ pub enum OutputFormat {
Original,
}
/// How much console output the CLI produces. The derived `PartialOrd`/`Ord`
/// make levels comparable (e.g. `verbose > Quiet`), so each level implies
/// everything printed by the levels below it. Exposed as a clap `ValueEnum`,
/// selectable by its lowercased variant name on the command line.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum, Debug)]
pub enum VerboseLevel {
/// Suppress all output
Quiet = 0,
/// Show only progress and final results
Progress = 1,
/// Show also skipped and error messages
WarningsAndErrors = 2,
/// Print all
All = 3
}
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
pub struct CommandLineArgs {
@ -69,7 +81,7 @@ pub struct CommandLineArgs {
pub dry_run: bool,
/// specify the number of parallel jobs (max is the number of processors available)
#[arg(long, default_value = "0")]
#[arg(long, default_value = "1")]
pub threads: u32,
/// overwrite policy
@ -80,9 +92,9 @@ pub struct CommandLineArgs {
#[arg(short = 'Q', long, group = "verbosity")]
pub quiet: bool,
/// select how much output you want to see, 0 is equal to -Q, --quiet
#[arg(long, default_value = "1", group = "verbosity")]
pub verbose: u8,
/// select how much output you want to see
#[arg(long, value_enum, default_value = "progress", group = "verbosity")]
pub verbose: VerboseLevel,
pub files: Vec<String>,
}

View File

@ -44,18 +44,18 @@ pub fn scan_files(args: &[String], recursive: bool, quiet: bool) -> (PathBuf, Ve
for entry in walk_dir.into_iter().filter_map(|e| e.ok()) {
let path = entry.into_path();
if is_valid(&path) {
base_path = canonicalize_and_push(&path, base_path, &mut files);
base_path = make_absolute_and_push(&path, base_path, &mut files);
}
}
} else if is_valid(&input) {
base_path = canonicalize_and_push(&input, base_path, &mut files);
base_path = make_absolute_and_push(&input, base_path, &mut files);
}
}
(base_path, files)
}
fn canonicalize_and_push(path: &Path, mut base_path: PathBuf, files: &mut Vec<PathBuf>) -> PathBuf {
fn make_absolute_and_push(path: &Path, mut base_path: PathBuf, files: &mut Vec<PathBuf>) -> PathBuf {
if let Ok(ap) = absolute(path) {
base_path = compute_base_folder(&base_path, &ap);
files.push(ap);
@ -185,22 +185,6 @@ mod tests {
temp_file.write_all(bytes.as_slice()).unwrap();
assert!(!is_valid(temp_file.path()));
}
//
// #[test]
// fn test_scanfiles() {
// let temp_dir = tempfile::tempdir().unwrap();
// let file_path = temp_dir.path().join("test.jpg");
// let mut file = File::create(&file_path).unwrap();
// file.write_all(b"test").unwrap();
//
// let args = vec![file_path.to_str().unwrap().to_string()];
// let (base_path, files) = scanfiles(args, false);
//
// assert_eq!(files.len(), 1);
// assert_eq!(files[0], file_path);
// assert_eq!(base_path, temp_dir.path().canonicalize().unwrap());
// }
//
#[test]
fn test_compute_base_folder_with_files() {