From 9f198c973dbf8325ebce014ecdabcba474740cce Mon Sep 17 00:00:00 2001
From: Ed Page
Date: Fri, 14 Jun 2019 06:43:21 -0600
Subject: [PATCH] chore: Run cargo fmt

---
 benches/corrections.rs |  1 -
 benches/data.rs        |  1 -
 benches/file.rs        | 48 ++++++++++++++++++++++++++++++++++++------
 benches/tokenize.rs    |  4 +++-
 src/lib.rs             |  7 ++++--
 src/main.rs            | 18 ++++++++++------
 src/report.rs          | 25 +++++++++++++++++-----
 src/tokens.rs          | 11 +++++-----
 8 files changed, 86 insertions(+), 29 deletions(-)

diff --git a/benches/corrections.rs b/benches/corrections.rs
index e77cdef..86b6f98 100644
--- a/benches/corrections.rs
+++ b/benches/corrections.rs
@@ -20,4 +20,3 @@ fn no_correction(b: &mut test::Bencher) {
     assert_eq!(corrections.correct_str("success"), None);
     b.iter(|| corrections.correct_str("success"));
 }
-
diff --git a/benches/data.rs b/benches/data.rs
index 36dfb89..353e9f0 100644
--- a/benches/data.rs
+++ b/benches/data.rs
@@ -29,4 +29,3 @@ fn main() {
 ";

 pub const CORPUS: &str = include_str!("../assets/words.csv");
-
diff --git a/benches/file.rs b/benches/file.rs
index a0fd8a2..a4808e5 100644
--- a/benches/file.rs
+++ b/benches/file.rs
@@ -13,7 +13,13 @@ fn process_empty(b: &mut test::Bencher) {
     sample_path.write_str(data::EMPTY).unwrap();

     let corrections = defenestrate::Dictionary::new();
-    b.iter(|| defenestrate::process_file(sample_path.path(), &corrections, defenestrate::report::print_silent));
+    b.iter(|| {
+        defenestrate::process_file(
+            sample_path.path(),
+            &corrections,
+            defenestrate::report::print_silent,
+        )
+    });

     temp.close().unwrap();
 }
@@ -25,7 +31,13 @@ fn process_no_tokens(b: &mut test::Bencher) {
     sample_path.write_str(data::NO_TOKENS).unwrap();

     let corrections = defenestrate::Dictionary::new();
-    b.iter(|| defenestrate::process_file(sample_path.path(), &corrections, defenestrate::report::print_silent));
+    b.iter(|| {
+        defenestrate::process_file(
+            sample_path.path(),
+            &corrections,
+            defenestrate::report::print_silent,
+        )
+    });

     temp.close().unwrap();
 }
@@ -37,7 +49,13 @@ fn process_single_token(b: &mut test::Bencher) {
     sample_path.write_str(data::SINGLE_TOKEN).unwrap();

     let corrections = defenestrate::Dictionary::new();
-    b.iter(|| defenestrate::process_file(sample_path.path(), &corrections, defenestrate::report::print_silent));
+    b.iter(|| {
+        defenestrate::process_file(
+            sample_path.path(),
+            &corrections,
+            defenestrate::report::print_silent,
+        )
+    });

     temp.close().unwrap();
 }
@@ -49,7 +67,13 @@ fn process_sherlock(b: &mut test::Bencher) {
     sample_path.write_str(data::SHERLOCK).unwrap();

     let corrections = defenestrate::Dictionary::new();
-    b.iter(|| defenestrate::process_file(sample_path.path(), &corrections, defenestrate::report::print_silent));
+    b.iter(|| {
+        defenestrate::process_file(
+            sample_path.path(),
+            &corrections,
+            defenestrate::report::print_silent,
+        )
+    });

     temp.close().unwrap();
 }
@@ -61,7 +85,13 @@ fn process_code(b: &mut test::Bencher) {
     sample_path.write_str(data::CODE).unwrap();

     let corrections = defenestrate::Dictionary::new();
-    b.iter(|| defenestrate::process_file(sample_path.path(), &corrections, defenestrate::report::print_silent));
+    b.iter(|| {
+        defenestrate::process_file(
+            sample_path.path(),
+            &corrections,
+            defenestrate::report::print_silent,
+        )
+    });

     temp.close().unwrap();
 }
@@ -73,7 +103,13 @@ fn process_corpus(b: &mut test::Bencher) {
     sample_path.write_str(data::CORPUS).unwrap();

     let corrections = defenestrate::Dictionary::new();
-    b.iter(|| defenestrate::process_file(sample_path.path(), &corrections, defenestrate::report::print_silent));
+    b.iter(|| {
+        defenestrate::process_file(
+            sample_path.path(),
+            &corrections,
+            defenestrate::report::print_silent,
+        )
+    });

     temp.close().unwrap();
 }
diff --git a/benches/tokenize.rs b/benches/tokenize.rs
index df432df..37cc713 100644
--- a/benches/tokenize.rs
+++ b/benches/tokenize.rs
@@ -16,7 +16,9 @@ fn tokenize_no_tokens(b: &mut test::Bencher) {

 #[bench]
 fn tokenize_single_token(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::tokens::Symbol::parse(data::SINGLE_TOKEN.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| {
+        defenestrate::tokens::Symbol::parse(data::SINGLE_TOKEN.as_bytes()).collect::<Vec<_>>()
+    });
 }

 #[bench]
diff --git a/src/lib.rs b/src/lib.rs
index 45302d1..86156a9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -11,7 +11,11 @@ pub use crate::dict::*;
 use std::fs::File;
 use std::io::Read;

-pub fn process_file(path: &std::path::Path, dictionary: &Dictionary, report: report::Report) -> Result<(), failure::Error> {
+pub fn process_file(
+    path: &std::path::Path,
+    dictionary: &Dictionary,
+    report: report::Report,
+) -> Result<(), failure::Error> {
     let mut buffer = Vec::new();
     File::open(path)?.read_to_end(&mut buffer)?;
     for (line_idx, line) in grep_searcher::LineIter::new(b'\n', &buffer).enumerate() {
@@ -38,4 +42,3 @@ pub fn process_file(path: &std::path::Path, dictionary: &Dictionary, rep

     Ok(())
 }
-
diff --git a/src/main.rs b/src/main.rs
index 1a69753..6a652e9 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -4,7 +4,7 @@ extern crate clap;

 use structopt::StructOpt;

-arg_enum!{
+arg_enum! {
     #[derive(Debug, Copy, Clone, PartialEq, Eq)]
     enum Format {
         Silent,
@@ -37,13 +37,14 @@ struct Options {
     /// Paths to check
     path: Vec<std::path::PathBuf>,

-
-    #[structopt(long = "format",
-                raw(possible_values = "&Format::variants()", case_insensitive = "true"),
-                default_value = "long")]
+    #[structopt(
+        long = "format",
+        raw(possible_values = "&Format::variants()", case_insensitive = "true"),
+        default_value = "long"
+    )]
     pub format: Format,

-    #[structopt(short="j", long="threads", default_value="0")]
+    #[structopt(short = "j", long = "threads", default_value = "0")]
     /// The approximate number of threads to use.
     threads: usize,
 }
@@ -65,7 +66,10 @@ fn run() -> Result<(), failure::Error> {

     let dictionary = defenestrate::Dictionary::new();

-    let first_path = &options.path.get(0).expect("arg parsing enforces at least one");
+    let first_path = &options
+        .path
+        .get(0)
+        .expect("arg parsing enforces at least one");
     let mut walk = ignore::WalkBuilder::new(first_path);
     for path in &options.path[1..] {
         walk.add(path);
diff --git a/src/report.rs b/src/report.rs
index 3281be6..98290ba 100644
--- a/src/report.rs
+++ b/src/report.rs
@@ -13,11 +13,17 @@ pub struct Message<'m> {

 pub type Report = fn(msg: Message);

-pub fn print_silent(_: Message) {
-}
+pub fn print_silent(_: Message) {}

 pub fn print_brief(msg: Message) {
-    println!("{}:{}:{}: {} -> {}", msg.path.display(), msg.line_num, msg.col_num, msg.word, msg.correction);
+    println!(
+        "{}:{}:{}: {} -> {}",
+        msg.path.display(),
+        msg.line_num,
+        msg.col_num,
+        msg.word,
+        msg.correction
+    );
 }

 pub fn print_long(msg: Message) {
@@ -28,9 +34,18 @@ pub fn print_long(msg: Message) {
     let hl: String = itertools::repeat_n("^", msg.word.len()).collect();

     println!("error: `{}` should be `{}`", msg.word, msg.correction);
-    println!("  --> {}:{}:{}", msg.path.display(), msg.line_num, msg.col_num);
+    println!(
+        "  --> {}:{}:{}",
+        msg.path.display(),
+        msg.line_num,
+        msg.col_num
+    );
     println!("{} |", line_indent);
-    println!("{} | {}", msg.line_num, String::from_utf8_lossy(msg.line).trim_end());
+    println!(
+        "{} | {}",
+        msg.line_num,
+        String::from_utf8_lossy(msg.line).trim_end()
+    );
     println!("{} | {}{}", line_indent, hl_indent, hl);
     println!("{} |", line_indent);
 }
diff --git a/src/tokens.rs b/src/tokens.rs
index 412ef39..f3ca3a1 100644
--- a/src/tokens.rs
+++ b/src/tokens.rs
@@ -6,17 +6,16 @@ pub struct Symbol<'t> {

 impl<'t> Symbol<'t> {
     pub fn new(token: &'t [u8], offset: usize) -> Self {
-        Self {
-            token,
-            offset,
-        }
+        Self { token, offset }
     }

-    pub fn parse<'s>(content: &'s [u8]) -> impl Iterator<Item=Symbol<'s>> {
+    pub fn parse<'s>(content: &'s [u8]) -> impl Iterator<Item = Symbol<'s>> {
         lazy_static::lazy_static! {
             static ref SPLIT: regex::bytes::Regex = regex::bytes::Regex::new(r#"\b(\p{Alphabetic}|\d|_)+\b"#).unwrap();
         }
-        SPLIT.find_iter(content).map(|m| Symbol::new(m.as_bytes(), m.start()))
+        SPLIT
+            .find_iter(content)
+            .map(|m| Symbol::new(m.as_bytes(), m.start()))
     }
 }