From f8d42116da4dffd9958e79a061c29c68852c876a Mon Sep 17 00:00:00 2001
From: Ed Page
Date: Tue, 16 Apr 2019 20:16:31 -0600
Subject: [PATCH] refactor: Rename module

---
 Cargo.lock                       |  2 ++
 benches/tokenize.rs              | 12 ++++++------
 src/lib.rs                       |  4 ++--
 src/{identifier.rs => tokens.rs} |  0
 4 files changed, 10 insertions(+), 8 deletions(-)
 rename src/{identifier.rs => tokens.rs} (100%)

diff --git a/Cargo.lock b/Cargo.lock
index 5199bfe..bb7c026 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,3 +1,5 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
 [[package]]
 name = "aho-corasick"
 version = "0.6.9"
diff --git a/benches/tokenize.rs b/benches/tokenize.rs
index 300ab46..8317856 100644
--- a/benches/tokenize.rs
+++ b/benches/tokenize.rs
@@ -6,30 +6,30 @@ mod data;
 
 #[bench]
 fn tokenize_empty(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::EMPTY.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::EMPTY.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_no_tokens(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::NO_TOKENS.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::NO_TOKENS.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_single_token(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::SINGLE_TOKEN.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::SINGLE_TOKEN.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_sherlock(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::SHERLOCK.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::SHERLOCK.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_code(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::CODE.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::CODE.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_corpus(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::CORPUS.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::CORPUS.as_bytes()).collect::<Vec<_>>());
 }
diff --git a/src/lib.rs b/src/lib.rs
index 2c43648..edad366 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -3,8 +3,8 @@ extern crate serde_derive;
 
 mod dict;
 
-pub mod identifier;
 pub mod report;
+pub mod tokens;
 
 pub use crate::dict::*;
 
@@ -16,7 +16,7 @@ pub fn process_file(path: &std::path::Path, dictionary: &Dictionary, report: rep
     File::open(path)?.read_to_end(&mut buffer)?;
     for (line_idx, line) in grep_searcher::LineIter::new(b'\n', &buffer).enumerate() {
         let line_num = line_idx + 1;
-        for token in identifier::tokenize(line) {
+        for token in tokens::tokenize(line) {
             // Correct tokens as-is
             if let Some(correction) = dictionary.correct_bytes(token.token) {
                 let word = String::from_utf8_lossy(token.token);
diff --git a/src/identifier.rs b/src/tokens.rs
similarity index 100%
rename from src/identifier.rs
rename to src/tokens.rs
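
Note (not part of the patch): a minimal sketch of what a call site looks like after this rename, based only on the hunks above. It assumes the `tokens::tokenize(&[u8])` call shape and the `token` byte-slice field that `src/lib.rs` uses; any other fields on the yielded items are not shown in this diff.

    // Hypothetical downstream usage after the rename; only the module path changes,
    // `identifier::tokenize` -> `tokens::tokenize`.
    fn print_tokens(line: &[u8]) {
        for token in defenestrate::tokens::tokenize(line) {
            // `token.token` is the raw byte slice, as used by `process_file` above.
            let word = String::from_utf8_lossy(token.token);
            println!("{}", word);
        }
    }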