mirror of https://github.com/crate-ci/typos.git
refactor: Rename module
commit f8d42116da
parent b6aabc9392
4 changed files with 10 additions and 8 deletions
Cargo.lock (generated)
@@ -1,3 +1,5 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
 [[package]]
 name = "aho-corasick"
 version = "0.6.9"
@@ -6,30 +6,30 @@ mod data;
 
 #[bench]
 fn tokenize_empty(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::EMPTY.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::EMPTY.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_no_tokens(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::NO_TOKENS.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::NO_TOKENS.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_single_token(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::SINGLE_TOKEN.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::SINGLE_TOKEN.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_sherlock(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::SHERLOCK.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::SHERLOCK.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_code(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::CODE.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::CODE.as_bytes()).collect::<Vec<_>>());
 }
 
 #[bench]
 fn tokenize_corpus(b: &mut test::Bencher) {
-    b.iter(|| defenestrate::identifier::tokenize(data::CORPUS.as_bytes()).collect::<Vec<_>>());
+    b.iter(|| defenestrate::tokens::tokenize(data::CORPUS.as_bytes()).collect::<Vec<_>>());
 }
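Context the hunk elides: test::Bencher is a nightly-only API, so the bench file also needs the unstable test feature and an extern crate test declaration near its top. A minimal sketch of that scaffolding, reusing the defenestrate crate name and the renamed tokens path from this diff; the literal input below stands in for the data constants, which this commit does not show:

#![feature(test)]
extern crate test;

// Nightly-only microbenchmark mirroring the pattern in the hunk above.
// tokenize is assumed to take &[u8] and return an iterator, as the
// .as_bytes() and .collect::<Vec<_>>() calls in the diff imply.
#[bench]
fn tokenize_hello(b: &mut test::Bencher) {
    b.iter(|| defenestrate::tokens::tokenize(b"Hello World").collect::<Vec<_>>());
}

Such a file runs with cargo +nightly bench.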
@@ -3,8 +3,8 @@ extern crate serde_derive;
 
 mod dict;
 
-pub mod identifier;
 pub mod report;
+pub mod tokens;
 
 pub use crate::dict::*;
 
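This hunk (apparently the crate root) is what actually performs the rename: pub mod identifier becomes pub mod tokens, keeping the module list alphabetical. Downstream code changes only its paths; as a hedged sketch, count_tokens below is a hypothetical helper, not part of the crate:

// Hypothetical caller after this commit; only the module path differs
// from pre-rename code (defenestrate::identifier::tokenize before,
// defenestrate::tokens::tokenize after).
fn count_tokens(buf: &[u8]) -> usize {
    defenestrate::tokens::tokenize(buf).count()
}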
@@ -16,7 +16,7 @@ pub fn process_file(path: &std::path::Path, dictionary: &Dictionary, report: rep
     File::open(path)?.read_to_end(&mut buffer)?;
     for (line_idx, line) in grep_searcher::LineIter::new(b'\n', &buffer).enumerate() {
         let line_num = line_idx + 1;
-        for token in identifier::tokenize(line) {
+        for token in tokens::tokenize(line) {
             // Correct tokens as-is
             if let Some(correction) = dictionary.correct_bytes(token.token) {
                 let word = String::from_utf8_lossy(token.token);
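For orientation, the hunk above sits in a read-split-tokenize loop: process_file reads the file whole into a buffer, grep_searcher::LineIter splits it on b'\n', and each line's tokens are looked up in the dictionary. A self-contained sketch of that shape (check_file is a hypothetical stand-in; the Dictionary lookup and report plumbing from the real function are elided):

use std::io::Read;

// Sketch of the per-line tokenize loop shown in the hunk; instead of
// consulting a Dictionary, it just prints each token's location.
fn check_file(path: &std::path::Path) -> std::io::Result<()> {
    let mut buffer = Vec::new();
    std::fs::File::open(path)?.read_to_end(&mut buffer)?;
    for (line_idx, line) in grep_searcher::LineIter::new(b'\n', &buffer).enumerate() {
        let line_num = line_idx + 1; // LineIter is zero-indexed
        for token in defenestrate::tokens::tokenize(line) {
            // token.token is the raw byte slice, per the hunk above.
            println!(
                "{}:{}: {}",
                path.display(),
                line_num,
                String::from_utf8_lossy(token.token)
            );
        }
    }
    Ok(())
}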