refactor(typos): Pull out file logic

Ed Page 2020-12-30 19:41:08 -06:00
parent e741f96de3
commit bc90bacff2
15 changed files with 596 additions and 598 deletions
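The diffs below pull the file-oriented checking logic (the Check trait, TyposSettings, the report module, and the check_path walkers) out of the typos library crate and into typos-cli, so callers switch from the typos::checks / typos::report paths to typos_cli::checks / typos_cli::report, while tokenizing and the Dictionary trait stay in typos. A minimal sketch of a call site after the move, assuming a binary that depends on both crates (the sample string and the main wrapper are illustrative; the paths come from the benchmark and main.rs changes below):

use typos_cli::checks::Check;

fn main() -> Result<(), std::io::Error> {
    let dictionary = typos_cli::dict::BuiltIn::new(Default::default());
    let tokenizer = typos::tokens::Tokenizer::new();
    // was: typos::checks::TyposSettings::new().build_typos()
    let checks = typos_cli::checks::TyposSettings::new().build_typos();
    checks.check_str(
        "some text to check",
        &tokenizer,
        &dictionary,
        &typos_cli::report::PrintSilent, // was: typos::report::PrintSilent
    )
}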

Cargo.lock (generated): 144 changed lines

@ -2,9 +2,9 @@
# It is not intended for manual editing.
[[package]]
name = "addr2line"
version = "0.14.0"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c0929d69e78dd9bf5408269919fcbcaeb2e35e5d43e5815517cdc6a8e11a423"
checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7"
dependencies = [
"gimli",
]
@ -17,9 +17,9 @@ checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
[[package]]
name = "ahash"
version = "0.6.1"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "865f8b0b3fced577b7df82e9b0eb7609595d7209c0b39e78d0646672e244b1b1"
checksum = "a75b7e6a93ecd6dbd2c225154d0fa7f86205574ecaa6c87429fb5f66ee677c44"
dependencies = [
"getrandom 0.2.0",
"lazy_static",
@ -46,9 +46,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.34"
version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf8dcb5b4bbaa28653b647d8c77bd4ed40183b48882e130c1f1ffb73de069fd7"
checksum = "ee67c11feeac938fae061b232e38e0b6d94f97a9df10e6271319325ac4c56a86"
[[package]]
name = "arrayvec"
@ -207,12 +207,6 @@ dependencies = [
"unicase",
]
[[package]]
name = "const_fn"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c478836e029dcef17fb47c89023448c64f781a046e0300e257ad8225ae59afab"
[[package]]
name = "content_inspector"
version = "0.2.4"
@ -224,13 +218,12 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
version = "0.8.0"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec91540d98355f690a86367e566ecad2e9e579f230230eb7c21398372be73ea5"
checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
dependencies = [
"autocfg",
"cfg-if 1.0.0",
"const_fn",
"lazy_static",
]
@ -275,9 +268,9 @@ dependencies = [
"fnv",
"ident_case",
"proc-macro2 1.0.24",
"quote 1.0.7",
"quote 1.0.8",
"strsim 0.9.3",
"syn 1.0.50",
"syn 1.0.57",
]
[[package]]
@ -287,8 +280,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72"
dependencies = [
"darling_core",
"quote 1.0.7",
"syn 1.0.50",
"quote 1.0.8",
"syn 1.0.57",
]
[[package]]
@ -312,8 +305,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.7",
"syn 1.0.50",
"quote 1.0.8",
"syn 1.0.57",
]
[[package]]
@ -324,8 +317,8 @@ checksum = "6604612c19dd3bb353650b715b61f09bcb089dd17bdca1a9a42637079bf5e428"
dependencies = [
"darling",
"proc-macro2 1.0.24",
"quote 1.0.7",
"syn 1.0.50",
"quote 1.0.8",
"syn 1.0.57",
]
[[package]]
@ -374,8 +367,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "946ee94e3dbf58fdd324f9ce245c7b238d46a66f00e86a020b71996349e46cce"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.7",
"syn 1.0.50",
"quote 1.0.8",
"syn 1.0.57",
]
[[package]]
@ -421,17 +414,17 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "funty"
version = "1.0.1"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ba62103ce691c2fd80fbae2213dfdda9ce60804973ac6b6e97de818ea7f52c8"
checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7"
[[package]]
name = "getrandom"
version = "0.1.15"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"libc",
"wasi",
]
@ -478,9 +471,9 @@ dependencies = [
[[package]]
name = "heck"
version = "0.3.1"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac"
dependencies = [
"unicode-segmentation",
]
@ -559,9 +552,9 @@ dependencies = [
[[package]]
name = "itoa"
version = "0.4.6"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736"
[[package]]
name = "lazy_static"
@ -584,9 +577,9 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.80"
version = "0.2.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614"
checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
[[package]]
name = "log"
@ -730,9 +723,9 @@ checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
[[package]]
name = "predicates"
version = "1.0.5"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96bfead12e90dccead362d62bb2c90a5f6fc4584963645bc7f71a735e0b0735a"
checksum = "73dd9b7b200044694dfede9edf907c1ca19630908443e9447e624993700c6932"
dependencies = [
"difference",
"float-cmp",
@ -743,15 +736,15 @@ dependencies = [
[[package]]
name = "predicates-core"
version = "1.0.0"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06075c3a3e92559ff8929e7a280684489ea27fe44805174c3ebd9328dcb37178"
checksum = "fb3dbeaaf793584e29c58c7e3a82bbb3c7c06b63cea68d13b0e3cddc124104dc"
[[package]]
name = "predicates-tree"
version = "1.0.0"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e63c4859013b38a76eca2414c64911fba30def9e3202ac461a2d22831220124"
checksum = "aee95d988ee893cb35c06b148c80ed2cd52c8eea927f50ba7a0be1a786aeab73"
dependencies = [
"predicates-core",
"treeline",
@ -771,8 +764,8 @@ checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2 1.0.24",
"quote 1.0.7",
"syn 1.0.50",
"quote 1.0.8",
"syn 1.0.57",
"version_check",
]
@ -783,7 +776,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.7",
"quote 1.0.8",
"version_check",
]
@ -822,9 +815,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.7"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
dependencies = [
"proc-macro2 1.0.24",
]
@ -841,7 +834,7 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom 0.1.15",
"getrandom 0.1.16",
"libc",
"rand_chacha",
"rand_core",
@ -865,7 +858,7 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom 0.1.15",
"getrandom 0.1.16",
]
[[package]]
@ -975,29 +968,29 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "serde"
version = "1.0.117"
version = "1.0.118"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b88fa983de7720629c9387e9f517353ed404164b1e482c970a90c1a4aaf7dc1a"
checksum = "06c64263859d87aa2eb554587e2d23183398d617427327cf2b3d0ed8c69e4800"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.117"
version = "1.0.118"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbd1ae72adb44aab48f325a02444a5fc079349a8d804c1fc922aed3f7454c74e"
checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.7",
"syn 1.0.50",
"quote 1.0.8",
"syn 1.0.57",
]
[[package]]
name = "serde_json"
version = "1.0.59"
version = "1.0.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcac07dbffa1c65e7f816ab9eba78eb142c6d44410f4eeba1e26e4f5dfa56b95"
checksum = "4fceb2595057b6891a4ee808f70054bd2d12f0e97f1cbb78689b59f676df325a"
dependencies = [
"itoa",
"ryu",
@ -1048,8 +1041,8 @@ dependencies = [
"heck",
"proc-macro-error",
"proc-macro2 1.0.24",
"quote 1.0.7",
"syn 1.0.50",
"quote 1.0.8",
"syn 1.0.57",
]
[[package]]
@ -1065,12 +1058,12 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.50"
version = "1.0.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "443b4178719c5a851e1bde36ce12da21d74a0e60b4d982ec3385a933c812f0f6"
checksum = "4211ce9909eb971f111059df92c45640aad50a619cf55cd76476be803c4c68e6"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.7",
"quote 1.0.8",
"unicode-xid 0.2.1",
]
@ -1114,22 +1107,22 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.22"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e9ae34b84616eedaaf1e9dd6026dbe00dcafa92aa0c8077cb69df1fcfe5e53e"
checksum = "76cc616c6abf8c8928e2fdcc0dbfab37175edd8fb49a4641066ad1364fdab146"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.22"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ba20f23e85b10754cd195504aebf6a27e2e6cbe28c17778a0c930724628dd56"
checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
dependencies = [
"proc-macro2 1.0.24",
"quote 1.0.7",
"syn 1.0.50",
"quote 1.0.8",
"syn 1.0.57",
]
[[package]]
@ -1143,9 +1136,9 @@ dependencies = [
[[package]]
name = "toml"
version = "0.5.7"
version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75cf45bb0bef80604d001caaec0d09da99611b3c0fd39d3080468875cdb65645"
checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa"
dependencies = [
"serde",
]
@ -1161,16 +1154,11 @@ name = "typos"
version = "0.3.0"
dependencies = [
"anyhow",
"bstr",
"content_inspector",
"derive_more 0.99.11",
"derive_setters",
"itertools",
"log",
"once_cell",
"regex",
"serde",
"serde_json",
"thiserror",
"unicode-segmentation",
]
@ -1185,21 +1173,27 @@ dependencies = [
"bstr",
"clap",
"clap-verbosity-flag",
"content_inspector",
"derive_more 0.99.11",
"derive_setters",
"difflib",
"env_logger 0.8.2",
"human-panic",
"ignore",
"itertools",
"log",
"phf",
"predicates",
"proc-exit",
"serde",
"serde_json",
"structopt",
"toml",
"typos",
"typos-dict",
"typos-vars",
"unicase",
"unicode-segmentation",
]
[[package]]


@ -50,6 +50,12 @@ ahash = "0.6.1"
difflib = "0.4"
proc-exit = "1.0"
human-panic = "1.0.3"
content_inspector = "0.2.4"
unicode-segmentation = "1.6.0"
derive_more = "0.99.11"
derive_setters = "0.1"
itertools = "0.9"
serde_json = "1.0"
[dev-dependencies]
assert_fs = "1.0"


@ -5,13 +5,13 @@ extern crate test;
mod data;
use assert_fs::prelude::*;
use typos::checks::Check;
use typos_cli::checks::Check;
fn bench_parse_ident_str(data: &str, b: &mut test::Bencher) {
let corrections = typos_cli::dict::BuiltIn::new(Default::default());
let parser = typos::tokens::Tokenizer::new();
let checks = typos::checks::TyposSettings::new().build_identifier_parser();
b.iter(|| checks.check_str(data, &parser, &corrections, &typos::report::PrintSilent));
let checks = typos_cli::checks::TyposSettings::new().build_identifier_parser();
b.iter(|| checks.check_str(data, &parser, &corrections, &typos_cli::report::PrintSilent));
}
#[bench]
@ -47,13 +47,13 @@ fn parse_idents_corpus_str(b: &mut test::Bencher) {
fn bench_parse_ident_bytes(data: &str, b: &mut test::Bencher) {
let corrections = typos_cli::dict::BuiltIn::new(Default::default());
let parser = typos::tokens::Tokenizer::new();
let checks = typos::checks::TyposSettings::new().build_identifier_parser();
let checks = typos_cli::checks::TyposSettings::new().build_identifier_parser();
b.iter(|| {
checks.check_bytes(
data.as_bytes(),
&parser,
&corrections,
&typos::report::PrintSilent,
&typos_cli::report::PrintSilent,
)
});
}
@ -91,8 +91,8 @@ fn parse_idents_corpus_bytes(b: &mut test::Bencher) {
fn bench_parse_word_str(data: &str, b: &mut test::Bencher) {
let corrections = typos_cli::dict::BuiltIn::new(Default::default());
let parser = typos::tokens::Tokenizer::new();
let checks = typos::checks::TyposSettings::new().build_word_parser();
b.iter(|| checks.check_str(data, &parser, &corrections, &typos::report::PrintSilent));
let checks = typos_cli::checks::TyposSettings::new().build_word_parser();
b.iter(|| checks.check_str(data, &parser, &corrections, &typos_cli::report::PrintSilent));
}
#[bench]
@ -128,8 +128,8 @@ fn parse_words_corpus(b: &mut test::Bencher) {
fn bench_typos(data: &str, b: &mut test::Bencher) {
let corrections = typos_cli::dict::BuiltIn::new(Default::default());
let parser = typos::tokens::Tokenizer::new();
let checks = typos::checks::TyposSettings::new().build_typos();
b.iter(|| checks.check_str(data, &parser, &corrections, &typos::report::PrintSilent));
let checks = typos_cli::checks::TyposSettings::new().build_typos();
b.iter(|| checks.check_str(data, &parser, &corrections, &typos_cli::report::PrintSilent));
}
#[bench]
@ -169,14 +169,14 @@ fn bench_check_file(data: &str, b: &mut test::Bencher) {
let corrections = typos_cli::dict::BuiltIn::new(Default::default());
let parser = typos::tokens::Tokenizer::new();
let checks = typos::checks::TyposSettings::new().build_typos();
let checks = typos_cli::checks::TyposSettings::new().build_typos();
b.iter(|| {
checks.check_file(
sample_path.path(),
true,
&parser,
&corrections,
&typos::report::PrintSilent,
&typos_cli::report::PrintSilent,
)
});


@ -20,11 +20,6 @@ thiserror = "1.0"
regex = "1.3"
once_cell = "1.2.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
itertools = "0.9"
bstr = "0.2"
log = "0.4"
unicode-segmentation = "1.7.1"
derive_more = "0.99.11"
derive_setters = "0.1"
content_inspector = "0.2.4"


@ -1,447 +0,0 @@
use bstr::ByteSlice;
use crate::report;
use crate::tokens;
use crate::Dictionary;
pub trait Check: Send + Sync {
fn check_str(
&self,
buffer: &str,
parser: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error>;
fn check_bytes(
&self,
buffer: &[u8],
parser: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error>;
fn check_filenames(&self) -> bool;
fn check_files(&self) -> bool;
fn binary(&self) -> bool;
fn check_filename(
&self,
path: &std::path::Path,
parser: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
if !self.check_filenames() {
return Ok(());
}
if let Some(file_name) = path.file_name().and_then(|s| s.to_str()) {
let context_reporter = ReportContext {
reporter,
context: report::PathContext { path }.into(),
};
self.check_str(file_name, parser, dictionary, &context_reporter)?;
}
Ok(())
}
fn check_file(
&self,
path: &std::path::Path,
explicit: bool,
parser: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
if !self.check_files() {
return Ok(());
}
let buffer = read_file(path, reporter)?;
let (buffer, content_type) = massage_data(buffer)?;
if !explicit && !self.binary() && content_type.is_binary() {
let msg = report::BinaryFile { path };
reporter.report(msg.into())?;
return Ok(());
}
for (line_idx, line) in buffer.lines().enumerate() {
let line_num = line_idx + 1;
let context_reporter = ReportContext {
reporter,
context: report::FileContext { path, line_num }.into(),
};
self.check_bytes(line, parser, dictionary, &context_reporter)?;
}
Ok(())
}
}
struct ReportContext<'m, 'r> {
reporter: &'r dyn report::Report,
context: report::Context<'m>,
}
impl<'m, 'r> report::Report for ReportContext<'m, 'r> {
fn report(&self, msg: report::Message) -> Result<(), std::io::Error> {
let msg = msg.context(Some(self.context.clone()));
self.reporter.report(msg)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TyposSettings {
check_filenames: bool,
check_files: bool,
binary: bool,
}
impl TyposSettings {
pub fn new() -> Self {
Default::default()
}
pub fn check_filenames(&mut self, yes: bool) -> &mut Self {
self.check_filenames = yes;
self
}
pub fn check_files(&mut self, yes: bool) -> &mut Self {
self.check_files = yes;
self
}
pub fn binary(&mut self, yes: bool) -> &mut Self {
self.binary = yes;
self
}
pub fn build_typos(&self) -> Typos {
Typos {
check_filenames: self.check_filenames,
check_files: self.check_files,
binary: self.binary,
}
}
pub fn build_identifier_parser(&self) -> ParseIdentifiers {
ParseIdentifiers {
check_filenames: self.check_filenames,
check_files: self.check_files,
binary: self.binary,
}
}
pub fn build_word_parser(&self) -> ParseWords {
ParseWords {
check_filenames: self.check_filenames,
check_files: self.check_files,
binary: self.binary,
}
}
pub fn build_files(&self) -> Files {
Files {}
}
}
impl Default for TyposSettings {
fn default() -> Self {
Self {
check_filenames: true,
check_files: true,
binary: false,
}
}
}
#[derive(Debug, Clone)]
pub struct Typos {
check_filenames: bool,
check_files: bool,
binary: bool,
}
impl Check for Typos {
fn check_str(
&self,
buffer: &str,
tokenizer: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = crate::ParserBuilder::new()
.tokenizer(tokenizer)
.dictionary(dictionary)
.typos();
for typo in parser.parse_str(buffer) {
let msg = report::Typo {
context: None,
buffer: std::borrow::Cow::Borrowed(buffer.as_bytes()),
byte_offset: typo.byte_offset,
typo: typo.typo,
corrections: typo.corrections,
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_bytes(
&self,
buffer: &[u8],
tokenizer: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = crate::ParserBuilder::new()
.tokenizer(tokenizer)
.dictionary(dictionary)
.typos();
for typo in parser.parse_bytes(buffer) {
let msg = report::Typo {
context: None,
buffer: std::borrow::Cow::Borrowed(buffer.as_bytes()),
byte_offset: typo.byte_offset,
typo: typo.typo,
corrections: typo.corrections,
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_filenames(&self) -> bool {
self.check_filenames
}
fn check_files(&self) -> bool {
self.check_files
}
fn binary(&self) -> bool {
self.binary
}
}
#[derive(Debug, Clone)]
pub struct ParseIdentifiers {
check_filenames: bool,
check_files: bool,
binary: bool,
}
impl Check for ParseIdentifiers {
fn check_str(
&self,
buffer: &str,
tokenizer: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = crate::ParserBuilder::new()
.tokenizer(tokenizer)
.identifiers();
for word in parser.parse_str(buffer) {
let msg = report::Parse {
context: None,
kind: report::ParseKind::Word,
data: word.token(),
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_bytes(
&self,
buffer: &[u8],
tokenizer: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = crate::ParserBuilder::new()
.tokenizer(tokenizer)
.identifiers();
for word in parser.parse_bytes(buffer) {
let msg = report::Parse {
context: None,
kind: report::ParseKind::Word,
data: word.token(),
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_filenames(&self) -> bool {
self.check_filenames
}
fn check_files(&self) -> bool {
self.check_files
}
fn binary(&self) -> bool {
self.binary
}
}
#[derive(Debug, Clone)]
pub struct ParseWords {
check_filenames: bool,
check_files: bool,
binary: bool,
}
impl Check for ParseWords {
fn check_str(
&self,
buffer: &str,
tokenizer: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let word_parser = crate::ParserBuilder::new().tokenizer(tokenizer).words();
for word in word_parser.parse_str(buffer) {
let msg = report::Parse {
context: None,
kind: report::ParseKind::Word,
data: word.token(),
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_bytes(
&self,
buffer: &[u8],
tokenizer: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = crate::ParserBuilder::new().tokenizer(tokenizer).words();
for word in parser.parse_bytes(buffer) {
let msg = report::Parse {
context: None,
kind: report::ParseKind::Word,
data: word.token(),
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_filenames(&self) -> bool {
self.check_filenames
}
fn check_files(&self) -> bool {
self.check_files
}
fn binary(&self) -> bool {
self.binary
}
}
#[derive(Debug, Clone)]
pub struct Files {}
impl Check for Files {
fn check_str(
&self,
_buffer: &str,
_parser: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
_reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
Ok(())
}
fn check_bytes(
&self,
_buffer: &[u8],
_parser: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
_reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
Ok(())
}
fn check_filenames(&self) -> bool {
true
}
fn check_files(&self) -> bool {
true
}
fn binary(&self) -> bool {
true
}
fn check_filename(
&self,
_path: &std::path::Path,
_parser: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
_reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
Ok(())
}
fn check_file(
&self,
path: &std::path::Path,
_explicit: bool,
_parser: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let msg = report::File::new(path);
reporter.report(msg.into())?;
Ok(())
}
}
fn read_file(
path: &std::path::Path,
reporter: &dyn report::Report,
) -> Result<Vec<u8>, std::io::Error> {
let buffer = match std::fs::read(path) {
Ok(buffer) => buffer,
Err(err) => {
let msg = report::Error::new(err.to_string());
reporter.report(msg.into())?;
Vec::new()
}
};
Ok(buffer)
}
fn massage_data(
buffer: Vec<u8>,
) -> Result<(Vec<u8>, content_inspector::ContentType), std::io::Error> {
let mut content_type = content_inspector::inspect(&buffer);
// HACK: We only support UTF-8 at the moment
if content_type != content_inspector::ContentType::UTF_8_BOM
&& content_type != content_inspector::ContentType::UTF_8
{
content_type = content_inspector::ContentType::BINARY;
}
Ok((buffer, content_type))
}


@ -1,6 +1,6 @@
use std::borrow::Cow;
#[derive(Clone, PartialEq, Eq, Debug, serde::Serialize, derive_more::From)]
#[derive(Clone, PartialEq, Eq, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[serde(untagged)]
pub enum Status<'c> {


@ -1,8 +1,6 @@
mod dict;
mod parser;
pub mod checks;
pub mod report;
pub mod tokens;
pub use dict::*;


@ -115,7 +115,7 @@ impl<'p, 'd> TyposParser<'p, 'd> {
}
}
#[derive(Clone, Debug, derive_setters::Setters)]
#[derive(Clone, Debug)]
#[non_exhaustive]
pub struct Typo<'m> {
pub byte_offset: usize,


@ -12,13 +12,13 @@ arg_enum! {
}
}
pub const PRINT_SILENT: typos::report::PrintSilent = typos::report::PrintSilent;
pub const PRINT_BRIEF: typos::report::PrintBrief = typos::report::PrintBrief;
pub const PRINT_LONG: typos::report::PrintLong = typos::report::PrintLong;
pub const PRINT_JSON: typos::report::PrintJson = typos::report::PrintJson;
pub const PRINT_SILENT: typos_cli::report::PrintSilent = typos_cli::report::PrintSilent;
pub const PRINT_BRIEF: typos_cli::report::PrintBrief = typos_cli::report::PrintBrief;
pub const PRINT_LONG: typos_cli::report::PrintLong = typos_cli::report::PrintLong;
pub const PRINT_JSON: typos_cli::report::PrintJson = typos_cli::report::PrintJson;
impl Format {
pub(crate) fn reporter(self) -> &'static dyn typos::report::Report {
pub(crate) fn reporter(self) -> &'static dyn typos_cli::report::Report {
match self {
Format::Silent => &PRINT_SILENT,
Format::Brief => &PRINT_BRIEF,


@ -1,9 +1,456 @@
pub(crate) fn check_path(
use bstr::ByteSlice;
use crate::report;
use typos::tokens;
use typos::Dictionary;
pub trait Check: Send + Sync {
fn check_str(
&self,
buffer: &str,
parser: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error>;
fn check_bytes(
&self,
buffer: &[u8],
parser: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error>;
fn check_filenames(&self) -> bool;
fn check_files(&self) -> bool;
fn binary(&self) -> bool;
fn check_filename(
&self,
path: &std::path::Path,
parser: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
if !self.check_filenames() {
return Ok(());
}
if let Some(file_name) = path.file_name().and_then(|s| s.to_str()) {
let context_reporter = ReportContext {
reporter,
context: report::PathContext { path }.into(),
};
self.check_str(file_name, parser, dictionary, &context_reporter)?;
}
Ok(())
}
fn check_file(
&self,
path: &std::path::Path,
explicit: bool,
parser: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
if !self.check_files() {
return Ok(());
}
let buffer = read_file(path, reporter)?;
let (buffer, content_type) = massage_data(buffer)?;
if !explicit && !self.binary() && content_type.is_binary() {
let msg = report::BinaryFile { path };
reporter.report(msg.into())?;
return Ok(());
}
for (line_idx, line) in buffer.lines().enumerate() {
let line_num = line_idx + 1;
let context_reporter = ReportContext {
reporter,
context: report::FileContext { path, line_num }.into(),
};
self.check_bytes(line, parser, dictionary, &context_reporter)?;
}
Ok(())
}
}
struct ReportContext<'m, 'r> {
reporter: &'r dyn report::Report,
context: report::Context<'m>,
}
impl<'m, 'r> report::Report for ReportContext<'m, 'r> {
fn report(&self, msg: report::Message) -> Result<(), std::io::Error> {
let msg = msg.context(Some(self.context.clone()));
self.reporter.report(msg)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TyposSettings {
check_filenames: bool,
check_files: bool,
binary: bool,
}
impl TyposSettings {
pub fn new() -> Self {
Default::default()
}
pub fn check_filenames(&mut self, yes: bool) -> &mut Self {
self.check_filenames = yes;
self
}
pub fn check_files(&mut self, yes: bool) -> &mut Self {
self.check_files = yes;
self
}
pub fn binary(&mut self, yes: bool) -> &mut Self {
self.binary = yes;
self
}
pub fn build_typos(&self) -> Typos {
Typos {
check_filenames: self.check_filenames,
check_files: self.check_files,
binary: self.binary,
}
}
pub fn build_identifier_parser(&self) -> ParseIdentifiers {
ParseIdentifiers {
check_filenames: self.check_filenames,
check_files: self.check_files,
binary: self.binary,
}
}
pub fn build_word_parser(&self) -> ParseWords {
ParseWords {
check_filenames: self.check_filenames,
check_files: self.check_files,
binary: self.binary,
}
}
pub fn build_files(&self) -> Files {
Files {}
}
}
impl Default for TyposSettings {
fn default() -> Self {
Self {
check_filenames: true,
check_files: true,
binary: false,
}
}
}
#[derive(Debug, Clone)]
pub struct Typos {
check_filenames: bool,
check_files: bool,
binary: bool,
}
impl Check for Typos {
fn check_str(
&self,
buffer: &str,
tokenizer: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = typos::ParserBuilder::new()
.tokenizer(tokenizer)
.dictionary(dictionary)
.typos();
for typo in parser.parse_str(buffer) {
let msg = report::Typo {
context: None,
buffer: std::borrow::Cow::Borrowed(buffer.as_bytes()),
byte_offset: typo.byte_offset,
typo: typo.typo,
corrections: typo.corrections,
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_bytes(
&self,
buffer: &[u8],
tokenizer: &tokens::Tokenizer,
dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = typos::ParserBuilder::new()
.tokenizer(tokenizer)
.dictionary(dictionary)
.typos();
for typo in parser.parse_bytes(buffer) {
let msg = report::Typo {
context: None,
buffer: std::borrow::Cow::Borrowed(buffer.as_bytes()),
byte_offset: typo.byte_offset,
typo: typo.typo,
corrections: typo.corrections,
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_filenames(&self) -> bool {
self.check_filenames
}
fn check_files(&self) -> bool {
self.check_files
}
fn binary(&self) -> bool {
self.binary
}
}
#[derive(Debug, Clone)]
pub struct ParseIdentifiers {
check_filenames: bool,
check_files: bool,
binary: bool,
}
impl Check for ParseIdentifiers {
fn check_str(
&self,
buffer: &str,
tokenizer: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = typos::ParserBuilder::new()
.tokenizer(tokenizer)
.identifiers();
for word in parser.parse_str(buffer) {
let msg = report::Parse {
context: None,
kind: report::ParseKind::Word,
data: word.token(),
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_bytes(
&self,
buffer: &[u8],
tokenizer: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = typos::ParserBuilder::new()
.tokenizer(tokenizer)
.identifiers();
for word in parser.parse_bytes(buffer) {
let msg = report::Parse {
context: None,
kind: report::ParseKind::Word,
data: word.token(),
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_filenames(&self) -> bool {
self.check_filenames
}
fn check_files(&self) -> bool {
self.check_files
}
fn binary(&self) -> bool {
self.binary
}
}
#[derive(Debug, Clone)]
pub struct ParseWords {
check_filenames: bool,
check_files: bool,
binary: bool,
}
impl Check for ParseWords {
fn check_str(
&self,
buffer: &str,
tokenizer: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let word_parser = typos::ParserBuilder::new().tokenizer(tokenizer).words();
for word in word_parser.parse_str(buffer) {
let msg = report::Parse {
context: None,
kind: report::ParseKind::Word,
data: word.token(),
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_bytes(
&self,
buffer: &[u8],
tokenizer: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let parser = typos::ParserBuilder::new().tokenizer(tokenizer).words();
for word in parser.parse_bytes(buffer) {
let msg = report::Parse {
context: None,
kind: report::ParseKind::Word,
data: word.token(),
};
reporter.report(msg.into())?;
}
Ok(())
}
fn check_filenames(&self) -> bool {
self.check_filenames
}
fn check_files(&self) -> bool {
self.check_files
}
fn binary(&self) -> bool {
self.binary
}
}
#[derive(Debug, Clone)]
pub struct Files {}
impl Check for Files {
fn check_str(
&self,
_buffer: &str,
_parser: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
_reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
Ok(())
}
fn check_bytes(
&self,
_buffer: &[u8],
_parser: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
_reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
Ok(())
}
fn check_filenames(&self) -> bool {
true
}
fn check_files(&self) -> bool {
true
}
fn binary(&self) -> bool {
true
}
fn check_filename(
&self,
_path: &std::path::Path,
_parser: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
_reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
Ok(())
}
fn check_file(
&self,
path: &std::path::Path,
_explicit: bool,
_parser: &tokens::Tokenizer,
_dictionary: &dyn Dictionary,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let msg = report::File::new(path);
reporter.report(msg.into())?;
Ok(())
}
}
fn read_file(
path: &std::path::Path,
reporter: &dyn report::Report,
) -> Result<Vec<u8>, std::io::Error> {
let buffer = match std::fs::read(path) {
Ok(buffer) => buffer,
Err(err) => {
let msg = report::Error::new(err.to_string());
reporter.report(msg.into())?;
Vec::new()
}
};
Ok(buffer)
}
fn massage_data(
buffer: Vec<u8>,
) -> Result<(Vec<u8>, content_inspector::ContentType), std::io::Error> {
let mut content_type = content_inspector::inspect(&buffer);
// HACK: We only support UTF-8 at the moment
if content_type != content_inspector::ContentType::UTF_8_BOM
&& content_type != content_inspector::ContentType::UTF_8
{
content_type = content_inspector::ContentType::BINARY;
}
Ok((buffer, content_type))
}
pub fn check_path(
walk: ignore::Walk,
checks: &dyn typos::checks::Check,
checks: &dyn Check,
parser: &typos::tokens::Tokenizer,
dictionary: &dyn typos::Dictionary,
reporter: &dyn typos::report::Report,
reporter: &dyn report::Report,
) -> Result<(), ignore::Error> {
for entry in walk {
check_entry(entry, checks, parser, dictionary, reporter)?;
@ -11,12 +458,12 @@ pub(crate) fn check_path(
Ok(())
}
pub(crate) fn check_path_parallel(
pub fn check_path_parallel(
walk: ignore::WalkParallel,
checks: &dyn typos::checks::Check,
checks: &dyn Check,
parser: &typos::tokens::Tokenizer,
dictionary: &dyn typos::Dictionary,
reporter: &dyn typos::report::Report,
reporter: &dyn report::Report,
) -> Result<(), ignore::Error> {
let error: std::sync::Mutex<Result<(), ignore::Error>> = std::sync::Mutex::new(Ok(()));
walk.run(|| {
@ -36,10 +483,10 @@ pub(crate) fn check_path_parallel(
fn check_entry(
entry: Result<ignore::DirEntry, ignore::Error>,
checks: &dyn typos::checks::Check,
checks: &dyn Check,
parser: &typos::tokens::Tokenizer,
dictionary: &dyn typos::Dictionary,
reporter: &dyn typos::report::Report,
reporter: &dyn report::Report,
) -> Result<(), ignore::Error> {
let entry = entry?;
if entry.file_type().map(|t| t.is_file()).unwrap_or(true) {


@ -4,12 +4,12 @@ use std::sync;
use bstr::ByteSlice;
pub struct Diff<'r> {
reporter: &'r dyn typos::report::Report,
reporter: &'r dyn crate::report::Report,
deferred: sync::Mutex<crate::replace::Deferred>,
}
impl<'r> Diff<'r> {
pub(crate) fn new(reporter: &'r dyn typos::report::Report) -> Self {
pub fn new(reporter: &'r dyn crate::report::Report) -> Self {
Self {
reporter,
deferred: sync::Mutex::new(crate::replace::Deferred::default()),
@ -56,10 +56,10 @@ impl<'r> Diff<'r> {
}
}
impl<'r> typos::report::Report for Diff<'r> {
fn report(&self, msg: typos::report::Message<'_>) -> Result<(), std::io::Error> {
impl<'r> crate::report::Report for Diff<'r> {
fn report(&self, msg: crate::report::Message<'_>) -> Result<(), std::io::Error> {
let typo = match &msg {
typos::report::Message::Typo(typo) => typo,
crate::report::Message::Typo(typo) => typo,
_ => return self.reporter.report(msg),
};
@ -69,7 +69,7 @@ impl<'r> typos::report::Report for Diff<'r> {
};
match &typo.context {
Some(typos::report::Context::File(file)) => {
Some(crate::report::Context::File(file)) => {
let path = file.path.to_owned();
let line_num = file.line_num;
let correction = crate::replace::Correction::new(


@ -1,2 +1,6 @@
pub mod checks;
pub mod config;
pub mod dict;
pub mod diff;
pub mod replace;
pub mod report;


@ -7,11 +7,12 @@ use std::io::Write;
use structopt::StructOpt;
mod args;
mod checks;
mod config;
mod dict;
mod diff;
mod replace;
use typos_cli::checks;
use typos_cli::config;
use typos_cli::dict;
use typos_cli::diff;
use typos_cli::replace;
use typos_cli::report;
use proc_exit::WithCodeResultExt;
@ -74,7 +75,7 @@ fn run() -> proc_exit::ExitResult {
dictionary.identifiers(config.default.extend_identifiers());
dictionary.words(config.default.extend_words());
let mut settings = typos::checks::TyposSettings::new();
let mut settings = checks::TyposSettings::new();
settings
.check_filenames(config.default.check_filename())
.check_files(config.default.check_file())
@ -98,8 +99,8 @@ fn run() -> proc_exit::ExitResult {
} else {
args.format.reporter()
};
let status_reporter = typos::report::MessageStatus::new(output_reporter);
let mut reporter: &dyn typos::report::Report = &status_reporter;
let status_reporter = report::MessageStatus::new(output_reporter);
let mut reporter: &dyn report::Report = &status_reporter;
let replace_reporter = replace::Replace::new(reporter);
let diff_reporter = diff::Diff::new(reporter);
if args.diff {
@ -109,7 +110,7 @@ fn run() -> proc_exit::ExitResult {
}
let (files, identifier_parser, word_parser, checks);
let selected_checks: &dyn typos::checks::Check = if args.files {
let selected_checks: &dyn checks::Check = if args.files {
files = settings.build_files();
&files
} else if args.identifiers {


@ -6,12 +6,12 @@ use std::sync;
use bstr::ByteSlice;
pub struct Replace<'r> {
reporter: &'r dyn typos::report::Report,
reporter: &'r dyn crate::report::Report,
deferred: sync::Mutex<Deferred>,
}
impl<'r> Replace<'r> {
pub(crate) fn new(reporter: &'r dyn typos::report::Report) -> Self {
pub fn new(reporter: &'r dyn crate::report::Report) -> Self {
Self {
reporter,
deferred: sync::Mutex::new(Deferred::default()),
@ -54,10 +54,10 @@ impl<'r> Replace<'r> {
}
}
impl<'r> typos::report::Report for Replace<'r> {
fn report(&self, msg: typos::report::Message<'_>) -> Result<(), std::io::Error> {
impl<'r> crate::report::Report for Replace<'r> {
fn report(&self, msg: crate::report::Message<'_>) -> Result<(), std::io::Error> {
let typo = match &msg {
typos::report::Message::Typo(typo) => typo,
crate::report::Message::Typo(typo) => typo,
_ => return self.reporter.report(msg),
};
@ -67,7 +67,7 @@ impl<'r> typos::report::Report for Replace<'r> {
};
match &typo.context {
Some(typos::report::Context::File(file)) => {
Some(crate::report::Context::File(file)) => {
let path = file.path.to_owned();
let line_num = file.line_num;
let correction =
@ -82,7 +82,7 @@ impl<'r> typos::report::Report for Replace<'r> {
content.push(correction);
Ok(())
}
Some(typos::report::Context::Path(path)) => {
Some(crate::report::Context::Path(path)) => {
let path = path.path.to_owned();
let correction =
Correction::new(typo.byte_offset, typo.typo, corrections[0].as_ref());
@ -97,20 +97,20 @@ impl<'r> typos::report::Report for Replace<'r> {
}
#[derive(Clone, Debug, Default)]
pub(crate) struct Deferred {
pub(crate) content: BTreeMap<path::PathBuf, BTreeMap<usize, Vec<Correction>>>,
pub(crate) paths: BTreeMap<path::PathBuf, Vec<Correction>>,
pub struct Deferred {
pub content: BTreeMap<path::PathBuf, BTreeMap<usize, Vec<Correction>>>,
pub paths: BTreeMap<path::PathBuf, Vec<Correction>>,
}
#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub(crate) struct Correction {
pub struct Correction {
pub byte_offset: usize,
pub typo: Vec<u8>,
pub correction: Vec<u8>,
}
impl Correction {
pub(crate) fn new(byte_offset: usize, typo: &str, correction: &str) -> Self {
pub fn new(byte_offset: usize, typo: &str, correction: &str) -> Self {
Self {
byte_offset,
typo: typo.as_bytes().to_vec(),
@ -119,7 +119,7 @@ impl Correction {
}
}
pub(crate) fn correct(mut line: Vec<u8>, corrections: &[Correction]) -> Vec<u8> {
pub fn correct(mut line: Vec<u8>, corrections: &[Correction]) -> Vec<u8> {
let mut corrections: Vec<_> = corrections.iter().collect();
corrections.sort_unstable();
corrections.reverse();
@ -137,8 +137,8 @@ pub(crate) fn correct(mut line: Vec<u8>, corrections: &[Correction]) -> Vec<u8>
mod test {
use super::*;
use crate::report::Report;
use assert_fs::prelude::*;
use typos::report::Report;
fn simple_correct(line: &str, corrections: Vec<(usize, &str, &str)>) -> String {
let line = line.as_bytes().to_vec();
@ -205,13 +205,13 @@ mod test {
let input_file = temp.child("foo.txt");
input_file.write_str("1 foo 2\n3 4 5").unwrap();
let primary = typos::report::PrintSilent;
let primary = crate::report::PrintSilent;
let replace = Replace::new(&primary);
replace
.report(
typos::report::Typo::default()
crate::report::Typo::default()
.context(Some(
typos::report::FileContext::default()
crate::report::FileContext::default()
.path(input_file.path())
.line_num(1)
.into(),
@ -236,13 +236,13 @@ mod test {
let input_file = temp.child("foo.txt");
input_file.write_str("foo foo foo").unwrap();
let primary = typos::report::PrintSilent;
let primary = crate::report::PrintSilent;
let replace = Replace::new(&primary);
replace
.report(
typos::report::Typo::default()
crate::report::Typo::default()
.context(Some(
typos::report::PathContext::default()
crate::report::PathContext::default()
.path(input_file.path())
.into(),
))


@ -72,7 +72,7 @@ pub struct Typo<'m> {
pub buffer: Cow<'m, [u8]>,
pub byte_offset: usize,
pub typo: &'m str,
pub corrections: crate::Status<'m>,
pub corrections: typos::Status<'m>,
}
impl<'m> Default for Typo<'m> {
@ -82,7 +82,7 @@ impl<'m> Default for Typo<'m> {
buffer: Cow::Borrowed(&[]),
byte_offset: 0,
typo: "",
corrections: crate::Status::Invalid,
corrections: typos::Status::Invalid,
}
}
}
@ -308,8 +308,8 @@ fn print_brief_correction(msg: &Typo) -> Result<(), std::io::Error> {
)
.count();
match &msg.corrections {
crate::Status::Valid => {}
crate::Status::Invalid => {
typos::Status::Valid => {}
typos::Status::Invalid => {
writeln!(
io::stdout(),
"{}:{}: `{}` is disallowed",
@ -318,7 +318,7 @@ fn print_brief_correction(msg: &Typo) -> Result<(), std::io::Error> {
msg.typo,
)?;
}
crate::Status::Corrections(corrections) => {
typos::Status::Corrections(corrections) => {
writeln!(
io::stdout(),
"{}:{}: `{}` -> {}",
@ -345,11 +345,11 @@ fn print_long_correction(msg: &Typo) -> Result<(), std::io::Error> {
)
.count();
match &msg.corrections {
crate::Status::Valid => {}
crate::Status::Invalid => {
typos::Status::Valid => {}
typos::Status::Invalid => {
writeln!(handle, "error: `{}` is disallowed`", msg.typo,)?;
}
crate::Status::Corrections(corrections) => {
typos::Status::Corrections(corrections) => {
writeln!(
handle,
"error: `{}` should be {}",