Repository: https://github.com/crate-ci/typos.git
Merge pull request #882 from epage/regex
fix(config): Propagate validation errors
Commit 4362a6e5e4
7 changed files with 47 additions and 40 deletions
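As reconstructed from the hunks below, the change makes `EngineConfig::tokenizer` and `EngineConfig::dict` required fields (`TokenizerConfig` / `DictConfig` instead of `Option<_>`), simplifies the layered `update` and `from_defaults` logic accordingly, and adds test fixtures asserting that an invalid `extend-ignore-identifiers-re` pattern now surfaces as a TOML parse error (exit code 78) instead of being silently dropped.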
@@ -146,14 +146,14 @@ impl FileArgs {
             binary: self.binary(),
             check_filename: self.check_filename(),
             check_file: self.check_file(),
-            tokenizer: Some(config::TokenizerConfig {
+            tokenizer: config::TokenizerConfig {
                 unicode: self.unicode(),
                 ..Default::default()
-            }),
-            dict: Some(config::DictConfig {
+            },
+            dict: config::DictConfig {
                 locale: self.locale,
                 ..Default::default()
-            }),
+            },
             extend_ignore_re: Default::default(),
         }
     }

@@ -213,7 +213,7 @@ impl TypeEngineConfig {
             GlobEngineConfig {
                 extend_glob: Vec::new(),
                 engine: EngineConfig {
-                    dict: Some(DictConfig {
+                    dict: DictConfig {
                         extend_identifiers: dict_config
                             .ignore_idents
                             .iter()

@@ -225,7 +225,7 @@ impl TypeEngineConfig {
                             .map(|key| ((*key).into(), (*key).into()))
                             .collect(),
                         ..Default::default()
-                    }),
+                    },
                     ..Default::default()
                 },
             },

@@ -280,9 +280,9 @@ pub struct EngineConfig {
     /// Verifying spelling in files.
     pub check_file: Option<bool>,
     #[serde(flatten)]
-    pub tokenizer: Option<TokenizerConfig>,
+    pub tokenizer: TokenizerConfig,
     #[serde(flatten)]
-    pub dict: Option<DictConfig>,
+    pub dict: DictConfig,
     #[serde(with = "serde_regex")]
     pub extend_ignore_re: Vec<regex::Regex>,
 }

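`extend_ignore_re` above is deserialized through `serde_regex`, so an invalid pattern in the config fails deserialization rather than being skipped. A minimal, self-contained sketch of that behavior (the struct and field names here are illustrative, not the crate's; assumed dependencies: serde with derive, serde_regex, regex, toml):

// Sketch only: shows a `serde_regex`-backed field rejecting a bad pattern
// during deserialization. Names are illustrative, not the crate's types.
use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
struct DictSketch {
    #[serde(with = "serde_regex")]
    extend_ignore_identifiers_re: Vec<regex::Regex>,
}

fn main() {
    // A valid pattern deserializes cleanly.
    let ok: DictSketch = toml::from_str(r#"extend-ignore-identifiers-re = ["he.*"]"#).unwrap();
    assert_eq!(ok.extend_ignore_identifiers_re.len(), 1);

    // An unclosed group is rejected by `regex`, and the error surfaces through `toml`.
    let err = toml::from_str::<DictSketch>(r#"extend-ignore-identifiers-re = ["(he.*"]"#)
        .unwrap_err();
    println!("{err}");
}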
@@ -294,12 +294,8 @@ impl EngineConfig {
             binary: Some(empty.binary()),
             check_filename: Some(empty.check_filename()),
             check_file: Some(empty.check_file()),
-            tokenizer: Some(
-                empty
-                    .tokenizer
-                    .unwrap_or_else(TokenizerConfig::from_defaults),
-            ),
-            dict: Some(empty.dict.unwrap_or_else(DictConfig::from_defaults)),
+            tokenizer: TokenizerConfig::from_defaults(),
+            dict: DictConfig::from_defaults(),
             extend_ignore_re: Default::default(),
         }
     }

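The `from_defaults` constructor above follows a simple pattern: start from the empty config and fill every optional field with its effective default, so the result can serve as the base layer for later `update` calls. A small self-contained sketch of that idea (illustrative names and default values, not the crate's API):

// Sketch only: "from_defaults" fills every Option field with its effective default.
#[derive(Clone, Debug, Default)]
struct EngineSketch {
    binary: Option<bool>,
    check_filename: Option<bool>,
    check_file: Option<bool>,
}

impl EngineSketch {
    fn binary(&self) -> bool {
        self.binary.unwrap_or(false)
    }
    fn check_filename(&self) -> bool {
        self.check_filename.unwrap_or(true)
    }
    fn check_file(&self) -> bool {
        self.check_file.unwrap_or(true)
    }

    fn from_defaults() -> Self {
        let empty = Self::default();
        Self {
            binary: Some(empty.binary()),
            check_filename: Some(empty.check_filename()),
            check_file: Some(empty.check_file()),
        }
    }
}

fn main() {
    let base = EngineSketch::from_defaults();
    assert_eq!(base.check_file, Some(true)); // every field is now explicitly set
}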
@@ -314,22 +310,8 @@ impl EngineConfig {
         if let Some(source) = source.check_file {
             self.check_file = Some(source);
         }
-        if let Some(source) = source.tokenizer.as_ref() {
-            let mut tokenizer = None;
-            std::mem::swap(&mut tokenizer, &mut self.tokenizer);
-            let mut tokenizer = tokenizer.unwrap_or_default();
-            tokenizer.update(source);
-            let mut tokenizer = Some(tokenizer);
-            std::mem::swap(&mut tokenizer, &mut self.tokenizer);
-        }
-        if let Some(source) = source.dict.as_ref() {
-            let mut dict = None;
-            std::mem::swap(&mut dict, &mut self.dict);
-            let mut dict = dict.unwrap_or_default();
-            dict.update(source);
-            let mut dict = Some(dict);
-            std::mem::swap(&mut dict, &mut self.dict);
-        }
+        self.tokenizer.update(&source.tokenizer);
+        self.dict.update(&source.dict);
         self.extend_ignore_re
             .extend(source.extend_ignore_re.iter().cloned());
     }

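The simplified `update` above merges an overriding layer into `self`: optional scalar fields are replaced only when the source sets them, and the now-required `tokenizer` and `dict` sub-configs are merged in place rather than swapped through an `Option`. A compact sketch of that layering behavior (illustrative types, not the crate's):

// Sketch only: layered config merge where Some(..) in the source overrides self,
// and nested required configs are merged recursively.
#[derive(Clone, Debug, Default)]
struct TokenizerSketch {
    unicode: Option<bool>,
}

impl TokenizerSketch {
    fn update(&mut self, source: &TokenizerSketch) {
        if let Some(unicode) = source.unicode {
            self.unicode = Some(unicode);
        }
    }
}

#[derive(Clone, Debug, Default)]
struct EngineSketch {
    check_file: Option<bool>,
    tokenizer: TokenizerSketch, // required: merged in place, no mem::swap dance
}

impl EngineSketch {
    fn update(&mut self, source: &EngineSketch) {
        if let Some(check_file) = source.check_file {
            self.check_file = Some(check_file);
        }
        self.tokenizer.update(&source.tokenizer);
    }
}

fn main() {
    let mut base = EngineSketch { check_file: Some(true), ..Default::default() };
    let overlay = EngineSketch {
        tokenizer: TokenizerSketch { unicode: Some(false) },
        ..Default::default()
    };
    base.update(&overlay);
    assert_eq!(base.check_file, Some(true));         // untouched: overlay left it unset
    assert_eq!(base.tokenizer.unicode, Some(false)); // merged from the overlay
}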
@@ -659,8 +641,8 @@ check-file = true
             GlobEngineConfig {
                 extend_glob: vec!["*.po".into()],
                 engine: EngineConfig {
-                    tokenizer: Some(TokenizerConfig::default()),
-                    dict: Some(DictConfig::default()),
+                    tokenizer: TokenizerConfig::default(),
+                    dict: DictConfig::default(),
                     check_file: Some(true),
                     ..Default::default()
                 },

@@ -687,13 +669,13 @@ inout = "inout"
             GlobEngineConfig {
                 extend_glob: vec!["*.shader".into(), "*.cginc".into()],
                 engine: EngineConfig {
-                    tokenizer: Some(TokenizerConfig::default()),
-                    dict: Some(DictConfig {
+                    tokenizer: TokenizerConfig::default(),
+                    dict: DictConfig {
                         extend_words: maplit::hashmap! {
                             "inout".into() => "inout".into(),
                         },
                         ..Default::default()
-                    }),
+                    },
                     ..Default::default()
                 },
             },

@@ -227,14 +227,13 @@ impl<'s> ConfigEngine<'s> {
         let check_filename = engine.check_filename();
         let check_file = engine.check_file();
         let crate::config::EngineConfig {
-            tokenizer,
-            dict,
+            tokenizer: mut tokenizer_config,
+            dict: mut dict_config,
             extend_ignore_re,
             ..
         } = engine;
-        let tokenizer_config =
-            tokenizer.unwrap_or_else(crate::config::TokenizerConfig::from_defaults);
-        let dict_config = dict.unwrap_or_else(crate::config::DictConfig::from_defaults);
+        tokenizer_config.update(&crate::config::TokenizerConfig::from_defaults());
+        dict_config.update(&crate::config::DictConfig::from_defaults());
 
         if !tokenizer_config.ignore_hex() {
             log::warn!("`ignore-hex` is deprecated");

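The destructuring above renames struct fields and binds them mutably in one step (`field: mut new_name`), which avoids the earlier `unwrap_or_else` dance. A tiny standalone illustration of the pattern (placeholder types, not the crate's):

// Sketch only: destructuring with renamed, mutable bindings.
#[derive(Default)]
struct ConfigSketch {
    tokenizer: Vec<String>,
    dict: Vec<String>,
    #[allow(dead_code)] // present only so the `..` below has something to skip
    extend_ignore_re: Vec<String>,
}

fn main() {
    let engine = ConfigSketch::default();
    let ConfigSketch {
        tokenizer: mut tokenizer_config,
        dict: mut dict_config,
        ..
    } = engine;
    // Both bindings are owned and mutable and can be layered with defaults afterwards.
    tokenizer_config.push("layered later".to_string());
    dict_config.push("layered later".to_string());
    assert_eq!(tokenizer_config.len(), 1);
    assert_eq!(dict_config.len(), 1);
}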
@@ -0,0 +1,9 @@
+[default.extend-identifiers]
+hello = "goodbye"
+
+[type.fail]
+extend-glob = ["*.fail"]
+
+[type.ignore]
+extend-glob = ["*.ignore"]
+extend-ignore-identifiers-re = ["(he.*"]

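The `(he.*` pattern in this new fixture is deliberately malformed (an unclosed group), so loading the config should now fail instead of silently ignoring the bad regex. A quick check with the `regex` crate confirms the pattern is rejected:

// The fixture's pattern has an unclosed group; `regex` refuses to compile it,
// which is the error the CLI test below expects to see propagated.
fn main() {
    let err = regex::Regex::new("(he.*").unwrap_err();
    println!("{err}"); // reports "unclosed group", as in the expected stderr
}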
@@ -0,0 +1 @@
+hello

@@ -0,0 +1 @@
+hello

@@ -0,0 +1,15 @@
+bin.name = "typos"
+status.code = 78
+stdin = ""
+stdout = ""
+stderr = """
+TOML parse error at line 7, column 1
+  |
+7 | [type.ignore]
+  | ^[..]
+regex parse error:
+    (he.*
+    ^
+error: unclosed group
+
+"""
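This last fixture is in the TOML command-test format used by the project's CLI snapshot harness (presumably trycmd): `bin.name` selects the binary to run, `status.code = 78` asserts the exit code (conventionally EX_CONFIG for configuration errors), and `[..]` inside `stderr` acts as a wildcard for the variable parts of the message, so the test pins down that the regex validation error from the config file reaches the user.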