Mirror of https://github.com/crate-ci/typos.git, synced 2025-01-11 09:11:39 -05:00

fix(dictgen)!: Allow case-sensitive ordered maps

Parent: 086f9d1558 · Commit: e7ff9cfc01

10 changed files with 650558 additions and 636484 deletions
@@ -1,6 +1,7 @@
 // This file is @generated crates/codespell-dict/tests/codegen.rs

-pub static WORD_DICTIONARY: dictgen::OrderedMap<&[&str]> = dictgen::OrderedMap {
+pub static WORD_DICTIONARY: dictgen::OrderedMap<dictgen::InsensitiveStr<'static>, &[&str]> =
+    dictgen::OrderedMap {
     keys: &[
         dictgen::InsensitiveStr::Ascii("1nd"),
         dictgen::InsensitiveStr::Ascii("2rd"),
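The regenerated tables keep dictgen::InsensitiveStr keys, so existing case-insensitive lookups are unchanged. A minimal sketch of such a lookup (not part of this commit; the table contents are illustrative), using the find signature that appears further down in this diff:

use unicase::UniCase;

// Hypothetical table in the shape of the generated dictionaries above.
pub static EXAMPLE_DICTIONARY: dictgen::OrderedMap<dictgen::InsensitiveStr<'static>, &[&str]> =
    dictgen::OrderedMap {
        keys: &[dictgen::InsensitiveStr::Ascii("1nd")],
        values: &[&["and", "2nd"]],
        range: 3..=3,
    };

fn main() {
    // find takes a unicase::UniCase key, so the lookup stays
    // case-insensitive: "1ND" still hits the "1nd" entry.
    assert!(EXAMPLE_DICTIONARY.find(&UniCase::new("1ND")).is_some());
}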
@@ -51,6 +51,18 @@ impl<'s2> PartialEq<InsensitiveStr<'s2>> for InsensitiveStr<'_> {

 impl Eq for InsensitiveStr<'_> {}

+impl PartialOrd for InsensitiveStr<'_> {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for InsensitiveStr<'_> {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.convert().cmp(&other.convert())
+    }
+}
+
 impl core::hash::Hash for InsensitiveStr<'_> {
     #[inline]
     fn hash<H: core::hash::Hasher>(&self, hasher: &mut H) {
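With PartialOrd and Ord implemented, InsensitiveStr values can be sorted and binary-searched directly, which is what the ordered-map lookup relies on. A minimal sketch exercising the new impls (not part of this commit; it assumes only the public Ascii variant used by the generated tables):

use dictgen::InsensitiveStr;

fn main() {
    let mut keys = [
        InsensitiveStr::Ascii("Beta"),
        InsensitiveStr::Ascii("alpha"),
        InsensitiveStr::Ascii("GAMMA"),
    ];
    // Ordering is case-insensitive because cmp compares the
    // unicase conversions of both sides.
    keys.sort_unstable();
    assert!(keys.binary_search(&InsensitiveStr::Ascii("gamma")).is_ok());
}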
@@ -14,6 +14,7 @@ impl OrderedMapGen<'_> {
         data.sort_unstable_by_key(|v| unicase::UniCase::new(v.0));

         let name = self.gen.name;
+        let key_type = "dictgen::InsensitiveStr<'static>";
         let value_type = self.gen.value_type;

         let mut smallest = usize::MAX;
@@ -21,7 +22,7 @@ impl OrderedMapGen<'_> {

         writeln!(
             file,
-            "pub static {name}: dictgen::OrderedMap<{value_type}> = dictgen::OrderedMap {{"
+            "pub static {name}: dictgen::OrderedMap<{key_type}, {value_type}> = dictgen::OrderedMap {{"
         )?;
         writeln!(file, "    keys: &[")?;
         for (key, _value) in data.iter() {
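With key_type added to the format string, the emitted declaration now names the key type explicitly. A standalone sketch of the output shape (the names and values here are illustrative, not the repo's codegen itself):

fn main() {
    let name = "EXAMPLE_DICTIONARY";
    let key_type = "dictgen::InsensitiveStr<'static>";
    let value_type = "&[&str]";
    // Prints the same header shape that the regenerated tables in this diff show.
    println!("pub static {name}: dictgen::OrderedMap<{key_type}, {value_type}> = dictgen::OrderedMap {{");
}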
@@ -52,13 +53,13 @@ impl OrderedMapGen<'_> {
     }
 }

-pub struct OrderedMap<V: 'static> {
-    pub keys: &'static [crate::InsensitiveStr<'static>],
+pub struct OrderedMap<K: 'static, V: 'static> {
+    pub keys: &'static [K],
     pub values: &'static [V],
     pub range: core::ops::RangeInclusive<usize>,
 }

-impl<V> OrderedMap<V> {
+impl<V> OrderedMap<crate::InsensitiveStr<'_>, V> {
     #[inline]
     pub fn find(&self, word: &'_ unicase::UniCase<&str>) -> Option<&'static V> {
         if self.range.contains(&word.len()) {
@@ -71,3 +72,14 @@ impl<V> OrderedMap<V> {
         }
     }
 }
+
+impl<V> OrderedMap<&str, V> {
+    #[inline]
+    pub fn find(&self, word: &'_ &str) -> Option<&'static V> {
+        if self.range.contains(&word.len()) {
+            self.keys.binary_search(word).map(|i| &self.values[i]).ok()
+        } else {
+            None
+        }
+    }
+}
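This new &str-keyed impl is what enables case-sensitive tables: keys are compared by plain byte-wise str ordering rather than through unicase. A hypothetical table using it (names and contents are illustrative, not from the repo); the keys must be pre-sorted because find uses binary_search:

pub static EXAMPLE_CASE_SENSITIVE: dictgen::OrderedMap<&str, u32> = dictgen::OrderedMap {
    // Sorted byte-wise, so uppercase sorts before lowercase.
    keys: &["SIZE", "Size", "size"],
    values: &[1, 2, 3],
    range: 4..=4,
};

fn lookup(word: &str) -> Option<u32> {
    // Resolves to the new find on OrderedMap<&str, V>; "Size" and "size"
    // now map to different values.
    EXAMPLE_CASE_SENSITIVE.find(&word).copied()
}

fn main() {
    assert_eq!(lookup("SIZE"), Some(1));
    assert_eq!(lookup("size"), Some(3));
}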
@@ -27,7 +27,7 @@ impl TrieGen<'_> {

 pub struct Trie<V: 'static> {
     pub root: &'static TrieNode<V>,
-    pub unicode: &'static crate::OrderedMap<V>,
+    pub unicode: &'static crate::OrderedMap<crate::InsensitiveStr<'static>, V>,
     pub range: core::ops::RangeInclusive<usize>,
 }

@@ -91,7 +91,7 @@ pub struct TrieNode<V: 'static> {

 pub enum TrieChild<V: 'static> {
     Nested(&'static [Option<&'static TrieNode<V>>; 26]),
-    Flat(&'static crate::OrderedMap<V>),
+    Flat(&'static crate::OrderedMap<crate::InsensitiveStr<'static>, V>),
 }

 #[cfg(feature = "codegen")]
@@ -1,6 +1,7 @@
 // This file is @generated by crates/misspell-dict/tests/codegen.rs

-pub static MAIN_DICTIONARY: dictgen::OrderedMap<&[&str]> = dictgen::OrderedMap {
+pub static MAIN_DICTIONARY: dictgen::OrderedMap<dictgen::InsensitiveStr<'static>, &[&str]> =
+    dictgen::OrderedMap {
     keys: &[
         dictgen::InsensitiveStr::Ascii("abandenment"),
         dictgen::InsensitiveStr::Ascii("abandining"),
@@ -56101,7 +56102,8 @@ pub static MAIN_DICTIONARY: dictgen::OrderedMap<&[&str]> = dictgen::OrderedMap {
     ],
     range: 3..=19,
 };
-pub static AMERICAN_DICTIONARY: dictgen::OrderedMap<&[&str]> = dictgen::OrderedMap {
+pub static AMERICAN_DICTIONARY: dictgen::OrderedMap<dictgen::InsensitiveStr<'static>, &[&str]> =
+    dictgen::OrderedMap {
     keys: &[
         dictgen::InsensitiveStr::Ascii("accessorise"),
         dictgen::InsensitiveStr::Ascii("accessorised"),
@@ -59346,7 +59348,8 @@ pub static AMERICAN_DICTIONARY: dictgen::OrderedMap<&[&str]> = dictgen::OrderedM
     ],
     range: 4..=20,
 };
-pub static BRITISH_DICTIONARY: dictgen::OrderedMap<&[&str]> = dictgen::OrderedMap {
+pub static BRITISH_DICTIONARY: dictgen::OrderedMap<dictgen::InsensitiveStr<'static>, &[&str]> =
+    dictgen::OrderedMap {
     keys: &[
         dictgen::InsensitiveStr::Ascii("accessorize"),
         dictgen::InsensitiveStr::Ascii("accessorized"),
@@ -2,7 +2,8 @@
 #![allow(clippy::unreadable_literal)]
 #![allow(unreachable_pub)]

-pub static WORD: dictgen::OrderedMap<&[&str]> = dictgen::OrderedMap {
+pub static WORD: dictgen::OrderedMap<dictgen::InsensitiveStr<'static>, &[&str]> =
+    dictgen::OrderedMap {
     keys: &[
         dictgen::InsensitiveStr::Ascii("aaccess"),
         dictgen::InsensitiveStr::Ascii("aaccessibility"),
3 file diffs suppressed because they are too large.
@@ -1,6 +1,7 @@
 // This file is @generated by crates/wikipedia-dict/tests/codegen.rs

-pub static WORD_DICTIONARY: dictgen::OrderedMap<&[&str]> = dictgen::OrderedMap {
+pub static WORD_DICTIONARY: dictgen::OrderedMap<dictgen::InsensitiveStr<'static>, &[&str]> =
+    dictgen::OrderedMap {
     keys: &[
         dictgen::InsensitiveStr::Ascii("abandonned"),
         dictgen::InsensitiveStr::Ascii("abbout"),