Mirror of https://github.com/crate-ci/typos.git (synced 2024-12-22 23:52:12 -05:00)
Merge pull request #464 from foriequal0/fix/uuid-uppercase
Fix tokenizer for uppercase UUID
commit bbdb5863e2
1 changed file with 38 additions and 10 deletions
@@ -319,16 +319,29 @@ mod parser {
         <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
         <T as nom::InputIter>::Item: AsChar + Copy,
     {
-        recognize(tuple((
-            take_while_m_n(8, 8, is_lower_hex_digit),
-            char('-'),
-            take_while_m_n(4, 4, is_lower_hex_digit),
-            char('-'),
-            take_while_m_n(4, 4, is_lower_hex_digit),
-            char('-'),
-            take_while_m_n(4, 4, is_lower_hex_digit),
-            char('-'),
-            take_while_m_n(12, 12, is_lower_hex_digit),
+        recognize(alt((
+            tuple((
+                take_while_m_n(8, 8, is_lower_hex_digit),
+                char('-'),
+                take_while_m_n(4, 4, is_lower_hex_digit),
+                char('-'),
+                take_while_m_n(4, 4, is_lower_hex_digit),
+                char('-'),
+                take_while_m_n(4, 4, is_lower_hex_digit),
+                char('-'),
+                take_while_m_n(12, 12, is_lower_hex_digit),
+            )),
+            tuple((
+                take_while_m_n(8, 8, is_upper_hex_digit),
+                char('-'),
+                take_while_m_n(4, 4, is_upper_hex_digit),
+                char('-'),
+                take_while_m_n(4, 4, is_upper_hex_digit),
+                char('-'),
+                take_while_m_n(4, 4, is_upper_hex_digit),
+                char('-'),
+                take_while_m_n(12, 12, is_upper_hex_digit),
+            )),
         )))(input)
     }
 
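Note: `is_lower_hex_digit` and `is_upper_hex_digit` are defined elsewhere in the file and do not appear in this hunk. A minimal, self-contained sketch of what such predicates amount to, assuming plain `char` input rather than nom's generic `AsChar` item (an illustration, not the typos source):

fn is_lower_hex_digit(c: char) -> bool {
    // 0-9 and a-f only; an uppercase hex digit fails this check.
    c.is_ascii_digit() || ('a'..='f').contains(&c)
}

fn is_upper_hex_digit(c: char) -> bool {
    // 0-9 and A-F only; a lowercase hex digit fails this check.
    c.is_ascii_digit() || ('A'..='F').contains(&c)
}

fn main() {
    assert!(is_lower_hex_digit('e') && !is_lower_hex_digit('E'));
    assert!(is_upper_hex_digit('E') && !is_upper_hex_digit('e'));
    // Decimal digits count as hex digits in both branches.
    assert!(is_lower_hex_digit('7') && is_upper_hex_digit('7'));
}

Because each `alt` branch applies a single predicate to every group, the parser now skips UUIDs written entirely in lowercase or entirely in uppercase; a mixed-case UUID matches neither branch and is still tokenized normally.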
@@ -1087,6 +1100,21 @@ mod test {
         assert_eq!(expected, actual);
     }
 
+    #[test]
+    fn tokenize_ignore_uuid_uppercase() {
+        let parser = TokenizerBuilder::new().build();
+
+        let input = "Hello 123E4567-E89B-12D3-A456-426652340000 World";
+        let expected: Vec<Identifier> = vec![
+            Identifier::new_unchecked("Hello", Case::None, 0),
+            Identifier::new_unchecked("World", Case::None, 43),
+        ];
+        let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+        assert_eq!(expected, actual);
+        let actual: Vec<_> = parser.parse_str(input).collect();
+        assert_eq!(expected, actual);
+    }
+
     #[test]
     fn tokenize_ignore_hash() {
         let parser = TokenizerBuilder::new().build();
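The expected offset of 43 for `World` in the added test is byte arithmetic: "Hello " is 6 bytes, the UUID literal is 36 bytes (32 hex digits plus 4 hyphens), and the space after it is 1 byte. A quick standalone check of that arithmetic (hypothetical snippet, not part of the diff):

fn main() {
    let input = "Hello 123E4567-E89B-12D3-A456-426652340000 World";
    // UUID literal: 8 + 4 + 4 + 4 + 12 hex digits plus 4 hyphens = 36 bytes.
    assert_eq!("123E4567-E89B-12D3-A456-426652340000".len(), 36);
    // "Hello " (6) + UUID (36) + " " (1) = 43, where `World` begins.
    assert_eq!(input.find("World"), Some(43));
}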