Merge pull request #1005 from epage/template

chore: Update from _rust/main template
This commit is contained in:
Ed Page 2024-04-30 14:16:18 -05:00 committed by GitHub
commit 919912762c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
55 changed files with 414 additions and 203 deletions

View file

@ -1,5 +1,4 @@
msrv = "1.75" # MSRV
warn-on-all-wildcard-imports = true
allow-print-in-tests = true
allow-expect-in-tests = true
allow-unwrap-in-tests = true
allow-dbg-in-tests = true

View file

@ -3,6 +3,7 @@
'before 5am on the first day of the month',
],
semanticCommits: 'enabled',
commitMessageLowerCase: 'never',
configMigration: true,
dependencyDashboard: true,
customManagers: [
@ -17,29 +18,28 @@
'^\\.github/workflows/rust-next.yml$',
],
matchStrings: [
'MSRV.*?(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)',
'(?<currentValue>\\d+\\.\\d+(\\.\\d+)?).*?MSRV',
'STABLE.*?(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)',
'(?<currentValue>\\d+\\.\\d+(\\.\\d+)?).*?STABLE',
],
depNameTemplate: 'rust',
depNameTemplate: 'STABLE',
packageNameTemplate: 'rust-lang/rust',
datasourceTemplate: 'github-releases',
},
],
packageRules: [
{
commitMessageTopic: 'MSRV',
commitMessageTopic: 'Rust Stable',
matchManagers: [
'custom.regex',
],
matchPackageNames: [
'rust',
'STABLE',
],
minimumReleaseAge: '126 days', // 3 releases * 6 weeks per release * 7 days per week
internalChecksFilter: 'strict',
extractVersion: '^(?<version>\\d+\\.\\d+)', // Drop the patch version
schedule: [
'* * * * *',
],
automerge: true,
},
// Goals:
// - Rollup safe upgrades to reduce CI runner load
@ -62,6 +62,7 @@
matchCurrentVersion: '>=1.0.0',
matchUpdateTypes: [
'minor',
'patch',
],
automerge: true,
groupName: 'compatible',

26
.github/settings.yml vendored
View file

@ -41,14 +41,18 @@ labels:
color: '#c2e0c6'
description: "Help wanted!"
branches:
- name: master
protection:
required_pull_request_reviews: null
required_conversation_resolution: true
required_status_checks:
# Required. Require branches to be up to date before merging.
strict: false
contexts: ["CI", "Lint Commits", "Spell Check with Typos"]
enforce_admins: false
restrictions: null
# This serves more as documentation.
# Branch protection API was replaced by rulesets but settings isn't updated.
# See https://github.com/repository-settings/app/issues/825
#
# branches:
# - name: master
# protection:
# required_pull_request_reviews: null
# required_conversation_resolution: true
# required_status_checks:
# # Required. Require branches to be up to date before merging.
# strict: false
# contexts: ["CI", "Spell Check with Typos"]
# enforce_admins: false
# restrictions: null

View file

@ -17,6 +17,10 @@ env:
CARGO_TERM_COLOR: always
CLICOLOR: 1
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
jobs:
security_audit:
permissions:

View file

@ -15,21 +15,27 @@ env:
CLICOLOR: 1
COLUMNS: 130
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
jobs:
ci:
permissions:
contents: none
name: CI
needs: [test, msrv, docs, rustfmt, clippy]
needs: [test, msrv, lockfile, docs, rustfmt, clippy]
runs-on: ubuntu-latest
if: "always()"
steps:
- name: Done
run: exit 0
- name: Failed
run: exit 1
if: "contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped')"
test:
name: Test
strategy:
matrix:
os: ["ubuntu-latest", "windows-latest", "macos-latest"]
os: ["ubuntu-latest", "windows-latest", "macos-14"]
rust: ["stable"]
continue-on-error: ${{ matrix.rust != 'stable' }}
runs-on: ${{ matrix.os }}
@ -42,16 +48,13 @@ jobs:
toolchain: ${{ matrix.rust }}
components: rustfmt
- uses: Swatinem/rust-cache@v2
- uses: taiki-e/install-action@cargo-hack
- name: Build
run: cargo test --no-run --workspace --all-features
- name: Default features
run: cargo test --workspace
- name: All features
run: cargo test --workspace --all-features
- name: No-default features
run: cargo test --workspace --no-default-features
run: cargo test --workspace --no-run
- name: Test
run: cargo hack test --feature-powerset --workspace
msrv:
name: "Check MSRV: 1.75"
name: "Check MSRV"
runs-on: ubuntu-latest
steps:
- name: Checkout repository
@ -59,14 +62,11 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: "1.75" # MSRV
toolchain: stable
- uses: Swatinem/rust-cache@v2
- uses: taiki-e/install-action@cargo-hack
- name: Default features
run: cargo check --workspace --all-targets
- name: All features
run: cargo check --workspace --all-targets --all-features
- name: No-default features
run: cargo check --workspace --all-targets --no-default-features
run: cargo hack check --feature-powerset --locked --rust-version --ignore-private --workspace --all-targets
lockfile:
runs-on: ubuntu-latest
steps:
@ -78,7 +78,7 @@ jobs:
toolchain: stable
- uses: Swatinem/rust-cache@v2
- name: "Is lockfile updated?"
run: cargo fetch --locked
run: cargo update --workspace --locked
docs:
name: Docs
runs-on: ubuntu-latest
@ -88,7 +88,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: stable
toolchain: "1.76" # STABLE
- uses: Swatinem/rust-cache@v2
- name: Check documentation
env:
@ -103,9 +103,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
# Not MSRV because its harder to jump between versions and people are
# more likely to have stable
toolchain: stable
toolchain: "1.76" # STABLE
components: rustfmt
- uses: Swatinem/rust-cache@v2
- name: Check formatting
@ -121,13 +119,13 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: "1.75" # MSRV
toolchain: "1.76" # STABLE
components: clippy
- uses: Swatinem/rust-cache@v2
- name: Install SARIF tools
run: cargo install clippy-sarif --version 0.3.4 --locked # Held back due to msrv
run: cargo install clippy-sarif --locked
- name: Install SARIF tools
run: cargo install sarif-fmt --version 0.3.4 --locked # Held back due to msrv
run: cargo install sarif-fmt --locked
- name: Check
run: >
cargo clippy --workspace --all-features --all-targets --message-format=json -- -D warnings --allow deprecated
@ -142,3 +140,22 @@ jobs:
wait-for-processing: true
- name: Report status
run: cargo clippy --workspace --all-features --all-targets -- -D warnings --allow deprecated
coverage:
name: Coverage
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: stable
- uses: Swatinem/rust-cache@v2
- name: Install cargo-tarpaulin
run: cargo install cargo-tarpaulin
- name: Gather coverage
run: cargo tarpaulin --output-dir coverage --out lcov
- name: Publish to Coveralls
uses: coverallsapp/github-action@master
with:
github-token: ${{ secrets.GITHUB_TOKEN }}

View file

@ -11,6 +11,10 @@ env:
CARGO_TERM_COLOR: always
CLICOLOR: 1
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
jobs:
committed:
name: Lint Commits

View file

@ -12,6 +12,10 @@ env:
CARGO_TERM_COLOR: always
CLICOLOR: 1
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
jobs:
pre-commit:
permissions:

View file

@ -13,12 +13,16 @@ env:
CLICOLOR: 1
COLUMNS: 130
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
jobs:
test:
name: Test
strategy:
matrix:
os: ["ubuntu-latest", "windows-latest", "macos-latest"]
os: ["ubuntu-latest", "windows-latest", "macos-latest", "macos-14"]
rust: ["stable", "beta"]
include:
- os: ubuntu-latest
@ -34,12 +38,11 @@ jobs:
toolchain: ${{ matrix.rust }}
components: rustfmt
- uses: Swatinem/rust-cache@v2
- name: Default features
run: cargo test --workspace
- name: All features
run: cargo test --workspace --all-features
- name: No-default features
run: cargo test --workspace --no-default-features
- uses: taiki-e/install-action@cargo-hack
- name: Build
run: cargo test --workspace --no-run
- name: Test
run: cargo hack test --feature-powerset --workspace
latest:
name: "Check latest dependencies"
runs-on: ubuntu-latest
@ -52,11 +55,10 @@ jobs:
toolchain: stable
components: rustfmt
- uses: Swatinem/rust-cache@v2
- uses: taiki-e/install-action@cargo-hack
- name: Update dependencies
run: cargo update
- name: Default features
run: cargo test --workspace
- name: All features
run: cargo test --workspace --all-features
- name: No-default features
run: cargo test --workspace --no-default-features
- name: Build
run: cargo test --workspace --no-run
- name: Test
run: cargo hack test --feature-powerset --workspace

View file

@ -21,6 +21,74 @@ include = [
"examples/**/*"
]
[workspace.lints.rust]
rust_2018_idioms = "warn"
unreachable_pub = "warn"
unsafe_op_in_unsafe_fn = "warn"
unused_lifetimes = "warn"
unused_macro_rules = "warn"
unused_qualifications = "warn"
[workspace.lints.clippy]
bool_assert_comparison = "allow"
branches_sharing_code = "allow"
checked_conversions = "warn"
collapsible_else_if = "allow"
create_dir = "warn"
dbg_macro = "warn"
debug_assert_with_mut_call = "warn"
doc_markdown = "warn"
empty_enum = "warn"
enum_glob_use = "warn"
expl_impl_clone_on_copy = "warn"
explicit_deref_methods = "warn"
explicit_into_iter_loop = "warn"
fallible_impl_from = "warn"
filter_map_next = "warn"
flat_map_option = "warn"
float_cmp_const = "warn"
fn_params_excessive_bools = "warn"
from_iter_instead_of_collect = "warn"
if_same_then_else = "allow"
implicit_clone = "warn"
imprecise_flops = "warn"
inconsistent_struct_constructor = "warn"
inefficient_to_string = "warn"
infinite_loop = "warn"
invalid_upcast_comparisons = "warn"
large_digit_groups = "warn"
large_stack_arrays = "warn"
large_types_passed_by_value = "warn"
let_and_return = "allow" # sometimes good to name what you are returning
linkedlist = "warn"
lossy_float_literal = "warn"
macro_use_imports = "warn"
match_wildcard_for_single_variants = "warn"
mem_forget = "warn"
mutex_integer = "warn"
needless_for_each = "warn"
negative_feature_names = "warn"
path_buf_push_overwrite = "warn"
ptr_as_ptr = "warn"
rc_mutex = "warn"
redundant_feature_names = "warn"
ref_option_ref = "warn"
rest_pat_in_fully_bound_structs = "warn"
same_functions_in_if_condition = "warn"
self_named_module_files = "warn"
semicolon_if_nothing_returned = "warn"
single_match_else = "warn"
str_to_string = "warn"
string_add = "warn"
string_add_assign = "warn"
string_lit_as_bytes = "warn"
string_to_string = "warn"
todo = "warn"
trait_duplication_in_bounds = "warn"
verbose_file_reads = "warn"
wildcard_imports = "warn"
zero_sized_map_values = "warn"
[profile.dev]
panic = "abort"

View file

@ -12,8 +12,9 @@ edition.workspace = true
rust-version.workspace = true
include.workspace = true
[package.metadata.release]
release = false
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
unicase = "2.7"
@ -26,3 +27,6 @@ codegenrs = "3.0"
dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
snapbox = { version = "0.5.9", features = ["path"] }
typos = { path = "../typos" }
[lints]
workspace = true

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
mod dict_codegen;
pub use crate::dict_codegen::*;

View file

@ -10,6 +10,10 @@ edition.workspace = true
rust-version.workspace = true
include.workspace = true
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[features]
default = ["std"]
std = []
@ -21,3 +25,6 @@ unicase = "2.7"
phf = { version = "0.11", features = ["unicase"], optional = true }
phf_codegen = { version = "0.11", optional = true }
phf_shared = { version = "0.11", optional = true }
[lints]
workspace = true

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
#[cfg(feature = "map")]
mod map;
mod table;

View file

@ -66,7 +66,7 @@ impl<V> DictMap<V> {
impl<'s> phf_shared::PhfHash for crate::InsensitiveStr<'s> {
#[inline]
fn phf_hash<H: core::hash::Hasher>(&self, state: &mut H) {
core::hash::Hash::hash(self, state)
core::hash::Hash::hash(self, state);
}
}
@ -75,7 +75,7 @@ impl<'s> phf_shared::FmtConst for crate::InsensitiveStr<'s> {
match self {
crate::InsensitiveStr::Ascii(_) => f.write_str("dictgen::InsensitiveStr::Ascii(")?,
crate::InsensitiveStr::Unicode(_) => {
f.write_str("dictgen::InsensitiveStr::Unicode(")?
f.write_str("dictgen::InsensitiveStr::Unicode(")?;
}
}

View file

@ -67,7 +67,7 @@ impl<V> DictTable<V> {
}
}
/// UniCase look-alike that avoids const-fn so large tables don't OOM
/// `UniCase` look-alike that avoids const-fn so large tables don't OOM
#[derive(Copy, Clone)]
pub enum InsensitiveStr<'s> {
Unicode(&'s str),
@ -111,20 +111,20 @@ impl<'s> Eq for InsensitiveStr<'s> {}
impl<'s> core::hash::Hash for InsensitiveStr<'s> {
#[inline]
fn hash<H: core::hash::Hasher>(&self, hasher: &mut H) {
self.convert().hash(hasher)
self.convert().hash(hasher);
}
}
impl<'s> core::fmt::Debug for InsensitiveStr<'s> {
#[inline]
fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
core::fmt::Debug::fmt(self.into_inner(), fmt)
}
}
impl<'s> core::fmt::Display for InsensitiveStr<'s> {
#[inline]
fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
core::fmt::Display::fmt(self.into_inner(), fmt)
}
}

View file

@ -189,7 +189,7 @@ mod codegen {
}
}
fn gen_type_name<V>(leaf: &DynChild<V>) -> &'static str {
fn gen_type_name<V>(leaf: &DynChild<'_, V>) -> &'static str {
match leaf {
DynChild::Nested(_) => "dictgen::DictTrieChild::Nested",
DynChild::Flat(_) => "dictgen::DictTrieChild::Flat",
@ -250,7 +250,7 @@ mod codegen {
impl<'s, V> DynNode<'s, V> {
fn burst(&mut self, limit: usize) {
self.children.burst(limit)
self.children.burst(limit);
}
}

View file

@ -13,6 +13,7 @@ include.workspace = true
[package.metadata.release]
release = false
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
unicase = "2.7"
@ -25,3 +26,6 @@ codegenrs = "3.0"
regex = "1"
dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
snapbox = { version = "0.5.9", features = ["path"] }
[lints]
workspace = true

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
mod dict_codegen;
pub use crate::dict_codegen::*;

View file

@ -60,7 +60,7 @@ struct Words<'s> {
british: HashMap<&'s str, Vec<&'s str>>,
}
fn parse_dict(raw: &str) -> Words {
fn parse_dict(raw: &str) -> Words<'_> {
let mut bad = HashMap::new();
let mut main = HashMap::new();
let mut american = HashMap::new();

View file

@ -12,7 +12,8 @@ rust-version.workspace = true
include.workspace = true
[package.metadata.docs.rs]
no-default-features = true
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.release]
tag-prefix = ""
@ -94,3 +95,6 @@ harness = false
[[bench]]
name = "tokenize"
harness = false
[lints]
workspace = true

View file

@ -1,3 +1,5 @@
#![allow(elided_lifetimes_in_paths)]
mod data;
use assert_fs::prelude::*;
@ -19,7 +21,7 @@ fn found_files(bencher: divan::Bencher, sample: &data::Data) {
.counter(divan::counter::BytesCount::of_str(sample.content()))
.bench_local(|| {
typos_cli::file::FoundFiles.check_file(sample_path.path(), true, &policy, &PrintSilent)
})
});
}
#[divan::bench(args = data::DATA)]
@ -38,7 +40,7 @@ fn identifiers(bencher: divan::Bencher, sample: &data::Data) {
.counter(divan::counter::BytesCount::of_str(sample.content()))
.bench_local(|| {
typos_cli::file::Identifiers.check_file(sample_path.path(), true, &policy, &PrintSilent)
})
});
}
#[divan::bench(args = data::DATA)]
@ -57,7 +59,7 @@ fn words(bencher: divan::Bencher, sample: &data::Data) {
.counter(divan::counter::BytesCount::of_str(sample.content()))
.bench_local(|| {
typos_cli::file::Words.check_file(sample_path.path(), true, &policy, &PrintSilent)
})
});
}
#[divan::bench(args = data::DATA)]
@ -76,7 +78,7 @@ fn typos(bencher: divan::Bencher, sample: &data::Data) {
.counter(divan::counter::BytesCount::of_str(sample.content()))
.bench_local(|| {
typos_cli::file::Typos.check_file(sample_path.path(), true, &policy, &PrintSilent)
})
});
}
#[derive(Debug, Default)]

View file

@ -1,3 +1,5 @@
#![allow(elided_lifetimes_in_paths)]
mod regular {
mod ok {
#[divan::bench]

View file

@ -1,11 +1,13 @@
pub static EMPTY: &str = "";
#![allow(dead_code)]
pub static NO_TOKENS: &str = " ";
pub(crate) static EMPTY: &str = "";
pub static SINGLE_TOKEN: &str = "success";
pub(crate) static NO_TOKENS: &str = " ";
pub(crate) static SINGLE_TOKEN: &str = "success";
// Stolen from https://github.com/BurntSushi/ripgrep/blob/master/grep-searcher/src/searcher/glue.rs
pub static SHERLOCK: &str = "\
pub(crate) static SHERLOCK: &str = "\
For the Doctor Watsons of this world, as opposed to the Sherlock
Holmeses, success in the province of detective work must always
be, to a very large extent, the result of luck. Sherlock Holmes
@ -15,7 +17,7 @@ and exhibited clearly, with a label attached.\
";
// Stolen from https://github.com/BurntSushi/ripgrep/blob/master/grep-searcher/src/searcher/glue.rs
pub static CODE: &str = "\
pub(crate) static CODE: &str = "\
extern crate snap;
use std::io;
fn main() {
@ -28,17 +30,17 @@ fn main() {
}
";
pub static CORPUS: &str = include_str!("../../typos-dict/assets/words.csv");
pub(crate) static CORPUS: &str = include_str!("../../typos-dict/assets/words.csv");
#[derive(Debug)]
pub struct Data(&'static str, &'static str);
pub(crate) struct Data(&'static str, &'static str);
impl Data {
pub const fn name(&self) -> &'static str {
pub(crate) const fn name(&self) -> &'static str {
self.0
}
pub const fn content(&self) -> &'static str {
pub(crate) const fn content(&self) -> &'static str {
self.1
}
}
@ -49,7 +51,7 @@ impl std::fmt::Display for Data {
}
}
pub static DATA: &[Data] = &[
pub(crate) static DATA: &[Data] = &[
Data("empty", EMPTY),
Data("no_tokens", NO_TOKENS),
Data("single_token", SINGLE_TOKEN),

View file

@ -1,7 +1,9 @@
#![allow(elided_lifetimes_in_paths)]
mod data;
mod parse_str {
use super::*;
use super::data;
#[divan::bench(args = data::DATA)]
fn ascii(bencher: divan::Bencher, sample: &data::Data) {
@ -12,7 +14,7 @@ mod parse_str {
bencher
.with_inputs(|| sample.content())
.input_counter(divan::counter::BytesCount::of_str)
.bench_local_values(|sample| parser.parse_str(sample).last())
.bench_local_values(|sample| parser.parse_str(sample).last());
}
#[divan::bench(args = data::DATA)]
@ -24,12 +26,12 @@ mod parse_str {
bencher
.with_inputs(|| sample.content())
.input_counter(divan::counter::BytesCount::of_str)
.bench_local_values(|sample| parser.parse_str(sample).last())
.bench_local_values(|sample| parser.parse_str(sample).last());
}
}
mod parse_bytes {
use super::*;
use super::data;
#[divan::bench(args = data::DATA)]
fn ascii(bencher: divan::Bencher, sample: &data::Data) {
@ -40,7 +42,7 @@ mod parse_bytes {
bencher
.with_inputs(|| sample.content().as_bytes())
.input_counter(divan::counter::BytesCount::of_slice)
.bench_local_values(|sample| parser.parse_bytes(sample).last())
.bench_local_values(|sample| parser.parse_bytes(sample).last());
}
#[divan::bench(args = data::DATA)]
@ -52,7 +54,7 @@ mod parse_bytes {
bencher
.with_inputs(|| sample.content().as_bytes())
.input_counter(divan::counter::BytesCount::of_slice)
.bench_local_values(|sample| parser.parse_bytes(sample).last())
.bench_local_values(|sample| parser.parse_bytes(sample).last());
}
}
@ -62,11 +64,11 @@ fn split(bencher: divan::Bencher, sample: &data::Data) {
typos::tokens::Identifier::new_unchecked(sample.content(), typos::tokens::Case::None, 0);
bencher
.counter(divan::counter::BytesCount::of_str(sample.content()))
.bench_local(|| symbol.split().last())
.bench_local(|| symbol.split().last());
}
mod parse_split_bytes {
use super::*;
use super::data;
#[divan::bench(args = data::DATA)]
fn ascii(bencher: divan::Bencher, sample: &data::Data) {
@ -77,7 +79,7 @@ mod parse_split_bytes {
bencher
.with_inputs(|| sample.content().as_bytes())
.input_counter(divan::counter::BytesCount::of_slice)
.bench_local_values(|sample| parser.parse_bytes(sample).flat_map(|i| i.split()).last())
.bench_local_values(|sample| parser.parse_bytes(sample).flat_map(|i| i.split()).last());
}
#[divan::bench(args = data::DATA)]
@ -89,7 +91,7 @@ mod parse_split_bytes {
bencher
.with_inputs(|| sample.content().as_bytes())
.input_counter(divan::counter::BytesCount::of_slice)
.bench_local_values(|sample| parser.parse_bytes(sample).flat_map(|i| i.split()).last())
.bench_local_values(|sample| parser.parse_bytes(sample).flat_map(|i| i.split()).last());
}
}

View file

@ -4,7 +4,7 @@ use clap::Parser;
use typos_cli::config;
#[derive(Debug, Copy, Clone, PartialEq, Eq, clap::ValueEnum, Default)]
pub enum Format {
pub(crate) enum Format {
Silent,
Brief,
#[default]
@ -145,7 +145,7 @@ pub(crate) struct FileArgs {
}
impl FileArgs {
pub fn to_config(&self) -> config::EngineConfig {
pub(crate) fn to_config(&self) -> config::EngineConfig {
config::EngineConfig {
binary: self.binary(),
check_filename: self.check_filename(),
@ -189,7 +189,7 @@ pub(crate) struct ConfigArgs {
}
impl ConfigArgs {
pub fn to_config(&self) -> config::Config {
pub(crate) fn to_config(&self) -> config::Config {
config::Config {
files: self.walk.to_config(),
overrides: self.overrides.to_config(),
@ -243,7 +243,7 @@ pub(crate) struct WalkArgs {
}
impl WalkArgs {
pub fn to_config(&self) -> config::Walk {
pub(crate) fn to_config(&self) -> config::Walk {
config::Walk {
extend_exclude: self.exclude.clone(),
ignore_hidden: self.ignore_hidden(),
@ -296,6 +296,6 @@ mod test {
#[test]
fn verify_app() {
use clap::CommandFactory;
Args::command().debug_assert()
Args::command().debug_assert();
}
}

View file

@ -12,14 +12,14 @@ const ERROR: anstyle::Style = anstyle::AnsiColor::BrightRed.on_default();
const INFO: anstyle::Style = anstyle::AnsiColor::BrightBlue.on_default();
const GOOD: anstyle::Style = anstyle::AnsiColor::BrightGreen.on_default();
pub struct MessageStatus<'r> {
pub(crate) struct MessageStatus<'r> {
typos_found: atomic::AtomicBool,
errors_found: atomic::AtomicBool,
reporter: &'r dyn Report,
}
impl<'r> MessageStatus<'r> {
pub fn new(reporter: &'r dyn Report) -> Self {
pub(crate) fn new(reporter: &'r dyn Report) -> Self {
Self {
typos_found: atomic::AtomicBool::new(false),
errors_found: atomic::AtomicBool::new(false),
@ -27,17 +27,17 @@ impl<'r> MessageStatus<'r> {
}
}
pub fn typos_found(&self) -> bool {
pub(crate) fn typos_found(&self) -> bool {
self.typos_found.load(atomic::Ordering::Relaxed)
}
pub fn errors_found(&self) -> bool {
pub(crate) fn errors_found(&self) -> bool {
self.errors_found.load(atomic::Ordering::Relaxed)
}
}
impl<'r> Report for MessageStatus<'r> {
fn report(&self, msg: Message) -> Result<(), std::io::Error> {
fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error> {
if msg.is_typo() {
self.typos_found.store(true, atomic::Ordering::Relaxed);
}
@ -49,18 +49,18 @@ impl<'r> Report for MessageStatus<'r> {
}
#[derive(Debug, Default)]
pub struct PrintSilent;
pub(crate) struct PrintSilent;
impl Report for PrintSilent {
fn report(&self, _msg: Message) -> Result<(), std::io::Error> {
fn report(&self, _msg: Message<'_>) -> Result<(), std::io::Error> {
Ok(())
}
}
pub struct PrintBrief;
pub(crate) struct PrintBrief;
impl Report for PrintBrief {
fn report(&self, msg: Message) -> Result<(), std::io::Error> {
fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error> {
match &msg {
Message::BinaryFile(msg) => {
log::info!("{}", msg);
@ -91,10 +91,10 @@ impl Report for PrintBrief {
}
}
pub struct PrintLong;
pub(crate) struct PrintLong;
impl Report for PrintLong {
fn report(&self, msg: Message) -> Result<(), std::io::Error> {
fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error> {
match &msg {
Message::BinaryFile(msg) => {
log::info!("{}", msg);
@ -125,7 +125,7 @@ impl Report for PrintLong {
}
}
fn print_brief_correction(msg: &Typo) -> Result<(), std::io::Error> {
fn print_brief_correction(msg: &Typo<'_>) -> Result<(), std::io::Error> {
let error = ERROR.render();
let good = GOOD.render();
let info = INFO.render();
@ -163,7 +163,7 @@ fn print_brief_correction(msg: &Typo) -> Result<(), std::io::Error> {
Ok(())
}
fn print_long_correction(msg: &Typo) -> Result<(), std::io::Error> {
fn print_long_correction(msg: &Typo<'_>) -> Result<(), std::io::Error> {
let error = ERROR.render();
let good = GOOD.render();
let info = INFO.render();
@ -271,10 +271,10 @@ fn context_display<'c>(context: &'c Option<Context<'c>>) -> &'c dyn std::fmt::Di
}
#[derive(Copy, Clone, Debug)]
pub struct PrintJson;
pub(crate) struct PrintJson;
impl Report for PrintJson {
fn report(&self, msg: Message) -> Result<(), std::io::Error> {
fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error> {
writeln!(stdout().lock(), "{}", serde_json::to_string(&msg).unwrap())?;
Ok(())
}

View file

@ -187,7 +187,7 @@ impl Walk {
#[serde(default)]
#[serde(transparent)]
pub struct TypeEngineConfig {
pub patterns: std::collections::HashMap<KString, GlobEngineConfig>,
pub patterns: HashMap<KString, GlobEngineConfig>,
}
impl TypeEngineConfig {
@ -244,7 +244,7 @@ impl TypeEngineConfig {
}
}
pub fn patterns(&self) -> impl Iterator<Item = (kstring::KString, GlobEngineConfig)> {
pub fn patterns(&self) -> impl Iterator<Item = (KString, GlobEngineConfig)> {
let mut engine = Self::from_defaults();
engine.update(self);
engine.patterns.into_iter()
@ -256,7 +256,7 @@ impl TypeEngineConfig {
#[serde(default)]
#[serde(rename_all = "kebab-case")]
pub struct GlobEngineConfig {
pub extend_glob: Vec<kstring::KString>,
pub extend_glob: Vec<KString>,
#[serde(flatten)]
pub engine: EngineConfig,
}
@ -406,10 +406,10 @@ pub struct DictConfig {
pub locale: Option<Locale>,
#[serde(with = "serde_regex")]
pub extend_ignore_identifiers_re: Vec<regex::Regex>,
pub extend_identifiers: HashMap<kstring::KString, kstring::KString>,
pub extend_identifiers: HashMap<KString, KString>,
#[serde(with = "serde_regex")]
pub extend_ignore_words_re: Vec<regex::Regex>,
pub extend_words: HashMap<kstring::KString, kstring::KString>,
pub extend_words: HashMap<KString, KString>,
}
impl DictConfig {
@ -536,7 +536,7 @@ impl Locale {
impl std::str::FromStr for Locale {
type Err = String;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"en" => Ok(Locale::En),
"en-us" => Ok(Locale::EnUs),
@ -549,7 +549,7 @@ impl std::str::FromStr for Locale {
}
impl std::fmt::Display for Locale {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match *self {
Locale::En => write!(f, "en"),
Locale::EnUs => write!(f, "en-us"),
@ -625,7 +625,7 @@ mod test {
let mut actual = base;
actual.update(&extended);
let expected: Vec<kstring::KString> = vec!["*.foo".into(), "*.bar".into()];
let expected: Vec<KString> = vec!["*.foo".into(), "*.bar".into()];
assert_eq!(actual.extend_glob, expected);
}

View file

@ -9,7 +9,7 @@
/// Please try to keep this list sorted lexicographically and wrapped to 79
/// columns (inclusive).
#[rustfmt::skip]
pub const DEFAULT_TYPES: &[(&str, &[&str])] = &[
pub(crate) const DEFAULT_TYPES: &[(&str, &[&str])] = &[
("agda", &["*.agda", "*.lagda"]),
("aidl", &["*.aidl"]),
("amake", &["*.mk", "*.bp"]),

View file

@ -43,7 +43,7 @@ impl BuiltIn {
self.correct_with_vars(word_case)?
};
for s in corrections.corrections_mut() {
case_correct(s, word_token.case())
case_correct(s, word_token.case());
}
Some(corrections)
}
@ -60,10 +60,7 @@ impl BuiltIn {
}
// Not using `Status` to avoid the allocations
fn correct_word_with_dict(
&self,
word: unicase::UniCase<&str>,
) -> Option<&'static [&'static str]> {
fn correct_word_with_dict(&self, word: UniCase<&str>) -> Option<&'static [&'static str]> {
typos_dict::WORD_TRIE.find(&word).copied()
}
}
@ -107,7 +104,7 @@ impl BuiltIn {
}
}
fn correct_with_vars(&self, word: unicase::UniCase<&str>) -> Option<Status<'static>> {
fn correct_with_vars(&self, word: UniCase<&str>) -> Option<Status<'static>> {
if self.is_vars_enabled() {
typos_vars::VARS_TRIE
.find(&word)
@ -218,7 +215,7 @@ pub struct Override<'i, 'w, D> {
ignored_identifiers: Vec<regex::Regex>,
identifiers: HashMap<&'i str, Status<'i>, ahash::RandomState>,
ignored_words: Vec<regex::Regex>,
words: HashMap<unicase::UniCase<&'w str>, Status<'w>, ahash::RandomState>,
words: HashMap<UniCase<&'w str>, Status<'w>, ahash::RandomState>,
inner: D,
}
@ -302,7 +299,7 @@ impl<'i, 'w, D: typos::Dictionary> typos::Dictionary for Override<'i, 'w, D> {
// HACK: couldn't figure out the lifetime issue with replacing `cloned` with `borrow`
if let Some(mut corrections) = self.words.get(&w).cloned() {
for s in corrections.corrections_mut() {
case_correct(s, word_token.case())
case_correct(s, word_token.case());
}
return Some(corrections);
}

View file

@ -9,7 +9,7 @@ pub trait FileChecker: Send + Sync {
&self,
path: &std::path::Path,
explicit: bool,
policy: &crate::policy::Policy,
policy: &crate::policy::Policy<'_, '_, '_>,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error>;
}
@ -22,7 +22,7 @@ impl FileChecker for Typos {
&self,
path: &std::path::Path,
explicit: bool,
policy: &crate::policy::Policy,
policy: &crate::policy::Policy<'_, '_, '_>,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
if policy.check_filenames {
@ -74,7 +74,7 @@ impl FileChecker for FixTypos {
&self,
path: &std::path::Path,
explicit: bool,
policy: &crate::policy::Policy,
policy: &crate::policy::Policy<'_, '_, '_>,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
if policy.check_files {
@ -149,7 +149,7 @@ impl FileChecker for DiffTypos {
&self,
path: &std::path::Path,
explicit: bool,
policy: &crate::policy::Policy,
policy: &crate::policy::Policy<'_, '_, '_>,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let mut content = Vec::new();
@ -180,7 +180,7 @@ impl FileChecker for DiffTypos {
}
if !fixes.is_empty() {
new_content = fix_buffer(buffer.clone(), fixes.into_iter());
content = buffer
content = buffer;
}
}
}
@ -253,7 +253,7 @@ impl FileChecker for Identifiers {
&self,
path: &std::path::Path,
explicit: bool,
policy: &crate::policy::Policy,
policy: &crate::policy::Policy<'_, '_, '_>,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let mut ignores: Option<Ignores> = None;
@ -315,7 +315,7 @@ impl FileChecker for Words {
&self,
path: &std::path::Path,
explicit: bool,
policy: &crate::policy::Policy,
policy: &crate::policy::Policy<'_, '_, '_>,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
let mut ignores: Option<Ignores> = None;
@ -385,7 +385,7 @@ impl FileChecker for FileTypes {
&self,
path: &std::path::Path,
explicit: bool,
policy: &crate::policy::Policy,
policy: &crate::policy::Policy<'_, '_, '_>,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
// Check `policy.binary` first so we can easily check performance of walking vs reading
@ -415,7 +415,7 @@ impl FileChecker for FoundFiles {
&self,
path: &std::path::Path,
explicit: bool,
policy: &crate::policy::Policy,
policy: &crate::policy::Policy<'_, '_, '_>,
reporter: &dyn report::Report,
) -> Result<(), std::io::Error> {
// Check `policy.binary` first so we can easily check performance of walking vs reading
@ -667,7 +667,7 @@ fn fix_buffer(mut buffer: Vec<u8>, typos: impl Iterator<Item = typos::Typo<'stat
pub fn walk_path(
walk: ignore::Walk,
checks: &dyn FileChecker,
engine: &crate::policy::ConfigEngine,
engine: &crate::policy::ConfigEngine<'_>,
reporter: &dyn report::Report,
) -> Result<(), ignore::Error> {
for entry in walk {
@ -679,7 +679,7 @@ pub fn walk_path(
pub fn walk_path_parallel(
walk: ignore::WalkParallel,
checks: &dyn FileChecker,
engine: &crate::policy::ConfigEngine,
engine: &crate::policy::ConfigEngine<'_>,
reporter: &dyn report::Report,
) -> Result<(), ignore::Error> {
let error: std::sync::Mutex<Result<(), ignore::Error>> = std::sync::Mutex::new(Ok(()));
@ -701,7 +701,7 @@ pub fn walk_path_parallel(
fn walk_entry(
entry: Result<ignore::DirEntry, ignore::Error>,
checks: &dyn FileChecker,
engine: &crate::policy::ConfigEngine,
engine: &crate::policy::ConfigEngine<'_>,
reporter: &dyn report::Report,
) -> Result<(), ignore::Error> {
let entry = match entry {

View file

@ -4,16 +4,16 @@ use std::path::Path;
use kstring::KString;
#[derive(Default, Clone, Debug)]
pub struct TypesBuilder {
pub(crate) struct TypesBuilder {
definitions: BTreeMap<KString, Vec<(KString, usize)>>,
}
impl TypesBuilder {
pub fn new() -> Self {
pub(crate) fn new() -> Self {
Default::default()
}
pub fn add_defaults(&mut self) {
pub(crate) fn add_defaults(&mut self) {
self.definitions.extend(
crate::default_types::DEFAULT_TYPES
.iter()
@ -25,11 +25,11 @@ impl TypesBuilder {
);
}
pub fn contains_name(&self, name: &str) -> bool {
pub(crate) fn contains_name(&self, name: &str) -> bool {
self.definitions.contains_key(name)
}
pub fn add(&mut self, name: impl Into<KString>, glob: impl Into<KString>) {
pub(crate) fn add(&mut self, name: impl Into<KString>, glob: impl Into<KString>) {
let name = name.into();
let glob = glob.into();
let weight = self.definitions.len();
@ -39,7 +39,7 @@ impl TypesBuilder {
.push((glob, weight));
}
pub fn build(self) -> Result<Types, anyhow::Error> {
pub(crate) fn build(self) -> Result<Types, anyhow::Error> {
let mut definitions = self
.definitions
.iter()
@ -110,7 +110,7 @@ enum GlobPart<'s> {
}
#[derive(Default, Clone, Debug)]
pub struct Types {
pub(crate) struct Types {
definitions: BTreeMap<KString, Vec<KString>>,
glob_to_name: Vec<KString>,
set: globset::GlobSet,
@ -119,11 +119,11 @@ pub struct Types {
}
impl Types {
pub fn definitions(&self) -> &BTreeMap<KString, Vec<KString>> {
pub(crate) fn definitions(&self) -> &BTreeMap<KString, Vec<KString>> {
&self.definitions
}
pub fn file_matched(&self, path: &std::path::Path) -> Option<&str> {
pub(crate) fn file_matched(&self, path: &Path) -> Option<&str> {
let mut mpath = Path::new(path);
let mut matches = self.matches.get_or_default().borrow_mut();
loop {

View file

@ -4,9 +4,9 @@
//! [`default_types`]: crate::default_types
/// Set `check_file` to `false` for these types.
pub const NO_CHECK_TYPES: &[&str] = &["cert", "lock"];
pub(crate) const NO_CHECK_TYPES: &[&str] = &["cert", "lock"];
pub const TYPE_SPECIFIC_DICTS: &[(&str, StaticDictConfig)] = &[
pub(crate) const TYPE_SPECIFIC_DICTS: &[(&str, StaticDictConfig)] = &[
(
"css",
StaticDictConfig {
@ -80,9 +80,9 @@ pub const TYPE_SPECIFIC_DICTS: &[(&str, StaticDictConfig)] = &[
),
];
pub struct StaticDictConfig {
pub ignore_idents: &'static [&'static str],
pub ignore_words: &'static [&'static str],
pub(crate) struct StaticDictConfig {
pub(crate) ignore_idents: &'static [&'static str],
pub(crate) ignore_words: &'static [&'static str],
}
#[cfg(test)]

View file

@ -1,5 +1,9 @@
/// `typos_cli`'s API is unstable. Open an issue for starting a discussion on getting a subset
/// stabilized.
//! `typos_cli`'s API is unstable. Open an issue for starting a discussion on getting a subset
//! stabilized.
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
#[doc(hidden)]
pub mod config;

View file

@ -282,19 +282,19 @@ struct Intern<T> {
}
impl<T> Intern<T> {
pub fn new() -> Self {
pub(crate) fn new() -> Self {
Self {
data: Default::default(),
}
}
pub fn intern(&mut self, value: T) -> usize {
pub(crate) fn intern(&mut self, value: T) -> usize {
let symbol = self.data.len();
self.data.push(value);
symbol
}
pub fn get(&self, symbol: usize) -> &T {
pub(crate) fn get(&self, symbol: usize) -> &T {
&self.data[symbol]
}
}

View file

@ -3,7 +3,7 @@
use std::borrow::Cow;
pub trait Report: Send + Sync {
fn report(&self, msg: Message) -> Result<(), std::io::Error>;
fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error>;
}
#[derive(Clone, Debug, serde::Serialize, derive_more::From)]
@ -101,7 +101,7 @@ pub enum Context<'m> {
}
impl<'m> std::fmt::Display for Context<'m> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
match self {
Context::File(c) => write!(f, "{}:{}", c.path.display(), c.line_num),
Context::Path(c) => write!(f, "{}", c.path.display()),

View file

@ -11,6 +11,10 @@ edition.workspace = true
rust-version.workspace = true
include.workspace = true
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
unicase = "2.7"
dictgen = { version = "^0.2", path = "../dictgen" }
@ -25,3 +29,6 @@ dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
varcon = { version = "^0.7", path = "../varcon" }
snapbox = { version = "0.5.9", features = ["path"] }
indexmap = "2.2.6"
[lints]
workspace = true

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
mod word_codegen;
pub use crate::word_codegen::WORD_TRIE;

View file

@ -1,5 +1,6 @@
// This file is @generated by crates/typos-dict/tests/codegen.rs
#![allow(clippy::unreadable_literal)]
#![allow(unreachable_pub)]
pub static WORD_TRIE: dictgen::DictTrie<&'static [&'static str]> = dictgen::DictTrie {
root: &WORD_NODE,

View file

@ -17,6 +17,7 @@ fn generate<W: std::io::Write>(file: &mut W, prefix: &str, dict: &[u8]) {
)
.unwrap();
writeln!(file, "#![allow(clippy::unreadable_literal)]",).unwrap();
writeln!(file, "#![allow(unreachable_pub)]",).unwrap();
writeln!(file).unwrap();
let records: Vec<_> = csv::ReaderBuilder::new()

View file

@ -189,7 +189,7 @@ fn is_word(word: &str) -> bool {
word.chars().all(|c| c.is_alphabetic())
}
fn varcon_words() -> HashSet<unicase::UniCase<&'static str>> {
fn varcon_words() -> HashSet<UniCase<&'static str>> {
// Even include improper ones because we should be letting varcon handle that rather than our
// dictionary
varcon::VARCON
@ -241,7 +241,7 @@ fn find_best_match<'c>(
matches.into_iter().next().map(|(_, r)| r)
}
fn allowed_words() -> std::collections::HashMap<String, String> {
fn allowed_words() -> HashMap<String, String> {
let allowed_path = "assets/english.csv";
let english_data = std::fs::read(allowed_path).unwrap();
let mut allowed_english = csv::ReaderBuilder::new()

View file

@ -11,6 +11,10 @@ edition.workspace = true
rust-version.workspace = true
include.workspace = true
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
unicase = "2.7"
dictgen = { version = "^0.2", path = "../dictgen" }
@ -25,3 +29,6 @@ codegenrs = "3.0"
itertools = "0.12"
dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
snapbox = { version = "0.5.9", features = ["path"] }
[lints]
workspace = true

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
mod vars_codegen;
pub use crate::vars_codegen::*;

View file

@ -22,7 +22,7 @@ static CATEGORIES: [varcon::Category; 4] = [
// Other basically means all
];
fn generate_variations<W: std::io::Write>(file: &mut W) {
fn generate_variations<W: Write>(file: &mut W) {
let entries = entries();
writeln!(
@ -91,7 +91,7 @@ fn generate_variations<W: std::io::Write>(file: &mut W) {
file,
"VARS",
"&[(u8, &VariantsMap)]",
entry_sets.iter().flat_map(|kv| {
entry_sets.iter().filter_map(|kv| {
let (word, data) = kv;
if is_always_valid(data) {
// No need to convert from current form to target form
@ -119,7 +119,7 @@ fn generate_variations<W: std::io::Write>(file: &mut W) {
}
}
fn generate_entry(file: &mut impl std::io::Write, symbol: &str, entry: &varcon_core::Entry) {
fn generate_entry(file: &mut impl Write, symbol: &str, entry: &varcon_core::Entry) {
writeln!(file, "pub(crate) static {}: VariantsMap = [", symbol).unwrap();
for category in &CATEGORIES {
let corrections = collect_correct(entry, *category);

View file

@ -11,6 +11,10 @@ edition.workspace = true
rust-version.workspace = true
include.workspace = true
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
anyhow = "1.0"
thiserror = "1.0"
@ -22,3 +26,6 @@ simdutf8 = "0.1.4"
itertools = "0.12"
unicode-segmentation = "1.11.0"
bstr = "1.9"
[lints]
workspace = true

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
mod check;
mod dict;

View file

@ -130,7 +130,7 @@ impl<'s> Iterator for Utf8Chunks<'s> {
mod parser {
use winnow::combinator::trace;
use winnow::combinator::*;
use winnow::combinator::{alt, eof, opt, peek, preceded, repeat, terminated};
use winnow::error::ParserError;
use winnow::prelude::*;
use winnow::stream::AsBStr;
@ -139,7 +139,7 @@ mod parser {
use winnow::stream::SliceLen;
use winnow::stream::Stream;
use winnow::stream::StreamIsPartial;
use winnow::token::*;
use winnow::token::{one_of, take_while};
pub(crate) fn next_identifier<T>(input: &mut T) -> PResult<<T as Stream>::Slice, ()>
where
@ -944,7 +944,7 @@ mod test {
let parser = Tokenizer::new();
let input = "";
let expected: Vec<Identifier> = vec![];
let expected: Vec<Identifier<'_>> = vec![];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
let actual: Vec<_> = parser.parse_str(input).collect();
@ -956,7 +956,7 @@ mod test {
let parser = Tokenizer::new();
let input = "word";
let expected: Vec<Identifier> = vec![Identifier::new_unchecked("word", Case::None, 0)];
let expected: Vec<Identifier<'_>> = vec![Identifier::new_unchecked("word", Case::None, 0)];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
let actual: Vec<_> = parser.parse_str(input).collect();
@ -968,7 +968,7 @@ mod test {
let parser = Tokenizer::new();
let input = "A B";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("A", Case::None, 0),
Identifier::new_unchecked("B", Case::None, 2),
];
@ -983,7 +983,7 @@ mod test {
let parser = Tokenizer::new();
let input = "A.B";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("A", Case::None, 0),
Identifier::new_unchecked("B", Case::None, 2),
];
@ -998,7 +998,7 @@ mod test {
let parser = Tokenizer::new();
let input = "A::B";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("A", Case::None, 0),
Identifier::new_unchecked("B", Case::None, 3),
];
@ -1013,7 +1013,7 @@ mod test {
let parser = Tokenizer::new();
let input = "A_B";
let expected: Vec<Identifier> = vec![Identifier::new_unchecked("A_B", Case::None, 0)];
let expected: Vec<Identifier<'_>> = vec![Identifier::new_unchecked("A_B", Case::None, 0)];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
let actual: Vec<_> = parser.parse_str(input).collect();
@ -1025,7 +1025,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello 1st 2nd 3rd 4th __5th__ World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 30),
];
@ -1040,7 +1040,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello 0xDEADBEEF World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 17),
];
@ -1055,7 +1055,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello 123e4567-e89b-12d3-a456-426652340000 World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 43),
];
@ -1070,7 +1070,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello 123E4567-E89B-12D3-A456-426652340000 World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 43),
];
@ -1099,7 +1099,7 @@ mod test {
("D41D8CD98F00B204E9800998ECF8427", false),
] {
let input = format!("Hello {} World", hashlike);
let mut expected: Vec<Identifier> = vec![
let mut expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 7+hashlike.len()),
];
@ -1118,7 +1118,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = " /// at /rustc/c7087fe00d2ba919df1d813c040a5d47e43b0fe7\\/src\\libstd\\rt.rs:51";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("at", Case::None, 25),
// `rustc...` looks like the start of a URL
Identifier::new_unchecked("rs", Case::None, 91),
@ -1134,7 +1134,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Good Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X1Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X122Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X12== Bye";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Good", Case::None, 0),
Identifier::new_unchecked("Bye", Case::None, 134),
];
@ -1149,7 +1149,8 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = r#""ed25519:1": "Wm+VzmOUOz08Ds+0NTWb1d4CZrVsJSikkeRxh6aCcUwu6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA==""#;
let expected: Vec<Identifier> = vec![Identifier::new_unchecked("ed25519", Case::None, 1)];
let expected: Vec<Identifier<'_>> =
vec![Identifier::new_unchecked("ed25519", Case::None, 1)];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
let actual: Vec<_> = parser.parse_str(input).collect();
@ -1161,7 +1162,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = r#" "integrity": "sha512-hCmlUAIlUiav8Xdqw3Io4LcpA1DOt7h3LSTAC4G6JGHFFaWzI6qvFt9oilvl8BmkbBRX1IhM90ZAmpk68zccQA==","#;
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("integrity", Case::None, 8),
Identifier::new_unchecked("sha512", Case::None, 21),
];
@ -1176,7 +1177,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Good example@example.com Bye";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Good", Case::None, 0),
Identifier::new_unchecked("Bye", Case::None, 25),
];
@ -1191,7 +1192,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Good example.com/hello Bye";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Good", Case::None, 0),
Identifier::new_unchecked("Bye", Case::None, 23),
];
@ -1207,7 +1208,7 @@ mod test {
let input =
"Good http://user:password@example.com:3142/hello?query=value&extra=two#fragment,split Bye";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Good", Case::None, 0),
Identifier::new_unchecked("Bye", Case::None, 86),
];
@ -1222,7 +1223,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello 0Hello 124 0xDEADBEEF World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("0Hello", Case::None, 6),
Identifier::new_unchecked("World", Case::None, 28),
@ -1238,7 +1239,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello \\Hello \\ \\\\ World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 18),
];
@ -1253,7 +1254,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello \\n\\n World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 11),
];
@ -1268,7 +1269,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello \\nanana\\nanana World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 21),
];
@ -1283,7 +1284,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello %Hello World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("World", Case::None, 13),
];
@ -1298,7 +1299,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "#[derive(Clone)] #aaa # #111 #AABBCC #hello #AABBCCDD #1175BA World";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("derive", Case::None, 2),
Identifier::new_unchecked("Clone", Case::None, 9),
Identifier::new_unchecked("hello", Case::None, 38),
@ -1315,7 +1316,7 @@ mod test {
let parser = TokenizerBuilder::new().build();
let input = "Hello {{% foo %}} world!";
let expected: Vec<Identifier> = vec![
let expected: Vec<Identifier<'_>> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
Identifier::new_unchecked("foo", Case::None, 10),
Identifier::new_unchecked("world", Case::None, 18),
@ -1331,7 +1332,7 @@ mod test {
let parser = TokenizerBuilder::new().unicode(false).build();
let input = "appliqués";
let expected: Vec<Identifier> = vec![];
let expected: Vec<Identifier<'_>> = vec![];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
let actual: Vec<_> = parser.parse_str(input).collect();

View file

@ -10,6 +10,10 @@ edition.workspace = true
rust-version.workspace = true
include.workspace = true
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[features]
default = []
parser = ["dep:winnow"]
@ -19,5 +23,5 @@ flags = ["dep:enumflags2"]
winnow = { version = "0.6.5", optional = true }
enumflags2 = { version = "0.7", optional = true }
[package.metadata.docs.rs]
features = [ "parser", "flags" ]
[lints]
workspace = true

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
pub mod borrowed;
#[cfg(feature = "parser")]

View file

@ -1,7 +1,7 @@
use winnow::combinator::trace;
use winnow::prelude::*;
use crate::*;
use crate::{Category, Cluster, Entry, Pos, Tag, Type, Variant};
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ClusterIter<'i> {

View file

@ -11,6 +11,10 @@ edition.workspace = true
rust-version.workspace = true
include.workspace = true
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[features]
default = ["all"]
all = ["flags"]
@ -23,3 +27,6 @@ varcon-core = { version = "^4.0", path = "../varcon-core" }
codegenrs = "3.0"
varcon-core = { version = "^4.0", path = "../varcon-core", features = ["parser"] }
snapbox = { version = "0.5.9", features = ["path"] }
[lints]
workspace = true

View file

@ -1,7 +1,7 @@
// This file is @generated by crates/varcon/tests/codegen.rs
#![allow(clippy::unreadable_literal)]
use crate::*;
use crate::{Category, Cluster, Entry, Pos, Tag, Type, Variant};
pub static VARCON: &[Cluster] = &[
Cluster {

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
mod codegen;
pub use codegen::*;

View file

@ -1,3 +1,5 @@
#![allow(clippy::self_named_module_files)] // false positive
const DICT: &[u8] = include_bytes!("../assets/varcon.txt");
#[test]
@ -22,7 +24,11 @@ fn generate<W: std::io::Write>(file: &mut W) {
.unwrap();
writeln!(file, "#![allow(clippy::unreadable_literal)]",).unwrap();
writeln!(file).unwrap();
writeln!(file, "use crate::*;").unwrap();
writeln!(
file,
"use crate::{{Category, Cluster, Entry, Pos, Tag, Type, Variant}};"
)
.unwrap();
writeln!(file).unwrap();
writeln!(file, "pub static VARCON: &[Cluster] = &[").unwrap();

View file

@ -25,3 +25,6 @@ itertools = "0.12"
codegenrs = "3.0"
dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
snapbox = { version = "0.5.9", features = ["path"] }
[lints]
workspace = true

View file

@ -1,3 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
mod dict_codegen;
pub use crate::dict_codegen::*;