Mirror of https://github.com/crate-ci/typos.git (synced 2024-11-24 01:50:56 -05:00)

Commit b6c895ea49 ("chore: Update from _rust/main template")

55 changed files with 414 additions and 203 deletions
```diff
@@ -1,5 +1,4 @@
-msrv = "1.75" # MSRV
-warn-on-all-wildcard-imports = true
+allow-print-in-tests = true
 allow-expect-in-tests = true
 allow-unwrap-in-tests = true
 allow-dbg-in-tests = true
```
.github/renovate.json5 (vendored): 15 changes

```diff
@@ -3,6 +3,7 @@
     'before 5am on the first day of the month',
   ],
   semanticCommits: 'enabled',
+  commitMessageLowerCase: 'never',
   configMigration: true,
   dependencyDashboard: true,
   customManagers: [
@@ -17,29 +18,28 @@
         '^\\.github/workflows/rust-next.yml$',
       ],
       matchStrings: [
-        'MSRV.*?(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)',
-        '(?<currentValue>\\d+\\.\\d+(\\.\\d+)?).*?MSRV',
+        'STABLE.*?(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)',
+        '(?<currentValue>\\d+\\.\\d+(\\.\\d+)?).*?STABLE',
       ],
-      depNameTemplate: 'rust',
+      depNameTemplate: 'STABLE',
       packageNameTemplate: 'rust-lang/rust',
       datasourceTemplate: 'github-releases',
     },
   ],
   packageRules: [
     {
-      commitMessageTopic: 'MSRV',
+      commitMessageTopic: 'Rust Stable',
       matchManagers: [
        'custom.regex',
      ],
      matchPackageNames: [
-        'rust',
+        'STABLE',
      ],
-      minimumReleaseAge: '126 days', // 3 releases * 6 weeks per release * 7 days per week
-      internalChecksFilter: 'strict',
      extractVersion: '^(?<version>\\d+\\.\\d+)', // Drop the patch version
      schedule: [
        '* * * * *',
      ],
+      automerge: true,
    },
    // Goals:
    // - Rollup safe upgrades to reduce CI runner load
@@ -62,6 +62,7 @@
      matchCurrentVersion: '>=1.0.0',
      matchUpdateTypes: [
        'minor',
+        'patch',
      ],
      automerge: true,
      groupName: 'compatible',
```
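The `matchStrings` switch from `MSRV` to `STABLE` retargets Renovate's custom manager at the comments that now mark pinned stable toolchains in the workflows below (e.g. `toolchain: "1.76" # STABLE`). As an illustration only (not part of the commit), here is how the second pattern captures the version, using the `regex` crate; named groups written `(?<name>...)` assume regex 1.9 or later:

```rust
// Illustration only: the second Renovate matchString, applied to a line
// like the ones in ci.yml below. `currentValue` is the named capture
// that Renovate reads back as the current version.
fn main() {
    let re = regex::Regex::new(r"(?<currentValue>\d+\.\d+(\.\d+)?).*?STABLE").unwrap();
    let caps = re.captures(r#"toolchain: "1.76" # STABLE"#).unwrap();
    assert_eq!(&caps["currentValue"], "1.76");
}
```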
.github/settings.yml (vendored): 26 changes

```diff
@@ -41,14 +41,18 @@ labels:
     color: '#c2e0c6'
     description: "Help wanted!"
 
-branches:
-  - name: master
-    protection:
-      required_pull_request_reviews: null
-      required_conversation_resolution: true
-      required_status_checks:
-        # Required. Require branches to be up to date before merging.
-        strict: false
-        contexts: ["CI", "Lint Commits", "Spell Check with Typos"]
-      enforce_admins: false
-      restrictions: null
+# This serves more as documentation.
+# Branch protection API was replaced by rulesets but settings isn't updated.
+# See https://github.com/repository-settings/app/issues/825
+#
+# branches:
+#   - name: master
+#     protection:
+#       required_pull_request_reviews: null
+#       required_conversation_resolution: true
+#       required_status_checks:
+#         # Required. Require branches to be up to date before merging.
+#         strict: false
+#         contexts: ["CI", "Spell Check with Typos"]
+#       enforce_admins: false
+#       restrictions: null
```
.github/workflows/audit.yml (vendored): 4 changes

```diff
@@ -17,6 +17,10 @@ env:
   CARGO_TERM_COLOR: always
   CLICOLOR: 1
 
+concurrency:
+  group: "${{ github.workflow }}-${{ github.ref }}"
+  cancel-in-progress: true
+
 jobs:
   security_audit:
     permissions:
```
.github/workflows/ci.yml (vendored): 69 changes

```diff
@@ -15,21 +15,27 @@ env:
   CLICOLOR: 1
   COLUMNS: 130
 
+concurrency:
+  group: "${{ github.workflow }}-${{ github.ref }}"
+  cancel-in-progress: true
+
 jobs:
   ci:
     permissions:
       contents: none
     name: CI
-    needs: [test, msrv, docs, rustfmt, clippy]
+    needs: [test, msrv, lockfile, docs, rustfmt, clippy]
     runs-on: ubuntu-latest
+    if: "always()"
     steps:
-      - name: Done
-        run: exit 0
+      - name: Failed
+        run: exit 1
+        if: "contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped')"
   test:
     name: Test
     strategy:
       matrix:
-        os: ["ubuntu-latest", "windows-latest", "macos-latest"]
+        os: ["ubuntu-latest", "windows-latest", "macos-14"]
         rust: ["stable"]
     continue-on-error: ${{ matrix.rust != 'stable' }}
     runs-on: ${{ matrix.os }}
@@ -42,16 +48,13 @@ jobs:
           toolchain: ${{ matrix.rust }}
           components: rustfmt
       - uses: Swatinem/rust-cache@v2
+      - uses: taiki-e/install-action@cargo-hack
       - name: Build
-        run: cargo test --no-run --workspace --all-features
-      - name: Default features
-        run: cargo test --workspace
-      - name: All features
-        run: cargo test --workspace --all-features
-      - name: No-default features
-        run: cargo test --workspace --no-default-features
+        run: cargo test --workspace --no-run
+      - name: Test
+        run: cargo hack test --feature-powerset --workspace
   msrv:
-    name: "Check MSRV: 1.75"
+    name: "Check MSRV"
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
@@ -59,14 +62,11 @@ jobs:
       - name: Install Rust
         uses: dtolnay/rust-toolchain@stable
         with:
-          toolchain: "1.75" # MSRV
+          toolchain: stable
       - uses: Swatinem/rust-cache@v2
+      - uses: taiki-e/install-action@cargo-hack
       - name: Default features
-        run: cargo check --workspace --all-targets
-      - name: All features
-        run: cargo check --workspace --all-targets --all-features
-      - name: No-default features
-        run: cargo check --workspace --all-targets --no-default-features
+        run: cargo hack check --feature-powerset --locked --rust-version --ignore-private --workspace --all-targets
   lockfile:
     runs-on: ubuntu-latest
     steps:
@@ -78,7 +78,7 @@ jobs:
           toolchain: stable
       - uses: Swatinem/rust-cache@v2
      - name: "Is lockfile updated?"
-        run: cargo fetch --locked
+        run: cargo update --workspace --locked
   docs:
     name: Docs
     runs-on: ubuntu-latest
@@ -88,7 +88,7 @@ jobs:
       - name: Install Rust
         uses: dtolnay/rust-toolchain@stable
         with:
-          toolchain: stable
+          toolchain: "1.76" # STABLE
       - uses: Swatinem/rust-cache@v2
       - name: Check documentation
         env:
@@ -103,9 +103,7 @@ jobs:
       - name: Install Rust
         uses: dtolnay/rust-toolchain@stable
         with:
-          # Not MSRV because its harder to jump between versions and people are
-          # more likely to have stable
-          toolchain: stable
+          toolchain: "1.76" # STABLE
           components: rustfmt
       - uses: Swatinem/rust-cache@v2
       - name: Check formatting
@@ -121,13 +119,13 @@ jobs:
       - name: Install Rust
         uses: dtolnay/rust-toolchain@stable
         with:
-          toolchain: "1.75" # MSRV
+          toolchain: "1.76" # STABLE
           components: clippy
       - uses: Swatinem/rust-cache@v2
       - name: Install SARIF tools
-        run: cargo install clippy-sarif --version 0.3.4 --locked # Held back due to msrv
+        run: cargo install clippy-sarif --locked
       - name: Install SARIF tools
-        run: cargo install sarif-fmt --version 0.3.4 --locked # Held back due to msrv
+        run: cargo install sarif-fmt --locked
       - name: Check
         run: >
           cargo clippy --workspace --all-features --all-targets --message-format=json -- -D warnings --allow deprecated
@@ -142,3 +140,22 @@ jobs:
           wait-for-processing: true
       - name: Report status
         run: cargo clippy --workspace --all-features --all-targets -- -D warnings --allow deprecated
+  coverage:
+    name: Coverage
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: stable
+      - uses: Swatinem/rust-cache@v2
+      - name: Install cargo-tarpaulin
+        run: cargo install cargo-tarpaulin
+      - name: Gather coverage
+        run: cargo tarpaulin --output-dir coverage --out lcov
+      - name: Publish to Coveralls
+        uses: coverallsapp/github-action@master
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/committed.yml (vendored): 4 changes

```diff
@@ -11,6 +11,10 @@ env:
   CARGO_TERM_COLOR: always
   CLICOLOR: 1
 
+concurrency:
+  group: "${{ github.workflow }}-${{ github.ref }}"
+  cancel-in-progress: true
+
 jobs:
   committed:
     name: Lint Commits
```
.github/workflows/pre-commit.yml (vendored): 4 changes

```diff
@@ -12,6 +12,10 @@ env:
   CARGO_TERM_COLOR: always
   CLICOLOR: 1
 
+concurrency:
+  group: "${{ github.workflow }}-${{ github.ref }}"
+  cancel-in-progress: true
+
 jobs:
   pre-commit:
     permissions:
```
.github/workflows/rust-next.yml (vendored): 28 changes

```diff
@@ -13,12 +13,16 @@ env:
   CLICOLOR: 1
   COLUMNS: 130
 
+concurrency:
+  group: "${{ github.workflow }}-${{ github.ref }}"
+  cancel-in-progress: true
+
 jobs:
   test:
     name: Test
     strategy:
       matrix:
-        os: ["ubuntu-latest", "windows-latest", "macos-latest"]
+        os: ["ubuntu-latest", "windows-latest", "macos-latest", "macos-14"]
         rust: ["stable", "beta"]
         include:
           - os: ubuntu-latest
@@ -34,12 +38,11 @@ jobs:
           toolchain: ${{ matrix.rust }}
           components: rustfmt
       - uses: Swatinem/rust-cache@v2
-      - name: Default features
-        run: cargo test --workspace
-      - name: All features
-        run: cargo test --workspace --all-features
-      - name: No-default features
-        run: cargo test --workspace --no-default-features
+      - uses: taiki-e/install-action@cargo-hack
+      - name: Build
+        run: cargo test --workspace --no-run
+      - name: Test
+        run: cargo hack test --feature-powerset --workspace
   latest:
     name: "Check latest dependencies"
     runs-on: ubuntu-latest
@@ -52,11 +55,10 @@ jobs:
           toolchain: stable
           components: rustfmt
       - uses: Swatinem/rust-cache@v2
+      - uses: taiki-e/install-action@cargo-hack
       - name: Update dependencues
         run: cargo update
-      - name: Default features
-        run: cargo test --workspace
-      - name: All features
-        run: cargo test --workspace --all-features
-      - name: No-default features
-        run: cargo test --workspace --no-default-features
+      - name: Build
+        run: cargo test --workspace --no-run
+      - name: Test
+        run: cargo hack test --feature-powerset --workspace
```
Cargo.toml: 68 changes

```diff
@@ -21,6 +21,74 @@ include = [
   "examples/**/*"
 ]
 
+[workspace.lints.rust]
+rust_2018_idioms = "warn"
+unreachable_pub = "warn"
+unsafe_op_in_unsafe_fn = "warn"
+unused_lifetimes = "warn"
+unused_macro_rules = "warn"
+unused_qualifications = "warn"
+
+[workspace.lints.clippy]
+bool_assert_comparison = "allow"
+branches_sharing_code = "allow"
+checked_conversions = "warn"
+collapsible_else_if = "allow"
+create_dir = "warn"
+dbg_macro = "warn"
+debug_assert_with_mut_call = "warn"
+doc_markdown = "warn"
+empty_enum = "warn"
+enum_glob_use = "warn"
+expl_impl_clone_on_copy = "warn"
+explicit_deref_methods = "warn"
+explicit_into_iter_loop = "warn"
+fallible_impl_from = "warn"
+filter_map_next = "warn"
+flat_map_option = "warn"
+float_cmp_const = "warn"
+fn_params_excessive_bools = "warn"
+from_iter_instead_of_collect = "warn"
+if_same_then_else = "allow"
+implicit_clone = "warn"
+imprecise_flops = "warn"
+inconsistent_struct_constructor = "warn"
+inefficient_to_string = "warn"
+infinite_loop = "warn"
+invalid_upcast_comparisons = "warn"
+large_digit_groups = "warn"
+large_stack_arrays = "warn"
+large_types_passed_by_value = "warn"
+let_and_return = "allow" # sometimes good to name what you are returning
+linkedlist = "warn"
+lossy_float_literal = "warn"
+macro_use_imports = "warn"
+match_wildcard_for_single_variants = "warn"
+mem_forget = "warn"
+mutex_integer = "warn"
+needless_for_each = "warn"
+negative_feature_names = "warn"
+path_buf_push_overwrite = "warn"
+ptr_as_ptr = "warn"
+rc_mutex = "warn"
+redundant_feature_names = "warn"
+ref_option_ref = "warn"
+rest_pat_in_fully_bound_structs = "warn"
+same_functions_in_if_condition = "warn"
+self_named_module_files = "warn"
+semicolon_if_nothing_returned = "warn"
+single_match_else = "warn"
+str_to_string = "warn"
+string_add = "warn"
+string_add_assign = "warn"
+string_lit_as_bytes = "warn"
+string_to_string = "warn"
+todo = "warn"
+trait_duplication_in_bounds = "warn"
+verbose_file_reads = "warn"
+wildcard_imports = "warn"
+zero_sized_map_values = "warn"
+
 [profile.dev]
 panic = "abort"
```
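These tables define the lints once at the workspace level; each member crate opts in through the `[lints] workspace = true` stanzas added in the manifest hunks below. As a hedged illustration (not from the repo) of one lint that drives many later hunks, `semicolon_if_nothing_returned` is what motivates the repeated `foo(...)` to `foo(...);` changes:

```rust
// Illustration only: with `semicolon_if_nothing_returned = "warn"` active,
// clippy asks for an explicit `;` when a block's final expression
// evaluates to `()`.
#![warn(clippy::semicolon_if_nothing_returned)]

fn shout(words: &mut [String]) {
    for w in words.iter_mut() {
        w.make_ascii_uppercase(); // returns `()`; without the `;`, clippy warns
    }
}

fn main() {
    let mut words = vec!["typos".to_string()];
    shout(&mut words);
    println!("{}", words[0]);
}
```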
```diff
@@ -12,8 +12,9 @@ edition.workspace = true
 rust-version.workspace = true
 include.workspace = true
 
-[package.metadata.release]
-release = false
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
 
 [dependencies]
 unicase = "2.7"
@@ -26,3 +27,6 @@ codegenrs = "3.0"
 dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
 snapbox = { version = "0.5.9", features = ["path"] }
 typos = { path = "../typos" }
+
+[lints]
+workspace = true
```
```diff
@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 mod dict_codegen;
 
 pub use crate::dict_codegen::*;
```
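The two `print_*` warnings keep stray output out of library code, and the `docsrs` attribute pairs with the `rustdoc-args = ["--cfg", "docsrs"]` metadata added above. A sketch of what `doc_auto_cfg` buys, assuming a crate with a cargo feature named `map` (an illustration, not the crate's real contents):

```rust
// Crate-root sketch: on docs.rs, where RUSTDOCFLAGS carries `--cfg docsrs`
// and nightly rustdoc runs, `doc_auto_cfg` labels feature-gated items
// ("Available on crate feature `map` only") with no per-item attributes.
#![cfg_attr(docsrs, feature(doc_auto_cfg))]

#[cfg(feature = "map")]
pub mod map {}

pub mod table {}
```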
```diff
@@ -10,6 +10,10 @@ edition.workspace = true
 rust-version.workspace = true
 include.workspace = true
 
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
 [features]
 default = ["std"]
 std = []
@@ -21,3 +25,6 @@ unicase = "2.7"
 phf = { version = "0.11", features = ["unicase"], optional = true }
 phf_codegen = { version = "0.11", optional = true }
 phf_shared = { version = "0.11", optional = true }
+
+[lints]
+workspace = true
```
```diff
@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 #[cfg(feature = "map")]
 mod map;
 mod table;
```
```diff
@@ -66,7 +66,7 @@ impl<V> DictMap<V> {
 impl<'s> phf_shared::PhfHash for crate::InsensitiveStr<'s> {
     #[inline]
     fn phf_hash<H: core::hash::Hasher>(&self, state: &mut H) {
-        core::hash::Hash::hash(self, state)
+        core::hash::Hash::hash(self, state);
     }
 }
 
@@ -75,7 +75,7 @@ impl<'s> phf_shared::FmtConst for crate::InsensitiveStr<'s> {
         match self {
             crate::InsensitiveStr::Ascii(_) => f.write_str("dictgen::InsensitiveStr::Ascii(")?,
             crate::InsensitiveStr::Unicode(_) => {
-                f.write_str("dictgen::InsensitiveStr::Unicode(")?
+                f.write_str("dictgen::InsensitiveStr::Unicode(")?;
             }
         }
```
```diff
@@ -67,7 +67,7 @@ impl<V> DictTable<V> {
     }
 }
 
-/// UniCase look-alike that avoids const-fn so large tables don't OOM
+/// `UniCase` look-alike that avoids const-fn so large tables don't OOM
 #[derive(Copy, Clone)]
 pub enum InsensitiveStr<'s> {
     Unicode(&'s str),
@@ -111,20 +111,20 @@ impl<'s> Eq for InsensitiveStr<'s> {}
 impl<'s> core::hash::Hash for InsensitiveStr<'s> {
     #[inline]
     fn hash<H: core::hash::Hasher>(&self, hasher: &mut H) {
-        self.convert().hash(hasher)
+        self.convert().hash(hasher);
     }
 }
 
 impl<'s> core::fmt::Debug for InsensitiveStr<'s> {
     #[inline]
-    fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
+    fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
         core::fmt::Debug::fmt(self.into_inner(), fmt)
     }
 }
 
 impl<'s> core::fmt::Display for InsensitiveStr<'s> {
     #[inline]
-    fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
+    fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
         core::fmt::Display::fmt(self.into_inner(), fmt)
     }
 }
```
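The `Formatter` to `Formatter<'_>` edits satisfy the `elided_lifetimes_in_paths` lint from the new workspace table: a type that takes a lifetime must at least acknowledge it with `'_`. A minimal standalone sketch of the lint-clean form (not repo code):

```rust
// Sketch: under `elided_lifetimes_in_paths`, writing `Formatter` bare is
// flagged; `Formatter<'_>` spells out the elided lifetime parameter.
use core::fmt;

struct Word<'s>(&'s str);

impl<'s> fmt::Display for Word<'s> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self.0, f)
    }
}

fn main() {
    println!("{}", Word("typos"));
}
```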
```diff
@@ -189,7 +189,7 @@ mod codegen {
         }
     }
 
-    fn gen_type_name<V>(leaf: &DynChild<V>) -> &'static str {
+    fn gen_type_name<V>(leaf: &DynChild<'_, V>) -> &'static str {
         match leaf {
             DynChild::Nested(_) => "dictgen::DictTrieChild::Nested",
             DynChild::Flat(_) => "dictgen::DictTrieChild::Flat",
@@ -250,7 +250,7 @@ mod codegen {
 
     impl<'s, V> DynNode<'s, V> {
         fn burst(&mut self, limit: usize) {
-            self.children.burst(limit)
+            self.children.burst(limit);
         }
     }
```
```diff
@@ -13,6 +13,7 @@ include.workspace = true
 
 [package.metadata.release]
 release = false
+rustdoc-args = ["--cfg", "docsrs"]
 
 [dependencies]
 unicase = "2.7"
@@ -25,3 +26,6 @@ codegenrs = "3.0"
 regex = "1"
 dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
 snapbox = { version = "0.5.9", features = ["path"] }
+
+[lints]
+workspace = true
```
```diff
@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 mod dict_codegen;
 
 pub use crate::dict_codegen::*;
```
```diff
@@ -60,7 +60,7 @@ struct Words<'s> {
     british: HashMap<&'s str, Vec<&'s str>>,
 }
 
-fn parse_dict(raw: &str) -> Words {
+fn parse_dict(raw: &str) -> Words<'_> {
     let mut bad = HashMap::new();
     let mut main = HashMap::new();
     let mut american = HashMap::new();
```
```diff
@@ -12,7 +12,8 @@ rust-version.workspace = true
 include.workspace = true
 
 [package.metadata.docs.rs]
-no-default-features = true
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
 
 [package.metadata.release]
 tag-prefix = ""
@@ -94,3 +95,6 @@ harness = false
 [[bench]]
 name = "tokenize"
 harness = false
+
+[lints]
+workspace = true
```
```diff
@@ -1,3 +1,5 @@
+#![allow(elided_lifetimes_in_paths)]
+
 mod data;
 
 use assert_fs::prelude::*;
@@ -19,7 +21,7 @@ fn found_files(bencher: divan::Bencher, sample: &data::Data) {
         .counter(divan::counter::BytesCount::of_str(sample.content()))
         .bench_local(|| {
             typos_cli::file::FoundFiles.check_file(sample_path.path(), true, &policy, &PrintSilent)
-        })
+        });
 }
 
 #[divan::bench(args = data::DATA)]
@@ -38,7 +40,7 @@ fn identifiers(bencher: divan::Bencher, sample: &data::Data) {
         .counter(divan::counter::BytesCount::of_str(sample.content()))
         .bench_local(|| {
             typos_cli::file::Identifiers.check_file(sample_path.path(), true, &policy, &PrintSilent)
-        })
+        });
 }
 
 #[divan::bench(args = data::DATA)]
@@ -57,7 +59,7 @@ fn words(bencher: divan::Bencher, sample: &data::Data) {
         .counter(divan::counter::BytesCount::of_str(sample.content()))
         .bench_local(|| {
             typos_cli::file::Words.check_file(sample_path.path(), true, &policy, &PrintSilent)
-        })
+        });
 }
 
 #[divan::bench(args = data::DATA)]
@@ -76,7 +78,7 @@ fn typos(bencher: divan::Bencher, sample: &data::Data) {
         .counter(divan::counter::BytesCount::of_str(sample.content()))
         .bench_local(|| {
             typos_cli::file::Typos.check_file(sample_path.path(), true, &policy, &PrintSilent)
-        })
+        });
 }
 
 #[derive(Debug, Default)]
```
```diff
@@ -1,3 +1,5 @@
+#![allow(elided_lifetimes_in_paths)]
+
 mod regular {
     mod ok {
         #[divan::bench]
```
```diff
@@ -1,11 +1,13 @@
+#![allow(dead_code)]
+
-pub static EMPTY: &str = "";
+pub(crate) static EMPTY: &str = "";
 
-pub static NO_TOKENS: &str = " ";
+pub(crate) static NO_TOKENS: &str = " ";
 
-pub static SINGLE_TOKEN: &str = "success";
+pub(crate) static SINGLE_TOKEN: &str = "success";
 
 // Stolen from https://github.com/BurntSushi/ripgrep/blob/master/grep-searcher/src/searcher/glue.rs
-pub static SHERLOCK: &str = "\
+pub(crate) static SHERLOCK: &str = "\
 For the Doctor Watsons of this world, as opposed to the Sherlock
 Holmeses, success in the province of detective work must always
 be, to a very large extent, the result of luck. Sherlock Holmes
@@ -15,7 +17,7 @@ and exhibited clearly, with a label attached.\
 ";
 
 // Stolen from https://github.com/BurntSushi/ripgrep/blob/master/grep-searcher/src/searcher/glue.rs
-pub static CODE: &str = "\
+pub(crate) static CODE: &str = "\
 extern crate snap;
 use std::io;
 fn main() {
@@ -28,17 +30,17 @@ fn main() {
 }
 ";
 
-pub static CORPUS: &str = include_str!("../../typos-dict/assets/words.csv");
+pub(crate) static CORPUS: &str = include_str!("../../typos-dict/assets/words.csv");
 
 #[derive(Debug)]
-pub struct Data(&'static str, &'static str);
+pub(crate) struct Data(&'static str, &'static str);
 
 impl Data {
-    pub const fn name(&self) -> &'static str {
+    pub(crate) const fn name(&self) -> &'static str {
         self.0
     }
 
-    pub const fn content(&self) -> &'static str {
+    pub(crate) const fn content(&self) -> &'static str {
         self.1
     }
 }
@@ -49,7 +51,7 @@ impl std::fmt::Display for Data {
     }
 }
 
-pub static DATA: &[Data] = &[
+pub(crate) static DATA: &[Data] = &[
     Data("empty", EMPTY),
     Data("no_tokens", NO_TOKENS),
     Data("single_token", SINGLE_TOKEN),
```
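The `pub` to `pub(crate)` sweep here, and again in the typos-cli sources below, is what the new `unreachable_pub = "warn"` workspace lint asks for: items that can never be reached from outside the crate should state their real visibility. A hypothetical illustration (not repo code):

```rust
// Sketch: in a binary crate, `util::helper` marked `pub` is unreachable
// from outside, so `unreachable_pub` fires; `pub(crate)` silences it by
// declaring the actual visibility.
#![warn(unreachable_pub)]

mod util {
    pub(crate) fn helper() -> u32 {
        42
    }
}

fn main() {
    println!("{}", util::helper());
}
```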
```diff
@@ -1,7 +1,9 @@
+#![allow(elided_lifetimes_in_paths)]
+
 mod data;
 
 mod parse_str {
-    use super::*;
+    use super::data;
 
     #[divan::bench(args = data::DATA)]
     fn ascii(bencher: divan::Bencher, sample: &data::Data) {
@@ -12,7 +14,7 @@ mod parse_str {
         bencher
             .with_inputs(|| sample.content())
             .input_counter(divan::counter::BytesCount::of_str)
-            .bench_local_values(|sample| parser.parse_str(sample).last())
+            .bench_local_values(|sample| parser.parse_str(sample).last());
     }
 
     #[divan::bench(args = data::DATA)]
@@ -24,12 +26,12 @@ mod parse_str {
         bencher
             .with_inputs(|| sample.content())
             .input_counter(divan::counter::BytesCount::of_str)
-            .bench_local_values(|sample| parser.parse_str(sample).last())
+            .bench_local_values(|sample| parser.parse_str(sample).last());
     }
 }
 
 mod parse_bytes {
-    use super::*;
+    use super::data;
 
     #[divan::bench(args = data::DATA)]
     fn ascii(bencher: divan::Bencher, sample: &data::Data) {
@@ -40,7 +42,7 @@ mod parse_bytes {
         bencher
             .with_inputs(|| sample.content().as_bytes())
             .input_counter(divan::counter::BytesCount::of_slice)
-            .bench_local_values(|sample| parser.parse_bytes(sample).last())
+            .bench_local_values(|sample| parser.parse_bytes(sample).last());
     }
 
     #[divan::bench(args = data::DATA)]
@@ -52,7 +54,7 @@ mod parse_bytes {
         bencher
             .with_inputs(|| sample.content().as_bytes())
             .input_counter(divan::counter::BytesCount::of_slice)
-            .bench_local_values(|sample| parser.parse_bytes(sample).last())
+            .bench_local_values(|sample| parser.parse_bytes(sample).last());
     }
 }
 
@@ -62,11 +64,11 @@ fn split(bencher: divan::Bencher, sample: &data::Data) {
         typos::tokens::Identifier::new_unchecked(sample.content(), typos::tokens::Case::None, 0);
     bencher
         .counter(divan::counter::BytesCount::of_str(sample.content()))
-        .bench_local(|| symbol.split().last())
+        .bench_local(|| symbol.split().last());
 }
 
 mod parse_split_bytes {
-    use super::*;
+    use super::data;
 
     #[divan::bench(args = data::DATA)]
     fn ascii(bencher: divan::Bencher, sample: &data::Data) {
@@ -77,7 +79,7 @@ mod parse_split_bytes {
         bencher
             .with_inputs(|| sample.content().as_bytes())
             .input_counter(divan::counter::BytesCount::of_slice)
-            .bench_local_values(|sample| parser.parse_bytes(sample).flat_map(|i| i.split()).last())
+            .bench_local_values(|sample| parser.parse_bytes(sample).flat_map(|i| i.split()).last());
     }
 
     #[divan::bench(args = data::DATA)]
@@ -89,7 +91,7 @@ mod parse_split_bytes {
         bencher
             .with_inputs(|| sample.content().as_bytes())
             .input_counter(divan::counter::BytesCount::of_slice)
-            .bench_local_values(|sample| parser.parse_bytes(sample).flat_map(|i| i.split()).last())
+            .bench_local_values(|sample| parser.parse_bytes(sample).flat_map(|i| i.split()).last());
     }
 }
```
```diff
@@ -4,7 +4,7 @@ use clap::Parser;
 use typos_cli::config;
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq, clap::ValueEnum, Default)]
-pub enum Format {
+pub(crate) enum Format {
     Silent,
     Brief,
     #[default]
@@ -145,7 +145,7 @@ pub(crate) struct FileArgs {
 }
 
 impl FileArgs {
-    pub fn to_config(&self) -> config::EngineConfig {
+    pub(crate) fn to_config(&self) -> config::EngineConfig {
         config::EngineConfig {
             binary: self.binary(),
             check_filename: self.check_filename(),
@@ -189,7 +189,7 @@ pub(crate) struct ConfigArgs {
 }
 
 impl ConfigArgs {
-    pub fn to_config(&self) -> config::Config {
+    pub(crate) fn to_config(&self) -> config::Config {
         config::Config {
             files: self.walk.to_config(),
             overrides: self.overrides.to_config(),
@@ -243,7 +243,7 @@ pub(crate) struct WalkArgs {
 }
 
 impl WalkArgs {
-    pub fn to_config(&self) -> config::Walk {
+    pub(crate) fn to_config(&self) -> config::Walk {
         config::Walk {
             extend_exclude: self.exclude.clone(),
             ignore_hidden: self.ignore_hidden(),
@@ -296,6 +296,6 @@ mod test {
     #[test]
     fn verify_app() {
         use clap::CommandFactory;
-        Args::command().debug_assert()
+        Args::command().debug_assert();
     }
 }
```
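`Args::command().debug_assert()` gaining a `;` is the same `semicolon_if_nothing_returned` cleanup; the call itself is clap's standard self-check for derived CLIs, validating the whole argument definition at test time. A hedged sketch of that pattern with a made-up `Cli` struct:

```rust
// Sketch (hypothetical `Cli`, not the repo's `Args`): `debug_assert`
// panics in debug builds if the derived command definition is invalid,
// e.g. conflicting flag names.
use clap::Parser;

#[derive(Parser)]
struct Cli {
    #[arg(long)]
    verbose: bool,
}

fn main() {
    use clap::CommandFactory;
    Cli::command().debug_assert();
}
```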
```diff
@@ -12,14 +12,14 @@ const ERROR: anstyle::Style = anstyle::AnsiColor::BrightRed.on_default();
 const INFO: anstyle::Style = anstyle::AnsiColor::BrightBlue.on_default();
 const GOOD: anstyle::Style = anstyle::AnsiColor::BrightGreen.on_default();
 
-pub struct MessageStatus<'r> {
+pub(crate) struct MessageStatus<'r> {
     typos_found: atomic::AtomicBool,
     errors_found: atomic::AtomicBool,
     reporter: &'r dyn Report,
 }
 
 impl<'r> MessageStatus<'r> {
-    pub fn new(reporter: &'r dyn Report) -> Self {
+    pub(crate) fn new(reporter: &'r dyn Report) -> Self {
         Self {
             typos_found: atomic::AtomicBool::new(false),
             errors_found: atomic::AtomicBool::new(false),
@@ -27,17 +27,17 @@ impl<'r> MessageStatus<'r> {
         }
     }
 
-    pub fn typos_found(&self) -> bool {
+    pub(crate) fn typos_found(&self) -> bool {
         self.typos_found.load(atomic::Ordering::Relaxed)
     }
 
-    pub fn errors_found(&self) -> bool {
+    pub(crate) fn errors_found(&self) -> bool {
         self.errors_found.load(atomic::Ordering::Relaxed)
     }
 }
 
 impl<'r> Report for MessageStatus<'r> {
-    fn report(&self, msg: Message) -> Result<(), std::io::Error> {
+    fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error> {
         if msg.is_typo() {
             self.typos_found.store(true, atomic::Ordering::Relaxed);
         }
@@ -49,18 +49,18 @@ impl<'r> Report for MessageStatus<'r> {
 }
 
 #[derive(Debug, Default)]
-pub struct PrintSilent;
+pub(crate) struct PrintSilent;
 
 impl Report for PrintSilent {
-    fn report(&self, _msg: Message) -> Result<(), std::io::Error> {
+    fn report(&self, _msg: Message<'_>) -> Result<(), std::io::Error> {
         Ok(())
     }
 }
 
-pub struct PrintBrief;
+pub(crate) struct PrintBrief;
 
 impl Report for PrintBrief {
-    fn report(&self, msg: Message) -> Result<(), std::io::Error> {
+    fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error> {
         match &msg {
             Message::BinaryFile(msg) => {
                 log::info!("{}", msg);
@@ -91,10 +91,10 @@ impl Report for PrintBrief {
     }
 }
 
-pub struct PrintLong;
+pub(crate) struct PrintLong;
 
 impl Report for PrintLong {
-    fn report(&self, msg: Message) -> Result<(), std::io::Error> {
+    fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error> {
         match &msg {
             Message::BinaryFile(msg) => {
                 log::info!("{}", msg);
@@ -125,7 +125,7 @@ impl Report for PrintLong {
     }
 }
 
-fn print_brief_correction(msg: &Typo) -> Result<(), std::io::Error> {
+fn print_brief_correction(msg: &Typo<'_>) -> Result<(), std::io::Error> {
     let error = ERROR.render();
     let good = GOOD.render();
     let info = INFO.render();
@@ -163,7 +163,7 @@ fn print_brief_correction(msg: &Typo) -> Result<(), std::io::Error> {
     Ok(())
 }
 
-fn print_long_correction(msg: &Typo) -> Result<(), std::io::Error> {
+fn print_long_correction(msg: &Typo<'_>) -> Result<(), std::io::Error> {
     let error = ERROR.render();
     let good = GOOD.render();
     let info = INFO.render();
@@ -271,10 +271,10 @@ fn context_display<'c>(context: &'c Option<Context<'c>>) -> &'c dyn std::fmt::Di
 }
 
 #[derive(Copy, Clone, Debug)]
-pub struct PrintJson;
+pub(crate) struct PrintJson;
 
 impl Report for PrintJson {
-    fn report(&self, msg: Message) -> Result<(), std::io::Error> {
+    fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error> {
         writeln!(stdout().lock(), "{}", serde_json::to_string(&msg).unwrap())?;
         Ok(())
     }
```
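`MessageStatus` keeps two independent `AtomicBool` flags and only ever sets or reads them in isolation, so `Ordering::Relaxed` suffices: no other memory accesses are synchronized through the flags. A minimal sketch of the pattern (not the crate's code):

```rust
use std::sync::atomic::{AtomicBool, Ordering};

// Sketch: a lone boolean flag set from worker threads and read at the
// end only needs `Relaxed`, since it carries no ordering obligations
// for surrounding reads or writes.
static TYPOS_FOUND: AtomicBool = AtomicBool::new(false);

fn main() {
    TYPOS_FOUND.store(true, Ordering::Relaxed);
    assert!(TYPOS_FOUND.load(Ordering::Relaxed));
}
```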
```diff
@@ -187,7 +187,7 @@ impl Walk {
 #[serde(default)]
 #[serde(transparent)]
 pub struct TypeEngineConfig {
-    pub patterns: std::collections::HashMap<KString, GlobEngineConfig>,
+    pub patterns: HashMap<KString, GlobEngineConfig>,
 }
 
 impl TypeEngineConfig {
@@ -244,7 +244,7 @@ impl TypeEngineConfig {
         }
     }
 
-    pub fn patterns(&self) -> impl Iterator<Item = (kstring::KString, GlobEngineConfig)> {
+    pub fn patterns(&self) -> impl Iterator<Item = (KString, GlobEngineConfig)> {
         let mut engine = Self::from_defaults();
         engine.update(self);
         engine.patterns.into_iter()
@@ -256,7 +256,7 @@ impl TypeEngineConfig {
 #[serde(default)]
 #[serde(rename_all = "kebab-case")]
 pub struct GlobEngineConfig {
-    pub extend_glob: Vec<kstring::KString>,
+    pub extend_glob: Vec<KString>,
     #[serde(flatten)]
     pub engine: EngineConfig,
 }
@@ -406,10 +406,10 @@ pub struct DictConfig {
     pub locale: Option<Locale>,
     #[serde(with = "serde_regex")]
     pub extend_ignore_identifiers_re: Vec<regex::Regex>,
-    pub extend_identifiers: HashMap<kstring::KString, kstring::KString>,
+    pub extend_identifiers: HashMap<KString, KString>,
     #[serde(with = "serde_regex")]
     pub extend_ignore_words_re: Vec<regex::Regex>,
-    pub extend_words: HashMap<kstring::KString, kstring::KString>,
+    pub extend_words: HashMap<KString, KString>,
 }
 
 impl DictConfig {
@@ -536,7 +536,7 @@ impl Locale {
 impl std::str::FromStr for Locale {
     type Err = String;
 
-    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
         match s {
             "en" => Ok(Locale::En),
             "en-us" => Ok(Locale::EnUs),
@@ -549,7 +549,7 @@ impl std::str::FromStr for Locale {
 }
 
 impl std::fmt::Display for Locale {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match *self {
             Locale::En => write!(f, "en"),
             Locale::EnUs => write!(f, "en-us"),
@@ -625,7 +625,7 @@ mod test {
         let mut actual = base;
         actual.update(&extended);
 
-        let expected: Vec<kstring::KString> = vec!["*.foo".into(), "*.bar".into()];
+        let expected: Vec<KString> = vec!["*.foo".into(), "*.bar".into()];
         assert_eq!(actual.extend_glob, expected);
     }
```
|
@ -9,7 +9,7 @@
|
||||||
/// Please try to keep this list sorted lexicographically and wrapped to 79
|
/// Please try to keep this list sorted lexicographically and wrapped to 79
|
||||||
/// columns (inclusive).
|
/// columns (inclusive).
|
||||||
#[rustfmt::skip]
|
#[rustfmt::skip]
|
||||||
pub const DEFAULT_TYPES: &[(&str, &[&str])] = &[
|
pub(crate) const DEFAULT_TYPES: &[(&str, &[&str])] = &[
|
||||||
("agda", &["*.agda", "*.lagda"]),
|
("agda", &["*.agda", "*.lagda"]),
|
||||||
("aidl", &["*.aidl"]),
|
("aidl", &["*.aidl"]),
|
||||||
("amake", &["*.mk", "*.bp"]),
|
("amake", &["*.mk", "*.bp"]),
|
||||||
|
|
|
```diff
@@ -43,7 +43,7 @@ impl BuiltIn {
             self.correct_with_vars(word_case)?
         };
         for s in corrections.corrections_mut() {
-            case_correct(s, word_token.case())
+            case_correct(s, word_token.case());
         }
         Some(corrections)
     }
@@ -60,10 +60,7 @@ impl BuiltIn {
     }
 
     // Not using `Status` to avoid the allocations
-    fn correct_word_with_dict(
-        &self,
-        word: unicase::UniCase<&str>,
-    ) -> Option<&'static [&'static str]> {
+    fn correct_word_with_dict(&self, word: UniCase<&str>) -> Option<&'static [&'static str]> {
         typos_dict::WORD_TRIE.find(&word).copied()
     }
 }
@@ -107,7 +104,7 @@ impl BuiltIn {
         }
     }
 
-    fn correct_with_vars(&self, word: unicase::UniCase<&str>) -> Option<Status<'static>> {
+    fn correct_with_vars(&self, word: UniCase<&str>) -> Option<Status<'static>> {
         if self.is_vars_enabled() {
             typos_vars::VARS_TRIE
                 .find(&word)
@@ -218,7 +215,7 @@ pub struct Override<'i, 'w, D> {
     ignored_identifiers: Vec<regex::Regex>,
     identifiers: HashMap<&'i str, Status<'i>, ahash::RandomState>,
     ignored_words: Vec<regex::Regex>,
-    words: HashMap<unicase::UniCase<&'w str>, Status<'w>, ahash::RandomState>,
+    words: HashMap<UniCase<&'w str>, Status<'w>, ahash::RandomState>,
     inner: D,
 }
@@ -302,7 +299,7 @@ impl<'i, 'w, D: typos::Dictionary> typos::Dictionary for Override<'i, 'w, D> {
         // HACK: couldn't figure out the lifetime issue with replacing `cloned` with `borrow`
         if let Some(mut corrections) = self.words.get(&w).cloned() {
             for s in corrections.corrections_mut() {
-                case_correct(s, word_token.case())
+                case_correct(s, word_token.case());
             }
             return Some(corrections);
         }
```
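The signature cleanups above lean on an imported `UniCase` name, and both the word trie and the `Override` map are keyed through it. A small sketch of why a `UniCase` key makes lookups like `correct_word_with_dict` case-insensitive (assumes the `unicase` crate; not repo code):

```rust
// Sketch: `UniCase` hashes and compares case-insensitively, so a map
// keyed by it treats `teh`, `Teh`, and `TEH` as one dictionary entry.
use std::collections::HashMap;
use unicase::UniCase;

fn main() {
    let mut dict: HashMap<UniCase<&str>, &str> = HashMap::new();
    dict.insert(UniCase::new("teh"), "the");
    assert_eq!(dict.get(&UniCase::new("TEH")), Some(&"the"));
}
```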
```diff
@@ -9,7 +9,7 @@ pub trait FileChecker: Send + Sync {
         &self,
         path: &std::path::Path,
         explicit: bool,
-        policy: &crate::policy::Policy,
+        policy: &crate::policy::Policy<'_, '_, '_>,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error>;
 }
@@ -22,7 +22,7 @@ impl FileChecker for Typos {
         &self,
         path: &std::path::Path,
         explicit: bool,
-        policy: &crate::policy::Policy,
+        policy: &crate::policy::Policy<'_, '_, '_>,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
         if policy.check_filenames {
@@ -74,7 +74,7 @@ impl FileChecker for FixTypos {
         &self,
         path: &std::path::Path,
         explicit: bool,
-        policy: &crate::policy::Policy,
+        policy: &crate::policy::Policy<'_, '_, '_>,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
         if policy.check_files {
@@ -149,7 +149,7 @@ impl FileChecker for DiffTypos {
         &self,
         path: &std::path::Path,
         explicit: bool,
-        policy: &crate::policy::Policy,
+        policy: &crate::policy::Policy<'_, '_, '_>,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
         let mut content = Vec::new();
@@ -180,7 +180,7 @@ impl FileChecker for DiffTypos {
             }
             if !fixes.is_empty() {
                 new_content = fix_buffer(buffer.clone(), fixes.into_iter());
-                content = buffer
+                content = buffer;
             }
         }
     }
@@ -253,7 +253,7 @@ impl FileChecker for Identifiers {
         &self,
         path: &std::path::Path,
         explicit: bool,
-        policy: &crate::policy::Policy,
+        policy: &crate::policy::Policy<'_, '_, '_>,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
         let mut ignores: Option<Ignores> = None;
@@ -315,7 +315,7 @@ impl FileChecker for Words {
         &self,
         path: &std::path::Path,
         explicit: bool,
-        policy: &crate::policy::Policy,
+        policy: &crate::policy::Policy<'_, '_, '_>,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
         let mut ignores: Option<Ignores> = None;
@@ -385,7 +385,7 @@ impl FileChecker for FileTypes {
         &self,
         path: &std::path::Path,
         explicit: bool,
-        policy: &crate::policy::Policy,
+        policy: &crate::policy::Policy<'_, '_, '_>,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
         // Check `policy.binary` first so we can easily check performance of walking vs reading
@@ -415,7 +415,7 @@ impl FileChecker for FoundFiles {
         &self,
         path: &std::path::Path,
         explicit: bool,
-        policy: &crate::policy::Policy,
+        policy: &crate::policy::Policy<'_, '_, '_>,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
         // Check `policy.binary` first so we can easily check performance of walking vs reading
@@ -667,7 +667,7 @@ fn fix_buffer(mut buffer: Vec<u8>, typos: impl Iterator<Item = typos::Typo<'stat
 pub fn walk_path(
     walk: ignore::Walk,
     checks: &dyn FileChecker,
-    engine: &crate::policy::ConfigEngine,
+    engine: &crate::policy::ConfigEngine<'_>,
     reporter: &dyn report::Report,
 ) -> Result<(), ignore::Error> {
     for entry in walk {
@@ -679,7 +679,7 @@ pub fn walk_path(
 pub fn walk_path_parallel(
     walk: ignore::WalkParallel,
     checks: &dyn FileChecker,
-    engine: &crate::policy::ConfigEngine,
+    engine: &crate::policy::ConfigEngine<'_>,
     reporter: &dyn report::Report,
 ) -> Result<(), ignore::Error> {
     let error: std::sync::Mutex<Result<(), ignore::Error>> = std::sync::Mutex::new(Ok(()));
@@ -701,7 +701,7 @@ pub fn walk_path_parallel(
 fn walk_entry(
     entry: Result<ignore::DirEntry, ignore::Error>,
     checks: &dyn FileChecker,
-    engine: &crate::policy::ConfigEngine,
+    engine: &crate::policy::ConfigEngine<'_>,
     reporter: &dyn report::Report,
 ) -> Result<(), ignore::Error> {
     let entry = match entry {
```
@@ -4,16 +4,16 @@ use std::path::Path;
 use kstring::KString;
 
 #[derive(Default, Clone, Debug)]
-pub struct TypesBuilder {
+pub(crate) struct TypesBuilder {
     definitions: BTreeMap<KString, Vec<(KString, usize)>>,
 }
 
 impl TypesBuilder {
-    pub fn new() -> Self {
+    pub(crate) fn new() -> Self {
         Default::default()
     }
 
-    pub fn add_defaults(&mut self) {
+    pub(crate) fn add_defaults(&mut self) {
         self.definitions.extend(
             crate::default_types::DEFAULT_TYPES
                 .iter()
@@ -25,11 +25,11 @@ impl TypesBuilder {
         );
     }
 
-    pub fn contains_name(&self, name: &str) -> bool {
+    pub(crate) fn contains_name(&self, name: &str) -> bool {
         self.definitions.contains_key(name)
     }
 
-    pub fn add(&mut self, name: impl Into<KString>, glob: impl Into<KString>) {
+    pub(crate) fn add(&mut self, name: impl Into<KString>, glob: impl Into<KString>) {
         let name = name.into();
         let glob = glob.into();
         let weight = self.definitions.len();
@@ -39,7 +39,7 @@ impl TypesBuilder {
             .push((glob, weight));
     }
 
-    pub fn build(self) -> Result<Types, anyhow::Error> {
+    pub(crate) fn build(self) -> Result<Types, anyhow::Error> {
         let mut definitions = self
             .definitions
             .iter()
@@ -110,7 +110,7 @@ enum GlobPart<'s> {
 }
 
 #[derive(Default, Clone, Debug)]
-pub struct Types {
+pub(crate) struct Types {
     definitions: BTreeMap<KString, Vec<KString>>,
     glob_to_name: Vec<KString>,
     set: globset::GlobSet,
@@ -119,11 +119,11 @@ pub struct Types {
 }
 
 impl Types {
-    pub fn definitions(&self) -> &BTreeMap<KString, Vec<KString>> {
+    pub(crate) fn definitions(&self) -> &BTreeMap<KString, Vec<KString>> {
         &self.definitions
     }
 
-    pub fn file_matched(&self, path: &std::path::Path) -> Option<&str> {
+    pub(crate) fn file_matched(&self, path: &Path) -> Option<&str> {
         let mut mpath = Path::new(path);
         let mut matches = self.matches.get_or_default().borrow_mut();
         loop {

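Narrowing these items from `pub` to `pub(crate)` is the standard fix for the `unreachable_pub` lint: an item declared `pub` inside a private module can never actually be reached from outside the crate, so the visibility was overstated. A minimal sketch, with hypothetical module and type names:

    #![warn(unreachable_pub)]

    mod file_type {
        // Declaring these `pub` would trip `unreachable_pub`: the parent
        // module is private, so crate-local visibility is the honest maximum.
        pub(crate) struct TypesBuilder {
            names: Vec<String>,
        }

        impl TypesBuilder {
            pub(crate) fn new() -> Self {
                Self { names: Vec::new() }
            }

            pub(crate) fn add(&mut self, name: impl Into<String>) {
                self.names.push(name.into());
            }
        }
    }

    fn main() {
        let mut builder = file_type::TypesBuilder::new();
        builder.add("rust");
    }
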
@@ -4,9 +4,9 @@
 //! [`default_types`]: crate::default_types
 
 /// Set `check_file` to `false` for these types.
-pub const NO_CHECK_TYPES: &[&str] = &["cert", "lock"];
+pub(crate) const NO_CHECK_TYPES: &[&str] = &["cert", "lock"];
 
-pub const TYPE_SPECIFIC_DICTS: &[(&str, StaticDictConfig)] = &[
+pub(crate) const TYPE_SPECIFIC_DICTS: &[(&str, StaticDictConfig)] = &[
     (
         "css",
         StaticDictConfig {
@@ -80,9 +80,9 @@ pub const TYPE_SPECIFIC_DICTS: &[(&str, StaticDictConfig)] = &[
     ),
 ];
 
-pub struct StaticDictConfig {
-    pub ignore_idents: &'static [&'static str],
-    pub ignore_words: &'static [&'static str],
+pub(crate) struct StaticDictConfig {
+    pub(crate) ignore_idents: &'static [&'static str],
+    pub(crate) ignore_words: &'static [&'static str],
 }
 
 #[cfg(test)]

@@ -1,5 +1,9 @@
-/// `typos_cli`'s API is unstable. Open an issue for starting a discussion on getting a subset
-/// stabilized.
+//! `typos_cli`'s API is unstable. Open an issue for starting a discussion on getting a subset
+//! stabilized.
+
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
 
 #[doc(hidden)]
 pub mod config;

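The doc-comment change above is more than style: `///` documents the item that follows it, so a crate-level description written with `///` attaches to whatever item happens to come next, while `//!` documents the enclosing scope, here the crate root. A two-line illustration:

    //! Crate-level documentation: `//!` attaches to the enclosing scope,
    //! which at the top of `lib.rs` or `main.rs` is the crate itself.

    /// Item-level documentation: `///` attaches to the item that follows.
    pub fn documented() {}

    fn main() {
        documented();
    }
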
@@ -282,19 +282,19 @@ struct Intern<T> {
 }
 
 impl<T> Intern<T> {
-    pub fn new() -> Self {
+    pub(crate) fn new() -> Self {
         Self {
             data: Default::default(),
         }
     }
 
-    pub fn intern(&mut self, value: T) -> usize {
+    pub(crate) fn intern(&mut self, value: T) -> usize {
         let symbol = self.data.len();
         self.data.push(value);
         symbol
     }
 
-    pub fn get(&self, symbol: usize) -> &T {
+    pub(crate) fn get(&self, symbol: usize) -> &T {
         &self.data[symbol]
     }
 }

@@ -3,7 +3,7 @@
 use std::borrow::Cow;
 
 pub trait Report: Send + Sync {
-    fn report(&self, msg: Message) -> Result<(), std::io::Error>;
+    fn report(&self, msg: Message<'_>) -> Result<(), std::io::Error>;
 }
 
 #[derive(Clone, Debug, serde::Serialize, derive_more::From)]
@@ -101,7 +101,7 @@ pub enum Context<'m> {
 }
 
 impl<'m> std::fmt::Display for Context<'m> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
         match self {
             Context::File(c) => write!(f, "{}:{}", c.path.display(), c.line_num),
             Context::Path(c) => write!(f, "{}", c.path.display()),

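Shortening `std::result::Result` to plain `Result` leans on the prelude; with the error type still written out, the meaning is identical. A simplified, self-contained version of the same `Display` shape (this `Context` is a stand-in struct, not the crate's enum):

    use std::fmt;

    struct Context<'m> {
        path: &'m str,
        line_num: usize,
    }

    impl<'m> fmt::Display for Context<'m> {
        // `Result<(), fmt::Error>` is the prelude alias spelled with an
        // explicit error type; it is the same type as `fmt::Result`.
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
            write!(f, "{}:{}", self.path, self.line_num)
        }
    }

    fn main() {
        let ctx = Context { path: "src/lib.rs", line_num: 42 };
        assert_eq!(ctx.to_string(), "src/lib.rs:42");
    }
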
@@ -11,6 +11,10 @@ edition.workspace = true
 rust-version.workspace = true
 include.workspace = true
 
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
 [dependencies]
 unicase = "2.7"
 dictgen = { version = "^0.2", path = "../dictgen" }
@@ -25,3 +29,6 @@ dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
 varcon = { version = "^0.7", path = "../varcon" }
 snapbox = { version = "0.5.9", features = ["path"] }
 indexmap = "2.2.6"
+
+[lints]
+workspace = true

@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 mod word_codegen;
 
 pub use crate::word_codegen::WORD_TRIE;

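The `#![cfg_attr(docsrs, feature(doc_auto_cfg))]` attribute added across these crates pairs with the `rustdoc-args = ["--cfg", "docsrs"]` metadata from the Cargo.toml hunks: only when docs.rs builds with `--cfg docsrs` does the nightly `doc_auto_cfg` feature turn on, which labels feature-gated items in the rendered docs. A minimal crate-root sketch, with a hypothetical `parser` feature:

    // Nightly-only `doc_auto_cfg` is enabled solely under `--cfg docsrs`,
    // so ordinary stable builds are unaffected.
    #![cfg_attr(docsrs, feature(doc_auto_cfg))]

    /// With `doc_auto_cfg`, rustdoc automatically annotates this module as
    /// available only on crate feature `parser`.
    #[cfg(feature = "parser")]
    pub mod parser {}
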
@@ -1,5 +1,6 @@
 // This file is @generated by crates/typos-dict/tests/codegen.rs
 #![allow(clippy::unreadable_literal)]
+#![allow(unreachable_pub)]
 
 pub static WORD_TRIE: dictgen::DictTrie<&'static [&'static str]> = dictgen::DictTrie {
     root: &WORD_NODE,

@@ -17,6 +17,7 @@ fn generate<W: std::io::Write>(file: &mut W, prefix: &str, dict: &[u8]) {
     )
     .unwrap();
     writeln!(file, "#![allow(clippy::unreadable_literal)]",).unwrap();
+    writeln!(file, "#![allow(unreachable_pub)]",).unwrap();
     writeln!(file).unwrap();
 
     let records: Vec<_> = csv::ReaderBuilder::new()

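Because the output file is `@generated` and its `pub` items are re-exported from elsewhere, the generator now writes `#![allow(unreachable_pub)]` into the header so the generated code stays clean under the stricter lints. A small self-contained sketch of that header-writing step (the function name here is hypothetical):

    use std::io::Write;

    // Emit a lint-quieting header for a generated file, mirroring the
    // `writeln!` calls in the codegen test above.
    fn write_header<W: Write>(file: &mut W) -> std::io::Result<()> {
        writeln!(file, "// This file is @generated; do not edit by hand")?;
        writeln!(file, "#![allow(clippy::unreadable_literal)]")?;
        writeln!(file, "#![allow(unreachable_pub)]")?;
        writeln!(file)
    }

    fn main() -> std::io::Result<()> {
        let mut out = Vec::new();
        write_header(&mut out)?;
        assert!(String::from_utf8(out).unwrap().contains("unreachable_pub"));
        Ok(())
    }
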
@@ -189,7 +189,7 @@ fn is_word(word: &str) -> bool {
     word.chars().all(|c| c.is_alphabetic())
 }
 
-fn varcon_words() -> HashSet<unicase::UniCase<&'static str>> {
+fn varcon_words() -> HashSet<UniCase<&'static str>> {
     // Even include improper ones because we should be letting varcon handle that rather than our
     // dictionary
     varcon::VARCON
@@ -241,7 +241,7 @@ fn find_best_match<'c>(
     matches.into_iter().next().map(|(_, r)| r)
 }
 
-fn allowed_words() -> std::collections::HashMap<String, String> {
+fn allowed_words() -> HashMap<String, String> {
     let allowed_path = "assets/english.csv";
     let english_data = std::fs::read(allowed_path).unwrap();
     let mut allowed_english = csv::ReaderBuilder::new()

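These two hunks only shorten return types; they assume matching `use unicase::UniCase;` and `use std::collections::{HashMap, HashSet};` lines earlier in the file, outside the visible hunks. The effect, reduced to std types only:

    use std::collections::HashMap;

    // With the import above, the signature reads `HashMap<_, _>` rather than
    // `std::collections::HashMap<_, _>`; the type is the same.
    fn allowed_words() -> HashMap<String, String> {
        let mut words = HashMap::new();
        words.insert("teh".to_owned(), "the".to_owned());
        words
    }

    fn main() {
        assert_eq!(allowed_words()["teh"], "the");
    }
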
@@ -11,6 +11,10 @@ edition.workspace = true
 rust-version.workspace = true
 include.workspace = true
 
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
 [dependencies]
 unicase = "2.7"
 dictgen = { version = "^0.2", path = "../dictgen" }
@@ -25,3 +29,6 @@ codegenrs = "3.0"
 itertools = "0.12"
 dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
 snapbox = { version = "0.5.9", features = ["path"] }
+
+[lints]
+workspace = true

@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 mod vars_codegen;
 
 pub use crate::vars_codegen::*;

@@ -22,7 +22,7 @@ static CATEGORIES: [varcon::Category; 4] = [
     // Other basically means all
 ];
 
-fn generate_variations<W: std::io::Write>(file: &mut W) {
+fn generate_variations<W: Write>(file: &mut W) {
     let entries = entries();
 
     writeln!(
@@ -91,7 +91,7 @@ fn generate_variations<W: std::io::Write>(file: &mut W) {
         file,
         "VARS",
         "&[(u8, &VariantsMap)]",
-        entry_sets.iter().flat_map(|kv| {
+        entry_sets.iter().filter_map(|kv| {
             let (word, data) = kv;
             if is_always_valid(data) {
                 // No need to convert from current form to target form
@@ -119,7 +119,7 @@ fn generate_variations<W: std::io::Write>(file: &mut W) {
     }
 }
 
-fn generate_entry(file: &mut impl std::io::Write, symbol: &str, entry: &varcon_core::Entry) {
+fn generate_entry(file: &mut impl Write, symbol: &str, entry: &varcon_core::Entry) {
     writeln!(file, "pub(crate) static {}: VariantsMap = [", symbol).unwrap();
     for category in &CATEGORIES {
         let corrections = collect_correct(entry, *category);

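Swapping `flat_map` for `filter_map` matters here because the closure returns an `Option`: `filter_map` says directly "keep the `Some`s, drop the `None`s", whereas `flat_map` only works by accident of `Option` being iterable (clippy's `flat_map_option` lint flags exactly that). A reduced example of the same shape, with made-up data:

    fn main() {
        let entry_sets = [("colour", true), ("realise", false)];

        // `filter_map` keeps `Some` values and drops `None`s; the
        // always-valid rule here is invented for illustration.
        let kept: Vec<&str> = entry_sets
            .iter()
            .filter_map(|(word, is_always_valid)| {
                if *is_always_valid {
                    // No need to convert this one, so emit nothing for it.
                    None
                } else {
                    Some(*word)
                }
            })
            .collect();

        assert_eq!(kept, vec!["realise"]);
    }
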
@@ -11,6 +11,10 @@ edition.workspace = true
 rust-version.workspace = true
 include.workspace = true
 
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
 [dependencies]
 anyhow = "1.0"
 thiserror = "1.0"
@@ -22,3 +26,6 @@ simdutf8 = "0.1.4"
 itertools = "0.12"
 unicode-segmentation = "1.11.0"
 bstr = "1.9"
+
+[lints]
+workspace = true

@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 mod check;
 mod dict;
 

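`clippy::print_stderr` and `clippy::print_stdout`, now warned on in every crate root, exist to keep libraries from writing straight to the user's terminal; output should flow through a return value or a reporter the caller controls, with direct printing reserved for the binary's edge. A sketch of the discipline:

    #![warn(clippy::print_stdout)]
    #![warn(clippy::print_stderr)]

    // A library-style function returns text instead of printing it.
    fn describe(count: usize) -> String {
        format!("{count} typos found")
    }

    fn main() {
        // Printing at the binary's outermost layer is a deliberate choice,
        // so the lint is allowed explicitly right where it happens.
        #[allow(clippy::print_stdout)]
        {
            println!("{}", describe(0));
        }
    }
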
@@ -130,7 +130,7 @@ impl<'s> Iterator for Utf8Chunks<'s> {
 
 mod parser {
     use winnow::combinator::trace;
-    use winnow::combinator::*;
+    use winnow::combinator::{alt, eof, opt, peek, preceded, repeat, terminated};
     use winnow::error::ParserError;
     use winnow::prelude::*;
     use winnow::stream::AsBStr;
@@ -139,7 +139,7 @@ mod parser {
     use winnow::stream::SliceLen;
     use winnow::stream::Stream;
     use winnow::stream::StreamIsPartial;
-    use winnow::token::*;
+    use winnow::token::{one_of, take_while};
 
     pub(crate) fn next_identifier<T>(input: &mut T) -> PResult<<T as Stream>::Slice, ()>
     where

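Replacing the `winnow::combinator::*` and `winnow::token::*` globs with named imports makes it obvious where each combinator comes from. A small parser in the same explicit-import style, assuming the winnow 0.6 API pinned in the Cargo.toml hunks (this `identifier` function is illustrative, not the crate's actual parser):

    use winnow::prelude::*;
    use winnow::token::{one_of, take_while};

    // Parse an ASCII identifier: a leading letter or `_`, then any run of
    // letters, digits, or `_`; `recognize` returns the matched slice.
    fn identifier<'s>(input: &mut &'s str) -> PResult<&'s str> {
        (
            one_of(|c: char| c.is_ascii_alphabetic() || c == '_'),
            take_while(0.., |c: char| c.is_ascii_alphanumeric() || c == '_'),
        )
            .recognize()
            .parse_next(input)
    }

    fn main() {
        let mut input = "hello_world 42";
        assert_eq!(identifier(&mut input).unwrap(), "hello_world");
    }
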
@@ -944,7 +944,7 @@ mod test {
         let parser = Tokenizer::new();
 
         let input = "";
-        let expected: Vec<Identifier> = vec![];
+        let expected: Vec<Identifier<'_>> = vec![];
         let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
         assert_eq!(expected, actual);
         let actual: Vec<_> = parser.parse_str(input).collect();
@@ -956,7 +956,7 @@ mod test {
         let parser = Tokenizer::new();
 
         let input = "word";
-        let expected: Vec<Identifier> = vec![Identifier::new_unchecked("word", Case::None, 0)];
+        let expected: Vec<Identifier<'_>> = vec![Identifier::new_unchecked("word", Case::None, 0)];
         let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
         assert_eq!(expected, actual);
         let actual: Vec<_> = parser.parse_str(input).collect();
@@ -968,7 +968,7 @@ mod test {
         let parser = Tokenizer::new();
 
         let input = "A B";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("A", Case::None, 0),
             Identifier::new_unchecked("B", Case::None, 2),
         ];
@@ -983,7 +983,7 @@ mod test {
         let parser = Tokenizer::new();
 
         let input = "A.B";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("A", Case::None, 0),
             Identifier::new_unchecked("B", Case::None, 2),
         ];
@@ -998,7 +998,7 @@ mod test {
         let parser = Tokenizer::new();
 
         let input = "A::B";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("A", Case::None, 0),
             Identifier::new_unchecked("B", Case::None, 3),
         ];
@@ -1013,7 +1013,7 @@ mod test {
         let parser = Tokenizer::new();
 
         let input = "A_B";
-        let expected: Vec<Identifier> = vec![Identifier::new_unchecked("A_B", Case::None, 0)];
+        let expected: Vec<Identifier<'_>> = vec![Identifier::new_unchecked("A_B", Case::None, 0)];
        let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
         assert_eq!(expected, actual);
         let actual: Vec<_> = parser.parse_str(input).collect();
@@ -1025,7 +1025,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello 1st 2nd 3rd 4th __5th__ World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("World", Case::None, 30),
         ];
@@ -1040,7 +1040,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello 0xDEADBEEF World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("World", Case::None, 17),
         ];
@@ -1055,7 +1055,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello 123e4567-e89b-12d3-a456-426652340000 World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("World", Case::None, 43),
         ];
@@ -1070,7 +1070,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello 123E4567-E89B-12D3-A456-426652340000 World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("World", Case::None, 43),
         ];
@@ -1099,7 +1099,7 @@ mod test {
             ("D41D8CD98F00B204E9800998ECF8427", false),
         ] {
             let input = format!("Hello {} World", hashlike);
-            let mut expected: Vec<Identifier> = vec![
+            let mut expected: Vec<Identifier<'_>> = vec![
                 Identifier::new_unchecked("Hello", Case::None, 0),
                 Identifier::new_unchecked("World", Case::None, 7+hashlike.len()),
             ];
@@ -1118,7 +1118,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = " /// at /rustc/c7087fe00d2ba919df1d813c040a5d47e43b0fe7\\/src\\libstd\\rt.rs:51";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("at", Case::None, 25),
             // `rustc...` looks like the start of a URL
             Identifier::new_unchecked("rs", Case::None, 91),
@@ -1134,7 +1134,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Good Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X1Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X122Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X12== Bye";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Good", Case::None, 0),
             Identifier::new_unchecked("Bye", Case::None, 134),
         ];
@@ -1149,7 +1149,8 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = r#""ed25519:1": "Wm+VzmOUOz08Ds+0NTWb1d4CZrVsJSikkeRxh6aCcUwu6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA==""#;
-        let expected: Vec<Identifier> = vec![Identifier::new_unchecked("ed25519", Case::None, 1)];
+        let expected: Vec<Identifier<'_>> =
+            vec![Identifier::new_unchecked("ed25519", Case::None, 1)];
         let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
         assert_eq!(expected, actual);
         let actual: Vec<_> = parser.parse_str(input).collect();
@@ -1161,7 +1162,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = r#" "integrity": "sha512-hCmlUAIlUiav8Xdqw3Io4LcpA1DOt7h3LSTAC4G6JGHFFaWzI6qvFt9oilvl8BmkbBRX1IhM90ZAmpk68zccQA==","#;
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("integrity", Case::None, 8),
             Identifier::new_unchecked("sha512", Case::None, 21),
         ];
@@ -1176,7 +1177,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Good example@example.com Bye";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Good", Case::None, 0),
             Identifier::new_unchecked("Bye", Case::None, 25),
         ];
@@ -1191,7 +1192,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Good example.com/hello Bye";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Good", Case::None, 0),
             Identifier::new_unchecked("Bye", Case::None, 23),
         ];
@@ -1207,7 +1208,7 @@ mod test {
 
         let input =
             "Good http://user:password@example.com:3142/hello?query=value&extra=two#fragment,split Bye";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Good", Case::None, 0),
             Identifier::new_unchecked("Bye", Case::None, 86),
         ];
@@ -1222,7 +1223,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello 0Hello 124 0xDEADBEEF World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("0Hello", Case::None, 6),
             Identifier::new_unchecked("World", Case::None, 28),
@@ -1238,7 +1239,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello \\Hello \\ \\\\ World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("World", Case::None, 18),
         ];
@@ -1253,7 +1254,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello \\n\\n World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("World", Case::None, 11),
         ];
@@ -1268,7 +1269,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello \\nanana\\nanana World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("World", Case::None, 21),
         ];
@@ -1283,7 +1284,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello %Hello World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("World", Case::None, 13),
         ];
@@ -1298,7 +1299,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "#[derive(Clone)] #aaa # #111 #AABBCC #hello #AABBCCDD #1175BA World";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("derive", Case::None, 2),
             Identifier::new_unchecked("Clone", Case::None, 9),
             Identifier::new_unchecked("hello", Case::None, 38),
@@ -1315,7 +1316,7 @@ mod test {
         let parser = TokenizerBuilder::new().build();
 
         let input = "Hello {{% foo %}} world!";
-        let expected: Vec<Identifier> = vec![
+        let expected: Vec<Identifier<'_>> = vec![
             Identifier::new_unchecked("Hello", Case::None, 0),
             Identifier::new_unchecked("foo", Case::None, 10),
             Identifier::new_unchecked("world", Case::None, 18),
@@ -1331,7 +1332,7 @@ mod test {
         let parser = TokenizerBuilder::new().unicode(false).build();
 
         let input = "appliqués";
-        let expected: Vec<Identifier> = vec![];
+        let expected: Vec<Identifier<'_>> = vec![];
         let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
         assert_eq!(expected, actual);
         let actual: Vec<_> = parser.parse_str(input).collect();

@@ -10,6 +10,10 @@ edition.workspace = true
 rust-version.workspace = true
 include.workspace = true
 
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
 [features]
 default = []
 parser = ["dep:winnow"]
@@ -19,5 +23,5 @@ flags = ["dep:enumflags2"]
 winnow = { version = "0.6.5", optional = true }
 enumflags2 = { version = "0.7", optional = true }
 
-[package.metadata.docs.rs]
-features = [ "parser", "flags" ]
+[lints]
+workspace = true

@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 pub mod borrowed;
 
 #[cfg(feature = "parser")]

@@ -1,7 +1,7 @@
 use winnow::combinator::trace;
 use winnow::prelude::*;
 
-use crate::*;
+use crate::{Category, Cluster, Entry, Pos, Tag, Type, Variant};
 
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct ClusterIter<'i> {

@@ -11,6 +11,10 @@ edition.workspace = true
 rust-version.workspace = true
 include.workspace = true
 
+[package.metadata.docs.rs]
+all-features = true
+rustdoc-args = ["--cfg", "docsrs"]
+
 [features]
 default = ["all"]
 all = ["flags"]
@@ -23,3 +27,6 @@ varcon-core = { version = "^4.0", path = "../varcon-core" }
 codegenrs = "3.0"
 varcon-core = { version = "^4.0", path = "../varcon-core", features = ["parser"] }
 snapbox = { version = "0.5.9", features = ["path"] }
+
+[lints]
+workspace = true

@@ -1,7 +1,7 @@
 // This file is @generated by crates/varcon/tests/codegen.rs
 #![allow(clippy::unreadable_literal)]
 
-use crate::*;
+use crate::{Category, Cluster, Entry, Pos, Tag, Type, Variant};
 
 pub static VARCON: &[Cluster] = &[
     Cluster {

@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 mod codegen;
 
 pub use codegen::*;

@@ -1,3 +1,5 @@
+#![allow(clippy::self_named_module_files)] // false positive
+
 const DICT: &[u8] = include_bytes!("../assets/varcon.txt");
 
 #[test]
@@ -22,7 +24,11 @@ fn generate<W: std::io::Write>(file: &mut W) {
     .unwrap();
     writeln!(file, "#![allow(clippy::unreadable_literal)]",).unwrap();
     writeln!(file).unwrap();
-    writeln!(file, "use crate::*;").unwrap();
+    writeln!(
+        file,
+        "use crate::{{Category, Cluster, Entry, Pos, Tag, Type, Variant}};"
+    )
+    .unwrap();
     writeln!(file).unwrap();
 
     writeln!(file, "pub static VARCON: &[Cluster] = &[").unwrap();

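The reflowed `writeln!` keeps the doubled braces: in a `format!`-family string, `{{` and `}}` are escapes that emit literal `{` and `}`, which generated `use` statements need. A self-contained check:

    use std::io::Write;

    fn main() -> std::io::Result<()> {
        let mut out = Vec::new();
        // `{{` and `}}` in the format string become literal braces on output.
        writeln!(
            out,
            "use crate::{{Category, Cluster, Entry, Pos, Tag, Type, Variant}};"
        )?;
        assert_eq!(
            String::from_utf8(out).unwrap(),
            "use crate::{Category, Cluster, Entry, Pos, Tag, Type, Variant};\n"
        );
        Ok(())
    }
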
@@ -25,3 +25,6 @@ itertools = "0.12"
 codegenrs = "3.0"
 dictgen = { version = "^0.2", path = "../dictgen", features = ["codegen"] }
 snapbox = { version = "0.5.9", features = ["path"] }
+
+[lints]
+workspace = true

@@ -1,3 +1,7 @@
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![warn(clippy::print_stderr)]
+#![warn(clippy::print_stdout)]
+
 mod dict_codegen;
 
 pub use crate::dict_codegen::*;