Skip to content
This repository was archived by the owner on Jun 5, 2025. It is now read-only.

Commit 67db55c

Browse files
committed
(wip,chore) clippy and deny update
1 parent f1d9308 commit 67db55c

18 files changed

+163
-241
lines changed

.pre-commit-config.yaml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,10 @@
11
# See https://pre-commit.com for more information
22
# See https://pre-commit.com/hooks.html for more hooks
33
repos:
4+
- repo: https://github.com/EmbarkStudios/cargo-deny
5+
rev: 0.14.16 # choose your preferred tag
6+
hooks:
7+
- id: cargo-deny
48
- repo: https://github.com/pre-commit/pre-commit-hooks
59
rev: v3.2.0
610
hooks:

Cargo.lock

Lines changed: 6 additions & 6 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

deny.toml

Lines changed: 12 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
# The values provided in this template are the default values that will be used
1010
# when any section or field is not specified in your own configuration
1111

12-
# Root options
12+
[graph]
1313

1414
# If 1 or more target triples (and optionally, target_features) are specified,
1515
# only the specified targets will be checked when running `cargo deny check`.
@@ -50,6 +50,8 @@ no-default-features = false
5050
# If set, these feature will be enabled when collecting metadata. If `--features`
5151
# is specified on the cmd line they will take precedence over this option.
5252
#features = []
53+
54+
[output]
5355
# When outputting inclusion graphs in diagnostics that include features, this
5456
# option can be used to specify the depth at which feature edges will be added.
5557
# This option is included since the graphs can be quite large and the addition
@@ -61,24 +63,18 @@ feature-depth = 1
6163
# More documentation for the advisories section can be found here:
6264
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
6365
[advisories]
66+
version = 2
6467
# The path where the advisory database is cloned/fetched into
6568
db-path = "~/.cargo/advisory-db"
6669
# The url(s) of the advisory databases to use
6770
db-urls = ["https://github.com/rustsec/advisory-db"]
6871
# The lint level for security vulnerabilities
69-
vulnerability = "deny"
70-
# The lint level for unmaintained crates
71-
unmaintained = "warn"
72-
# The lint level for crates that have been yanked from their source registry
7372
yanked = "warn"
74-
# The lint level for crates with security notices. Note that as of
75-
# 2019-12-17 there are no security notice advisories in
76-
# https://github.com/rustsec/advisory-db
77-
notice = "warn"
7873
# A list of advisory IDs to ignore. Note that ignored advisories will still
7974
# output a note when they are encountered.
8075
ignore = [
8176
#"RUSTSEC-0000-0000",
77+
"RUSTSEC-2021-0145", # atty... potential unaligned read when using custom allocators.
8278
]
8379
# Threshold for security vulnerabilities, any vulnerability with a CVSS score
8480
# lower than the range specified will be ignored. Note that ignored advisories
@@ -100,60 +96,28 @@ ignore = [
10096
# More documentation for the licenses section can be found here:
10197
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
10298
[licenses]
103-
# The lint level for crates which do not have a detectable license
104-
unlicensed = "deny"
99+
version = 2
105100
# List of explicitly allowed licenses
106101
# See https://spdx.org/licenses/ for list of possible licenses
107102
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
108103
allow = [
109104
"MIT",
110105
"Apache-2.0",
111106
"Apache-2.0 WITH LLVM-exception",
112-
"CC0-1.0",
113107
"BSD-3-Clause",
114-
"ISC",
115108
"Unicode-DFS-2016",
116109
"Zlib",
117110
# Thank Jesus for rerun that compiled the following for me...
118-
"OFL-1.1", # https://spdx.org/licenses/OFL-1.1.html
119-
"MPL-2.0", # https://www.mozilla.org/en-US/MPL/2.0/FAQ/ - see Q11. Used by webpki-roots on Linux.
120-
"OpenSSL", # https://www.openssl.org/source/license.html - used on Linux
121-
"LicenseRef-UFL-1.0", # See https://github.com/emilk/egui/issues/2321
122-
]
123-
# List of explicitly disallowed licenses
124-
# See https://spdx.org/licenses/ for list of possible licenses
125-
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
126-
deny = [
127-
#"Nokia",
128111
]
129-
# Lint level for licenses considered copyleft
130-
copyleft = "deny"
131-
# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses
132-
# * both - The license will be approved if it is both OSI-approved *AND* FSF
133-
# * either - The license will be approved if it is either OSI-approved *OR* FSF
134-
# * osi - The license will be approved if it is OSI approved
135-
# * fsf - The license will be approved if it is FSF Free
136-
# * osi-only - The license will be approved if it is OSI-approved *AND NOT* FSF
137-
# * fsf-only - The license will be approved if it is FSF *AND NOT* OSI-approved
138-
# * neither - This predicate is ignored and the default lint level is used
139-
allow-osi-fsf-free = "neither"
140-
# Lint level used when no other predicates are matched
141-
# 1. License isn't in the allow or deny lists
142-
# 2. License isn't copyleft
143-
# 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither"
144-
default = "deny"
112+
145113
# The confidence threshold for detecting a license from license text.
146114
# The higher the value, the more closely the license text must be to the
147115
# canonical license text of a valid SPDX license file.
148116
# [possible values: any between 0.0 and 1.0].
149117
confidence-threshold = 0.95
150118
# Allow 1 or more licenses on a per-crate basis, so that particular licenses
151119
# aren't accepted for every possible crate as with the normal allow list
152-
exceptions = [
153-
# Each entry is the crate and version constraint, and its specific allow
154-
# list
155-
{ allow = ["Zlib"], name = "adler32", version = "*" },
156-
]
120+
exceptions = []
157121

158122
# Some crates don't have (easily) machine readable licensing information,
159123
# adding a clarification entry for it allows you to manually specify the
@@ -260,19 +224,12 @@ deny = [
260224
#exact = true
261225

262226
# Certain crates/versions that will be skipped when doing duplicate detection.
263-
skip = [
264-
# Following line comes from the rerun repo ...https://github.com/rerun-io/rerun/blob/main/deny.toml
265-
{ name = "webpki-roots" }, # ureq and tungstenite are on different versions 😭
266-
267-
#{ name = "ansi_term", version = "=0.11.0" },
268-
]
227+
skip = []
269228
# Similarly to `skip` allows you to skip certain crates during duplicate
270229
# detection. Unlike skip, it also includes the entire tree of transitive
271230
# dependencies starting at the specified crate, up to a certain depth, which is
272231
# by default infinite.
273-
skip-tree = [
274-
#{ name = "ansi_term", version = "=0.11.0", depth = 20 },
275-
]
232+
skip-tree = []
276233

277234
# This section is considered when running `cargo deny check sources`.
278235
# More documentation about the 'sources' section can be found here:
@@ -288,12 +245,8 @@ unknown-git = "warn"
288245
# if not specified. If it is specified but empty, no registries are allowed.
289246
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
290247
# List of URLs for allowed Git repositories
291-
allow-git = []
248+
allow-git = ["https://github.com/lazear/sage"]
292249

293250
[sources.allow-org]
294251
# 1 or more github.com organizations to allow git sources for
295-
github = [""]
296-
# 1 or more gitlab.com organizations to allow git sources for
297-
gitlab = [""]
298-
# 1 or more bitbucket.org organizations to allow git sources for
299-
bitbucket = [""]
252+
# github = [""]

src/aggregation/aggregators.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
use crate::ms::frames::TimsPeak;
2-
use crate::space::space_generics::{AsAggregableAtIndex, HasIntensity, IntenseAtIndex};
2+
use crate::space::space_generics::{AsAggregableAtIndex, HasIntensity};
33
use crate::utils;
4-
use std::ops::Index;
54

65
use rayon::prelude::*;
76

src/aggregation/dbscan/dbscan.rs

Lines changed: 13 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
1-
use crate::aggregation::aggregators::{aggregate_clusters, ClusterAggregator, ClusterLabel};
1+
use crate::aggregation::aggregators::{aggregate_clusters, ClusterAggregator};
22
use crate::space::kdtree::RadiusKDTree;
33
use crate::space::space_generics::{
44
AsAggregableAtIndex, AsNDPointsAtIndex, DistantAtIndex, HasIntensity, IntenseAtIndex, NDPoint,
55
NDPointConverter, QueriableIndexedPoints,
66
};
77
use crate::utils::{self, ContextTimer};
8-
use log::{debug, info, trace};
8+
use log::info;
99
use rayon::prelude::*;
1010
use std::fmt::Debug;
11-
use std::ops::{Add, Index};
11+
use std::ops::Add;
1212

1313
use crate::aggregation::dbscan::runner::dbscan_label_clusters;
1414

@@ -64,15 +64,13 @@ pub fn reassign_centroid<
6464
}
6565

6666
// 1/1000 show the first and last neighbor, as well as the centroid
67-
if neighbors.len() > 0 {
68-
if rand::random::<f32>() < 0.001 {
69-
println!(
70-
"Centroid: {:?}, First: {:?}, Last: {:?}",
71-
centroid,
72-
neighbors[0],
73-
neighbors[neighbors.len() - 1]
74-
);
75-
}
67+
if !neighbors.is_empty() && rand::random::<f32>() < 0.001 {
68+
println!(
69+
"Centroid: {:?}, First: {:?}, Last: {:?}",
70+
centroid,
71+
neighbors[0],
72+
neighbors[neighbors.len() - 1]
73+
);
7674
}
7775

7876
let mut aggregator = def_aggregator();
@@ -149,7 +147,7 @@ where
149147
let quad_indices = (0..ndpoints.len()).collect::<Vec<_>>();
150148

151149
for (quad_point, i) in ndpoints.iter().zip(quad_indices.iter()) {
152-
tree.insert_ndpoint(quad_point.clone(), i);
150+
tree.insert_ndpoint(*quad_point, i);
153151
}
154152
i_timer.stop(true);
155153

@@ -234,13 +232,12 @@ pub fn dbscan_aggregate<
234232
);
235233
i_timer.stop(true);
236234

237-
let centroids = aggregate_clusters(
235+
aggregate_clusters(
238236
cluster_labels.num_clusters,
239237
cluster_labels.cluster_labels,
240238
prefiltered_peaks,
241239
&def_aggregator,
242240
log_level,
243241
keep_unclustered,
244-
);
245-
centroids
242+
)
246243
}

src/aggregation/dbscan/denseframe_dbscan.rs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -95,15 +95,15 @@ impl AsAggregableAtIndex<TimsPeak> for Vec<TimsPeak> {
9595
impl DistantAtIndex<f32> for Vec<TimsPeak> {
9696
fn distance_at_indices(
9797
&self,
98-
index: usize,
99-
other: usize,
98+
_index: usize,
99+
_other: usize,
100100
) -> f32 {
101101
panic!("I dont think this is called ever ...");
102-
let mut sum = 0.0;
103-
let diff_mz = (self[index].mz - self[other].mz) as f32;
104-
sum += diff_mz * diff_mz;
105-
let diff_ims = self[index].mobility - self[other].mobility;
106-
sum += diff_ims * diff_ims;
107-
sum.sqrt()
102+
// let mut sum = 0.0;
103+
// let diff_mz = (self[index].mz - self[other].mz) as f32;
104+
// sum += diff_mz * diff_mz;
105+
// let diff_ims = self[index].mobility - self[other].mobility;
106+
// sum += diff_ims * diff_ims;
107+
// sum.sqrt()
108108
}
109109
}

src/aggregation/dbscan/runner.rs

Lines changed: 10 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ use crate::space::space_generics::{
55
use crate::utils;
66
use core::fmt::Debug;
77
use indicatif::ProgressIterator;
8-
use log::{debug, trace};
8+
use log::debug;
99
use std::marker::PhantomData;
1010
use std::sync::Arc;
1111

@@ -181,10 +181,7 @@ impl DBSCANRunnerState {
181181
{
182182
let cluster_labels = ClusterLabels::new(nlabels);
183183

184-
let filter_fun_cache = match usize_filterfun {
185-
Some(_) => Some(FilterFunCache::new(nlabels)),
186-
None => None,
187-
};
184+
let filter_fun_cache = usize_filterfun.map(|_| FilterFunCache::new(nlabels));
188185
//FilterFunCache::new(Box::new(&usize_filterfun), nlabels);
189186
let timers = DBScanTimers::new();
190187
let candidate_metrics = CandidateCountMetrics::new();
@@ -374,8 +371,8 @@ where
374371
let cl = |a: &usize, b: &usize| {
375372
filterfun(&raw_distance_calculator.distance_at_indices(*a, *b))
376373
};
377-
let bind = Some(cl);
378-
bind
374+
375+
Some(cl)
379376
},
380377
None => None,
381378
};
@@ -388,7 +385,7 @@ where
388385

389386
let points: DBSCANPoints<N, PP, PE, DAI, D, QIP> = DBSCANPoints {
390387
raw_elements,
391-
intensity_sorted_indices: intensity_sorted_indices,
388+
intensity_sorted_indices,
392389
indexed_points,
393390
projected_elements,
394391
raw_dist: raw_distance_calculator,
@@ -591,7 +588,7 @@ where
591588
.map(|i| points.intensity_at_index(*i))
592589
.sum::<u64>();
593590
timers.outer_intensity_calculation.stop(false);
594-
return neighbor_intensity_total >= self.min_intensity;
591+
neighbor_intensity_total >= self.min_intensity
595592
}
596593

597594
fn main_loop_expand_cluster<PP, PE, DAI, QIP>(
@@ -661,11 +658,7 @@ where
661658
{
662659
timers.inner_loop_nn_timer.reset_start();
663660
let binding = Arc::clone(&points).get_ndpoint(neighbor_index);
664-
let local_neighbors: Vec<usize> = points
665-
.query_ndpoint(&binding)
666-
.iter()
667-
.map(|x| *x)
668-
.collect::<Vec<_>>();
661+
let local_neighbors: Vec<usize> = points.query_ndpoint(&binding).to_vec();
669662
// Should I warn if nothing is gotten here?
670663
// every point should have at least itself as a neighbor ...
671664
debug_assert!(!local_neighbors.is_empty());
@@ -862,18 +855,16 @@ pub fn dbscan_label_clusters<
862855
min_n,
863856
min_intensity,
864857
progress,
865-
filter_fun: filter_fun,
858+
filter_fun,
866859
max_extension_distances,
867860
_phantom: PhantomData::<D>,
868861
};
869862

870-
let cluster_labels = runner.run(
863+
runner.run(
871864
raw_elements,
872865
intensity_sorted_indices,
873866
indexed_points,
874867
projected_elements,
875868
raw_elements,
876-
);
877-
878-
cluster_labels
869+
)
879870
}

0 commit comments

Comments
 (0)