This repository has been archived by the owner on Dec 6, 2024. It is now read-only.

style: Fix clippy warnings #295

Merged · 1 commit · Dec 5, 2024
6 changes: 3 additions & 3 deletions benches/array_bench.rs
@@ -7,13 +7,13 @@ use std::iter::FromIterator;
fn run_benchmark(c: &mut Criterion) {
let mut group = c.benchmark_group("bench-byte-array");
group.bench_function("from-bytes", |b| {
- b.iter(|| SSeq::from_bytes(b"AGTCCTCTGCATTTTG"))
+ b.iter(|| SSeq::from_bytes(b"AGTCCTCTGCATTTTG"));
});
group.bench_function("from-bytes-unchecked", |b| {
- b.iter(|| SSeq::from_bytes_unchecked(b"AGTCCTCTGCATTTTG"))
+ b.iter(|| SSeq::from_bytes_unchecked(b"AGTCCTCTGCATTTTG"));
});
group.bench_function("from-iter", |b| {
- b.iter(|| SSeq::from_iter(b"AGTCCTCTGCATTTTG"))
+ b.iter(|| SSeq::from_iter(b"AGTCCTCTGCATTTTG"));
});
group.finish();
}
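Note (not part of the diff): the added semicolons match what clippy's `semicolon_if_nothing_returned` lint asks for — this is an assumption, the PR does not name the lint. A minimal standalone sketch with made-up names:

```rust
// clippy::semicolon_if_nothing_returned: when the trailing expression of a
// block evaluates to `()`, spell the semicolon out so the discarded value
// (here, none) is explicit.
fn log_line(msg: &str) {
    println!("{msg}");
}

fn main() {
    let run = || {
        // before: log_line("running")   (no trailing semicolon)
        log_line("running");
    };
    run();
}
```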
14 changes: 7 additions & 7 deletions benches/benchmarks.rs
@@ -115,12 +115,12 @@ fn run_fastq_lz4_benchmark(c: &mut Criterion) {
c.bench_function("bench-lz4-wc", |b| {
b.iter(
|| assert_eq!(lz4_count(INTERLEAVED_LZ4_FASTQ), 16000), // 16000 lines
- )
+ );
});
c.bench_function("bench-lz4-read-pair-iter-count", |b| {
b.iter(
|| assert_eq!(read_pair_count(INTERLEAVED_LZ4_FASTQ), 2000), // 2000 read pairs
- )
+ );
});
// let chemistry: ChemistryDef =
// serde_json::from_reader(File::open("tests/rna_read/sc_vdj_chemistry.json").unwrap())
@@ -141,12 +141,12 @@ fn run_fastq_gz_benchmark(c: &mut Criterion) {
c.bench_function("bench-gz-wc", |b| {
b.iter(
|| assert_eq!(gz_count(INTERLEAVED_GZ_FASTQ), 16000), // 16000 lines
- )
+ );
});
c.bench_function("bench-gz-read-pair-iter-count", |b| {
b.iter(
|| assert_eq!(read_pair_count(INTERLEAVED_GZ_FASTQ), 2000), // 2000 read pairs
- )
+ );
});
// let chemistry: ChemistryDef =
// serde_json::from_reader(File::open("tests/rna_read/sc_vdj_chemistry.json").unwrap())
@@ -167,12 +167,12 @@ fn run_fastq_benchmark(c: &mut Criterion) {
c.bench_function("bench-fastq-wc", |b| {
b.iter(
|| assert_eq!(simple_count(INTERLEAVED_FASTQ), 16000), // 16000 lines
- )
+ );
});
c.bench_function("bench-fastq-read-pair-iter-count", |b| {
b.iter(
|| assert_eq!(read_pair_count(INTERLEAVED_FASTQ), 2000), // 2000 read pairs
- )
+ );
});
// let chemistry: ChemistryDef =
// serde_json::from_reader(File::open("tests/rna_read/sc_vdj_chemistry.json").unwrap())
@@ -222,7 +222,7 @@ fn sseq_serde_bincode(c: &mut Criterion) {
let mut b = Vec::new();
bincode::serialize_into(&mut b, &sseqs).unwrap();
assert!(!b.is_empty());
- })
+ });
});
}

19 changes: 7 additions & 12 deletions src/adapter_trimmer.rs
@@ -1,11 +1,9 @@
//! Trim adapters from reads using a combination of
//! k-mer matches, sparse alignment and banded alignment.
- //! Inspired by the [cutadapt](https://cutadapt.readthedocs.io/)
- //! tool.
+ //! Inspired by the [cutadapt](https://cutadapt.readthedocs.io/) tool.
//!
//! # Features/Limitations
- //! * Supports regular, anchored and non-internal, 3' and 5'
- //! adapter trimming
+ //! * Supports regular, anchored and non-internal, 3' and 5' adapter trimming
//! * Linked adapters are not supported as of now
//! * Allowed error rate for the adapter is 10%
//!
@@ -23,13 +21,10 @@ use bio::alignment::pairwise::{self, MatchParams, Scoring};
use bio::alignment::sparse;
use bio::alignment::sparse::HashMapFx;
use serde::{Deserialize, Serialize};
- use std::cmp::Ordering;
- use std::cmp::{max, min};
- use std::collections::HashMap;
- use std::collections::HashSet;
+ use std::cmp::{max, min, Ordering};
+ use std::collections::{HashMap, HashSet};
use std::fmt::Debug;
use std::hash::BuildHasher;
- use std::i32;
use std::ops::Range;

type Aligner = pairwise::banded::Aligner<MatchParams>;
@@ -122,7 +117,7 @@ pub enum AdapterLoc {
/// * `end`: whether it's a `FivePrime` adapter or a `ThreePrime` adapter
/// * `location`: Specify the location of the adapter (See [`AdapterLoc`](enum.AdapterLoc.html))
/// * `seq`: The sequence of the adapter as a `String`. One could use a `Vec<u8>` here, but
- /// chose a String for the ease of auto (de)serialization from a json file.
+ /// choose a String for the ease of auto (de)serialization from a json file.
///
/// # Example
/// The example below shows how you can create an `Adapter` from a JSON string
@@ -270,7 +265,7 @@ impl<'a> AdapterTrimmer<'a> {
///
/// Ouput:
/// * `Option<TrimResult>`: `None` if the adapter is not found,
- /// otherwise `Some(TrimResult)` (See [`TrimResult`](struct.TrimResult.html))
+ /// otherwise `Some(TrimResult)` (See [`TrimResult`](struct.TrimResult.html))
pub fn find(&mut self, read: &[u8]) -> Option<TrimResult> {
use bio::alignment::AlignmentOperation::{Del, Ins, Match, Subst};

@@ -642,7 +637,7 @@ fn compute_path(
{
let mut max_l_score = pairwise::MIN_SCORE;
let mut max_r_score = pairwise::MIN_SCORE;
- for &this_match in matches.iter() {
+ for &this_match in matches {
max_l_score = max(max_l_score, l_score(this_match));
max_r_score = max(max_r_score, r_score(this_match));
}
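Note (not part of the diff): dropping the explicit `.iter()` call in the loop head is the rewrite clippy's `explicit_iter_loop` lint suggests (assumed — the PR does not name the lint). A minimal sketch with made-up names:

```rust
// clippy::explicit_iter_loop: iterate the slice reference directly; `for x in
// slice` and `for x in slice.iter()` desugar to the same iterator.
fn max_left_score(matches: &[(i32, i32)]) -> i32 {
    let mut best = i32::MIN;
    // before: for &(l, _) in matches.iter() { ... }
    for &(l, _) in matches {
        best = best.max(l);
    }
    best
}

fn main() {
    assert_eq!(max_left_score(&[(1, 2), (5, 0), (3, 3)]), 5);
}
```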
19 changes: 7 additions & 12 deletions src/array.rs
@@ -106,12 +106,11 @@
*l = *r.borrow();
len += 1;
}
- if src.next().is_some() {
- panic!(
- "Error: Input iter exceeds capacity of {} bytes.",
- bytes.len()
- );
- }
+ assert!(
+ src.next().is_none(),
+ "Error: Input iter exceeds capacity of {} bytes.",
+ bytes.len()
+ );

ByteArray {
length: len,
@@ -200,9 +199,7 @@
type Output = u8;

fn index(&self, index: usize) -> &Self::Output {
- if index >= self.length as usize {
- panic!("index out of bounds")
- }
+ assert!(index < self.length as usize, "index out of bounds");

&self.bytes[index]
}
@@ -213,9 +210,7 @@
T: ArrayContent,
{
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
- if index >= self.length as usize {
- panic!("index out of bounds")
- }
+ assert!(index < self.length as usize, "index out of bounds");
&mut self.bytes[index]
}
}
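Note (not part of the diff): replacing `if cond { panic!(...) }` with `assert!` is what clippy's `manual_assert` lint proposes (assumed, not stated in the PR). A standalone sketch with hypothetical names:

```rust
// clippy::manual_assert: an `if cond { panic!(...) }` block is just the
// negated form of an assert! with a message.
fn byte_at(bytes: &[u8], length: usize, index: usize) -> u8 {
    // before: if index >= length { panic!("index out of bounds") }
    assert!(index < length, "index out of bounds");
    bytes[index]
}

fn main() {
    let data = *b"ACGT";
    assert_eq!(byte_at(&data, data.len(), 2), b'G');
}
```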
4 changes: 1 addition & 3 deletions src/background_iterator.rs
@@ -93,9 +93,7 @@ mod test {
let v = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

let iter = (0..11usize).map(move |i| {
- if v[i] == 5 {
- panic!("simulated panic");
- }
+ assert!(v[i] != 5, "simulated panic");
v[i]
});

2 changes: 1 addition & 1 deletion src/filenames/bcl2fastq.rs
@@ -495,7 +495,7 @@ mod tests_from_tenkit {
#[test]
fn test_sample_name_verification() -> Result<()> {
let path = "tests/filenames/tenkit91";
for &s in ["test_sample", "test_sample_suffix"].iter() {
for &s in &["test_sample", "test_sample_suffix"] {
let query = Bcl2FastqDef {
fastq_path: path.to_string(),
sample_name_spec: s.into(),
2 changes: 1 addition & 1 deletion src/filenames/bcl_processor.rs
@@ -75,7 +75,7 @@ impl FindFastqs for BclProcessorFastqDef {
if self.sample_index_spec.matches(&bcl_proc.si)
&& self.lane_spec.contains(bcl_proc.lane_mode())
{
- res.push(fastqs)
+ res.push(fastqs);
}
}

13 changes: 6 additions & 7 deletions src/filenames/fastq_dir.rs
@@ -37,21 +37,20 @@ impl Bcl2FastqDir {
});
}

- let is_lane_split = match fastq_data
+ let Ok(is_lane_split) = fastq_data
.iter()
.map(|(g, _)| match g.lane_mode {
LaneMode::NoLaneSplitting => false,
LaneMode::SingleLane(_) => true,
})
.dedup()
.exactly_one()
- {
- Ok(val) => val,
- Err(_) => bail!(
+ else {
+ bail!(
"Some files in the fastq path {} are split by lane, while some are not. \
This is not supported.",
fastq_path.display()
- ),
+ );
};

let samples = fastq_data.iter().map(|(g, _)| g.sample.clone()).collect();
@@ -230,7 +229,7 @@ impl FastqChecker {

let lane_spec = match lanes {
None => LaneSpec::Any,
- Some(ref l) => LaneSpec::Lanes(l.iter().cloned().collect()),
+ Some(l) => LaneSpec::Lanes(l.iter().copied().collect()),
};

if bcl_dir
@@ -243,7 +242,7 @@

Ok(match sample_name_spec {
SampleNameSpec::Names(names) => names,
- _ => unreachable!(),
+ SampleNameSpec::Any => unreachable!(),
})
}
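Note (not part of the diff): the `let ... else` rewrite in this file is the shape clippy's `manual_let_else` lint suggests (assumed, not stated in the PR). A minimal sketch using `anyhow::bail!`; the function name and messages are made up, and it needs the `anyhow` crate:

```rust
// clippy::manual_let_else: a match that binds on the happy path and diverges
// otherwise (here via anyhow::bail!) can be written as `let ... else`.
use anyhow::{bail, Result};

fn shared_flag(values: &[bool]) -> Result<bool> {
    // before: let first = match values.first() { Some(&v) => v, None => bail!(...) };
    let Some(&first) = values.first() else {
        bail!("no values provided");
    };
    if values.iter().any(|&v| v != first) {
        bail!("values disagree");
    }
    Ok(first)
}

fn main() {
    assert!(shared_flag(&[true, true]).unwrap());
    assert!(shared_flag(&[]).is_err());
}
```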

2 changes: 1 addition & 1 deletion src/illumina_header_info.rs
@@ -36,7 +36,7 @@ impl InputFastqs {
.ok_or_else(|| anyhow!("No Read1 in FASTQ data"))?;

let header = std::str::from_utf8(header)?;
- let header_prefix = header.split(|x: char| x == ' ' || x == '/').next();
+ let header_prefix = header.split([' ', '/']).next();
if header_prefix.is_none() {
return Ok(None);
}
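Note (not part of the diff): the new call relies on `str::split` accepting an array of `char`s as a pattern (splitting on any of them), which needs a reasonably recent Rust toolchain. A small sketch; the header string is made up:

```rust
// str::split with a char array replaces the closure form
// `|x: char| x == ' ' || x == '/'` and splits on either character.
fn main() {
    let header = "@SIM:1:FCX:1:15:6329:1045/1 1:N:0:2";
    let prefix = header.split([' ', '/']).next();
    assert_eq!(prefix, Some("@SIM:1:FCX:1:15:6329:1045"));
}
```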
10 changes: 5 additions & 5 deletions src/lib.rs
@@ -4,9 +4,10 @@
//! Major functionality includes:
//! * Find groups FASTQs (R1/R2/I1/I2) following Illumina filename conventions
//! * Parsing flowcell information from Illumina FASTQ headers
//! * High-speed FASTQ I/O (via the `fastq` crate), with careful validation of FASTQ correctness and good error message.
//! * Containers for FASTQ read-pairs (along with index reads), providing access to 'technical' read components like cell barcode and
//! UMI sequences.
//! * High-speed FASTQ I/O (via the `fastq` crate), with careful validation of
//! FASTQ correctness and good error message.
//! * Containers for FASTQ read-pairs (along with index reads), providing access
//! to 'technical' read components like cell barcode and UMI sequences.
//! * Flexible read trimming inspired by `cutadapt`
#![deny(warnings, unused)]
@@ -35,8 +36,7 @@ use crate::read_pair_iter::{AnyReadPairIter, InputFastqs, ReadPairIter};
pub use crate::squality::SQuality;
pub use crate::sseq::SSeq;
use anyhow::Error;
- pub use fastq::OwnedRecord;
- pub use fastq::Record;
+ pub use fastq::{OwnedRecord, Record};
pub use read_pair::WhichRead;
use read_pair_iter::FastqError;
use serde::{Deserialize, Serialize};
60 changes: 26 additions & 34 deletions src/read_pair.rs
@@ -4,7 +4,7 @@
//! including the primary 'R1' and 'R2' and index 'I1' and 'I2' reads.
use crate::WhichEnd;
- use anyhow::{bail, Result};
+ use anyhow::{bail, ensure, Result};
use bytes::{Bytes, BytesMut};
use fastq::{OwnedRecord, Record};
use serde::{Deserialize, Serialize};
@@ -186,9 +186,9 @@ impl RpRange {
/// # Args
/// * `read` - Specify `WhichRead`
/// * `offset` - Start of the interval. Must be less than 2^15 (=32,768)
- /// * `len` - Optional length that determines the end of the interval. A
- /// value `None` indicates everything from `offset` until the end of the
- /// `read`. Must be less than 2^15 (=32,768)
+ /// * `len` - Optional length that determines the end of the interval.
+ /// A value `None` indicates everything from `offset` until the end of the `read`.
+ /// Must be less than 2^15 (=32,768)
///
/// # Panics
/// * If `offset` or `len` is >= `2^15`
@@ -205,11 +205,11 @@
///
/// # Tests
/// * `test_rprange_invalid_offset()` - Test that this function panics with
- /// an offset that is too large
+ /// an offset that is too large
/// * `test_rprange_invalid_len()` - Test that this function panics with
- /// a length that is too large
+ /// a length that is too large
/// * `prop_test_rprange_representation()` - Test that arbitrary construction of RpRange
- /// stores the values correctly.
+ /// stores the values correctly.
pub fn new(read: WhichRead, offset: usize, len: Option<usize>) -> RpRange {
assert!(offset < (1 << 15));
let len_bits = match len {
@@ -296,7 +296,7 @@ impl RpRange {
///
/// # Tests
/// * `test_rprange_intersect_panic()` - Make sure that this function panics
- /// if the reads do not match
+ /// if the reads do not match
/// * `test_rprange_intersect_both_open()` - Test a case when both lengths are not set
/// * `test_rprange_intersect_self_open()` - Test a case when only self length is set
/// * `test_rprange_intersect_other_open()` - Test a case when only other length is set
@@ -363,7 +363,7 @@ impl RpRange {
/// * `test_shrink_invalid_range_3()`: Test for panic if shrink range start > end
/// * `test_rprange_trivial_shrink()`: Test shrink to an empty range.
/// * `prop_test_rprange_shrink()`: Test shrink for arbitrary values of
- /// `RpRange` and valid `shrink_range`
+ /// `RpRange` and valid `shrink_range`
pub fn shrink(&mut self, shrink_range: &ops::Range<usize>) {
assert!(
shrink_range.start <= shrink_range.end,
@@ -496,14 +496,12 @@ impl<'a> MutReadPair<'a> {

pub fn new<R: Record>(buffer: &'a mut BytesMut, rr: &[Option<R>; 4]) -> MutReadPair<'a> {
let mut rp = MutReadPair::empty(buffer);

- for (_rec, which) in rr.iter().zip(WhichRead::read_types().iter()) {
- if let Some(ref rec) = *_rec {
- rp.push_read(rec, *which)
+ for (rec, which) in rr.iter().zip(WhichRead::read_types()) {
+ if let Some(rec) = rec {
+ rp.push_read(rec, which);
}
// default ReadOffsets is exists = false
}

rp
}

@@ -600,32 +598,26 @@ impl ReadPair {

pub fn check_range(&self, range: &RpRange, region_name: &str) -> Result<()> {
let req_len = range.offset() + range.len().unwrap_or(0);

- match self.get(range.read(), ReadPart::Seq) {
- Some(read) => {
- if read.len() < req_len {
- bail!(
- "{} is expected in positions {}-{} in Read {}, but read is {} bp long.",
- region_name,
- range.offset(),
- req_len,
- range.read(),
- read.len()
- );
- } else {
- Ok(())
- }
- }
- None => bail!(
+ let Some(read) = self.get(range.read(), ReadPart::Seq) else {
+ bail!(
"{region_name} is missing from FASTQ. Read {} is not present.",
range.read()
- ),
- }
+ );
+ };
+ ensure!(
+ read.len() >= req_len,
+ "{region_name} is expected in positions {}-{} in Read {}, but read is {} bp long.",
+ range.offset(),
+ req_len,
+ range.read(),
+ read.len()
+ );
+ Ok(())
}

pub fn to_owned_record(&self) -> HashMap<WhichRead, OwnedRecord> {
let mut result = HashMap::new();
- for &which in WhichRead::read_types().iter() {
+ for &which in &WhichRead::read_types() {
if self.offsets[which as usize].exists {
let w = self.offsets[which as usize];
let rec = OwnedRecord {
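Note (not part of the diff): `check_range` now uses `anyhow::ensure!`, which returns early with a formatted error when its condition is false, replacing an explicit if/bail! block. A minimal sketch; the function name and messages are made up, and it needs the `anyhow` crate:

```rust
// anyhow::ensure!(cond, fmt, args...) bails with the formatted message when
// cond is false, otherwise execution continues.
use anyhow::{ensure, Result};

fn check_len(read: &[u8], req_len: usize) -> Result<()> {
    ensure!(
        read.len() >= req_len,
        "read is {} bp long, but {} bp are required",
        read.len(),
        req_len
    );
    Ok(())
}

fn main() {
    assert!(check_len(b"ACGT", 4).is_ok());
    assert!(check_len(b"ACGT", 5).is_err());
}
```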