Skip to content

Commit

Permalink
CHORE: added todos and changed unwraps to expects when known to not p…
Browse files Browse the repository at this point in the history
…anic
  • Loading branch information
sander-willems-bruker committed Oct 3, 2024
1 parent 2a7f6bf commit 3285ec6
Show file tree
Hide file tree
Showing 5 changed files with 28 additions and 7 deletions.
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@ TODO
* Improve docs
* Improve tests
* Parse CompressionType1
* Tarred file reader
* Clean up src (FrameReader, ...)
* Cleaner try_from conversions/readers
* Make Path of TimsTOF data into special type
* Single access point for all readers?
* Few unchecked unwraps left
Expand Down
4 changes: 3 additions & 1 deletion src/io/readers/file_readers/tdf_blob_reader/tdf_blobs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@ impl TdfBlob {

/// Decodes every `u32` stored in this blob, in index order.
///
/// Iterates `0..self.len()`, so each `get(index)` is in bounds by
/// construction and the `expect` can never fire.
pub fn get_all(&self) -> Vec<u32> {
    (0..self.len())
        .map(|index| {
            self.get(index).expect(
                "When iterating over the length of a tdf blob, you cannot go out of bounds",
            )
        })
        .collect()
}

Expand Down
5 changes: 4 additions & 1 deletion src/io/readers/metadata_reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,10 @@ fn get_im_converter(
) -> Result<Scan2ImConverter, MetadataReaderError> {
let scan_counts: Vec<u32> =
tdf_sql_reader.read_column_from_table("NumScans", "Frames")?;
let scan_max_index = *scan_counts.iter().max().unwrap(); // SqlReader cannot return empty vecs, so always succeeds
let scan_max_index = *scan_counts
.iter()
.max()
.expect("SqlReader cannot return empty vecs, so there is always a max scan index");
let (im_min, im_max) = get_im_bounds(sql_metadata)?;
Ok(Scan2ImConverter::from_boundaries(
im_min,
Expand Down
18 changes: 14 additions & 4 deletions src/io/readers/quad_settings_reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ impl QuadrupoleSettingsReader {
.iter()
.map(|x| x.window_group)
.max()
.unwrap() as usize; // SqlReader cannot return empty vecs, so always succeeds
.expect("SqlReader cannot return empty vecs, so there is always a max window_group")
as usize;
let quadrupole_settings = (0..window_group_count)
.map(|window_group| {
let mut quad = QuadrupoleSettings::default();
Expand Down Expand Up @@ -306,9 +307,18 @@ fn expand_window_settings(
let window = window_group.window_group;
let frame = window_group.frame;
let group = &quadrupole_settings[window as usize - 1];
let window_group_start =
group.scan_starts.iter().min().unwrap().clone(); // SqlReader cannot return empty vecs, so always succeeds
let window_group_end = group.scan_ends.iter().max().unwrap().clone(); // SqlReader cannot return empty vecs, so always succeeds
let window_group_start = group
.scan_starts
.iter()
.min()
.expect("SqlReader cannot return empty vecs, so there is always min window_group index")
.clone();
let window_group_end = group
.scan_ends
.iter()
.max()
.expect("SqlReader cannot return empty vecs, so there is always max window_group index")
.clone();
for (sws, swe) in
scan_range_subsplit(window_group_start, window_group_end, &strategy)
{
Expand Down
5 changes: 4 additions & 1 deletion src/io/readers/spectrum_reader/tdf/dda.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,10 @@ impl DDARawSpectrumReader {
let pasef_precursors =
&pasef_frames.iter().map(|x| x.precursor).collect();
let order: Vec<usize> = argsort(&pasef_precursors);
let max_precursor = pasef_precursors.iter().max().unwrap(); // SqlReader cannot return empty vecs, so always succeeds
let max_precursor = pasef_precursors
.iter()
.max()
.expect("SqlReader cannot return empty vecs, so there is always a max precursor index");
let mut offsets: Vec<usize> = Vec::with_capacity(max_precursor + 1);
offsets.push(0);
for (offset, &index) in order.iter().enumerate().take(order.len() - 1) {
Expand Down

0 comments on commit 3285ec6

Please sign in to comment.