chore: cargo fmt
hschimke committed Feb 1, 2024
1 parent 1755111 commit 13c5bb5
Showing 6 changed files with 15 additions and 26 deletions.
2 changes: 1 addition & 1 deletion src/common/bit_matrix.rs
@@ -530,7 +530,7 @@ impl BitMatrix {
if theBits != 0 {
top = top.min(y);
bottom = bottom.max(y);

if x32 * 32 < left as usize {
let mut bit = 0;
while (theBits << (31 - bit)) == 0 {
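
The loop in this hunk walks up from bit 0 until the word, shifted so the candidate bit sits in the top position, becomes nonzero, i.e. it finds the lowest set bit of a 32-bit word. Below is a minimal standalone sketch of that search (not the crate's code; `lowest_set_bit` is a hypothetical helper), compared against `u32::trailing_zeros`, which expresses the same idea directly.

```rust
fn lowest_set_bit(the_bits: u32) -> Option<u32> {
    if the_bits == 0 {
        return None;
    }
    let mut bit = 0;
    // Shift the word so candidate bit `bit` lands in the top position;
    // the result stays zero until `bit` reaches the lowest set bit.
    while (the_bits << (31 - bit)) == 0 {
        bit += 1;
    }
    Some(bit)
}

fn main() {
    let word: u32 = 0b1011_0000;
    assert_eq!(lowest_set_bit(word), Some(4));
    // The standard library expresses the same search directly.
    assert_eq!(word.trailing_zeros(), 4);
}
```
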
2 changes: 1 addition & 1 deletion src/common/global_histogram_binarizer.rs
@@ -156,7 +156,7 @@ impl<LS: LuminanceSource> GlobalHistogramBinarizer<LS> {
let row = height * y / 5;
let localLuminances = source.get_row(row);
let right = (width * 4) / 5;
for x in (width/5)..right {
for x in (width / 5)..right {
let pixel = localLuminances[x];
localBuckets[(pixel >> LUMINANCE_SHIFT) as usize] += 1;
}
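
For context, the reformatted loop builds a coarse luminance histogram from the middle three fifths of a row by right-shifting each 8-bit pixel into a bucket. A minimal sketch follows, assuming a shift of 3 (32 buckets, matching the upstream ZXing binarizer constants); `bucket_row` is a hypothetical helper, not the crate's API.

```rust
// Assumed values, mirroring upstream ZXing: shift of 3 gives 32 buckets.
const LUMINANCE_SHIFT: u32 = 3;
const LUMINANCE_BUCKETS: usize = 1 << (8 - LUMINANCE_SHIFT);

fn bucket_row(row: &[u8]) -> [u32; LUMINANCE_BUCKETS] {
    let mut buckets = [0u32; LUMINANCE_BUCKETS];
    let width = row.len();
    // As in the hunk above, only the middle three fifths of the row is sampled.
    for &pixel in &row[width / 5..(width * 4) / 5] {
        buckets[(pixel >> LUMINANCE_SHIFT) as usize] += 1;
    }
    buckets
}

fn main() {
    let row = vec![10u8; 100];
    let buckets = bucket_row(&row);
    // Columns 20..80 are sampled, so 60 pixels land in bucket 10 >> 3 = 1.
    assert_eq!(buckets[(10u8 >> LUMINANCE_SHIFT) as usize], 60);
}
```
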
7 changes: 1 addition & 6 deletions src/common/grid_sampler.rs
@@ -172,12 +172,7 @@ pub trait GridSampler {
}
// dbg!(bits.to_string());

Ok((
bits,
[
Point::default();4
],
))
Ok((bits, [Point::default(); 4]))
}

/**
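
The collapsed return value relies on Rust's array-repeat syntax, `[Point::default(); 4]`, which copies one default point into all four corner slots; rustfmt joins the tuple because it now fits on a single line. A standalone sketch with a hypothetical `Point` type (not the crate's):

```rust
// Hypothetical stand-in for the crate's point type; array-repeat needs Copy.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
struct Point {
    x: f32,
    y: f32,
}

fn main() {
    // One default value, copied into all four slots.
    let corners = [Point::default(); 4];
    let origin = Point { x: 0.0, y: 0.0 };
    assert!(corners.iter().all(|p| *p == origin));
}
```
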
12 changes: 6 additions & 6 deletions src/common/hybrid_binarizer.rs
@@ -170,11 +170,11 @@ impl<LS: LuminanceSource> HybridBinarizer<LS> {
for y in 0..sub_height {
// for (int y = 0; y < subHeight; y++) {
let yoffset = u32::min(y << BLOCK_SIZE_POWER, maxYOffset);

let top = Self::cap(y, sub_height - 3);
for x in 0..sub_width {
// for (int x = 0; x < subWidth; x++) {
let xoffset = u32::min(x << BLOCK_SIZE_POWER,maxXOffset);
let xoffset = u32::min(x << BLOCK_SIZE_POWER, maxXOffset);

let left = Self::cap(x, sub_width - 3);
let mut sum = 0;
@@ -240,12 +240,12 @@ impl<LS: LuminanceSource> HybridBinarizer<LS> {
let mut blackPoints = vec![vec![0; subWidth as usize]; subHeight as usize];
for y in 0..subHeight {
// for (int y = 0; y < subHeight; y++) {
let yoffset = u32::min(y << BLOCK_SIZE_POWER,maxYOffset as u32);
let yoffset = u32::min(y << BLOCK_SIZE_POWER, maxYOffset as u32);

for x in 0..subWidth {
// for (int x = 0; x < subWidth; x++) {
let xoffset = u32::min(x << BLOCK_SIZE_POWER,maxXOffset as u32);
let xoffset = u32::min(x << BLOCK_SIZE_POWER, maxXOffset as u32);

let mut sum: u32 = 0;
let mut min = 0xff;
let mut max = 0;
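
Both hunks compute clamped block offsets: `min(index << BLOCK_SIZE_POWER, maxOffset)` pulls the last block back so it still ends inside the image. A standalone sketch, assuming 8x8 blocks (`BLOCK_SIZE_POWER = 3`, as in the upstream ZXing HybridBinarizer); `block_offsets` is a hypothetical helper, not the crate's API.

```rust
// Assumed values, mirroring upstream ZXing: 8x8 pixel blocks.
const BLOCK_SIZE_POWER: u32 = 3;
const BLOCK_SIZE: u32 = 1 << BLOCK_SIZE_POWER;

fn block_offsets(dimension: u32) -> Vec<u32> {
    assert!(dimension >= BLOCK_SIZE, "sketch assumes at least one full block");
    // Number of blocks, rounded up, and the start of the last full block.
    let sub = (dimension + BLOCK_SIZE - 1) >> BLOCK_SIZE_POWER;
    let max_offset = dimension - BLOCK_SIZE;
    (0..sub)
        .map(|i| u32::min(i << BLOCK_SIZE_POWER, max_offset))
        .collect()
}

fn main() {
    // A 20-pixel dimension gets three 8-pixel blocks; the last one is pulled
    // back so it still ends inside the image (offset 12 instead of 16).
    assert_eq!(block_offsets(20), vec![0, 8, 12]);
}
```
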
10 changes: 4 additions & 6 deletions src/common/string_utils.rs
@@ -92,11 +92,9 @@ impl StringUtils {
}

// First try UTF-16, assuming anything with its BOM is UTF-16

if bytes.len() > 2
&& ((bytes[0..=1] == [0xFE,0xFF]) || (bytes[0..=1] == [0xFF , 0xFE]))
{
if bytes[0..=1] == [0xFE , 0xFF] {

if bytes.len() > 2 && ((bytes[0..=1] == [0xFE, 0xFF]) || (bytes[0..=1] == [0xFF, 0xFE])) {
if bytes[0..=1] == [0xFE, 0xFF] {
return Some(CharacterSet::UTF16BE);
} else {
return Some(CharacterSet::UTF16LE);
@@ -121,7 +119,7 @@ impl StringUtils {
let mut sjis_max_double_bytes_word_length = 0;
let mut iso_high_other = 0;

let utf8bom = bytes.len() > 3 && bytes[0..=2] == [ 0xEF , 0xBB , 0xBF];
let utf8bom = bytes.len() > 3 && bytes[0..=2] == [0xEF, 0xBB, 0xBF];

// for i in 0..length {
for value in bytes.iter().take(length).copied() {
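
The reformatted conditions are byte-order-mark checks: `FE FF` marks UTF-16BE, `FF FE` marks UTF-16LE, and `EF BB BF` marks UTF-8. A standalone sketch of the same checks, with a hypothetical `Bom` enum standing in for the crate's `CharacterSet`:

```rust
// Hypothetical result type; the crate returns a CharacterSet instead.
#[derive(Debug, PartialEq)]
enum Bom {
    Utf16Be,
    Utf16Le,
    Utf8,
}

fn detect_bom(bytes: &[u8]) -> Option<Bom> {
    match bytes {
        [0xEF, 0xBB, 0xBF, ..] => Some(Bom::Utf8),
        [0xFE, 0xFF, ..] => Some(Bom::Utf16Be),
        [0xFF, 0xFE, ..] => Some(Bom::Utf16Le),
        _ => None,
    }
}

fn main() {
    assert_eq!(detect_bom(&[0xFE, 0xFF, 0x00, 0x41]), Some(Bom::Utf16Be));
    assert_eq!(detect_bom(&[0xEF, 0xBB, 0xBF, b'h', b'i']), Some(Bom::Utf8));
    assert_eq!(detect_bom(b"plain ascii"), None);
}
```
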
8 changes: 2 additions & 6 deletions src/oned/telepen_reader.rs
@@ -71,12 +71,8 @@ impl OneDReader for TelepenReader {
// largest gaps.
while j <= end {
let currentCounter = theCounters[j];
if currentCounter < minBar {
minBar = currentCounter;
}
if currentCounter > maxBar {
maxBar = currentCounter;
}
minBar = u32::min(currentCounter, minBar);
maxBar = u32::max(currentCounter, maxBar);

j += 1;
}
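
The rewrite replaces the two `if` updates with `u32::min` / `u32::max` while scanning the counters for the narrowest and widest bars. A standalone sketch of that running min/max pattern; `bar_extremes` and its data are illustrative, not the reader's real state.

```rust
fn bar_extremes(counters: &[u32]) -> (u32, u32) {
    let mut min_bar = u32::MAX;
    let mut max_bar = 0;
    for &c in counters {
        // Equivalent to the removed `if c < min_bar { ... }` / `if c > max_bar { ... }`.
        min_bar = u32::min(c, min_bar);
        max_bar = u32::max(c, max_bar);
    }
    (min_bar, max_bar)
}

fn main() {
    assert_eq!(bar_extremes(&[3, 9, 2, 7]), (2, 9));
}
```
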
