@@ -352,8 +352,10 @@ impl<'a> CoverageSpansGenerator<'a> {
 
         let prev = self.take_prev();
         debug!("    AT END, adding last prev={prev:?}");
-        let pending_dups = self.pending_dups.split_off(0);
-        for dup in pending_dups {
+
+        // Take `pending_dups` so that we can drain it while calling self methods.
+        // It is never used as a field after this point.
+        for dup in std::mem::take(&mut self.pending_dups) {
             debug!("    ...adding at least one pending dup={:?}", dup);
             self.push_refined_span(dup);
         }
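
The hunk above works because `std::mem::take` swaps an empty `Vec` into the field and hands the old contents to the loop by value, so the iteration no longer borrows `self` and `&mut self` methods can be called inside the loop body. A minimal standalone sketch of that pattern (the `Generator` struct and `u32` "spans" below are simplified stand-ins, not the real `CoverageSpansGenerator`):

struct Generator {
    pending_dups: Vec<u32>,
    refined_spans: Vec<u32>,
}

impl Generator {
    fn push_refined_span(&mut self, dup: u32) {
        self.refined_spans.push(dup);
    }

    fn flush_at_end(&mut self) {
        // `std::mem::take` leaves an empty Vec in the field; the loop owns the
        // taken Vec, so calling `&mut self` methods inside it is fine.
        for dup in std::mem::take(&mut self.pending_dups) {
            self.push_refined_span(dup);
        }
    }
}

fn main() {
    let mut g = Generator { pending_dups: vec![1, 2, 3], refined_spans: Vec::new() };
    g.flush_at_end();
    assert_eq!(g.refined_spans, [1, 2, 3]);
    assert!(g.pending_dups.is_empty());
}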
@@ -468,11 +470,16 @@ impl<'a> CoverageSpansGenerator<'a> {
             previous iteration, or prev started a new disjoint span"
         );
         if dup.span.hi() <= self.curr().span.lo() {
-            let pending_dups = self.pending_dups.split_off(0);
-            for dup in pending_dups.into_iter() {
+            // Temporarily steal `pending_dups` into a local, so that we can
+            // drain it while calling other self methods.
+            let mut pending_dups = std::mem::take(&mut self.pending_dups);
+            for dup in pending_dups.drain(..) {
                 debug!("    ...adding at least one pending={:?}", dup);
                 self.push_refined_span(dup);
             }
+            // The list of dups is now empty, but we can recycle its capacity.
+            assert!(pending_dups.is_empty() && self.pending_dups.is_empty());
+            self.pending_dups = pending_dups;
         } else {
             self.pending_dups.clear();
         }
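
This hunk adds one more step: the stolen `Vec` is drained rather than consumed, then moved back into the field, so its heap allocation is reused on later iterations instead of being dropped and reallocated. A small sketch under the same simplifying assumptions (plain `Vec<u32>` values instead of the real span types):

fn flush_pending(pending: &mut Vec<u32>, refined: &mut Vec<u32>) {
    // Steal the Vec so the caller's field is left empty while we drain.
    let mut taken = std::mem::take(pending);
    for dup in taken.drain(..) {
        refined.push(dup);
    }
    // `drain(..)` empties the Vec but keeps its heap buffer, so putting it back
    // lets later calls reuse the capacity instead of reallocating.
    assert!(taken.is_empty());
    *pending = taken;
}

fn main() {
    let mut pending = Vec::with_capacity(16);
    pending.extend([4, 5, 6]);
    let mut refined = Vec::new();
    flush_pending(&mut pending, &mut refined);
    assert_eq!(refined, [4, 5, 6]);
    assert!(pending.is_empty() && pending.capacity() >= 16);
}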
@@ -521,7 +528,10 @@ impl<'a> CoverageSpansGenerator<'a> {
         let has_pre_closure_span = prev.span.lo() < right_cutoff;
         let has_post_closure_span = prev.span.hi() > right_cutoff;
 
-        let mut pending_dups = self.pending_dups.split_off(0);
+        // Temporarily steal `pending_dups` into a local, so that we can
+        // mutate and/or drain it while calling other self methods.
+        let mut pending_dups = std::mem::take(&mut self.pending_dups);
+
         if has_pre_closure_span {
             let mut pre_closure = self.prev().clone();
             pre_closure.span = pre_closure.span.with_hi(left_cutoff);
@@ -535,6 +545,7 @@ impl<'a> CoverageSpansGenerator<'a> {
             }
             self.push_refined_span(pre_closure);
         }
+
         if has_post_closure_span {
             // Mutate `prev.span()` to start after the closure (and discard curr).
             // (**NEVER** update `prev_original_span` because it affects the assumptions
@@ -545,12 +556,15 @@ impl<'a> CoverageSpansGenerator<'a> {
                 debug!("    ...and at least one overlapping dup={:?}", dup);
                 dup.span = dup.span.with_lo(right_cutoff);
             }
-            self.pending_dups.append(&mut pending_dups);
             let closure_covspan = self.take_curr(); // Prevent this curr from becoming prev.
             self.push_refined_span(closure_covspan); // since self.prev() was already updated
         } else {
             pending_dups.clear();
         }
+
+        // Restore the modified post-closure spans, or the empty vector's capacity.
+        assert!(self.pending_dups.is_empty());
+        self.pending_dups = pending_dups;
     }
 
     /// Called if `curr.span` equals `prev_original_span` (and potentially equal to all
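
The last three hunks follow a single steal/mutate/restore shape: `pending_dups` is taken out of the field up front, mutated or cleared while other `&mut self` methods run, and written back unconditionally at the end, so the field ends up holding either the adjusted duplicate spans or an empty `Vec` that keeps its capacity. A condensed sketch of that shape, again with hypothetical simplified types rather than the real `CoverageSpan` machinery:

struct Carver {
    pending_dups: Vec<u32>,
    refined_spans: Vec<u32>,
}

impl Carver {
    fn push_refined_span(&mut self, span: u32) {
        self.refined_spans.push(span);
    }

    fn carve(&mut self, has_post_closure_span: bool, right_cutoff: u32) {
        // Steal the field so it can be mutated while `&mut self` methods run.
        let mut pending_dups = std::mem::take(&mut self.pending_dups);
        if has_post_closure_span {
            // Adjust the stolen dups in place (a stand-in for `with_lo(right_cutoff)`).
            for dup in pending_dups.iter_mut() {
                *dup = (*dup).max(right_cutoff);
            }
            self.push_refined_span(right_cutoff);
        } else {
            pending_dups.clear();
        }
        // Restore either the adjusted dups or the empty Vec and its capacity.
        assert!(self.pending_dups.is_empty());
        self.pending_dups = pending_dups;
    }
}

fn main() {
    let mut c = Carver { pending_dups: vec![10, 20, 30], refined_spans: Vec::new() };
    c.carve(true, 25);
    assert_eq!(c.pending_dups, [25, 25, 30]);
    assert_eq!(c.refined_spans, [25]);
}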