@@ -353,8 +353,10 @@ impl<'a> CoverageSpansGenerator<'a> {
 
         let prev = self.take_prev();
         debug!("    AT END, adding last prev={prev:?}");
-        let pending_dups = self.pending_dups.split_off(0);
-        for dup in pending_dups {
+
+        // Take `pending_dups` so that we can drain it while calling self methods.
+        // It is never used as a field after this point.
+        for dup in std::mem::take(&mut self.pending_dups) {
             debug!("    ...adding at least one pending dup={:?}", dup);
             self.push_refined_span(dup);
         }
@@ -475,11 +477,16 @@ impl<'a> CoverageSpansGenerator<'a> {
                 previous iteration, or prev started a new disjoint span"
             );
             if last_dup.span.hi() <= self.curr().span.lo() {
-                let pending_dups = self.pending_dups.split_off(0);
-                for dup in pending_dups.into_iter() {
+                // Temporarily steal `pending_dups` into a local, so that we can
+                // drain it while calling other self methods.
+                let mut pending_dups = std::mem::take(&mut self.pending_dups);
+                for dup in pending_dups.drain(..) {
                     debug!("    ...adding at least one pending={:?}", dup);
                     self.push_refined_span(dup);
                 }
+                // The list of dups is now empty, but we can recycle its capacity.
+                assert!(pending_dups.is_empty() && self.pending_dups.is_empty());
+                self.pending_dups = pending_dups;
             } else {
                 self.pending_dups.clear();
             }
@@ -527,7 +534,10 @@ impl<'a> CoverageSpansGenerator<'a> {
         let has_pre_closure_span = prev.span.lo() < right_cutoff;
         let has_post_closure_span = prev.span.hi() > right_cutoff;
 
-        let mut pending_dups = self.pending_dups.split_off(0);
+        // Temporarily steal `pending_dups` into a local, so that we can
+        // mutate and/or drain it while calling other self methods.
+        let mut pending_dups = std::mem::take(&mut self.pending_dups);
+
         if has_pre_closure_span {
             let mut pre_closure = self.prev().clone();
             pre_closure.span = pre_closure.span.with_hi(left_cutoff);
@@ -541,6 +551,7 @@ impl<'a> CoverageSpansGenerator<'a> {
             }
             self.push_refined_span(pre_closure);
         }
+
         if has_post_closure_span {
             // Mutate `prev.span()` to start after the closure (and discard curr).
             // (**NEVER** update `prev_original_span` because it affects the assumptions
@@ -551,12 +562,15 @@ impl<'a> CoverageSpansGenerator<'a> {
                 debug!("    ...and at least one overlapping dup={:?}", dup);
                 dup.span = dup.span.with_lo(right_cutoff);
             }
-            self.pending_dups.append(&mut pending_dups);
             let closure_covspan = self.take_curr(); // Prevent this curr from becoming prev.
             self.push_refined_span(closure_covspan); // since self.prev() was already updated
         } else {
             pending_dups.clear();
         }
+
+        // Restore the modified post-closure spans, or the empty vector's capacity.
+        assert!(self.pending_dups.is_empty());
+        self.pending_dups = pending_dups;
     }
 
     /// Called if `curr.span` equals `prev_original_span` (and potentially equal to all
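
Note: every hunk above applies the same borrow-splitting pattern: move the `pending_dups` vector out of `self` with `std::mem::take`, drain or mutate the local copy while calling other `&mut self` methods, then move it back. Below is a minimal standalone sketch of that pattern; the names (`Refiner`, `pending`, `push_refined`) are hypothetical and are not the actual rustc types.

// Minimal sketch of the take/drain/restore pattern; names are illustrative only.
struct Refiner {
    pending: Vec<u32>,
    refined: Vec<u32>,
}

impl Refiner {
    fn push_refined(&mut self, value: u32) {
        self.refined.push(value);
    }

    fn flush_pending(&mut self) {
        // Steal `pending` into a local so the loop doesn't hold a borrow of
        // `self.pending` while `push_refined` takes `&mut self`.
        let mut pending = std::mem::take(&mut self.pending);
        for value in pending.drain(..) {
            self.push_refined(value);
        }
        // The local is now empty; move it back so its allocation is reused.
        assert!(pending.is_empty() && self.pending.is_empty());
        self.pending = pending;
    }
}

fn main() {
    let mut r = Refiner { pending: vec![1, 2, 3], refined: Vec::new() };
    r.flush_pending();
    assert_eq!(r.refined, [1, 2, 3]);
    assert!(r.pending.capacity() >= 3); // the original buffer's capacity survives the flush
}

Assigning the drained vector back, as the hunks above do, lets later iterations recycle its capacity instead of starting from a fresh `Vec`.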