@@ -54,13 +54,6 @@ int StartPosition(SharedFunctionInfo info) {
   return start;
 }
 
-bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
-  int a_start = StartPosition(a);
-  int b_start = StartPosition(b);
-  if (a_start == b_start) return a.EndPosition() > b.EndPosition();
-  return a_start < b_start;
-}
-
 bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
   DCHECK_NE(kNoSourcePosition, a.start);
   DCHECK_NE(kNoSourcePosition, b.start);
@@ -482,32 +475,12 @@ void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
   // Reset all counters on the DebugInfo to zero.
   ResetAllBlockCounts(info);
 }
-}  // anonymous namespace
-
-std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
-  DCHECK(!isolate->is_best_effort_code_coverage());
-  std::unique_ptr<Coverage> result =
-      Collect(isolate, isolate->code_coverage_mode());
-  if (!isolate->is_collecting_type_profile() &&
-      (isolate->is_precise_binary_code_coverage() ||
-       isolate->is_block_binary_code_coverage())) {
-    // We do not have to hold onto feedback vectors for invocations we already
-    // reported. So we can reset the list.
-    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
-  }
-  return result;
-}
-
-std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
-  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
-}
-
-std::unique_ptr<Coverage> Coverage::Collect(
-    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
-  SharedToCounterMap counter_map;
 
+void CollectAndMaybeResetCounts(Isolate* isolate,
+                                SharedToCounterMap* counter_map,
+                                v8::debug::CoverageMode coverage_mode) {
   const bool reset_count =
-      collectionMode != v8::debug::CoverageMode::kBestEffort;
+      coverage_mode != v8::debug::CoverageMode::kBestEffort;
 
   switch (isolate->code_coverage_mode()) {
     case v8::debug::CoverageMode::kBlockBinary:
@@ -526,15 +499,15 @@ std::unique_ptr<Coverage> Coverage::Collect(
         DCHECK(shared.IsSubjectToDebugging());
         uint32_t count = static_cast<uint32_t>(vector.invocation_count());
         if (reset_count) vector.clear_invocation_count();
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
       }
       break;
     }
     case v8::debug::CoverageMode::kBestEffort: {
       DCHECK(!isolate->factory()
                   ->feedback_vectors_for_profiling_tools()
                   ->IsArrayList());
-      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
+      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
       HeapObjectIterator heap_iterator(isolate->heap());
       for (HeapObject current_obj = heap_iterator.Next();
            !current_obj.is_null(); current_obj = heap_iterator.Next()) {
@@ -543,8 +516,9 @@ std::unique_ptr<Coverage> Coverage::Collect(
         SharedFunctionInfo shared = func.shared();
         if (!shared.IsSubjectToDebugging()) continue;
         if (!(func.has_feedback_vector() ||
-              func.has_closure_feedback_cell_array()))
+              func.has_closure_feedback_cell_array())) {
           continue;
+        }
         uint32_t count = 0;
         if (func.has_feedback_vector()) {
           count =
@@ -555,7 +529,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
           // at least once. We don't have precise invocation count here.
           count = 1;
         }
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
       }
 
       // Also check functions on the stack to collect the count map. With lazy
@@ -564,12 +538,64 @@ std::unique_ptr<Coverage> Coverage::Collect(
       // updated (i.e. it didn't execute return / jump).
       for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
         SharedFunctionInfo shared = it.frame()->function().shared();
-        if (counter_map.Get(shared) != 0) continue;
-        counter_map.Add(shared, 1);
+        if (counter_map->Get(shared) != 0) continue;
+        counter_map->Add(shared, 1);
       }
       break;
     }
   }
+}
+
+// A {SFI, count} tuple is used to sort by source range (stored on
+// the SFI) and call count (in the counter map).
+struct SharedFunctionInfoAndCount {
+  SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
+      : info(info),
+        count(count),
+        start(StartPosition(info)),
+        end(info.EndPosition()) {}
+
+  // Sort by:
+  // - start, ascending.
+  // - end, descending.
+  // - count, ascending.
+  bool operator<(const SharedFunctionInfoAndCount& that) const {
+    if (this->start != that.start) return this->start < that.start;
+    if (this->end != that.end) return this->end > that.end;
+    return this->count < that.count;
+  }
+
+  SharedFunctionInfo info;
+  uint32_t count;
+  int start;
+  int end;
+};
+
+}  // anonymous namespace
+
+std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
+  DCHECK(!isolate->is_best_effort_code_coverage());
+  std::unique_ptr<Coverage> result =
+      Collect(isolate, isolate->code_coverage_mode());
+  if (!isolate->is_collecting_type_profile() &&
+      (isolate->is_precise_binary_code_coverage() ||
+       isolate->is_block_binary_code_coverage())) {
+    // We do not have to hold onto feedback vectors for invocations we already
+    // reported. So we can reset the list.
+    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+  }
+  return result;
+}
+
+std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
+  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
+}
+
+std::unique_ptr<Coverage> Coverage::Collect(
+    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
+  // Collect call counts for all functions.
+  SharedToCounterMap counter_map;
+  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);
 
   // Iterate shared function infos of every script and build a mapping
   // between source ranges and invocation counts.
@@ -584,30 +610,40 @@ std::unique_ptr<Coverage> Coverage::Collect(
     result->emplace_back(script_handle);
     std::vector<CoverageFunction>* functions = &result->back().functions;
 
-    std::vector<SharedFunctionInfo> sorted;
+    std::vector<SharedFunctionInfoAndCount> sorted;
 
     {
       // Sort functions by start position, from outer to inner functions.
       SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
       for (SharedFunctionInfo info = infos.Next(); !info.is_null();
            info = infos.Next()) {
-        sorted.push_back(info);
+        sorted.emplace_back(info, counter_map.Get(info));
       }
-      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
+      std::sort(sorted.begin(), sorted.end());
     }
 
     // Stack to track nested functions, referring function by index.
     std::vector<size_t> nesting;
 
     // Use sorted list to reconstruct function nesting.
-    for (SharedFunctionInfo info : sorted) {
-      int start = StartPosition(info);
-      int end = info.EndPosition();
-      uint32_t count = counter_map.Get(info);
+    for (const SharedFunctionInfoAndCount& v : sorted) {
+      SharedFunctionInfo info = v.info;
+      int start = v.start;
+      int end = v.end;
+      uint32_t count = v.count;
+
       // Find the correct outer function based on start position.
+      //
+      // This is not robust when considering two functions with identical source
+      // ranges. In this case, it is unclear which function is the inner / outer
+      // function. Above, we ensure that such functions are sorted in ascending
+      // `count` order, so at least our `parent_is_covered` optimization below
+      // should be fine.
+      // TODO(jgruber): Consider removing the optimization.
       while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
         nesting.pop_back();
       }
+
       if (count != 0) {
         switch (collectionMode) {
           case v8::debug::CoverageMode::kBlockCount:
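
Note: the following is a minimal standalone sketch, not part of this change, illustrating the ordering that SharedFunctionInfoAndCount::operator< establishes above (start ascending, end descending, count ascending). The FunctionAndCount struct and the sample ranges are made up for illustration.

// Simplified stand-in for SharedFunctionInfoAndCount.
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

struct FunctionAndCount {
  int start;       // inclusive start of the source range
  int end;         // exclusive end of the source range
  uint32_t count;  // invocation count

  bool operator<(const FunctionAndCount& that) const {
    if (start != that.start) return start < that.start;
    if (end != that.end) return end > that.end;  // wider (outer) range first
    return count < that.count;  // identical range: lower count first
  }
};

int main() {
  // An outer function [0, 100) and two functions sharing the identical
  // range [10, 50) with different counts.
  std::vector<FunctionAndCount> v = {{10, 50, 7}, {0, 100, 1}, {10, 50, 3}};
  std::sort(v.begin(), v.end());
  for (const FunctionAndCount& f : v) {
    std::printf("[%d, %d) count=%u\n", f.start, f.end,
                static_cast<unsigned>(f.count));
  }
  // Prints [0, 100) count=1, then [10, 50) count=3, then [10, 50) count=7:
  // outer functions come first, and for identical ranges the entry with the
  // lower count is treated as the "outer" one.
  return 0;
}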
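
Note: a second standalone sketch, also not part of this change, showing how a stack of indices can reconstruct function nesting from a list sorted outer-to-inner, mirroring the `nesting` vector in Coverage::Collect above. The Range struct and the sample data are illustrative only.

#include <cstddef>
#include <cstdio>
#include <vector>

struct Range {
  int start;
  int end;
};

int main() {
  // Already sorted by start ascending, end descending (outer before inner).
  std::vector<Range> sorted = {{0, 100}, {10, 50}, {20, 30}, {60, 90}};
  std::vector<std::size_t> nesting;  // indices of currently open outer ranges

  for (std::size_t i = 0; i < sorted.size(); ++i) {
    const Range& r = sorted[i];
    // Pop entries that end before this range starts; the remaining top,
    // if any, is the innermost enclosing function.
    while (!nesting.empty() && sorted[nesting.back()].end <= r.start) {
      nesting.pop_back();
    }
    int parent = nesting.empty() ? -1 : static_cast<int>(nesting.back());
    std::printf("range [%d, %d) parent index: %d\n", r.start, r.end, parent);
    nesting.push_back(i);
  }
  // Output: [0, 100) has no parent, [10, 50) and [60, 90) nest in index 0,
  // and [20, 30) nests in index 1.
  return 0;
}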