This article collects typical usage examples of the C++ method GenCollectedHeap::gen_process_strong_roots. If you are wondering what GenCollectedHeap::gen_process_strong_roots does, how it is called, and what real call sites look like, the selected code examples below may help. You can also read further about the enclosing class, GenCollectedHeap.
Two code examples of GenCollectedHeap::gen_process_strong_roots are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ examples.
Example 1: collect
void DefNewGeneration::collect(bool   full,
                               bool   clear_all_soft_refs,
                               size_t size,
                               bool   is_tlab) {
  assert(full || size > 0, "otherwise we don't want to collect");
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  _gc_timer->register_gc_start();
  DefNewTracer gc_tracer;
  gc_tracer.report_gc_start(gch->gc_cause(), _gc_timer->gc_start());
  _next_gen = gch->next_gen(this);
  // If the next generation is too full to accommodate promotion
  // from this generation, pass on collection; let the next generation
  // do it.
  if (!collection_attempt_is_safe()) {
    if (Verbose && PrintGCDetails) {
      gclog_or_tty->print(" :: Collection attempt not safe :: ");
    }
    gch->set_incremental_collection_failed(); // Slight lie: we did not even attempt one
    return;
  }
  assert(to()->is_empty(), "Else not collection_attempt_is_safe");
  init_assuming_no_promotion_failure();
  GCTraceTime t1(GCCauseString("GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, NULL);
  // Capture heap used before collection (for printing).
  size_t gch_prev_used = gch->used();
  gch->trace_heap_before_gc(&gc_tracer);
  SpecializationStats::clear();
  // These can be shared for all code paths
  IsAliveClosure is_alive(this);
  ScanWeakRefClosure scan_weak_ref(this);
  age_table()->clear();
  to()->clear(SpaceDecorator::Mangle);
  gch->rem_set()->prepare_for_younger_refs_iterate(false);
  assert(gch->no_allocs_since_save_marks(0),
         "save marks have not been newly set.");
  // Not very pretty.
  CollectorPolicy* cp = gch->collector_policy();
  FastScanClosure fsc_with_no_gc_barrier(this, false);
  FastScanClosure fsc_with_gc_barrier(this, true);
  KlassScanClosure klass_scan_closure(&fsc_with_no_gc_barrier,
                                      gch->rem_set()->klass_rem_set());
  set_promo_failure_scan_stack_closure(&fsc_with_no_gc_barrier);
  FastEvacuateFollowersClosure evacuate_followers(gch, _level, this,
                                                  &fsc_with_no_gc_barrier,
                                                  &fsc_with_gc_barrier);
  assert(gch->no_allocs_since_save_marks(0),
         "save marks have not been newly set.");
  int so = SharedHeap::SO_AllClasses | SharedHeap::SO_Strings | SharedHeap::SO_CodeCache;
  gch->gen_process_strong_roots(_level,
                                true,  // Process younger gens, if any,
                                       // as strong roots.
                                true,  // activate StrongRootsScope
                                true,  // is scavenging
                                SharedHeap::ScanningOption(so),
                                &fsc_with_no_gc_barrier,
                                true,  // walk *all* scavengable nmethods
                                &fsc_with_gc_barrier,
                                &klass_scan_closure);
  // "evacuate followers".
  evacuate_followers.do_void();
  FastKeepAliveClosure keep_alive(this, &scan_weak_ref);
  ReferenceProcessor* rp = ref_processor();
  rp->setup_policy(clear_all_soft_refs);
  const ReferenceProcessorStats& stats =
    rp->process_discovered_references(&is_alive, &keep_alive, &evacuate_followers,
                                      NULL, _gc_timer);
  gc_tracer.report_gc_reference_stats(stats);
  if (!_promotion_failed) {
    // Swap the survivor spaces.
    eden()->clear(SpaceDecorator::Mangle);
    from()->clear(SpaceDecorator::Mangle);
    if (ZapUnusedHeapArea) {
      // This is now done here because of the piece-meal mangling which
      // can check for valid mangling at intermediate points in the
      // collection(s). When a minor collection fails to collect
      // sufficient space resizing of the young generation can occur
      // and redistribute the spaces in the young generation. Mangle
      // here so that unzapped regions don't get distributed to
//......... remainder of the method omitted .........
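Note the shape of the gen_process_strong_roots call above: the caller supplies one scan closure for references that stay within the young generation (fsc_with_no_gc_barrier), a barrier-applying closure for references that may point into older generations (fsc_with_gc_barrier), a Klass-scanning closure, and a ScanningOption bitmask built from SharedHeap::SO_AllClasses | SO_Strings | SO_CodeCache. The following standalone sketch (not HotSpot code; every type and name in it is invented purely for illustration) shows the same two-closure root-scanning pattern in miniature:
#include <cstdio>
#include <vector>

struct Obj { int id; };

// Simplified stand-in for an OopsInGenClosure: invoked on each root slot.
struct ScanClosure {
  bool apply_barrier;
  void do_oop(Obj** slot) const {
    // A real scavenger would copy/forward the object here; we only log it.
    std::printf("scan obj %d (%s barrier)\n",
                (*slot)->id, apply_barrier ? "with" : "no");
  }
};

// Simplified stand-in for gen_process_strong_roots: each strong root is
// visited with the closure that matches where the reference lives.
static void process_strong_roots(std::vector<Obj*>& young_refs,
                                 std::vector<Obj*>& old_refs,
                                 const ScanClosure& no_barrier,
                                 const ScanClosure& with_barrier) {
  for (Obj*& r : young_refs) no_barrier.do_oop(&r);
  for (Obj*& r : old_refs)   with_barrier.do_oop(&r);
}

int main() {
  Obj a{1}, b{2};
  std::vector<Obj*> young_refs{&a}, old_refs{&b};
  ScanClosure fsc_no_barrier{false}, fsc_with_barrier{true};
  process_strong_roots(young_refs, old_refs, fsc_no_barrier, fsc_with_barrier);
  return 0;
}
Passing closures rather than iterating roots directly lets the same root-walking code serve both the young-generation scavenge (no barrier needed) and references into older generations (barrier required), which is exactly why the real call site constructs two FastScanClosure instances.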
Example 2: collect
void DefNewGeneration::collect(bool   full,
                               bool   clear_all_soft_refs,
                               size_t size,
                               bool   is_tlab) {
  assert(full || size > 0, "otherwise we don't want to collect");
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  _next_gen = gch->next_gen(this);
  assert(_next_gen != NULL,
         "This must be the youngest gen, and not the only gen");
  // If the next generation is too full to accommodate promotion
  // from this generation, pass on collection; let the next generation
  // do it.
  if (!collection_attempt_is_safe()) {
    gch->set_incremental_collection_will_fail();
    return;
  }
  assert(to()->is_empty(), "Else not collection_attempt_is_safe");
  init_assuming_no_promotion_failure();
  TraceTime t1("GC", PrintGC && !PrintGCDetails, true, gclog_or_tty);
  // Capture heap used before collection (for printing).
  size_t gch_prev_used = gch->used();
  SpecializationStats::clear();
  // These can be shared for all code paths
  IsAliveClosure is_alive(this);
  ScanWeakRefClosure scan_weak_ref(this);
  age_table()->clear();
  to()->clear(SpaceDecorator::Mangle);
  gch->rem_set()->prepare_for_younger_refs_iterate(false);
  assert(gch->no_allocs_since_save_marks(0),
         "save marks have not been newly set.");
  // Not very pretty.
  CollectorPolicy* cp = gch->collector_policy();
  FastScanClosure fsc_with_no_gc_barrier(this, false);
  FastScanClosure fsc_with_gc_barrier(this, true);
  set_promo_failure_scan_stack_closure(&fsc_with_no_gc_barrier);
  FastEvacuateFollowersClosure evacuate_followers(gch, _level, this,
                                                  &fsc_with_no_gc_barrier,
                                                  &fsc_with_gc_barrier);
  assert(gch->no_allocs_since_save_marks(0),
         "save marks have not been newly set.");
  gch->gen_process_strong_roots(_level,
                                true,  // Process younger gens, if any,
                                       // as strong roots.
                                true,  // activate StrongRootsScope
                                false, // not collecting perm generation.
                                SharedHeap::SO_AllClasses,
                                &fsc_with_no_gc_barrier,
                                true,  // walk *all* scavengable nmethods
                                &fsc_with_gc_barrier);
  // "evacuate followers".
  evacuate_followers.do_void();
  FastKeepAliveClosure keep_alive(this, &scan_weak_ref);
  ReferenceProcessor* rp = ref_processor();
  rp->setup_policy(clear_all_soft_refs);
  rp->process_discovered_references(&is_alive, &keep_alive, &evacuate_followers,
                                    NULL);
  if (!promotion_failed()) {
    // Swap the survivor spaces.
    eden()->clear(SpaceDecorator::Mangle);
    from()->clear(SpaceDecorator::Mangle);
    if (ZapUnusedHeapArea) {
      // This is now done here because of the piece-meal mangling which
      // can check for valid mangling at intermediate points in the
      // collection(s). When a minor collection fails to collect
      // sufficient space resizing of the young generation can occur
      // and redistribute the spaces in the young generation. Mangle
      // here so that unzapped regions don't get distributed to
      // other spaces.
      to()->mangle_unused_area();
    }
    swap_spaces();
    assert(to()->is_empty(), "to space should be empty now");
    // Set the desired survivor size to half the real survivor space
    _tenuring_threshold =
      age_table()->compute_tenuring_threshold(to()->capacity()/HeapWordSize);
    if (PrintGC && !PrintGCDetails) {
      gch->print_heap_change(gch_prev_used);
    }
  } else {
    assert(HandlePromotionFailure,
           "Should not be here unless promotion failure handling is on");
    assert(_promo_failure_scan_stack != NULL &&
//......... remainder of the method omitted .........
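Compared with Example 1, this older variant passes eight arguments rather than nine: the fourth flag records whether the permanent generation is being collected instead of whether this is a scavenge, the scanning option is plain SharedHeap::SO_AllClasses, and there is no KlassClosure argument. After root scanning and reference processing, both examples end by swapping the survivor spaces and recomputing the tenuring threshold from the age table. The sketch below is a standalone illustration, not the VM's ageTable code; the constants and names are assumptions. It shows the basic idea behind compute_tenuring_threshold: pick the smallest object age at which the accumulated survivor volume would exceed the desired survivor size, capped by a maximum threshold.
#include <cstddef>
#include <cstdio>

const int kTableSize = 16;            // assumed depth of the age table
const int kMaxTenuringThreshold = 15; // assumed MaxTenuringThreshold cap

// sizes[age] = total words occupied by surviving objects of that age.
static int compute_tenuring_threshold(const size_t sizes[kTableSize],
                                      size_t desired_survivor_size) {
  size_t total = 0;
  int age = 1;
  while (age < kTableSize) {
    total += sizes[age];
    if (total > desired_survivor_size) break;  // survivor space would overflow
    age++;
  }
  return age < kMaxTenuringThreshold ? age : kMaxTenuringThreshold;
}

int main() {
  size_t sizes[kTableSize] = {0, 4000, 3000, 2500, 1000};  // words per age
  // With an 8000-word survivor budget, the cumulative size first exceeds
  // the budget at age 3, so the threshold comes out as 3.
  std::printf("tenuring threshold = %d\n",
              compute_tenuring_threshold(sizes, 8000));
  return 0;
}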