Revert "Enable GC regions by default (#59283)"
This reverts commit cf91716.
agocke committed Jan 18, 2022
1 parent 126db6d commit e8ef5b5
Showing 3 changed files with 44 additions and 98 deletions.
130 changes: 41 additions & 89 deletions src/coreclr/gc/gc.cpp
@@ -2522,8 +2522,6 @@ size_t gc_heap::bgc_loh_size_increased = 0;

size_t gc_heap::bgc_poh_size_increased = 0;

size_t gc_heap::background_soh_size_end_mark = 0;

size_t gc_heap::background_soh_alloc_count = 0;

size_t gc_heap::background_uoh_alloc_count = 0;
@@ -28894,14 +28892,6 @@ void gc_heap::plan_phase (int condemned_gen_number)
{
dprintf (2,( "**** Doing Compacting GC ****"));

#if defined(USE_REGIONS) && defined(BACKGROUND_GC)
if (should_update_end_mark_size())
{
background_soh_size_end_mark += generation_end_seg_allocated (older_gen) -
r_older_gen_end_seg_allocated;
}
#endif //USE_REGIONS && BACKGROUND_GC

#ifndef USE_REGIONS
if (should_expand)
{
@@ -29406,13 +29396,6 @@ void gc_heap::fix_generation_bounds (int condemned_gen_number,
}
}
#endif //MULTIPLE_HEAPS

#ifdef BACKGROUND_GC
if (should_update_end_mark_size())
{
background_soh_size_end_mark = generation_size (max_generation);
}
#endif //BACKGROUND_GC
#endif //!USE_REGIONS

{
@@ -29631,14 +29614,6 @@ void gc_heap::thread_final_regions (bool compact_p)
generation_final_regions[gen_idx].tail = generation_tail_region (gen);
}

#ifdef BACKGROUND_GC
heap_segment* max_gen_tail_region = 0;
if (should_update_end_mark_size())
{
max_gen_tail_region = generation_final_regions[max_generation].tail;
}
#endif //BACKGROUND_GC

// Step 2: for each region in the condemned generations, we thread it onto its planned generation
// in our generation_final_regions array.
for (int gen_idx = condemned_gen_number; gen_idx >= 0; gen_idx--)
@@ -29689,21 +29664,6 @@ void gc_heap::thread_final_regions (bool compact_p)
}
}

#ifdef BACKGROUND_GC
if (max_gen_tail_region)
{
max_gen_tail_region = heap_segment_next (max_gen_tail_region);

while (max_gen_tail_region)
{
background_soh_size_end_mark += heap_segment_allocated (max_gen_tail_region) -
heap_segment_mem (max_gen_tail_region);

max_gen_tail_region = heap_segment_next (max_gen_tail_region);
}
}
#endif //BACKGROUND_GC

// Step 4: if a generation doesn't have any regions, we need to get a new one for it;
// otherwise we just set the head region as the start region for that generation.
for (int gen_idx = 0; gen_idx <= max_generation; gen_idx++)
@@ -32973,11 +32933,6 @@ void gc_heap::decommit_mark_array_by_seg (heap_segment* seg)
}
}

bool gc_heap::should_update_end_mark_size()
{
return ((settings.condemned_generation == (max_generation - 1)) && (current_c_gc_state == c_gc_state_planning));
}

void gc_heap::background_mark_phase ()
{
verify_mark_array_cleared();
@@ -33052,7 +33007,6 @@ void gc_heap::background_mark_phase ()
bgc_begin_poh_size = total_poh_size;
bgc_loh_size_increased = 0;
bgc_poh_size_increased = 0;
background_soh_size_end_mark = 0;

dprintf (GTC_LOG, ("BM: h%d: loh: %Id, soh: %Id, poh: %Id", heap_number, total_loh_size, total_soh_size, total_poh_size));

@@ -33533,8 +33487,6 @@ void gc_heap::background_mark_phase ()
heap_segment_background_allocated (seg) = heap_segment_allocated (seg);
}

background_soh_size_end_mark += heap_segment_background_allocated (seg) - heap_segment_mem (seg);

dprintf (3333, ("h%d gen%d seg %Ix (%Ix) background allocated is %Ix",
heap_number, i, (size_t)(seg), heap_segment_mem (seg),
heap_segment_background_allocated (seg)));
@@ -45151,11 +45103,11 @@ size_t GCHeap::GetTotalBytesInUse ()
for (int i = 0; i < gc_heap::n_heaps; i++)
{
GCHeap* Hp = gc_heap::g_heaps [i]->vm_heap;
tot_size += Hp->ApproxTotalBytesInUse();
tot_size += Hp->ApproxTotalBytesInUse (FALSE);
}
return tot_size;
#else
return ApproxTotalBytesInUse();
return ApproxTotalBytesInUse ();
#endif //MULTIPLE_HEAPS
}

@@ -45210,58 +45162,58 @@ size_t GCHeap::ApproxTotalBytesInUse(BOOL small_heap_only)
size_t totsize = 0;
enter_spin_lock (&pGenGCHeap->gc_lock);

// For gen0 it's a bit complicated because we are currently allocating in it. We get the fragmentation first
// just so that we don't give a negative number for the resulting size.
generation* gen = pGenGCHeap->generation_of (0);
size_t gen0_frag = generation_free_list_space (gen) + generation_free_obj_space (gen);
uint8_t* current_alloc_allocated = pGenGCHeap->alloc_allocated;
heap_segment* current_eph_seg = pGenGCHeap->ephemeral_heap_segment;
size_t gen0_size = 0;
#ifdef USE_REGIONS
heap_segment* gen0_seg = generation_start_segment (gen);
while (gen0_seg)
// the complication with the following code is that background GC may
// remove the ephemeral segment while we are iterating
// if so, we retry a couple times and ultimately may report a slightly wrong result
for (int tries = 1; tries <= 3; tries++)
{
uint8_t* end = in_range_for_segment (current_alloc_allocated, gen0_seg) ?
current_alloc_allocated : heap_segment_allocated (gen0_seg);
gen0_size += end - heap_segment_mem (gen0_seg);

if (gen0_seg == current_eph_seg)
heap_segment* eph_seg = generation_allocation_segment (pGenGCHeap->generation_of (0));
// Get small block heap size info
totsize = (pGenGCHeap->alloc_allocated - heap_segment_mem (eph_seg));
heap_segment* seg1 = generation_start_segment (pGenGCHeap->generation_of (max_generation));
while ((seg1 != eph_seg) && (seg1 != nullptr)
#ifdef BACKGROUND_GC
&& (seg1 != pGenGCHeap->freeable_soh_segment)
#endif //BACKGROUND_GC
)
{
break;
#ifdef BACKGROUND_GC
if (!heap_segment_decommitted_p (seg1))
#endif //BACKGROUND_GC
{
totsize += heap_segment_allocated (seg1) -
heap_segment_mem (seg1);
}
seg1 = heap_segment_next (seg1);
}

gen0_seg = heap_segment_next (gen0_seg);
}
#else //USE_REGIONS
// For segments ephemeral seg does not change.
gen0_size = current_alloc_allocated - heap_segment_mem (current_eph_seg);
#endif //USE_REGIONS

totsize = gen0_size - gen0_frag;

int stop_gen_index = max_generation;

if (gc_heap::current_c_gc_state == c_gc_state_planning)
{
// During BGC sweep since we can be deleting SOH segments, we avoid walking the segment
// list.
generation* oldest_gen = pGenGCHeap->generation_of (max_generation);
totsize = pGenGCHeap->background_soh_size_end_mark - generation_free_list_space (oldest_gen) - generation_free_obj_space (oldest_gen);
stop_gen_index--;
if (seg1 == eph_seg)
break;
}

for (int i = (max_generation - 1); i <= stop_gen_index; i++)
//discount the fragmentation
for (int i = 0; i <= max_generation; i++)
{
generation* gen = pGenGCHeap->generation_of (i);
totsize += pGenGCHeap->generation_size (i) - generation_free_list_space (gen) - generation_free_obj_space (gen);
totsize -= (generation_free_list_space (gen) + generation_free_obj_space (gen));
}

if (!small_heap_only)
{
for (int i = uoh_start_generation; i < total_generation_count; i++)
{
generation* gen = pGenGCHeap->generation_of (i);
totsize += pGenGCHeap->generation_size (i) - generation_free_list_space (gen) - generation_free_obj_space (gen);
heap_segment* seg2 = generation_start_segment (pGenGCHeap->generation_of (i));

while (seg2 != 0)
{
totsize += heap_segment_allocated (seg2) -
heap_segment_mem (seg2);
seg2 = heap_segment_next (seg2);
}

//discount the fragmentation
generation* uoh_gen = pGenGCHeap->generation_of (i);
size_t frag = generation_free_list_space (uoh_gen) + generation_free_obj_space (uoh_gen);
totsize -= frag;
}
}
leave_spin_lock (&pGenGCHeap->gc_lock);
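
The ApproxTotalBytesInUse code restored above walks the SOH segment list under the GC lock and, per its own comment, copes with the background GC removing the ephemeral segment mid-walk by retrying up to three times and then accepting a slightly wrong total. Below is a minimal, self-contained C++ sketch of that bounded-retry idea only; it is not code from this commit, and the names (version, part_a, part_b, approx_total) are hypothetical.

#include <atomic>
#include <cstddef>
#include <cstdio>

// A hypothetical background updater bumps 'version' around each update of the
// two counters; readers use it to detect that an update raced with them.
std::atomic<unsigned> version{0};
std::atomic<size_t>   part_a{0};
std::atomic<size_t>   part_b{0};

size_t approx_total()
{
    size_t total = 0;
    for (int tries = 1; tries <= 3; tries++)
    {
        unsigned v = version.load();
        total = part_a.load() + part_b.load();
        if (version.load() == v)
            break;  // nothing changed while we read, so the snapshot is consistent
        // otherwise retry; whatever the last attempt computed is reported as-is
    }
    return total;
}

int main()
{
    part_a.store(40);
    part_b.store(2);
    std::printf("approx total: %zu\n", approx_total());
    return 0;
}

The design choice matches the comment in the diff: for a statistic that is approximate by definition, a bounded number of retries followed by a possibly stale answer is preferable to blocking the caller on the background GC.
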
10 changes: 2 additions & 8 deletions src/coreclr/gc/gcpriv.h
@@ -51,8 +51,8 @@ inline void FATAL_GC_ERROR()
//
// This means any empty regions can be freely used for any generation. For
// Server GC we will balance regions between heaps.
// For now disable regions StandAlone GC builds
#if defined (HOST_64BIT) && !defined (BUILD_AS_STANDALONE)
// For now enable regions by default for only StandAlone GC builds
#if defined (HOST_64BIT) && defined (BUILD_AS_STANDALONE)
#define USE_REGIONS
#endif //HOST_64BIT && BUILD_AS_STANDALONE

Expand Down Expand Up @@ -3440,9 +3440,6 @@ class gc_heap
PER_HEAP
void decommit_mark_array_by_seg (heap_segment* seg);

PER_HEAP_ISOLATED
bool should_update_end_mark_size();

PER_HEAP
void background_mark_phase();

@@ -4271,9 +4268,6 @@ class gc_heap
PER_HEAP
size_t bgc_poh_size_increased;

PER_HEAP
size_t background_soh_size_end_mark;

PER_HEAP
size_t background_soh_alloc_count;

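
For context on the gcpriv.h hunk above: USE_REGIONS is a compile-time gate, so the change between defined and !defined (BUILD_AS_STANDALONE) decides at build time whether the GC is compiled with the regions heap layout or the classic segments layout. The fragment below is a hypothetical, stand-alone illustration of that kind of gate; the HOST_64BIT and BUILD_AS_STANDALONE defines are set by hand for the example and do not come from the real build system.

#include <cstdio>

#define HOST_64BIT            // assumed for the example
#define BUILD_AS_STANDALONE   // assumed for the example

// Same shape as the gate in gcpriv.h: regions only when both macros are defined.
#if defined(HOST_64BIT) && defined(BUILD_AS_STANDALONE)
#define USE_REGIONS
#endif

int main()
{
#ifdef USE_REGIONS
    std::puts("GC built with the regions heap layout");
#else
    std::puts("GC built with the segments heap layout");
#endif
    return 0;
}
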
2 changes: 1 addition & 1 deletion src/coreclr/nativeaot/Runtime/gcrhenv.cpp
@@ -923,7 +923,7 @@ void GCToEEInterface::DisablePreemptiveGC()
Thread* GCToEEInterface::GetThread()
{
#ifndef DACCESS_COMPILE
return ThreadStore::GetCurrentThreadIfAvailable();
return ThreadStore::GetCurrentThread();
#else
return NULL;
#endif
