X-Git-Url: http://git.megacz.com/?a=blobdiff_plain;f=rts%2Fsm%2FGC.c;h=4aa210cd7eb51d3f40f39b05f489efbdeb560683;hb=b0ca990457eaf7991e72b13d0040d937b5759b36;hp=2870be150c3639eed44a44a7721ce5179d8d51e5;hpb=1cb0eb071f1316d6650f354166506789a2638720;p=ghc-hetmet.git diff --git a/rts/sm/GC.c b/rts/sm/GC.c index 2870be1..4aa210c 100644 --- a/rts/sm/GC.c +++ b/rts/sm/GC.c @@ -109,11 +109,6 @@ rtsBool eager_promotion; */ rtsBool failed_to_evac; -/* Saved nursery (used for 2-space collector only) - */ -static bdescr *saved_nursery; -static nat saved_n_blocks; - /* Data used for allocation area sizing. */ lnat new_blocks; // blocks allocated during this GC @@ -124,6 +119,7 @@ static lnat g0s0_pcnt_kept = 30; // percentage of g0s0 live at last minor GC #ifdef DEBUG nat mutlist_MUTVARS, mutlist_MUTARRS, + mutlist_MVARS, mutlist_OTHERS; #endif @@ -199,17 +195,19 @@ GarbageCollect ( rtsBool force_major_gc ) lnat oldgen_saved_blocks = 0; nat g, s, i; - ACQUIRE_SM_LOCK; - #ifdef PROFILING CostCentreStack *prev_CCS; #endif + ACQUIRE_SM_LOCK; + debugTrace(DEBUG_gc, "starting GC"); #if defined(RTS_USER_SIGNALS) - // block signals - blockUserSignals(); + if (RtsFlags.MiscFlags.install_signal_handlers) { + // block signals + blockUserSignals(); + } #endif // tell the STM to discard any cached closures its hoping to re-use @@ -271,17 +269,6 @@ GarbageCollect ( rtsBool force_major_gc ) static_objects = END_OF_STATIC_LIST; scavenged_static_objects = END_OF_STATIC_LIST; - /* Save the nursery if we're doing a two-space collection. - * g0s0->blocks will be used for to-space, so we need to get the - * nursery out of the way. - */ - if (RtsFlags.GcFlags.generations == 1) { - saved_nursery = g0s0->blocks; - saved_n_blocks = g0s0->n_blocks; - g0s0->blocks = NULL; - g0s0->n_blocks = 0; - } - /* Keep a count of how many new blocks we allocated during this GC * (used for resizing the allocation area, later). */ @@ -482,10 +469,6 @@ GarbageCollect ( rtsBool force_major_gc ) */ markStablePtrTable(mark_root); - /* Mark the root pointer table. - */ - markRootPtrTable(mark_root); - /* ------------------------------------------------------------------------- * Repeatedly scavenge all the areas we know about until there's no * more scavenging to be done. @@ -639,9 +622,9 @@ GarbageCollect ( rtsBool force_major_gc ) copied += mut_list_size; debugTrace(DEBUG_gc, - "mut_list_size: %lu (%d vars, %d arrays, %d others)", + "mut_list_size: %lu (%d vars, %d arrays, %d MVARs, %d others)", (unsigned long)(mut_list_size * sizeof(W_)), - mutlist_MUTVARS, mutlist_MUTARRS, mutlist_OTHERS); + mutlist_MUTVARS, mutlist_MUTARRS, mutlist_MVARS, mutlist_OTHERS); } for (s = 0; s < generations[g].n_steps; s++) { @@ -653,7 +636,7 @@ GarbageCollect ( rtsBool force_major_gc ) if (g <= N) { copied -= stp->hp_bd->start + BLOCK_SIZE_W - stp->hp_bd->free; - scavd_copied -= (P_)(BLOCK_ROUND_UP(stp->scavd_hp)) - stp->scavd_hp; + scavd_copied -= stp->scavd_hpLim - stp->scavd_hp; } } @@ -664,7 +647,7 @@ GarbageCollect ( rtsBool force_major_gc ) * the collected steps (except the allocation area). These * freed blocks will probaby be quickly recycled. */ - if (!(g == 0 && s == 0)) { + if (!(g == 0 && s == 0 && RtsFlags.GcFlags.generations > 1)) { if (stp->is_compacted) { // for a compacted step, just shift the new to-space // onto the front of the now-compacted existing blocks. @@ -818,13 +801,14 @@ GarbageCollect ( rtsBool force_major_gc ) /* Free the small objects allocated via allocate(), since this will * all have been copied into G0S1 now. 
*/ - if (small_alloc_list != NULL) { - freeChain(small_alloc_list); + if (RtsFlags.GcFlags.generations > 1) { + if (g0s0->blocks != NULL) { + freeChain(g0s0->blocks); + g0s0->blocks = NULL; + } + g0s0->n_blocks = 0; } - small_alloc_list = NULL; alloc_blocks = 0; - alloc_Hp = NULL; - alloc_HpLim = NULL; alloc_blocks_lim = RtsFlags.GcFlags.minAllocAreaSize; // Start a new pinned_object_block @@ -854,17 +838,6 @@ GarbageCollect ( rtsBool force_major_gc ) if (RtsFlags.GcFlags.generations == 1) { nat blocks; - if (g0s0->old_blocks != NULL) { - freeChain(g0s0->old_blocks); - } - for (bd = g0s0->blocks; bd != NULL; bd = bd->link) { - bd->flags = 0; // now from-space - } - g0s0->old_blocks = g0s0->blocks; - g0s0->n_old_blocks = g0s0->n_blocks; - g0s0->blocks = saved_nursery; - g0s0->n_blocks = saved_n_blocks; - /* For a two-space collector, we need to resize the nursery. */ /* set up a new nursery. Allocate a nursery size based on a @@ -881,7 +854,7 @@ GarbageCollect ( rtsBool force_major_gc ) * performance we get from 3L bytes, reducing to the same * performance at 2L bytes. */ - blocks = g0s0->n_old_blocks; + blocks = g0s0->n_blocks; if ( RtsFlags.GcFlags.maxHeapSize != 0 && blocks * RtsFlags.GcFlags.oldGenFactor * 2 > @@ -1018,8 +991,10 @@ GarbageCollect ( rtsBool force_major_gc ) stat_endGC(allocated, live, copied, scavd_copied, N); #if defined(RTS_USER_SIGNALS) - // unblock signals again - unblockUserSignals(); + if (RtsFlags.MiscFlags.install_signal_handlers) { + // unblock signals again + unblockUserSignals(); + } #endif RELEASE_SM_LOCK; @@ -1031,6 +1006,7 @@ GarbageCollect ( rtsBool force_major_gc ) closure if it is alive, or NULL otherwise. NOTE: Use it before compaction only! + It untags and (if needed) retags pointers to closures. -------------------------------------------------------------------------- */ @@ -1039,11 +1015,16 @@ isAlive(StgClosure *p) { const StgInfoTable *info; bdescr *bd; + StgWord tag; + StgClosure *q; while (1) { + /* The tag and the pointer are split, to be merged later when needed. */ + tag = GET_CLOSURE_TAG(p); + q = UNTAG_CLOSURE(p); - ASSERT(LOOKS_LIKE_CLOSURE_PTR(p)); - info = get_itbl(p); + ASSERT(LOOKS_LIKE_CLOSURE_PTR(q)); + info = get_itbl(q); // ignore static closures // @@ -1051,12 +1032,12 @@ isAlive(StgClosure *p) // Problem here is that we sometimes don't set the link field, eg. // for static closures with an empty SRT or CONSTR_STATIC_NOCAFs. // - if (!HEAP_ALLOCED(p)) { + if (!HEAP_ALLOCED(q)) { return p; } // ignore closures in generations that we're not collecting. - bd = Bdescr((P_)p); + bd = Bdescr((P_)q); if (bd->gen_no > N) { return p; } @@ -1072,7 +1053,7 @@ isAlive(StgClosure *p) } // check the mark bit for compacted steps - if ((bd->flags & BF_COMPACTED) && is_marked((P_)p,bd)) { + if ((bd->flags & BF_COMPACTED) && is_marked((P_)q,bd)) { return p; } @@ -1084,16 +1065,16 @@ isAlive(StgClosure *p) case IND_OLDGEN: // rely on compatible layout with StgInd case IND_OLDGEN_PERM: // follow indirections - p = ((StgInd *)p)->indirectee; + p = ((StgInd *)q)->indirectee; continue; case EVACUATED: // alive! - return ((StgEvacuated *)p)->evacuee; + return ((StgEvacuated *)q)->evacuee; case TSO: - if (((StgTSO *)p)->what_next == ThreadRelocated) { - p = (StgClosure *)((StgTSO *)p)->link; + if (((StgTSO *)q)->what_next == ThreadRelocated) { + p = (StgClosure *)((StgTSO *)q)->link; continue; } return NULL;
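
The isAlive() changes above follow GHC's dynamic pointer tagging: a small tag is kept in the low bits of a closure pointer, so the pointer has to be untagged (GET_CLOSURE_TAG / UNTAG_CLOSURE in the patch) before its info table or fields are inspected, while the value handed back to the caller keeps the original tag. What follows is a minimal, self-contained sketch of that untag-before-inspect pattern, not the RTS code itself; the 2-bit tag width and the GET_TAG/UNTAG/RETAG macros, MockClosure type and is_alive_sketch() function are simplifications assumed purely for the example.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for the RTS macros GET_CLOSURE_TAG / UNTAG_CLOSURE;
 * a 2-bit tag is assumed here purely for illustration. */
#define TAG_MASK        ((uintptr_t)0x3)
#define GET_TAG(p)      ((uintptr_t)(p) & TAG_MASK)
#define UNTAG(p)        ((void *)((uintptr_t)(p) & ~TAG_MASK))
#define RETAG(p, tag)   ((void *)((uintptr_t)UNTAG(p) | (tag)))

/* A mock closure: just something with a field to inspect. */
typedef struct { int alive; } MockClosure;

/* Mirrors the shape of the patched isAlive(): split the tag off, do all
 * inspection through the untagged pointer q, and return a pointer that
 * still carries the original tag when the closure is alive. */
static void *is_alive_sketch(void *p)
{
    uintptr_t    tag = GET_TAG(p);   /* remember the tag          */
    MockClosure *q   = UNTAG(p);     /* all inspection goes via q */

    return q->alive ? RETAG(q, tag) : NULL;
}

int main(void)
{
    MockClosure c = { .alive = 1 };
    void *tagged  = RETAG(&c, 2);    /* e.g. a constructor tag of 2 */

    void *r = is_alive_sketch(tagged);
    assert(r == tagged);             /* the tag survives the call   */
    printf("alive, tag = %lu\n", (unsigned long)GET_TAG(r));
    return 0;
}

In the patch itself the original tagged pointer p is returned directly in most branches, which preserves the tag in the same way: only the inspection (get_itbl, Bdescr, is_marked, field accesses) is switched over to the untagged q.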