X-Git-Url: http://git.megacz.com/?a=blobdiff_plain;f=ghc%2Frts%2FGC.c;h=c0e1a4aaa3c5cab979bd42a92c8a73a89ac4ab92;hb=29e55dea299569979d4e73d64b709a97aaea36e8;hp=563392cb20d519376e6f7ea7352903d4a3671600;hpb=256a006fc9ae215926e5a0645b85d86cd683701d;p=ghc-hetmet.git

diff --git a/ghc/rts/GC.c b/ghc/rts/GC.c
index 563392c..c0e1a4a 100644
--- a/ghc/rts/GC.c
+++ b/ghc/rts/GC.c
@@ -1,5 +1,5 @@
 /* -----------------------------------------------------------------------------
- * $Id: GC.c,v 1.134 2002/04/19 10:25:00 simonmar Exp $
+ * $Id: GC.c,v 1.143 2002/09/18 06:34:07 mthomas Exp $
  *
  * (c) The GHC Team 1998-1999
  *
@@ -27,6 +27,7 @@
 #include "Prelude.h"
 #include "ParTicky.h"           // ToDo: move into Rts.h
 #include "GCCompact.h"
+#include "Signals.h"
 #if defined(GRAN) || defined(PAR)
 # include "GranSimRts.h"
 # include "ParallelRts.h"
@@ -45,6 +46,8 @@
 #include "RetainerProfile.h"
 #include "LdvProfile.h"
 
+#include <string.h>
+
 /* STATIC OBJECT LIST.
  *
  * During GC:
@@ -79,8 +82,8 @@
  * We build up a static object list while collecting generations 0..N,
  * which is then appended to the static object list of generation N+1.
  */
-StgClosure* static_objects;             // live static objects
-StgClosure* scavenged_static_objects;   // static objects scavenged so far
+static StgClosure* static_objects;      // live static objects
+StgClosure* scavenged_static_objects;   // static objects scavenged so far
 
 /* N is the oldest generation being collected, where the generations
  * are numbered starting at 0.  A major GC (indicated by the major_gc
@@ -118,17 +121,17 @@ static rtsBool failed_to_evac;
 
 /* Old to-space (used for two-space collector only)
  */
-bdescr *old_to_blocks;
+static bdescr *old_to_blocks;
 
 /* Data used for allocation area sizing.
 */
-lnat new_blocks;                // blocks allocated during this GC
-lnat g0s0_pcnt_kept = 30;       // percentage of g0s0 live at last minor GC
+static lnat new_blocks;         // blocks allocated during this GC
+static lnat g0s0_pcnt_kept = 30;        // percentage of g0s0 live at last minor GC
 
 /* Used to avoid long recursion due to selector thunks
 */
-lnat thunk_selector_depth = 0;
-#define MAX_THUNK_SELECTOR_DEPTH 256
+static lnat thunk_selector_depth = 0;
+#define MAX_THUNK_SELECTOR_DEPTH 8
 
 /* -----------------------------------------------------------------------------
    Static function declarations
   -------------------------------------------------------------------------- */
@@ -142,6 +145,8 @@ static void zero_mutable_list ( StgMutClosure *first );
 static rtsBool traverse_weak_ptr_list  ( void );
 static void    mark_weak_ptr_list      ( StgWeak **list );
 
+static StgClosure * eval_thunk_selector ( nat field, StgSelector * p );
+
 static void    scavenge                ( step * );
 static void    scavenge_mark_stack     ( void );
 static void    scavenge_stack          ( StgPtr p, StgPtr stack_end );
@@ -245,6 +250,11 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc )
                  Now, Now));
 #endif
 
+#ifndef mingw32_TARGET_OS
+  // block signals
+  blockUserSignals();
+#endif
+
   // tell the stats department that we've started a GC
   stat_startGC();
 
@@ -990,11 +1000,11 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc )
   // start any pending finalizers
   scheduleFinalizers(old_weak_ptr_list);
 
-  ACQUIRE_LOCK(&sched_mutex);
-
   // send exceptions to any threads which were about to die
   resurrectThreads(resurrected_threads);
+  ACQUIRE_LOCK(&sched_mutex);
+
   // Update the stable pointer hash table.
   updateStablePtrTable(major_gc);
 
@@ -1026,6 +1036,11 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc )
   // ok, GC over: tell the stats department what happened.
   stat_endGC(allocated, collected, live, copied, N);
 
+#ifndef mingw32_TARGET_OS
+  // unblock signals again
+  unblockUserSignals();
+#endif
+
   //PAR_TICKY_TP();
 }
 
@@ -1286,6 +1301,7 @@ isAlive(StgClosure *p)
 
  loop:
   bd = Bdescr((P_)p);
+  // ignore closures in generations that we're not collecting.
   if (LOOKS_LIKE_STATIC(p) || bd->gen_no > N) {
       return p;
@@ -1466,8 +1482,8 @@ copyPart(StgClosure *src, nat size_to_reserve, nat size_to_copy, step *stp)
    Evacuate a large object
 
    This just consists of removing the object from the (doubly-linked)
-   large_alloc_list, and linking it on to the (singly-linked)
-   new_large_objects list, from where it will be scavenged later.
+   step->large_objects list, and linking it on to the (singly-linked)
+   step->new_large_objects list, from where it will be scavenged later.
 
    Convention: bd->flags has BF_EVACUATED set for a large object
   that has been evacuated, or unset otherwise.
@@ -1596,9 +1612,6 @@ loop:
   if (HEAP_ALLOCED(q)) {
     bd = Bdescr((P_)q);
 
-    // not a group head: find the group head
-    if (bd->blocks == 0) { bd = bd->link; }
-
     if (bd->gen_no > N) {
        /* Can't evacuate this object, because it's in a generation
         * older than the ones we're collecting.  Let's hope that it's
@@ -1724,143 +1737,26 @@ loop:
 
   case THUNK_SELECTOR:
     {
-      const StgInfoTable* selectee_info;
-      StgClosure* selectee = ((StgSelector*)q)->selectee;
+       StgClosure *p;
 
-    selector_loop:
-      selectee_info = get_itbl(selectee);
-      switch (selectee_info->type) {
-      case CONSTR:
-      case CONSTR_1_0:
-      case CONSTR_0_1:
-      case CONSTR_2_0:
-      case CONSTR_1_1:
-      case CONSTR_0_2:
-      case CONSTR_STATIC:
-      case CONSTR_NOCAF_STATIC:
-       {
-         StgWord offset = info->layout.selector_offset;
-
-         // check that the size is in range
-         ASSERT(offset <
-                (StgWord32)(selectee_info->layout.payload.ptrs +
-                            selectee_info->layout.payload.nptrs));
-
-         // perform the selection!
-         q = selectee->payload[offset];
-         if (major_gc==rtsTrue) {TICK_GC_SEL_MAJOR();} else {TICK_GC_SEL_MINOR();}
-
-         /* if we're already in to-space, there's no need to continue
-          * with the evacuation, just update the source address with
-          * a pointer to the (evacuated) constructor field.
-          */
-         if (HEAP_ALLOCED(q)) {
-           bdescr *bd = Bdescr((P_)q);
-           if (bd->flags & BF_EVACUATED) {
-             if (bd->gen_no < evac_gen) {
-               failed_to_evac = rtsTrue;
-               TICK_GC_FAILED_PROMOTION();
-             }
-             return q;
-           }
-         }
-
-         /* otherwise, carry on and evacuate this constructor field,
-          * (but not the constructor itself)
-          */
-         goto loop;
+       if (thunk_selector_depth > MAX_THUNK_SELECTOR_DEPTH) {
+           return copy(q,THUNK_SELECTOR_sizeW(),stp);
        }
 
-      case IND:
-      case IND_STATIC:
-      case IND_PERM:
-      case IND_OLDGEN:
-      case IND_OLDGEN_PERM:
-       selectee = ((StgInd *)selectee)->indirectee;
-       goto selector_loop;
-
-      case EVACUATED:
-       selectee = ((StgEvacuated *)selectee)->evacuee;
-       goto selector_loop;
-
-      case THUNK_SELECTOR:
-# if 0
-         /* Disabled 03 April 2001 by JRS; it seems to cause the GC (or
-            something) to go into an infinite loop when the nightly
-            stage2 compiles PrelTup.lhs. */
-
-         /* we can't recurse indefinitely in evacuate(), so set a
-          * limit on the number of times we can go around this
-          * loop.
-          */
-         if (thunk_selector_depth < MAX_THUNK_SELECTOR_DEPTH) {
-             bdescr *bd;
-             bd = Bdescr((P_)selectee);
-             if (!bd->flags & BF_EVACUATED) {
-                 thunk_selector_depth++;
-                 selectee = evacuate(selectee);
-                 thunk_selector_depth--;
-                 goto selector_loop;
-             }
-         } else {
-             TICK_GC_SEL_ABANDONED();
-             // and fall through...
-         }
-# endif
+       p = eval_thunk_selector(info->layout.selector_offset,
+                               (StgSelector *)q);
 
-      case AP_UPD:
-      case THUNK:
-      case THUNK_1_0:
-      case THUNK_0_1:
-      case THUNK_2_0:
-      case THUNK_1_1:
-      case THUNK_0_2:
-      case THUNK_STATIC:
-      case CAF_BLACKHOLE:
-      case SE_CAF_BLACKHOLE:
-      case SE_BLACKHOLE:
-      case BLACKHOLE:
-      case BLACKHOLE_BQ:
-       // not evaluated yet
-       break;
-
-#if defined(PAR)
-       // a copy of the top-level cases below
-      case RBH: // cf. BLACKHOLE_BQ
-       {
-         //StgInfoTable *rip = get_closure_info(q, &size, &ptrs, &nonptrs, &vhs, str);
-         to = copy(q,BLACKHOLE_sizeW(),stp);
-         //ToDo: derive size etc from reverted IP
-         //to = copy(q,size,stp);
-         // recordMutable((StgMutClosure *)to);
-         return to;
-       }
-
-      case BLOCKED_FETCH:
-       ASSERT(sizeofW(StgBlockedFetch) >= MIN_NONUPD_SIZE);
-       to = copy(q,sizeofW(StgBlockedFetch),stp);
-       return to;
-
-# ifdef DIST
-      case REMOTE_REF:
-# endif
-      case FETCH_ME:
-       ASSERT(sizeofW(StgBlockedFetch) >= MIN_UPD_SIZE);
-       to = copy(q,sizeofW(StgFetchMe),stp);
-       return to;
-
-      case FETCH_ME_BQ:
-       ASSERT(sizeofW(StgBlockedFetch) >= MIN_UPD_SIZE);
-       to = copy(q,sizeofW(StgFetchMeBlockingQueue),stp);
-       return to;
-#endif
-
-      default:
-       barf("evacuate: THUNK_SELECTOR: strange selectee %d",
-            (int)(selectee_info->type));
-      }
+       if (p == NULL) {
+           return copy(q,THUNK_SELECTOR_sizeW(),stp);
+       } else {
+           // q is still BLACKHOLE'd.
+           thunk_selector_depth++;
+           p = evacuate(p);
+           thunk_selector_depth--;
+           upd_evacuee(q,p);
+           return p;
+       }
     }
-    return copy(q,THUNK_SELECTOR_sizeW(),stp);
 
   case IND:
   case IND_OLDGEN:
@@ -1942,7 +1838,7 @@ loop:
    */
   if (evac_gen > 0) {           // optimisation
     StgClosure *p = ((StgEvacuated*)q)->evacuee;
-    if (Bdescr((P_)p)->gen_no < evac_gen) {
+    if (HEAP_ALLOCED(p) && Bdescr((P_)p)->gen_no < evac_gen) {
      failed_to_evac = rtsTrue;
      TICK_GC_FAILED_PROMOTION();
    }
@@ -2028,6 +1924,144 @@ loop:
 }
 
 /* -----------------------------------------------------------------------------
+   Evaluate a THUNK_SELECTOR if possible.
+
+   returns: NULL if we couldn't evaluate this THUNK_SELECTOR, or
+   a closure pointer if we evaluated it and this is the result.  Note
+   that "evaluating" the THUNK_SELECTOR doesn't necessarily mean
+   reducing it to HNF, just that we have eliminated the selection.
+   The result might be another thunk, or even another THUNK_SELECTOR.
+
+   If the return value is non-NULL, the original selector thunk has
+   been BLACKHOLE'd, and should be updated with an indirection or a
+   forwarding pointer.  If the return value is NULL, then the selector
+   thunk is unchanged.
+   -------------------------------------------------------------------------- */
+
+static StgClosure *
+eval_thunk_selector( nat field, StgSelector * p )
+{
+    StgInfoTable *info;
+    const StgInfoTable *info_ptr;
+    StgClosure *selectee;
+
+    selectee = p->selectee;
+
+    // Save the real info pointer (NOTE: not the same as get_itbl()).
+    info_ptr = p->header.info;
+
+    // If the THUNK_SELECTOR is in a generation that we are not
+    // collecting, then bail out early.  We won't be able to save any
+    // space in any case, and updating with an indirection is trickier
+    // in an old gen.
+    if (Bdescr((StgPtr)p)->gen_no > N) {
+       return NULL;
+    }
+
+    // BLACKHOLE the selector thunk, since it is now under evaluation.
+    // This is important to stop us going into an infinite loop if
+    // this selector thunk eventually refers to itself.
+    SET_INFO(p,&stg_BLACKHOLE_info);
+
+selector_loop:
+
+    info = get_itbl(selectee);
+    switch (info->type) {
+      case CONSTR:
+      case CONSTR_1_0:
+      case CONSTR_0_1:
+      case CONSTR_2_0:
+      case CONSTR_1_1:
+      case CONSTR_0_2:
+      case CONSTR_STATIC:
+      case CONSTR_NOCAF_STATIC:
+         // check that the size is in range
+         ASSERT(field < (StgWord32)(info->layout.payload.ptrs +
+                                    info->layout.payload.nptrs));
+
+         return selectee->payload[field];
+
+      case IND:
+      case IND_STATIC:
+      case IND_PERM:
+      case IND_OLDGEN:
+      case IND_OLDGEN_PERM:
+         selectee = ((StgInd *)selectee)->indirectee;
+         goto selector_loop;
+
+      case EVACUATED:
+         // We don't follow pointers into to-space; the constructor
+         // has already been evacuated, so we won't save any space
+         // leaks by evaluating this selector thunk anyhow.
+         break;
+
+      case THUNK_SELECTOR:
+      {
+         StgClosure *val;
+
+         // check that we don't recurse too much, re-using the
+         // depth bound also used in evacuate().
+         thunk_selector_depth++;
+         if (thunk_selector_depth > MAX_THUNK_SELECTOR_DEPTH) {
+             break;
+         }
+
+         val = eval_thunk_selector(info->layout.selector_offset,
+                                   (StgSelector *)selectee);
+
+         thunk_selector_depth--;
+
+         if (val == NULL) {
+             break;
+         } else {
+             // We evaluated this selector thunk, so update it with
+             // an indirection.  NOTE: we don't use UPD_IND here,
+             // because we are guaranteed that p is in a generation
+             // that we are collecting, and we never want to put the
+             // indirection on a mutable list.
+             ((StgInd *)selectee)->indirectee = val;
+             SET_INFO(selectee,&stg_IND_info);
+             selectee = val;
+             goto selector_loop;
+         }
+      }
+
+      case AP_UPD:
+      case THUNK:
+      case THUNK_1_0:
+      case THUNK_0_1:
+      case THUNK_2_0:
+      case THUNK_1_1:
+      case THUNK_0_2:
+      case THUNK_STATIC:
+      case CAF_BLACKHOLE:
+      case SE_CAF_BLACKHOLE:
+      case SE_BLACKHOLE:
+      case BLACKHOLE:
+      case BLACKHOLE_BQ:
+#if defined(PAR)
+      case RBH:
+      case BLOCKED_FETCH:
+# ifdef DIST
+      case REMOTE_REF:
+# endif
+      case FETCH_ME:
+      case FETCH_ME_BQ:
+#endif
+         // not evaluated yet
+         break;
+
+      default:
+       barf("eval_thunk_selector: strange selectee %d",
+            (int)(info->type));
+    }
+
+    // We didn't manage to evaluate this thunk; restore the old info pointer
+    SET_INFO(p, info_ptr);
+    return NULL;
+}
+
+/* -----------------------------------------------------------------------------
   move_TSO is called to update the TSO structure after it has been
   moved from one place to another.
   -------------------------------------------------------------------------- */
@@ -2199,6 +2233,8 @@ scavenge(step *stp)
 
     info = get_itbl((StgClosure *)p);
     ASSERT(p && (LOOKS_LIKE_GHC_INFO(info) || IS_HUGS_CONSTR_INFO(info)));
+    ASSERT(thunk_selector_depth == 0);
+
     q = p;
     switch (info->type) {
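
The heart of this patch is the new eval_thunk_selector(): the selector thunk is blackholed, indirections from its selectee are followed, and if a constructor is reached the selected field is returned for the caller to use; otherwise the saved info pointer is put back and NULL is returned, with thunk_selector_depth bounding the recursion. The standalone C program below is only a simplified model of that control flow under invented types -- the closure struct, the tag names and eval_selector() are illustrative stand-ins, not the RTS structures or the code in the patch.

/* eval_selector_sketch.c -- simplified model of the selector-evaluation
 * scheme above.  Types, tags and names are invented for illustration. */

#include <stdio.h>
#include <stddef.h>

enum tag { CONSTR, IND, SELECTOR, BLACKHOLE, THUNK };

typedef struct closure {
    enum tag        tag;
    size_t          field;      /* SELECTOR only: which field to select  */
    struct closure *payload[2]; /* CONSTR fields, or IND/SELECTOR target */
} closure;

#define MAX_DEPTH 8             /* plays the role of MAX_THUNK_SELECTOR_DEPTH */
static int depth = 0;

/* Try to eliminate the selection without reducing the selectee to HNF.
 * Returns the selected closure, or NULL if the selectee is not evaluated
 * far enough; on failure the selector is restored unchanged. */
static closure *eval_selector(closure *sel)
{
    enum tag saved    = sel->tag;
    closure *selectee = sel->payload[0];
    size_t   field    = sel->field;

    /* Blackhole the selector so a self-referential chain cannot loop. */
    sel->tag = BLACKHOLE;

    for (;;) {
        switch (selectee->tag) {
        case CONSTR:
            /* Success: the caller is expected to overwrite the (still
             * blackholed) selector with the result. */
            return selectee->payload[field];

        case IND:
            selectee = selectee->payload[0];   /* follow the indirection */
            continue;

        case SELECTOR: {
            closure *val;
            if (++depth > MAX_DEPTH) { depth--; goto give_up; }
            val = eval_selector(selectee);
            depth--;
            if (val == NULL) goto give_up;
            /* Short-circuit the inner selector with an indirection. */
            selectee->tag        = IND;
            selectee->payload[0] = val;
            selectee = val;
            continue;
        }

        default:               /* THUNK, BLACKHOLE, ...: not evaluated yet */
            goto give_up;
        }
    }

give_up:
    sel->tag = saved;          /* restore the original header */
    return NULL;
}

int main(void)
{
    /* pair = CONSTR x y;  s selects field 1 of pair through an indirection. */
    closure x    = { THUNK,    0, { NULL,  NULL } };
    closure y    = { CONSTR,   0, { NULL,  NULL } };
    closure pair = { CONSTR,   0, { &x,    &y   } };
    closure ind  = { IND,      0, { &pair, NULL } };
    closure s    = { SELECTOR, 1, { &ind,  NULL } };

    printf("selected %p, expected %p\n", (void *)eval_selector(&s), (void *)&y);
    return 0;
}

As in the patch, a successful call leaves the selector blackholed and hands the result back to the caller, which is responsible for overwriting it; in the diff above, evacuate() does that with upd_evacuee() after evacuating the returned closure.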
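
The patch also brackets the collection with blockUserSignals()/unblockUserSignals() (except on mingw32), so that user-installed signal handlers cannot run while the heap is in an inconsistent state. Those functions live in Signals.c and are not shown here; the fragment below is only a generic POSIX sketch of the save-block/restore idiom, with invented names, and may differ from what the RTS actually does.

/* signals_sketch.c -- generic save-block/restore pattern, for illustration. */

#include <signal.h>

static sigset_t saved_mask;

/* Block asynchronous user signals for the duration of the collection. */
void block_user_signals(void)
{
    sigset_t set;
    sigfillset(&set);               /* start from "block everything"        */
    sigdelset(&set, SIGSEGV);       /* but keep synchronous faults          */
    sigdelset(&set, SIGBUS);        /* deliverable                          */
    sigprocmask(SIG_BLOCK, &set, &saved_mask);
}

/* Restore whatever signal mask was in force before the GC started. */
void unblock_user_signals(void)
{
    sigprocmask(SIG_SETMASK, &saved_mask, NULL);
}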