diff --git a/ghc/rts/GC.c b/ghc/rts/GC.c
index 9d25926..a4f0c17 100644
--- a/ghc/rts/GC.c
+++ b/ghc/rts/GC.c
@@ -1,5 +1,4 @@
/* -----------------------------------------------------------------------------
- * $Id: GC.c,v 1.169 2004/09/03 15:28:20 simonmar Exp $
*
* (c) The GHC Team 1998-2003
*
@@ -12,12 +11,12 @@
#include "RtsFlags.h"
#include "RtsUtils.h"
#include "Apply.h"
+#include "OSThreads.h"
#include "Storage.h"
#include "LdvProfile.h"
#include "Updates.h"
#include "Stats.h"
#include "Schedule.h"
-#include "SchedAPI.h" // for ReverCAFs prototype
#include "Sanity.h"
#include "BlockAlloc.h"
#include "MBlock.h"
@@ -28,6 +27,7 @@
#include "ParTicky.h" // ToDo: move into Rts.h
#include "GCCompact.h"
#include "Signals.h"
+#include "STM.h"
#if defined(GRAN) || defined(PAR)
# include "GranSimRts.h"
# include "ParallelRts.h"
@@ -47,6 +47,12 @@
#include <string.h>

+// Turn off inlining when debugging - it obfuscates things
+#ifdef DEBUG
+# undef STATIC_INLINE
+# define STATIC_INLINE static
+#endif
+
/* STATIC OBJECT LIST.
*
* During GC:
@@ -149,7 +155,6 @@ static void mark_root ( StgClosure **root );
REGPARM1 static StgClosure * evacuate (StgClosure *q);
static void zero_static_object_list ( StgClosure* first_static );
-static void zero_mutable_list ( StgMutClosure *first );
static rtsBool traverse_weak_ptr_list ( void );
static void mark_weak_ptr_list ( StgWeak **list );
@@ -164,7 +169,6 @@ static rtsBool scavenge_one ( StgPtr p );
static void scavenge_large ( step * );
static void scavenge_static ( void );
static void scavenge_mutable_list ( generation *g );
-static void scavenge_mut_once_list ( generation *g );
static void scavenge_large_bitmap ( StgPtr p,
StgLargeBitmap *large_bitmap,
@@ -266,7 +270,7 @@ gc_alloc_block(step *stp)
(and all younger generations):
- follow all pointers in the root set. the root set includes all
- mutable objects in all generations (mutable_list and mut_once_list).
+ mutable objects in all generations (mutable_list).
- for each pointer, evacuate the object it points to into either
@@ -278,7 +282,7 @@ gc_alloc_block(step *stp)
When we evacuate an object we attempt to evacuate
everything it points to into the same generation - this is
achieved by setting evac_gen to the desired generation. If
- we can't do this, then an entry in the mut_once list has to
+ we can't do this, then an entry in the mut list has to
be made for the cross-generation pointer.
+ if the object is already in a generation > N, then leave
@@ -316,6 +320,9 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc )
blockUserSignals();
#endif
+ // tell the STM to discard any cached closures it's hoping to re-use
+ stmPreGCHook();
+
// tell the stats department that we've started a GC
stat_startGC();
@@ -367,13 +374,6 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc )
static_objects = END_OF_STATIC_LIST;
scavenged_static_objects = END_OF_STATIC_LIST;
- /* zero the mutable list for the oldest generation (see comment by
- * zero_mutable_list below). 
- */ - if (major_gc) { - zero_mutable_list(generations[RtsFlags.GcFlags.generations-1].mut_once_list); - } - /* Save the old to-space if we're doing a two-space collection */ if (RtsFlags.GcFlags.generations == 1) { @@ -391,8 +391,14 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc ) // collecting. // for (g = 0; g <= N; g++) { - generations[g].mut_once_list = END_MUT_LIST; - generations[g].mut_list = END_MUT_LIST; + + // throw away the mutable list. Invariant: the mutable list + // always has at least one block; this means we can avoid a check for + // NULL in recordMutable(). + if (g != 0) { + freeChain(generations[g].mut_list); + generations[g].mut_list = allocBlock(); + } for (s = 0; s < generations[g].n_steps; s++) { @@ -435,7 +441,7 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc ) bitmap_size = stp->n_blocks * BLOCK_SIZE / (sizeof(W_)*BITS_PER_BYTE); if (bitmap_size > 0) { - bitmap_bdescr = allocGroup((nat)BLOCK_ROUND_UP(bitmap_size) + bitmap_bdescr = allocGroup((lnat)BLOCK_ROUND_UP(bitmap_size) / BLOCK_SIZE); stp->bitmap = bitmap_bdescr; bitmap = bitmap_bdescr->start; @@ -515,23 +521,12 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc ) int st; for (g = RtsFlags.GcFlags.generations-1; g > N; g--) { generations[g].saved_mut_list = generations[g].mut_list; - generations[g].mut_list = END_MUT_LIST; - } - - // Do the mut-once lists first - for (g = RtsFlags.GcFlags.generations-1; g > N; g--) { - IF_PAR_DEBUG(verbose, - printMutOnceList(&generations[g])); - scavenge_mut_once_list(&generations[g]); - evac_gen = g; - for (st = generations[g].n_steps-1; st >= 0; st--) { - scavenge(&generations[g].steps[st]); - } + generations[g].mut_list = allocBlock(); + // mut_list always has at least one block. } for (g = RtsFlags.GcFlags.generations-1; g > N; g--) { - IF_PAR_DEBUG(verbose, - printMutableList(&generations[g])); + IF_PAR_DEBUG(verbose, printMutableList(&generations[g])); scavenge_mutable_list(&generations[g]); evac_gen = g; for (st = generations[g].n_steps-1; st >= 0; st--) { @@ -717,6 +712,14 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc ) generations[g].collections++; // for stats } + // Count the mutable list as bytes "copied" for the purposes of + // stats. Every mutable list is copied during every GC. + if (g > 0) { + for (bd = generations[g].mut_list; bd != NULL; bd = bd->link) { + copied += (bd->free - bd->start) * sizeof(StgWord); + } + } + for (s = 0; s < generations[g].n_steps; s++) { bdescr *next; stp = &generations[g].steps[s]; @@ -981,7 +984,7 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc ) blocks = RtsFlags.GcFlags.minAllocAreaSize; } } - resizeNursery(blocks); + resizeNurseries(blocks); } else { /* Generational collector: @@ -998,7 +1001,7 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc ) * percentage of g0s0 that was live at the last minor GC. */ if (N == 0) { - g0s0_pcnt_kept = (new_blocks * 100) / g0s0->n_blocks; + g0s0_pcnt_kept = (new_blocks * 100) / countNurseryBlocks(); } /* Estimate a size for the allocation area based on the @@ -1021,12 +1024,12 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc ) blocks = RtsFlags.GcFlags.minAllocAreaSize; } - resizeNursery((nat)blocks); + resizeNurseries((nat)blocks); } else { // we might have added extra large blocks to the nursery, so // resize back to minAllocAreaSize again. 
- resizeNursery(RtsFlags.GcFlags.minAllocAreaSize); + resizeNurseries(RtsFlags.GcFlags.minAllocAreaSize); } } @@ -1238,7 +1241,7 @@ traverse_weak_ptr_list(void) prev = &old_all_threads; for (t = old_all_threads; t != END_TSO_QUEUE; t = next) { - (StgClosure *)tmp = isAlive((StgClosure *)t); + tmp = (StgTSO *)isAlive((StgClosure *)t); if (tmp != NULL) { t = tmp; @@ -1285,7 +1288,7 @@ traverse_weak_ptr_list(void) StgTSO *t, *tmp, *next; for (t = old_all_threads; t != END_TSO_QUEUE; t = next) { next = t->global_link; - (StgClosure *)tmp = evacuate((StgClosure *)t); + tmp = (StgTSO *)evacuate((StgClosure *)t); tmp->global_link = resurrected_threads; resurrected_threads = tmp; } @@ -1324,7 +1327,7 @@ mark_weak_ptr_list ( StgWeak **list ) // w might be WEAK, EVACUATED, or DEAD_WEAK (actually CON_STATIC) here ASSERT(w->header.info == &stg_DEAD_WEAK_info || get_itbl(w)->type == WEAK || get_itbl(w)->type == EVACUATED); - (StgClosure *)w = evacuate((StgClosure *)w); + w = (StgWeak *)evacuate((StgClosure *)w); *last_w = w; last_w = &(w->link); } @@ -1419,8 +1422,6 @@ mark_root(StgClosure **root) STATIC_INLINE void upd_evacuee(StgClosure *p, StgClosure *dest) { - // Source object must be in from-space: - ASSERT((Bdescr((P_)p)->flags & BF_EVACUATED) == 0); // not true: (ToDo: perhaps it should be) // ASSERT(Bdescr((P_)dest)->flags & BF_EVACUATED); SET_INFO(p, &stg_EVACUATED_info); @@ -1586,39 +1587,6 @@ evacuate_large(StgPtr p) } /* ----------------------------------------------------------------------------- - Adding a MUT_CONS to an older generation. - - This is necessary from time to time when we end up with an - old-to-new generation pointer in a non-mutable object. We defer - the promotion until the next GC. - -------------------------------------------------------------------------- */ - -static StgClosure * -mkMutCons(StgClosure *ptr, generation *gen) -{ - StgMutVar *q; - step *stp; - - stp = &gen->steps[0]; - - /* chain a new block onto the to-space for the destination step if - * necessary. - */ - if (stp->hp + sizeofW(StgIndOldGen) >= stp->hpLim) { - gc_alloc_block(stp); - } - - q = (StgMutVar *)stp->hp; - stp->hp += sizeofW(StgMutVar); - - SET_HDR(q,&stg_MUT_CONS_info,CCS_GC); - q->var = ptr; - recordOldToNewPtrs((StgMutClosure *)q); - - return (StgClosure *)q; -} - -/* ----------------------------------------------------------------------------- Evacuate This is called (eventually) for every live object in the system. @@ -1663,7 +1631,9 @@ mkMutCons(StgClosure *ptr, generation *gen) REGPARM1 static StgClosure * evacuate(StgClosure *q) { +#if defined(PAR) StgClosure *to; +#endif bdescr *bd = NULL; step *stp; const StgInfoTable *info; @@ -1714,6 +1684,9 @@ loop: return q; } + /* Object is not already evacuated. 
*/ + ASSERT((bd->flags & BF_EVACUATED) == 0); + stp = bd->step->to; } #ifdef DEBUG @@ -1750,8 +1723,10 @@ loop: case CONSTR_1_0: return copy(q,sizeofW(StgHeader)+1,stp); - case THUNK_1_0: // here because of MIN_UPD_SIZE + case THUNK_1_0: case THUNK_0_1: + return copy(q,sizeofW(StgThunk)+1,stp); + case THUNK_1_1: case THUNK_0_2: case THUNK_2_0: @@ -1762,7 +1737,7 @@ loop: stp = bd->step; } #endif - return copy(q,sizeofW(StgHeader)+2,stp); + return copy(q,sizeofW(StgThunk)+2,stp); case FUN_1_1: case FUN_0_2: @@ -1772,8 +1747,10 @@ loop: case CONSTR_2_0: return copy(q,sizeofW(StgHeader)+2,stp); - case FUN: case THUNK: + return copy(q,thunk_sizeW_fromITBL(info),stp); + + case FUN: case CONSTR: case IND_PERM: case IND_OLDGEN_PERM: @@ -1791,10 +1768,6 @@ loop: case BLACKHOLE: return copyPart(q,BLACKHOLE_sizeW(),sizeofW(StgHeader),stp); - case BLACKHOLE_BQ: - to = copy(q,BLACKHOLE_sizeW(),stp); - return to; - case THUNK_SELECTOR: { StgClosure *p; @@ -1832,16 +1805,16 @@ loop: case THUNK_STATIC: if (info->srt_bitmap != 0 && major_gc && - THUNK_STATIC_LINK((StgClosure *)q) == NULL) { - THUNK_STATIC_LINK((StgClosure *)q) = static_objects; + *THUNK_STATIC_LINK((StgClosure *)q) == NULL) { + *THUNK_STATIC_LINK((StgClosure *)q) = static_objects; static_objects = (StgClosure *)q; } return q; case FUN_STATIC: if (info->srt_bitmap != 0 && major_gc && - FUN_STATIC_LINK((StgClosure *)q) == NULL) { - FUN_STATIC_LINK((StgClosure *)q) = static_objects; + *FUN_STATIC_LINK((StgClosure *)q) == NULL) { + *FUN_STATIC_LINK((StgClosure *)q) = static_objects; static_objects = (StgClosure *)q; } return q; @@ -1853,15 +1826,15 @@ loop: */ if (major_gc && ((StgIndStatic *)q)->saved_info == NULL - && IND_STATIC_LINK((StgClosure *)q) == NULL) { - IND_STATIC_LINK((StgClosure *)q) = static_objects; + && *IND_STATIC_LINK((StgClosure *)q) == NULL) { + *IND_STATIC_LINK((StgClosure *)q) = static_objects; static_objects = (StgClosure *)q; } return q; case CONSTR_STATIC: - if (major_gc && STATIC_LINK(info,(StgClosure *)q) == NULL) { - STATIC_LINK(info,(StgClosure *)q) = static_objects; + if (major_gc && *STATIC_LINK(info,(StgClosure *)q) == NULL) { + *STATIC_LINK(info,(StgClosure *)q) = static_objects; static_objects = (StgClosure *)q; } return q; @@ -1883,13 +1856,18 @@ loop: case UPDATE_FRAME: case STOP_FRAME: case CATCH_FRAME: + case CATCH_STM_FRAME: + case CATCH_RETRY_FRAME: + case ATOMICALLY_FRAME: // shouldn't see these barf("evacuate: stack frame at %p\n", q); case PAP: - case AP: return copy(q,pap_sizeW((StgPAP*)q),stp); + case AP: + return copy(q,ap_sizeW((StgAP*)q),stp); + case AP_STACK: return copy(q,ap_stack_sizeW((StgAP_STACK*)q),stp); @@ -1916,6 +1894,7 @@ loop: case MUT_ARR_PTRS: case MUT_ARR_PTRS_FROZEN: + case MUT_ARR_PTRS_FROZEN0: // just copy the block return copy(q,mut_arr_ptrs_sizeW((StgMutArrPtrs *)q),stp); @@ -1951,7 +1930,7 @@ loop: } #if defined(PAR) - case RBH: // cf. BLACKHOLE_BQ + case RBH: { //StgInfoTable *rip = get_closure_info(q, &size, &ptrs, &nonptrs, &vhs, str); to = copy(q,BLACKHOLE_sizeW(),stp); @@ -1991,6 +1970,18 @@ loop: return to; #endif + case TREC_HEADER: + return copy(q,sizeofW(StgTRecHeader),stp); + + case TVAR_WAIT_QUEUE: + return copy(q,sizeofW(StgTVarWaitQueue),stp); + + case TVAR: + return copy(q,sizeofW(StgTVar),stp); + + case TREC_CHUNK: + return copy(q,sizeofW(StgTRecChunk),stp); + default: barf("evacuate: strange closure type %d", (int)(info->type)); } @@ -2011,6 +2002,48 @@ loop: been BLACKHOLE'd, and should be updated with an indirection or a forwarding pointer. 
If the return value is NULL, then the selector thunk is unchanged.
+
+ ***
+ ToDo: the treatment of THUNK_SELECTORS could be improved in the
+ following way (from a suggestion by Ian Lynagh):
+
+ We can have a chain like this:
+
+ sel_0 --> (a,b)
+ |
+ |-----> sel_0 --> (a,b)
+ |
+ |-----> sel_0 --> ...
+
+ and the depth limit means we don't go all the way to the end of the
+ chain, which results in a space leak. This affects the recursive
+ call to evacuate() in the THUNK_SELECTOR case in evacuate(): *not*
+ the recursive call to eval_thunk_selector() in
+ eval_thunk_selector().
+
+ We could eliminate the depth bound in this case, in the following
+ way:
+
+ - traverse the chain once to discover the *value* of the
+ THUNK_SELECTOR. Mark all THUNK_SELECTORS that we
+ visit on the way as having been visited already (somehow).
+
+ - in a second pass, traverse the chain again updating all
+ THUNK_SELECTORS that we find on the way with indirections to
+ the value.
+
+ - if we encounter a "marked" THUNK_SELECTOR in a normal
+ evacuate(), we know it can't be updated so just evac it.
+
+ Program that illustrates the problem:
+
+ foo [] = ([], [])
+ foo (x:xs) = let (ys, zs) = foo xs
+ in if x >= 0 then (x:ys, zs) else (ys, x:zs)
+
+ main = bar [1..(100000000::Int)]
+ bar xs = (\(ys, zs) -> print ys >> print zs) (foo xs)
+
-------------------------------------------------------------------------- */

static inline rtsBool
@@ -2138,10 +2171,10 @@ selector_loop:
// check that we don't recurse too much, re-using the
// depth bound also used in evacuate().
- thunk_selector_depth++;
- if (thunk_selector_depth > MAX_THUNK_SELECTOR_DEPTH) {
+ if (thunk_selector_depth >= MAX_THUNK_SELECTOR_DEPTH) {
break;
}
+ thunk_selector_depth++;

val = eval_thunk_selector(info->layout.selector_offset,
(StgSelector *)selectee);
@@ -2187,7 +2220,6 @@
case SE_CAF_BLACKHOLE:
case SE_BLACKHOLE:
case BLACKHOLE:
- case BLACKHOLE_BQ:
#if defined(PAR)
case RBH:
case BLOCKED_FETCH:
@@ -2305,7 +2337,7 @@ scavenge_thunk_srt(const StgInfoTable *info)
StgThunkInfoTable *thunk_info;

thunk_info = itbl_to_thunk_itbl(info);
- scavenge_srt((StgClosure **)thunk_info->srt, thunk_info->i.srt_bitmap);
+ scavenge_srt((StgClosure **)GET_SRT(thunk_info), thunk_info->i.srt_bitmap);
}

STATIC_INLINE void
@@ -2314,16 +2346,7 @@ scavenge_fun_srt(const StgInfoTable *info)
StgFunInfoTable *fun_info;

fun_info = itbl_to_fun_itbl(info);
- scavenge_srt((StgClosure **)fun_info->f.srt, fun_info->i.srt_bitmap);
-}
-
-STATIC_INLINE void
-scavenge_ret_srt(const StgInfoTable *info)
-{
- StgRetInfoTable *ret_info;
-
- ret_info = itbl_to_ret_itbl(info);
- scavenge_srt((StgClosure **)ret_info->srt, ret_info->i.srt_bitmap);
+ scavenge_srt((StgClosure **)GET_FUN_SRT(fun_info), fun_info->i.srt_bitmap);
}

/* -----------------------------------------------------------------------------
@@ -2334,7 +2357,7 @@
static void
scavengeTSO (StgTSO *tso)
{
// chase the link field for any TSOs on the same queue
- (StgClosure *)tso->link = evacuate((StgClosure *)tso->link);
+ tso->link = (StgTSO *)evacuate((StgClosure *)tso->link);
if ( tso->why_blocked == BlockedOnMVar
|| tso->why_blocked == BlockedOnBlackHole
|| tso->why_blocked == BlockedOnException
@@ -2350,6 +2373,9 @@
(StgTSO *)evacuate((StgClosure *)tso->blocked_exceptions);
}

+ // scavenge the current transaction record
+ tso->trec = (StgTRecHeader *)evacuate((StgClosure *)tso->trec);
+
// scavenge this thread's stack
scavenge_stack(tso->sp, &(tso->stack[tso->stack_size]));
}

@@ -2369,12 
+2395,12 @@ scavenge_arg_block (StgFunInfoTable *fun_info, StgClosure **args) p = (StgPtr)args; switch (fun_info->f.fun_type) { case ARG_GEN: - bitmap = BITMAP_BITS(fun_info->f.bitmap); - size = BITMAP_SIZE(fun_info->f.bitmap); + bitmap = BITMAP_BITS(fun_info->f.b.bitmap); + size = BITMAP_SIZE(fun_info->f.b.bitmap); goto small_bitmap; case ARG_GEN_BIG: - size = ((StgLargeBitmap *)fun_info->f.bitmap)->size; - scavenge_large_bitmap(p, (StgLargeBitmap *)fun_info->f.bitmap, size); + size = GET_FUN_LARGE_BITMAP(fun_info)->size; + scavenge_large_bitmap(p, GET_FUN_LARGE_BITMAP(fun_info), size); p += size; break; default: @@ -2383,7 +2409,7 @@ scavenge_arg_block (StgFunInfoTable *fun_info, StgClosure **args) small_bitmap: while (size > 0) { if ((bitmap & 1) == 0) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); } p++; bitmap = bitmap >> 1; @@ -2395,38 +2421,34 @@ scavenge_arg_block (StgFunInfoTable *fun_info, StgClosure **args) } STATIC_INLINE StgPtr -scavenge_PAP (StgPAP *pap) +scavenge_PAP_payload (StgClosure *fun, StgClosure **payload, StgWord size) { StgPtr p; - StgWord bitmap, size; + StgWord bitmap; StgFunInfoTable *fun_info; - - pap->fun = evacuate(pap->fun); - fun_info = get_fun_itbl(pap->fun); + + fun_info = get_fun_itbl(fun); ASSERT(fun_info->i.type != PAP); - - p = (StgPtr)pap->payload; - size = pap->n_args; + p = (StgPtr)payload; switch (fun_info->f.fun_type) { case ARG_GEN: - bitmap = BITMAP_BITS(fun_info->f.bitmap); + bitmap = BITMAP_BITS(fun_info->f.b.bitmap); goto small_bitmap; case ARG_GEN_BIG: - scavenge_large_bitmap(p, (StgLargeBitmap *)fun_info->f.bitmap, size); + scavenge_large_bitmap(p, GET_FUN_LARGE_BITMAP(fun_info), size); p += size; break; case ARG_BCO: - scavenge_large_bitmap((StgPtr)pap->payload, BCO_BITMAP(pap->fun), size); + scavenge_large_bitmap((StgPtr)payload, BCO_BITMAP(fun), size); p += size; break; default: bitmap = BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]); small_bitmap: - size = pap->n_args; while (size > 0) { if ((bitmap & 1) == 0) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); } p++; bitmap = bitmap >> 1; @@ -2437,6 +2459,20 @@ scavenge_PAP (StgPAP *pap) return p; } +STATIC_INLINE StgPtr +scavenge_PAP (StgPAP *pap) +{ + pap->fun = evacuate(pap->fun); + return scavenge_PAP_payload (pap->fun, pap->payload, pap->n_args); +} + +STATIC_INLINE StgPtr +scavenge_AP (StgAP *ap) +{ + ap->fun = evacuate(ap->fun); + return scavenge_PAP_payload (ap->fun, ap->payload, ap->n_args); +} + /* ----------------------------------------------------------------------------- Scavenge a given step until there are no more objects in this step to scavenge. @@ -2485,18 +2521,14 @@ scavenge(step *stp) switch (info->type) { case MVAR: - /* treat MVars specially, because we don't want to evacuate the - * mut_link field in the middle of the closure. - */ { StgMVar *mvar = ((StgMVar *)p); evac_gen = 0; - (StgClosure *)mvar->head = evacuate((StgClosure *)mvar->head); - (StgClosure *)mvar->tail = evacuate((StgClosure *)mvar->tail); - (StgClosure *)mvar->value = evacuate((StgClosure *)mvar->value); + mvar->head = (StgTSO *)evacuate((StgClosure *)mvar->head); + mvar->tail = (StgTSO *)evacuate((StgClosure *)mvar->tail); + mvar->value = evacuate((StgClosure *)mvar->value); evac_gen = saved_evac_gen; - recordMutable((StgMutClosure *)mvar); - failed_to_evac = rtsFalse; // mutable. + failed_to_evac = rtsTrue; // mutable. 
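+ // [editor's note] Setting failed_to_evac here is how mutable closures
+ // now stay on their generation's mutable list: the generic code at the
+ // end of scavenge() re-records any closure with this flag set, via
+ // recordMutableGen(), replacing the old per-object recordMutable() /
+ // mut_link scheme.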
p += sizeofW(StgMVar); break; } @@ -2510,6 +2542,11 @@ scavenge(step *stp) case THUNK_2_0: scavenge_thunk_srt(info); + ((StgThunk *)p)->payload[1] = evacuate(((StgThunk *)p)->payload[1]); + ((StgThunk *)p)->payload[0] = evacuate(((StgThunk *)p)->payload[0]); + p += sizeofW(StgThunk) + 2; + break; + case CONSTR_2_0: ((StgClosure *)p)->payload[1] = evacuate(((StgClosure *)p)->payload[1]); ((StgClosure *)p)->payload[0] = evacuate(((StgClosure *)p)->payload[0]); @@ -2518,8 +2555,8 @@ scavenge(step *stp) case THUNK_1_0: scavenge_thunk_srt(info); - ((StgClosure *)p)->payload[0] = evacuate(((StgClosure *)p)->payload[0]); - p += sizeofW(StgHeader) + 2; // MIN_UPD_SIZE + ((StgThunk *)p)->payload[0] = evacuate(((StgThunk *)p)->payload[0]); + p += sizeofW(StgThunk) + 1; break; case FUN_1_0: @@ -2531,7 +2568,7 @@ scavenge(step *stp) case THUNK_0_1: scavenge_thunk_srt(info); - p += sizeofW(StgHeader) + 2; // MIN_UPD_SIZE + p += sizeofW(StgThunk) + 1; break; case FUN_0_1: @@ -2542,7 +2579,7 @@ scavenge(step *stp) case THUNK_0_2: scavenge_thunk_srt(info); - p += sizeofW(StgHeader) + 2; + p += sizeofW(StgThunk) + 2; break; case FUN_0_2: @@ -2553,8 +2590,8 @@ scavenge(step *stp) case THUNK_1_1: scavenge_thunk_srt(info); - ((StgClosure *)p)->payload[0] = evacuate(((StgClosure *)p)->payload[0]); - p += sizeofW(StgHeader) + 2; + ((StgThunk *)p)->payload[0] = evacuate(((StgThunk *)p)->payload[0]); + p += sizeofW(StgThunk) + 2; break; case FUN_1_1: @@ -2569,8 +2606,17 @@ scavenge(step *stp) goto gen_obj; case THUNK: + { + StgPtr end; + scavenge_thunk_srt(info); - // fall through + end = (P_)((StgThunk *)p)->payload + info->layout.payload.ptrs; + for (p = (P_)((StgThunk *)p)->payload; p < end; p++) { + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); + } + p += info->layout.payload.nptrs; + break; + } gen_obj: case CONSTR: @@ -2582,7 +2628,7 @@ scavenge(step *stp) end = (P_)((StgClosure *)p)->payload + info->layout.payload.ptrs; for (p = (P_)((StgClosure *)p)->payload; p < end; p++) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); } p += info->layout.payload.nptrs; break; @@ -2590,10 +2636,10 @@ scavenge(step *stp) case BCO: { StgBCO *bco = (StgBCO *)p; - (StgClosure *)bco->instrs = evacuate((StgClosure *)bco->instrs); - (StgClosure *)bco->literals = evacuate((StgClosure *)bco->literals); - (StgClosure *)bco->ptrs = evacuate((StgClosure *)bco->ptrs); - (StgClosure *)bco->itbls = evacuate((StgClosure *)bco->itbls); + bco->instrs = (StgArrWords *)evacuate((StgClosure *)bco->instrs); + bco->literals = (StgArrWords *)evacuate((StgClosure *)bco->literals); + bco->ptrs = (StgMutArrPtrs *)evacuate((StgClosure *)bco->ptrs); + bco->itbls = (StgArrWords *)evacuate((StgClosure *)bco->itbls); p += bco_sizeW(bco); break; } @@ -2616,27 +2662,15 @@ scavenge(step *stp) } // fall through case IND_OLDGEN_PERM: - ((StgIndOldGen *)p)->indirectee = - evacuate(((StgIndOldGen *)p)->indirectee); - if (failed_to_evac) { - failed_to_evac = rtsFalse; - recordOldToNewPtrs((StgMutClosure *)p); - } - p += sizeofW(StgIndOldGen); + ((StgInd *)p)->indirectee = evacuate(((StgInd *)p)->indirectee); + p += sizeofW(StgInd); break; case MUT_VAR: evac_gen = 0; ((StgMutVar *)p)->var = evacuate(((StgMutVar *)p)->var); evac_gen = saved_evac_gen; - recordMutable((StgMutClosure *)p); - failed_to_evac = rtsFalse; // mutable anyhow - p += sizeofW(StgMutVar); - break; - - case MUT_CONS: - // ignore these - failed_to_evac = rtsFalse; // mutable anyhow + failed_to_evac = rtsTrue; // mutable anyhow p += 
sizeofW(StgMutVar); break; @@ -2647,17 +2681,6 @@ scavenge(step *stp) p += BLACKHOLE_sizeW(); break; - case BLACKHOLE_BQ: - { - StgBlockingQueue *bh = (StgBlockingQueue *)p; - (StgClosure *)bh->blocking_queue = - evacuate((StgClosure *)bh->blocking_queue); - recordMutable((StgMutClosure *)bh); - failed_to_evac = rtsFalse; - p += BLACKHOLE_sizeW(); - break; - } - case THUNK_SELECTOR: { StgSelector *s = (StgSelector *)p; @@ -2678,10 +2701,13 @@ scavenge(step *stp) } case PAP: - case AP: p = scavenge_PAP((StgPAP *)p); break; + case AP: + p = scavenge_AP((StgAP *)p); + break; + case ARR_WORDS: // nothing to follow p += arr_words_sizeW((StgArrWords *)p); @@ -2695,27 +2721,22 @@ scavenge(step *stp) evac_gen = 0; // repeatedly mutable next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p); for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); } evac_gen = saved_evac_gen; - recordMutable((StgMutClosure *)q); - failed_to_evac = rtsFalse; // mutable anyhow. + failed_to_evac = rtsTrue; // mutable anyhow. break; } case MUT_ARR_PTRS_FROZEN: + case MUT_ARR_PTRS_FROZEN0: // follow everything { StgPtr next; - // Set the mut_link field to NULL, so that we will put this - // array back on the mutable list if it is subsequently thawed - // by unsafeThaw#. - ((StgMutArrPtrs*)p)->mut_link = NULL; - next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p); for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); } // it's tempting to recordMutable() if failed_to_evac is // false, but that breaks some assumptions (eg. every @@ -2730,14 +2751,13 @@ scavenge(step *stp) evac_gen = 0; scavengeTSO(tso); evac_gen = saved_evac_gen; - recordMutable((StgMutClosure *)tso); - failed_to_evac = rtsFalse; // mutable anyhow. + failed_to_evac = rtsTrue; // mutable anyhow. p += tso_sizeW(tso); break; } #if defined(PAR) - case RBH: // cf. BLACKHOLE_BQ + case RBH: { #if 0 nat size, ptrs, nonptrs, vhs; @@ -2747,8 +2767,7 @@ scavenge(step *stp) StgRBH *rbh = (StgRBH *)p; (StgClosure *)rbh->blocking_queue = evacuate((StgClosure *)rbh->blocking_queue); - recordMutable((StgMutClosure *)to); - failed_to_evac = rtsFalse; // mutable anyhow. + failed_to_evac = rtsTrue; // mutable anyhow. IF_DEBUG(gc, debugBelch("@@ scavenge: RBH %p (%s) (new blocking_queue link=%p)", p, info_type(p), (StgClosure *)rbh->blocking_queue)); @@ -2766,10 +2785,6 @@ scavenge(step *stp) // follow the link to the rest of the blocking queue (StgClosure *)bf->link = evacuate((StgClosure *)bf->link); - if (failed_to_evac) { - failed_to_evac = rtsFalse; - recordMutable((StgMutClosure *)bf); - } IF_DEBUG(gc, debugBelch("@@ scavenge: %p (%s); node is now %p; exciting, isn't it", bf, info_type((StgClosure *)bf), @@ -2785,15 +2800,11 @@ scavenge(step *stp) p += sizeofW(StgFetchMe); break; // nothing to do in this case - case FETCH_ME_BQ: // cf. 
BLACKHOLE_BQ + case FETCH_ME_BQ: { StgFetchMeBlockingQueue *fmbq = (StgFetchMeBlockingQueue *)p; (StgClosure *)fmbq->blocking_queue = evacuate((StgClosure *)fmbq->blocking_queue); - if (failed_to_evac) { - failed_to_evac = rtsFalse; - recordMutable((StgMutClosure *)fmbq); - } IF_DEBUG(gc, debugBelch("@@ scavenge: %p (%s) exciting, isn't it", p, info_type((StgClosure *)p))); @@ -2802,18 +2813,76 @@ scavenge(step *stp) } #endif + case TVAR_WAIT_QUEUE: + { + StgTVarWaitQueue *wq = ((StgTVarWaitQueue *) p); + evac_gen = 0; + wq->waiting_tso = (StgTSO *)evacuate((StgClosure*)wq->waiting_tso); + wq->next_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)wq->next_queue_entry); + wq->prev_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)wq->prev_queue_entry); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + p += sizeofW(StgTVarWaitQueue); + break; + } + + case TVAR: + { + StgTVar *tvar = ((StgTVar *) p); + evac_gen = 0; + tvar->current_value = evacuate((StgClosure*)tvar->current_value); + tvar->first_wait_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)tvar->first_wait_queue_entry); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + p += sizeofW(StgTVar); + break; + } + + case TREC_HEADER: + { + StgTRecHeader *trec = ((StgTRecHeader *) p); + evac_gen = 0; + trec->enclosing_trec = (StgTRecHeader *)evacuate((StgClosure*)trec->enclosing_trec); + trec->current_chunk = (StgTRecChunk *)evacuate((StgClosure*)trec->current_chunk); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + p += sizeofW(StgTRecHeader); + break; + } + + case TREC_CHUNK: + { + StgWord i; + StgTRecChunk *tc = ((StgTRecChunk *) p); + TRecEntry *e = &(tc -> entries[0]); + evac_gen = 0; + tc->prev_chunk = (StgTRecChunk *)evacuate((StgClosure*)tc->prev_chunk); + for (i = 0; i < tc -> next_entry_idx; i ++, e++ ) { + e->tvar = (StgTVar *)evacuate((StgClosure*)e->tvar); + e->expected_value = evacuate((StgClosure*)e->expected_value); + e->new_value = evacuate((StgClosure*)e->new_value); + } + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + p += sizeofW(StgTRecChunk); + break; + } + default: barf("scavenge: unimplemented/strange closure type %d @ %p", info->type, p); } - /* If we didn't manage to promote all the objects pointed to by - * the current object, then we have to designate this object as - * mutable (because it contains old-to-new generation pointers). + /* + * We need to record the current object on the mutable list if + * (a) It is actually mutable, or + * (b) It contains pointers to a younger generation. + * Case (b) arises if we didn't manage to promote everything that + * the current object points to into the current generation. */ if (failed_to_evac) { failed_to_evac = rtsFalse; - mkMutCons((StgClosure *)q, &generations[evac_gen]); + recordMutableGen((StgClosure *)q, stp->gen); } } @@ -2850,17 +2919,14 @@ linear_scan: switch (info->type) { case MVAR: - /* treat MVars specially, because we don't want to evacuate the - * mut_link field in the middle of the closure. 
- */ { StgMVar *mvar = ((StgMVar *)p); evac_gen = 0; - (StgClosure *)mvar->head = evacuate((StgClosure *)mvar->head); - (StgClosure *)mvar->tail = evacuate((StgClosure *)mvar->tail); - (StgClosure *)mvar->value = evacuate((StgClosure *)mvar->value); + mvar->head = (StgTSO *)evacuate((StgClosure *)mvar->head); + mvar->tail = (StgTSO *)evacuate((StgClosure *)mvar->tail); + mvar->value = evacuate((StgClosure *)mvar->value); evac_gen = saved_evac_gen; - failed_to_evac = rtsFalse; // mutable. + failed_to_evac = rtsTrue; // mutable. break; } @@ -2872,6 +2938,10 @@ linear_scan: case THUNK_2_0: scavenge_thunk_srt(info); + ((StgThunk *)p)->payload[1] = evacuate(((StgThunk *)p)->payload[1]); + ((StgThunk *)p)->payload[0] = evacuate(((StgThunk *)p)->payload[0]); + break; + case CONSTR_2_0: ((StgClosure *)p)->payload[1] = evacuate(((StgClosure *)p)->payload[1]); ((StgClosure *)p)->payload[0] = evacuate(((StgClosure *)p)->payload[0]); @@ -2886,6 +2956,9 @@ linear_scan: case THUNK_1_0: case THUNK_1_1: scavenge_thunk_srt(info); + ((StgThunk *)p)->payload[0] = evacuate(((StgThunk *)p)->payload[0]); + break; + case CONSTR_1_0: case CONSTR_1_1: ((StgClosure *)p)->payload[0] = evacuate(((StgClosure *)p)->payload[0]); @@ -2910,8 +2983,16 @@ linear_scan: goto gen_obj; case THUNK: + { + StgPtr end; + scavenge_thunk_srt(info); - // fall through + end = (P_)((StgThunk *)p)->payload + info->layout.payload.ptrs; + for (p = (P_)((StgThunk *)p)->payload; p < end; p++) { + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); + } + break; + } gen_obj: case CONSTR: @@ -2923,17 +3004,17 @@ linear_scan: end = (P_)((StgClosure *)p)->payload + info->layout.payload.ptrs; for (p = (P_)((StgClosure *)p)->payload; p < end; p++) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); } break; } case BCO: { StgBCO *bco = (StgBCO *)p; - (StgClosure *)bco->instrs = evacuate((StgClosure *)bco->instrs); - (StgClosure *)bco->literals = evacuate((StgClosure *)bco->literals); - (StgClosure *)bco->ptrs = evacuate((StgClosure *)bco->ptrs); - (StgClosure *)bco->itbls = evacuate((StgClosure *)bco->itbls); + bco->instrs = (StgArrWords *)evacuate((StgClosure *)bco->instrs); + bco->literals = (StgArrWords *)evacuate((StgClosure *)bco->literals); + bco->ptrs = (StgMutArrPtrs *)evacuate((StgClosure *)bco->ptrs); + bco->itbls = (StgArrWords *)evacuate((StgClosure *)bco->itbls); break; } @@ -2945,24 +3026,15 @@ linear_scan: case IND_OLDGEN: case IND_OLDGEN_PERM: - ((StgIndOldGen *)p)->indirectee = - evacuate(((StgIndOldGen *)p)->indirectee); - if (failed_to_evac) { - recordOldToNewPtrs((StgMutClosure *)p); - } - failed_to_evac = rtsFalse; + ((StgInd *)p)->indirectee = + evacuate(((StgInd *)p)->indirectee); break; case MUT_VAR: evac_gen = 0; ((StgMutVar *)p)->var = evacuate(((StgMutVar *)p)->var); evac_gen = saved_evac_gen; - failed_to_evac = rtsFalse; - break; - - case MUT_CONS: - // ignore these - failed_to_evac = rtsFalse; + failed_to_evac = rtsTrue; break; case CAF_BLACKHOLE: @@ -2972,15 +3044,6 @@ linear_scan: case ARR_WORDS: break; - case BLACKHOLE_BQ: - { - StgBlockingQueue *bh = (StgBlockingQueue *)p; - (StgClosure *)bh->blocking_queue = - evacuate((StgClosure *)bh->blocking_queue); - failed_to_evac = rtsFalse; - break; - } - case THUNK_SELECTOR: { StgSelector *s = (StgSelector *)p; @@ -2999,9 +3062,12 @@ linear_scan: } case PAP: - case AP: scavenge_PAP((StgPAP *)p); break; + + case AP: + scavenge_AP((StgAP *)p); + break; case MUT_ARR_PTRS: // follow everything @@ -3011,26 +3077,22 @@ linear_scan: 
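 // [editor's note] evac_gen = 0 drops the promotion requirement for the
 // array's elements: a mutable array can be overwritten with pointers to
 // younger objects at any time, so rather than promote its current
 // contents the code marks the array itself as still-mutable
 // (failed_to_evac = rtsTrue below) and leaves it on the mutable list.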
evac_gen = 0; // repeatedly mutable
next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) {
- (StgClosure *)*p = evacuate((StgClosure *)*p);
+ *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p);
}
evac_gen = saved_evac_gen;
- failed_to_evac = rtsFalse; // mutable anyhow.
+ failed_to_evac = rtsTrue; // mutable anyhow.
break;
}

case MUT_ARR_PTRS_FROZEN:
+ case MUT_ARR_PTRS_FROZEN0:
// follow everything
{
StgPtr next;

- // Set the mut_link field to NULL, so that we will put this
- // array on the mutable list if it is subsequently thawed
- // by unsafeThaw#.
- ((StgMutArrPtrs*)p)->mut_link = NULL;
-
next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) {
- (StgClosure *)*p = evacuate((StgClosure *)*p);
+ *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p);
}
break;
}

case TSO:
{
StgTSO *tso = (StgTSO *)p;
@@ -3041,12 +3103,12 @@ linear_scan:
evac_gen = 0;
scavengeTSO(tso);
evac_gen = saved_evac_gen;
- failed_to_evac = rtsFalse;
+ failed_to_evac = rtsTrue;
break;
}

#if defined(PAR)
- case RBH: // cf. BLACKHOLE_BQ
+ case RBH:
{
#if 0
nat size, ptrs, nonptrs, vhs;
char str[80];
@@ -3054,10 +3116,9 @@ linear_scan:
StgInfoTable *rip = get_closure_info(p, &size, &ptrs, &nonptrs, &vhs, str);
#endif
StgRBH *rbh = (StgRBH *)p;
- (StgClosure *)rbh->blocking_queue =
- evacuate((StgClosure *)rbh->blocking_queue);
- recordMutable((StgMutClosure *)rbh);
- failed_to_evac = rtsFalse; // mutable anyhow.
+ rbh->blocking_queue =
+ (StgTSO *)evacuate((StgClosure *)rbh->blocking_queue);
+ failed_to_evac = rtsTrue; // mutable anyhow.
IF_DEBUG(gc,
debugBelch("@@ scavenge: RBH %p (%s) (new blocking_queue link=%p)",
p, info_type(p), (StgClosure *)rbh->blocking_queue));
@@ -3073,10 +3134,6 @@ linear_scan:
// follow the link to the rest of the blocking queue
(StgClosure *)bf->link =
evacuate((StgClosure *)bf->link);
- if (failed_to_evac) {
- failed_to_evac = rtsFalse;
- recordMutable((StgMutClosure *)bf);
- }
IF_DEBUG(gc,
debugBelch("@@ scavenge: %p (%s); node is now %p; exciting, isn't it",
bf, info_type((StgClosure *)bf),
bf->node, info_type(bf->node)));
@@ -3090,21 +3147,68 @@ linear_scan:
case FETCH_ME:
break; // nothing to do in this case

- case FETCH_ME_BQ: // cf. 
BLACKHOLE_BQ + case FETCH_ME_BQ: { StgFetchMeBlockingQueue *fmbq = (StgFetchMeBlockingQueue *)p; (StgClosure *)fmbq->blocking_queue = evacuate((StgClosure *)fmbq->blocking_queue); - if (failed_to_evac) { - failed_to_evac = rtsFalse; - recordMutable((StgMutClosure *)fmbq); - } IF_DEBUG(gc, debugBelch("@@ scavenge: %p (%s) exciting, isn't it", p, info_type((StgClosure *)p))); break; } -#endif // PAR +#endif /* PAR */ + + case TVAR_WAIT_QUEUE: + { + StgTVarWaitQueue *wq = ((StgTVarWaitQueue *) p); + evac_gen = 0; + wq->waiting_tso = (StgTSO *)evacuate((StgClosure*)wq->waiting_tso); + wq->next_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)wq->next_queue_entry); + wq->prev_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)wq->prev_queue_entry); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + break; + } + + case TVAR: + { + StgTVar *tvar = ((StgTVar *) p); + evac_gen = 0; + tvar->current_value = evacuate((StgClosure*)tvar->current_value); + tvar->first_wait_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)tvar->first_wait_queue_entry); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + break; + } + + case TREC_CHUNK: + { + StgWord i; + StgTRecChunk *tc = ((StgTRecChunk *) p); + TRecEntry *e = &(tc -> entries[0]); + evac_gen = 0; + tc->prev_chunk = (StgTRecChunk *)evacuate((StgClosure*)tc->prev_chunk); + for (i = 0; i < tc -> next_entry_idx; i ++, e++ ) { + e->tvar = (StgTVar *)evacuate((StgClosure*)e->tvar); + e->expected_value = evacuate((StgClosure*)e->expected_value); + e->new_value = evacuate((StgClosure*)e->new_value); + } + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + break; + } + + case TREC_HEADER: + { + StgTRecHeader *trec = ((StgTRecHeader *) p); + evac_gen = 0; + trec->enclosing_trec = (StgTRecHeader *)evacuate((StgClosure*)trec->enclosing_trec); + trec->current_chunk = (StgTRecChunk *)evacuate((StgClosure*)trec->current_chunk); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + break; + } default: barf("scavenge_mark_stack: unimplemented/strange closure type %d @ %p", @@ -3113,7 +3217,7 @@ linear_scan: if (failed_to_evac) { failed_to_evac = rtsFalse; - mkMutCons((StgClosure *)q, &generations[evac_gen]); + recordMutableGen((StgClosure *)q, &generations[evac_gen]); } // mark the next bit to indicate "scavenged" @@ -3180,18 +3284,40 @@ scavenge_one(StgPtr p) switch (info->type) { - case FUN: - case FUN_1_0: // hardly worth specialising these guys - case FUN_0_1: - case FUN_1_1: - case FUN_0_2: - case FUN_2_0: + case MVAR: + { + StgMVar *mvar = ((StgMVar *)p); + evac_gen = 0; + mvar->head = (StgTSO *)evacuate((StgClosure *)mvar->head); + mvar->tail = (StgTSO *)evacuate((StgClosure *)mvar->tail); + mvar->value = evacuate((StgClosure *)mvar->value); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable. 
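+ // [editor's note] scavenge_one()'s rtsBool result is the saved
+ // failed_to_evac flag; scavenge_mutable_list() below calls
+ // recordMutableGen() for every entry where scavenge_one() returns
+ // rtsTrue, which is what keeps objects like this MVAR on the mutable
+ // list from one collection to the next.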
+ break;
+ }
+
case THUNK:
case THUNK_1_0:
case THUNK_0_1:
case THUNK_1_1:
case THUNK_0_2:
case THUNK_2_0:
+ {
+ StgPtr q, end;
+
+ end = (StgPtr)((StgThunk *)p)->payload + info->layout.payload.ptrs;
+ for (q = (StgPtr)((StgThunk *)p)->payload; q < end; q++) {
+ *q = (StgWord)(StgPtr)evacuate((StgClosure *)*q);
+ }
+ break;
+ }
+
+ case FUN:
+ case FUN_1_0: // hardly worth specialising these guys
+ case FUN_0_1:
+ case FUN_1_1:
+ case FUN_0_2:
+ case FUN_2_0:
case CONSTR:
case CONSTR_1_0:
case CONSTR_0_1:
@@ -3201,17 +3327,23 @@ scavenge_one(StgPtr p)
case WEAK:
case FOREIGN:
case IND_PERM:
- case IND_OLDGEN_PERM:
{
StgPtr q, end;

end = (StgPtr)((StgClosure *)p)->payload + info->layout.payload.ptrs;
for (q = (StgPtr)((StgClosure *)p)->payload; q < end; q++) {
- (StgClosure *)*q = evacuate((StgClosure *)*q);
+ *q = (StgWord)(StgPtr)evacuate((StgClosure *)*q);
}
break;
}

+ case MUT_VAR:
+ evac_gen = 0;
+ ((StgMutVar *)p)->var = evacuate(((StgMutVar *)p)->var);
+ evac_gen = saved_evac_gen;
+ failed_to_evac = rtsTrue; // mutable anyhow
+ break;
+
case CAF_BLACKHOLE:
case SE_CAF_BLACKHOLE:
case SE_BLACKHOLE:
@@ -3225,39 +3357,52 @@ scavenge_one(StgPtr p)
break;
}

- case ARR_WORDS:
- // nothing to follow
+ case AP_STACK:
+ {
+ StgAP_STACK *ap = (StgAP_STACK *)p;
+
+ ap->fun = evacuate(ap->fun);
+ scavenge_stack((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
+ p = (StgPtr)ap->payload + ap->size;
break;
+ }

- case MUT_ARR_PTRS:
- {
- // follow everything
- StgPtr next;
-
- evac_gen = 0; // repeatedly mutable
- recordMutable((StgMutClosure *)p);
+ case PAP:
+ p = scavenge_PAP((StgPAP *)p);
+ break;
+
+ case AP:
+ p = scavenge_AP((StgAP *)p);
+ break;
+
+ case ARR_WORDS:
+ // nothing to follow
+ break;
+
+ case MUT_ARR_PTRS:
+ {
+ // follow everything
+ StgPtr next;
+
+ evac_gen = 0; // repeatedly mutable
next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) {
- (StgClosure *)*p = evacuate((StgClosure *)*p);
+ *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p);
}
evac_gen = saved_evac_gen;
- failed_to_evac = rtsFalse;
+ failed_to_evac = rtsTrue;
break;
}

case MUT_ARR_PTRS_FROZEN:
+ case MUT_ARR_PTRS_FROZEN0:
{
// follow everything
StgPtr next;

- // Set the mut_link field to NULL, so that we will put this
- // array on the mutable list if it is subsequently thawed
- // by unsafeThaw#.
- ((StgMutArrPtrs*)p)->mut_link = NULL;
-
next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) {
- (StgClosure *)*p = evacuate((StgClosure *)*p);
+ *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p);
}
break;
}
@@ -3268,83 +3413,133 @@ scavenge_one(StgPtr p)
case TSO:
{
StgTSO *tso = (StgTSO *)p;

evac_gen = 0; // repeatedly mutable
scavengeTSO(tso);
- recordMutable((StgMutClosure *)tso);
evac_gen = saved_evac_gen;
- failed_to_evac = rtsFalse;
+ failed_to_evac = rtsTrue;
break;
}

- case AP_STACK:
- {
- StgAP_STACK *ap = (StgAP_STACK *)p;
-
- ap->fun = evacuate(ap->fun);
- scavenge_stack((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
- p = (StgPtr)ap->payload + ap->size;
+#if defined(PAR)
+ case RBH:
+ {
+#if 0
+ nat size, ptrs, nonptrs, vhs;
+ char str[80];
+ StgInfoTable *rip = get_closure_info(p, &size, &ptrs, &nonptrs, &vhs, str);
+#endif
+ StgRBH *rbh = (StgRBH *)p;
+ (StgClosure *)rbh->blocking_queue =
+ evacuate((StgClosure *)rbh->blocking_queue);
+ failed_to_evac = rtsTrue; // mutable anyhow. 
+ IF_DEBUG(gc, + debugBelch("@@ scavenge: RBH %p (%s) (new blocking_queue link=%p)", + p, info_type(p), (StgClosure *)rbh->blocking_queue)); + // ToDo: use size of reverted closure here! break; } - case PAP: - case AP: - p = scavenge_PAP((StgPAP *)p); - break; - - case IND_OLDGEN: - // This might happen if for instance a MUT_CONS was pointing to a - // THUNK which has since been updated. The IND_OLDGEN will - // be on the mutable list anyway, so we don't need to do anything - // here. + case BLOCKED_FETCH: + { + StgBlockedFetch *bf = (StgBlockedFetch *)p; + // follow the pointer to the node which is being demanded + (StgClosure *)bf->node = + evacuate((StgClosure *)bf->node); + // follow the link to the rest of the blocking queue + (StgClosure *)bf->link = + evacuate((StgClosure *)bf->link); + IF_DEBUG(gc, + debugBelch("@@ scavenge: %p (%s); node is now %p; exciting, isn't it", + bf, info_type((StgClosure *)bf), + bf->node, info_type(bf->node))); break; + } - default: - barf("scavenge_one: strange object %d", (int)(info->type)); - } - - no_luck = failed_to_evac; - failed_to_evac = rtsFalse; - return (no_luck); -} - -/* ----------------------------------------------------------------------------- - Scavenging mutable lists. +#ifdef DIST + case REMOTE_REF: +#endif + case FETCH_ME: + break; // nothing to do in this case - We treat the mutable list of each generation > N (i.e. all the - generations older than the one being collected) as roots. We also - remove non-mutable objects from the mutable list at this point. - -------------------------------------------------------------------------- */ + case FETCH_ME_BQ: + { + StgFetchMeBlockingQueue *fmbq = (StgFetchMeBlockingQueue *)p; + (StgClosure *)fmbq->blocking_queue = + evacuate((StgClosure *)fmbq->blocking_queue); + IF_DEBUG(gc, + debugBelch("@@ scavenge: %p (%s) exciting, isn't it", + p, info_type((StgClosure *)p))); + break; + } +#endif -static void -scavenge_mut_once_list(generation *gen) -{ - const StgInfoTable *info; - StgMutClosure *p, *next, *new_list; + case TVAR_WAIT_QUEUE: + { + StgTVarWaitQueue *wq = ((StgTVarWaitQueue *) p); + evac_gen = 0; + wq->waiting_tso = (StgTSO *)evacuate((StgClosure*)wq->waiting_tso); + wq->next_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)wq->next_queue_entry); + wq->prev_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)wq->prev_queue_entry); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + break; + } - p = gen->mut_once_list; - new_list = END_MUT_LIST; - next = p->mut_link; + case TVAR: + { + StgTVar *tvar = ((StgTVar *) p); + evac_gen = 0; + tvar->current_value = evacuate((StgClosure*)tvar->current_value); + tvar->first_wait_queue_entry = (StgTVarWaitQueue *)evacuate((StgClosure*)tvar->first_wait_queue_entry); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + break; + } - evac_gen = gen->no; - failed_to_evac = rtsFalse; + case TREC_HEADER: + { + StgTRecHeader *trec = ((StgTRecHeader *) p); + evac_gen = 0; + trec->enclosing_trec = (StgTRecHeader *)evacuate((StgClosure*)trec->enclosing_trec); + trec->current_chunk = (StgTRecChunk *)evacuate((StgClosure*)trec->current_chunk); + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + break; + } - for (; p != END_MUT_LIST; p = next, next = p->mut_link) { + case TREC_CHUNK: + { + StgWord i; + StgTRecChunk *tc = ((StgTRecChunk *) p); + TRecEntry *e = &(tc -> entries[0]); + evac_gen = 0; + tc->prev_chunk = (StgTRecChunk *)evacuate((StgClosure*)tc->prev_chunk); + for (i = 0; i < tc -> 
next_entry_idx; i ++, e++ ) { + e->tvar = (StgTVar *)evacuate((StgClosure*)e->tvar); + e->expected_value = evacuate((StgClosure*)e->expected_value); + e->new_value = evacuate((StgClosure*)e->new_value); + } + evac_gen = saved_evac_gen; + failed_to_evac = rtsTrue; // mutable + break; + } - ASSERT(LOOKS_LIKE_CLOSURE_PTR(p)); - info = get_itbl(p); - /* - if (info->type==RBH) - info = REVERT_INFOPTR(info); // if it's an RBH, look at the orig closure - */ - switch(info->type) { - case IND_OLDGEN: case IND_OLDGEN_PERM: case IND_STATIC: - /* Try to pull the indirectee into this generation, so we can - * remove the indirection from the mutable list. - */ - ((StgIndOldGen *)p)->indirectee = - evacuate(((StgIndOldGen *)p)->indirectee); - + { + /* Careful here: a THUNK can be on the mutable list because + * it contains pointers to young gen objects. If such a thunk + * is updated, the IND_OLDGEN will be added to the mutable + * list again, and we'll scavenge it twice. evacuate() + * doesn't check whether the object has already been + * evacuated, so we perform that check here. + */ + StgClosure *q = ((StgInd *)p)->indirectee; + if (HEAP_ALLOCED(q) && Bdescr((StgPtr)q)->flags & BF_EVACUATED) { + break; + } + ((StgInd *)p)->indirectee = evacuate(q); + } + #if 0 && defined(DEBUG) if (RtsFlags.DebugFlags.gc) /* Debugging code to print out the size of the thing we just @@ -3370,237 +3565,50 @@ scavenge_mut_once_list(generation *gen) debugBelch("evac IND_OLDGEN: %ld bytes", size * sizeof(W_)); } #endif - - /* failed_to_evac might happen if we've got more than two - * generations, we're collecting only generation 0, the - * indirection resides in generation 2 and the indirectee is - * in generation 1. - */ - if (failed_to_evac) { - failed_to_evac = rtsFalse; - p->mut_link = new_list; - new_list = p; - } else { - /* the mut_link field of an IND_STATIC is overloaded as the - * static link field too (it just so happens that we don't need - * both at the same time), so we need to NULL it out when - * removing this object from the mutable list because the static - * link fields are all assumed to be NULL before doing a major - * collection. - */ - p->mut_link = NULL; - } - continue; - - case MUT_CONS: - /* MUT_CONS is a kind of MUT_VAR, except it that we try to remove - * it from the mutable list if possible by promoting whatever it - * points to. - */ - if (scavenge_one((StgPtr)((StgMutVar *)p)->var)) { - /* didn't manage to promote everything, so put the - * MUT_CONS back on the list. - */ - p->mut_link = new_list; - new_list = p; - } - continue; + break; default: - // shouldn't have anything else on the mutables list - barf("scavenge_mut_once_list: strange object? %d", (int)(info->type)); - } - } + barf("scavenge_one: strange object %d", (int)(info->type)); + } - gen->mut_once_list = new_list; + no_luck = failed_to_evac; + failed_to_evac = rtsFalse; + return (no_luck); } +/* ----------------------------------------------------------------------------- + Scavenging mutable lists. + + We treat the mutable list of each generation > N (i.e. all the + generations older than the one being collected) as roots. We also + remove non-mutable objects from the mutable list at this point. 
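
 [editor's note] With this patch the mutable list is a chain of blocks
 of StgPtr entries (allocated with allocBlock() and recycled with
 freeChain() in GarbageCollect() above) rather than a list threaded
 through the objects' mut_link fields. Each entry is scavenged with
 scavenge_one(), and an object is re-recorded via recordMutableGen()
 only if it still points into a younger generation.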
+ -------------------------------------------------------------------------- */ static void scavenge_mutable_list(generation *gen) { - const StgInfoTable *info; - StgMutClosure *p, *next; - - p = gen->saved_mut_list; - next = p->mut_link; - - evac_gen = 0; - failed_to_evac = rtsFalse; - - for (; p != END_MUT_LIST; p = next, next = p->mut_link) { - - ASSERT(LOOKS_LIKE_CLOSURE_PTR(p)); - info = get_itbl(p); - /* - if (info->type==RBH) - info = REVERT_INFOPTR(info); // if it's an RBH, look at the orig closure - */ - switch(info->type) { - - case MUT_ARR_PTRS: - // follow everything - p->mut_link = gen->mut_list; - gen->mut_list = p; - { - StgPtr end, q; - - end = (P_)p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p); - for (q = (P_)((StgMutArrPtrs *)p)->payload; q < end; q++) { - (StgClosure *)*q = evacuate((StgClosure *)*q); - } - continue; - } - - // Happens if a MUT_ARR_PTRS in the old generation is frozen - case MUT_ARR_PTRS_FROZEN: - { - StgPtr end, q; - - evac_gen = gen->no; - end = (P_)p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p); - for (q = (P_)((StgMutArrPtrs *)p)->payload; q < end; q++) { - (StgClosure *)*q = evacuate((StgClosure *)*q); - } - evac_gen = 0; - // Set the mut_link field to NULL, so that we will put this - // array back on the mutable list if it is subsequently thawed - // by unsafeThaw#. - p->mut_link = NULL; - if (failed_to_evac) { - failed_to_evac = rtsFalse; - mkMutCons((StgClosure *)p, gen); - } - continue; - } - - case MUT_VAR: - ((StgMutVar *)p)->var = evacuate(((StgMutVar *)p)->var); - p->mut_link = gen->mut_list; - gen->mut_list = p; - continue; - - case MVAR: - { - StgMVar *mvar = (StgMVar *)p; - (StgClosure *)mvar->head = evacuate((StgClosure *)mvar->head); - (StgClosure *)mvar->tail = evacuate((StgClosure *)mvar->tail); - (StgClosure *)mvar->value = evacuate((StgClosure *)mvar->value); - p->mut_link = gen->mut_list; - gen->mut_list = p; - continue; - } - - case TSO: - { - StgTSO *tso = (StgTSO *)p; - - scavengeTSO(tso); - - /* Don't take this TSO off the mutable list - it might still - * point to some younger objects (because we set evac_gen to 0 - * above). - */ - tso->mut_link = gen->mut_list; - gen->mut_list = (StgMutClosure *)tso; - continue; - } - - case BLACKHOLE_BQ: - { - StgBlockingQueue *bh = (StgBlockingQueue *)p; - (StgClosure *)bh->blocking_queue = - evacuate((StgClosure *)bh->blocking_queue); - p->mut_link = gen->mut_list; - gen->mut_list = p; - continue; - } - - /* Happens if a BLACKHOLE_BQ in the old generation is updated: - */ - case IND_OLDGEN: - case IND_OLDGEN_PERM: - /* Try to pull the indirectee into this generation, so we can - * remove the indirection from the mutable list. - */ - evac_gen = gen->no; - ((StgIndOldGen *)p)->indirectee = - evacuate(((StgIndOldGen *)p)->indirectee); - evac_gen = 0; - - if (failed_to_evac) { - failed_to_evac = rtsFalse; - p->mut_link = gen->mut_once_list; - gen->mut_once_list = p; - } else { - p->mut_link = NULL; - } - continue; - -#if defined(PAR) - // HWL: check whether all of these are necessary - - case RBH: // cf. BLACKHOLE_BQ - { - // nat size, ptrs, nonptrs, vhs; - // char str[80]; - // StgInfoTable *rip = get_closure_info(p, &size, &ptrs, &nonptrs, &vhs, str); - StgRBH *rbh = (StgRBH *)p; - (StgClosure *)rbh->blocking_queue = - evacuate((StgClosure *)rbh->blocking_queue); - if (failed_to_evac) { - failed_to_evac = rtsFalse; - recordMutable((StgMutClosure *)rbh); - } - // ToDo: use size of reverted closure here! 
- p += BLACKHOLE_sizeW(); - break; - } - - case BLOCKED_FETCH: - { - StgBlockedFetch *bf = (StgBlockedFetch *)p; - // follow the pointer to the node which is being demanded - (StgClosure *)bf->node = - evacuate((StgClosure *)bf->node); - // follow the link to the rest of the blocking queue - (StgClosure *)bf->link = - evacuate((StgClosure *)bf->link); - if (failed_to_evac) { - failed_to_evac = rtsFalse; - recordMutable((StgMutClosure *)bf); - } - p += sizeofW(StgBlockedFetch); - break; - } + bdescr *bd; + StgPtr p, q; -#ifdef DIST - case REMOTE_REF: - barf("scavenge_mutable_list: REMOTE_REF %d", (int)(info->type)); -#endif - case FETCH_ME: - p += sizeofW(StgFetchMe); - break; // nothing to do in this case + bd = gen->saved_mut_list; - case FETCH_ME_BQ: // cf. BLACKHOLE_BQ - { - StgFetchMeBlockingQueue *fmbq = (StgFetchMeBlockingQueue *)p; - (StgClosure *)fmbq->blocking_queue = - evacuate((StgClosure *)fmbq->blocking_queue); - if (failed_to_evac) { - failed_to_evac = rtsFalse; - recordMutable((StgMutClosure *)fmbq); + evac_gen = gen->no; + for (; bd != NULL; bd = bd->link) { + for (q = bd->start; q < bd->free; q++) { + p = (StgPtr)*q; + ASSERT(LOOKS_LIKE_CLOSURE_PTR(p)); + if (scavenge_one(p)) { + /* didn't manage to promote everything, so put the + * object back on the list. + */ + recordMutableGen((StgClosure *)p,gen); + } } - p += sizeofW(StgFetchMeBlockingQueue); - break; - } -#endif - - default: - // shouldn't have anything else on the mutables list - barf("scavenge_mutable_list: strange object? %d", (int)(info->type)); } - } + + // free the old mut_list + freeChain(gen->saved_mut_list); + gen->saved_mut_list = NULL; } @@ -3629,8 +3637,8 @@ scavenge_static(void) /* Take this object *off* the static_objects list, * and put it on the scavenged_static_objects list. */ - static_objects = STATIC_LINK(info,p); - STATIC_LINK(info,p) = scavenged_static_objects; + static_objects = *STATIC_LINK(info,p); + *STATIC_LINK(info,p) = scavenged_static_objects; scavenged_static_objects = p; switch (info -> type) { @@ -3641,15 +3649,13 @@ scavenge_static(void) ind->indirectee = evacuate(ind->indirectee); /* might fail to evacuate it, in which case we have to pop it - * back on the mutable list (and take it off the - * scavenged_static list because the static link and mut link - * pointers are one and the same). + * back on the mutable list of the oldest generation. We + * leave it *on* the scavenged_static_objects list, though, + * in case we visit this object again. 
*/ if (failed_to_evac) { failed_to_evac = rtsFalse; - scavenged_static_objects = IND_STATIC_LINK(p); - ((StgMutClosure *)ind)->mut_link = oldest_gen->mut_once_list; - oldest_gen->mut_once_list = (StgMutClosure *)ind; + recordMutableGen((StgClosure *)p,oldest_gen); } break; } @@ -3669,7 +3675,7 @@ scavenge_static(void) next = (P_)p->payload + info->layout.payload.ptrs; // evacuate the pointers for (q = (P_)p->payload; q < next; q++) { - (StgClosure *)*q = evacuate((StgClosure *)*q); + *q = (StgWord)(StgPtr)evacuate((StgClosure *)*q); } break; } @@ -3702,7 +3708,7 @@ scavenge_large_bitmap( StgPtr p, StgLargeBitmap *large_bitmap, nat size ) bitmap = large_bitmap->bitmap[b]; for (i = 0; i < size; ) { if ((bitmap & 1) == 0) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); } i++; p++; @@ -3720,7 +3726,7 @@ scavenge_small_bitmap (StgPtr p, nat size, StgWord bitmap) { while (size > 0) { if ((bitmap & 1) == 0) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); } p++; bitmap = bitmap >> 1; @@ -3762,6 +3768,9 @@ scavenge_stack(StgPtr p, StgPtr stack_end) continue; // small bitmap (< 32 entries, or 64 on a 64-bit machine) + case CATCH_STM_FRAME: + case CATCH_RETRY_FRAME: + case ATOMICALLY_FRAME: case STOP_FRAME: case CATCH_FRAME: case RET_SMALL: @@ -3774,7 +3783,7 @@ scavenge_stack(StgPtr p, StgPtr stack_end) p = scavenge_small_bitmap(p, size, bitmap); follow_srt: - scavenge_srt((StgClosure **)info->srt, info->i.srt_bitmap); + scavenge_srt((StgClosure **)GET_SRT(info), info->i.srt_bitmap); continue; case RET_BCO: { @@ -3782,7 +3791,7 @@ scavenge_stack(StgPtr p, StgPtr stack_end) nat size; p++; - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); bco = (StgBCO *)*p; p++; size = BCO_BITMAP_SIZE(bco); @@ -3797,9 +3806,9 @@ scavenge_stack(StgPtr p, StgPtr stack_end) { nat size; - size = info->i.layout.large_bitmap->size; + size = GET_LARGE_BITMAP(&info->i)->size; p++; - scavenge_large_bitmap(p, info->i.layout.large_bitmap, size); + scavenge_large_bitmap(p, GET_LARGE_BITMAP(&info->i), size); p += size; // and don't forget to follow the SRT goto follow_srt; @@ -3825,7 +3834,7 @@ scavenge_stack(StgPtr p, StgPtr stack_end) // follow the ptr words for (size = RET_DYN_PTRS(dyn); size > 0; size--) { - (StgClosure *)*p = evacuate((StgClosure *)*p); + *p = (StgWord)(StgPtr)evacuate((StgClosure *)*p); p++; } continue; @@ -3880,7 +3889,7 @@ scavenge_large(step *stp) p = bd->start; if (scavenge_one(p)) { - mkMutCons((StgClosure *)p, stp->gen); + recordMutableGen((StgClosure *)p, stp->gen); } } } @@ -3898,28 +3907,8 @@ zero_static_object_list(StgClosure* first_static) for (p = first_static; p != END_OF_STATIC_LIST; p = link) { info = get_itbl(p); - link = STATIC_LINK(info, p); - STATIC_LINK(info,p) = NULL; - } -} - -/* This function is only needed because we share the mutable link - * field with the static link field in an IND_STATIC, so we have to - * zero the mut_link field before doing a major GC, which needs the - * static link field. - * - * It doesn't do any harm to zero all the mutable link fields on the - * mutable list. 
- */ - -static void -zero_mutable_list( StgMutClosure *first ) -{ - StgMutClosure *next, *c; - - for (c = first; c != END_MUT_LIST; c = next) { - next = c->mut_link; - c->mut_link = NULL; + link = *STATIC_LINK(info, p); + *STATIC_LINK(info,p) = NULL; } } @@ -3932,14 +3921,14 @@ revertCAFs( void ) { StgIndStatic *c; - for (c = (StgIndStatic *)caf_list; c != NULL; + for (c = (StgIndStatic *)revertible_caf_list; c != NULL; c = (StgIndStatic *)c->static_link) { SET_INFO(c, c->saved_info); c->saved_info = NULL; // could, but not necessary: c->static_link = NULL; } - caf_list = NULL; + revertible_caf_list = NULL; } void @@ -3952,6 +3941,11 @@ markCAFs( evac_fn evac ) { evac(&c->indirectee); } + for (c = (StgIndStatic *)revertible_caf_list; c != NULL; + c = (StgIndStatic *)c->static_link) + { + evac(&c->indirectee); + } } /* ----------------------------------------------------------------------------- @@ -4019,7 +4013,7 @@ threadLazyBlackHole(StgTSO *tso) { StgClosure *frame; StgRetInfoTable *info; - StgBlockingQueue *bh; + StgClosure *bh; StgPtr stack_end; stack_end = &tso->stack[tso->stack_size]; @@ -4032,7 +4026,7 @@ threadLazyBlackHole(StgTSO *tso) switch (info->i.type) { case UPDATE_FRAME: - bh = (StgBlockingQueue *)((StgUpdateFrame *)frame)->updatee; + bh = ((StgUpdateFrame *)frame)->updatee; /* if the thunk is already blackholed, it means we've also * already blackholed the rest of the thunks on this stack, @@ -4045,10 +4039,9 @@ threadLazyBlackHole(StgTSO *tso) return; } - if (bh->header.info != &stg_BLACKHOLE_BQ_info && - bh->header.info != &stg_CAF_BLACKHOLE_info) { + if (bh->header.info != &stg_CAF_BLACKHOLE_info) { #if (!defined(LAZY_BLACKHOLING)) && defined(DEBUG) - debugBelch("Unexpected lazy BHing required at 0x%04x",(int)bh); + debugBelch("Unexpected lazy BHing required at 0x%04x\n",(int)bh); #endif #ifdef PROFILING // @LDV profiling @@ -4069,7 +4062,7 @@ threadLazyBlackHole(StgTSO *tso) // normal stack frames; do nothing except advance the pointer default: - (StgPtr)frame += stack_frame_sizeW(frame); + frame = (StgClosure *)((StgPtr)frame + stack_frame_sizeW(frame)); } } } @@ -4150,7 +4143,6 @@ threadSqueezeStack(StgTSO *tso) * screw us up if we don't check. */ if (upd->updatee != updatee && !closure_IND(upd->updatee)) { - // this wakes the threads up UPD_IND_NOLOCK(upd->updatee, updatee); } @@ -4168,11 +4160,10 @@ threadSqueezeStack(StgTSO *tso) // single update frame, or the topmost update frame in a series else { - StgBlockingQueue *bh = (StgBlockingQueue *)upd->updatee; + StgClosure *bh = upd->updatee; // Do lazy black-holing if (bh->header.info != &stg_BLACKHOLE_info && - bh->header.info != &stg_BLACKHOLE_BQ_info && bh->header.info != &stg_CAF_BLACKHOLE_info) { #if (!defined(LAZY_BLACKHOLING)) && defined(DEBUG) debugBelch("Unexpected lazy BHing required at 0x%04x",(int)bh); @@ -4191,8 +4182,8 @@ threadSqueezeStack(StgTSO *tso) * same size as a BLACKHOLE in any case. 
*/ if (bh_info->type != THUNK_SELECTOR) { - for (i = np; i < np + nw; i++) { - ((StgClosure *)bh)->payload[i] = 0; + for (i = 0; i < np + nw; i++) { + ((StgClosure *)bh)->payload[i] = INVALID_OBJECT; } } } @@ -4273,7 +4264,7 @@ done_traversing: next_gap_start = (void *)((unsigned char*)gap + sizeof(StgUpdateFrame)); chunk_size = (unsigned char*)gap_end - (unsigned char*)next_gap_start; - (unsigned char*)sp -= chunk_size; + sp -= chunk_size; memmove(sp, next_gap_start, chunk_size); } @@ -4303,35 +4294,19 @@ threadPaused(StgTSO *tso) #if DEBUG void -printMutOnceList(generation *gen) -{ - StgMutClosure *p, *next; - - p = gen->mut_once_list; - next = p->mut_link; - - debugBelch("@@ Mut once list %p: ", gen->mut_once_list); - for (; p != END_MUT_LIST; p = next, next = p->mut_link) { - debugBelch("%p (%s), ", - p, info_type((StgClosure *)p)); - } - debugBelch("\n"); -} - -void printMutableList(generation *gen) { - StgMutClosure *p, *next; + bdescr *bd; + StgPtr p; - p = gen->mut_list; - next = p->mut_link; + debugBelch("@@ Mutable list %p: ", gen->mut_list); - debugBelch("@@ Mutable list %p: ", gen->mut_list); - for (; p != END_MUT_LIST; p = next, next = p->mut_link) { - debugBelch("%p (%s), ", - p, info_type((StgClosure *)p)); - } - debugBelch("\n"); + for (bd = gen->mut_list; bd != NULL; bd = bd->link) { + for (p = bd->start; p < bd->free; p++) { + debugBelch("%p (%s), ", (void *)*p, info_type((StgClosure *)*p)); + } + } + debugBelch("\n"); } STATIC_INLINE rtsBool @@ -4343,9 +4318,10 @@ maybeLarge(StgClosure *closure) see scavenge_large */ return (info->type == MUT_ARR_PTRS || info->type == MUT_ARR_PTRS_FROZEN || + info->type == MUT_ARR_PTRS_FROZEN0 || info->type == TSO || info->type == ARR_WORDS); } -#endif // DEBUG +#endif /* DEBUG */
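
/* ---------------------------------------------------------------------------
 * [Editor's note -- not part of the patch above.]
 *
 * The patch relies on an invariant established in GarbageCollect(): every
 * generation's mut_list always owns at least one block, so the recording
 * path needs no NULL check. recordMutableGen() itself lives in the
 * storage-manager headers rather than in GC.c, so the function below is
 * only a sketch of the shape such a recorder can take under that
 * invariant. The field and helper names (bdescr's start/free/link,
 * allocBlock(), BLOCK_SIZE_W) are the RTS's own, but the body is
 * illustrative, not a quote of the real definition.
 * ------------------------------------------------------------------------- */

#include "Rts.h"        /* StgClosure, StgWord, bdescr, generation */
#include "BlockAlloc.h" /* allocBlock() */

static void
recordMutableGen_sketch (StgClosure *p, generation *gen)
{
    bdescr *bd = gen->mut_list;          /* never NULL, by the invariant */

    if (bd->free >= bd->start + BLOCK_SIZE_W) {
        /* head block is full: chain a fresh block on the front */
        bdescr *new_bd = allocBlock();
        new_bd->link = bd;
        gen->mut_list = new_bd;
        bd = new_bd;
    }
    *bd->free++ = (StgWord)p;            /* append the closure's address */
}

/* Appending is O(1), and at the start of the next collection the whole
 * chain can be handed back with freeChain() and restarted from a single
 * fresh block, which is exactly what the GarbageCollect() changes above
 * do for each generation being collected. */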