diff --git a/ghc/rts/GC.c b/ghc/rts/GC.c
index 3ab057a..210f15c 100644
--- a/ghc/rts/GC.c
+++ b/ghc/rts/GC.c
@@ -1,5 +1,5 @@
 /* -----------------------------------------------------------------------------
- * $Id: GC.c,v 1.155 2003/05/14 09:13:59 simonmar Exp $
+ * $Id: GC.c,v 1.165 2004/05/07 21:19:21 panne Exp $
  *
  * (c) The GHC Team 1998-2003
  *
@@ -142,11 +142,13 @@ static void mark_root ( StgClosure **root );
 
 // Use a register argument for evacuate, if available.
 #if __GNUC__ >= 2
-static StgClosure * evacuate (StgClosure *q) __attribute__((regparm(1)));
+#define REGPARM1 __attribute__((regparm(1)))
 #else
-static StgClosure * evacuate (StgClosure *q);
+#define REGPARM1
 #endif
 
+REGPARM1 static StgClosure * evacuate (StgClosure *q);
+
 static void zero_static_object_list ( StgClosure* first_static );
 static void zero_mutable_list ( StgMutClosure *first );
@@ -190,31 +192,31 @@ static rtsBool mark_stack_overflowed;
 static bdescr *oldgen_scan_bd;
 static StgPtr  oldgen_scan;
 
-static inline rtsBool
+STATIC_INLINE rtsBool
 mark_stack_empty(void)
 {
     return mark_sp == mark_stack;
 }
 
-static inline rtsBool
+STATIC_INLINE rtsBool
 mark_stack_full(void)
 {
     return mark_sp >= mark_splim;
 }
 
-static inline void
+STATIC_INLINE void
 reset_mark_stack(void)
 {
     mark_sp = mark_stack;
 }
 
-static inline void
+STATIC_INLINE void
 push_mark_stack(StgPtr p)
 {
     *mark_sp++ = p;
 }
 
-static inline StgPtr
+STATIC_INLINE StgPtr
 pop_mark_stack(void)
 {
     return *--mark_sp;
@@ -378,6 +380,7 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc )
   if (RtsFlags.GcFlags.generations == 1) {
     old_to_blocks = g0s0->to_blocks;
     g0s0->to_blocks = NULL;
+    g0s0->n_to_blocks = 0;
   }
 
   /* Keep a count of how many new blocks we allocated during this GC
@@ -421,7 +424,7 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc )
 
       // mark the large objects as not evacuated yet
       for (bd = stp->large_objects; bd; bd = bd->link) {
-        bd->flags = BF_LARGE;
+        bd->flags &= ~BF_EVACUATED;
       }
 
       // for a compacted step, we need to allocate the bitmap
@@ -751,6 +754,7 @@ GarbageCollect ( void (*get_roots)(evac_fn), rtsBool force_major_gc )
            // onto the front of the now-compacted existing blocks.
            for (bd = stp->to_blocks; bd != NULL; bd = bd->link) {
                bd->flags &= ~BF_EVACUATED;  // now from-space
+               bd->flags |= BF_COMPACTED;   // compacted next time
            }
            // tack the new blocks on the end of the existing blocks
            if (stp->blocks == NULL) {
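
The two flag changes above are easy to misread: the old code reset the descriptor with a plain assignment (bd->flags = BF_LARGE), which silently cleared every other flag bit in the word, while the new code clears and sets individual bits so the remaining flags survive. A minimal sketch of that discipline, using hypothetical bit values rather than the RTS's actual BF_* definitions:

    #include <assert.h>

    /* Hypothetical flag values for illustration only; the real BF_*
     * constants live in the RTS's block descriptor header. */
    #define BF_EVACUATED  1
    #define BF_LARGE      2
    #define BF_COMPACTED  8

    int main(void)
    {
        unsigned int flags = BF_LARGE | BF_EVACUATED;

        flags &= ~BF_EVACUATED;   /* clear one bit, keep the others */
        flags |=  BF_COMPACTED;   /* set one bit, keep the others   */

        /* BF_LARGE survived; "flags = BF_COMPACTED" would have lost it. */
        assert(flags == (BF_LARGE | BF_COMPACTED));
        return 0;
    }
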
@@ -1293,6 +1297,7 @@ traverse_weak_ptr_list(void)
 
       default:
           barf("traverse_weak_ptr_list");
+          return rtsTrue;
       }
 
   }
@@ -1373,7 +1378,7 @@ isAlive(StgClosure *p)
   }
 
   // check the mark bit for compacted steps
-  if (bd->step->is_compacted && is_marked((P_)p,bd)) {
+  if ((bd->flags & BF_COMPACTED) && is_marked((P_)p,bd)) {
      return p;
   }
 
@@ -1412,7 +1417,7 @@ mark_root(StgClosure **root)
     *root = evacuate(*root);
 }
 
-static __inline__ void
+STATIC_INLINE void
 upd_evacuee(StgClosure *p, StgClosure *dest)
 {
     // Source object must be in from-space:
@@ -1424,7 +1429,7 @@ upd_evacuee(StgClosure *p, StgClosure *dest)
 }
 
 
-static __inline__ StgClosure *
+STATIC_INLINE StgClosure *
 copy(StgClosure *src, nat size, step *stp)
 {
   P_ to, from, dest;
@@ -1530,7 +1535,7 @@ copyPart(StgClosure *src, nat size_to_reserve, nat size_to_copy, step *stp)
    -------------------------------------------------------------------------- */
 
 
-static inline void
+STATIC_INLINE void
 evacuate_large(StgPtr p)
 {
   bdescr *bd = Bdescr(p);
@@ -1656,7 +1661,7 @@ mkMutCons(StgClosure *ptr, generation *gen)
    extra reads/writes than we save.
    -------------------------------------------------------------------------- */
 
-static StgClosure *
+REGPARM1 static StgClosure *
 evacuate(StgClosure *q)
 {
   StgClosure *to;
@@ -1698,7 +1703,7 @@ loop:
       /* If the object is in a step that we're compacting, then we
        * need to use an alternative evacuate procedure.
        */
-      if (bd->step->is_compacted) {
+      if (bd->flags & BF_COMPACTED) {
          if (!is_marked((P_)q,bd)) {
              mark((P_)q,bd);
              if (mark_stack_full()) {
@@ -1930,8 +1935,18 @@ loop:
        * list it contains.
        */
       {
-       StgTSO *new_tso = (StgTSO *)copy((StgClosure *)tso,tso_sizeW(tso),stp);
+       StgTSO *new_tso;
+       StgPtr p, q;
+
+       new_tso = (StgTSO *)copyPart((StgClosure *)tso,
+                                    tso_sizeW(tso),
+                                    sizeofW(StgTSO), stp);
        move_TSO(tso, new_tso);
+       for (p = tso->sp, q = new_tso->sp;
+            p < tso->stack+tso->stack_size;) {
+           *q++ = *p++;
+       }
+
        return (StgClosure *)new_tso;
       }
     }
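
The TSO hunk above stops copying the whole thread object with copy() and instead uses copyPart() for the fixed-size header plus an explicit word-by-word copy of only the live stack, i.e. the words from tso->sp up to the top of the stack area; everything below sp is dead and is no longer copied. A simplified sketch of that live-region copy, with stand-in types rather than the real StgTSO (it assumes the destination's sp has already been relocated into the new object, as move_TSO does above):

    #include <stddef.h>

    typedef unsigned long W_;          /* stand-in for the RTS word type */

    typedef struct {
        W_     *sp;                    /* stack pointer; stack grows downward */
        size_t  stack_size;            /* stack area size, in words */
        W_      stack[];               /* live data is [sp, stack + stack_size) */
    } MiniTSO;

    /* Copy only the live words, mirroring the loop added above. */
    static void copy_live_stack(const MiniTSO *src, MiniTSO *dst)
    {
        const W_ *p;
        W_ *q;

        for (p = src->sp, q = dst->sp;
             p < src->stack + src->stack_size;) {
            *q++ = *p++;
        }
    }
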
@@ -2005,6 +2020,7 @@ eval_thunk_selector( nat field, StgSelector * p )
     StgInfoTable *info;
     const StgInfoTable *info_ptr;
     StgClosure *selectee;
+    bdescr *bd;
 
     selectee = p->selectee;
@@ -2031,17 +2047,32 @@ selector_loop:
     // eval_thunk_selector().  There are various ways this could
     // happen:
     //
-    // - following an IND_STATIC
+    // 1. following an IND_STATIC
     //
-    // - when the old generation is compacted, the mark phase updates
-    //   from-space pointers to be to-space pointers, and we can't
-    //   reliably tell which we're following (eg. from an IND_STATIC).
+    // 2. when the old generation is compacted, the mark phase updates
+    //    from-space pointers to be to-space pointers, and we can't
+    //    reliably tell which we're following (eg. from an IND_STATIC).
     //
-    // So we use the block-descriptor test to find out if we're in
-    // to-space.
+    // 3. compacting GC again: if we're looking at a constructor in
+    //    the compacted generation, it might point directly to objects
+    //    in to-space.  We must bale out here, otherwise doing the selection
+    //    will result in a to-space pointer being returned.
+    //
+    // (1) is dealt with using a BF_EVACUATED test on the
+    // selectee.  (2) and (3): we can tell if we're looking at an
+    // object in the compacted generation that might point to
+    // to-space objects by testing that (a) it is BF_COMPACTED, (b)
+    // the compacted generation is being collected, and (c) the
+    // object is marked.  Only a marked object may have pointers that
+    // point to to-space objects, because that happens when
+    // scavenging.
     //
+    bd = Bdescr((StgPtr)selectee);
     if (HEAP_ALLOCED(selectee) &&
-       Bdescr((StgPtr)selectee)->flags & BF_EVACUATED) {
+       ((bd->flags & BF_EVACUATED)
+        || ((bd->flags & BF_COMPACTED) &&
+            bd->gen_no <= N &&
+            is_marked((P_)selectee,bd)))) {
        goto bale_out;
     }
@@ -2120,6 +2151,7 @@ selector_loop:
       }
 
       case AP:
+      case AP_STACK:
       case THUNK:
       case THUNK_1_0:
       case THUNK_0_1:
@@ -2165,7 +2197,7 @@ move_TSO (StgTSO *src, StgTSO *dest)
 {
     ptrdiff_t diff;
 
-    // relocate the stack pointers...
+    // relocate the stack pointer...
     diff = (StgPtr)dest - (StgPtr)src; // In *words*
     dest->sp = (StgPtr)dest->sp + diff;
 }
@@ -2183,7 +2215,7 @@ scavenge_large_srt_bitmap( StgLargeSRT *large_srt )
     b = 0;
     bitmap = large_srt->l.bitmap[b];
     size   = (nat)large_srt->l.size;
-    p      = large_srt->srt;
+    p      = (StgClosure **)large_srt->srt;
     for (i = 0; i < size; ) {
        if ((bitmap & 1) != 0) {
            evacuate(*p);
@@ -2203,7 +2235,7 @@ scavenge_large_srt_bitmap( StgLargeSRT *large_srt )
  * srt field in the info table.  That's ok, because we'll
  * never dereference it.
  */
-static inline void
+STATIC_INLINE void
 scavenge_srt (StgClosure **srt, nat srt_bitmap)
 {
   nat bitmap;
@@ -2243,7 +2275,7 @@ scavenge_srt (StgClosure **srt, nat srt_bitmap)
 }
 
 
-static inline void
+STATIC_INLINE void
 scavenge_thunk_srt(const StgInfoTable *info)
 {
     StgThunkInfoTable *thunk_info;
@@ -2252,7 +2284,7 @@ scavenge_thunk_srt(const StgInfoTable *info)
     scavenge_srt((StgClosure **)thunk_info->srt, thunk_info->i.srt_bitmap);
 }
 
-static inline void
+STATIC_INLINE void
 scavenge_fun_srt(const StgInfoTable *info)
 {
     StgFunInfoTable *fun_info;
@@ -2261,7 +2293,7 @@ scavenge_fun_srt(const StgInfoTable *info)
     scavenge_srt((StgClosure **)fun_info->srt, fun_info->i.srt_bitmap);
 }
 
-static inline void
+STATIC_INLINE void
 scavenge_ret_srt(const StgInfoTable *info)
 {
     StgRetInfoTable *ret_info;
@@ -2303,7 +2335,7 @@ scavengeTSO (StgTSO *tso)
    in PAPs.
    -------------------------------------------------------------------------- */
 
-static inline StgPtr
+STATIC_INLINE StgPtr
 scavenge_arg_block (StgFunInfoTable *fun_info, StgClosure **args)
 {
     StgPtr p;
@@ -2338,7 +2370,7 @@ scavenge_arg_block (StgFunInfoTable *fun_info, StgClosure **args)
     return p;
 }
 
-static inline StgPtr
+STATIC_INLINE StgPtr
 scavenge_PAP (StgPAP *pap)
 {
     StgPtr p;
@@ -2654,6 +2686,11 @@ scavenge(step *stp)
     {
        StgPtr next;
 
+       // Set the mut_link field to NULL, so that we will put this
+       // array back on the mutable list if it is subsequently thawed
+       // by unsafeThaw#.
+       ((StgMutArrPtrs*)p)->mut_link = NULL;
+
       next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
       for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) {
           (StgClosure *)*p = evacuate((StgClosure *)*p);
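
The mut_link = NULL stores added in this and the following hunks all serve one invariant, spelled out in the new comments: the array just scavenged must look "not on the mutable list", so that a later unsafeThaw# knows it has to re-attach it. A sketch of that invariant with simplified types; the hypothetical thaw() below is illustration only, the real check lives in the RTS's unsafeThaw# path and the real list terminator is not necessarily NULL:

    #include <stddef.h>

    typedef struct MutClosure_ {
        struct MutClosure_ *mut_link;   /* next list entry, or NULL when unlisted */
    } MutClosure;

    typedef struct {
        MutClosure *mut_list;           /* head of one generation's mutable list */
    } Generation;

    /* Only re-attach the array if scavenging unlinked it
     * (mut_link == NULL), so it never appears on the list twice. */
    static void thaw(Generation *gen, MutClosure *arr)
    {
        if (arr->mut_link == NULL) {
            arr->mut_link = gen->mut_list;
            gen->mut_list = arr;
        }
    }
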
@@ -2964,6 +3001,11 @@ linear_scan:
     {
        StgPtr next;
 
+       // Set the mut_link field to NULL, so that we will put this
+       // array on the mutable list if it is subsequently thawed
+       // by unsafeThaw#.
+       ((StgMutArrPtrs*)p)->mut_link = NULL;
+
       next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
       for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) {
           (StgClosure *)*p = evacuate((StgClosure *)*p);
@@ -3186,6 +3228,11 @@ scavenge_one(StgPtr p)
        // follow everything
        StgPtr next;
 
+       // Set the mut_link field to NULL, so that we will put this
+       // array on the mutable list if it is subsequently thawed
+       // by unsafeThaw#.
+       ((StgMutArrPtrs*)p)->mut_link = NULL;
+
       next = p + mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
       for (p = (P_)((StgMutArrPtrs *)p)->payload; p < next; p++) {
           (StgClosure *)*p = evacuate((StgClosure *)*p);
@@ -3394,6 +3441,9 @@ scavenge_mutable_list(generation *gen)
            (StgClosure *)*q = evacuate((StgClosure *)*q);
        }
        evac_gen = 0;
+       // Set the mut_link field to NULL, so that we will put this
+       // array back on the mutable list if it is subsequently thawed
+       // by unsafeThaw#.
        p->mut_link = NULL;
        if (failed_to_evac) {
            failed_to_evac = rtsFalse;
@@ -3643,7 +3693,7 @@ scavenge_large_bitmap( StgPtr p, StgLargeBitmap *large_bitmap, nat size )
     }
 }
 
-static inline StgPtr
+STATIC_INLINE StgPtr
 scavenge_small_bitmap (StgPtr p, nat size, StgWord bitmap)
 {
     while (size > 0) {
@@ -4191,20 +4241,20 @@ done_traversing:
        void *gap_start, *next_gap_start, *gap_end;
        nat chunk_size;
 
-       next_gap_start = (void *)gap + sizeof(StgUpdateFrame);
+       next_gap_start = (void *)((unsigned char*)gap + sizeof(StgUpdateFrame));
        sp = next_gap_start;
        while ((StgPtr)gap > tso->sp) {
 
           // we're working in *bytes* now...
           gap_start = next_gap_start;
-          gap_end = gap_start - gap->gap_size * sizeof(W_);
+          gap_end = (void*) ((unsigned char*)gap_start - gap->gap_size * sizeof(W_));
 
           gap = gap->next_gap;
-          next_gap_start = (void *)gap + sizeof(StgUpdateFrame);
+          next_gap_start = (void *)((unsigned char*)gap + sizeof(StgUpdateFrame));
 
-          chunk_size = gap_end - next_gap_start;
-          sp -= chunk_size;
+          chunk_size = (unsigned char*)gap_end - (unsigned char*)next_gap_start;
+          (unsigned char*)sp -= chunk_size;
           memmove(sp, next_gap_start, chunk_size);
        }
 
@@ -4265,7 +4315,7 @@ printMutableList(generation *gen)
     fputc('\n', stderr);
 }
 
-static inline rtsBool
+STATIC_INLINE rtsBool
 maybeLarge(StgClosure *closure)
 {
   StgInfoTable *info = get_itbl(closure);
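
The casts added in the stack-squeezing hunk above are portability fixes: arithmetic on void * is a GNU C extension, so the code now steps through memory via unsigned char *, whose size is defined to be 1 byte. A standalone illustration of the same idiom:

    #include <stdio.h>

    int main(void)
    {
        long frames[4];
        void *gap = frames;

        /* "gap + sizeof(long)" compiles only as a GNU extension;
         * casting to unsigned char * makes the byte step portable. */
        void *next = (void *)((unsigned char *)gap + sizeof(long));

        printf("%td bytes apart\n",
               (unsigned char *)next - (unsigned char *)gap);
        return 0;
    }
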