X-Git-Url: http://git.megacz.com/?a=blobdiff_plain;f=rts%2Fsm%2FEvac.c;h=61cf10bcbea6ff4af26c3a442befb7a8ed65cde6;hb=addf865136274069fe72793aa6f82a6e0fd4758a;hp=76026b0bac9ce0da881e1ea8ed8a232e4afd1769;hpb=c24c65f518f153ff59acf27417fa36c14365951e;p=ghc-hetmet.git

diff --git a/rts/sm/Evac.c b/rts/sm/Evac.c
index 76026b0..61cf10b 100644
--- a/rts/sm/Evac.c
+++ b/rts/sm/Evac.c
@@ -139,15 +139,19 @@ copy_tag_nolock(StgClosure **p, const StgInfoTable *info,
     nat i;
 
     to = alloc_for_copy(size,gen);
-    *p = TAG_CLOSURE(tag,(StgClosure*)to);
-    src->header.info = (const StgInfoTable *)MK_FORWARDING_PTR(to);
-
+
     from = (StgPtr)src;
     to[0] = (W_)info;
     for (i = 1; i < size; i++) { // unroll for small i
         to[i] = from[i];
     }
 
+    // if somebody else reads the forwarding pointer, we better make
+    // sure there's a closure at the end of it.
+    write_barrier();
+    *p = TAG_CLOSURE(tag,(StgClosure*)to);
+    src->header.info = (const StgInfoTable *)MK_FORWARDING_PTR(to);
+
 //  if (to+size+2 < bd->start + BLOCK_SIZE_W) {
 //      __builtin_prefetch(to + size + 2, 1);
 //  }
@@ -162,7 +166,7 @@ copy_tag_nolock(StgClosure **p, const StgInfoTable *info,
 
 /* Special version of copy() for when we only want to copy the info
  * pointer of an object, but reserve some padding after it.  This is
- * used to optimise evacuation of BLACKHOLEs.
+ * used to optimise evacuation of TSOs.
  */
 static rtsBool
 copyPart(StgClosure **p, StgClosure *src, nat size_to_reserve,
@@ -191,7 +195,6 @@ spin:
 #endif
 
     to = alloc_for_copy(size_to_reserve, gen);
-    *p = (StgClosure *)to;
 
     from = (StgPtr)src;
     to[0] = info;
@@ -199,10 +202,9 @@ spin:
         to[i] = from[i];
     }
 
-#if defined(PARALLEL_GC)
     write_barrier();
-#endif
     src->header.info = (const StgInfoTable*)MK_FORWARDING_PTR(to);
+    *p = (StgClosure *)to;
 
 #ifdef PROFILING
     // We store the size of the just evacuated object in the LDV word so that
@@ -362,7 +364,7 @@ loop:
 
   tag = GET_CLOSURE_TAG(q);
   q = UNTAG_CLOSURE(q);
-  ASSERT(LOOKS_LIKE_CLOSURE_PTR(q));
+  ASSERTM(LOOKS_LIKE_CLOSURE_PTR(q), "invalid closure, info=%p", q->header.info);
 
   if (!HEAP_ALLOCED_GC(q)) {
 
@@ -413,8 +415,7 @@ loop:
        * on the CAF list, so don't do anything with it here (we'll
        * scavenge it later).
        */
-      if (((StgIndStatic *)q)->saved_info == NULL) {
-          if (*IND_STATIC_LINK((StgClosure *)q) == NULL) {
+      if (*IND_STATIC_LINK((StgClosure *)q) == NULL) {
 #ifndef THREADED_RTS
           *IND_STATIC_LINK((StgClosure *)q) = gct->static_objects;
           gct->static_objects = (StgClosure *)q;
@@ -427,7 +428,6 @@ loop:
           gct->static_objects = (StgClosure *)q;
       }
 #endif
-      }
       }
       return;
 
@@ -620,31 +620,51 @@ loop:
 
   case FUN:
   case IND_PERM:
-  case IND_OLDGEN_PERM:
  case CONSTR:
    copy_tag_nolock(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen,tag);
    return;
 
+  case BLACKHOLE:
+  {
+      StgClosure *r;
+      const StgInfoTable *i;
+      r = ((StgInd*)q)->indirectee;
+      if (GET_CLOSURE_TAG(r) == 0) {
+          i = r->header.info;
+          if (IS_FORWARDING_PTR(i)) {
+              r = (StgClosure *)UN_FORWARDING_PTR(i);
+              i = r->header.info;
+          }
+          if (i == &stg_TSO_info
+              || i == &stg_WHITEHOLE_info
+              || i == &stg_BLOCKING_QUEUE_CLEAN_info
+              || i == &stg_BLOCKING_QUEUE_DIRTY_info) {
+              copy(p,info,q,sizeofW(StgInd),gen);
+              return;
+          }
+          ASSERT(i != &stg_IND_info);
+      }
+      q = r;
+      *p = r;
+      goto loop;
+  }
+
+  case BLOCKING_QUEUE:
  case WEAK:
-  case STABLE_NAME:
-    copy_tag(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen,tag);
+  case PRIM:
+  case MUT_PRIM:
+    copy(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen);
    return;
 
  case BCO:
    copy(p,info,q,bco_sizeW((StgBCO *)q),gen);
    return;
 
-  case CAF_BLACKHOLE:
-  case BLACKHOLE:
-    copyPart(p,q,BLACKHOLE_sizeW(),sizeofW(StgHeader),gen);
-    return;
-
  case THUNK_SELECTOR:
    eval_thunk_selector(p, (StgSelector *)q, rtsTrue);
    return;
 
  case IND:
-  case IND_OLDGEN:
    // follow chains of indirections, don't evacuate them
    q = ((StgInd*)q)->indirectee;
    *p = q;
@@ -721,30 +741,10 @@ loop:
      }
  }
 
-  case TREC_HEADER:
-    copy(p,info,q,sizeofW(StgTRecHeader),gen);
-    return;
-
-  case TVAR_WATCH_QUEUE:
-    copy(p,info,q,sizeofW(StgTVarWatchQueue),gen);
-    return;
-
-  case TVAR:
-    copy(p,info,q,sizeofW(StgTVar),gen);
-    return;
-
  case TREC_CHUNK:
    copy(p,info,q,sizeofW(StgTRecChunk),gen);
    return;
 
-  case ATOMIC_INVARIANT:
-    copy(p,info,q,sizeofW(StgAtomicInvariant),gen);
-    return;
-
-  case INVARIANT_CHECK_QUEUE:
-    copy(p,info,q,sizeofW(StgInvariantCheckQueue),gen);
-    return;
-
  default:
    barf("evacuate: strange closure type %d", (int)(INFO_PTR_TO_STRUCT(info)->type));
  }
@@ -771,11 +771,7 @@ unchain_thunk_selectors(StgSelector *p, StgClosure *val)
 
     prev = NULL;
     while (p) {
-#ifdef THREADED_RTS
         ASSERT(p->header.info == &stg_WHITEHOLE_info);
-#else
-        ASSERT(p->header.info == &stg_BLACKHOLE_info);
-#endif
         // val must be in to-space.  Not always: when we recursively
         // invoke eval_thunk_selector(), the recursive calls will not
         // evacuate the value (because we want to select on the value,
@@ -798,7 +794,13 @@ unchain_thunk_selectors(StgSelector *p, StgClosure *val)
             // indirection pointing to itself, and we want the program
             // to deadlock if it ever enters this closure, so
             // BLACKHOLE is correct.
-            SET_INFO(p, &stg_BLACKHOLE_info);
+
+            // XXX we do not have BLACKHOLEs any more; replace with
+            // a THUNK_SELECTOR again.  This will go into a loop if it is
+            // entered, and should result in a NonTermination exception.
+            ((StgThunk *)p)->payload[0] = val;
+            write_barrier();
+            SET_INFO(p, &stg_sel_0_upd_info);
         } else {
             ((StgInd *)p)->indirectee = val;
             write_barrier();
@@ -828,7 +830,7 @@ eval_thunk_selector (StgClosure **q, StgSelector * p, rtsBool evac)
     prev_thunk_selector = NULL;
     // this is a chain of THUNK_SELECTORs that we are going to update
     // to point to the value of the current THUNK_SELECTOR.  Each
-    // closure on the chain is a BLACKHOLE, and points to the next in the
+    // closure on the chain is a WHITEHOLE, and points to the next in the
     // chain with payload[0].
 
 selector_chain:
@@ -866,7 +868,7 @@ selector_chain:
     }
 
-    // BLACKHOLE the selector thunk, since it is now under evaluation.
+    // WHITEHOLE the selector thunk, since it is now under evaluation.
     // This is important to stop us going into an infinite loop if
     // this selector thunk eventually refers to itself.
 
 #if defined(THREADED_RTS)
@@ -899,7 +901,7 @@ selector_chain:
 #else
     // Save the real info pointer (NOTE: not the same as get_itbl()).
     info_ptr = (StgWord)p->header.info;
-    SET_INFO(p,&stg_BLACKHOLE_info);
+    SET_INFO(p,&stg_WHITEHOLE_info);
 #endif
 
     field = INFO_PTR_TO_STRUCT(info_ptr)->layout.selector_offset;
@@ -951,11 +953,7 @@ selector_loop:
              // the original selector thunk, p.
              SET_INFO(p, (StgInfoTable *)info_ptr);
              LDV_RECORD_DEAD_FILL_SLOP_DYNAMIC((StgClosure *)p);
-#if defined(THREADED_RTS)
              SET_INFO(p, &stg_WHITEHOLE_info);
-#else
-              SET_INFO(p, &stg_BLACKHOLE_info);
-#endif
 #endif
 
              // the closure in val is now the "value" of the
@@ -971,8 +969,6 @@ selector_loop:
              switch (info->type) {
              case IND:
              case IND_PERM:
-              case IND_OLDGEN:
-              case IND_OLDGEN_PERM:
              case IND_STATIC:
                  val = ((StgInd *)val)->indirectee;
                  goto val_loop;
@@ -1006,13 +1002,38 @@ selector_loop:
 
      case IND:
      case IND_PERM:
-      case IND_OLDGEN:
-      case IND_OLDGEN_PERM:
      case IND_STATIC:
          // Again, we might need to untag a constructor.
          selectee = UNTAG_CLOSURE( ((StgInd *)selectee)->indirectee );
          goto selector_loop;
 
+      case BLACKHOLE:
+      {
+          StgClosure *r;
+          const StgInfoTable *i;
+          r = ((StgInd*)selectee)->indirectee;
+
+          // establish whether this BH has been updated, and is now an
+          // indirection, as in evacuate().
+          if (GET_CLOSURE_TAG(r) == 0) {
+              i = r->header.info;
+              if (IS_FORWARDING_PTR(i)) {
+                  r = (StgClosure *)UN_FORWARDING_PTR(i);
+                  i = r->header.info;
+              }
+              if (i == &stg_TSO_info
+                  || i == &stg_WHITEHOLE_info
+                  || i == &stg_BLOCKING_QUEUE_CLEAN_info
+                  || i == &stg_BLOCKING_QUEUE_DIRTY_info) {
+                  goto bale_out;
+              }
+              ASSERT(i != &stg_IND_info);
+          }
+
+          selectee = UNTAG_CLOSURE( ((StgInd *)selectee)->indirectee );
+          goto selector_loop;
+      }
+
      case THUNK_SELECTOR:
      {
          StgClosure *val;
@@ -1047,8 +1068,6 @@ selector_loop:
      case THUNK_1_1:
      case THUNK_0_2:
      case THUNK_STATIC:
-      case CAF_BLACKHOLE:
-      case BLACKHOLE:
          // not evaluated yet
          goto bale_out;
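
The reordering in copy_tag_nolock() and copyPart() above is the heart of the lock-free evacuation protocol: the object is copied in full first, a write_barrier() is issued, and only then is the source header overwritten with a forwarding pointer (and *p updated), so any other GC thread that races on the forwarding pointer is guaranteed to find a completely initialised copy behind it. The fragment below is not RTS code; it is a minimal, self-contained sketch of the same copy-then-publish pattern using C11 atomics, with hypothetical names (Obj, evacuate_one, FORWARDING_BIT) standing in for StgClosure, MK_FORWARDING_PTR() and write_barrier().

/* Hypothetical sketch of the copy-then-publish ordering used above.
 * Not part of Evac.c; the real RTS uses write_barrier() plus an
 * ordinary store rather than a C11 release store. */
#include <stdatomic.h>
#include <stdint.h>
#include <string.h>

typedef struct {
    _Atomic(uintptr_t) header;   /* info pointer, or forwarding pointer */
    uintptr_t payload[];
} Obj;

#define FORWARDING_BIT 1u

static Obj *evacuate_one(Obj *src, Obj *to, size_t size_words)
{
    /* 1. copy the whole object: header word first, then the payload */
    uintptr_t info = atomic_load_explicit(&src->header, memory_order_relaxed);
    atomic_store_explicit(&to->header, info, memory_order_relaxed);
    memcpy(to->payload, src->payload, (size_words - 1) * sizeof(uintptr_t));

    /* 2. publish: the forwarding pointer is written only after the copy
     *    is complete, so a reader that sees it cannot observe a
     *    partially written closure (the release store plays the role
     *    of write_barrier() in the patch above). */
    atomic_store_explicit(&src->header,
                          (uintptr_t)to | FORWARDING_BIT,
                          memory_order_release);
    return to;
}

In Evac.c the same effect is obtained with the explicit write_barrier() macro placed before the two publishing stores, which is why the patch moves the assignments to *p and src->header.info below the copy loop in both copy_tag_nolock() and copyPart().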
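The new BLACKHOLE cases in evacuate() and eval_thunk_selector() both rely on the same convention: after this change a BLACKHOLE is laid out as an StgInd, and its indirectee encodes its state. If the indirectee is (or forwards to) a TSO, a WHITEHOLE, or a blocking queue, the thunk is still under evaluation and the blackhole object itself must be copied; anything else means the thunk has already been updated with a value, so the GC can treat the blackhole as an indirection and follow it. The helper below only restates that test in one place for exposition; is_unupdated_blackhole() is a hypothetical name that does not exist in Evac.c (the patch open-codes the check in both call sites), and it assumes the headers Evac.c already includes.

// Hypothetical restatement of the BLACKHOLE test open-coded above.
// The info-table symbols and macros are the real RTS ones; only the
// helper itself is invented for illustration.
static rtsBool
is_unupdated_blackhole (StgClosure *bh)
{
    StgClosure *r = ((StgInd *)bh)->indirectee;
    const StgInfoTable *i;

    if (GET_CLOSURE_TAG(r) != 0) {
        // a tagged indirectee must point to a constructor, so the
        // thunk has certainly been updated with its value already
        return rtsFalse;
    }

    i = r->header.info;
    if (IS_FORWARDING_PTR(i)) {
        // the indirectee was evacuated before us; look through the
        // forwarding pointer and inspect the copy instead
        r = (StgClosure *)UN_FORWARDING_PTR(i);
        i = r->header.info;
    }

    // still owned by a thread (TSO/WHITEHOLE) or carrying blocked
    // waiters: no value yet, so the BLACKHOLE itself must be copied
    return (i == &stg_TSO_info
            || i == &stg_WHITEHOLE_info
            || i == &stg_BLOCKING_QUEUE_CLEAN_info
            || i == &stg_BLOCKING_QUEUE_DIRTY_info);
}

In the patch, evacuate() copies the blackhole with copy(p,info,q,sizeofW(StgInd),gen) exactly when this predicate would hold, and otherwise short-circuits to the indirectee; eval_thunk_selector() bales out in the same situation because the selectee has no value to select from yet.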