From ebaff0a85ffb62f7875473dc65eeeb7c62800981 Mon Sep 17 00:00:00 2001
From: Simon Marlow
Date: Thu, 10 Jan 2008 12:28:20 +0000
Subject: [PATCH] more fixes for THUNK_SELECTORs

---
 rts/sm/Evac.c     |   37 +++++++++++++++++++++++++++----------
 rts/sm/Evac.c-inc |    4 +++-
 2 files changed, 30 insertions(+), 11 deletions(-)

diff --git a/rts/sm/Evac.c b/rts/sm/Evac.c
index 3593943..1c0a2d8 100644
--- a/rts/sm/Evac.c
+++ b/rts/sm/Evac.c
@@ -169,6 +169,9 @@ unchain_thunk_selectors(StgSelector *p, StgClosure *val)
 #else
         ASSERT(p->header.info == &stg_BLACKHOLE_info);
 #endif
+        // val must be in to-space.
+        ASSERT(Bdescr((P_)val)->gen_no > N || (Bdescr((P_)val)->flags & BF_EVACUATED));
+
         prev = (StgSelector*)((StgClosure *)p)->payload[0];
 
         // Update the THUNK_SELECTOR with an indirection to the
@@ -177,8 +180,9 @@ unchain_thunk_selectors(StgSelector *p, StgClosure *val)
         // EVACUATED closure always points to an object in the
         // same or an older generation (required by the short-cut
         // test in the EVACUATED case, below).
-        SET_INFO(p, &stg_IND_info);
         ((StgInd *)p)->indirectee = val;
+        write_barrier();
+        SET_INFO(p, &stg_IND_info);
 
         // For the purposes of LDV profiling, we have created an
         // indirection.
@@ -243,16 +247,26 @@ selector_chain:
     // In threaded mode, we'll use WHITEHOLE to lock the selector
     // thunk while we evaluate it.
     {
-        info_ptr = xchg((StgPtr)&p->header.info, (W_)&stg_WHITEHOLE_info);
-        if (info_ptr == (W_)&stg_WHITEHOLE_info) {
-            do {
-                info_ptr = xchg((StgPtr)&p->header.info, (W_)&stg_WHITEHOLE_info);
-            } while (info_ptr == (W_)&stg_WHITEHOLE_info);
-            goto bale_out;
-        }
-        // make sure someone else didn't get here first
+        do {
+            info_ptr = xchg((StgPtr)&p->header.info, (W_)&stg_WHITEHOLE_info);
+        } while (info_ptr == (W_)&stg_WHITEHOLE_info);
+
+        // make sure someone else didn't get here first...
         if (INFO_PTR_TO_STRUCT(info_ptr)->type != THUNK_SELECTOR) {
-            goto bale_out;
+            // v. tricky now. The THUNK_SELECTOR has been evacuated
+            // by another thread, and is now either EVACUATED or IND.
+            // We need to extract ourselves from the current situation
+            // as cleanly as possible.
+            //   - unlock the closure
+            //   - update *q, we may have done *some* evaluation
+            //   - if evac, we need to call evacuate(), because we
+            //     need the write-barrier stuff.
+            //   - undo the chain we've built to point to p.
+            SET_INFO(p, (const StgInfoTable *)info_ptr);
+            *q = (StgClosure *)p;
+            if (evac) evacuate(q);
+            unchain_thunk_selectors(prev_thunk_selector, (StgClosure *)p);
+            return;
         }
     }
 #else
@@ -404,6 +418,9 @@ bale_out:
     // We didn't manage to evaluate this thunk; restore the old info
     // pointer. But don't forget: we still need to evacuate the thunk itself.
     SET_INFO(p, (const StgInfoTable *)info_ptr);
+    // THREADED_RTS: we just unlocked the thunk, so another thread
+    // might get in and update it. copy() will lock it again and
+    // check whether it was updated in the meantime.
     *q = (StgClosure *)p;
     if (evac) {
         copy(q,(StgClosure *)p,THUNK_SELECTOR_sizeW(),bd->step->to);
diff --git a/rts/sm/Evac.c-inc b/rts/sm/Evac.c-inc
index 4fe9d5d..367d277 100644
--- a/rts/sm/Evac.c-inc
+++ b/rts/sm/Evac.c-inc
@@ -34,7 +34,9 @@ copy_tag(StgClosure **p, StgClosure *src, nat size, step *stp, StgWord tag)
         info = xchg((StgPtr)&src->header.info, (W_)&stg_WHITEHOLE_info);
         // so.. what is it?
     } while (info == (W_)&stg_WHITEHOLE_info);
-    if (info == (W_)&stg_EVACUATED_info) {
+    if (info == (W_)&stg_EVACUATED_info || info == (W_)&stg_IND_info) {
+        // NB. a closure might be updated with an IND by
+        // unchain_thunk_selectors(), hence the test above.
         src->header.info = (const StgInfoTable *)info;
         return evacuate(p); // does the failed_to_evac stuff
     }
-- 
1.7.10.4
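
A note on the ordering change in unchain_thunk_selectors() above: the indirectee
is now written before the info pointer, with a write_barrier() in between, so a
thread that reads stg_IND_info is guaranteed to also see a fully written
indirectee. The sketch below illustrates that publish-after-barrier pattern in
isolation; the ind_sketch type, IND_INFO_SKETCH marker, publish_ind() name, and
use of C11 release atomics are illustrative stand-ins, not the RTS's StgInd,
stg_IND_info, or write_barrier() implementation.

#include <stdatomic.h>

/* Simplified stand-ins for the RTS structures; not real GHC types. */
typedef struct {
    _Atomic(const void *) info;  /* plays the role of the closure's info pointer */
    void *indirectee;            /* plays the role of StgInd.indirectee */
} ind_sketch;

static const int IND_INFO_SKETCH = 0;  /* dummy marker standing in for stg_IND_info */

/* Publish an indirection: write the payload first, then release-store the
 * info pointer, so a concurrent reader that observes IND_INFO_SKETCH also
 * observes the indirectee written before it. */
static void publish_ind(ind_sketch *p, void *val)
{
    p->indirectee = val;
    atomic_store_explicit(&p->info, &IND_INFO_SKETCH, memory_order_release);
}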