X-Git-Url: http://git.megacz.com/?a=blobdiff_plain;f=ghc%2Frts%2FException.cmm;h=9d8d9d69544c040263a2348e454eeb2dd0ef1941;hb=ce9d03fa27ce85072d8ac1426d5420a5c0c215ee;hp=6192f6d77637b4882d14b522427421de3ca399f1;hpb=03d63424f3034c34d61fe0f654e05d20c9eded89;p=ghc-hetmet.git

diff --git a/ghc/rts/Exception.cmm b/ghc/rts/Exception.cmm
index 6192f6d..9d8d9d6 100644
--- a/ghc/rts/Exception.cmm
+++ b/ghc/rts/Exception.cmm
@@ -35,17 +35,30 @@
    it.  The action of unblocking exceptions in a thread will release all
    the threads waiting to deliver exceptions to that thread.
 
+   NB. there's a bug in here.  If a thread is inside an
+   unsafePerformIO, and inside blockAsyncExceptions# (there is an
+   unblockAsyncExceptions_ret on the stack), and it is blocked in an
+   interruptible operation, and it receives an exception, then the
+   unsafePerformIO thunk will be updated with a stack object
+   containing the unblockAsyncExceptions_ret frame.  Later, when
+   someone else evaluates this thunk, the blocked exception state is
+   not restored, and the result is that unblockAsyncExceptions_ret
+   will attempt to unblock exceptions in the current thread, but it'll
+   find that the CurrentTSO->blocked_exceptions is NULL.  Hence, we
+   work around this by checking for NULL in awakenBlockedQueue().
+
    -------------------------------------------------------------------------- */
 
 INFO_TABLE_RET( stg_unblockAsyncExceptionszh_ret,
                 0/*framesize*/, 0/*bitmap*/, RET_SMALL )
 {
-    ASSERT(StgTSO_blocked_exceptions(CurrentTSO) != NULL);
+    // Not true: see comments above
+    // ASSERT(StgTSO_blocked_exceptions(CurrentTSO) != NULL);
 #if defined(GRAN) || defined(PAR)
-    foreign "C" awakenBlockedQueue(StgTSO_blocked_exceptions(CurrentTSO) "ptr",
+    foreign "C" awakenBlockedQueue(MyCapability() "ptr", StgTSO_blocked_exceptions(CurrentTSO) "ptr",
                                    NULL "ptr");
 #else
-    foreign "C" awakenBlockedQueue(StgTSO_blocked_exceptions(CurrentTSO) "ptr");
+    foreign "C" awakenBlockedQueue(MyCapability() "ptr", StgTSO_blocked_exceptions(CurrentTSO) "ptr");
 #endif
     StgTSO_blocked_exceptions(CurrentTSO) = NULL;
 #ifdef REG_R1
@@ -61,7 +74,8 @@ INFO_TABLE_RET( stg_unblockAsyncExceptionszh_ret,
 INFO_TABLE_RET( stg_blockAsyncExceptionszh_ret,
                 0/*framesize*/, 0/*bitmap*/, RET_SMALL )
 {
-    ASSERT(StgTSO_blocked_exceptions(CurrentTSO) == NULL);
+    // Not true: see comments above
+    // ASSERT(StgTSO_blocked_exceptions(CurrentTSO) == NULL);
     StgTSO_blocked_exceptions(CurrentTSO) = END_TSO_QUEUE;
 #ifdef REG_R1
     Sp_adj(1);
@@ -101,10 +115,10 @@ unblockAsyncExceptionszh_fast
 
     if (StgTSO_blocked_exceptions(CurrentTSO) != NULL) {
 #if defined(GRAN) || defined(PAR)
-      foreign "C" awakenBlockedQueue(StgTSO_blocked_exceptions(CurrentTSO) "ptr",
+      foreign "C" awakenBlockedQueue(MyCapability() "ptr", StgTSO_blocked_exceptions(CurrentTSO) "ptr",
                                      StgTSO_block_info(CurrentTSO) "ptr");
 #else
-      foreign "C" awakenBlockedQueue(StgTSO_blocked_exceptions(CurrentTSO) "ptr");
+      foreign "C" awakenBlockedQueue(MyCapability() "ptr", StgTSO_blocked_exceptions(CurrentTSO) "ptr");
 #endif
       StgTSO_blocked_exceptions(CurrentTSO) = NULL;
 
@@ -177,7 +191,7 @@ killThreadzh_fast
      */
     if (R1 == CurrentTSO) {
         SAVE_THREAD_STATE();
-        foreign "C" raiseAsyncWithLock(R1 "ptr", R2 "ptr");
+        foreign "C" raiseAsync(MyCapability() "ptr", R1 "ptr", R2 "ptr");
         if (StgTSO_what_next(CurrentTSO) == ThreadKilled::I16) {
             R1 = ThreadFinished;
             jump StgReturn;
@@ -187,7 +201,7 @@ killThreadzh_fast
             jump %ENTRY_CODE(Sp(0));
         }
     } else {
-        foreign "C" raiseAsyncWithLock(R1 "ptr", R2 "ptr");
+        foreign "C" raiseAsync(MyCapability() "ptr", R1 "ptr", R2 "ptr");
     }
 
     jump %ENTRY_CODE(Sp(0));
@@ -309,7 +323,7 @@ catchzh_fast
  * It is used in raisezh_fast to update thunks on the update list
  * -------------------------------------------------------------------------- */
 
-INFO_TABLE(stg_raise,1,0,THUNK,"raise","raise")
+INFO_TABLE(stg_raise,1,0,THUNK_1_0,"raise","raise")
 {
     R1 = StgThunk_payload(R1,0);
     jump raisezh_fast;
@@ -336,7 +350,7 @@ raisezh_fast
 
 retry_pop_stack:
     StgTSO_sp(CurrentTSO) = Sp;
-    frame_type = foreign "C" raiseExceptionHelper(CurrentTSO "ptr", R1 "ptr");
+    frame_type = foreign "C" raiseExceptionHelper(BaseReg "ptr", CurrentTSO "ptr", R1 "ptr");
     Sp = StgTSO_sp(CurrentTSO);
     if (frame_type == ATOMICALLY_FRAME) {
       /* The exception has reached the edge of a memory transaction.  Check that
@@ -345,8 +359,8 @@ retry_pop_stack:
       W_ trec;
       W_ r;
       trec = StgTSO_trec(CurrentTSO);
-      r = foreign "C" stmValidateTransaction(trec "ptr");
-      foreign "C" stmAbortTransaction(trec "ptr");
+      r = foreign "C" stmValidateNestOfTransactions(trec "ptr");
+      foreign "C" stmAbortTransaction(MyCapability() "ptr", trec "ptr");
       StgTSO_trec(CurrentTSO) = NO_TREC;
       if (r) {
         // Transaction was valid: continue searching for a catch frame
@@ -355,7 +369,7 @@ retry_pop_stack:
       } else {
         // Transaction was not valid: we retry the exception (otherwise continue
         // with a further call to raiseExceptionHelper)
-        "ptr" trec = foreign "C" stmStartTransaction(NO_TREC "ptr");
+        "ptr" trec = foreign "C" stmStartTransaction(MyCapability() "ptr", NO_TREC "ptr");
         StgTSO_trec(CurrentTSO) = trec;
         R1 = StgAtomicallyFrame_code(Sp);
         Sp_adj(-1);
@@ -364,13 +378,22 @@
     }
 
     if (frame_type == STOP_FRAME) {
-        /* We've stripped the entire stack, the thread is now dead. */
-        Sp = CurrentTSO + OFFSET_StgTSO_stack
-            + WDS(StgTSO_stack_size(CurrentTSO)) - WDS(1);
-        Sp(0) = R1;             /* save the exception */
+        /*
+         * We've stripped the entire stack, the thread is now dead.
+         * We will leave the stack in a GC'able state, see the stg_stop_thread
+         * entry code in StgStartup.cmm.
+         */
+        Sp = CurrentTSO + TSO_OFFSET_StgTSO_stack
+            + WDS(StgTSO_stack_size(CurrentTSO)) - WDS(2);
+        Sp(1) = R1;             /* save the exception */
+        Sp(0) = stg_enter_info; /* so that GC can traverse this stack */
        StgTSO_what_next(CurrentTSO) = ThreadKilled::I16;
        SAVE_THREAD_STATE();     /* inline! */
-       R1 = ThreadFinished;
+
+       /* The return code goes in BaseReg->rRet, and BaseReg is returned in R1 */
+       StgRegTable_rRet(BaseReg) = ThreadFinished;
+       R1 = BaseReg;
+
        jump StgReturn;
     }