/* -----------------------------------------------------------------------------
 *
 * (c) The GHC Team, 1998-2004
 *
 * Exception support
 *
 * This file is written in a subset of C--, extended with various
 * features specific to GHC.  It is compiled by GHC directly.  For the
 * syntax of .cmm files, see the parser in ghc/compiler/cmm/CmmParse.y.
 *
 * ---------------------------------------------------------------------------*/

#include "Cmm.h"
/* -----------------------------------------------------------------------------
   Exception Primitives

   A thread can request that asynchronous exceptions not be delivered
   ("blocked") for the duration of an I/O computation.  The primitive

	blockAsyncExceptions# :: IO a -> IO a

   is used for this purpose.  During a blocked section, asynchronous
   exceptions may be unblocked again temporarily:

	unblockAsyncExceptions# :: IO a -> IO a

   Furthermore, asynchronous exceptions are blocked automatically during
   the execution of an exception handler.  Both of these primitives
   leave a continuation on the stack which reverts to the previous
   state (blocked or unblocked) on exit.

   A thread which wants to raise an exception in another thread (using
   killThread#) must block until the target thread is ready to receive
   it.  The action of unblocking exceptions in a thread will release all
   the threads waiting to deliver exceptions to that thread.

   -------------------------------------------------------------------------- */
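
/* As a rough Haskell-level illustration (a sketch only, not part of this
   file, assuming the block/unblock wrappers from Control.Exception that
   sit on top of these primitives):

	-- Asynchronous exceptions (e.g. from killThread#) are deferred
	-- inside 'block', but may be delivered inside the nested 'unblock':
	example :: IO ()
	example = block (do putStrLn "async exceptions deferred here"
	                    unblock (putStrLn "...but deliverable here"))
 */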
INFO_TABLE_RET( stg_unblockAsyncExceptionszh_ret,
		0/*framesize*/, 0/*bitmap*/, RET_SMALL )
{
    ASSERT(StgTSO_blocked_exceptions(CurrentTSO) != NULL);
#if defined(GRAN) || defined(PAR)
    foreign "C" awakenBlockedQueue(StgTSO_blocked_exceptions(CurrentTSO) "ptr",
				   StgTSO_block_info(CurrentTSO) "ptr");
#else
    foreign "C" awakenBlockedQueue(StgTSO_blocked_exceptions(CurrentTSO) "ptr");
#endif
    StgTSO_blocked_exceptions(CurrentTSO) = NULL;
#ifdef REG_R1
    Sp_adj(1);
    jump %ENTRY_CODE(Sp(0));
#else
    Sp(1) = Sp(0);
    Sp_adj(1);
    jump %ENTRY_CODE(Sp(1));
#endif
}
INFO_TABLE_RET( stg_blockAsyncExceptionszh_ret,
		0/*framesize*/, 0/*bitmap*/, RET_SMALL )
{
    ASSERT(StgTSO_blocked_exceptions(CurrentTSO) == NULL);
    StgTSO_blocked_exceptions(CurrentTSO) = END_TSO_QUEUE;
#ifdef REG_R1
    Sp_adj(1);
    jump %ENTRY_CODE(Sp(0));
#else
    Sp(1) = Sp(0);
    Sp_adj(1);
    jump %ENTRY_CODE(Sp(1));
#endif
}
blockAsyncExceptionszh_fast
{
    /* Args: R1 :: IO a */
    STK_CHK_GEN( WDS(2)/* worst case */, R1_PTR, blockAsyncExceptionszh_fast);

    if (StgTSO_blocked_exceptions(CurrentTSO) == NULL) {
	StgTSO_blocked_exceptions(CurrentTSO) = END_TSO_QUEUE;
	/* avoid growing the stack unnecessarily */
	if (Sp(0) == stg_blockAsyncExceptionszh_ret_info) {
	    Sp_adj(1);
	} else {
	    Sp_adj(-1);
	    Sp(0) = stg_unblockAsyncExceptionszh_ret_info;
	}
    }
    TICK_UNKNOWN_CALL();
    TICK_SLOW_CALL_v();
    jump RET_LBL(stg_ap_v);
}
unblockAsyncExceptionszh_fast
{
    /* Args: R1 :: IO a */
    STK_CHK_GEN( WDS(2), R1_PTR, unblockAsyncExceptionszh_fast);

    if (StgTSO_blocked_exceptions(CurrentTSO) != NULL) {
#if defined(GRAN) || defined(PAR)
	foreign "C" awakenBlockedQueue(StgTSO_blocked_exceptions(CurrentTSO) "ptr",
				       StgTSO_block_info(CurrentTSO) "ptr");
#else
	foreign "C" awakenBlockedQueue(StgTSO_blocked_exceptions(CurrentTSO) "ptr");
#endif
	StgTSO_blocked_exceptions(CurrentTSO) = NULL;

	/* avoid growing the stack unnecessarily */
	if (Sp(0) == stg_unblockAsyncExceptionszh_ret_info) {
	    Sp_adj(1);
	} else {
	    Sp_adj(-1);
	    Sp(0) = stg_blockAsyncExceptionszh_ret_info;
	}
    }
    TICK_UNKNOWN_CALL();
    TICK_SLOW_CALL_v();
    jump RET_LBL(stg_ap_v);
}
#define interruptible(what_next)		\
        ( what_next == BlockedOnMVar		\
	|| what_next == BlockedOnException	\
	|| what_next == BlockedOnRead		\
	|| what_next == BlockedOnWrite		\
	|| what_next == BlockedOnDelay		\
	|| what_next == BlockedOnDoProc)
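
/* For orientation: at the Haskell level, killThread is (in this era's
   libraries) roughly

	killThread :: ThreadId -> IO ()
	killThread tid = throwTo tid (AsyncException ThreadKilled)

   (a sketch; the real definition lives in GHC.Conc), so the blocking
   behaviour implemented below is exactly what a throwTo caller observes:
   it waits until the target is interruptible or unblocks exceptions.
 */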
killThreadzh_fast
{
    /* args: R1 = TSO to kill, R2 = Exception */

    W_ why_blocked;

    /* This thread may have been relocated.
     * (see Schedule.c:threadStackOverflow)
     */
  while:
    if (StgTSO_what_next(R1) == ThreadRelocated::I16) {
	R1 = StgTSO_link(R1);
	goto while;
    }
    /* Determine whether this thread is interruptible or not */

    /* If the target thread is currently blocking async exceptions,
     * we'll have to block until it's ready to accept them.  The
     * exception is interruptible threads - ie. those that are blocked
     * on some resource.
     */
    why_blocked = TO_W_(StgTSO_why_blocked(R1));
    if (StgTSO_blocked_exceptions(R1) != NULL && !interruptible(why_blocked))
    {
	StgTSO_link(CurrentTSO) = StgTSO_blocked_exceptions(R1);
	StgTSO_blocked_exceptions(R1) = CurrentTSO;

	StgTSO_why_blocked(CurrentTSO) = BlockedOnException::I16;
	StgTSO_block_info(CurrentTSO) = R1;

	BLOCK( R1_PTR & R2_PTR, killThreadzh_fast );
    }
    /* Killed threads turn into zombies, which might be garbage
     * collected at a later date.  That's why we don't have to
     * explicitly remove them from any queues they might be on.
     */

    /* We might have killed ourselves.  In which case, better be *very*
     * careful.  If the exception killed us, then return to the scheduler.
     * If the exception went to a catch frame, we'll just continue from
     * the handler.
     */
    if (R1 == CurrentTSO) {
	SAVE_THREAD_STATE();
	foreign "C" raiseAsyncWithLock(R1 "ptr", R2 "ptr");
	if (StgTSO_what_next(CurrentTSO) == ThreadKilled::I16) {
	    R1 = ThreadFinished;
	    jump StgReturn;
	} else {
	    LOAD_THREAD_STATE();
	    ASSERT(StgTSO_what_next(CurrentTSO) == ThreadRunGHC::I16);
	    jump %ENTRY_CODE(Sp(0));
	}
    } else {
	foreign "C" raiseAsyncWithLock(R1 "ptr", R2 "ptr");
    }

    jump %ENTRY_CODE(Sp(0));
}
/* -----------------------------------------------------------------------------
   Catch frames
   -------------------------------------------------------------------------- */
#ifdef REG_R1
#define CATCH_FRAME_ENTRY_TEMPLATE(label,ret)	\
   label					\
   {						\
      Sp = Sp + SIZEOF_StgCatchFrame;		\
      jump ret;					\
   }
#else
#define CATCH_FRAME_ENTRY_TEMPLATE(label,ret)	\
   label					\
   {						\
      W_ rval;					\
      rval = Sp(0);				\
      Sp = Sp + SIZEOF_StgCatchFrame;		\
      Sp(0) = rval;				\
      jump ret;					\
   }
#endif

#ifdef REG_R1
#define SP_OFF 0
#else
#define SP_OFF 1
#endif
CATCH_FRAME_ENTRY_TEMPLATE(stg_catch_frame_0_ret,%RET_VEC(Sp(SP_OFF),0))
CATCH_FRAME_ENTRY_TEMPLATE(stg_catch_frame_1_ret,%RET_VEC(Sp(SP_OFF),1))
CATCH_FRAME_ENTRY_TEMPLATE(stg_catch_frame_2_ret,%RET_VEC(Sp(SP_OFF),2))
CATCH_FRAME_ENTRY_TEMPLATE(stg_catch_frame_3_ret,%RET_VEC(Sp(SP_OFF),3))
CATCH_FRAME_ENTRY_TEMPLATE(stg_catch_frame_4_ret,%RET_VEC(Sp(SP_OFF),4))
CATCH_FRAME_ENTRY_TEMPLATE(stg_catch_frame_5_ret,%RET_VEC(Sp(SP_OFF),5))
CATCH_FRAME_ENTRY_TEMPLATE(stg_catch_frame_6_ret,%RET_VEC(Sp(SP_OFF),6))
CATCH_FRAME_ENTRY_TEMPLATE(stg_catch_frame_7_ret,%RET_VEC(Sp(SP_OFF),7))
#if MAX_VECTORED_RTN > 8
#error MAX_VECTORED_RTN has changed: please modify stg_catch_frame too.
#endif
#if defined(PROFILING)
#define CATCH_FRAME_BITMAP 7
#define CATCH_FRAME_WORDS 4
#else
#define CATCH_FRAME_BITMAP 1
#define CATCH_FRAME_WORDS 2
#endif
/* Catch frames are very similar to update frames, but when entering
 * one we just pop the frame off the stack and perform the correct
 * kind of return to the activation record underneath us on the stack.
 */
INFO_TABLE_RET(stg_catch_frame,
	       CATCH_FRAME_WORDS, CATCH_FRAME_BITMAP,
	       CATCH_FRAME,
	       stg_catch_frame_0_ret,
	       stg_catch_frame_1_ret,
	       stg_catch_frame_2_ret,
	       stg_catch_frame_3_ret,
	       stg_catch_frame_4_ret,
	       stg_catch_frame_5_ret,
	       stg_catch_frame_6_ret,
	       stg_catch_frame_7_ret)
CATCH_FRAME_ENTRY_TEMPLATE(,%ENTRY_CODE(Sp(SP_OFF)))
/* -----------------------------------------------------------------------------
 * The catch infotable
 *
 * This should be exactly the same as would be generated by this STG code
 *
 *   catch = {x,h} \n {} -> catch#{x,h}
 *
 * It is used in deleteThread when reverting blackholes.
 * -------------------------------------------------------------------------- */
INFO_TABLE(stg_catch,2,0,FUN,"catch","catch")
{
    R2 = StgClosure_payload(R1,1); /* h */
    R1 = StgClosure_payload(R1,0); /* x */
    jump catchzh_fast;
}
catchzh_fast
{
    /* args: R1 = m :: IO a, R2 = handler :: Exception -> IO a */
    STK_CHK_GEN(SIZEOF_StgCatchFrame + WDS(1), R1_PTR & R2_PTR, catchzh_fast);

    /* Set up the catch frame */
    Sp = Sp - SIZEOF_StgCatchFrame;
    SET_HDR(Sp,stg_catch_frame_info,CCCS);

    StgCatchFrame_handler(Sp) = R2;
    StgCatchFrame_exceptions_blocked(Sp) =
	(StgTSO_blocked_exceptions(CurrentTSO) != NULL);
    TICK_CATCHF_PUSHED();

    /* Apply R1 to the realworld token */
    TICK_UNKNOWN_CALL();
    TICK_SLOW_CALL_v();
    jump RET_LBL(stg_ap_v);
}
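
/* A sketch of how the library-level catch is built on the catch# primop
   that compiles to the code above (assuming this era's representation of
   IO and the unIO unwrapper from GHC.IOBase):

	catch :: IO a -> (Exception -> IO a) -> IO a
	catch (IO m) k = IO (\s -> catch# m (\e -> unIO (k e)) s)
 */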
/* -----------------------------------------------------------------------------
 * The raise infotable
 *
 * This should be exactly the same as would be generated by this STG code
 *
 *   raise = {err} \n {} -> raise#{err}
 *
 * It is used in raisezh_fast to update thunks on the update list
 * -------------------------------------------------------------------------- */
INFO_TABLE(stg_raise,1,0,THUNK,"raise","raise")
{
    R1 = StgClosure_payload(R1,0);
    jump raisezh_fast;
}
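
/* Correspondingly, the library-level throw is just the raise# primop
   (as in GHC.Exception of this era):

	throw :: Exception -> a
	throw exception = raise# exception
 */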
raisezh_fast
{
    W_ handler;
    W_ frame_type;
    /* args : R1 :: Exception */
#if defined(PROFILING)
    /* Debugging tool: on raising an exception, show where we are. */

    /* ToDo: currently this is a hack.  Would be much better if
     * the info was only displayed for an *uncaught* exception.
     */
    if (RtsFlags_ProfFlags_showCCSOnException(RtsFlags)) {
	foreign "C" fprintCCS(stderr,CCCS);
    }
#endif

retry_pop_stack:
    StgTSO_sp(CurrentTSO) = Sp;
    frame_type = foreign "C" raiseExceptionHelper(CurrentTSO "ptr", R1 "ptr");
    Sp = StgTSO_sp(CurrentTSO);
    if (frame_type == ATOMICALLY_FRAME) {
      /* The exception has reached the edge of a memory transaction.  Check that
       * the transaction is valid.  If not then perhaps the exception should
       * not have been thrown: re-run the transaction */
      W_ trec;
      W_ r;
      trec = StgTSO_trec(CurrentTSO);
      r = foreign "C" stmValidateTransaction(trec "ptr");
      foreign "C" stmAbortTransaction(trec "ptr");
      StgTSO_trec(CurrentTSO) = NO_TREC;
      if (r) {
        // Transaction was valid: continue searching for a catch frame
        Sp = Sp + SIZEOF_StgAtomicallyFrame;
        goto retry_pop_stack;
      } else {
        // Transaction was not valid: we retry the exception (otherwise continue
        // with a further call to raiseExceptionHelper)
        "ptr" trec = foreign "C" stmStartTransaction(NO_TREC "ptr");
        StgTSO_trec(CurrentTSO) = trec;
        R1 = StgAtomicallyFrame_code(Sp);
        jump RET_LBL(stg_ap_v);
      }
    }
    if (frame_type == STOP_FRAME) {
	/* We've stripped the entire stack, the thread is now dead. */
	Sp = CurrentTSO + OFFSET_StgTSO_stack
		+ WDS(StgTSO_stack_size(CurrentTSO)) - WDS(1);
	Sp(0) = R1;		/* save the exception */
	StgTSO_what_next(CurrentTSO) = ThreadKilled::I16;
	SAVE_THREAD_STATE();	/* inline! */
	R1 = ThreadFinished;
	jump StgReturn;
    }
    /* Ok, Sp points to the enclosing CATCH_FRAME or CATCH_STM_FRAME.  Pop everything
     * down to and including this frame, update Su, push R1, and enter the handler.
     */
    if (frame_type == CATCH_FRAME) {
      handler = StgCatchFrame_handler(Sp);
    } else {
      handler = StgCatchSTMFrame_handler(Sp);
    }
    /* Restore the blocked/unblocked state for asynchronous exceptions
     * at the CATCH_FRAME.
     *
     * If exceptions were unblocked, arrange that they are unblocked
     * again after executing the handler by pushing an
     * unblockAsyncExceptions_ret stack frame.
     */
    W_ frame;
    frame = Sp;
    if (frame_type == CATCH_FRAME) {
      Sp = Sp + SIZEOF_StgCatchFrame;
      if (StgCatchFrame_exceptions_blocked(frame) == 0) {
        Sp_adj(-1);
        Sp(0) = stg_unblockAsyncExceptionszh_ret_info;
      }
    } else {
      Sp = Sp + SIZEOF_StgCatchSTMFrame;
    }
    /* Ensure that async exceptions are blocked when running the handler.
     */
    if (StgTSO_blocked_exceptions(CurrentTSO) == NULL) {
      StgTSO_blocked_exceptions(CurrentTSO) = END_TSO_QUEUE;
    }

    /* Call the handler, passing the exception value and a realworld
     * token as arguments.
     */
    Sp_adj(-1);
    Sp(0) = R1;
    R1 = handler;
    TICK_UNKNOWN_CALL();
    TICK_SLOW_CALL_pv();
    jump RET_LBL(stg_ap_pv);
}
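
/* The ATOMICALLY_FRAME case above is what makes an exception abort a
   memory transaction: the trec is abandoned before any handler runs, so
   the transaction's writes are never committed.  An illustrative sketch
   (assuming this era's Control.Concurrent.STM API):

	example :: TVar Int -> IO ()
	example v = atomically (do writeTVar v 42
	                           throw (ErrorCall "abort"))
	-- the write to v is discarded when the exception escapes
 */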
raiseIOzh_fast
{
    /* Args :: R1 :: Exception */
    jump raisezh_fast;
}