1 /* -----------------------------------------------------------------------------
2 * $Id: StgMiscClosures.hc,v 1.84 2003/03/27 13:54:32 simonmar Exp $
4 * (c) The GHC Team, 1998-2002
6 * Entry code for various built-in closure types.
8 * ---------------------------------------------------------------------------*/
14 #include "StgMiscClosures.h"
16 #include "StoragePriv.h"
17 #include "Profiling.h"
21 #if defined(GRAN) || defined(PAR)
22 # include "GranSimRts.h" /* for DumpRawGranEvent */
23 # include "StgRun.h" /* for StgReturn and register saving */
30 /* ToDo: make the printing of panics more win32-friendly, i.e.,
31 * pop up some lovely message boxes (as well).
33 #define DUMP_ERRMSG(msg) STGCALL2(fprintf,stderr,msg)
36 Template for the entry code of non-enterable closures.
39 #define NON_ENTERABLE_ENTRY_CODE(type) \
40 IF_(stg_##type##_entry) \
43 STGCALL1(barf, #type " object entered!"); \
48 /* -----------------------------------------------------------------------------
49 Support for the bytecode interpreter.
50 -------------------------------------------------------------------------- */
52 /* 9 bits of return code for constructors created by the interpreter. */
/* Direct-return entry code for constructors created by the bytecode
   interpreter: set the constructor tag from the info table and return
   through the frame on top of the stack. */
FN_(stg_interp_constr_entry)
/* R1 points at the constructor */
/* STGCALL2(fprintf,stderr,"stg_interp_constr_entry (direct return)!\n"); */
/* Pointless, since SET_TAG doesn't do anything */
SET_TAG( GET_TAG(GET_INFO(R1.cl)));
JMP_(ENTRY_CODE((P_)(*Sp)));
/* Vectored returns for interpreter-built constructors with tags 1..8:
   constructor N jumps through slot N-1 of the return vector found via
   the frame on top of the stack. */
FN_(stg_interp_constr1_entry) { FB_ JMP_(RET_VEC((P_)(*Sp),0)); FE_ }
FN_(stg_interp_constr2_entry) { FB_ JMP_(RET_VEC((P_)(*Sp),1)); FE_ }
FN_(stg_interp_constr3_entry) { FB_ JMP_(RET_VEC((P_)(*Sp),2)); FE_ }
FN_(stg_interp_constr4_entry) { FB_ JMP_(RET_VEC((P_)(*Sp),3)); FE_ }
FN_(stg_interp_constr5_entry) { FB_ JMP_(RET_VEC((P_)(*Sp),4)); FE_ }
FN_(stg_interp_constr6_entry) { FB_ JMP_(RET_VEC((P_)(*Sp),5)); FE_ }
FN_(stg_interp_constr7_entry) { FB_ JMP_(RET_VEC((P_)(*Sp),6)); FE_ }
FN_(stg_interp_constr8_entry) { FB_ JMP_(RET_VEC((P_)(*Sp),7)); FE_ }
73 /* Some info tables to be used when compiled code returns a value to
74 the interpreter, i.e. the interpreter pushes one of these onto the
75 stack before entering a value. What the code does is to
76 impedance-match the compiled return convention (in R1p/R1n/F1/D1 etc) to
77 the interpreter's convention (returned value is on top of stack),
78 and then cause the scheduler to enter the interpreter.
80 On entry, the stack (growing down) looks like this:
82 ptr to BCO holding return continuation
83 ptr to one of these info tables.
85 The info table code, both direct and vectored, must:
86 * push R1/F1/D1 on the stack, and its tag if necessary
87 * push the BCO (so it's now on the stack twice)
88 * Yield, ie, go to the scheduler.
90 Scheduler examines the t.o.s, discovers it is a BCO, and proceeds
91 directly to the bytecode interpreter. That pops the top element
92 (the BCO, containing the return continuation), and interprets it.
93 Net result: return continuation gets interpreted, with the
97 ptr to the info table just jumped thru
100 which is just what we want -- the "standard" return layout for the
103 Don't ask me how unboxed tuple returns are supposed to work. We
104 haven't got a good story about that yet.
107 // When the returned value is a pointer in R1...
108 #define STG_CtoI_RET_R1p_Template(label) \
114 Sp[0] = (W_)&stg_enter_info; \
115 JMP_(stg_yield_to_interpreter); \
119 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_ret);
120 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_0_ret);
121 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_1_ret);
122 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_2_ret);
123 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_3_ret);
124 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_4_ret);
125 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_5_ret);
126 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_6_ret);
127 STG_CtoI_RET_R1p_Template(stg_ctoi_ret_R1p_7_ret);
129 VEC_POLY_INFO_TABLE( stg_ctoi_ret_R1p, 0/* special layout */,
130 0/*srt*/, 0/*srt_off*/, 0/*srt_len*/,
133 // When the returned value is a pointer, but unlifted, in R1 ...
134 INFO_TABLE_RET( stg_ctoi_ret_R1unpt_info, stg_ctoi_ret_R1unpt_entry,
135 0/* special layout */,
136 0/*srt*/, 0/*srt_off*/, 0/*srt_len*/, RET_BCO,, IF_, 0, 0);
137 IF_(stg_ctoi_ret_R1unpt_entry)
142 Sp[0] = (W_)&stg_gc_unpt_r1_info;
143 JMP_(stg_yield_to_interpreter);
147 // When the returned value is a non-pointer in R1 ...
148 INFO_TABLE_RET( stg_ctoi_ret_R1n_info, stg_ctoi_ret_R1n_entry,
149 0/* special layout */,
150 0/*srt*/, 0/*srt_off*/, 0/*srt_len*/, RET_BCO,, IF_, 0, 0);
151 IF_(stg_ctoi_ret_R1n_entry)
156 Sp[0] = (W_)&stg_gc_unbx_r1_info;
157 JMP_(stg_yield_to_interpreter);
162 // When the returned value is in F1 ...
163 INFO_TABLE_RET( stg_ctoi_ret_F1_info, stg_ctoi_ret_F1_entry,
164 0/* special layout */,
165 0/*srt*/, 0/*srt_off*/, 0/*srt_len*/, RET_BCO,, IF_, 0, 0);
166 IF_(stg_ctoi_ret_F1_entry)
170 ASSIGN_FLT(Sp+1, F1);
171 Sp[0] = (W_)&stg_gc_f1_info;
172 JMP_(stg_yield_to_interpreter);
176 // When the returned value is in D1 ...
177 INFO_TABLE_RET( stg_ctoi_ret_D1_info, stg_ctoi_ret_D1_entry,
178 0/* special layout */,
179 0/*srt*/, 0/*srt_off*/, 0/*srt_len*/, RET_BCO,, IF_, 0, 0);
180 IF_(stg_ctoi_ret_D1_entry)
183 Sp -= 1 + sizeofW(StgDouble);
184 ASSIGN_DBL(Sp+1, D1);
185 Sp[0] = (W_)&stg_gc_d1_info;
186 JMP_(stg_yield_to_interpreter);
190 // When the returned value is in L1 ...
191 INFO_TABLE_RET( stg_ctoi_ret_L1_info, stg_ctoi_ret_L1_entry,
192 0/* special layout */,
193 0/*srt*/, 0/*srt_off*/, 0/*srt_len*/, RET_BCO,, IF_, 0, 0);
194 IF_(stg_ctoi_ret_L1_entry)
197 Sp -= 1 + sizeofW(StgInt64);
198 ASSIGN_Word64(Sp+1, L1);
199 Sp[0] = (W_)&stg_gc_l1_info;
200 JMP_(stg_yield_to_interpreter);
// When the returned value is a VoidRep ...
205 INFO_TABLE_RET( stg_ctoi_ret_V_info, stg_ctoi_ret_V_entry,
206 0/* special layout */,
207 0/*srt*/, 0/*srt_off*/, 0/*srt_len*/, RET_BCO,, IF_, 0, 0);
208 IF_(stg_ctoi_ret_V_entry)
212 Sp[0] = (W_)&stg_gc_void_info;
213 JMP_(stg_yield_to_interpreter);
217 // Dummy info table pushed on the top of the stack when the interpreter
218 // should apply the BCO on the stack to its arguments, also on the stack.
219 INFO_TABLE_RET( stg_apply_interp_info, stg_apply_interp_entry,
220 0/* special layout */,
221 0/*srt*/, 0/*srt_off*/, 0/*srt_len*/, RET_BCO,, IF_, 0, 0);
222 IF_(stg_apply_interp_entry)
225 // Just in case we end up in here... (we shouldn't)
226 JMP_(stg_yield_to_interpreter);
230 /* -----------------------------------------------------------------------------
232 -------------------------------------------------------------------------- */
234 INFO_TABLE_FUN_GEN(stg_BCO_info,stg_BCO_entry,4,0,
236 ARG_BCO, 0/*dummy arity*/, 0/*dummy bitmap*/, NULL/*slow_apply*/,
237 BCO,,EF_,"BCO","BCO");
240 // entering a BCO means "apply it", same as a function
243 Sp[0] = (W_)&stg_apply_interp_info;
244 JMP_(stg_yield_to_interpreter);
248 /* -----------------------------------------------------------------------------
249 Info tables for indirections.
251 SPECIALISED INDIRECTIONS: we have a specialised indirection for each
252 kind of return (direct, vectored 0-7), so that we can avoid entering
253 the object when we know what kind of return it will do. The update
254 code (Updates.hc) updates objects with the appropriate kind of
255 indirection. We only do this for young-gen indirections.
256 -------------------------------------------------------------------------- */
/* Generic dynamic indirection: follow the indirectee and enter it. */
INFO_TABLE(stg_IND_info,stg_IND_entry,1,0,IND,,IF_,"IND","IND");
TICK_ENT_DYN_IND(Node); /* tick */
R1.p = (P_) ((StgInd*)R1.p)->indirectee;
/* tail-call straight into the target closure; no stack frame pushed */
JMP_(GET_ENTRY(R1.cl));
269 #define IND_SPEC(n,ret) \
270 INFO_TABLE(stg_IND_##n##_info,stg_IND_##n##_entry,1,0,IND,,IF_,"IND","IND"); \
271 IF_(stg_IND_##n##_entry) \
274 TICK_ENT_DYN_IND(Node); /* tick */ \
275 R1.p = (P_) ((StgInd*)R1.p)->indirectee; \
276 TICK_ENT_VIA_NODE(); \
281 IND_SPEC(direct, ENTRY_CODE(Sp[0]))
282 IND_SPEC(0, RET_VEC(Sp[0],0))
283 IND_SPEC(1, RET_VEC(Sp[0],1))
284 IND_SPEC(2, RET_VEC(Sp[0],2))
285 IND_SPEC(3, RET_VEC(Sp[0],3))
286 IND_SPEC(4, RET_VEC(Sp[0],4))
287 IND_SPEC(5, RET_VEC(Sp[0],5))
288 IND_SPEC(6, RET_VEC(Sp[0],6))
289 IND_SPEC(7, RET_VEC(Sp[0],7))
/* Static indirection (left behind when a CAF is updated): follow the
   indirectee and enter it, as for stg_IND. */
INFO_TABLE(stg_IND_STATIC_info,stg_IND_STATIC_entry,1,0,IND_STATIC,,IF_,"IND_STATIC","IND_STATIC");
IF_(stg_IND_STATIC_entry)
TICK_ENT_STATIC_IND(Node); /* tick */
R1.p = (P_) ((StgIndStatic*)R1.p)->indirectee;
JMP_(GET_ENTRY(R1.cl));
302 INFO_TABLE(stg_IND_PERM_info,stg_IND_PERM_entry,1,1,IND_PERM,,IF_,"IND_PERM","IND_PERM");
303 IF_(stg_IND_PERM_entry)
306 /* Don't add INDs to granularity cost */
307 /* Dont: TICK_ENT_STATIC_IND(Node); for ticky-ticky; this ind is here only to help profiling */
309 #if defined(TICKY_TICKY) && !defined(PROFILING)
310 /* TICKY_TICKY && !PROFILING means PERM_IND *replaces* an IND, rather than being extra */
311 TICK_ENT_PERM_IND(R1.p); /* tick */
314 LDV_ENTER((StgInd *)R1.p);
316 /* Enter PAP cost centre -- lexical scoping only */
317 ENTER_CCS_PAP_CL(R1.cl);
319 /* For ticky-ticky, change the perm_ind to a normal ind on first
320 * entry, so the number of ent_perm_inds is the number of *thunks*
321 * entered again, not the number of subsequent entries.
323 * Since this screws up cost centres, we die if profiling and
324 * ticky_ticky are on at the same time. KSW 1999-01.
329 # error Profiling and ticky-ticky do not mix at present!
330 # endif /* PROFILING */
331 SET_INFO((StgInd*)R1.p,&stg_IND_info);
332 #endif /* TICKY_TICKY */
334 R1.p = (P_) ((StgInd*)R1.p)->indirectee;
336 /* Dont: TICK_ENT_VIA_NODE(); for ticky-ticky; as above */
338 #if defined(TICKY_TICKY) && !defined(PROFILING)
342 JMP_(GET_ENTRY(R1.cl));
/* Old-generation indirection (carries a mut_link word, hence 1,1):
   same entry behaviour as stg_IND -- follow the indirectee and enter. */
INFO_TABLE(stg_IND_OLDGEN_info,stg_IND_OLDGEN_entry,1,1,IND_OLDGEN,,IF_,"IND_OLDGEN","IND_OLDGEN");
IF_(stg_IND_OLDGEN_entry)
TICK_ENT_STATIC_IND(Node); /* tick */
R1.p = (P_) ((StgInd*)R1.p)->indirectee;
JMP_(GET_ENTRY(R1.cl));
357 INFO_TABLE(stg_IND_OLDGEN_PERM_info,stg_IND_OLDGEN_PERM_entry,1,1,IND_OLDGEN_PERM,,IF_,"IND_OLDGEN_PERM","IND_OLDGEN_PERM");
358 IF_(stg_IND_OLDGEN_PERM_entry)
361 /* Dont: TICK_ENT_STATIC_IND(Node); for ticky-ticky; this ind is here only to help profiling */
363 #if defined(TICKY_TICKY) && !defined(PROFILING)
364 /* TICKY_TICKY && !PROFILING means PERM_IND *replaces* an IND, rather than being extra */
365 TICK_ENT_PERM_IND(R1.p); /* tick */
368 LDV_ENTER((StgInd *)R1.p);
370 /* Enter PAP cost centre -- lexical scoping only */
371 ENTER_CCS_PAP_CL(R1.cl);
373 /* see comment in IND_PERM */
376 # error Profiling and ticky-ticky do not mix at present!
377 # endif /* PROFILING */
378 SET_INFO((StgInd*)R1.p,&stg_IND_OLDGEN_info);
379 #endif /* TICKY_TICKY */
381 R1.p = (P_) ((StgInd*)R1.p)->indirectee;
383 JMP_(GET_ENTRY(R1.cl));
387 /* -----------------------------------------------------------------------------
388 Entry code for a black hole.
390 Entering a black hole normally causes a cyclic data dependency, but
391 in the concurrent world, black holes are synchronization points,
392 and they are turned into blocking queues when there are threads
393 waiting for the evaluation of the closure to finish.
394 -------------------------------------------------------------------------- */
396 /* Note: a BLACKHOLE and BLACKHOLE_BQ must be big enough to be
397 * overwritten with an indirection/evacuee/catch. Thus we claim it
398 * has 1 non-pointer word of payload (in addition to the pointer word
399 * for the blocking queue in a BQ), which should be big enough for an
400 * old-generation indirection.
403 INFO_TABLE(stg_BLACKHOLE_info, stg_BLACKHOLE_entry,0,2,BLACKHOLE,,IF_,"BLACKHOLE","BLACKHOLE");
404 IF_(stg_BLACKHOLE_entry)
408 /* Before overwriting TSO_LINK */
409 STGCALL3(GranSimBlock,CurrentTSO,CurrentProc,(StgClosure *)R1.p /*Node*/);
414 bdescr *bd = Bdescr(R1.p);
415 if (bd->u.back != (bdescr *)BaseReg) {
416 if (bd->gen_no >= 1 || bd->step->no >= 1) {
417 CMPXCHG(R1.cl->header.info, &stg_BLACKHOLE_info, &stg_WHITEHOLE_info);
419 EXTFUN_RTS(stg_gc_enter_1_hponly);
420 JMP_(stg_gc_enter_1_hponly);
427 // Actually this is not necessary because R1.p is about to be destroyed.
428 LDV_ENTER((StgClosure *)R1.p);
430 /* Put ourselves on the blocking queue for this black hole */
431 #if defined(GRAN) || defined(PAR)
432 // in fact, only difference is the type of the end-of-queue marker!
433 CurrentTSO->link = END_BQ_QUEUE;
434 ((StgBlockingQueue *)R1.p)->blocking_queue = (StgBlockingQueueElement *)CurrentTSO;
436 CurrentTSO->link = END_TSO_QUEUE;
437 ((StgBlockingQueue *)R1.p)->blocking_queue = CurrentTSO;
439 // jot down why and on what closure we are blocked
440 CurrentTSO->why_blocked = BlockedOnBlackHole;
441 CurrentTSO->block_info.closure = R1.cl;
443 /* Change the BLACKHOLE into a BLACKHOLE_BQ */
446 // The size remains the same, so we call LDV_recordDead() - no need to fill slop.
447 LDV_recordDead((StgClosure *)R1.p, BLACKHOLE_sizeW());
450 // Todo: maybe use SET_HDR() and remove LDV_recordCreate()?
452 ((StgBlockingQueue *)R1.p)->header.info = &stg_BLACKHOLE_BQ_info;
454 LDV_recordCreate((StgClosure *)R1.p);
457 // closure is mutable since something has just been added to its BQ
458 recordMutable((StgMutClosure *)R1.cl);
460 // PAR: dumping of event now done in blockThread -- HWL
462 // stg_gen_block is too heavyweight, use a specialised one
467 INFO_TABLE(stg_BLACKHOLE_BQ_info, stg_BLACKHOLE_BQ_entry,1,1,BLACKHOLE_BQ,,IF_,"BLACKHOLE","BLACKHOLE");
468 IF_(stg_BLACKHOLE_BQ_entry)
472 /* Before overwriting TSO_LINK */
473 STGCALL3(GranSimBlock,CurrentTSO,CurrentProc,(StgClosure *)R1.p /*Node*/);
478 bdescr *bd = Bdescr(R1.p);
479 if (bd->u.back != (bdescr *)BaseReg) {
480 if (bd->gen_no >= 1 || bd->step->no >= 1) {
481 CMPXCHG(R1.cl->header.info, &stg_BLACKHOLE_info, &stg_WHITEHOLE_info);
483 EXTFUN_RTS(stg_gc_enter_1_hponly);
484 JMP_(stg_gc_enter_1_hponly);
491 LDV_ENTER((StgClosure *)R1.p);
493 /* Put ourselves on the blocking queue for this black hole */
494 CurrentTSO->link = ((StgBlockingQueue *)R1.p)->blocking_queue;
495 ((StgBlockingQueue *)R1.p)->blocking_queue = CurrentTSO;
496 /* jot down why and on what closure we are blocked */
497 CurrentTSO->why_blocked = BlockedOnBlackHole;
498 CurrentTSO->block_info.closure = R1.cl;
500 ((StgBlockingQueue *)R1.p)->header.info = &stg_BLACKHOLE_BQ_info;
503 /* PAR: dumping of event now done in blockThread -- HWL */
505 /* stg_gen_block is too heavyweight, use a specialised one */
511 Revertible black holes are needed in the parallel world, to handle
512 negative acknowledgements of messages containing updatable closures.
513 The idea is that when the original message is transmitted, the closure
514 is turned into a revertible black hole...an object which acts like a
515 black hole when local threads try to enter it, but which can be reverted
516 back to the original closure if necessary.
518 It's actually a lot like a blocking queue (BQ) entry, because revertible
519 black holes are initially set up with an empty blocking queue.
522 #if defined(PAR) || defined(GRAN)
524 INFO_TABLE(stg_RBH_info, stg_RBH_entry,1,1,RBH,,IF_,"RBH","RBH");
529 /* mainly statistics gathering for GranSim simulation */
530 STGCALL3(GranSimBlock,CurrentTSO,CurrentProc,(StgClosure *)R1.p /*Node*/);
533 /* exactly the same as a BLACKHOLE_BQ_entry -- HWL */
534 /* Put ourselves on the blocking queue for this black hole */
535 CurrentTSO->link = ((StgBlockingQueue *)R1.p)->blocking_queue;
536 ((StgBlockingQueue *)R1.p)->blocking_queue = CurrentTSO;
537 /* jot down why and on what closure we are blocked */
538 CurrentTSO->why_blocked = BlockedOnBlackHole;
539 CurrentTSO->block_info.closure = R1.cl;
541 /* PAR: dumping of event now done in blockThread -- HWL */
543 /* stg_gen_block is too heavyweight, use a specialised one */
548 INFO_TABLE(stg_RBH_Save_0_info, stg_RBH_Save_0_entry,0,2,CONSTR,,IF_,"RBH_Save_0","RBH_Save_0");
549 NON_ENTERABLE_ENTRY_CODE(RBH_Save_0);
551 INFO_TABLE(stg_RBH_Save_1_info, stg_RBH_Save_1_entry,1,1,CONSTR,,IF_,"RBH_Save_1","RBH_Save_1");
552 NON_ENTERABLE_ENTRY_CODE(RBH_Save_1);
554 INFO_TABLE(stg_RBH_Save_2_info, stg_RBH_Save_2_entry,2,0,CONSTR,,IF_,"RBH_Save_2","RBH_Save_2");
555 NON_ENTERABLE_ENTRY_CODE(RBH_Save_2);
556 #endif /* defined(PAR) || defined(GRAN) */
558 /* identical to BLACKHOLEs except for the infotag */
559 INFO_TABLE(stg_CAF_BLACKHOLE_info, stg_CAF_BLACKHOLE_entry,0,2,CAF_BLACKHOLE,,IF_,"CAF_BLACKHOLE","CAF_BLACKHOLE");
560 IF_(stg_CAF_BLACKHOLE_entry)
564 /* mainly statistics gathering for GranSim simulation */
565 STGCALL3(GranSimBlock,CurrentTSO,CurrentProc,(StgClosure *)R1.p /*Node*/);
570 bdescr *bd = Bdescr(R1.p);
571 if (bd->u.back != (bdescr *)BaseReg) {
572 if (bd->gen_no >= 1 || bd->step->no >= 1) {
573 CMPXCHG(R1.cl->header.info, &stg_CAF_BLACKHOLE_info, &stg_WHITEHOLE_info);
575 EXTFUN_RTS(stg_gc_enter_1_hponly);
576 JMP_(stg_gc_enter_1_hponly);
583 LDV_ENTER((StgClosure *)R1.p);
585 // Put ourselves on the blocking queue for this black hole
586 #if defined(GRAN) || defined(PAR)
587 // in fact, only difference is the type of the end-of-queue marker!
588 CurrentTSO->link = END_BQ_QUEUE;
589 ((StgBlockingQueue *)R1.p)->blocking_queue = (StgBlockingQueueElement *)CurrentTSO;
591 CurrentTSO->link = END_TSO_QUEUE;
592 ((StgBlockingQueue *)R1.p)->blocking_queue = CurrentTSO;
594 // jot down why and on what closure we are blocked
595 CurrentTSO->why_blocked = BlockedOnBlackHole;
596 CurrentTSO->block_info.closure = R1.cl;
598 // Change the CAF_BLACKHOLE into a BLACKHOLE_BQ_STATIC
599 ((StgBlockingQueue *)R1.p)->header.info = &stg_BLACKHOLE_BQ_info;
601 // closure is mutable since something has just been added to its BQ
602 recordMutable((StgMutClosure *)R1.cl);
604 // PAR: dumping of event now done in blockThread -- HWL
606 // stg_gen_block is too heavyweight, use a specialised one
611 #ifdef EAGER_BLACKHOLING
612 INFO_TABLE(stg_SE_BLACKHOLE_info, stg_SE_BLACKHOLE_entry,0,2,SE_BLACKHOLE,,IF_,"SE_BLACKHOLE","SE_BLACKHOLE");
613 IF_(stg_SE_BLACKHOLE_entry)
616 STGCALL3(fprintf,stderr,"SE_BLACKHOLE at %p entered!\n",R1.p);
617 STGCALL1(shutdownHaskellAndExit,EXIT_FAILURE);
621 INFO_TABLE(stg_SE_CAF_BLACKHOLE_info, SE_CAF_BLACKHOLE_entry,0,2,SE_CAF_BLACKHOLE,,IF_,"CAF_BLACKHOLE","CAF_BLACKHOLE");
622 IF_(stg_SE_CAF_BLACKHOLE_entry)
625 STGCALL3(fprintf,stderr,"SE_CAF_BLACKHOLE at %p entered!\n",R1.p);
626 STGCALL1(shutdownHaskellAndExit,EXIT_FAILURE);
/* WHITEHOLE: transient marker installed by the CMPXCHG sequences above
   while a closure is being atomically blackholed.  Entering one simply
   re-enters the closure, i.e. spins until the info pointer changes. */
INFO_TABLE(stg_WHITEHOLE_info, stg_WHITEHOLE_entry,0,2,CONSTR_NOCAF_STATIC,,IF_,"WHITEHOLE","WHITEHOLE");
IF_(stg_WHITEHOLE_entry)
JMP_(GET_ENTRY(R1.cl));
641 /* -----------------------------------------------------------------------------
642 Some static info tables for things that don't get entered, and
643 therefore don't need entry code (i.e. boxed but unpointed objects)
644 NON_ENTERABLE_ENTRY_CODE now defined at the beginning of the file
645 -------------------------------------------------------------------------- */
/* Thread State Objects are boxed but unpointed: never entered. */
INFO_TABLE(stg_TSO_info, stg_TSO_entry, 0,0,TSO,,IF_,"TSO","TSO");
NON_ENTERABLE_ENTRY_CODE(TSO);
650 /* -----------------------------------------------------------------------------
651 Evacuees are left behind by the garbage collector. Any attempt to enter
653 -------------------------------------------------------------------------- */
655 INFO_TABLE(stg_EVACUATED_info,stg_EVACUATED_entry,1,0,EVACUATED,,IF_,"EVACUATED","EVACUATED");
656 NON_ENTERABLE_ENTRY_CODE(EVACUATED);
658 /* -----------------------------------------------------------------------------
661 Live weak pointers have a special closure type. Dead ones are just
662 nullary constructors (although they live on the heap - we overwrite
663 live weak pointers with dead ones).
664 -------------------------------------------------------------------------- */
666 INFO_TABLE(stg_WEAK_info,stg_WEAK_entry,0,4,WEAK,,IF_,"WEAK","WEAK");
667 NON_ENTERABLE_ENTRY_CODE(WEAK);
669 // It's important when turning an existing WEAK into a DEAD_WEAK
670 // (which is what finalizeWeak# does) that we don't lose the link
671 // field and break the linked list of weak pointers. Hence, we give
672 // DEAD_WEAK 4 non-pointer fields, the same as WEAK.
674 INFO_TABLE_CONSTR(stg_DEAD_WEAK_info,stg_DEAD_WEAK_entry,0,4,0,CONSTR,,IF_,"DEAD_WEAK","DEAD_WEAK");
675 NON_ENTERABLE_ENTRY_CODE(DEAD_WEAK);
677 /* -----------------------------------------------------------------------------
680 This is a static nullary constructor (like []) that we use to mark an empty
681 finalizer in a weak pointer object.
682 -------------------------------------------------------------------------- */
684 INFO_TABLE_CONSTR(stg_NO_FINALIZER_info,stg_NO_FINALIZER_entry,0,0,0,CONSTR_NOCAF_STATIC,,IF_,"NO_FINALIZER","NO_FINALIZER");
685 NON_ENTERABLE_ENTRY_CODE(NO_FINALIZER);
687 SET_STATIC_HDR(stg_NO_FINALIZER_closure,stg_NO_FINALIZER_info,0/*CC*/,,extern const StgInfoTable)
690 /* -----------------------------------------------------------------------------
691 Foreign Objects are unlifted and therefore never entered.
692 -------------------------------------------------------------------------- */
694 INFO_TABLE(stg_FOREIGN_info,stg_FOREIGN_entry,0,1,FOREIGN,,IF_,"FOREIGN","FOREIGN");
695 NON_ENTERABLE_ENTRY_CODE(FOREIGN);
697 /* -----------------------------------------------------------------------------
698 Stable Names are unlifted too.
699 -------------------------------------------------------------------------- */
701 INFO_TABLE(stg_STABLE_NAME_info,stg_STABLE_NAME_entry,0,1,STABLE_NAME,,IF_,"STABLE_NAME","STABLE_NAME");
702 NON_ENTERABLE_ENTRY_CODE(STABLE_NAME);
704 /* -----------------------------------------------------------------------------
707 There are two kinds of these: full and empty. We need an info table
708 and entry code for each type.
709 -------------------------------------------------------------------------- */
/* MVar info tables: payload is 4 pointer words; MVars are unpointed
   objects and must never be entered. */
INFO_TABLE(stg_FULL_MVAR_info,stg_FULL_MVAR_entry,4,0,MVAR,,IF_,"MVAR","MVAR");
NON_ENTERABLE_ENTRY_CODE(FULL_MVAR);
INFO_TABLE(stg_EMPTY_MVAR_info,stg_EMPTY_MVAR_entry,4,0,MVAR,,IF_,"MVAR","MVAR");
NON_ENTERABLE_ENTRY_CODE(EMPTY_MVAR);
717 /* -----------------------------------------------------------------------------
720 This is a static nullary constructor (like []) that we use to mark the
721 end of a linked TSO queue.
722 -------------------------------------------------------------------------- */
724 INFO_TABLE_CONSTR(stg_END_TSO_QUEUE_info,stg_END_TSO_QUEUE_entry,0,0,0,CONSTR_NOCAF_STATIC,,IF_,"END_TSO_QUEUE","END_TSO_QUEUE");
725 NON_ENTERABLE_ENTRY_CODE(END_TSO_QUEUE);
727 SET_STATIC_HDR(stg_END_TSO_QUEUE_closure,stg_END_TSO_QUEUE_info,0/*CC*/,,extern const StgInfoTable)
730 /* -----------------------------------------------------------------------------
733 Mutable lists (used by the garbage collector) consist of a chain of
734 StgMutClosures connected through their mut_link fields, ending in
735 an END_MUT_LIST closure.
736 -------------------------------------------------------------------------- */
738 INFO_TABLE_CONSTR(stg_END_MUT_LIST_info,stg_END_MUT_LIST_entry,0,0,0,CONSTR_NOCAF_STATIC,,IF_,"END_MUT_LIST","END_MUT_LIST");
739 NON_ENTERABLE_ENTRY_CODE(END_MUT_LIST);
741 SET_STATIC_HDR(stg_END_MUT_LIST_closure,stg_END_MUT_LIST_info,0/*CC*/,,extern const StgInfoTable)
744 INFO_TABLE(stg_MUT_CONS_info, stg_MUT_CONS_entry, 1, 1, MUT_CONS, , IF_, "MUT_CONS", "MUT_CONS");
745 NON_ENTERABLE_ENTRY_CODE(MUT_CONS);
747 /* -----------------------------------------------------------------------------
749 -------------------------------------------------------------------------- */
751 INFO_TABLE_CONSTR(stg_END_EXCEPTION_LIST_info,stg_END_EXCEPTION_LIST_entry,0,0,0,CONSTR_NOCAF_STATIC,,IF_,"END_EXCEPTION_LIST","END_EXCEPTION_LIST");
752 NON_ENTERABLE_ENTRY_CODE(END_EXCEPTION_LIST);
754 SET_STATIC_HDR(stg_END_EXCEPTION_LIST_closure,stg_END_EXCEPTION_LIST_info,0/*CC*/,,extern const StgInfoTable)
757 INFO_TABLE(stg_EXCEPTION_CONS_info, stg_EXCEPTION_CONS_entry, 1, 1, CONSTR, , IF_, "EXCEPTION_CONS", "EXCEPTION_CONS");
758 NON_ENTERABLE_ENTRY_CODE(EXCEPTION_CONS);
760 /* -----------------------------------------------------------------------------
763 These come in two basic flavours: arrays of data (StgArrWords) and arrays of
764 pointers (StgArrPtrs). They all have a similar layout:
766 ___________________________
767 | Info | No. of | data....
769 ---------------------------
771 These are *unpointed* objects: i.e. they cannot be entered.
773 -------------------------------------------------------------------------- */
775 #define ArrayInfo(type) \
776 INFO_TABLE(stg_##type##_info, stg_##type##_entry, 0, 0, type, , IF_,"" # type "","" # type "");
/* Concrete array info tables (see ArrayInfo above); all arrays are
   unpointed and therefore non-enterable. */
ArrayInfo(ARR_WORDS);
NON_ENTERABLE_ENTRY_CODE(ARR_WORDS);
ArrayInfo(MUT_ARR_PTRS);
NON_ENTERABLE_ENTRY_CODE(MUT_ARR_PTRS);
ArrayInfo(MUT_ARR_PTRS_FROZEN);
NON_ENTERABLE_ENTRY_CODE(MUT_ARR_PTRS_FROZEN);
787 /* -----------------------------------------------------------------------------
789 -------------------------------------------------------------------------- */
791 INFO_TABLE(stg_MUT_VAR_info, stg_MUT_VAR_entry, 1, 1, MUT_VAR, , IF_, "MUT_VAR", "MUT_VAR");
792 NON_ENTERABLE_ENTRY_CODE(MUT_VAR);
794 /* -----------------------------------------------------------------------------
797 Entering this closure will just return to the address on the top of the
798 stack. Useful for getting a thread in a canonical form where we can
799 just enter the top stack word to start the thread. (see deleteThread)
800 * -------------------------------------------------------------------------- */
/* Static closure whose entry code just returns to the address on top
   of the stack -- used to put a thread into a canonical form where
   entering the top stack word resumes it (see deleteThread). */
INFO_TABLE( stg_dummy_ret_info, stg_dummy_ret_entry,
0, 0, CONSTR_NOCAF_STATIC, , EF_, "DUMMY_RET", "DUMMY_RET");
STGFUN(stg_dummy_ret_entry)
JMP_(ENTRY_CODE(Sp[0]));
SET_STATIC_HDR(stg_dummy_ret_closure,stg_dummy_ret_info,CCS_DONT_CARE,,extern const StgInfoTable)
814 /* -----------------------------------------------------------------------------
815 CHARLIKE and INTLIKE closures.
817 These are static representations of Chars and small Ints, so that
818 we can remove dynamic Chars and Ints during garbage collection and
819 replace them with references to the static objects.
820 -------------------------------------------------------------------------- */
822 #if defined(INTERPRETER) || defined(ENABLE_WIN32_DLL_SUPPORT)
824 * When sticking the RTS in a DLL, we delay populating the
825 * Charlike and Intlike tables until load-time, which is only
826 * when we've got the real addresses to the C# and I# closures.
829 static INFO_TBL_CONST StgInfoTable czh_static_info;
830 static INFO_TBL_CONST StgInfoTable izh_static_info;
831 #define Char_hash_static_info czh_static_info
832 #define Int_hash_static_info izh_static_info
834 #define Char_hash_static_info GHCziBase_Czh_static_info
835 #define Int_hash_static_info GHCziBase_Izh_static_info
838 #define CHARLIKE_HDR(n) \
840 STATIC_HDR(Char_hash_static_info, /* C# */ \
845 #define INTLIKE_HDR(n) \
847 STATIC_HDR(Int_hash_static_info, /* I# */ \
852 /* put these in the *data* section, since the garbage collector relies
853 * on the fact that static closures live in the data section.
856 /* end the name with _closure, to convince the mangler this is a closure */
858 StgIntCharlikeClosure stg_CHARLIKE_closure[] = {
1117 StgIntCharlikeClosure stg_INTLIKE_closure[] = {
1118 INTLIKE_HDR(-16), /* MIN_INTLIKE == -16 */
1150 INTLIKE_HDR(16) /* MAX_INTLIKE == 16 */