/* -----------------------------------------------------------------------------
 * $Id: HeapStackCheck.hc,v 1.15 2000/12/14 15:19:48 sewardj Exp $
 *
 * (c) The GHC Team, 1998-1999
 *
 * Canned Heap-Check and Stack-Check sequences.
 *
 * ---------------------------------------------------------------------------*/

#include "Rts.h"
#include "Storage.h"    /* for CurrentTSO */
#include "StgRun.h"     /* for StgReturn and register saving */
#include "Schedule.h"   /* for context_switch */
#include "HeapStackCheck.h"

/* Stack/Heap Check Failure
 * ------------------------
 *
 * On discovering that a stack or heap check has failed, we do the following:
 *
 *    - If the context_switch flag is set, indicating that there are more
 *      threads waiting to run, we yield to the scheduler
 *      (return ThreadYielding).
 *
 *    - If Hp > HpLim, we've had a heap check failure.  This means we've
 *      come to the end of the current heap block, so we try to chain
 *      another block on with ExtendNursery().
 *
 *           - If this succeeds, we carry on without returning to the
 *             scheduler.
 *
 *           - If it fails, we return to the scheduler claiming HeapOverflow
 *             so that a garbage collection can be performed.
 *
 *    - If Hp <= HpLim, it must have been a stack check that failed, in
 *      which case we return to the scheduler claiming StackOverflow; the
 *      scheduler will either increase the size of our stack, or flag
 *      an error if the stack is already too big.
 *
 * The effect of checking for context switch only in the heap/stack check
 * failure code is that we'll switch threads after the current thread has
 * reached the end of its heap block.  If a thread isn't allocating
 * at all, it won't yield.  Hopefully this won't be a problem in practice.
 */
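
/* Illustrative sketch (hypothetical, for exposition only): compiled code
 * typically bumps Hp past its allocation and compares against HpLim,
 * jumping to one of the canned failure entry points below, roughly
 *
 *      Hp = Hp + n;
 *      if (Hp > HpLim) { JMP_(stg_gc_enter_1); }
 *
 * so control arrives at these sequences with Hp already advanced beyond
 * HpLim (or with Sp beyond the stack limit, for a stack check).
 */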

/* Remember that the return address is *removed* when returning to a
 * ThreadRunGHC thread.
 */


#define GC_GENERIC                                      \
  if (Hp > HpLim) {                                     \
    if (ExtendNursery(Hp,HpLim)) {                      \
        if (context_switch) {                           \
            R1.i = ThreadYielding;                      \
        } else {                                        \
           Sp++;                                        \
           JMP_(ENTRY_CODE(Sp[-1]));                    \
        }                                               \
    } else {                                            \
      R1.i = HeapOverflow;                              \
    }                                                   \
  } else {                                              \
    R1.i = StackOverflow;                               \
  }                                                     \
  SaveThreadState();                                    \
  CurrentTSO->what_next = ThreadRunGHC;                 \
  JMP_(StgReturn);

#define GC_ENTER                                        \
  if (Hp > HpLim) {                                     \
    if (ExtendNursery(Hp,HpLim)) {                      \
        if (context_switch) {                           \
            R1.i = ThreadYielding;                      \
        } else {                                        \
           R1.w = *Sp;                                  \
           Sp++;                                        \
           JMP_(ENTRY_CODE(*R1.p));                     \
        }                                               \
    } else {                                            \
      R1.i = HeapOverflow;                              \
    }                                                   \
  } else {                                              \
    R1.i = StackOverflow;                               \
  }                                                     \
  SaveThreadState();                                    \
  CurrentTSO->what_next = ThreadEnterGHC;               \
  JMP_(StgReturn);

#define HP_GENERIC                      \
  SaveThreadState();                    \
  CurrentTSO->what_next = ThreadRunGHC; \
  R1.i = HeapOverflow;                  \
  JMP_(StgReturn);

#define STK_GENERIC                     \
  SaveThreadState();                    \
  CurrentTSO->what_next = ThreadRunGHC; \
  R1.i = StackOverflow;                 \
  JMP_(StgReturn);

#define YIELD_GENERIC                   \
  SaveThreadState();                    \
  CurrentTSO->what_next = ThreadRunGHC; \
  R1.i = ThreadYielding;                \
  JMP_(StgReturn);

#define YIELD_TO_INTERPRETER              \
  SaveThreadState();                      \
  CurrentTSO->what_next = ThreadEnterInterp; \
  R1.i = ThreadYielding;                  \
  JMP_(StgReturn);

#define BLOCK_GENERIC                   \
  SaveThreadState();                    \
  CurrentTSO->what_next = ThreadRunGHC; \
  R1.i = ThreadBlocked;                 \
  JMP_(StgReturn);

#define BLOCK_ENTER                     \
  SaveThreadState();                    \
  CurrentTSO->what_next = ThreadEnterGHC;\
  R1.i = ThreadBlocked;                 \
  JMP_(StgReturn);


/* -----------------------------------------------------------------------------
   Heap Checks
   -------------------------------------------------------------------------- */

/*
 * This one is used when we want to *enter* the top thing on the stack
 * when we return, instead of just returning to an address.  See
 * UpdatePAP for an example.
 */
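
/* Illustrative sketch (hypothetical, for exposition only): the caller is
 * assumed to leave the closure to be entered on top of the stack before
 * jumping here, roughly
 *
 *      Sp -= 1;
 *      Sp[0] = R1.w;
 *      JMP_(stg_gc_entertop);
 *
 * so that the scheduler re-enters that closure when the thread resumes.
 */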

EXTFUN(stg_gc_entertop)
{
  FB_
  GC_ENTER
  FE_
}

/* -----------------------------------------------------------------------------
   Heap checks in non-top-level thunks/functions.

   In these cases, node always points to the function closure.  This gives
   us an easy way to return to the function: just leave R1 on the top of
   the stack, and have the scheduler enter it to return.

   There are canned sequences for 'n' pointer values in registers.
   -------------------------------------------------------------------------- */
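
/* Illustrative sketch (hypothetical, for exposition only): a closure whose
 * check fails with, say, two live pointer registers simply tail-calls the
 * matching canned sequence, e.g.
 *
 *      if (Hp > HpLim) { JMP_(stg_gc_enter_2); }
 *
 * leaving R1 (node) and R2 intact for stg_gc_enter_2 to save below.
 */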

EXTFUN(stg_gc_enter_1)
{
  FB_
  Sp -= 1;
  Sp[0] = R1.w;
  GC_ENTER
  FE_
}

EXTFUN(stg_gc_enter_1_hponly)
{
  FB_
  Sp -= 1;
  Sp[0] = R1.w;
  R1.i = HeapOverflow;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  JMP_(StgReturn);
  FE_
}

/*- 2 Regs -------------------------------------------------------------------*/

EXTFUN(stg_gc_enter_2)
{
  FB_
  Sp -= 2;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  GC_ENTER;
  FE_
}

/*- 3 Regs -------------------------------------------------------------------*/

EXTFUN(stg_gc_enter_3)
{
  FB_
  Sp -= 3;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  GC_ENTER;
  FE_
}

/*- 4 Regs -------------------------------------------------------------------*/

EXTFUN(stg_gc_enter_4)
{
  FB_
  Sp -= 4;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  GC_ENTER;
  FE_
}

/*- 5 Regs -------------------------------------------------------------------*/

EXTFUN(stg_gc_enter_5)
{
  FB_
  Sp -= 5;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  GC_ENTER;
  FE_
}

/*- 6 Regs -------------------------------------------------------------------*/

EXTFUN(stg_gc_enter_6)
{
  FB_
  Sp -= 6;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  GC_ENTER;
  FE_
}

/*- 7 Regs -------------------------------------------------------------------*/

EXTFUN(stg_gc_enter_7)
{
  FB_
  Sp -= 7;
  Sp[6] = R7.w;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  GC_ENTER;
  FE_
}

/*- 8 Regs -------------------------------------------------------------------*/

EXTFUN(stg_gc_enter_8)
{
  FB_
  Sp -= 8;
  Sp[7] = R8.w;
  Sp[6] = R7.w;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  GC_ENTER;
  FE_
}

#if defined(GRAN)
/*
  ToDo: merge the block and yield macros, calling something like BLOCK(N)
        at the end;
*/

/*
   Should we actually ever do a yield in such a case?? -- HWL
*/
EXTFUN(gran_yield_0)
{
  FB_
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

EXTFUN(gran_yield_1)
{
  FB_
  Sp -= 1;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

/*- 2 Regs -------------------------------------------------------------------*/

EXTFUN(gran_yield_2)
{
  FB_
  Sp -= 2;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

/*- 3 Regs -------------------------------------------------------------------*/

EXTFUN(gran_yield_3)
{
  FB_
  Sp -= 3;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

/*- 4 Regs -------------------------------------------------------------------*/

EXTFUN(gran_yield_4)
{
  FB_
  Sp -= 4;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

/*- 5 Regs -------------------------------------------------------------------*/

EXTFUN(gran_yield_5)
{
  FB_
  Sp -= 5;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

/*- 6 Regs -------------------------------------------------------------------*/

EXTFUN(gran_yield_6)
{
  FB_
  Sp -= 6;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

/*- 7 Regs -------------------------------------------------------------------*/

EXTFUN(gran_yield_7)
{
  FB_
  Sp -= 7;
  Sp[6] = R7.w;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

/*- 8 Regs -------------------------------------------------------------------*/

EXTFUN(gran_yield_8)
{
  FB_
  Sp -= 8;
  Sp[7] = R8.w;
  Sp[6] = R7.w;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadYielding;
  JMP_(StgReturn);
  FE_
}

/* the same routines but with a block rather than a yield */

EXTFUN(gran_block_1)
{
  FB_
  Sp -= 1;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

/*- 2 Regs -------------------------------------------------------------------*/

EXTFUN(gran_block_2)
{
  FB_
  Sp -= 2;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

/*- 3 Regs -------------------------------------------------------------------*/

EXTFUN(gran_block_3)
{
  FB_
  Sp -= 3;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

/*- 4 Regs -------------------------------------------------------------------*/

EXTFUN(gran_block_4)
{
  FB_
  Sp -= 4;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

/*- 5 Regs -------------------------------------------------------------------*/

EXTFUN(gran_block_5)
{
  FB_
  Sp -= 5;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

/*- 6 Regs -------------------------------------------------------------------*/

EXTFUN(gran_block_6)
{
  FB_
  Sp -= 6;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

/*- 7 Regs -------------------------------------------------------------------*/

EXTFUN(gran_block_7)
{
  FB_
  Sp -= 7;
  Sp[6] = R7.w;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

/*- 8 Regs -------------------------------------------------------------------*/

EXTFUN(gran_block_8)
{
  FB_
  Sp -= 8;
  Sp[7] = R8.w;
  Sp[6] = R7.w;
  Sp[5] = R6.w;
  Sp[4] = R5.w;
  Sp[3] = R4.w;
  Sp[2] = R3.w;
  Sp[1] = R2.w;
  Sp[0] = R1.w;
  SaveThreadState();
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

#endif

#if 0 && defined(PAR)

/*
  Similar to stg_block_1 (called via StgMacro BLOCK_NP) but separates the
  saving of the thread state from the actual jump via an StgReturn.
  We need this separation because we call RTS routines in blocking entry codes
  before jumping back into the RTS (see parallel/FetchMe.hc).
*/

EXTFUN(par_block_1_no_jump)
{
  FB_
  Sp -= 1;
  Sp[0] = R1.w;
  SaveThreadState();
  FE_
}

EXTFUN(par_jump)
{
  FB_
  CurrentTSO->what_next = ThreadEnterGHC;
  R1.i = ThreadBlocked;
  JMP_(StgReturn);
  FE_
}

#endif

/* -----------------------------------------------------------------------------
   For a case expression on a polymorphic or function-typed object, if
   the default branch (there can only be one branch) of the case fails
   a heap-check, instead of using stg_gc_enter_1 as normal, we must
   push a new SEQ frame on the stack, followed by the object returned.

   Otherwise, if the object is a function, it won't return to the
   correct activation record on returning from garbage collection.  It will
   assume it has some arguments and apply itself.
   -------------------------------------------------------------------------- */

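/* Illustrative sketch (for exposition only): after stg_gc_seq_1 below has
 * run, the stack looks roughly like
 *
 *      Sp[0]    : the object (old R1), which the scheduler will enter
 *      Sp[1..]  : a SEQ frame
 *      ...      : the case continuation
 *
 * so a function value returns through the SEQ frame to the case
 * continuation instead of trying to apply itself to whatever happens to
 * be above it on the stack.
 */
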
EXTFUN(stg_gc_seq_1)
{
  FB_
  Sp -= 1 + sizeofW(StgSeqFrame);
  PUSH_SEQ_FRAME(Sp+1);
  *Sp = R1.w;
  GC_ENTER;
  FE_
}

/* -----------------------------------------------------------------------------
   Heap checks in Primitive case alternatives

   A primitive case alternative is entered with a value either in
   R1, FloatReg1 or D1 depending on the return convention.  All the
   cases are covered below.
   -------------------------------------------------------------------------- */
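
/* Illustrative sketch (hypothetical, for exposition only): an alternative
 * entered with an unboxed value in R1 that then fails its heap check
 * simply jumps to the matching canned sequence, e.g.
 *
 *      if (Hp > HpLim) { JMP_(stg_gc_unbx_r1); }
 *
 * and similarly stg_gc_f1 / stg_gc_d1 for float and double returns.
 */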

/*-- No registers live (probably a void return) ----------------------------- */

/* If we change the policy for thread startup to *not* remove the
 * return address from the stack, we can get rid of this little
 * function/info table...
 */
INFO_TABLE_SRT_BITMAP(stg_gc_noregs_ret_info, stg_gc_noregs_ret, 0/*BITMAP*/,
                      0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
                      RET_SMALL,, EF_, 0, 0);

EXTFUN(stg_gc_noregs_ret)
{
  FB_
  JMP_(ENTRY_CODE(Sp[0]));
  FE_
}

EXTFUN(stg_gc_noregs)
{
  FB_
  Sp -= 1;
  Sp[0] = (W_)&stg_gc_noregs_ret_info;
  GC_GENERIC
  FE_
}

/*-- R1 is boxed/unpointed -------------------------------------------------- */

INFO_TABLE_SRT_BITMAP(stg_gc_unpt_r1_info, stg_gc_unpt_r1_entry, 0/*BITMAP*/,
                      0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
                      RET_SMALL,, EF_, 0, 0);

EXTFUN(stg_gc_unpt_r1_entry)
{
  FB_
  R1.w = Sp[0];
  Sp += 1;
  JMP_(ENTRY_CODE(Sp[0]));
  FE_
}

EXTFUN(stg_gc_unpt_r1)
{
  FB_
  Sp -= 2;
  Sp[1] = R1.w;
  Sp[0] = (W_)&stg_gc_unpt_r1_info;
  GC_GENERIC
  FE_
}

/*-- R1 is unboxed -------------------------------------------------- */

INFO_TABLE_SRT_BITMAP(stg_gc_unbx_r1_info, stg_gc_unbx_r1_entry, 1/*BITMAP*/,
                      0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
                      RET_SMALL,, EF_, 0, 0);
/* the 1 is a bitmap - i.e. 1 non-pointer word on the stack. */

EXTFUN(stg_gc_unbx_r1_entry)
{
  FB_
  R1.w = Sp[0];
  Sp += 1;
  JMP_(ENTRY_CODE(Sp[0]));
  FE_
}

EXTFUN(stg_gc_unbx_r1)
{
  FB_
  Sp -= 2;
  Sp[1] = R1.w;
  Sp[0] = (W_)&stg_gc_unbx_r1_info;
  GC_GENERIC
  FE_
}

/*-- F1 contains a float ------------------------------------------------- */

INFO_TABLE_SRT_BITMAP(stg_gc_f1_info, stg_gc_f1_entry, 1/*BITMAP*/,
                      0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
                      RET_SMALL,, EF_, 0, 0);

EXTFUN(stg_gc_f1_entry)
{
  FB_
  F1 = PK_FLT(Sp);
  Sp += 1;
  JMP_(ENTRY_CODE(Sp[0]));
  FE_
}

EXTFUN(stg_gc_f1)
{
  FB_
  Sp -= 2;
  ASSIGN_FLT(Sp+1, F1);
  Sp[0] = (W_)&stg_gc_f1_info;
  GC_GENERIC
  FE_
}

/*-- D1 contains a double ------------------------------------------------- */

/* we support doubles of either 1 or 2 words in size */

#if SIZEOF_DOUBLE == SIZEOF_VOID_P
#  define DBL_BITMAP 1
#else
#  define DBL_BITMAP 3
#endif

INFO_TABLE_SRT_BITMAP(stg_gc_d1_info, stg_gc_d1_entry, DBL_BITMAP,
                      0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
                      RET_SMALL,, EF_, 0, 0);

EXTFUN(stg_gc_d1_entry)
{
  FB_
  D1 = PK_DBL(Sp);
  Sp += sizeofW(StgDouble);
  JMP_(ENTRY_CODE(Sp[0]));
  FE_
}

EXTFUN(stg_gc_d1)
{
  FB_
  Sp -= 1 + sizeofW(StgDouble);
  ASSIGN_DBL(Sp+1,D1);
  Sp[0] = (W_)&stg_gc_d1_info;
  GC_GENERIC
  FE_
}

/* -----------------------------------------------------------------------------
   Heap checks for unboxed tuple case alternatives

   The story is:

      - for an unboxed tuple with n components, we rearrange the components
        with pointers first followed by non-pointers. (NB: not done yet)

      - The first k components are allocated registers, where k is the
        number of components that will fit in real registers.

      - The rest are placed on the stack, with space left for tagging
        of the non-pointer block if necessary.

      - On failure of a heap check:
                - the tag is filled in if necessary,
                - we load Ri with the address of the continuation,
                  where i is the lowest unused vanilla register.
                - jump to 'stg_gc_ut_x_y' where x is the number of pointer
                  registers and y the number of non-pointers.
                - if the required canned sequence isn't available, it will
                  have to be generated at compile-time by the code
                  generator (this will probably happen if there are
                  floating-point values, for instance).

   For now, just deal with R1, hence R2 contains the sequel address.
   -------------------------------------------------------------------------- */
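
/* Illustrative sketch (hypothetical, for exposition only): for an
 * alternative returning an unboxed pair whose pointer component is in R1,
 * the compiler loads the continuation address into R2 (the lowest unused
 * vanilla register) and jumps to the matching canned sequence:
 *
 *      R2.w = (W_)&alt_continuation;     (hypothetical label)
 *      JMP_(stg_gc_ut_1_0);
 */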

/*---- R1 contains a pointer: ------ */

INFO_TABLE_SRT_BITMAP(stg_gc_ut_1_0_info, stg_gc_ut_1_0_entry, 1/*BITMAP*/,
                      0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
                      RET_SMALL,, EF_, 0, 0);

EXTFUN(stg_gc_ut_1_0_entry)
{
  FB_
  R1.w = Sp[1];
  Sp += 2;
  JMP_(ENTRY_CODE(Sp[-2]));
  FE_
}

EXTFUN(stg_gc_ut_1_0)
{
  FB_
  Sp -= 3;
  Sp[2] = R1.w;
  Sp[1] = R2.w;
  Sp[0] = (W_)&stg_gc_ut_1_0_info;
  GC_GENERIC
  FE_
}

/*---- R1 contains a non-pointer: ------ */

INFO_TABLE_SRT_BITMAP(stg_gc_ut_0_1_info, stg_gc_ut_0_1_entry, 3/*BITMAP*/,
                      0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
                      RET_SMALL,, EF_, 0, 0);

EXTFUN(stg_gc_ut_0_1_entry)
{
  FB_
  R1.w = Sp[1];
  Sp += 2;
  JMP_(ENTRY_CODE(Sp[-2]));
  FE_
}

EXTFUN(stg_gc_ut_0_1)
{
  FB_
  Sp -= 3;
  Sp[0] = (W_)&stg_gc_ut_0_1_info;
  Sp[1] = R2.w;
  Sp[2] = R1.w;
  GC_GENERIC
  FE_
}

/* -----------------------------------------------------------------------------
   Standard top-level fast-entry heap checks.

   - we want to make the stack look like it should at the slow entry
     point for the function.  That way we can just push the slow
     entry point on the stack and return using ThreadRunGHC.

   - The compiler will generate code to fill in any tags on the stack,
     in case we arrived directly at the fast entry point and these tags
     aren't present.

   - The rest is hopefully handled by jumping to a canned sequence.
     We currently have canned sequences for 0-8 pointer registers.  If
     any registers contain non-pointers, we must reduce to an all-pointers
     situation by pushing as many registers on the stack as necessary.

     eg. if R1, R2 contain pointers and R3 contains a word, the heap check
         failure sequence looks like this:

                Sp[-1] = R3.w;
                Sp[-2] = WORD_TAG;
                Sp -= 2;
                JMP_(stg_chk_2)

          after pushing R3, we have pointers in R1 and R2 which corresponds
          to the 2-pointer canned sequence.

  -------------------------------------------------------------------------- */

/*- 0 Regs -------------------------------------------------------------------*/

EXTFUN(stg_chk_0)
{
  FB_
  Sp -= 1;
  Sp[0] = R1.w;
  GC_GENERIC;
  FE_
}

/*- 1 Reg --------------------------------------------------------------------*/

EXTFUN(stg_chk_1)
{
  FB_
  Sp -= 2;
  Sp[1] = R1.w;
  Sp[0] = R2.w;
  GC_GENERIC;
  FE_
}

/*- 1 Reg (non-ptr) ----------------------------------------------------------*/

EXTFUN(stg_chk_1n)
{
  FB_
  Sp -= 3;
  Sp[2] = R1.w;
  Sp[1] = WORD_TAG; /* ToDo: or maybe it's an int? */
  Sp[0] = R2.w;
  GC_GENERIC;
  FE_
}

/*- 2 Regs -------------------------------------------------------------------*/

EXTFUN(stg_chk_2)
{
  FB_
  Sp -= 3;
  Sp[2] = R2.w;
  Sp[1] = R1.w;
  Sp[0] = R3.w;
  GC_GENERIC;
  FE_
}

/*- 3 Regs -------------------------------------------------------------------*/

EXTFUN(stg_chk_3)
{
  FB_
  Sp -= 4;
  Sp[3] = R3.w;
  Sp[2] = R2.w;
  Sp[1] = R1.w;
  Sp[0] = R4.w;
  GC_GENERIC;
  FE_
}

/*- 4 Regs -------------------------------------------------------------------*/

EXTFUN(stg_chk_4)
{
  FB_
  Sp -= 5;
  Sp[4] = R4.w;
  Sp[3] = R3.w;
  Sp[2] = R2.w;
  Sp[1] = R1.w;
  Sp[0] = R5.w;
  GC_GENERIC;
  FE_
}

/*- 5 Regs -------------------------------------------------------------------*/

EXTFUN(stg_chk_5)
{
  FB_
  Sp -= 6;
  Sp[5] = R5.w;
  Sp[4] = R4.w;
  Sp[3] = R3.w;
  Sp[2] = R2.w;
  Sp[1] = R1.w;
  Sp[0] = R6.w;
  GC_GENERIC;
  FE_
}

/*- 6 Regs -------------------------------------------------------------------*/

EXTFUN(stg_chk_6)
{
  FB_
  Sp -= 7;
  Sp[6] = R6.w;
  Sp[5] = R5.w;
  Sp[4] = R4.w;
  Sp[3] = R3.w;
  Sp[2] = R2.w;
  Sp[1] = R1.w;
  Sp[0] = R7.w;
  GC_GENERIC;
  FE_
}

/*- 7 Regs -------------------------------------------------------------------*/

EXTFUN(stg_chk_7)
{
  FB_
  Sp -= 8;
  Sp[7] = R7.w;
  Sp[6] = R6.w;
  Sp[5] = R5.w;
  Sp[4] = R4.w;
  Sp[3] = R3.w;
  Sp[2] = R2.w;
  Sp[1] = R1.w;
  Sp[0] = R8.w;
  GC_GENERIC;
  FE_
}

/*- 8 Regs -------------------------------------------------------------------*/

EXTFUN(stg_chk_8)
{
  FB_
  Sp -= 9;
  Sp[8] = R8.w;
  Sp[7] = R7.w;
  Sp[6] = R6.w;
  Sp[5] = R5.w;
  Sp[4] = R4.w;
  Sp[3] = R3.w;
  Sp[2] = R2.w;
  Sp[1] = R1.w;
  Sp[0] = R9.w;
  GC_GENERIC;
  FE_
}

/* -----------------------------------------------------------------------------
   Generic Heap Check Code.

   Called with Liveness mask in R9, Return address in R10.
   Stack must be consistent (tagged, and containing all necessary info pointers
   to relevant SRTs).

   We also define an stg_gen_yield here, because it's very similar.
   -------------------------------------------------------------------------- */
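
/* Illustrative sketch (hypothetical, for exposition only): a caller with
 * live registers loads a liveness mask into R9 and its (direct) return
 * address into R10 before jumping here, roughly
 *
 *      R9.w  = (W_)liveness_mask;        (hypothetical mask value)
 *      R10.w = (W_)&return_point;        (hypothetical label)
 *      JMP_(stg_gen_chk);
 *
 * stg_gen_chk_ret later jumps straight back to that address.
 */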

#if SIZEOF_DOUBLE > SIZEOF_VOID_P

#define RESTORE_EVERYTHING                      \
    D2   = PK_DBL(Sp+16);                       \
    D1   = PK_DBL(Sp+14);                       \
    F4   = PK_FLT(Sp+13);                       \
    F3   = PK_FLT(Sp+12);                       \
    F2   = PK_FLT(Sp+11);                       \
    F1   = PK_FLT(Sp+10);                       \
    R8.w = Sp[9];                               \
    R7.w = Sp[8];                               \
    R6.w = Sp[7];                               \
    R5.w = Sp[6];                               \
    R4.w = Sp[5];                               \
    R3.w = Sp[4];                               \
    R2.w = Sp[3];                               \
    R1.w = Sp[2];                               \
    Sp += 18;

#define RET_OFFSET (-17)

#define SAVE_EVERYTHING                         \
    ASSIGN_DBL(Sp-2,D2);                        \
    ASSIGN_DBL(Sp-4,D1);                        \
    ASSIGN_FLT(Sp-5,F4);                        \
    ASSIGN_FLT(Sp-6,F3);                        \
    ASSIGN_FLT(Sp-7,F2);                        \
    ASSIGN_FLT(Sp-8,F1);                        \
    Sp[-9]  = R8.w;                             \
    Sp[-10] = R7.w;                             \
    Sp[-11] = R6.w;                             \
    Sp[-12] = R5.w;                             \
    Sp[-13] = R4.w;                             \
    Sp[-14] = R3.w;                             \
    Sp[-15] = R2.w;                             \
    Sp[-16] = R1.w;                             \
    Sp[-17] = R10.w;    /* return address */    \
    Sp[-18] = R9.w;     /* liveness mask  */    \
    Sp[-19] = (W_)&stg_gen_chk_info;            \
    Sp -= 19;

#else

#define RESTORE_EVERYTHING                      \
    D2   = PK_DBL(Sp+15);                       \
    D1   = PK_DBL(Sp+14);                       \
    F4   = PK_FLT(Sp+13);                       \
    F3   = PK_FLT(Sp+12);                       \
    F2   = PK_FLT(Sp+11);                       \
    F1   = PK_FLT(Sp+10);                       \
    R8.w = Sp[9];                               \
    R7.w = Sp[8];                               \
    R6.w = Sp[7];                               \
    R5.w = Sp[6];                               \
    R4.w = Sp[5];                               \
    R3.w = Sp[4];                               \
    R2.w = Sp[3];                               \
    R1.w = Sp[2];                               \
    Sp += 16;

#define RET_OFFSET (-15)

#define SAVE_EVERYTHING                         \
    ASSIGN_DBL(Sp-1,D2);                        \
    ASSIGN_DBL(Sp-2,D1);                        \
    ASSIGN_FLT(Sp-3,F4);                        \
    ASSIGN_FLT(Sp-4,F3);                        \
    ASSIGN_FLT(Sp-5,F2);                        \
    ASSIGN_FLT(Sp-6,F1);                        \
    Sp[-7]  = R8.w;                             \
    Sp[-8]  = R7.w;                             \
    Sp[-9]  = R6.w;                             \
    Sp[-10] = R5.w;                             \
    Sp[-11] = R4.w;                             \
    Sp[-12] = R3.w;                             \
    Sp[-13] = R2.w;                             \
    Sp[-14] = R1.w;                             \
    Sp[-15] = R10.w;    /* return address */    \
    Sp[-16] = R9.w;     /* liveness mask  */    \
    Sp[-17] = (W_)&stg_gen_chk_info;            \
    Sp -= 17;

#endif

INFO_TABLE_SRT_BITMAP(stg_gen_chk_info, stg_gen_chk_ret, 0,
                      0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
                      RET_DYN,, EF_, 0, 0);

/* The bitmap in the above info table is unused; the real one is on the stack.
 */

FN_(stg_gen_chk_ret)
{
  FB_
  RESTORE_EVERYTHING;
  JMP_(Sp[RET_OFFSET]); /* NO ENTRY_CODE() - this is a direct ret address */
  FE_
}

FN_(stg_gen_chk)
{
  FB_
  SAVE_EVERYTHING;
  GC_GENERIC
  FE_
}

/*
 * stg_gen_hp is used by MAYBE_GC, where we can't use GC_GENERIC
 * because we've just failed doYouWantToGC(), not a standard heap
 * check.  GC_GENERIC would end up returning StackOverflow.
 */
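
/* Illustrative sketch (hypothetical, for exposition only): a MAYBE_GC-style
 * site might, after doYouWantToGC() says yes, set up R9/R10 exactly as for
 * stg_gen_chk and jump here instead, e.g.
 *
 *      if (doYouWantToGC()) {
 *          R9.w  = (W_)liveness_mask;      (hypothetical mask value)
 *          R10.w = (W_)&reentry_point;     (hypothetical label)
 *          JMP_(stg_gen_hp);
 *      }
 */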
FN_(stg_gen_hp)
{
  FB_
  SAVE_EVERYTHING;
  HP_GENERIC
  FE_
}

/* -----------------------------------------------------------------------------
   Yields
   -------------------------------------------------------------------------- */

FN_(stg_gen_yield)
{
  FB_
  SAVE_EVERYTHING;
  YIELD_GENERIC
  FE_
}

FN_(stg_yield_noregs)
{
  FB_
  Sp--;
  Sp[0] = (W_)&stg_gc_noregs_ret_info;
  YIELD_GENERIC;
  FE_
}

FN_(stg_yield_to_interpreter)
{
  FB_
  /* No need to save everything - no live registers */
  YIELD_TO_INTERPRETER
  FE_
}

/* -----------------------------------------------------------------------------
   Blocks
   -------------------------------------------------------------------------- */

FN_(stg_gen_block)
{
  FB_
  SAVE_EVERYTHING;
  BLOCK_GENERIC
  FE_
}

FN_(stg_block_noregs)
{
  FB_
  Sp--;
  Sp[0] = (W_)&stg_gc_noregs_ret_info;
  BLOCK_GENERIC;
  FE_
}

FN_(stg_block_1)
{
  FB_
  Sp--;
  Sp[0] = R1.w;
  BLOCK_ENTER;
  FE_
}