1 /* -----------------------------------------------------------------------------
2 * $Id: HeapStackCheck.hc,v 1.2 1998/12/02 13:28:26 simonm Exp $
4 * Canned Heap-Check and Stack-Check sequences.
6 * ---------------------------------------------------------------------------*/
9 #include "Storage.h" /* for CurrentTSO */
10 #include "StgRun.h" /* for StgReturn and register saving */
11 #include "Schedule.h" /* for context_switch */
12 #include "HeapStackCheck.h"
14 /* Stack/Heap Check Failure
15 * ------------------------
17 * On discovering that a stack or heap check has failed, we do the following:
19 * - If the context_switch flag is set, indicating that there are more
20 * threads waiting to run, we yield to the scheduler
21 * (return ThreadYielding).
23 * - If Hp > HpLim, we've had a heap check failure. This means we've
24 * come to the end of the current heap block, so we try to chain
25 * another block on with ExtendNursery().
27 * - If this succeeds, we carry on without returning to the
30 * - If it fails, we return to the scheduler claiming HeapOverflow
31 * so that a garbage collection can be performed.
33 * - If Hp <= HpLim, it must have been a stack check that failed. In
34 * which case, we return to the scheduler claiming StackOverflow, the
35 * scheduler will either increase the size of our stack, or flag
36 * an error if the stack is already too big.
38 * The effect of checking for context switch only in the heap/stack check
39 * failure code is that we'll switch threads after the current thread has
40 * reached the end of its heap block. If a thread isn't allocating
41 * at all, it won't yield. Hopefully this won't be a problem in practice.
44 /* Remember that the return address is *removed* when returning to a
45 * ThreadRunGHC thread.
51 if (ExtendNursery(Hp,HpLim)) { \
52 if (context_switch) { \
53 R1.i = ThreadYielding; \
56 JMP_(ENTRY_CODE(Sp[-1])); \
59 R1.i = HeapOverflow; \
62 R1.i = StackOverflow; \
65 CurrentTSO->whatNext = ThreadRunGHC; \
70 if (ExtendNursery(Hp,HpLim)) { \
71 if (context_switch) { \
72 R1.i = ThreadYielding; \
76 JMP_(ENTRY_CODE(*R1.p)); \
79 R1.i = HeapOverflow; \
82 R1.i = StackOverflow; \
85 CurrentTSO->whatNext = ThreadEnterGHC; \
90 CurrentTSO->whatNext = ThreadRunGHC; \
91 R1.i = HeapOverflow; \
96 CurrentTSO->whatNext = ThreadRunGHC; \
97 R1.i = StackOverflow; \
100 #define YIELD_GENERIC \
102 CurrentTSO->whatNext = ThreadRunGHC; \
103 R1.i = ThreadYielding; \
106 #define YIELD_TO_HUGS \
108 CurrentTSO->whatNext = ThreadEnterHugs; \
109 R1.i = ThreadYielding; \
112 #define BLOCK_GENERIC \
114 CurrentTSO->whatNext = ThreadRunGHC; \
115 R1.i = ThreadBlocked; \
118 #define BLOCK_ENTER \
120 CurrentTSO->whatNext = ThreadEnterGHC;\
121 R1.i = ThreadBlocked; \
124 /* -----------------------------------------------------------------------------
126 -------------------------------------------------------------------------- */
129 * This one is used when we want to *enter* the top thing on the stack
130 * when we return, instead of the just returning to an address. See
131 * UpdatePAP for an example.
134 EXTFUN(stg_gc_entertop)
141 /* -----------------------------------------------------------------------------
142 Heap checks in non-top-level thunks/functions.
144 In these cases, node always points to the function closure. This gives
145 us an easy way to return to the function: just leave R1 on the top of
146 the stack, and have the scheduler enter it to return.
148 There are canned sequences for 'n' pointer values in registers.
149 -------------------------------------------------------------------------- */
151 EXTFUN(stg_gc_enter_1)
160 /*- 2 Regs--------------------------------------------------------------------*/
162 EXTFUN(stg_gc_enter_2)
172 /*- 3 Regs -------------------------------------------------------------------*/
174 EXTFUN(stg_gc_enter_3)
185 /*- 4 Regs -------------------------------------------------------------------*/
187 EXTFUN(stg_gc_enter_4)
199 /*- 5 Regs -------------------------------------------------------------------*/
201 EXTFUN(stg_gc_enter_5)
214 /*- 6 Regs -------------------------------------------------------------------*/
216 EXTFUN(stg_gc_enter_6)
230 /*- 7 Regs -------------------------------------------------------------------*/
232 EXTFUN(stg_gc_enter_7)
247 /*- 8 Regs -------------------------------------------------------------------*/
249 EXTFUN(stg_gc_enter_8)
265 /* -----------------------------------------------------------------------------
266 For a case expression on a polymorphic or function-typed object, if
267 the default branch (there can only be one branch) of the case fails
268 a heap-check, instead of using stg_gc_enter_1 as normal, we must
269 push a new SEQ frame on the stack, followed by the object returned.
271 Otherwise, if the object is a function, it won't return to the
272 correct activation record on returning from garbage collection. It will
273 assume it has some arguments and apply itself.
274 -------------------------------------------------------------------------- */
279 Sp -= 1 + sizeofW(StgSeqFrame);
280 PUSH_SEQ_FRAME(Sp+1);
286 /* -----------------------------------------------------------------------------
287 Heap checks in Primitive case alternatives
289 A primitive case alternative is entered with a value either in
290 R1, FloatReg1 or D1 depending on the return convention. All the
291 cases are covered below.
292 -------------------------------------------------------------------------- */
294 /*-- No registers live, return address already on the stack: ---------------- */
296 EXTFUN(stg_gc_noregs)
303 /*-- R1 is boxed/unpointed -------------------------------------------------- */
305 INFO_TABLE_SRT_BITMAP(stg_gc_unpt_r1_info, stg_gc_unpt_r1_entry, 0/*BITMAP*/,
306 0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
307 RET_SMALL, const, EF_, 0, 0);
309 EXTFUN(stg_gc_unpt_r1_entry)
314 JMP_(ENTRY_CODE(Sp[0]));
318 EXTFUN(stg_gc_unpt_r1)
323 Sp[0] = (W_)&stg_gc_unpt_r1_info;
328 /*-- R1 is unboxed -------------------------------------------------- */
330 INFO_TABLE_SRT_BITMAP(stg_gc_unbx_r1_info, stg_gc_unbx_r1_entry, 1/*BITMAP*/,
331 0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
332 RET_SMALL, const, EF_, 0, 0);
333 /* the 1 is a bitmap - i.e. 1 non-pointer word on the stack. */
335 EXTFUN(stg_gc_unbx_r1_entry)
340 JMP_(ENTRY_CODE(Sp[0]));
344 EXTFUN(stg_gc_unbx_r1)
349 Sp[0] = (W_)&stg_gc_unbx_r1_info;
354 /*-- F1 contains a float ------------------------------------------------- */
356 INFO_TABLE_SRT_BITMAP(stg_gc_f1_info, stg_gc_f1_entry, 1/*BITMAP*/,
357 0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
358 RET_SMALL, const, EF_, 0, 0);
360 EXTFUN(stg_gc_f1_entry)
365 JMP_(ENTRY_CODE(Sp[0]));
373 ASSIGN_FLT(Sp+1, F1);
374 Sp[0] = (W_)&stg_gc_f1_info;
379 /*-- D1 contains a double ------------------------------------------------- */
381 /* we support doubles of either 1 or 2 words in size */
383 #if SIZEOF_DOUBLE == SIZEOF_VOID_P
384 # define DBL_BITMAP 1
386 # define DBL_BITMAP 3
389 INFO_TABLE_SRT_BITMAP(stg_gc_d1_info, stg_gc_d1_entry, DBL_BITMAP,
390 0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
391 RET_SMALL, const, EF_, 0, 0);
393 EXTFUN(stg_gc_d1_entry)
397 Sp += sizeofW(StgDouble);
398 JMP_(ENTRY_CODE(Sp[0]));
405 Sp -= 1 + sizeofW(StgDouble);
407 Sp[0] = (W_)&stg_gc_d1_info;
412 /* -----------------------------------------------------------------------------
413 Heap checks for unboxed tuple case alternatives
417 - for an unboxed tuple with n components, we rearrange the components
418 with pointers first followed by non-pointers. (NB: not done yet)
420 - The first k components are allocated registers, where k is the
421 number of components that will fit in real registers.
423 - The rest are placed on the stack, with space left for tagging
424 of the non-pointer block if necessary.
426 - On failure of a heap check:
427 - the tag is filled in if necessary,
428 - we load Ri with the address of the continuation,
429 where i is the lowest unused vanilla register.
430 - jump to 'stg_gc_ut_x_y' where x is the number of pointer
431 registers and y the number of non-pointers.
432 - if the required canned sequence isn't available, it will
433 have to be generated at compile-time by the code
434 generator (this will probably happen if there are
435 floating-point values, for instance).
437 For now, just deal with R1, hence R2 contains the sequel address.
438 -------------------------------------------------------------------------- */
440 /*---- R1 contains a pointer: ------ */
442 INFO_TABLE_SRT_BITMAP(stg_gc_ut_1_0_info, stg_gc_ut_1_0_entry, 1/*BITMAP*/,
443 0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
444 RET_SMALL, const, EF_, 0, 0);
446 EXTFUN(stg_gc_ut_1_0_entry)
455 EXTFUN(stg_gc_ut_1_0)
461 Sp[0] = (W_)&stg_gc_ut_1_0_info;
466 /*---- R1 contains a non-pointer: ------ */
468 INFO_TABLE_SRT_BITMAP(stg_gc_ut_0_1_info, stg_gc_ut_0_1_entry, 3/*BITMAP*/,
469 0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
470 RET_SMALL, const, EF_, 0, 0);
472 EXTFUN(stg_gc_ut_0_1_entry)
481 EXTFUN(stg_gc_ut_0_1)
485 Sp[0] = (W_)&stg_gc_ut_0_1_info;
492 /* -----------------------------------------------------------------------------
493 Standard top-level fast-entry heap checks.
495 - we want to make the stack look like it should at the slow entry
496 point for the function. That way we can just push the slow
497 entry point on the stack and return using ThreadRunGHC.
499 - The compiler will generate code to fill in any tags on the stack,
500 in case we arrived directly at the fast entry point and these tags
503 - The rest is hopefully handled by jumping to a canned sequence.
504 We currently have canned sequences for 0-8 pointer registers. If
505 any registers contain non-pointers, we must reduce to an all-pointers
506 situation by pushing as many registers on the stack as necessary.
508 eg. if R1, R2 contain pointers and R3 contains a word, the heap check
509 failure sequence looks like this:
516 after pushing R3, we have pointers in R1 and R2 which corresponds
517 to the 2-pointer canned sequence.
519 -------------------------------------------------------------------------- */
521 /*- 0 Regs -------------------------------------------------------------------*/
532 /*- 1 Reg --------------------------------------------------------------------*/
544 /*- 1 Reg (non-ptr) ----------------------------------------------------------*/
551 Sp[1] = WORD_TAG; /* ToDo: or maybe its an int? */
557 /*- 2 Regs--------------------------------------------------------------------*/
570 /*- 3 Regs -------------------------------------------------------------------*/
584 /*- 4 Regs -------------------------------------------------------------------*/
599 /*- 5 Regs -------------------------------------------------------------------*/
615 /*- 6 Regs -------------------------------------------------------------------*/
632 /*- 7 Regs -------------------------------------------------------------------*/
650 /*- 8 Regs -------------------------------------------------------------------*/
669 /* -----------------------------------------------------------------------------
670 Generic Heap Check Code.
672 Called with Liveness mask in R9, Return address in R10.
673 Stack must be consistent (tagged, and containing all necessary info pointers
676 We also define an stg_gen_yield here, because it's very similar.
677 -------------------------------------------------------------------------- */
679 #if SIZEOF_DOUBLE > SIZEOF_VOID_P
681 #define RESTORE_EVERYTHING \
682 D2 = PK_DBL(Sp+16); \
683 D1 = PK_DBL(Sp+14); \
684 F4 = PK_FLT(Sp+13); \
685 F3 = PK_FLT(Sp+12); \
686 F2 = PK_FLT(Sp+11); \
687 F1 = PK_FLT(Sp+10); \
698 #define RET_OFFSET (-17)
700 #define SAVE_EVERYTHING \
701 ASSIGN_DBL(Sp-2,D2); \
702 ASSIGN_DBL(Sp-4,D1); \
703 ASSIGN_FLT(Sp-5,F4); \
704 ASSIGN_FLT(Sp-6,F3); \
705 ASSIGN_FLT(Sp-7,F2); \
706 ASSIGN_FLT(Sp-8,F1); \
715 Sp[-17] = R10.w; /* return address */ \
716 Sp[-18] = R9.w; /* liveness mask */ \
717 Sp[-19] = (W_)&stg_gen_chk_info; \
722 #define RESTORE_EVERYTHING \
723 D2 = PK_DBL(Sp+15); \
724 D1 = PK_DBL(Sp+14); \
725 F4 = PK_FLT(Sp+13); \
726 F3 = PK_FLT(Sp+12); \
727 F2 = PK_FLT(Sp+11); \
728 F1 = PK_FLT(Sp+10); \
739 #define RET_OFFSET (-15)
741 #define SAVE_EVERYTHING \
742 ASSIGN_DBL(Sp-1,D2); \
743 ASSIGN_DBL(Sp-2,D1); \
744 ASSIGN_FLT(Sp-3,F4); \
745 ASSIGN_FLT(Sp-4,F3); \
746 ASSIGN_FLT(Sp-5,F2); \
747 ASSIGN_FLT(Sp-6,F1); \
756 Sp[-15] = R10.w; /* return address */ \
757 Sp[-16] = R9.w; /* liveness mask */ \
758 Sp[-17] = (W_)&stg_gen_chk_info; \
763 INFO_TABLE_SRT_BITMAP(stg_gen_chk_info, stg_gen_chk_ret, 0,
764 0/*SRT*/, 0/*SRT_OFF*/, 0/*SRT_LEN*/,
765 RET_DYN, const, EF_, 0, 0);
767 /* bitmap in the above info table is unused, the real one is on the stack.
774 JMP_(Sp[RET_OFFSET]);
787 * stg_gen_hp is used by MAYBE_GC, where we can't use GC_GENERIC
788 * because we've just failed doYouWantToGC(), not a standard heap
789 * check. GC_GENERIC would end up returning StackOverflow.
807 FN_(stg_yield_to_Hugs)
810 /* No need to save everything - no live registers */