1 %****************************************************************************
3 The files SMevac.lc and SMscav.lhc contain the basic routines required
4 for two-space copying garbage collection.
6 Two files are required as the evac routines are conventional call/return
7 routines while the scavenge routines are continuation routines.
9 This file SMevac.lc contains the evacuation routines ...
11 See SMscav.lhc for calling convention documentation.
13 %****************************************************************************
17 #include "SMinternal.h"
19 #if defined(_INFO_COPYING)
/* Moves ToHp to point at the info pointer of the new to-space closure.
   The (size) argument is unused here; it is accepted so START_ALLOC /
   FINISH_ALLOC form a matched pair around the copy loop. */
#define START_ALLOC(size)       ToHp += 1

/* Moves ToHp to point to the last word allocated in to-space:
   (FIXED_HS-1) remaining header words plus (size) body words. */
#define FINISH_ALLOC(size)      ToHp += (FIXED_HS-1) + (size)

/* Copy the ith word (starting at 0) from the closure being evacuated
   (`evac`) to the to-space allocation pointer (`ToHp`).
   NOTE(review): relies on `evac` and `ToHp` being in scope at every
   expansion site. */
#define COPY_WORD(position)     ToHp[position] = evac[position]

/* Copy the ith ptr (starting at 0), adjusting by offset -- used when
   copying internal pointers that must be relocated by the from/to
   space displacement. */
#define ADJUST_WORD(pos,off)    ((PP_)ToHp)[pos] = ((PP_)evac)[pos] + (off)

/* Copy the nth free var word in a SPEC closure (starting at 1);
   SPEC_HS is the SPEC-closure header size. */
#define SPEC_COPY_FREE_VAR(n)   COPY_WORD((SPEC_HS-1) + (n))
/* Copy the fixed header of a closure, one COPY_WORD per header word.
   The three definitions below are alternative arms of an off-screen
   `#if FIXED_HS == 1 / 2 / 3` chain -- TODO confirm against the full
   file; only one is active per build. */
#define COPY_FIXED_HDR          COPY_WORD(0)
#define COPY_FIXED_HDR          COPY_WORD(0);COPY_WORD(1)
#define COPY_FIXED_HDR          COPY_WORD(0);COPY_WORD(1);COPY_WORD(2)
/* I don't think this will be needed (ToDo: #error?) */
#endif /* FIXED_HS != 1, 2, or 3 */
52 /*** DEBUGGING MACROS ***/
/* Tracing macros for evacuation, enabled by the minor-GC trace flag.
   NOTE(review): each expands to a braceless `if (flag) fprintf(...)`;
   they are only safe when used as a complete statement -- an `else`
   immediately following a use would bind to the hidden `if` (classic
   dangling-else hazard).  Confirm no such call sites exist. */

/* Trace a fixed-size evacuation: from-addr, to-addr, info ptr, size. */
#define DEBUG_EVAC(sizevar) \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: 0x%lx -> 0x%lx, info 0x%lx, size %ld\n", \
                evac, ToHp, INFO_PTR(evac), sizevar)

/* Trace a DYN closure evacuation (size read out of the closure). */
#define DEBUG_EVAC_DYN \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: 0x%lx -> 0x%lx, Dyn info 0x%lx, size %lu\n", \
                evac, ToHp, INFO_PTR(evac), DYN_CLOSURE_SIZE(evac))

/* Trace a TUPLE closure evacuation. */
#define DEBUG_EVAC_TUPLE \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: 0x%lx -> 0x%lx, Tuple info 0x%lx, size %lu\n", \
                evac, ToHp, INFO_PTR(evac), TUPLE_CLOSURE_SIZE(evac))

/* Trace a MUTUPLE (mutable tuple) closure evacuation. */
#define DEBUG_EVAC_MUTUPLE \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: 0x%lx -> 0x%lx, MuTuple info 0x%lx, size %lu\n", \
                evac, ToHp, INFO_PTR(evac), MUTUPLE_CLOSURE_SIZE(evac))

/* Trace a DATA (pointer-free) closure evacuation. */
#define DEBUG_EVAC_DATA \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: 0x%lx -> 0x%lx, Data info 0x%lx, size %lu\n", \
                evac, ToHp, INFO_PTR(evac), DATA_CLOSURE_SIZE(evac))

/* Trace a black-hole evacuation (caller supplies the BH size). */
#define DEBUG_EVAC_BH(sizevar) \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: 0x%lx -> 0x%lx, BH info 0x%lx, size %ld\n", \
                evac, ToHp, INFO_PTR(evac), sizevar)
/* Trace hitting an already-forwarded closure (no copy performed). */
#define DEBUG_EVAC_FORWARD \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Forward 0x%lx -> 0x%lx, info 0x%lx\n", \
                evac, FORWARD_ADDRESS(evac), INFO_PTR(evac))

/* Trace shorting-out an indirection: the target is evacuated instead. */
#define DEBUG_EVAC_IND1 \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Indirection 0x%lx -> Evac(0x%lx), info 0x%lx\n", \
                evac, IND_CLOSURE_PTR(evac), INFO_PTR(evac))

/* Trace completion of the indirection shortcut (final address). */
#define DEBUG_EVAC_IND2 \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Indirection Done -> 0x%lx\n", evac)

/* Trace a permanent indirection (profiling/ticky builds keep these). */
#define DEBUG_EVAC_PERM_IND \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Permanent Indirection 0x%lx -> Evac(0x%lx), info 0x%lx\n", \
                evac, IND_CLOSURE_PTR(evac), INFO_PTR(evac))

/* Trace start of CAF evacuation: the referenced closure is evacuated. */
#define DEBUG_EVAC_CAF_EVAC1 \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Caf 0x%lx -> Evac(0x%lx), info 0x%lx\n", \
                evac, IND_CLOSURE_PTR(evac), INFO_PTR(evac))

/* Trace end of CAF evacuation (final address). */
#define DEBUG_EVAC_CAF_EVAC2 \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Caf Done -> 0x%lx\n", evac)

/* Trace a CAF whose referenced closure is already evacuated. */
#define DEBUG_EVAC_CAF_RET \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Caf 0x%lx -> 0x%lx, info 0x%lx\n", \
                evac, IND_CLOSURE_PTR(evac), INFO_PTR(evac))

/* Trace a static closure: its address is returned unchanged. */
#define DEBUG_EVAC_STAT \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Static 0x%lx -> 0x%lx, info 0x%lx\n", \
                evac, evac, INFO_PTR(evac))

/* Trace a CONST closure: replaced by the static copy in its info table. */
#define DEBUG_EVAC_CONST \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: Const 0x%lx -> 0x%lx, info 0x%lx\n", \
                evac, CONST_STATIC_CLOSURE(INFO_PTR(evac)), INFO_PTR(evac))
/* Trace replacement of a CHARLIKE closure by its shared static copy.
   Output layout matches DEBUG_EVAC_INTLIKE_TO_STATIC below:
   "(value) old-addr -> static-addr, info".
   Fixed: the first two fprintf arguments were swapped -- the closure
   address was printed as the character value and the character value
   as the old address. */
#define DEBUG_EVAC_CHARLIKE \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: CharLike (%lx) 0x%lx -> 0x%lx, info 0x%lx\n", \
                CHARLIKE_VALUE(evac), evac, CHARLIKE_CLOSURE(CHARLIKE_VALUE(evac)), INFO_PTR(evac))
/* Trace replacement of an in-range INTLIKE closure by its static copy:
   "(value) old-addr -> static-addr, info". */
#define DEBUG_EVAC_INTLIKE_TO_STATIC \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Evac: IntLike to Static (%ld) 0x%lx -> 0x%lx, info 0x%lx\n", \
                INTLIKE_VALUE(evac), evac, INTLIKE_CLOSURE(INTLIKE_VALUE(evac)), INFO_PTR(evac))

/* Prefix tags for generational traces: destination generation. */
#define DEBUG_EVAC_TO_OLD \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "Old ")

#define DEBUG_EVAC_TO_NEW \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, "New ")
/* Trace creation of an old-generation indirection (oldind) pointing at
   a new-generation closure (new) evacuated from evac; see
   _Evacuate_Old_to_New.
   Fixed: the third format argument referred to `newevac` -- a variable
   captured from the caller's scope -- instead of the macro parameter
   `new`, which was silently unused.  It only worked because the sole
   call site happens to pass a local named `newevac`; any other use
   would fail to compile or print garbage. */
#define DEBUG_EVAC_OLD_TO_NEW(oldind, evac, new) \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
        fprintf(stderr, " OldRoot: 0x%lx -> Old 0x%lx (-> New 0x%lx)\n", \
                evac, oldind, new)
152 #define DEBUG_EVAC_OLDROOT_FORWARD \
153 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) { \
154 fprintf(stderr, "Evac: OldRoot Forward 0x%lx -> Old 0x%lx ", evac, FORWARD_ADDRESS(evac)); \
155 if (! InOldGen(Scav)) fprintf(stderr, "-> New 0x%lx ", IND_CLOSURE_PTR(FORWARD_ADDRESS(evac))); \
156 fprintf(stderr, "info 0x%lx\n", INFO_PTR(evac)); \
/* Trace evacuation of a blocking-queue closure (concurrent trace flag). */
#define DEBUG_EVAC_BQ \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
        fprintf(stderr, "Evac: 0x%lx -> 0x%lx, BQ info 0x%lx, size %lu\n", \
                evac, ToHp, INFO_PTR(evac), BQ_CLOSURE_SIZE(evac))
165 #define DEBUG_EVAC_TSO(size) \
166 if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
167 fprintf(stderr, "Evac TSO: 0x%lx -> 0x%lx, size %ld\n", \
170 #define DEBUG_EVAC_STKO(a,b) \
171 if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
172 fprintf(stderr, "Evac StkO: 0x%lx -> 0x%lx, size %ld (A), %ld (B)\n", \
/* Trace evacuation of a FetchMe/blocked-fetch closure.
   NOTE(review): the size argument is BF_CLOSURE_SIZE(dummy) -- `dummy`
   must be in scope at the expansion site (cf. the IND/BQ code, which
   also passes `dummy`); confirm against the full call sites. */
# define DEBUG_EVAC_BF \
    if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
        fprintf(stderr, "Evac: 0x%lx -> 0x%lx, BF info 0x%lx, size %lu\n", \
                evac, ToHp, INFO_PTR(evac), BF_CLOSURE_SIZE(dummy))
/* Non-debug build: every trace macro compiles away to nothing, so the
   trailing `;` at each call site becomes an empty statement. */
#define DEBUG_EVAC(size)
#define DEBUG_EVAC_DYN
#define DEBUG_EVAC_TUPLE
#define DEBUG_EVAC_MUTUPLE
#define DEBUG_EVAC_DATA
#define DEBUG_EVAC_BH(size)
#define DEBUG_EVAC_FORWARD
#define DEBUG_EVAC_IND1
#define DEBUG_EVAC_IND2
#define DEBUG_EVAC_PERM_IND
#define DEBUG_EVAC_CAF_EVAC1
#define DEBUG_EVAC_CAF_EVAC2
#define DEBUG_EVAC_CAF_RET
#define DEBUG_EVAC_STAT
#define DEBUG_EVAC_CONST
#define DEBUG_EVAC_CHARLIKE
#define DEBUG_EVAC_INTLIKE_TO_STATIC
#define DEBUG_EVAC_TO_OLD
#define DEBUG_EVAC_TO_NEW
#define DEBUG_EVAC_OLDROOT_FORWARD
#define DEBUG_EVAC_OLD_TO_NEW(oldind, evac, new)
/* Concurrent-only trace macros, likewise compiled away. */
# define DEBUG_EVAC_BQ
# define DEBUG_EVAC_TSO(size)
# define DEBUG_EVAC_STKO(s,size)
# define DEBUG_EVAC_BF
#endif /* not DEBUG */
222 /* Evacuation with Promotion -- Have to decide if we promote ! */
223 /* This is done by fiddling the ToHp pointer before calling */
/* the real _do_Evacuate code, passing reqd forward ref info */
/* Is a heap ptr in the old generation ?
   Assumes the old generation occupies addresses at or below the
   OldGen boundary pointer -- TODO confirm layout in SMinternal.h. */
#define InOldGen(hpptr) (((P_)(hpptr)) <= OldGen)

/* Should we promote to the old generation ?
   Closures allocated below AllocGen have survived long enough to be
   promoted.  NOTE(review): strict `<` here vs `<=` in InOldGen --
   presumably AllocGen itself is in the new region; verify. */
#define ShouldPromote(evac) (((P_)(evac)) < AllocGen)
/*** Real Evac Code -- passed closure & forward ref info ***/

/* Expands to the K&R-style head of an evacuation function:
       P_ _do_Evacuate_<suffix>(evac, forward_info)
   The `{ body }` follows at the EVAC_FN(...) use site.
   `forward_info` is the forwarding info table to install in the
   evacuated closure (old-gen vs new-gen variant, chosen by the
   GEN_EVAC_CODE dispatcher). */
#define EVAC_FN(suffix) \
P_ CAT2(_do_Evacuate_,suffix)(evac, forward_info) \
P_ evac; P_ forward_info;
240 /*** Evac Decision Code -- calls real evac code ***/
242 extern P_ _Evacuate_Old_to_New();
244 #define GEN_EVAC_CODE(suffix) \
245 P_ CAT2(_Evacuate_,suffix)(evac) \
249 if (ShouldPromote(evac)) { \
251 tmp = ToHp; ToHp = OldHp; \
252 newevac = CAT2(_do_Evacuate_,suffix)(evac, (P_)Forward_Ref_Old_info); \
253 OldHp = ToHp; ToHp = tmp; \
256 newevac = CAT2(_do_Evacuate_,suffix)(evac, (P_)Forward_Ref_New_info); \
258 /* Check if new gen closure is scavenged from the old gen */ \
259 if (InOldGen(Scav)) { \
260 newevac = (P_) _Evacuate_Old_to_New(newevac, evac); \
267 /*** FORWARD REF STUFF ***/
269 /*** Setting Forward Ref: grab argument passed to evac code ***/
271 /* Note that writing in the forwarding address trashes part of the
272 closure. This is normally fine since, if we want the data, we'll
273 have made a copy of it.
275 But, Malloc Pointer closures are special: we have to make sure that
276 we don't damage either the linked list (which will include both
277 copied and uncopied Malloc ptrs) or the data (which we must report
278 to the outside world). Malloc Ptr closures are carefully designed
279 to have a little extra space in them that can be safely
/* Overwrite `closure`'s info pointer with the forwarding info table and
   record its new address `forw`.  The info table installed is
   `forward_info`, the argument passed to the enclosing _do_Evacuate_*
   function (see EVAC_FN above), selecting the old-gen or new-gen
   forward-ref variant.
   Fixed: the expansion referred to `forward`, an identifier declared
   nowhere in this file; the EVAC_FN parameter is `forward_info`, as
   the original inline comment itself indicates. */
#define SET_FORWARD_REF(closure, forw) \
	SET_INFO_PTR(closure,forward_info); /* arg passed to evac function */ \
	FORWARD_ADDRESS(closure) = (W_)(forw)
288 EVAC_FN(Old_Forward_Ref)
290 /* Forward ref to old generation -- just return */
293 evac = (P_) FORWARD_ADDRESS(evac);
297 EVAC_FN(New_Forward_Ref)
299 /* Forward ref to new generation -- check scavenged from the old gen */
302 if (InOldGen(Scav)) {
303 evac = (P_) _Evacuate_Old_to_New(FORWARD_ADDRESS(evac), evac);
305 evac = (P_) FORWARD_ADDRESS(evac);
310 EVAC_FN(OldRoot_Forward)
312 /* Forward ref to old generation root -- return old root or new gen closure */
313 DEBUG_EVAC_OLDROOT_FORWARD;
315 /* grab old generation root */
316 evac = (P_) FORWARD_ADDRESS(evac);
318 /* if scavenging new generation return the new generation
319 closure rather than the old generation root */
320 if (! InOldGen(Scav)) {
321 evac = (P_) IND_CLOSURE_PTR(evac);
327 EXTDATA_RO(Forward_Ref_New_info);
328 EXTDATA_RO(Forward_Ref_Old_info);
329 EXTDATA_RO(OldRoot_Forward_Ref_info);
331 /*** Old Gen Reference to New Gen Closure ***/
334 _Evacuate_Old_to_New(newevac, evac)
337 /* New generation closure referenced from the old generation */
338 /* allocate old generation indirection to newevac */
339 /* reset forward reference in original allocation area to oldind */
340 /* evacuating this should return the old root or the new gen */
341 /* closure depending if referenced from the old generation */
342 /* return oldind as evacuated location */
343 /* reference from oldgen will be to this oldind closure */
345 P_ oldind = OldHp + 1; /* see START_ALLOC */
346 OldHp = oldind + (FIXED_HS-1) + MIN_UPD_SIZE; /* see FINISH_ALLOC */
348 DEBUG_EVAC_OLD_TO_NEW(oldind, evac, newevac);
350 INFO_PTR(evac) = (W_) OldRoot_Forward_Ref_info;
351 FORWARD_ADDRESS(evac) = (W_)oldind;
353 INFO_PTR(oldind) = (W_) OldRoot_info;
354 IND_CLOSURE_PTR(oldind) = (W_) newevac;
355 IND_CLOSURE_LINK(oldind) = (W_) genInfo.OldInNew;
356 genInfo.OldInNew = oldind;
357 genInfo.OldInNewno++;
362 #define PROMOTE_MUTABLE(evac) \
363 if (InOldGen(evac)) { \
364 MUT_LINK(evac) = (W_) genInfo.PromMutables; \
365 genInfo.PromMutables = (P_) evac; \
/* Appel collector: thread a promoted mutable closure onto the
   appelInfo.PromMutables list via its MUT_LINK word, so later
   collections can scavenge old-to-new references through it. */
#define PROMOTE_MUTABLE(evac) \
    MUT_LINK(evac) = (W_) appelInfo.PromMutables; \
    appelInfo.PromMutables = (P_) evac;
#define PROMOTE_MUTABLE(evac) /* no generational book-keeping for this collector */
/*** Real Evac Code -- simply passed closure ***/

/* Non-generational variant: ANSI-style head, no forward_info argument
   (the single Forward_Ref_info table is used unconditionally; see
   SET_FORWARD_REF below). */
#define EVAC_FN(suffix) P_ CAT2(_Evacuate_,suffix)(P_ evac)
/*** FORWARD REF STUFF ***/

/* Single-generation collector: only one forwarding info table exists,
   so install Forward_Ref_info directly and record the new address. */
#define SET_FORWARD_REF(closure, forw) \
    SET_INFO_PTR(closure, Forward_Ref_info); \
    FORWARD_ADDRESS(closure) = (W_) (forw)
393 _Evacuate_Forward_Ref(evac)
397 evac = (P_) FORWARD_ADDRESS(evac);
401 EXTDATA_RO(Forward_Ref_info);
406 /*** SPECIALISED CODE ***/
408 /* Note: code for evacuating selectors is given near that for Ind(irections) */
416 SPEC_COPY_FREE_VAR(1);
417 SET_FORWARD_REF(evac,ToHp);
428 SPEC_COPY_FREE_VAR(1);
429 SPEC_COPY_FREE_VAR(2);
430 SET_FORWARD_REF(evac,ToHp);
441 SPEC_COPY_FREE_VAR(1);
442 SPEC_COPY_FREE_VAR(2);
443 SPEC_COPY_FREE_VAR(3);
444 SET_FORWARD_REF(evac,ToHp);
455 SPEC_COPY_FREE_VAR(1);
456 SPEC_COPY_FREE_VAR(2);
457 SPEC_COPY_FREE_VAR(3);
458 SPEC_COPY_FREE_VAR(4);
459 SET_FORWARD_REF(evac,ToHp);
470 SPEC_COPY_FREE_VAR(1);
471 SPEC_COPY_FREE_VAR(2);
472 SPEC_COPY_FREE_VAR(3);
473 SPEC_COPY_FREE_VAR(4);
474 SPEC_COPY_FREE_VAR(5);
475 SET_FORWARD_REF(evac,ToHp);
481 #define BIG_SPEC_EVAC_FN(n) \
488 for (i = 1; i <= n; i++) { SPEC_COPY_FREE_VAR(i); } \
489 SET_FORWARD_REF(evac,ToHp); \
495 /* instantiate for 6--12 */
506 A @SPEC_RBH@ must be at least size @MIN_UPD_SIZE@ (Whom are we fooling?
507 This means 2), and the first word after the fixed header is a
508 @MUT_LINK@. The second word is a pointer to a blocking queue.
509 Remaining words are the same as the underlying @SPEC@ closure. Unlike
510 their @SPEC@ cousins, @SPEC_RBH@ closures require special handling for
511 generational collectors, because the blocking queue is a mutable
514 We don't expect to have a lot of these, so I haven't unrolled the
515 first five instantiations of the macro, but feel free to do so if it
522 #define SPEC_RBH_EVAC_FN(n) \
523 EVAC_FN(CAT2(RBH_,n)) \
525 I_ count = FIXED_HS - 1; \
526 I_ size = SPEC_RBH_VHS + (n); \
530 while (++count <= size + (FIXED_HS - 1)) { \
533 SET_FORWARD_REF(evac,ToHp); \
535 FINISH_ALLOC(size); \
537 PROMOTE_MUTABLE(evac); \
542 /* instantiate for 2--12 */
560 I_ size = MallocPtr_SIZE;
565 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MALLOCPTRS) {
566 printf("DEBUG: Evacuating MallocPtr(%x)=<%x,_,%x,%x>", evac, evac[0], evac[2], evac[3]);
567 printf(" Data = %x, Next = %x\n",
568 MallocPtr_CLOSURE_DATA(evac), MallocPtr_CLOSURE_LINK(evac) );
574 SET_FORWARD_REF(evac,ToHp);
575 MallocPtr_CLOSURE_DATA(ToHp) = MallocPtr_CLOSURE_DATA(evac);
576 MallocPtr_CLOSURE_LINK(ToHp) = MallocPtr_CLOSURE_LINK(evac);
579 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MALLOCPTRS) {
580 printf("DEBUG: Evacuated MallocPtr(%x)=<%x,_,%x,%x>", ToHp, ToHp[0], ToHp[2], ToHp[3]);
581 printf(" Data = %x, Next = %x\n",
582 MallocPtr_CLOSURE_DATA(ToHp), MallocPtr_CLOSURE_LINK(ToHp) );
592 /*** GENERIC CASE CODE ***/
596 I_ count = FIXED_HS - 1;
597 I_ size = GEN_CLOSURE_SIZE(evac);
602 while (++count <= size + (FIXED_HS - 1)) {
605 SET_FORWARD_REF(evac,ToHp);
613 Like a @SPEC_RBH@, a @GEN_RBH@ must be at least @MIN_UPD_SIZE@, and
614 the first word after the fixed header is a @MUT_LINK@. The second
615 word is a pointer to a blocking queue. Remaining words are the same
616 as the underlying @GEN@ closure.
623 I_ count = GEN_RBH_HS - 1;
624 I_ size = GEN_RBH_CLOSURE_SIZE(evac);
629 while (++count <= size + (FIXED_HS - 1)) {
632 SET_FORWARD_REF(evac,ToHp);
636 PROMOTE_MUTABLE(evac);
642 /*** DYNAMIC CLOSURE -- SIZE & PTRS STORED IN CLOSURE ***/
646 I_ count = FIXED_HS - 1;
647 I_ size = DYN_CLOSURE_SIZE(evac); /* Includes size and no-of-ptrs fields */
652 while (++count <= size + (FIXED_HS - 1)) {
655 SET_FORWARD_REF(evac,ToHp);
661 /*** TUPLE CLOSURE -- NO. OF PTRS STORED IN CLOSURE -- NO DATA ***/
665 I_ count = FIXED_HS - 1;
666 I_ size = TUPLE_CLOSURE_SIZE(evac);
671 while (++count <= size + (FIXED_HS - 1)) {
674 SET_FORWARD_REF(evac,ToHp);
680 /*** MUTUPLE CLOSURE -- NO. OF PTRS STORED IN CLOSURE -- NO DATA ***/
681 /* Only if special GC treatment required */
683 #ifdef GC_MUT_REQUIRED
686 I_ count = FIXED_HS - 1;
687 I_ size = MUTUPLE_CLOSURE_SIZE(evac);
693 while (++count <= size + (FIXED_HS - 1)) {
696 SET_FORWARD_REF(evac,ToHp);
700 /* Add to OldMutables list (if evacuated to old generation) */
701 PROMOTE_MUTABLE(evac);
705 #endif /* GCgn or GCap */
708 /*** DATA CLOSURE -- SIZE STORED IN CLOSURE -- NO POINTERS ***/
712 I_ count = FIXED_HS - 1;
713 I_ size = DATA_CLOSURE_SIZE(evac);
718 while (++count <= size + (FIXED_HS - 1)) {
721 SET_FORWARD_REF(evac,ToHp);
728 /*** STATIC CLOSURE CODE ***/
730 /* Evacuation: Just return static address (no copying required)
731 Evac already contains this address -- just return */
732 /* Scavenging: Static closures should never be scavenged */
740 /*** BLACK HOLE CODE ***/
744 START_ALLOC(BH_U_SIZE);
745 DEBUG_EVAC_BH(BH_U_SIZE);
747 SET_FORWARD_REF(evac,ToHp);
749 FINISH_ALLOC(BH_U_SIZE);
755 START_ALLOC(BH_N_SIZE);
756 DEBUG_EVAC_BH(BH_N_SIZE);
758 SET_FORWARD_REF(evac,ToHp);
760 FINISH_ALLOC(BH_N_SIZE);
764 /*** INDIRECTION CODE ***/
766 /* permanent indirections first */
767 #if defined(PROFILING) || defined(TICKY_TICKY)
770 EVAC_FN(PI) /* used for ticky in case just below... */
773 if (! AllFlags.doUpdEntryCounts) {
775 GC_SHORT_IND(); /* ticky: record that we shorted an indirection */
777 evac = (P_) IND_CLOSURE_PTR(evac);
779 # if defined(GCgn) || defined(GCap)
780 if (evac > OldGen) /* Only evacuate new gen with generational collector */
781 evac = EVACUATE_CLOSURE(evac);
783 evac = EVACUATE_CLOSURE(evac);
790 /* *not* shorting one out... */
791 START_ALLOC(IND_CLOSURE_SIZE(dummy));
795 SET_FORWARD_REF(evac,ToHp);
797 FINISH_ALLOC(IND_CLOSURE_SIZE(dummy));
804 #endif /* PROFILING or TICKY */
806 EVAC_FN(Ind) /* Indirections are shorted-out; if doing weird ticky
807 stuff, we will have used *permanent* indirections
808 for overwriting updatees...
812 GC_SHORT_IND(); /* ticky: record that we shorted an indirection */
814 evac = (P_) IND_CLOSURE_PTR(evac);
816 # if defined(GCgn) || defined(GCap)
817 if (evac > OldGen) /* Only evacuate new gen with generational collector */
818 evac = EVACUATE_CLOSURE(evac);
820 evac = EVACUATE_CLOSURE(evac);
825 /* This will generate a stack of returns for a chain of indirections!
826 However chains can only be 2 long.
832 /*** SELECTORS CODE (much like an indirection) ***/
834 /* Evacuate a thunk which is selector; it has one free variable which
835 points to something which will evaluate to a constructor in a
836 single-constructor data type.
838 If it is so evaluated at GC time, we want to simply select the n'th
841 This thunk is of course always a Spec thing, since it has only one
844 The constructor is guaranteed to be a Spec thing, so we know where
847 ToDo: what if the constructor is a Gen thing?
849 "selector_depth" stuff below: (WDP 95/12)
851 It is possible to have a *very* considerable number of selectors
852 all chained together, which will cause the code here to chew up
853 enormous C stack space (very deeply nested set of calls), which
854 can crash the program.
856 Various solutions are possible, but we opt for a simple one --
857 we run a "selector_depth" counter, and we stop doing the
858 selections if we get beyond that depth. The main nice property
859 is that it doesn't affect (or slow down) any of the rest of the
862 What should the depth be? For SPARC friendliness, it should
863 probably be very small (e.g., 8 or 16), to avoid register-window
864 spillage. However, that would increase the chances that
865 selectors are left undone and lots of junk is promoted to the
866 old generation. So we set it quite a bit higher -- we'd like to
867 do all the selections except in the most extreme circumstances.
869 static int selector_depth = 0;
870 #define MAX_SELECTOR_DEPTH 100 /* reasonably arbitrary */
873 _EvacuateSelector_n(P_ evac, I_ n)
875 P_ maybe_con = (P_) evac[_FHS];
877 /* must be a SPEC 2 1 closure */
878 ASSERT(MIN_UPD_SIZE == 2); /* otherwise you are hosed */
881 /* if a thunk, its update-entry count must be zero */
882 ASSERT(TICKY_HDR(evac) == 0);
885 selector_depth++; /* see story above */
888 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC)
889 fprintf(stderr, "Evac Selector (depth %ld): 0x%lx, info 0x%lx, maybe_con 0x%lx, info 0x%lx, tag %ld\n",
890 selector_depth, evac, INFO_PTR(evac), maybe_con,
891 INFO_PTR(maybe_con), INFO_TAG(INFO_PTR(maybe_con)));
894 if (INFO_TAG(INFO_PTR(maybe_con)) < 0 /* not in WHNF */
895 #if !defined(CONCURRENT)
896 || (! RTSflags.GcFlags.lazyBlackHoling) /* see "price of laziness" paper */
898 || selector_depth > MAX_SELECTOR_DEPTH
899 || (! RTSflags.GcFlags.doSelectorsAtGC)
902 if (INFO_TAG(INFO_PTR(maybe_con)) >= 0) { /* we *could* have done it */
906 /* Evacuate as normal (it is size *2* because of MIN_UPD_SIZE) */
907 return( _Evacuate_2(evac) );
911 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC)
912 fprintf(stderr, "Evac Selector:shorting: 0x%lx -> 0x%lx\n",
913 evac, maybe_con[_FHS + n]);
916 /* Ha! Short it out */
917 evac = (P_) (maybe_con[_FHS + n]); /* evac now has the result of the selection */
919 GC_SEL_MINOR(); /* ticky-ticky */
921 #if defined(GCgn) || defined(GCap)
922 if (evac > OldGen) /* Only evacuate new gen with generational collector */
923 evac = EVACUATE_CLOSURE(evac);
925 evac = EVACUATE_CLOSURE(evac);
928 selector_depth--; /* see story above */
/* Stamp out a K&R-style entry point _EvacuateSelector_<n> that
   forwards to the shared worker _EvacuateSelector_n with the
   selected-field index n. */
#define DEF_SEL_EVAC(n) \
P_ CAT2(_EvacuateSelector_,n) (evac) P_ evac; \
{ return(_EvacuateSelector_n(evac,n)); }
937 /* all the entry points */
955 START_ALLOC(BQ_CLOSURE_SIZE(dummy));
961 SET_FORWARD_REF(evac,ToHp);
963 FINISH_ALLOC(BQ_CLOSURE_SIZE(dummy));
965 /* Add to OldMutables list (if evacuated to old generation) */
966 PROMOTE_MUTABLE(evac);
974 I_ size = TSO_VHS + TSO_CTS_SIZE;
977 DEBUG_EVAC_TSO(size);
980 for (count = FIXED_HS; count < FIXED_HS + TSO_VHS; count++) {
984 *TSO_INTERNAL_PTR(ToHp) = *TSO_INTERNAL_PTR(evac);
986 SET_FORWARD_REF(evac, ToHp);
990 /* Add to OldMutables list (if evacuated to old generation) */
991 PROMOTE_MUTABLE(evac);
999 I_ size = STKO_CLOSURE_SIZE(evac);
1000 I_ spa_offset = STKO_SpA_OFFSET(evac);
1001 I_ spb_offset = STKO_SpB_OFFSET(evac);
1002 I_ sub_offset = STKO_SuB_OFFSET(evac);
1005 ASSERT(sanityChk_StkO(evac));
1008 DEBUG_EVAC_STKO(STKO_CLOSURE_CTS_SIZE(evac) - spa_offset + 1, spb_offset);
1012 COPY_WORD(STKO_ADEP_LOCN);
1013 COPY_WORD(STKO_BDEP_LOCN);
1015 COPY_WORD(STKO_SIZE_LOCN);
1016 COPY_WORD(STKO_RETURN_LOCN);
1017 COPY_WORD(STKO_LINK_LOCN);
1019 /* Adjust the four stack pointers -- ORDER IS IMPORTANT!! */
1020 offset = ToHp - evac;
1022 STKO_SuB(ToHp) = STKO_SuB(evac) + offset;
1023 STKO_SpB(ToHp) = STKO_SpB(evac) + offset;
1024 STKO_SpA(ToHp) = STKO_SpA(evac) + offset;
1025 STKO_SuA(ToHp) = STKO_SuA(evac) + offset;
1028 /* Slide the A stack */
1029 for (count = spa_offset; count <= STKO_CLOSURE_CTS_SIZE(evac); count++) {
1030 COPY_WORD((STKO_HS-1) + count);
1033 /* Slide the B stack, repairing internal pointers */
1034 for (count = spb_offset; count >= 1;) {
1035 if (count > sub_offset) {
1036 COPY_WORD((STKO_HS-1) + count);
1040 /* Repair the internal pointers in the update frame */
1041 COPY_WORD((STKO_HS-1) + count + BREL(UF_RET));
1042 COPY_WORD((STKO_HS-1) + count + BREL(UF_UPDATEE));
1043 ADJUST_WORD((STKO_HS-1) + count + BREL(UF_SUA),offset);
1044 ADJUST_WORD((STKO_HS-1) + count + BREL(UF_SUB),offset);
1045 subptr = GRAB_SuB(STKO_CLOSURE_ADDR(ToHp,sub_offset));
1046 sub_offset = STKO_CLOSURE_OFFSET(ToHp,subptr);
1047 count -= STD_UF_SIZE;
1051 SET_FORWARD_REF(evac, ToHp);
1055 /* Add to OldMutables list (if evacuated to old generation) */
1056 PROMOTE_MUTABLE(evac);
1067 COPY_WORD(FETCHME_GA_LOCN);
1068 ASSERT(GALAlookup(FETCHME_GA(evac)) != NULL);
1070 SET_FORWARD_REF(evac,ToHp);
1074 /* Add to OldMutables list (if evacuated to old generation) */
1075 PROMOTE_MUTABLE(evac);
1083 I_ size = BF_CLOSURE_SIZE(evac);
1089 for (count = FIXED_HS; count < FIXED_HS + BF_VHS; count++) {
1092 COPY_WORD(BF_LINK_LOCN);
1093 COPY_WORD(BF_NODE_LOCN);
1094 COPY_WORD(BF_GTID_LOCN);
1095 COPY_WORD(BF_SLOT_LOCN);
1096 COPY_WORD(BF_WEIGHT_LOCN);
1098 SET_FORWARD_REF(evac, ToHp);
1102 /* Add to OldMutables list (if evacuated to old generation) */
1103 PROMOTE_MUTABLE(evac);
1108 #endif /* CONCURRENT */
1110 /*** SPECIAL CAF CODE ***/
1112 /* Evacuation: Return closure pointed to (already explicitly evacuated) */
1117 GC_SHORT_CAF(); /* ticky: record that we shorted an indirection */
1119 evac = (P_) IND_CLOSURE_PTR(evac);
1123 /* In addition we need an internal Caf indirection which evacuates,
1124 updates and returns the indirection. Before GC is started, the
1125 @CAFlist@ must be traversed and the info tables set to this.
1128 EVAC_FN(Caf_Evac_Upd)
1132 DEBUG_EVAC_CAF_EVAC1;
1134 INFO_PTR(evac) = (W_) Caf_info; /* Change back to Caf_info */
1136 evac = (P_) IND_CLOSURE_PTR(evac); /* Grab reference and evacuate */
1138 #if defined(GCgn) || defined(GCap)
1139 if (evac > OldGen) /* Only evacuate new gen with generational collector */
1140 evac = EVACUATE_CLOSURE(evac);
1142 evac = EVACUATE_CLOSURE(evac);
1145 IND_CLOSURE_PTR(closure) = (W_) evac; /* Update reference */
1147 DEBUG_EVAC_CAF_EVAC2;
1150 /* This will generate a stack of returns for a chain of indirections!
1151 However chains can only be 2 long.
1156 /*** CONST CLOSURE CODE ***/
1158 /* Evacuation: Just return address of the static closure stored in the info table */
1163 if (AllFlags.doUpdEntryCounts) {
1164 /* evacuate as if a closure of size 0
1165 (there is no _Evacuate_0 to call)
1170 SET_FORWARD_REF(evac,ToHp);
1178 GC_COMMON_CONST(); /* ticky */
1180 evac = CONST_STATIC_CLOSURE(INFO_PTR(evac));
1188 /*** CHARLIKE CLOSURE CODE ***/
1190 /* Evacuation: Just return address of the static closure stored fixed array */
1195 if (AllFlags.doUpdEntryCounts) {
1196 evac = _Evacuate_1(evac); /* evacuate closure of size 1 */
1200 DEBUG_EVAC_CHARLIKE;
1201 GC_COMMON_CHARLIKE(); /* ticky */
1203 evac = (P_) CHARLIKE_CLOSURE(CHARLIKE_VALUE(evac));
1212 --- INTLIKE CLOSURE CODE ---
1214 Evacuation: Return address of the static closure if available
1215 Otherwise evacuate converting to aux closure.
1217 There are some tricks here:
1220 The main trick is that if the integer is in a certain range, we
1221 replace it by a pointer to a statically allocated integer.
1224 (Would it not be more efficient to update the copy directly since
1225 we're about to set a forwarding reference in the original? ADR)
1230 I_ val = INTLIKE_VALUE(evac);
1232 if (val >= MIN_INTLIKE /* in range of static closures */
1233 && val <= MAX_INTLIKE
1235 && !AllFlags.doUpdEntryCounts
1238 DEBUG_EVAC_INTLIKE_TO_STATIC;
1239 GC_COMMON_INTLIKE(); /* ticky */
1241 evac = (P_) INTLIKE_CLOSURE(val); /* return appropriate static closure */
1244 evac = _Evacuate_1(evac); /* evacuate closure of size 1 */
1247 if (!AllFlags.doUpdEntryCounts) GC_COMMON_INTLIKE_FAIL();
1269 GEN_EVAC_CODE(Tuple)
1271 GEN_EVAC_CODE(MuTuple)
1272 GEN_EVAC_CODE(IntLike) /* ToDo: may create oldgen roots referencing static ints */
1273 GEN_EVAC_CODE(CAT2(BH_,MIN_UPD_SIZE))
1274 GEN_EVAC_CODE(CAT2(BH_,MIN_NONUPD_SIZE))
1277 #else /* ! _INFO_COPYING */
1278 This really really should not ever ever come up!
1279 #endif /* ! _INFO_COPYING */