1 ****************************************************************************
3 The files SMevac.lc and SMscav.lhc contain the basic routines required
4 for two-space copying garbage collection.
6 Two files are required as the evac routines are conventional call/return
7 routines while the scavenge routines are continuation routines.
9 This file SMscav.lhc contains the scavenging routines ...
11 ****************************************************************************
14 All the routines are placed in the info tables of the appropriate closures.
17 Evacuation code: _Evacuate_...
19 USE: new = EVACUATE_CLOSURE(evac)
21 Evacuates a closure of size S words. Note the size excludes the info
22 and any other preceding fields (eg global address in Grip implementation)
23 Returns the address of the closure's new location via the Evac register.
26 arg -- points to the closure
27 ToHp -- points to the last allocated word in to-space
29 ret -- points to the new address of the closure
30 ToHp -- points to the last allocated word in to-space
32 Example: Cons cell requires _Evacuate_2
34 Scavenging code: _Scavenge_S_N
36 Retrieved using SCAV_CODE(infoptr)
38 Scavenges a closure of size S words, with N pointers and returns.
39 If more closures are required to be scavenged the code to
40 scan the next closure can be called.
43 Scav -- points to the current closure
44 ToHp -- points to the last allocated word in to-space
46 OldGen -- Points to end of old generation (Appels collector only)
49 Scav -- points to the next closure
50 ToHp -- points to the (possibly new) location of the last allocated word
52 Example: Cons cell requires _Scavenge_2_2
55 The following registers are used by a two-space collection:
57 Scav -- Points to the current closure being scavenged
60 ToHp -- Points to the last word allocated in two-space
63 A copying pass is started by:
64 -- Setting ToHp to 1 before the start of to-space
65 -- Evacuating the roots pointing into from-space
66 -- root = EVACUATE_CLOSURE(root)
67 -- Setting Scav to point to the first closure in to-space
68 -- Execute while (Scav <= ToHp) (SCAV_CODE(INFO_PTR(Scav)))();
70 When done, ToHp will point to the last word allocated in to-space
74 /* The #define and #include come before the test because SMinternal.h
75    will suck in includes/SMinterface which defines (or doesn't)
76    _INFO_COPYING [ADR] */
79 #include "SMinternal.h"
81 #if defined(_INFO_COPYING)
/* Register table used during scavenging -- NOTE(review): its exact use is
   not visible in this extract; presumably holds saved STG registers. */
83 RegisterTable ScavRegTable;
85 /* Moves Scav to point at the info pointer of the next closure to Scavenge */
/* FIXED_HS is presumably the fixed closure-header size in words -- confirm
   against includes/SMinterface. */
86 #define NEXT_Scav(size) Scav += (size) + FIXED_HS
89 When doing a new generation copy collection for Appel's collector
90 only evacuate references that point to the new generation.
91 OldGen must be set to point to the end of old space.
/* DO_EVACUATE(closure, pos): evacuate the pointer stored at word offset
   (pos) of (closure) and overwrite the slot with the new to-space address.
   Generational variants: only evacuate if the target lies beyond OldGen,
   i.e. in the new generation (see the header comment above).
   NOTE(review): the #if guard lines selecting between the first two
   variants are missing from this extract -- presumably GCgn and GCap;
   confirm against the full source. */
96 #define DO_EVACUATE(closure, pos) \
97 { P_ evac = (P_) *(((P_)(closure))+(pos)); \
98 if (evac > OldGen) { \
99 *(((P_)(closure))+(pos)) = (W_) EVACUATE_CLOSURE(evac); \
105 #define DO_EVACUATE(closure, pos) \
106 { P_ evac = (P_) *(((P_)(closure))+(pos)); \
107 if (evac > OldGen) { \
108 *(((P_)(closure))+(pos)) = (W_) EVACUATE_CLOSURE(evac); \
111 #else /* ! GCgn && ! GCap */
/* Plain two-space variant: unconditionally evacuate the referenced closure. */
113 #define DO_EVACUATE(closure, pos) \
114 { P_ evac = (P_) *(((P_)(closure))+(pos)); \
115 *(((P_)(closure))+(pos)) = (W_) EVACUATE_CLOSURE(evac); }
117 #endif /* ! GCgn && ! GCap */
121 /* Evacuate nth pointer in SPEC closure (starting at 1) */
122 #define SPEC_DO_EVACUATE(ptr) DO_EVACUATE(Scav, (SPEC_HS-1) + (ptr))
/* Same, for STKO (stack object) closures, offset by the STKO header size. */
123 #define STKO_DO_EVACUATE(ptr) DO_EVACUATE(Scav, (STKO_HS-1) + (ptr))
126 /*** DEBUGGING MACROS ***/
/* Per-closure-kind trace macros: when the corresponding RTSflags trace bit
   is set, print the Scav pointer, info pointer, and size/ptr counts to
   stderr.  NOTE(review): the surrounding #if (presumably a DEBUG guard) is
   not visible in this extract. */
130 #define DEBUG_SCAV(s,p) \
131 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
132 fprintf(stderr, "Scav: 0x%lx, info 0x%lx, size %ld, ptrs %ld\n", \
133 Scav, INFO_PTR(Scav), s, p)
135 #define DEBUG_SCAV_GEN(s,p) \
136 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
137 fprintf(stderr, "Scav: 0x%lx, Gen info 0x%lx, size %ld, ptrs %ld\n", \
138 Scav, INFO_PTR(Scav), s, p)
140 #define DEBUG_SCAV_DYN \
141 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
142 fprintf(stderr, "Scav: 0x%lx, Dyn info 0x%lx, size %ld, ptrs %ld\n", \
143 Scav, INFO_PTR(Scav), DYN_CLOSURE_SIZE(Scav), DYN_CLOSURE_NoPTRS(Scav))
145 #define DEBUG_SCAV_TUPLE \
146 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
147 fprintf(stderr, "Scav: 0x%lx, Tuple info 0x%lx, size %ld, ptrs %ld\n", \
148 Scav, INFO_PTR(Scav), TUPLE_CLOSURE_SIZE(Scav), TUPLE_CLOSURE_NoPTRS(Scav))
150 #define DEBUG_SCAV_MUTUPLE \
151 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
152 fprintf(stderr, "Scav: 0x%lx, MuTuple info 0x%lx, size %ld, ptrs %ld\n", \
153 Scav, INFO_PTR(Scav), MUTUPLE_CLOSURE_SIZE(Scav), MUTUPLE_CLOSURE_NoPTRS(Scav))
155 #define DEBUG_SCAV_DATA \
156 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
157 fprintf(stderr, "Scav: 0x%lx, Data info 0x%lx, size %ld\n", \
158 Scav, INFO_PTR(Scav), DATA_CLOSURE_SIZE(Scav))
160 #define DEBUG_SCAV_BH(s) \
161 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
162 fprintf(stderr, "Scav: 0x%lx, BH info 0x%lx, size %ld\n", \
163 Scav, INFO_PTR(Scav), s)
165 #define DEBUG_SCAV_IND \
166 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
167 fprintf(stderr, "Scav: 0x%lx, IND info 0x%lx, size %ld\n", \
168 Scav, INFO_PTR(Scav), IND_CLOSURE_SIZE(Scav))
170 #define DEBUG_SCAV_PERM_IND \
171 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
172 fprintf(stderr, "Scav: 0x%lx, PI info 0x%lx, size %ld\n", \
173 Scav, INFO_PTR(Scav), IND_CLOSURE_SIZE(Scav))
175 #define DEBUG_SCAV_OLDROOT(s) \
176 if (RTSflags.GcFlags.trace & DEBUG_TRACE_MINOR_GC) \
177 fprintf(stderr, "Scav: OLDROOT 0x%lx, info 0x%lx, size %ld\n", \
178 Scav, INFO_PTR(Scav), s)
181 #define DEBUG_SCAV_BQ \
182 if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
183 fprintf(stderr, "Scav: 0x%lx, BQ info 0x%lx, size %ld, ptrs %ld\n", \
184 Scav, INFO_PTR(Scav), BQ_CLOSURE_SIZE(Scav), BQ_CLOSURE_NoPTRS(Scav))
186 #define DEBUG_SCAV_TSO \
187 if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
188 fprintf(stderr, "Scav TSO: 0x%lx\n", \
191 #define DEBUG_SCAV_STKO \
192 if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
193 fprintf(stderr, "Scav StkO: 0x%lx\n", \
196 # if defined(PAR) || defined(GRAN)
197 # define DEBUG_SCAV_RBH(s,p) \
198 if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
199 fprintf(stderr, "Scav RBH: 0x%lx, info 0x%lx, size %ld, ptrs %ld\n", \
200 Scav, INFO_PTR(Scav), s, p)
202 # define DEBUG_SCAV_BF \
203 if (RTSflags.GcFlags.trace & DEBUG_TRACE_CONCURRENT) \
204 fprintf(stderr, "Scav: 0x%lx, BF info 0x%lx, size %ld, ptrs %ld\n", \
205 Scav, INFO_PTR(Scav), BF_CLOSURE_SIZE(dummy), 0)
/* No-op definitions used when tracing is compiled out -- NOTE(review): the
   #else separating these from the tracing versions is not visible here. */
211 #define DEBUG_SCAV(s,p)
212 #define DEBUG_SCAV_GEN(s,p)
213 #define DEBUG_SCAV_DYN
214 #define DEBUG_SCAV_TUPLE
215 #define DEBUG_SCAV_MUTUPLE
216 #define DEBUG_SCAV_DATA
217 #define DEBUG_SCAV_BH(s)
218 #define DEBUG_SCAV_IND
219 #define DEBUG_SCAV_PERM_IND
220 #define DEBUG_SCAV_OLDROOT(s)
223 # define DEBUG_SCAV_BQ
224 # define DEBUG_SCAV_TSO
225 # define DEBUG_SCAV_STKO
226 # if defined(PAR) || defined(GRAN)
227 # define DEBUG_SCAV_RBH(s,p)
228 # define DEBUG_SCAV_BF
/* Heap-profiling hook applied to every scavenged closure. */
234 #define PROFILE_CLOSURE(closure,size) \
235 HEAP_PROFILE_CLOSURE(closure,size)
237 /*** SPECIALISED CODE ***/
/* _Scavenge_S_N for SPEC closures of fixed size S with N pointers: each
   routine evacuates its N pointer fields (SPEC_DO_EVACUATE), records the
   closure with PROFILE_CLOSURE, and advances Scav past the closure with
   NEXT_Scav(S).  NOTE(review): this extract is missing many body lines
   (braces, returns, and most SPEC_DO_EVACUATE calls for small N); only the
   visible skeleton is documented here. */
241 _Scavenge_0_0(STG_NO_ARGS)
244 PROFILE_CLOSURE(Scav,0);
245 NEXT_Scav(0); /* because "size" is defined to be 0 (size SPEC_VHS == 0) */
251 _Scavenge_1_0(STG_NO_ARGS)
254 PROFILE_CLOSURE(Scav,1);
255 NEXT_Scav(1); /* because "size" is defined to be 1 (size SPEC_VHS == 0) */
259 _Scavenge_1_1(STG_NO_ARGS)
262 PROFILE_CLOSURE(Scav,1);
268 _Scavenge_2_0(STG_NO_ARGS)
271 PROFILE_CLOSURE(Scav,2);
276 _Scavenge_2_1(STG_NO_ARGS)
279 PROFILE_CLOSURE(Scav,2);
285 _Scavenge_2_2(STG_NO_ARGS)
288 PROFILE_CLOSURE(Scav,2);
295 _Scavenge_3_0(STG_NO_ARGS)
298 PROFILE_CLOSURE(Scav,3);
303 _Scavenge_3_1(STG_NO_ARGS)
306 PROFILE_CLOSURE(Scav,3);
312 _Scavenge_3_2(STG_NO_ARGS)
315 PROFILE_CLOSURE(Scav,3);
322 _Scavenge_3_3(STG_NO_ARGS)
325 PROFILE_CLOSURE(Scav,3);
333 _Scavenge_4_0(STG_NO_ARGS)
336 PROFILE_CLOSURE(Scav,4);
341 _Scavenge_4_4(STG_NO_ARGS)
344 PROFILE_CLOSURE(Scav,4);
353 _Scavenge_5_0(STG_NO_ARGS)
356 PROFILE_CLOSURE(Scav,5);
361 _Scavenge_5_5(STG_NO_ARGS)
364 PROFILE_CLOSURE(Scav,5);
374 _Scavenge_6_6(STG_NO_ARGS)
377 PROFILE_CLOSURE(Scav,6);
388 _Scavenge_7_7(STG_NO_ARGS)
391 PROFILE_CLOSURE(Scav,7);
403 _Scavenge_8_8(STG_NO_ARGS)
406 PROFILE_CLOSURE(Scav,8);
419 _Scavenge_9_9(STG_NO_ARGS)
422 PROFILE_CLOSURE(Scav,9);
436 _Scavenge_10_10(STG_NO_ARGS)
439 PROFILE_CLOSURE(Scav,10);
449 SPEC_DO_EVACUATE(10);
454 _Scavenge_11_11(STG_NO_ARGS)
457 PROFILE_CLOSURE(Scav,11);
467 SPEC_DO_EVACUATE(10);
468 SPEC_DO_EVACUATE(11);
473 _Scavenge_12_12(STG_NO_ARGS)
476 PROFILE_CLOSURE(Scav,12);
486 SPEC_DO_EVACUATE(10);
487 SPEC_DO_EVACUATE(11);
488 SPEC_DO_EVACUATE(12);
494 The scavenge routines for revertible black holes with underlying @SPEC@
499 #if defined(PAR) || defined(GRAN)
/* Macro generators for the RBH (revertible black hole) scavenge routines
   over SPEC closures: _1 variants evacuate just the blocking-queue slot,
   _N variants evacuate the queue slot plus the remaining n-1 pointers.
   The first pair is the generational form (guards each field via the
   save_Scav copy so mutable old-gen roots are not created); the second
   pair is the plain two-space form.  NOTE(review): the #else separating
   the two pairs, the closing braces, and the NEXT_Scav/return lines are
   missing from this extract. */
503 # define SCAVENGE_SPEC_RBH_N_1(n) \
505 CAT3(_Scavenge_RBH_,n,_1)(STG_NO_ARGS) \
507 I_ size = n + SPEC_RBH_VHS; \
509 DEBUG_SCAV_RBH(size,1); \
512 DO_EVACUATE(save_Scav, SPEC_RBH_BQ_LOCN); \
514 PROFILE_CLOSURE(Scav,size); \
518 # define SCAVENGE_SPEC_RBH_N_N(n) \
520 CAT4(_Scavenge_RBH_,n,_,n)(STG_NO_ARGS) \
522 I_ size = n + SPEC_RBH_VHS; \
525 DEBUG_SCAV_RBH(size,size-1); \
528 for(i = 0; i < n - 1; i++) { \
529 DO_EVACUATE(save_Scav, SPEC_RBH_BQ_LOCN + i); \
532 PROFILE_CLOSURE(Scav,size); \
538 # define SCAVENGE_SPEC_RBH_N_1(n) \
540 CAT3(_Scavenge_RBH_,n,_1)(STG_NO_ARGS) \
542 I_ size = n + SPEC_RBH_VHS; \
543 DEBUG_SCAV_RBH(size,1); \
544 DO_EVACUATE(Scav, SPEC_RBH_BQ_LOCN);\
545 PROFILE_CLOSURE(Scav,size); \
549 # define SCAVENGE_SPEC_RBH_N_N(n) \
551 CAT4(_Scavenge_RBH_,n,_,n)(STG_NO_ARGS) \
553 I_ size = n + SPEC_RBH_VHS; \
555 DEBUG_SCAV_RBH(size,size-1); \
556 for(i = 0; i < n - 1; i++) { \
557 DO_EVACUATE(Scav, SPEC_RBH_BQ_LOCN + i); \
559 PROFILE_CLOSURE(Scav,size); \
565 SCAVENGE_SPEC_RBH_N_1(2)
567 SCAVENGE_SPEC_RBH_N_1(3)
568 SCAVENGE_SPEC_RBH_N_N(3)
570 SCAVENGE_SPEC_RBH_N_1(4)
571 SCAVENGE_SPEC_RBH_N_N(4)
573 SCAVENGE_SPEC_RBH_N_1(5)
574 SCAVENGE_SPEC_RBH_N_N(5)
576 SCAVENGE_SPEC_RBH_N_N(6)
577 SCAVENGE_SPEC_RBH_N_N(7)
578 SCAVENGE_SPEC_RBH_N_N(8)
579 SCAVENGE_SPEC_RBH_N_N(9)
580 SCAVENGE_SPEC_RBH_N_N(10)
581 SCAVENGE_SPEC_RBH_N_N(11)
582 SCAVENGE_SPEC_RBH_N_N(12)
591 /*** Foreign Object -- NOTHING TO SCAVENGE ***/
593 /* (The ForeignObjList is updated at the end of GC and any unevacuated
594    ForeignObjs are finalised) [ADR][SOF]
/* Foreign objects hold no heap pointers, so scavenging only profiles the
   closure and (presumably, in lines missing from this extract) advances
   Scav past it. */
598 _Scavenge_ForeignObj(STG_NO_ARGS)
600 I_ size = ForeignObj_SIZE;
602 PROFILE_CLOSURE(Scav,size);
608 /*** GENERAL CASE CODE ***/
/* Generic scavenge for GEN closures: size and pointer count are read from
   the closure's info, and every pointer word (pointers are laid out first,
   starting at GEN_HS) is evacuated in turn via DO_EVACUATE. */
611 _Scavenge_S_N(STG_NO_ARGS)
613 I_ count = GEN_HS - 1;
614 /* Offset of first ptr word, less 1 */
615 I_ ptrs = count + GEN_CLOSURE_NoPTRS(Scav);
616 /* Offset of last ptr word */
617 I_ size = GEN_CLOSURE_SIZE(Scav);
619 DEBUG_SCAV_GEN(size, GEN_CLOSURE_NoPTRS(Scav));
621 while (++count <= ptrs) {
622 DO_EVACUATE(Scav, count);
624 PROFILE_CLOSURE(Scav,size);
631 The scavenge code for revertible black holes with underlying @GEN@ closures
635 #if defined(PAR) || defined(GRAN)
/* RBH over a GEN closure: pointer count comes from the underlying closure,
   adjusted for the RBH's reuse of the first two words.  Two evacuation
   loops are visible: one over save_Scav (the generational path, see the
   comment below) and one over Scav (the plain path); the #if selecting
   between them is not visible in this extract. */
638 _Scavenge_RBH_N(STG_NO_ARGS)
644 I_ count = GEN_RBH_HS - 1; /* Offset of first ptr word, less 1 */
645 I_ ptrs = GEN_RBH_CLOSURE_NoPTRS(Scav);
646 I_ size = GEN_RBH_CLOSURE_SIZE(Scav);
649 * Get pointer count from original closure and adjust for one pointer
650 * in the first two words of the RBH.
657 ptrs += count; /* Offset of last ptr word */
659 DEBUG_SCAV_GEN(size, ptrs);
662 /* No old generation roots should be created for mutable */
663 /* pointer fields as they will be explicitly collected */
664 /* Ensure this by pointing Scav at the new generation */
668 while (++count <= ptrs) {
669 DO_EVACUATE(save_Scav, count);
673 while (++count <= ptrs) {
674 DO_EVACUATE(Scav, count);
678 PROFILE_CLOSURE(Scav,size);
689 /*** DYNAMIC CLOSURE -- SIZE & PTRS STORED IN CLOSURE ***/
/* DYN closures carry both their size and pointer count in the closure
   itself; pointers are evacuated from offset DYN_HS upward. */
692 _Scavenge_Dyn(STG_NO_ARGS)
694 I_ count = DYN_HS - 1;
695 /* Offset of first ptr word, less 1 */
696 I_ ptrs = count + DYN_CLOSURE_NoPTRS(Scav);
697 /* Offset of last ptr word */
698 I_ size = DYN_CLOSURE_SIZE(Scav);
701 while (++count <= ptrs) {
702 DO_EVACUATE(Scav, count);
704 PROFILE_CLOSURE(Scav,size);
709 /*** TUPLE CLOSURE -- NO PTRS STORED IN CLOSURE -- NO DATA ***/
/* NOTE(review): the header says "NO PTRS STORED" yet the code reads
   TUPLE_CLOSURE_NoPTRS(Scav) and evacuates pointers -- the banner likely
   means only pointers (no data words) are stored; confirm. */
712 _Scavenge_Tuple(STG_NO_ARGS)
714 I_ count = TUPLE_HS - 1;
715 /* Offset of first ptr word, less 1 */
716 I_ ptrs = count + TUPLE_CLOSURE_NoPTRS(Scav);
717 /* Offset of last ptr word */
718 I_ size = TUPLE_CLOSURE_SIZE(Scav);
721 while (++count <= ptrs) {
722 DO_EVACUATE(Scav, count);
724 PROFILE_CLOSURE(Scav,size);
729 /*** DATA CLOSURE -- SIZE STORED IN CLOSURE -- NO POINTERS ***/
/* DATA closures contain no pointers: nothing to evacuate, just profile
   (and, in lines missing here, skip past the closure). */
732 _Scavenge_Data(STG_NO_ARGS)
734 I_ size = DATA_CLOSURE_SIZE(Scav);
737 PROFILE_CLOSURE(Scav,size);
742 /*** MUTUPLE CLOSURE -- ONLY PTRS STORED IN CLOSURE -- NO DATA ***/
743 /* Only if special GC treatment required */
745 #ifdef GC_MUT_REQUIRED
/* Mutable tuple: like _Scavenge_Tuple, but in the generational case the
   evacuation goes through save_Scav so that no old-generation roots are
   recorded for mutable fields (they are collected explicitly; see the
   comment below).  The #if selecting between the two visible loops is
   missing from this extract. */
747 _Scavenge_MuTuple(STG_NO_ARGS)
752 I_ count = MUTUPLE_HS - 1;
753 /* Offset of first ptr word, less 1 */
754 I_ ptrs = count + MUTUPLE_CLOSURE_NoPTRS(Scav);
755 /* Offset of last ptr word */
756 I_ size = MUTUPLE_CLOSURE_SIZE(Scav);
761 /* No old generation roots should be created for mutable */
762 /* pointer fields as they will be explicitly collected */
763 /* Ensure this by pointing Scav at the new generation */
766 while (++count <= ptrs) {
767 DO_EVACUATE(save_Scav, count);
771 while (++count <= ptrs) {
772 DO_EVACUATE(Scav, count);
776 PROFILE_CLOSURE(Scav,size);
780 #endif /* something generational */
782 /*** BH CLOSURES -- NO POINTERS ***/
/* Black holes contain no pointers; only profiling is visible here.
   NOTE(review): the lines defining "size" for BH_U/BH_N are missing from
   this extract. */
785 _Scavenge_BH_U(STG_NO_ARGS)
789 PROFILE_CLOSURE(Scav,size);
795 _Scavenge_BH_N(STG_NO_ARGS)
799 PROFILE_CLOSURE(Scav,size);
804 /* This is needed for scavenging indirections that "hang around";
805    e.g., because they are on the OldMutables list, or
806    because we have "turned off" shorting-out of indirections
/* Indirection: evacuate the single target pointer stored at IND_HS. */
810 _Scavenge_Ind(STG_NO_ARGS)
812 I_ size = IND_CLOSURE_SIZE(dummy);
814 PROFILE_CLOSURE(Scav,size);
815 DO_EVACUATE(Scav, IND_HS);
/* CAF: same single-pointer treatment as an indirection. */
821 _Scavenge_Caf(STG_NO_ARGS)
823 I_ size = IND_CLOSURE_SIZE(dummy);
825 PROFILE_CLOSURE(Scav,size);
826 DO_EVACUATE(Scav, IND_HS);
831 #if defined(PROFILING) || defined(TICKY_TICKY)
833 /* Special permanent indirection for lexical scoping.
834    As for _Scavenge_Ind but no PROFILE_CLOSURE.
838 _Scavenge_PI(STG_NO_ARGS)
840 I_ size = IND_CLOSURE_SIZE(dummy);
842 /* PROFILE_CLOSURE(Scav,size); */
843 DO_EVACUATE(Scav, IND_HS);
847 #endif /* PROFILING or TICKY */
/* Blocking queue: evacuate the queue head at BQ_HS.  The save_Scav /
   Scav pair again corresponds to the generational vs. plain paths (the
   selecting #if is missing from this extract). */
852 _Scavenge_BQ(STG_NO_ARGS)
854 I_ size = BQ_CLOSURE_SIZE(dummy);
862 /* No old generation roots should be created for mutable */
863 /* pointer fields as they will be explicitly collected */
864 /* Ensure this by pointing Scav at the new generation */
867 DO_EVACUATE(save_Scav, BQ_HS);
870 DO_EVACUATE(Scav, BQ_HS);
873 PROFILE_CLOSURE(Scav,size);
/* Thread State Object: evacuate the TSO link, the stack-object pointer,
   and every pointer register marked live in the liveness bitmask (one bit
   per register, shifted out low-bit first). */
879 _Scavenge_TSO(STG_NO_ARGS)
881 I_ size = TSO_VHS + TSO_CTS_SIZE;
885 STGRegisterTable *r = TSO_INTERNAL_PTR(Scav);
886 W_ liveness = r->rLiveness;
892 /* old and probably wrong -- deleted (WDP 95/12) */
894 DO_EVACUATE(Scav, TSO_LINK_LOCN);
896 DO_EVACUATE(Scav, ((P_) &r->rStkO) - Scav);
898 for (i = 0; liveness != 0; liveness >>= 1, i++) {
900 DO_EVACUATE(Scav, ((P_) &r->rR[i].p) - Scav)
905 PROFILE_CLOSURE(Scav, size);
/* Sanity-check a stack object: walks the chain of update frames from the
   first SuB offset, asserting that each saved SuB/SuA pointer lies within
   the stack object's bounds.  Returns an int suitable for use in ASSERT
   (the visible code never returns explicitly -- the return line is
   missing from this extract). */
910 int /* ToDo: move? */
911 sanityChk_StkO(P_ stko)
913 I_ size = STKO_CLOSURE_SIZE(stko);
914 I_ cts_size = STKO_CLOSURE_CTS_SIZE(stko);
916 I_ sub = STKO_SuB_OFFSET(stko); /* Offset of first update frame in B stack */
918 P_ begin_stko = STKO_CLOSURE_ADDR(stko, 0);
919 P_ beyond_stko = STKO_CLOSURE_ADDR(stko, cts_size+1);
921 /*fprintf(stderr, "stko=%lx; SpA offset=%ld; first SuB=%ld, size=%ld; next=%lx\n",stko,STKO_SpA_OFFSET(stko),sub,STKO_CLOSURE_CTS_SIZE(stko),STKO_LINK(stko));*/
923 /* Evacuate the locations in the A stack */
/* NOTE(review): the comment above says "Evacuate" but this is the checker
   -- presumably copied from _Scavenge_StkO; this loop's body is not
   visible in this extract. */
924 for (count = STKO_SpA_OFFSET(stko); count <= cts_size; count++) {
935 ASSERT(sub <= cts_size);
937 retptr = GRAB_RET(STKO_CLOSURE_ADDR(stko,sub));
938 subptr = GRAB_SuB(STKO_CLOSURE_ADDR(stko,sub));
939 suaptr = GRAB_SuA(STKO_CLOSURE_ADDR(stko,sub));
940 updptr = GRAB_UPDATEE(STKO_CLOSURE_ADDR(stko,sub));
942 ASSERT(subptr >= begin_stko);
943 ASSERT(subptr < beyond_stko);
945 ASSERT(suaptr >= begin_stko);
946 ASSERT(suaptr <= beyond_stko);
948 /* ToDo: would be nice to chk that retptr is in text space */
950 sub = STKO_CLOSURE_OFFSET(stko, subptr);
/* Stack object: after a sanity check, evacuate the StkO link, every live
   A-stack slot (from SpA to the end of the contents), and then each
   updatee saved in the chain of B-stack update frames, following the
   saved SuB pointers frame to frame. */
957 _Scavenge_StkO(STG_NO_ARGS)
959 I_ size = STKO_CLOSURE_SIZE(Scav);
964 I_ sub = STKO_SuB_OFFSET(Scav); /* Offset of first update frame in B stack */
969 /* deleted; probably wrong */
971 ASSERT(sanityChk_StkO(Scav));
973 /* Evacuate the link */
974 DO_EVACUATE(Scav, STKO_LINK_LOCN);
976 /* Evacuate the locations in the A stack */
977 for (count = STKO_SpA_OFFSET(Scav); count <= STKO_CLOSURE_CTS_SIZE(Scav); count++) {
978 STKO_DO_EVACUATE(count);
981 /* Now evacuate the updatees in the update stack */
985 STKO_DO_EVACUATE(sub + BREL(UF_UPDATEE));
986 subptr = GRAB_SuB(STKO_CLOSURE_ADDR(Scav,sub));
988 sub = STKO_CLOSURE_OFFSET(Scav, subptr);
992 PROFILE_CLOSURE(Scav, size);
/* FetchMe (remote reference): no local pointers to evacuate; profile only.
   NOTE(review): the enclosing #ifdef (presumably PAR) is not visible in
   this extract. */
1000 _Scavenge_FetchMe(STG_NO_ARGS)
1002 I_ size = FETCHME_CLOSURE_SIZE(dummy);
1004 PROFILE_CLOSURE(Scav,size);
/* Blocked fetch: evacuate the link and node fields; save_Scav vs Scav is
   again the generational vs plain pair (selecting #if not visible). */
1010 _Scavenge_BF(STG_NO_ARGS)
1012 I_ size = BF_CLOSURE_SIZE(dummy);
1020 /* No old generation roots should be created for mutable */
1021 /* pointer fields as they will be explicitly collected */
1022 /* Ensure this by pointing Scav at the new generation */
1026 DO_EVACUATE(save_Scav, BF_LINK_LOCN);
1027 DO_EVACUATE(save_Scav, BF_NODE_LOCN);
1030 DO_EVACUATE(Scav, BF_LINK_LOCN);
1031 DO_EVACUATE(Scav, BF_NODE_LOCN);
1034 PROFILE_CLOSURE(Scav, size);
1040 #endif /* CONCURRENT */
1044 /* Recently allocated old roots for promoted objects referencing
1045    the new generation will be scavenged -- Just move to the next
/* Old-root closures are just stepped over during scavenging. */
1049 _Scavenge_OldRoot(STG_NO_ARGS)
1052 DEBUG_SCAV_OLDROOT(size);
/* Old roots must never be evacuated -- this stub reports the error. */
1058 _Evacuate_OldRoot(evac)
1061 fprintf(stderr,"Called _Evacuate_OldRoot: Closure %lx Info %lx\nShould never occur!\n",
1062 (W_) evac, (W_) INFO_PTR(evac));
/* A forwarding reference should have been replaced before scavenging;
   reaching this code indicates a GC bug. */
1069 _Scavenge_Forward_Ref(STG_NO_ARGS)
1071 fprintf(stderr,"Called _Scavenge_Forward_Ref: Closure %lx Info %lx\nShould never occur!\n",
1072 (W_) Scav, (W_) INFO_PTR(Scav));
1077 #endif /* _INFO_COPYING */