/* -----------------------------------------------------------------------------
 *
 * (c) The GHC Team 1998-2008
 *
 * Generational garbage collector: evacuation functions
 *
 * Documentation on the architecture of the Garbage Collector can be
 * found in the online commentary:
 *
 *   http://hackage.haskell.org/trac/ghc/wiki/Commentary/Rts/Storage/GC
 *
 * ---------------------------------------------------------------------------*/
#include "Rts.h"
#include "Storage.h"
#include "MBlock.h"
#include "GC.h"
#include "GCUtils.h"
#include "Evac.h"
#include "LdvProfile.h"
#if defined(PROF_SPIN) && defined(THREADED_RTS)
StgWord64 whitehole_spin = 0;
#endif
/* Used to avoid long recursion due to selector thunks
 */
#define MAX_THUNK_SELECTOR_DEPTH 16
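
// (If the depth bound is hit we simply bale out and evacuate the
// selector thunk unevaluated; see the THUNK_SELECTOR case in
// eval_thunk_selector() below.)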
static void eval_thunk_selector (StgClosure **q, StgSelector * p, rtsBool);
STATIC_INLINE void evacuate_large(StgPtr p);
/* -----------------------------------------------------------------------------
   Allocate some space in which to copy an object.
   -------------------------------------------------------------------------- */
STATIC_INLINE StgPtr
alloc_for_copy (nat size, step *stp)
{
    StgPtr to;
    step_workspace *ws;

    /* Find out where we're going, using the handy "to" pointer in
     * the step of the source object.  If it turns out we need to
     * evacuate to an older generation, adjust it here (see comment
     * by evacuate()).
     */
    if (stp < gct->evac_step) {
        if (gct->eager_promotion) {
            stp = gct->evac_step;
        } else {
            gct->failed_to_evac = rtsTrue;
        }
    }

    ws = &gct->steps[stp->abs_no];
    // this compiles to a single mem access to stp->abs_no only

    /* chain a new block onto the to-space for the destination step if
     * necessary.
     */
    to = ws->todo_free;
    if (to + size > ws->todo_lim) {
        to = todo_block_full(size, ws);
    }
    ws->todo_free = to + size;
    ASSERT(ws->todo_free >= ws->todo_bd->free && ws->todo_free <= ws->todo_lim);

    return to;
}
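
/* A rough sketch (not the code in this file) of how a copying routine
 * would use alloc_for_copy(): grab to-space, copy the payload words,
 * then overwrite the source header so that later references find the
 * copy.  THREADED_RTS locking and pointer tagging are omitted here.
 *
 *     to = alloc_for_copy(size, stp);
 *     for (i = 0; i < size; i++) to[i] = ((StgPtr)from)[i];
 *     SET_INFO(from, &stg_EVACUATED_info);
 *     ((StgEvacuated *)from)->evacuee = (StgClosure *)to;
 */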
/* -----------------------------------------------------------------------------
   The evacuate() code
   -------------------------------------------------------------------------- */
/* -----------------------------------------------------------------------------
   Evacuate a large object

   This just consists of removing the object from the (doubly-linked)
   step->large_objects list, and linking it on to the (singly-linked)
   step->new_large_objects list, from where it will be scavenged later.

   Convention: bd->flags has BF_EVACUATED set for a large object
   that has been evacuated, or unset otherwise.
   -------------------------------------------------------------------------- */
STATIC_INLINE void
evacuate_large(StgPtr p)
{
  bdescr *bd = Bdescr(p);
  step *stp, *new_stp;
  step_workspace *ws;

  stp = bd->step;
  ACQUIRE_SPIN_LOCK(&stp->sync_large_objects);

  // object must be at the beginning of the block (or be a ByteArray)
  ASSERT(get_itbl((StgClosure *)p)->type == ARR_WORDS ||
         (((W_)p & BLOCK_MASK) == 0));

  // already evacuated?
  if (bd->flags & BF_EVACUATED) {
    /* Don't forget to set the gct->failed_to_evac flag if we didn't get
     * the desired destination (see comments in evacuate()).
     */
    if (stp < gct->evac_step) {
      gct->failed_to_evac = rtsTrue;
      TICK_GC_FAILED_PROMOTION();
    }
    RELEASE_SPIN_LOCK(&stp->sync_large_objects);
    return;
  }

  // remove from large_object list
  if (bd->u.back) {
    bd->u.back->link = bd->link;
  } else { // first object in the list
    stp->large_objects = bd->link;
  }
  if (bd->link) {
    bd->link->u.back = bd->u.back;
  }

  /* link it on to the evacuated large object list of the destination step
   */
  new_stp = stp->to;
  if (new_stp < gct->evac_step) {
      if (gct->eager_promotion) {
          new_stp = gct->evac_step;
      } else {
          gct->failed_to_evac = rtsTrue;
      }
  }

  ws = &gct->steps[new_stp->abs_no];
  bd->flags |= BF_EVACUATED;
  bd->step = new_stp;
  bd->gen_no = new_stp->gen_no;
  bd->link = ws->todo_large_objects;
  ws->todo_large_objects = bd;

  RELEASE_SPIN_LOCK(&stp->sync_large_objects);
}
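
/* A sketch of the list surgery above, where B is the block being
 * evacuated (the names A, B, C are illustrative only):
 *
 *     stp->large_objects:      A <-> B <-> C   becomes   A <-> C
 *     ws->todo_large_objects:  B -> old head -> ...
 *
 * The from-list is doubly-linked so removal is O(1); the to-list is
 * only ever extended at the front, so a single link field suffices.
 */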
/* -----------------------------------------------------------------------------
   Evaluate a THUNK_SELECTOR if possible.

   p points to a THUNK_SELECTOR that we want to evaluate.  The
   result of "evaluating" it will be evacuated and a pointer to the
   to-space closure will be returned.

   If the THUNK_SELECTOR could not be evaluated (its selectee is still
   a THUNK, for example), then the THUNK_SELECTOR itself will be
   evacuated.
   -------------------------------------------------------------------------- */
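
/* For example, a Haskell projection such as
 *
 *      case t of (a, _) -> a
 *
 * where t is an unevaluated pair builds a THUNK_SELECTOR with
 * selectee t and selector_offset 0.  Evaluating it during GC (rather
 * than copying it) avoids the space leak where the whole pair is
 * retained just to reach one component.
 */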
static void
unchain_thunk_selectors(StgSelector *p, StgClosure *val)
{
    StgSelector *prev;

    prev = NULL;
    while (p)
    {
#ifdef THREADED_RTS
        ASSERT(p->header.info == &stg_WHITEHOLE_info);
#else
        ASSERT(p->header.info == &stg_BLACKHOLE_info);
#endif
        // val must be in to-space.  Not always: when we recursively
        // invoke eval_thunk_selector(), the recursive calls will not
        // evacuate the value (because we want to select on the value,
        // not evacuate it), so in this case val is in from-space.
        // ASSERT(!HEAP_ALLOCED(val) || Bdescr((P_)val)->gen_no > N || (Bdescr((P_)val)->flags & BF_EVACUATED));

        prev = (StgSelector*)((StgClosure *)p)->payload[0];

        // Update the THUNK_SELECTOR with an indirection to the
        // EVACUATED closure now at p.  Why do this rather than
        // upd_evacuee(q,p)?  Because we have an invariant that an
        // EVACUATED closure always points to an object in the
        // same or an older generation (required by the short-cut
        // test in the EVACUATED case, below).
        ((StgInd *)p)->indirectee = val;
        write_barrier();
        SET_INFO(p, &stg_IND_info);

        // For the purposes of LDV profiling, we have created an
        // indirection.
        LDV_RECORD_CREATE(p);

        p = prev;
    }
}
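
/* The chain this function walks looks like this: each blackholed
 * selector points to the previously visited one through payload[0],
 *
 *     p -> p' -> p'' -> NULL
 *
 * and every selector on the chain ends up as an IND to val.
 */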
static void
eval_thunk_selector (StgClosure **q, StgSelector * p, rtsBool evac)
                 // NB. for legacy reasons, p & q are swapped around :(
{
    nat field;
    StgInfoTable *info;
    StgWord info_ptr;
    StgClosure *selectee;
    StgClosure *val;
    StgSelector *prev_thunk_selector;
    bdescr *bd;

    prev_thunk_selector = NULL;
    // this is a chain of THUNK_SELECTORs that we are going to update
    // to point to the value of the current THUNK_SELECTOR.  Each
    // closure on the chain is a BLACKHOLE, and points to the next in the
    // chain with payload[0].

selector_chain:
    bd = Bdescr((StgPtr)p);
    if (HEAP_ALLOCED(p)) {
        // If the THUNK_SELECTOR is in to-space or in a generation that we
        // are not collecting, then bale out early.  We won't be able to
        // save any space in any case, and updating with an indirection is
        // trickier in a non-collected gen: we would have to update the
        // mutable list.
        if (bd->flags & BF_EVACUATED) {
            unchain_thunk_selectors(prev_thunk_selector, (StgClosure *)p);
            *q = (StgClosure *)p;
            return;
        }
        // we don't update THUNK_SELECTORS in the compacted
        // generation, because compaction does not remove the INDs
        // that result, this causes confusion later
        // (scavenge_mark_stack doesn't deal with IND).  BEWARE!  This
        // bit is very tricky to get right.  If you make changes
        // around here, test by compiling stage 3 with +RTS -c -RTS.
        if (bd->flags & BF_COMPACTED) {
            // must call evacuate() to mark this closure if evac==rtsTrue
            *q = (StgClosure *)p;
            if (evac) evacuate(q);
            unchain_thunk_selectors(prev_thunk_selector, (StgClosure *)p);
            return;
        }
    }
    // BLACKHOLE the selector thunk, since it is now under evaluation.
    // This is important to stop us going into an infinite loop if
    // this selector thunk eventually refers to itself.
#if defined(THREADED_RTS)
    // In threaded mode, we'll use WHITEHOLE to lock the selector
    // thunk while we evaluate it.
    do {
        info_ptr = xchg((StgPtr)&p->header.info, (W_)&stg_WHITEHOLE_info);
    } while (info_ptr == (W_)&stg_WHITEHOLE_info);

    // make sure someone else didn't get here first...
    if (INFO_PTR_TO_STRUCT(info_ptr)->type != THUNK_SELECTOR) {
        // v. tricky now.  The THUNK_SELECTOR has been evacuated
        // by another thread, and is now either EVACUATED or IND.
        // We need to extract ourselves from the current situation
        // as cleanly as possible.
        //   - unlock the closure
        //   - update *q, we may have done *some* evaluation
        //   - if evac, we need to call evacuate(), because we
        //     need the write-barrier stuff.
        //   - undo the chain we've built to point to p.
        SET_INFO(p, (const StgInfoTable *)info_ptr);
        *q = (StgClosure *)p;
        if (evac) evacuate(q);
        unchain_thunk_selectors(prev_thunk_selector, (StgClosure *)p);
        return;
    }
#else
    // Save the real info pointer (NOTE: not the same as get_itbl()).
    info_ptr = (StgWord)p->header.info;
    SET_INFO(p,&stg_BLACKHOLE_info);
#endif
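
    // From here on, info_ptr holds the selector thunk's original info
    // pointer, and p is locked: WHITEHOLE under THREADED_RTS, or
    // BLACKHOLE'd otherwise.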
    field = INFO_PTR_TO_STRUCT(info_ptr)->layout.selector_offset;

    // The selectee might be a constructor closure,
    // so we untag the pointer.
    selectee = UNTAG_CLOSURE(p->selectee);
selector_loop:
    // selectee now points to the closure that we're trying to select
    // a field from.  It may or may not be in to-space: we try not to
    // end up in to-space, but it's impractical to avoid it in
    // general.  The compacting GC scatters to-space pointers in
    // from-space during marking, for example.  We rely on the property
    // that evacuate() doesn't mind if it gets passed a to-space pointer.

    info = get_itbl(selectee);
    switch (info->type) {
      case WHITEHOLE:
          goto bale_out; // about to be evacuated by another thread (or a loop).

      case CONSTR:
      case CONSTR_1_0:
      case CONSTR_0_1:
      case CONSTR_2_0:
      case CONSTR_1_1:
      case CONSTR_0_2:
      case CONSTR_STATIC:
      case CONSTR_NOCAF_STATIC:
          // check that the size is in range
          ASSERT(field < (StgWord32)(info->layout.payload.ptrs +
                                     info->layout.payload.nptrs));

          // Select the right field from the constructor
          val = selectee->payload[field];

#ifdef PROFILING
          // For the purposes of LDV profiling, we have destroyed
          // the original selector thunk, p.
          SET_INFO(p, (StgInfoTable *)info_ptr);
          LDV_RECORD_DEAD_FILL_SLOP_DYNAMIC((StgClosure *)p);
          SET_INFO(p, &stg_BLACKHOLE_info);
#endif

          // the closure in val is now the "value" of the
          // THUNK_SELECTOR in p.  However, val may itself be a
          // THUNK_SELECTOR, in which case we want to continue
          // evaluating until we find the real value, and then
          // update the whole chain to point to the value.
      val_loop:
          info = get_itbl(UNTAG_CLOSURE(val));
          switch (info->type) {
          case IND:
          case IND_PERM:
          case IND_OLDGEN:
          case IND_OLDGEN_PERM:
          case IND_STATIC:
              val = ((StgInd *)val)->indirectee;
              goto val_loop;
          case THUNK_SELECTOR:
              ((StgClosure*)p)->payload[0] = (StgClosure *)prev_thunk_selector;
              prev_thunk_selector = p;
              p = (StgSelector*)val;
              goto selector_chain;
          default:
              ((StgClosure*)p)->payload[0] = (StgClosure *)prev_thunk_selector;
              prev_thunk_selector = p;

              *q = val;
              if (evac) evacuate(q);
              val = *q;
              // evacuate() cannot recurse through
              // eval_thunk_selector(), because we know val is not
              // a THUNK_SELECTOR.
              unchain_thunk_selectors(prev_thunk_selector, val);
              return;
          }

      case IND:
      case IND_PERM:
      case IND_OLDGEN:
      case IND_OLDGEN_PERM:
      case IND_STATIC:
          // Again, we might need to untag a constructor.
          selectee = UNTAG_CLOSURE( ((StgInd *)selectee)->indirectee );
          goto selector_loop;

      case EVACUATED:
          // We don't follow pointers into to-space; the constructor
          // has already been evacuated, so we won't save any space
          // leaks by evaluating this selector thunk anyhow.
          goto bale_out;

      case THUNK_SELECTOR:
          // recursively evaluate this selector.  We don't want to
          // recurse indefinitely, so we impose a depth bound.
          if (gct->thunk_selector_depth >= MAX_THUNK_SELECTOR_DEPTH) {
              goto bale_out;
          }

          gct->thunk_selector_depth++;
          // rtsFalse says "don't evacuate the result".  It will,
          // however, update any THUNK_SELECTORs that are evaluated
          // along the way.
          eval_thunk_selector(&val, (StgSelector*)selectee, rtsFalse);
          gct->thunk_selector_depth--;

          // did we actually manage to evaluate it?
          if (val == selectee) goto bale_out;

          // Of course this pointer might be tagged...
          selectee = UNTAG_CLOSURE(val);
          goto selector_loop;

      case AP:
      case AP_STACK:
      case THUNK:
      case THUNK_1_0:
      case THUNK_0_1:
      case THUNK_2_0:
      case THUNK_1_1:
      case THUNK_0_2:
      case THUNK_STATIC:
      case CAF_BLACKHOLE:
      case SE_CAF_BLACKHOLE:
      case SE_BLACKHOLE:
      case BLACKHOLE:
          // not evaluated yet
          goto bale_out;

      default:
          barf("eval_thunk_selector: strange selectee %d",
               (int)(info->type));
    }

bale_out:
    // We didn't manage to evaluate this thunk; restore the old info
    // pointer.  But don't forget: we still need to evacuate the thunk itself.
    SET_INFO(p, (const StgInfoTable *)info_ptr);
    // THREADED_RTS: we just unlocked the thunk, so another thread
    // might get in and update it.  copy() will lock it again and
    // check whether it was updated in the meantime.
    *q = (StgClosure *)p;
    if (evac) {
        copy(q,(StgClosure *)p,THUNK_SELECTOR_sizeW(),bd->step->to);
    }
    unchain_thunk_selectors(prev_thunk_selector, *q);
    return;
}

/* -----------------------------------------------------------------------------
   move_TSO is called to update the TSO structure after it has been
   moved from one place to another.
   -------------------------------------------------------------------------- */
void
move_TSO (StgTSO *src, StgTSO *dest)
{
    ptrdiff_t diff;

    // relocate the stack pointer...
    diff = (StgPtr)dest - (StgPtr)src; // In *words*
    dest->sp = (StgPtr)dest->sp + diff;
}
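
/* A worked example of the relocation above: if src sits at word
 * address 0x1000 and dest at 0x1400, diff is 0x400 words, so a
 * dest->sp that still points at 0x1100 (inside the old copy) is
 * shifted to 0x1500, the same offset within the new copy.  The
 * addresses are illustrative only.
 */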