1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team 1998-2006
5 * Generational garbage collector: evacuation functions
7 * Documentation on the architecture of the Garbage Collector can be
8 * found in the online commentary:
10 * http://hackage.haskell.org/trac/ghc/wiki/Commentary/Rts/Storage/GC
12 * ---------------------------------------------------------------------------*/
22 #include "LdvProfile.h"
24 /* Used to avoid long recursion due to selector thunks */
26 #define MAX_THUNK_SELECTOR_DEPTH 16
28 static void eval_thunk_selector (StgClosure **q, StgSelector * p, rtsBool);
29 STATIC_INLINE void evacuate_large(StgPtr p);
31 /* -----------------------------------------------------------------------------
32 Allocate some space in which to copy an object.
33 -------------------------------------------------------------------------- */
/* Allocate 'size' words of to-space in step 'stp' for an object being
 * evacuated, returning a pointer to the fresh space.
 * NOTE(review): this listing is elided — the return type, opening brace and
 * the declarations of 'to', 'bd' and 'ws' are not visible here.
 */
36 alloc_for_copy (nat size, step *stp)
42 /* Find out where we're going, using the handy "to" pointer in
43  * the step of the source object. If it turns out we need to
44  * evacuate to an older generation, adjust it here (see comment
// Destination step is younger than the one we must evacuate to: either
// promote (the eager branch's body is elided) or record the failed
// promotion so the caller can deal with it.
47 if (stp < gct->evac_step) {
48 if (gct->eager_promotion) {
51 gct->failed_to_evac = rtsTrue;
// Per-GC-thread workspace for the destination generation/step.
55 ws = &gct->steps[stp->gen_no][stp->no];
57 /* chain a new block onto the to-space for the destination step if
// Current todo block has no room for 'size' words — chain on a fresh one.
62 if (to + size >= bd->start + BLOCK_SIZE_W) {
63 bd = gc_alloc_todo_block(ws);
/* Copy closure 'src' ('size' words) into to-space step 'stp', store a
 * pointer tagged with 'tag' through 'p', and leave an EVACUATED forwarding
 * closure behind in from-space.
 * NOTE(review): elided listing — the #if THREADED_RTS / #else structure and
 * several statements (header copy, word 0 copy, closing braces) are missing.
 */
72 copy_tag(StgClosure **p, StgClosure *src, nat size, step *stp,StgWord tag)
74 StgPtr to, tagged_to, from;
// THREADED path (presumably): claim the closure by atomically swapping in
// WHITEHOLE; spin while another thread holds it.
80 info = xchg((StgPtr)&src->header.info, (W_)&stg_WHITEHOLE_info);
82 } while (info == (W_)&stg_WHITEHOLE_info);
// Another thread already evacuated it: restore the header and let
// evacuate() follow the forwarding pointer instead.
83 if (info == (W_)&stg_EVACUATED_info) {
84 src->header.info = (const StgInfoTable *)info;
85 return evacuate(p); // does the failed_to_evac stuff
// Non-atomic path: read the real header, then mark the closure EVACUATED.
88 info = (W_)src->header.info;
89 src->header.info = &stg_EVACUATED_info;
92 to = alloc_for_copy(size,stp);
// Publish the tagged to-space address to the caller.
93 tagged_to = (StgPtr)TAG_CLOSURE(tag,(StgClosure*)to);
94 *p = (StgClosure *)tagged_to;
96 TICK_GC_WORDS_COPIED(size);
// Word-by-word copy of the object (word 0 is handled in elided lines).
100 for (i = 1; i < size; i++) { // unroll for small i
// Leave the forwarding pointer behind in the from-space copy.
104 ((StgEvacuated*)from)->evacuee = (StgClosure *)tagged_to;
106 // retag pointer before updating EVACUATE closure and returning
108 // if (to+size+2 < bd->start + BLOCK_SIZE_W) {
109 // __builtin_prefetch(to + size + 2, 1);
114 ((StgEvacuated*)from)->header.info = &stg_EVACUATED_info;
118 // We store the size of the just evacuated object in the LDV word so that
119 // the profiler can guess the position of the next object later.
120 SET_EVACUAEE_FOR_LDV(from, size);
124 /* Special version of copy() for when we only want to copy the info
125  * pointer of an object, but reserve some padding after it. This is
126  * used to optimise evacuation of BLACKHOLEs.
// copyPart: copy only 'size_to_copy' words of 'src' but reserve
// 'size_to_reserve' words in to-space; the result pointer is NOT tagged.
// NOTE(review): elided listing — locking structure mirrors copy_tag() but
// the #if arms and several statements are not visible here.
129 copyPart(StgClosure **p, StgClosure *src, nat size_to_reserve, nat size_to_copy, step *stp)
// Claim the closure with WHITEHOLE (threaded path, presumably).
137 info = xchg((StgPtr)&src->header.info, (W_)&stg_WHITEHOLE_info);
138 } while (info == (W_)&stg_WHITEHOLE_info);
// Lost the race: restore the header and follow the forwarding pointer.
139 if (info == (W_)&stg_EVACUATED_info) {
140 src->header.info = (const StgInfoTable *)info;
141 return evacuate(p); // does the failed_to_evac stuff
144 info = (W_)src->header.info;
145 src->header.info = &stg_EVACUATED_info;
// Reserve the full padded size, but only copy the prefix.
148 to = alloc_for_copy(size_to_reserve, stp);
149 *p = (StgClosure *)to;
151 TICK_GC_WORDS_COPIED(size_to_copy);
155 for (i = 1; i < size_to_copy; i++) { // unroll for small i
// Forwarding pointer + EVACUATED header left behind in from-space.
159 ((StgEvacuated*)from)->evacuee = (StgClosure *)to;
162 ((StgEvacuated*)from)->header.info = &stg_EVACUATED_info;
166 // We store the size of the just evacuated object in the LDV word so that
167 // the profiler can guess the position of the next object later.
168 SET_EVACUAEE_FOR_LDV(from, size_to_reserve);
// Fill the reserved-but-uncopied slop for the LDV profiler's benefit.
170 if (size_to_reserve - size_to_copy > 0)
171 LDV_FILL_SLOP(to + size_to_copy - 1, (int)(size_to_reserve - size_to_copy));
176 /* Copy wrappers that don't tag the closure after copying */
// copy: plain evacuation — delegates to copy_tag() with tag 0 (no tag bits).
178 copy(StgClosure **p, StgClosure *src, nat size, step *stp)
180 copy_tag(p,src,size,stp,0);
183 /* -----------------------------------------------------------------------------
185 -------------------------------------------------------------------------- */
188 #include "Evac.c-inc"
191 #include "Evac.c-inc"
193 /* -----------------------------------------------------------------------------
194 Evacuate a large object
196 This just consists of removing the object from the (doubly-linked)
197 step->large_objects list, and linking it on to the (singly-linked)
198 step->new_large_objects list, from where it will be scavenged later.
200 Convention: bd->flags has BF_EVACUATED set for a large object
201 that has been evacuated, or unset otherwise.
202 -------------------------------------------------------------------------- */
/* Evacuate the large object starting at 'p': unlink its block(s) from the
 * source step's large_objects list and push them onto the destination
 * step's todo_large_objects list (see the section comment above).
 * NOTE(review): elided listing — the declarations of 'stp'/'ws', the
 * function's braces and some list-manipulation lines are not visible.
 */
205 evacuate_large(StgPtr p)
207 bdescr *bd = Bdescr(p);
211 // object must be at the beginning of the block (or be a ByteArray)
212 ASSERT(get_itbl((StgClosure *)p)->type == ARR_WORDS ||
213 (((W_)p & BLOCK_MASK) == 0));
215 // already evacuated?
216 if (bd->flags & BF_EVACUATED) {
217 /* Don't forget to set the gct->failed_to_evac flag if we didn't get
218  * the desired destination (see comments in evacuate()).
// Already moved, but not far enough: record the failed promotion.
220 if (bd->step < gct->evac_step) {
221 gct->failed_to_evac = rtsTrue;
222 TICK_GC_FAILED_PROMOTION();
// Unlink 'bd' from the doubly-linked large_objects list under the
// per-step spinlock (other GC threads may be mutating the same list).
229 ACQUIRE_SPIN_LOCK(&stp->sync_large_objects);
230 // remove from large_object list
232 bd->u.back->link = bd->link;
233 } else { // first object in the list
234 stp->large_objects = bd->link;
237 bd->link->u.back = bd->u.back;
239 RELEASE_SPIN_LOCK(&stp->sync_large_objects);
241 /* link it on to the evacuated large object list of the destination step
// Same promotion adjustment as alloc_for_copy(): either promote eagerly
// or record the failure.
244 if (stp < gct->evac_step) {
245 if (gct->eager_promotion) {
246 stp = gct->evac_step;
248 gct->failed_to_evac = rtsTrue;
252 ws = &gct->steps[stp->gen_no][stp->no];
// Retarget the block and push it on the destination's pending list; it
// will be scavenged from there later.
254 bd->gen_no = stp->gen_no;
255 bd->link = ws->todo_large_objects;
256 ws->todo_large_objects = bd;
257 bd->flags |= BF_EVACUATED;
260 /* -----------------------------------------------------------------------------
261 Evaluate a THUNK_SELECTOR if possible.
263 p points to a THUNK_SELECTOR that we want to evaluate. The
264 result of "evaluating" it will be evacuated and a pointer to the
265 to-space closure will be returned.
267 If the THUNK_SELECTOR could not be evaluated (its selectee is still
268 a THUNK, for example), then the THUNK_SELECTOR itself will be
270 -------------------------------------------------------------------------- */
/* Walk the chain of BLACKHOLE'd THUNK_SELECTORs starting at 'p' (linked
 * through payload[0] by eval_thunk_selector) and overwrite each one with
 * an indirection to 'val'.
 * NOTE(review): elided listing — the loop structure and braces are not
 * visible here; only the per-node update is shown.
 */
272 unchain_thunk_selectors(StgSelector *p, StgClosure *val)
279 ASSERT(p->header.info == &stg_BLACKHOLE_info);
// payload[0] holds the next selector in the chain (see the comment in
// eval_thunk_selector about the chain representation).
280 prev = (StgSelector*)((StgClosure *)p)->payload[0];
282 // Update the THUNK_SELECTOR with an indirection to the
283 // EVACUATED closure now at p. Why do this rather than
284 // upd_evacuee(q,p)? Because we have an invariant that an
285 // EVACUATED closure always points to an object in the
286 // same or an older generation (required by the short-cut
287 // test in the EVACUATED case, below).
288 SET_INFO(p, &stg_IND_info);
289 ((StgInd *)p)->indirectee = val;
291 // For the purposes of LDV profiling, we have created an
293 LDV_RECORD_CREATE(p);
/* Try to evaluate the THUNK_SELECTOR 'p' (see the section comment above).
 * The result is written through 'q'; if 'evac' is rtsTrue the result is
 * also evacuated.  Chains of selectors are updated iteratively (via
 * prev_thunk_selector / unchain_thunk_selectors) rather than recursively,
 * bounded by MAX_THUNK_SELECTOR_DEPTH.
 * NOTE(review): elided listing — many lines (braces, labels, #if arms,
 * several switch cases) are not visible; comments below describe only what
 * the visible lines establish.
 */
300 eval_thunk_selector (StgClosure **q, StgSelector * p, rtsBool evac)
301 // NB. for legacy reasons, p & q are swapped around :(
306 StgClosure *selectee;
307 StgSelector *prev_thunk_selector;
311 prev_thunk_selector = NULL;
312 // this is a chain of THUNK_SELECTORs that we are going to update
313 // to point to the value of the current THUNK_SELECTOR. Each
314 // closure on the chain is a BLACKHOLE, and points to the next in the
315 // chain with payload[0].
319 bd = Bdescr((StgPtr)p);
320 if (HEAP_ALLOCED(p)) {
321 // If the THUNK_SELECTOR is in to-space or in a generation that we
322 // are not collecting, then bale out early. We won't be able to
323 // save any space in any case, and updating with an indirection is
324 // trickier in a non-collected gen: we would have to update the
326 if ((bd->gen_no > N) || (bd->flags & BF_EVACUATED)) {
327 unchain_thunk_selectors(prev_thunk_selector, (StgClosure *)p);
328 *q = (StgClosure *)p;
331 // we don't update THUNK_SELECTORS in the compacted
332 // generation, because compaction does not remove the INDs
333 // that result, this causes confusion later
334 // (scavenge_mark_stack doesn't deal with IND). BEWARE! This
335 // bit is very tricky to get right. If you make changes
336 // around here, test by compiling stage 3 with +RTS -c -RTS.
337 if (bd->flags & BF_COMPACTED) {
338 // must call evacuate() to mark this closure if evac==rtsTrue
339 *q = (StgClosure *)p;
340 if (evac) evacuate(q);
341 unchain_thunk_selectors(prev_thunk_selector, (StgClosure *)p);
347 // BLACKHOLE the selector thunk, since it is now under evaluation.
348 // This is important to stop us going into an infinite loop if
349 // this selector thunk eventually refers to itself.
350 #if defined(THREADED_RTS)
351 // In threaded mode, we'll use WHITEHOLE to lock the selector
352 // thunk while we evaluate it.
// Spin until we own the thunk (same WHITEHOLE protocol as copy_tag).
354 info_ptr = (StgInfoTable *)xchg((StgPtr)&p->header.info, (W_)&stg_WHITEHOLE_info);
355 if (info_ptr == (W_)&stg_WHITEHOLE_info) {
357 info_ptr = xchg((StgPtr)&p->header.info, (W_)&stg_WHITEHOLE_info);
358 } while (info_ptr == (W_)&stg_WHITEHOLE_info);
361 // make sure someone else didn't get here first
362 if (INFO_PTR_TO_STRUCT(info_ptr)->type != THUNK_SELECTOR) {
367 // Save the real info pointer (NOTE: not the same as get_itbl()).
368 info_ptr = (StgWord)p->header.info;
369 SET_INFO(p,&stg_BLACKHOLE_info);
// Which payload field of the selectee this selector extracts.
372 field = INFO_PTR_TO_STRUCT(info_ptr)->layout.selector_offset;
374 // The selectee might be a constructor closure,
375 // so we untag the pointer.
376 selectee = UNTAG_CLOSURE(p->selectee);
379 // selectee now points to the closure that we're trying to select
380 // a field from. It may or may not be in to-space: we try not to
381 // end up in to-space, but it's impractical to avoid it in
382 // general. The compacting GC scatters to-space pointers in
383 // from-space during marking, for example. We rely on the property
384 // that evacuate() doesn't mind if it gets passed a to-space pointer.
386 info = get_itbl(selectee);
387 switch (info->type) {
// Case label(s) elided — a WHITEHOLE/locked selectee, presumably.
389 goto bale_out; // about to be evacuated by another thread (or a loop).
// CONSTR-family cases: we can actually perform the selection.
398 case CONSTR_NOCAF_STATIC:
400 // check that the size is in range
401 ASSERT(field < (StgWord32)(info->layout.payload.ptrs +
402 info->layout.payload.nptrs));
404 // Select the right field from the constructor
405 val = selectee->payload[field];
408 // For the purposes of LDV profiling, we have destroyed
409 // the original selector thunk, p.
410 SET_INFO(p, info_ptr);
411 LDV_RECORD_DEAD_FILL_SLOP_DYNAMIC((StgClosure *)p);
412 SET_INFO(p, &stg_BLACKHOLE_info);
415 // the closure in val is now the "value" of the
416 // THUNK_SELECTOR in p. However, val may itself be a
417 // THUNK_SELECTOR, in which case we want to continue
418 // evaluating until we find the real value, and then
419 // update the whole chain to point to the value.
421 info = get_itbl(UNTAG_CLOSURE(val));
422 switch (info->type) {
426 case IND_OLDGEN_PERM:
428 val = ((StgInd *)val)->indirectee;
// val is itself a selector (case label elided): push the current thunk
// onto the chain via payload[0] and loop on val.
431 ((StgClosure*)p)->payload[0] = (StgClosure *)prev_thunk_selector;
432 prev_thunk_selector = p;
433 p = (StgSelector*)val;
// Otherwise we have a real value: chain the thunk, optionally evacuate
// the result, and rewrite the whole chain as indirections to it.
436 ((StgClosure*)p)->payload[0] = (StgClosure *)prev_thunk_selector;
437 prev_thunk_selector = p;
440 if (evac) evacuate(q);
442 // evacuate() cannot recurse through
443 // eval_thunk_selector(), because we know val is not
445 unchain_thunk_selectors(prev_thunk_selector, val);
// Indirection cases: follow the indirectee and retry.
453 case IND_OLDGEN_PERM:
455 // Again, we might need to untag a constructor.
456 selectee = UNTAG_CLOSURE( ((StgInd *)selectee)->indirectee );
460 // We don't follow pointers into to-space; the constructor
461 // has already been evacuated, so we won't save any space
462 // leaks by evaluating this selector thunk anyhow.
469 // recursively evaluate this selector. We don't want to
470 // recurse indefinitely, so we impose a depth bound.
471 if (gct->thunk_selector_depth >= MAX_THUNK_SELECTOR_DEPTH) {
475 gct->thunk_selector_depth++;
476 // rtsFalse says "don't evacuate the result". It will,
477 // however, update any THUNK_SELECTORs that are evaluated
479 eval_thunk_selector(&val, (StgSelector*)selectee, rtsFalse);
480 gct->thunk_selector_depth--;
482 // did we actually manage to evaluate it?
483 if (val == selectee) goto bale_out;
485 // Of course this pointer might be tagged...
486 selectee = UNTAG_CLOSURE(val);
// Blackhole-ish selectees (labels partly elided) cannot be selected from.
500 case SE_CAF_BLACKHOLE:
507 barf("eval_thunk_selector: strange selectee %d",
// bale_out (label elided): we could not evaluate the selector.
512 // We didn't manage to evaluate this thunk; restore the old info
513 // pointer. But don't forget: we still need to evacuate the thunk itself.
514 SET_INFO(p, (const StgInfoTable *)info_ptr);
// Evacuate the un-evaluated selector thunk itself when requested.
516 copy(&val,(StgClosure *)p,THUNK_SELECTOR_sizeW(),bd->step->to);
518 val = (StgClosure *)p;
// Any selectors we blackholed along the way now point at the thunk.
521 unchain_thunk_selectors(prev_thunk_selector, val);
525 /* -----------------------------------------------------------------------------
526 move_TSO is called to update the TSO structure after it has been
527 moved from one place to another.
528 -------------------------------------------------------------------------- */
531 move_TSO (StgTSO *src, StgTSO *dest)
535 // relocate the stack pointer...
536 diff = (StgPtr)dest - (StgPtr)src; // In *words*
537 dest->sp = (StgPtr)dest->sp + diff;