1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2006
5 * Sanity checking code for the heap and stack.
7 * Used when debugging: check that everything is reasonable.
9 * - All things that are supposed to be pointers look like pointers.
11 * - Objects in text space are marked as static closures, those
12 * in the heap are dynamic.
14 * ---------------------------------------------------------------------------*/
16 #include "PosixSource.h"
19 #ifdef DEBUG /* whole file */
22 #include "sm/Storage.h"
23 #include "sm/BlockAlloc.h"
29 /* -----------------------------------------------------------------------------
31 -------------------------------------------------------------------------- */
33 static void checkSmallBitmap ( StgPtr payload, StgWord bitmap, nat );
34 static void checkLargeBitmap ( StgPtr payload, StgLargeBitmap*, nat );
35 static void checkClosureShallow ( StgClosure * );
37 /* -----------------------------------------------------------------------------
39 -------------------------------------------------------------------------- */
/*
 * Walk 'size' words of 'payload' under a single-word ("small") bitmap.
 * A clear (0) bit marks a pointer word: shallow-check the closure it
 * points to.  A set bit marks a non-pointer word, which is skipped.
 */
42 checkSmallBitmap( StgPtr payload, StgWord bitmap, nat size )
48     for(i = 0; i < size; i++, bitmap >>= 1 ) {
49 if ((bitmap & 1) == 0) {
/* 0 bit => this payload word is a closure pointer */
50 checkClosureShallow((StgClosure *)payload[i]);
/*
 * Like checkSmallBitmap, but for a multi-word StgLargeBitmap: consume
 * the bitmap one word at a time ('bmp' indexes bitmap words, 'j' the
 * bit within the current word, 'i' the payload word overall).
 */
56 checkLargeBitmap( StgPtr payload, StgLargeBitmap* large_bitmap, nat size )
62     for (bmp=0; i < size; bmp++) {
63 StgWord bitmap = large_bitmap->bitmap[bmp];
/* scan at most BITS_IN(W_) bits from this bitmap word */
65 for(; i < size && j < BITS_IN(W_); j++, i++, bitmap >>= 1 ) {
66 if ((bitmap & 1) == 0) {
/* 0 bit => pointer word: shallow-check it */
67 checkClosureShallow((StgClosure *)payload[i]);
74 * check that it looks like a valid closure - without checking its payload
75 * used to avoid recursion between checking PAPs and checking stack
/*
 * Check that 'p' looks like a valid closure WITHOUT inspecting its
 * payload.  Used to break the recursion between checking PAPs and
 * checking stacks (see comment above).  Verifies the pointer shape and
 * that static/dynamic placement matches HEAP_ALLOCED.
 */
80 checkClosureShallow( StgClosure* p )
85     ASSERT(LOOKS_LIKE_CLOSURE_PTR(q));
87 /* Is it a static closure?  Non-heap closures must be marked static;
 * heap closures must not be. */
88 if (!HEAP_ALLOCED(q)) {
89 ASSERT(closure_STATIC(q));
91 ASSERT(!closure_STATIC(q));
95 // check an individual stack object
/*
 * Sanity-check one stack frame at 'c' and return its size in words, so
 * the caller (checkStackChunk) can step to the next frame.  Dispatches
 * on the frame's return-info-table type.
 */
97 checkStackFrame( StgPtr c )
100     const StgRetInfoTable* info;
102     info = get_ret_itbl((StgClosure *)c);
104     /* All activation records have 'bitmap' style layout info. */
105     switch (info->i.type) {
106     case RET_DYN: /* Dynamic bitmap: the mask is stored on the stack */
/* The register-save area is described by the liveness mask; after it
 * come RET_DYN_NONPTRS non-pointer words, then RET_DYN_PTRS pointers. */
115 p = (P_)(r->payload);
116 checkSmallBitmap(p,RET_DYN_LIVENESS(r->liveness),RET_DYN_BITMAP_SIZE);
117 p += RET_DYN_BITMAP_SIZE + RET_DYN_NONPTR_REGS_SIZE;
119 // skip over the non-pointers
120 p += RET_DYN_NONPTRS(dyn);
122 // follow the ptr words
123 for (size = RET_DYN_PTRS(dyn); size > 0; size--) {
124 checkClosureShallow((StgClosure *)*p);
/* total frame size: fixed header + bitmap + saved regs + dynamic parts */
128 return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
129 RET_DYN_NONPTR_REGS_SIZE +
130 RET_DYN_NONPTRS(dyn) + RET_DYN_PTRS(dyn);
/* update frame: check the updatee pointer, then (presumably) fall
 * through to the common small-bitmap handling below — TODO confirm
 * against the unelided source */
134 ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgUpdateFrame*)c)->updatee));
135 case ATOMICALLY_FRAME:
136 case CATCH_RETRY_FRAME:
137 case CATCH_STM_FRAME:
139 // small bitmap cases (<= 32 entries)
142 size = BITMAP_SIZE(info->i.layout.bitmap);
143 checkSmallBitmap((StgPtr)c + 1,
144 BITMAP_BITS(info->i.layout.bitmap), size);
/* RET_BCO: the bitmap lives in the BCO pointed to by the word after
 * the header, so the payload starts at c+2 */
150 bco = (StgBCO *)*(c+1);
151 size = BCO_BITMAP_SIZE(bco);
152 checkLargeBitmap((StgPtr)c + 2, BCO_BITMAP(bco), size);
156     case RET_BIG: // large bitmap (> 32 entries)
157 size = GET_LARGE_BITMAP(&info->i)->size;
158 checkLargeBitmap((StgPtr)c + 1, GET_LARGE_BITMAP(&info->i), size);
/* RET_FUN: layout of the saved arguments depends on the called
 * function's info table (fun_type) */
163 StgFunInfoTable *fun_info;
166 ret_fun = (StgRetFun *)c;
167 fun_info = get_fun_itbl(UNTAG_CLOSURE(ret_fun->fun));
168 size = ret_fun->size;
169 switch (fun_info->f.fun_type) {
171 checkSmallBitmap((StgPtr)ret_fun->payload,
172 BITMAP_BITS(fun_info->f.b.bitmap), size);
175 checkLargeBitmap((StgPtr)ret_fun->payload,
176 GET_FUN_LARGE_BITMAP(fun_info), size);
/* standard argument patterns: look the bitmap up by fun_type */
179 checkSmallBitmap((StgPtr)ret_fun->payload,
180 BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
184 return sizeofW(StgRetFun) + size;
/* anything else on the stack is a sanity failure */
188 barf("checkStackFrame: weird activation record found on stack (%p %d).",c,info->i.type);
192 // check sections of stack between update frames
/*
 * Sanity-check a contiguous run of stack frames from 'sp' up to (but
 * not including) 'stack_end', advancing by each frame's size.
 */
194 checkStackChunk( StgPtr sp, StgPtr stack_end )
199     while (p < stack_end) {
200 p += checkStackFrame( p );
202     // ASSERT( p == stack_end ); -- HWL
/*
 * Check the payload of a PAP/AP against the argument bitmap of the
 * function it applies ('tagged_fun' may carry a pointer tag, which is
 * stripped before use).  'n_args' is the number of argument words held
 * in 'payload'.  Also checks that the closure's pointer tag is
 * consistent with the function's arity.
 */
206 checkPAP (StgClosure *tagged_fun, StgClosure** payload, StgWord n_args)
210     StgFunInfoTable *fun_info;
212     fun = UNTAG_CLOSURE(tagged_fun);
213     ASSERT(LOOKS_LIKE_CLOSURE_PTR(fun));
214     fun_info = get_fun_itbl(fun);
216     p = (StgClosure *)payload;
/* pick the bitmap the same way checkStackFrame does for RET_FUN */
217     switch (fun_info->f.fun_type) {
219 checkSmallBitmap( (StgPtr)payload,
220 BITMAP_BITS(fun_info->f.b.bitmap), n_args );
223 checkLargeBitmap( (StgPtr)payload,
224 GET_FUN_LARGE_BITMAP(fun_info),
228 checkLargeBitmap( (StgPtr)payload,
/* standard argument patterns: bitmap indexed by fun_type */
233 checkSmallBitmap( (StgPtr)payload,
234 BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
/* a function with arity > TAG_MASK cannot be encoded in the pointer
 * tag, so its tag must be 0; otherwise the tag must equal the arity */
239     ASSERT(fun_info->f.arity > TAG_MASK ? GET_CLOSURE_TAG(tagged_fun) == 0
240            : GET_CLOSURE_TAG(tagged_fun) == fun_info->f.arity);
/*
 * Fully sanity-check a single closure and return its size in words.
 * Dispatches on the info-table type; each case asserts that every
 * pointer field looks like a valid closure pointer, then returns the
 * closure's size so callers (checkHeap/checkHeapChunk) can advance to
 * the next object.
 */
245 checkClosure( StgClosure* p )
247     const StgInfoTable *info;
249     ASSERT(LOOKS_LIKE_CLOSURE_PTR(p));
/* strip any pointer tag before looking at the header */
251     p = UNTAG_CLOSURE(p);
252     /* Is it a static closure (i.e. in the data segment)? */
253     if (!HEAP_ALLOCED(p)) {
254 ASSERT(closure_STATIC(p));
256 ASSERT(!closure_STATIC(p));
259     info = p->header.info;
/* forwarding pointers should never survive to a sanity check */
261     if (IS_FORWARDING_PTR(info)) {
262 barf("checkClosure: found EVACUATED closure %d", info->type);
264     info = INFO_PTR_TO_STRUCT(info);
266     switch (info->type) {
/* MVar: check all three closure fields */
271 StgMVar *mvar = (StgMVar *)p;
272 ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->head));
273 ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->tail));
274 ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->value));
275 return sizeofW(StgMVar);
/* thunks: check each pointer in the payload per the info table */
286 for (i = 0; i < info->layout.payload.ptrs; i++) {
287 ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgThunk *)p)->payload[i]));
289 return thunk_sizeW_fromITBL(info);
306     case IND_OLDGEN_PERM:
313     case CONSTR_NOCAF_STATIC:
/* generic pointers-first layout: check info->layout.payload.ptrs words */
318 for (i = 0; i < info->layout.payload.ptrs; i++) {
319 ASSERT(LOOKS_LIKE_CLOSURE_PTR(p->payload[i]));
321 return sizeW_fromITBL(info);
/* byte-code object: instrs, literals and ptrs are all closures */
325 StgBCO *bco = (StgBCO *)p;
326 ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->instrs));
327 ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->literals));
328 ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->ptrs));
329 return bco_sizeW(bco);
332     case IND_STATIC: /* (1, 0) closure */
333 ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgIndStatic*)p)->indirectee));
334 return sizeW_fromITBL(info);
337 /* deal with these specially - the info table isn't
338 * representative of the actual layout.
340 { StgWeak *w = (StgWeak *)p;
341 ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->key));
342 ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->value));
343 ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->finalizer));
345 ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->link));
347 return sizeW_fromITBL(info);
/* thunk selector: only the selectee is a pointer */
351 ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgSelector *)p)->selectee));
352 return THUNK_SELECTOR_sizeW();
356 /* we don't expect to see any of these after GC
357 * but they might appear during execution
359 StgInd *ind = (StgInd *)p;
360 ASSERT(LOOKS_LIKE_CLOSURE_PTR(ind->indirectee));
361 return sizeofW(StgInd);
/* stack frames must never appear in the heap proper */
371     case ATOMICALLY_FRAME:
372     case CATCH_RETRY_FRAME:
373     case CATCH_STM_FRAME:
374 barf("checkClosure: stack frame");
/* AP/PAP: delegate the payload check to checkPAP */
378 StgAP* ap = (StgAP *)p;
379 checkPAP (ap->fun, ap->payload, ap->n_args);
385 StgPAP* pap = (StgPAP *)p;
386 checkPAP (pap->fun, pap->payload, pap->n_args);
387 return pap_sizeW(pap);
/* AP_STACK: its payload is a saved stack chunk, check it as such */
392 StgAP_STACK *ap = (StgAP_STACK *)p;
393 ASSERT(LOOKS_LIKE_CLOSURE_PTR(ap->fun));
394 checkStackChunk((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
395 return ap_stack_sizeW(ap);
/* ARR_WORDS has no pointers: size only */
399 return arr_words_sizeW((StgArrWords *)p);
401     case MUT_ARR_PTRS_CLEAN:
402     case MUT_ARR_PTRS_DIRTY:
403     case MUT_ARR_PTRS_FROZEN:
404     case MUT_ARR_PTRS_FROZEN0:
406 StgMutArrPtrs* a = (StgMutArrPtrs *)p;
408 for (i = 0; i < a->ptrs; i++) {
409 ASSERT(LOOKS_LIKE_CLOSURE_PTR(a->payload[i]));
411 return mut_arr_ptrs_sizeW(a);
/* TSO: full thread check lives in checkTSO */
415 checkTSO((StgTSO *)p);
416 return tso_sizeW((StgTSO *)p);
/* STM objects: check the pointer fields of each record type */
418     case TVAR_WATCH_QUEUE:
420 StgTVarWatchQueue *wq = (StgTVarWatchQueue *)p;
421 ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->next_queue_entry));
422 ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->prev_queue_entry));
423 return sizeofW(StgTVarWatchQueue);
426     case INVARIANT_CHECK_QUEUE:
428 StgInvariantCheckQueue *q = (StgInvariantCheckQueue *)p;
429 ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->invariant));
430 ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->my_execution));
431 ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->next_queue_entry));
432 return sizeofW(StgInvariantCheckQueue);
435     case ATOMIC_INVARIANT:
437 StgAtomicInvariant *invariant = (StgAtomicInvariant *)p;
438 ASSERT(LOOKS_LIKE_CLOSURE_PTR(invariant->code));
439 ASSERT(LOOKS_LIKE_CLOSURE_PTR(invariant->last_execution));
440 return sizeofW(StgAtomicInvariant);
445 StgTVar *tv = (StgTVar *)p;
446 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->current_value));
447 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->first_watch_queue_entry));
448 return sizeofW(StgTVar);
/* TRec chunk: only entries up to next_entry_idx are live */
454 StgTRecChunk *tc = (StgTRecChunk *)p;
455 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->prev_chunk));
456 for (i = 0; i < tc -> next_entry_idx; i ++) {
457 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].tvar));
458 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].expected_value));
459 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].new_value));
461 return sizeofW(StgTRecChunk);
466 StgTRecHeader *trec = (StgTRecHeader *)p;
467 ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> enclosing_trec));
468 ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> current_chunk));
469 return sizeofW(StgTRecHeader);
/* unknown closure type: sanity failure */
473 barf("checkClosure (closure type %d)", info->type);
478 /* -----------------------------------------------------------------------------
481 After garbage collection, the live heap is in a state where we can
482 run through and check that all the pointers point to the right
483 place. This function starts at a given position and sanity-checks
484 all the objects in the remainder of the chain.
485 -------------------------------------------------------------------------- */
/*
 * Walk a chain of blocks and sanity-check every closure in each block
 * up to bd->free.  Disabled under THREADED_RTS because slop cannot be
 * zeroed there (see Updates.h), so the heap contains garbage between
 * closures.
 */
488 checkHeap(bdescr *bd)
492 #if defined(THREADED_RTS)
493     // heap sanity checking doesn't work with SMP, because we can't
494     // zero the slop (see Updates.h).
498     for (; bd != NULL; bd = bd->link) {
500 while (p < bd->free) {
501 nat size = checkClosure((StgClosure *)p);
502 /* This is the smallest size of closure that can live in the heap */
503 ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
/* skip any slop words (values that don't look like info pointers)
 * before the next closure */
507 while (p < bd->free &&
508 (*p < 0x1000 || !LOOKS_LIKE_INFO_PTR(*p))) { p++; }
/*
 * Sanity-check a contiguous run of closures in [start, end): every word
 * at a closure boundary must be an info pointer, and each closure must
 * be at least the minimum heap-object size.
 */
514 checkHeapChunk(StgPtr start, StgPtr end)
519     for (p=start; p<end; p+=size) {
520 ASSERT(LOOKS_LIKE_INFO_PTR(*p));
521 size = checkClosure((StgClosure *)p);
522 /* This is the smallest size of closure that can live in the heap. */
523 ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
/*
 * Check each large object (one closure at the start of its block).
 * Pinned blocks are skipped: they hold raw data, not closures.
 */
528 checkLargeObjects(bdescr *bd)
531 if (!(bd->flags & BF_PINNED)) {
532 checkClosure((StgClosure *)bd->start);
/*
 * Sanity-check a thread: follow relocation links, skip dead threads,
 * verify the stack pointer is within bounds, and check the live stack.
 */
539 checkTSO(StgTSO *tso)
542     StgPtr stack = tso->stack;
543     StgOffset stack_size = tso->stack_size;
544     StgPtr stack_end = stack + stack_size;
/* a relocated TSO is just a forwarding link: check the real one */
546     if (tso->what_next == ThreadRelocated) {
547 checkTSO(tso->_link);
551     if (tso->what_next == ThreadKilled) {
552 /* The garbage collector doesn't bother following any pointers
553 * from dead threads, so don't check sanity here.
/* sp must lie within the stack area */
558     ASSERT(stack <= sp && sp < stack_end);
560     checkStackChunk(sp, stack_end);
564 Check that all TSOs have been evacuated.
565 Optionally also check the sanity of the TSOs.
/*
 * Walk every step's global thread list, asserting each TSO looks valid.
 * When called after checkMutableLists, a dirty TSO in an old generation
 * must carry TSO_MARKED (set by checkMutableList when the TSO was found
 * on a mutable list); the mark is consumed (cleared) here.
 */
568 checkGlobalTSOList (rtsBool checkTSOs)
573     for (s = 0; s < total_steps; s++) {
574 for (tso=all_steps[s].threads; tso != END_TSO_QUEUE;
575 tso = tso->global_link) {
576 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso));
577 ASSERT(get_itbl(tso)->type == TSO);
581 // If this TSO is dirty and in an old generation, it better
582 // be on the mutable list.
583 if (tso->what_next == ThreadRelocated) continue;
584 if (tso->dirty || (tso->flags & TSO_LINK_DIRTY)) {
585 ASSERT(Bdescr((P_)tso)->gen_no == 0 || (tso->flags & TSO_MARKED));
586 tso->flags &= ~TSO_MARKED;
592 /* -----------------------------------------------------------------------------
593 Check mutable list sanity.
594 -------------------------------------------------------------------------- */
/*
 * Check one generation's mutable list: every heap-allocated entry must
 * live in generation 'gen'.  TSOs found on the list are tagged with
 * TSO_MARKED so checkGlobalTSOList can verify dirty old-gen TSOs were
 * really listed.
 */
597 checkMutableList( bdescr *mut_bd, nat gen )
603     for (bd = mut_bd; bd != NULL; bd = bd->link) {
604 for (q = bd->start; q < bd->free; q++) {
605 p = (StgClosure *)*q;
606 ASSERT(!HEAP_ALLOCED(p) || Bdescr((P_)p)->gen_no == gen);
607 if (get_itbl(p)->type == TSO) {
608 ((StgTSO *)p)->flags |= TSO_MARKED;
/*
 * Check the global and per-capability mutable lists of every
 * generation, then verify the global TSO list (which consumes the
 * TSO_MARKED flags set by checkMutableList).
 */
615 checkMutableLists (rtsBool checkTSOs)
619     for (g = 0; g < RtsFlags.GcFlags.generations; g++) {
620 checkMutableList(generations[g].mut_list, g);
621 for (i = 0; i < n_capabilities; i++) {
622 checkMutableList(capabilities[i].mut_lists[g], g);
625     checkGlobalTSOList(checkTSOs);
629 Check the static objects list.
/*
 * Walk the static-objects list (threaded through each object's static
 * link field, chosen per closure type) until END_OF_STATIC_LIST,
 * checking each entry along the way.
 */
632 checkStaticObjects ( StgClosure* static_objects )
634     StgClosure *p = static_objects;
637     while (p != END_OF_STATIC_LIST) {
640 switch (info->type) {
/* IND_STATIC: the indirectee must itself be a well-formed closure */
643 StgClosure *indirectee = UNTAG_CLOSURE(((StgIndStatic *)p)->indirectee);
645 ASSERT(LOOKS_LIKE_CLOSURE_PTR(indirectee));
646 ASSERT(LOOKS_LIKE_INFO_PTR((StgWord)indirectee->header.info));
647 p = *IND_STATIC_LINK((StgClosure *)p);
652 p = *THUNK_STATIC_LINK((StgClosure *)p);
656 p = *FUN_STATIC_LINK((StgClosure *)p);
660 p = *STATIC_LINK(info,(StgClosure *)p);
/* NOTE(review): the message below misspells the function name
 * ("checkStaticObjetcs") — fix when editing the full source. */
664 barf("checkStaticObjetcs: strange closure %p (%s)",