1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2006
5 * Sanity checking code for the heap and stack.
 * Used when debugging: check that everything is reasonable.
9 * - All things that are supposed to be pointers look like pointers.
11 * - Objects in text space are marked as static closures, those
12 * in the heap are dynamic.
14 * ---------------------------------------------------------------------------*/
16 #include "PosixSource.h"
19 #ifdef DEBUG /* whole file */
23 #include "BlockAlloc.h"
30 /* -----------------------------------------------------------------------------
32 -------------------------------------------------------------------------- */
34 static void checkSmallBitmap ( StgPtr payload, StgWord bitmap, nat );
35 static void checkLargeBitmap ( StgPtr payload, StgLargeBitmap*, nat );
36 static void checkClosureShallow ( StgClosure * );
38 /* -----------------------------------------------------------------------------
40 -------------------------------------------------------------------------- */
43 checkSmallBitmap( StgPtr payload, StgWord bitmap, nat size )
49 for(i = 0; i < size; i++, bitmap >>= 1 ) {
50 if ((bitmap & 1) == 0) {
51 checkClosureShallow((StgClosure *)payload[i]);
57 checkLargeBitmap( StgPtr payload, StgLargeBitmap* large_bitmap, nat size )
63 for (bmp=0; i < size; bmp++) {
64 StgWord bitmap = large_bitmap->bitmap[bmp];
66 for(; i < size && j < BITS_IN(W_); j++, i++, bitmap >>= 1 ) {
67 if ((bitmap & 1) == 0) {
68 checkClosureShallow((StgClosure *)payload[i]);
75 * check that it looks like a valid closure - without checking its payload
76 * used to avoid recursion between checking PAPs and checking stack
81 checkClosureShallow( StgClosure* p )
86 ASSERT(LOOKS_LIKE_CLOSURE_PTR(q));
88 /* Is it a static closure? */
89 if (!HEAP_ALLOCED(q)) {
90 ASSERT(closure_STATIC(q));
92 ASSERT(!closure_STATIC(q));
96 // check an individual stack object
98 checkStackFrame( StgPtr c )
101 const StgRetInfoTable* info;
103 info = get_ret_itbl((StgClosure *)c);
105 /* All activation records have 'bitmap' style layout info. */
106 switch (info->i.type) {
107 case RET_DYN: /* Dynamic bitmap: the mask is stored on the stack */
116 p = (P_)(r->payload);
117 checkSmallBitmap(p,RET_DYN_LIVENESS(r->liveness),RET_DYN_BITMAP_SIZE);
118 p += RET_DYN_BITMAP_SIZE + RET_DYN_NONPTR_REGS_SIZE;
120 // skip over the non-pointers
121 p += RET_DYN_NONPTRS(dyn);
123 // follow the ptr words
124 for (size = RET_DYN_PTRS(dyn); size > 0; size--) {
125 checkClosureShallow((StgClosure *)*p);
129 return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
130 RET_DYN_NONPTR_REGS_SIZE +
131 RET_DYN_NONPTRS(dyn) + RET_DYN_PTRS(dyn);
135 ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgUpdateFrame*)c)->updatee));
136 case ATOMICALLY_FRAME:
137 case CATCH_RETRY_FRAME:
138 case CATCH_STM_FRAME:
140 // small bitmap cases (<= 32 entries)
143 size = BITMAP_SIZE(info->i.layout.bitmap);
144 checkSmallBitmap((StgPtr)c + 1,
145 BITMAP_BITS(info->i.layout.bitmap), size);
151 bco = (StgBCO *)*(c+1);
152 size = BCO_BITMAP_SIZE(bco);
153 checkLargeBitmap((StgPtr)c + 2, BCO_BITMAP(bco), size);
157 case RET_BIG: // large bitmap (> 32 entries)
158 size = GET_LARGE_BITMAP(&info->i)->size;
159 checkLargeBitmap((StgPtr)c + 1, GET_LARGE_BITMAP(&info->i), size);
164 StgFunInfoTable *fun_info;
167 ret_fun = (StgRetFun *)c;
168 fun_info = get_fun_itbl(UNTAG_CLOSURE(ret_fun->fun));
169 size = ret_fun->size;
170 switch (fun_info->f.fun_type) {
172 checkSmallBitmap((StgPtr)ret_fun->payload,
173 BITMAP_BITS(fun_info->f.b.bitmap), size);
176 checkLargeBitmap((StgPtr)ret_fun->payload,
177 GET_FUN_LARGE_BITMAP(fun_info), size);
180 checkSmallBitmap((StgPtr)ret_fun->payload,
181 BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
185 return sizeofW(StgRetFun) + size;
189 barf("checkStackFrame: weird activation record found on stack (%p %d).",c,info->i.type);
193 // check sections of stack between update frames
195 checkStackChunk( StgPtr sp, StgPtr stack_end )
200 while (p < stack_end) {
201 p += checkStackFrame( p );
203 // ASSERT( p == stack_end ); -- HWL
207 checkPAP (StgClosure *tagged_fun, StgClosure** payload, StgWord n_args)
211 StgFunInfoTable *fun_info;
213 fun = UNTAG_CLOSURE(tagged_fun);
214 ASSERT(LOOKS_LIKE_CLOSURE_PTR(fun));
215 fun_info = get_fun_itbl(fun);
217 p = (StgClosure *)payload;
218 switch (fun_info->f.fun_type) {
220 checkSmallBitmap( (StgPtr)payload,
221 BITMAP_BITS(fun_info->f.b.bitmap), n_args );
224 checkLargeBitmap( (StgPtr)payload,
225 GET_FUN_LARGE_BITMAP(fun_info),
229 checkLargeBitmap( (StgPtr)payload,
234 checkSmallBitmap( (StgPtr)payload,
235 BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
240 ASSERT(fun_info->f.arity > TAG_MASK ? GET_CLOSURE_TAG(tagged_fun) == 1
241 : GET_CLOSURE_TAG(tagged_fun) == fun_info->f.arity);
246 checkClosure( StgClosure* p )
248 const StgInfoTable *info;
250 ASSERT(LOOKS_LIKE_CLOSURE_PTR(p));
252 p = UNTAG_CLOSURE(p);
253 /* Is it a static closure (i.e. in the data segment)? */
254 if (!HEAP_ALLOCED(p)) {
255 ASSERT(closure_STATIC(p));
257 ASSERT(!closure_STATIC(p));
260 info = p->header.info;
262 if (IS_FORWARDING_PTR(info)) {
263 barf("checkClosure: found EVACUATED closure %d", info->type);
265 info = INFO_PTR_TO_STRUCT(info);
267 switch (info->type) {
272 StgMVar *mvar = (StgMVar *)p;
273 ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->head));
274 ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->tail));
275 ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->value));
276 return sizeofW(StgMVar);
287 for (i = 0; i < info->layout.payload.ptrs; i++) {
288 ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgThunk *)p)->payload[i]));
290 return thunk_sizeW_fromITBL(info);
307 case IND_OLDGEN_PERM:
314 case CONSTR_NOCAF_STATIC:
319 for (i = 0; i < info->layout.payload.ptrs; i++) {
320 ASSERT(LOOKS_LIKE_CLOSURE_PTR(p->payload[i]));
322 return sizeW_fromITBL(info);
326 StgBCO *bco = (StgBCO *)p;
327 ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->instrs));
328 ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->literals));
329 ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->ptrs));
330 return bco_sizeW(bco);
333 case IND_STATIC: /* (1, 0) closure */
334 ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgIndStatic*)p)->indirectee));
335 return sizeW_fromITBL(info);
338 /* deal with these specially - the info table isn't
339 * representative of the actual layout.
341 { StgWeak *w = (StgWeak *)p;
342 ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->key));
343 ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->value));
344 ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->finalizer));
346 ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->link));
348 return sizeW_fromITBL(info);
352 ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgSelector *)p)->selectee));
353 return THUNK_SELECTOR_sizeW();
357 /* we don't expect to see any of these after GC
358 * but they might appear during execution
360 StgInd *ind = (StgInd *)p;
361 ASSERT(LOOKS_LIKE_CLOSURE_PTR(ind->indirectee));
362 return sizeofW(StgInd);
372 case ATOMICALLY_FRAME:
373 case CATCH_RETRY_FRAME:
374 case CATCH_STM_FRAME:
375 barf("checkClosure: stack frame");
379 StgAP* ap = (StgAP *)p;
380 checkPAP (ap->fun, ap->payload, ap->n_args);
386 StgPAP* pap = (StgPAP *)p;
387 checkPAP (pap->fun, pap->payload, pap->n_args);
388 return pap_sizeW(pap);
393 StgAP_STACK *ap = (StgAP_STACK *)p;
394 ASSERT(LOOKS_LIKE_CLOSURE_PTR(ap->fun));
395 checkStackChunk((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
396 return ap_stack_sizeW(ap);
400 return arr_words_sizeW((StgArrWords *)p);
402 case MUT_ARR_PTRS_CLEAN:
403 case MUT_ARR_PTRS_DIRTY:
404 case MUT_ARR_PTRS_FROZEN:
405 case MUT_ARR_PTRS_FROZEN0:
407 StgMutArrPtrs* a = (StgMutArrPtrs *)p;
409 for (i = 0; i < a->ptrs; i++) {
410 ASSERT(LOOKS_LIKE_CLOSURE_PTR(a->payload[i]));
412 return mut_arr_ptrs_sizeW(a);
416 checkTSO((StgTSO *)p);
417 return tso_sizeW((StgTSO *)p);
419 case TVAR_WATCH_QUEUE:
421 StgTVarWatchQueue *wq = (StgTVarWatchQueue *)p;
422 ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->next_queue_entry));
423 ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->prev_queue_entry));
424 return sizeofW(StgTVarWatchQueue);
427 case INVARIANT_CHECK_QUEUE:
429 StgInvariantCheckQueue *q = (StgInvariantCheckQueue *)p;
430 ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->invariant));
431 ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->my_execution));
432 ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->next_queue_entry));
433 return sizeofW(StgInvariantCheckQueue);
436 case ATOMIC_INVARIANT:
438 StgAtomicInvariant *invariant = (StgAtomicInvariant *)p;
439 ASSERT(LOOKS_LIKE_CLOSURE_PTR(invariant->code));
440 ASSERT(LOOKS_LIKE_CLOSURE_PTR(invariant->last_execution));
441 return sizeofW(StgAtomicInvariant);
446 StgTVar *tv = (StgTVar *)p;
447 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->current_value));
448 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->first_watch_queue_entry));
449 return sizeofW(StgTVar);
455 StgTRecChunk *tc = (StgTRecChunk *)p;
456 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->prev_chunk));
457 for (i = 0; i < tc -> next_entry_idx; i ++) {
458 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].tvar));
459 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].expected_value));
460 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].new_value));
462 return sizeofW(StgTRecChunk);
467 StgTRecHeader *trec = (StgTRecHeader *)p;
468 ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> enclosing_trec));
469 ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> current_chunk));
470 return sizeofW(StgTRecHeader);
474 barf("checkClosure (closure type %d)", info->type);
479 /* -----------------------------------------------------------------------------
482 After garbage collection, the live heap is in a state where we can
483 run through and check that all the pointers point to the right
484 place. This function starts at a given position and sanity-checks
485 all the objects in the remainder of the chain.
486 -------------------------------------------------------------------------- */
489 checkHeap(bdescr *bd)
493 #if defined(THREADED_RTS)
494 // heap sanity checking doesn't work with SMP, because we can't
495 // zero the slop (see Updates.h).
499 for (; bd != NULL; bd = bd->link) {
501 while (p < bd->free) {
502 nat size = checkClosure((StgClosure *)p);
503 /* This is the smallest size of closure that can live in the heap */
504 ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
508 while (p < bd->free &&
509 (*p < 0x1000 || !LOOKS_LIKE_INFO_PTR(*p))) { p++; }
515 checkHeapChunk(StgPtr start, StgPtr end)
520 for (p=start; p<end; p+=size) {
521 ASSERT(LOOKS_LIKE_INFO_PTR(*p));
522 size = checkClosure((StgClosure *)p);
523 /* This is the smallest size of closure that can live in the heap. */
524 ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
529 checkLargeObjects(bdescr *bd)
532 if (!(bd->flags & BF_PINNED)) {
533 checkClosure((StgClosure *)bd->start);
540 checkTSO(StgTSO *tso)
543 StgPtr stack = tso->stack;
544 StgOffset stack_size = tso->stack_size;
545 StgPtr stack_end = stack + stack_size;
547 if (tso->what_next == ThreadRelocated) {
548 checkTSO(tso->_link);
552 if (tso->what_next == ThreadKilled) {
553 /* The garbage collector doesn't bother following any pointers
554 * from dead threads, so don't check sanity here.
559 ASSERT(stack <= sp && sp < stack_end);
561 checkStackChunk(sp, stack_end);
565 Check that all TSOs have been evacuated.
566 Optionally also check the sanity of the TSOs.
569 checkGlobalTSOList (rtsBool checkTSOs)
574 for (s = 0; s < total_steps; s++) {
575 for (tso=all_steps[s].threads; tso != END_TSO_QUEUE;
576 tso = tso->global_link) {
577 ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso));
578 ASSERT(get_itbl(tso)->type == TSO);
582 // If this TSO is dirty and in an old generation, it better
583 // be on the mutable list.
584 if (tso->what_next == ThreadRelocated) continue;
585 if (tso->flags & (TSO_DIRTY|TSO_LINK_DIRTY)) {
586 ASSERT(Bdescr((P_)tso)->gen_no == 0 || (tso->flags & TSO_MARKED));
587 tso->flags &= ~TSO_MARKED;
593 /* -----------------------------------------------------------------------------
594 Check mutable list sanity.
595 -------------------------------------------------------------------------- */
598 checkMutableList( bdescr *mut_bd, nat gen )
604 for (bd = mut_bd; bd != NULL; bd = bd->link) {
605 for (q = bd->start; q < bd->free; q++) {
606 p = (StgClosure *)*q;
607 ASSERT(!HEAP_ALLOCED(p) || Bdescr((P_)p)->gen_no == gen);
608 if (get_itbl(p)->type == TSO) {
609 ((StgTSO *)p)->flags |= TSO_MARKED;
616 checkMutableLists (rtsBool checkTSOs)
620 for (g = 0; g < RtsFlags.GcFlags.generations; g++) {
621 checkMutableList(generations[g].mut_list, g);
622 for (i = 0; i < n_capabilities; i++) {
623 checkMutableList(capabilities[i].mut_lists[g], g);
626 checkGlobalTSOList(checkTSOs);
630 Check the static objects list.
633 checkStaticObjects ( StgClosure* static_objects )
635 StgClosure *p = static_objects;
638 while (p != END_OF_STATIC_LIST) {
641 switch (info->type) {
644 StgClosure *indirectee = UNTAG_CLOSURE(((StgIndStatic *)p)->indirectee);
646 ASSERT(LOOKS_LIKE_CLOSURE_PTR(indirectee));
647 ASSERT(LOOKS_LIKE_INFO_PTR((StgWord)indirectee->header.info));
648 p = *IND_STATIC_LINK((StgClosure *)p);
653 p = *THUNK_STATIC_LINK((StgClosure *)p);
657 p = *FUN_STATIC_LINK((StgClosure *)p);
661 p = *STATIC_LINK(info,(StgClosure *)p);
665 barf("checkStaticObjetcs: strange closure %p (%s)",