1 /* -----------------------------------------------------------------------------
2 * $Id: Sanity.c,v 1.27 2001/03/22 03:51:10 hwloidl Exp $
4 * (c) The GHC Team, 1998-1999
6 * Sanity checking code for the heap and stack.
8 * Used when debugging: check that the stack looks reasonable.
10 * - All things that are supposed to be pointers look like pointers.
12 * - Objects in text space are marked as static closures, those
13 * in the heap are dynamic.
15 * ---------------------------------------------------------------------------*/
23 //* Thread Queue Sanity::
24 //* Blackhole Sanity::
27 //@node Includes, Macros
28 //@subsection Includes
32 #ifdef DEBUG /* whole file */
36 #include "BlockAlloc.h"
41 #include "StoragePriv.h" // for END_OF_STATIC_LIST
43 //@node Macros, Stack sanity, Includes
/* LOOKS_LIKE_PTR(r): heuristic validity test for a closure pointer.
 * Accepts r when it is a static closure, or heap-allocated in a block
 * whose 'free' pointer is not the -1 "freed block" sentinel, AND the
 * first word it points at is not the 0xaaaaaaaa dead-memory fill.
 * NOTE(review): 0xaaaaaaaa is a 32-bit pattern -- presumably this
 * predates 64-bit targets; confirm before reuse. */
46 #define LOOKS_LIKE_PTR(r) ((LOOKS_LIKE_STATIC_CLOSURE(r) || \
47 ((HEAP_ALLOCED(r) && Bdescr((P_)r)->free != (void *)-1))) && \
48 ((StgWord)(*(StgPtr)r)!=0xaaaaaaaa))
50 //@node Stack sanity, Heap Sanity, Macros
51 //@subsection Stack sanity
53 /* -----------------------------------------------------------------------------
55 -------------------------------------------------------------------------- */
/* Forward declarations for the stack-sanity walkers defined below.
 * The StgOffset returns are sizes in words, used to advance the walk. */
57 StgOffset checkStackClosure( StgClosure* c );
59 StgOffset checkStackObject( StgPtr sp );
61 void checkStackChunk( StgPtr sp, StgPtr stack_end );
63 static StgOffset checkSmallBitmap( StgPtr payload, StgWord32 bitmap );
65 static StgOffset checkLargeBitmap( StgPtr payload,
66 StgLargeBitmap* large_bitmap );
68 void checkClosureShallow( StgClosure* p );
70 //@cindex checkSmallBitmap
/* checkSmallBitmap: walk a one-word liveness bitmap over 'payload'.
 * A CLEAR bit (0) marks a pointer word, which is sanity-checked via
 * checkClosure; a set bit marks a non-pointer (tagged) word.
 * NOTE(review): this chunk is elided -- return type, braces and the
 * final return of the word count are not visible here. */
72 checkSmallBitmap( StgPtr payload, StgWord32 bitmap )
77 for(; bitmap != 0; ++i, bitmap >>= 1 ) {
78 if ((bitmap & 1) == 0) {
79 checkClosure(stgCast(StgClosure*,payload[i]));
85 //@cindex checkLargeBitmap
/* checkLargeBitmap: like checkSmallBitmap but for bitmaps wider than
 * one word -- iterates over each word of large_bitmap->bitmap[] and
 * checks every pointer (clear-bit) payload word.
 * NOTE(review): elided chunk; braces and return are not visible. */
87 checkLargeBitmap( StgPtr payload, StgLargeBitmap* large_bitmap )
93 for (bmp=0; bmp<large_bitmap->size; bmp++) {
94 StgWord32 bitmap = large_bitmap->bitmap[bmp];
95 for(; bitmap != 0; ++i, bitmap >>= 1 ) {
96 if ((bitmap & 1) == 0) {
97 checkClosure(stgCast(StgClosure*,payload[i]));
104 //@cindex checkStackClosure
/* checkStackClosure: sanity-check one activation record on the stack
 * and return its size in words, dispatching on the info-table type
 * (RET_DYN, RET_BCO, update/catch/stop/seq frames, RET_SMALL/BIG...).
 * NOTE(review): elided chunk -- several case labels, braces and
 * returns are missing from this view. */
106 checkStackClosure( StgClosure* c )
108 const StgInfoTable* info = get_itbl(c);
110 /* All activation records have 'bitmap' style layout info. */
111 switch (info->type) {
112 case RET_DYN: /* Dynamic bitmap: the mask is stored on the stack */
114 StgRetDyn* r = (StgRetDyn *)c;
115 return sizeofW(StgRetDyn) +
116 checkSmallBitmap(r->payload,r->liveness);
118 case RET_BCO: /* small bitmap (<= 32 entries) */
121 return 1 + checkSmallBitmap((StgPtr)c + 1,info->layout.bitmap);
/* presumably an UPDATE_FRAME case (elided label): check the updatee */
124 ASSERT(LOOKS_LIKE_PTR(((StgUpdateFrame*)c)->updatee));
127 /* check that the link field points to another stack frame */
128 ASSERT(get_itbl(((StgFrame*)c)->link)->type == UPDATE_FRAME ||
129 get_itbl(((StgFrame*)c)->link)->type == CATCH_FRAME ||
130 get_itbl(((StgFrame*)c)->link)->type == STOP_FRAME ||
131 get_itbl(((StgFrame*)c)->link)->type == SEQ_FRAME);
139 checkSmallBitmap((StgPtr)c + 1,info->layout.bitmap);
140 case RET_BIG: /* large bitmap (> 32 entries) */
142 return 1 + checkLargeBitmap((StgPtr)c + 1,info->layout.large_bitmap);
144 case FUN_STATIC: /* probably a slow-entry point return address: */
145 #if 0 && defined(GRAN)
151 /* if none of the above, maybe it's a closure which looks a
152 * little like an infotable
154 checkClosureShallow(*(StgClosure **)c);
156 /* barf("checkStackClosure: weird activation record found on stack (%p).",c); */
161 * check that it looks like a valid closure - without checking its payload
162 * used to avoid recursion between checking PAPs and checking stack
166 //@cindex checkClosureShallow
/* checkClosureShallow: validate a closure WITHOUT descending into its
 * payload; used to break recursion between PAP checking and stack
 * checking.  Verifies the info pointer looks sane, then checks the
 * static/dynamic placement matches the closure's STATIC flag. */
168 checkClosureShallow( StgClosure* p )
171 ASSERT(LOOKS_LIKE_GHC_INFO(p->header.info)
172 || IS_HUGS_CONSTR_INFO(GET_INFO(p)));
174 /* Is it a static closure (i.e. in the data segment)? */
175 if (LOOKS_LIKE_STATIC(p)) {
176 ASSERT(closure_STATIC(p));
/* else branch (elided): a dynamic closure must not be marked static */
178 ASSERT(!closure_STATIC(p));
179 ASSERT(LOOKS_LIKE_PTR(p));
183 /* check an individual stack object */
184 //@cindex checkStackObject
/* checkStackObject: classify and check one stack word at sp, returning
 * its extent in words: a tagged argument block, an activation record
 * (info pointer on the stack), or an untagged closure pointer. */
186 checkStackObject( StgPtr sp )
188 if (IS_ARG_TAG(*sp)) {
189 /* Tagged words might be "stubbed" pointers, so there's no
190 * point checking to see whether they look like pointers or
191 * not (some of them will).
193 return ARG_SIZE(*sp) + 1;
194 } else if (LOOKS_LIKE_GHC_INFO(*stgCast(StgPtr*,sp))) {
195 return checkStackClosure(stgCast(StgClosure*,sp));
196 } else { /* must be an untagged closure pointer in the stack */
197 checkClosureShallow(*stgCast(StgClosure**,sp));
202 /* check sections of stack between update frames */
203 //@cindex checkStackChunk
/* checkStackChunk: walk the stack from sp up to stack_end, checking
 * each object in turn; checkStackObject returns the step size. */
205 checkStackChunk( StgPtr sp, StgPtr stack_end )
210 while (p < stack_end) {
211 p += checkStackObject( p );
213 // ASSERT( p == stack_end ); -- HWL
216 //@cindex checkStackChunk
/* checkClosure: the main heap-object sanity check.  Verifies the info
 * pointer and static/dynamic placement, then dispatches on closure
 * type to check every pointer field, returning the closure's size in
 * words so callers can walk the heap linearly.
 * NOTE(review): elided chunk -- many case labels, braces and #ifdef
 * (GRAN/PAR) lines are missing from this view. */
218 checkClosure( StgClosure* p )
220 const StgInfoTable *info;
222 ASSERT(LOOKS_LIKE_GHC_INFO(p->header.info));
224 /* Is it a static closure (i.e. in the data segment)? */
225 if (LOOKS_LIKE_STATIC(p)) {
226 ASSERT(closure_STATIC(p));
228 ASSERT(!closure_STATIC(p));
229 ASSERT(LOOKS_LIKE_PTR(p));
233 switch (info->type) {
/* MVAR (elided label): head, tail and value must all look valid */
237 StgMVar *mvar = (StgMVar *)p;
238 ASSERT(LOOKS_LIKE_PTR(mvar->head));
239 ASSERT(LOOKS_LIKE_PTR(mvar->tail));
240 ASSERT(LOOKS_LIKE_PTR(mvar->value));
243 checkBQ((StgBlockingQueueElement *)mvar->head, p);
245 checkBQ(mvar->head, p);
248 return sizeofW(StgMVar);
/* thunk-like closures (elided labels): check each pointer word */
259 for (i = 0; i < info->layout.payload.ptrs; i++) {
260 ASSERT(LOOKS_LIKE_PTR(p->payload[i]));
262 return stg_max(sizeW_fromITBL(info), sizeofW(StgHeader) + MIN_UPD_SIZE);
266 checkBQ(((StgBlockingQueue *)p)->blocking_queue, p);
267 /* fall through to basic ptr check */
282 case IND_OLDGEN_PERM:
285 case SE_CAF_BLACKHOLE:
294 case CONSTR_CHARLIKE:
296 case CONSTR_NOCAF_STATIC:
301 for (i = 0; i < info->layout.payload.ptrs; i++) {
302 ASSERT(LOOKS_LIKE_PTR(p->payload[i]));
304 return sizeW_fromITBL(info);
307 case IND_STATIC: /* (1, 0) closure */
308 ASSERT(LOOKS_LIKE_PTR(((StgIndStatic*)p)->indirectee));
309 return sizeW_fromITBL(info);
/* WEAK (elided label): */
312 /* deal with these specially - the info table isn't
313 * representative of the actual layout.
315 { StgWeak *w = (StgWeak *)p;
316 ASSERT(LOOKS_LIKE_PTR(w->key));
317 ASSERT(LOOKS_LIKE_PTR(w->value));
318 ASSERT(LOOKS_LIKE_PTR(w->finalizer));
320 ASSERT(LOOKS_LIKE_PTR(w->link));
322 return sizeW_fromITBL(info);
/* THUNK_SELECTOR (elided label): */
326 ASSERT(LOOKS_LIKE_PTR(stgCast(StgSelector*,p)->selectee));
327 return sizeofW(StgHeader) + MIN_UPD_SIZE;
/* IND (elided label): */
331 /* we don't expect to see any of these after GC
332 * but they might appear during execution
335 StgInd *ind = stgCast(StgInd*,p);
336 ASSERT(LOOKS_LIKE_PTR(ind->indirectee));
337 q = (P_)p + sizeofW(StgInd);
338 while (!*q) { q++; }; /* skip padding words (see GC.c: evacuate())*/
352 barf("checkClosure: stack frame");
354 case AP_UPD: /* we can treat this as being the same as a PAP */
357 StgPAP *pap = stgCast(StgPAP*,p);
358 ASSERT(LOOKS_LIKE_PTR(pap->fun));
359 checkStackChunk((StgPtr)pap->payload,
360 (StgPtr)pap->payload + pap->n_args
362 return pap_sizeW(pap);
366 return arr_words_sizeW(stgCast(StgArrWords*,p));
369 case MUT_ARR_PTRS_FROZEN:
371 StgMutArrPtrs* a = stgCast(StgMutArrPtrs*,p);
373 for (i = 0; i < a->ptrs; i++) {
374 ASSERT(LOOKS_LIKE_PTR(a->payload[i]));
376 return mut_arr_ptrs_sizeW(a);
/* TSO (elided label): delegate to checkTSO */
380 checkTSO((StgTSO *)p);
381 return tso_sizeW((StgTSO *)p);
/* parallel (GUM) closure types below -- elided #ifdef PAR region */
386 ASSERT(LOOKS_LIKE_GA(&(((StgBlockedFetch *)p)->ga)));
387 ASSERT(LOOKS_LIKE_PTR((((StgBlockedFetch *)p)->node)));
388 return sizeofW(StgBlockedFetch); // see size used in evacuate()
392 return sizeofW(StgFetchMe);
396 ASSERT(LOOKS_LIKE_GA(((StgFetchMe *)p)->ga));
397 return sizeofW(StgFetchMe); // see size used in evacuate()
400 checkBQ(((StgFetchMeBlockingQueue *)p)->blocking_queue, (StgClosure *)p);
401 return sizeofW(StgFetchMeBlockingQueue); // see size used in evacuate()
404 /* In an RBH the BQ may be empty (ie END_BQ_QUEUE) but not NULL */
405 ASSERT(((StgRBH *)p)->blocking_queue!=NULL);
406 if (((StgRBH *)p)->blocking_queue!=END_BQ_QUEUE)
407 checkBQ(((StgRBH *)p)->blocking_queue, p);
408 ASSERT(LOOKS_LIKE_GHC_INFO(REVERT_INFOPTR(get_itbl((StgClosure *)p))));
409 return BLACKHOLE_sizeW(); // see size used in evacuate()
410 // sizeW_fromITBL(REVERT_INFOPTR(get_itbl((StgClosure *)p)));
415 barf("checkClosure: found EVACUATED closure %d",
418 barf("checkClosure (closure type %d)", info->type);
424 #define PVM_PE_MASK 0xfffc0000
425 #define MAX_PVM_PES MAX_PES
426 #define MAX_PVM_TIDS MAX_PES
427 #define MAX_SLOTS 100000
/* looks_like_tid: heuristic PVM task-id check -- the PE number lives
 * in the top bits (PVM_PE_MASK >> 18) and must be nonzero; both parts
 * must be below MAX_PVM_TIDS. */
430 looks_like_tid(StgInt tid)
432 StgInt hi = (tid & PVM_PE_MASK) >> 18;
433 StgInt lo = (tid & ~PVM_PE_MASK);
434 rtsBool ok = (hi != 0) && (lo < MAX_PVM_TIDS) && (hi < MAX_PVM_TIDS);
/* looks_like_slot: weak bound check on a GALA slot number when the
 * owning tid is unknown (otherwise prefer looks_like_ga). */
439 looks_like_slot(StgInt slot)
441 /* if tid is known better use looks_like_ga!! */
442 rtsBool ok = slot<MAX_SLOTS;
443 // This refers only to the no. of slots on the current PE
444 // rtsBool ok = slot<=highest_slot();
/* looks_like_ga: validate a global address -- tid must look like a
 * PVM tid, and the slot is bounded by highest_slot() when the GA is
 * local (tid==mytid), else only by MAX_SLOTS. */
449 looks_like_ga(globalAddr *ga)
451 rtsBool is_tid = looks_like_tid((ga)->payload.gc.gtid);
452 rtsBool is_slot = ((ga)->payload.gc.gtid==mytid) ?
453 (ga)->payload.gc.slot<=highest_slot() :
454 (ga)->payload.gc.slot<MAX_SLOTS;
455 rtsBool ok = is_tid && is_slot;
461 //@node Heap Sanity, TSO Sanity, Stack sanity
462 //@subsection Heap Sanity
464 /* -----------------------------------------------------------------------------
467 After garbage collection, the live heap is in a state where we can
468 run through and check that all the pointers point to the right
469 place. This function starts at a given position and sanity-checks
470 all the objects in the remainder of the chain.
471 -------------------------------------------------------------------------- */
/* checkHeap: walk a chain of blocks after GC, checking every closure
 * and counting IND_STATICs; skips slop (words below 0x1000 or not
 * looking like an info pointer) between closures.
 * NOTE(review): elided chunk -- block-advance logic is not visible. */
475 checkHeap(bdescr *bd, StgPtr start)
478 nat xxx = 0; // tmp -- HWL
481 if (bd != NULL) p = bd->start;
487 while (p < bd->free) {
488 nat size = checkClosure(stgCast(StgClosure*,p));
489 /* This is the smallest size of closure that can live in the heap. */
490 ASSERT( size >= MIN_NONUPD_SIZE + sizeofW(StgHeader) );
491 if (get_itbl(stgCast(StgClosure*,p))->type == IND_STATIC)
496 while (p < bd->free &&
497 (*p < 0x1000 || !LOOKS_LIKE_GHC_INFO((void*)*p))) { p++; }
504 fprintf(stderr,"@@@@ checkHeap: Heap ok; %d IND_STATIC closures checked\n",
510 Check heap between start and end. Used after unpacking graphs.
/* checkHeapChunk (GUM variant): check heap between start and end after
 * unpacking a graph; treats commoned-up FETCH_MEs and IND slop words
 * specially before falling back to checkClosure. */
513 checkHeapChunk(StgPtr start, StgPtr end)
515 extern globalAddr *LAGAlookup(StgClosure *addr);
519 for (p=start; p<end; p+=size) {
520 ASSERT(LOOKS_LIKE_GHC_INFO((void*)*p));
521 if (get_itbl((StgClosure*)p)->type == FETCH_ME &&
522 *(p+1) == 0x0000eeee /* ie. unpack garbage (see SetGAandCommonUp) */) {
523 /* if it's a FM created during unpack and commoned up, it's not global */
524 ASSERT(LAGAlookup((StgClosure*)p)==NULL);
525 size = sizeofW(StgFetchMe);
526 } else if (get_itbl((StgClosure*)p)->type == IND) {
527 *(p+2) = 0x0000ee11; /* mark slop in IND as garbage */
530 size = checkClosure(stgCast(StgClosure*,p));
531 /* This is the smallest size of closure that can live in the heap. */
532 ASSERT( size >= MIN_NONUPD_SIZE + sizeofW(StgHeader) );
/* checkHeapChunk (plain variant): linear walk from start to end,
 * checking each closure and its minimum size. */
538 checkHeapChunk(StgPtr start, StgPtr end)
543 for (p=start; p<end; p+=size) {
544 ASSERT(LOOKS_LIKE_GHC_INFO((void*)*p));
545 size = checkClosure(stgCast(StgClosure*,p));
546 /* This is the smallest size of closure that can live in the heap. */
547 ASSERT( size >= MIN_NONUPD_SIZE + sizeofW(StgHeader) );
/* checkChain: check the first closure of each block in a chain.
 * NOTE(review): loop structure elided from this view. */
554 checkChain(bdescr *bd)
557 checkClosure((StgClosure *)bd->start);
562 /* check stack - making sure that update frames are linked correctly */
/* checkStack: verify the whole stack, checking each chunk between
 * update frames and following the su link through UPDATE/SEQ/CATCH
 * frames until the STOP frame at stack_end. */
565 checkStack(StgPtr sp, StgPtr stack_end, StgUpdateFrame* su )
567 /* check everything down to the first update frame */
568 checkStackChunk( sp, stgCast(StgPtr,su) );
569 while ( stgCast(StgPtr,su) < stack_end) {
570 sp = stgCast(StgPtr,su);
571 switch (get_itbl(su)->type) {
576 su = stgCast(StgSeqFrame*,su)->link;
579 su = stgCast(StgCatchFrame*,su)->link;
582 /* not quite: ASSERT(stgCast(StgPtr,su) == stack_end); */
585 barf("checkStack: weird record found on update frame list.");
587 checkStackChunk( sp, stgCast(StgPtr,su) );
589 ASSERT(stgCast(StgPtr,su) == stack_end);
592 //@node TSO Sanity, Thread Queue Sanity, Heap Sanity
593 //@subsection TSO Sanity
/* checkTSO: sanity-check a thread state object -- stack bounds, the
 * blocking reason vs. block_info consistency, the link field, and
 * finally the thread's stack via checkStack.  Dead or relocated
 * threads are skipped (GC does not follow their pointers).
 * NOTE(review): elided chunk -- several case labels and the GRAN/PAR
 * #ifdef structure are not visible here. */
597 checkTSO(StgTSO *tso)
600 StgPtr stack = tso->stack;
601 StgUpdateFrame* su = tso->su;
602 StgOffset stack_size = tso->stack_size;
603 StgPtr stack_end = stack + stack_size;
605 if (tso->what_next == ThreadRelocated) {
610 if (tso->what_next == ThreadComplete || tso->what_next == ThreadKilled) {
611 /* The garbage collector doesn't bother following any pointers
612 * from dead threads, so don't check sanity here.
617 ASSERT(stack <= sp && sp < stack_end);
618 ASSERT(sp <= stgCast(StgPtr,su));
621 ASSERT(tso->par.magic==TSO_MAGIC);
623 switch (tso->why_blocked) {
625 checkClosureShallow(tso->block_info.closure);
626 ASSERT(/* Can't be a FETCH_ME because *this* closure is on its BQ */
627 get_itbl(tso->block_info.closure)->type==FETCH_ME_BQ);
629 case BlockedOnGA_NoSend:
630 checkClosureShallow(tso->block_info.closure);
631 ASSERT(get_itbl(tso->block_info.closure)->type==FETCH_ME_BQ);
633 case BlockedOnBlackHole:
634 checkClosureShallow(tso->block_info.closure);
635 ASSERT(/* Can't be a BLACKHOLE because *this* closure is on its BQ */
636 get_itbl(tso->block_info.closure)->type==BLACKHOLE_BQ ||
637 get_itbl(tso->block_info.closure)->type==RBH);
642 /* isOnBQ(blocked_queue) */
644 case BlockedOnException:
645 /* isOnSomeBQ(tso) */
646 ASSERT(get_itbl(tso->block_info.tso)->type==TSO);
649 ASSERT(get_itbl(tso->block_info.closure)->type==MVAR);
653 Could check other values of why_blocked but I am more
654 lazy than paranoid (bad combination) -- HWL
658 /* if the link field is non-nil it must point to one of these
659 three closure types */
660 ASSERT(tso->link == END_TSO_QUEUE ||
661 get_itbl(tso->link)->type == TSO ||
662 get_itbl(tso->link)->type == BLOCKED_FETCH ||
663 get_itbl(tso->link)->type == CONSTR);
666 checkStack(sp, stack_end, su);
670 //@cindex checkTSOsSanity
/* checkTSOsSanity (GranSim): walk the run queue of every PE and
 * sanity-check each TSO, reporting progress on stderr. */
672 checkTSOsSanity(void) {
676 belch("Checking sanity of all runnable TSOs:");
678 for (i=0, tsos=0; i<RtsFlags.GranFlags.proc; i++) {
679 for (tso=run_queue_hds[i]; tso!=END_TSO_QUEUE; tso=tso->link) {
680 fprintf(stderr, "TSO %p on PE %d ...", tso, i);
682 fprintf(stderr, "OK, ");
687 belch(" checked %d TSOs on %d PEs; ok\n", tsos, RtsFlags.GranFlags.proc);
690 //@node Thread Queue Sanity, Blackhole Sanity, TSO Sanity
691 //@subsection Thread Queue Sanity
695 //@cindex checkThreadQSanity
/* checkThreadQSanity: verify the run queue of one PE is a well-formed
 * singly-linked list -- non-NULL head/tail, head/tail consistent when
 * empty, every prev->link == tso, and tail reached last. */
697 checkThreadQSanity (PEs proc, rtsBool check_TSO_too)
701 /* the NIL value for TSOs is END_TSO_QUEUE; thus, finding NULL is an error */
702 ASSERT(run_queue_hds[proc]!=NULL);
703 ASSERT(run_queue_tls[proc]!=NULL);
704 /* if either head or tail is NIL then the other one must be NIL, too */
705 ASSERT(run_queue_hds[proc]!=END_TSO_QUEUE || run_queue_tls[proc]==END_TSO_QUEUE);
706 ASSERT(run_queue_tls[proc]!=END_TSO_QUEUE || run_queue_hds[proc]==END_TSO_QUEUE);
707 for (tso=run_queue_hds[proc], prev=END_TSO_QUEUE;
709 prev=tso, tso=tso->link) {
710 ASSERT((prev!=END_TSO_QUEUE || tso==run_queue_hds[proc]) &&
711 (prev==END_TSO_QUEUE || prev->link==tso));
715 ASSERT(prev==run_queue_tls[proc]);
718 //@cindex checkThreadQsSanity
/* checkThreadQsSanity: apply checkThreadQSanity to every PE. */
720 checkThreadQsSanity (rtsBool check_TSO_too)
724 for (p=0; p<RtsFlags.GranFlags.proc; p++)
725 checkThreadQSanity(p, check_TSO_too);
730 Check that all TSOs have been evacuated.
731 Optionally also check the sanity of the TSOs.
/* checkGlobalTSOList: assert every thread on the global all_threads
 * list lives in an evacuated block (i.e. GC moved it); optionally
 * also run full checkTSO on each (elided from this view). */
734 checkGlobalTSOList (rtsBool checkTSOs)
736 extern StgTSO *all_threads;
738 for (tso=all_threads; tso != END_TSO_QUEUE; tso = tso->global_link) {
739 ASSERT(Bdescr((P_)tso)->evacuated == 1);
745 //@node Blackhole Sanity, GALA table sanity, Thread Queue Sanity
746 //@subsection Blackhole Sanity
748 /* -----------------------------------------------------------------------------
749 Check Blackhole Sanity
751 Test whether an object is already on the update list.
752 It isn't necessarily an rts error if it is - it might be a programming
755 Future versions might be able to test for a blackhole without traversing
756 the update frame list.
758 -------------------------------------------------------------------------- */
759 //@cindex isBlackhole
/* isBlackhole: answer whether closure p appears as the updatee of any
 * frame on tso's update-frame list (i.e. is being blackholed).
 * NOTE(review): elided chunk -- case labels and returns missing. */
761 isBlackhole( StgTSO* tso, StgClosure* p )
763 StgUpdateFrame* su = tso->su;
765 switch (get_itbl(su)->type) {
767 if (su->updatee == p) {
774 su = stgCast(StgSeqFrame*,su)->link;
777 su = stgCast(StgCatchFrame*,su)->link;
782 barf("isBlackhole: weird record found on update frame list.");
788 Check the static objects list.
/* checkStaticObjects: walk the static_objects list (linked through the
 * per-type STATIC_LINK fields) validating IND_STATIC indirectees.
 * NOTE(review): the barf message below misspells the function name
 * ("checkStaticObjetcs") -- a string-literal fix is out of scope for
 * this comment-only pass. */
791 checkStaticObjects ( void ) {
792 extern StgClosure* static_objects;
793 StgClosure *p = static_objects;
796 while (p != END_OF_STATIC_LIST) {
799 switch (info->type) {
802 StgClosure *indirectee = stgCast(StgIndStatic*,p)->indirectee;
804 ASSERT(LOOKS_LIKE_PTR(indirectee));
805 ASSERT(LOOKS_LIKE_GHC_INFO(indirectee->header.info));
806 p = IND_STATIC_LINK((StgClosure *)p);
811 p = THUNK_STATIC_LINK((StgClosure *)p);
815 p = FUN_STATIC_LINK((StgClosure *)p);
819 p = STATIC_LINK(info,(StgClosure *)p);
823 barf("checkStaticObjetcs: strange closure %p (%s)",
830 Check the sanity of a blocking queue starting at bqe with closure being
831 the closure holding the blocking queue.
832 Note that in GUM we can have several different closure types in a
/* checkBQ (GRAN/GUM variant): sanity-check a blocking queue hanging
 * off 'closure'; queue elements may be of several closure types, so
 * the element type is dispatched on.  'end' records queue end. */
838 checkBQ (StgBlockingQueueElement *bqe, StgClosure *closure)
840 rtsBool end = rtsFalse;
841 StgInfoTable *info = get_itbl(closure);
843 ASSERT(info->type == BLACKHOLE_BQ || info->type == MVAR
844 || info->type == FETCH_ME_BQ || info->type == RBH);
847 switch (get_itbl(bqe)->type) {
850 checkClosure((StgClosure *)bqe);
852 end = (bqe==END_BQ_QUEUE);
856 checkClosure((StgClosure *)bqe);
861 barf("checkBQ: strange closure %d in blocking queue for closure %p (%s)\n",
862 get_itbl(bqe)->type, closure, info_type(closure));
/* checkBQ (second variant): blocking-queue check where elements are
 * TSOs only; holder must be a BLACKHOLE_BQ or MVAR. */
868 checkBQ (StgTSO *bqe, StgClosure *closure)
870 rtsBool end = rtsFalse;
871 StgInfoTable *info = get_itbl(closure);
873 ASSERT(info->type == BLACKHOLE_BQ || info->type == MVAR);
876 switch (get_itbl(bqe)->type) {
879 checkClosure((StgClosure *)bqe);
881 end = (bqe==END_BQ_QUEUE);
885 barf("checkBQ: strange closure %d in blocking queue for closure %p (%s)\n",
886 get_itbl(bqe)->type, closure, info_type(closure));
/* checkBQ (sequential variant): as above, but the queue is terminated
 * by END_TSO_QUEUE and the error message prints info->type directly. */
892 checkBQ (StgTSO *bqe, StgClosure *closure)
894 rtsBool end = rtsFalse;
895 StgInfoTable *info = get_itbl(closure);
897 ASSERT(info->type == BLACKHOLE_BQ || info->type == MVAR);
900 switch (get_itbl(bqe)->type) {
902 checkClosure((StgClosure *)bqe);
904 end = (bqe==END_TSO_QUEUE);
908 barf("checkBQ: strange closure %d in blocking queue for closure %p\n",
909 get_itbl(bqe)->type, closure, info->type);
917 //@node GALA table sanity, Index, Blackhole Sanity
918 //@subsection GALA table sanity
921 This routine checks the sanity of the LAGA and GALA tables. They are
922 implemented as lists through one hash table, LAtoGALAtable, because entries
923 in both tables have the same structure:
924 - the LAGA table maps local addresses to global addresses; it starts
925 with liveIndirections
926 - the GALA table maps global addresses to local addresses; it starts
933 /* hidden in parallel/Global.c; only accessed for testing here */
934 extern GALA *liveIndirections;
935 extern GALA *liveRemoteGAs;
936 extern HashTable *LAtoGALAtable;
938 //@cindex checkLAGAtable
/* checkLAGAtable: verify the LAGA (local->global) and GALA
 * (global->local) address tables -- both are lists threaded through
 * LAtoGALAtable; checks hash consistency for preferred entries, that
 * each local address carries a sane info pointer, and that the lists
 * contain no trivial self-loops.  Optionally checks the closures too. */
940 checkLAGAtable(rtsBool check_closures)
943 nat n=0, m=0; // debugging
945 for (gala = liveIndirections; gala != NULL; gala = gala->next) {
947 gala0 = lookupHashTable(LAtoGALAtable, (StgWord) gala->la);
948 ASSERT(!gala->preferred || gala == gala0);
949 ASSERT(LOOKS_LIKE_GHC_INFO(((StgClosure *)gala->la)->header.info));
950 ASSERT(gala->next!=gala); // detect direct loops
951 if ( check_closures ) {
952 checkClosure(stgCast(StgClosure*,gala->la));
956 for (gala = liveRemoteGAs; gala != NULL; gala = gala->next) {
958 gala0 = lookupHashTable(LAtoGALAtable, (StgWord) gala->la);
959 ASSERT(!gala->preferred || gala == gala0);
960 ASSERT(LOOKS_LIKE_GHC_INFO(((StgClosure *)gala->la)->header.info));
961 ASSERT(gala->next!=gala); // detect direct loops
963 if ( check_closures ) {
964 checkClosure(stgCast(StgClosure*,gala->la));
971 //@node Index, , GALA table sanity
977 //* checkBQ:: @cindex\s-+checkBQ
978 //* checkChain:: @cindex\s-+checkChain
979 //* checkClosureShallow:: @cindex\s-+checkClosureShallow
980 //* checkHeap:: @cindex\s-+checkHeap
981 //* checkLargeBitmap:: @cindex\s-+checkLargeBitmap
982 //* checkSmallBitmap:: @cindex\s-+checkSmallBitmap
983 //* checkStack:: @cindex\s-+checkStack
984 //* checkStackChunk:: @cindex\s-+checkStackChunk
985 //* checkStackChunk:: @cindex\s-+checkStackChunk
986 //* checkStackClosure:: @cindex\s-+checkStackClosure
987 //* checkStackObject:: @cindex\s-+checkStackObject
988 //* checkTSO:: @cindex\s-+checkTSO
989 //* checkTSOsSanity:: @cindex\s-+checkTSOsSanity
990 //* checkThreadQSanity:: @cindex\s-+checkThreadQSanity
991 //* checkThreadQsSanity:: @cindex\s-+checkThreadQsSanity
992 //* isBlackhole:: @cindex\s-+isBlackhole