1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2006
5 * Sanity checking code for the heap and stack.
7 * Used when debugging: check that everything is reasonable.
9 * - All things that are supposed to be pointers look like pointers.
11 * - Objects in text space are marked as static closures, those
12 * in the heap are dynamic.
14 * ---------------------------------------------------------------------------*/
16 #include "PosixSource.h"
19 #ifdef DEBUG /* whole file */
23 #include "BlockAlloc.h"
30 /* -----------------------------------------------------------------------------
32 -------------------------------------------------------------------------- */
34 static void checkSmallBitmap ( StgPtr payload, StgWord bitmap, nat );
35 static void checkLargeBitmap ( StgPtr payload, StgLargeBitmap*, nat );
36 static void checkClosureShallow ( StgClosure * );
38 /* -----------------------------------------------------------------------------
40 -------------------------------------------------------------------------- */
/* Walk 'size' payload words under a one-word liveness bitmap.  A clear
 * bit (0) marks a pointer word, which is shallow-checked; a set bit
 * marks a non-pointer word and is skipped. */
43 checkSmallBitmap( StgPtr payload, StgWord bitmap, nat size )
49     for(i = 0; i < size; i++, bitmap >>= 1 ) {
50 	if ((bitmap & 1) == 0) {
51 	    checkClosureShallow((StgClosure *)payload[i]);
/* Like checkSmallBitmap, but the liveness mask spans several words
 * (StgLargeBitmap): the outer loop steps through bitmap words, the
 * inner loop through the BITS_IN(W_) bits of each word.  Clear bits
 * mark pointer words to be shallow-checked. */
57 checkLargeBitmap( StgPtr payload, StgLargeBitmap* large_bitmap, nat size )
63     for (bmp=0; i < size; bmp++) {
64 	StgWord bitmap = large_bitmap->bitmap[bmp];
66 	for(; i < size && j < BITS_IN(W_); j++, i++, bitmap >>= 1 ) {
67 	    if ((bitmap & 1) == 0) {
68 		checkClosureShallow((StgClosure *)payload[i]);
75  * check that it looks like a valid closure - without checking its payload
76  * used to avoid recursion between checking PAPs and checking stack
81 checkClosureShallow( StgClosure* p )
83     ASSERT(LOOKS_LIKE_CLOSURE_PTR(p));
85     /* Is it a static closure? */
86     if (!HEAP_ALLOCED(p)) {
87 	ASSERT(closure_STATIC(p));
    /* else: heap-allocated, so it must NOT carry the static flag */
89 	ASSERT(!closure_STATIC(p));
93 // check an individual stack object
/* Sanity-check one activation record at 'c' and return its size in
 * words, so the caller (checkStackChunk) can advance to the next frame.
 * Dispatches on the frame type's info-table layout. */
95 checkStackFrame( StgPtr c )
98     const StgRetInfoTable* info;
100     info = get_ret_itbl((StgClosure *)c);
102     /* All activation records have 'bitmap' style layout info. */
103     switch (info->i.type) {
104     case RET_DYN: /* Dynamic bitmap: the mask is stored on the stack */
        /* NOTE(review): 'r', 'dyn', 'p' and 'size' are declared on lines
         * elided from this listing — presumably r = (StgRetDyn *)c and
         * dyn = r->liveness; confirm against the full source. */
113 	p = (P_)(r->payload);
114 	checkSmallBitmap(p,RET_DYN_LIVENESS(r->liveness),RET_DYN_BITMAP_SIZE);
115 	p += RET_DYN_BITMAP_SIZE + RET_DYN_NONPTR_REGS_SIZE;
117 	// skip over the non-pointers
118 	p += RET_DYN_NONPTRS(dyn);
120 	// follow the ptr words
121 	for (size = RET_DYN_PTRS(dyn); size > 0; size--) {
122 	    checkClosureShallow((StgClosure *)*p);
126 	return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
127 	    RET_DYN_NONPTR_REGS_SIZE +
128 	    RET_DYN_NONPTRS(dyn) + RET_DYN_PTRS(dyn);
    /* Update frames: just check the updatee pointer, then fall into
     * the small-bitmap handling shared with the STM frames below. */
132 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgUpdateFrame*)c)->updatee));
133     case ATOMICALLY_FRAME:
134     case CATCH_RETRY_FRAME:
135     case CATCH_STM_FRAME:
137 	// small bitmap cases (<= 32 entries)
141 	size = BITMAP_SIZE(info->i.layout.bitmap);
142 	checkSmallBitmap((StgPtr)c + 1,
143 			 BITMAP_BITS(info->i.layout.bitmap), size);
    /* BCO return frame: the BCO sitting above the frame supplies its
     * own large bitmap. */
149 	bco = (StgBCO *)*(c+1);
150 	size = BCO_BITMAP_SIZE(bco);
151 	checkLargeBitmap((StgPtr)c + 2, BCO_BITMAP(bco), size);
155     case RET_BIG: // large bitmap (> 32 entries)
157 	size = GET_LARGE_BITMAP(&info->i)->size;
158 	checkLargeBitmap((StgPtr)c + 1, GET_LARGE_BITMAP(&info->i), size);
    /* RET_FUN frame: the liveness info comes from the function's
     * info table rather than the frame's own. */
163 	StgFunInfoTable *fun_info;
166 	ret_fun = (StgRetFun *)c;
167 	fun_info = get_fun_itbl(ret_fun->fun);
168 	size = ret_fun->size;
169 	switch (fun_info->f.fun_type) {
171 	    checkSmallBitmap((StgPtr)ret_fun->payload,
172 			     BITMAP_BITS(fun_info->f.b.bitmap), size);
175 	    checkLargeBitmap((StgPtr)ret_fun->payload,
176 			     GET_FUN_LARGE_BITMAP(fun_info), size);
        /* default: generic argument bitmap indexed by fun_type */
179 	    checkSmallBitmap((StgPtr)ret_fun->payload,
180 			     BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
184 	return sizeofW(StgRetFun) + size;
188 	barf("checkStackFrame: weird activation record found on stack (%p %d).",c,info->i.type);
192 // check sections of stack between update frames
/* Walk a stack segment frame by frame from 'sp' toward 'stack_end',
 * letting checkStackFrame() validate each frame and report its size. */
194 checkStackChunk( StgPtr sp, StgPtr stack_end )
199     while (p < stack_end) {
200 	p += checkStackFrame( p );
202     //    ASSERT( p == stack_end ); -- HWL
/* Check the function pointer and the argument payload of a PAP/AP.
 * The liveness of the payload words is determined by the function's
 * info table (small bitmap, large bitmap, BCO bitmap, or the generic
 * stg_arg_bitmaps entry for its fun_type). */
206 checkPAP (StgClosure *fun, StgClosure** payload, StgWord n_args)
209     StgFunInfoTable *fun_info;
211     ASSERT(LOOKS_LIKE_CLOSURE_PTR(fun));
212     fun_info = get_fun_itbl(fun);
214     p = (StgClosure *)payload;
215     switch (fun_info->f.fun_type) {
217 	checkSmallBitmap( (StgPtr)payload,
218 			  BITMAP_BITS(fun_info->f.b.bitmap), n_args );
221 	checkLargeBitmap( (StgPtr)payload,
222 			  GET_FUN_LARGE_BITMAP(fun_info),
    /* NOTE(review): this second checkLargeBitmap call sits in a case
     * elided from this listing — presumably ARG_BCO; confirm. */
226 	checkLargeBitmap( (StgPtr)payload,
        /* default: generic argument bitmap indexed by fun_type */
231 	checkSmallBitmap( (StgPtr)payload,
232 			  BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
/* Fully sanity-check one heap closure 'p' and return its size in words
 * so the caller can step to the next closure.  Dispatches on the info
 * table's closure type; each case checks the pointer fields that type
 * is known to contain. */
240 checkClosure( StgClosure* p )
242     const StgInfoTable *info;
244     ASSERT(LOOKS_LIKE_INFO_PTR(p->header.info));
246     /* Is it a static closure (i.e. in the data segment)? */
247     if (!HEAP_ALLOCED(p)) {
248 	ASSERT(closure_STATIC(p));
    /* else: heap-allocated closures must not be flagged static */
250 	ASSERT(!closure_STATIC(p));
254     switch (info->type) {
    /* MVar: head/tail of the blocked-TSO queue plus the stored value */
258       { StgMVar *mvar = (StgMVar *)p;
259 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->head));
260 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->tail));
261 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->value));
    /* The two checkBQ calls below are under build-specific #if arms
     * (elided here): the first takes StgBlockingQueueElement*, the
     * second plain head — matches the two checkBQ variants later on. */
264 	checkBQ((StgBlockingQueueElement *)mvar->head, p);
266 	checkBQ(mvar->head, p);
269 	return sizeofW(StgMVar);
    /* Thunks: check each pointer word of the payload */
280 	    for (i = 0; i < info->layout.payload.ptrs; i++) {
281 		ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgThunk *)p)->payload[i]));
283 	    return thunk_sizeW_fromITBL(info);
    /* Generic pointers-first layouts (constructors, functions, etc.) */
300     case IND_OLDGEN_PERM:
303     case SE_CAF_BLACKHOLE:
311     case CONSTR_CHARLIKE:
313     case CONSTR_NOCAF_STATIC:
318 	    for (i = 0; i < info->layout.payload.ptrs; i++) {
319 		ASSERT(LOOKS_LIKE_CLOSURE_PTR(p->payload[i]));
321 	    return sizeW_fromITBL(info);
    /* BCO: all four component arrays must look like closures */
325 	StgBCO *bco = (StgBCO *)p;
326 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->instrs));
327 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->literals));
328 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->ptrs));
329 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->itbls));
330 	return bco_sizeW(bco);
333     case IND_STATIC: /* (1, 0) closure */
334 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgIndStatic*)p)->indirectee));
335 	return sizeW_fromITBL(info);
338 	/* deal with these specially - the info table isn't
339 	 * representative of the actual layout.
341 	{ StgWeak *w = (StgWeak *)p;
342 	  ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->key));
343 	  ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->value));
344 	  ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->finalizer));
346 	    ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->link));
348 	  return sizeW_fromITBL(info);
352 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgSelector *)p)->selectee));
353 	return THUNK_SELECTOR_sizeW();
357 	/* we don't expect to see any of these after GC
358 	 * but they might appear during execution
360 	StgInd *ind = (StgInd *)p;
361 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(ind->indirectee));
362 	return sizeofW(StgInd);
    /* Stack frames are never valid free-standing heap closures */
374     case ATOMICALLY_FRAME:
375     case CATCH_RETRY_FRAME:
376     case CATCH_STM_FRAME:
377 	    barf("checkClosure: stack frame");
    /* AP / PAP: delegate payload checking to checkPAP */
381     { StgAP* ap = (StgAP *)p;
382 	checkPAP (ap->fun, ap->payload, ap->n_args);
388     { StgPAP* pap = (StgPAP *)p;
389 	checkPAP (pap->fun, pap->payload, pap->n_args);
390 	return pap_sizeW(pap);
    /* AP_STACK: its payload is a stack chunk, checked as such */
395     { StgAP_STACK *ap = (StgAP_STACK *)p;
396 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(ap->fun));
397 	checkStackChunk((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
398 	return ap_stack_sizeW(ap);
402 	return arr_words_sizeW((StgArrWords *)p);
404     case MUT_ARR_PTRS_CLEAN:
405     case MUT_ARR_PTRS_DIRTY:
406     case MUT_ARR_PTRS_FROZEN:
407     case MUT_ARR_PTRS_FROZEN0:
409 	StgMutArrPtrs* a = (StgMutArrPtrs *)p;
411 	for (i = 0; i < a->ptrs; i++) {
412 	    ASSERT(LOOKS_LIKE_CLOSURE_PTR(a->payload[i]));
414 	return mut_arr_ptrs_sizeW(a);
418 	checkTSO((StgTSO *)p);
419 	return tso_sizeW((StgTSO *)p);
    /* Parallel-RTS (GUM) closure types: global addresses + queues */
424       ASSERT(LOOKS_LIKE_GA(&(((StgBlockedFetch *)p)->ga)));
425       ASSERT(LOOKS_LIKE_CLOSURE_PTR((((StgBlockedFetch *)p)->node)));
426       return sizeofW(StgBlockedFetch);  // see size used in evacuate()
430       return sizeofW(StgFetchMe);
434       ASSERT(LOOKS_LIKE_GA(((StgFetchMe *)p)->ga));
435       return sizeofW(StgFetchMe);  // see size used in evacuate()
438       checkBQ(((StgFetchMeBlockingQueue *)p)->blocking_queue, (StgClosure *)p);
439       return sizeofW(StgFetchMeBlockingQueue); // see size used in evacuate()
442       /* In an RBH the BQ may be empty (ie END_BQ_QUEUE) but not NULL */
443       ASSERT(((StgRBH *)p)->blocking_queue!=NULL);
444       if (((StgRBH *)p)->blocking_queue!=END_BQ_QUEUE)
445         checkBQ(((StgRBH *)p)->blocking_queue, p);
446       ASSERT(LOOKS_LIKE_INFO_PTR(REVERT_INFOPTR(get_itbl((StgClosure *)p))));
447       return BLACKHOLE_sizeW();   // see size used in evacuate()
448       // sizeW_fromITBL(REVERT_INFOPTR(get_itbl((StgClosure *)p)));
    /* STM closure types */
452     case TVAR_WAIT_QUEUE:
454         StgTVarWaitQueue *wq = (StgTVarWaitQueue *)p;
455         ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->next_queue_entry));
456         ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->prev_queue_entry));
457         return sizeofW(StgTVarWaitQueue);
462         StgTVar *tv = (StgTVar *)p;
463         ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->current_value));
464         ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->first_wait_queue_entry));
465         return sizeofW(StgTVar);
471         StgTRecChunk *tc = (StgTRecChunk *)p;
472         ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->prev_chunk));
473         for (i = 0; i < tc -> next_entry_idx; i ++) {
474           ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].tvar));
475           ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].expected_value));
476           ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].new_value));
478         return sizeofW(StgTRecChunk);
483         StgTRecHeader *trec = (StgTRecHeader *)p;
484         ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> enclosing_trec));
485         ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> current_chunk));
486         return sizeofW(StgTRecHeader);
    /* EVACUATED closures must never survive into a sanity-checked heap */
491       barf("checkClosure: found EVACUATED closure %d",
494       barf("checkClosure (closure type %d)", info->type);
/* PVM-specific limits used by the GUM global-address sanity checks.
 * PVM_PE_MASK isolates the PE-number bits of a PVM task id. */
500 #define PVM_PE_MASK    0xfffc0000
501 #define MAX_PVM_PES    MAX_PES
502 #define MAX_PVM_TIDS   MAX_PES
503 #define MAX_SLOTS      100000
/* Heuristic: does 'tid' look like a plausible PVM task id?
 * Splits it into PE part (hi, shifted down 18 bits per PVM_PE_MASK)
 * and per-PE part (lo) and range-checks both. */
506 looks_like_tid(StgInt tid)
508   StgInt hi = (tid & PVM_PE_MASK) >> 18;
509   StgInt lo = (tid & ~PVM_PE_MASK);
510   rtsBool ok = (hi != 0) && (lo < MAX_PVM_TIDS) && (hi < MAX_PVM_TIDS);
/* Heuristic: does 'slot' look like a plausible GA slot number?
 * Only an upper-bound check; cannot validate against the owning PE. */
515 looks_like_slot(StgInt slot)
517   /* if tid is known better use looks_like_ga!! */
518   rtsBool ok = slot<MAX_SLOTS;
519   // This refers only to the no. of slots on the current PE
520   // rtsBool ok = slot<=highest_slot();
/* Heuristic: does 'ga' look like a valid global address?  Checks the
 * task id, and checks the slot against highest_slot() when the GA is
 * local (gtid==mytid), otherwise only against the static MAX_SLOTS. */
525 looks_like_ga(globalAddr *ga)
527   rtsBool is_tid = looks_like_tid((ga)->payload.gc.gtid);
528   rtsBool is_slot = ((ga)->payload.gc.gtid==mytid) ?
529                      (ga)->payload.gc.slot<=highest_slot() :
530                      (ga)->payload.gc.slot<MAX_SLOTS;
531   rtsBool ok = is_tid && is_slot;
538 /* -----------------------------------------------------------------------------
541    After garbage collection, the live heap is in a state where we can
542    run through and check that all the pointers point to the right
543    place.  This function starts at a given position and sanity-checks
544    all the objects in the remainder of the chain.
545    -------------------------------------------------------------------------- */
548 checkHeap(bdescr *bd)
552 #if defined(THREADED_RTS)
553     // heap sanity checking doesn't work with SMP, because we can't
554     // zero the slop (see Updates.h).
    /* Walk every block in the chain, checking each closure in turn */
558     for (; bd != NULL; bd = bd->link) {
560 	while (p < bd->free) {
561 	    nat size = checkClosure((StgClosure *)p);
562 	    /* This is the smallest size of closure that can live in the heap */
563 	    ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
        /* skip over slop: words that don't look like info pointers */
567 	    while (p < bd->free &&
568 		   (*p < 0x1000 || !LOOKS_LIKE_INFO_PTR((void*)*p))) { p++; }
575 Check heap between start and end. Used after unpacking graphs.
/* Parallel-RTS variant: tolerates FETCH_MEs and INDs created during
 * graph unpacking, marking their slop words with recognisable garbage
 * values before sizing them. */
578 checkHeapChunk(StgPtr start, StgPtr end)
580   extern globalAddr *LAGAlookup(StgClosure *addr);
584   for (p=start; p<end; p+=size) {
585     ASSERT(LOOKS_LIKE_INFO_PTR((void*)*p));
586     if (get_itbl((StgClosure*)p)->type == FETCH_ME &&
587 	*(p+1) == 0x0000eeee /* ie. unpack garbage (see SetGAandCommonUp) */) {
588       /* if it's a FM created during unpack and commoned up, it's not global */
589       ASSERT(LAGAlookup((StgClosure*)p)==NULL);
590       size = sizeofW(StgFetchMe);
591     } else if (get_itbl((StgClosure*)p)->type == IND) {
592       *(p+2) = 0x0000ee11; /* mark slop in IND as garbage */
593       size = sizeofW(StgInd);
      /* otherwise do a full closure check, which also yields the size */
595       size = checkClosure((StgClosure *)p);
596       /* This is the smallest size of closure that can live in the heap. */
597       ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
/* Sequential variant: check every closure between 'start' and 'end',
 * advancing by each closure's reported size. */
603 checkHeapChunk(StgPtr start, StgPtr end)
608   for (p=start; p<end; p+=size) {
609     ASSERT(LOOKS_LIKE_INFO_PTR((void*)*p));
610     size = checkClosure((StgClosure *)p);
611     /* This is the smallest size of closure that can live in the heap. */
612     ASSERT( size >= MIN_PAYLOAD_SIZE + sizeofW(StgHeader) );
/* Check the first closure of each block in a block chain (used for
 * large-object lists, where each block holds one closure). */
618 checkChain(bdescr *bd)
621     checkClosure((StgClosure *)bd->start);
/* Sanity-check one TSO: its stack bounds, its blocking state (the
 * block_info field must match why_blocked), its link field, and
 * finally its whole stack via checkStackChunk.  Relocated and killed
 * threads are skipped early. */
627 checkTSO(StgTSO *tso)
630     StgPtr stack = tso->stack;
631     StgOffset stack_size = tso->stack_size;
632     StgPtr stack_end = stack + stack_size;
634     if (tso->what_next == ThreadRelocated) {
    /* killed threads are not followed by the GC, so don't check them */
639     if (tso->what_next == ThreadKilled) {
640 	/* The garbage collector doesn't bother following any pointers
641 	 * from dead threads, so don't check sanity here.
646     ASSERT(stack <= sp && sp < stack_end);
    /* parallel-RTS only: magic number guards against corrupted TSOs */
649     ASSERT(tso->par.magic==TSO_MAGIC);
651     switch (tso->why_blocked) {
    /* BlockedOnGA (presumably — case label elided in this listing) */
653       checkClosureShallow(tso->block_info.closure);
654       ASSERT(/* Can't be a FETCH_ME because *this* closure is on its BQ */
655 	     get_itbl(tso->block_info.closure)->type==FETCH_ME_BQ);
657     case BlockedOnGA_NoSend:
658       checkClosureShallow(tso->block_info.closure);
659       ASSERT(get_itbl(tso->block_info.closure)->type==FETCH_ME_BQ);
661     case BlockedOnBlackHole:
662       checkClosureShallow(tso->block_info.closure);
663       ASSERT(get_itbl(tso->block_info.closure)->type==BLACKHOLE ||
664 	     get_itbl(tso->block_info.closure)->type==RBH);
669 #if defined(mingw32_HOST_OS)
670     case BlockedOnDoProc:
672       /* isOnBQ(blocked_queue) */
674     case BlockedOnException:
675       /* isOnSomeBQ(tso) */
676       ASSERT(get_itbl(tso->block_info.tso)->type==TSO);
679       ASSERT(get_itbl(tso->block_info.closure)->type==MVAR);
682       ASSERT(tso->block_info.closure == END_TSO_QUEUE);
686       Could check other values of why_blocked but I am more
687       lazy than paranoid (bad combination) -- HWL
691   /* if the link field is non-nil it must point to one of these
692      three closure types */
693   ASSERT(tso->link == END_TSO_QUEUE ||
694 	 get_itbl(tso->link)->type == TSO ||
695 	 get_itbl(tso->link)->type == BLOCKED_FETCH ||
696 	 get_itbl(tso->link)->type == CONSTR);
699     checkStackChunk(sp, stack_end);
/* GranSim-only: run checkTSO over every TSO on every PE's run queue,
 * reporting progress via debugBelch. */
704 checkTSOsSanity(void) {
708   debugBelch("Checking sanity of all runnable TSOs:");
710   for (i=0, tsos=0; i<RtsFlags.GranFlags.proc; i++) {
711     for (tso=run_queue_hds[i]; tso!=END_TSO_QUEUE; tso=tso->link) {
712       debugBelch("TSO %p on PE %d ...", tso, i);
719   debugBelch(" checked %d TSOs on %d PEs; ok\n", tsos, RtsFlags.GranFlags.proc);
/* Check run-queue invariants for one PE: head/tail are non-NULL
 * (END_TSO_QUEUE is the nil value), head and tail are nil together,
 * the links form a consistent chain, and the last node is the tail.
 * If check_TSO_too is set, each TSO is presumably also checked via
 * checkTSO on a line elided from this listing. */
726 checkThreadQSanity (PEs proc, rtsBool check_TSO_too)
730   /* the NIL value for TSOs is END_TSO_QUEUE; thus, finding NULL is an error */
731   ASSERT(run_queue_hds[proc]!=NULL);
732   ASSERT(run_queue_tls[proc]!=NULL);
733   /* if either head or tail is NIL then the other one must be NIL, too */
734   ASSERT(run_queue_hds[proc]!=END_TSO_QUEUE || run_queue_tls[proc]==END_TSO_QUEUE);
735   ASSERT(run_queue_tls[proc]!=END_TSO_QUEUE || run_queue_hds[proc]==END_TSO_QUEUE);
736   for (tso=run_queue_hds[proc], prev=END_TSO_QUEUE;
738        prev=tso, tso=tso->link) {
739     ASSERT((prev!=END_TSO_QUEUE || tso==run_queue_hds[proc]) &&
740 	   (prev==END_TSO_QUEUE || prev->link==tso));
744   ASSERT(prev==run_queue_tls[proc]);
/* Run checkThreadQSanity over the run queues of all PEs. */
748 checkThreadQsSanity (rtsBool check_TSO_too)
752   for (p=0; p<RtsFlags.GranFlags.proc; p++)
753     checkThreadQSanity(p, check_TSO_too);
758   Check that all TSOs have been evacuated.
759   Optionally also check the sanity of the TSOs.
762 checkGlobalTSOList (rtsBool checkTSOs)
764   extern  StgTSO *all_threads;
766   for (tso=all_threads; tso != END_TSO_QUEUE; tso = tso->global_link) {
767       ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso));
768       ASSERT(get_itbl(tso)->type == TSO);
774 /* -----------------------------------------------------------------------------
775    Check mutable list sanity.
776    -------------------------------------------------------------------------- */
/* Every closure on a generation's mutable list must itself live in
 * that generation (or be static). */
779 checkMutableList( bdescr *mut_bd, nat gen )
785     for (bd = mut_bd; bd != NULL; bd = bd->link) {
786 	for (q = bd->start; q < bd->free; q++) {
787 	    p = (StgClosure *)*q;
788 	    ASSERT(!HEAP_ALLOCED(p) || Bdescr((P_)p)->gen_no == gen);
794   Check the static objects list.
/* Walk the static-objects list (terminated by END_OF_STATIC_LIST),
 * following the per-type static-link field of each entry. */
797 checkStaticObjects ( StgClosure* static_objects )
799   StgClosure *p = static_objects;
802   while (p != END_OF_STATIC_LIST) {
805     switch (info->type) {
        /* IND_STATIC: also validate the indirectee */
808 	StgClosure *indirectee = ((StgIndStatic *)p)->indirectee;
810 	ASSERT(LOOKS_LIKE_CLOSURE_PTR(indirectee));
811 	ASSERT(LOOKS_LIKE_INFO_PTR(indirectee->header.info));
812 	p = *IND_STATIC_LINK((StgClosure *)p);
817       p = *THUNK_STATIC_LINK((StgClosure *)p);
821       p = *FUN_STATIC_LINK((StgClosure *)p);
825       p = *STATIC_LINK(info,(StgClosure *)p);
      /* NOTE(review): "checkStaticObjetcs" below is a typo in the barf
       * message ("Objetcs"); worth fixing when the code is next edited. */
829       barf("checkStaticObjetcs: strange closure %p (%s)",
836   Check the sanity of a blocking queue starting at bqe with closure being
837   the closure holding the blocking queue.
838   Note that in GUM we can have several different closure types in a
/* GUM/GranSim variant: blocking queues may hang off MVARs, FETCH_ME_BQs
 * or RBHs, and may contain more than one element type. */
843 checkBQ (StgBlockingQueueElement *bqe, StgClosure *closure)
845   rtsBool end = rtsFalse;
846   StgInfoTable *info = get_itbl(closure);
848   ASSERT(info->type == MVAR || info->type == FETCH_ME_BQ || info->type == RBH);
851     switch (get_itbl(bqe)->type) {
854       checkClosure((StgClosure *)bqe);
856       end = (bqe==END_BQ_QUEUE);
        /* second closure-type case (label elided in this listing) */
860       checkClosure((StgClosure *)bqe);
865       barf("checkBQ: strange closure %d in blocking queue for closure %p (%s)\n",
866 	   get_itbl(bqe)->type, closure, info_type(closure));
/* Sequential variant: a blocking queue here can only hang off an MVAR
 * and contain TSOs, terminated by END_BQ_QUEUE. */
872 checkBQ (StgTSO *bqe, StgClosure *closure)
874   rtsBool end = rtsFalse;
875   StgInfoTable *info = get_itbl(closure);
877   ASSERT(info->type == MVAR);
880     switch (get_itbl(bqe)->type) {
883       checkClosure((StgClosure *)bqe);
885       end = (bqe==END_BQ_QUEUE);
889       barf("checkBQ: strange closure %d in blocking queue for closure %p (%s)\n",
890 	   get_itbl(bqe)->type, closure, info_type(closure));
899 This routine checks the sanity of the LAGA and GALA tables. They are
900 implemented as lists through one hash table, LAtoGALAtable, because entries
901 in both tables have the same structure:
902 - the LAGA table maps local addresses to global addresses; it starts
903 with liveIndirections
904 - the GALA table maps global addresses to local addresses; it starts
911 /* hidden in parallel/Global.c; only accessed for testing here */
912 extern GALA *liveIndirections;
913 extern GALA *liveRemoteGAs;
914 extern HashTable *LAtoGALAtable;
917 checkLAGAtable(rtsBool check_closures)
920 nat n=0, m=0; // debugging
922 for (gala = liveIndirections; gala != NULL; gala = gala->next) {
924 gala0 = lookupHashTable(LAtoGALAtable, (StgWord) gala->la);
925 ASSERT(!gala->preferred || gala == gala0);
926 ASSERT(LOOKS_LIKE_INFO_PTR(((StgClosure *)gala->la)->header.info));
927 ASSERT(gala->next!=gala); // detect direct loops
928 if ( check_closures ) {
929 checkClosure((StgClosure *)gala->la);
933 for (gala = liveRemoteGAs; gala != NULL; gala = gala->next) {
935 gala0 = lookupHashTable(LAtoGALAtable, (StgWord) gala->la);
936 ASSERT(!gala->preferred || gala == gala0);
937 ASSERT(LOOKS_LIKE_INFO_PTR(((StgClosure *)gala->la)->header.info));
938 ASSERT(gala->next!=gala); // detect direct loops
940 if ( check_closures ) {
941 checkClosure((StgClosure *)gala->la);