[project @ 2006-01-17 16:03:47 by simonmar]
[ghc-hetmet.git] / ghc / rts / Sanity.c
index 920530a..f6947c9 100644
@@ -1,11 +1,10 @@
 /* -----------------------------------------------------------------------------
- * $Id: Sanity.c,v 1.14 1999/05/21 14:37:12 sof Exp $
  *
- * (c) The GHC Team, 1998-1999
+ * (c) The GHC Team, 1998-2001
  *
  * Sanity checking code for the heap and stack.
  *
- * Used when debugging: check that the stack looks reasonable.
+ * Used when debugging: check that everything is reasonable.
  *
  *    - All things that are supposed to be pointers look like pointers.
 *
 *    - Objects in text space are marked as static closures, those
 *      in the heap are dynamic.
 *
  * ---------------------------------------------------------------------------*/
 
+#include "PosixSource.h"
 #include "Rts.h"
 
-#ifdef DEBUG
+#ifdef DEBUG                                                   /* whole file */
 
 #include "RtsFlags.h"
 #include "RtsUtils.h"
 #include "BlockAlloc.h"
 #include "Sanity.h"
-
-#define LOOKS_LIKE_PTR(r) (LOOKS_LIKE_STATIC_CLOSURE(r) || ((HEAP_ALLOCED(r) && Bdescr((P_)r)->free != (void *)-1)))
+#include "MBlock.h"
+#include "Storage.h"
+#include "Schedule.h"
+#include "Apply.h"
 
 /* -----------------------------------------------------------------------------
-   Check stack sanity
+   Forward decls.
    -------------------------------------------------------------------------- */
 
-StgOffset checkStackClosure( StgClosure* c );
-
-StgOffset checkStackObject( StgPtr sp );
-
-void      checkStackChunk( StgPtr sp, StgPtr stack_end );
+static void      checkSmallBitmap    ( StgPtr payload, StgWord bitmap, nat );
+static void      checkLargeBitmap    ( StgPtr payload, StgLargeBitmap*, nat );
+static void      checkClosureShallow ( StgClosure * );
 
-static StgOffset checkSmallBitmap(  StgPtr payload, StgWord32 bitmap );
-
-static StgOffset checkLargeBitmap( StgPtr payload, 
-                                  StgLargeBitmap* large_bitmap );
-
-void checkClosureShallow( StgClosure* p );
+/* -----------------------------------------------------------------------------
+   Check stack sanity
+   -------------------------------------------------------------------------- */
 
-static StgOffset 
-checkSmallBitmap( StgPtr payload, StgWord32 bitmap )
+static void
+checkSmallBitmap( StgPtr payload, StgWord bitmap, nat size )
 {
-    StgOffset i;
+    StgPtr p;
+    nat i;
 
-    i = 0;
-    for(; bitmap != 0; ++i, bitmap >>= 1 ) {
+    p = payload;
+    for(i = 0; i < size; i++, bitmap >>= 1 ) {
        if ((bitmap & 1) == 0) {
-           checkClosure(stgCast(StgClosure*,payload[i]));
+           checkClosureShallow((StgClosure *)payload[i]);
        }
     }
-    return i;
 }
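For readers skimming the diff: in both bitmap walkers a clear bit in the liveness bitmap marks a payload word that is a pointer (and gets a shallow closure check), while a set bit marks a non-pointer word that is skipped; checkLargeBitmap below applies the same rule one bitmap word at a time. The following standalone sketch illustrates the small-bitmap walk with plain C types and a stub in place of checkClosureShallow; it is an illustration only, not RTS code.

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

typedef uintptr_t word;

/* stub standing in for checkClosureShallow() */
static void check_pointer_slot(size_t i, word w)
{
    printf("slot %zu holds a pointer word: %#lx\n", i, (unsigned long)w);
}

/* same shape as checkSmallBitmap: bit i clear => payload[i] is a pointer */
static void walk_small_bitmap(const word *payload, word bitmap, size_t size)
{
    for (size_t i = 0; i < size; i++, bitmap >>= 1) {
        if ((bitmap & 1) == 0) {
            check_pointer_slot(i, payload[i]);
        }
    }
}

int main(void)
{
    word payload[4] = { 0x8048, 42, 0x80a0, 7 };
    /* bitmap 0xA = binary 1010: slots 0 and 2 are pointers, 1 and 3 are not */
    walk_small_bitmap(payload, 0xA, 4);
    return 0;
}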
 
-
-static StgOffset 
-checkLargeBitmap( StgPtr payload, StgLargeBitmap* large_bitmap )
+static void
+checkLargeBitmap( StgPtr payload, StgLargeBitmap* large_bitmap, nat size )
 {
-    StgWord32 bmp;
-    StgOffset i;
+    StgWord bmp;
+    nat i, j;
 
     i = 0;
-    for (bmp=0; bmp<large_bitmap->size; bmp++) {
-       StgWord32 bitmap = large_bitmap->bitmap[bmp];
-       for(; bitmap != 0; ++i, bitmap >>= 1 ) {
+    for (bmp=0; i < size; bmp++) {
+       StgWord bitmap = large_bitmap->bitmap[bmp];
+       j = 0;
+       for(; i < size && j < BITS_IN(W_); j++, i++, bitmap >>= 1 ) {
            if ((bitmap & 1) == 0) {
-               checkClosure(stgCast(StgClosure*,payload[i]));
+               checkClosureShallow((StgClosure *)payload[i]);
            }
        }
     }
-    return i;
-}
-
-StgOffset 
-checkStackClosure( StgClosure* c )
-{    
-    const StgInfoTable* info = get_itbl(c);
-
-    /* All activation records have 'bitmap' style layout info. */
-    switch (info->type) {
-    case RET_DYN: /* Dynamic bitmap: the mask is stored on the stack */
-       {
-           StgRetDyn* r = (StgRetDyn *)c;
-           return sizeofW(StgRetDyn) + 
-                  checkSmallBitmap(r->payload,r->liveness);
-       }
-    case RET_BCO: /* small bitmap (<= 32 entries) */
-    case RET_SMALL:
-    case RET_VEC_SMALL:
-    case UPDATE_FRAME:
-    case CATCH_FRAME:
-    case STOP_FRAME:
-    case SEQ_FRAME:
-           return 1 + checkSmallBitmap((StgPtr)c + 1,info->layout.bitmap);
-    case RET_BIG: /* large bitmap (> 32 entries) */
-    case RET_VEC_BIG:
-           return 1 + checkLargeBitmap((StgPtr)c + 1,info->layout.large_bitmap);
-    case FUN:
-    case FUN_STATIC: /* probably a slow-entry point return address: */
-           return 1;
-    default:
-                   /* if none of the above, maybe it's a closure which looks a
-                    * little like an infotable
-                    */
-           checkClosureShallow(*(StgClosure **)c);
-           return 1;
-           /* barf("checkStackClosure: weird activation record found on stack (%p).",c); */
-    }
 }
 
 /*
@@ -118,39 +77,119 @@ checkStackClosure( StgClosure* c )
  * chunks.
  */
  
-void 
+static void 
 checkClosureShallow( StgClosure* p )
 {
-    ASSERT(LOOKS_LIKE_GHC_INFO(p->header.info));
+    ASSERT(LOOKS_LIKE_CLOSURE_PTR(p));
 
-    /* Is it a static closure (i.e. in the data segment)? */
-    if (LOOKS_LIKE_STATIC(p)) {
+    /* Is it a static closure? */
+    if (!HEAP_ALLOCED(p)) {
        ASSERT(closure_STATIC(p));
     } else {
        ASSERT(!closure_STATIC(p));
-       ASSERT(LOOKS_LIKE_PTR(p));
     }
 }
 
-/* check an individual stack object */
+// check an individual stack object
 StgOffset 
-checkStackObject( StgPtr sp )
+checkStackFrame( StgPtr c )
 {
-    if (IS_ARG_TAG(*sp)) {
-        /* Tagged words might be "stubbed" pointers, so there's no
-        * point checking to see whether they look like pointers or
-        * not (some of them will).
-        */
-       return ARG_SIZE(*sp) + 1;
-    } else if (LOOKS_LIKE_GHC_INFO(*stgCast(StgPtr*,sp))) {
-        return checkStackClosure(stgCast(StgClosure*,sp));
-    } else { /* must be an untagged closure pointer in the stack */
-       checkClosureShallow(*stgCast(StgClosure**,sp));
-       return 1;
+    nat size;
+    const StgRetInfoTable* info;
+
+    info = get_ret_itbl((StgClosure *)c);
+
+    /* All activation records have 'bitmap' style layout info. */
+    switch (info->i.type) {
+    case RET_DYN: /* Dynamic bitmap: the mask is stored on the stack */
+    {
+       StgWord dyn;
+       StgPtr p;
+       StgRetDyn* r;
+       
+       r = (StgRetDyn *)c;
+       dyn = r->liveness;
+       
+       p = (P_)(r->payload);
+       checkSmallBitmap(p,RET_DYN_LIVENESS(r->liveness),RET_DYN_BITMAP_SIZE);
+       p += RET_DYN_BITMAP_SIZE + RET_DYN_NONPTR_REGS_SIZE;
+
+       // skip over the non-pointers
+       p += RET_DYN_NONPTRS(dyn);
+       
+       // follow the ptr words
+       for (size = RET_DYN_PTRS(dyn); size > 0; size--) {
+           checkClosureShallow((StgClosure *)*p);
+           p++;
+       }
+       
+       return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
+           RET_DYN_NONPTR_REGS_SIZE +
+           RET_DYN_NONPTRS(dyn) + RET_DYN_PTRS(dyn);
+    }
+
+    case UPDATE_FRAME:
+      ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgUpdateFrame*)c)->updatee));
+    case ATOMICALLY_FRAME:
+    case CATCH_RETRY_FRAME:
+    case CATCH_STM_FRAME:
+    case CATCH_FRAME:
+      // small bitmap cases (<= 32 entries)
+    case STOP_FRAME:
+    case RET_SMALL:
+    case RET_VEC_SMALL:
+       size = BITMAP_SIZE(info->i.layout.bitmap);
+       checkSmallBitmap((StgPtr)c + 1, 
+                        BITMAP_BITS(info->i.layout.bitmap), size);
+       return 1 + size;
+
+    case RET_BCO: {
+       StgBCO *bco;
+       nat size;
+       bco = (StgBCO *)*(c+1);
+       size = BCO_BITMAP_SIZE(bco);
+       checkLargeBitmap((StgPtr)c + 2, BCO_BITMAP(bco), size);
+       return 2 + size;
+    }
+
+    case RET_BIG: // large bitmap (> 32 entries)
+    case RET_VEC_BIG:
+       size = GET_LARGE_BITMAP(&info->i)->size;
+       checkLargeBitmap((StgPtr)c + 1, GET_LARGE_BITMAP(&info->i), size);
+       return 1 + size;
+
+    case RET_FUN:
+    {
+       StgFunInfoTable *fun_info;
+       StgRetFun *ret_fun;
+
+       ret_fun = (StgRetFun *)c;
+       fun_info = get_fun_itbl(ret_fun->fun);
+       size = ret_fun->size;
+       switch (fun_info->f.fun_type) {
+       case ARG_GEN:
+           checkSmallBitmap((StgPtr)ret_fun->payload, 
+                            BITMAP_BITS(fun_info->f.b.bitmap), size);
+           break;
+       case ARG_GEN_BIG:
+           checkLargeBitmap((StgPtr)ret_fun->payload,
+                            GET_FUN_LARGE_BITMAP(fun_info), size);
+           break;
+       default:
+           checkSmallBitmap((StgPtr)ret_fun->payload,
+                            BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
+                            size);
+           break;
+       }
+       return sizeofW(StgRetFun) + size;
+    }
+
+    default:
+       barf("checkStackFrame: weird activation record found on stack (%p %d).",c,info->i.type);
     }
 }
 
-/* check sections of stack between update frames */
+// check sections of stack between update frames
 void 
 checkStackChunk( StgPtr sp, StgPtr stack_end )
 {
@@ -158,46 +197,75 @@ checkStackChunk( StgPtr sp, StgPtr stack_end )
 
     p = sp;
     while (p < stack_end) {
-       p += checkStackObject( p );
+       p += checkStackFrame( p );
     }
-    ASSERT( p == stack_end );
+    // ASSERT( p == stack_end ); -- HWL
 }
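Worth noting: checkStackFrame validates a single frame and returns its size in words, and checkStackChunk simply advances by that amount until it reaches the end of the chunk (the final equality assertion is commented out above). Below is a standalone sketch of that walking protocol, with an invented one-word-header frame format standing in for the real info-table dispatch; it is an illustration only.

#include <assert.h>
#include <stddef.h>

typedef size_t word;

/* Invented frame format for the sketch: word 0 holds the number of payload
 * words, so a frame occupies 1 + payload words.  The real checker instead
 * dispatches on the frame's info table. */
static size_t check_one_frame(const word *frame)
{
    return 1 + frame[0];
}

static void walk_stack_chunk(const word *sp, const word *stack_end)
{
    const word *p = sp;
    while (p < stack_end) {
        p += check_one_frame(p);   /* advance by the reported frame size */
    }
    assert(p == stack_end);
}

int main(void)
{
    /* two fake frames: one with 2 payload words, one with 1 */
    word stack[] = { 2, 0, 0, 1, 0 };
    walk_stack_chunk(stack, stack + sizeof(stack) / sizeof(stack[0]));
    return 0;
}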
 
+static void
+checkPAP (StgClosure *fun, StgClosure** payload, StgWord n_args)
+{ 
+    StgClosure *p;
+    StgFunInfoTable *fun_info;
+    
+    ASSERT(LOOKS_LIKE_CLOSURE_PTR(fun));
+    fun_info = get_fun_itbl(fun);
+    
+    p = (StgClosure *)payload;
+    switch (fun_info->f.fun_type) {
+    case ARG_GEN:
+       checkSmallBitmap( (StgPtr)payload, 
+                         BITMAP_BITS(fun_info->f.b.bitmap), n_args );
+       break;
+    case ARG_GEN_BIG:
+       checkLargeBitmap( (StgPtr)payload, 
+                         GET_FUN_LARGE_BITMAP(fun_info), 
+                         n_args );
+       break;
+    case ARG_BCO:
+       checkLargeBitmap( (StgPtr)payload, 
+                         BCO_BITMAP(fun), 
+                         n_args );
+       break;
+    default:
+       checkSmallBitmap( (StgPtr)payload, 
+                         BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]),
+                         n_args );
+       break;
+    }
+}
+
+
 StgOffset 
 checkClosure( StgClosure* p )
 {
     const StgInfoTable *info;
 
-#ifndef INTERPRETER    
-    ASSERT(LOOKS_LIKE_GHC_INFO(p->header.info));
-#endif
+    ASSERT(LOOKS_LIKE_INFO_PTR(p->header.info));
 
     /* Is it a static closure (i.e. in the data segment)? */
-    if (LOOKS_LIKE_STATIC(p)) {
+    if (!HEAP_ALLOCED(p)) {
        ASSERT(closure_STATIC(p));
     } else {
        ASSERT(!closure_STATIC(p));
-       ASSERT(LOOKS_LIKE_PTR(p));
     }
 
     info = get_itbl(p);
     switch (info->type) {
-    case BCO:
-       {
-           StgBCO* bco = stgCast(StgBCO*,p);
-           nat i;
-           for(i=0; i < bco->n_ptrs; ++i) {
-               ASSERT(LOOKS_LIKE_PTR(bcoConstPtr(bco,i)));
-           }
-           return bco_sizeW(bco);
-       }
 
     case MVAR:
       { 
        StgMVar *mvar = (StgMVar *)p;
-       ASSERT(LOOKS_LIKE_PTR(mvar->head));
-       ASSERT(LOOKS_LIKE_PTR(mvar->tail));
-       ASSERT(LOOKS_LIKE_PTR(mvar->value));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->head));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->tail));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(mvar->value));
+#if 0
+#if defined(PAR)
+       checkBQ((StgBlockingQueueElement *)mvar->head, p);
+#else
+       checkBQ(mvar->head, p);
+#endif
+#endif
        return sizeofW(StgMVar);
       }
 
@@ -210,9 +278,9 @@ checkClosure( StgClosure* p )
       {
        nat i;
        for (i = 0; i < info->layout.payload.ptrs; i++) {
-         ASSERT(LOOKS_LIKE_PTR(payloadPtr(p,i)));
+         ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgThunk *)p)->payload[i]));
        }
-       return stg_max(sizeW_fromITBL(info), sizeofW(StgHeader) + MIN_UPD_SIZE);
+       return stg_max(thunk_sizeW_fromITBL(info), sizeofW(StgHeader)+MIN_UPD_SIZE);
       }
 
     case FUN:
@@ -230,16 +298,12 @@ checkClosure( StgClosure* p )
     case IND_PERM:
     case IND_OLDGEN:
     case IND_OLDGEN_PERM:
-    case CAF_UNENTERED:
-    case CAF_ENTERED:
-    case CAF_BLACKHOLE:
 #ifdef TICKY_TICKY
-    case SE_CAF_BLACKHOLE:
     case SE_BLACKHOLE:
+    case SE_CAF_BLACKHOLE:
 #endif
     case BLACKHOLE:
-    case BLACKHOLE_BQ:
-    case FOREIGN:
+    case CAF_BLACKHOLE:
     case STABLE_NAME:
     case MUT_VAR:
     case CONSTR_INTLIKE:
@@ -248,44 +312,53 @@ checkClosure( StgClosure* p )
     case CONSTR_NOCAF_STATIC:
     case THUNK_STATIC:
     case FUN_STATIC:
-    case IND_STATIC:
        {
            nat i;
            for (i = 0; i < info->layout.payload.ptrs; i++) {
-               ASSERT(LOOKS_LIKE_PTR(payloadPtr(p,i)));
+               ASSERT(LOOKS_LIKE_CLOSURE_PTR(p->payload[i]));
            }
            return sizeW_fromITBL(info);
        }
 
+    case BCO: {
+       StgBCO *bco = (StgBCO *)p;
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->instrs));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->literals));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->ptrs));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->itbls));
+       return bco_sizeW(bco);
+    }
+
+    case IND_STATIC: /* (1, 0) closure */
+      ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgIndStatic*)p)->indirectee));
+      return sizeW_fromITBL(info);
+
     case WEAK:
       /* deal with these specially - the info table isn't
        * representative of the actual layout.
        */
       { StgWeak *w = (StgWeak *)p;
-       ASSERT(LOOKS_LIKE_PTR(w->key));
-       ASSERT(LOOKS_LIKE_PTR(w->value));
-       ASSERT(LOOKS_LIKE_PTR(w->finalizer));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->key));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->value));
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->finalizer));
        if (w->link) {
-         ASSERT(LOOKS_LIKE_PTR(w->link));
+         ASSERT(LOOKS_LIKE_CLOSURE_PTR(w->link));
        }
        return sizeW_fromITBL(info);
       }
 
     case THUNK_SELECTOR:
-           ASSERT(LOOKS_LIKE_PTR(stgCast(StgSelector*,p)->selectee));
-           return sizeofW(StgHeader) + MIN_UPD_SIZE;
+           ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgSelector *)p)->selectee));
+           return THUNK_SELECTOR_sizeW();
 
     case IND:
        { 
            /* we don't expect to see any of these after GC
             * but they might appear during execution
             */
-           P_ q;
-           StgInd *ind = stgCast(StgInd*,p);
-           ASSERT(LOOKS_LIKE_PTR(ind->indirectee));
-           q = (P_)p + sizeofW(StgInd);
-           while (!*q) { q++; }; /* skip padding words (see GC.c: evacuate())*/
-           return q - (P_)p;
+           StgInd *ind = (StgInd *)p;
+           ASSERT(LOOKS_LIKE_CLOSURE_PTR(ind->indirectee));
+           return sizeofW(StgHeader) + MIN_UPD_SIZE;
        }
 
     case RET_BCO:
@@ -297,30 +370,45 @@ checkClosure( StgClosure* p )
     case UPDATE_FRAME:
     case STOP_FRAME:
     case CATCH_FRAME:
-    case SEQ_FRAME:
+    case ATOMICALLY_FRAME:
+    case CATCH_RETRY_FRAME:
+    case CATCH_STM_FRAME:
            barf("checkClosure: stack frame");
 
-    case AP_UPD: /* we can treat this as being the same as a PAP */
+    case AP:
+    {
+       StgAP* ap = (StgAP *)p;
+       checkPAP (ap->fun, ap->payload, ap->n_args);
+       return ap_sizeW(ap);
+    }
+
     case PAP:
-       { 
-           StgPAP *pap = stgCast(StgPAP*,p);
-           ASSERT(LOOKS_LIKE_PTR(pap->fun));
-           checkStackChunk((StgPtr)pap->payload, 
-                           (StgPtr)pap->payload + pap->n_args
-                           );
-           return pap_sizeW(pap);
-       }
+    {
+       StgPAP* pap = (StgPAP *)p;
+       checkPAP (pap->fun, pap->payload, pap->n_args);
+       return pap_sizeW(pap);
+    }
+
+    case AP_STACK:
+    { 
+       StgAP_STACK *ap = (StgAP_STACK *)p;
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(ap->fun));
+       checkStackChunk((StgPtr)ap->payload, (StgPtr)ap->payload + ap->size);
+       return ap_stack_sizeW(ap);
+    }
 
     case ARR_WORDS:
-           return arr_words_sizeW(stgCast(StgArrWords*,p));
+           return arr_words_sizeW((StgArrWords *)p);
 
-    case MUT_ARR_PTRS:
+    case MUT_ARR_PTRS_CLEAN:
+    case MUT_ARR_PTRS_DIRTY:
     case MUT_ARR_PTRS_FROZEN:
+    case MUT_ARR_PTRS_FROZEN0:
        {
-           StgMutArrPtrs* a = stgCast(StgMutArrPtrs*,p);
+           StgMutArrPtrs* a = (StgMutArrPtrs *)p;
            nat i;
            for (i = 0; i < a->ptrs; i++) {
-               ASSERT(LOOKS_LIKE_PTR(a->payload[i]));
+               ASSERT(LOOKS_LIKE_CLOSURE_PTR(a->payload[i]));
            }
            return mut_arr_ptrs_sizeW(a);
        }
@@ -329,16 +417,123 @@ checkClosure( StgClosure* p )
         checkTSO((StgTSO *)p);
         return tso_sizeW((StgTSO *)p);
 
+#if defined(PAR)
+
     case BLOCKED_FETCH:
+      ASSERT(LOOKS_LIKE_GA(&(((StgBlockedFetch *)p)->ga)));
+      ASSERT(LOOKS_LIKE_CLOSURE_PTR((((StgBlockedFetch *)p)->node)));
+      return sizeofW(StgBlockedFetch);  // see size used in evacuate()
+
+#ifdef DIST
+    case REMOTE_REF:
+      return sizeofW(StgFetchMe); 
+#endif /*DIST */
+      
     case FETCH_ME:
+      ASSERT(LOOKS_LIKE_GA(((StgFetchMe *)p)->ga));
+      return sizeofW(StgFetchMe);  // see size used in evacuate()
+
+    case FETCH_ME_BQ:
+      checkBQ(((StgFetchMeBlockingQueue *)p)->blocking_queue, (StgClosure *)p);
+      return sizeofW(StgFetchMeBlockingQueue); // see size used in evacuate()
+
+    case RBH:
+      /* In an RBH the BQ may be empty (ie END_BQ_QUEUE) but not NULL */
+      ASSERT(((StgRBH *)p)->blocking_queue!=NULL);
+      if (((StgRBH *)p)->blocking_queue!=END_BQ_QUEUE)
+       checkBQ(((StgRBH *)p)->blocking_queue, p);
+      ASSERT(LOOKS_LIKE_INFO_PTR(REVERT_INFOPTR(get_itbl((StgClosure *)p))));
+      return BLACKHOLE_sizeW();   // see size used in evacuate()
+      // sizeW_fromITBL(REVERT_INFOPTR(get_itbl((StgClosure *)p)));
+
+#endif
+
+    case TVAR_WAIT_QUEUE:
+      {
+        StgTVarWaitQueue *wq = (StgTVarWaitQueue *)p;
+        ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->next_queue_entry));
+        ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->prev_queue_entry));
+        return sizeofW(StgTVarWaitQueue);
+      }
+
+    case TVAR:
+      {
+        StgTVar *tv = (StgTVar *)p;
+        ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->current_value));
+        ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->first_wait_queue_entry));
+        return sizeofW(StgTVar);
+      }
+
+    case TREC_CHUNK:
+      {
+        nat i;
+        StgTRecChunk *tc = (StgTRecChunk *)p;
+        ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->prev_chunk));
+        for (i = 0; i < tc -> next_entry_idx; i ++) {
+          ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].tvar));
+          ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].expected_value));
+          ASSERT(LOOKS_LIKE_CLOSURE_PTR(tc->entries[i].new_value));
+        }
+        return sizeofW(StgTRecChunk);
+      }
+
+    case TREC_HEADER:
+      {
+        StgTRecHeader *trec = (StgTRecHeader *)p;
+        ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> enclosing_trec));
+        ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> current_chunk));
+        return sizeofW(StgTRecHeader);
+      }
+      
+      
     case EVACUATED:
-           barf("checkClosure: unimplemented/strange closure type");
+           barf("checkClosure: found EVACUATED closure %d",
+                info->type);
     default:
-           barf("checkClosure");
+           barf("checkClosure (closure type %d)", info->type);
     }
-#undef LOOKS_LIKE_PTR
 }
 
+#if defined(PAR)
+
+#define PVM_PE_MASK    0xfffc0000
+#define MAX_PVM_PES    MAX_PES
+#define MAX_PVM_TIDS   MAX_PES
+#define MAX_SLOTS      100000
+
+rtsBool
+looks_like_tid(StgInt tid)
+{
+  StgInt hi = (tid & PVM_PE_MASK) >> 18;
+  StgInt lo = (tid & ~PVM_PE_MASK);
+  rtsBool ok = (hi != 0) && (lo < MAX_PVM_TIDS) && (hi < MAX_PVM_TIDS);
+  return ok;
+}
+
+rtsBool
+looks_like_slot(StgInt slot)
+{
+  /* if tid is known better use looks_like_ga!! */
+  rtsBool ok = slot<MAX_SLOTS;
+  // This refers only to the no. of slots on the current PE
+  // rtsBool ok = slot<=highest_slot();
+  return ok; 
+}
+
+rtsBool
+looks_like_ga(globalAddr *ga)
+{
+  rtsBool is_tid = looks_like_tid((ga)->payload.gc.gtid);
+  rtsBool is_slot = ((ga)->payload.gc.gtid==mytid) ? 
+                     (ga)->payload.gc.slot<=highest_slot() : 
+                     (ga)->payload.gc.slot<MAX_SLOTS;
+  rtsBool ok = is_tid && is_slot;
+  return ok;
+}
+
+#endif
+
+
 /* -----------------------------------------------------------------------------
    Check Heap Sanity
 
@@ -348,36 +543,77 @@ checkClosure( StgClosure* p )
    all the objects in the remainder of the chain.
    -------------------------------------------------------------------------- */
 
-extern void 
-checkHeap(bdescr *bd, StgPtr start)
+void 
+checkHeap(bdescr *bd)
 {
     StgPtr p;
 
-    if (start == NULL) {
-      p = bd->start;
-    } else {
-      p = start;
-    }
-
-    while (bd != NULL) {
-      while (p < bd->free) {
-        nat size = checkClosure(stgCast(StgClosure*,p));
-        /* This is the smallest size of closure that can live in the heap. */
-        ASSERT( size >= MIN_NONUPD_SIZE + sizeofW(StgHeader) );
-       p += size;
+#if defined(SMP)
+    // heap sanity checking doesn't work with SMP, because we can't
+    // zero the slop (see Updates.h).
+    return;
+#endif
 
-       /* skip over slop */
-       while (p < bd->free &&
-              (*p == 0 || !LOOKS_LIKE_GHC_INFO((void*)*p))) { p++; } 
-      }
-      bd = bd->link;
-      if (bd != NULL) {
+    for (; bd != NULL; bd = bd->link) {
        p = bd->start;
-      }
+       while (p < bd->free) {
+           nat size = checkClosure((StgClosure *)p);
+           /* This is the smallest size of closure that can live in the heap */
+           ASSERT( size >= MIN_NONUPD_SIZE + sizeofW(StgHeader) );
+           p += size;
+           
+           /* skip over slop */
+           while (p < bd->free &&
+                  (*p < 0x1000 || !LOOKS_LIKE_INFO_PTR((void*)*p))) { p++; } 
+       }
     }
 }
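checkHeap advances through each block one closure at a time and, between closures, steps over slop: leftover words from updates which the non-SMP RTS zeroes (hence the SMP early return above). A word is treated as slop if it is a small value or does not look like an info pointer. The sketch below mimics that skipping idiom with a stub predicate in place of LOOKS_LIKE_INFO_PTR and a fixed-size stand-in for checkClosure; it is an illustration only.

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

typedef uintptr_t word;

/* stub standing in for LOOKS_LIKE_INFO_PTR */
static int looks_like_info_ptr(word w)
{
    return w >= 0x1000 && (w % sizeof(word)) == 0;
}

/* pretend every closure is exactly two words: header plus one payload word */
static size_t check_closure_stub(const word *p)
{
    printf("closure header %#lx\n", (unsigned long)p[0]);
    return 2;
}

int main(void)
{
    /* a fake block: a closure, two zeroed slop words, another closure */
    word block[] = { 0x4000, 1, 0, 0, 0x5000, 2 };
    const word *block_end = block + sizeof(block) / sizeof(block[0]);
    const word *p = block;

    while (p < block_end) {
        p += check_closure_stub(p);
        /* skip slop, exactly as the loop in checkHeap does */
        while (p < block_end &&
               (*p < 0x1000 || !looks_like_info_ptr(*p))) {
            p++;
        }
    }
    return 0;
}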
 
-extern void
+#if defined(PAR)
+/* 
+   Check heap between start and end. Used after unpacking graphs.
+*/
+void 
+checkHeapChunk(StgPtr start, StgPtr end)
+{
+  extern globalAddr *LAGAlookup(StgClosure *addr);
+  StgPtr p;
+  nat size;
+
+  for (p=start; p<end; p+=size) {
+    ASSERT(LOOKS_LIKE_INFO_PTR((void*)*p));
+    if (get_itbl((StgClosure*)p)->type == FETCH_ME &&
+       *(p+1) == 0x0000eeee /* ie. unpack garbage (see SetGAandCommonUp) */) {
+      /* if it's a FM created during unpack and commoned up, it's not global */
+      ASSERT(LAGAlookup((StgClosure*)p)==NULL);
+      size = sizeofW(StgFetchMe);
+    } else if (get_itbl((StgClosure*)p)->type == IND) {
+      *(p+2) = 0x0000ee11; /* mark slop in IND as garbage */
+      size = MIN_UPD_SIZE;
+    } else {
+      size = checkClosure((StgClosure *)p);
+      /* This is the smallest size of closure that can live in the heap. */
+      ASSERT( size >= MIN_NONUPD_SIZE + sizeofW(StgHeader) );
+    }
+  }
+}
+#else /* !PAR */
+void 
+checkHeapChunk(StgPtr start, StgPtr end)
+{
+  StgPtr p;
+  nat size;
+
+  for (p=start; p<end; p+=size) {
+    ASSERT(LOOKS_LIKE_INFO_PTR((void*)*p));
+    size = checkClosure((StgClosure *)p);
+    /* This is the smallest size of closure that can live in the heap. */
+    ASSERT( size >= MIN_NONUPD_SIZE + sizeofW(StgHeader) );
+  }
+}
+#endif
+
+void
 checkChain(bdescr *bd)
 {
   while (bd != NULL) {
@@ -386,45 +622,20 @@ checkChain(bdescr *bd)
   }
 }
 
-/* check stack - making sure that update frames are linked correctly */
-void 
-checkStack(StgPtr sp, StgPtr stack_end, StgUpdateFrame* su )
-{
-    /* check everything down to the first update frame */
-    checkStackChunk( sp, stgCast(StgPtr,su) );
-    while ( stgCast(StgPtr,su) < stack_end) {
-       sp = stgCast(StgPtr,su);
-       switch (get_itbl(su)->type) {
-       case UPDATE_FRAME:
-               su = su->link;
-               break;
-       case SEQ_FRAME:
-               su = stgCast(StgSeqFrame*,su)->link;
-               break;
-       case CATCH_FRAME:
-               su = stgCast(StgCatchFrame*,su)->link;
-               break;
-       case STOP_FRAME:
-               /* not quite: ASSERT(stgCast(StgPtr,su) == stack_end); */
-               return;
-       default:
-               barf("checkStack: weird record found on update frame list.");
-       }
-       checkStackChunk( sp, stgCast(StgPtr,su) );
-    }
-    ASSERT(stgCast(StgPtr,su) == stack_end);
-}
-
-extern void
+void
 checkTSO(StgTSO *tso)
 {
     StgPtr sp = tso->sp;
     StgPtr stack = tso->stack;
-    StgUpdateFrame* su = tso->su;
     StgOffset stack_size = tso->stack_size;
     StgPtr stack_end = stack + stack_size;
 
-    if (tso->whatNext == ThreadComplete ||  tso->whatNext == ThreadKilled) {
+    if (tso->what_next == ThreadRelocated) {
+      checkTSO(tso->link);
+      return;
+    }
+
+    if (tso->what_next == ThreadKilled) {
       /* The garbage collector doesn't bother following any pointers
        * from dead threads, so don't check sanity here.  
        */
@@ -432,46 +643,305 @@ checkTSO(StgTSO *tso)
     }
 
     ASSERT(stack <= sp && sp < stack_end);
-    ASSERT(sp <= stgCast(StgPtr,su));
 
-    checkStack(sp, stack_end, su);
+#if defined(PAR)
+    ASSERT(tso->par.magic==TSO_MAGIC);
+
+    switch (tso->why_blocked) {
+    case BlockedOnGA: 
+      checkClosureShallow(tso->block_info.closure);
+      ASSERT(/* Can't be a FETCH_ME because *this* closure is on its BQ */
+            get_itbl(tso->block_info.closure)->type==FETCH_ME_BQ);
+      break;
+    case BlockedOnGA_NoSend: 
+      checkClosureShallow(tso->block_info.closure);
+      ASSERT(get_itbl(tso->block_info.closure)->type==FETCH_ME_BQ);
+      break;
+    case BlockedOnBlackHole: 
+      checkClosureShallow(tso->block_info.closure);
+      ASSERT(get_itbl(tso->block_info.closure)->type==BLACKHOLE ||
+             get_itbl(tso->block_info.closure)->type==RBH);
+      break;
+    case BlockedOnRead:
+    case BlockedOnWrite:
+    case BlockedOnDelay:
+#if defined(mingw32_HOST_OS)
+    case BlockedOnDoProc:
+#endif
+      /* isOnBQ(blocked_queue) */
+      break;
+    case BlockedOnException:
+      /* isOnSomeBQ(tso) */
+      ASSERT(get_itbl(tso->block_info.tso)->type==TSO);
+      break;
+    case BlockedOnMVar:
+      ASSERT(get_itbl(tso->block_info.closure)->type==MVAR);
+      break;
+    case BlockedOnSTM:
+      ASSERT(tso->block_info.closure == END_TSO_QUEUE);
+      break;
+    default:
+      /* 
+        Could check other values of why_blocked but I am more 
+        lazy than paranoid (bad combination) -- HWL 
+      */
+    }
+
+    /* if the link field is non-nil it must point to one of these
+       three closure types */
+    ASSERT(tso->link == END_TSO_QUEUE ||
+          get_itbl(tso->link)->type == TSO ||
+          get_itbl(tso->link)->type == BLOCKED_FETCH ||
+          get_itbl(tso->link)->type == CONSTR);
+#endif
+
+    checkStackChunk(sp, stack_end);
 }
 
-/* -----------------------------------------------------------------------------
-   Check Blackhole Sanity
+#if defined(GRAN)
+void  
+checkTSOsSanity(void) {
+  nat i, tsos;
+  StgTSO *tso;
+  
+  debugBelch("Checking sanity of all runnable TSOs:");
+  
+  for (i=0, tsos=0; i<RtsFlags.GranFlags.proc; i++) {
+    for (tso=run_queue_hds[i]; tso!=END_TSO_QUEUE; tso=tso->link) {
+      debugBelch("TSO %p on PE %d ...", tso, i);
+      checkTSO(tso); 
+      debugBelch("OK, ");
+      tsos++;
+    }
+  }
+  
+  debugBelch(" checked %d TSOs on %d PEs; ok\n", tsos, RtsFlags.GranFlags.proc);
+}
 
-   Test whether an object is already on the update list.
-   It isn't necessarily an rts error if it is - it might be a programming
-   error.
 
-   Future versions might be able to test for a blackhole without traversing
-   the update frame list.
+// still GRAN only
 
+rtsBool
+checkThreadQSanity (PEs proc, rtsBool check_TSO_too) 
+{
+  StgTSO *tso, *prev;
+
+  /* the NIL value for TSOs is END_TSO_QUEUE; thus, finding NULL is an error */
+  ASSERT(run_queue_hds[proc]!=NULL);
+  ASSERT(run_queue_tls[proc]!=NULL);
+  /* if either head or tail is NIL then the other one must be NIL, too */
+  ASSERT(run_queue_hds[proc]!=END_TSO_QUEUE || run_queue_tls[proc]==END_TSO_QUEUE);
+  ASSERT(run_queue_tls[proc]!=END_TSO_QUEUE || run_queue_hds[proc]==END_TSO_QUEUE);
+  for (tso=run_queue_hds[proc], prev=END_TSO_QUEUE; 
+       tso!=END_TSO_QUEUE;
+       prev=tso, tso=tso->link) {
+    ASSERT((prev!=END_TSO_QUEUE || tso==run_queue_hds[proc]) &&
+          (prev==END_TSO_QUEUE || prev->link==tso));
+    if (check_TSO_too)
+      checkTSO(tso);
+  }
+  ASSERT(prev==run_queue_tls[proc]);
+}
+
+rtsBool
+checkThreadQsSanity (rtsBool check_TSO_too)
+{
+  PEs p;
+  
+  for (p=0; p<RtsFlags.GranFlags.proc; p++)
+    checkThreadQSanity(p, check_TSO_too);
+}
+#endif /* GRAN */
+
+/* 
+   Check that all TSOs have been evacuated.
+   Optionally also check the sanity of the TSOs.
+*/
+void
+checkGlobalTSOList (rtsBool checkTSOs)
+{
+  extern  StgTSO *all_threads;
+  StgTSO *tso;
+  for (tso=all_threads; tso != END_TSO_QUEUE; tso = tso->global_link) {
+      ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso));
+      ASSERT(get_itbl(tso)->type == TSO);
+      if (checkTSOs)
+         checkTSO(tso);
+  }
+}
+
+/* -----------------------------------------------------------------------------
+   Check mutable list sanity.
    -------------------------------------------------------------------------- */
-rtsBool isBlackhole( StgTSO* tso, StgClosure* p )
+
+void
+checkMutableList( bdescr *mut_bd, nat gen )
 {
-  StgUpdateFrame* su = tso->su;
-  do {
-    switch (get_itbl(su)->type) {
-    case UPDATE_FRAME:
-      if (su->updatee == p) {
-       return rtsTrue;
-      } else {
-       su = su->link;
+    bdescr *bd;
+    StgPtr q;
+    StgClosure *p;
+
+    for (bd = mut_bd; bd != NULL; bd = bd->link) {
+       for (q = bd->start; q < bd->free; q++) {
+           p = (StgClosure *)*q;
+           ASSERT(!HEAP_ALLOCED(p) || Bdescr((P_)p)->gen_no == gen);
+       }
+    }
+}
+
+/*
+  Check the static objects list.
+*/
+void
+checkStaticObjects ( StgClosure* static_objects )
+{
+  StgClosure *p = static_objects;
+  StgInfoTable *info;
+
+  while (p != END_OF_STATIC_LIST) {
+    checkClosure(p);
+    info = get_itbl(p);
+    switch (info->type) {
+    case IND_STATIC:
+      { 
+       StgClosure *indirectee = ((StgIndStatic *)p)->indirectee;
+
+       ASSERT(LOOKS_LIKE_CLOSURE_PTR(indirectee));
+       ASSERT(LOOKS_LIKE_INFO_PTR(indirectee->header.info));
+       p = *IND_STATIC_LINK((StgClosure *)p);
+       break;
       }
+
+    case THUNK_STATIC:
+      p = *THUNK_STATIC_LINK((StgClosure *)p);
       break;
-    case SEQ_FRAME:
-      su = stgCast(StgSeqFrame*,su)->link;
+
+    case FUN_STATIC:
+      p = *FUN_STATIC_LINK((StgClosure *)p);
       break;
-    case CATCH_FRAME:
-      su = stgCast(StgCatchFrame*,su)->link;
+
+    case CONSTR_STATIC:
+      p = *STATIC_LINK(info,(StgClosure *)p);
       break;
-    case STOP_FRAME:
-      return rtsFalse;
+
     default:
-      barf("isBlackhole: weird record found on update frame list.");
+      barf("checkStaticObjetcs: strange closure %p (%s)", 
+          p, info_type(p));
     }
-  } while (1);
+  }
 }
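The loop above is walking an intrusive list: each static closure stores its "static link" in a slot whose position depends on the closure type, so the traversal must switch on the type to find the next element (IND_STATIC_LINK, THUNK_STATIC_LINK, FUN_STATIC_LINK, STATIC_LINK). The toy sketch below shows the same kind of type-dispatched link lookup with invented node layouts; it is an illustration only, not RTS code.

#include <stdio.h>
#include <stddef.h>

/* toy closure kinds for the sketch */
enum kind { KIND_A, KIND_B };

struct node {
    enum kind kind;
    struct node *link_a;   /* KIND_A nodes keep their list link here ...  */
    struct node *link_b;   /* ... while KIND_B nodes keep it here instead */
};

/* pick the link slot by type, as the switch above picks IND_STATIC_LINK,
 * THUNK_STATIC_LINK, FUN_STATIC_LINK or STATIC_LINK */
static struct node *next_node(struct node *p)
{
    switch (p->kind) {
    case KIND_A: return p->link_a;
    case KIND_B: return p->link_b;
    }
    return NULL;
}

int main(void)
{
    struct node c = { KIND_A, NULL, NULL };
    struct node b = { KIND_B, NULL, &c };
    struct node a = { KIND_A, &b,  NULL };

    for (struct node *p = &a; p != NULL; p = next_node(p)) {
        printf("visiting a node of kind %d\n", (int)p->kind);
    }
    return 0;
}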
 
+/* 
+   Check the sanity of a blocking queue starting at bqe, where closure is
+   the closure holding the blocking queue.
+   Note that in GUM we can have several different closure types in a
+   blocking queue.
+*/
+#if defined(PAR)
+void
+checkBQ (StgBlockingQueueElement *bqe, StgClosure *closure) 
+{
+  rtsBool end = rtsFalse;
+  StgInfoTable *info = get_itbl(closure);
+
+  ASSERT(info->type == MVAR || info->type == FETCH_ME_BQ || info->type == RBH);
+
+  do {
+    switch (get_itbl(bqe)->type) {
+    case BLOCKED_FETCH:
+    case TSO:
+      checkClosure((StgClosure *)bqe);
+      bqe = bqe->link;
+      end = (bqe==END_BQ_QUEUE);
+      break;
+    
+    case CONSTR:
+      checkClosure((StgClosure *)bqe);
+      end = rtsTrue;
+      break;
+
+    default:
+      barf("checkBQ: strange closure %d in blocking queue for closure %p (%s)\n", 
+          get_itbl(bqe)->type, closure, info_type(closure));
+    }
+  } while (!end);
+}
+#elif defined(GRAN)
+void
+checkBQ (StgTSO *bqe, StgClosure *closure) 
+{  
+  rtsBool end = rtsFalse;
+  StgInfoTable *info = get_itbl(closure);
+
+  ASSERT(info->type == MVAR);
+
+  do {
+    switch (get_itbl(bqe)->type) {
+    case BLOCKED_FETCH:
+    case TSO:
+      checkClosure((StgClosure *)bqe);
+      bqe = bqe->link;
+      end = (bqe==END_BQ_QUEUE);
+      break;
+    
+    default:
+      barf("checkBQ: strange closure %d in blocking queue for closure %p (%s)\n", 
+          get_itbl(bqe)->type, closure, info_type(closure));
+    }
+  } while (!end);
+}
+#endif
+    
+
+
+/*
+  This routine checks the sanity of the LAGA and GALA tables. They are 
+  implemented as lists through one hash table, LAtoGALAtable, because entries 
+  in both tables have the same structure:
+   - the LAGA table maps local addresses to global addresses; it starts
+     with liveIndirections
+   - the GALA table maps global addresses to local addresses; it starts 
+     with liveRemoteGAs
+*/
+
+#if defined(PAR)
+#include "Hash.h"
+
+/* hidden in parallel/Global.c; only accessed for testing here */
+extern GALA *liveIndirections;
+extern GALA *liveRemoteGAs;
+extern HashTable *LAtoGALAtable;
+
+void
+checkLAGAtable(rtsBool check_closures)
+{
+  GALA *gala, *gala0;
+  nat n=0, m=0; // debugging
+
+  for (gala = liveIndirections; gala != NULL; gala = gala->next) {
+    n++;
+    gala0 = lookupHashTable(LAtoGALAtable, (StgWord) gala->la);
+    ASSERT(!gala->preferred || gala == gala0);
+    ASSERT(LOOKS_LIKE_INFO_PTR(((StgClosure *)gala->la)->header.info));
+    ASSERT(gala->next!=gala); // detect direct loops
+    if ( check_closures ) {
+      checkClosure((StgClosure *)gala->la);
+    }
+  }
+
+  for (gala = liveRemoteGAs; gala != NULL; gala = gala->next) {
+    m++;
+    gala0 = lookupHashTable(LAtoGALAtable, (StgWord) gala->la);
+    ASSERT(!gala->preferred || gala == gala0);
+    ASSERT(LOOKS_LIKE_INFO_PTR(((StgClosure *)gala->la)->header.info));
+    ASSERT(gala->next!=gala); // detect direct loops
+    /*
+    if ( check_closures ) {
+      checkClosure((StgClosure *)gala->la);
+    }
+    */
+  }
+}
+#endif
+
 #endif /* DEBUG */