fix retainer profiling: add missing case for TSO
[ghc-hetmet.git] rts/sm/Evac.c
index 93d5d58..65da076 100644
@@ -139,15 +139,19 @@ copy_tag_nolock(StgClosure **p, const StgInfoTable *info,
     nat i;
 
     to = alloc_for_copy(size,gen);
-    *p = TAG_CLOSURE(tag,(StgClosure*)to);
-    src->header.info = (const StgInfoTable *)MK_FORWARDING_PTR(to);
-    
+
     from = (StgPtr)src;
     to[0] = (W_)info;
     for (i = 1; i < size; i++) { // unroll for small i
        to[i] = from[i];
     }
 
+    // if somebody else reads the forwarding pointer, we better make
+    // sure there's a closure at the end of it.
+    write_barrier();
+    *p = TAG_CLOSURE(tag,(StgClosure*)to);
+    src->header.info = (const StgInfoTable *)MK_FORWARDING_PTR(to);
+
 //  if (to+size+2 < bd->start + BLOCK_SIZE_W) {
 //      __builtin_prefetch(to + size + 2, 1);
 //  }
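
The reordering above is the heart of the lock-free copy: the closure body must be fully copied before the forwarding pointer that other GC threads can read is written. A minimal, self-contained sketch of that pattern, using stand-in types and a stand-in write_barrier() rather than the real RTS definitions:

    #include <stddef.h>

    typedef unsigned long W_;

    typedef struct {
        const void *info;     /* info pointer, or forwarding pointer once evacuated */
        W_ payload[4];        /* simplified fixed-size payload */
    } Closure;

    /* low bit tags the header word as a forwarding pointer */
    #define MK_FORWARDING_PTR(to)  ((const void *)((W_)(to) | 1UL))

    /* stand-in for the RTS write_barrier(): a full store barrier */
    #define write_barrier()  __sync_synchronize()

    static void copy_and_forward(Closure **p, Closure *src, Closure *to)
    {
        size_t i;

        to->info = src->info;                /* copy the header ...  */
        for (i = 0; i < 4; i++) {            /* ... and the payload  */
            to->payload[i] = src->payload[i];
        }

        /* publish only after the copy is complete, so a thread that reads
         * the forwarding pointer finds a well-formed closure behind it */
        write_barrier();
        *p = to;
        src->info = MK_FORWARDING_PTR(to);
    }

A reader that finds a forwarding pointer in the header then follows it, relying on the barrier having made the copied words visible first.
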
@@ -162,7 +166,7 @@ copy_tag_nolock(StgClosure **p, const StgInfoTable *info,
 
 /* Special version of copy() for when we only want to copy the info
  * pointer of an object, but reserve some padding after it.  This is
- * used to optimise evacuation of BLACKHOLEs.
+ * used to optimise evacuation of TSOs.
  */
 static rtsBool
 copyPart(StgClosure **p, StgClosure *src, nat size_to_reserve, 
@@ -191,7 +195,6 @@ spin:
 #endif
 
     to = alloc_for_copy(size_to_reserve, gen);
-    *p = (StgClosure *)to;
 
     from = (StgPtr)src;
     to[0] = info;
@@ -199,10 +202,9 @@ spin:
        to[i] = from[i];
     }
     
-#if defined(PARALLEL_GC)
     write_barrier();
-#endif
     src->header.info = (const StgInfoTable*)MK_FORWARDING_PTR(to);
+    *p = (StgClosure *)to;
     
 #ifdef PROFILING
     // We store the size of the just evacuated object in the LDV word so that
@@ -239,10 +241,11 @@ copy(StgClosure **p, const StgInfoTable *info,
 STATIC_INLINE void
 evacuate_large(StgPtr p)
 {
-  bdescr *bd = Bdescr(p);
+  bdescr *bd;
   generation *gen, *new_gen;
   gen_workspace *ws;
     
+  bd = Bdescr(p);
   gen = bd->gen;
   ACQUIRE_SPIN_LOCK(&gen->sync_large_objects);
 
@@ -271,7 +274,7 @@ evacuate_large(StgPtr p)
   
   /* link it on to the evacuated large object list of the destination gen
    */
-  new_gen = gen->to;
+  new_gen = bd->dest;
   if (new_gen < gct->evac_gen) {
       if (gct->eager_promotion) {
          new_gen = gct->evac_gen;
@@ -361,7 +364,7 @@ loop:
   tag = GET_CLOSURE_TAG(q);
   q = UNTAG_CLOSURE(q);
 
-  ASSERT(LOOKS_LIKE_CLOSURE_PTR(q));
+  ASSERTM(LOOKS_LIKE_CLOSURE_PTR(q), "invalid closure, info=%p", q->header.info);
 
   if (!HEAP_ALLOCED_GC(q)) {
 
@@ -412,8 +415,7 @@ loop:
           * on the CAF list, so don't do anything with it here (we'll
           * scavenge it later).
           */
-         if (((StgIndStatic *)q)->saved_info == NULL) {
-             if (*IND_STATIC_LINK((StgClosure *)q) == NULL) {
+          if (*IND_STATIC_LINK((StgClosure *)q) == NULL) {
 #ifndef THREADED_RTS
                  *IND_STATIC_LINK((StgClosure *)q) = gct->static_objects;
                  gct->static_objects = (StgClosure *)q;
@@ -426,7 +428,6 @@ loop:
                       gct->static_objects = (StgClosure *)q;
                   }
 #endif
-             }
          }
          return;
          
@@ -484,14 +485,7 @@ loop:
       /* evacuate large objects by re-linking them onto a different list.
        */
       if (bd->flags & BF_LARGE) {
-         info = get_itbl(q);
-         if (info->type == TSO && 
-             ((StgTSO *)q)->what_next == ThreadRelocated) {
-             q = (StgClosure *)((StgTSO *)q)->_link;
-              *p = q;
-             goto loop;
-         }
-         evacuate_large((P_)q);
+          evacuate_large((P_)q);
          return;
       }
       
@@ -619,31 +613,51 @@ loop:
 
   case FUN:
   case IND_PERM:
-  case IND_OLDGEN_PERM:
   case CONSTR:
       copy_tag_nolock(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen,tag);
       return;
 
+  case BLACKHOLE:
+  {
+      StgClosure *r;
+      const StgInfoTable *i;
+      r = ((StgInd*)q)->indirectee;
+      if (GET_CLOSURE_TAG(r) == 0) {
+          i = r->header.info;
+          if (IS_FORWARDING_PTR(i)) {
+              r = (StgClosure *)UN_FORWARDING_PTR(i);
+              i = r->header.info;
+          }
+          if (i == &stg_TSO_info
+              || i == &stg_WHITEHOLE_info 
+              || i == &stg_BLOCKING_QUEUE_CLEAN_info
+              || i == &stg_BLOCKING_QUEUE_DIRTY_info) {
+              copy(p,info,q,sizeofW(StgInd),gen);
+              return;
+          }
+          ASSERT(i != &stg_IND_info);
+      }
+      q = r;
+      *p = r;
+      goto loop;
+  }
+
+  case BLOCKING_QUEUE:
   case WEAK:
-  case STABLE_NAME:
-      copy_tag(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen,tag);
+  case PRIM:
+  case MUT_PRIM:
+      copy(p,info,q,sizeW_fromITBL(INFO_PTR_TO_STRUCT(info)),gen);
       return;
 
   case BCO:
       copy(p,info,q,bco_sizeW((StgBCO *)q),gen);
       return;
 
-  case CAF_BLACKHOLE:
-  case BLACKHOLE:
-      copyPart(p,q,BLACKHOLE_sizeW(),sizeofW(StgHeader),gen);
-      return;
-
   case THUNK_SELECTOR:
       eval_thunk_selector(p, (StgSelector *)q, rtsTrue);
       return;
 
   case IND:
-  case IND_OLDGEN:
     // follow chains of indirections, don't evacuate them 
     q = ((StgInd*)q)->indirectee;
     *p = q;
@@ -654,6 +668,7 @@ loop:
   case RET_BIG:
   case RET_DYN:
   case UPDATE_FRAME:
+  case UNDERFLOW_FRAME:
   case STOP_FRAME:
   case CATCH_FRAME:
   case CATCH_STM_FRAME:
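
The new BLACKHOLE case above (and the matching one added to eval_thunk_selector further down) hinges on one test: has the blackhole been updated yet? A hedged sketch of that decision with stand-in types; the real code compares header info pointers and first untags the indirectee and looks through any forwarding pointer:

    #include <stdbool.h>

    /* stand-in closure types; the RTS checks info pointers instead */
    typedef enum { TSO, WHITEHOLE, BLOCKING_QUEUE, CONSTR, THUNK } ClosureType;

    typedef struct Closure {
        ClosureType     type;
        struct Closure *indirectee;   /* only meaningful for a blackhole here */
    } Closure;

    /* true  -> the blackhole is still owned by a thread, so it must be kept
     *          and copied as a small indirection (sizeofW(StgInd) words);
     * false -> it has already been updated with a value, so evacuation can
     *          follow indirectee instead, exactly like the IND case. */
    static bool blackhole_must_be_copied(const Closure *bh)
    {
        switch (bh->indirectee->type) {
        case TSO:
        case WHITEHOLE:
        case BLOCKING_QUEUE:
            return true;
        default:
            return false;
        }
    }
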
@@ -688,31 +703,28 @@ loop:
       return;
 
   case TSO:
-    {
-      StgTSO *tso = (StgTSO *)q;
+      copy(p,info,q,sizeofW(StgTSO),gen);
+      evacuate((StgClosure**)&(((StgTSO*)(*p))->stackobj));
+      return;
 
-      /* Deal with redirected TSOs (a TSO that's had its stack enlarged).
-       */
-      if (tso->what_next == ThreadRelocated) {
-       q = (StgClosure *)tso->_link;
-       *p = q;
-       goto loop;
-      }
+  case STACK:
+    {
+      StgStack *stack = (StgStack *)q;
 
-      /* To evacuate a small TSO, we need to adjust the stack pointer
+      /* To evacuate a small STACK, we need to adjust the stack pointer
        */
       {
-         StgTSO *new_tso;
+          StgStack *new_stack;
          StgPtr r, s;
           rtsBool mine;
 
-         mine = copyPart(p,(StgClosure *)tso, tso_sizeW(tso), 
-                          sizeofW(StgTSO), gen);
+          mine = copyPart(p,(StgClosure *)stack, stack_sizeW(stack),
+                          sizeofW(StgStack), gen);
           if (mine) {
-              new_tso = (StgTSO *)*p;
-              move_TSO(tso, new_tso);
-              for (r = tso->sp, s = new_tso->sp;
-                   r < tso->stack+tso->stack_size;) {
+              new_stack = (StgStack *)*p;
+              move_STACK(stack, new_stack);
+              for (r = stack->sp, s = new_stack->sp;
+                   r < stack->stack + stack->stack_size;) {
                   *s++ = *r++;
               }
           }
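
With TSOs and stacks now separate objects, the STACK case above uses copyPart() only for the StgStack header and then copies the live words by hand: everything below sp is dead, so just the region [sp, stack + stack_size) needs to move. A rough sketch with stand-in types (in the RTS, move_STACK() is what points the new sp at the matching offset):

    typedef unsigned long W_;

    typedef struct {
        W_   stack_size;   /* size of the stack area, in words              */
        W_  *sp;           /* stack pointer; the stack grows downwards, so  */
                           /* [sp, stack + stack_size) is the live portion  */
        W_   stack[];      /* the stack area itself                         */
    } Stack;

    /* copy the live part of 'from' into 'to'; to->sp is assumed to already
     * point at the corresponding offset in the new stack area */
    static void copy_live_stack(const Stack *from, Stack *to)
    {
        const W_ *r = from->sp;
        W_       *s = to->sp;

        while (r < from->stack + from->stack_size) {
            *s++ = *r++;
        }
    }
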
@@ -720,30 +732,10 @@ loop:
       }
     }
 
-  case TREC_HEADER: 
-      copy(p,info,q,sizeofW(StgTRecHeader),gen);
-      return;
-
-  case TVAR_WATCH_QUEUE:
-      copy(p,info,q,sizeofW(StgTVarWatchQueue),gen);
-      return;
-
-  case TVAR:
-      copy(p,info,q,sizeofW(StgTVar),gen);
-      return;
-    
   case TREC_CHUNK:
       copy(p,info,q,sizeofW(StgTRecChunk),gen);
       return;
 
-  case ATOMIC_INVARIANT:
-      copy(p,info,q,sizeofW(StgAtomicInvariant),gen);
-      return;
-
-  case INVARIANT_CHECK_QUEUE:
-      copy(p,info,q,sizeofW(StgInvariantCheckQueue),gen);
-      return;
-
   default:
     barf("evacuate: strange closure type %d", (int)(INFO_PTR_TO_STRUCT(info)->type));
   }
@@ -770,11 +762,7 @@ unchain_thunk_selectors(StgSelector *p, StgClosure *val)
     prev = NULL;
     while (p)
     {
-#ifdef THREADED_RTS
         ASSERT(p->header.info == &stg_WHITEHOLE_info);
-#else
-        ASSERT(p->header.info == &stg_BLACKHOLE_info);
-#endif
         // val must be in to-space.  Not always: when we recursively
         // invoke eval_thunk_selector(), the recursive calls will not 
         // evacuate the value (because we want to select on the value,
@@ -797,7 +785,13 @@ unchain_thunk_selectors(StgSelector *p, StgClosure *val)
             // indirection pointing to itself, and we want the program
             // to deadlock if it ever enters this closure, so
             // BLACKHOLE is correct.
-            SET_INFO(p, &stg_BLACKHOLE_info);
+
+            // XXX we do not have BLACKHOLEs any more; replace with
+            // a THUNK_SELECTOR again.  This will go into a loop if it is
+            // entered, and should result in a NonTermination exception.
+            ((StgThunk *)p)->payload[0] = val;
+            write_barrier();
+            SET_INFO(p, &stg_sel_0_upd_info);
         } else {
             ((StgInd *)p)->indirectee = val;
             write_barrier();
@@ -827,7 +821,7 @@ eval_thunk_selector (StgClosure **q, StgSelector * p, rtsBool evac)
     prev_thunk_selector = NULL;
     // this is a chain of THUNK_SELECTORs that we are going to update
     // to point to the value of the current THUNK_SELECTOR.  Each
-    // closure on the chain is a BLACKHOLE, and points to the next in the
+    // closure on the chain is a WHITEHOLE, and points to the next in the
     // chain with payload[0].
 
 selector_chain:
@@ -865,7 +859,7 @@ selector_chain:
     }
 
 
-    // BLACKHOLE the selector thunk, since it is now under evaluation.
+    // WHITEHOLE the selector thunk, since it is now under evaluation.
     // This is important to stop us going into an infinite loop if
     // this selector thunk eventually refers to itself.
 #if defined(THREADED_RTS)
@@ -877,7 +871,7 @@ selector_chain:
         } while (info_ptr == (W_)&stg_WHITEHOLE_info);
 
         // make sure someone else didn't get here first...
-        if (IS_FORWARDING_PTR(p) || 
+        if (IS_FORWARDING_PTR(info_ptr) || 
             INFO_PTR_TO_STRUCT(info_ptr)->type != THUNK_SELECTOR) {
             // v. tricky now.  The THUNK_SELECTOR has been evacuated
             // by another thread, and is now either a forwarding ptr or IND.
@@ -898,7 +892,7 @@ selector_chain:
 #else
     // Save the real info pointer (NOTE: not the same as get_itbl()).
     info_ptr = (StgWord)p->header.info;
-    SET_INFO(p,&stg_BLACKHOLE_info);
+    SET_INFO(p,&stg_WHITEHOLE_info);
 #endif
 
     field = INFO_PTR_TO_STRUCT(info_ptr)->layout.selector_offset;
@@ -949,12 +943,8 @@ selector_loop:
               // For the purposes of LDV profiling, we have destroyed
               // the original selector thunk, p.
               SET_INFO(p, (StgInfoTable *)info_ptr);
-              LDV_RECORD_DEAD_FILL_SLOP_DYNAMIC((StgClosure *)p);
-#if defined(THREADED_RTS)
+              OVERWRITING_CLOSURE(p);
               SET_INFO(p, &stg_WHITEHOLE_info);
-#else
-              SET_INFO(p, &stg_BLACKHOLE_info);
-#endif
 #endif
 
               // the closure in val is now the "value" of the
@@ -970,8 +960,6 @@ selector_loop:
                   switch (info->type) {
                   case IND:
                   case IND_PERM:
-                  case IND_OLDGEN:
-                  case IND_OLDGEN_PERM:
                   case IND_STATIC:
                       val = ((StgInd *)val)->indirectee;
                       goto val_loop;
@@ -1005,13 +993,38 @@ selector_loop:
 
       case IND:
       case IND_PERM:
-      case IND_OLDGEN:
-      case IND_OLDGEN_PERM:
       case IND_STATIC:
           // Again, we might need to untag a constructor.
           selectee = UNTAG_CLOSURE( ((StgInd *)selectee)->indirectee );
          goto selector_loop;
 
+      case BLACKHOLE:
+      {
+          StgClosure *r;
+          const StgInfoTable *i;
+          r = ((StgInd*)selectee)->indirectee;
+
+          // establish whether this BH has been updated, and is now an
+          // indirection, as in evacuate().
+          if (GET_CLOSURE_TAG(r) == 0) {
+              i = r->header.info;
+              if (IS_FORWARDING_PTR(i)) {
+                  r = (StgClosure *)UN_FORWARDING_PTR(i);
+                  i = r->header.info;
+              }
+              if (i == &stg_TSO_info
+                  || i == &stg_WHITEHOLE_info 
+                  || i == &stg_BLOCKING_QUEUE_CLEAN_info
+                  || i == &stg_BLOCKING_QUEUE_DIRTY_info) {
+                  goto bale_out;
+              }
+              ASSERT(i != &stg_IND_info);
+          }
+
+          selectee = UNTAG_CLOSURE( ((StgInd *)selectee)->indirectee );
+          goto selector_loop;
+      }
+
       case THUNK_SELECTOR:
       {
          StgClosure *val;
@@ -1046,8 +1059,6 @@ selector_loop:
       case THUNK_1_1:
       case THUNK_0_2:
       case THUNK_STATIC:
-      case CAF_BLACKHOLE:
-      case BLACKHOLE:
          // not evaluated yet 
          goto bale_out;