Add ASSERTs to all calls of nameModule
diff --git a/rts/Sparks.c b/rts/Sparks.c
index 5ea296d..2e9e61c 100644
--- a/rts/Sparks.c
+++ b/rts/Sparks.c
@@ -168,8 +168,8 @@ newSpark (StgRegTable *reg, StgClosure *p)
  * spark pool and the spark pool only contains sparkable closures 
  * -------------------------------------------------------------------------- */
 
-void
-updateSparkQueue (Capability *cap)
+static void
+pruneSparkQueue (Capability *cap)
 { 
     StgClosure *spark, **sparkp, **to_sparkp;
     nat n, pruned_sparks; // stats only
@@ -190,15 +190,15 @@ updateSparkQueue (Capability *cap)
         ASSERT(*sparkp!=NULL);
         ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgClosure *)*sparkp)));
         // ToDo?: statistics gathering here (also for GUM!)
-        spark = isAlive(*sparkp);
-        if (spark != NULL && closure_SHOULD_SPARK(spark)) {
+        spark = *sparkp;
+        if (!closure_SHOULD_SPARK(spark)) {
+            pruned_sparks++;
+        } else {
             *to_sparkp++ = spark;
             if (to_sparkp == pool->lim) {
                 to_sparkp = pool->base;
             }
             n++;
-        } else {
-            pruned_sparks++;
         }
         sparkp++;
         if (sparkp == pool->lim) {
@@ -209,16 +209,23 @@ updateSparkQueue (Capability *cap)
        
     PAR_TICKY_MARK_SPARK_QUEUE_END(n);
        
-    debugTrace(DEBUG_sched, 
-               "updated %d sparks and pruned %d sparks",
-               n, pruned_sparks);
+    debugTrace(DEBUG_sched, "pruned %d sparks", pruned_sparks);
     
     debugTrace(DEBUG_sched,
-               "new spark queue len=%d; (hd=%p; tl=%p)\n",
+               "new spark queue len=%d; (hd=%p; tl=%p)",
                sparkPoolSize(pool), pool->hd, pool->tl);
 }
 
 void
+pruneSparkQueues (void)
+{
+    nat i;
+    for (i = 0; i < n_capabilities; i++) {
+        pruneSparkQueue(&capabilities[i]);
+    }
+}
+
+void
 traverseSparkQueue (evac_fn evac, void *user, Capability *cap)
 {
     StgClosure **sparkp;
@@ -227,7 +234,7 @@ traverseSparkQueue (evac_fn evac, void *user, Capability *cap)
     pool = &(cap->r.rSparks);
     sparkp = pool->hd;
     while (sparkp != pool->tl) {
-        evac(sparkp, user);
+        evac(user, sparkp);
         sparkp++;
         if (sparkp == pool->lim) {
             sparkp = pool->base;
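
The last hunk swaps the argument order of the evac callback so that the user pointer comes first and the root slot second. A minimal sketch of a callback written against that convention follows; the countSparkRoot helper and the call site are hypothetical, and it is assumed that the standard RTS headers declare StgClosure, nat and traverseSparkQueue:

#include "Rts.h"
#include "Sparks.h"

/* Hypothetical callback matching the new (user, root) argument order:
 * counts the sparks in the pool, threading the counter through 'user'. */
static void
countSparkRoot (void *user, StgClosure **root)
{
    (void)root;                 /* a real GC callback would evacuate *root here */
    (*(nat *)user)++;
}

/* Hypothetical call site:
 *     nat nSparks = 0;
 *     traverseSparkQueue(countSparkRoot, &nSparks, cap);
 */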