}
#endif
- // tell the STM to discard any cached closures it's hoping to re-use
- stmPreGCHook();
-
// tell the stats department that we've started a GC
stat_startGC();
-#ifdef DEBUG
- // check for memory leaks if DEBUG is on
- memInventory();
-#endif
+ // tell the STM to discard any cached closures it's hoping to re-use
+ stmPreGCHook();
#ifdef DEBUG
mutlist_MUTVARS = 0;
}
#endif
+#ifdef DEBUG
+ // check for memory leaks if DEBUG is on
+ memInventory(traceClass(DEBUG_gc));
+#endif
+
// check stack sanity *before* GC (ToDo: check all threads)
IF_DEBUG(sanity, checkFreeListSanity());
static_objects = END_OF_STATIC_LIST;
scavenged_static_objects = END_OF_STATIC_LIST;
-#ifdef THREADED_RTS
- initSpinLock(&static_objects_sync);
- initSpinLock(&recordMutableGen_sync);
- initSpinLock(&gc_alloc_block_sync);
-#endif
-
// Initialise all the generations/steps that we're collecting.
for (g = 0; g <= N; g++) {
init_collected_gen(g,n_gc_threads);
resize_generations();
// Guess the amount of live data for stats.
- live = calcLive();
+ live = calcLiveBlocks() * BLOCK_SIZE_W;
+ debugTrace(DEBUG_gc, "Slop: %ldKB",
+ (live - calcLiveWords()) / (1024/sizeof(W_)));
// Free the small objects allocated via allocate(), since this will
// all have been copied into G0S1 now.
#ifdef DEBUG
// check for memory leaks if DEBUG is on
- memInventory();
+ memInventory(traceClass(DEBUG_gc));
#endif
#ifdef RTS_GTK_FRONTPANEL
gct = saved_gct;
}
+/* -----------------------------------------------------------------------------
+ * Mark all nodes pointed to by sparks in the spark queues (for GC).  Does an
+ * implicit slide, i.e. after marking, all live sparks are at the beginning of
+ * the spark pool and the spark pool only contains sparkable closures.
+ * -------------------------------------------------------------------------- */
+
+#ifdef THREADED_RTS
+static void
+markSparkQueue (evac_fn evac, Capability *cap)
+{
+    StgClosure **sparkp, **to_sparkp;
+    nat n, pruned_sparks; // stats only
+    StgSparkPool *pool;
+
+    PAR_TICKY_MARK_SPARK_QUEUE_START();
+
+    n = 0;
+    pruned_sparks = 0;
+
+    // Each capability owns one spark pool; we only touch this capability's.
+    pool = &(cap->r.rSparks);
+
+    ASSERT_SPARK_POOL_INVARIANTS(pool);
+
+#if defined(PARALLEL_HASKELL)
+    // stats only
+    n = 0;
+    pruned_sparks = 0;
+#endif
+
+    // Walk the pool from hd to tl, compacting in place: sparkp is the read
+    // cursor, to_sparkp the write cursor.  The pool is a ring buffer —
+    // NOTE(review): this assumes pool->lim is one-past-the-end and base..lim
+    // is the backing array; both cursors wrap from lim back to base.
+    sparkp = pool->hd;
+    to_sparkp = pool->hd;
+    while (sparkp != pool->tl) {
+        ASSERT(*sparkp!=NULL);
+        ASSERT(LOOKS_LIKE_CLOSURE_PTR(((StgClosure *)*sparkp)));
+        // ToDo?: statistics gathering here (also for GUM!)
+        if (closure_SHOULD_SPARK(*sparkp)) {
+            // Still worth sparking: evacuate (marks it live for the GC,
+            // updating *sparkp to the new location) and keep it in the pool.
+            evac(sparkp);
+            *to_sparkp++ = *sparkp;
+            if (to_sparkp == pool->lim) {
+                to_sparkp = pool->base;
+            }
+            n++;
+        } else {
+            // No longer sparkable (e.g. already evaluated): drop it.  The
+            // write cursor does not advance, so the entry is overwritten by
+            // the next kept spark — this is the "implicit slide".
+            pruned_sparks++;
+        }
+        sparkp++;
+        if (sparkp == pool->lim) {
+            sparkp = pool->base;
+        }
+    }
+    // hd is unchanged; shrinking the pool is done by pulling tl back to the
+    // end of the compacted (kept) region.
+    pool->tl = to_sparkp;
+
+    PAR_TICKY_MARK_SPARK_QUEUE_END(n);
+
+#if defined(PARALLEL_HASKELL)
+    debugTrace(DEBUG_sched,
+               "marked %d sparks and pruned %d sparks on [%x]",
+               n, pruned_sparks, mytid);
+#else
+    debugTrace(DEBUG_sched,
+               "marked %d sparks and pruned %d sparks",
+               n, pruned_sparks);
+#endif
+
+    debugTrace(DEBUG_sched,
+               "new spark queue len=%d; (hd=%p; tl=%p)\n",
+               sparkPoolSize(pool), pool->hd, pool->tl);
+}
+#endif
+
/* ---------------------------------------------------------------------------
Where are the roots that we know about?