// ----------------------------------------------------------------------
// Run the current thread
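+ // (debug builds only: assert that we still hold this Capability
+ // and that 'task' is the Task currently running on it)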
+ ASSERT_FULL_CAPABILITY_INVARIANTS(cap,task);
+
prev_what_next = t->what_next;

// restore this thread's saved errno before it starts to
// be running again; see code below, where errno is saved
// back into the TSO once the thread has stopped.
errno = t->saved_errno;

// ... (the thread runs; code elided) ...

// save the current errno in this thread
t->saved_errno = errno;
-#ifdef SMP
+#if defined(THREADED_RTS)
// If ret is ThreadBlocked, and this Task is bound to the TSO that
// blocked, we are in limbo - the TSO is now owned by whatever it
// is blocked on, and may in fact already have been woken up,
// perhaps even on a different Capability.  It may be the case
// that task->cap != cap; we had better yield this Capability
// immediately and return to normality.
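
// A minimal sketch of the handling this comment refers to, assuming
// the enclosing scheduler loop (not necessarily the exact code):
//   if (ret == ThreadBlocked) { continue; }
// The top of the loop re-acquires a Capability before running again.

// When we have +RTS -i0 and we're heap profiling, do a census at
// every GC.  This lets us get repeatable runs for debugging.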
if (performHeapProfile ||
(RtsFlags.ProfFlags.profileInterval==0 &&
RtsFlags.ProfFlags.doHeapProfile && ready_to_gc)) {
+
+ // checking black holes is necessary before GC, otherwise
+ // there may be threads that are unreachable except by the
+ // blackhole queue, which the GC will consider to be
+ // deadlocked.
+ scheduleCheckBlackHoles(&MainCapability);
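+
+ // (scheduleCheckBlackHoles walks the blackhole queue and wakes any
+ // thread whose blackhole has since been updated, moving it back to
+ // a run queue where the GC can see it)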
+
+ IF_DEBUG(scheduler, sched_belch("garbage collecting before heap census"));
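+ // (the rtsTrue argument requests a major collection, so the census
+ // below sees the whole live heap)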
GarbageCollect(GetRoots, rtsTrue);
+
+ IF_DEBUG(scheduler, sched_belch("performing heap census"));
heapCensus();
+
performHeapProfile = rtsFalse;
return rtsTrue; // true <=> we already GC'd
}
/* We might have GC'd, mark the TSO dirty again */
dirtyTSO(tso);
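+ // (the dirty flag tells the GC that this TSO's stack may have been
+ // mutated and must be scanned at the next collection)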
+ IF_DEBUG(sanity, checkTSO(tso));
+
return &cap->r;
}