case CONSTR_0_2:
case CONSTR_2_0:
case IND_PERM:
- case IND_OLDGEN:
- case IND_OLDGEN_PERM:
case BLACKHOLE:
- case CAF_BLACKHOLE:
- case STABLE_NAME:
+ case PRIM:
+ case MUT_PRIM:
case MUT_VAR_CLEAN:
case MUT_VAR_DIRTY:
case CONSTR_STATIC:
return sizeW_fromITBL(info);
}
+ case BLOCKING_QUEUE:
+ {
+ StgBlockingQueue *bq = (StgBlockingQueue *)p;
+
+ // NO: the BH might have been updated now
+ // ASSERT(get_itbl(bq->bh)->type == BLACKHOLE);
+ ASSERT(LOOKS_LIKE_CLOSURE_PTR(bq->bh));
+
+ ASSERT(get_itbl(bq->owner)->type == TSO);
+ ASSERT(bq->queue == (MessageBlackHole*)END_TSO_QUEUE
+ || get_itbl(bq->queue)->type == TSO);
+ ASSERT(bq->link == (StgBlockingQueue*)END_TSO_QUEUE ||
+ get_itbl(bq->link)->type == IND ||
+ get_itbl(bq->link)->type == BLOCKING_QUEUE);
+
+ return sizeofW(StgBlockingQueue);
+ }
+
case BCO: {
StgBCO *bco = (StgBCO *)p;
ASSERT(LOOKS_LIKE_CLOSURE_PTR(bco->instrs));
checkTSO((StgTSO *)p);
return tso_sizeW((StgTSO *)p);
- case TVAR_WATCH_QUEUE:
- {
- StgTVarWatchQueue *wq = (StgTVarWatchQueue *)p;
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->next_queue_entry));
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(wq->prev_queue_entry));
- return sizeofW(StgTVarWatchQueue);
- }
-
- case INVARIANT_CHECK_QUEUE:
- {
- StgInvariantCheckQueue *q = (StgInvariantCheckQueue *)p;
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->invariant));
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->my_execution));
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(q->next_queue_entry));
- return sizeofW(StgInvariantCheckQueue);
- }
-
- case ATOMIC_INVARIANT:
- {
- StgAtomicInvariant *invariant = (StgAtomicInvariant *)p;
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(invariant->code));
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(invariant->last_execution));
- return sizeofW(StgAtomicInvariant);
- }
-
- case TVAR:
- {
- StgTVar *tv = (StgTVar *)p;
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->current_value));
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(tv->first_watch_queue_entry));
- return sizeofW(StgTVar);
- }
-
case TREC_CHUNK:
{
nat i;
}
return sizeofW(StgTRecChunk);
}
-
- case TREC_HEADER:
- {
- StgTRecHeader *trec = (StgTRecHeader *)p;
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> enclosing_trec));
- ASSERT(LOOKS_LIKE_CLOSURE_PTR(trec -> current_chunk));
- return sizeofW(StgTRecHeader);
- }
default:
barf("checkClosure (closure type %d)", info->type);
return;
}
+ ASSERT(tso->_link == END_TSO_QUEUE ||
+ tso->_link->header.info == &stg_MVAR_TSO_QUEUE_info ||
+ tso->_link->header.info == &stg_TSO_info);
+ ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso->block_info.closure));
+ ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso->bq));
+ ASSERT(LOOKS_LIKE_CLOSURE_PTR(tso->blocked_exceptions));
+
ASSERT(stack <= sp && sp < stack_end);
checkStackChunk(sp, stack_end);
if (checkTSOs)
checkTSO(tso);
- while (tso->what_next == ThreadRelocated) {
- tso = tso->_link;
- }
+ tso = deRefTSO(tso);
// If this TSO is dirty and in an old generation, it better
// be on the mutable list.
reportUnmarkedBlocks();
}
+// Sanity-check a Capability's run queue.  The run queue is kept as a
+// doubly-linked list: forward links via tso->_link, backward links via
+// tso->block_info.prev.  Walk the list from run_queue_hd and assert
+// that the two link directions agree at every element, and that
+// run_queue_tl points at the last element reached (END_TSO_QUEUE for
+// an empty queue).
+void
+checkRunQueue(Capability *cap)
+{
+ StgTSO *prev, *tso;
+ prev = END_TSO_QUEUE;
+ for (tso = cap->run_queue_hd; tso != END_TSO_QUEUE;
+ prev = tso, tso = tso->_link) {
+ // the predecessor's forward link must point to this TSO
+ ASSERT(prev == END_TSO_QUEUE || prev->_link == tso);
+ // this TSO's backward link must point to its predecessor
+ ASSERT(tso->block_info.prev == prev);
+ }
+ // the tail pointer must reference the final element we saw
+ ASSERT(cap->run_queue_tl == prev);
+}
/* -----------------------------------------------------------------------------
Memory leak detection