+// Search 'trec' and each of its enclosing (outer) transactions, innermost
+// first, for the TRecEntry that records an access to 'tvar'.  Returns the
+// entry, or NULL if no transaction in the chain has touched the TVar.
+// If 'in' is non-NULL, *in receives the TRecHeader whose entry list holds
+// the returned entry (meaningless when the result is NULL).
+static TRecEntry *get_entry_for(StgTRecHeader *trec, StgTVar *tvar, StgTRecHeader **in) {
+  TRecEntry *result = NULL;
+
+  TRACE("%p : get_entry_for TVar %p", trec, tvar);
+  ASSERT(trec != NO_TREC);
+
+  do {
+    FOR_EACH_ENTRY(trec, e, {
+      if (e -> tvar == tvar) {
+        result = e;
+        if (in != NULL) {
+          *in = trec;
+        }
+        BREAK_FOR_EACH;
+      }
+    });
+    // Not found at this nesting level: retry in the enclosing transaction.
+    trec = trec -> enclosing_trec;
+  } while (result == NULL && trec != NO_TREC);
+
+  return result;
+}
+
+/*......................................................................*/
+
+/*
+ * Add/remove links between an invariant and its TVars. The caller must have
+ * locked the TVars involved and the invariant.
+ */
+
+// Unhook 'inv' from the watch queue of every TVar accessed by its most
+// recent execution (inv -> last_execution).  For each entry in that
+// execution's trec we scan the TVar's doubly-linked watch queue, splice out
+// the node whose closure is this invariant, and return the node to the
+// capability's free pool.  Finally last_execution is reset to NO_TREC,
+// marking the invariant as currently disconnected.
+// Caller must have locked the TVars involved and the invariant.
+static void disconnect_invariant(Capability *cap,
+                                 StgAtomicInvariant *inv) {
+  StgTRecHeader *last_execution = inv -> last_execution;
+
+  TRACE("unhooking last execution inv=%p trec=%p", inv, last_execution);
+
+  FOR_EACH_ENTRY(last_execution, e, {
+    StgTVar *s = e -> tvar;
+    StgTVarWatchQueue *q = s -> first_watch_queue_entry;
+    StgBool found = FALSE;
+    TRACE("  looking for trec on tvar=%p", s);
+    for (q = s -> first_watch_queue_entry;
+         q != END_STM_WATCH_QUEUE;
+         q = q -> next_queue_entry) {
+      if (q -> closure == (StgClosure*)inv) {
+        StgTVarWatchQueue *pq;
+        StgTVarWatchQueue *nq;
+        nq = q -> next_queue_entry;
+        pq = q -> prev_queue_entry;
+        // Splice q out of the doubly-linked watch queue.
+        if (nq != END_STM_WATCH_QUEUE) {
+          nq -> prev_queue_entry = pq;
+        }
+        if (pq != END_STM_WATCH_QUEUE) {
+          pq -> next_queue_entry = nq;
+        } else {
+          // q was the head of the queue: advance the TVar's head pointer.
+          ASSERT (s -> first_watch_queue_entry == q);
+          s -> first_watch_queue_entry = nq;
+        }
+        TRACE("  found it in watch queue entry %p", q);
+        free_stg_tvar_watch_queue(cap, q);
+        found = TRUE;
+        break;
+      }
+    }
+    // Every TVar in last_execution must still be watching this invariant.
+    // NOTE(review): 'found' is read only by ASSERT, so it is unused when
+    // assertions are compiled out — harmless, but may warn.
+    ASSERT(found);
+  });
+  inv -> last_execution = NO_TREC;
+}
+
+// Hook 'inv' onto the watch queue of every TVar accessed by 'my_execution',
+// and record that trec as the invariant's last execution.  Each entry is
+// also refreshed so that last_execution holds the values the TVars will
+// contain once the enclosing (committing) transaction has finished.
+// Caller must have locked the TVars involved and the invariant.
+static void connect_invariant_to_trec(Capability *cap,
+                                      StgAtomicInvariant *inv,
+                                      StgTRecHeader *my_execution) {
+  TRACE("connecting execution inv=%p trec=%p", inv, my_execution);
+
+  // The invariant must currently be unhooked (see disconnect_invariant).
+  ASSERT(inv -> last_execution == NO_TREC);
+
+  FOR_EACH_ENTRY(my_execution, e, {
+    StgTVar *s = e -> tvar;
+    StgTVarWatchQueue *q = alloc_stg_tvar_watch_queue(cap, (StgClosure*)inv);
+    StgTVarWatchQueue *fq = s -> first_watch_queue_entry;
+
+    // We leave "last_execution" holding the values that will be
+    // in the heap after the transaction we're in the process
+    // of committing has finished.
+    TRecEntry *entry = get_entry_for(my_execution -> enclosing_trec, s, NULL);
+    if (entry != NULL) {
+      e -> expected_value = entry -> new_value;
+      e -> new_value = entry -> new_value;
+    }
+
+    // Push the freshly allocated watch-queue node on the front of s's queue.
+    TRACE("  linking trec on tvar=%p value=%p q=%p", s, e -> expected_value, q);
+    q -> next_queue_entry = fq;
+    q -> prev_queue_entry = END_STM_WATCH_QUEUE;
+    if (fq != END_STM_WATCH_QUEUE) {
+      fq -> prev_queue_entry = q;
+    }
+    s -> first_watch_queue_entry = q;
+  });
+
+  inv -> last_execution = my_execution;
+}
+
+/*
+ * Add a new invariant to the trec's list of invariants to check on commit
+ */
+// Register a new invariant ('code', a closure to run at commit time) on the
+// given transaction.  The invariant starts with last_execution == NO_TREC,
+// which signals that it was proposed in the current atomic block and has
+// never been executed; a check-queue entry linking it to 'trec' is pushed
+// onto trec -> invariants_to_check.
+void stmAddInvariantToCheck(Capability *cap,
+                            StgTRecHeader *trec,
+                            StgClosure *code) {
+  StgAtomicInvariant *invariant;
+  StgInvariantCheckQueue *q;
+  TRACE("%p : stmAddInvariantToCheck closure=%p", trec, code);
+  ASSERT(trec != NO_TREC);
+  ASSERT(trec -> state == TREC_ACTIVE ||
+         trec -> state == TREC_CONDEMNED);
+
+
+  // 1. Allocate an StgAtomicInvariant, set last_execution to NO_TREC
+  //    to signal that this is a new invariant in the current atomic block
+
+  invariant = (StgAtomicInvariant *) allocate(cap, sizeofW(StgAtomicInvariant));
+  TRACE("%p : stmAddInvariantToCheck allocated invariant=%p", trec, invariant);
+  SET_HDR (invariant, &stg_ATOMIC_INVARIANT_info, CCS_SYSTEM);
+  invariant -> code = code;
+  invariant -> last_execution = NO_TREC;
+
+  // 2. Allocate an StgInvariantCheckQueue entry, link it to the current trec
+
+  q = alloc_stg_invariant_check_queue(cap, invariant);
+  TRACE("%p : stmAddInvariantToCheck allocated q=%p", trec, q);
+  q -> invariant = invariant;
+  q -> my_execution = NO_TREC;  // filled in when the invariant is executed
+  q -> next_queue_entry = trec -> invariants_to_check;
+  trec -> invariants_to_check = q;
+
+  TRACE("%p : stmAddInvariantToCheck done", trec);
+}
+
+/*
+ * Fill in the trec's list of invariants that might be violated by the
+ * current transaction.
+ */
+
+// Extend trec -> invariants_to_check with every invariant watching a TVar
+// that this (top-level) transaction updates, then return the resulting list.
+// Each updated TVar is locked while its watch queue is scanned; invariants
+// already present in the list are not added twice.
+StgInvariantCheckQueue *stmGetInvariantsToCheck(Capability *cap, StgTRecHeader *trec) {
+  StgTRecChunk *c;
+  TRACE("%p : stmGetInvariantsToCheck, head was %p",
+        trec,
+        trec -> invariants_to_check);
+
+  ASSERT(trec != NO_TREC);
+  ASSERT ((trec -> state == TREC_ACTIVE) ||
+          (trec -> state == TREC_WAITING) ||
+          (trec -> state == TREC_CONDEMNED));
+  // Only top-level transactions check invariants.
+  ASSERT(trec -> enclosing_trec == NO_TREC);
+
+  lock_stm(trec);
+  // Walk the trec's chunk list (current chunk back through prev_chunk),
+  // visiting every entry recorded by this transaction.
+  c = trec -> current_chunk;
+  while (c != END_STM_CHUNK_LIST) {
+    unsigned int i;
+    for (i = 0; i < c -> next_entry_idx; i ++) {
+      TRecEntry *e = &(c -> entries[i]);
+      if (entry_is_update(e)) {
+        StgTVar *s = e -> tvar;
+        // Lock the TVar so its watch queue cannot change under us.
+        StgClosure *old = lock_tvar(trec, s);
+
+        // Pick up any invariants on the TVar being updated
+        // by entry "e"
+
+        StgTVarWatchQueue *q;
+        TRACE("%p : checking for invariants on %p", trec, s);
+        for (q = s -> first_watch_queue_entry;
+             q != END_STM_WATCH_QUEUE;
+             q = q -> next_queue_entry) {
+          if (watcher_is_invariant(q)) {
+            StgBool found = FALSE;
+            StgInvariantCheckQueue *q2;
+            TRACE("%p : Touching invariant %p", trec, q -> closure);
+            // Linear scan for a duplicate: the invariant may already have
+            // been queued via another TVar it watches.
+            for (q2 = trec -> invariants_to_check;
+                 q2 != END_INVARIANT_CHECK_QUEUE;
+                 q2 = q2 -> next_queue_entry) {
+              if (q2 -> invariant == (StgAtomicInvariant*)(q -> closure)) {
+                TRACE("%p : Already found %p", trec, q -> closure);
+                found = TRUE;
+                break;
+              }
+            }
+
+            if (!found) {
+              StgInvariantCheckQueue *q3;
+              TRACE("%p : Not already found %p", trec, q -> closure);
+              q3 = alloc_stg_invariant_check_queue(cap,
+                                                   (StgAtomicInvariant*) q -> closure);
+              q3 -> next_queue_entry = trec -> invariants_to_check;
+              trec -> invariants_to_check = q3;
+            }
+          }
+        }
+
+        // Release the TVar, restoring the value observed at lock time.
+        unlock_tvar(trec, s, old, FALSE);
+      }
+    }
+    c = c -> prev_chunk;
+  }
+
+  unlock_stm(trec);
+
+  TRACE("%p : stmGetInvariantsToCheck, head now %p",
+        trec,
+        trec -> invariants_to_check);
+
+  return (trec -> invariants_to_check);
+}
+
+/*......................................................................*/
+