#define STORAGE_H
#include <stddef.h>
+#include "OSThreads.h"
/* -----------------------------------------------------------------------------
* Generational GC
*
* ------------------------------------------------------------------------- */
-typedef struct _step {
+typedef struct step_ {
unsigned int no; /* step number */
bdescr * blocks; /* blocks in this step */
unsigned int n_blocks; /* number of blocks */
- struct _step * to; /* destination step for live objects */
- struct _generation * gen; /* generation this step belongs to */
+ struct step_ * to; /* destination step for live objects */
+ struct generation_ * gen; /* generation this step belongs to */
unsigned int gen_no; /* generation number (cached) */
bdescr * large_objects; /* large objects (doubly linked) */
unsigned int n_large_blocks; /* no. of blocks used by large objs */
int is_compacted; /* compact this step? (old gen only) */
+ /* During GC, if we are collecting this step, blocks and n_blocks
+ * are copied into the following two fields. After GC, these blocks
+ * are freed. */
+ bdescr * old_blocks; /* bdescr of first from-space block */
+ unsigned int n_old_blocks; /* number of blocks in from-space */
+
/* temporary use during GC: */
StgPtr hp; /* next free locn in to-space */
StgPtr hpLim; /* end of current to-space block */
bdescr * hp_bd; /* bdescr of current to-space block */
- bdescr * to_blocks; /* bdescr of first to-space block */
- unsigned int n_to_blocks; /* number of blocks in to-space */
+ StgPtr scavd_hp; /* ... same as above, but already */
+ StgPtr scavd_hpLim; /* scavenged. */
bdescr * scan_bd; /* block currently being scanned */
StgPtr scan; /* scan pointer in current block */
bdescr * new_large_objects; /* large objects collected so far */
bdescr * bitmap; /* bitmap for compacting collection */
} step;
-typedef struct _generation {
+typedef struct generation_ {
unsigned int no; /* generation number */
step * steps; /* steps */
unsigned int n_steps; /* number of steps */
via allocate() since the last GC.
Used in the reporting of statistics.
   THREADED_RTS: allocate and doYouWantToGC can be used from STG code, they are
surrounded by a mutex.
-------------------------------------------------------------------------- */
extern StgPtr allocate ( nat n );
+extern StgPtr allocateLocal ( Capability *cap, nat n );
extern StgPtr allocatePinned ( nat n );
extern lnat allocated_bytes ( void );
/*
* Storage manager mutex
*/
#if defined(THREADED_RTS)
extern Mutex sm_mutex;
extern Mutex atomic_modify_mutvar_mutex;
#endif

#if defined(THREADED_RTS)
/* No trailing semicolons inside the macro bodies: callers write their
 * own ';' (they must, since the non-threaded variants below expand to
 * nothing), and an embedded ';' would leave a stray empty statement
 * that breaks `if (...) ACQUIRE_SM_LOCK; else ...`. */
#define ACQUIRE_SM_LOCK   ACQUIRE_LOCK(&sm_mutex)
#define RELEASE_SM_LOCK   RELEASE_LOCK(&sm_mutex)
#define ASSERT_SM_LOCK()  ASSERT_LOCK_HELD(&sm_mutex)
#else
#define ACQUIRE_SM_LOCK
#define RELEASE_SM_LOCK
#define ASSERT_SM_LOCK()
#endif
-/* ToDo: shouldn't recordMutable acquire some
- * kind of lock in the SMP case? Or do we need per-processor
- * mutable lists?
- */
INLINE_HEADER void
recordMutableGen(StgClosure *p, generation *gen)
{
gen->mut_list = bd;
}
*bd->free++ = (StgWord)p;
+
+}
+
+INLINE_HEADER void
+recordMutableGenLock(StgClosure *p, generation *gen)
+{
+ ACQUIRE_SM_LOCK;
+ recordMutableGen(p,gen);
+ RELEASE_SM_LOCK;
}
INLINE_HEADER void
if (bd->gen_no > 0) recordMutableGen(p, &RTS_DEREF(generations)[bd->gen_no]);
}
+INLINE_HEADER void
+recordMutableLock(StgClosure *p)
+{
+ ACQUIRE_SM_LOCK;
+ recordMutable(p);
+ RELEASE_SM_LOCK;
+}
+
/* -----------------------------------------------------------------------------
The CAF table - used to let us revert CAFs in GHCi
-------------------------------------------------------------------------- */
-void revertCAFs( void );
-
- // set to disable CAF garbage collection in GHCi.
- // (needed when dynamic libraries are used).
+/* set to disable CAF garbage collection in GHCi. */
+/* (needed when dynamic libraries are used). */
extern rtsBool keepCAFs;
/* -----------------------------------------------------------------------------
+ This is the write barrier for MUT_VARs, a.k.a. IORefs. A
+ MUT_VAR_CLEAN object is not on the mutable list; a MUT_VAR_DIRTY
+ is. When written to, a MUT_VAR_CLEAN turns into a MUT_VAR_DIRTY
+ and is put on the mutable list.
+ -------------------------------------------------------------------------- */
+
+void dirty_MUT_VAR(StgRegTable *reg, StgClosure *p);
+
+/* -----------------------------------------------------------------------------
DEBUGGING predicates for pointers
LOOKS_LIKE_INFO_PTR(p) returns False if p is definitely not an info ptr
/* Size in words of a PAP carrying n_args payload words. */
INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
{ return sizeofW(StgPAP) + n_args; }
+INLINE_HEADER StgOffset AP_sizeW ( nat n_args )
+{ return sizeofW(StgAP) + n_args; }
+
/* Size in words of an AP_STACK with a payload of `size` words. */
INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
{ return sizeofW(StgAP_STACK) + size; }
{ return sizeofW(StgHeader) + p + np; }
/* A THUNK_SELECTOR is exactly an StgSelector; the old
 * stg_max(..., MIN_UPD_SIZE) padding is no longer applied. */
INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
{ return sizeofW(StgSelector); }
/* A BLACKHOLE is a header plus the minimum payload, so any small
 * closure can later be written over it in place. */
INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
{ return sizeofW(StgHeader)+MIN_PAYLOAD_SIZE; }
/* --------------------------------------------------------------------------
Sizes of closures
+ sizeofW(StgPtr) * itbl->layout.payload.ptrs
+ sizeofW(StgWord) * itbl->layout.payload.nptrs; }
+INLINE_HEADER StgOffset thunk_sizeW_fromITBL( const StgInfoTable* itbl )
+{ return sizeofW(StgThunk)
+ + sizeofW(StgPtr) * itbl->layout.payload.ptrs
+ + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
+
/* Size in words of the given AP_STACK closure (uses its size field). */
INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
{ return AP_STACK_sizeW(x->size); }
+INLINE_HEADER StgOffset ap_sizeW( StgAP* x )
+{ return AP_sizeW(x->n_args); }
+
/* Size in words of the given PAP closure (uses its n_args field). */
INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
{ return PAP_sizeW(x->n_args); }
/* Size in words of the given BCO; BCOs record their own total size. */
INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
{ return bco->size; }
+STATIC_INLINE nat
+closure_sizeW_ (StgClosure *p, StgInfoTable *info)
+{
+ switch (info->type) {
+ case THUNK_0_1:
+ case THUNK_1_0:
+ return sizeofW(StgThunk) + 1;
+ case FUN_0_1:
+ case CONSTR_0_1:
+ case FUN_1_0:
+ case CONSTR_1_0:
+ return sizeofW(StgHeader) + 1;
+ case THUNK_0_2:
+ case THUNK_1_1:
+ case THUNK_2_0:
+ return sizeofW(StgThunk) + 2;
+ case FUN_0_2:
+ case CONSTR_0_2:
+ case FUN_1_1:
+ case CONSTR_1_1:
+ case FUN_2_0:
+ case CONSTR_2_0:
+ return sizeofW(StgHeader) + 2;
+ case THUNK:
+ return thunk_sizeW_fromITBL(info);
+ case THUNK_SELECTOR:
+ return THUNK_SELECTOR_sizeW();
+ case AP_STACK:
+ return ap_stack_sizeW((StgAP_STACK *)p);
+ case AP:
+ case PAP:
+ return pap_sizeW((StgPAP *)p);
+ case IND:
+ case IND_PERM:
+ case IND_OLDGEN:
+ case IND_OLDGEN_PERM:
+ return sizeofW(StgInd);
+ case ARR_WORDS:
+ return arr_words_sizeW((StgArrWords *)p);
+ case MUT_ARR_PTRS_CLEAN:
+ case MUT_ARR_PTRS_DIRTY:
+ case MUT_ARR_PTRS_FROZEN:
+ case MUT_ARR_PTRS_FROZEN0:
+ return mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
+ case TSO:
+ return tso_sizeW((StgTSO *)p);
+ case BCO:
+ return bco_sizeW((StgBCO *)p);
+ case TVAR_WAIT_QUEUE:
+ return sizeofW(StgTVarWaitQueue);
+ case TVAR:
+ return sizeofW(StgTVar);
+ case TREC_CHUNK:
+ return sizeofW(StgTRecChunk);
+ case TREC_HEADER:
+ return sizeofW(StgTRecHeader);
+ default:
+ return sizeW_fromITBL(info);
+ }
+}
+
+// The definitive way to find the size, in words, of a heap-allocated closure
+STATIC_INLINE nat
+closure_sizeW (StgClosure *p)
+{
+ return closure_sizeW_(p, get_itbl(p));
+}
+
/* -----------------------------------------------------------------------------
Sizes of stack frames
-------------------------------------------------------------------------- */
Nursery manipulation
-------------------------------------------------------------------------- */
-extern void allocNurseries ( void );
-extern void resetNurseries ( void );
-extern bdescr * allocNursery ( bdescr *last_bd, nat blocks );
-extern void resizeNursery ( nat blocks );
-extern void tidyAllocateLists ( void );
+extern void allocNurseries ( void );
+extern void resetNurseries ( void );
+extern void resizeNurseries ( nat blocks );
+extern void resizeNurseriesFixed ( nat blocks );
+extern void tidyAllocateLists ( void );
+extern lnat countNurseryBlocks ( void );
/* -----------------------------------------------------------------------------
Functions from GC.c
-------------------------------------------------------------------------- */
-extern void threadPaused ( StgTSO * );
+extern void threadPaused ( Capability *cap, StgTSO * );
extern StgClosure * isAlive ( StgClosure *p );
extern void markCAFs ( evac_fn evac );
extern void memInventory(void);
extern void checkSanity(void);
extern nat countBlocks(bdescr *);
+extern void checkNurserySanity( step *stp );
#endif
#if defined(DEBUG)
extern void move_TSO(StgTSO *src, StgTSO *dest);
extern StgTSO *relocate_stack(StgTSO *dest, ptrdiff_t diff);
-extern StgClosure * RTS_VAR(static_objects);
extern StgClosure * RTS_VAR(scavenged_static_objects);
extern StgWeak * RTS_VAR(old_weak_ptr_list);
extern StgWeak * RTS_VAR(weak_ptr_list);
extern StgClosure * RTS_VAR(revertible_caf_list);
extern StgTSO * RTS_VAR(resurrected_threads);
#endif /* STORAGE_H */