1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2004
5 * External Storage Manager Interface
7 * ---------------------------------------------------------------------------*/
13 #include "OSThreads.h"
16 /* -----------------------------------------------------------------------------
19 * We support an arbitrary number of generations, with an arbitrary number
20 * of steps per generation. Notes (in no particular order):
22 * - all generations except the oldest should have two steps. This gives
23 * objects a decent chance to age before being promoted, and in
24 * particular will ensure that we don't end up with too many
25 * thunks being updated in older generations.
27 * - the oldest generation has one step. There's no point in aging
28 * objects in the oldest generation.
30 * - generation 0, step 0 (G0S0) is the allocation area. It is given
31 * a fixed set of blocks during initialisation, and these blocks
34 * - during garbage collection, each step which is an evacuation
35 * destination (i.e. all steps except G0S0) is allocated a to-space.
36 * evacuated objects are allocated into the step's to-space until
37 * GC is finished, when the original step's contents may be freed
38 * and replaced by the to-space.
40 * - the mutable-list is per-generation (not per-step). G0 doesn't
41 * have one (since every garbage collection collects at least G0).
43 * - block descriptors contain pointers to both the step and the
44 * generation that the block belongs to, for convenience.
46 * - static objects are stored in per-generation lists. See GC.c for
47 * details of how we collect CAFs in the generational scheme.
49 * - large objects are per-step, and are promoted in the same way
50 * as small objects, except that we may allocate large objects into
51 * generation 1 initially.
53 * ------------------------------------------------------------------------- */
55 typedef struct step_ {
56 unsigned int no; // step number
57 int is_compacted; // compact this step? (old gen only)
59 struct generation_ * gen; // generation this step belongs to
60 unsigned int gen_no; // generation number (cached)
62 bdescr * blocks; // blocks in this step
63 unsigned int n_blocks; // number of blocks
65 struct step_ * to; // destination step for live objects
67 bdescr * large_objects; // large objects (doubly linked)
68 unsigned int n_large_blocks; // no. of blocks used by large objs
70 // ------------------------------------
71 // Fields below are used during GC only
73 // During GC, if we are collecting this step, blocks and n_blocks
74 // are copied into the following two fields. After GC, these blocks
76 bdescr * old_blocks; // bdescr of first from-space block
77 unsigned int n_old_blocks; // number of blocks in from-space
79 bdescr * todos; // blocks waiting to be scavenged
80 unsigned int n_todos; // count of above
82 #if defined(THREADED_RTS)
83 SpinLock sync_todo; // lock for todos
84 SpinLock sync_large_objects; // lock for large_objects
85 // and scavenged_large_objects
88 bdescr * scavenged_large_objects; // live large objs after GC (d-link)
89 unsigned int n_scavenged_large_blocks; // size (not count) of above
91 bdescr * bitmap; // bitmap for compacting collection
95 typedef struct generation_ {
96 unsigned int no; // generation number
97 step * steps; // steps
98 unsigned int n_steps; // number of steps
99 unsigned int max_blocks; // max blocks in step 0
100 bdescr *mut_list; // mut objects in this gen (not G0)
103 unsigned int collections;
104 unsigned int failed_promotions;
106 // temporary use during GC:
107 bdescr *saved_mut_list;
110 extern generation * RTS_VAR(generations);
112 extern generation * RTS_VAR(g0);
113 extern step * RTS_VAR(g0s0);
114 extern generation * RTS_VAR(oldest_gen);
116 /* -----------------------------------------------------------------------------
117 Initialisation / De-initialisation
118 -------------------------------------------------------------------------- */
120 extern void initStorage(void);
121 extern void exitStorage(void);
122 extern void freeStorage(void);
124 /* -----------------------------------------------------------------------------
127 StgPtr allocateInGen(generation *g, nat n)
128 Allocates a chunk of contiguous store
129 n words long in generation g,
130 returning a pointer to the first word.
133 StgPtr allocate(nat n) Equivalent to allocateInGen(g0,n)
135 StgPtr allocateLocal(Capability *cap, nat n)
136 Allocates memory from the nursery in
137 the current Capability. This can be
138 done without taking a global lock,
141 StgPtr allocatePinned(nat n) Allocates a chunk of contiguous store
142 n words long, which is at a fixed
143 address (won't be moved by GC).
144 Returns a pointer to the first word.
147 NOTE: the GC can't in general handle
148 pinned objects, so allocatePinned()
149 can only be used for ByteArrays at the
152 Don't forget to TICK_ALLOC_XXX(...)
153 after calling allocate or
154 allocatePinned, for the
155 benefit of the ticky-ticky profiler.
157 rtsBool doYouWantToGC(void) Returns True if the storage manager is
158 ready to perform a GC, False otherwise.
160 lnat allocatedBytes(void) Returns the number of bytes allocated
161 via allocate() since the last GC.
162 Used in the reporting of statistics.
164 -------------------------------------------------------------------------- */
166 extern StgPtr allocate ( nat n );
167 extern StgPtr allocateInGen ( generation *g, nat n );
168 extern StgPtr allocateLocal ( Capability *cap, nat n );
169 extern StgPtr allocatePinned ( nat n );
170 extern lnat allocatedBytes ( void );
172 extern bdescr * RTS_VAR(small_alloc_list);
173 extern bdescr * RTS_VAR(large_alloc_list);
174 extern bdescr * RTS_VAR(pinned_object_block);
176 extern nat RTS_VAR(alloc_blocks);
177 extern nat RTS_VAR(alloc_blocks_lim);
179 INLINE_HEADER rtsBool
180 doYouWantToGC( void )
182 return (alloc_blocks >= alloc_blocks_lim);
185 /* memory allocator for executable memory */
186 extern void *allocateExec (nat bytes);
187 extern void freeExec (void *p);
189 /* for splitting blocks groups in two */
190 extern bdescr * splitLargeBlock (bdescr *bd, nat blocks);
192 /* -----------------------------------------------------------------------------
193 Performing Garbage Collection
195 GarbageCollect(force_major_gc) Performs a garbage collection.
196 If force_major_gc is true, a major
197 (oldest-generation) collection is forced.
200 -------------------------------------------------------------------------- */
202 extern void GarbageCollect(rtsBool force_major_gc);
204 /* -----------------------------------------------------------------------------
205 Generational garbage collection support
207 recordMutable(StgPtr p) Informs the garbage collector that a
208 previously immutable object has
209 become (permanently) mutable. Used
210 by thawArray and similar.
212 updateWithIndirection(p1,p2) Updates the object at p1 with an
213 indirection pointing to p2. This is
214 normally called for objects in an old
215 generation (>0) when they are updated.
217 updateWithPermIndirection(p1,p2) As above but uses a permanent indir.
219 -------------------------------------------------------------------------- */
222 * Storage manager mutex
224 #if defined(THREADED_RTS)
225 extern Mutex sm_mutex;
226 extern Mutex atomic_modify_mutvar_mutex;
227 extern SpinLock recordMutableGen_sync;
230 #if defined(THREADED_RTS)
231 #define ACQUIRE_SM_LOCK ACQUIRE_LOCK(&sm_mutex);
232 #define RELEASE_SM_LOCK RELEASE_LOCK(&sm_mutex);
233 #define ASSERT_SM_LOCK() ASSERT_LOCK_HELD(&sm_mutex);
235 #define ACQUIRE_SM_LOCK
236 #define RELEASE_SM_LOCK
237 #define ASSERT_SM_LOCK()
241 recordMutableGen(StgClosure *p, generation *gen)
246 if (bd->free >= bd->start + BLOCK_SIZE_W) {
248 new_bd = allocBlock();
253 *bd->free++ = (StgWord)p;
258 recordMutableGenLock(StgClosure *p, generation *gen)
261 recordMutableGen(p,gen);
265 extern bdescr *allocBlock_sync(void);
267 // Version of recordMutableGen() for use in parallel GC. The same as
268 // recordMutableGen(), except that we surround it with a spinlock and
269 // call the spinlock version of allocBlock().
271 recordMutableGen_GC(StgClosure *p, generation *gen)
275 ACQUIRE_SPIN_LOCK(&recordMutableGen_sync);
278 if (bd->free >= bd->start + BLOCK_SIZE_W) {
280 new_bd = allocBlock_sync();
285 *bd->free++ = (StgWord)p;
287 RELEASE_SPIN_LOCK(&recordMutableGen_sync);
291 recordMutable(StgClosure *p)
294 ASSERT(closure_MUTABLE(p));
296 if (bd->gen_no > 0) recordMutableGen(p, &RTS_DEREF(generations)[bd->gen_no]);
300 recordMutableLock(StgClosure *p)
307 /* -----------------------------------------------------------------------------
308 The CAF table - used to let us revert CAFs in GHCi
309 -------------------------------------------------------------------------- */
311 /* set to disable CAF garbage collection in GHCi. */
312 /* (needed when dynamic libraries are used). */
313 extern rtsBool keepCAFs;
315 /* -----------------------------------------------------------------------------
316 This is the write barrier for MUT_VARs, a.k.a. IORefs. A
317 MUT_VAR_CLEAN object is not on the mutable list; a MUT_VAR_DIRTY
318 is. When written to, a MUT_VAR_CLEAN turns into a MUT_VAR_DIRTY
319 and is put on the mutable list.
320 -------------------------------------------------------------------------- */
322 void dirty_MUT_VAR(StgRegTable *reg, StgClosure *p);
324 /* -----------------------------------------------------------------------------
325 DEBUGGING predicates for pointers
327 LOOKS_LIKE_INFO_PTR(p) returns False if p is definitely not an info ptr
328 LOOKS_LIKE_CLOSURE_PTR(p) returns False if p is definitely not a closure ptr
330 These macros are complete but not sound. That is, they might
331 return false positives. Do not rely on them to distinguish info
332 pointers from closure pointers, for example.
334 We don't use address-space predicates these days, for portability
335 reasons, and the fact that code/data can be scattered about the
336 address space in a dynamically-linked environment. Our best option
337 is to look at the alleged info table and see whether it seems to
339 -------------------------------------------------------------------------- */
341 #define LOOKS_LIKE_INFO_PTR(p) \
342 (p && LOOKS_LIKE_INFO_PTR_NOT_NULL(p))
344 #define LOOKS_LIKE_INFO_PTR_NOT_NULL(p) \
345 (((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type != INVALID_OBJECT && \
346 ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type < N_CLOSURE_TYPES)
348 #define LOOKS_LIKE_CLOSURE_PTR(p) \
349 (LOOKS_LIKE_INFO_PTR((UNTAG_CLOSURE((StgClosure *)(p)))->header.info))
351 /* -----------------------------------------------------------------------------
352 Macros for calculating how big a closure will be (used during allocation)
353 -------------------------------------------------------------------------- */
355 INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
356 { return sizeofW(StgPAP) + n_args; }
358 INLINE_HEADER StgOffset AP_sizeW ( nat n_args )
359 { return sizeofW(StgAP) + n_args; }
361 INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
362 { return sizeofW(StgAP_STACK) + size; }
364 INLINE_HEADER StgOffset CONSTR_sizeW( nat p, nat np )
365 { return sizeofW(StgHeader) + p + np; }
367 INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
368 { return sizeofW(StgSelector); }
370 INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
371 { return sizeofW(StgHeader)+MIN_PAYLOAD_SIZE; }
373 /* --------------------------------------------------------------------------
375 ------------------------------------------------------------------------*/
377 INLINE_HEADER StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
378 { return sizeofW(StgClosure)
379 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
380 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
382 INLINE_HEADER StgOffset thunk_sizeW_fromITBL( const StgInfoTable* itbl )
383 { return sizeofW(StgThunk)
384 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
385 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
387 INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
388 { return AP_STACK_sizeW(x->size); }
390 INLINE_HEADER StgOffset ap_sizeW( StgAP* x )
391 { return AP_sizeW(x->n_args); }
393 INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
394 { return PAP_sizeW(x->n_args); }
396 INLINE_HEADER StgOffset arr_words_sizeW( StgArrWords* x )
397 { return sizeofW(StgArrWords) + x->words; }
399 INLINE_HEADER StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
400 { return sizeofW(StgMutArrPtrs) + x->ptrs; }
402 INLINE_HEADER StgWord tso_sizeW ( StgTSO *tso )
403 { return TSO_STRUCT_SIZEW + tso->stack_size; }
405 INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
406 { return bco->size; }
409 closure_sizeW_ (StgClosure *p, StgInfoTable *info)
411 switch (info->type) {
414 return sizeofW(StgThunk) + 1;
419 return sizeofW(StgHeader) + 1;
423 return sizeofW(StgThunk) + 2;
430 return sizeofW(StgHeader) + 2;
432 return thunk_sizeW_fromITBL(info);
434 return THUNK_SELECTOR_sizeW();
436 return ap_stack_sizeW((StgAP_STACK *)p);
438 return ap_sizeW((StgAP *)p);
440 return pap_sizeW((StgPAP *)p);
444 case IND_OLDGEN_PERM:
445 return sizeofW(StgInd);
447 return arr_words_sizeW((StgArrWords *)p);
448 case MUT_ARR_PTRS_CLEAN:
449 case MUT_ARR_PTRS_DIRTY:
450 case MUT_ARR_PTRS_FROZEN:
451 case MUT_ARR_PTRS_FROZEN0:
452 return mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
454 return tso_sizeW((StgTSO *)p);
456 return bco_sizeW((StgBCO *)p);
457 case TVAR_WATCH_QUEUE:
458 return sizeofW(StgTVarWatchQueue);
460 return sizeofW(StgTVar);
462 return sizeofW(StgTRecChunk);
464 return sizeofW(StgTRecHeader);
465 case ATOMIC_INVARIANT:
466 return sizeofW(StgAtomicInvariant);
467 case INVARIANT_CHECK_QUEUE:
468 return sizeofW(StgInvariantCheckQueue);
470 return sizeW_fromITBL(info);
474 // The definitive way to find the size, in words, of a heap-allocated closure
476 closure_sizeW (StgClosure *p)
478 return closure_sizeW_(p, get_itbl(p));
481 /* -----------------------------------------------------------------------------
482 Sizes of stack frames
483 -------------------------------------------------------------------------- */
485 INLINE_HEADER StgWord stack_frame_sizeW( StgClosure *frame )
487 StgRetInfoTable *info;
489 info = get_ret_itbl(frame);
490 switch (info->i.type) {
494 StgRetDyn *dyn = (StgRetDyn *)frame;
495 return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
496 RET_DYN_NONPTR_REGS_SIZE +
497 RET_DYN_PTRS(dyn->liveness) + RET_DYN_NONPTRS(dyn->liveness);
501 return sizeofW(StgRetFun) + ((StgRetFun *)frame)->size;
504 return 1 + GET_LARGE_BITMAP(&info->i)->size;
507 return 2 + BCO_BITMAP_SIZE((StgBCO *)((P_)frame)[1]);
510 return 1 + BITMAP_SIZE(info->i.layout.bitmap);
514 /* -----------------------------------------------------------------------------
516 -------------------------------------------------------------------------- */
518 extern void allocNurseries ( void );
519 extern void resetNurseries ( void );
520 extern void resizeNurseries ( nat blocks );
521 extern void resizeNurseriesFixed ( nat blocks );
522 extern lnat countNurseryBlocks ( void );
524 /* -----------------------------------------------------------------------------
526 -------------------------------------------------------------------------- */
528 typedef void (*evac_fn)(StgClosure **);
530 extern void threadPaused ( Capability *cap, StgTSO * );
531 extern StgClosure * isAlive ( StgClosure *p );
532 extern void markCAFs ( evac_fn evac );
533 extern void GetRoots ( evac_fn evac );
535 /* -----------------------------------------------------------------------------
536 Stats 'n' DEBUG stuff
537 -------------------------------------------------------------------------- */
539 extern ullong RTS_VAR(total_allocated);
541 extern lnat calcAllocated ( void );
542 extern lnat calcLiveBlocks ( void );
543 extern lnat calcLiveWords ( void );
544 extern lnat countOccupied ( bdescr *bd );
545 extern lnat calcNeeded ( void );
548 extern void memInventory(rtsBool show);
549 extern void checkSanity(void);
550 extern nat countBlocks(bdescr *);
551 extern void checkNurserySanity( step *stp );
555 void printMutOnceList(generation *gen);
556 void printMutableList(generation *gen);
559 /* ----------------------------------------------------------------------------
560 Storage manager internal APIs and globals
561 ------------------------------------------------------------------------- */
563 #define END_OF_STATIC_LIST stgCast(StgClosure*,1)
565 extern void newDynCAF(StgClosure *);
567 extern void move_TSO(StgTSO *src, StgTSO *dest);
568 extern StgTSO *relocate_stack(StgTSO *dest, ptrdiff_t diff);
570 extern StgClosure * RTS_VAR(scavenged_static_objects);
571 extern StgWeak * RTS_VAR(old_weak_ptr_list);
572 extern StgWeak * RTS_VAR(weak_ptr_list);
573 extern StgClosure * RTS_VAR(caf_list);
574 extern StgClosure * RTS_VAR(revertible_caf_list);
575 extern StgTSO * RTS_VAR(resurrected_threads);
577 #endif /* STORAGE_H */