1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2004
 * External Storage Manager Interface
7 * ---------------------------------------------------------------------------*/
13 #include "OSThreads.h"
16 /* -----------------------------------------------------------------------------
19 * We support an arbitrary number of generations, with an arbitrary number
20 * of steps per generation. Notes (in no particular order):
22 * - all generations except the oldest should have two steps. This gives
23 * objects a decent chance to age before being promoted, and in
24 * particular will ensure that we don't end up with too many
25 * thunks being updated in older generations.
27 * - the oldest generation has one step. There's no point in aging
28 * objects in the oldest generation.
30 * - generation 0, step 0 (G0S0) is the allocation area. It is given
31 * a fixed set of blocks during initialisation, and these blocks
34 * - during garbage collection, each step which is an evacuation
35 * destination (i.e. all steps except G0S0) is allocated a to-space.
36 * evacuated objects are allocated into the step's to-space until
37 * GC is finished, when the original step's contents may be freed
38 * and replaced by the to-space.
40 * - the mutable-list is per-generation (not per-step). G0 doesn't
41 * have one (since every garbage collection collects at least G0).
43 * - block descriptors contain pointers to both the step and the
44 * generation that the block belongs to, for convenience.
46 * - static objects are stored in per-generation lists. See GC.c for
47 * details of how we collect CAFs in the generational scheme.
49 * - large objects are per-step, and are promoted in the same way
50 * as small objects, except that we may allocate large objects into
51 * generation 1 initially.
53 * ------------------------------------------------------------------------- */
/* A "step" within a generation: objects age through a generation's steps
   before being promoted to the next generation.
   NOTE(review): this excerpt is missing the closing "} step;" and the
   #endif for the THREADED_RTS section — confirm against the full file. */
typedef struct step_ {
    unsigned int no;                // step number in this generation
    unsigned int abs_no;            // absolute step number
    int is_compacted;               // compact this step? (old gen only)

    struct generation_ * gen;       // generation this step belongs to
    unsigned int gen_no;            // generation number (cached)

    bdescr * blocks;                // blocks in this step
    unsigned int n_blocks;          // number of blocks
    unsigned int n_words;           // number of words

    struct step_ * to;              // destination step for live objects

    bdescr * large_objects;         // large objects (doubly linked)
    unsigned int n_large_blocks;    // no. of blocks used by large objs

    // ------------------------------------
    // Fields below are used during GC only

    // During GC, if we are collecting this step, blocks and n_blocks
    // are copied into the following two fields.  After GC, these blocks
    // NOTE(review): sentence truncated in this excerpt — presumably
    // "...are freed" or similar; confirm against the full file.
#if defined(THREADED_RTS)
    char pad[128];                  // make sure the following is
                                    // on a separate cache line.
    SpinLock sync_todo;             // lock for todos
    SpinLock sync_large_objects;    // lock for large_objects
                                    // and scavenged_large_objects

    bdescr * old_blocks;            // bdescr of first from-space block
    unsigned int n_old_blocks;      // number of blocks in from-space

    bdescr * todos;                 // blocks waiting to be scavenged
    unsigned int n_todos;           // count of above

    bdescr * part_blocks;           // partially-full scanned blocks
    unsigned int n_part_blocks;     // count of above

    bdescr * scavenged_large_objects;       // live large objs after GC (d-link)
    unsigned int n_scavenged_large_blocks;  // size (not count) of above

    bdescr * bitmap;                // bitmap for compacting collection
/* A generation: an array of steps plus per-generation bookkeeping
   (the mutable list and collection counters).
   NOTE(review): the closing "} generation;" is not visible in this
   excerpt — confirm against the full file. */
typedef struct generation_ {
    unsigned int no;                // generation number
    step * steps;                   // steps
    unsigned int n_steps;           // number of steps
    unsigned int max_blocks;        // max blocks in step 0
    bdescr *mut_list;               // mut objects in this gen (not G0)

    // statistics counters:
    unsigned int collections;       // times this generation was collected
    unsigned int par_collections;   // presumably parallel collections — confirm
    unsigned int failed_promotions;

    // temporary use during GC:
    bdescr *saved_mut_list;         // mut_list is stashed here during GC
123 extern generation * RTS_VAR(generations);
125 extern generation * RTS_VAR(g0);
126 extern step * RTS_VAR(g0s0);
127 extern generation * RTS_VAR(oldest_gen);
128 extern step * RTS_VAR(all_steps);
129 extern nat RTS_VAR(total_steps);
131 /* -----------------------------------------------------------------------------
132 Initialisation / De-initialisation
133 -------------------------------------------------------------------------- */
135 extern void initStorage(void);
136 extern void exitStorage(void);
137 extern void freeStorage(void);
139 /* -----------------------------------------------------------------------------
142 StgPtr allocateInGen(generation *g, nat n)
143 Allocates a chunk of contiguous store
144 n words long in generation g,
145 returning a pointer to the first word.
   StgPtr allocate(nat n)       Equivalent to allocateInGen(g0,n)
150 StgPtr allocateLocal(Capability *cap, nat n)
151 Allocates memory from the nursery in
152 the current Capability. This can be
153 done without taking a global lock,
156 StgPtr allocatePinned(nat n) Allocates a chunk of contiguous store
157 n words long, which is at a fixed
158 address (won't be moved by GC).
159 Returns a pointer to the first word.
162 NOTE: the GC can't in general handle
163 pinned objects, so allocatePinned()
164 can only be used for ByteArrays at the
167 Don't forget to TICK_ALLOC_XXX(...)
168 after calling allocate or
169 allocatePinned, for the
170 benefit of the ticky-ticky profiler.
172 rtsBool doYouWantToGC(void) Returns True if the storage manager is
173 ready to perform a GC, False otherwise.
175 lnat allocatedBytes(void) Returns the number of bytes allocated
176 via allocate() since the last GC.
177 Used in the reporting of statistics.
179 -------------------------------------------------------------------------- */
181 extern StgPtr allocate ( nat n );
182 extern StgPtr allocateInGen ( generation *g, nat n );
183 extern StgPtr allocateLocal ( Capability *cap, nat n );
184 extern StgPtr allocatePinned ( nat n );
185 extern lnat allocatedBytes ( void );
187 extern bdescr * RTS_VAR(small_alloc_list);
188 extern bdescr * RTS_VAR(large_alloc_list);
189 extern bdescr * RTS_VAR(pinned_object_block);
191 extern nat RTS_VAR(alloc_blocks);
192 extern nat RTS_VAR(alloc_blocks_lim);
/* Returns rtsTrue when the allocator has reached its block quota, i.e.
   it is time to trigger a garbage collection.
   NOTE(review): the function's enclosing braces are not visible in this
   excerpt — confirm against the full file. */
INLINE_HEADER rtsBool
doYouWantToGC( void )
  return (alloc_blocks >= alloc_blocks_lim);
200 /* memory allocator for executable memory */
201 extern void *allocateExec (nat bytes);
202 extern void freeExec (void *p);
204 /* for splitting blocks groups in two */
205 extern bdescr * splitLargeBlock (bdescr *bd, nat blocks);
207 /* -----------------------------------------------------------------------------
208 Performing Garbage Collection
210 GarbageCollect(get_roots) Performs a garbage collection.
211 'get_roots' is called to find all the
212 roots that the system knows about.
215 -------------------------------------------------------------------------- */
217 extern void GarbageCollect(rtsBool force_major_gc);
219 /* -----------------------------------------------------------------------------
220 Generational garbage collection support
222 recordMutable(StgPtr p) Informs the garbage collector that a
223 previously immutable object has
224 become (permanently) mutable. Used
225 by thawArray and similar.
227 updateWithIndirection(p1,p2) Updates the object at p1 with an
228 indirection pointing to p2. This is
229 normally called for objects in an old
230 generation (>0) when they are updated.
232 updateWithPermIndirection(p1,p2) As above but uses a permanent indir.
234 -------------------------------------------------------------------------- */
237 * Storage manager mutex
#if defined(THREADED_RTS)
// Global storage-manager locks (threaded RTS only).
extern Mutex sm_mutex;
extern Mutex atomic_modify_mutvar_mutex;
extern SpinLock recordMutableGen_sync;

// Convenience wrappers around sm_mutex; the second group compiles to
// no-ops in the non-threaded RTS.
// NOTE(review): the #endif/#else markers separating the two groups are
// not visible in this excerpt — confirm against the full file.
#if defined(THREADED_RTS)
#define ACQUIRE_SM_LOCK   ACQUIRE_LOCK(&sm_mutex);
#define RELEASE_SM_LOCK   RELEASE_LOCK(&sm_mutex);
#define ASSERT_SM_LOCK()  ASSERT_LOCK_HELD(&sm_mutex);
#define ACQUIRE_SM_LOCK
#define RELEASE_SM_LOCK
#define ASSERT_SM_LOCK()
/* Record closure p on generation gen's mutable list.  If the current
   mut_list block is full, a fresh block is allocated first.
   NOTE(review): several lines of this function are missing from this
   excerpt (return type, local declarations, list linkage, braces) —
   'bd' is presumably gen->mut_list; confirm against the full file. */
recordMutableGen(StgClosure *p, generation *gen)
    // current mutable-list block is full?
    if (bd->free >= bd->start + BLOCK_SIZE_W) {
        new_bd = allocBlock();
    // append the closure pointer to the mutable list
    *bd->free++ = (StgWord)p;
/* Lock-taking wrapper around recordMutableGen().
   NOTE(review): the ACQUIRE/RELEASE_SM_LOCK lines and braces are not
   visible in this excerpt — confirm against the full file. */
recordMutableGenLock(StgClosure *p, generation *gen)
    recordMutableGen(p,gen);
280 extern bdescr *allocBlock_sync(void);
// Version of recordMutableGen() for use in parallel GC.  The same as
// recordMutableGen(), except that we surround it with a spinlock and
// call the spinlock version of allocBlock().
/* NOTE(review): several lines of this function are missing from this
   excerpt (return type, local declarations, braces) — confirm against
   the full file. */
recordMutableGen_GC(StgClosure *p, generation *gen)
    ACQUIRE_SPIN_LOCK(&recordMutableGen_sync);
    // current mutable-list block is full?
    if (bd->free >= bd->start + BLOCK_SIZE_W) {
        new_bd = allocBlock_sync();
    // append the closure pointer to the mutable list
    *bd->free++ = (StgWord)p;
    RELEASE_SPIN_LOCK(&recordMutableGen_sync);
/* Inform the GC that a previously-immutable closure p has become
   mutable: put it on its generation's mutable list.  Generation 0 has
   no mutable list, hence the gen_no > 0 guard.
   NOTE(review): the declaration of 'bd' (presumably Bdescr((P_)p)) and
   the braces are missing from this excerpt — confirm. */
recordMutable(StgClosure *p)
    ASSERT(closure_MUTABLE(p));
    if (bd->gen_no > 0) recordMutableGen(p, &RTS_DEREF(generations)[bd->gen_no]);
/* Lock-taking wrapper around recordMutable().
   NOTE(review): the body of this function is not visible in this
   excerpt — confirm against the full file. */
recordMutableLock(StgClosure *p)
322 /* -----------------------------------------------------------------------------
323 The CAF table - used to let us revert CAFs in GHCi
324 -------------------------------------------------------------------------- */
326 /* set to disable CAF garbage collection in GHCi. */
327 /* (needed when dynamic libraries are used). */
328 extern rtsBool keepCAFs;
330 /* -----------------------------------------------------------------------------
331 This is the write barrier for MUT_VARs, a.k.a. IORefs. A
332 MUT_VAR_CLEAN object is not on the mutable list; a MUT_VAR_DIRTY
333 is. When written to, a MUT_VAR_CLEAN turns into a MUT_VAR_DIRTY
334 and is put on the mutable list.
335 -------------------------------------------------------------------------- */
337 void dirty_MUT_VAR(StgRegTable *reg, StgClosure *p);
339 /* -----------------------------------------------------------------------------
340 DEBUGGING predicates for pointers
342 LOOKS_LIKE_INFO_PTR(p) returns False if p is definitely not an info ptr
343 LOOKS_LIKE_CLOSURE_PTR(p) returns False if p is definitely not a closure ptr
345 These macros are complete but not sound. That is, they might
346 return false positives. Do not rely on them to distinguish info
347 pointers from closure pointers, for example.
349 We don't use address-space predicates these days, for portability
350 reasons, and the fact that code/data can be scattered about the
351 address space in a dynamically-linked environment. Our best option
352 is to look at the alleged info table and see whether it seems to
354 -------------------------------------------------------------------------- */
/* Conservative debugging predicate: false means p is definitely not an
   info pointer; true means only that it *looks* like one. */
#define LOOKS_LIKE_INFO_PTR(p) \
   (p && LOOKS_LIKE_INFO_PTR_NOT_NULL(p))

/* As above, but p must be non-NULL.
   NOTE(review): p is expanded twice below — do not pass an expression
   with side effects. */
#define LOOKS_LIKE_INFO_PTR_NOT_NULL(p) \
   (((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type != INVALID_OBJECT && \
    ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type < N_CLOSURE_TYPES)

/* A closure pointer looks valid if its (untagged) header's info pointer
   looks valid. */
#define LOOKS_LIKE_CLOSURE_PTR(p) \
   (LOOKS_LIKE_INFO_PTR((UNTAG_CLOSURE((StgClosure *)(p)))->header.info))
366 /* -----------------------------------------------------------------------------
367 Macros for calculating how big a closure will be (used during allocation)
368 -------------------------------------------------------------------------- */
370 INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
371 { return sizeofW(StgPAP) + n_args; }
373 INLINE_HEADER StgOffset AP_sizeW ( nat n_args )
374 { return sizeofW(StgAP) + n_args; }
376 INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
377 { return sizeofW(StgAP_STACK) + size; }
379 INLINE_HEADER StgOffset CONSTR_sizeW( nat p, nat np )
380 { return sizeofW(StgHeader) + p + np; }
382 INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
383 { return sizeofW(StgSelector); }
385 INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
386 { return sizeofW(StgHeader)+MIN_PAYLOAD_SIZE; }
388 /* --------------------------------------------------------------------------
390 ------------------------------------------------------------------------*/
392 INLINE_HEADER StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
393 { return sizeofW(StgClosure)
394 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
395 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
397 INLINE_HEADER StgOffset thunk_sizeW_fromITBL( const StgInfoTable* itbl )
398 { return sizeofW(StgThunk)
399 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
400 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
402 INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
403 { return AP_STACK_sizeW(x->size); }
405 INLINE_HEADER StgOffset ap_sizeW( StgAP* x )
406 { return AP_sizeW(x->n_args); }
408 INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
409 { return PAP_sizeW(x->n_args); }
411 INLINE_HEADER StgOffset arr_words_sizeW( StgArrWords* x )
412 { return sizeofW(StgArrWords) + x->words; }
414 INLINE_HEADER StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
415 { return sizeofW(StgMutArrPtrs) + x->ptrs; }
417 INLINE_HEADER StgWord tso_sizeW ( StgTSO *tso )
418 { return TSO_STRUCT_SIZEW + tso->stack_size; }
420 INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
421 { return bco->size; }
/* Size in words of the closure at p, whose info table is 'info',
   dispatching on the closure type.
   NOTE(review): many case labels of this switch, the return type line,
   and the closing braces are missing from this excerpt — the bare
   'return' lines below belong to dropped case labels; confirm against
   the full file. */
closure_sizeW_ (StgClosure *p, StgInfoTable *info)
    switch (info->type) {
        // thunks / headers with 1-2 payload words
        return sizeofW(StgThunk) + 1;
        return sizeofW(StgHeader) + 1;
        return sizeofW(StgThunk) + 2;
        return sizeofW(StgHeader) + 2;
        // general thunk: size from info-table layout
        return thunk_sizeW_fromITBL(info);
        return THUNK_SELECTOR_sizeW();
        // applications: size recorded in the closure itself
        return ap_stack_sizeW((StgAP_STACK *)p);
        return ap_sizeW((StgAP *)p);
        return pap_sizeW((StgPAP *)p);
    case IND_OLDGEN_PERM:
        return sizeofW(StgInd);
        return arr_words_sizeW((StgArrWords *)p);
    case MUT_ARR_PTRS_CLEAN:
    case MUT_ARR_PTRS_DIRTY:
    case MUT_ARR_PTRS_FROZEN:
    case MUT_ARR_PTRS_FROZEN0:
        return mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
        return tso_sizeW((StgTSO *)p);
        return bco_sizeW((StgBCO *)p);
    // STM objects have fixed sizes
    case TVAR_WATCH_QUEUE:
        return sizeofW(StgTVarWatchQueue);
        return sizeofW(StgTVar);
        return sizeofW(StgTRecChunk);
        return sizeofW(StgTRecHeader);
    case ATOMIC_INVARIANT:
        return sizeofW(StgAtomicInvariant);
    case INVARIANT_CHECK_QUEUE:
        return sizeofW(StgInvariantCheckQueue);
        // everything else: size from info-table layout
        return sizeW_fromITBL(info);
// The definitive way to find the size, in words, of a heap-allocated closure
/* Convenience wrapper: fetches the info table for p and delegates to
   closure_sizeW_().
   NOTE(review): the return-type line and braces are not visible in this
   excerpt — confirm against the full file. */
closure_sizeW (StgClosure *p)
    return closure_sizeW_(p, get_itbl(p));
496 /* -----------------------------------------------------------------------------
497 Sizes of stack frames
498 -------------------------------------------------------------------------- */
/* Size in words of the stack frame at 'frame', computed from its
   return-info table.
   NOTE(review): the case labels of this switch and the enclosing
   braces are missing from this excerpt — the comments below attribute
   each return to its presumed label; confirm against the full file. */
INLINE_HEADER StgWord stack_frame_sizeW( StgClosure *frame )
    StgRetInfoTable *info;

    info = get_ret_itbl(frame);
    switch (info->i.type) {

        // presumably RET_DYN: liveness word determines ptr/non-ptr counts
        StgRetDyn *dyn = (StgRetDyn *)frame;
        return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
            RET_DYN_NONPTR_REGS_SIZE +
            RET_DYN_PTRS(dyn->liveness) + RET_DYN_NONPTRS(dyn->liveness);

        // RET_FUN: fixed header plus 'size' payload words
        return sizeofW(StgRetFun) + ((StgRetFun *)frame)->size;

        // large-bitmap frame: info word plus bitmap size
        return 1 + GET_LARGE_BITMAP(&info->i)->size;

        // RET_BCO: info word + BCO ptr, plus the BCO's bitmap size
        return 2 + BCO_BITMAP_SIZE((StgBCO *)((P_)frame)[1]);

        // small-bitmap frame: info word plus bitmap size
        return 1 + BITMAP_SIZE(info->i.layout.bitmap);
529 /* -----------------------------------------------------------------------------
531 -------------------------------------------------------------------------- */
533 extern void allocNurseries ( void );
534 extern void resetNurseries ( void );
535 extern void resizeNurseries ( nat blocks );
536 extern void resizeNurseriesFixed ( nat blocks );
537 extern lnat countNurseryBlocks ( void );
539 /* -----------------------------------------------------------------------------
541 -------------------------------------------------------------------------- */
543 typedef void (*evac_fn)(StgClosure **);
545 extern void threadPaused ( Capability *cap, StgTSO * );
546 extern StgClosure * isAlive ( StgClosure *p );
547 extern void markCAFs ( evac_fn evac );
548 extern void GetRoots ( evac_fn evac );
550 /* -----------------------------------------------------------------------------
551 Stats 'n' DEBUG stuff
552 -------------------------------------------------------------------------- */
554 extern ullong RTS_VAR(total_allocated);
556 extern lnat calcAllocated ( void );
557 extern lnat calcLiveBlocks ( void );
558 extern lnat calcLiveWords ( void );
559 extern lnat countOccupied ( bdescr *bd );
560 extern lnat calcNeeded ( void );
563 extern void memInventory(rtsBool show);
564 extern void checkSanity(void);
565 extern nat countBlocks(bdescr *);
566 extern void checkNurserySanity( step *stp );
570 void printMutOnceList(generation *gen);
571 void printMutableList(generation *gen);
574 /* ----------------------------------------------------------------------------
575 Storage manager internal APIs and globals
576 ------------------------------------------------------------------------- */
578 #define END_OF_STATIC_LIST stgCast(StgClosure*,1)
580 extern void newDynCAF(StgClosure *);
582 extern void move_TSO(StgTSO *src, StgTSO *dest);
583 extern StgTSO *relocate_stack(StgTSO *dest, ptrdiff_t diff);
585 extern StgWeak * RTS_VAR(old_weak_ptr_list);
586 extern StgWeak * RTS_VAR(weak_ptr_list);
587 extern StgClosure * RTS_VAR(caf_list);
588 extern StgClosure * RTS_VAR(revertible_caf_list);
589 extern StgTSO * RTS_VAR(resurrected_threads);
591 #endif /* STORAGE_H */