1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2004
 * External Storage Manager Interface
7 * ---------------------------------------------------------------------------*/
13 #include "OSThreads.h"
16 /* -----------------------------------------------------------------------------
19 * We support an arbitrary number of generations, with an arbitrary number
20 * of steps per generation. Notes (in no particular order):
22 * - all generations except the oldest should have two steps. This gives
23 * objects a decent chance to age before being promoted, and in
24 * particular will ensure that we don't end up with too many
25 * thunks being updated in older generations.
27 * - the oldest generation has one step. There's no point in aging
28 * objects in the oldest generation.
30 * - generation 0, step 0 (G0S0) is the allocation area. It is given
31 * a fixed set of blocks during initialisation, and these blocks
34 * - during garbage collection, each step which is an evacuation
35 * destination (i.e. all steps except G0S0) is allocated a to-space.
36 * evacuated objects are allocated into the step's to-space until
37 * GC is finished, when the original step's contents may be freed
38 * and replaced by the to-space.
40 * - the mutable-list is per-generation (not per-step). G0 doesn't
41 * have one (since every garbage collection collects at least G0).
43 * - block descriptors contain pointers to both the step and the
44 * generation that the block belongs to, for convenience.
46 * - static objects are stored in per-generation lists. See GC.c for
47 * details of how we collect CAFs in the generational scheme.
49 * - large objects are per-step, and are promoted in the same way
50 * as small objects, except that we may allocate large objects into
51 * generation 1 initially.
53 * ------------------------------------------------------------------------- */
55 typedef struct step_ {
56 unsigned int no; // step number in this generation
57 unsigned int abs_no; // absolute step number
59 struct generation_ * gen; // generation this step belongs to
60 unsigned int gen_no; // generation number (cached)
62 bdescr * blocks; // blocks in this step
63 unsigned int n_blocks; // number of blocks
64 unsigned int n_words; // number of words
66 struct step_ * to; // destination step for live objects
68 bdescr * large_objects; // large objects (doubly linked)
69 unsigned int n_large_blocks; // no. of blocks used by large objs
71 StgTSO * threads; // threads in this step
72 // linked via global_link
74 // ------------------------------------
75 // Fields below are used during GC only
77 // During GC, if we are collecting this step, blocks and n_blocks
78 // are copied into the following two fields. After GC, these blocks
81 #if defined(THREADED_RTS)
82 char pad[128]; // make sure the following is
83 // on a separate cache line.
84 SpinLock sync_large_objects; // lock for large_objects
85 // and scavenged_large_objects
88 int mark; // mark (not copy)? (old gen only)
89 int compact; // compact (not sweep)? (old gen only)
91 bdescr * old_blocks; // bdescr of first from-space block
92 unsigned int n_old_blocks; // number of blocks in from-space
93 unsigned int live_estimate; // for sweeping: estimate of live data
95 bdescr * part_blocks; // partially-full scanned blocks
96 unsigned int n_part_blocks; // count of above
98 bdescr * scavenged_large_objects; // live large objs after GC (d-link)
99 unsigned int n_scavenged_large_blocks; // size (not count) of above
101 bdescr * bitmap; // bitmap for compacting collection
103 StgTSO * old_threads;
108 typedef struct generation_ {
109 unsigned int no; // generation number
110 step * steps; // steps
111 unsigned int n_steps; // number of steps
112 unsigned int max_blocks; // max blocks in step 0
113 bdescr *mut_list; // mut objects in this gen (not G0)
116 unsigned int collections;
117 unsigned int par_collections;
118 unsigned int failed_promotions;
120 // temporary use during GC:
121 bdescr *saved_mut_list;
124 extern generation * RTS_VAR(generations);
126 extern generation * RTS_VAR(g0);
127 extern step * RTS_VAR(g0s0);
128 extern generation * RTS_VAR(oldest_gen);
129 extern step * RTS_VAR(all_steps);
130 extern nat RTS_VAR(total_steps);
132 /* -----------------------------------------------------------------------------
133 Initialisation / De-initialisation
134 -------------------------------------------------------------------------- */
136 extern void initStorage(void);
137 extern void exitStorage(void);
138 extern void freeStorage(void);
140 /* -----------------------------------------------------------------------------
143 StgPtr allocateInGen(generation *g, nat n)
144 Allocates a chunk of contiguous store
145 n words long in generation g,
146 returning a pointer to the first word.
   StgPtr allocate(nat n)       Equivalent to allocateInGen(g0,n)
151 StgPtr allocateLocal(Capability *cap, nat n)
152 Allocates memory from the nursery in
153 the current Capability. This can be
154 done without taking a global lock,
157 StgPtr allocatePinned(nat n) Allocates a chunk of contiguous store
158 n words long, which is at a fixed
159 address (won't be moved by GC).
160 Returns a pointer to the first word.
163 NOTE: the GC can't in general handle
164 pinned objects, so allocatePinned()
165 can only be used for ByteArrays at the
168 Don't forget to TICK_ALLOC_XXX(...)
169 after calling allocate or
170 allocatePinned, for the
171 benefit of the ticky-ticky profiler.
173 rtsBool doYouWantToGC(void) Returns True if the storage manager is
174 ready to perform a GC, False otherwise.
176 lnat allocatedBytes(void) Returns the number of bytes allocated
177 via allocate() since the last GC.
178 Used in the reporting of statistics.
180 -------------------------------------------------------------------------- */
182 extern StgPtr allocate ( lnat n );
183 extern StgPtr allocateInGen ( generation *g, lnat n );
184 extern StgPtr allocateLocal ( Capability *cap, lnat n );
185 extern StgPtr allocatePinned ( lnat n );
186 extern lnat allocatedBytes ( void );
188 extern bdescr * RTS_VAR(small_alloc_list);
189 extern bdescr * RTS_VAR(large_alloc_list);
190 extern bdescr * RTS_VAR(pinned_object_block);
192 extern nat RTS_VAR(alloc_blocks);
193 extern nat RTS_VAR(alloc_blocks_lim);
195 INLINE_HEADER rtsBool
196 doYouWantToGC( void )
198 return (alloc_blocks >= alloc_blocks_lim);
201 /* memory allocator for executable memory */
202 extern void* allocateExec(unsigned int len, void **exec_addr);
203 extern void freeExec (void *p);
205 /* for splitting blocks groups in two */
206 extern bdescr * splitLargeBlock (bdescr *bd, nat blocks);
208 /* -----------------------------------------------------------------------------
209 Performing Garbage Collection
211 GarbageCollect(get_roots) Performs a garbage collection.
212 'get_roots' is called to find all the
213 roots that the system knows about.
216 -------------------------------------------------------------------------- */
218 extern void GarbageCollect(rtsBool force_major_gc, nat gc_type, Capability *cap);
220 /* -----------------------------------------------------------------------------
221 Generational garbage collection support
223 recordMutable(StgPtr p) Informs the garbage collector that a
224 previously immutable object has
225 become (permanently) mutable. Used
226 by thawArray and similar.
228 updateWithIndirection(p1,p2) Updates the object at p1 with an
229 indirection pointing to p2. This is
230 normally called for objects in an old
231 generation (>0) when they are updated.
233 updateWithPermIndirection(p1,p2) As above but uses a permanent indir.
235 -------------------------------------------------------------------------- */
238 * Storage manager mutex
240 #if defined(THREADED_RTS)
241 extern Mutex sm_mutex;
242 extern Mutex atomic_modify_mutvar_mutex;
245 #if defined(THREADED_RTS)
246 #define ACQUIRE_SM_LOCK ACQUIRE_LOCK(&sm_mutex);
247 #define RELEASE_SM_LOCK RELEASE_LOCK(&sm_mutex);
248 #define ASSERT_SM_LOCK() ASSERT_LOCK_HELD(&sm_mutex);
250 #define ACQUIRE_SM_LOCK
251 #define RELEASE_SM_LOCK
252 #define ASSERT_SM_LOCK()
258 recordMutableGen(StgClosure *p, nat gen_no)
262 bd = generations[gen_no].mut_list;
263 if (bd->free >= bd->start + BLOCK_SIZE_W) {
265 new_bd = allocBlock();
268 generations[gen_no].mut_list = bd;
270 *bd->free++ = (StgWord)p;
275 recordMutableGenLock(StgClosure *p, nat gen_no)
278 recordMutableGen(p,gen_no);
283 recordMutable(StgClosure *p)
286 ASSERT(closure_MUTABLE(p));
288 if (bd->gen_no > 0) recordMutableGen(p, bd->gen_no);
292 recordMutableLock(StgClosure *p)
299 #endif // !IN_STG_CODE
301 /* -----------------------------------------------------------------------------
302 The CAF table - used to let us revert CAFs in GHCi
303 -------------------------------------------------------------------------- */
305 /* set to disable CAF garbage collection in GHCi. */
306 /* (needed when dynamic libraries are used). */
307 extern rtsBool keepCAFs;
309 /* -----------------------------------------------------------------------------
310 This is the write barrier for MUT_VARs, a.k.a. IORefs. A
311 MUT_VAR_CLEAN object is not on the mutable list; a MUT_VAR_DIRTY
312 is. When written to, a MUT_VAR_CLEAN turns into a MUT_VAR_DIRTY
313 and is put on the mutable list.
314 -------------------------------------------------------------------------- */
316 void dirty_MUT_VAR(StgRegTable *reg, StgClosure *p);
318 /* -----------------------------------------------------------------------------
319 DEBUGGING predicates for pointers
321 LOOKS_LIKE_INFO_PTR(p) returns False if p is definitely not an info ptr
322 LOOKS_LIKE_CLOSURE_PTR(p) returns False if p is definitely not a closure ptr
324 These macros are complete but not sound. That is, they might
325 return false positives. Do not rely on them to distinguish info
326 pointers from closure pointers, for example.
328 We don't use address-space predicates these days, for portability
329 reasons, and the fact that code/data can be scattered about the
330 address space in a dynamically-linked environment. Our best option
331 is to look at the alleged info table and see whether it seems to
333 -------------------------------------------------------------------------- */
335 INLINE_HEADER rtsBool LOOKS_LIKE_INFO_PTR (StgWord p);
336 INLINE_HEADER rtsBool LOOKS_LIKE_CLOSURE_PTR (void *p); // XXX StgClosure*
338 /* -----------------------------------------------------------------------------
339 Macros for calculating how big a closure will be (used during allocation)
340 -------------------------------------------------------------------------- */
342 INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
343 { return sizeofW(StgPAP) + n_args; }
345 INLINE_HEADER StgOffset AP_sizeW ( nat n_args )
346 { return sizeofW(StgAP) + n_args; }
348 INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
349 { return sizeofW(StgAP_STACK) + size; }
351 INLINE_HEADER StgOffset CONSTR_sizeW( nat p, nat np )
352 { return sizeofW(StgHeader) + p + np; }
354 INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
355 { return sizeofW(StgSelector); }
357 INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
358 { return sizeofW(StgHeader)+MIN_PAYLOAD_SIZE; }
360 /* --------------------------------------------------------------------------
362 ------------------------------------------------------------------------*/
364 INLINE_HEADER StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
365 { return sizeofW(StgClosure)
366 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
367 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
369 INLINE_HEADER StgOffset thunk_sizeW_fromITBL( const StgInfoTable* itbl )
370 { return sizeofW(StgThunk)
371 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
372 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
374 INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
375 { return AP_STACK_sizeW(x->size); }
377 INLINE_HEADER StgOffset ap_sizeW( StgAP* x )
378 { return AP_sizeW(x->n_args); }
380 INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
381 { return PAP_sizeW(x->n_args); }
383 INLINE_HEADER StgOffset arr_words_sizeW( StgArrWords* x )
384 { return sizeofW(StgArrWords) + x->words; }
386 INLINE_HEADER StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
387 { return sizeofW(StgMutArrPtrs) + x->ptrs; }
389 INLINE_HEADER StgWord tso_sizeW ( StgTSO *tso )
390 { return TSO_STRUCT_SIZEW + tso->stack_size; }
392 INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
393 { return bco->size; }
396 closure_sizeW_ (StgClosure *p, StgInfoTable *info)
398 switch (info->type) {
401 return sizeofW(StgThunk) + 1;
406 return sizeofW(StgHeader) + 1;
410 return sizeofW(StgThunk) + 2;
417 return sizeofW(StgHeader) + 2;
419 return thunk_sizeW_fromITBL(info);
421 return THUNK_SELECTOR_sizeW();
423 return ap_stack_sizeW((StgAP_STACK *)p);
425 return ap_sizeW((StgAP *)p);
427 return pap_sizeW((StgPAP *)p);
431 case IND_OLDGEN_PERM:
432 return sizeofW(StgInd);
434 return arr_words_sizeW((StgArrWords *)p);
435 case MUT_ARR_PTRS_CLEAN:
436 case MUT_ARR_PTRS_DIRTY:
437 case MUT_ARR_PTRS_FROZEN:
438 case MUT_ARR_PTRS_FROZEN0:
439 return mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
441 return tso_sizeW((StgTSO *)p);
443 return bco_sizeW((StgBCO *)p);
444 case TVAR_WATCH_QUEUE:
445 return sizeofW(StgTVarWatchQueue);
447 return sizeofW(StgTVar);
449 return sizeofW(StgTRecChunk);
451 return sizeofW(StgTRecHeader);
452 case ATOMIC_INVARIANT:
453 return sizeofW(StgAtomicInvariant);
454 case INVARIANT_CHECK_QUEUE:
455 return sizeofW(StgInvariantCheckQueue);
457 return sizeW_fromITBL(info);
461 // The definitive way to find the size, in words, of a heap-allocated closure
463 closure_sizeW (StgClosure *p)
465 return closure_sizeW_(p, get_itbl(p));
468 /* -----------------------------------------------------------------------------
469 Sizes of stack frames
470 -------------------------------------------------------------------------- */
472 INLINE_HEADER StgWord stack_frame_sizeW( StgClosure *frame )
474 StgRetInfoTable *info;
476 info = get_ret_itbl(frame);
477 switch (info->i.type) {
481 StgRetDyn *dyn = (StgRetDyn *)frame;
482 return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
483 RET_DYN_NONPTR_REGS_SIZE +
484 RET_DYN_PTRS(dyn->liveness) + RET_DYN_NONPTRS(dyn->liveness);
488 return sizeofW(StgRetFun) + ((StgRetFun *)frame)->size;
491 return 1 + GET_LARGE_BITMAP(&info->i)->size;
494 return 2 + BCO_BITMAP_SIZE((StgBCO *)((P_)frame)[1]);
497 return 1 + BITMAP_SIZE(info->i.layout.bitmap);
501 /* -----------------------------------------------------------------------------
503 -------------------------------------------------------------------------- */
505 extern void allocNurseries ( void );
506 extern void resetNurseries ( void );
507 extern void resizeNurseries ( nat blocks );
508 extern void resizeNurseriesFixed ( nat blocks );
509 extern lnat countNurseryBlocks ( void );
512 /* -----------------------------------------------------------------------------
514 -------------------------------------------------------------------------- */
516 typedef void (*evac_fn)(void *user, StgClosure **root);
518 extern void threadPaused ( Capability *cap, StgTSO * );
519 extern StgClosure * isAlive ( StgClosure *p );
520 extern void markCAFs ( evac_fn evac, void *user );
521 extern void GetRoots ( evac_fn evac, void *user );
523 /* -----------------------------------------------------------------------------
524 Stats 'n' DEBUG stuff
525 -------------------------------------------------------------------------- */
527 extern ullong RTS_VAR(total_allocated);
529 extern lnat calcAllocated ( void );
530 extern lnat calcLiveBlocks ( void );
531 extern lnat calcLiveWords ( void );
532 extern lnat countOccupied ( bdescr *bd );
533 extern lnat calcNeeded ( void );
536 extern void memInventory(rtsBool show);
537 extern void checkSanity(void);
538 extern nat countBlocks(bdescr *);
539 extern void checkNurserySanity( step *stp );
543 void printMutOnceList(generation *gen);
544 void printMutableList(generation *gen);
547 /* ----------------------------------------------------------------------------
548 Storage manager internal APIs and globals
549 ------------------------------------------------------------------------- */
551 #define END_OF_STATIC_LIST stgCast(StgClosure*,1)
553 extern void newDynCAF(StgClosure *);
555 extern void move_TSO(StgTSO *src, StgTSO *dest);
556 extern StgTSO *relocate_stack(StgTSO *dest, ptrdiff_t diff);
558 extern StgWeak * RTS_VAR(old_weak_ptr_list);
559 extern StgWeak * RTS_VAR(weak_ptr_list);
560 extern StgClosure * RTS_VAR(caf_list);
561 extern StgClosure * RTS_VAR(revertible_caf_list);
562 extern StgTSO * RTS_VAR(resurrected_threads);
564 #define IS_FORWARDING_PTR(p) ((((StgWord)p) & 1) != 0)
565 #define MK_FORWARDING_PTR(p) (((StgWord)p) | 1)
566 #define UN_FORWARDING_PTR(p) (((StgWord)p) - 1)
568 INLINE_HEADER rtsBool LOOKS_LIKE_INFO_PTR_NOT_NULL (StgWord p)
570 StgInfoTable *info = INFO_PTR_TO_STRUCT(p);
571 return info->type != INVALID_OBJECT && info->type < N_CLOSURE_TYPES;
574 INLINE_HEADER rtsBool LOOKS_LIKE_INFO_PTR (StgWord p)
576 return p && (IS_FORWARDING_PTR(p) || LOOKS_LIKE_INFO_PTR_NOT_NULL(p));
579 INLINE_HEADER rtsBool LOOKS_LIKE_CLOSURE_PTR (void *p)
581 return LOOKS_LIKE_INFO_PTR((StgWord)(UNTAG_CLOSURE((StgClosure *)(p)))->header.info);
584 #endif /* STORAGE_H */