1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2004
 * External Storage Manager Interface
7 * ---------------------------------------------------------------------------*/
13 #include "OSThreads.h"
15 /* -----------------------------------------------------------------------------
18 * We support an arbitrary number of generations, with an arbitrary number
19 * of steps per generation. Notes (in no particular order):
21 * - all generations except the oldest should have two steps. This gives
22 * objects a decent chance to age before being promoted, and in
23 * particular will ensure that we don't end up with too many
24 * thunks being updated in older generations.
26 * - the oldest generation has one step. There's no point in aging
27 * objects in the oldest generation.
29 * - generation 0, step 0 (G0S0) is the allocation area. It is given
30 * a fixed set of blocks during initialisation, and these blocks
33 * - during garbage collection, each step which is an evacuation
34 * destination (i.e. all steps except G0S0) is allocated a to-space.
 *    Evacuated objects are allocated into the step's to-space until
36 * GC is finished, when the original step's contents may be freed
37 * and replaced by the to-space.
39 * - the mutable-list is per-generation (not per-step). G0 doesn't
40 * have one (since every garbage collection collects at least G0).
42 * - block descriptors contain pointers to both the step and the
43 * generation that the block belongs to, for convenience.
45 * - static objects are stored in per-generation lists. See GC.c for
46 * details of how we collect CAFs in the generational scheme.
48 * - large objects are per-step, and are promoted in the same way
49 * as small objects, except that we may allocate large objects into
50 * generation 1 initially.
52 * ------------------------------------------------------------------------- */
/* A "step" within a generation: the unit over which copying collection
 * operates (see the block comment above for the generational structure).
 * NOTE(review): this file chunk is incomplete -- the closing "} step;"
 * of this typedef and some interior lines are not visible here. */
typedef struct step_ {
    unsigned int no;                      /* step number */
    bdescr *     blocks;                  /* blocks in this step */
    unsigned int n_blocks;                /* number of blocks */
    struct step_ * to;                    /* destination step for live objects */
    struct generation_ * gen;             /* generation this step belongs to */
    unsigned int gen_no;                  /* generation number (cached) */
    bdescr *     large_objects;           /* large objects (doubly linked) */
    unsigned int n_large_blocks;          /* no. of blocks used by large objs */
    int          is_compacted;            /* compact this step? (old gen only) */

    /* During GC, if we are collecting this step, blocks and n_blocks
     * are copied into the following two fields.  After GC, these blocks
     * ... (NOTE(review): remainder of this comment is outside the
     * visible portion of this chunk) */
    bdescr *     old_blocks;              /* bdescr of first from-space block */
    unsigned int n_old_blocks;            /* number of blocks in from-space */

    /* temporary use during GC: */
    StgPtr       hp;                      /* next free locn in to-space */
    StgPtr       hpLim;                   /* end of current to-space block */
    bdescr *     hp_bd;                   /* bdescr of current to-space block */
    StgPtr       scavd_hp;                /* ... same as above, but already */
    StgPtr       scavd_hpLim;             /*     scavenged.                 */
    bdescr *     scan_bd;                 /* block currently being scanned */
    StgPtr       scan;                    /* scan pointer in current block */
    bdescr *     new_large_objects;       /* large objects collected so far */
    bdescr *     scavenged_large_objects; /* live large objs after GC (d-link) */
    unsigned int n_scavenged_large_blocks;/* size of above */
    bdescr *     bitmap;                  /* bitmap for compacting collection */
/* A generation: an array of steps plus the per-generation mutable list
 * (G0 has no mutable list -- see the block comment above).
 * NOTE(review): the closing "} generation;" of this typedef is outside
 * the visible portion of this file chunk. */
typedef struct generation_ {
    unsigned int no;                  /* generation number */
    step *       steps;               /* steps */
    unsigned int n_steps;             /* number of steps */
    unsigned int max_blocks;          /* max blocks in step 0 */
    bdescr *mut_list;                 /* mut objects in this gen (not G0) */

    /* temporary use during GC: */
    bdescr *saved_mut_list;           /* holds mut_list during collection --
                                       * TODO confirm exact protocol in GC.c */

    /* stats information */
    unsigned int collections;         /* number of times this gen was collected */
    unsigned int failed_promotions;
100 extern generation * RTS_VAR(generations);
102 extern generation * RTS_VAR(g0);
103 extern step * RTS_VAR(g0s0);
104 extern generation * RTS_VAR(oldest_gen);
106 /* -----------------------------------------------------------------------------
107 Initialisation / De-initialisation
108 -------------------------------------------------------------------------- */
110 extern void initStorage(void);
111 extern void exitStorage(void);
112 extern void freeStorage(void);
114 /* -----------------------------------------------------------------------------
117 StgPtr allocateInGen(generation *g, nat n)
118 Allocates a chunk of contiguous store
119 n words long in generation g,
120 returning a pointer to the first word.
   StgPtr allocate(nat n)       Equivalent to allocateInGen(g0)
125 StgPtr allocateLocal(Capability *cap, nat n)
126 Allocates memory from the nursery in
127 the current Capability. This can be
128 done without taking a global lock,
131 StgPtr allocatePinned(nat n) Allocates a chunk of contiguous store
132 n words long, which is at a fixed
133 address (won't be moved by GC).
134 Returns a pointer to the first word.
137 NOTE: the GC can't in general handle
138 pinned objects, so allocatePinned()
139 can only be used for ByteArrays at the
142 Don't forget to TICK_ALLOC_XXX(...)
143 after calling allocate or
144 allocatePinned, for the
145 benefit of the ticky-ticky profiler.
147 rtsBool doYouWantToGC(void) Returns True if the storage manager is
148 ready to perform a GC, False otherwise.
150 lnat allocatedBytes(void) Returns the number of bytes allocated
151 via allocate() since the last GC.
152 Used in the reporting of statistics.
154 -------------------------------------------------------------------------- */
156 extern StgPtr allocate ( nat n );
157 extern StgPtr allocateInGen ( generation *g, nat n );
158 extern StgPtr allocateLocal ( Capability *cap, nat n );
159 extern StgPtr allocatePinned ( nat n );
160 extern lnat allocatedBytes ( void );
162 extern bdescr * RTS_VAR(small_alloc_list);
163 extern bdescr * RTS_VAR(large_alloc_list);
164 extern bdescr * RTS_VAR(pinned_object_block);
166 extern StgPtr RTS_VAR(alloc_Hp);
167 extern StgPtr RTS_VAR(alloc_HpLim);
169 extern nat RTS_VAR(alloc_blocks);
170 extern nat RTS_VAR(alloc_blocks_lim);
/* Returns rtsTrue when the number of allocated blocks has reached the
 * soft limit, i.e. the storage manager is ready for a GC.
 * NOTE(review): the braces of this function body fall outside the
 * visible portion of this file chunk. */
INLINE_HEADER rtsBool
doYouWantToGC( void )
  return (alloc_blocks >= alloc_blocks_lim);
178 /* memory allocator for executable memory */
179 extern void *allocateExec (nat bytes);
180 extern void freeExec (void *p);
182 /* -----------------------------------------------------------------------------
183 Performing Garbage Collection
185 GarbageCollect(get_roots) Performs a garbage collection.
186 'get_roots' is called to find all the
187 roots that the system knows about.
189 StgClosure Called by get_roots on each root.
190 MarkRoot(StgClosure *p) Returns the new location of the root.
191 -------------------------------------------------------------------------- */
193 extern void GarbageCollect(rtsBool force_major_gc);
195 /* -----------------------------------------------------------------------------
196 Generational garbage collection support
198 recordMutable(StgPtr p) Informs the garbage collector that a
199 previously immutable object has
200 become (permanently) mutable. Used
201 by thawArray and similar.
203 updateWithIndirection(p1,p2) Updates the object at p1 with an
204 indirection pointing to p2. This is
205 normally called for objects in an old
206 generation (>0) when they are updated.
208 updateWithPermIndirection(p1,p2) As above but uses a permanent indir.
210 -------------------------------------------------------------------------- */
213 * Storage manager mutex
215 #if defined(THREADED_RTS)
216 extern Mutex sm_mutex;
217 extern Mutex atomic_modify_mutvar_mutex;
220 #if defined(THREADED_RTS)
221 #define ACQUIRE_SM_LOCK ACQUIRE_LOCK(&sm_mutex);
222 #define RELEASE_SM_LOCK RELEASE_LOCK(&sm_mutex);
223 #define ASSERT_SM_LOCK() ASSERT_LOCK_HELD(&sm_mutex);
225 #define ACQUIRE_SM_LOCK
226 #define RELEASE_SM_LOCK
227 #define ASSERT_SM_LOCK()
/* NOTE(review): the following region of this file chunk is heavily
 * fragmented -- function headers, braces, and several statements of the
 * recordMutable* family are missing from view.  Comments below annotate
 * only the visible lines. */

/* Append closure p to generation gen's mutable list (visible fragment). */
recordMutableGen(StgClosure *p, generation *gen)
    /* current mutable-list block is full: allocate and chain a new one */
    if (bd->free >= bd->start + BLOCK_SIZE_W) {
	new_bd = allocBlock();
    /* record the closure in the mutable-list block */
    *bd->free++ = (StgWord)p;

/* Locking variant -- presumably brackets recordMutableGen with the SM
 * lock; body not fully visible, confirm against the original file. */
recordMutableGenLock(StgClosure *p, generation *gen)
    recordMutableGen(p,gen);

/* Record p as mutable in the generation its block belongs to. */
recordMutable(StgClosure *p)
    ASSERT(closure_MUTABLE(p));
    /* G0 has no mutable list, so only generations > 0 are recorded */
    if (bd->gen_no > 0) recordMutableGen(p, &RTS_DEREF(generations)[bd->gen_no]);

recordMutableLock(StgClosure *p)
272 /* -----------------------------------------------------------------------------
273 The CAF table - used to let us revert CAFs in GHCi
274 -------------------------------------------------------------------------- */
276 /* set to disable CAF garbage collection in GHCi. */
277 /* (needed when dynamic libraries are used). */
278 extern rtsBool keepCAFs;
280 /* -----------------------------------------------------------------------------
281 This is the write barrier for MUT_VARs, a.k.a. IORefs. A
282 MUT_VAR_CLEAN object is not on the mutable list; a MUT_VAR_DIRTY
283 is. When written to, a MUT_VAR_CLEAN turns into a MUT_VAR_DIRTY
284 and is put on the mutable list.
285 -------------------------------------------------------------------------- */
287 void dirty_MUT_VAR(StgRegTable *reg, StgClosure *p);
289 /* -----------------------------------------------------------------------------
290 DEBUGGING predicates for pointers
292 LOOKS_LIKE_INFO_PTR(p) returns False if p is definitely not an info ptr
293 LOOKS_LIKE_CLOSURE_PTR(p) returns False if p is definitely not a closure ptr
295 These macros are complete but not sound. That is, they might
296 return false positives. Do not rely on them to distinguish info
297 pointers from closure pointers, for example.
299 We don't use address-space predicates these days, for portability
300 reasons, and the fact that code/data can be scattered about the
301 address space in a dynamically-linked environment. Our best option
302 is to look at the alleged info table and see whether it seems to
304 -------------------------------------------------------------------------- */
/* Non-NULL p that also passes the NOT_NULL variant below. */
#define LOOKS_LIKE_INFO_PTR(p) \
  (p && LOOKS_LIKE_INFO_PTR_NOT_NULL(p))

/* Treat p as an info pointer and sanity-check its closure-type field:
 * it must lie strictly between INVALID_OBJECT and N_CLOSURE_TYPES.
 * Complete but not sound -- false positives are possible (see above). */
#define LOOKS_LIKE_INFO_PTR_NOT_NULL(p) \
   (((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type != INVALID_OBJECT && \
    ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type < N_CLOSURE_TYPES)

/* A plausible closure pointer is one whose (untagged) header contains a
 * plausible info pointer. */
#define LOOKS_LIKE_CLOSURE_PTR(p) \
  (LOOKS_LIKE_INFO_PTR((UNTAG_CLOSURE((StgClosure *)(p)))->header.info))
316 /* -----------------------------------------------------------------------------
317 Macros for calculating how big a closure will be (used during allocation)
318 -------------------------------------------------------------------------- */
320 INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
321 { return sizeofW(StgPAP) + n_args; }
323 INLINE_HEADER StgOffset AP_sizeW ( nat n_args )
324 { return sizeofW(StgAP) + n_args; }
326 INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
327 { return sizeofW(StgAP_STACK) + size; }
329 INLINE_HEADER StgOffset CONSTR_sizeW( nat p, nat np )
330 { return sizeofW(StgHeader) + p + np; }
332 INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
333 { return sizeofW(StgSelector); }
335 INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
336 { return sizeofW(StgHeader)+MIN_PAYLOAD_SIZE; }
338 /* --------------------------------------------------------------------------
340 ------------------------------------------------------------------------*/
342 INLINE_HEADER StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
343 { return sizeofW(StgClosure)
344 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
345 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
347 INLINE_HEADER StgOffset thunk_sizeW_fromITBL( const StgInfoTable* itbl )
348 { return sizeofW(StgThunk)
349 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
350 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
352 INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
353 { return AP_STACK_sizeW(x->size); }
355 INLINE_HEADER StgOffset ap_sizeW( StgAP* x )
356 { return AP_sizeW(x->n_args); }
358 INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
359 { return PAP_sizeW(x->n_args); }
361 INLINE_HEADER StgOffset arr_words_sizeW( StgArrWords* x )
362 { return sizeofW(StgArrWords) + x->words; }
364 INLINE_HEADER StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
365 { return sizeofW(StgMutArrPtrs) + x->ptrs; }
367 INLINE_HEADER StgWord tso_sizeW ( StgTSO *tso )
368 { return TSO_STRUCT_SIZEW + tso->stack_size; }
370 INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
371 { return bco->size; }
/* Size in words of closure p, given its (already fetched) info table.
 * Dispatches on the closure type to the per-shape helpers above.
 * NOTE(review): this chunk is incomplete -- the function's return-type
 * line, most `case` labels, and the closing braces are not visible
 * here; each visible `return` is the body of a missing case label. */
closure_sizeW_ (StgClosure *p, StgInfoTable *info)
    switch (info->type) {
	return sizeofW(StgThunk) + 1;
	return sizeofW(StgHeader) + 1;
	return sizeofW(StgThunk) + 2;
	return sizeofW(StgHeader) + 2;
	return thunk_sizeW_fromITBL(info);
	return THUNK_SELECTOR_sizeW();
	return ap_stack_sizeW((StgAP_STACK *)p);
	return ap_sizeW((StgAP *)p);
	return pap_sizeW((StgPAP *)p);
    case IND_OLDGEN_PERM:
	return sizeofW(StgInd);
	return arr_words_sizeW((StgArrWords *)p);
    case MUT_ARR_PTRS_CLEAN:
    case MUT_ARR_PTRS_DIRTY:
    case MUT_ARR_PTRS_FROZEN:
    case MUT_ARR_PTRS_FROZEN0:
	return mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
	return tso_sizeW((StgTSO *)p);
	return bco_sizeW((StgBCO *)p);
    case TVAR_WATCH_QUEUE:
	return sizeofW(StgTVarWatchQueue);
	return sizeofW(StgTVar);
	return sizeofW(StgTRecChunk);
	return sizeofW(StgTRecHeader);
    case ATOMIC_INVARIANT:
	return sizeofW(StgAtomicInvariant);
    case INVARIANT_CHECK_QUEUE:
	return sizeofW(StgInvariantCheckQueue);
	/* all remaining closure types: generic layout from the info table */
	return sizeW_fromITBL(info);
// The definitive way to find the size, in words, of a heap-allocated closure
// (fetches the info table itself, then defers to closure_sizeW_ above).
// NOTE(review): the return-type line and braces of this function are
// outside the visible portion of this file chunk.
closure_sizeW (StgClosure *p)
    return closure_sizeW_(p, get_itbl(p));
446 /* -----------------------------------------------------------------------------
447 Sizes of stack frames
448 -------------------------------------------------------------------------- */
/* Size in words of a stack frame, derived from its return-address info
 * table.  NOTE(review): this chunk is incomplete -- braces and the
 * `case` labels of this switch are not visible here; each visible
 * `return` belongs to a missing case (RET_DYN, RET_FUN, large-bitmap,
 * RET_BCO, and small-bitmap frames, judging by the expressions --
 * confirm against the original file). */
INLINE_HEADER StgWord stack_frame_sizeW( StgClosure *frame )
    StgRetInfoTable *info;

    info = get_ret_itbl(frame);
    switch (info->i.type) {
	StgRetDyn *dyn = (StgRetDyn *)frame;
	return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
	    RET_DYN_NONPTR_REGS_SIZE +
	    RET_DYN_PTRS(dyn->liveness) + RET_DYN_NONPTRS(dyn->liveness);
	return sizeofW(StgRetFun) + ((StgRetFun *)frame)->size;
	return 1 + GET_LARGE_BITMAP(&info->i)->size;
	return 2 + BCO_BITMAP_SIZE((StgBCO *)((P_)frame)[1]);
	return 1 + BITMAP_SIZE(info->i.layout.bitmap);
479 /* -----------------------------------------------------------------------------
481 -------------------------------------------------------------------------- */
483 extern void allocNurseries ( void );
484 extern void resetNurseries ( void );
485 extern void resizeNurseries ( nat blocks );
486 extern void resizeNurseriesFixed ( nat blocks );
487 extern lnat countNurseryBlocks ( void );
489 /* -----------------------------------------------------------------------------
491 -------------------------------------------------------------------------- */
493 typedef void (*evac_fn)(StgClosure **);
495 extern void threadPaused ( Capability *cap, StgTSO * );
496 extern StgClosure * isAlive ( StgClosure *p );
497 extern void markCAFs ( evac_fn evac );
499 /* -----------------------------------------------------------------------------
500 Stats 'n' DEBUG stuff
501 -------------------------------------------------------------------------- */
503 extern ullong RTS_VAR(total_allocated);
505 extern lnat calcAllocated ( void );
506 extern lnat calcLive ( void );
507 extern lnat calcNeeded ( void );
510 extern void memInventory(void);
511 extern void checkSanity(void);
512 extern nat countBlocks(bdescr *);
513 extern void checkNurserySanity( step *stp );
517 void printMutOnceList(generation *gen);
518 void printMutableList(generation *gen);
521 /* ----------------------------------------------------------------------------
522 Storage manager internal APIs and globals
523 ------------------------------------------------------------------------- */
525 #define END_OF_STATIC_LIST stgCast(StgClosure*,1)
527 extern void newDynCAF(StgClosure *);
529 extern void move_TSO(StgTSO *src, StgTSO *dest);
530 extern StgTSO *relocate_stack(StgTSO *dest, ptrdiff_t diff);
532 extern StgClosure * RTS_VAR(scavenged_static_objects);
533 extern StgWeak * RTS_VAR(old_weak_ptr_list);
534 extern StgWeak * RTS_VAR(weak_ptr_list);
535 extern StgClosure * RTS_VAR(caf_list);
536 extern StgClosure * RTS_VAR(revertible_caf_list);
537 extern StgTSO * RTS_VAR(resurrected_threads);
539 #endif /* STORAGE_H */