1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2004
 * External Storage Manager Interface
7 * ---------------------------------------------------------------------------*/
13 #include "OSThreads.h"
15 /* -----------------------------------------------------------------------------
18 * We support an arbitrary number of generations, with an arbitrary number
19 * of steps per generation. Notes (in no particular order):
21 * - all generations except the oldest should have two steps. This gives
22 * objects a decent chance to age before being promoted, and in
23 * particular will ensure that we don't end up with too many
24 * thunks being updated in older generations.
26 * - the oldest generation has one step. There's no point in aging
27 * objects in the oldest generation.
29 * - generation 0, step 0 (G0S0) is the allocation area. It is given
30 * a fixed set of blocks during initialisation, and these blocks
33 * - during garbage collection, each step which is an evacuation
34 * destination (i.e. all steps except G0S0) is allocated a to-space.
 *    Evacuated objects are allocated into the step's to-space until
36 * GC is finished, when the original step's contents may be freed
37 * and replaced by the to-space.
39 * - the mutable-list is per-generation (not per-step). G0 doesn't
40 * have one (since every garbage collection collects at least G0).
42 * - block descriptors contain pointers to both the step and the
43 * generation that the block belongs to, for convenience.
45 * - static objects are stored in per-generation lists. See GC.c for
46 * details of how we collect CAFs in the generational scheme.
48 * - large objects are per-step, and are promoted in the same way
49 * as small objects, except that we may allocate large objects into
50 * generation 1 initially.
52 * ------------------------------------------------------------------------- */
54 typedef struct step_ {
55 unsigned int no; /* step number */
56 bdescr * blocks; /* blocks in this step */
57 unsigned int n_blocks; /* number of blocks */
58 struct step_ * to; /* destination step for live objects */
59 struct generation_ * gen; /* generation this step belongs to */
60 unsigned int gen_no; /* generation number (cached) */
61 bdescr * large_objects; /* large objects (doubly linked) */
62 unsigned int n_large_blocks; /* no. of blocks used by large objs */
63 int is_compacted; /* compact this step? (old gen only) */
65 /* During GC, if we are collecting this step, blocks and n_blocks
66 * are copied into the following two fields. After GC, these blocks
68 bdescr * old_blocks; /* bdescr of first from-space block */
69 unsigned int n_old_blocks; /* number of blocks in from-space */
71 /* temporary use during GC: */
72 StgPtr hp; /* next free locn in to-space */
73 StgPtr hpLim; /* end of current to-space block */
74 bdescr * hp_bd; /* bdescr of current to-space block */
75 StgPtr scavd_hp; /* ... same as above, but already */
76 StgPtr scavd_hpLim; /* scavenged. */
77 bdescr * scan_bd; /* block currently being scanned */
78 StgPtr scan; /* scan pointer in current block */
79 bdescr * new_large_objects; /* large objects collected so far */
80 bdescr * scavenged_large_objects; /* live large objs after GC (d-link) */
81 unsigned int n_scavenged_large_blocks;/* size of above */
82 bdescr * bitmap; /* bitmap for compacting collection */
85 typedef struct generation_ {
86 unsigned int no; /* generation number */
87 step * steps; /* steps */
88 unsigned int n_steps; /* number of steps */
89 unsigned int max_blocks; /* max blocks in step 0 */
90 bdescr *mut_list; /* mut objects in this gen (not G0)*/
92 /* temporary use during GC: */
93 bdescr *saved_mut_list;
95 /* stats information */
96 unsigned int collections;
97 unsigned int failed_promotions;
100 extern generation * RTS_VAR(generations);
102 extern generation * RTS_VAR(g0);
103 extern step * RTS_VAR(g0s0);
104 extern generation * RTS_VAR(oldest_gen);
106 /* -----------------------------------------------------------------------------
107 Initialisation / De-initialisation
108 -------------------------------------------------------------------------- */
110 extern void initStorage(void);
111 extern void exitStorage(void);
112 extern void freeStorage(void);
114 /* -----------------------------------------------------------------------------
117 StgPtr allocate(nat n) Allocates a chunk of contiguous store
118 n words long, returning a pointer to
119 the first word. Always succeeds.
121 StgPtr allocatePinned(nat n) Allocates a chunk of contiguous store
122 n words long, which is at a fixed
123 address (won't be moved by GC).
124 Returns a pointer to the first word.
127 NOTE: the GC can't in general handle
128 pinned objects, so allocatePinned()
129 can only be used for ByteArrays at the
132 Don't forget to TICK_ALLOC_XXX(...)
133 after calling allocate or
134 allocatePinned, for the
135 benefit of the ticky-ticky profiler.
137 rtsBool doYouWantToGC(void) Returns True if the storage manager is
138 ready to perform a GC, False otherwise.
140 lnat allocated_bytes(void) Returns the number of bytes allocated
141 via allocate() since the last GC.
142 Used in the reporting of statistics.
144 THREADED_RTS: allocate and doYouWantToGC can be used from STG code, they are
145 surrounded by a mutex.
146 -------------------------------------------------------------------------- */
148 extern StgPtr allocate ( nat n );
149 extern StgPtr allocateLocal ( Capability *cap, nat n );
150 extern StgPtr allocatePinned ( nat n );
151 extern lnat allocated_bytes ( void );
153 extern bdescr * RTS_VAR(small_alloc_list);
154 extern bdescr * RTS_VAR(large_alloc_list);
155 extern bdescr * RTS_VAR(pinned_object_block);
157 extern StgPtr RTS_VAR(alloc_Hp);
158 extern StgPtr RTS_VAR(alloc_HpLim);
160 extern nat RTS_VAR(alloc_blocks);
161 extern nat RTS_VAR(alloc_blocks_lim);
163 INLINE_HEADER rtsBool
164 doYouWantToGC( void )
166 return (alloc_blocks >= alloc_blocks_lim);
169 /* memory allocator for executable memory */
170 extern void *allocateExec (nat bytes);
171 extern void freeExec (void *p);
173 /* -----------------------------------------------------------------------------
174 Performing Garbage Collection
176 GarbageCollect(get_roots) Performs a garbage collection.
177 'get_roots' is called to find all the
178 roots that the system knows about.
180 StgClosure Called by get_roots on each root.
181 MarkRoot(StgClosure *p) Returns the new location of the root.
182 -------------------------------------------------------------------------- */
184 extern void GarbageCollect(void (*get_roots)(evac_fn),rtsBool force_major_gc);
186 /* -----------------------------------------------------------------------------
187 Generational garbage collection support
189 recordMutable(StgPtr p) Informs the garbage collector that a
190 previously immutable object has
191 become (permanently) mutable. Used
192 by thawArray and similar.
194 updateWithIndirection(p1,p2) Updates the object at p1 with an
195 indirection pointing to p2. This is
196 normally called for objects in an old
197 generation (>0) when they are updated.
199 updateWithPermIndirection(p1,p2) As above but uses a permanent indir.
201 -------------------------------------------------------------------------- */
204 * Storage manager mutex
206 #if defined(THREADED_RTS)
207 extern Mutex sm_mutex;
208 extern Mutex atomic_modify_mutvar_mutex;
211 #if defined(THREADED_RTS)
212 #define ACQUIRE_SM_LOCK ACQUIRE_LOCK(&sm_mutex);
213 #define RELEASE_SM_LOCK RELEASE_LOCK(&sm_mutex);
214 #define ASSERT_SM_LOCK() ASSERT_LOCK_HELD(&sm_mutex);
216 #define ACQUIRE_SM_LOCK
217 #define RELEASE_SM_LOCK
218 #define ASSERT_SM_LOCK()
222 recordMutableGen(StgClosure *p, generation *gen)
227 if (bd->free >= bd->start + BLOCK_SIZE_W) {
229 new_bd = allocBlock();
234 *bd->free++ = (StgWord)p;
239 recordMutableGenLock(StgClosure *p, generation *gen)
242 recordMutableGen(p,gen);
247 recordMutable(StgClosure *p)
250 ASSERT(closure_MUTABLE(p));
252 if (bd->gen_no > 0) recordMutableGen(p, &RTS_DEREF(generations)[bd->gen_no]);
256 recordMutableLock(StgClosure *p)
263 /* -----------------------------------------------------------------------------
264 The CAF table - used to let us revert CAFs in GHCi
265 -------------------------------------------------------------------------- */
267 /* set to disable CAF garbage collection in GHCi. */
268 /* (needed when dynamic libraries are used). */
269 extern rtsBool keepCAFs;
271 /* -----------------------------------------------------------------------------
272 This is the write barrier for MUT_VARs, a.k.a. IORefs. A
273 MUT_VAR_CLEAN object is not on the mutable list; a MUT_VAR_DIRTY
274 is. When written to, a MUT_VAR_CLEAN turns into a MUT_VAR_DIRTY
275 and is put on the mutable list.
276 -------------------------------------------------------------------------- */
278 void dirty_MUT_VAR(StgRegTable *reg, StgClosure *p);
280 /* -----------------------------------------------------------------------------
281 DEBUGGING predicates for pointers
283 LOOKS_LIKE_INFO_PTR(p) returns False if p is definitely not an info ptr
284 LOOKS_LIKE_CLOSURE_PTR(p) returns False if p is definitely not a closure ptr
286 These macros are complete but not sound. That is, they might
287 return false positives. Do not rely on them to distinguish info
288 pointers from closure pointers, for example.
290 We don't use address-space predicates these days, for portability
291 reasons, and the fact that code/data can be scattered about the
292 address space in a dynamically-linked environment. Our best option
293 is to look at the alleged info table and see whether it seems to
295 -------------------------------------------------------------------------- */
297 #define LOOKS_LIKE_INFO_PTR(p) \
298 (p && ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type != INVALID_OBJECT && \
299 ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type < N_CLOSURE_TYPES)
301 #define LOOKS_LIKE_CLOSURE_PTR(p) \
302 (LOOKS_LIKE_INFO_PTR(((StgClosure *)(p))->header.info))
304 /* -----------------------------------------------------------------------------
305 Macros for calculating how big a closure will be (used during allocation)
306 -------------------------------------------------------------------------- */
308 INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
309 { return sizeofW(StgPAP) + n_args; }
311 INLINE_HEADER StgOffset AP_sizeW ( nat n_args )
312 { return sizeofW(StgAP) + n_args; }
314 INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
315 { return sizeofW(StgAP_STACK) + size; }
317 INLINE_HEADER StgOffset CONSTR_sizeW( nat p, nat np )
318 { return sizeofW(StgHeader) + p + np; }
320 INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
321 { return sizeofW(StgSelector); }
323 INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
324 { return sizeofW(StgHeader)+MIN_PAYLOAD_SIZE; }
326 /* --------------------------------------------------------------------------
328 ------------------------------------------------------------------------*/
330 INLINE_HEADER StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
331 { return sizeofW(StgClosure)
332 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
333 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
335 INLINE_HEADER StgOffset thunk_sizeW_fromITBL( const StgInfoTable* itbl )
336 { return sizeofW(StgThunk)
337 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
338 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
340 INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
341 { return AP_STACK_sizeW(x->size); }
343 INLINE_HEADER StgOffset ap_sizeW( StgAP* x )
344 { return AP_sizeW(x->n_args); }
346 INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
347 { return PAP_sizeW(x->n_args); }
349 INLINE_HEADER StgOffset arr_words_sizeW( StgArrWords* x )
350 { return sizeofW(StgArrWords) + x->words; }
352 INLINE_HEADER StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
353 { return sizeofW(StgMutArrPtrs) + x->ptrs; }
355 INLINE_HEADER StgWord tso_sizeW ( StgTSO *tso )
356 { return TSO_STRUCT_SIZEW + tso->stack_size; }
358 INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
359 { return bco->size; }
362 closure_sizeW_ (StgClosure *p, StgInfoTable *info)
364 switch (info->type) {
367 return sizeofW(StgThunk) + 1;
372 return sizeofW(StgHeader) + 1;
376 return sizeofW(StgThunk) + 2;
383 return sizeofW(StgHeader) + 2;
385 return thunk_sizeW_fromITBL(info);
387 return THUNK_SELECTOR_sizeW();
389 return ap_stack_sizeW((StgAP_STACK *)p);
392 return pap_sizeW((StgPAP *)p);
396 case IND_OLDGEN_PERM:
397 return sizeofW(StgInd);
399 return arr_words_sizeW((StgArrWords *)p);
400 case MUT_ARR_PTRS_CLEAN:
401 case MUT_ARR_PTRS_DIRTY:
402 case MUT_ARR_PTRS_FROZEN:
403 case MUT_ARR_PTRS_FROZEN0:
404 return mut_arr_ptrs_sizeW((StgMutArrPtrs*)p);
406 return tso_sizeW((StgTSO *)p);
408 return bco_sizeW((StgBCO *)p);
409 case TVAR_WATCH_QUEUE:
410 return sizeofW(StgTVarWatchQueue);
412 return sizeofW(StgTVar);
414 return sizeofW(StgTRecChunk);
416 return sizeofW(StgTRecHeader);
417 case ATOMIC_INVARIANT:
418 return sizeofW(StgAtomicInvariant);
419 case INVARIANT_CHECK_QUEUE:
420 return sizeofW(StgInvariantCheckQueue);
422 return sizeW_fromITBL(info);
426 // The definitive way to find the size, in words, of a heap-allocated closure
428 closure_sizeW (StgClosure *p)
430 return closure_sizeW_(p, get_itbl(p));
433 /* -----------------------------------------------------------------------------
434 Sizes of stack frames
435 -------------------------------------------------------------------------- */
437 INLINE_HEADER StgWord stack_frame_sizeW( StgClosure *frame )
439 StgRetInfoTable *info;
441 info = get_ret_itbl(frame);
442 switch (info->i.type) {
446 StgRetDyn *dyn = (StgRetDyn *)frame;
447 return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
448 RET_DYN_NONPTR_REGS_SIZE +
449 RET_DYN_PTRS(dyn->liveness) + RET_DYN_NONPTRS(dyn->liveness);
453 return sizeofW(StgRetFun) + ((StgRetFun *)frame)->size;
457 return 1 + GET_LARGE_BITMAP(&info->i)->size;
460 return 2 + BCO_BITMAP_SIZE((StgBCO *)((P_)frame)[1]);
463 return 1 + BITMAP_SIZE(info->i.layout.bitmap);
467 /* -----------------------------------------------------------------------------
469 -------------------------------------------------------------------------- */
471 extern void allocNurseries ( void );
472 extern void resetNurseries ( void );
473 extern void resizeNurseries ( nat blocks );
474 extern void resizeNurseriesFixed ( nat blocks );
475 extern void tidyAllocateLists ( void );
476 extern lnat countNurseryBlocks ( void );
478 /* -----------------------------------------------------------------------------
480 -------------------------------------------------------------------------- */
482 extern void threadPaused ( Capability *cap, StgTSO * );
483 extern StgClosure * isAlive ( StgClosure *p );
484 extern void markCAFs ( evac_fn evac );
486 /* -----------------------------------------------------------------------------
487 Stats 'n' DEBUG stuff
488 -------------------------------------------------------------------------- */
490 extern ullong RTS_VAR(total_allocated);
492 extern lnat calcAllocated ( void );
493 extern lnat calcLive ( void );
494 extern lnat calcNeeded ( void );
497 extern void memInventory(void);
498 extern void checkSanity(void);
499 extern nat countBlocks(bdescr *);
500 extern void checkNurserySanity( step *stp );
504 void printMutOnceList(generation *gen);
505 void printMutableList(generation *gen);
508 /* ----------------------------------------------------------------------------
509 Storage manager internal APIs and globals
510 ------------------------------------------------------------------------- */
512 #define END_OF_STATIC_LIST stgCast(StgClosure*,1)
514 extern void newDynCAF(StgClosure *);
516 extern void move_TSO(StgTSO *src, StgTSO *dest);
517 extern StgTSO *relocate_stack(StgTSO *dest, ptrdiff_t diff);
519 extern StgClosure * RTS_VAR(scavenged_static_objects);
520 extern StgWeak * RTS_VAR(old_weak_ptr_list);
521 extern StgWeak * RTS_VAR(weak_ptr_list);
522 extern StgClosure * RTS_VAR(caf_list);
523 extern StgClosure * RTS_VAR(revertible_caf_list);
524 extern StgTSO * RTS_VAR(resurrected_threads);
526 #endif /* STORAGE_H */