/* -----------------------------------------------------------------------------
 * (c) The GHC Team, 1998-2004
 *
 * External Storage Manager Interface
 * ---------------------------------------------------------------------------*/
/* -----------------------------------------------------------------------------
 * We support an arbitrary number of generations, with an arbitrary number
 * of steps per generation.  Notes (in no particular order):
 *
 *  - all generations except the oldest should have two steps.  This gives
 *    objects a decent chance to age before being promoted, and in
 *    particular will ensure that we don't end up with too many
 *    thunks being updated in older generations.
 *
 *  - the oldest generation has one step.  There's no point in aging
 *    objects in the oldest generation.
 *
 *  - generation 0, step 0 (G0S0) is the allocation area.  It is given
 *    a fixed set of blocks during initialisation, and these blocks
 *    are always used as the allocation area.
 *
 *  - during garbage collection, each step which is an evacuation
 *    destination (i.e. all steps except G0S0) is allocated a to-space.
 *    Evacuated objects are allocated into the step's to-space until
 *    GC is finished, when the original step's contents may be freed
 *    and replaced by the to-space.
 *
 *  - the mutable-list is per-generation (not per-step).  G0 doesn't
 *    have one (since every garbage collection collects at least G0).
 *
 *  - block descriptors contain pointers to both the step and the
 *    generation that the block belongs to, for convenience.
 *
 *  - static objects are stored in per-generation lists.  See GC.c for
 *    details of how we collect CAFs in the generational scheme.
 *
 *  - large objects are per-step, and are promoted in the same way
 *    as small objects, except that we may allocate large objects into
 *    generation 1 initially.
 *
 * ------------------------------------------------------------------------- */
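
/* An illustrative sketch (not part of this interface) of how the layout above
 * is typically traversed: every generation carries an array of steps, and each
 * step owns a block list whose length is cached in n_blocks.  The generation
 * count is assumed here to come from RtsFlags.GcFlags.generations; treat that,
 * and the loop itself, as assumptions of this sketch rather than something
 * this header guarantees:
 *
 *     nat g, s, total_blocks = 0;
 *     for (g = 0; g < RtsFlags.GcFlags.generations; g++) {
 *         generation *gen = &RTS_DEREF(generations)[g];
 *         for (s = 0; s < gen->n_steps; s++) {
 *             total_blocks += gen->steps[s].n_blocks;  // small-object blocks only
 *         }
 *     }
 */
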
typedef struct _step {
  unsigned int         no;             /* step number */
  bdescr *             blocks;         /* blocks in this step */
  unsigned int         n_blocks;       /* number of blocks */
  struct _step *       to;             /* destination step for live objects */
  struct _generation * gen;            /* generation this step belongs to */
  unsigned int         gen_no;         /* generation number (cached) */
  bdescr *             large_objects;  /* large objects (doubly linked) */
  unsigned int         n_large_blocks; /* no. of blocks used by large objs */
  int                  is_compacted;   /* compact this step? (old gen only) */

  /* temporary use during GC: */
  StgPtr       hp;                     /* next free locn in to-space */
  StgPtr       hpLim;                  /* end of current to-space block */
  bdescr *     hp_bd;                  /* bdescr of current to-space block */
  bdescr *     to_blocks;              /* bdescr of first to-space block */
  unsigned int n_to_blocks;            /* number of blocks in to-space */
  bdescr *     scan_bd;                /* block currently being scanned */
  StgPtr       scan;                   /* scan pointer in current block */
  bdescr *     new_large_objects;      /* large objects collected so far */
  bdescr *     scavenged_large_objects; /* live large objs after GC (d-link) */
  unsigned int n_scavenged_large_blocks; /* size of above */
  bdescr *     bitmap;                 /* bitmap for compacting collection */
} step;
typedef struct _generation {
  unsigned int   no;                   /* generation number */
  step *         steps;                /* steps */
  unsigned int   n_steps;              /* number of steps */
  unsigned int   max_blocks;           /* max blocks in step 0 */
  StgMutClosure *mut_list;             /* mut objects in this gen (not G0) */
  StgMutClosure *mut_once_list;        /* objects that point to younger gens */

  /* temporary use during GC: */
  StgMutClosure * saved_mut_list;

  /* stats information */
  unsigned int collections;
  unsigned int failed_promotions;
} generation;
extern generation * RTS_VAR(generations);

extern generation * RTS_VAR(g0);
extern step *       RTS_VAR(g0s0);
extern generation * RTS_VAR(oldest_gen);
/* -----------------------------------------------------------------------------
   Initialisation / De-initialisation
   -------------------------------------------------------------------------- */

extern void initStorage(void);
extern void exitStorage(void);
/* -----------------------------------------------------------------------------
   StgPtr allocate(nat n)       Allocates a chunk of contiguous store
                                n words long, returning a pointer to
                                the first word.  Always succeeds.

   StgPtr allocatePinned(nat n) Allocates a chunk of contiguous store
                                n words long, which is at a fixed
                                address (won't be moved by GC).
                                Returns a pointer to the first word.

                                NOTE: the GC can't in general handle
                                pinned objects, so allocatePinned()
                                can only be used for ByteArrays at the
                                moment.

                                Don't forget to TICK_ALLOC_XXX(...)
                                after calling allocate or
                                allocatePinned, for the
                                benefit of the ticky-ticky profiler.

   rtsBool doYouWantToGC(void)  Returns True if the storage manager is
                                ready to perform a GC, False otherwise.

   lnat allocated_bytes(void)   Returns the number of bytes allocated
                                via allocate() since the last GC.
                                Used in the reporting of statistics.

   SMP: allocate and doYouWantToGC can be used from STG code; they are
   surrounded by a mutex.
   -------------------------------------------------------------------------- */
extern StgPtr allocate        ( nat n );
extern StgPtr allocatePinned  ( nat n );
extern lnat   allocated_bytes ( void );
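
/* A minimal usage sketch for allocate().  The object type, the word count and
 * the particular TICK_ALLOC_XXX counter below are assumptions made for
 * illustration (TICK_ALLOC_PRIM and its argument convention are taken on trust
 * from the ticky-ticky profiler, and n_data_words is hypothetical); the points
 * being illustrated are that the result is uninitialised store measured in
 * words, and that a ticky counter should be bumped after every call:
 *
 *     nat    size = sizeofW(StgArrWords) + n_data_words;
 *     StgPtr p    = allocate(size);                          // always succeeds
 *     TICK_ALLOC_PRIM(sizeofW(StgArrWords), n_data_words, 0);
 *     // ... initialise the header and payload of the new object here ...
 */
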
extern bdescr * RTS_VAR(small_alloc_list);
extern bdescr * RTS_VAR(large_alloc_list);
extern bdescr * RTS_VAR(pinned_object_block);

extern StgPtr RTS_VAR(alloc_Hp);
extern StgPtr RTS_VAR(alloc_HpLim);

extern nat RTS_VAR(alloc_blocks);
extern nat RTS_VAR(alloc_blocks_lim);
INLINE_HEADER rtsBool
doYouWantToGC( void )
{
  return (alloc_blocks >= alloc_blocks_lim);
}
/* -----------------------------------------------------------------------------
   Performing Garbage Collection

   GarbageCollect(get_roots)    Performs a garbage collection.
                                'get_roots' is called to find all the
                                roots that the system knows about.

   StgClosure                   Called by get_roots on each root.
   MarkRoot(StgClosure *p)      Returns the new location of the root.
   -------------------------------------------------------------------------- */

extern void GarbageCollect(void (*get_roots)(evac_fn), rtsBool force_major_gc);
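
/* A hedged sketch of how a caller might drive a collection.  The root and the
 * root enumerator below are hypothetical; the only contract assumed from this
 * header is that GarbageCollect() invokes the supplied callback with an
 * evac_fn, and that the callback applies that evac_fn to every root it knows
 * about (each root pointer may be updated in place):
 *
 *     static StgClosure *my_root_closure;           // hypothetical root
 *
 *     static void my_get_roots( evac_fn evac )
 *     {
 *         evac(&my_root_closure);
 *     }
 *
 *     if (doYouWantToGC()) {
 *         GarbageCollect(my_get_roots, rtsFalse);   // rtsFalse: not forced major
 *     }
 */
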
/* -----------------------------------------------------------------------------
   Generational garbage collection support

   recordMutable(StgPtr p)       Informs the garbage collector that a
                                 previously immutable object has
                                 become (permanently) mutable.  Used
                                 by thawArray and similar.

   updateWithIndirection(p1,p2)  Updates the object at p1 with an
                                 indirection pointing to p2.  This is
                                 normally called for objects in an old
                                 generation (>0) when they are updated.

   updateWithPermIndirection(p1,p2)  As above but uses a permanent indir.

   -------------------------------------------------------------------------- */
/* -----------------------------------------------------------------------------
 * Storage manager mutex
 * -------------------------------------------------------------------------- */

#if defined(SMP)
extern Mutex sm_mutex;
#define ACQUIRE_SM_LOCK   ACQUIRE_LOCK(&sm_mutex)
#define RELEASE_SM_LOCK   RELEASE_LOCK(&sm_mutex)
#else
#define ACQUIRE_SM_LOCK
#define RELEASE_SM_LOCK
#endif
/* ToDo: shouldn't recordMutable and recordOldToNewPtrs acquire some
 * kind of lock in the SMP case?
 */
INLINE_HEADER void
recordMutable(StgMutClosure *p)
{
  bdescr *bd = Bdescr((P_)p);
#ifdef SMP
  ASSERT(p->header.info == &stg_WHITEHOLE_info || closure_MUTABLE(p));
#else
  ASSERT(closure_MUTABLE(p));
#endif
  if (bd->gen_no > 0) {
    p->mut_link = RTS_DEREF(generations)[bd->gen_no].mut_list;
    RTS_DEREF(generations)[bd->gen_no].mut_list = p;
  }
}

INLINE_HEADER void
recordOldToNewPtrs(StgMutClosure *p)
{
  bdescr *bd = Bdescr((P_)p);
  if (bd->gen_no > 0) {
    p->mut_link = RTS_DEREF(generations)[bd->gen_no].mut_once_list;
    RTS_DEREF(generations)[bd->gen_no].mut_once_list = p;
  }
}
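
/* A brief usage sketch: after destructively "thawing" an object that lives in
 * an old generation (gen_no > 0), the mutator must put it on that generation's
 * mutable list so the next GC scavenges it.  The closure below is
 * hypothetical; the pattern is simply:
 *
 *     StgMutClosure *q = (StgMutClosure *)thawed_array;  // assumed old-gen object
 *     recordMutable(q);          // no-op for objects still in generation 0
 */
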
/* -----------------------------------------------------------------------------
   The CAF table - used to let us revert CAFs in GHCi
   -------------------------------------------------------------------------- */

void revertCAFs( void );
/* -----------------------------------------------------------------------------
   DEBUGGING predicates for pointers

   LOOKS_LIKE_INFO_PTR(p)    returns False if p is definitely not an info ptr
   LOOKS_LIKE_CLOSURE_PTR(p) returns False if p is definitely not a closure ptr

   These macros are complete but not sound.  That is, they might
   return false positives.  Do not rely on them to distinguish info
   pointers from closure pointers, for example.

   We don't use address-space predicates these days, for portability
   reasons, and the fact that code/data can be scattered about the
   address space in a dynamically-linked environment.  Our best option
   is to look at the alleged info table and see whether it seems to
   make sense.
   -------------------------------------------------------------------------- */
#define LOOKS_LIKE_INFO_PTR(p) \
   (p && ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type != INVALID_OBJECT && \
         ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type < N_CLOSURE_TYPES)

#define LOOKS_LIKE_CLOSURE_PTR(p) \
   (LOOKS_LIKE_INFO_PTR(((StgClosure *)(p))->header.info))
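
/* These predicates are intended for ASSERTs in DEBUG builds.  A typical
 * (illustrative) use when following an assumed pointer field of a closure:
 *
 *     StgClosure *q = ((StgClosure *)p)->payload[i];  // assumed pointer field
 *     ASSERT(LOOKS_LIKE_CLOSURE_PTR(q));
 */
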
/* -----------------------------------------------------------------------------
   Macros for calculating how big a closure will be (used during allocation)
   -------------------------------------------------------------------------- */

INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
{ return sizeofW(StgPAP) + n_args; }

INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
{ return sizeofW(StgAP_STACK) + size; }

INLINE_HEADER StgOffset CONSTR_sizeW( nat p, nat np )
{ return sizeofW(StgHeader) + p + np; }

INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
{ return sizeofW(StgHeader) + MIN_UPD_SIZE; }

INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
{ return sizeofW(StgHeader) + MIN_UPD_SIZE; }
/* --------------------------------------------------------------------------
   Sizes of closures
   ------------------------------------------------------------------------*/
INLINE_HEADER StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
{ return sizeofW(StgClosure)
       + sizeofW(StgPtr)  * itbl->layout.payload.ptrs
       + sizeofW(StgWord) * itbl->layout.payload.nptrs; }

INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
{ return AP_STACK_sizeW(x->size); }

INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
{ return PAP_sizeW(x->n_args); }

INLINE_HEADER StgOffset arr_words_sizeW( StgArrWords* x )
{ return sizeofW(StgArrWords) + x->words; }

INLINE_HEADER StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
{ return sizeofW(StgMutArrPtrs) + x->ptrs; }

INLINE_HEADER StgWord tso_sizeW ( StgTSO *tso )
{ return TSO_STRUCT_SIZEW + tso->stack_size; }

INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
{ return bco->size; }
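
/* An illustrative example of how these helpers compose: the size (in words)
 * that a constructor with 'p' pointer fields and 'np' non-pointer words will
 * occupy, and the size of an existing closure c recovered from its info table
 * (get_itbl() is assumed here to come from ClosureMacros.h):
 *
 *     nat wanted = CONSTR_sizeW(p, np);           // header + payload words
 *     nat actual = sizeW_fromITBL(get_itbl(c));   // from a generic closure
 */
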
/* -----------------------------------------------------------------------------
   Sizes of stack frames
   -------------------------------------------------------------------------- */
INLINE_HEADER StgWord stack_frame_sizeW( StgClosure *frame )
{
    StgRetInfoTable *info;

    info = get_ret_itbl(frame);
    switch (info->i.type) {
    case RET_DYN:
    {
        StgRetDyn *dyn = (StgRetDyn *)frame;
        return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
               RET_DYN_NONPTR_REGS_SIZE +
               RET_DYN_PTRS(dyn->liveness) + RET_DYN_NONPTRS(dyn->liveness);
    }
    case RET_FUN:
        return sizeofW(StgRetFun) + ((StgRetFun *)frame)->size;
    case RET_BIG:
    case RET_VEC_BIG:
        return 1 + GET_LARGE_BITMAP(&info->i)->size;
    case RET_BCO:
        return 2 + BCO_BITMAP_SIZE((StgBCO *)((P_)frame)[1]);
    default:
        return 1 + BITMAP_SIZE(info->i.layout.bitmap);
    }
}
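
/* A sketch of the canonical consumer of stack_frame_sizeW(): walking every
 * frame of a TSO's stack from the current stack pointer to the stack top.
 * The StgTSO fields used (sp, stack, stack_size) are assumed from TSO.h
 * rather than defined here:
 *
 *     StgPtr p         = tso->sp;
 *     StgPtr stack_end = tso->stack + tso->stack_size;
 *     while (p < stack_end) {
 *         p += stack_frame_sizeW((StgClosure *)p);
 *     }
 */
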
/* -----------------------------------------------------------------------------
   Nursery manipulation
   -------------------------------------------------------------------------- */
extern void     allocNurseries    ( void );
extern void     resetNurseries    ( void );
extern bdescr * allocNursery      ( bdescr *last_bd, nat blocks );
extern void     resizeNursery     ( nat blocks );
extern void     tidyAllocateLists ( void );
/* -----------------------------------------------------------------------------
   A mutable list is ended with END_MUT_LIST, so that we can use NULL
   as an indication that an object is not on a mutable list.
   ------------------------------------------------------------------------- */

#define END_MUT_LIST ((StgMutClosure *)(void *)&stg_END_MUT_LIST_closure)
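
/* A small sketch of walking a generation's mutable list using the sentinel;
 * mut_link is the same field that recordMutable() threads above, and gen is
 * assumed to point at some generation with gen->no > 0:
 *
 *     StgMutClosure *p;
 *     for (p = gen->mut_list; p != END_MUT_LIST; p = p->mut_link) {
 *         // ... inspect or scavenge the mutable closure p ...
 *     }
 */
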
/* -----------------------------------------------------------------------------
   Miscellaneous storage manager / GC operations
   -------------------------------------------------------------------------- */
extern void         threadPaused ( StgTSO * );
extern StgClosure * isAlive      ( StgClosure *p );
extern void         markCAFs     ( evac_fn evac );
/* -----------------------------------------------------------------------------
   Stats 'n' DEBUG stuff
   -------------------------------------------------------------------------- */

extern ullong RTS_VAR(total_allocated);

extern lnat calcAllocated ( void );
extern lnat calcLive      ( void );
extern lnat calcNeeded    ( void );

extern void memInventory(void);
extern void checkSanity(void);
extern nat  countBlocks(bdescr *);

void printMutOnceList(generation *gen);
void printMutableList(generation *gen);
/* ----------------------------------------------------------------------------
   Storage manager internal APIs and globals
   ------------------------------------------------------------------------- */

#define END_OF_STATIC_LIST stgCast(StgClosure*,1)

extern void newDynCAF(StgClosure *);

extern void move_TSO(StgTSO *src, StgTSO *dest);
extern StgTSO *relocate_stack(StgTSO *dest, ptrdiff_t diff);

extern StgClosure * RTS_VAR(static_objects);
extern StgClosure * RTS_VAR(scavenged_static_objects);
extern StgWeak    * RTS_VAR(old_weak_ptr_list);
extern StgWeak    * RTS_VAR(weak_ptr_list);
extern StgClosure * RTS_VAR(caf_list);
extern StgTSO     * RTS_VAR(resurrected_threads);