1 /* -----------------------------------------------------------------------------
3 * (c) The GHC Team, 1998-2004
* External Storage Manager Interface
7 * ---------------------------------------------------------------------------*/
14 /* -----------------------------------------------------------------------------
17 * We support an arbitrary number of generations, with an arbitrary number
18 * of steps per generation. Notes (in no particular order):
20 * - all generations except the oldest should have two steps. This gives
21 * objects a decent chance to age before being promoted, and in
22 * particular will ensure that we don't end up with too many
23 * thunks being updated in older generations.
25 * - the oldest generation has one step. There's no point in aging
26 * objects in the oldest generation.
28 * - generation 0, step 0 (G0S0) is the allocation area. It is given
* a fixed set of blocks during initialisation, and these blocks
* are never freed.
32 * - during garbage collection, each step which is an evacuation
33 * destination (i.e. all steps except G0S0) is allocated a to-space.
* Evacuated objects are allocated into the step's to-space until
35 * GC is finished, when the original step's contents may be freed
36 * and replaced by the to-space.
38 * - the mutable-list is per-generation (not per-step). G0 doesn't
39 * have one (since every garbage collection collects at least G0).
41 * - block descriptors contain pointers to both the step and the
42 * generation that the block belongs to, for convenience.
44 * - static objects are stored in per-generation lists. See GC.c for
45 * details of how we collect CAFs in the generational scheme.
47 * - large objects are per-step, and are promoted in the same way
48 * as small objects, except that we may allocate large objects into
49 * generation 1 initially.
51 * ------------------------------------------------------------------------- */
53 typedef struct _step {
54 unsigned int no; /* step number */
55 bdescr * blocks; /* blocks in this step */
56 unsigned int n_blocks; /* number of blocks */
57 struct _step * to; /* destination step for live objects */
58 struct _generation * gen; /* generation this step belongs to */
59 unsigned int gen_no; /* generation number (cached) */
60 bdescr * large_objects; /* large objects (doubly linked) */
61 unsigned int n_large_blocks; /* no. of blocks used by large objs */
62 int is_compacted; /* compact this step? (old gen only) */
64 /* temporary use during GC: */
65 StgPtr hp; /* next free locn in to-space */
66 StgPtr hpLim; /* end of current to-space block */
67 bdescr * hp_bd; /* bdescr of current to-space block */
68 bdescr * to_blocks; /* bdescr of first to-space block */
69 unsigned int n_to_blocks; /* number of blocks in to-space */
70 bdescr * scan_bd; /* block currently being scanned */
71 StgPtr scan; /* scan pointer in current block */
72 bdescr * new_large_objects; /* large objects collected so far */
73 bdescr * scavenged_large_objects; /* live large objs after GC (d-link) */
74 unsigned int n_scavenged_large_blocks;/* size of above */
75 bdescr * bitmap; /* bitmap for compacting collection */
78 typedef struct _generation {
79 unsigned int no; /* generation number */
80 step * steps; /* steps */
81 unsigned int n_steps; /* number of steps */
82 unsigned int max_blocks; /* max blocks in step 0 */
83 bdescr *mut_list; /* mut objects in this gen (not G0)*/
85 /* temporary use during GC: */
86 bdescr *saved_mut_list;
88 /* stats information */
89 unsigned int collections;
90 unsigned int failed_promotions;
/* The generation table and common shortcuts into it.  "generations" is
   indexed by generation number (see recordMutable below).  Definitions are
   not visible from this header. */
extern generation * RTS_VAR(generations);	/* all generations, by gen number */

extern generation * RTS_VAR(g0);		/* generation 0 — presumably &generations[0]; confirm */
extern step * RTS_VAR(g0s0);			/* generation 0, step 0: the allocation area */
extern generation * RTS_VAR(oldest_gen);	/* the oldest (highest-numbered) generation */
99 /* -----------------------------------------------------------------------------
100 Initialisation / De-initialisation
101 -------------------------------------------------------------------------- */
103 extern void initStorage(void);
104 extern void exitStorage(void);
106 /* -----------------------------------------------------------------------------
109 StgPtr allocate(nat n) Allocates a chunk of contiguous store
110 n words long, returning a pointer to
111 the first word. Always succeeds.
113 StgPtr allocatePinned(nat n) Allocates a chunk of contiguous store
114 n words long, which is at a fixed
115 address (won't be moved by GC).
116 Returns a pointer to the first word.
119 NOTE: the GC can't in general handle
120 pinned objects, so allocatePinned()
can only be used for ByteArrays at the moment.
124 Don't forget to TICK_ALLOC_XXX(...)
125 after calling allocate or
126 allocatePinned, for the
127 benefit of the ticky-ticky profiler.
129 rtsBool doYouWantToGC(void) Returns True if the storage manager is
130 ready to perform a GC, False otherwise.
132 lnat allocated_bytes(void) Returns the number of bytes allocated
133 via allocate() since the last GC.
134 Used in the reporting of statistics.
136 SMP: allocate and doYouWantToGC can be used from STG code, they are
137 surrounded by a mutex.
138 -------------------------------------------------------------------------- */
/* Allocation entry points and their supporting globals; semantics are
   documented in the block comment above. */
extern StgPtr allocate ( nat n );		/* n contiguous words; always succeeds */
extern StgPtr allocatePinned ( nat n );		/* as allocate, but the object is never moved by GC */
extern lnat allocated_bytes ( void );		/* bytes allocated via allocate() since the last GC */

extern bdescr * RTS_VAR(small_alloc_list);	/* blocks used by allocate() — TODO confirm */
extern bdescr * RTS_VAR(large_alloc_list);	/* blocks for large allocations — TODO confirm */
extern bdescr * RTS_VAR(pinned_object_block);	/* current block for allocatePinned() — TODO confirm */

extern StgPtr RTS_VAR(alloc_Hp);		/* heap pointer for the allocate() area */
extern StgPtr RTS_VAR(alloc_HpLim);		/* limit of the current allocation block */

extern nat RTS_VAR(alloc_blocks);		/* compared against alloc_blocks_lim by doYouWantToGC() */
extern nat RTS_VAR(alloc_blocks_lim);		/* threshold at which doYouWantToGC() returns true */
154 INLINE_HEADER rtsBool
155 doYouWantToGC( void )
157 return (alloc_blocks >= alloc_blocks_lim);
160 /* -----------------------------------------------------------------------------
161 Performing Garbage Collection
163 GarbageCollect(get_roots) Performs a garbage collection.
164 'get_roots' is called to find all the
165 roots that the system knows about.
167 StgClosure Called by get_roots on each root.
168 MarkRoot(StgClosure *p) Returns the new location of the root.
169 -------------------------------------------------------------------------- */
/* Perform a garbage collection.  get_roots is called to enumerate all known
   roots (see comment above); force_major_gc requests a major (all-generation)
   collection — NOTE(review): confirm exact force_major semantics in GC.c. */
extern void GarbageCollect(void (*get_roots)(evac_fn),rtsBool force_major_gc);
173 /* -----------------------------------------------------------------------------
174 Generational garbage collection support
176 recordMutable(StgPtr p) Informs the garbage collector that a
177 previously immutable object has
178 become (permanently) mutable. Used
179 by thawArray and similar.
181 updateWithIndirection(p1,p2) Updates the object at p1 with an
182 indirection pointing to p2. This is
183 normally called for objects in an old
184 generation (>0) when they are updated.
186 updateWithPermIndirection(p1,p2) As above but uses a permanent indir.
188 -------------------------------------------------------------------------- */
/* -----------------------------------------------------------------------------
 * Storage manager mutex: protects the allocation entry points (see the
 * "SMP" note in the allocation comment above).
 * -------------------------------------------------------------------------- */
#if defined(SMP)
extern Mutex sm_mutex;
#define ACQUIRE_SM_LOCK ACQUIRE_LOCK(&sm_mutex)
#define RELEASE_SM_LOCK RELEASE_LOCK(&sm_mutex)
#else
/* Non-SMP build: the lock macros compile away. */
#define ACQUIRE_SM_LOCK
#define RELEASE_SM_LOCK
#endif
/* NOTE(review): as extracted, both macro pairs were defined unconditionally
 * (a redefinition error) and the comment opener was missing; the SMP
 * conditional has been restored (SMP grounded by the notes above/below). */
202 /* ToDo: shouldn't recordMutable acquire some
203 * kind of lock in the SMP case? Or do we need per-processor
207 recordMutableGen(StgClosure *p, generation *gen)
212 if (bd->free >= bd->start + BLOCK_SIZE_W) {
214 new_bd = allocBlock();
219 *bd->free++ = (StgWord)p;
223 recordMutable(StgClosure *p)
226 ASSERT(closure_MUTABLE(p));
228 if (bd->gen_no > 0) recordMutableGen(p, &RTS_DEREF(generations)[bd->gen_no]);
231 /* -----------------------------------------------------------------------------
232 The CAF table - used to let us revert CAFs in GHCi
233 -------------------------------------------------------------------------- */
/* Revert all CAFs to their unevaluated state (used by GHCi; see the CAF
   table comment above). */
void revertCAFs( void );
237 /* -----------------------------------------------------------------------------
238 DEBUGGING predicates for pointers
240 LOOKS_LIKE_INFO_PTR(p) returns False if p is definitely not an info ptr
241 LOOKS_LIKE_CLOSURE_PTR(p) returns False if p is definitely not a closure ptr
243 These macros are complete but not sound. That is, they might
244 return false positives. Do not rely on them to distinguish info
245 pointers from closure pointers, for example.
247 We don't use address-space predicates these days, for portability
248 reasons, and the fact that code/data can be scattered about the
249 address space in a dynamically-linked environment. Our best option
is to look at the alleged info table and see whether it seems to
make sense.
252 -------------------------------------------------------------------------- */
/* False only when p is definitely not an info pointer: requires a non-NULL p
   whose alleged info table carries a plausible closure type.  May yield
   false positives (see the note above). */
#define LOOKS_LIKE_INFO_PTR(p) \
   (p && ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type != INVALID_OBJECT && \
    ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type < N_CLOSURE_TYPES)

/* False only when p is definitely not a closure pointer: applies the
   info-pointer test to p's header word. */
#define LOOKS_LIKE_CLOSURE_PTR(p) \
  (LOOKS_LIKE_INFO_PTR(((StgClosure *)(p))->header.info))
261 /* -----------------------------------------------------------------------------
262 Macros for calculating how big a closure will be (used during allocation)
263 -------------------------------------------------------------------------- */
265 INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
266 { return sizeofW(StgPAP) + n_args; }
268 INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
269 { return sizeofW(StgAP_STACK) + size; }
271 INLINE_HEADER StgOffset CONSTR_sizeW( nat p, nat np )
272 { return sizeofW(StgHeader) + p + np; }
274 INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
275 { return stg_max(sizeofW(StgHeader)+MIN_UPD_SIZE, sizeofW(StgSelector)); }
277 INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
278 { return stg_max(sizeofW(StgHeader)+MIN_UPD_SIZE, sizeofW(StgBlockingQueue)); }
280 /* --------------------------------------------------------------------------
282 ------------------------------------------------------------------------*/
284 INLINE_HEADER StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
285 { return sizeofW(StgClosure)
286 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
287 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
289 INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
290 { return AP_STACK_sizeW(x->size); }
292 INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
293 { return PAP_sizeW(x->n_args); }
295 INLINE_HEADER StgOffset arr_words_sizeW( StgArrWords* x )
296 { return sizeofW(StgArrWords) + x->words; }
298 INLINE_HEADER StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
299 { return sizeofW(StgMutArrPtrs) + x->ptrs; }
301 INLINE_HEADER StgWord tso_sizeW ( StgTSO *tso )
302 { return TSO_STRUCT_SIZEW + tso->stack_size; }
304 INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
305 { return bco->size; }
307 /* -----------------------------------------------------------------------------
308 Sizes of stack frames
309 -------------------------------------------------------------------------- */
311 INLINE_HEADER StgWord stack_frame_sizeW( StgClosure *frame )
313 StgRetInfoTable *info;
315 info = get_ret_itbl(frame);
316 switch (info->i.type) {
320 StgRetDyn *dyn = (StgRetDyn *)frame;
321 return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
322 RET_DYN_NONPTR_REGS_SIZE +
323 RET_DYN_PTRS(dyn->liveness) + RET_DYN_NONPTRS(dyn->liveness);
327 return sizeofW(StgRetFun) + ((StgRetFun *)frame)->size;
331 return 1 + GET_LARGE_BITMAP(&info->i)->size;
334 return 2 + BCO_BITMAP_SIZE((StgBCO *)((P_)frame)[1]);
337 return 1 + BITMAP_SIZE(info->i.layout.bitmap);
341 /* -----------------------------------------------------------------------------
343 -------------------------------------------------------------------------- */
/* Nursery (allocation-area) management. */
extern void allocNurseries ( void );	/* create the nurseries */
extern void resetNurseries ( void );	/* reset nurseries — presumably after GC; confirm */
extern bdescr * allocNursery ( bdescr *last_bd, nat blocks );	/* chain 'blocks' new blocks onto last_bd — TODO confirm */
extern void resizeNursery ( nat blocks );	/* set the nursery size, in blocks */
extern void tidyAllocateLists ( void );
351 /* -----------------------------------------------------------------------------
353 -------------------------------------------------------------------------- */
/* Miscellaneous GC interface. */
extern void threadPaused ( StgTSO * );		/* notify the SM that a thread has paused — semantics not visible here */
extern StgClosure * isAlive ( StgClosure *p );	/* presumably non-NULL (new location) iff p is live — confirm in GC.c */
extern void markCAFs ( evac_fn evac );		/* apply 'evac' to the CAF list(s) — TODO confirm */
359 /* -----------------------------------------------------------------------------
360 Stats 'n' DEBUG stuff
361 -------------------------------------------------------------------------- */
/* Statistics counters and DEBUG helpers. */
extern ullong RTS_VAR(total_allocated);	/* running allocation total — units not visible here */

extern lnat calcAllocated ( void );	/* allocation since the last GC — TODO confirm */
extern lnat calcLive ( void );		/* amount of live data — TODO confirm units */
extern lnat calcNeeded ( void );

extern void memInventory(void);		/* DEBUG aid */
extern void checkSanity(void);		/* DEBUG aid */
extern nat countBlocks(bdescr *);	/* length of a bdescr chain */

/* DEBUG printers for a generation's mutable lists. */
void printMutOnceList(generation *gen);
void printMutableList(generation *gen);
380 /* ----------------------------------------------------------------------------
381 Storage manager internal APIs and globals
382 ------------------------------------------------------------------------- */
/* Sentinel terminating the static-object lists; the value 1 is never a
   valid closure address. */
#define END_OF_STATIC_LIST stgCast(StgClosure*,1)

extern void newDynCAF(StgClosure *);

/* Stack-moving helpers — presumably used when growing/relocating TSO
   stacks; confirm at the definition site. */
extern void move_TSO(StgTSO *src, StgTSO *dest);
extern StgTSO *relocate_stack(StgTSO *dest, ptrdiff_t diff);

/* Collector-internal working lists. */
extern StgClosure * RTS_VAR(static_objects);
extern StgClosure * RTS_VAR(scavenged_static_objects);
extern StgWeak * RTS_VAR(old_weak_ptr_list);
extern StgWeak * RTS_VAR(weak_ptr_list);
extern StgClosure * RTS_VAR(caf_list);
extern StgTSO * RTS_VAR(resurrected_threads);