1 /* -----------------------------------------------------------------------------
2 * $Id: Storage.h,v 1.53 2003/11/12 17:49:11 sof Exp $
4 * (c) The GHC Team, 1998-2002
 * External Storage Manager Interface
8 * ---------------------------------------------------------------------------*/
15 #include "BlockAlloc.h"
16 #include "StoragePriv.h"
18 #include "LdvProfile.h"
21 /* -----------------------------------------------------------------------------
22 Initialisation / De-initialisation
23 -------------------------------------------------------------------------- */
/* Set up the storage manager; called once at RTS start-up. */
extern void initStorage(void);
/* Tear down the storage manager at RTS shutdown. */
extern void exitStorage(void);
28 /* -----------------------------------------------------------------------------
31 StgPtr allocate(nat n) Allocates a chunk of contiguous store
32 n words long, returning a pointer to
33 the first word. Always succeeds.
35 StgPtr allocatePinned(nat n) Allocates a chunk of contiguous store
36 n words long, which is at a fixed
37 address (won't be moved by GC).
38 Returns a pointer to the first word.
41 NOTE: the GC can't in general handle
42 pinned objects, so allocatePinned()
43 can only be used for ByteArrays at the
46 Don't forget to TICK_ALLOC_XXX(...)
47 after calling allocate or
48 allocatePinned, for the
49 benefit of the ticky-ticky profiler.
51 rtsBool doYouWantToGC(void) Returns True if the storage manager is
52 ready to perform a GC, False otherwise.
54 lnat allocated_bytes(void) Returns the number of bytes allocated
55 via allocate() since the last GC.
56 Used in the reporting of statistics.
58 SMP: allocate and doYouWantToGC can be used from STG code, they are
59 surrounded by a mutex.
60 -------------------------------------------------------------------------- */
/* See the block comment above for the full contracts of these three. */
extern StgPtr allocate ( nat n );          /* n contiguous words; always succeeds */
extern StgPtr allocatePinned ( nat n );    /* as allocate, but not moved by GC */
extern lnat allocated_bytes ( void );      /* bytes allocated since last GC */
69 return (alloc_blocks >= alloc_blocks_lim);
72 /* -----------------------------------------------------------------------------
73 ExtendNursery(hp,hplim) When hplim is reached, try to grab
74 some more allocation space. Returns
75 False if the allocation space is
76 exhausted, and the application should
77 call GarbageCollect().
78 -------------------------------------------------------------------------- */
80 #define ExtendNursery(hp,hplim) \
82 CurrentNursery->link == NULL ? rtsFalse : \
83 (CurrentNursery = CurrentNursery->link, \
84 OpenNursery(hp,hplim), \
87 /* -----------------------------------------------------------------------------
88 Performing Garbage Collection
90 GarbageCollect(get_roots) Performs a garbage collection.
91 'get_roots' is called to find all the
92 roots that the system knows about.
94 StgClosure Called by get_roots on each root.
95 MarkRoot(StgClosure *p) Returns the new location of the root.
96 -------------------------------------------------------------------------- */
/* Perform a garbage collection.  'get_roots' is called to enumerate all
 * the roots the system knows about (see comment above).  force_major_gc
 * presumably requests a collection of all generations -- name-derived,
 * confirm against the definition. */
extern void GarbageCollect(void (*get_roots)(evac_fn),rtsBool force_major_gc);
100 /* -----------------------------------------------------------------------------
101 Generational garbage collection support
103 recordMutable(StgPtr p) Informs the garbage collector that a
104 previously immutable object has
105 become (permanently) mutable. Used
106 by thawArray and similar.
108 updateWithIndirection(p1,p2) Updates the object at p1 with an
109 indirection pointing to p2. This is
110 normally called for objects in an old
111 generation (>0) when they are updated.
113 updateWithPermIndirection(p1,p2) As above but uses a permanent indir.
115 -------------------------------------------------------------------------- */
118 * Storage manager mutex
121 extern Mutex sm_mutex;
122 #define ACQUIRE_SM_LOCK ACQUIRE_LOCK(&sm_mutex)
123 #define RELEASE_SM_LOCK RELEASE_LOCK(&sm_mutex)
125 #define ACQUIRE_SM_LOCK
126 #define RELEASE_SM_LOCK
129 /* ToDo: shouldn't recordMutable and recordOldToNewPtrs acquire some
130 * kind of lock in the SMP case?
133 recordMutable(StgMutClosure *p)
138 ASSERT(p->header.info == &stg_WHITEHOLE_info || closure_MUTABLE(p));
140 ASSERT(closure_MUTABLE(p));
144 if (bd->gen_no > 0) {
145 p->mut_link = generations[bd->gen_no].mut_list;
146 generations[bd->gen_no].mut_list = p;
151 recordOldToNewPtrs(StgMutClosure *p)
156 if (bd->gen_no > 0) {
157 p->mut_link = generations[bd->gen_no].mut_once_list;
158 generations[bd->gen_no].mut_once_list = p;
163 // We zero out the slop when PROFILING is on.
165 #if !defined(DEBUG) && !defined(PROFILING)
166 #define updateWithIndirection(info, ind_info, p1, p2, and_then) \
170 bd = Bdescr((P_)p1); \
171 if (bd->gen_no == 0) { \
172 ((StgInd *)p1)->indirectee = p2; \
173 SET_INFO(p1,ind_info); \
174 TICK_UPD_NEW_IND(); \
177 ((StgIndOldGen *)p1)->indirectee = p2; \
178 if (info != &stg_BLACKHOLE_BQ_info) { \
180 ((StgIndOldGen *)p1)->mut_link = generations[bd->gen_no].mut_once_list; \
181 generations[bd->gen_no].mut_once_list = (StgMutClosure *)p1; \
184 SET_INFO(p1,&stg_IND_OLDGEN_info); \
185 TICK_UPD_OLD_IND(); \
189 #elif defined(PROFILING)
191 // We call LDV_recordDead_FILL_SLOP_DYNAMIC(p1) regardless of the generation in
195 // After all, we do *NOT* need to call LDV_recordCreate() for both IND and
196 // IND_OLDGEN closures because they are inherently used. But, it corrupts
197 // the invariants that every closure keeps its creation time in the profiling
198 // field. So, we call LDV_recordCreate().
200 #define updateWithIndirection(info, ind_info, p1, p2, and_then) \
204 LDV_recordDead_FILL_SLOP_DYNAMIC((p1)); \
205 bd = Bdescr((P_)p1); \
206 if (bd->gen_no == 0) { \
207 ((StgInd *)p1)->indirectee = p2; \
208 SET_INFO(p1,ind_info); \
209 LDV_recordCreate((p1)); \
210 TICK_UPD_NEW_IND(); \
213 ((StgIndOldGen *)p1)->indirectee = p2; \
214 if (info != &stg_BLACKHOLE_BQ_info) { \
216 ((StgIndOldGen *)p1)->mut_link = generations[bd->gen_no].mut_once_list; \
217 generations[bd->gen_no].mut_once_list = (StgMutClosure *)p1; \
220 SET_INFO(p1,&stg_IND_OLDGEN_info); \
221 LDV_recordCreate((p1)); \
228 /* In the DEBUG case, we also zero out the slop of the old closure,
229 * so that the sanity checker can tell where the next closure is.
231 * Two important invariants: we should never try to update a closure
232 * to point to itself, and the closure being updated should not
233 * already have been updated (the mutable list will get messed up
236 #define updateWithIndirection(info, ind_info, p1, p2, and_then) \
240 ASSERT( p1 != p2 && !closure_IND(p1) ); \
241 bd = Bdescr((P_)p1); \
242 if (bd->gen_no == 0) { \
243 ((StgInd *)p1)->indirectee = p2; \
244 SET_INFO(p1,ind_info); \
245 TICK_UPD_NEW_IND(); \
248 if (info != &stg_BLACKHOLE_BQ_info) { \
250 StgInfoTable *inf = get_itbl(p1); \
251 nat np = inf->layout.payload.ptrs, \
252 nw = inf->layout.payload.nptrs, i; \
253 if (inf->type != THUNK_SELECTOR) { \
254 for (i = 0; i < np + nw; i++) { \
255 ((StgClosure *)p1)->payload[i] = 0; \
260 ((StgIndOldGen *)p1)->mut_link = generations[bd->gen_no].mut_once_list; \
261 generations[bd->gen_no].mut_once_list = (StgMutClosure *)p1; \
264 ((StgIndOldGen *)p1)->indirectee = p2; \
265 SET_INFO(p1,&stg_IND_OLDGEN_info); \
266 TICK_UPD_OLD_IND(); \
272 /* Static objects all live in the oldest generation
274 #define updateWithStaticIndirection(info, p1, p2) \
276 ASSERT( p1 != p2 && !closure_IND(p1) ); \
277 ASSERT( ((StgMutClosure*)p1)->mut_link == NULL ); \
280 ((StgMutClosure *)p1)->mut_link = oldest_gen->mut_once_list; \
281 oldest_gen->mut_once_list = (StgMutClosure *)p1; \
284 ((StgInd *)p1)->indirectee = p2; \
285 SET_INFO((StgInd *)p1, &stg_IND_STATIC_info); \
286 TICK_UPD_STATIC_IND(); \
289 #if defined(TICKY_TICKY) || defined(PROFILING)
291 updateWithPermIndirection(const StgInfoTable *info, StgClosure *p1, StgClosure *p2)
295 ASSERT( p1 != p2 && !closure_IND(p1) );
299 // Destroy the old closure.
300 // Nb: LDV_* stuff cannot mix with ticky-ticky
301 LDV_recordDead_FILL_SLOP_DYNAMIC(p1);
304 if (bd->gen_no == 0) {
305 ((StgInd *)p1)->indirectee = p2;
306 SET_INFO(p1,&stg_IND_PERM_info);
309 // We have just created a new closure.
310 LDV_recordCreate(p1);
312 TICK_UPD_NEW_PERM_IND(p1);
314 ((StgIndOldGen *)p1)->indirectee = p2;
315 if (info != &stg_BLACKHOLE_BQ_info) {
317 ((StgIndOldGen *)p1)->mut_link = generations[bd->gen_no].mut_once_list;
318 generations[bd->gen_no].mut_once_list = (StgMutClosure *)p1;
321 SET_INFO(p1,&stg_IND_OLDGEN_PERM_info);
324 // We have just created a new closure.
325 LDV_recordCreate(p1);
327 TICK_UPD_OLD_PERM_IND();
332 /* -----------------------------------------------------------------------------
333 The CAF table - used to let us revert CAFs in GHCi
334 -------------------------------------------------------------------------- */
/* Revert CAFs to their unevaluated state; used by GHCi (see above). */
void revertCAFs( void );
338 /* -----------------------------------------------------------------------------
339 DEBUGGING predicates for pointers
341 LOOKS_LIKE_INFO_PTR(p) returns False if p is definitely not an info ptr
342 LOOKS_LIKE_CLOSURE_PTR(p) returns False if p is definitely not a closure ptr
344 These macros are complete but not sound. That is, they might
345 return false positives. Do not rely on them to distinguish info
346 pointers from closure pointers, for example.
348 We don't use address-space predicates these days, for portability
349 reasons, and the fact that code/data can be scattered about the
350 address space in a dynamically-linked environment. Our best option
351 is to look at the alleged info table and see whether it seems to
353 -------------------------------------------------------------------------- */
/* Weak validity check: returns false only if p is definitely NOT an info
 * pointer; may return false positives (see comment above).
 * Fix: parenthesize the macro argument 'p' -- the original expanded it
 * bare into 'p && ...', which mis-binds for comma or low-precedence
 * argument expressions.
 * NOTE: evaluates p three times; do not pass expressions with side
 * effects. */
#define LOOKS_LIKE_INFO_PTR(p) \
   ((p) && ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type != INVALID_OBJECT && \
           ((StgInfoTable *)(INFO_PTR_TO_STRUCT(p)))->type < N_CLOSURE_TYPES)
/* False only if p is definitely not a closure pointer: checks whether the
 * word in p's header position looks like an info pointer.  Same
 * false-positive caveat as LOOKS_LIKE_INFO_PTR. */
#define LOOKS_LIKE_CLOSURE_PTR(p) \
  (LOOKS_LIKE_INFO_PTR(((StgClosure *)(p))->header.info))
362 /* -----------------------------------------------------------------------------
363 Macros for calculating how big a closure will be (used during allocation)
364 -------------------------------------------------------------------------- */
366 INLINE_HEADER StgOffset PAP_sizeW ( nat n_args )
367 { return sizeofW(StgPAP) + n_args; }
369 INLINE_HEADER StgOffset AP_STACK_sizeW ( nat size )
370 { return sizeofW(StgAP_STACK) + size; }
372 INLINE_HEADER StgOffset CONSTR_sizeW( nat p, nat np )
373 { return sizeofW(StgHeader) + p + np; }
375 INLINE_HEADER StgOffset THUNK_SELECTOR_sizeW ( void )
376 { return sizeofW(StgHeader) + MIN_UPD_SIZE; }
378 INLINE_HEADER StgOffset BLACKHOLE_sizeW ( void )
379 { return sizeofW(StgHeader) + MIN_UPD_SIZE; }
381 /* --------------------------------------------------------------------------
383 ------------------------------------------------------------------------*/
385 INLINE_HEADER StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
386 { return sizeofW(StgClosure)
387 + sizeofW(StgPtr) * itbl->layout.payload.ptrs
388 + sizeofW(StgWord) * itbl->layout.payload.nptrs; }
390 INLINE_HEADER StgOffset ap_stack_sizeW( StgAP_STACK* x )
391 { return AP_STACK_sizeW(x->size); }
393 INLINE_HEADER StgOffset pap_sizeW( StgPAP* x )
394 { return PAP_sizeW(x->n_args); }
396 INLINE_HEADER StgOffset arr_words_sizeW( StgArrWords* x )
397 { return sizeofW(StgArrWords) + x->words; }
399 INLINE_HEADER StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
400 { return sizeofW(StgMutArrPtrs) + x->ptrs; }
402 INLINE_HEADER StgWord tso_sizeW ( StgTSO *tso )
403 { return TSO_STRUCT_SIZEW + tso->stack_size; }
405 INLINE_HEADER StgWord bco_sizeW ( StgBCO *bco )
406 { return bco->size; }
408 /* -----------------------------------------------------------------------------
409 Sizes of stack frames
410 -------------------------------------------------------------------------- */
412 INLINE_HEADER StgWord stack_frame_sizeW( StgClosure *frame )
414 StgRetInfoTable *info;
416 info = get_ret_itbl(frame);
417 switch (info->i.type) {
421 StgRetDyn *dyn = (StgRetDyn *)frame;
422 return sizeofW(StgRetDyn) + RET_DYN_BITMAP_SIZE +
423 RET_DYN_NONPTR_REGS_SIZE +
424 GET_PTRS(dyn->liveness) + GET_NONPTRS(dyn->liveness);
428 return sizeofW(StgRetFun) + ((StgRetFun *)frame)->size;
432 return 1 + info->i.layout.large_bitmap->size;
435 return 2 + BCO_BITMAP_SIZE((StgBCO *)((P_)frame)[1]);
438 return 1 + BITMAP_SIZE(info->i.layout.bitmap);
442 /* -----------------------------------------------------------------------------
444 -------------------------------------------------------------------------- */
/* Debugging aids: dump a generation's mut_once_list / mut_list.
 * NOTE(review): declarations only here; presumably compiled in DEBUG
 * builds -- confirm at the definitions. */
void printMutOnceList(generation *gen);
void printMutableList(generation *gen);