1 /* -----------------------------------------------------------------------------
2 * $Id: Storage.h,v 1.22 2001/01/24 15:46:19 simonmar Exp $
4 * (c) The GHC Team, 1998-1999
 * External Storage Manager Interface
8 * ---------------------------------------------------------------------------*/
14 #include "BlockAlloc.h"
15 #include "StoragePriv.h"
17 /* -----------------------------------------------------------------------------
18 Initialisation / De-initialisation
19 -------------------------------------------------------------------------- */
21 extern void initStorage(void);
22 extern void exitStorage(void);
24 /* -----------------------------------------------------------------------------
27 StgPtr allocate(int n) Allocates a chunk of contiguous store
28 n words long, returning a pointer to
29 the first word. Always succeeds.
31 Don't forget to TICK_ALLOC_XXX(...)
32 after calling allocate, for the
33 benefit of the ticky-ticky profiler.
35 rtsBool doYouWantToGC(void) Returns True if the storage manager is
36 ready to perform a GC, False otherwise.
38 lnat allocated_bytes(void) Returns the number of bytes allocated
39 via allocate() since the last GC.
40 Used in the reoprting of statistics.
42 SMP: allocate and doYouWantToGC can be used from STG code, they are
43 surrounded by a mutex.
44 -------------------------------------------------------------------------- */
46 extern StgPtr allocate(nat n);
47 static inline rtsBool doYouWantToGC(void)
49 return (alloc_blocks >= alloc_blocks_lim);
51 extern lnat allocated_bytes(void);
53 /* -----------------------------------------------------------------------------
54 ExtendNursery(hp,hplim) When hplim is reached, try to grab
55 some more allocation space. Returns
56 False if the allocation space is
57 exhausted, and the application should
58 call GarbageCollect().
59 -------------------------------------------------------------------------- */
/* Move CurrentNursery on to its next block, if there is one; evaluates
 * to rtsFalse when the nursery is exhausted (caller should then do a
 * GC).  NOTE: evaluates hp/hplim more than once -- do not pass
 * expressions with side effects.  (Restored the missing rtsTrue tail
 * of the comma-expression.) */
#define ExtendNursery(hp,hplim)                 \
  (CurrentNursery->free = (P_)(hp)+1,           \
   CurrentNursery->link == NULL ? rtsFalse :    \
   (CurrentNursery = CurrentNursery->link,      \
    OpenNursery(hp,hplim),                      \
    rtsTrue))

extern void PleaseStopAllocating(void);
70 /* -----------------------------------------------------------------------------
71 Performing Garbage Collection
73 GarbageCollect(get_roots) Performs a garbage collection.
74 'get_roots' is called to find all the
75 roots that the system knows about.
77 StgClosure Called by get_roots on each root.
78 MarkRoot(StgClosure *p) Returns the new location of the root.
79 -------------------------------------------------------------------------- */
/* Perform a garbage collection; get_roots is called back to mark each
 * root the system knows about.  force_major_gc forces a collection of
 * all generations rather than just the young ones. */
extern void GarbageCollect(void (*get_roots)(void),rtsBool force_major_gc);
/* Called on each root by get_roots; returns the root's new location. */
extern StgClosure *MarkRoot(StgClosure *p);

/* Temporary measure to ensure we retain all the dynamically-loaded CAFs */
extern void markCafs( void );
89 /* -----------------------------------------------------------------------------
90 Generational garbage collection support
92 recordMutable(StgPtr p) Informs the garbage collector that a
93 previously immutable object has
94 become (permanently) mutable. Used
95 by thawArray and similar.
97 updateWithIndirection(p1,p2) Updates the object at p1 with an
98 indirection pointing to p2. This is
99 normally called for objects in an old
100 generation (>0) when they are updated.
102 updateWithPermIndirection(p1,p2) As above but uses a permanent indir.
104 -------------------------------------------------------------------------- */
106 /* ToDo: shouldn't recordMutable and recordOldToNewPtrs acquire some
107 * kind of lock in the SMP case?
110 recordMutable(StgMutClosure *p)
115 ASSERT(p->header.info == &stg_WHITEHOLE_info || closure_MUTABLE(p));
117 ASSERT(closure_MUTABLE(p));
121 if (bd->gen->no > 0) {
122 p->mut_link = bd->gen->mut_list;
123 bd->gen->mut_list = p;
128 recordOldToNewPtrs(StgMutClosure *p)
133 if (bd->gen->no > 0) {
134 p->mut_link = bd->gen->mut_once_list;
135 bd->gen->mut_once_list = p;
/* Update closure p1 with an indirection to p2.  Closures in generation
 * 0 get a plain IND; old-generation closures get IND_OLDGEN and are
 * added to their generation's mut_once_list -- except when p1 is a
 * BLACKHOLE_BQ, which is already on a mutable list.  (Restored the
 * #ifndef DEBUG / #else / #endif structure and the brace/continuation
 * lines lost from these two definitions; without the guards the second
 * #define would redefine the first.) */
#ifndef DEBUG
#define updateWithIndirection(info, p1, p2)                             \
  {                                                                     \
    bdescr *bd;                                                         \
                                                                        \
    bd = Bdescr((P_)p1);                                                \
    if (bd->gen->no == 0) {                                             \
      ((StgInd *)p1)->indirectee = p2;                                  \
      SET_INFO(p1,&stg_IND_info);                                       \
      TICK_UPD_NEW_IND();                                               \
    } else {                                                            \
      ((StgIndOldGen *)p1)->indirectee = p2;                            \
      if (info != &stg_BLACKHOLE_BQ_info) {                             \
        ACQUIRE_LOCK(&sm_mutex);                                        \
        ((StgIndOldGen *)p1)->mut_link = bd->gen->mut_once_list;        \
        bd->gen->mut_once_list = (StgMutClosure *)p1;                   \
        RELEASE_LOCK(&sm_mutex);                                        \
      }                                                                 \
      SET_INFO(p1,&stg_IND_OLDGEN_info);                                \
      TICK_UPD_OLD_IND();                                               \
    }                                                                   \
  }
#else

/* In the DEBUG case, we also zero out the slop of the old closure,
 * so that the sanity checker can tell where the next closure is.
 */
#define updateWithIndirection(info, p1, p2)                             \
  {                                                                     \
    bdescr *bd;                                                         \
                                                                        \
    bd = Bdescr((P_)p1);                                                \
    if (bd->gen->no == 0) {                                             \
      ((StgInd *)p1)->indirectee = p2;                                  \
      SET_INFO(p1,&stg_IND_info);                                       \
      TICK_UPD_NEW_IND();                                               \
    } else {                                                            \
      if (info != &stg_BLACKHOLE_BQ_info) {                             \
        {                                                               \
          StgInfoTable *inf = get_itbl(p1);                             \
          nat np = inf->layout.payload.ptrs,                            \
              nw = inf->layout.payload.nptrs, i;                        \
          for (i = np; i < np + nw; i++) {                              \
            ((StgClosure *)p1)->payload[i] = 0;                         \
          }                                                             \
        }                                                               \
        ACQUIRE_LOCK(&sm_mutex);                                        \
        ((StgIndOldGen *)p1)->mut_link = bd->gen->mut_once_list;        \
        bd->gen->mut_once_list = (StgMutClosure *)p1;                   \
        RELEASE_LOCK(&sm_mutex);                                        \
      }                                                                 \
      ((StgIndOldGen *)p1)->indirectee = p2;                            \
      SET_INFO(p1,&stg_IND_OLDGEN_info);                                \
      TICK_UPD_OLD_IND();                                               \
    }                                                                   \
  }
#endif
/* Static objects all live in the oldest generation
 */

/* Update static closure p1 with an indirection to p2: link it onto the
 * oldest generation's mut_once_list and overwrite it with IND_STATIC.
 * (Restored the brace and continuation lines lost from this macro.) */
#define updateWithStaticIndirection(info, p1, p2)                       \
  {                                                                     \
    ASSERT( ((StgMutClosure*)p1)->mut_link == NULL );                   \
                                                                        \
    ACQUIRE_LOCK(&sm_mutex);                                            \
    ((StgMutClosure *)p1)->mut_link = oldest_gen->mut_once_list;        \
    oldest_gen->mut_once_list = (StgMutClosure *)p1;                    \
    RELEASE_LOCK(&sm_mutex);                                            \
                                                                        \
    ((StgInd *)p1)->indirectee = p2;                                    \
    SET_INFO((StgInd *)p1, &stg_IND_STATIC_info);                       \
    TICK_UPD_STATIC_IND();                                              \
  }
#if defined(TICKY_TICKY) || defined(PROFILING)
/* As updateWithIndirection, but installs a *permanent* indirection
 * (IND_PERM / IND_OLDGEN_PERM) so ticky-ticky and the profiler retain
 * cost attribution for the original thunk.  (Restored the function
 * header, braces, and closing #endif lost from this definition.) */
static inline void
updateWithPermIndirection(const StgInfoTable *info, StgClosure *p1, StgClosure *p2)
{
  bdescr *bd;

  bd = Bdescr((P_)p1);
  if (bd->gen->no == 0) {
    ((StgInd *)p1)->indirectee = p2;
    SET_INFO(p1,&stg_IND_PERM_info);
    TICK_UPD_NEW_PERM_IND(p1);
  } else {
    ((StgIndOldGen *)p1)->indirectee = p2;
    if (info != &stg_BLACKHOLE_BQ_info) {
      ACQUIRE_LOCK(&sm_mutex);
      ((StgIndOldGen *)p1)->mut_link = bd->gen->mut_once_list;
      bd->gen->mut_once_list = (StgMutClosure *)p1;
      RELEASE_LOCK(&sm_mutex);
    }
    SET_INFO(p1,&stg_IND_OLDGEN_PERM_info);
    TICK_UPD_OLD_PERM_IND();
  }
}
#endif
238 /* -----------------------------------------------------------------------------
239 The CAF table - used to let us revert CAFs
240 -------------------------------------------------------------------------- */
#if defined(INTERPRETER)
/* Table of CAFs created by the interpreter, recording each closure and
 * its original info table so the CAF can be reverted later.
 * (Restored the closure member, closing brace/typedef name, and #endif
 * lost from this block.) */
typedef struct StgCAFTabEntry_ {
    StgClosure*   closure;
    StgInfoTable* origItbl;
} StgCAFTabEntry;

extern void addToECafTable ( StgClosure* closure, StgInfoTable* origItbl );
extern void clearECafTable ( void );

extern StgCAF*         ecafList;
extern StgCAFTabEntry* ecafTable;
extern StgInt usedECafTable;
extern StgInt sizeECafTable;
#endif
/* Debug helpers: dump a generation's mut_once_list / mut_list.
 * NOTE(review): these were likely wrapped in #ifdef DEBUG in the
 * original source -- confirm against Storage.c before relying on them
 * in non-debug builds. */
void printMutOnceList(generation *gen);
void printMutableList(generation *gen);
262 /* -----------------------------------------------------------------------------
263 Macros for distinguishing data pointers from code pointers
264 -------------------------------------------------------------------------- */
266 * We use some symbols inserted automatically by the linker to decide
267 * whether a pointer points to text, data, or user space. These tests
268 * assume that text is lower in the address space than data, which in
269 * turn is lower than user allocated memory.
271 * If this assumption is false (say on some strange architecture) then
272 * the tests IS_CODE_PTR and IS_DATA_PTR below will need to be
273 * modified (and that should be all that's necessary).
275 * _start } start of read-only text space
276 * _etext } end of read-only text space
277 * _end } end of read-write data space
extern void* TEXT_SECTION_END_MARKER_DECL;
extern void* DATA_SECTION_END_MARKER_DECL;

/* Classify a pointer as code, static data, or user (heap) space by
 * comparing against linker-provided section end markers.  (Restored
 * the #else / #endif lost between the two variants.) */
#if defined(INTERPRETER) || defined(GHCI)
/* Take into account code sections in dynamically loaded object files. */
#define IS_CODE_PTR(p) ( ((P_)(p) < (P_)&TEXT_SECTION_END_MARKER)       \
                      || is_dynamically_loaded_code_or_rodata_ptr((char *)p) )
#define IS_DATA_PTR(p) ( ((P_)(p) >= (P_)&TEXT_SECTION_END_MARKER &&    \
                          (P_)(p) < (P_)&DATA_SECTION_END_MARKER)       \
                      || is_dynamically_loaded_rwdata_ptr((char *)p) )
#define IS_USER_PTR(p) ( ((P_)(p) >= (P_)&DATA_SECTION_END_MARKER)      \
                      && is_not_dynamically_loaded_ptr((char *)p) )
#else
#define IS_CODE_PTR(p) ((P_)(p) < (P_)&TEXT_SECTION_END_MARKER)
#define IS_DATA_PTR(p) ((P_)(p) >= (P_)&TEXT_SECTION_END_MARKER && (P_)(p) < (P_)&DATA_SECTION_END_MARKER)
#define IS_USER_PTR(p) ((P_)(p) >= (P_)&DATA_SECTION_END_MARKER)
#endif
/* The HEAP_ALLOCED test below is called FOR EVERY SINGLE CLOSURE
 * during GC.  It needs to be FAST.
 * (Restored the #else / #endif lost between the two variants.)
 */
#ifdef TEXT_BEFORE_HEAP
# define HEAP_ALLOCED(x)  ((StgPtr)(x) >= (StgPtr)(HEAP_BASE))
#else
extern int is_heap_alloced(const void* x);
# define HEAP_ALLOCED(x)  (is_heap_alloced(x))
#endif
309 /* When working with Win32 DLLs, static closures are identified by
310 being prefixed with a zero word. This is needed so that we can
311 distinguish between pointers to static closures and (reversed!)
314 This 'scheme' breaks down for closure tables such as CHARLIKE,
315 so we catch these separately.
317 LOOKS_LIKE_STATIC_CLOSURE()
318 - discriminates between static closures and info tbls
319 (needed by LOOKS_LIKE_GHC_INFO() below - [Win32 DLLs only.])
321 - distinguishes between static and heap allocated data.
/* (Restored the #else / #endif lost between the DLL and non-DLL
 * variants; without them both LOOKS_LIKE_STATIC_CLOSURE definitions
 * were active at once.) */
#if defined(ENABLE_WIN32_DLL_SUPPORT) && !defined(INTERPRETER)
/* definitely do not enable for mingw DietHEP */
#define LOOKS_LIKE_STATIC(r) (!(HEAP_ALLOCED(r)))

/* Tiresome predicates needed to check for pointers into the closure tables */
#define IS_CHARLIKE_CLOSURE(p) \
    ( (P_)(p) >= (P_)stg_CHARLIKE_closure && \
      (char*)(p) <= ((char*)stg_CHARLIKE_closure + \
                     (MAX_CHARLIKE-MIN_CHARLIKE) * sizeof(StgIntCharlikeClosure)) )
#define IS_INTLIKE_CLOSURE(p) \
    ( (P_)(p) >= (P_)stg_INTLIKE_closure && \
      (char*)(p) <= ((char*)stg_INTLIKE_closure + \
                     (MAX_INTLIKE-MIN_INTLIKE) * sizeof(StgIntCharlikeClosure)) )

/* Static closures in DLL-land are prefixed with a zero word; the
 * CHARLIKE/INTLIKE closure tables are caught separately. */
#define LOOKS_LIKE_STATIC_CLOSURE(r) (((*(((unsigned long *)(r))-1)) == 0) || IS_CHARLIKE_CLOSURE(r) || IS_INTLIKE_CLOSURE(r))
#else
#define LOOKS_LIKE_STATIC(r) IS_DATA_PTR(r)
#define LOOKS_LIKE_STATIC_CLOSURE(r) IS_DATA_PTR(r)
#endif
344 /* -----------------------------------------------------------------------------
345 Macros for distinguishing infotables from closures.
347 You'd think it'd be easy to tell an info pointer from a closure pointer:
348 closures live on the heap and infotables are in read only memory. Right?
349 Wrong! Static closures live in read only memory and Hugs allocates
350 infotables for constructors on the (writable) C heap.
351 -------------------------------------------------------------------------- */
/* (Reconstructed the conditional structure around the four definitions
 * of IS_HUGS_CONSTR_INFO from their visible order; the #ifdef
 * INTERPRETER / #else / #endif lines had been lost, leaving four
 * conflicting #defines.  NOTE(review): confirm nesting against the
 * original Storage.h.) */
#ifdef INTERPRETER
# ifdef USE_MINIINTERPRETER
/* yoiks: one of the dreaded pointer equality tests */
# define IS_HUGS_CONSTR_INFO(info) \
    (((StgInfoTable *)(info))->entry == (StgFunPtr)&Hugs_CONSTR_entry)
# else
# define IS_HUGS_CONSTR_INFO(info) 0 /* ToDo: more than mildly bogus */
# endif
#else /* !INTERPRETER */
# ifdef ENABLE_WIN32_DLL_SUPPORT
/* not accurate by any means, but stops the assertions failing... */
# define IS_HUGS_CONSTR_INFO(info) IS_USER_PTR(info)
# else
# define IS_HUGS_CONSTR_INFO(info) 0 /* ToDo: more than mildly bogus */
# endif
#endif
/* LOOKS_LIKE_GHC_INFO is called moderately often during GC, but
 * certainly not as often as HEAP_ALLOCED.
 * (Restored the #else / #endif lost between the two variants.)
 */
#ifdef TEXT_BEFORE_HEAP /* needed for mingw DietHEP */
# define LOOKS_LIKE_GHC_INFO(info) IS_CODE_PTR(info)
#else
# define LOOKS_LIKE_GHC_INFO(info) (!HEAP_ALLOCED(info) \
                                    && !LOOKS_LIKE_STATIC_CLOSURE(info))
#endif
378 /* -----------------------------------------------------------------------------
379 Macros for calculating how big a closure will be (used during allocation)
380 -------------------------------------------------------------------------- */
/* ToDo: replace unsigned int by nat. The only fly in the ointment is that
 * nat comes from Rts.h which many folk dont include. Sigh!
 */

/* Size in words of an updatable application (AP_UPD) with n_args arguments. */
static __inline__ StgOffset AP_sizeW ( unsigned int n_args )
{ return sizeofW(StgAP_UPD) + n_args; }

/* Size in words of a partial application holding n_args arguments. */
static __inline__ StgOffset PAP_sizeW ( unsigned int n_args )
{ return sizeofW(StgPAP) + n_args; }

/* Size in words of a constructor with p pointer and np non-pointer fields. */
static __inline__ StgOffset CONSTR_sizeW( unsigned int p, unsigned int np )
{ return sizeofW(StgHeader) + p + np; }

/* Thunk selectors are padded to MIN_UPD_SIZE so they can be updated in place. */
static __inline__ StgOffset THUNK_SELECTOR_sizeW ( void )
{ return sizeofW(StgHeader) + MIN_UPD_SIZE; }

/* Black holes likewise reserve MIN_UPD_SIZE payload words for the update. */
static __inline__ StgOffset BLACKHOLE_sizeW ( void )
{ return sizeofW(StgHeader) + MIN_UPD_SIZE; }

/* Size in words of a CAF closure. */
static __inline__ StgOffset CAF_sizeW ( void )
{ return sizeofW(StgCAF); }
/* --------------------------------------------------------------------------
 * Sizes of closures, computed from live runtime data
 * ------------------------------------------------------------------------*/

/* Total words of a closure whose layout is described by its info table:
 * header plus its pointer and non-pointer payload counts. */
static __inline__ StgOffset sizeW_fromITBL( const StgInfoTable* itbl )
{ return sizeofW(StgClosure)
       + sizeofW(StgPtr) * itbl->layout.payload.ptrs
       + sizeofW(StgWord) * itbl->layout.payload.nptrs; }

/* Size of a PAP from its stored argument count. */
static __inline__ StgOffset pap_sizeW( StgPAP* x )
{ return PAP_sizeW(x->n_args); }

/* Size of a byte-array closure: header plus its word count. */
static __inline__ StgOffset arr_words_sizeW( StgArrWords* x )
{ return sizeofW(StgArrWords) + x->words; }

/* Size of a mutable pointer array: header plus its pointer count. */
static __inline__ StgOffset mut_arr_ptrs_sizeW( StgMutArrPtrs* x )
{ return sizeofW(StgMutArrPtrs) + x->ptrs; }

/* Size of a TSO: fixed TSO structure plus its stack. */
static __inline__ StgWord tso_sizeW ( StgTSO *tso )
{ return TSO_STRUCT_SIZEW + tso->stack_size; }