1 /* -----------------------------------------------------------------------------
3 * (c) The University of Glasgow 2004
5 * This file is included at the top of all .cmm source files (and
6 * *only* .cmm files). It defines a collection of useful macros for
7 * making .cmm code a bit less error-prone to write, and a bit easier
8 * on the eye for the reader.
10 * For the syntax of .cmm files, see the parser in ghc/compiler/cmm/CmmParse.y.
12 * If you're used to the old HC file syntax, here's a quick cheat sheet
13 * for converting HC code:
16 * - Remove all type casts
18 * - STGFUN(foo) { ... } ==> foo { ... }
19 * - FN_(foo) { ... } ==> foo { ... }
20 * - JMP_(e) ==> jump e;
21 * - Remove EXTFUN(foo)
24 * - Sp += n ==> Sp_adj(n)
25 * - Hp += n ==> Hp_adj(n)
26 * - R1.i ==> R1 (similarly for R1.w, R1.cl etc.)
27 * - You need to explicitly dereference variables; eg.
28 * alloc_blocks ==> CInt[alloc_blocks]
29 * - convert all word offsets into byte offsets:
31 * - sizeofW(StgFoo) ==> SIZEOF_StgFoo
32 * - ENTRY_CODE(e) ==> %ENTRY_CODE(e)
33 * - get_itbl(c) ==> %GET_STD_INFO(c)
34 * - Change liveness masks in STK_CHK_GEN, HP_CHK_GEN:
35 * R1_PTR | R2_PTR ==> R1_PTR & R2_PTR
37 * - Declarations like 'StgPtr p;' become just 'W_ p;'
38 * - e->payload[n] ==> PAYLOAD(e,n)
39 * - Be very careful with comparisons: the infix versions (>, >=, etc.)
40 * are unsigned, so use %lt(a,b) to get signed less-than for example.
42 * Accessing fields of structures defined in the RTS header files is
43 * done via automatically-generated macros in DerivedConstants.h. For
44 * example, where previously we used
46 * CurrentTSO->what_next = x
50 * StgTSO_what_next(CurrentTSO) = x
52 * where the StgTSO_what_next() macro is automatically generated by
 * mkDerivedConstants.c.  If you need to access a field that doesn't
54 * already have a macro, edit that file (it's pretty self-explanatory).
56 * -------------------------------------------------------------------------- */
62 * In files that are included into both C and C-- (and perhaps
63 * Haskell) sources, we sometimes need to conditionally compile bits
64 * depending on the language. CMINUSMINUS==1 in .cmm sources:
68 #include "ghcconfig.h"
69 #include "RtsConfig.h"
71 /* -----------------------------------------------------------------------------
74 The following synonyms for C-- types are declared here:
76 I8, I16, I32, I64 MachRep-style names for convenience
78 W_ is shorthand for the word type (== StgWord)
79 F_ shorthand for float (F_ == StgFloat == C's float)
80 D_ shorthand for double (D_ == StgDouble == C's double)
82 CInt has the same size as an int in C on this platform
83 CLong has the same size as a long in C on this platform
85 --------------------------------------------------------------------------- */
93 #if SIZEOF_VOID_P == 4
95 /* Maybe it's better to include MachDeps.h */
97 #elif SIZEOF_VOID_P == 8
99 /* Maybe it's better to include MachDeps.h */
102 #error Unknown word size
106 * The RTS must sometimes UNTAG a pointer before dereferencing it.
107 * See the wiki page Commentary/Rts/HaskellExecution/PointerTagging
/* Mask covering the pointer-tag bits (the low TAG_BITS bits of a closure
 * pointer). */
#define TAG_MASK ((1 << TAG_BITS) - 1)
/* Strip the tag bits from a (possibly tagged) closure pointer. */
#define UNTAG(p) (p & ~TAG_MASK)
/* Extract just the tag bits of a closure pointer (0 means untagged). */
#define GETTAG(p) (p & TAG_MASK)
115 #elif SIZEOF_INT == 8
118 #error Unknown int size
123 #elif SIZEOF_LONG == 8
126 #error Unknown long size
133 #define SIZEOF_StgDouble 8
134 #define SIZEOF_StgWord64 8
136 /* -----------------------------------------------------------------------------
138 -------------------------------------------------------------------------- */
142 #define STRING(name,str) \
144 name : bits8[] str; \
147 /* -----------------------------------------------------------------------------
150 Everything in C-- is in byte offsets (well, most things). We use
151 some macros to allow us to express offsets in words and to try to
152 avoid byte/word confusion.
153 -------------------------------------------------------------------------- */
/* SIZEOF_W: size of a machine word in bytes; W_MASK: mask of the
 * low-order bits that must be zero in a word-aligned address. */
#define SIZEOF_W SIZEOF_VOID_P
#define W_MASK (SIZEOF_W-1)

/* Converting quantities of words to bytes */
#define WDS(n) ((n)*SIZEOF_W)

/*
 * Converting quantities of bytes to words
 * NB. these work on *unsigned* values only
 */
#define BYTES_TO_WDS(n) ((n) / SIZEOF_W)
#define ROUNDUP_BYTES_TO_WDS(n) (((n) + SIZEOF_W - 1) / SIZEOF_W)
174 /* TO_W_(n) converts n to W_ type from a smaller type */
176 #define TO_W_(x) %sx32(x)
177 #define HALF_W_(x) %lobits16(x)
179 #define TO_W_(x) %sx64(x)
180 #define HALF_W_(x) %lobits32(x)
183 #if SIZEOF_INT == 4 && SIZEOF_W == 8
184 #define W_TO_INT(x) %lobits32(x)
185 #elif SIZEOF_INT == SIZEOF_W
186 #define W_TO_INT(x) (x)
189 /* -----------------------------------------------------------------------------
190 Heap/stack access, and adjusting the heap/stack pointers.
191 -------------------------------------------------------------------------- */
/* Word-indexed access relative to the stack/heap pointers: Sp(n) is the
 * word at byte offset WDS(n) from Sp, and similarly Hp(n) from Hp. */
#define Sp(n) W_[Sp + WDS(n)]
#define Hp(n) W_[Hp + WDS(n)]

/* Adjust the stack/heap pointer by n words (n may be negative). */
#define Sp_adj(n) Sp = Sp + WDS(n)
#define Hp_adj(n) Hp = Hp + WDS(n)
199 /* -----------------------------------------------------------------------------
200 Assertions and Debuggery
201 -------------------------------------------------------------------------- */
204 #define ASSERT(predicate) \
208 foreign "C" _assertFail(NULL, __LINE__); \
211 #define ASSERT(p) /* nothing */
215 #define DEBUG_ONLY(s) s
217 #define DEBUG_ONLY(s) /* nothing */
221 * The IF_DEBUG macro is useful for debug messages that depend on one
222 * of the RTS debug options. For example:
224 * IF_DEBUG(RtsFlags_DebugFlags_apply,
225 * foreign "C" fprintf(stderr, stg_ap_0_ret_str));
227 * Note the syntax is slightly different to the C version of this macro.
230 #define IF_DEBUG(c,s) if (RtsFlags_DebugFlags_##c(RtsFlags) != 0::I32) { s; }
232 #define IF_DEBUG(c,s) /* nothing */
235 /* -----------------------------------------------------------------------------
238 It isn't safe to "enter" every closure. Functions in particular
239 have no entry code as such; their entry point contains the code to
242 ToDo: range should end in N_CLOSURE_TYPES-1, not N_CLOSURE_TYPES,
243 but switch doesn't allow us to use exprs there yet.
245 If R1 points to a tagged object it points either to
247 * A function with arity <= TAG_MASK.
248 In both cases the right thing to do is to return.
249 Note: it is rather lucky that we can use the tag bits to do this
250 for both objects. Maybe it points to a brittle design?
252 Indirections can contain tagged pointers, so their tag is checked.
253 -------------------------------------------------------------------------- */
257 // When profiling, we cannot shortcut ENTER() by checking the tag,
258 // because LDV profiling relies on entering closures to mark them as
262 info = %INFO_PTR(UNTAG(P1));
270 if (GETTAG(P1) != 0) { \
271 jump %ENTRY_CODE(Sp(0)); \
273 info = %INFO_PTR(P1);
275 #define UNTAG_R1 /* nothing */
283 switch [INVALID_OBJECT .. N_CLOSURE_TYPES] \
284 (TO_W_( %INFO_TYPE(%STD_INFO(info)) )) { \
292 P1 = StgInd_indirectee(P1); \
306 jump %ENTRY_CODE(Sp(0)); \
311 jump %ENTRY_CODE(info); \
315 // The FUN cases almost never happen: a pointer to a non-static FUN
316 // should always be tagged. This unfortunately isn't true for the
317 // interpreter right now, which leaves untagged FUNs on the stack.
319 /* -----------------------------------------------------------------------------
321 -------------------------------------------------------------------------- */
323 #include "Constants.h"
324 #include "DerivedConstants.h"
325 #include "ClosureTypes.h"
327 #include "OSThreads.h"
328 #include "SMPClosureOps.h"
331 * Need MachRegs, because some of the RTS code is conditionally
332 * compiled based on REG_R1, REG_R2, etc.
334 #define STOLEN_X86_REGS 4
335 #include "MachRegs.h"
337 #include "Liveness.h"
338 #include "StgLdvProf.h"
342 #include "Block.h" /* For Bdescr() */
/* Recover the current Capability from BaseReg, which points at the
 * Capability's register table ("r") field, OFFSET_Capability_r bytes in. */
#define MyCapability() (BaseReg - OFFSET_Capability_r)
347 /* -------------------------------------------------------------------------
348 Allocation and garbage collection
349 ------------------------------------------------------------------------- */
352 * ALLOC_PRIM is for allocating memory on the heap for a primitive
353 * object. It is used all over PrimOps.cmm.
355 * We make the simplifying assumption that the "admin" part of a
356 * primitive closure is just the header when calculating sizes for
357 * ticky-ticky. It's not clear whether eg. the size field of an array
358 * should be counted as "admin", or the various fields of a BCO.
360 #define ALLOC_PRIM(bytes,liveness,reentry) \
361 HP_CHK_GEN_TICKY(bytes,liveness,reentry); \
362 TICK_ALLOC_PRIM(SIZEOF_StgHeader,bytes-SIZEOF_StgHeader,0); \
/* CCS_ALLOC wants the size in words, because ccs->mem_alloc is in words */
#define CCCS_ALLOC(__alloc) CCS_ALLOC(BYTES_TO_WDS(__alloc), W_[CCCS])

/* Heap check plus ticky-ticky heap-allocation accounting for the same
 * number of bytes. */
#define HP_CHK_GEN_TICKY(alloc,liveness,reentry) \
   HP_CHK_GEN(alloc,liveness,reentry); \
   TICK_ALLOC_HEAP_NOCTR(alloc);
372 // allocateLocal() allocates from the nursery, so we check to see
373 // whether the nursery is nearly empty in any function that uses
374 // allocateLocal() - this includes many of the primops.
375 #define MAYBE_GC(liveness,reentry) \
376 if (bdescr_link(CurrentNursery) == NULL || CInt[alloc_blocks] >= CInt[alloc_blocks_lim]) { \
380 jump stg_gc_gen_hp; \
383 /* -----------------------------------------------------------------------------
385 -------------------------------------------------------------------------- */
388 * This is really ugly, since we don't do the rest of StgHeader this
389 * way. The problem is that values from DerivedConstants.h cannot be
390 * dependent on the way (SMP, PROF etc.). For SIZEOF_StgHeader we get
391 * the value from GHC, but it seems like too much trouble to do that
392 * for StgThunkHeader.
/* Size in bytes of a thunk header (StgHeader plus the SMP thunk-header
 * padding).  Parenthesised so the expansion is safe inside larger
 * expressions: the unparenthesised form A+B mis-expands under
 * multiplication (x*SIZEOF_StgThunkHeader) or unary minus
 * (-SIZEOF_StgThunkHeader). */
#define SIZEOF_StgThunkHeader (SIZEOF_StgHeader+SIZEOF_StgSMPThunkHeader)

/* Payload word __ix__ of the thunk pointed to by __ptr__ (payload words
 * start immediately after the thunk header). */
#define StgThunk_payload(__ptr__,__ix__) \
    W_[(__ptr__)+SIZEOF_StgThunkHeader+ WDS(__ix__)]
399 /* -----------------------------------------------------------------------------
401 -------------------------------------------------------------------------- */
/* The offset of the payload of an array */
#define BYTE_ARR_CTS(arr) ((arr) + SIZEOF_StgArrWords)

/* Getting/setting the info pointer of a closure */
#define SET_INFO(p,info) StgHeader_info(p) = info
#define GET_INFO(p) StgHeader_info(p)

/* Determine the size of an ordinary closure from its info table:
 * header plus one word per pointer and per non-pointer field. */
#define sizeW_fromITBL(itbl) \
 SIZEOF_StgHeader + WDS(%INFO_PTRS(itbl)) + WDS(%INFO_NPTRS(itbl))
/* NB. duplicated from InfoTables.h! */
/* A small bitmap packs its size and its bits into a single word:
 * BITMAP_SIZE extracts the size field, BITMAP_BITS the bit payload. */
#define BITMAP_SIZE(bitmap) ((bitmap) & BITMAP_SIZE_MASK)
#define BITMAP_BITS(bitmap) ((bitmap) >> BITMAP_BITS_SHIFT)
418 /* Debugging macros */
419 #define LOOKS_LIKE_INFO_PTR(p) \
421 LOOKS_LIKE_INFO_PTR_NOT_NULL(p))
/* Sanity check (non-NULL p assumed): the closure type in the standard
 * info table must lie strictly inside the valid range. */
#define LOOKS_LIKE_INFO_PTR_NOT_NULL(p) \
 ( (TO_W_(%INFO_TYPE(%STD_INFO(p))) != INVALID_OBJECT) && \
   (TO_W_(%INFO_TYPE(%STD_INFO(p))) < N_CLOSURE_TYPES))

/* Sanity check a closure pointer: untag it, then vet its info pointer. */
#define LOOKS_LIKE_CLOSURE_PTR(p) (LOOKS_LIKE_INFO_PTR(GET_INFO(UNTAG(p))))
430 * The layout of the StgFunInfoExtra part of an info table changes
431 * depending on TABLES_NEXT_TO_CODE. So we define field access
432 * macros which use the appropriate version here:
434 #ifdef TABLES_NEXT_TO_CODE
/*
 * when TABLES_NEXT_TO_CODE, slow_apply is stored as an offset
 * instead of the normal pointer.
 */
/* Reconstruct the absolute slow_apply entry point: stored offset plus
 * the end of the (reversed) fun-info structure. */
#define StgFunInfoExtra_slow_apply(fun_info) \
    (TO_W_(StgFunInfoExtraRev_slow_apply_offset(fun_info)) \
    + (fun_info) + SIZEOF_StgFunInfoExtraRev + SIZEOF_StgInfoTable)

/* Remaining fields are plain accessors on the reversed layout. */
#define StgFunInfoExtra_fun_type(i) StgFunInfoExtraRev_fun_type(i)
#define StgFunInfoExtra_arity(i) StgFunInfoExtraRev_arity(i)
#define StgFunInfoExtra_bitmap(i) StgFunInfoExtraRev_bitmap(i)
/* Without TABLES_NEXT_TO_CODE the fields are stored in forward layout,
 * so these are straight accessors (slow_apply is a normal pointer). */
#define StgFunInfoExtra_slow_apply(i) StgFunInfoExtraFwd_slow_apply(i)
#define StgFunInfoExtra_fun_type(i) StgFunInfoExtraFwd_fun_type(i)
#define StgFunInfoExtra_arity(i) StgFunInfoExtraFwd_arity(i)
#define StgFunInfoExtra_bitmap(i) StgFunInfoExtraFwd_bitmap(i)
454 /* -----------------------------------------------------------------------------
455 Voluntary Yields/Blocks
457 We only have a generic version of this at the moment - if it turns
458 out to be slowing us down we can make specialised ones.
459 -------------------------------------------------------------------------- */
461 #define YIELD(liveness,reentry) \
466 #define BLOCK(liveness,reentry) \
471 /* -----------------------------------------------------------------------------
473 -------------------------------------------------------------------------- */
/* Bump ticky counter `ctr' by n.  (n) is parenthesised so negative or
 * compound arguments (e.g. conditional expressions) associate correctly
 * in the expansion. */
#define TICK_BUMP_BY(ctr,n) CLong[ctr] = CLong[ctr] + (n)
478 #define TICK_BUMP_BY(ctr,n) /* nothing */
/* Bump a ticky counter by one. */
#define TICK_BUMP(ctr) TICK_BUMP_BY(ctr,1)

/* Closure-entry counters. */
#define TICK_ENT_DYN_IND() TICK_BUMP(ENT_DYN_IND_ctr)
#define TICK_ENT_DYN_THK() TICK_BUMP(ENT_DYN_THK_ctr)
#define TICK_ENT_VIA_NODE() TICK_BUMP(ENT_VIA_NODE_ctr)
#define TICK_ENT_STATIC_IND() TICK_BUMP(ENT_STATIC_IND_ctr)
#define TICK_ENT_PERM_IND() TICK_BUMP(ENT_PERM_IND_ctr)
#define TICK_ENT_PAP() TICK_BUMP(ENT_PAP_ctr)
#define TICK_ENT_AP() TICK_BUMP(ENT_AP_ctr)
#define TICK_ENT_AP_STACK() TICK_BUMP(ENT_AP_STACK_ctr)
#define TICK_ENT_BH() TICK_BUMP(ENT_BH_ctr)
#define TICK_UNKNOWN_CALL() TICK_BUMP(UNKNOWN_CALL_ctr)

/* Update-frame and update counters. */
#define TICK_UPDF_PUSHED() TICK_BUMP(UPDF_PUSHED_ctr)
#define TICK_CATCHF_PUSHED() TICK_BUMP(CATCHF_PUSHED_ctr)
#define TICK_UPDF_OMITTED() TICK_BUMP(UPDF_OMITTED_ctr)
#define TICK_UPD_NEW_IND() TICK_BUMP(UPD_NEW_IND_ctr)
#define TICK_UPD_NEW_PERM_IND() TICK_BUMP(UPD_NEW_PERM_IND_ctr)
#define TICK_UPD_OLD_IND() TICK_BUMP(UPD_OLD_IND_ctr)
#define TICK_UPD_OLD_PERM_IND() TICK_BUMP(UPD_OLD_PERM_IND_ctr)

/* Slow-call outcome counters (argument count vs. function/PAP arity). */
#define TICK_SLOW_CALL_FUN_TOO_FEW() TICK_BUMP(SLOW_CALL_FUN_TOO_FEW_ctr)
#define TICK_SLOW_CALL_FUN_CORRECT() TICK_BUMP(SLOW_CALL_FUN_CORRECT_ctr)
#define TICK_SLOW_CALL_FUN_TOO_MANY() TICK_BUMP(SLOW_CALL_FUN_TOO_MANY_ctr)
#define TICK_SLOW_CALL_PAP_TOO_FEW() TICK_BUMP(SLOW_CALL_PAP_TOO_FEW_ctr)
#define TICK_SLOW_CALL_PAP_CORRECT() TICK_BUMP(SLOW_CALL_PAP_CORRECT_ctr)
#define TICK_SLOW_CALL_PAP_TOO_MANY() TICK_BUMP(SLOW_CALL_PAP_TOO_MANY_ctr)

/* Slow-call argument-pattern counters (v=void, p=pointer). */
#define TICK_SLOW_CALL_v() TICK_BUMP(SLOW_CALL_v_ctr)
#define TICK_SLOW_CALL_p() TICK_BUMP(SLOW_CALL_p_ctr)
#define TICK_SLOW_CALL_pv() TICK_BUMP(SLOW_CALL_pv_ctr)
#define TICK_SLOW_CALL_pp() TICK_BUMP(SLOW_CALL_pp_ctr)
#define TICK_SLOW_CALL_ppp() TICK_BUMP(SLOW_CALL_ppp_ctr)
#define TICK_SLOW_CALL_pppp() TICK_BUMP(SLOW_CALL_pppp_ctr)
#define TICK_SLOW_CALL_ppppp() TICK_BUMP(SLOW_CALL_ppppp_ctr)
#define TICK_SLOW_CALL_pppppp() TICK_BUMP(SLOW_CALL_pppppp_ctr)
517 /* NOTE: TICK_HISTO_BY and TICK_HISTO
518 currently have no effect.
519 The old code for it didn't typecheck and I
520 just commented it out to get ticky to work.
/* Histogram ticks are currently disabled (the old implementation did not
 * typecheck), so these expand to nothing. */
#define TICK_HISTO_BY(histo,n,i) /* nothing */
#define TICK_HISTO(histo,n) TICK_HISTO_BY(histo,n,1)
/* An unboxed tuple with n components: bump the counter and record the
 * arity in the histogram.  NB. the counter symbol is passed bare, like
 * every other TICK_* macro: TICK_BUMP does the increment itself, so a
 * `++' on the argument would end up inside the CLong[...] index
 * expressions of the expansion. */
#define TICK_RET_UNBOXED_TUP(n) \
  TICK_BUMP(RET_UNBOXED_TUP_ctr); \
  TICK_HISTO(RET_UNBOXED_TUP,n)
533 * A slow call with n arguments. In the unevald case, this call has
534 * already been counted once, so don't count it again.
/* A slow call with n arguments: count it and record n in the histogram. */
#define TICK_SLOW_CALL(n) \
  TICK_BUMP(SLOW_CALL_ctr); \
  TICK_HISTO(SLOW_CALL,n)
/*
 * This slow call was found to be to an unevaluated function; undo the
 * ticks we did in TICK_SLOW_CALL.
 */
#define TICK_SLOW_CALL_UNEVALD(n) \
  TICK_BUMP(SLOW_CALL_UNEVALD_ctr); \
  TICK_BUMP_BY(SLOW_CALL_ctr,-1); \
  TICK_HISTO_BY(SLOW_CALL,n,-1);
/* Updating a closure with a new CON */
#define TICK_UPD_CON_IN_NEW(n) \
  TICK_BUMP(UPD_CON_IN_NEW_ctr); \
  TICK_HISTO(UPD_CON_IN_NEW,n)
/* Account for a heap allocation of n (bump the allocation count by one
 * and the running total by n), without touching a per-closure counter. */
#define TICK_ALLOC_HEAP_NOCTR(n) \
  TICK_BUMP(ALLOC_HEAP_ctr); \
  TICK_BUMP_BY(ALLOC_HEAP_tot,n)
558 /* -----------------------------------------------------------------------------
560 -------------------------------------------------------------------------- */
/* Short names for the RTS's statically-allocated sentinel closures
 * (STM transaction record, TSO queue, and invariant-check queue ends). */
#define NO_TREC stg_NO_TREC_closure
#define END_TSO_QUEUE stg_END_TSO_QUEUE_closure
#define END_INVARIANT_CHECK_QUEUE stg_END_INVARIANT_CHECK_QUEUE_closure
566 #define recordMutableCap(p, gen, regs) \
569 mut_list = Capability_mut_lists(MyCapability()) + WDS(gen); \
570 __bd = W_[mut_list]; \
571 if (bdescr_free(__bd) >= bdescr_start(__bd) + BLOCK_SIZE) { \
573 ("ptr" __new_bd) = foreign "C" allocBlock_lock() [regs]; \
574 bdescr_link(__new_bd) = __bd; \
576 W_[mut_list] = __bd; \
579 free = bdescr_free(__bd); \
581 bdescr_free(__bd) = free + WDS(1);
583 #define recordMutable(p, regs) \
588 __bd = Bdescr(__p); \
589 __gen = TO_W_(bdescr_gen_no(__bd)); \
590 if (__gen > 0) { recordMutableCap(__p, __gen, regs); }