[project @ 1998-12-02 13:17:09 by simonm]
[ghc-hetmet.git] / ghc / compiler / codeGen / CgHeapery.lhs
index 01b2ed9..bc3f5e5 100644 (file)
@@ -1,33 +1,44 @@
 %
-% (c) The GRASP/AQUA Project, Glasgow University, 1992-1996
+% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
+%
+% $Id: CgHeapery.lhs,v 1.10 1998/12/02 13:17:50 simonm Exp $
 %
 \section[CgHeapery]{Heap management functions}
 
 \begin{code}
 module CgHeapery (
-       heapCheck,
+       fastEntryChecks, altHeapCheck, thunkChecks,
        allocHeap, allocDynClosure
 
         -- new functions, basically inserting macro calls into Code -- HWL
-        , heapCheckOnly, fetchAndReschedule, yield
+        ,fetchAndReschedule, yield
     ) where
 
 #include "HsVersions.h"
 
 import AbsCSyn
+import CLabel
 import CgMonad
 
+import CgStackery      ( getFinalStackHW, mkTaggedStkAmodes, mkTagAssts )
+import SMRep           ( fixedHdrSize, getSMRepStr )
 import AbsCUtils       ( mkAbstractCs, getAmodeRep )
-import CgUsages                ( getVirtAndRealHp, setVirtHp, setRealHp,
+import CgUsages                ( getVirtAndRealHp, getRealSp, setVirtHp, setRealHp,
                          initHeapUsage
                        )
-import ClosureInfo     ( closureSize, closureHdrSize, closureGoodStuffSize,
-                         slopSize, allocProfilingMsg, closureKind, ClosureInfo
-                       )
-import HeapOffs                ( isZeroOff, addOff, intOff,
-                         VirtualHeapOffset, HeapOffset
+import ClosureInfo     ( closureSize, closureGoodStuffSize,
+                         slopSize, allocProfilingMsg, ClosureInfo,
+                         closureSMRep
                        )
-import PrimRep         ( PrimRep(..) )
+import PrimRep         ( PrimRep(..), isFollowableRep )
+import Util            ( panic )
+import CmdLineOpts     ( opt_SccProfilingOn )
+import GlaExts
+
+#ifdef DEBUG
+import PprAbsC         ( pprMagicId ) -- tmp
+import Outputable      -- tmp
+#endif
 \end{code}
 
 %************************************************************************
@@ -44,81 +55,298 @@ beginning of every slow entry code in order to simulate the fetching of
 closures. If fetching is necessary (i.e. current closure is not local) then
 an automatic context switch is done.
 
+-----------------------------------------------------------------------------
+A heap/stack check at a fast entry point.
+
 \begin{code}
-heapCheck :: [MagicId]          -- Live registers
-         -> Bool               -- Node reqd after GC?
-         -> Code
-         -> Code
 
-heapCheck = heapCheck' False
+fastEntryChecks
+       :: [MagicId]                    -- Live registers
+       -> [(VirtualSpOffset,Int)]      -- stack slots to tag
+       -> CLabel                       -- return point
+       -> Bool                         -- node points to closure
+       -> Code
+       -> Code
 
-heapCheckOnly :: [MagicId]          -- Live registers
-                -> Bool               -- Node reqd after GC?
-                -> Code
-                -> Code
+fastEntryChecks regs tags ret node_points code
+  =  mkTagAssts tags                            `thenFC` \tag_assts ->
+     getFinalStackHW                            (\ spHw -> 
+     getRealSp                                  `thenFC` \ sp ->
+     let stk_words = spHw - sp in
+     initHeapUsage                              (\ hp_words  ->
 
-heapCheckOnly = heapCheck' False
+     ( if all_pointers then -- heap checks are quite easy
+         absC (checking_code stk_words hp_words tag_assts 
+                   free_reg (length regs))
 
--- May be emit context switch and emit heap check macro
+       else -- they are complicated
 
-heapCheck' ::   Bool                    -- context switch here?
-               -> [MagicId]            -- Live registers
-               -> Bool                 -- Node reqd after GC?
-               -> Code
-               -> Code
+         -- save all registers on the stack and adjust the stack pointer.
+         -- ToDo: find the initial all-pointer segment and don't save them.
+
+         mkTaggedStkAmodes sp addrmode_regs 
+                 `thenFC` \(new_sp, stk_assts, more_tag_assts) ->
+
+         -- only let the extra stack assignments affect the stack
+         -- high water mark if we were doing a stack check anyway;
+         -- otherwise we end up generating unnecessary stack checks.
+         -- Careful about knot-tying loops!
+         let real_stk_words =  if new_sp - sp > stk_words && stk_words /= 0
+                                       then new_sp - sp
+                                       else stk_words
+         in
+
+         let adjust_sp = CAssign (CReg Sp) (CAddr (spRel sp new_sp)) in
+
+         absC (checking_code real_stk_words hp_words 
+                   (mkAbstractCs [tag_assts, stk_assts, more_tag_assts,
+                                  adjust_sp])
+                   (CReg node) 0)
+
+      ) `thenC`
+
+      setRealHp hp_words `thenC`
+      code))
 
-heapCheck' do_context_switch regs node_reqd code
-  = initHeapUsage (\ hHw -> do_heap_chk hHw `thenC` code)
   where
+       
+    checking_code stk hp assts ret regs
+       | node_points = do_checks_np stk hp assts (regs+1) -- ret not required
+        | otherwise   = do_checks    stk hp assts ret regs
+
+    -- When node points to the closure for the function:
+
+    do_checks_np
+       :: Int                          -- stack headroom
+       -> Int                          -- heap  headroom
+       -> AbstractC                    -- assignments to perform on failure
+       -> Int                          -- number of pointer registers live
+       -> AbstractC
+    do_checks_np 0 0 _ _ = AbsCNop
+    do_checks_np 0 hp_words tag_assts ptrs =
+           CCheck HP_CHK_NP [
+                 mkIntCLit hp_words,
+                 mkIntCLit ptrs
+                ]
+                tag_assts
+    do_checks_np stk_words 0 tag_assts ptrs =
+           CCheck STK_CHK_NP [
+                 mkIntCLit stk_words,
+                 mkIntCLit ptrs
+                ]
+                tag_assts
+    do_checks_np stk_words hp_words tag_assts ptrs =
+           CCheck HP_STK_CHK_NP [
+                 mkIntCLit stk_words,
+                 mkIntCLit hp_words,
+                 mkIntCLit ptrs
+                ]
+                tag_assts
+
+    -- When node doesn't point to the closure (we need an explicit retn addr)
+
+    do_checks 
+       :: Int                          -- stack headroom
+       -> Int                          -- heap  headroom
+       -> AbstractC                    -- assignments to perform on failure
+       -> CAddrMode                    -- a register to hold the retn addr.
+       -> Int                          -- number of pointer registers live
+       -> AbstractC
+
+    do_checks 0 0 _ _ _ = AbsCNop
+    do_checks 0 hp_words tag_assts ret_reg ptrs =
+           CCheck HP_CHK [
+                 mkIntCLit hp_words,
+                 CLbl ret CodePtrRep,
+                 ret_reg,
+                 mkIntCLit ptrs
+                ]
+                tag_assts
+    do_checks stk_words 0 tag_assts ret_reg ptrs =
+           CCheck STK_CHK [
+                 mkIntCLit stk_words,
+                 CLbl ret CodePtrRep,
+                 ret_reg,
+                 mkIntCLit ptrs
+                ]
+                tag_assts
+    do_checks stk_words hp_words tag_assts ret_reg ptrs =
+           CCheck HP_STK_CHK [
+                 mkIntCLit stk_words,
+                 mkIntCLit hp_words,
+                 CLbl ret CodePtrRep,
+                 ret_reg,
+                 mkIntCLit ptrs
+                ]
+                tag_assts
+
+    free_reg  = case length regs + 1 of 
+                      IBOX(x) -> CReg (VanillaReg PtrRep x)
+
+    all_pointers = all pointer regs
+    pointer (VanillaReg rep _) = isFollowableRep rep
+    pointer _ = False
+
+    addrmode_regs = map CReg regs
+
+-- Checking code for thunks is just a special case of fast entry points:
+
+thunkChecks :: CLabel -> Bool -> Code -> Code
+thunkChecks ret node_points code = fastEntryChecks [] [] ret node_points code
+\end{code}
 
-    do_heap_chk :: HeapOffset -> Code
-    do_heap_chk words_required
-      =
-       -- HWL:: absC (CComment "Forced heap check --- HWL")  `thenC`
-       --absC  (if do_context_switch
-       --         then context_switch_code
-       --         else AbsCNop)                                 `thenC`
-
-       absC (if do_context_switch && not (isZeroOff words_required)
-               then context_switch_code
-               else AbsCNop)                                   `thenC`
-       absC (if isZeroOff(words_required)
-               then  AbsCNop
-               else  checking_code)  `thenC`
+Heap checks in a case alternative are nice and easy, provided this is
+a bog-standard algebraic case.  We have in our hand:
 
-       -- HWL was here:
-       --  For GrAnSim we want heap checks even if no heap is allocated in
-       --  the basic block to make context switches possible.
-       --  So, the if construct has been replaced by its else branch.
+       * one return address, on the stack,
+       * one return value, in Node.
 
-           -- The test is *inside* the absC, to avoid black holes!
+the canned code for this heap check failure just pushes Node on the
+stack, saying 'EnterGHC' to return.  The scheduler will return by
+entering the top value on the stack, which in turn will return through
+the return address, getting us back to where we were.  This is
+therefore only valid if the return value is *lifted* (just being
+boxed isn't good enough).  Only a PtrRep will do.
 
-       -- Now we have set up the real heap pointer and checked there is
-       -- enough space. It remains only to reflect this in the environment
+For primitive returns, we have an unlifted value in some register
+(either R1 or FloatReg1 or DblReg1).  This means using specialised
+heap-check code for these cases.
 
-       setRealHp words_required
+For unboxed tuple returns, there are an arbitrary number of possibly
+unboxed return values, some of which will be in registers, and the
+others will be on the stack, with gaps left for tagging the unboxed
+objects.  If a heap check is required, we need to fill in these tags.
 
-           -- The "word_required" here is a fudge.
-           -- *** IT DEPENDS ON THE DIRECTION ***, and on
-           -- whether the Hp is moved the whole way all
-           -- at once or not.
-      where
-       all_regs = if node_reqd then node:regs else regs
-       liveness_mask = mkLiveRegsMask all_regs
+The code below will cover all cases for the x86 architecture (where R1
+is the only VanillaReg ever used).  For other architectures, we'll
+have to do something about saving and restoring the other registers.
 
-       maybe_context_switch = if do_context_switch
-                               then context_switch_code
-                               else AbsCNop
+\begin{code}
+altHeapCheck 
+       :: Bool                         -- is an algebraic alternative
+       -> [MagicId]                    -- live registers
+       -> [(VirtualSpOffset,Int)]      -- stack slots to tag
+       -> AbstractC
+       -> Maybe CLabel                 -- ret address if not on top of stack.
+       -> Code
+       -> Code
+
+-- unboxed tuple alternatives and let-no-escapes (the two most annoying
+-- constructs to generate code for!):
+
+altHeapCheck is_fun regs tags fail_code (Just ret_addr) code
+  = mkTagAssts tags `thenFC` \tag_assts1 ->
+    let tag_assts = mkAbstractCs [fail_code, tag_assts1]
+    in
+    initHeapUsage (\ hHw -> do_heap_chk hHw tag_assts `thenC` code)
+  where
+    do_heap_chk words_required tag_assts
+      = absC (if words_required == 0
+               then  AbsCNop
+               else  checking_code tag_assts)  `thenC`
+       setRealHp words_required
 
-       context_switch_code = CMacroStmt THREAD_CONTEXT_SWITCH [
-                             mkIntCLit liveness_mask,
-                             mkIntCLit (if node_reqd then 1 else 0)]
+      where
+       non_void_regs = filter (/= VoidReg) regs
+
+       checking_code tag_assts = 
+         case non_void_regs of
+
+           -- this will cover all cases for x86
+           [VanillaReg rep ILIT(1)] 
+
+              | isFollowableRep rep ->
+                 CCheck HP_CHK_UT_ALT
+                     [mkIntCLit words_required, mkIntCLit 1, mkIntCLit 0,
+                       CReg (VanillaReg RetRep ILIT(2)),
+                       CLbl ret_addr RetRep]
+                     tag_assts
+
+              | otherwise ->
+                 CCheck HP_CHK_UT_ALT
+                     [mkIntCLit words_required, mkIntCLit 0, mkIntCLit 1,
+                       CReg (VanillaReg RetRep ILIT(2)),
+                       CLbl ret_addr RetRep]
+                     tag_assts
+
+           several_regs ->
+                let liveness = mkRegLiveness several_regs
+               in
+               CCheck HP_CHK_GEN
+                    [mkIntCLit words_required, 
+                     mkIntCLit (IBOX(word2Int# liveness)),
+                     CLbl ret_addr RetRep] 
+                    tag_assts
+
+-- normal algebraic and primitive case alternatives:
+
+altHeapCheck is_fun regs [] AbsCNop Nothing code
+  = initHeapUsage (\ hHw -> do_heap_chk hHw `thenC` code)
+  where
+    do_heap_chk :: HeapOffset -> Code
+    do_heap_chk words_required
+      = absC (if words_required == 0
+               then  AbsCNop
+               else  checking_code)  `thenC`
+       setRealHp words_required
 
-       -- Good old heap check (excluding context switch)
-       checking_code = CMacroStmt HEAP_CHK [
-                       mkIntCLit liveness_mask,
-                       COffset words_required,
-                       mkIntCLit (if node_reqd then 1 else 0)]
+      where
+        non_void_regs = filter (/= VoidReg) regs
+
+       checking_code = 
+          case non_void_regs of
+
+           -- No regs live: probably a Void return
+           [] ->
+              CCheck HP_CHK_NOREGS [mkIntCLit words_required] AbsCNop
+
+           -- The SEQ case (polymorphic/function typed case branch)
+           [VanillaReg rep ILIT(1)]
+               |  rep == PtrRep
+               && is_fun ->
+                 CCheck HP_CHK_SEQ_NP
+                       [mkIntCLit words_required, mkIntCLit 1{-regs live-}]
+                       AbsCNop
+
+           -- R1 is lifted (the common case)
+           [VanillaReg rep ILIT(1)]
+               | rep == PtrRep ->
+                 CCheck HP_CHK_NP
+                       [mkIntCLit words_required, mkIntCLit 1{-regs live-}]
+                       AbsCNop
+
+           -- R1 is boxed, but unlifted
+               | isFollowableRep rep ->
+                 CCheck HP_CHK_UNPT_R1 [mkIntCLit words_required] AbsCNop
+
+           -- R1 is unboxed
+               | otherwise ->
+                 CCheck HP_CHK_UNBX_R1 [mkIntCLit words_required] AbsCNop
+
+           -- FloatReg1
+           [FloatReg ILIT(1)] ->
+                 CCheck HP_CHK_F1 [mkIntCLit words_required] AbsCNop
+
+           -- DblReg1
+           [DoubleReg ILIT(1)] ->
+                 CCheck HP_CHK_D1 [mkIntCLit words_required] AbsCNop
+
+           -- LongReg1
+           [LongReg _ ILIT(1)] ->
+                 CCheck HP_CHK_L1 [mkIntCLit words_required] AbsCNop
+
+#ifdef DEBUG
+           _ -> panic ("CgHeapery.altHeapCheck: unimplemented heap-check, live regs = " ++ showSDoc (sep (map pprMagicId non_void_regs)))
+#endif
+
+-- build up a bitmap of the live pointer registers
+
+mkRegLiveness :: [MagicId] -> Word#
+mkRegLiveness [] = int2Word# 0#
+mkRegLiveness (VanillaReg rep i : regs) 
+   | isFollowableRep rep = ((int2Word# 1#) `shiftL#` (i -# 1#)) 
+                               `or#` mkRegLiveness regs
+   | otherwise           = mkRegLiveness regs
 
 -- Emit macro for simulating a fetch and then reschedule
 
@@ -132,7 +360,7 @@ fetchAndReschedule regs node_reqd  =
        else absC AbsCNop
       where
        all_regs = if node_reqd then node:regs else regs
-       liveness_mask = mkLiveRegsMask all_regs
+       liveness_mask = 0 {-XXX: mkLiveRegsMask all_regs-}
 
        reschedule_code = absC  (CMacroStmt GRAN_RESCHEDULE [
                                 mkIntCLit liveness_mask,
@@ -167,7 +395,7 @@ yield regs node_reqd =
       yield_code
       where
         all_regs = if node_reqd then node:regs else regs
-        liveness_mask = mkLiveRegsMask all_regs
+        liveness_mask = 0 {-XXX: mkLiveRegsMask all_regs-}
 
         yield_code = absC (CMacroStmt GRAN_YIELD [mkIntCLit liveness_mask])
 \end{code}
@@ -198,41 +426,48 @@ allocDynClosure closure_info use_cc blame_cc amodes_with_offsets
        -- FIND THE OFFSET OF THE INFO-PTR WORD
        -- virtHp points to last allocated word, ie 1 *before* the
        -- info-ptr word of new object.
-    let  info_offset = addOff virtHp (intOff 1)
+    let  info_offset = virtHp + 1
 
        -- do_move IS THE ASSIGNMENT FUNCTION
         do_move (amode, offset_from_start)
-          = CAssign (CVal (HpRel realHp
-                                 (info_offset `addOff` offset_from_start))
+          = CAssign (CVal (hpRel realHp
+                                 (info_offset + offset_from_start))
                           (getAmodeRep amode))
                     amode
     in
        -- SAY WHAT WE ARE ABOUT TO DO
     profCtrC (allocProfilingMsg closure_info)
-                          [COffset   (closureHdrSize closure_info),
+                          [mkIntCLit fixedHdrSize,
                            mkIntCLit (closureGoodStuffSize closure_info),
                            mkIntCLit slop_size,
-                           COffset   closure_size]     `thenC`
+                           mkIntCLit closure_size]     `thenC`
 
        -- GENERATE THE CODE
     absC ( mkAbstractCs (
-          [ CInitHdr closure_info (HpRel realHp info_offset) use_cc False ]
+          [ cInitHdr closure_info (hpRel realHp info_offset) use_cc ]
           ++ (map do_move amodes_with_offsets)))       `thenC`
 
        -- GENERATE CC PROFILING MESSAGES
-    costCentresC SLIT("CC_ALLOC") [blame_cc,
-                            COffset closure_size,
-                            CLitLit (_PK_ (closureKind closure_info)) IntRep]
+    costCentresC SLIT("CCS_ALLOC") [blame_cc, mkIntCLit closure_size]
+       -- CLitLit (_PK_ type_str) IntRep] -- not necessary? --SDM
                                                        `thenC`
 
        -- BUMP THE VIRTUAL HEAP POINTER
-    setVirtHp (virtHp `addOff` closure_size)           `thenC`
+    setVirtHp (virtHp + closure_size)                  `thenC`
 
        -- RETURN PTR TO START OF OBJECT
     returnFC info_offset
   where
     closure_size = closureSize closure_info
     slop_size    = slopSize closure_info
+    type_str     = getSMRepStr (closureSMRep closure_info)
+
+-- Avoid hanging on to anything in the CC field when we're not profiling.
+
+cInitHdr closure_info amode cc 
+  | opt_SccProfilingOn = CInitHdr closure_info amode cc
+  | otherwise          = CInitHdr closure_info amode (panic "absent cc")
+       
 \end{code}
 
 %************************************************************************
@@ -247,14 +482,14 @@ allocHeap :: HeapOffset           -- Size of the space required
 
 allocHeap space
   = getVirtAndRealHp                           `thenFC` \ (virtHp, realHp) ->
-    let block_start = addOff virtHp (intOff 1)
+    let block_start = virtHp + 1
     in
        -- We charge the allocation to "PRIM" (which is probably right)
-    profCtrC SLIT("ALLOC_PRIM2") [COffset space]       `thenC`
+    profCtrC SLIT("ALLOC_PRIM2") [mkIntCLit space]     `thenC`
 
        -- BUMP THE VIRTUAL HEAP POINTER
-    setVirtHp (virtHp `addOff` space)          `thenC`
+    setVirtHp (virtHp + space)                 `thenC`
 
        -- RETURN PTR TO START OF OBJECT
-    returnFC (CAddr (HpRel realHp block_start))
+    returnFC (CAddr (hpRel realHp block_start))
 \end{code}