diff --git a/compiler/cmm/CmmNode.hs b/compiler/cmm/CmmNode.hs
new file mode 100644
index 0000000..12d534e
--- /dev/null
+++ b/compiler/cmm/CmmNode.hs
@@ -0,0 +1,303 @@

-- CmmNode type for representation using Hoopl graphs.
{-# LANGUAGE GADTs #-}
module CmmNode
  ( CmmNode(..)
  , UpdFrameOffset, Convention(..), ForeignConvention(..), ForeignTarget(..)
  , mapExp, mapExpDeep, foldExp, foldExpDeep
  )
where

import CmmExpr
import CmmDecl
import FastString
import ForeignCall
import SMRep

import Compiler.Hoopl
import Data.Maybe
import Prelude hiding (succ)

------------------------
-- CmmNode

data CmmNode e x where
  CmmEntry :: Label -> CmmNode C O

  CmmComment :: FastString -> CmmNode O O

  CmmAssign :: CmmReg -> CmmExpr -> CmmNode O O
    -- Assign to register

  CmmStore :: CmmExpr -> CmmExpr -> CmmNode O O
    -- Assign to memory location.  Size is
    -- given by cmmExprType of the rhs.

  CmmUnsafeForeignCall ::       -- An unsafe foreign call; see Note [Foreign calls]
      ForeignTarget ->          -- call target
      CmmFormals ->             -- zero or more results
      CmmActuals ->             -- zero or more arguments
      CmmNode O O

  CmmBranch :: Label -> CmmNode O C
    -- Goto another block in the same procedure

  CmmCondBranch :: {            -- conditional branch
      cml_pred :: CmmExpr,
      cml_true, cml_false :: Label
  } -> CmmNode O C

  CmmSwitch :: CmmExpr -> [Maybe Label] -> CmmNode O C
    -- Table branch
    -- The scrutinee is zero-based;
    --      zero -> first block
    --      one  -> second block etc
    -- Undefined outside range, and when there's a Nothing

  CmmCall :: {                  -- A call (native or safe foreign)
      cml_target :: CmmExpr,    -- never a CmmPrim to a CallishMachOp!

      cml_cont :: Maybe Label,
          -- Label of continuation (Nothing for return or tail call)

      cml_args :: ByteOff,
          -- Byte offset, from the *old* end of the Area associated with
          -- the Label (if cml_cont = Nothing, then the Old area), of the
          -- youngest outgoing arg.  Set the stack pointer to this before
          -- transferring control.
          -- (NB: an update frame might also have been stored in the Old
          --  area, but it'll be in an older part than the args.)

      cml_ret_args :: ByteOff,
          -- For calls *only*, the byte offset of the youngest returned value.
          -- This is really needed at the *return* point rather than here
          -- at the call, but in practice it's convenient to record it here.

      cml_ret_off :: ByteOff
          -- For calls *only*, the byte offset of the base of the frame that
          -- must be described by the info table for the return point.
          -- The older words are an update frame, which has its own
          -- info table and layout information.

          -- From a liveness point of view, the stack words older than
          -- cml_ret_off are treated as live, even if the sequel of
          -- the call goes into a loop.
  } -> CmmNode O C

  CmmForeignCall :: {           -- A safe foreign call; see Note [Foreign calls]
      tgt    :: ForeignTarget,  -- call target and convention
      res    :: CmmFormals,     -- zero or more results
      args   :: CmmActuals,     -- zero or more arguments
      succ   :: Label,          -- Label of continuation
      updfr  :: UpdFrameOffset, -- where the update frame is (for building the info table)
      intrbl :: Bool            -- whether or not the call is interruptible
  } -> CmmNode O C
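
{- The e/x indices are Hoopl shape indices: C (closed) means control cannot
   fall through that end of the node, O (open) means it can.  Below is a
   minimal sketch of how the shapes compose into a block; it is not part of
   the original module, and it assumes the standard Compiler.Hoopl block
   constructors (mkFirst, mkMiddles, mkLast, <*>) plus hypothetical labels,
   register and expressions supplied by the caller:

     blockSketch :: Label -> Label -> Label
                 -> LocalReg -> CmmExpr -> CmmExpr -> Graph CmmNode C C
     blockSketch entry t f r e p =
           mkFirst (CmmEntry entry)                      -- CmmNode C O
       <*> mkMiddles [ CmmComment (mkFastString "sketch")
                     , CmmAssign (CmmLocal r) e ]        -- CmmNode O O
       <*> mkLast (CmmCondBranch p t f)                  -- CmmNode O C
-}
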
{- Note [Foreign calls]
~~~~~~~~~~~~~~~~~~~~~~~
A CmmUnsafeForeignCall is used for *unsafe* foreign calls;
a CmmForeignCall is used for *safe* foreign calls.
Unsafe ones are easy: think of them as a "fat machine instruction".

Safe ones are trickier.  A safe foreign call
     r = f(x)
ultimately expands to
     push "return address"      -- Never used to return to;
                                -- just points to an info table
     save registers into TSO
     call suspendThread
     r = f(x)                   -- Make the call
     call resumeThread
     restore registers
     pop "return address"
We cannot "lower" a safe foreign call to this sequence of Cmms, because
after we've saved Sp all the Cmm optimiser's assumptions are broken.
Furthermore, currently the smart Cmm constructors know the calling
conventions for Haskell, the garbage collector, etc, and "lower" them
so that a CmmCall passes no parameters or results.  But the smart
constructors do *not* (currently) know the foreign call conventions.

Note that a safe foreign call needs an info table.
-}
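
{- For illustration only (not part of the original source): the safe call
   r = f(x) from the note above is carried by a single CmmForeignCall node,
   roughly as sketched below.  The C calling convention, the empty hint
   lists and the zero update-frame offset are assumptions made purely for
   the sketch:

     safeCallSketch :: CmmExpr -> LocalReg -> CmmExpr -> Label -> CmmNode O C
     safeCallSketch f r x k = CmmForeignCall
       { tgt    = ForeignTarget f (ForeignConvention CCallConv [] [])
       , res    = [r]
       , args   = [x]
       , succ   = k
       , updfr  = 0        -- assumed offset; real code records the actual update frame
       , intrbl = False
       }
-}
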
---------------------------------------------
-- Eq instance of CmmNode
-- It is a shame GHC cannot infer it by itself :(

instance Eq (CmmNode e x) where
  (CmmEntry a)                 == (CmmEntry a')                    = a==a'
  (CmmComment a)               == (CmmComment a')                  = a==a'
  (CmmAssign a b)              == (CmmAssign a' b')                = a==a' && b==b'
  (CmmStore a b)               == (CmmStore a' b')                 = a==a' && b==b'
  (CmmUnsafeForeignCall a b c) == (CmmUnsafeForeignCall a' b' c')  = a==a' && b==b' && c==c'
  (CmmBranch a)                == (CmmBranch a')                   = a==a'
  (CmmCondBranch a b c)        == (CmmCondBranch a' b' c')         = a==a' && b==b' && c==c'
  (CmmSwitch a b)              == (CmmSwitch a' b')                = a==a' && b==b'
  (CmmCall a b c d e)          == (CmmCall a' b' c' d' e')         = a==a' && b==b' && c==c' && d==d' && e==e'
  (CmmForeignCall a b c d e f) == (CmmForeignCall a' b' c' d' e' f') = a==a' && b==b' && c==c' && d==d' && e==e' && f==f'
  _                            == _                                = False

----------------------------------------------
-- Hoopl instances of CmmNode

instance NonLocal CmmNode where
  entryLabel (CmmEntry l) = l
  -- entryLabel _ = error "CmmNode.entryLabel"

  successors (CmmBranch l) = [l]
  successors (CmmCondBranch {cml_true=t, cml_false=f}) = [f, t] -- meets layout constraint
  successors (CmmSwitch _ ls) = catMaybes ls
  successors (CmmCall {cml_cont=l}) = maybeToList l
  successors (CmmForeignCall {succ=l}) = [l]
  -- successors _ = error "CmmNode.successors"

instance HooplNode CmmNode where
  mkBranchNode label = CmmBranch label
  mkLabelNode label = CmmEntry label

--------------------------------------------------
-- Various helper types

type UpdFrameOffset = ByteOff

data Convention
  = NativeDirectCall  -- Native C-- call skipping the node (closure) argument
  | NativeNodeCall    -- Native C-- call including the node argument
  | NativeReturn      -- Native C-- return
  | Slow              -- Slow entry points: all args pushed on the stack
  | GC                -- Entry to the garbage collector: uses the node reg!
  | PrimOpCall        -- Calling prim ops
  | PrimOpReturn      -- Returning from prim ops
  | Foreign           -- Foreign call/return
        ForeignConvention
  | Private
        -- Used for control transfers within a (pre-CPS) procedure.  All
        -- jump sites known, never pushed on the stack (hence no SRT).
        -- You can choose whatever calling convention you please
        -- (provided you make sure all the call sites agree)!
        -- This data type will eventually be extended to record the convention.
  deriving( Eq )

data ForeignConvention
  = ForeignConvention
        CCallConv         -- Which foreign-call convention
        [ForeignHint]     -- Extra info about the args
        [ForeignHint]     -- Extra info about the result
  deriving Eq

data ForeignTarget          -- The target of a foreign call
  = ForeignTarget           -- A foreign procedure
        CmmExpr             -- Its address
        ForeignConvention   -- Its calling convention
  | PrimTarget              -- A possibly-side-effecting machine operation
        CallishMachOp       -- Which one
  deriving Eq

--------------------------------------------------
-- Instances of register and slot users / definers

instance UserOfLocalRegs (CmmNode e x) where
  foldRegsUsed f z n = case n of
      CmmAssign _ expr -> fold f z expr
      CmmStore addr rval -> fold f (fold f z addr) rval
      CmmUnsafeForeignCall t _ args -> fold f (fold f z t) args
      CmmCondBranch expr _ _ -> fold f z expr
      CmmSwitch expr _ -> fold f z expr
      CmmCall {cml_target=tgt} -> fold f z tgt
      CmmForeignCall {tgt=tgt, args=args} -> fold f (fold f z tgt) args
      _ -> z
    where fold :: forall a b.
                  UserOfLocalRegs a =>
                  (b -> LocalReg -> b) -> b -> a -> b
          fold f z n = foldRegsUsed f z n

instance UserOfLocalRegs ForeignTarget where
  foldRegsUsed _f z (PrimTarget _)      = z
  foldRegsUsed f  z (ForeignTarget e _) = foldRegsUsed f z e

instance DefinerOfLocalRegs (CmmNode e x) where
  foldRegsDefd f z n = case n of
      CmmAssign lhs _ -> fold f z lhs
      CmmUnsafeForeignCall _ fs _ -> fold f z fs
      CmmForeignCall {res=res} -> fold f z res
      _ -> z
    where fold :: forall a b.
                  DefinerOfLocalRegs a =>
                  (b -> LocalReg -> b) -> b -> a -> b
          fold f z n = foldRegsDefd f z n

instance UserOfSlots (CmmNode e x) where
  foldSlotsUsed f z n = case n of
      CmmAssign _ expr -> fold f z expr
      CmmStore addr rval -> fold f (fold f z addr) rval
      CmmUnsafeForeignCall _ _ args -> fold f z args
      CmmCondBranch expr _ _ -> fold f z expr
      CmmSwitch expr _ -> fold f z expr
      CmmCall {cml_target=tgt} -> fold f z tgt
      CmmForeignCall {tgt=tgt, args=args} -> fold f (fold f z tgt) args
      _ -> z
    where fold :: forall a b.
                  UserOfSlots a =>
                  (b -> SubArea -> b) -> b -> a -> b
          fold f z n = foldSlotsUsed f z n

instance UserOfSlots ForeignTarget where
  foldSlotsUsed f  z (ForeignTarget e _) = foldSlotsUsed f z e
  foldSlotsUsed _f z (PrimTarget _)      = z

instance DefinerOfSlots (CmmNode e x) where
  foldSlotsDefd f z n = case n of
      CmmStore (CmmStackSlot a i) expr -> f z (a, i, widthInBytes $ typeWidth $ cmmExprType expr)
      CmmForeignCall {res=res} -> fold f z $ map foreign_call_slot res
      _ -> z
    where
      fold :: forall a b.
              DefinerOfSlots a =>
              (b -> SubArea -> b) -> b -> a -> b
      fold f z n = foldSlotsDefd f z n
      foreign_call_slot r = case widthInBytes $ typeWidth $ localRegType r of w -> (RegSlot r, w, w)
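
{- A small usage sketch (not part of the original source): with the
   instances above, the local registers read or written by any node can be
   collected generically.  The helper names are hypothetical:

     regsUsedBy, regsDefdBy :: CmmNode e x -> [LocalReg]
     regsUsedBy n = foldRegsUsed (flip (:)) [] n
     regsDefdBy n = foldRegsDefd (flip (:)) [] n
-}
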
-----------------------------------
-- mapping Expr in CmmNode

mapForeignTarget :: (CmmExpr -> CmmExpr) -> ForeignTarget -> ForeignTarget
mapForeignTarget exp (ForeignTarget e c) = ForeignTarget (exp e) c
mapForeignTarget _   m@(PrimTarget _)    = m

-- Take a transformer on expressions and apply it recursively.
wrapRecExp :: (CmmExpr -> CmmExpr) -> CmmExpr -> CmmExpr
wrapRecExp f (CmmMachOp op es) = f (CmmMachOp op $ map (wrapRecExp f) es)
wrapRecExp f (CmmLoad addr ty) = f (CmmLoad (wrapRecExp f addr) ty)
wrapRecExp f e                 = f e

mapExp :: (CmmExpr -> CmmExpr) -> CmmNode e x -> CmmNode e x
mapExp _ f@(CmmEntry _)                     = f
mapExp _ m@(CmmComment _)                   = m
mapExp f   (CmmAssign r e)                  = CmmAssign r (f e)
mapExp f   (CmmStore addr e)                = CmmStore (f addr) (f e)
mapExp f   (CmmUnsafeForeignCall tgt fs as) = CmmUnsafeForeignCall (mapForeignTarget f tgt) fs (map f as)
mapExp _ l@(CmmBranch _)                    = l
mapExp f   (CmmCondBranch e ti fi)          = CmmCondBranch (f e) ti fi
mapExp f   (CmmSwitch e tbl)                = CmmSwitch (f e) tbl
mapExp f   (CmmCall tgt mb_id o i s)        = CmmCall (f tgt) mb_id o i s
mapExp f   (CmmForeignCall tgt fs as succ updfr intrbl) = CmmForeignCall (mapForeignTarget f tgt) fs (map f as) succ updfr intrbl

mapExpDeep :: (CmmExpr -> CmmExpr) -> CmmNode e x -> CmmNode e x
mapExpDeep f = mapExp $ wrapRecExp f

-----------------------------------
-- folding Expr in CmmNode

foldExpForeignTarget :: (CmmExpr -> z -> z) -> ForeignTarget -> z -> z
foldExpForeignTarget exp (ForeignTarget e _) z = exp e z
foldExpForeignTarget _   (PrimTarget _)      z = z

-- Take a folder on expressions and apply it recursively.
wrapRecExpf :: (CmmExpr -> z -> z) -> CmmExpr -> z -> z
wrapRecExpf f e@(CmmMachOp _ es) z = foldr (wrapRecExpf f) (f e z) es
wrapRecExpf f e@(CmmLoad addr _) z = wrapRecExpf f addr (f e z)
wrapRecExpf f e                  z = f e z

foldExp :: (CmmExpr -> z -> z) -> CmmNode e x -> z -> z
foldExp _ (CmmEntry {})   z                       = z
foldExp _ (CmmComment {}) z                       = z
foldExp f (CmmAssign _ e) z                       = f e z
foldExp f (CmmStore addr e) z                     = f addr $ f e z
foldExp f (CmmUnsafeForeignCall t _ as) z         = foldr f (foldExpForeignTarget f t z) as
foldExp _ (CmmBranch _) z                         = z
foldExp f (CmmCondBranch e _ _) z                 = f e z
foldExp f (CmmSwitch e _) z                       = f e z
foldExp f (CmmCall {cml_target=tgt}) z            = f tgt z
foldExp f (CmmForeignCall {tgt=tgt, args=args}) z = foldr f (foldExpForeignTarget f tgt z) args

foldExpDeep :: (CmmExpr -> z -> z) -> CmmNode e x -> z -> z
foldExpDeep f = foldExp $ wrapRecExpf f
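
{- A usage sketch (not part of the original source): wrapRecExp and
   wrapRecExpf let a shallow transformer or folder act on every
   sub-expression of a node.  The helpers below are hypothetical examples
   of that pattern (a full register rename would also handle CmmRegOff):

     countLoads :: CmmNode e x -> Int
     countLoads node = foldExpDeep isLoad node 0
       where isLoad (CmmLoad _ _) k = k + 1
             isLoad _             k = k

     renameReg :: CmmReg -> CmmReg -> CmmNode e x -> CmmNode e x
     renameReg old new = mapExpDeep subst
       where subst (CmmReg r) | r == old = CmmReg new
             subst e                     = e
-}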