%
% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
%
-% $Id: CgCase.lhs,v 1.52 2001/05/22 13:43:15 simonpj Exp $
+% $Id: CgCase.lhs,v 1.70 2004/08/13 13:25:45 simonmar Exp $
%
%********************************************************
%* *
%********************************************************
\begin{code}
-module CgCase ( cgCase, saveVolatileVarsAndRegs, restoreCurrentCostCentre
+module CgCase ( cgCase, saveVolatileVarsAndRegs,
+ restoreCurrentCostCentre
) where
#include "HsVersions.h"
import CgMonad
import StgSyn
-import AbsCSyn
-
-import AbsCUtils ( mkAbstractCs, mkAbsCStmts, mkAlgAltsCSwitch,
- getAmodeRep, nonemptyAbsC
- )
-import CgUpdate ( reserveSeqFrame )
-import CgBindery ( getVolatileRegs, getArgAmodes,
+import CgBindery ( getArgAmodes,
bindNewToReg, bindNewToTemp,
- bindNewPrimToAmode, getCAddrModeAndInfo,
- rebindToStack, getCAddrMode, getCAddrModeIfVolatile,
- buildContLivenessMask, nukeDeadBindings,
+ getCgIdInfo, getArgAmode,
+ rebindToStack, getCAddrModeIfVolatile,
+ nukeDeadBindings, idInfoToAmode
)
import CgCon ( bindConArgs, bindUnboxedTupleComponents )
-import CgHeapery ( altHeapCheck )
-import CgRetConv ( dataReturnConvPrim, ctrlReturnConvAlg,
+import CgHeapery ( altHeapCheck, unbxTupleHeapCheck )
+import CgCallConv ( dataReturnConvPrim, ctrlReturnConvAlg,
CtrlReturnConvention(..)
)
-import CgStackery ( allocPrimStack, allocStackTop,
- deAllocStackTop, freeStackSlots, dataStackSlots
- )
-import CgTailCall ( tailCallFun )
-import CgUsages ( getSpRelOffset )
-import CLabel ( mkVecTblLabel, mkClosureTblLabel,
- mkDefaultLabel, mkAltLabel, mkReturnInfoLabel
+import CgStackery ( allocPrimStack, allocStackTop, getSpRelOffset,
+ deAllocStackTop, freeStackSlots
)
+import CgTailCall ( performTailCall )
+import CgPrimOp ( cgPrimOp )
+import CgForeignCall ( cgForeignCall )
+import CgUtils ( newTemp, cgLit, emitLitSwitch, emitSwitch,
+ tagToClosure )
+import CgProf ( curCCS, curCCSAddr )
+import CgInfoTbls ( emitDirectReturnTarget, emitAlgReturnTarget,
+ dataConTagZ )
+import SMRep ( CgRep(..), retAddrSizeW, nonVoidArg, isVoidArg,
+ idCgRep, tyConCgRep, typeHint )
+import CmmUtils ( CmmStmts, noStmts, oneStmt, plusStmts )
+import Cmm
+import MachOp ( wordRep )
import ClosureInfo ( mkLFArgument )
import CmdLineOpts ( opt_SccProfilingOn )
-import Id ( Id, idPrimRep, isDeadBinder )
-import DataCon ( DataCon, dataConTag, fIRST_TAG, ConTag )
+import Id ( Id, idName, isDeadBinder, idType )
+import ForeignCall ( ForeignCall(..), CCallSpec(..), playSafe )
import VarSet ( varSetElems )
-import Literal ( Literal )
-import PrimOp ( primOpOutOfLine, PrimOp(..) )
-import PrimRep ( getPrimRepSize, retPrimRepSize, PrimRep(..)
- )
-import TyCon ( isEnumerationTyCon, isUnboxedTupleTyCon, tyConPrimRep )
-import Unique ( Unique, Uniquable(..), newTagUnique )
-import Maybes ( maybeToBool )
-import Util ( only )
+import CoreSyn ( AltCon(..) )
+import PrimOp ( PrimOp(..), primOpOutOfLine )
+import TyCon ( isEnumerationTyCon, tyConFamilySize )
+import Util ( isSingleton )
import Outputable
\end{code}
-> StgLiveVars
-> Id
-> SRT
- -> StgCaseAlts
+ -> AltType
+ -> [StgAlt]
-> Code
\end{code}
-Special case #1: PrimOps returning enumeration types.
-
-For enumeration types, we invent a temporary (builtin-unique 1) to
-hold the tag, and cross our fingers that this doesn't clash with
-anything else. Builtin-unique 0 is used for a similar reason when
-compiling enumerated-type primops in CgExpr.lhs. We can't use the
-unique from the case binder, because this is used to hold the actual
-closure (when the case binder is live, that is).
-
-There is an extra special case for
-
- case tagToEnum# x of
- ...
+Special case #1: case of literal.
-which generates no code for the primop, unless x is used in the
-alternatives (in which case we lookup the tag in the relevant closure
-table to get the closure).
+\begin{code}
+cgCase (StgLit lit) live_in_whole_case live_in_alts bndr srt
+ alt_type@(PrimAlt tycon) alts
+ = do { tmp_reg <- bindNewToTemp bndr
+ ; cm_lit <- cgLit lit
+ ; stmtC (CmmAssign tmp_reg (CmmLit cm_lit))
+ ; cgPrimAlts NoGC alt_type tmp_reg alts }
+\end{code}
-Being a bit short of uniques for temporary variables here, we use
-newTagUnique to generate a new unique from the case binder. The case
-binder's unique will presumably have the 'c' tag (generated by
-CoreToStg), so we just change its tag to 'C' (for 'case') to ensure it
-doesn't clash with anything else.
+Special case #2: scrutinising a primitive-typed variable. No
+evaluation required. We don't save volatile variables, nor do we do a
+heap-check in the alternatives. Instead, the heap usage of the
+alternatives is worst-cased and passed upstream. This can result in
+allocating more heap than strictly necessary, but it will sometimes
+eliminate a heap check altogether.
\begin{code}
-cgCase (StgOpApp op args _)
- live_in_whole_case live_in_alts bndr srt (StgAlgAlts (Just tycon) alts deflt)
- | isEnumerationTyCon tycon
- = getArgAmodes args `thenFC` \ arg_amodes ->
-
- case op of {
- StgPrimOp TagToEnumOp -- No code!
- -> returnFC (only arg_amodes) ;
-
- _ -> -- Perform the operation
- let
- tag_amode = CTemp (newTagUnique (getUnique bndr) 'C') IntRep
- in
- getVolatileRegs live_in_alts `thenFC` \ vol_regs ->
- absC (COpStmt [tag_amode] op arg_amodes vol_regs) `thenC`
- -- NB: no liveness arg
- returnFC tag_amode
- } `thenFC` \ tag_amode ->
-
- let
- closure = CVal (CIndex (CLbl (mkClosureTblLabel tycon) PtrRep)
- tag_amode PtrRep)
- PtrRep
- in
-
- -- Bind the default binder if necessary
- -- The deadness info is set by StgVarInfo
- (if (isDeadBinder bndr)
- then nopC
- else bindNewToTemp bndr `thenFC` \ bndr_amode ->
- absC (CAssign bndr_amode closure))
- `thenC`
-
- -- compile the alts
- cgAlgAlts NoGC (getUnique bndr) Nothing{-cc_slot-} False{-no semi-tagging-}
- False{-not poly case-} alts deflt
- False{-don't emit yield-} `thenFC` \ (tagged_alts, deflt_c) ->
-
- -- Do the switch
- absC (mkAlgAltsCSwitch tag_amode tagged_alts deflt_c)
+cgCase (StgApp v []) live_in_whole_case live_in_alts bndr srt
+ alt_type@(PrimAlt tycon) alts
+ = do { -- Careful! we can't just bind the default binder to the same thing
+ -- as the scrutinee, since it might be a stack location, and having
+ -- two bindings pointing at the same stack locn doesn't work (it
+ -- confuses nukeDeadBindings). Hence, use a new temp.
+ v_info <- getCgIdInfo v
+ ; amode <- idInfoToAmode v_info
+ ; tmp_reg <- bindNewToTemp bndr
+ ; stmtC (CmmAssign tmp_reg amode)
+ ; cgPrimAlts NoGC alt_type tmp_reg alts }
\end{code}
-Special case #2: inline PrimOps.
+Special case #3: inline PrimOps and foreign calls.
\begin{code}
cgCase (StgOpApp op@(StgPrimOp primop) args _)
- live_in_whole_case live_in_alts bndr srt alts
+ live_in_whole_case live_in_alts bndr srt alt_type alts
| not (primOpOutOfLine primop)
- =
- -- Get amodes for the arguments and results
- getArgAmodes args `thenFC` \ arg_amodes ->
- getVolatileRegs live_in_alts `thenFC` \ vol_regs ->
-
- case alts of
- StgPrimAlts tycon alts deflt -- PRIMITIVE ALTS
- -> absC (COpStmt [CTemp (getUnique bndr) (tyConPrimRep tycon)]
- op
- arg_amodes -- note: no liveness arg
- vol_regs) `thenC`
- cgPrimInlineAlts bndr tycon alts deflt
-
- StgAlgAlts (Just tycon) [(_, args, _, rhs)] StgNoDefault
- | isUnboxedTupleTyCon tycon -- UNBOXED TUPLE ALTS
- -> -- no heap check, no yield, just get in there and do it.
- absC (COpStmt [ CTemp (getUnique arg) (idPrimRep arg) | arg <- args ]
- op
- arg_amodes -- note: no liveness arg
- vol_regs) `thenC`
- mapFCs bindNewToTemp args `thenFC` \ _ ->
- cgExpr rhs
-
- other -> pprPanic "cgCase: case of primop has strange alts" (pprStgAlts alts)
+ = cgInlinePrimOp primop args bndr alt_type live_in_alts alts
\end{code}
TODO: Case-of-case of primop can probably be done inline too (but see
ghc/lib/misc/PackedString.lhs for examples where this crops up (with
4.02)).
-Another special case: scrutinising a primitive-typed variable. No
-evaluation required. We don't save volatile variables, nor do we do a
-heap-check in the alternatives. Instead, the heap usage of the
-alternatives is worst-cased and passed upstream. This can result in
-allocating more heap than strictly necessary, but it will sometimes
-eliminate a heap check altogether.
+Special case #4: inline foreign calls: an unsafe foreign call can be done
+right here, just like an inline primop.
\begin{code}
-cgCase (StgApp v []) live_in_whole_case live_in_alts bndr srt
- (StgPrimAlts tycon alts deflt)
-
- =
- getCAddrMode v `thenFC` \amode ->
-
- {-
- Careful! we can't just bind the default binder to the same thing
- as the scrutinee, since it might be a stack location, and having
- two bindings pointing at the same stack locn doesn't work (it
- confuses nukeDeadBindings). Hence, use a new temp.
- -}
- bindNewToTemp bndr `thenFC` \deflt_amode ->
- absC (CAssign deflt_amode amode) `thenC`
+cgCase (StgOpApp op@(StgFCallOp fcall _) args _)
+ live_in_whole_case live_in_alts bndr srt alt_type alts
+ | unsafe_foreign_call
+ = ASSERT( isSingleton alts )
+ do -- *must* be an unboxed tuple alt.
+ -- exactly like the cgInlinePrimOp case for unboxed tuple alts..
+ { res_tmps <- mapFCs bindNewToTemp non_void_res_ids
+ ; let res_hints = map (typeHint.idType) non_void_res_ids
+ ; cgForeignCall (zip res_tmps res_hints) fcall args live_in_alts
+ ; cgExpr rhs }
+ where
+ (_, res_ids, _, rhs) = head alts
+ non_void_res_ids = filter (nonVoidArg . idCgRep) res_ids
- cgPrimAlts NoGC amode alts deflt []
+ unsafe_foreign_call
+ = case fcall of
+ CCall (CCallSpec _ _ s) -> not (playSafe s)
+ _other -> False
\end{code}
Special case: scrutinising a non-primitive variable.
\begin{code}
cgCase (StgApp fun args)
- live_in_whole_case live_in_alts bndr srt alts
- = getCAddrModeAndInfo fun `thenFC` \ (fun', fun_amode, lf_info) ->
- getArgAmodes args `thenFC` \ arg_amodes ->
-
- -- Squish the environment
- nukeDeadBindings live_in_alts `thenC`
- saveVolatileVarsAndRegs live_in_alts
- `thenFC` \ (save_assts, alts_eob_info, maybe_cc_slot) ->
-
- allocStackTop retPrimRepSize `thenFC` \_ ->
-
- forkEval alts_eob_info nopC (
- deAllocStackTop retPrimRepSize `thenFC` \_ ->
- cgEvalAlts maybe_cc_slot bndr srt alts)
- `thenFC` \ scrut_eob_info ->
-
- setEndOfBlockInfo (maybeReserveSeqFrame alts scrut_eob_info) $
- tailCallFun fun' fun_amode lf_info arg_amodes save_assts
+ live_in_whole_case live_in_alts bndr srt alt_type alts
+ = do { fun_info <- getCgIdInfo fun
+ ; arg_amodes <- getArgAmodes args
+
+ -- Nuking dead bindings *before* calculating the saves is the
+ -- value-add here. We might end up freeing up some slots currently
+ -- occupied by variables only required for the call.
+ -- NOTE: we need to look up the variables used in the call before
+ -- doing this, because some of them may not be in the environment
+ -- afterward.
+ ; nukeDeadBindings live_in_alts
+ ; (save_assts, alts_eob_info, maybe_cc_slot)
+ <- saveVolatileVarsAndRegs live_in_alts
+
+ ; scrut_eob_info
+ <- forkEval alts_eob_info
+ (allocStackTop retAddrSizeW >> nopC)
+ (do { deAllocStackTop retAddrSizeW
+ ; cgEvalAlts maybe_cc_slot bndr srt alt_type alts })
+
+ ; setEndOfBlockInfo (maybeReserveSeqFrame alt_type scrut_eob_info)
+ (performTailCall fun_info arg_amodes save_assts) }
\end{code}
Note about return addresses: we *always* push a return address, even
Finally, here is the general case.
\begin{code}
-cgCase expr live_in_whole_case live_in_alts bndr srt alts
- = -- Figure out what volatile variables to save
- nukeDeadBindings live_in_whole_case `thenC`
+cgCase expr live_in_whole_case live_in_alts bndr srt alt_type alts
+ = do { -- Figure out what volatile variables to save
+ nukeDeadBindings live_in_whole_case
- saveVolatileVarsAndRegs live_in_alts
- `thenFC` \ (save_assts, alts_eob_info, maybe_cc_slot) ->
-
- -- Save those variables right now!
- absC save_assts `thenC`
-
- -- generate code for the alts
- forkEval alts_eob_info
- (nukeDeadBindings live_in_alts `thenC`
- allocStackTop retPrimRepSize -- space for retn address
- `thenFC` \_ -> nopC
- )
- (deAllocStackTop retPrimRepSize `thenFC` \_ ->
- cgEvalAlts maybe_cc_slot bndr srt alts) `thenFC` \ scrut_eob_info ->
-
- setEndOfBlockInfo (maybeReserveSeqFrame alts scrut_eob_info) $
- cgExpr expr
+ ; (save_assts, alts_eob_info, maybe_cc_slot)
+ <- saveVolatileVarsAndRegs live_in_alts
+
+ -- Save those variables right now!
+ ; emitStmts save_assts
+
+ -- generate code for the alts
+ ; scrut_eob_info
+ <- forkEval alts_eob_info
+ (do { nukeDeadBindings live_in_alts
+ ; allocStackTop retAddrSizeW -- space for retn address
+ ; nopC })
+ (do { deAllocStackTop retAddrSizeW
+ ; cgEvalAlts maybe_cc_slot bndr srt alt_type alts })
+
+ ; setEndOfBlockInfo (maybeReserveSeqFrame alt_type scrut_eob_info)
+ (cgExpr expr)
+ }
\end{code}
There's a lot of machinery going on behind the scenes to manage the
TODO!! Problem: however, we have to save the current cost centre
stack somewhere, because at the eval point the current CCS might be
-different. So we pick a free stack slot and save CCCS in it. The
-problem with this is that this slot isn't recorded as free/unboxed in
-the environment, so a case expression in the scrutinee will have the
-wrong bitmap attached. Fortunately we don't ever seem to see
-case-of-case at the back end. One solution might be to shift the
-saved CCS to the correct place in the activation record just before
-the jump.
- --SDM
-
-(one consequence of the above is that activation records on the stack
-don't follow the layout of closures when we're profiling. The CCS
-could be anywhere within the record).
+different. So we pick a free stack slot and save CCCS in it. One
+consequence of this is that activation records on the stack don't
+follow the layout of closures when we're profiling. The CCS could be
+anywhere within the record.
\begin{code}
--- We need to reserve a seq frame for a polymorphic case
-maybeReserveSeqFrame (StgAlgAlts Nothing _ _) scrut_eob_info = reserveSeqFrame scrut_eob_info
-maybeReserveSeqFrame other scrut_eob_info = scrut_eob_info
+maybeReserveSeqFrame PolyAlt (EndOfBlockInfo args_sp (CaseAlts amode stuff bndr _))
+ = EndOfBlockInfo (args_sp + retAddrSizeW) (CaseAlts amode stuff bndr True)
+maybeReserveSeqFrame other scrut_eob_info = scrut_eob_info
+\end{code}
+
+
+%************************************************************************
+%* *
+ Inline primops
+%* *
+%************************************************************************
+
+\begin{code}
+cgInlinePrimOp primop args bndr (PrimAlt tycon) live_in_alts alts
+ | isVoidArg (idCgRep bndr)
+ = ASSERT( con == DEFAULT && isSingleton alts && null bs )
+ do { -- VOID RESULT; just sequencing,
+ -- so get in there and do it
+ cgPrimOp [] primop args live_in_alts
+ ; cgExpr rhs }
+ where
+ (con,bs,_,rhs) = head alts
+
+cgInlinePrimOp primop args bndr (PrimAlt tycon) live_in_alts alts
+ = do { -- PRIMITIVE ALTS, with non-void result
+ tmp_reg <- bindNewToTemp bndr
+ ; cgPrimOp [tmp_reg] primop args live_in_alts
+ ; cgPrimAlts NoGC (PrimAlt tycon) tmp_reg alts }
+
+cgInlinePrimOp primop args bndr (UbxTupAlt tycon) live_in_alts alts
+ = ASSERT( isSingleton alts )
+ do { -- UNBOXED TUPLE ALTS
+ -- No heap check, no yield, just get in there and do it.
+ -- NB: the case binder isn't bound to anything;
+ -- it has an unboxed tuple type
+
+ res_tmps <- mapFCs bindNewToTemp non_void_res_ids
+ ; cgPrimOp res_tmps primop args live_in_alts
+ ; cgExpr rhs }
+ where
+ (_, res_ids, _, rhs) = head alts
+ non_void_res_ids = filter (nonVoidArg . idCgRep) res_ids
+
+cgInlinePrimOp primop args bndr (AlgAlt tycon) live_in_alts alts
+ = do { -- ENUMERATION TYPE RETURN
+ -- Typical: case a ># b of { True -> ..; False -> .. }
+ -- The primop itself returns an index into the table of
+ -- closures for the enumeration type.
+ tag_amode <- ASSERT( isEnumerationTyCon tycon )
+ do_enum_primop primop
+
+ -- Bind the default binder if necessary
+ -- (avoiding it avoids the assignment)
+ -- The deadness info is set by StgVarInfo
+ ; whenC (not (isDeadBinder bndr))
+ (do { tmp_reg <- bindNewToTemp bndr
+ ; stmtC (CmmAssign tmp_reg (tagToClosure tycon tag_amode)) })
+
+ -- Compile the alts
+ ; (branches, mb_deflt) <- cgAlgAlts NoGC Nothing{-cc_slot-}
+ (AlgAlt tycon) alts
+
+ -- Do the switch
+ ; emitSwitch tag_amode branches mb_deflt 0 (tyConFamilySize tycon - 1)
+ }
+ where
+
+ do_enum_primop :: PrimOp -> FCode CmmExpr -- Returns amode for result
+ do_enum_primop TagToEnumOp -- No code!
+ | [arg] <- args = do
+ (_,e) <- getArgAmode arg
+ return e
+ do_enum_primop primop
+ = do tmp <- newTemp wordRep
+ cgPrimOp [tmp] primop args live_in_alts
+ returnFC (CmmReg tmp)
+
+cgInlinePrimOp primop arg_amodes bndr PolyAlt live_in_alts alts
+ = pprPanic "cgCase: case of primop has polymorphic type" (ppr bndr)
\end{code}
%************************************************************************
cgEvalAlts :: Maybe VirtualSpOffset -- Offset of cost-centre to be restored, if any
-> Id
-> SRT -- SRT for the continuation
- -> StgCaseAlts
+ -> AltType
+ -> [StgAlt]
-> FCode Sequel -- Any addr modes inside are guaranteed
-- to be a label so that we can duplicate it
-- without risk of duplicating code
-cgEvalAlts cc_slot bndr srt alts
- =
- let uniq = getUnique bndr in
-
- buildContLivenessMask uniq `thenFC` \ liveness_mask ->
-
- case alts of
-
- -- algebraic alts ...
- StgAlgAlts maybe_tycon alts deflt ->
-
- -- bind the default binder (it covers all the alternatives)
- bindNewToReg bndr node mkLFArgument `thenC`
+cgEvalAlts cc_slot bndr srt alt_type@(PrimAlt tycon) alts
+ = do { let rep = tyConCgRep tycon
+ reg = dataReturnConvPrim rep -- Bottom for voidRep
+
+ ; abs_c <- forkProc $ do
+ { -- Bind the case binder, except if it's void
+ -- (reg is bottom in that case)
+ whenC (nonVoidArg rep) $
+ bindNewToReg bndr reg (mkLFArgument bndr)
+ ; restoreCurrentCostCentre cc_slot True
+ ; cgPrimAlts GCMayHappen alt_type reg alts }
+
+ ; lbl <- emitDirectReturnTarget (idName bndr) abs_c srt
+ ; returnFC (CaseAlts lbl Nothing bndr False) }
+
+cgEvalAlts cc_slot bndr srt (UbxTupAlt _) [(con,args,_,rhs)]
+ = -- Unboxed tuple case
+ -- By now, the simplifier should have turned it
+ -- into case e of (# a,b #) -> e
+ -- There shouldn't be a
+ -- case e of DEFAULT -> e
+ ASSERT2( case con of { DataAlt _ -> True; other -> False },
+ text "cgEvalAlts: dodgy case of unboxed tuple type" )
+ do { -- forkAbsC for the RHS, so that the envt is
+ -- not changed for the emitDirectReturn call
+ abs_c <- forkProc $ do
+ { (live_regs, ptrs, nptrs, _) <- bindUnboxedTupleComponents args
+ -- Restore the CC *after* binding the tuple components,
+ -- so that we get the stack offset of the saved CC right.
+ ; restoreCurrentCostCentre cc_slot True
+ -- Generate a heap check if necessary
+ -- and finally the code for the alternative
+ ; unbxTupleHeapCheck live_regs ptrs nptrs noStmts
+ (cgExpr rhs) }
+ ; lbl <- emitDirectReturnTarget (idName bndr) abs_c srt
+ ; returnFC (CaseAlts lbl Nothing bndr False) }
+
+cgEvalAlts cc_slot bndr srt alt_type alts
+ = -- Algebraic and polymorphic case
+ do { -- Bind the default binder
+ bindNewToReg bndr nodeReg (mkLFArgument bndr)
-- Generate sequel info for use downstream
-- At the moment, we only do it if the type is vector-returnable.
--
-- which is worse than having the alt code in the switch statement
- let is_alg = maybeToBool maybe_tycon
- Just spec_tycon = maybe_tycon
- in
-
- -- deal with the unboxed tuple case
- if is_alg && isUnboxedTupleTyCon spec_tycon then
- case alts of
- [alt] -> let lbl = mkReturnInfoLabel uniq in
- cgUnboxedTupleAlt uniq cc_slot True alt
- `thenFC` \ abs_c ->
- getSRTLabel `thenFC` \srt_label ->
- absC (CRetDirect uniq abs_c (srt_label, srt)
- liveness_mask) `thenC`
- returnFC (CaseAlts (CLbl lbl RetRep) Nothing)
- _ -> panic "cgEvalAlts: dodgy case of unboxed tuple type"
-
- -- normal algebraic (or polymorphic) case alternatives
- else let
- ret_conv | is_alg = ctrlReturnConvAlg spec_tycon
- | otherwise = UnvectoredReturn 0
-
- use_labelled_alts = case ret_conv of
- VectoredReturn _ -> True
- _ -> False
-
- semi_tagged_stuff
- = if use_labelled_alts then
- cgSemiTaggedAlts bndr alts deflt -- Just <something>
- else
- Nothing -- no semi-tagging info
-
- in
- cgAlgAlts GCMayHappen uniq cc_slot use_labelled_alts (not is_alg)
- alts deflt True `thenFC` \ (tagged_alt_absCs, deflt_absC) ->
-
- mkReturnVector uniq tagged_alt_absCs deflt_absC srt liveness_mask
- ret_conv `thenFC` \ return_vec ->
-
- returnFC (CaseAlts return_vec semi_tagged_stuff)
-
- -- primitive alts...
- StgPrimAlts tycon alts deflt ->
-
- -- Restore the cost centre
- restoreCurrentCostCentre cc_slot `thenFC` \ cc_restore ->
-
- -- Generate the switch
- getAbsC (cgPrimEvalAlts bndr tycon alts deflt) `thenFC` \ abs_c ->
-
- -- Generate the labelled block, starting with restore-cost-centre
- getSRTLabel `thenFC` \srt_label ->
- absC (CRetDirect uniq (cc_restore `mkAbsCStmts` abs_c)
- (srt_label,srt) liveness_mask) `thenC`
-
- -- Return an amode for the block
- returnFC (CaseAlts (CLbl (mkReturnInfoLabel uniq) RetRep) Nothing)
+ ; (alts, mb_deflt) <- cgAlgAlts GCMayHappen cc_slot alt_type alts
+
+ ; (lbl, branches) <- emitAlgReturnTarget (idName bndr)
+ alts mb_deflt srt ret_conv
+
+ ; returnFC (CaseAlts lbl branches bndr False) }
+ where
+ ret_conv = case alt_type of
+ AlgAlt tc -> ctrlReturnConvAlg tc
+ PolyAlt -> UnvectoredReturn 0
\end{code}
\begin{code}
cgAlgAlts :: GCFlag
- -> Unique
- -> Maybe VirtualSpOffset
- -> Bool -- True <=> branches must be labelled
- -> Bool -- True <=> polymorphic case
- -> [(DataCon, [Id], [Bool], StgExpr)] -- The alternatives
- -> StgCaseDefault -- The default
- -> Bool -- Context switch at alts?
- -> FCode ([(ConTag, AbstractC)], -- The branches
- AbstractC -- The default case
- )
-
-cgAlgAlts gc_flag uniq restore_cc must_label_branches is_fun alts deflt
- emit_yield{-should a yield macro be emitted?-}
-
- = forkAlts (map (cgAlgAlt gc_flag uniq restore_cc must_label_branches emit_yield) alts)
- (cgAlgDefault gc_flag is_fun uniq restore_cc must_label_branches deflt emit_yield)
-\end{code}
+ -> Maybe VirtualSpOffset
+ -> AltType -- ** AlgAlt or PolyAlt only **
+ -> [StgAlt] -- The alternatives
+ -> FCode ( [(ConTagZ, CgStmts)], -- The branches
+ Maybe CgStmts ) -- The default case
-\begin{code}
-cgAlgDefault :: GCFlag
- -> Bool -- could be a function-typed result?
- -> Unique -> Maybe VirtualSpOffset -> Bool -- turgid state...
- -> StgCaseDefault -- input
- -> Bool
- -> FCode AbstractC -- output
-
-cgAlgDefault gc_flag is_fun uniq cc_slot must_label_branch StgNoDefault _
- = returnFC AbsCNop
-
-cgAlgDefault gc_flag is_fun uniq cc_slot must_label_branch
- (StgBindDefault rhs)
- emit_yield{-should a yield macro be emitted?-}
-
- = -- We have arranged that Node points to the thing
- restoreCurrentCostCentre cc_slot `thenFC` \restore_cc ->
- getAbsC (absC restore_cc `thenC`
- -- HWL: maybe need yield here
- --(if emit_yield
- -- then yield [node] True
- -- else absC AbsCNop) `thenC`
- possibleHeapCheck gc_flag is_fun [node] [] Nothing (cgExpr rhs)
- -- Node is live, but doesn't need to point at the thing itself;
- -- it's ok for Node to point to an indirection or FETCH_ME
- -- Hence no need to re-enter Node.
- ) `thenFC` \ abs_c ->
-
- let
- final_abs_c | must_label_branch = CCodeBlock lbl abs_c
- | otherwise = abs_c
- in
- returnFC final_abs_c
- where
- lbl = mkDefaultLabel uniq
+cgAlgAlts gc_flag cc_slot alt_type alts
+ = do alts <- forkAlts [ cgAlgAlt gc_flag cc_slot alt_type alt | alt <- alts]
+ let
+ mb_deflt = case alts of -- DEFAULT is always first, if present
+ ((DEFAULT,blks) : _) -> Just blks
+ other -> Nothing
+
+ branches = [(dataConTagZ con, blks)
+ | (DataAlt con, blks) <- alts]
+ -- in
+ return (branches, mb_deflt)
--- HWL comment on GrAnSim: GRAN_YIELDs needed; emitted in cgAlgAltRhs
cgAlgAlt :: GCFlag
- -> Unique -> Maybe VirtualSpOffset -> Bool -- turgid state
- -> Bool -- Context switch at alts?
- -> (DataCon, [Id], [Bool], StgExpr)
- -> FCode (ConTag, AbstractC)
-
-cgAlgAlt gc_flag uniq cc_slot must_label_branch
- emit_yield{-should a yield macro be emitted?-}
- (con, args, use_mask, rhs)
- =
- restoreCurrentCostCentre cc_slot `thenFC` \restore_cc ->
- getAbsC (absC restore_cc `thenC`
- -- HWL: maybe need yield here
- -- (if emit_yield
- -- then yield [node] True -- XXX live regs wrong
- -- else absC AbsCNop) `thenC`
- (case gc_flag of
- NoGC -> mapFCs bindNewToTemp args `thenFC` \_ -> nopC
- GCMayHappen -> bindConArgs con args
- ) `thenC`
- possibleHeapCheck gc_flag False [node] [] Nothing (
- cgExpr rhs)
- ) `thenFC` \ abs_c ->
- let
- final_abs_c | must_label_branch = CCodeBlock lbl abs_c
- | otherwise = abs_c
- in
- returnFC (tag, final_abs_c)
+ -> Maybe VirtualSpOffset -- Turgid state
+ -> AltType -- ** AlgAlt or PolyAlt only **
+ -> StgAlt
+ -> FCode (AltCon, CgStmts)
+
+cgAlgAlt gc_flag cc_slot alt_type (con, args, use_mask, rhs)
+ = do { abs_c <- getCgStmts $ do
+ { bind_con_args con args
+ ; restoreCurrentCostCentre cc_slot True
+ ; maybeAltHeapCheck gc_flag alt_type (cgExpr rhs) }
+ ; return (con, abs_c) }
where
- tag = dataConTag con
- lbl = mkAltLabel uniq tag
-
-cgUnboxedTupleAlt
- :: Unique -- unique for label of the alternative
- -> Maybe VirtualSpOffset -- Restore cost centre
- -> Bool -- ctxt switch
- -> (DataCon, [Id], [Bool], StgExpr) -- alternative
- -> FCode AbstractC
-
-cgUnboxedTupleAlt lbl cc_slot emit_yield (con,args,use_mask,rhs)
- = getAbsC (
- bindUnboxedTupleComponents args
- `thenFC` \ (live_regs,tags,stack_res) ->
-
- restoreCurrentCostCentre cc_slot `thenFC` \restore_cc ->
- absC restore_cc `thenC`
-
- -- HWL: maybe need yield here
- -- (if emit_yield
- -- then yield live_regs True -- XXX live regs wrong?
- -- else absC AbsCNop) `thenC`
- let
- -- ToDo: could maybe use Nothing here if stack_res is False
- -- since the heap-check can just return to the top of the
- -- stack.
- ret_addr = Just lbl
- in
-
- -- free up stack slots containing tags,
- freeStackSlots (map fst tags) `thenC`
-
- -- generate a heap check if necessary
- possibleHeapCheck GCMayHappen False live_regs tags ret_addr (
-
- -- and finally the code for the alternative
- cgExpr rhs)
- )
+ bind_con_args DEFAULT args = nopC
+ bind_con_args (DataAlt dc) args = bindConArgs dc args
\end{code}
-%************************************************************************
-%* *
-\subsection[CgCase-semi-tagged-alts]{The code to deal with sem-tagging}
-%* *
-%************************************************************************
-
-Turgid-but-non-monadic code to conjure up the required info from
-algebraic case alternatives for semi-tagging.
-
-\begin{code}
-cgSemiTaggedAlts :: Id
- -> [(DataCon, [Id], [Bool], StgExpr)]
- -> GenStgCaseDefault Id Id
- -> SemiTaggingStuff
-
-cgSemiTaggedAlts binder alts deflt
- = Just (map st_alt alts, st_deflt deflt)
- where
- uniq = getUnique binder
-
- st_deflt StgNoDefault = Nothing
-
- st_deflt (StgBindDefault _)
- = Just (Just binder,
- (CCallProfCtrMacro SLIT("RET_SEMI_BY_DEFAULT") [], -- ToDo: monadise?
- mkDefaultLabel uniq)
- )
-
- st_alt (con, args, use_mask, _)
- = -- Ha! Nothing to do; Node already points to the thing
- (con_tag,
- (CCallProfCtrMacro SLIT("RET_SEMI_IN_HEAP") -- ToDo: monadise?
- [mkIntCLit (length args)], -- how big the thing in the heap is
- join_label)
- )
- where
- con_tag = dataConTag con
- join_label = mkAltLabel uniq con_tag
-\end{code}
%************************************************************************
%* *
%* *
%************************************************************************
-@cgPrimEvalAlts@ and @cgPrimInlineAlts@ generate suitable @CSwitch@es
+@cgPrimAlts@ generates a suitable @CSwitch@
for dealing with the alternatives of a primitive @case@, given an
addressing mode for the thing to scrutinise. It also keeps track of
the maximum stack depth encountered down any branch.
As usual, no binders in the alternatives are yet bound.
\begin{code}
-cgPrimInlineAlts bndr tycon alts deflt
- = cgPrimAltsWithDefault bndr NoGC (CTemp uniq kind) alts deflt []
- where
- uniq = getUnique bndr
- kind = tyConPrimRep tycon
-
-cgPrimEvalAlts bndr tycon alts deflt
- = cgPrimAltsWithDefault bndr GCMayHappen (CReg reg) alts deflt [reg]
- where
- reg = WARN( case kind of { PtrRep -> True; other -> False },
- text "cgPrimEE" <+> ppr bndr <+> ppr tycon )
- dataReturnConvPrim kind
- kind = tyConPrimRep tycon
-
-cgPrimAltsWithDefault bndr gc_flag scrutinee alts deflt regs
- = -- first bind the default if necessary
- bindNewPrimToAmode bndr scrutinee `thenC`
- cgPrimAlts gc_flag scrutinee alts deflt regs
-
-cgPrimAlts gc_flag scrutinee alts deflt regs
- = forkAlts (map (cgPrimAlt gc_flag regs) alts)
- (cgPrimDefault gc_flag regs deflt)
- `thenFC` \ (alt_absCs, deflt_absC) ->
-
- absC (CSwitch scrutinee alt_absCs deflt_absC)
- -- CSwitch does sensible things with one or zero alternatives
-
+cgPrimAlts :: GCFlag
+ -> AltType -- Always PrimAlt, but passed to maybeAltHeapCheck
+ -> CmmReg -- Scrutinee
+ -> [StgAlt] -- Alternatives
+ -> Code
+-- NB: cgPrimAlts emits code that does the case analysis.
+-- It's often used in inline situations, rather than to generate
+-- a labelled return point. That's why its interface is a little
+-- different to cgAlgAlts
+--
+-- INVARIANT: the default binder is already bound
+cgPrimAlts gc_flag alt_type scrutinee alts
+ = do { tagged_absCs <- forkAlts (map (cgPrimAlt gc_flag alt_type) alts)
+ ; let ((DEFAULT, deflt_absC) : others) = tagged_absCs -- There is always a default
+ alt_absCs = [(lit,rhs) | (LitAlt lit, rhs) <- others]
+ ; emitLitSwitch (CmmReg scrutinee) alt_absCs deflt_absC }
cgPrimAlt :: GCFlag
- -> [MagicId] -- live registers
- -> (Literal, StgExpr) -- The alternative
- -> FCode (Literal, AbstractC) -- Its compiled form
-
-cgPrimAlt gc_flag regs (lit, rhs)
- = getAbsC rhs_code `thenFC` \ absC ->
- returnFC (lit,absC)
- where
- rhs_code = possibleHeapCheck gc_flag False regs [] Nothing (cgExpr rhs)
-
-cgPrimDefault :: GCFlag
- -> [MagicId] -- live registers
- -> StgCaseDefault
- -> FCode AbstractC
-
-cgPrimDefault gc_flag regs StgNoDefault
- = panic "cgPrimDefault: No default in prim case"
-
-cgPrimDefault gc_flag regs (StgBindDefault rhs)
- = getAbsC (possibleHeapCheck gc_flag False regs [] Nothing (cgExpr rhs))
+ -> AltType
+ -> StgAlt -- The alternative
+ -> FCode (AltCon, CgStmts) -- Its compiled form
+
+cgPrimAlt gc_flag alt_type (con, [], [], rhs)
+ = ASSERT( case con of { DEFAULT -> True; LitAlt _ -> True; other -> False } )
+ do { abs_c <- getCgStmts (maybeAltHeapCheck gc_flag alt_type (cgExpr rhs))
+ ; returnFC (con, abs_c) }
\end{code}
%************************************************************************
\begin{code}
+maybeAltHeapCheck
+ :: GCFlag
+ -> AltType -- PolyAlt, PrimAlt, AlgAlt, but *not* UbxTupAlt
+ -> Code -- Continuation
+ -> Code
+maybeAltHeapCheck NoGC _ code = code
+maybeAltHeapCheck GCMayHappen alt_type code = altHeapCheck alt_type code
+
saveVolatileVarsAndRegs
:: StgLiveVars -- Vars which should be made safe
- -> FCode (AbstractC, -- Assignments to do the saves
+ -> FCode (CmmStmts, -- Assignments to do the saves
EndOfBlockInfo, -- sequel for the alts
Maybe VirtualSpOffset) -- Slot for current cost centre
-
saveVolatileVarsAndRegs vars
- = saveVolatileVars vars `thenFC` \ var_saves ->
- saveCurrentCostCentre `thenFC` \ (maybe_cc_slot, cc_save) ->
- getEndOfBlockInfo `thenFC` \ eob_info ->
- returnFC (mkAbstractCs [var_saves, cc_save],
- eob_info,
- maybe_cc_slot)
+ = do { var_saves <- saveVolatileVars vars
+ ; (maybe_cc_slot, cc_save) <- saveCurrentCostCentre
+ ; eob_info <- getEndOfBlockInfo
+ ; returnFC (var_saves `plusStmts` cc_save,
+ eob_info,
+ maybe_cc_slot) }
-saveVolatileVars :: StgLiveVars -- Vars which should be made safe
- -> FCode AbstractC -- Assignments to to the saves
+saveVolatileVars :: StgLiveVars -- Vars which should be made safe
+ -> FCode CmmStmts -- Assignments to do the saves
saveVolatileVars vars
- = save_em (varSetElems vars)
+ = do { stmts_s <- mapFCs save_it (varSetElems vars)
+ ; return (foldr plusStmts noStmts stmts_s) }
where
- save_em [] = returnFC AbsCNop
-
- save_em (var:vars)
- = getCAddrModeIfVolatile var `thenFC` \ v ->
- case v of
- Nothing -> save_em vars -- Non-volatile, so carry on
-
-
- Just vol_amode -> -- Aha! It's volatile
- save_var var vol_amode `thenFC` \ abs_c ->
- save_em vars `thenFC` \ abs_cs ->
- returnFC (abs_c `mkAbsCStmts` abs_cs)
+ save_it var
+ = do { v <- getCAddrModeIfVolatile var
+ ; case v of
+ Nothing -> return noStmts -- Non-volatile
+ Just vol_amode -> save_var var vol_amode -- Aha! It's volatile
+ }
save_var var vol_amode
- = allocPrimStack (getPrimRepSize kind) `thenFC` \ slot ->
- rebindToStack var slot `thenC`
- getSpRelOffset slot `thenFC` \ sp_rel ->
- returnFC (CAssign (CVal sp_rel kind) vol_amode)
- where
- kind = getAmodeRep vol_amode
+ = do { slot <- allocPrimStack (idCgRep var)
+ ; rebindToStack var slot
+ ; sp_rel <- getSpRelOffset slot
+ ; returnFC (oneStmt (CmmStore sp_rel vol_amode)) }
\end{code}
---------------------------------------------------------------------------
\begin{code}
saveCurrentCostCentre ::
FCode (Maybe VirtualSpOffset, -- Where we decide to store it
- AbstractC) -- Assignment to save it
+ CmmStmts) -- Assignment to save it
saveCurrentCostCentre
- = if not opt_SccProfilingOn then
- returnFC (Nothing, AbsCNop)
- else
- allocPrimStack (getPrimRepSize CostCentreRep) `thenFC` \ slot ->
- dataStackSlots [slot] `thenC`
- getSpRelOffset slot `thenFC` \ sp_rel ->
- returnFC (Just slot,
- CAssign (CVal sp_rel CostCentreRep) (CReg CurCostCentre))
-
-restoreCurrentCostCentre :: Maybe VirtualSpOffset -> FCode AbstractC
-restoreCurrentCostCentre Nothing = returnFC AbsCNop
-restoreCurrentCostCentre (Just slot)
- = getSpRelOffset slot `thenFC` \ sp_rel ->
- freeStackSlots [slot] `thenC`
- returnFC (CCallProfCCMacro SLIT("RESTORE_CCCS") [CVal sp_rel CostCentreRep])
- -- we use the RESTORE_CCCS macro, rather than just
- -- assigning into CurCostCentre, in case RESTORE_CCCS
- -- has some sanity-checking in it.
+ | not opt_SccProfilingOn
+ = returnFC (Nothing, noStmts)
+ | otherwise
+ = do { slot <- allocPrimStack PtrArg
+ ; sp_rel <- getSpRelOffset slot
+ ; returnFC (Just slot,
+ oneStmt (CmmStore sp_rel curCCS)) }
+
+-- Sometimes we don't free the slot containing the cost centre after restoring it
+-- (see CgLetNoEscape.cgLetNoEscapeBody).
+restoreCurrentCostCentre :: Maybe VirtualSpOffset -> Bool -> Code
+restoreCurrentCostCentre Nothing _freeit = nopC
+restoreCurrentCostCentre (Just slot) freeit
+ = do { sp_rel <- getSpRelOffset slot
+ ; whenC freeit (freeStackSlots [slot])
+ ; stmtC (CmmStore curCCSAddr (CmmLoad sp_rel wordRep)) }
\end{code}
-%************************************************************************
-%* *
-\subsection[CgCase-return-vec]{Building a return vector}
-%* *
-%************************************************************************
-
-Build a return vector, and return a suitable label addressing
-mode for it.
-
-\begin{code}
-mkReturnVector :: Unique
- -> [(ConTag, AbstractC)] -- Branch codes
- -> AbstractC -- Default case
- -> SRT -- continuation's SRT
- -> Liveness -- stack liveness
- -> CtrlReturnConvention
- -> FCode CAddrMode
-
-mkReturnVector uniq tagged_alt_absCs deflt_absC srt liveness ret_conv
- = getSRTLabel `thenFC` \srt_label ->
- let
- (return_vec_amode, vtbl_body) = case ret_conv of {
-
- -- might be a polymorphic case...
- UnvectoredReturn 0 ->
- ASSERT(null tagged_alt_absCs)
- (CLbl ret_label RetRep,
- absC (CRetDirect uniq deflt_absC (srt_label, srt) liveness));
-
- UnvectoredReturn n ->
- -- find the tag explicitly rather than using tag_reg for now.
- -- on architectures with lots of regs the tag will be loaded
- -- into tag_reg by the code doing the returning.
- let
- tag = CMacroExpr WordRep GET_TAG [CVal (nodeRel 0) DataPtrRep]
- in
- (CLbl ret_label RetRep,
- absC (CRetDirect uniq
- (mkAlgAltsCSwitch tag tagged_alt_absCs deflt_absC)
- (srt_label, srt)
- liveness));
-
- VectoredReturn table_size ->
- let
- (vector_table, alts_absC) =
- unzip (map mk_vector_entry [fIRST_TAG .. (table_size+fIRST_TAG-1)])
-
- ret_vector = CRetVector vtbl_label
- vector_table
- (srt_label, srt) liveness
- in
- (CLbl vtbl_label DataPtrRep,
- -- alts come first, because we don't want to declare all the symbols
- absC (mkAbstractCs (mkAbstractCs alts_absC : [deflt_absC,ret_vector]))
- )
-
- } in
- vtbl_body `thenC`
- returnFC return_vec_amode
- -- )
- where
-
- vtbl_label = mkVecTblLabel uniq
- ret_label = mkReturnInfoLabel uniq
-
- deflt_lbl =
- case nonemptyAbsC deflt_absC of
- -- the simplifier might have eliminated a case
- Nothing -> mkIntCLit 0 -- CLbl mkErrorStdEntryLabel CodePtrRep
- Just absC@(CCodeBlock lbl _) -> CLbl lbl CodePtrRep
-
- mk_vector_entry :: ConTag -> (CAddrMode, AbstractC)
- mk_vector_entry tag
- = case [ absC | (t, absC) <- tagged_alt_absCs, t == tag ] of
- [] -> (deflt_lbl, AbsCNop)
- [absC@(CCodeBlock lbl _)] -> (CLbl lbl CodePtrRep,absC)
- _ -> panic "mkReturnVector: too many"
-\end{code}
-
-%************************************************************************
-%* *
-\subsection[CgCase-utils]{Utilities for handling case expressions}
-%* *
-%************************************************************************
-
-@possibleHeapCheck@ tests a flag passed in to decide whether to do a
-heap check or not. These heap checks are always in a case
-alternative, so we use altHeapCheck.
-
-\begin{code}
-possibleHeapCheck
- :: GCFlag
- -> Bool -- True <=> algebraic case
- -> [MagicId] -- live registers
- -> [(VirtualSpOffset,Int)] -- stack slots to tag
- -> Maybe Unique -- return address unique
- -> Code -- continuation
- -> Code
-
-possibleHeapCheck GCMayHappen is_alg regs tags lbl code
- = altHeapCheck is_alg regs tags AbsCNop lbl code
-possibleHeapCheck NoGC _ _ tags lbl code
- = code
-\end{code}