%
% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
%
-% $Id: CgCase.lhs,v 1.20 1998/12/22 12:55:54 simonm Exp $
+% $Id: CgCase.lhs,v 1.70 2004/08/13 13:25:45 simonmar Exp $
%
%********************************************************
%* *
%********************************************************
\begin{code}
-module CgCase ( cgCase, saveVolatileVarsAndRegs, restoreCurrentCostCentre,
- splitTyConAppThroughNewTypes ) where
+module CgCase ( cgCase, saveVolatileVarsAndRegs,
+ restoreCurrentCostCentre
+ ) where
#include "HsVersions.h"
import CgMonad
import StgSyn
-import AbsCSyn
-
-import AbsCUtils ( mkAbstractCs, mkAbsCStmts, mkAlgAltsCSwitch,
- getAmodeRep, nonemptyAbsC
- )
-import CoreSyn ( isDeadBinder )
-import CgUpdate ( reserveSeqFrame )
-import CgBindery ( getVolatileRegs, getArgAmodes,
+import CgBindery ( getArgAmodes,
bindNewToReg, bindNewToTemp,
- bindNewPrimToAmode,
- rebindToStack, getCAddrMode,
- getCAddrModeAndInfo, getCAddrModeIfVolatile,
- buildContLivenessMask, nukeDeadBindings
+ getCgIdInfo, getArgAmode,
+ rebindToStack, getCAddrModeIfVolatile,
+ nukeDeadBindings, idInfoToAmode
)
import CgCon ( bindConArgs, bindUnboxedTupleComponents )
-import CgHeapery ( altHeapCheck, yield )
-import CgRetConv ( dataReturnConvPrim, ctrlReturnConvAlg,
+import CgHeapery ( altHeapCheck, unbxTupleHeapCheck )
+import CgCallConv ( dataReturnConvPrim, ctrlReturnConvAlg,
CtrlReturnConvention(..)
)
-import CgStackery ( allocPrimStack, allocStackTop,
+import CgStackery ( allocPrimStack, allocStackTop, getSpRelOffset,
deAllocStackTop, freeStackSlots
)
-import CgTailCall ( tailCallFun )
-import CgUsages ( getSpRelOffset, getRealSp )
-import CLabel ( CLabel, mkVecTblLabel, mkReturnPtLabel,
- mkDefaultLabel, mkAltLabel, mkReturnInfoLabel,
- mkErrorStdEntryLabel, mkClosureTblLabel
- )
+import CgTailCall ( performTailCall )
+import CgPrimOp ( cgPrimOp )
+import CgForeignCall ( cgForeignCall )
+import CgUtils ( newTemp, cgLit, emitLitSwitch, emitSwitch,
+ tagToClosure )
+import CgProf ( curCCS, curCCSAddr )
+import CgInfoTbls ( emitDirectReturnTarget, emitAlgReturnTarget,
+ dataConTagZ )
+import SMRep ( CgRep(..), retAddrSizeW, nonVoidArg, isVoidArg,
+ idCgRep, tyConCgRep, typeHint )
+import CmmUtils ( CmmStmts, noStmts, oneStmt, plusStmts )
+import Cmm
+import MachOp ( wordRep )
import ClosureInfo ( mkLFArgument )
-import CmdLineOpts ( opt_SccProfilingOn, opt_GranMacros )
-import CostCentre ( CostCentre )
-import Id ( Id, idPrimRep )
-import DataCon ( DataCon, dataConTag, fIRST_TAG, ConTag,
- isUnboxedTupleCon, dataConType )
+import CmdLineOpts ( opt_SccProfilingOn )
+import Id ( Id, idName, isDeadBinder, idType )
+import ForeignCall ( ForeignCall(..), CCallSpec(..), playSafe )
import VarSet ( varSetElems )
-import Const ( Con(..), Literal )
-import PrimOp ( primOpOutOfLine, PrimOp(..) )
-import PrimRep ( getPrimRepSize, retPrimRepSize, PrimRep(..)
- )
-import TyCon ( TyCon, isEnumerationTyCon, isUnboxedTupleTyCon,
- isNewTyCon, isAlgTyCon,
- tyConDataCons, tyConFamilySize )
-import Type ( Type, typePrimRep, splitAlgTyConApp, splitTyConApp_maybe,
- splitFunTys, applyTys )
-import Unique ( Unique, Uniquable(..) )
-import Maybes ( maybeToBool )
+import CoreSyn ( AltCon(..) )
+import PrimOp ( PrimOp(..), primOpOutOfLine )
+import TyCon ( isEnumerationTyCon, tyConFamilySize )
+import Util ( isSingleton )
import Outputable
\end{code}
This never hurts us if there is only one alternative.
-
-*** NOT YET DONE *** The difficulty is that \tr{!B!}, \tr{!C!} need
-to take account of what is live, and that includes all live volatile
-variables, even if they also have stable analogues. Furthermore, the
-stack pointers must be lined up properly so that GC sees tidy stacks.
-If these things are done, then the heap checks can be done at \tr{!B!} and
-\tr{!C!} without a full save-volatile-vars sequence.
-
\begin{code}
cgCase :: StgExpr
-> StgLiveVars
-> StgLiveVars
-> Id
-> SRT
- -> StgCaseAlts
+ -> AltType
+ -> [StgAlt]
-> Code
\end{code}
-Several special cases for inline primitive operations.
+Special case #1: case of literal.
\begin{code}
-cgCase (StgCon (PrimOp op) args res_ty) live_in_whole_case live_in_alts bndr srt alts
- | not (primOpOutOfLine op)
- =
- -- Get amodes for the arguments and results
- getArgAmodes args `thenFC` \ arg_amodes ->
- let
- result_amodes = getPrimAppResultAmodes (getUnique bndr) alts
- in
- -- Perform the operation
- getVolatileRegs live_in_alts `thenFC` \ vol_regs ->
-
- absC (COpStmt result_amodes op
- arg_amodes -- note: no liveness arg
- vol_regs) `thenC`
-
- -- Scrutinise the result
- cgInlineAlts bndr alts
+cgCase (StgLit lit) live_in_whole_case live_in_alts bndr srt
+ alt_type@(PrimAlt tycon) alts
+ = do { tmp_reg <- bindNewToTemp bndr
+ ; cm_lit <- cgLit lit
+ ; stmtC (CmmAssign tmp_reg (CmmLit cm_lit))
+ ; cgPrimAlts NoGC alt_type tmp_reg alts }
\end{code}
-TODO: Case-of-case of primop can probably be done inline too (but
-maybe better to translate it out beforehand). See
-ghc/lib/misc/PackedString.lhs for examples where this crops up (with
-4.02).
-
-Another special case: scrutinising a primitive-typed variable. No
+Special case #2: scrutinising a primitive-typed variable. No
evaluation required. We don't save volatile variables, nor do we do a
heap-check in the alternatives. Instead, the heap usage of the
alternatives is worst-cased and passed upstream. This can result in
\begin{code}
cgCase (StgApp v []) live_in_whole_case live_in_alts bndr srt
- (StgPrimAlts ty alts deflt)
-
- =
- getCAddrMode v `thenFC` \amode ->
-
- {-
- Careful! we can't just bind the default binder to the same thing
- as the scrutinee, since it might be a stack location, and having
- two bindings pointing at the same stack locn doesn't work (it
- confuses nukeDeadBindings). Hence, use a new temp.
- -}
- (if (isDeadBinder bndr)
- then nopC
- else bindNewToTemp bndr `thenFC` \deflt_amode ->
- absC (CAssign deflt_amode amode)) `thenC`
-
- cgPrimAlts NoGC amode alts deflt []
+ alt_type@(PrimAlt tycon) alts
+ = do { -- Careful! we can't just bind the default binder to the same thing
+ -- as the scrutinee, since it might be a stack location, and having
+ -- two bindings pointing at the same stack locn doesn't work (it
+ -- confuses nukeDeadBindings). Hence, use a new temp.
+ v_info <- getCgIdInfo v
+ ; amode <- idInfoToAmode v_info
+ ; tmp_reg <- bindNewToTemp bndr
+ ; stmtC (CmmAssign tmp_reg amode)
+ ; cgPrimAlts NoGC alt_type tmp_reg alts }
\end{code}
-Special case: scrutinising a non-primitive variable.
-This can be done a little better than the general case, because
-we can reuse/trim the stack slot holding the variable (if it is in one).
+Special case #3: inline PrimOps and foreign calls.
\begin{code}
-cgCase (StgApp fun args)
- live_in_whole_case live_in_alts bndr srt alts@(StgAlgAlts ty _ _)
- =
- getCAddrModeAndInfo fun `thenFC` \ (fun_amode, lf_info) ->
- getArgAmodes args `thenFC` \ arg_amodes ->
+cgCase (StgOpApp op@(StgPrimOp primop) args _)
+ live_in_whole_case live_in_alts bndr srt alt_type alts
+ | not (primOpOutOfLine primop)
+ = cgInlinePrimOp primop args bndr alt_type live_in_alts alts
+\end{code}
- -- Squish the environment
- nukeDeadBindings live_in_alts `thenC`
- saveVolatileVarsAndRegs live_in_alts
- `thenFC` \ (save_assts, alts_eob_info, maybe_cc_slot) ->
+TODO: Case-of-case of primop can probably be done inline too (but
+maybe better to translate it out beforehand). See
+ghc/lib/misc/PackedString.lhs for examples where this crops up (with
+4.02).
- allocStackTop retPrimRepSize `thenFC` \_ ->
+Special case #4: inline foreign calls: an unsafe foreign call can be done
+right here, just like an inline primop.
- forkEval alts_eob_info nopC (
- deAllocStackTop retPrimRepSize `thenFC` \_ ->
- cgEvalAlts maybe_cc_slot bndr srt alts)
- `thenFC` \ scrut_eob_info ->
+\begin{code}
+cgCase (StgOpApp op@(StgFCallOp fcall _) args _)
+ live_in_whole_case live_in_alts bndr srt alt_type alts
+ | unsafe_foreign_call
+ = ASSERT( isSingleton alts )
+ do -- *must* be an unboxed tuple alt.
+ -- exactly like the cgInlinePrimOp case for unboxed tuple alts..
+ { res_tmps <- mapFCs bindNewToTemp non_void_res_ids
+ ; let res_hints = map (typeHint.idType) non_void_res_ids
+ ; cgForeignCall (zip res_tmps res_hints) fcall args live_in_alts
+ ; cgExpr rhs }
+ where
+ (_, res_ids, _, rhs) = head alts
+ non_void_res_ids = filter (nonVoidArg . idCgRep) res_ids
- let real_scrut_eob_info =
- if not_con_ty
- then reserveSeqFrame scrut_eob_info
- else scrut_eob_info
- in
+ unsafe_foreign_call
+ = case fcall of
+ CCall (CCallSpec _ _ s) -> not (playSafe s)
+ _other -> False
+\end{code}
- setEndOfBlockInfo real_scrut_eob_info (
- tailCallFun fun fun_amode lf_info arg_amodes save_assts
- )
+Special case: scrutinising a non-primitive variable.
+This can be done a little better than the general case, because
+we can reuse/trim the stack slot holding the variable (if it is in one).
- where
- not_con_ty = case (getScrutineeTyCon ty) of
- Just _ -> False
- other -> True
+\begin{code}
+cgCase (StgApp fun args)
+ live_in_whole_case live_in_alts bndr srt alt_type alts
+ = do { fun_info <- getCgIdInfo fun
+ ; arg_amodes <- getArgAmodes args
+
+ -- Nuking dead bindings *before* calculating the saves is the
+ -- value-add here. We might end up freeing up some slots currently
+ -- occupied by variables only required for the call.
+ -- NOTE: we need to look up the variables used in the call before
+ -- doing this, because some of them may not be in the environment
+ -- afterward.
+ ; nukeDeadBindings live_in_alts
+ ; (save_assts, alts_eob_info, maybe_cc_slot)
+ <- saveVolatileVarsAndRegs live_in_alts
+
+ ; scrut_eob_info
+ <- forkEval alts_eob_info
+ (allocStackTop retAddrSizeW >> nopC)
+ (do { deAllocStackTop retAddrSizeW
+ ; cgEvalAlts maybe_cc_slot bndr srt alt_type alts })
+
+ ; setEndOfBlockInfo (maybeReserveSeqFrame alt_type scrut_eob_info)
+ (performTailCall fun_info arg_amodes save_assts) }
\end{code}
Note about return addresses: we *always* push a return address, even
Finally, here is the general case.
\begin{code}
-cgCase expr live_in_whole_case live_in_alts bndr srt alts
- = -- Figure out what volatile variables to save
- nukeDeadBindings live_in_whole_case `thenC`
+cgCase expr live_in_whole_case live_in_alts bndr srt alt_type alts
+ = do { -- Figure out what volatile variables to save
+ nukeDeadBindings live_in_whole_case
- saveVolatileVarsAndRegs live_in_alts
- `thenFC` \ (save_assts, alts_eob_info, maybe_cc_slot) ->
-
- -- Save those variables right now!
- absC save_assts `thenC`
-
- -- generate code for the alts
- forkEval alts_eob_info
- (
- nukeDeadBindings live_in_alts `thenC`
- allocStackTop retPrimRepSize -- space for retn address
- `thenFC` \_ -> nopC
- )
- (deAllocStackTop retPrimRepSize `thenFC` \_ ->
- cgEvalAlts maybe_cc_slot bndr srt alts) `thenFC` \ scrut_eob_info ->
-
- let real_scrut_eob_info =
- if not_con_ty
- then reserveSeqFrame scrut_eob_info
- else scrut_eob_info
- in
-
- setEndOfBlockInfo real_scrut_eob_info (cgExpr expr)
-
- where
- not_con_ty = case (getScrutineeTyCon (alts_ty alts)) of
- Just _ -> False
- other -> True
+ ; (save_assts, alts_eob_info, maybe_cc_slot)
+ <- saveVolatileVarsAndRegs live_in_alts
+
+ -- Save those variables right now!
+ ; emitStmts save_assts
+
+ -- generate code for the alts
+ ; scrut_eob_info
+ <- forkEval alts_eob_info
+ (do { nukeDeadBindings live_in_alts
+ ; allocStackTop retAddrSizeW -- space for retn address
+ ; nopC })
+ (do { deAllocStackTop retAddrSizeW
+ ; cgEvalAlts maybe_cc_slot bndr srt alt_type alts })
+
+ ; setEndOfBlockInfo (maybeReserveSeqFrame alt_type scrut_eob_info)
+ (cgExpr expr)
+ }
\end{code}
There's a lot of machinery going on behind the scenes to manage the
TODO!! Problem: however, we have to save the current cost centre
stack somewhere, because at the eval point the current CCS might be
-different. So we pick a free stack slot and save CCCS in it. The
-problem with this is that this slot isn't recorded as free/unboxed in
-the environment, so a case expression in the scrutinee will have the
-wrong bitmap attached. Fortunately we don't ever seem to see
-case-of-case at the back end. One solution might be to shift the
-saved CCS to the correct place in the activation record just before
-the jump.
- --SDM
-
-(one consequence of the above is that activation records on the stack
-don't follow the layout of closures when we're profiling. The CCS
-could be anywhere within the record).
+different. So we pick a free stack slot and save CCCS in it. One
+consequence of this is that activation records on the stack don't
+follow the layout of closures when we're profiling. The CCS could be
+anywhere within the record.
\begin{code}
-alts_ty (StgAlgAlts ty _ _) = ty
-alts_ty (StgPrimAlts ty _ _) = ty
+maybeReserveSeqFrame PolyAlt (EndOfBlockInfo args_sp (CaseAlts amode stuff bndr _))
+ = EndOfBlockInfo (args_sp + retAddrSizeW) (CaseAlts amode stuff bndr True)
+maybeReserveSeqFrame other scrut_eob_info = scrut_eob_info
\end{code}
+
%************************************************************************
%* *
-\subsection[CgCase-primops]{Primitive applications}
+ Inline primops
%* *
%************************************************************************
-Get result amodes for a primitive operation, in the case wher GC can't happen.
-The amodes are returned in canonical order, ready for the prim-op!
-
- Alg case: temporaries named as in the alternatives,
- plus (CTemp u) for the tag (if needed)
- Prim case: (CTemp u)
-
-This is all disgusting, because these amodes must be consistent with those
-invented by CgAlgAlts.
-
\begin{code}
-getPrimAppResultAmodes
- :: Unique
- -> StgCaseAlts
- -> [CAddrMode]
-\end{code}
-
-\begin{code}
--- If there's an StgBindDefault which does use the bound
--- variable, then we can only handle it if the type involved is
--- an enumeration type. That's important in the case
--- of comparisions:
---
--- case x ># y of
--- r -> f r
---
--- The only reason for the restriction to *enumeration* types is our
--- inability to invent suitable temporaries to hold the results;
--- Elaborating the CTemp addr mode to have a second uniq field
--- (which would simply count from 1) would solve the problem.
--- Anyway, cgInlineAlts is now capable of handling all cases;
--- it's only this function which is being wimpish.
-
-getPrimAppResultAmodes uniq (StgAlgAlts ty alts
- (StgBindDefault rhs))
- | isEnumerationTyCon spec_tycon = [tag_amode]
- | otherwise = pprPanic "getPrimAppResultAmodes: non-enumeration algebraic alternatives with default" (ppr uniq <+> ppr rhs)
+cgInlinePrimOp primop args bndr (PrimAlt tycon) live_in_alts alts
+ | isVoidArg (idCgRep bndr)
+ = ASSERT( con == DEFAULT && isSingleton alts && null bs )
+ do { -- VOID RESULT; just sequencing,
+ -- so get in there and do it
+ cgPrimOp [] primop args live_in_alts
+ ; cgExpr rhs }
+ where
+ (con,bs,_,rhs) = head alts
+
+cgInlinePrimOp primop args bndr (PrimAlt tycon) live_in_alts alts
+ = do { -- PRIMITIVE ALTS, with non-void result
+ tmp_reg <- bindNewToTemp bndr
+ ; cgPrimOp [tmp_reg] primop args live_in_alts
+ ; cgPrimAlts NoGC (PrimAlt tycon) tmp_reg alts }
+
+cgInlinePrimOp primop args bndr (UbxTupAlt tycon) live_in_alts alts
+ = ASSERT( isSingleton alts )
+ do { -- UNBOXED TUPLE ALTS
+ -- No heap check, no yield, just get in there and do it.
+ -- NB: the case binder isn't bound to anything;
+	-- it has an unboxed tuple type
+
+ res_tmps <- mapFCs bindNewToTemp non_void_res_ids
+ ; cgPrimOp res_tmps primop args live_in_alts
+ ; cgExpr rhs }
+ where
+ (_, res_ids, _, rhs) = head alts
+ non_void_res_ids = filter (nonVoidArg . idCgRep) res_ids
+
+cgInlinePrimOp primop args bndr (AlgAlt tycon) live_in_alts alts
+ = do { -- ENUMERATION TYPE RETURN
+ -- Typical: case a ># b of { True -> ..; False -> .. }
+ -- The primop itself returns an index into the table of
+ -- closures for the enumeration type.
+ tag_amode <- ASSERT( isEnumerationTyCon tycon )
+ do_enum_primop primop
+
+ -- Bind the default binder if necessary
+ -- (avoiding it avoids the assignment)
+ -- The deadness info is set by StgVarInfo
+ ; whenC (not (isDeadBinder bndr))
+ (do { tmp_reg <- bindNewToTemp bndr
+ ; stmtC (CmmAssign tmp_reg (tagToClosure tycon tag_amode)) })
+
+ -- Compile the alts
+ ; (branches, mb_deflt) <- cgAlgAlts NoGC Nothing{-cc_slot-}
+ (AlgAlt tycon) alts
+
+ -- Do the switch
+ ; emitSwitch tag_amode branches mb_deflt 0 (tyConFamilySize tycon - 1)
+ }
where
- -- A temporary variable to hold the tag; this is unaffected by GC because
- -- the heap-checks in the branches occur after the switch
- tag_amode = CTemp uniq IntRep
- (spec_tycon, _, _) = splitAlgTyConApp ty
-\end{code}
-
-If we don't have a default case, we could be scrutinising an unboxed
-tuple, or an enumeration type...
-
-\begin{code}
-getPrimAppResultAmodes uniq (StgAlgAlts ty alts other_default)
- -- Default is either StgNoDefault or StgBindDefault with unused binder
-
- | isEnumerationTyCon tycon = [CTemp uniq IntRep]
-
- | isUnboxedTupleTyCon tycon =
- case alts of
- [(con, args, use_mask, rhs)] ->
- [ CTemp (getUnique arg) (idPrimRep arg) | arg <- args ]
- _ -> panic "getPrimAppResultAmodes: case of unboxed tuple has multiple branches"
-
- | otherwise = panic ("getPrimAppResultAmodes: case of primop has strange type: " ++ showSDoc (ppr ty))
-
- where (tycon, _, _) = splitAlgTyConApp ty
-\end{code}
-
-The situation is simpler for primitive results, because there is only
-one!
-\begin{code}
-getPrimAppResultAmodes uniq (StgPrimAlts ty _ _)
- = [CTemp uniq (typePrimRep ty)]
+ do_enum_primop :: PrimOp -> FCode CmmExpr -- Returns amode for result
+ do_enum_primop TagToEnumOp -- No code!
+ | [arg] <- args = do
+ (_,e) <- getArgAmode arg
+ return e
+ do_enum_primop primop
+ = do tmp <- newTemp wordRep
+ cgPrimOp [tmp] primop args live_in_alts
+ returnFC (CmmReg tmp)
+
+cgInlinePrimOp primop arg_amodes bndr PolyAlt live_in_alts alts
+ = pprPanic "cgCase: case of primop has polymorphic type" (ppr bndr)
\end{code}
-
%************************************************************************
%* *
\subsection[CgCase-alts]{Alternatives}
cgEvalAlts :: Maybe VirtualSpOffset -- Offset of cost-centre to be restored, if any
-> Id
-> SRT -- SRT for the continuation
- -> StgCaseAlts
+ -> AltType
+ -> [StgAlt]
-> FCode Sequel -- Any addr modes inside are guaranteed
-- to be a label so that we can duplicate it
-- without risk of duplicating code
-cgEvalAlts cc_slot bndr srt alts
- =
- let uniq = getUnique bndr in
-
- -- Generate the instruction to restore cost centre, if any
- restoreCurrentCostCentre cc_slot `thenFC` \ cc_restore ->
-
- -- get the stack liveness for the info table (after the CC slot has
- -- been freed - this is important).
- buildContLivenessMask uniq `thenFC` \ liveness_mask ->
-
- case alts of
-
- -- algebraic alts ...
- (StgAlgAlts ty alts deflt) ->
-
- -- bind the default binder (it covers all the alternatives)
- (if (isDeadBinder bndr)
- then nopC
- else bindNewToReg bndr node mkLFArgument) `thenC`
+cgEvalAlts cc_slot bndr srt alt_type@(PrimAlt tycon) alts
+ = do { let rep = tyConCgRep tycon
+ reg = dataReturnConvPrim rep -- Bottom for voidRep
+
+ ; abs_c <- forkProc $ do
+ { -- Bind the case binder, except if it's void
+ -- (reg is bottom in that case)
+ whenC (nonVoidArg rep) $
+ bindNewToReg bndr reg (mkLFArgument bndr)
+ ; restoreCurrentCostCentre cc_slot True
+ ; cgPrimAlts GCMayHappen alt_type reg alts }
+
+ ; lbl <- emitDirectReturnTarget (idName bndr) abs_c srt
+ ; returnFC (CaseAlts lbl Nothing bndr False) }
+
+cgEvalAlts cc_slot bndr srt (UbxTupAlt _) [(con,args,_,rhs)]
+ = -- Unboxed tuple case
	-- By now, the simplifier should have turned it
+ -- into case e of (# a,b #) -> e
+ -- There shouldn't be a
+ -- case e of DEFAULT -> e
+ ASSERT2( case con of { DataAlt _ -> True; other -> False },
+ text "cgEvalAlts: dodgy case of unboxed tuple type" )
+ do { -- forkAbsC for the RHS, so that the envt is
+ -- not changed for the emitDirectReturn call
+ abs_c <- forkProc $ do
+ { (live_regs, ptrs, nptrs, _) <- bindUnboxedTupleComponents args
+ -- Restore the CC *after* binding the tuple components,
+ -- so that we get the stack offset of the saved CC right.
+ ; restoreCurrentCostCentre cc_slot True
+ -- Generate a heap check if necessary
+ -- and finally the code for the alternative
+ ; unbxTupleHeapCheck live_regs ptrs nptrs noStmts
+ (cgExpr rhs) }
+ ; lbl <- emitDirectReturnTarget (idName bndr) abs_c srt
+ ; returnFC (CaseAlts lbl Nothing bndr False) }
+
+cgEvalAlts cc_slot bndr srt alt_type alts
+ = -- Algebraic and polymorphic case
+ do { -- Bind the default binder
+ bindNewToReg bndr nodeReg (mkLFArgument bndr)
-- Generate sequel info for use downstream
-- At the moment, we only do it if the type is vector-returnable.
--
-- which is worse than having the alt code in the switch statement
- let tycon_info = getScrutineeTyCon ty
- is_alg = maybeToBool tycon_info
- Just spec_tycon = tycon_info
- in
-
- -- deal with the unboxed tuple case
- if is_alg && isUnboxedTupleTyCon spec_tycon then
- case alts of
- [alt] -> let lbl = mkReturnInfoLabel uniq in
- cgUnboxedTupleAlt lbl cc_restore True alt
- `thenFC` \ abs_c ->
- getSRTLabel `thenFC` \srt_label ->
- absC (CRetDirect uniq abs_c (srt_label, srt)
- liveness_mask) `thenC`
- returnFC (CaseAlts (CLbl lbl RetRep) Nothing)
- _ -> panic "cgEvalAlts: dodgy case of unboxed tuple type"
-
- -- normal algebraic (or polymorphic) case alternatives
- else let
- ret_conv | is_alg = ctrlReturnConvAlg spec_tycon
- | otherwise = UnvectoredReturn 0
-
- use_labelled_alts = case ret_conv of
- VectoredReturn _ -> True
- _ -> False
-
- semi_tagged_stuff
- = if use_labelled_alts then
- cgSemiTaggedAlts bndr alts deflt -- Just <something>
- else
- Nothing -- no semi-tagging info
-
- in
- cgAlgAlts GCMayHappen uniq cc_restore use_labelled_alts (not is_alg)
- alts deflt True `thenFC` \ (tagged_alt_absCs, deflt_absC) ->
-
- mkReturnVector uniq tagged_alt_absCs deflt_absC srt liveness_mask
- ret_conv `thenFC` \ return_vec ->
-
- returnFC (CaseAlts return_vec semi_tagged_stuff)
-
- -- primitive alts...
- (StgPrimAlts ty alts deflt) ->
-
- -- Generate the switch
- getAbsC (cgPrimEvalAlts bndr ty alts deflt) `thenFC` \ abs_c ->
-
- -- Generate the labelled block, starting with restore-cost-centre
- getSRTLabel `thenFC` \srt_label ->
- absC (CRetDirect uniq (cc_restore `mkAbsCStmts` abs_c)
- (srt_label,srt) liveness_mask) `thenC`
-
- -- Return an amode for the block
- returnFC (CaseAlts (CLbl (mkReturnPtLabel uniq) RetRep) Nothing)
-\end{code}
+ ; (alts, mb_deflt) <- cgAlgAlts GCMayHappen cc_slot alt_type alts
+ ; (lbl, branches) <- emitAlgReturnTarget (idName bndr)
+ alts mb_deflt srt ret_conv
-\begin{code}
-cgInlineAlts :: Id
- -> StgCaseAlts
- -> Code
+ ; returnFC (CaseAlts lbl branches bndr False) }
+ where
+ ret_conv = case alt_type of
+ AlgAlt tc -> ctrlReturnConvAlg tc
+ PolyAlt -> UnvectoredReturn 0
\end{code}
+
HWL comment on {\em GrAnSim\/} (adding GRAN_YIELDs for context switch): If
we do an inlining of the case no separate functions for returning are
created, so we don't have to generate a GRAN_YIELD in that case. This info
must be propagated to cgAlgAltRhs (where the GRAN_YIELD macro might be
emitted). Hence, the new Bool arg to cgAlgAltRhs.
-First case: primitive op returns an unboxed tuple.
-
-\begin{code}
-cgInlineAlts bndr (StgAlgAlts ty [alt@(con,args,use_mask,rhs)] StgNoDefault)
- | isUnboxedTupleCon con
- = -- no heap check, no yield, just get in there and do it.
- mapFCs bindNewToTemp args `thenFC` \ _ ->
- cgExpr rhs
-
- | otherwise
- = panic "cgInlineAlts: single alternative, not an unboxed tuple"
-\end{code}
-
-Hack: to deal with
-
- case <# x y of z {
- DEFAULT -> ...
- }
-
-\begin{code}
-cgInlineAlts bndr (StgAlgAlts ty [] (StgBindDefault rhs))
- = bindNewToTemp bndr `thenFC` \amode ->
- let
- (tycon, _, _) = splitAlgTyConApp ty
- closure_lbl = CTableEntry (CLbl (mkClosureTblLabel tycon) PtrRep) amode PtrRep
- in
- absC (CAssign amode closure_lbl) `thenC`
- cgExpr rhs
-\end{code}
-
-Second case: algebraic case, several alternatives.
-Tag is held in a temporary.
-
-\begin{code}
-cgInlineAlts bndr (StgAlgAlts ty alts deflt)
- = cgAlgAlts NoGC uniq AbsCNop{-restore_cc-} False{-no semi-tagging-}
- False{-not poly case-} alts deflt
- False{-don't emit yield-} `thenFC` \ (tagged_alts, deflt_c) ->
-
- -- Do the switch
- absC (mkAlgAltsCSwitch tag_amode tagged_alts deflt_c)
- where
- -- A temporary variable to hold the tag; this is unaffected by GC because
- -- the heap-checks in the branches occur after the switch
- tag_amode = CTemp uniq IntRep
- uniq = getUnique bndr
-\end{code}
-
-Third (real) case: primitive result type.
-
-\begin{code}
-cgInlineAlts bndr (StgPrimAlts ty alts deflt)
- = cgPrimInlineAlts bndr ty alts deflt
-\end{code}
-
-
%************************************************************************
%* *
\subsection[CgCase-alg-alts]{Algebraic alternatives}
\begin{code}
cgAlgAlts :: GCFlag
- -> Unique
- -> AbstractC -- Restore-cost-centre instruction
- -> Bool -- True <=> branches must be labelled
- -> Bool -- True <=> polymorphic case
- -> [(DataCon, [Id], [Bool], StgExpr)] -- The alternatives
- -> StgCaseDefault -- The default
- -> Bool -- Context switch at alts?
- -> FCode ([(ConTag, AbstractC)], -- The branches
- AbstractC -- The default case
- )
-
-cgAlgAlts gc_flag uniq restore_cc must_label_branches is_fun alts deflt
- emit_yield{-should a yield macro be emitted?-}
-
- = forkAlts (map (cgAlgAlt gc_flag uniq restore_cc must_label_branches emit_yield) alts)
- (cgAlgDefault gc_flag is_fun uniq restore_cc must_label_branches deflt emit_yield)
-\end{code}
+ -> Maybe VirtualSpOffset
+ -> AltType -- ** AlgAlt or PolyAlt only **
+ -> [StgAlt] -- The alternatives
+ -> FCode ( [(ConTagZ, CgStmts)], -- The branches
+ Maybe CgStmts ) -- The default case
-\begin{code}
-cgAlgDefault :: GCFlag
- -> Bool -- could be a function-typed result?
- -> Unique -> AbstractC -> Bool -- turgid state...
- -> StgCaseDefault -- input
- -> Bool
- -> FCode AbstractC -- output
-
-cgAlgDefault gc_flag is_fun uniq restore_cc must_label_branch StgNoDefault _
- = returnFC AbsCNop
-
-cgAlgDefault gc_flag is_fun uniq restore_cc must_label_branch
- (StgBindDefault rhs)
- emit_yield{-should a yield macro be emitted?-}
-
- = -- We have arranged that Node points to the thing
- getAbsC (absC restore_cc `thenC`
- (if opt_GranMacros && emit_yield
- then yield [node] False
- else absC AbsCNop) `thenC`
- possibleHeapCheck gc_flag is_fun [node] [] Nothing (cgExpr rhs)
- -- Node is live, but doesn't need to point at the thing itself;
- -- it's ok for Node to point to an indirection or FETCH_ME
- -- Hence no need to re-enter Node.
- ) `thenFC` \ abs_c ->
-
- let
- final_abs_c | must_label_branch = CCodeBlock lbl abs_c
- | otherwise = abs_c
- in
- returnFC final_abs_c
- where
- lbl = mkDefaultLabel uniq
+cgAlgAlts gc_flag cc_slot alt_type alts
+ = do alts <- forkAlts [ cgAlgAlt gc_flag cc_slot alt_type alt | alt <- alts]
+ let
+ mb_deflt = case alts of -- DEFAULT is always first, if present
+ ((DEFAULT,blks) : _) -> Just blks
+ other -> Nothing
+
+ branches = [(dataConTagZ con, blks)
+ | (DataAlt con, blks) <- alts]
+ -- in
+ return (branches, mb_deflt)
--- HWL comment on GrAnSim: GRAN_YIELDs needed; emitted in cgAlgAltRhs
cgAlgAlt :: GCFlag
- -> Unique -> AbstractC -> Bool -- turgid state
- -> Bool -- Context switch at alts?
- -> (DataCon, [Id], [Bool], StgExpr)
- -> FCode (ConTag, AbstractC)
-
-cgAlgAlt gc_flag uniq restore_cc must_label_branch
- emit_yield{-should a yield macro be emitted?-}
- (con, args, use_mask, rhs)
- = getAbsC (absC restore_cc `thenC`
- (if opt_GranMacros && emit_yield
- then yield [node] True -- XXX live regs wrong
- else absC AbsCNop) `thenC`
- (case gc_flag of
- NoGC -> mapFCs bindNewToTemp args `thenFC` \_ -> nopC
- GCMayHappen -> bindConArgs con args
- ) `thenC`
- possibleHeapCheck gc_flag False [node] [] Nothing (
- cgExpr rhs)
- ) `thenFC` \ abs_c ->
- let
- final_abs_c | must_label_branch = CCodeBlock lbl abs_c
- | otherwise = abs_c
- in
- returnFC (tag, final_abs_c)
+ -> Maybe VirtualSpOffset -- Turgid state
+ -> AltType -- ** AlgAlt or PolyAlt only **
+ -> StgAlt
+ -> FCode (AltCon, CgStmts)
+
+cgAlgAlt gc_flag cc_slot alt_type (con, args, use_mask, rhs)
+ = do { abs_c <- getCgStmts $ do
+ { bind_con_args con args
+ ; restoreCurrentCostCentre cc_slot True
+ ; maybeAltHeapCheck gc_flag alt_type (cgExpr rhs) }
+ ; return (con, abs_c) }
where
- tag = dataConTag con
- lbl = mkAltLabel uniq tag
-
-cgUnboxedTupleAlt
- :: CLabel -- label of the alternative
- -> AbstractC -- junk
- -> Bool -- ctxt switch
- -> (DataCon, [Id], [Bool], StgExpr) -- alternative
- -> FCode AbstractC
-
-cgUnboxedTupleAlt lbl restore_cc emit_yield (con,args,use_mask,rhs)
- = getAbsC (
- absC restore_cc `thenC`
-
- bindUnboxedTupleComponents args
- `thenFC` \ (live_regs,tags,stack_res) ->
- (if opt_GranMacros && emit_yield
- then yield live_regs True -- XXX live regs wrong?
- else absC AbsCNop) `thenC`
- let
- -- ToDo: could maybe use Nothing here if stack_res is False
- -- since the heap-check can just return to the top of the
- -- stack.
- ret_addr = Just lbl
- in
-
- -- free up stack slots containing tags,
- freeStackSlots (map fst tags) `thenC`
-
- -- generate a heap check if necessary
- possibleHeapCheck GCMayHappen False live_regs tags ret_addr (
-
- -- and finally the code for the alternative
- cgExpr rhs)
- )
+ bind_con_args DEFAULT args = nopC
+ bind_con_args (DataAlt dc) args = bindConArgs dc args
\end{code}
-%************************************************************************
-%* *
-\subsection[CgCase-semi-tagged-alts]{The code to deal with sem-tagging}
-%* *
-%************************************************************************
-
-Turgid-but-non-monadic code to conjure up the required info from
-algebraic case alternatives for semi-tagging.
-
-\begin{code}
-cgSemiTaggedAlts :: Id
- -> [(DataCon, [Id], [Bool], StgExpr)]
- -> GenStgCaseDefault Id Id
- -> SemiTaggingStuff
-
-cgSemiTaggedAlts binder alts deflt
- = Just (map st_alt alts, st_deflt deflt)
- where
- uniq = getUnique binder
-
- st_deflt StgNoDefault = Nothing
-
- st_deflt (StgBindDefault _)
- = Just (Just binder,
- (CCallProfCtrMacro SLIT("RET_SEMI_BY_DEFAULT") [], -- ToDo: monadise?
- mkDefaultLabel uniq)
- )
-
- st_alt (con, args, use_mask, _)
- = -- Ha! Nothing to do; Node already points to the thing
- (con_tag,
- (CCallProfCtrMacro SLIT("RET_SEMI_IN_HEAP") -- ToDo: monadise?
- [mkIntCLit (length args)], -- how big the thing in the heap is
- join_label)
- )
- where
- con_tag = dataConTag con
- join_label = mkAltLabel uniq con_tag
-\end{code}
%************************************************************************
%* *
%* *
%************************************************************************
-@cgPrimEvalAlts@ and @cgPrimInlineAlts@ generate suitable @CSwitch@es
+@cgPrimAlts@ generates a suitable @CSwitch@
for dealing with the alternatives of a primitive @case@, given an
addressing mode for the thing to scrutinise. It also keeps track of
the maximum stack depth encountered down any branch.
As usual, no binders in the alternatives are yet bound.
\begin{code}
-cgPrimInlineAlts bndr ty alts deflt
- = cgPrimAltsWithDefault bndr NoGC (CTemp uniq kind) alts deflt []
- where
- uniq = getUnique bndr
- kind = typePrimRep ty
-
-cgPrimEvalAlts bndr ty alts deflt
- = cgPrimAltsWithDefault bndr GCMayHappen (CReg reg) alts deflt [reg]
- where
- reg = dataReturnConvPrim kind
- kind = typePrimRep ty
-
-cgPrimAltsWithDefault bndr gc_flag scrutinee alts deflt regs
- = -- first bind the default if necessary
- (if isDeadBinder bndr
- then nopC
- else bindNewPrimToAmode bndr scrutinee) `thenC`
- cgPrimAlts gc_flag scrutinee alts deflt regs
-
-cgPrimAlts gc_flag scrutinee alts deflt regs
- = forkAlts (map (cgPrimAlt gc_flag regs) alts)
- (cgPrimDefault gc_flag regs deflt)
- `thenFC` \ (alt_absCs, deflt_absC) ->
-
- absC (CSwitch scrutinee alt_absCs deflt_absC)
- -- CSwitch does sensible things with one or zero alternatives
-
+cgPrimAlts :: GCFlag
+ -> AltType -- Always PrimAlt, but passed to maybeAltHeapCheck
+ -> CmmReg -- Scrutinee
+ -> [StgAlt] -- Alternatives
+ -> Code
+-- NB: cgPrimAlts emits code that does the case analysis.
+-- It's often used in inline situations, rather than to genearte
+-- a labelled return point. That's why its interface is a little
+-- different to cgAlgAlts
+--
+-- INVARIANT: the default binder is already bound
+cgPrimAlts gc_flag alt_type scrutinee alts
+ = do { tagged_absCs <- forkAlts (map (cgPrimAlt gc_flag alt_type) alts)
+ ; let ((DEFAULT, deflt_absC) : others) = tagged_absCs -- There is always a default
+ alt_absCs = [(lit,rhs) | (LitAlt lit, rhs) <- others]
+ ; emitLitSwitch (CmmReg scrutinee) alt_absCs deflt_absC }
cgPrimAlt :: GCFlag
- -> [MagicId] -- live registers
- -> (Literal, StgExpr) -- The alternative
- -> FCode (Literal, AbstractC) -- Its compiled form
-
-cgPrimAlt gc_flag regs (lit, rhs)
- = getAbsC rhs_code `thenFC` \ absC ->
- returnFC (lit,absC)
- where
- rhs_code = possibleHeapCheck gc_flag False regs [] Nothing (cgExpr rhs)
-
-cgPrimDefault :: GCFlag
- -> [MagicId] -- live registers
- -> StgCaseDefault
- -> FCode AbstractC
-
-cgPrimDefault gc_flag regs StgNoDefault
- = panic "cgPrimDefault: No default in prim case"
-
-cgPrimDefault gc_flag regs (StgBindDefault rhs)
- = getAbsC (possibleHeapCheck gc_flag False regs [] Nothing (cgExpr rhs))
+ -> AltType
+ -> StgAlt -- The alternative
+ -> FCode (AltCon, CgStmts) -- Its compiled form
+
+cgPrimAlt gc_flag alt_type (con, [], [], rhs)
+ = ASSERT( case con of { DEFAULT -> True; LitAlt _ -> True; other -> False } )
+ do { abs_c <- getCgStmts (maybeAltHeapCheck gc_flag alt_type (cgExpr rhs))
+ ; returnFC (con, abs_c) }
\end{code}
%************************************************************************
\begin{code}
+maybeAltHeapCheck
+ :: GCFlag
+ -> AltType -- PolyAlt, PrimAlt, AlgAlt, but *not* UbxTupAlt
+ -> Code -- Continuation
+ -> Code
+maybeAltHeapCheck NoGC _ code = code
+maybeAltHeapCheck GCMayHappen alt_type code = altHeapCheck alt_type code
+
saveVolatileVarsAndRegs
:: StgLiveVars -- Vars which should be made safe
- -> FCode (AbstractC, -- Assignments to do the saves
+ -> FCode (CmmStmts, -- Assignments to do the saves
EndOfBlockInfo, -- sequel for the alts
Maybe VirtualSpOffset) -- Slot for current cost centre
-
saveVolatileVarsAndRegs vars
- = saveVolatileVars vars `thenFC` \ var_saves ->
- saveCurrentCostCentre `thenFC` \ (maybe_cc_slot, cc_save) ->
- getEndOfBlockInfo `thenFC` \ eob_info ->
- returnFC (mkAbstractCs [var_saves, cc_save],
- eob_info,
- maybe_cc_slot)
+ = do { var_saves <- saveVolatileVars vars
+ ; (maybe_cc_slot, cc_save) <- saveCurrentCostCentre
+ ; eob_info <- getEndOfBlockInfo
+ ; returnFC (var_saves `plusStmts` cc_save,
+ eob_info,
+ maybe_cc_slot) }
-saveVolatileVars :: StgLiveVars -- Vars which should be made safe
- -> FCode AbstractC -- Assignments to to the saves
+saveVolatileVars :: StgLiveVars -- Vars which should be made safe
+ -> FCode CmmStmts -- Assignments to do the saves
saveVolatileVars vars
- = save_em (varSetElems vars)
+ = do { stmts_s <- mapFCs save_it (varSetElems vars)
+ ; return (foldr plusStmts noStmts stmts_s) }
where
- save_em [] = returnFC AbsCNop
-
- save_em (var:vars)
- = getCAddrModeIfVolatile var `thenFC` \ v ->
- case v of
- Nothing -> save_em vars -- Non-volatile, so carry on
-
-
- Just vol_amode -> -- Aha! It's volatile
- save_var var vol_amode `thenFC` \ abs_c ->
- save_em vars `thenFC` \ abs_cs ->
- returnFC (abs_c `mkAbsCStmts` abs_cs)
+ save_it var
+ = do { v <- getCAddrModeIfVolatile var
+ ; case v of
+ Nothing -> return noStmts -- Non-volatile
+ Just vol_amode -> save_var var vol_amode -- Aha! It's volatile
+ }
save_var var vol_amode
- = allocPrimStack (getPrimRepSize kind) `thenFC` \ slot ->
- rebindToStack var slot `thenC`
- getSpRelOffset slot `thenFC` \ sp_rel ->
- returnFC (CAssign (CVal sp_rel kind) vol_amode)
- where
- kind = getAmodeRep vol_amode
+ = do { slot <- allocPrimStack (idCgRep var)
+ ; rebindToStack var slot
+ ; sp_rel <- getSpRelOffset slot
+ ; returnFC (oneStmt (CmmStore sp_rel vol_amode)) }
\end{code}
---------------------------------------------------------------------------
\begin{code}
saveCurrentCostCentre ::
FCode (Maybe VirtualSpOffset, -- Where we decide to store it
- AbstractC) -- Assignment to save it
+ CmmStmts) -- Assignment to save it
saveCurrentCostCentre
- = if not opt_SccProfilingOn then
- returnFC (Nothing, AbsCNop)
- else
- allocPrimStack (getPrimRepSize CostCentreRep) `thenFC` \ slot ->
- getSpRelOffset slot `thenFC` \ sp_rel ->
- returnFC (Just slot,
- CAssign (CVal sp_rel CostCentreRep) (CReg CurCostCentre))
-
-restoreCurrentCostCentre :: Maybe VirtualSpOffset -> FCode AbstractC
-
-restoreCurrentCostCentre Nothing
- = returnFC AbsCNop
-restoreCurrentCostCentre (Just slot)
- = getSpRelOffset slot `thenFC` \ sp_rel ->
- freeStackSlots [slot] `thenC`
- returnFC (CCallProfCCMacro SLIT("RESTORE_CCCS") [CVal sp_rel CostCentreRep])
- -- we use the RESTORE_CCCS macro, rather than just
- -- assigning into CurCostCentre, in case RESTORE_CCC
- -- has some sanity-checking in it.
-\end{code}
-
-%************************************************************************
-%* *
-\subsection[CgCase-return-vec]{Building a return vector}
-%* *
-%************************************************************************
-
-Build a return vector, and return a suitable label addressing
-mode for it.
-
-\begin{code}
-mkReturnVector :: Unique
- -> [(ConTag, AbstractC)] -- Branch codes
- -> AbstractC -- Default case
- -> SRT -- continuation's SRT
- -> Liveness -- stack liveness
- -> CtrlReturnConvention
- -> FCode CAddrMode
-
-mkReturnVector uniq tagged_alt_absCs deflt_absC srt liveness ret_conv
- = getSRTLabel `thenFC` \srt_label ->
- let
- srt_info = (srt_label, srt)
-
- (return_vec_amode, vtbl_body) = case ret_conv of {
-
- -- might be a polymorphic case...
- UnvectoredReturn 0 ->
- ASSERT(null tagged_alt_absCs)
- (CLbl ret_label RetRep,
- absC (CRetDirect uniq deflt_absC (srt_label, srt) liveness));
-
- UnvectoredReturn n ->
- -- find the tag explicitly rather than using tag_reg for now.
- -- on architectures with lots of regs the tag will be loaded
- -- into tag_reg by the code doing the returning.
- let
- tag = CMacroExpr WordRep GET_TAG [CVal (nodeRel 0) DataPtrRep]
- in
- (CLbl ret_label RetRep,
- absC (CRetDirect uniq
- (mkAlgAltsCSwitch tag tagged_alt_absCs deflt_absC)
- (srt_label, srt)
- liveness));
-
- VectoredReturn table_size ->
- let
- (vector_table, alts_absC) =
- unzip (map mk_vector_entry [fIRST_TAG .. (table_size+fIRST_TAG-1)])
-
- ret_vector = CRetVector vtbl_label
- vector_table
- (srt_label, srt) liveness
- in
- (CLbl vtbl_label DataPtrRep,
- -- alts come first, because we don't want to declare all the symbols
- absC (mkAbstractCs (mkAbstractCs alts_absC : [deflt_absC,ret_vector]))
- )
-
- } in
- vtbl_body `thenC`
- returnFC return_vec_amode
- -- )
- where
-
- vtbl_label = mkVecTblLabel uniq
- ret_label = mkReturnInfoLabel uniq
-
- deflt_lbl =
- case nonemptyAbsC deflt_absC of
- -- the simplifier might have eliminated a case
- Nothing -> CLbl mkErrorStdEntryLabel CodePtrRep
- Just absC@(CCodeBlock lbl _) -> CLbl lbl CodePtrRep
-
- mk_vector_entry :: ConTag -> (CAddrMode, AbstractC)
- mk_vector_entry tag
- = case [ absC | (t, absC) <- tagged_alt_absCs, t == tag ] of
- [] -> (deflt_lbl, AbsCNop)
- [absC@(CCodeBlock lbl _)] -> (CLbl lbl CodePtrRep,absC)
- _ -> panic "mkReturnVector: too many"
-\end{code}
-
-%************************************************************************
-%* *
-\subsection[CgCase-utils]{Utilities for handling case expressions}
-%* *
-%************************************************************************
-
-@possibleHeapCheck@ tests a flag passed in to decide whether to do a
-heap check or not. These heap checks are always in a case
-alternative, so we use altHeapCheck.
-
-\begin{code}
-possibleHeapCheck
- :: GCFlag
- -> Bool -- True <=> algebraic case
- -> [MagicId] -- live registers
- -> [(VirtualSpOffset,Int)] -- stack slots to tag
- -> Maybe CLabel -- return address
- -> Code -- continuation
- -> Code
-
-possibleHeapCheck GCMayHappen is_alg regs tags lbl code
- = altHeapCheck is_alg regs tags AbsCNop lbl code
-possibleHeapCheck NoGC _ _ tags lbl code
- = code
+ | not opt_SccProfilingOn
+ = returnFC (Nothing, noStmts)
+ | otherwise
+ = do { slot <- allocPrimStack PtrArg
+ ; sp_rel <- getSpRelOffset slot
+ ; returnFC (Just slot,
+ oneStmt (CmmStore sp_rel curCCS)) }
+
+-- Sometimes we don't free the slot containing the cost centre after restoring it
+-- (see CgLetNoEscape.cgLetNoEscapeBody).
+restoreCurrentCostCentre :: Maybe VirtualSpOffset -> Bool -> Code
+restoreCurrentCostCentre Nothing _freeit = nopC
+restoreCurrentCostCentre (Just slot) freeit
+ = do { sp_rel <- getSpRelOffset slot
+ ; whenC freeit (freeStackSlots [slot])
+ ; stmtC (CmmStore curCCSAddr (CmmLoad sp_rel wordRep)) }
\end{code}
-splitTyConAppThroughNewTypes is like splitTyConApp_maybe except
-that it looks through newtypes in addition to synonyms. It's
-useful in the back end where we're not interested in newtypes
-anymore.
-
-Sometimes, we've thrown away the constructors during pruning in the
-renamer. In these cases, we emit a warning and fall back to using a
-SEQ_FRAME to evaluate the case scrutinee.
-
-\begin{code}
-getScrutineeTyCon :: Type -> Maybe TyCon
-getScrutineeTyCon ty =
- case (splitTyConAppThroughNewTypes ty) of
- Nothing -> Nothing
- Just (tc,_) ->
- if not (isAlgTyCon tc) then Just tc else
- -- works for primitive TyCons too
- case (tyConFamilySize tc) of
- 0 -> pprTrace "Warning" (hcat [
- text "constructors for ",
- ppr tc,
- text " not available.\n\tUse -fno-prune-tydecls to fix."
- ]) Nothing
- _ -> Just tc
-
-splitTyConAppThroughNewTypes :: Type -> Maybe (TyCon, [Type])
-splitTyConAppThroughNewTypes ty
- = case splitTyConApp_maybe ty of
- Just (tc, tys)
- | isNewTyCon tc -> splitTyConAppThroughNewTypes ty
- | otherwise -> Just (tc, tys)
- where
- ([ty], _) = splitFunTys (applyTys (dataConType (head (tyConDataCons tc))) tys)
-
- other -> Nothing
-
-\end{code}