X-Git-Url: http://git.megacz.com/?a=blobdiff_plain;f=ghc%2Fcompiler%2FcodeGen%2FCgCase.lhs;h=8c67334b2814f360bac336549507ee89c2ec6b6e;hb=d28ba8c800901bea01f70c4719278c2a364cf9fc;hp=305a28332d3b669f3eefe2c9de686c6ebeab93d2;hpb=ee67797fcd3b833c3b1a90a543e9520f36f9a6b9;p=ghc-hetmet.git diff --git a/ghc/compiler/codeGen/CgCase.lhs b/ghc/compiler/codeGen/CgCase.lhs index 305a283..8c67334 100644 --- a/ghc/compiler/codeGen/CgCase.lhs +++ b/ghc/compiler/codeGen/CgCase.lhs @@ -1,5 +1,7 @@ % -% (c) The GRASP/AQUA Project, Glasgow University, 1992-1996 +% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 +% +% $Id: CgCase.lhs,v 1.62 2003/05/14 09:13:53 simonmar Exp $ % %******************************************************** %* * @@ -8,63 +10,53 @@ %******************************************************** \begin{code} -module CgCase ( cgCase, saveVolatileVarsAndRegs ) where +module CgCase ( cgCase, saveVolatileVarsAndRegs, restoreCurrentCostCentre + ) where #include "HsVersions.h" -import {-# SOURCE #-} CgExpr +import {-# SOURCE #-} CgExpr ( cgExpr ) import CgMonad import StgSyn import AbsCSyn import AbsCUtils ( mkAbstractCs, mkAbsCStmts, mkAlgAltsCSwitch, - magicIdPrimRep, getAmodeRep + getAmodeRep, nonemptyAbsC ) -import CgBindery ( getVolatileRegs, getArgAmode, getArgAmodes, +import CgBindery ( getVolatileRegs, getArgAmodes, bindNewToReg, bindNewToTemp, - bindNewPrimToAmode, - rebindToAStack, rebindToBStack, - getCAddrModeAndInfo, getCAddrModeIfVolatile, - idInfoToAmode - ) -import CgCon ( buildDynCon, bindConArgs ) -import CgHeapery ( heapCheck, yield ) -import CgRetConv ( dataReturnConvAlg, dataReturnConvPrim, - ctrlReturnConvAlg, - DataReturnConvention(..), CtrlReturnConvention(..), - assignPrimOpResultRegs, - makePrimOpArgsRobust - ) -import CgStackery ( allocAStack, allocBStack, allocAStackTop, allocBStackTop ) -import CgTailCall ( tailCallBusiness, performReturn ) -import CgUsages ( getSpARelOffset, getSpBRelOffset, freeBStkSlot ) -import CLabel ( mkVecTblLabel, mkReturnPtLabel, mkDefaultLabel, - mkAltLabel + bindNewPrimToAmode, getCAddrModeAndInfo, + rebindToStack, getCAddrMode, getCAddrModeIfVolatile, + buildContLivenessMask, nukeDeadBindings, ) -import ClosureInfo ( mkConLFInfo, mkLFArgument, layOutDynCon ) -import CmdLineOpts ( opt_SccProfilingOn, opt_GranMacros ) -import CostCentre ( useCurrentCostCentre, CostCentre ) -import HeapOffs ( VirtualSpBOffset, VirtualHeapOffset ) -import Id ( idPrimRep, dataConTag, fIRST_TAG, ConTag, - isDataCon, DataCon, - idSetToList, GenId{-instance Uniquable,Eq-}, Id +import CgCon ( bindConArgs, bindUnboxedTupleComponents ) +import CgHeapery ( altHeapCheck, unbxTupleHeapCheck ) +import CgRetConv ( dataReturnConvPrim, ctrlReturnConvAlg, + CtrlReturnConvention(..) ) -import Literal ( Literal ) -import Maybes ( catMaybes ) -import PrimOp ( primOpCanTriggerGC, PrimOp(..), - primOpStackRequired, StackRequirement(..) +import CgStackery ( allocPrimStack, allocStackTop, + deAllocStackTop, freeStackSlots, dataStackSlots ) -import PrimRep ( getPrimRepSize, isFollowableRep, retPrimRepSize, - PrimRep(..) 
+import CgTailCall ( performTailCall ) +import CgUsages ( getSpRelOffset ) +import CLabel ( mkVecTblLabel, mkClosureTblLabel, + mkDefaultLabel, mkAltLabel, mkReturnInfoLabel ) -import TyCon ( isEnumerationTyCon ) -import Type ( typePrimRep, - splitAlgTyConApp, splitAlgTyConApp_maybe, - Type +import ClosureInfo ( mkLFArgument ) +import CmdLineOpts ( opt_SccProfilingOn ) +import Id ( Id, idPrimRep, isDeadBinder ) +import DataCon ( DataCon, dataConTag, fIRST_TAG, ConTag ) +import VarSet ( varSetElems ) +import Literal ( Literal ) +import PrimOp ( primOpOutOfLine, PrimOp(..) ) +import PrimRep ( getPrimRepSize, retPrimRepSize, PrimRep(..) ) -import Unique ( Unique, Uniquable(..) ) -import Util ( sortLt, isIn, isn'tIn, zipEqual ) +import TyCon ( isEnumerationTyCon, isUnboxedTupleTyCon, tyConPrimRep ) +import Name ( Name, getName ) +import Unique ( Unique, Uniquable(..), newTagUnique ) +import Maybes ( maybeToBool ) +import Util ( only ) import Outputable \end{code} @@ -85,13 +77,13 @@ op which can trigger GC. A more interesting situation is this: -\begin{verbatim} + \begin{verbatim} !A!; ...A... case x# of 0# -> !B!; ...B... default -> !C!; ...C... -\end{verbatim} + \end{verbatim} where \tr{!x!} indicates a possible heap-check point. The heap checks in the alternatives {\em can} be omitted, in which case the topmost @@ -99,164 +91,147 @@ heapcheck will take their worst case into account. In favour of omitting \tr{!B!}, \tr{!C!}: -\begin{itemize} -\item -{\em May} save a heap overflow test, + - {\em May} save a heap overflow test, if ...A... allocates anything. The other advantage of this is that we can use relative addressing from a single Hp to get at all the closures so allocated. -\item - No need to save volatile vars etc across the case -\end{itemize} + + - No need to save volatile vars etc across the case Against: -\begin{itemize} -\item - May do more allocation than reqd. This sometimes bites us + - May do more allocation than reqd. This sometimes bites us badly. For example, nfib (ha!) allocates about 30\% more space if the worst-casing is done, because many many calls to nfib are leaf calls which don't need to allocate anything. This never hurts us if there is only one alternative. -\end{itemize} - - -*** NOT YET DONE *** The difficulty is that \tr{!B!}, \tr{!C!} need -to take account of what is live, and that includes all live volatile -variables, even if they also have stable analogues. Furthermore, the -stack pointers must be lined up properly so that GC sees tidy stacks. -If these things are done, then the heap checks can be done at \tr{!B!} and -\tr{!C!} without a full save-volatile-vars sequence. \begin{code} cgCase :: StgExpr -> StgLiveVars -> StgLiveVars - -> Unique + -> Id + -> SRT -> StgCaseAlts -> Code \end{code} -Several special cases for primitive operations. +Special case #1: PrimOps returning enumeration types. +For enumeration types, we invent a temporary (builtin-unique 1) to +hold the tag, and cross our fingers that this doesn't clash with +anything else. Builtin-unique 0 is used for a similar reason when +compiling enumerated-type primops in CgExpr.lhs. We can't use the +unique from the case binder, because this is used to hold the actual +closure (when the case binder is live, that is). 
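
By way of illustration, here is a made-up source-level program of the
shape this clause handles: a case whose scrutinee is a primop
application returning an enumeration type (here the tagToEnum# variant,
which is discussed just below).  Everything except the GHC.Exts imports
is invented for the example.

\begin{verbatim}
{-# LANGUAGE MagicHash #-}
module EnumScrut where          -- hypothetical example module

import GHC.Exts (Int#, tagToEnum#)

data Colour = Red | Green | Blue

-- Scrutinising tagToEnum# directly: the TagToEnumOp clause generates
-- no code for the primop itself.  The tag is switched on, and only if
-- the case binder is actually used is the corresponding closure
-- fetched from the type constructor's closure table.
colourName :: Int# -> String
colourName i# = case (tagToEnum# i# :: Colour) of
                  Red   -> "red"
                  Green -> "green"
                  Blue  -> "blue"
\end{verbatim}
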
-\begin{code} -cgCase (StgPrim op args _) live_in_whole_case live_in_alts uniq alts - | not (primOpCanTriggerGC op) - = - -- Get amodes for the arguments and results - getPrimOpArgAmodes op args `thenFC` \ arg_amodes -> - let - result_amodes = getPrimAppResultAmodes uniq alts - liveness_mask = panic "cgCase: liveness of non-GC-ing primop touched\n" - in - -- Perform the operation - getVolatileRegs live_in_alts `thenFC` \ vol_regs -> +There is an extra special case for - -- seq cannot happen here => no additional B Stack alloc + case tagToEnum# x of + ... - absC (COpStmt result_amodes op - arg_amodes -- note: no liveness arg - liveness_mask vol_regs) `thenC` +which generates no code for the primop, unless x is used in the +alternatives (in which case we lookup the tag in the relevant closure +table to get the closure). - -- Scrutinise the result - cgInlineAlts NoGC uniq alts +Being a bit short of uniques for temporary variables here, we use +newTagUnique to generate a new unique from the case binder. The case +binder's unique will presumably have the 'c' tag (generated by +CoreToStg), so we just change its tag to 'C' (for 'case') to ensure it +doesn't clash with anything else. - | otherwise -- *Can* trigger GC - = getPrimOpArgAmodes op args `thenFC` \ arg_amodes -> +\begin{code} +cgCase (StgOpApp op args _) + live_in_whole_case live_in_alts bndr srt (StgAlgAlts (Just tycon) alts deflt) + | isEnumerationTyCon tycon + = getArgAmodes args `thenFC` \ arg_amodes -> + + case op of { + StgPrimOp TagToEnumOp -- No code! + -> returnFC (only arg_amodes) ; + + _ -> -- Perform the operation + let + tag_amode = CTemp (newTagUnique (getUnique bndr) 'C') IntRep + in + getVolatileRegs live_in_alts `thenFC` \ vol_regs -> + absC (COpStmt [tag_amode] op arg_amodes vol_regs) + `thenC` + -- NB: no liveness arg + returnFC tag_amode + } `thenFC` \ tag_amode -> - -- Get amodes for the arguments and results, and assign to regs - -- (Can-trigger-gc primops guarantee to have their (nonRobust) - -- args in regs) let - op_result_regs = assignPrimOpResultRegs op + closure = CVal (CIndex (CLbl (mkClosureTblLabel tycon) PtrRep) + tag_amode PtrRep) + PtrRep + in - op_result_amodes = map CReg op_result_regs + -- Bind the default binder if necessary + -- The deadness info is set by StgVarInfo + (if (isDeadBinder bndr) + then nopC + else bindNewToTemp bndr `thenFC` \ bndr_amode -> + absC (CAssign bndr_amode closure)) + `thenC` - (op_arg_amodes, liveness_mask, arg_assts) - = makePrimOpArgsRobust op arg_amodes + -- compile the alts + cgAlgAlts NoGC False{-not polymorphic-} (getUnique bndr) + Nothing{-cc_slot-} False{-no semi-tagging-} + alts deflt False{-don't emit yield-} `thenFC` \ (tagged_alts, deflt_c) -> - liveness_arg = mkIntCLit liveness_mask - in - -- Tidy up in case GC happens... + -- Do the switch + absC (mkAlgAltsCSwitch tag_amode tagged_alts deflt_c) +\end{code} - -- Nota Bene the use of live_in_whole_case in nukeDeadBindings. - -- Reason: the arg_assts computed above may refer to some stack slots - -- which are not live in the alts. So we mustn't use those slots - -- to save volatile vars in! - nukeDeadBindings live_in_whole_case `thenC` - saveVolatileVars live_in_alts `thenFC` \ volatile_var_save_assts -> - - -- Allocate stack words for the prim-op itself, - -- these are guaranteed to be ON TOP OF the stack. - -- Currently this is used *only* by the seq# primitive op. 
- let - (a_req,b_req) = case (primOpStackRequired op) of - NoStackRequired -> (0, 0) - FixedStackRequired a b -> (a, b) - VariableStackRequired -> (0, 0) -- i.e. don't care - in - allocAStackTop a_req `thenFC` \ a_slot -> - allocBStackTop b_req `thenFC` \ b_slot -> - - getEndOfBlockInfo `thenFC` \ eob_info@(EndOfBlockInfo args_spa args_spb sequel) -> - -- a_req and b_req allocate stack space that is taken care of by the - -- macros generated for the primops; thus, we there is no need to adjust - -- this part of the stacks later on (=> +a_req in EndOfBlockInfo) - -- currently all this is only used for SeqOp - forkEval (if True {- a_req==0 && b_req==0 -} - then eob_info - else (EndOfBlockInfo (args_spa+a_req) - (args_spb+b_req) sequel)) nopC - ( - getAbsC (cgInlineAlts GCMayHappen uniq alts) `thenFC` \ abs_c -> - absC (CRetUnVector vtbl_label (CLabelledCode return_label abs_c)) - `thenC` - returnFC (CaseAlts (CUnVecLbl return_label vtbl_label) - Nothing{-no semi-tagging-})) - `thenFC` \ new_eob_info -> - - -- Record the continuation info - setEndOfBlockInfo new_eob_info ( - - -- Now "return" to the inline alternatives; this will get - -- compiled to a fall-through. - let - simultaneous_assts = arg_assts `mkAbsCStmts` volatile_var_save_assts - - -- do_op_and_continue will be passed an amode for the continuation - do_op_and_continue sequel - = absC (COpStmt op_result_amodes - op - (pin_liveness op liveness_arg op_arg_amodes) - liveness_mask - [{-no vol_regs-}]) - `thenC` - - sequelToAmode sequel `thenFC` \ dest_amode -> - absC (CReturn dest_amode DirectReturn) - - -- Note: we CJump even for algebraic data types, - -- because cgInlineAlts always generates code, never a - -- vector. - in - performReturn simultaneous_assts do_op_and_continue live_in_alts - ) - where - -- for all PrimOps except ccalls, we pin the liveness info - -- on as the first "argument" - -- ToDo: un-duplicate? +Special case #2: case of literal. - pin_liveness (CCallOp _ _ _ _ _ _) _ args = args - pin_liveness other_op liveness_arg args - = liveness_arg :args +\begin{code} +cgCase (StgLit lit) live_in_whole_case live_in_alts bndr srt alts = + absC (CAssign (CTemp (getUnique bndr) (idPrimRep bndr)) (CLit lit)) `thenC` + case alts of + StgPrimAlts tycon alts deflt -> cgPrimInlineAlts bndr tycon alts deflt + other -> pprPanic "cgCase: case of literal has strange alts" (pprStgAlts alts) +\end{code} - vtbl_label = mkVecTblLabel uniq - return_label = mkReturnPtLabel uniq +Special case #3: inline PrimOps. +\begin{code} +cgCase (StgOpApp op@(StgPrimOp primop) args _) + live_in_whole_case live_in_alts bndr srt alts + | not (primOpOutOfLine primop) + = + -- Get amodes for the arguments and results + getArgAmodes args `thenFC` \ arg_amodes -> + getVolatileRegs live_in_alts `thenFC` \ vol_regs -> + + case alts of + StgPrimAlts tycon alts deflt -- PRIMITIVE ALTS + -> absC (COpStmt [CTemp (getUnique bndr) (tyConPrimRep tycon)] + op + arg_amodes -- note: no liveness arg + vol_regs) `thenC` + cgPrimInlineAlts bndr tycon alts deflt + + StgAlgAlts (Just tycon) [(_, args, _, rhs)] StgNoDefault + | isUnboxedTupleTyCon tycon -- UNBOXED TUPLE ALTS + -> -- no heap check, no yield, just get in there and do it. 
+ absC (COpStmt [ CTemp (getUnique arg) (idPrimRep arg) | arg <- args ] + op + arg_amodes -- note: no liveness arg + vol_regs) `thenC` + mapFCs bindNewToTemp args `thenFC` \ _ -> + cgExpr rhs + + other -> pprPanic "cgCase: case of primop has strange alts" (pprStgAlts alts) \end{code} +TODO: Case-of-case of primop can probably be done inline too (but +maybe better to translate it out beforehand). See +ghc/lib/misc/PackedString.lhs for examples where this crops up (with +4.02). + Another special case: scrutinising a primitive-typed variable. No evaluation required. We don't save volatile variables, nor do we do a heap-check in the alternatives. Instead, the heap usage of the @@ -265,9 +240,22 @@ allocating more heap than strictly necessary, but it will sometimes eliminate a heap check altogether. \begin{code} -cgCase (StgApp v [] _) live_in_whole_case live_in_alts uniq (StgPrimAlts ty alts deflt) - = getArgAmode v `thenFC` \ amode -> - cgPrimAltsGivenScrutinee NoGC amode alts deflt +cgCase (StgApp v []) live_in_whole_case live_in_alts bndr srt + (StgPrimAlts tycon alts deflt) + + = + getCAddrMode v `thenFC` \amode -> + + {- + Careful! we can't just bind the default binder to the same thing + as the scrutinee, since it might be a stack location, and having + two bindings pointing at the same stack locn doesn't work (it + confuses nukeDeadBindings). Hence, use a new temp. + -} + bindNewToTemp bndr `thenFC` \deflt_amode -> + absC (CAssign deflt_amode amode) `thenC` + + cgPrimAlts NoGC amode alts deflt [] \end{code} Special case: scrutinising a non-primitive variable. @@ -275,124 +263,111 @@ This can be done a little better than the general case, because we can reuse/trim the stack slot holding the variable (if it is in one). \begin{code} -cgCase (StgApp (StgVarArg fun) args _ {-lvs must be same as live_in_alts-}) - live_in_whole_case live_in_alts uniq alts@(StgAlgAlts _ _ _) - = - getCAddrModeAndInfo fun `thenFC` \ (fun_amode, lf_info) -> +cgCase (StgApp fun args) + live_in_whole_case live_in_alts bndr srt alts + = getCAddrModeAndInfo fun `thenFC` \ (fun', fun_amode, lf_info) -> getArgAmodes args `thenFC` \ arg_amodes -> - -- Squish the environment + -- Nuking dead bindings *before* calculating the saves is the + -- value-add here. We might end up freeing up some slots currently + -- occupied by variables only required for the call. + -- NOTE: we need to look up the variables used in the call before + -- doing this, because some of them may not be in the environment + -- afterward. nukeDeadBindings live_in_alts `thenC` saveVolatileVarsAndRegs live_in_alts `thenFC` \ (save_assts, alts_eob_info, maybe_cc_slot) -> - forkEval alts_eob_info - nopC (cgEvalAlts maybe_cc_slot uniq alts) `thenFC` \ scrut_eob_info -> - setEndOfBlockInfo scrut_eob_info ( - tailCallBusiness fun fun_amode lf_info arg_amodes live_in_alts save_assts - ) + forkEval alts_eob_info + ( allocStackTop retPrimRepSize + `thenFC` \_ -> nopC ) + ( deAllocStackTop retPrimRepSize `thenFC` \_ -> + cgEvalAlts maybe_cc_slot bndr srt alts ) + `thenFC` \ scrut_eob_info -> + setEndOfBlockInfo (maybeReserveSeqFrame alts scrut_eob_info) $ + performTailCall fun' fun_amode lf_info arg_amodes save_assts \end{code} +Note about return addresses: we *always* push a return address, even +if because of an optimisation we end up jumping direct to the return +code (not through the address itself). The alternatives always assume +that the return address is on the stack. 
The return address is +required in case the alternative performs a heap check, since it +encodes the liveness of the slots in the activation record. + +On entry to the case alternative, we can re-use the slot containing +the return address immediately after the heap check. That's what the +deAllocStackTop call is doing above. + Finally, here is the general case. \begin{code} -cgCase expr live_in_whole_case live_in_alts uniq alts +cgCase expr live_in_whole_case live_in_alts bndr srt alts = -- Figure out what volatile variables to save nukeDeadBindings live_in_whole_case `thenC` + saveVolatileVarsAndRegs live_in_alts `thenFC` \ (save_assts, alts_eob_info, maybe_cc_slot) -> - -- Save those variables right now! + -- Save those variables right now! absC save_assts `thenC` + -- generate code for the alts forkEval alts_eob_info - (nukeDeadBindings live_in_alts) - (cgEvalAlts maybe_cc_slot uniq alts) `thenFC` \ scrut_eob_info -> - - setEndOfBlockInfo scrut_eob_info (cgExpr expr) + (nukeDeadBindings live_in_alts `thenC` + allocStackTop retPrimRepSize -- space for retn address + `thenFC` \_ -> nopC + ) + (deAllocStackTop retPrimRepSize `thenFC` \_ -> + cgEvalAlts maybe_cc_slot bndr srt alts) `thenFC` \ scrut_eob_info -> + + setEndOfBlockInfo (maybeReserveSeqFrame alts scrut_eob_info) $ + cgExpr expr \end{code} -%************************************************************************ -%* * -\subsection[CgCase-primops]{Primitive applications} -%* * -%************************************************************************ - -Get result amodes for a primitive operation, in the case wher GC can't happen. -The amodes are returned in canonical order, ready for the prim-op! - - Alg case: temporaries named as in the alternatives, - plus (CTemp u) for the tag (if needed) - Prim case: (CTemp u) - -This is all disgusting, because these amodes must be consistent with those -invented by CgAlgAlts. - -\begin{code} -getPrimAppResultAmodes - :: Unique - -> StgCaseAlts - -> [CAddrMode] -\end{code} +There's a lot of machinery going on behind the scenes to manage the +stack pointer here. forkEval takes the virtual Sp and free list from +the first argument, and turns that into the *real* Sp for the second +argument. It also uses this virtual Sp as the args-Sp in the EOB info +returned, so that the scrutinee will trim the real Sp back to the +right place before doing whatever it does. + --SDM (who just spent an hour figuring this out, and didn't want to + forget it). + +Why don't we push the return address just before evaluating the +scrutinee? Because the slot reserved for the return address might +contain something useful, so we wait until performing a tail call or +return before pushing the return address (see +CgTailCall.pushReturnAddress). + +This also means that the environment doesn't need to know about the +free stack slot for the return address (for generating bitmaps), +because we don't reserve it until just before the eval. + +TODO!! Problem: however, we have to save the current cost centre +stack somewhere, because at the eval point the current CCS might be +different. So we pick a free stack slot and save CCCS in it. The +problem with this is that this slot isn't recorded as free/unboxed in +the environment, so a case expression in the scrutinee will have the +wrong bitmap attached. Fortunately we don't ever seem to see +case-of-case at the back end. One solution might be to shift the +saved CCS to the correct place in the activation record just before +the jump. 
+ --SDM + +(one consequence of the above is that activation records on the stack +don't follow the layout of closures when we're profiling. The CCS +could be anywhere within the record). \begin{code} --- If there's an StgBindDefault which does use the bound --- variable, then we can only handle it if the type involved is --- an enumeration type. That's important in the case --- of comparisions: --- --- case x ># y of --- r -> f r --- --- The only reason for the restriction to *enumeration* types is our --- inability to invent suitable temporaries to hold the results; --- Elaborating the CTemp addr mode to have a second uniq field --- (which would simply count from 1) would solve the problem. --- Anyway, cgInlineAlts is now capable of handling all cases; --- it's only this function which is being wimpish. - -getPrimAppResultAmodes uniq (StgAlgAlts ty alts (StgBindDefault _ True {- used -} _)) - | isEnumerationTyCon spec_tycon = [tag_amode] - | otherwise = panic "getPrimAppResultAmodes: non-enumeration algebraic alternatives with default" - where - -- A temporary variable to hold the tag; this is unaffected by GC because - -- the heap-checks in the branches occur after the switch - tag_amode = CTemp uniq IntRep - (spec_tycon, _, _) = splitAlgTyConApp ty - -getPrimAppResultAmodes uniq (StgAlgAlts ty alts other_default) - -- Default is either StgNoDefault or StgBindDefault with unused binder - = case alts of - [_] -> arg_amodes -- No need for a tag - other -> tag_amode : arg_amodes - where - -- A temporary variable to hold the tag; this is unaffected by GC because - -- the heap-checks in the branches occur after the switch - tag_amode = CTemp uniq IntRep +maybeReserveSeqFrame (StgAlgAlts Nothing _ _) + (EndOfBlockInfo args_sp (CaseAlts amode stuff _)) + = EndOfBlockInfo (args_sp + retPrimRepSize) (CaseAlts amode stuff True) - -- Sort alternatives into canonical order; there must be a complete - -- set because there's no default case. - sorted_alts = sortLt lt alts - (con1,_,_,_) `lt` (con2,_,_,_) = dataConTag con1 < dataConTag con2 - - arg_amodes :: [CAddrMode] - - -- Turn them into amodes - arg_amodes = concat (map mk_amodes sorted_alts) - mk_amodes (con, args, use_mask, rhs) - = [ CTemp (uniqueOf arg) (idPrimRep arg) | arg <- args ] -\end{code} - -The situation is simpler for primitive -results, because there is only one! - -\begin{code} -getPrimAppResultAmodes uniq (StgPrimAlts ty _ _) - = [CTemp uniq (typePrimRep ty)] +maybeReserveSeqFrame other scrut_eob_info = scrut_eob_info \end{code} - %************************************************************************ %* * \subsection[CgCase-alts]{Alternatives} @@ -404,16 +379,27 @@ alternatives of a @case@, used in a context when there is some evaluation to be done. 
\begin{code} -cgEvalAlts :: Maybe VirtualSpBOffset -- Offset of cost-centre to be restored, if any - -> Unique +cgEvalAlts :: Maybe VirtualSpOffset -- Offset of cost-centre to be restored, if any + -> Id + -> SRT -- SRT for the continuation -> StgCaseAlts - -> FCode Sequel -- Any addr modes inside are guaranteed to be a label - -- so that we can duplicate it without risk of - -- duplicating code + -> FCode Sequel -- Any addr modes inside are guaranteed + -- to be a label so that we can duplicate it + -- without risk of duplicating code + +cgEvalAlts cc_slot bndr srt alts + = + let uniq = getUnique bndr; name = getName bndr in + + buildContLivenessMask name `thenFC` \ liveness -> + + case alts of -cgEvalAlts cc_slot uniq (StgAlgAlts ty alts deflt) - = -- Generate the instruction to restore cost centre, if any - restoreCurrentCostCentre cc_slot `thenFC` \ cc_restore -> + -- algebraic alts ... + StgAlgAlts maybe_tycon alts deflt -> + + -- bind the default binder (it covers all the alternatives) + bindNewToReg bndr node (mkLFArgument bndr) `thenC` -- Generate sequel info for use downstream -- At the moment, we only do it if the type is vector-returnable. @@ -424,92 +410,75 @@ cgEvalAlts cc_slot uniq (StgAlgAlts ty alts deflt) -- -- which is worse than having the alt code in the switch statement - let - (spec_tycon, _, _) = splitAlgTyConApp ty - - use_labelled_alts - = case ctrlReturnConvAlg spec_tycon of - VectoredReturn _ -> True - _ -> False - - semi_tagged_stuff - = if not use_labelled_alts then - Nothing -- no semi-tagging info - else - cgSemiTaggedAlts uniq alts deflt -- Just - in - cgAlgAlts GCMayHappen uniq cc_restore use_labelled_alts ty alts deflt True - `thenFC` \ (tagged_alt_absCs, deflt_absC) -> - - mkReturnVector uniq ty tagged_alt_absCs deflt_absC `thenFC` \ return_vec -> - - returnFC (CaseAlts return_vec semi_tagged_stuff) + let is_alg = maybeToBool maybe_tycon + Just spec_tycon = maybe_tycon + in + + -- Deal with the unboxed tuple case + if is_alg && isUnboxedTupleTyCon spec_tycon then + -- By now, the simplifier should have have turned it + -- into case e of (# a,b #) -> e + -- There shouldn't be a + -- case e of DEFAULT -> e + ASSERT2( case (alts, deflt) of { ([_],StgNoDefault) -> True; other -> False }, + text "cgEvalAlts: dodgy case of unboxed tuple type" ) + let + alt = head alts + lbl = mkReturnInfoLabel uniq + in + cgUnboxedTupleAlt uniq cc_slot True alt `thenFC` \ abs_c -> + getSRTInfo name srt `thenFC` \ srt_info -> + absC (CRetDirect uniq abs_c srt_info liveness) `thenC` + returnFC (CaseAlts (CLbl lbl RetRep) Nothing False) -cgEvalAlts cc_slot uniq (StgPrimAlts ty alts deflt) - = -- Generate the instruction to restore cost centre, if any - restoreCurrentCostCentre cc_slot `thenFC` \ cc_restore -> + -- normal algebraic (or polymorphic) case alternatives + else let + ret_conv | is_alg = ctrlReturnConvAlg spec_tycon + | otherwise = UnvectoredReturn 0 - -- Generate the switch - getAbsC (cgPrimAlts GCMayHappen uniq ty alts deflt) `thenFC` \ abs_c -> + use_labelled_alts = case ret_conv of + VectoredReturn _ -> True + _ -> False - -- Generate the labelled block, starting with restore-cost-centre - absC (CRetUnVector vtbl_label - (CLabelledCode return_label (cc_restore `mkAbsCStmts` abs_c))) - `thenC` - -- Return an amode for the block - returnFC (CaseAlts (CUnVecLbl return_label vtbl_label) Nothing{-no semi-tagging-}) - where - vtbl_label = mkVecTblLabel uniq - return_label = mkReturnPtLabel uniq -\end{code} + semi_tagged_stuff + = if use_labelled_alts then + 
cgSemiTaggedAlts bndr alts deflt -- Just + else + Nothing -- no semi-tagging info + in + cgAlgAlts GCMayHappen (not is_alg) uniq cc_slot use_labelled_alts + alts deflt True `thenFC` \ (tagged_alt_absCs, deflt_absC) -> -\begin{code} -cgInlineAlts :: GCFlag -> Unique - -> StgCaseAlts - -> Code -\end{code} + mkReturnVector name tagged_alt_absCs deflt_absC srt liveness + ret_conv `thenFC` \ return_vec -> -HWL comment on {\em GrAnSim\/} (adding GRAN_YIELDs for context switch): If -we do an inlining of the case no separate functions for returning are -created, so we don't have to generate a GRAN_YIELD in that case. This info -must be propagated to cgAlgAltRhs (where the GRAN_YIELD macro might be -emitted). Hence, the new Bool arg to cgAlgAltRhs. + returnFC (CaseAlts return_vec semi_tagged_stuff False) -First case: algebraic case, exactly one alternative, no default. -In this case the primitive op will not have set a temporary to the -tag, so we shouldn't generate a switch statment. Instead we just -do the right thing. + -- primitive alts... + StgPrimAlts tycon alts deflt -> -\begin{code} -cgInlineAlts gc_flag uniq (StgAlgAlts ty [alt@(con,args,use_mask,rhs)] StgNoDefault) - = cgAlgAltRhs gc_flag con args use_mask rhs False{-no yield macro if alt gets inlined-} -\end{code} + -- Restore the cost centre + restoreCurrentCostCentre cc_slot `thenFC` \ cc_restore -> -Second case: algebraic case, several alternatives. -Tag is held in a temporary. + -- Generate the switch + getAbsC (cgPrimEvalAlts bndr tycon alts deflt) `thenFC` \ abs_c -> -\begin{code} -cgInlineAlts gc_flag uniq (StgAlgAlts ty alts deflt) - = cgAlgAlts gc_flag uniq AbsCNop{-restore_cc-} False{-no semi-tagging-} - ty alts deflt - False{-don't emit yield-} `thenFC` \ (tagged_alts, deflt_c) -> + -- Generate the labelled block, starting with restore-cost-centre + getSRTInfo name srt `thenFC` \srt_info -> + absC (CRetDirect uniq (cc_restore `mkAbsCStmts` abs_c) + srt_info liveness) `thenC` - -- Do the switch - absC (mkAlgAltsCSwitch tag_amode tagged_alts deflt_c) - where - -- A temporary variable to hold the tag; this is unaffected by GC because - -- the heap-checks in the branches occur after the switch - tag_amode = CTemp uniq IntRep + -- Return an amode for the block + returnFC (CaseAlts (CLbl (mkReturnInfoLabel uniq) RetRep) Nothing False) \end{code} -Third (real) case: primitive result type. - -\begin{code} -cgInlineAlts gc_flag uniq (StgPrimAlts ty alts deflt) - = cgPrimAlts gc_flag uniq ty alts deflt -\end{code} +HWL comment on {\em GrAnSim\/} (adding GRAN_YIELDs for context switch): If +we do an inlining of the case no separate functions for returning are +created, so we don't have to generate a GRAN_YIELD in that case. This info +must be propagated to cgAlgAltRhs (where the GRAN_YIELD macro might be +emitted). Hence, the new Bool arg to cgAlgAltRhs. %************************************************************************ %* * @@ -527,168 +496,54 @@ are inlined alternatives. \begin{code} cgAlgAlts :: GCFlag + -> Bool -- polymorphic case -> Unique - -> AbstractC -- Restore-cost-centre instruction + -> Maybe VirtualSpOffset -> Bool -- True <=> branches must be labelled - -> Type -- From the case statement - -> [(Id, [Id], [Bool], StgExpr)] -- The alternatives - -> StgCaseDefault -- The default + -> [(DataCon, [Id], [Bool], StgExpr)] -- The alternatives + -> StgCaseDefault -- The default -> Bool -- Context switch at alts? 
-> FCode ([(ConTag, AbstractC)], -- The branches AbstractC -- The default case ) -\end{code} - -The case with a default which has a binder is different. We need to -pick all the constructors which aren't handled explicitly by an -alternative, and which return their results in registers, allocate -them explicitly in the heap, and jump to a join point for the default -case. - -OLD: All of this only works if a heap-check is required anyway, because -otherwise it isn't safe to allocate. - -NEW (July 94): now false! It should work regardless of gc_flag, -because of the extra_branches argument now added to forkAlts. - -We put a heap-check at the join point, for the benefit of constructors -which don't need to do allocation. This means that ones which do need -to allocate may end up doing two heap-checks; but that's just too bad. -(We'd need two join labels otherwise. ToDo.) - -It's all pretty turgid anyway. - -\begin{code} -cgAlgAlts gc_flag uniq restore_cc semi_tagging - ty alts deflt@(StgBindDefault binder True{-used-} _) - emit_yield{-should a yield macro be emitted?-} - = let - extra_branches :: [FCode (ConTag, AbstractC)] - extra_branches = catMaybes (map mk_extra_branch default_cons) - - must_label_default = semi_tagging || not (null extra_branches) - in - forkAlts (map (cgAlgAlt gc_flag uniq restore_cc semi_tagging emit_yield) alts) - extra_branches - (cgAlgDefault gc_flag uniq restore_cc must_label_default deflt emit_yield) - where - - default_join_lbl = mkDefaultLabel uniq - jump_instruction = CJump (CLbl default_join_lbl CodePtrRep) - - (spec_tycon, _, spec_cons) = splitAlgTyConApp ty - - alt_cons = [ con | (con,_,_,_) <- alts ] - - default_cons = [ spec_con | spec_con <- spec_cons, -- In this type - spec_con `not_elem` alt_cons ] -- Not handled explicitly - where - not_elem = isn'tIn "cgAlgAlts" - - -- (mk_extra_branch con) returns the a maybe for the extra branch for con. - -- The "maybe" is because con may return in heap, in which case there is - -- nothing to do. Otherwise, we have a special case for a nullary constructor, - -- but in the general case we do an allocation and heap-check. - - mk_extra_branch :: DataCon -> (Maybe (FCode (ConTag, AbstractC))) - - mk_extra_branch con - = ASSERT(isDataCon con) - case dataReturnConvAlg con of - ReturnInHeap -> Nothing - ReturnInRegs rs -> Just (getAbsC (alloc_code rs) `thenFC` \ abs_c -> - returnFC (tag, abs_c) - ) - where - lf_info = mkConLFInfo con - tag = dataConTag con - - -- alloc_code generates code to allocate constructor con, whose args are - -- in the arguments to alloc_code, assigning the result to Node. 
- alloc_code :: [MagicId] -> Code - - alloc_code regs - = possibleHeapCheck gc_flag regs False ( - buildDynCon binder useCurrentCostCentre con - (map CReg regs) (all zero_size regs) - `thenFC` \ idinfo -> - idInfoToAmode PtrRep idinfo `thenFC` \ amode -> - - absC (CAssign (CReg node) amode) `thenC` - absC jump_instruction - ) - where - zero_size reg = getPrimRepSize (magicIdPrimRep reg) == 0 -\end{code} - -Now comes the general case -\begin{code} -cgAlgAlts gc_flag uniq restore_cc must_label_branches ty alts deflt - {- The deflt is either StgNoDefault or a BindDefault which doesn't use the binder -} +cgAlgAlts gc_flag is_poly uniq restore_cc must_label_branches alts deflt emit_yield{-should a yield macro be emitted?-} = forkAlts (map (cgAlgAlt gc_flag uniq restore_cc must_label_branches emit_yield) alts) - [{- No "extra branches" -}] - (cgAlgDefault gc_flag uniq restore_cc must_label_branches deflt emit_yield) + (cgAlgDefault gc_flag is_poly uniq restore_cc must_label_branches deflt emit_yield) \end{code} \begin{code} cgAlgDefault :: GCFlag - -> Unique -> AbstractC -> Bool -- turgid state... - -> StgCaseDefault -- input + -> Bool -- polymorphic case + -> Unique -> Maybe VirtualSpOffset -> Bool -- turgid state... + -> StgCaseDefault -- input -> Bool - -> FCode AbstractC -- output + -> FCode AbstractC -- output -cgAlgDefault gc_flag uniq restore_cc must_label_branch - StgNoDefault _ +cgAlgDefault gc_flag is_poly uniq cc_slot must_label_branch StgNoDefault _ = returnFC AbsCNop -cgAlgDefault gc_flag uniq restore_cc must_label_branch - (StgBindDefault _ False{-binder not used-} rhs) - emit_yield{-should a yield macro be emitted?-} - - = getAbsC (absC restore_cc `thenC` - let - emit_gran_macros = opt_GranMacros - in - (if emit_gran_macros && emit_yield - then yield [] False - else absC AbsCNop) `thenC` - -- liveness same as in possibleHeapCheck below - possibleHeapCheck gc_flag [] False (cgExpr rhs)) `thenFC` \ abs_c -> - let - final_abs_c | must_label_branch = CJump (CLabelledCode lbl abs_c) - | otherwise = abs_c - in - returnFC final_abs_c - where - lbl = mkDefaultLabel uniq - - -cgAlgDefault gc_flag uniq restore_cc must_label_branch - (StgBindDefault binder True{-binder used-} rhs) +cgAlgDefault gc_flag is_poly uniq cc_slot must_label_branch + (StgBindDefault rhs) emit_yield{-should a yield macro be emitted?-} - = -- We have arranged that Node points to the thing, even - -- even if we return in registers - bindNewToReg binder node mkLFArgument `thenC` + = -- We have arranged that Node points to the thing + restoreCurrentCostCentre cc_slot `thenFC` \restore_cc -> getAbsC (absC restore_cc `thenC` - let - emit_gran_macros = opt_GranMacros - in - (if emit_gran_macros && emit_yield - then yield [node] False - else absC AbsCNop) `thenC` - -- liveness same as in possibleHeapCheck below - possibleHeapCheck gc_flag [node] False (cgExpr rhs) + -- HWL: maybe need yield here + --(if emit_yield + -- then yield [node] True + -- else absC AbsCNop) `thenC` + algAltHeapCheck gc_flag is_poly [node] (cgExpr rhs) -- Node is live, but doesn't need to point at the thing itself; -- it's ok for Node to point to an indirection or FETCH_ME -- Hence no need to re-enter Node. 
) `thenFC` \ abs_c -> let - final_abs_c | must_label_branch = CJump (CLabelledCode lbl abs_c) + final_abs_c | must_label_branch = CCodeBlock lbl abs_c | otherwise = abs_c in returnFC final_abs_c @@ -698,20 +553,30 @@ cgAlgDefault gc_flag uniq restore_cc must_label_branch -- HWL comment on GrAnSim: GRAN_YIELDs needed; emitted in cgAlgAltRhs cgAlgAlt :: GCFlag - -> Unique -> AbstractC -> Bool -- turgid state + -> Unique -> Maybe VirtualSpOffset -> Bool -- turgid state -> Bool -- Context switch at alts? - -> (Id, [Id], [Bool], StgExpr) + -> (DataCon, [Id], [Bool], StgExpr) -> FCode (ConTag, AbstractC) -cgAlgAlt gc_flag uniq restore_cc must_label_branch +cgAlgAlt gc_flag uniq cc_slot must_label_branch emit_yield{-should a yield macro be emitted?-} (con, args, use_mask, rhs) - = getAbsC (absC restore_cc `thenC` - cgAlgAltRhs gc_flag con args use_mask rhs - emit_yield + = + restoreCurrentCostCentre cc_slot `thenFC` \restore_cc -> + getAbsC (absC restore_cc `thenC` + -- HWL: maybe need yield here + -- (if emit_yield + -- then yield [node] True -- XXX live regs wrong + -- else absC AbsCNop) `thenC` + (case gc_flag of + NoGC -> mapFCs bindNewToTemp args `thenFC` \_ -> nopC + GCMayHappen -> bindConArgs con args + ) `thenC` + algAltHeapCheck gc_flag False{-not poly-} [node] ( + cgExpr rhs) ) `thenFC` \ abs_c -> let - final_abs_c | must_label_branch = CJump (CLabelledCode lbl abs_c) + final_abs_c | must_label_branch = CCodeBlock lbl abs_c | otherwise = abs_c in returnFC (tag, final_abs_c) @@ -719,38 +584,31 @@ cgAlgAlt gc_flag uniq restore_cc must_label_branch tag = dataConTag con lbl = mkAltLabel uniq tag -cgAlgAltRhs :: GCFlag - -> Id - -> [Id] - -> [Bool] - -> StgExpr - -> Bool -- context switch? - -> Code -cgAlgAltRhs gc_flag con args use_mask rhs emit_yield - = let - (live_regs, node_reqd) - = case (dataReturnConvAlg con) of - ReturnInHeap -> ([], True) - ReturnInRegs regs -> ([reg | (reg,True) <- zipEqual "cgAlgAltRhs" regs use_mask], False) - -- Pick the live registers using the use_mask - -- Doing so is IMPORTANT, because with semi-tagging - -- enabled only the live registers will have valid - -- pointers in them. - in - let - emit_gran_macros = opt_GranMacros - in - (if emit_gran_macros && emit_yield - then yield live_regs node_reqd - else absC AbsCNop) `thenC` - -- liveness same as in possibleHeapCheck below - possibleHeapCheck gc_flag live_regs node_reqd ( - (case gc_flag of - NoGC -> mapFCs bindNewToTemp args `thenFC` \ _ -> - nopC - GCMayHappen -> bindConArgs con args - ) `thenC` - cgExpr rhs +cgUnboxedTupleAlt + :: Unique -- unique for label of the alternative + -> Maybe VirtualSpOffset -- Restore cost centre + -> Bool -- ctxt switch + -> (DataCon, [Id], [Bool], StgExpr) -- alternative + -> FCode AbstractC + +cgUnboxedTupleAlt lbl cc_slot emit_yield (con,args,use_mask,rhs) + = getAbsC ( + bindUnboxedTupleComponents args + `thenFC` \ (live_regs, ptrs, nptrs, stack_res) -> + + restoreCurrentCostCentre cc_slot `thenFC` \restore_cc -> + absC restore_cc `thenC` + + -- HWL: maybe need yield here + -- (if emit_yield + -- then yield live_regs True -- XXX live regs wrong? + -- else absC AbsCNop) `thenC` + + -- generate a heap check if necessary + possibleUnbxTupleHeapCheck GCMayHappen live_regs ptrs nptrs ( + + -- and finally the code for the alternative + cgExpr rhs) ) \end{code} @@ -764,60 +622,34 @@ Turgid-but-non-monadic code to conjure up the required info from algebraic case alternatives for semi-tagging. 
\begin{code} -cgSemiTaggedAlts :: Unique - -> [(Id, [Id], [Bool], StgExpr)] +cgSemiTaggedAlts :: Id + -> [(DataCon, [Id], [Bool], StgExpr)] -> GenStgCaseDefault Id Id -> SemiTaggingStuff -cgSemiTaggedAlts uniq alts deflt +cgSemiTaggedAlts binder alts deflt = Just (map st_alt alts, st_deflt deflt) where + uniq = getUnique binder + st_deflt StgNoDefault = Nothing - st_deflt (StgBindDefault binder binder_used _) - = Just (if binder_used then Just binder else Nothing, - (CCallProfCtrMacro SLIT("RET_SEMI_BY_DEFAULT") [], -- ToDo: monadise? + st_deflt (StgBindDefault _) + = Just (Just binder, + (CCallProfCtrMacro FSLIT("RET_SEMI_BY_DEFAULT") [], -- ToDo: monadise? mkDefaultLabel uniq) ) st_alt (con, args, use_mask, _) - = case (dataReturnConvAlg con) of - - ReturnInHeap -> - -- Ha! Nothing to do; Node already points to the thing - (con_tag, - (CCallProfCtrMacro SLIT("RET_SEMI_IN_HEAP") -- ToDo: monadise? - [mkIntCLit (length args)], -- how big the thing in the heap is + = -- Ha! Nothing to do; Node already points to the thing + (con_tag, + (CCallProfCtrMacro FSLIT("RET_SEMI_IN_HEAP") -- ToDo: monadise? + [mkIntCLit (length args)], -- how big the thing in the heap is join_label) ) - - ReturnInRegs regs -> - -- We have to load the live registers from the constructor - -- pointed to by Node. - let - (_, regs_w_offsets) = layOutDynCon con magicIdPrimRep regs - - used_regs = selectByMask use_mask regs - - used_regs_w_offsets = [ ro | ro@(reg,offset) <- regs_w_offsets, - reg `is_elem` used_regs] - - is_elem = isIn "cgSemiTaggedAlts" - in - (con_tag, - (mkAbstractCs [ - CCallProfCtrMacro SLIT("RET_SEMI_IN_REGS") -- ToDo: macroise? - [mkIntCLit (length regs_w_offsets), - mkIntCLit (length used_regs_w_offsets)], - CSimultaneous (mkAbstractCs (map move_to_reg used_regs_w_offsets))], - join_label)) where con_tag = dataConTag con join_label = mkAltLabel uniq con_tag - - move_to_reg :: (MagicId, VirtualHeapOffset {-from Node-}) -> AbstractC - move_to_reg (reg, offset) - = CAssign (CReg reg) (CVal (NodeRel offset) (magicIdPrimRep reg)) \end{code} %************************************************************************ @@ -826,69 +658,61 @@ cgSemiTaggedAlts uniq alts deflt %* * %************************************************************************ -@cgPrimAlts@ generates a suitable @CSwitch@ for dealing with the -alternatives of a primitive @case@, given an addressing mode for the -thing to scrutinise. It also keeps track of the maximum stack depth -encountered down any branch. +@cgPrimEvalAlts@ and @cgPrimInlineAlts@ generate suitable @CSwitch@es +for dealing with the alternatives of a primitive @case@, given an +addressing mode for the thing to scrutinise. It also keeps track of +the maximum stack depth encountered down any branch. As usual, no binders in the alternatives are yet bound. 
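
A small invented source-level example of such a primitive case, for
orientation; the module and function names are hypothetical and not
part of GHC:

\begin{verbatim}
{-# LANGUAGE MagicHash #-}
module PrimScrut where          -- hypothetical example module

import GHC.Exts (Int#)

-- The alternatives of a primitive case are literals, possibly with a
-- default.  The scrutinee lives in a temporary (inline case) or in the
-- primitive return register (eval case), and cgPrimAltsWithDefault
-- binds the STG case binder to that location before the switch is
-- generated.
classify :: Int# -> Int
classify x# = case x# of
                0# -> 0
                1# -> 1
                _  -> 2
\end{verbatim}
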
\begin{code} -cgPrimAlts :: GCFlag - -> Unique - -> Type - -> [(Literal, StgExpr)] -- Alternatives - -> StgCaseDefault -- Default - -> Code - -cgPrimAlts gc_flag uniq ty alts deflt - = cgPrimAltsGivenScrutinee gc_flag scrutinee alts deflt - where - -- A temporary variable, or standard register, to hold the result - scrutinee = case gc_flag of - NoGC -> CTemp uniq kind - GCMayHappen -> CReg (dataReturnConvPrim kind) - - kind = typePrimRep ty - - -cgPrimAltsGivenScrutinee gc_flag scrutinee alts deflt - = forkAlts (map (cgPrimAlt gc_flag) alts) - [{- No "extra branches" -}] - (cgPrimDefault gc_flag scrutinee deflt) `thenFC` \ (alt_absCs, deflt_absC) -> +cgPrimInlineAlts bndr tycon alts deflt + = cgPrimAltsWithDefault bndr NoGC (CTemp uniq kind) alts deflt [] + where + uniq = getUnique bndr + kind = tyConPrimRep tycon + +cgPrimEvalAlts bndr tycon alts deflt + = cgPrimAltsWithDefault bndr GCMayHappen (CReg reg) alts deflt [reg] + where + reg = dataReturnConvPrim kind + kind = tyConPrimRep tycon + +cgPrimAltsWithDefault bndr gc_flag scrutinee alts deflt regs + = -- first bind the default if necessary + bindNewPrimToAmode bndr scrutinee `thenC` + cgPrimAlts gc_flag scrutinee alts deflt regs + +cgPrimAlts gc_flag scrutinee alts deflt regs + = forkAlts (map (cgPrimAlt gc_flag regs) alts) + (cgPrimDefault gc_flag regs deflt) + `thenFC` \ (alt_absCs, deflt_absC) -> + absC (CSwitch scrutinee alt_absCs deflt_absC) - -- CSwitch does sensible things with one or zero alternatives + -- CSwitch does sensible things with one or zero alternatives cgPrimAlt :: GCFlag - -> (Literal, StgExpr) -- The alternative + -> [MagicId] -- live registers + -> (Literal, StgExpr) -- The alternative -> FCode (Literal, AbstractC) -- Its compiled form -cgPrimAlt gc_flag (lit, rhs) +cgPrimAlt gc_flag regs (lit, rhs) = getAbsC rhs_code `thenFC` \ absC -> returnFC (lit,absC) where - rhs_code = possibleHeapCheck gc_flag [] False (cgExpr rhs ) + rhs_code = primAltHeapCheck gc_flag regs (cgExpr rhs) cgPrimDefault :: GCFlag - -> CAddrMode -- Scrutinee + -> [MagicId] -- live registers -> StgCaseDefault -> FCode AbstractC -cgPrimDefault gc_flag scrutinee StgNoDefault +cgPrimDefault gc_flag regs StgNoDefault = panic "cgPrimDefault: No default in prim case" -cgPrimDefault gc_flag scrutinee (StgBindDefault _ False{-binder not used-} rhs) - = getAbsC (possibleHeapCheck gc_flag [] False (cgExpr rhs )) - -cgPrimDefault gc_flag scrutinee (StgBindDefault binder True{-used-} rhs) - = getAbsC (possibleHeapCheck gc_flag regs False rhs_code) - where - regs = if isFollowableRep (getAmodeRep scrutinee) then - [node] else [] - - rhs_code = bindNewPrimToAmode binder scrutinee `thenC` - cgExpr rhs +cgPrimDefault gc_flag regs (StgBindDefault rhs) + = getAbsC (primAltHeapCheck gc_flag regs (cgExpr rhs)) \end{code} @@ -900,19 +724,18 @@ cgPrimDefault gc_flag scrutinee (StgBindDefault binder True{-used-} rhs) \begin{code} saveVolatileVarsAndRegs - :: StgLiveVars -- Vars which should be made safe + :: StgLiveVars -- Vars which should be made safe -> FCode (AbstractC, -- Assignments to do the saves - EndOfBlockInfo, -- New sequel, recording where the return - -- address now is - Maybe VirtualSpBOffset) -- Slot for current cost centre + EndOfBlockInfo, -- sequel for the alts + Maybe VirtualSpOffset) -- Slot for current cost centre saveVolatileVarsAndRegs vars - = saveVolatileVars vars `thenFC` \ var_saves -> - saveCurrentCostCentre `thenFC` \ (maybe_cc_slot, cc_save) -> - saveReturnAddress `thenFC` \ (new_eob_info, ret_save) -> - returnFC (mkAbstractCs 
[var_saves, cc_save, ret_save], - new_eob_info, + = saveVolatileVars vars `thenFC` \ var_saves -> + saveCurrentCostCentre `thenFC` \ (maybe_cc_slot, cc_save) -> + getEndOfBlockInfo `thenFC` \ eob_info -> + returnFC (mkAbstractCs [var_saves, cc_save], + eob_info, maybe_cc_slot) @@ -920,7 +743,7 @@ saveVolatileVars :: StgLiveVars -- Vars which should be made safe -> FCode AbstractC -- Assignments to to the saves saveVolatileVars vars - = save_em (idSetToList vars) + = save_em (varSetElems vars) where save_em [] = returnFC AbsCNop @@ -936,101 +759,47 @@ saveVolatileVars vars returnFC (abs_c `mkAbsCStmts` abs_cs) save_var var vol_amode - | isFollowableRep kind - = allocAStack `thenFC` \ a_slot -> - rebindToAStack var a_slot `thenC` - getSpARelOffset a_slot `thenFC` \ spa_rel -> - returnFC (CAssign (CVal spa_rel kind) vol_amode) - | otherwise - = allocBStack (getPrimRepSize kind) `thenFC` \ b_slot -> - rebindToBStack var b_slot `thenC` - getSpBRelOffset b_slot `thenFC` \ spb_rel -> - returnFC (CAssign (CVal spb_rel kind) vol_amode) + = allocPrimStack (getPrimRepSize kind) `thenFC` \ slot -> + rebindToStack var slot `thenC` + getSpRelOffset slot `thenFC` \ sp_rel -> + returnFC (CAssign (CVal sp_rel kind) vol_amode) where kind = getAmodeRep vol_amode - -saveReturnAddress :: FCode (EndOfBlockInfo, AbstractC) -saveReturnAddress - = getEndOfBlockInfo `thenFC` \ eob_info@(EndOfBlockInfo vA vB sequel) -> - - -- See if it is volatile - case sequel of - InRetReg -> -- Yes, it's volatile - allocBStack retPrimRepSize `thenFC` \ b_slot -> - getSpBRelOffset b_slot `thenFC` \ spb_rel -> - - returnFC (EndOfBlockInfo vA vB (OnStack b_slot), - CAssign (CVal spb_rel RetRep) (CReg RetReg)) - - UpdateCode _ -> -- It's non-volatile all right, but we still need - -- to allocate a B-stack slot for it, *solely* to make - -- sure that update frames for different values do not - -- appear adjacent on the B stack. This makes sure - -- that B-stack squeezing works ok. - -- See note below - allocBStack retPrimRepSize `thenFC` \ b_slot -> - returnFC (eob_info, AbsCNop) - - other -> -- No, it's non-volatile, so do nothing - returnFC (eob_info, AbsCNop) \end{code} -Note about B-stack squeezing. Consider the following:` - - y = [...] \u [] -> ... - x = [y] \u [] -> case y of (a,b) -> a - -The code for x will push an update frame, and then enter y. The code -for y will push another update frame. If the B-stack-squeezer then -wakes up, it will see two update frames right on top of each other, -and will combine them. This is WRONG, of course, because x's value is -not the same as y's. - -The fix implemented above makes sure that we allocate an (unused) -B-stack slot before entering y. You can think of this as holding the -saved value of RetAddr, which (after pushing x's update frame will be -some update code ptr). The compiler is clever enough to load the -static update code ptr into RetAddr before entering ~a~, but the slot -is still there to separate the update frames. +--------------------------------------------------------------------------- When we save the current cost centre (which is done for lexical -scoping), we allocate a free B-stack location, and return (a)~the +scoping), we allocate a free stack location, and return (a)~the virtual offset of the location, to pass on to the alternatives, and (b)~the assignment to do the save (just as for @saveVolatileVars@). 
\begin{code} saveCurrentCostCentre :: - FCode (Maybe VirtualSpBOffset, -- Where we decide to store it - -- Nothing if not lexical CCs + FCode (Maybe VirtualSpOffset, -- Where we decide to store it AbstractC) -- Assignment to save it - -- AbsCNop if not lexical CCs saveCurrentCostCentre - = let - doing_profiling = opt_SccProfilingOn - in - if not doing_profiling then + = if not opt_SccProfilingOn then returnFC (Nothing, AbsCNop) else - allocBStack (getPrimRepSize CostCentreRep) `thenFC` \ b_slot -> - getSpBRelOffset b_slot `thenFC` \ spb_rel -> - returnFC (Just b_slot, - CAssign (CVal spb_rel CostCentreRep) (CReg CurCostCentre)) - -restoreCurrentCostCentre :: Maybe VirtualSpBOffset -> FCode AbstractC - -restoreCurrentCostCentre Nothing - = returnFC AbsCNop -restoreCurrentCostCentre (Just b_slot) - = getSpBRelOffset b_slot `thenFC` \ spb_rel -> - freeBStkSlot b_slot `thenC` - returnFC (CCallProfCCMacro SLIT("RESTORE_CCC") [CVal spb_rel CostCentreRep]) - -- we use the RESTORE_CCC macro, rather than just - -- assigning into CurCostCentre, in case RESTORE_CCC + allocPrimStack (getPrimRepSize CostCentreRep) `thenFC` \ slot -> + dataStackSlots [slot] `thenC` + getSpRelOffset slot `thenFC` \ sp_rel -> + returnFC (Just slot, + CAssign (CVal sp_rel CostCentreRep) (CReg CurCostCentre)) + +restoreCurrentCostCentre :: Maybe VirtualSpOffset -> FCode AbstractC +restoreCurrentCostCentre Nothing = returnFC AbsCNop +restoreCurrentCostCentre (Just slot) + = getSpRelOffset slot `thenFC` \ sp_rel -> + freeStackSlots [slot] `thenC` + returnFC (CCallProfCCMacro FSLIT("RESTORE_CCCS") [CVal sp_rel CostCentreRep]) + -- we use the RESTORE_CCCS macro, rather than just + -- assigning into CurCostCentre, in case RESTORE_CCCS -- has some sanity-checking in it. \end{code} - %************************************************************************ %* * \subsection[CgCase-return-vec]{Building a return vector} @@ -1041,58 +810,70 @@ Build a return vector, and return a suitable label addressing mode for it. \begin{code} -mkReturnVector :: Unique - -> Type +mkReturnVector :: Name -> [(ConTag, AbstractC)] -- Branch codes -> AbstractC -- Default case + -> SRT -- continuation's SRT + -> Liveness -- stack liveness + -> CtrlReturnConvention -> FCode CAddrMode -mkReturnVector uniq ty tagged_alt_absCs deflt_absC - = let - (return_vec_amode, vtbl_body) = case (ctrlReturnConvAlg tycon) of { - - UnvectoredReturn _ -> - (CUnVecLbl ret_label vtbl_label, - absC (CRetUnVector vtbl_label - (CLabelledCode ret_label - (mkAlgAltsCSwitch (CReg TagReg) - tagged_alt_absCs - deflt_absC)))); +mkReturnVector name tagged_alt_absCs deflt_absC srt liveness ret_conv + = getSRTInfo name srt `thenFC` \ srt_info -> + let + (return_vec_amode, vtbl_body) = case ret_conv of { + + -- might be a polymorphic case... + UnvectoredReturn 0 -> + ASSERT(null tagged_alt_absCs) + (CLbl ret_label RetRep, + absC (CRetDirect uniq deflt_absC srt_info liveness)); + + UnvectoredReturn n -> + -- find the tag explicitly rather than using tag_reg for now. + -- on architectures with lots of regs the tag will be loaded + -- into tag_reg by the code doing the returning. + let + tag = CMacroExpr WordRep GET_TAG [CVal (nodeRel 0) DataPtrRep] + in + (CLbl ret_label RetRep, + absC (CRetDirect uniq + (mkAlgAltsCSwitch tag tagged_alt_absCs deflt_absC) + srt_info liveness)); + VectoredReturn table_size -> - (CLbl vtbl_label DataPtrRep, - absC (CRetVector vtbl_label - -- must restore cc before each alt, if required - (map mk_vector_entry [fIRST_TAG .. 
(table_size+fIRST_TAG-1)]) - deflt_absC)) - --- Leave nops and comments in for now; they are eliminated --- lazily as it's printed. --- (case (nonemptyAbsC deflt_absC) of --- Nothing -> AbsCNop --- Just def -> def) + let + (vector_table, alts_absC) = + unzip (map mk_vector_entry [fIRST_TAG .. (table_size+fIRST_TAG-1)]) + + ret_vector = CRetVector vtbl_label vector_table srt_info liveness + in + (CLbl vtbl_label DataPtrRep, + -- alts come first, because we don't want to declare all the symbols + absC (mkAbstractCs (mkAbstractCs alts_absC : [deflt_absC,ret_vector])) + ) } in vtbl_body `thenC` returnFC return_vec_amode -- ) where - - (tycon,_,_) = case splitAlgTyConApp_maybe ty of -- *must* be a real "data" type constructor - Just xx -> xx - Nothing -> pprPanic "ERROR: can't generate code for polymorphic case" - (vcat [text "probably a mis-use of `seq' or `par';", - text "the User's Guide has more details.", - text "Offending type:" <+> ppr ty - ]) + uniq = getUnique name vtbl_label = mkVecTblLabel uniq - ret_label = mkReturnPtLabel uniq + ret_label = mkReturnInfoLabel uniq + + deflt_lbl = + case nonemptyAbsC deflt_absC of + -- the simplifier might have eliminated a case + Nothing -> mkIntCLit 0 -- CLbl mkErrorStdEntryLabel CodePtrRep + Just absC@(CCodeBlock lbl _) -> CLbl lbl CodePtrRep - mk_vector_entry :: ConTag -> Maybe CAddrMode + mk_vector_entry :: ConTag -> (CAddrMode, AbstractC) mk_vector_entry tag = case [ absC | (t, absC) <- tagged_alt_absCs, t == tag ] of - [] -> Nothing - [absC] -> Just (CCode absC) + [] -> (deflt_lbl, AbsCNop) + [absC@(CCodeBlock lbl _)] -> (CLbl lbl CodePtrRep,absC) _ -> panic "mkReturnVector: too many" \end{code} @@ -1102,20 +883,40 @@ mkReturnVector uniq ty tagged_alt_absCs deflt_absC %* * %************************************************************************ -@possibleHeapCheck@ tests a flag passed in to decide whether to -do a heap check or not. +'possibleHeapCheck' tests a flag passed in to decide whether to do a +heap check or not. These heap checks are always in a case +alternative, so we use altHeapCheck. \begin{code} -possibleHeapCheck :: GCFlag -> [MagicId] -> Bool -> Code -> Code +algAltHeapCheck + :: GCFlag + -> Bool -- polymorphic case + -> [MagicId] -- live registers + -> Code -- continuation + -> Code -possibleHeapCheck GCMayHappen regs node_reqd code = heapCheck regs node_reqd code -possibleHeapCheck NoGC _ _ code = code -\end{code} +algAltHeapCheck GCMayHappen is_poly regs code = altHeapCheck is_poly regs code +algAltHeapCheck NoGC _ _ code = code -Select a restricted set of registers based on a usage mask. +primAltHeapCheck + :: GCFlag + -> [MagicId] -- live registers + -> Code -- continuation + -> Code -\begin{code} -selectByMask [] [] = [] -selectByMask (True:ms) (x:xs) = x : selectByMask ms xs -selectByMask (False:ms) (x:xs) = selectByMask ms xs +primAltHeapCheck GCMayHappen regs code = altHeapCheck True regs code +primAltHeapCheck NoGC _ code = code + +possibleUnbxTupleHeapCheck + :: GCFlag + -> [MagicId] -- live registers + -> Int -- no. of stack slots containing ptrs + -> Int -- no. of stack slots containing nonptrs + -> Code -- continuation + -> Code + +possibleUnbxTupleHeapCheck GCMayHappen regs ptrs nptrs code + = unbxTupleHeapCheck regs ptrs nptrs AbsCNop code +possibleUnbxTupleHeapCheck NoGC _ _ _ code + = code \end{code}
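
For comparison, a minimal hypothetical source-level sketch of the
unboxed-tuple cases that @cgUnboxedTupleAlt@ and
@possibleUnbxTupleHeapCheck@ above are concerned with; the module and
function names are invented, only the GHC.Exts primops are real:

\begin{verbatim}
{-# LANGUAGE MagicHash, UnboxedTuples #-}
module UnboxedScrut where       -- hypothetical example module

import GHC.Exts (Int#, quotInt#, remInt#, (+#))

divMod# :: Int# -> Int# -> (# Int#, Int# #)
divMod# n# d# = (# n# `quotInt#` d#, n# `remInt#` d# #)

-- A case over an unboxed tuple has exactly one alternative and no
-- default (the ASSERT in cgEvalAlts checks this).  The components are
-- bound -- some in registers, the rest on the stack -- and the
-- right-hand side is compiled straight after a possible heap check.
addQuotRem :: Int# -> Int# -> Int#
addQuotRem n# d# = case divMod# n# d# of
                     (# q#, r# #) -> q# +# r#
\end{verbatim}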