%
-% (c) The University of Glasgow 2002
+% (c) The University of Glasgow 2002-2006
%
-\section[ByteCodeGen]{Generate bytecode from Core}
+
+ByteCodeGen: Generate bytecode from Core
\begin{code}
module ByteCodeGen ( UnlinkedBCO, byteCodeGen, coreExprToBCOs ) where
#include "HsVersions.h"
import ByteCodeInstr
-import ByteCodeFFI ( mkMarshalCode, moan64 )
-import ByteCodeAsm ( CompiledByteCode(..), UnlinkedBCO,
- assembleBCO, assembleBCOs, iNTERP_STACK_CHECK_THRESH )
-import ByteCodeLink ( lookupStaticPtr )
+import ByteCodeItbls
+import ByteCodeAsm
+import ByteCodeLink
+import LibFFI
import Outputable
-import Name ( Name, getName, mkSystemVarName )
+import Name
+import MkId
import Id
import FiniteMap
-import ForeignCall ( ForeignCall(..), CCallTarget(..), CCallSpec(..) )
-import HscTypes ( TypeEnv, typeEnvTyCons, typeEnvClasses )
-import CoreUtils ( exprType )
+import ForeignCall
+import HscTypes
+import CoreUtils
import CoreSyn
-import PprCore ( pprCoreExpr )
-import Literal ( Literal(..), literalType )
-import PrimOp ( PrimOp(..) )
-import CoreFVs ( freeVars )
-import Type ( isUnLiftedType, splitTyConApp_maybe )
-import DataCon ( DataCon, dataConTag, fIRST_TAG, dataConTyCon,
- isUnboxedTupleCon, isNullaryRepDataCon, dataConWorkId,
- dataConRepArity )
-import TyCon ( TyCon, tyConFamilySize, isDataTyCon,
- tyConDataCons, isUnboxedTupleTyCon )
-import Class ( Class, classTyCon )
-import Type ( Type, repType, splitFunTys, dropForAlls, pprType )
+import PprCore
+import Literal
+import PrimOp
+import CoreFVs
+import Type
+import DataCon
+import TyCon
+-- import Type
import Util
-import DataCon ( dataConRepArity )
-import Var ( isTyVar )
-import VarSet ( VarSet, varSetElems )
-import TysPrim ( arrayPrimTyCon, mutableArrayPrimTyCon,
- byteArrayPrimTyCon, mutableByteArrayPrimTyCon
- )
-import DynFlags ( DynFlags, DynFlag(..) )
-import ErrUtils ( showPass, dumpIfSet_dyn )
-import Unique ( mkPseudoUniqueE )
-import FastString ( FastString(..), unpackFS )
-import Panic ( GhcException(..) )
-import SMRep ( typeCgRep, arrWordsHdrSize, arrPtrsHdrSize, StgWord,
- CgRep(..), cgRepSizeW, isFollowableArg, idCgRep )
-import Bitmap ( intsToReverseBitmap, mkBitmap )
+-- import DataCon
+import Var
+import VarSet
+import TysPrim
+import DynFlags
+import ErrUtils
+import Unique
+import FastString
+import Panic
+import SMRep
+import Bitmap
import OrdList
-import Constants ( wORD_SIZE )
+import Constants
-import Data.List ( intersperse, sortBy, zip4, zip6, partition )
-import Foreign ( Ptr, castPtr, mallocBytes, pokeByteOff, Word8,
- withForeignPtr )
-import Foreign.C ( CInt )
-import Control.Exception ( throwDyn )
+import Data.List
+import Foreign
+import Foreign.C
-import GHC.Exts ( Int(..), ByteArray# )
+-- import GHC.Exts ( Int(..) )
import Control.Monad ( when )
-import Data.Char ( ord, chr )
+import Data.Char
+
+import UniqSupply
+import BreakArray
+import Data.Maybe
+import Module
+import IdInfo
-- -----------------------------------------------------------------------------
-- Generating byte code for a complete module
byteCodeGen :: DynFlags
-> [CoreBind]
-> [TyCon]
+ -> ModBreaks
-> IO CompiledByteCode
-byteCodeGen dflags binds tycs
+byteCodeGen dflags binds tycs modBreaks
= do showPass dflags "ByteCodeGen"
let flatBinds = [ (bndr, freeVars rhs)
| (bndr, rhs) <- flattenBinds binds]
- (BcM_State final_ctr mallocd, proto_bcos)
- <- runBc (mapM schemeTopBind flatBinds)
+ us <- mkSplitUniqSupply 'y'
+ (BcM_State _us _final_ctr mallocd _, proto_bcos)
+ <- runBc us modBreaks (mapM schemeTopBind flatBinds)
when (notNull mallocd)
(panic "ByteCodeGen.byteCodeGen: missing final emitBc?")
-- create a totally bogus name for the top-level BCO; this
-- should be harmless, since it's never used for anything
- let invented_name = mkSystemVarName (mkPseudoUniqueE 0) FSLIT("ExprTopLevel")
- invented_id = mkLocalId invented_name (panic "invented_id's type")
+ let invented_name = mkSystemVarName (mkPseudoUniqueE 0) (fsLit "ExprTopLevel")
+ invented_id = Id.mkLocalId invented_name (panic "invented_id's type")
- (BcM_State final_ctr mallocd, proto_bco)
- <- runBc (schemeTopBind (invented_id, freeVars expr))
+ -- the uniques are needed to generate fresh variables when we introduce new
+ -- let bindings for ticked expressions
+ us <- mkSplitUniqSupply 'y'
+ (BcM_State _us _final_ctr mallocd _ , proto_bco)
+ <- runBc us emptyModBreaks (schemeTopBind (invented_id, freeVars expr))
when (notNull mallocd)
(panic "ByteCodeGen.coreExprToBCOs: missing final emitBc?")
-- to mess with it after each push/pop.
type BCEnv = FiniteMap Id Int -- To find vars on the stack
+{-
ppBCEnv :: BCEnv -> SDoc
ppBCEnv p
= text "begin-env"
where
pp_one (var, offset) = int offset <> colon <+> ppr var <+> ppr (idCgRep var)
cmp_snd x y = compare (snd x) (snd y)
+-}
-- Create a BCO and do a spot of peephole optimisation on the insns
-- at the same time.
-> Int
-> [StgWord]
-> Bool -- True <=> is a return point, rather than a function
- -> [Ptr ()]
+ -> [BcPtr]
-> ProtoBCO name
-mkProtoBCO nm instrs_ordlist origin arity bitmap_size bitmap
- is_ret mallocd_blocks
+mkProtoBCO nm instrs_ordlist origin arity bitmap_size bitmap is_ret mallocd_blocks
= ProtoBCO {
protoBCOName = nm,
protoBCOInstrs = maybe_with_stack_check,
-- and if >= iNTERP_STACK_CHECK_THRESH, add an explicit
-- stack check. (The interpreter always does a stack check
-- for iNTERP_STACK_CHECK_THRESH words at the start of each
- -- BCO anyway, so we only need to add an explicit on in the
+ -- BCO anyway, so we only need to add an explicit one in the
-- (hopefully rare) cases when the (overestimated) stack use
-- exceeds iNTERP_STACK_CHECK_THRESH.
maybe_with_stack_check
- | is_ret = peep_d
- -- don't do stack checks at return points;
+ | is_ret && stack_usage < aP_STACK_SPLIM = peep_d
+ -- don't do stack checks at return points,
-- everything is aggregated up to the top BCO
- -- (which must be a function)
- | stack_overest >= 65535
- = pprPanic "mkProtoBCO: stack use won't fit in 16 bits"
- (int stack_overest)
- | stack_overest >= iNTERP_STACK_CHECK_THRESH
- = STKCHECK stack_overest : peep_d
+ -- (which must be a function).
+ -- That is, unless the stack usage is >= AP_STACK_SPLIM,
+ -- see bug #1466.
+ | stack_usage >= iNTERP_STACK_CHECK_THRESH
+ = STKCHECK stack_usage : peep_d
| otherwise
= peep_d -- the supposedly common case
- stack_overest = sum (map bciStackUse peep_d)
+ -- We assume that this sum doesn't wrap
+ stack_usage = sum (map bciStackUse peep_d)
-- Merge local pushes
peep_d = peep (fromOL instrs_ordlist)
schemeTopBind :: (Id, AnnExpr Id VarSet) -> BcM (ProtoBCO Name)
-schemeTopBind (id, rhs)
+schemeTopBind (id, rhs)
| Just data_con <- isDataConWorkId_maybe id,
- isNullaryRepDataCon data_con
- = -- Special case for the worker of a nullary data con.
+ isNullaryRepDataCon data_con = do
+ -- Special case for the worker of a nullary data con.
-- It'll look like this: Nil = /\a -> Nil a
-- If we feed it into schemeR, we'll get
-- Nil = Nil
-- because mkConAppCode treats nullary constructor applications
-- by just re-using the single top-level definition. So
-- for the worker itself, we must allocate it directly.
+ -- ioToBc (putStrLn $ "top level BCO")
emitBc (mkProtoBCO (getName id) (toOL [PACK data_con 0, ENTER])
- (Right rhs) 0 0 [{-no bitmap-}] False{-not alts-})
+ (Right rhs) 0 0 [{-no bitmap-}] False{-not alts-})
| otherwise
= schemeR [{- No free variables -}] (id, rhs)
+
-- -----------------------------------------------------------------------------
-- schemeR
-- top-level things, which have no free vars.
-> (Id, AnnExpr Id VarSet)
-> BcM (ProtoBCO Name)
-schemeR fvs (nm, rhs)
+schemeR fvs (nm, rhs)
{-
| trace (showSDoc (
(char ' '
= undefined
| otherwise
-}
- = schemeR_wrk fvs nm rhs (collect [] rhs)
+ = schemeR_wrk fvs nm rhs (collect rhs)
-collect xs (_, AnnNote note e) = collect xs e
-collect xs (_, AnnLam x e) = collect (if isTyVar x then xs else (x:xs)) e
-collect xs (_, not_lambda) = (reverse xs, not_lambda)
+collect :: AnnExpr Id VarSet -> ([Var], AnnExpr' Id VarSet)
+collect (_, e) = go [] e
+ where
+ go xs e | Just e' <- bcView e = go xs e'
+ go xs (AnnLam x (_,e)) = go (x:xs) e
+ go xs not_lambda = (reverse xs, not_lambda)
+schemeR_wrk :: [Id] -> Id -> AnnExpr Id VarSet -> ([Var], AnnExpr' Var VarSet) -> BcM (ProtoBCO Name)
schemeR_wrk fvs nm original_body (args, body)
= let
all_args = reverse args ++ fvs
bits = argBits (reverse (map idCgRep all_args))
bitmap_size = length bits
bitmap = mkBitmap bits
- in
- schemeE szw_args 0 p_init body `thenBc` \ body_code ->
+ in do
+ body_code <- schemeER_wrk szw_args p_init body
+
emitBc (mkProtoBCO (getName nm) body_code (Right original_body)
arity bitmap_size bitmap False{-not alts-})
+-- introduce break instructions for ticked expressions
+schemeER_wrk :: Int -> BCEnv -> AnnExpr' Id VarSet -> BcM BCInstrList
+schemeER_wrk d p rhs
+ | Just (tickInfo, (_annot, newRhs)) <- isTickedExp' rhs = do
+ code <- schemeE d 0 p newRhs
+ arr <- getBreakArray
+ let idOffSets = getVarOffSets d p tickInfo
+ let tickNumber = tickInfo_number tickInfo
+ let breakInfo = BreakInfo
+ { breakInfo_module = tickInfo_module tickInfo
+ , breakInfo_number = tickNumber
+ , breakInfo_vars = idOffSets
+ , breakInfo_resty = exprType (deAnnotate' newRhs)
+ }
+ let breakInstr = case arr of (BA arr#) -> BRK_FUN arr# tickNumber breakInfo
+ return $ breakInstr `consOL` code
+ | otherwise = schemeE d 0 p rhs
+
+getVarOffSets :: Int -> BCEnv -> TickInfo -> [(Id, Int)]
+getVarOffSets d p = catMaybes . map (getOffSet d p) . tickInfo_locals
+
+getOffSet :: Int -> BCEnv -> Id -> Maybe (Id, Int)
+getOffSet d env id
+ = case lookupBCEnv_maybe env id of
+ Nothing -> Nothing
+ Just offset -> Just (id, d - offset)
fvsToEnv :: BCEnv -> VarSet -> [Id]
-- Takes the free variables of a right-hand side, and
-- -----------------------------------------------------------------------------
-- schemeE
+data TickInfo
+ = TickInfo
+ { tickInfo_number :: Int -- the (module) unique number of the tick
+ , tickInfo_module :: Module -- the origin of the ticked expression
+ , tickInfo_locals :: [Id] -- the local vars in scope at the ticked expression
+ }
+
+instance Outputable TickInfo where
+ ppr info = text "TickInfo" <+>
+ parens (int (tickInfo_number info) <+> ppr (tickInfo_module info) <+>
+ ppr (tickInfo_locals info))
+
-- Compile code to apply the given expression to the remaining args
-- on the stack, returning a HNF.
schemeE :: Int -> Sequel -> BCEnv -> AnnExpr' Id VarSet -> BcM BCInstrList
+schemeE d s p e
+ | Just e' <- bcView e
+ = schemeE d s p e'
+
-- Delegate tail-calls to schemeT.
-schemeE d s p e@(AnnApp f a)
+schemeE d s p e@(AnnApp _ _)
= schemeT d s p e
schemeE d s p e@(AnnVar v)
schemeT d s p e
| otherwise
- = -- Returning an unlifted value.
- -- Heave it on the stack, SLIDE, and RETURN.
- pushAtom d p (AnnVar v) `thenBc` \ (push, szw) ->
- returnBc (push -- value onto stack
- `appOL` mkSLIDE szw (d-s) -- clear to sequel
- `snocOL` RETURN_UBX v_rep) -- go
+ = do -- Returning an unlifted value.
+ -- Heave it on the stack, SLIDE, and RETURN.
+ (push, szw) <- pushAtom d p (AnnVar v)
+ return (push -- value onto stack
+ `appOL` mkSLIDE szw (d-s) -- clear to sequel
+ `snocOL` RETURN_UBX v_rep) -- go
where
v_type = idType v
v_rep = typeCgRep v_type
schemeE d s p (AnnLit literal)
- = pushAtom d p (AnnLit literal) `thenBc` \ (push, szw) ->
- let l_rep = typeCgRep (literalType literal)
- in returnBc (push -- value onto stack
- `appOL` mkSLIDE szw (d-s) -- clear to sequel
- `snocOL` RETURN_UBX l_rep) -- go
-
+ = do (push, szw) <- pushAtom d p (AnnLit literal)
+ let l_rep = typeCgRep (literalType literal)
+ return (push -- value onto stack
+ `appOL` mkSLIDE szw (d-s) -- clear to sequel
+ `snocOL` RETURN_UBX l_rep) -- go
schemeE d s p (AnnLet (AnnNonRec x (_,rhs)) (_,body))
| (AnnVar v, args_r_to_l) <- splitApp rhs,
Just data_con <- isDataConWorkId_maybe v,
dataConRepArity data_con == length args_r_to_l
- = -- Special case for a non-recursive let whose RHS is a
+ = do -- Special case for a non-recursive let whose RHS is a
-- saturatred constructor application.
-- Just allocate the constructor and carry on
- mkConAppCode d s p data_con args_r_to_l `thenBc` \ alloc_code ->
- schemeE (d+1) s (addToFM p x d) body `thenBc` \ body_code ->
- returnBc (alloc_code `appOL` body_code)
+ alloc_code <- mkConAppCode d s p data_con args_r_to_l
+ body_code <- schemeE (d+1) s (addToFM p x d) body
+ return (alloc_code `appOL` body_code)
-- General case for let. Generates correct, if inefficient, code in
-- all situations.
sizes = map (\rhs_fvs -> sum (map idSizeW rhs_fvs)) fvss
-- the arity of each rhs
- arities = map (length . fst . collect []) rhss
+ arities = map (length . fst . collect) rhss
-- This p', d' defn is safe because all the items being pushed
-- are ptrs, so all have size 1. d' and p' reflect the stack
zipE = zipEqual "schemeE"
-- ToDo: don't build thunks for things with no free variables
- build_thunk dd [] size bco off arity
- = returnBc (PUSH_BCO bco `consOL` unitOL (mkap (off+size) size))
+ build_thunk _ [] size bco off arity
+ = return (PUSH_BCO bco `consOL` unitOL (mkap (off+size) size))
where
mkap | arity == 0 = MKAP
| otherwise = MKPAP
build_thunk dd (fv:fvs) size bco off arity = do
(push_code, pushed_szw) <- pushAtom dd p' (AnnVar fv)
more_push_code <- build_thunk (dd+pushed_szw) fvs size bco off arity
- returnBc (push_code `appOL` more_push_code)
+ return (push_code `appOL` more_push_code)
alloc_code = toOL (zipWith mkAlloc sizes arities)
- where mkAlloc sz 0 = ALLOC_AP sz
+ where mkAlloc sz 0
+ | is_tick = ALLOC_AP_NOUPD sz
+ | otherwise = ALLOC_AP sz
mkAlloc sz arity = ALLOC_PAP arity sz
+ is_tick = case binds of
+ AnnNonRec id _ -> occNameFS (getOccName id) == tickFS
+ _other -> False
+
compile_bind d' fvs x rhs size arity off = do
bco <- schemeR fvs (x,rhs)
build_thunk d' fvs size bco off arity
in do
body_code <- schemeE d' s p' body
thunk_codes <- sequence compile_binds
- returnBc (alloc_code `appOL` concatOL thunk_codes `appOL` body_code)
-
-
-
-schemeE d s p (AnnCase scrut bndr _ [(DataAlt dc, [bind1, bind2], rhs)])
+ return (alloc_code `appOL` concatOL thunk_codes `appOL` body_code)
+
+-- introduce a let binding for a ticked case expression. This rule
+-- *should* only fire when the expression was not already let-bound
+-- (the code gen for let bindings should take care of that). Todo: we
+-- call exprFreeVars on a deAnnotated expression, this may not be the
+-- best way to calculate the free vars but it seemed like the least
+-- intrusive thing to do
+schemeE d s p exp@(AnnCase {})
+ | Just (_tickInfo, _rhs) <- isTickedExp' exp
+ = if isUnLiftedType ty
+ then do
+ -- If the result type is unlifted, then we must generate
+ -- let f = \s . case tick# of _ -> e
+ -- in f realWorld#
+ -- When we stop at the breakpoint, _result will have an unlifted
+ -- type and hence won't be bound in the environment, but the
+ -- breakpoint will otherwise work fine.
+ id <- newId (mkFunTy realWorldStatePrimTy ty)
+ st <- newId realWorldStatePrimTy
+ let letExp = AnnLet (AnnNonRec id (fvs, AnnLam st (emptyVarSet, exp)))
+ (emptyVarSet, (AnnApp (emptyVarSet, AnnVar id)
+ (emptyVarSet, AnnVar realWorldPrimId)))
+ schemeE d s p letExp
+ else do
+ id <- newId ty
+ -- Todo: is emptyVarSet correct on the next line?
+ let letExp = AnnLet (AnnNonRec id (fvs, exp)) (emptyVarSet, AnnVar id)
+ schemeE d s p letExp
+ where exp' = deAnnotate' exp
+ fvs = exprFreeVars exp'
+ ty = exprType exp'
+
+schemeE d s p (AnnCase scrut _ _ [(DataAlt dc, [bind1, bind2], rhs)])
| isUnboxedTupleCon dc, VoidArg <- typeCgRep (idType bind1)
-- Convert
-- case .... of x { (# VoidArg'd-thing, a #) -> ... }
-- envt (it won't be bound now) because we never look such things up.
= --trace "automagic mashing of case alts (# VoidArg, a #)" $
- doCase d s p scrut bind2 [(DEFAULT, [], rhs)] True{-unboxed tuple-}
+ doCase d s p scrut bind2 [(DEFAULT, [], rhs)] True{-unboxed tuple-}
| isUnboxedTupleCon dc, VoidArg <- typeCgRep (idType bind2)
= --trace "automagic mashing of case alts (# a, VoidArg #)" $
- doCase d s p scrut bind1 [(DEFAULT, [], rhs)] True{-unboxed tuple-}
+ doCase d s p scrut bind1 [(DEFAULT, [], rhs)] True{-unboxed tuple-}
-schemeE d s p (AnnCase scrut bndr _ [(DataAlt dc, [bind1], rhs)])
+schemeE d s p (AnnCase scrut _ _ [(DataAlt dc, [bind1], rhs)])
| isUnboxedTupleCon dc
-- Similarly, convert
-- case .... of x { (# a #) -> ... }
-- to
-- case .... of a { DEFAULT -> ... }
= --trace "automagic mashing of case alts (# a #)" $
- doCase d s p scrut bind1 [(DEFAULT, [], rhs)] True{-unboxed tuple-}
+ doCase d s p scrut bind1 [(DEFAULT, [], rhs)] True{-unboxed tuple-}
schemeE d s p (AnnCase scrut bndr _ alts)
- = doCase d s p scrut bndr alts False{-not an unboxed tuple-}
+ = doCase d s p scrut bndr alts False{-not an unboxed tuple-}
+
+schemeE _ _ _ expr
+ = pprPanic "ByteCodeGen.schemeE: unhandled case"
+ (pprCoreExpr (deAnnotate' expr))
-schemeE d s p (AnnNote note (_, body))
- = schemeE d s p body
+{-
+ Ticked Expressions
+ ------------------
+
+ A ticked expression looks like this:
-schemeE d s p (AnnCast (_, body) _)
- = schemeE d s p body
+ case tick<n> var1 ... varN of DEFAULT -> e
-schemeE d s p other
- = pprPanic "ByteCodeGen.schemeE: unhandled case"
- (pprCoreExpr (deAnnotate' other))
+ (*) <n> is the number of the tick, which is unique within a module
+ (*) var1 ... varN are the local variables in scope at the tick site
+
+ If we find a ticked expression we return:
+
+ Just ((n, [var1 ... varN]), e)
+ otherwise we return Nothing.
+
+ The idea is that the "case tick<n> ..." is really just an annotation on
+ the code. When we find such a thing, we pull out the useful information,
+ and then compile the code as if it was just the expression "e".
+
+-}
+
+isTickedExp' :: AnnExpr' Id a -> Maybe (TickInfo, AnnExpr Id a)
+isTickedExp' (AnnCase scrut _bndr _type alts)
+ | Just tickInfo <- isTickedScrut scrut,
+ [(DEFAULT, _bndr, rhs)] <- alts
+ = Just (tickInfo, rhs)
+ where
+ isTickedScrut :: (AnnExpr Id a) -> Maybe TickInfo
+ isTickedScrut expr
+ | Var id <- f,
+ Just (TickBox modName tickNumber) <- isTickBoxOp_maybe id
+ = Just $ TickInfo { tickInfo_number = tickNumber
+ , tickInfo_module = modName
+ , tickInfo_locals = idsOfArgs args
+ }
+ | otherwise = Nothing
+ where
+ (f, args) = collectArgs $ deAnnotate expr
+ idsOfArgs :: [Expr Id] -> [Id]
+ idsOfArgs = catMaybes . map exprId
+ exprId :: Expr Id -> Maybe Id
+ exprId (Var id) = Just id
+ exprId _ = Nothing
+
+isTickedExp' _ = Nothing
-- Compile code to do a tail call. Specifically, push the fn,
-- slide the on-stack app back down to the sequel depth,
-- Case 0
| Just (arg, constr_names) <- maybe_is_tagToEnum_call
- = pushAtom d p arg `thenBc` \ (push, arg_words) ->
- implement_tagToId constr_names `thenBc` \ tagToId_sequence ->
- returnBc (push `appOL` tagToId_sequence
- `appOL` mkSLIDE 1 (d+arg_words-s)
- `snocOL` ENTER)
+ = do (push, arg_words) <- pushAtom d p arg
+ tagToId_sequence <- implement_tagToId constr_names
+ return (push `appOL` tagToId_sequence
+ `appOL` mkSLIDE 1 (d+arg_words-s)
+ `snocOL` ENTER)
-- Case 1
| Just (CCall ccall_spec) <- isFCallId_maybe fn
-- Case 3: Ordinary data constructor
| Just con <- maybe_saturated_dcon
- = mkConAppCode d s p con args_r_to_l `thenBc` \ alloc_con ->
- returnBc (alloc_con `appOL`
- mkSLIDE 1 (d - s) `snocOL`
- ENTER)
+ = do alloc_con <- mkConAppCode d s p con args_r_to_l
+ return (alloc_con `appOL`
+ mkSLIDE 1 (d - s) `snocOL`
+ ENTER)
-- Case 4: Tail call of function
| otherwise
(AnnApp (_, AnnApp (_, AnnVar v) (_, AnnType t)) arg)
-> case isPrimOpId_maybe v of
Just TagToEnumOp -> Just (snd arg, extract_constr_Names t)
- other -> Nothing
- other -> Nothing
+ _ -> Nothing
+ _ -> Nothing
-- Extract the args (R->L) and fn
-- The function will necessarily be a variable,
-> [AnnExpr' Id VarSet] -- Args, in *reverse* order
-> BcM BCInstrList
-mkConAppCode orig_d s p con [] -- Nullary constructor
+mkConAppCode _ _ _ con [] -- Nullary constructor
= ASSERT( isNullaryRepDataCon con )
- returnBc (unitOL (PUSH_G (getName (dataConWorkId con))))
+ return (unitOL (PUSH_G (getName (dataConWorkId con))))
-- Instead of doing a PACK, which would allocate a fresh
-- copy of this constructor, use the single shared version.
-mkConAppCode orig_d s p con args_r_to_l
+mkConAppCode orig_d _ p con args_r_to_l
= ASSERT( dataConRepArity con == length args_r_to_l )
do_pushery orig_d (non_ptr_args ++ ptr_args)
where
(ptr_args, non_ptr_args) = partition isPtrAtom args_r_to_l
do_pushery d (arg:args)
- = pushAtom d p arg `thenBc` \ (push, arg_words) ->
- do_pushery (d+arg_words) args `thenBc` \ more_push_code ->
- returnBc (push `appOL` more_push_code)
+ = do (push, arg_words) <- pushAtom d p arg
+ more_push_code <- do_pushery (d+arg_words) args
+ return (push `appOL` more_push_code)
do_pushery d []
- = returnBc (unitOL (PACK con n_arg_words))
+ = return (unitOL (PACK con n_arg_words))
where
n_arg_words = d - orig_d
-> AnnExpr' Id VarSet -> BcM BCInstrList
unboxedTupleReturn d s p arg = do
(push, sz) <- pushAtom d p arg
- returnBc (push `appOL`
+ return (push `appOL`
mkSLIDE sz (d-s) `snocOL`
RETURN_UBX (atomRep arg))
ASSERT( null reps ) return ()
(push_fn, sz) <- pushAtom d p (AnnVar fn)
ASSERT( sz == 1 ) return ()
- returnBc (push_fn `appOL` (
+ return (push_fn `appOL` (
mkSLIDE ((d-init_d) + 1) (init_d - s) `appOL`
unitOL ENTER))
do_pushes d args reps = do
(next_d, push_code) <- push_seq d these_args
instrs <- do_pushes (next_d + 1) rest_of_args rest_of_reps
-- ^^^ for the PUSH_APPLY_ instruction
- returnBc (push_code `appOL` (push_apply `consOL` instrs))
+ return (push_code `appOL` (push_apply `consOL` instrs))
push_seq d [] = return (d, nilOL)
push_seq d (arg:args) = do
return (final_d, push_code `appOL` more_push_code)
-- v. similar to CgStackery.findMatch, ToDo: merge
+findPushSeq :: [CgRep] -> (BCInstr, Int, [CgRep])
findPushSeq (PtrArg: PtrArg: PtrArg: PtrArg: PtrArg: PtrArg: rest)
= (PUSH_APPLY_PPPPPP, 6, rest)
findPushSeq (PtrArg: PtrArg: PtrArg: PtrArg: PtrArg: rest)
-> AnnExpr Id VarSet -> Id -> [AnnAlt Id VarSet]
-> Bool -- True <=> is an unboxed tuple case, don't enter the result
-> BcM BCInstrList
-doCase d s p (_,scrut)
- bndr alts is_unboxed_tuple
+doCase d s p (_,scrut) bndr alts is_unboxed_tuple
= let
-- Top of stack is the return itbl, as usual.
-- underneath it is the pointer to the alt_code BCO.
isAlgCase = not (isUnLiftedType bndr_ty) && not is_unboxed_tuple
-- given an alt, return a discr and code for it.
- codeALt alt@(DEFAULT, _, (_,rhs))
- = schemeE d_alts s p_alts rhs `thenBc` \ rhs_code ->
- returnBc (NoDiscr, rhs_code)
- codeAlt alt@(discr, bndrs, (_,rhs))
+ codeAlt (DEFAULT, _, (_,rhs))
+ = do rhs_code <- schemeE d_alts s p_alts rhs
+ return (NoDiscr, rhs_code)
+
+ codeAlt alt@(_, bndrs, (_,rhs))
-- primitive or nullary constructor alt: no need to UNPACK
| null real_bndrs = do
rhs_code <- schemeE d_alts s p_alts rhs
- returnBc (my_discr alt, rhs_code)
+ return (my_discr alt, rhs_code)
-- algebraic alt with some binders
- | ASSERT(isAlgCase) otherwise =
+ | otherwise =
let
(ptrs,nptrs) = partition (isFollowableArg.idCgRep) real_bndrs
ptr_sizes = map idSizeW ptrs
(zip (reverse (ptrs ++ nptrs))
(mkStackOffsets d_alts (reverse bind_sizes)))
in do
+ MASSERT(isAlgCase)
rhs_code <- schemeE (d_alts+size) s p' rhs
return (my_discr alt, unitOL (UNPACK size) `appOL` rhs_code)
where
real_bndrs = filter (not.isTyVar) bndrs
-
- my_discr (DEFAULT, binds, rhs) = NoDiscr {-shouldn't really happen-}
- my_discr (DataAlt dc, binds, rhs)
+ my_discr (DEFAULT, _, _) = NoDiscr {-shouldn't really happen-}
+ my_discr (DataAlt dc, _, _)
| isUnboxedTupleCon dc
= unboxedTupleException
| otherwise
= DiscrP (dataConTag dc - fIRST_TAG)
- my_discr (LitAlt l, binds, rhs)
+ my_discr (LitAlt l, _, _)
= case l of MachInt i -> DiscrI (fromInteger i)
MachFloat r -> DiscrF (fromRational r)
MachDouble r -> DiscrD (fromRational r)
-- things that are pointers, whereas in CgBindery the code builds the
-- bitmap from the free slots and unboxed bindings.
-- (ToDo: merge?)
- bitmap = intsToReverseBitmap d{-size-} (sortLe (<=) rel_slots)
+ --
+ -- NOTE [7/12/2006] bug #1013, testcase ghci/should_run/ghci002.
+ -- The bitmap must cover the portion of the stack up to the sequel only.
+ -- Previously we were building a bitmap for the whole depth (d), but we
+ -- really want a bitmap up to depth (d-s). This affects compilation of
+ -- case-of-case expressions, which is the only time we can be compiling a
+ -- case expression with s /= 0.
+ bitmap_size = d-s
+ bitmap = intsToReverseBitmap bitmap_size{-size-}
+ (sortLe (<=) (filter (< bitmap_size) rel_slots))
where
binds = fmToList p
rel_slots = concat (map spread binds)
in do
alt_stuff <- mapM codeAlt alts
alt_final <- mkMultiBranch maybe_ncons alt_stuff
+
let
alt_bco_name = getName bndr
alt_bco = mkProtoBCO alt_bco_name alt_final (Left alts)
- 0{-no arity-} d{-bitmap size-} bitmap True{-is alts-}
+ 0{-no arity-} bitmap_size bitmap True{-is alts-}
-- in
-- trace ("case: bndr = " ++ showSDocDebug (ppr bndr) ++ "\ndepth = " ++ show d ++ "\nenv = \n" ++ showSDocDebug (ppBCEnv p) ++
-- "\n bitmap = " ++ show bitmap) $ do
let push_alts
| isAlgCase = PUSH_ALTS alt_bco'
| otherwise = PUSH_ALTS_UNLIFTED alt_bco' (typeCgRep bndr_ty)
- returnBc (push_alts `consOL` scrut_code)
+ return (push_alts `consOL` scrut_code)
-- -----------------------------------------------------------------------------
-> [AnnExpr' Id VarSet] -- args (atoms)
-> BcM BCInstrList
-generateCCall d0 s p ccall_spec@(CCallSpec target cconv safety) fn args_r_to_l
+generateCCall d0 s p (CCallSpec target cconv _) fn args_r_to_l
= let
-- useful constants
addr_sizeW = cgRepSizeW NonPtrArg
-- depth to the first word of the bits for that arg, and the
-- CgRep of what was actually pushed.
- pargs d [] = returnBc []
+ pargs _ [] = return []
pargs d (a:az)
= let arg_ty = repType (exprType (deAnnotate' a))
-- contains.
Just (t, _)
| t == arrayPrimTyCon || t == mutableArrayPrimTyCon
- -> pargs (d + addr_sizeW) az `thenBc` \ rest ->
- parg_ArrayishRep arrPtrsHdrSize d p a
- `thenBc` \ code ->
- returnBc ((code,NonPtrArg):rest)
+ -> do rest <- pargs (d + addr_sizeW) az
+ code <- parg_ArrayishRep arrPtrsHdrSize d p a
+ return ((code,AddrRep):rest)
| t == byteArrayPrimTyCon || t == mutableByteArrayPrimTyCon
- -> pargs (d + addr_sizeW) az `thenBc` \ rest ->
- parg_ArrayishRep arrWordsHdrSize d p a
- `thenBc` \ code ->
- returnBc ((code,NonPtrArg):rest)
+ -> do rest <- pargs (d + addr_sizeW) az
+ code <- parg_ArrayishRep arrWordsHdrSize d p a
+ return ((code,AddrRep):rest)
-- Default case: push taggedly, but otherwise intact.
- other
- -> pushAtom d p a `thenBc` \ (code_a, sz_a) ->
- pargs (d+sz_a) az `thenBc` \ rest ->
- returnBc ((code_a, atomRep a) : rest)
+ _
+ -> do (code_a, sz_a) <- pushAtom d p a
+ rest <- pargs (d+sz_a) az
+ return ((code_a, atomPrimRep a) : rest)
-- Do magic for Ptr/Byte arrays. Push a ptr to the array on
-- the stack but then advance it over the headers, so as to
-- point to the payload.
parg_ArrayishRep hdrSize d p a
- = pushAtom d p a `thenBc` \ (push_fo, _) ->
- -- The ptr points at the header. Advance it over the
- -- header and then pretend this is an Addr#.
- returnBc (push_fo `snocOL` SWIZZLE 0 hdrSize)
+ = do (push_fo, _) <- pushAtom d p a
+ -- The ptr points at the header. Advance it over the
+ -- header and then pretend this is an Addr#.
+ return (push_fo `snocOL` SWIZZLE 0 hdrSize)
- in
- pargs d0 args_r_to_l `thenBc` \ code_n_reps ->
+ in do
+ code_n_reps <- pargs d0 args_r_to_l
let
(pushs_arg, a_reps_pushed_r_to_l) = unzip code_n_reps
+ a_reps_sizeW = sum (map primRepSizeW a_reps_pushed_r_to_l)
push_args = concatOL pushs_arg
- d_after_args = d0 + sum (map cgRepSizeW a_reps_pushed_r_to_l)
+ d_after_args = d0 + a_reps_sizeW
a_reps_pushed_RAW
- | null a_reps_pushed_r_to_l || head a_reps_pushed_r_to_l /= VoidArg
+ | null a_reps_pushed_r_to_l || head a_reps_pushed_r_to_l /= VoidRep
= panic "ByteCodeGen.generateCCall: missing or invalid World token?"
| otherwise
= reverse (tail a_reps_pushed_r_to_l)
-- Get the result rep.
(returns_void, r_rep)
= case maybe_getCCallReturnRep (idType fn) of
- Nothing -> (True, VoidArg)
+ Nothing -> (True, VoidRep)
Just rr -> (False, rr)
{-
Because the Haskell stack grows down, the a_reps refer to
get_target_info
= case target of
DynamicTarget
- -> returnBc (False, panic "ByteCodeGen.generateCCall(dyn)")
+ -> return (False, panic "ByteCodeGen.generateCCall(dyn)")
StaticTarget target
- -> ioToBc (lookupStaticPtr target) `thenBc` \res ->
- returnBc (True, res)
- in
- get_target_info `thenBc` \ (is_static, static_target_addr) ->
+ -> do res <- ioToBc (lookupStaticPtr stdcall_adj_target)
+ return (True, res)
+ where
+ stdcall_adj_target
+#ifdef mingw32_TARGET_OS
+ | StdCallConv <- cconv
+ = let size = a_reps_sizeW * wORD_SIZE in
+ mkFastString (unpackFS target ++ '@':show size)
+#endif
+ | otherwise
+ = target
+
+ -- in
+ (is_static, static_target_addr) <- get_target_info
let
-- Get the arg reps, zapping the leading Addr# in the dynamic case
-- Push the return placeholder. For a call returning nothing,
-- this is a VoidArg (tag).
- r_sizeW = cgRepSizeW r_rep
+ r_sizeW = primRepSizeW r_rep
d_after_r = d_after_Addr + r_sizeW
r_lit = mkDummyLiteral r_rep
push_r = (if returns_void
else unitOL (PUSH_UBX (Left r_lit) r_sizeW))
-- generate the marshalling code we're going to call
- r_offW = 0
- addr_offW = r_sizeW
- arg1_offW = r_sizeW + addr_sizeW
- args_offW = map (arg1_offW +)
- (init (scanl (+) 0 (map cgRepSizeW a_reps)))
- in
- ioToBc (mkMarshalCode cconv
- (r_offW, r_rep) addr_offW
- (zip args_offW a_reps)) `thenBc` \ addr_of_marshaller ->
- recordMallocBc addr_of_marshaller `thenBc_`
- let
+
-- Offset of the next stack frame down the stack. The CCALL
-- instruction needs to describe the chunk of stack containing
-- the ccall args to the GC, so it needs to know how large it
-- is. See comment in Interpreter.c with the CCALL instruction.
stk_offset = d_after_r - s
+ -- in
+ -- the only difference in libffi mode is that we prepare a cif
+ -- describing the call type by calling libffi, and we attach the
+ -- address of this to the CCALL instruction.
+ token <- ioToBc $ prepForeignCall cconv a_reps r_rep
+ let addr_of_marshaller = castPtrToFunPtr token
+
+ recordItblMallocBc (ItblPtr (castFunPtrToPtr addr_of_marshaller))
+ let
-- do the call
- do_call = unitOL (CCALL stk_offset (castPtr addr_of_marshaller))
+ do_call = unitOL (CCALL stk_offset (castFunPtrToPtr addr_of_marshaller))
-- slide and return
wrapup = mkSLIDE r_sizeW (d_after_r - r_sizeW - s)
- `snocOL` RETURN_UBX r_rep
- in
+ `snocOL` RETURN_UBX (primRepToCgRep r_rep)
+ --in
--trace (show (arg1_offW, args_offW , (map cgRepSizeW a_reps) )) $
- returnBc (
+ return (
push_args `appOL`
push_Addr `appOL` push_r `appOL` do_call `appOL` wrapup
)
-
-- Make a dummy literal, to be used as a placeholder for FFI return
-- values on the stack.
-mkDummyLiteral :: CgRep -> Literal
+mkDummyLiteral :: PrimRep -> Literal
mkDummyLiteral pr
= case pr of
- NonPtrArg -> MachWord 0
- DoubleArg -> MachDouble 0
- FloatArg -> MachFloat 0
- _ -> moan64 "mkDummyLiteral" (ppr pr)
+ IntRep -> MachInt 0
+ WordRep -> MachWord 0
+ AddrRep -> MachNullAddr
+ DoubleRep -> MachDouble 0
+ FloatRep -> MachFloat 0
+ Int64Rep -> MachInt64 0
+ Word64Rep -> MachWord64 0
+ _ -> panic "mkDummyLiteral"
-- Convert (eg)
--
-- to Nothing
-maybe_getCCallReturnRep :: Type -> Maybe CgRep
+maybe_getCCallReturnRep :: Type -> Maybe PrimRep
+-- ^ Work out the single non-void PrimRep a ccall returns, or Nothing
+--   for a void result.  NOTE(review): 'ok' below demands the result
+--   be an unboxed tuple whose first component is VoidRep (presumably
+--   the State# token), so the partial 'r_reps !! 1' is safe only
+--   when 'ok' holds -- confirm the caller checks 'ok' first.
 maybe_getCCallReturnRep fn_ty
- = let (a_tys, r_ty) = splitFunTys (dropForAlls fn_ty)
+ = let (_a_tys, r_ty) = splitFunTys (dropForAlls fn_ty)
 maybe_r_rep_to_go
 = if isSingleton r_reps then Nothing else Just (r_reps !! 1)
 (r_tycon, r_reps)
 = case splitTyConApp_maybe (repType r_ty) of
- (Just (tyc, tys)) -> (tyc, map typeCgRep tys)
+ (Just (tyc, tys)) -> (tyc, map typePrimRep tys)
 Nothing -> blargh
- ok = ( ( r_reps `lengthIs` 2 && VoidArg == head r_reps)
- || r_reps == [VoidArg] )
+ ok = ( ( r_reps `lengthIs` 2 && VoidRep == head r_reps)
+ || r_reps == [VoidRep] )
 && isUnboxedTupleTyCon r_tycon
 && case maybe_r_rep_to_go of
 Nothing -> True
- Just r_rep -> r_rep /= PtrArg
+ Just r_rep -> r_rep /= PtrRep
 -- if it was, it would be impossible
 -- to create a valid return value
 -- placeholder on the stack
implement_tagToId :: [Name] -> BcM BCInstrList
implement_tagToId names
 = ASSERT( notNull names )
- getLabelsBc (length names) `thenBc` \ labels ->
- getLabelBc `thenBc` \ label_fail ->
- getLabelBc `thenBc` \ label_exit ->
- zip4 labels (tail labels ++ [label_fail])
- [0 ..] names `bind` \ infos ->
- map (mkStep label_exit) infos `bind` \ steps ->
- returnBc (concatOL steps
- `appOL`
- toOL [LABEL label_fail, CASEFAIL, LABEL label_exit])
+ do labels <- getLabelsBc (length names)
+ label_fail <- getLabelBc
+ label_exit <- getLabelBc
+ -- one label per name; each step falls through to the next,
+ -- and the last falls through to label_fail (hence the zip4)
+ let infos = zip4 labels (tail labels ++ [label_fail])
+ [0 ..] names
+ steps = map (mkStep label_exit) infos
+ return (concatOL steps
+ `appOL`
+ toOL [LABEL label_fail, CASEFAIL, LABEL label_exit])
where
mkStep l_exit (my_label, next_label, n, name_for_n)
= toOL [LABEL my_label,
pushAtom :: Int -> BCEnv -> AnnExpr' Id VarSet -> BcM (BCInstrList, Int)
-pushAtom d p (AnnApp f (_, AnnType _))
- = pushAtom d p (snd f)
-
-pushAtom d p (AnnNote note e)
- = pushAtom d p (snd e)
-
-pushAtom d p (AnnLam x e)
- = pushAtom d p (snd e)
+pushAtom d p e
+ | Just e' <- bcView e
+ = pushAtom d p e'
+ -- (bcView strips notes, casts, type lambdas and type applications,
+ -- replacing the three removed clauses above)
pushAtom d p (AnnVar v)
-
 | idCgRep v == VoidArg
- = returnBc (nilOL, 0)
+ = return (nilOL, 0)
+ -- void things occupy no stack space at all
 | isFCallId v
 = pprPanic "pushAtom: shouldn't get an FCallId here" (ppr v)
 | Just primop <- isPrimOpId_maybe v
- = returnBc (unitOL (PUSH_PRIMOP primop), 1)
+ = return (unitOL (PUSH_PRIMOP primop), 1)
 | Just d_v <- lookupBCEnv_maybe p v -- v is a local variable
- = returnBc (toOL (nOfThem sz (PUSH_L (d-d_v+sz-2))), sz)
+ = return (toOL (nOfThem sz (PUSH_L (d-d_v+sz-2))), sz)
+ -- NOTE(review): the offset is fixed but the stack grows by one
+ -- word per PUSH_L, so successive pushes copy successive words
+ -- of the object -- confirm against the interpreter's PUSH_L
 -- d - d_v the number of words between the TOS
 -- and the 1st slot of the object
 --
 | otherwise -- v must be a global variable
 = ASSERT(sz == 1)
- returnBc (unitOL (PUSH_G (getName v)), sz)
+ return (unitOL (PUSH_G (getName v)), sz)
 where
 sz = idSizeW v
-pushAtom d p (AnnLit lit)
+pushAtom _ _ (AnnLit lit)
 = case lit of
- MachLabel fs _ -> code NonPtrArg
- MachWord w -> code NonPtrArg
- MachInt i -> code PtrArg
- MachFloat r -> code FloatArg
- MachDouble r -> code DoubleArg
- MachChar c -> code NonPtrArg
- MachStr s -> pushStr s
+ -- each literal maps to the CgRep that fixes its stack footprint
+ MachLabel _ _ _ -> code NonPtrArg
+ MachWord _ -> code NonPtrArg
+ MachInt _ -> code PtrArg
+ MachFloat _ -> code FloatArg
+ MachDouble _ -> code DoubleArg
+ MachChar _ -> code NonPtrArg
+ MachNullAddr -> code NonPtrArg
+ MachStr s -> pushStr s
+ l -> pprPanic "pushAtom" (ppr l)
 where
 code rep
 = let size_host_words = cgRepSizeW rep
- in returnBc (unitOL (PUSH_UBX (Left lit) size_host_words),
+ in return (unitOL (PUSH_UBX (Left lit) size_host_words),
 size_host_words)
 pushStr s
 -- by virtue of the global FastString table, but
 -- to be on the safe side we copy the string into
 -- a malloc'd area of memory.
- ioToBc (mallocBytes (n+1)) `thenBc` \ ptr ->
- recordMallocBc ptr `thenBc_`
- ioToBc (
- withForeignPtr fp $ \p -> do
- memcpy ptr p (fromIntegral n)
- pokeByteOff ptr n (fromIntegral (ord '\0') :: Word8)
- return ptr
- )
- in
- getMallocvilleAddr `thenBc` \ addr ->
+ do ptr <- ioToBc (mallocBytes (n+1))
+ recordMallocBc ptr
+ -- recorded so the copy is freed when the BCO is GC'd
+ -- (see 'malloced' in BcM_State)
+ ioToBc (
+ withForeignPtr fp $ \p -> do
+ memcpy ptr p (fromIntegral n)
+ pokeByteOff ptr n (fromIntegral (ord '\0') :: Word8)
+ return ptr
+ )
+ in do
+ addr <- getMallocvilleAddr
 -- Get the addr on the stack, untaggedly
- returnBc (unitOL (PUSH_UBX (Right addr) 1), 1)
+ return (unitOL (PUSH_UBX (Right addr) 1), 1)
+
+pushAtom d p (AnnCast e _)
+ = pushAtom d p (snd e)
+ -- NOTE(review): bcView already strips AnnCast in the first
+ -- equation of pushAtom, so this clause looks unreachable --
+ -- confirm before relying on it
-pushAtom d p other
+pushAtom _ _ expr
 = pprPanic "ByteCodeGen.pushAtom"
- (pprCoreExpr (deAnnotate (undefined, other)))
+ (pprCoreExpr (deAnnotate (undefined, expr)))
foreign import ccall unsafe "memcpy"
- memcpy :: Ptr a -> Ptr b -> CInt -> IO ()
+ memcpy :: Ptr a -> Ptr b -> CSize -> IO ()
+ -- the length parameter of C memcpy is size_t, so CSize (not the
+ -- previously-used, possibly-narrower signed CInt) is the faithful type
-- -----------------------------------------------------------------------------
(filter (not.isNoDiscr.fst) raw_ways)
 mkTree :: [(Discr, BCInstrList)] -> Discr -> Discr -> BcM BCInstrList
+ -- NOTE(review): binary chop over the alternatives; assumes 'vals'
+ -- is sorted on its Discr -- confirm at the call site
- mkTree [] range_lo range_hi = returnBc the_default
+ mkTree [] _range_lo _range_hi = return the_default
 mkTree [val] range_lo range_hi
 | range_lo `eqAlt` range_hi
- = returnBc (snd val)
+ = return (snd val)
 | otherwise
- = getLabelBc `thenBc` \ label_neq ->
- returnBc (mkTestEQ (fst val) label_neq
- `consOL` (snd val
- `appOL` unitOL (LABEL label_neq)
- `appOL` the_default))
+ = do label_neq <- getLabelBc
+ return (mkTestEQ (fst val) label_neq
+ `consOL` (snd val
+ `appOL` unitOL (LABEL label_neq)
+ `appOL` the_default))
 mkTree vals range_lo range_hi
 = let n = length vals `div` 2
 vals_lo = take n vals
 vals_hi = drop n vals
 v_mid = fst (head vals_hi)
- in
- getLabelBc `thenBc` \ label_geq ->
- mkTree vals_lo range_lo (dec v_mid) `thenBc` \ code_lo ->
- mkTree vals_hi v_mid range_hi `thenBc` \ code_hi ->
- returnBc (mkTestLT v_mid label_geq
+ in do
+ label_geq <- getLabelBc
+ code_lo <- mkTree vals_lo range_lo (dec v_mid)
+ code_hi <- mkTree vals_hi v_mid range_hi
+ return (mkTestLT v_mid label_geq
 `consOL` (code_lo
 `appOL` unitOL (LABEL label_geq)
 `appOL` code_hi))
 the_default
 = case d_way of [] -> unitOL CASEFAIL
 [(_, def)] -> def
+ _ -> panic "mkMultiBranch/the_default"
+ -- (a case can have at most one DEFAULT alternative)
 -- None of these will be needed if there are no non-default alts
 (mkTestLT, mkTestEQ, init_lo, init_hi)
 DiscrP _ -> ( \(DiscrP i) fail_label -> TESTLT_P i fail_label,
 \(DiscrP i) fail_label -> TESTEQ_P i fail_label,
 DiscrP algMinBound,
- DiscrP algMaxBound )
+ DiscrP algMaxBound );
+ NoDiscr -> panic "mkMultiBranch NoDiscr"
+ -- NoDiscr alts were filtered out of raw_ways above (isNoDiscr)
 }
(algMinBound, algMaxBound)
+-- | Stack size of an Id, in words, via its CgRep.
idSizeW :: Id -> Int
idSizeW id = cgRepSizeW (typeCgRep (idType id))
+-- See bug #1257
unboxedTupleException :: a
unboxedTupleException
- = throwDyn
- (Panic
- ("Bytecode generator can't handle unboxed tuples. Possibly due\n" ++
- "\tto foreign import/export decls in source. Workaround:\n" ++
- "\tcompile this module to a .o file, then restart session."))
+ = ghcError
+ (ProgramError
+ ("Error: bytecode compiler can't handle unboxed tuples.\n"++
+ " Possibly due to foreign import/export decls in source.\n"++
+ " Workaround: use -fobject-code, or compile this module to .o separately."))
+ -- ProgramError rather than Panic: this is a known user-facing
+ -- limitation, not a compiler bug
+mkSLIDE :: Int -> Int -> OrdList BCInstr
+-- ^ Emit a SLIDE n d, or nothing at all when d == 0 (no-op slide).
mkSLIDE n d = if d == 0 then nilOL else unitOL (SLIDE n d)
-bind x f = f x
-splitApp :: AnnExpr' id ann -> (AnnExpr' id ann, [AnnExpr' id ann])
+splitApp :: AnnExpr' Var ann -> (AnnExpr' Var ann, [AnnExpr' Var ann])
 -- The arguments are returned in *right-to-left* order
-splitApp (AnnApp (_,f) (_,a))
- | isTypeAtom a = splitApp f
- | otherwise = case splitApp f of
- (f', as) -> (f', a:as)
-splitApp (AnnNote n (_,e)) = splitApp e
-splitApp e = (e, [])
-
-
-isTypeAtom :: AnnExpr' id ann -> Bool
-isTypeAtom (AnnType _) = True
-isTypeAtom _ = False
-
-isVoidArgAtom :: AnnExpr' id ann -> Bool
-isVoidArgAtom (AnnVar v) = typeCgRep (idType v) == VoidArg
-isVoidArgAtom (AnnNote n (_,e)) = isVoidArgAtom e
-isVoidArgAtom _ = False
+-- (bcView strips type applications, so only value arguments are
+-- collected -- this subsumes the old isTypeAtom test)
+splitApp e | Just e' <- bcView e = splitApp e'
+splitApp (AnnApp (_,f) (_,a)) = case splitApp f of
+ (f', as) -> (f', a:as)
+splitApp e = (e, [])
+
+
+bcView :: AnnExpr' Var ann -> Maybe (AnnExpr' Var ann)
+-- The "bytecode view" of a term discards
+-- a) type abstractions
+-- b) type applications
+-- c) casts
+-- d) notes
+-- Type lambdas *can* occur in random expressions,
+-- whereas value lambdas cannot; that is why they are nuked here
+-- Returns Just the unwrapped sub-expression, or Nothing when there
+-- is nothing left to strip.
+bcView (AnnNote _ (_,e)) = Just e
+bcView (AnnCast (_,e) _) = Just e
+bcView (AnnLam v (_,e)) | isTyVar v = Just e
+bcView (AnnApp (_,e) (_, AnnType _)) = Just e
+bcView _ = Nothing
+
+isVoidArgAtom :: AnnExpr' Var ann -> Bool
+-- ^ True for atoms of VoidRep, which occupy no stack space.
+isVoidArgAtom e | Just e' <- bcView e = isVoidArgAtom e'
+isVoidArgAtom (AnnVar v) = typePrimRep (idType v) == VoidRep
+isVoidArgAtom _ = False
+
+atomPrimRep :: AnnExpr' Id ann -> PrimRep
+-- ^ PrimRep of an atomic expression, looking through the bytecode view.
+atomPrimRep e | Just e' <- bcView e = atomPrimRep e'
+atomPrimRep (AnnVar v) = typePrimRep (idType v)
+atomPrimRep (AnnLit l) = typePrimRep (literalType l)
+atomPrimRep other = pprPanic "atomPrimRep" (ppr (deAnnotate (undefined,other)))
atomRep :: AnnExpr' Id ann -> CgRep
-atomRep (AnnVar v) = typeCgRep (idType v)
-atomRep (AnnLit l) = typeCgRep (literalType l)
-atomRep (AnnNote n b) = atomRep (snd b)
-atomRep (AnnApp f (_, AnnType _)) = atomRep (snd f)
-atomRep (AnnLam x e) | isTyVar x = atomRep (snd e)
-atomRep other = pprPanic "atomRep" (ppr (deAnnotate (undefined,other)))
+-- CgRep counterpart of atomPrimRep (which now does the stripping)
+atomRep e = primRepToCgRep (atomPrimRep e)
+-- | Does the atom have a pointer (PtrArg) representation?
isPtrAtom :: AnnExpr' Id ann -> Bool
isPtrAtom e = atomRep e == PtrArg
-- -----------------------------------------------------------------------------
-- The bytecode generator's monad
+type BcPtr = Either ItblPtr (Ptr ())
+-- Left: malloc'd info tables; Right: other malloc'd data
+-- (recorded via recordItblMallocBc / recordMallocBc below)
+
data BcM_State
 = BcM_State {
+ uniqSupply :: UniqSupply, -- for generating fresh variable names
 nextlabel :: Int, -- for generating local labels
- malloced :: [Ptr ()] } -- ptrs malloced for current BCO
+ malloced :: [BcPtr], -- thunks malloced for current BCO
 -- Should be free()d when it is GCd
+ breakArray :: BreakArray -- array of breakpoint flags
+ }
newtype BcM r = BcM (BcM_State -> IO (BcM_State, r))
x <- io
return (st, x)
-runBc :: BcM r -> IO (BcM_State, r)
-runBc (BcM m) = m (BcM_State 0 [])
+runBc :: UniqSupply -> ModBreaks -> BcM r -> IO (BcM_State, r)
+-- ^ Run a BcM action: label counter starts at 0, no pointers
+--   malloc'd yet; breakpoint flags come from the module's ModBreaks.
+runBc us modBreaks (BcM m)
+ = m (BcM_State us 0 [] breakArray)
+ where
+ breakArray = modBreaks_flags modBreaks
thenBc :: BcM a -> (a -> BcM b) -> BcM b
thenBc (BcM expr) cont = BcM $ \st0 -> do
thenBc_ :: BcM a -> BcM b -> BcM b
+-- sequence two BcM actions, discarding the first result
thenBc_ (BcM expr) (BcM cont) = BcM $ \st0 -> do
- (st1, q) <- expr st0
+ (st1, _) <- expr st0
 (st2, r) <- cont st1
 return (st2, r)
(>>) = thenBc_
return = returnBc
-emitBc :: ([Ptr ()] -> ProtoBCO Name) -> BcM (ProtoBCO Name)
+emitBc :: ([BcPtr] -> ProtoBCO Name) -> BcM (ProtoBCO Name)
+-- ^ Finish a BCO: hand it the pointers malloc'd on its behalf and
+--   reset the accumulator for the next BCO.
emitBc bco
 = BcM $ \st -> return (st{malloced=[]}, bco (malloced st))
recordMallocBc :: Ptr a -> BcM ()
recordMallocBc a
- = BcM $ \st -> return (st{malloced = castPtr a : malloced st}, ())
+ = BcM $ \st -> return (st{malloced = Right (castPtr a) : malloced st}, ())
+ -- Right = ordinary malloc'd data (cf. BcPtr)
+
+recordItblMallocBc :: ItblPtr -> BcM ()
+recordItblMallocBc a
+ = BcM $ \st -> return (st{malloced = Left a : malloced st}, ())
+ -- Left = malloc'd info table (cf. BcPtr)
getLabelBc :: BcM Int
getLabelBc
+-- return n consecutive fresh labels, [ctr .. ctr+n-1]
getLabelsBc n
 = BcM $ \st -> let ctr = nextlabel st
 in return (st{nextlabel = ctr+n}, [ctr .. ctr+n-1])
+
+getBreakArray :: BcM BreakArray
+-- ^ The per-module breakpoint-flag array installed by runBc.
+getBreakArray = BcM $ \st -> return (st, breakArray st)
+
+newUnique :: BcM Unique
+-- ^ Fresh Unique: split the state's UniqSupply, keep one half in the
+--   state and draw the result from the other.
+newUnique = BcM $
+ \st -> case splitUniqSupply (uniqSupply st) of
+ (us1, us2) -> let newState = st { uniqSupply = us2 }
+ in return (newState, uniqFromSupply us1)
+
+newId :: Type -> BcM Id
+-- ^ Fresh system-local Id of the given type, named after tickFS.
+newId ty = do
+ uniq <- newUnique
+ return $ mkSysLocal tickFS uniq ty
+
+tickFS :: FastString
+-- shared name for Ids made by newId
+tickFS = fsLit "ticked"
\end{code}