+-- -----------------------------------------------------------------------------
+-- Returning an unboxed tuple with one non-void component (the only
+-- case we can handle).
+--
+-- Remember, we don't want to *evaluate* the component that is being
+-- returned, even if it is a pointed type. We always just return.
+
+-- d = current stack depth, s = sequel depth (where to return to),
+-- p = compile-time env mapping Ids to stack offsets,
+-- arg = the single non-void component of the unboxed tuple.
+unboxedTupleReturn
+ :: Int -> Sequel -> BCEnv
+ -> AnnExpr' Id VarSet -> BcM BCInstrList
+unboxedTupleReturn d s p arg = do
+ (push, sz) <- pushAtom d p arg
+ -- Push the component *unevaluated*, slide it (sz words) down over
+ -- the (d-s) words separating us from the sequel, then return it
+ -- raw with RETURN_UBX tagged by its representation.
+ returnBc (push `appOL`
+ mkSLIDE sz (d-s) `snocOL`
+ RETURN_UBX (atomRep arg))
+
+-- -----------------------------------------------------------------------------
+-- Generate code for a tail-call
+
+-- Compile a tail-call: push the args in chunks, each chunk matched to
+-- a single PUSH_APPLY_* instruction by findPushSeq, then push the
+-- function closure, slide the whole lot down to the sequel, and ENTER.
+doTailCall
+ :: Int -> Sequel -> BCEnv
+ -> Id -> [AnnExpr' Id VarSet]
+ -> BcM BCInstrList
+doTailCall init_d s p fn args
+ = do_pushes init_d args (map atomRep args)
+ where
+ -- No args left: push the function (1 word -- asserted below), slide
+ -- everything pushed since init_d plus the function itself
+ -- ((d-init_d)+1 words) down over the (init_d - s) words above the
+ -- sequel, and ENTER the function.
+ do_pushes d [] reps = do
+ ASSERTM( null reps )
+ (push_fn, sz) <- pushAtom d p (AnnVar fn)
+ ASSERTM( sz == 1 )
+ returnBc (push_fn `appOL` (
+ mkSLIDE ((d-init_d) + 1) (init_d - s) `appOL`
+ unitOL ENTER))
+ -- Args remain: peel off the longest prefix of reps that a single
+ -- PUSH_APPLY_* instruction covers, push those n args, then recurse
+ -- on the rest at depth next_d + 1.
+ do_pushes d args reps = do
+ let (push_apply, n, rest_of_reps) = findPushSeq reps
+ (these_args, rest_of_args) = splitAt n args
+ (next_d, push_code) <- push_seq d these_args
+ instrs <- do_pushes (next_d + 1) rest_of_args rest_of_reps
+ -- ^^^ for the PUSH_APPLY_ instruction
+ returnBc (push_code `appOL` (push_apply `consOL` instrs))
+
+ -- Push each arg in turn, threading the stack depth; returns the
+ -- final depth and the concatenated push code.
+ push_seq d [] = return (d, nilOL)
+ push_seq d (arg:args) = do
+ (push_code, sz) <- pushAtom d p arg
+ (final_d, more_push_code) <- push_seq (d+sz) args
+ return (final_d, push_code `appOL` more_push_code)
+
+-- v. similar to CgStackery.findMatch, ToDo: merge
+-- Match the longest leading run of argument reps that one PUSH_APPLY_*
+-- instruction can apply, returning (instruction, #reps consumed, rest).
+-- Pointer args coalesce up to six at a time (clauses are ordered
+-- longest-first, so clause order matters); all other reps go one at
+-- a time.
+findPushSeq (PtrArg: PtrArg: PtrArg: PtrArg: PtrArg: PtrArg: rest)
+ = (PUSH_APPLY_PPPPPP, 6, rest)
+findPushSeq (PtrArg: PtrArg: PtrArg: PtrArg: PtrArg: rest)
+ = (PUSH_APPLY_PPPPP, 5, rest)
+findPushSeq (PtrArg: PtrArg: PtrArg: PtrArg: rest)
+ = (PUSH_APPLY_PPPP, 4, rest)
+findPushSeq (PtrArg: PtrArg: PtrArg: rest)
+ = (PUSH_APPLY_PPP, 3, rest)
+findPushSeq (PtrArg: PtrArg: rest)
+ = (PUSH_APPLY_PP, 2, rest)
+findPushSeq (PtrArg: rest)
+ = (PUSH_APPLY_P, 1, rest)
+findPushSeq (VoidArg: rest)
+ = (PUSH_APPLY_V, 1, rest)
+findPushSeq (NonPtrArg: rest)
+ = (PUSH_APPLY_N, 1, rest)
+findPushSeq (FloatArg: rest)
+ = (PUSH_APPLY_F, 1, rest)
+findPushSeq (DoubleArg: rest)
+ = (PUSH_APPLY_D, 1, rest)
+findPushSeq (LongArg: rest)
+ = (PUSH_APPLY_L, 1, rest)
+findPushSeq _
+ = panic "ByteCodeGen.findPushSeq"
+
+-- -----------------------------------------------------------------------------
+-- Case expressions
+
+-- Compile a case expression: build a BCO for the alternatives (entered
+-- via PUSH_ALTS / PUSH_ALTS_UNLIFTED), then compile the scrutinee at a
+-- depth that accounts for the return frame.
+doCase :: Int -> Sequel -> BCEnv
+ -> AnnExpr Id VarSet -> Id -> [AnnAlt Id VarSet]
+ -> Bool -- True <=> is an unboxed tuple case, don't enter the result
+ -> BcM BCInstrList
+doCase d s p (_,scrut)
+ bndr alts is_unboxed_tuple
+ = let
+ -- Top of stack is the return itbl, as usual.
+ -- underneath it is the pointer to the alt_code BCO.
+ -- When an alt is entered, it assumes the returned value is
+ -- on top of the itbl.
+ ret_frame_sizeW = 2
+
+ -- An unlifted value gets an extra info table pushed on top
+ -- when it is returned.
+ unlifted_itbl_sizeW | isAlgCase = 0
+ | otherwise = 1
+
+ -- depth of stack after the return value has been pushed
+ d_bndr = d + ret_frame_sizeW + idSizeW bndr
+
+ -- depth of stack after the extra info table for an unboxed return
+ -- has been pushed, if any. This is the stack depth at the
+ -- continuation.
+ d_alts = d_bndr + unlifted_itbl_sizeW
+
+ -- Env in which to compile the alts, not including
+ -- any vars bound by the alts themselves
+ p_alts = addToFM p bndr (d_bndr - 1)
+
+ bndr_ty = idType bndr
+ isAlgCase = not (isUnLiftedType bndr_ty) && not is_unboxed_tuple
+
+ -- given an alt, return a discr and code for it.
+ -- (was misspelt "codeALt", leaving this clause as a separate,
+ -- never-called binding; DEFAULT must be the first equation of
+ -- codeAlt so it is matched before the general case below)
+ codeAlt alt@(DEFAULT, _, (_,rhs))
+ = schemeE d_alts s p_alts rhs `thenBc` \ rhs_code ->
+ returnBc (NoDiscr, rhs_code)
+ codeAlt alt@(discr, bndrs, (_,rhs))
+ -- primitive or nullary constructor alt: no need to UNPACK
+ | null real_bndrs = do
+ rhs_code <- schemeE d_alts s p_alts rhs
+ returnBc (my_discr alt, rhs_code)
+ -- algebraic alt with some binders
+ | ASSERT(isAlgCase) otherwise =
+ let
+ (ptrs,nptrs) = partition (isFollowableArg.idCgRep) real_bndrs
+ ptr_sizes = map idSizeW ptrs
+ nptrs_sizes = map idSizeW nptrs
+ bind_sizes = ptr_sizes ++ nptrs_sizes
+ size = sum ptr_sizes + sum nptrs_sizes
+ -- the UNPACK instruction unpacks in reverse order...
+ p' = addListToFM p_alts
+ (zip (reverse (ptrs ++ nptrs))
+ (mkStackOffsets d_alts (reverse bind_sizes)))
+ in do
+ rhs_code <- schemeE (d_alts+size) s p' rhs
+ return (my_discr alt, unitOL (UNPACK size) `appOL` rhs_code)
+ where
+ real_bndrs = filter (not.isTyVar) bndrs
+
+
+ my_discr (DEFAULT, binds, rhs) = NoDiscr {-shouldn't really happen-}
+ my_discr (DataAlt dc, binds, rhs)
+ | isUnboxedTupleCon dc
+ = unboxedTupleException
+ | otherwise
+ = DiscrP (dataConTag dc - fIRST_TAG)
+ my_discr (LitAlt l, binds, rhs)
+ = case l of MachInt i -> DiscrI (fromInteger i)
+ MachFloat r -> DiscrF (fromRational r)
+ MachDouble r -> DiscrD (fromRational r)
+ MachChar i -> DiscrI (ord i)
+ _ -> pprPanic "schemeE(AnnCase).my_discr" (ppr l)
+
+ maybe_ncons
+ | not isAlgCase = Nothing
+ | otherwise
+ = case [dc | (DataAlt dc, _, _) <- alts] of
+ [] -> Nothing
+ (dc:_) -> Just (tyConFamilySize (dataConTyCon dc))
+
+ -- the bitmap is relative to stack depth d, i.e. before the
+ -- BCO, info table and return value are pushed on.
+ -- This bit of code is v. similar to buildLivenessMask in CgBindery,
+ -- except that here we build the bitmap from the known bindings of
+ -- things that are pointers, whereas in CgBindery the code builds the
+ -- bitmap from the free slots and unboxed bindings.
+ -- (ToDo: merge?)
+ bitmap = intsToReverseBitmap d{-size-} (sortLe (<=) rel_slots)
+ where
+ binds = fmToList p
+ rel_slots = concat (map spread binds)
+ spread (id, offset)
+ | isFollowableArg (idCgRep id) = [ rel_offset ]
+ | otherwise = []
+ where rel_offset = d - offset - 1
+
+ in do
+ alt_stuff <- mapM codeAlt alts
+ alt_final <- mkMultiBranch maybe_ncons alt_stuff
+ let
+ alt_bco_name = getName bndr
+ alt_bco = mkProtoBCO alt_bco_name alt_final (Left alts)
+ 0{-no arity-} d{-bitmap size-} bitmap True{-is alts-}
+ -- in
+-- trace ("case: bndr = " ++ showSDocDebug (ppr bndr) ++ "\ndepth = " ++ show d ++ "\nenv = \n" ++ showSDocDebug (ppBCEnv p) ++
+-- "\n bitmap = " ++ show bitmap) $ do
+ scrut_code <- schemeE (d + ret_frame_sizeW) (d + ret_frame_sizeW) p scrut
+ alt_bco' <- emitBc alt_bco
+ let push_alts
+ | isAlgCase = PUSH_ALTS alt_bco'
+ | otherwise = PUSH_ALTS_UNLIFTED alt_bco' (typeCgRep bndr_ty)
+ returnBc (push_alts `consOL` scrut_code)
+
+
+-- -----------------------------------------------------------------------------
+-- Deal with a CCall.
+
+-- Taggedly push the args onto the stack R->L,
+-- dereferencing ForeignObj#s and adjusting addrs to point to
+-- payloads in Ptr/Byte arrays. Then, generate the marshalling
+-- (machine) code for the ccall, and create bytecodes to call that and
+-- then return in the right way.
+
+generateCCall :: Int -> Sequel -- stack and sequel depths
+ -> BCEnv
+ -> CCallSpec -- where to call
+ -> Id -- of target, for type info
+ -> [AnnExpr' Id VarSet] -- args (atoms)
+ -> BcM BCInstrList
+
+-- NOTE(review): the 'safety' field of the CCallSpec is bound but never
+-- consulted anywhere in this function.
+generateCCall d0 s p ccall_spec@(CCallSpec target cconv safety) fn args_r_to_l
+ = let
+ -- useful constants
+ addr_sizeW = cgRepSizeW NonPtrArg
+
+ -- Get the args on the stack, with tags and suitably
+ -- dereferenced for the CCall. For each arg, return the
+ -- depth to the first word of the bits for that arg, and the
+ -- CgRep of what was actually pushed.
+
+ pargs d [] = returnBc []
+ pargs d (a:az)
+ = let arg_ty = repType (exprType (deAnnotate' a))
+
+ in case splitTyConApp_maybe arg_ty of
+ -- Don't push the FO; instead push the Addr# it
+ -- contains.
+ Just (t, _)
+ | t == arrayPrimTyCon || t == mutableArrayPrimTyCon
+ -> pargs (d + addr_sizeW) az `thenBc` \ rest ->
+ parg_ArrayishRep arrPtrsHdrSize d p a
+ `thenBc` \ code ->
+ returnBc ((code,NonPtrArg):rest)
+
+ | t == byteArrayPrimTyCon || t == mutableByteArrayPrimTyCon
+ -> pargs (d + addr_sizeW) az `thenBc` \ rest ->
+ parg_ArrayishRep arrWordsHdrSize d p a
+ `thenBc` \ code ->
+ returnBc ((code,NonPtrArg):rest)
+
+ -- Default case: push taggedly, but otherwise intact.
+ other
+ -> pushAtom d p a `thenBc` \ (code_a, sz_a) ->
+ pargs (d+sz_a) az `thenBc` \ rest ->
+ returnBc ((code_a, atomRep a) : rest)
+
+ -- Do magic for Ptr/Byte arrays. Push a ptr to the array on
+ -- the stack but then advance it over the headers, so as to
+ -- point to the payload.
+ parg_ArrayishRep hdrSize d p a
+ = pushAtom d p a `thenBc` \ (push_fo, _) ->
+ -- The ptr points at the header. Advance it over the
+ -- header and then pretend this is an Addr#.
+ returnBc (push_fo `snocOL` SWIZZLE 0 hdrSize)
+
+ in
+ pargs d0 args_r_to_l `thenBc` \ code_n_reps ->
+ let
+ (pushs_arg, a_reps_pushed_r_to_l) = unzip code_n_reps
+
+ push_args = concatOL pushs_arg
+ d_after_args = d0 + sum (map cgRepSizeW a_reps_pushed_r_to_l)
+ -- The last arg pushed must be the State# RealWorld token, which
+ -- has rep VoidArg; strip it off to obtain the raw arg reps in
+ -- left-to-right order.
+ a_reps_pushed_RAW
+ | null a_reps_pushed_r_to_l || head a_reps_pushed_r_to_l /= VoidArg
+ = panic "ByteCodeGen.generateCCall: missing or invalid World token?"
+ | otherwise
+ = reverse (tail a_reps_pushed_r_to_l)
+
+ -- Now: a_reps_pushed_RAW are the reps which are actually on the stack.
+ -- push_args is the code to do that.
+ -- d_after_args is the stack depth once the args are on.
+
+ -- Get the result rep.
+ (returns_void, r_rep)
+ = case maybe_getCCallReturnRep (idType fn) of
+ Nothing -> (True, VoidArg)
+ Just rr -> (False, rr)
+ {-
+ Because the Haskell stack grows down, the a_reps refer to
+ lowest to highest addresses in that order. The args for the call
+ are on the stack. Now push an unboxed Addr# indicating
+ the C function to call. Then push a dummy placeholder for the
+ result. Finally, emit a CCALL insn with an offset pointing to the
+ Addr# just pushed, and a literal field holding the malloc'd
+ address of the piece of marshalling code we generate.
+ So, just prior to the CCALL insn, the stack looks like this
+ (growing down, as usual):
+
+ <arg_n>
+ ...
+ <arg_1>
+ Addr# address_of_C_fn
+ <placeholder-for-result#> (must be an unboxed type)
+
+ The interpreter then calls the marshall code mentioned
+ in the CCALL insn, passing it (& <placeholder-for-result#>),
+ that is, the addr of the topmost word in the stack.
+ When this returns, the placeholder will have been
+ filled in. The placeholder is slid down to the sequel
+ depth, and we RETURN.
+
+ This arrangement makes it simple to do f-i-dynamic since the Addr#
+ value is the first arg anyway.
+
+ The marshalling code is generated specifically for this
+ call site, and so knows exactly the (Haskell) stack
+ offsets of the args, fn address and placeholder. It
+ copies the args to the C stack, calls the stacked addr,
+ and parks the result back in the placeholder. The interpreter
+ calls it as a normal C call, assuming it has a signature
+ void marshall_code ( StgWord* ptr_to_top_of_stack )
+ -}
+ -- resolve static address
+ get_target_info
+ = case target of
+ DynamicTarget
+ -> returnBc (False, panic "ByteCodeGen.generateCCall(dyn)")
+ StaticTarget target
+ -> ioToBc (lookupStaticPtr target) `thenBc` \res ->
+ returnBc (True, res)
+ in
+ get_target_info `thenBc` \ (is_static, static_target_addr) ->
+ let
+
+ -- Get the arg reps, zapping the leading Addr# in the dynamic case
+ a_reps -- | trace (showSDoc (ppr a_reps_pushed_RAW)) False = error "???"
+ | is_static = a_reps_pushed_RAW
+ | otherwise = if null a_reps_pushed_RAW
+ then panic "ByteCodeGen.generateCCall: dyn with no args"
+ else tail a_reps_pushed_RAW
+
+ -- push the Addr#
+ (push_Addr, d_after_Addr)
+ | is_static
+ = (toOL [PUSH_UBX (Right static_target_addr) addr_sizeW],
+ d_after_args + addr_sizeW)
+ | otherwise -- is already on the stack
+ = (nilOL, d_after_args)
+
+ -- Push the return placeholder. For a call returning nothing,
+ -- this is a VoidArg (tag).
+ r_sizeW = cgRepSizeW r_rep
+ d_after_r = d_after_Addr + r_sizeW
+ r_lit = mkDummyLiteral r_rep
+ push_r = (if returns_void
+ then nilOL
+ else unitOL (PUSH_UBX (Left r_lit) r_sizeW))
+
+ -- generate the marshalling code we're going to call
+ -- Word offsets (from the eventual stack top) of the result
+ -- slot, the function address, and each argument.
+ r_offW = 0
+ addr_offW = r_sizeW
+ arg1_offW = r_sizeW + addr_sizeW
+ args_offW = map (arg1_offW +)
+ (init (scanl (+) 0 (map cgRepSizeW a_reps)))
+ in
+ ioToBc (mkMarshalCode cconv
+ (r_offW, r_rep) addr_offW
+ (zip args_offW a_reps)) `thenBc` \ addr_of_marshaller ->
+ recordMallocBc addr_of_marshaller `thenBc_`
+ let
+ -- Offset of the next stack frame down the stack. The CCALL
+ -- instruction needs to describe the chunk of stack containing
+ -- the ccall args to the GC, so it needs to know how large it
+ -- is. See comment in Interpreter.c with the CCALL instruction.
+ stk_offset = d_after_r - s
+
+ -- do the call
+ do_call = unitOL (CCALL stk_offset (castPtr addr_of_marshaller))
+ -- slide and return
+ wrapup = mkSLIDE r_sizeW (d_after_r - r_sizeW - s)
+ `snocOL` RETURN_UBX r_rep
+ in
+ --trace (show (arg1_offW, args_offW , (map cgRepSizeW a_reps) )) $
+ returnBc (
+ push_args `appOL`
+ push_Addr `appOL` push_r `appOL` do_call `appOL` wrapup
+ )
+
+
+-- Make a dummy literal, to be used as a placeholder for FFI return
+-- values on the stack.
+-- | Manufacture a placeholder 'Literal' of the given representation;
+-- used to reserve the stack slot that will receive an FFI call's
+-- return value. Only non-pointer reps are supported; anything else
+-- is a panic.
+mkDummyLiteral :: CgRep -> Literal
+mkDummyLiteral rep = case rep of
+ FloatArg -> MachFloat 0
+ DoubleArg -> MachDouble 0
+ NonPtrArg -> MachWord 0
+ other -> moan64 "mkDummyLiteral" (ppr other)
+
+
+-- Convert (eg)
+-- GHC.Prim.Char# -> GHC.Prim.State# GHC.Prim.RealWorld
+-- -> (# GHC.Prim.State# GHC.Prim.RealWorld, GHC.Prim.Int# #)
+--
+-- to Just NonPtrArg
+-- and check that an unboxed pair is returned wherein the first arg is VoidArg'd.
+--
+-- Alternatively, for call-targets returning nothing, convert
+--
+-- GHC.Prim.Char# -> GHC.Prim.State# GHC.Prim.RealWorld
+-- -> (# GHC.Prim.State# GHC.Prim.RealWorld #)
+--
+-- to Nothing
+
+maybe_getCCallReturnRep :: Type -> Maybe CgRep
+maybe_getCCallReturnRep fn_ty
+ = let (a_tys, r_ty) = splitFunTys (dropForAlls fn_ty)
+ -- r_reps !! 0 is the VoidArg'd State# token (checked in 'ok'
+ -- below); r_reps !! 1, if present, is the real result rep.
+ maybe_r_rep_to_go
+ = if isSingleton r_reps then Nothing else Just (r_reps !! 1)
+ (r_tycon, r_reps)
+ = case splitTyConApp_maybe (repType r_ty) of
+ (Just (tyc, tys)) -> (tyc, map typeCgRep tys)
+ Nothing -> blargh
+ -- Sanity: the result must be an unboxed tuple of either
+ -- (# State# #) or (# State#, r #) with a non-pointer r.
+ ok = ( ( r_reps `lengthIs` 2 && VoidArg == head r_reps)
+ || r_reps == [VoidArg] )
+ && isUnboxedTupleTyCon r_tycon
+ && case maybe_r_rep_to_go of
+ Nothing -> True
+ Just r_rep -> r_rep /= PtrArg
+ -- if it was, it would be impossible
+ -- to create a valid return value
+ -- placeholder on the stack
+ -- 'blargh' is a polymorphic panic, so it serves both as the
+ -- failed-split result above and the failure value below.
+ blargh = pprPanic "maybe_getCCallReturn: can't handle:"
+ (pprType fn_ty)
+ in
+ --trace (showSDoc (ppr (a_reps, r_reps))) $
+ if ok then maybe_r_rep_to_go else blargh
+