+ returnBc (PUSH_AS alt_bco_name scrut_primrep `consOL` scrut_code)
+
+
+schemeE d s p (fvs, AnnNote note body)
+ = schemeE d s p body
+
+schemeE d s p other
+ = pprPanic "ByteCodeGen.schemeE: unhandled case"
+ (pprCoreExpr (deAnnotate other))
+
+
+-- Compile code to do a tail call. Specifically, push the fn,
+-- slide the on-stack app back down to the sequel depth,
+-- and enter. Six cases:
+--
+-- 0. (Nasty hack).
+-- An application "GHC.Prim.tagToEnum# <type> unboxed-int".
+-- The int will be on the stack. Generate a code sequence
+-- to convert it to the relevant constructor, SLIDE and ENTER.
+--
+-- 1. A nullary constructor. Push its closure on the stack
+-- and SLIDE and RETURN.
+--
+-- 2. (Another nasty hack). Spot (# a::VoidRep, b #) and treat
+-- it simply as b -- since the representations are identical
+-- (the VoidRep takes up zero stack space). Also, spot
+-- (# b #) and treat it as b.
+--
+-- 3. The fn denotes a ccall. Defer to generateCCall.
+--
+-- 4. Application of a non-nullary constructor, by defn saturated.
+-- Split the args into ptrs and non-ptrs, and push the nonptrs,
+-- then the ptrs, and then do PACK and RETURN.
+--
+-- 5. Otherwise, it must be a function call. Push the args
+-- right to left, SLIDE and ENTER.
+
+schemeT :: Int -- Stack depth
+ -> Sequel -- Sequel depth
+ -> BCEnv -- stack env
+ -> AnnExpr Id VarSet
+ -> BcM BCInstrList
+
+-- NOTE(review): the guards below are tried top to bottom, so the
+-- tagToEnum# kludge (Case 0) must be tested before the generic
+-- constructor/function cases, since it looks like an ordinary app.
+schemeT d s p app
+
+-- | trace ("schemeT: env in = \n" ++ showSDocDebug (ppBCEnv p)) False
+-- = panic "schemeT ?!?!"
+
+-- | trace ("\nschemeT\n" ++ showSDoc (pprCoreExpr (deAnnotate app)) ++ "\n") False
+-- = error "?!?!"
+
+ -- Case 0
+ | Just (arg, constr_names) <- maybe_is_tagToEnum_call
+ = pushAtom True d p arg `thenBc` \ (push, arg_words) ->
+ implement_tagToId constr_names `thenBc` \ tagToId_sequence ->
+ returnBc (push `appOL` tagToId_sequence
+ `appOL` mkSLIDE 1 (d+arg_words-s)
+ `snocOL` ENTER)
+
+ -- Case 1
+ | is_con_call && null args_r_to_l
+ = returnBc (
+ (PUSH_G (Left (getName con)) `consOL` mkSLIDE 1 (d-s))
+ `snocOL` ENTER
+ )
+
+ -- Case 2
+ | is_con_call,
+ isUnboxedTupleCon con, -- (# ... #)
+ [(_,arg1),(_,arg2)] <- args_r_to_l, -- Exactly two args
+ let
+ isVoidRepAtom (AnnVar v) = typePrimRep (idType v) == VoidRep
+ isVoidRepAtom (AnnNote n (_,e)) = isVoidRepAtom e
+ isVoidRepAtom _ = False
+ in
+ isVoidRepAtom arg2 -- The first arg is void
+ = --trace (if isSingleton args_r_to_l
+ -- then "schemeT: unboxed singleton"
+ -- else "schemeT: unboxed pair with Void first component") (
+ pushAtom True d p arg1 `thenBc` \ (push, szw) ->
+ returnBc (push -- value onto stack
+ `appOL` mkSLIDE szw (d-s) -- clear to sequel
+ `snocOL` RETURN (atomRep arg1)) -- go
+ -- We used to use "schemeT d s p arg1", but that is wrong.
+ -- We must use RETURN (because it's an unboxed tuple)
+ -- I think that this still does not work: SLPJ Oct 02
+
+ -- Case 3
+ | Just (CCall ccall_spec) <- isFCallId_maybe fn
+ = generateCCall d s p ccall_spec fn args_r_to_l
+
+ -- Cases 4 and 5
+ | otherwise
+ = if is_con_call && isUnboxedTupleCon con
+ then unboxedTupleException
+ else do_pushery d (map snd args_final_r_to_l)
+
+ where
+ -- Detect and extract relevant info for the tagToEnum kludge.
+ maybe_is_tagToEnum_call
+ = let extract_constr_Names ty
+ = case splitTyConApp_maybe (repType ty) of
+ (Just (tyc, [])) | isDataTyCon tyc
+ -> map getName (tyConDataCons tyc)
+ other -> panic "maybe_is_tagToEnum_call.extract_constr_Ids"
+ in
+ case app of
+ (_, AnnApp (_, AnnApp (_, AnnVar v) (_, AnnType t)) arg)
+ -> case isPrimOpId_maybe v of
+ Just TagToEnumOp -> Just (snd arg, extract_constr_Names t)
+ other -> Nothing
+ other -> Nothing
+
+ -- Extract the args (R->L) and fn
+ (args_r_to_l, fn) = chomp app
+ chomp expr
+ = case snd expr of
+ AnnVar v -> ([], v)
+ AnnApp f a
+ | isTypeAtom (snd a) -> chomp f
+ | otherwise -> case chomp f of (az, f) -> (a:az, f)
+ AnnNote n e -> chomp e
+ other -> pprPanic "schemeT"
+ (ppr (deAnnotate (panic "schemeT.chomp", other)))
+
+ n_args = length args_r_to_l
+
+ isTypeAtom (AnnType _) = True
+ isTypeAtom _ = False
+
+ -- decide if this is a constructor application, because we need
+ -- to rearrange the arguments on the stack if so. For building
+ -- a constructor, we put pointers before non-pointers and omit
+ -- the tags.
+ --
+ -- Also if the constructor is not saturated, we just arrange to
+ -- call the curried worker instead.
+
+ maybe_dcon = case isDataConId_maybe fn of
+ Just con | dataConRepArity con == n_args -> Just con
+ _ -> Nothing
+ is_con_call = isJust maybe_dcon
+ (Just con) = maybe_dcon
+
+ args_final_r_to_l
+ | not is_con_call
+ = args_r_to_l
+ | otherwise
+ = filter (not.isPtr.snd) args_r_to_l ++ filter (isPtr.snd) args_r_to_l
+ where isPtr = isFollowableRep . atomRep
+
+ -- make code to push the args and then do the SLIDE-ENTER thing
+ tag_when_push = not is_con_call
+ narg_words = sum (map (get_arg_szw . atomRep . snd) args_r_to_l)
+ get_arg_szw = if tag_when_push then taggedSizeW else untaggedSizeW
+
+ -- Push each remaining arg, then finish: for a saturated constructor
+ -- do PACK/SLIDE/ENTER, otherwise push the function itself and
+ -- SLIDE/ENTER (the ccall case was already dispatched in Case 3).
+ do_pushery d (arg:args)
+ = pushAtom tag_when_push d p arg `thenBc` \ (push, arg_words) ->
+ do_pushery (d+arg_words) args `thenBc` \ more_push_code ->
+ returnBc (push `appOL` more_push_code)
+ do_pushery d []
+ | Just (CCall ccall_spec) <- isFCallId_maybe fn
+ = panic "schemeT.do_pushery: unexpected ccall"
+ | otherwise
+ = case maybe_dcon of
+ Just con -> returnBc (
+ (PACK con narg_words `consOL`
+ mkSLIDE 1 (d - narg_words - s)) `snocOL`
+ ENTER
+ )
+ Nothing
+ -> pushAtom True d p (AnnVar fn)
+ `thenBc` \ (push, arg_words) ->
+ returnBc (push `appOL` mkSLIDE (narg_words+arg_words)
+ (d - s - narg_words)
+ `snocOL` ENTER)
+
+
+{- Deal with a CCall. Taggedly push the args onto the stack R->L,
+ dereferencing ForeignObj#s and (ToDo: adjusting addrs to point to
+ payloads in Ptr/Byte arrays). Then, generate the marshalling
+ (machine) code for the ccall, and create bytecodes to call that and
+ then return in the right way.
+-}
+generateCCall :: Int -> Sequel -- stack and sequel depths
+ -> BCEnv
+ -> CCallSpec -- where to call
+ -> Id -- of target, for type info
+ -> [AnnExpr Id VarSet] -- args (atoms)
+ -> BcM BCInstrList
+
+generateCCall d0 s p ccall_spec@(CCallSpec target cconv safety) fn args_r_to_l
+ = let
+ -- useful constants
+ addr_usizeW = untaggedSizeW AddrRep
+ addr_tsizeW = taggedSizeW AddrRep
+
+ -- Get the args on the stack, with tags and suitably
+ -- dereferenced for the CCall. For each arg, return the
+ -- depth to the first word of the bits for that arg, and the
+ -- PrimRep of what was actually pushed.
+
+ -- pargs yields, still in R->L order, pairs of (push code, rep
+ -- actually pushed) -- the rep can differ from the source rep
+ -- (e.g. ForeignObj# and arrays are pushed as AddrRep).
+ pargs d [] = returnBc []
+ pargs d ((_,a):az)
+ = let arg_ty = repType (exprType (deAnnotate' a))
+
+ in case splitTyConApp_maybe arg_ty of
+ -- Don't push the FO; instead push the Addr# it
+ -- contains.
+ Just (t, _)
+ | t == foreignObjPrimTyCon
+ -> pushAtom False{-irrelevant-} d p a
+ `thenBc` \ (push_fo, _) ->
+ let foro_szW = taggedSizeW PtrRep
+ d_now = d + addr_tsizeW
+ code = push_fo `appOL` toOL [
+ UPK_TAG addr_usizeW 0 0,
+ SLIDE addr_tsizeW foro_szW
+ ]
+ in pargs d_now az `thenBc` \ rest ->
+ returnBc ((code, AddrRep) : rest)
+
+ | t == arrayPrimTyCon || t == mutableArrayPrimTyCon
+ -> pargs (d + addr_tsizeW) az `thenBc` \ rest ->
+ parg_ArrayishRep arrPtrsHdrSize d p a
+ `thenBc` \ code ->
+ returnBc ((code,AddrRep):rest)
+
+ | t == byteArrayPrimTyCon || t == mutableByteArrayPrimTyCon
+ -> pargs (d + addr_tsizeW) az `thenBc` \ rest ->
+ parg_ArrayishRep arrWordsHdrSize d p a
+ `thenBc` \ code ->
+ returnBc ((code,AddrRep):rest)
+
+ -- Default case: push taggedly, but otherwise intact.
+ other
+ -> pushAtom True d p a `thenBc` \ (code_a, sz_a) ->
+ pargs (d+sz_a) az `thenBc` \ rest ->
+ returnBc ((code_a, atomRep a) : rest)
+
+ -- Do magic for Ptr/Byte arrays. Push a ptr to the array on
+ -- the stack but then advance it over the headers, so as to
+ -- point to the payload.
+ parg_ArrayishRep hdrSizeW d p a
+ = pushAtom False{-irrel-} d p a `thenBc` \ (push_fo, _) ->
+ -- The ptr points at the header. Advance it over the
+ -- header and then pretend this is an Addr# (push a tag).
+ returnBc (push_fo `snocOL`
+ SWIZZLE 0 (hdrSizeW * untaggedSizeW PtrRep
+ * wORD_SIZE)
+ `snocOL`
+ PUSH_TAG addr_usizeW)
+
+ in
+ pargs d0 args_r_to_l `thenBc` \ code_n_reps ->
+ let
+ (pushs_arg, a_reps_pushed_r_to_l) = unzip code_n_reps
+
+ push_args = concatOL pushs_arg
+ d_after_args = d0 + sum (map taggedSizeW a_reps_pushed_r_to_l)
+ -- The last-pushed arg (head of the R->L list) must be the
+ -- VoidRep State#-token; it occupies no data words so we drop it.
+ a_reps_pushed_RAW
+ | null a_reps_pushed_r_to_l || head a_reps_pushed_r_to_l /= VoidRep
+ = panic "ByteCodeGen.generateCCall: missing or invalid World token?"
+ | otherwise
+ = reverse (tail a_reps_pushed_r_to_l)
+
+ -- Now: a_reps_pushed_RAW are the reps which are actually on the stack.
+ -- push_args is the code to do that.
+ -- d_after_args is the stack depth once the args are on.
+
+ -- Get the result rep.
+ (returns_void, r_rep)
+ = case maybe_getCCallReturnRep (idType fn) of
+ Nothing -> (True, VoidRep)
+ Just rr -> (False, rr)
+ {-
+ Because the Haskell stack grows down, the a_reps refer to
+ lowest to highest addresses in that order. The args for the call
+ are on the stack. Now push an unboxed, tagged Addr# indicating
+ the C function to call. Then push a dummy placeholder for the
+ result. Finally, emit a CCALL insn with an offset pointing to the
+ Addr# just pushed, and a literal field holding the mallocville
+ address of the piece of marshalling code we generate.
+ So, just prior to the CCALL insn, the stack looks like this
+ (growing down, as usual):
+
+ <arg_n>
+ ...
+ <arg_1>
+ Addr# address_of_C_fn
+ <placeholder-for-result#> (must be an unboxed type)
+
+ The interpreter then calls the marshall code mentioned
+ in the CCALL insn, passing it (& <placeholder-for-result#>),
+ that is, the addr of the topmost word in the stack.
+ When this returns, the placeholder will have been
+ filled in. The placeholder is slid down to the sequel
+ depth, and we RETURN.
+
+ This arrangement makes it simple to do f-i-dynamic since the Addr#
+ value is the first arg anyway. It also has the virtue that the
+ stack is GC-understandable at all times.
+
+ The marshalling code is generated specifically for this
+ call site, and so knows exactly the (Haskell) stack
+ offsets of the args, fn address and placeholder. It
+ copies the args to the C stack, calls the stacked addr,
+ and parks the result back in the placeholder. The interpreter
+ calls it as a normal C call, assuming it has a signature
+ void marshall_code ( StgWord* ptr_to_top_of_stack )
+ -}
+ -- resolve static address
+ get_target_info
+ = case target of
+ DynamicTarget
+ -> returnBc (False, panic "ByteCodeGen.generateCCall(dyn)")
+ StaticTarget target
+ -> ioToBc (lookupStaticPtr target) `thenBc` \res ->
+ returnBc (True, res)
+ CasmTarget _
+ -> pprPanic "ByteCodeGen.generateCCall: casm" (ppr ccall_spec)
+ in
+ get_target_info `thenBc` \ (is_static, static_target_addr) ->
+ let
+
+ -- Get the arg reps, zapping the leading Addr# in the dynamic case
+ a_reps -- | trace (showSDoc (ppr a_reps_pushed_RAW)) False = error "???"
+ | is_static = a_reps_pushed_RAW
+ | otherwise = if null a_reps_pushed_RAW
+ then panic "ByteCodeGen.generateCCall: dyn with no args"
+ else tail a_reps_pushed_RAW
+
+ -- push the Addr#
+ (push_Addr, d_after_Addr)
+ | is_static
+ = (toOL [PUSH_UBX (Right static_target_addr) addr_usizeW,
+ PUSH_TAG addr_usizeW],
+ d_after_args + addr_tsizeW)
+ | otherwise -- is already on the stack
+ = (nilOL, d_after_args)
+
+ -- Push the return placeholder. For a call returning nothing,
+ -- this is a VoidRep (tag).
+ r_usizeW = untaggedSizeW r_rep
+ r_tsizeW = taggedSizeW r_rep
+ d_after_r = d_after_Addr + r_tsizeW
+ r_lit = mkDummyLiteral r_rep
+ push_r = (if returns_void
+ then nilOL
+ else unitOL (PUSH_UBX (Left r_lit) r_usizeW))
+ `appOL`
+ unitOL (PUSH_TAG r_usizeW)
+
+ -- generate the marshalling code we're going to call
+ r_offW = 0
+ addr_offW = r_tsizeW
+ arg1_offW = r_tsizeW + addr_tsizeW
+ args_offW = map (arg1_offW +)
+ (init (scanl (+) 0 (map taggedSizeW a_reps)))
+ in
+ ioToBc (mkMarshalCode cconv
+ (r_offW, r_rep) addr_offW
+ (zip args_offW a_reps)) `thenBc` \ addr_of_marshaller ->
+ recordMallocBc addr_of_marshaller `thenBc_`
+ let
+ -- do the call
+ do_call = unitOL (CCALL (castPtr addr_of_marshaller))
+ -- slide and return
+ wrapup = mkSLIDE r_tsizeW (d_after_r - r_tsizeW - s)
+ `snocOL` RETURN r_rep
+ in
+ --trace (show (arg1_offW, args_offW , (map taggedSizeW a_reps) )) (
+ returnBc (
+ push_args `appOL`
+ push_Addr `appOL` push_r `appOL` do_call `appOL` wrapup
+ )
+ --)
+
+
+-- Produce a placeholder Literal for an FFI return slot of the given
+-- PrimRep. The value itself is never inspected; it only reserves a
+-- stack slot of the right shape before the CCALL overwrites it.
+mkDummyLiteral :: PrimRep -> Literal
+mkDummyLiteral pr
+ = case pr of
+ IntRep -> MachInt 0
+ WordRep -> MachWord 0
+ CharRep -> MachChar 0
+ FloatRep -> MachFloat 0
+ DoubleRep -> MachDouble 0
+ AddrRep | taggedSizeW AddrRep == taggedSizeW WordRep -> MachWord 0
+ _ -> moan64 "mkDummyLiteral" (ppr pr)
+
+
+-- Convert (eg)
+-- GHC.Prim.Char# -> GHC.Prim.State# GHC.Prim.RealWorld
+-- -> (# GHC.Prim.State# GHC.Prim.RealWorld, GHC.Prim.Int# #)
+--
+-- to Just IntRep
+-- and check that an unboxed pair is returned wherein the first arg is VoidRep'd.
+--
+-- Alternatively, for call-targets returning nothing, convert
+--
+-- GHC.Prim.Char# -> GHC.Prim.State# GHC.Prim.RealWorld
+-- -> (# GHC.Prim.State# GHC.Prim.RealWorld #)
+--
+-- to Nothing
+
+-- Compute the PrimRep of an FFI call's useful result from the target's
+-- type: Nothing for a (# State# s #) return, Just rep for a
+-- (# State# s, r #) return. Panics (blargh) on any other shape, or if
+-- the result rep is PtrRep (no valid placeholder could be pushed).
+maybe_getCCallReturnRep :: Type -> Maybe PrimRep
+maybe_getCCallReturnRep fn_ty
+ = let (a_tys, r_ty) = splitFunTys (dropForAlls fn_ty)
+ maybe_r_rep_to_go
+ = if isSingleton r_reps then Nothing else Just (r_reps !! 1)
+ (r_tycon, r_reps)
+ = case splitTyConApp_maybe (repType r_ty) of
+ (Just (tyc, tys)) -> (tyc, map typePrimRep tys)
+ Nothing -> blargh
+ ok = ( ( r_reps `lengthIs` 2 && VoidRep == head r_reps)
+ || r_reps == [VoidRep] )
+ && isUnboxedTupleTyCon r_tycon
+ && case maybe_r_rep_to_go of
+ Nothing -> True
+ Just r_rep -> r_rep /= PtrRep
+ -- if it was, it would be impossible
+ -- to create a valid return value
+ -- placeholder on the stack
+ -- Fixed: panic message now matches the actual function name.
+ blargh = pprPanic "maybe_getCCallReturnRep: can't handle:"
+ (pprType fn_ty)
+ in
+ --trace (showSDoc (ppr (a_reps, r_reps))) (
+ if ok then maybe_r_rep_to_go else blargh
+ --)
+
+-- PrimRep of an atomic annotated expression, looking through notes,
+-- type applications and type lambdas; panics on anything non-atomic.
+atomRep expr
+ = case expr of
+ AnnVar v -> typePrimRep (idType v)
+ AnnLit l -> literalPrimRep l
+ AnnNote n b -> atomRep (snd b)
+ AnnApp f (_, AnnType _) -> atomRep (snd f)
+ AnnLam x e | isTyVar x -> atomRep (snd e)
+ nonatomic -> pprPanic "atomRep" (ppr (deAnnotate (undefined,nonatomic)))
+
+-- Compile code which expects an unboxed Int on the top of stack,
+-- (call it i), and pushes the i'th closure in the supplied list
+-- as a consequence.
+implement_tagToId :: [Name] -> BcM BCInstrList
+implement_tagToId names
+ = ASSERT( notNull names )
+ getLabelsBc (length names) `thenBc` \ labels ->
+ getLabelBc `thenBc` \ label_fail ->
+ getLabelBc `thenBc` \ label_exit ->
+ -- Each entry: (own label, label to try next on mismatch, tag, name).
+ -- The last entry falls through to label_fail (CASEFAIL).
+ -- NOTE(review): `bind` looks like plain reverse application used as
+ -- a pseudo-let in this pre-do-notation style -- confirm its definition.
+ zip4 labels (tail labels ++ [label_fail])
+ [0 ..] names `bind` \ infos ->
+ map (mkStep label_exit) infos `bind` \ steps ->
+ returnBc (concatOL steps
+ `appOL`
+ toOL [LABEL label_fail, CASEFAIL, LABEL label_exit])
+ where
+ -- One test: if top-of-stack == n jump on, else push the n'th
+ -- constructor's closure and jump to the common exit.
+ mkStep l_exit (my_label, next_label, n, name_for_n)
+ = toOL [LABEL my_label,
+ TESTEQ_I n next_label,
+ PUSH_G (Left name_for_n),
+ JMP l_exit]
+
+
+-- Make code to unpack the top-of-stack constructor onto the stack,
+-- adding tags for the unboxed bits. Takes the PrimReps of the
+-- constructor's arguments. off_h and off_s are travelling offsets
+-- along the constructor and the stack.
+--
+-- Supposing a constructor in the heap has layout
+--
+-- Itbl p_1 ... p_i np_1 ... np_j
+--
+-- then we add to the stack, shown growing down, the following:
+--
+-- (previous stack)
+-- p_i
+-- ...
+-- p_1
+-- np_j
+-- tag_for(np_j)
+-- ..
+-- np_1
+-- tag_for(np_1)
+--
+-- so that in the common case (ptrs only) a single UNPACK instr can
+-- copy all the payload of the constr onto the stack with no further ado.
+
+mkUnpackCode :: [Id] -- constr args
+ -> Int -- depth before unpack
+ -> BCEnv -- env before unpack
+ -> (BCInstrList, Int, BCEnv)
+mkUnpackCode vars d p
+ = --trace ("mkUnpackCode: " ++ showSDocDebug (ppr vars)
+ -- ++ " --> " ++ show d' ++ "\n" ++ showSDocDebug (ppBCEnv p')
+ -- ++ "\n") (
+ (code_p `appOL` code_np, d', p')
+ --)
+ where
+ -- vars with reps
+ vreps = [(var, typePrimRep (idType var)) | var <- vars]
+
+ -- ptrs and nonptrs, forward
+ vreps_p = filter (isFollowableRep.snd) vreps
+ vreps_np = filter (not.isFollowableRep.snd) vreps
+
+ -- the order in which we will augment the environment
+ vreps_env = reverse vreps_p ++ reverse vreps_np
+
+ -- new env and depth
+ vreps_env_tszsw = map (taggedSizeW.snd) vreps_env
+ p' = addListToFM p (zip (map fst vreps_env)
+ (mkStackOffsets d vreps_env_tszsw))
+ d' = d + sum vreps_env_tszsw
+
+ -- code to unpack the ptrs
+ ptrs_szw = sum (map (untaggedSizeW.snd) vreps_p)
+ code_p | null vreps_p = nilOL
+ | otherwise = unitOL (UNPACK ptrs_szw)
+
+ -- code to unpack the nonptrs
+ vreps_env_uszw = sum (map (untaggedSizeW.snd) vreps_env)
+ code_np = do_nptrs vreps_env_uszw ptrs_szw (reverse (map snd vreps_np))
+ -- off_h walks backwards over the constructor payload (off_h-usizeW),
+ -- off_s walks forwards over the stack (off_s+tsizeW) as each
+ -- non-pointer word is copied out and tagged.
+ do_nptrs off_h off_s [] = nilOL
+ do_nptrs off_h off_s (npr:nprs)
+ | npr `elem` [IntRep, WordRep, FloatRep, DoubleRep,
+ CharRep, AddrRep, StablePtrRep]
+ = approved
+ | otherwise
+ = moan64 "ByteCodeGen.mkUnpackCode" (ppr npr)
+ where
+ approved = UPK_TAG usizeW (off_h-usizeW) off_s `consOL` theRest
+ theRest = do_nptrs (off_h-usizeW) (off_s + tsizeW) nprs
+ usizeW = untaggedSizeW npr
+ tsizeW = taggedSizeW npr
+
+
+-- Push an atom onto the stack, returning suitable code & number of
+-- stack words used. Pushes it either tagged or untagged, since
+-- pushAtom is used to set up the stack prior to copying into the
+-- heap for both APs (requiring tags) and constructors (which don't).
+--
+-- NB this means NO GC between pushing atoms for a constructor and
+-- copying them into the heap. It probably also means that
+-- tail calls MUST be of the form atom{atom ... atom} since if the
+-- expression head was allowed to be arbitrary, there could be GC
+-- in between pushing the arg atoms and completing the head.
+-- (not sure; perhaps the allocate/doYouWantToGC interface means this
+-- isn't a problem; but only if arbitrary graph construction for the
+-- head doesn't leave this BCO, since GC might happen at the start of
+-- each BCO (we consult doYouWantToGC there).
+--
+-- Blargh. JRS 001206
+--
+-- NB (further) that the env p must map each variable to the highest-
+-- numbered stack slot for it. For example, if the stack has depth 4
+-- and we tagged-ly push (v :: Int#) on it, the value will be in stack[4],
+-- the tag in stack[5], the stack will have depth 6, and p must map v to
+-- 5 and not to 4. Stack locations are numbered from zero, so a depth
+-- 6 stack has valid words 0 .. 5.
+
+pushAtom :: Bool -> Int -> BCEnv -> AnnExpr' Id VarSet -> BcM (BCInstrList, Int)
+pushAtom tagged d p (AnnVar v)
+
+ | idPrimRep v == VoidRep
+ = if tagged then returnBc (unitOL (PUSH_TAG 0), 1)
+ else panic "ByteCodeGen.pushAtom(VoidRep,untaggedly)"
+
+ | isFCallId v
+ = pprPanic "pushAtom: shouldn't get an FCallId here" (ppr v)
+
+ | Just primop <- isPrimOpId_maybe v
+ = returnBc (unitOL (PUSH_G (Right primop)), 1)
+
+ | otherwise
+ = let {-
+ str = "\npushAtom " ++ showSDocDebug (ppr v)
+ ++ " :: " ++ showSDocDebug (pprType (idType v))
+ ++ ", depth = " ++ show d
+ ++ ", tagged = " ++ show tagged ++ ", env =\n" ++
+ showSDocDebug (ppBCEnv p)
+ ++ " --> words: " ++ show (snd result) ++ "\n" ++
+ showSDoc (nest 4 (vcat (map ppr (fromOL (fst result)))))
+ ++ "\nendPushAtom " ++ showSDocDebug (ppr v)
+ -}
+
+ result
+ = case lookupBCEnv_maybe p v of
+ -- d-d_v+sz_t-2: offset from current top back to v's words;
+ -- the env maps v to its highest-numbered slot (see the
+ -- comment above this function), hence the sz_t correction.
+ Just d_v -> (toOL (nOfThem nwords (PUSH_L (d-d_v+sz_t-2))), nwords)
+ Nothing -> ASSERT(sz_t == 1) (unitOL (PUSH_G (Left nm)), nwords)
+
+ nm = case isDataConId_maybe v of
+ Just c -> getName c
+ Nothing -> getName v
+
+ sz_t = taggedIdSizeW v
+ sz_u = untaggedIdSizeW v
+ nwords = if tagged then sz_t else sz_u
+ in
+ returnBc result
+
+-- Tagged literal push: push untaggedly, then add the tag word on top.
+pushAtom True d p (AnnLit lit)
+ = pushAtom False d p (AnnLit lit) `thenBc` \ (ubx_code, ubx_size) ->
+ returnBc (ubx_code `snocOL` PUSH_TAG ubx_size, 1 + ubx_size)
+
+pushAtom False d p (AnnLit lit)
+ = case lit of
+ MachLabel fs -> code CodePtrRep
+ MachWord w -> code WordRep
+ MachInt i -> code IntRep
+ MachFloat r -> code FloatRep
+ MachDouble r -> code DoubleRep
+ MachChar c -> code CharRep
+ MachStr s -> pushStr s
+ where
+ code rep
+ = let size_host_words = untaggedSizeW rep
+ in returnBc (unitOL (PUSH_UBX (Left lit) size_host_words),
+ size_host_words)
+
+ pushStr s
+ = let getMallocvilleAddr
+ = case s of
+ FastString _ l ba ->
+ -- sigh, a string in the heap is no good to us.
+ -- We need a static C pointer, since the type of
+ -- a string literal is Addr#. So, copy the string
+ -- into C land and introduce a memory leak
+ -- at the same time.
+ let n = I# l
+ -- CAREFUL! Chars are 32 bits in ghc 4.09+
+ in ioToBc (mallocBytes (n+1)) `thenBc` \ ptr ->
+ recordMallocBc ptr `thenBc_`
+ ioToBc (
+ do memcpy ptr ba (fromIntegral n)
+ pokeByteOff ptr n (fromIntegral (ord '\0') :: Word8)
+ return ptr
+ )
+ other -> panic "ByteCodeGen.pushAtom.pushStr"
+ in
+ getMallocvilleAddr `thenBc` \ addr ->
+ -- Get the addr on the stack, untaggedly
+ returnBc (unitOL (PUSH_UBX (Right addr) 1), 1)
+
+
+
+
+
+-- The remaining cases just strip type applications, notes and
+-- type lambdas and push the underlying atom.
+pushAtom tagged d p (AnnApp f (_, AnnType _))
+ = pushAtom tagged d p (snd f)
+
+pushAtom tagged d p (AnnNote note e)
+ = pushAtom tagged d p (snd e)
+
+pushAtom tagged d p (AnnLam x e)
+ | isTyVar x
+ = pushAtom tagged d p (snd e)
+
+pushAtom tagged d p other
+ = pprPanic "ByteCodeGen.pushAtom"
+ (pprCoreExpr (deAnnotate (undefined, other)))
+
+foreign import "memcpy" memcpy :: Ptr a -> ByteArray# -> CInt -> IO ()