#include "HsVersions.h"
-import List ( partition, sort )
+import List ( sort )
import MachMisc
import MachRegs
-
-import CLabel ( pprCLabel_asm, isAsmTemp, CLabel{-instance Ord-} )
+import Stix ( DestInfo(..) )
+import CLabel ( isAsmTemp, CLabel{-instance Ord-} )
import FiniteMap ( addToFM, lookupFM, FiniteMap )
import Outputable
import Constants ( rESERVED_C_STACK_BYTES )
import Unique ( Unique, Uniquable(..) )
+import FastTypes
+
\end{code}
%************************************************************************
interesting (VirtualRegI _) = True
interesting (VirtualRegF _) = True
interesting (VirtualRegD _) = True
-interesting (RealReg (I# i)) = _IS_TRUE_(freeReg i)
+interesting (RealReg i) = isFastTrue (freeReg i)
#if alpha_TARGET_ARCH
regUsage instr = case instr of
LD B reg addr -> usage (regAddr addr, [reg, t9])
- LD BU reg addr -> usage (regAddr addr, [reg, t9])
+ LD Bu reg addr -> usage (regAddr addr, [reg, t9])
-- LD W reg addr -> usage (regAddr addr, [reg, t9]) : UNUSED
--- LD WU reg addr -> usage (regAddr addr, [reg, t9]) : UNUSED
+-- LD Wu reg addr -> usage (regAddr addr, [reg, t9]) : UNUSED
LD sz reg addr -> usage (regAddr addr, [reg])
LDA reg addr -> usage (regAddr addr, [reg])
LDAH reg addr -> usage (regAddr addr, [reg])
ADD sz src dst -> usageRM src dst
SUB sz src dst -> usageRM src dst
IMUL sz src dst -> usageRM src dst
- IDIV sz src -> mkRU (eax:edx:use_R src) [eax,edx]
+ IMUL64 sd1 sd2 -> mkRU [sd1,sd2] [sd1,sd2]
+ MUL sz src dst -> usageRM src dst
+ IQUOT sz src dst -> usageRM src dst
+ IREM sz src dst -> usageRM src dst
+ QUOT sz src dst -> usageRM src dst
+ REM sz src dst -> usageRM src dst
AND sz src dst -> usageRM src dst
OR sz src dst -> usageRM src dst
XOR sz src dst -> usageRM src dst
CMP sz src dst -> mkRU (use_R src ++ use_R dst) []
SETCC cond op -> mkRU [] (def_W op)
JXX cond lbl -> mkRU [] []
- JMP op -> mkRU (use_R op) []
+ JMP dsts op -> mkRU (use_R op) []
CALL imm -> mkRU [] callClobberedRegs
CLTD -> mkRU [eax] [edx]
NOP -> mkRU [] []
GLDZ dst -> mkRU [] [dst]
GLD1 dst -> mkRU [] [dst]
- GFTOD src dst -> mkRU [src] [dst]
GFTOI src dst -> mkRU [src] [dst]
-
- GDTOF src dst -> mkRU [src] [dst]
GDTOI src dst -> mkRU [src] [dst]
GITOF src dst -> mkRU [src] [dst]
mkRU src dst = RU (regSetFromList (filter interesting src))
(regSetFromList (filter interesting dst))
--- Allow the spiller to de\cide whether or not it can use
--- %edx as a spill temporary.
-hasFixedEDX instr
- = case instr of
- IDIV _ _ -> True
- CLTD -> True
- other -> False
-
#endif {- i386_TARGET_ARCH -}
-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#if sparc_TARGET_ARCH
FxTOy s1 s2 r1 r2 -> usage ([r1], [r2])
-- We assume that all local jumps will be BI/BF. JMP must be out-of-line.
- JMP addr -> usage (regAddr addr, [])
+ JMP dst addr -> usage (regAddr addr, [])
CALL _ n True -> noUsage
CALL _ n False -> usage (argRegs n, callClobberedRegs)
#endif
#if i386_TARGET_ARCH
-- We can use %fake4 and %fake5 safely for float temps.
- -- Int regs are more troublesome. Only %ecx is definitely
- -- available. If there are no division insns, we can use %edx
- -- too. At a pinch, we also could bag %eax if there are no
- -- divisions and no ccalls, but so far we've never encountered
+ -- Int regs are more troublesome. Only %ecx and %edx are
+ -- definitely available.  At a pinch, we also could bag %eax if there
+ -- are no ccalls, but so far we've never encountered
-- a situation where three integer temporaries are necessary.
--
-- Because registers are in short supply on x86, we give the
= let f1 = fake5
f2 = fake4
intregs_avail
- = ecx : if any hasFixedEDX instrs then [] else [edx]
+ = [ecx, edx]
possibilities
= case intregs_avail of
[i1] -> [ [], [i1], [f1], [i1,f1], [f1,f2],
| Next -- falls through to next insn
| Branch CLabel -- unconditional branch to the label
| NextOrBranch CLabel -- conditional branch to the label
+ | MultiFuture [CLabel] -- multiple specific futures
--instance Outputable InsnFuture where
-- ppr NoFuture = text "NoFuture"
JXX _ clbl | isAsmTemp clbl -> NextOrBranch clbl
JXX _ _ -> panic "insnFuture: conditional jump to non-local label"
+ -- If the insn says what its dests are, use em!
+ JMP (DestInfo dsts) _ -> MultiFuture dsts
+
-- unconditional jump to local label
- JMP (OpImm (ImmCLbl clbl)) | isAsmTemp clbl -> Branch clbl
+ JMP NoDestInfo (OpImm (ImmCLbl clbl)) | isAsmTemp clbl -> Branch clbl
-- unconditional jump to non-local label
- JMP lbl -> NoFuture
+ JMP NoDestInfo lbl -> NoFuture
+
+ -- be extra-paranoid
+ JMP _ _ -> panic "insnFuture(x86): JMP wierdness"
boring -> Next
BF other _ (ImmCLbl clbl) -> NextOrBranch clbl
BF other _ _ -> panic "nativeGen(sparc):insnFuture(BF)"
- -- JMP and CALL(terminal) must be out-of-line.
- JMP _ -> NoFuture
- CALL _ _ True -> NoFuture
+ -- CALL(terminal) must be out-of-line. JMP is not out-of-line
+ -- iff it specifies its destinations.
+ JMP NoDestInfo _ -> NoFuture -- n.b. NoFuture == MultiFuture []
+ JMP (DestInfo dsts) _ -> MultiFuture dsts
+
+ CALL _ _ True -> NoFuture
boring -> Next
ADD sz src dst -> patch2 (ADD sz) src dst
SUB sz src dst -> patch2 (SUB sz) src dst
IMUL sz src dst -> patch2 (IMUL sz) src dst
- IDIV sz src -> patch1 (IDIV sz) src
+ IMUL64 sd1 sd2 -> IMUL64 (env sd1) (env sd2)
+ MUL sz src dst -> patch2 (MUL sz) src dst
+ IQUOT sz src dst -> patch2 (IQUOT sz) src dst
+ IREM sz src dst -> patch2 (IREM sz) src dst
+ QUOT sz src dst -> patch2 (QUOT sz) src dst
+ REM sz src dst -> patch2 (REM sz) src dst
AND sz src dst -> patch2 (AND sz) src dst
OR sz src dst -> patch2 (OR sz) src dst
XOR sz src dst -> patch2 (XOR sz) src dst
PUSH sz op -> patch1 (PUSH sz) op
POP sz op -> patch1 (POP sz) op
SETCC cond op -> patch1 (SETCC cond) op
- JMP op -> patch1 JMP op
+ JMP dsts op -> patch1 (JMP dsts) op
GMOV src dst -> GMOV (env src) (env dst)
GLD sz src dst -> GLD sz (lookupAddr src) (env dst)
GLDZ dst -> GLDZ (env dst)
GLD1 dst -> GLD1 (env dst)
- GFTOD src dst -> GFTOD (env src) (env dst)
GFTOI src dst -> GFTOI (env src) (env dst)
-
- GDTOF src dst -> GDTOF (env src) (env dst)
GDTOI src dst -> GDTOI (env src) (env dst)
GITOF src dst -> GITOF (env src) (env dst)
JXX _ _ -> instr
CALL _ -> instr
CLTD -> instr
- _ -> pprPanic "patchInstr(x86)" empty
+ _ -> pprPanic "patchRegs(x86)" empty
where
patch1 insn op = insn (patchOp op)
#if sparc_TARGET_ARCH
patchRegs instr env = case instr of
- LD sz addr reg -> LD sz (fixAddr addr) (env reg)
- ST sz reg addr -> ST sz (env reg) (fixAddr addr)
- ADD x cc r1 ar r2 -> ADD x cc (env r1) (fixRI ar) (env r2)
- SUB x cc r1 ar r2 -> SUB x cc (env r1) (fixRI ar) (env r2)
- AND b r1 ar r2 -> AND b (env r1) (fixRI ar) (env r2)
- ANDN b r1 ar r2 -> ANDN b (env r1) (fixRI ar) (env r2)
- OR b r1 ar r2 -> OR b (env r1) (fixRI ar) (env r2)
- ORN b r1 ar r2 -> ORN b (env r1) (fixRI ar) (env r2)
- XOR b r1 ar r2 -> XOR b (env r1) (fixRI ar) (env r2)
- XNOR b r1 ar r2 -> XNOR b (env r1) (fixRI ar) (env r2)
- SLL r1 ar r2 -> SLL (env r1) (fixRI ar) (env r2)
- SRL r1 ar r2 -> SRL (env r1) (fixRI ar) (env r2)
- SRA r1 ar r2 -> SRA (env r1) (fixRI ar) (env r2)
- SETHI imm reg -> SETHI imm (env reg)
- FABS s r1 r2 -> FABS s (env r1) (env r2)
- FADD s r1 r2 r3 -> FADD s (env r1) (env r2) (env r3)
- FCMP e s r1 r2 -> FCMP e s (env r1) (env r2)
- FDIV s r1 r2 r3 -> FDIV s (env r1) (env r2) (env r3)
- FMOV s r1 r2 -> FMOV s (env r1) (env r2)
- FMUL s r1 r2 r3 -> FMUL s (env r1) (env r2) (env r3)
- FNEG s r1 r2 -> FNEG s (env r1) (env r2)
- FSQRT s r1 r2 -> FSQRT s (env r1) (env r2)
- FSUB s r1 r2 r3 -> FSUB s (env r1) (env r2) (env r3)
- FxTOy s1 s2 r1 r2 -> FxTOy s1 s2 (env r1) (env r2)
- JMP addr -> JMP (fixAddr addr)
+ LD sz addr reg -> LD sz (fixAddr addr) (env reg)
+ ST sz reg addr -> ST sz (env reg) (fixAddr addr)
+ ADD x cc r1 ar r2 -> ADD x cc (env r1) (fixRI ar) (env r2)
+ SUB x cc r1 ar r2 -> SUB x cc (env r1) (fixRI ar) (env r2)
+ AND b r1 ar r2 -> AND b (env r1) (fixRI ar) (env r2)
+ ANDN b r1 ar r2 -> ANDN b (env r1) (fixRI ar) (env r2)
+ OR b r1 ar r2 -> OR b (env r1) (fixRI ar) (env r2)
+ ORN b r1 ar r2 -> ORN b (env r1) (fixRI ar) (env r2)
+ XOR b r1 ar r2 -> XOR b (env r1) (fixRI ar) (env r2)
+ XNOR b r1 ar r2 -> XNOR b (env r1) (fixRI ar) (env r2)
+ SLL r1 ar r2 -> SLL (env r1) (fixRI ar) (env r2)
+ SRL r1 ar r2 -> SRL (env r1) (fixRI ar) (env r2)
+ SRA r1 ar r2 -> SRA (env r1) (fixRI ar) (env r2)
+ SETHI imm reg -> SETHI imm (env reg)
+ FABS s r1 r2 -> FABS s (env r1) (env r2)
+ FADD s r1 r2 r3 -> FADD s (env r1) (env r2) (env r3)
+ FCMP e s r1 r2 -> FCMP e s (env r1) (env r2)
+ FDIV s r1 r2 r3 -> FDIV s (env r1) (env r2) (env r3)
+ FMOV s r1 r2 -> FMOV s (env r1) (env r2)
+ FMUL s r1 r2 r3 -> FMUL s (env r1) (env r2) (env r3)
+ FNEG s r1 r2 -> FNEG s (env r1) (env r2)
+ FSQRT s r1 r2 -> FSQRT s (env r1) (env r2)
+ FSUB s r1 r2 r3 -> FSUB s (env r1) (env r2) (env r3)
+ FxTOy s1 s2 r1 r2 -> FxTOy s1 s2 (env r1) (env r2)
+ JMP dsts addr -> JMP dsts (fixAddr addr)
_ -> instr
where
fixAddr (AddrRegReg r1 r2) = AddrRegReg (env r1) (env r2)
Spill to memory, and load it back...
JRS, 000122: on x86, don't spill directly above the stack pointer,
-since some insn sequences (int <-> conversions, and eventually
-StixInteger) use this as a temp location. Leave 8 words (ie, 64 bytes
-for a 64-bit arch) of slop.
+since some insn sequences (int <-> conversions) use this as a temp
+location. Leave 8 words (ie, 64 bytes for a 64-bit arch) of slop.
\begin{code}
spillSlotSize :: Int
= pprPanic "spillSlotToOffset:"
(text "invalid spill location: " <> int slot)
-vregToSpillSlot :: FiniteMap Unique Int -> Unique -> Int
+vregToSpillSlot :: FiniteMap VRegUnique Int -> VRegUnique -> Int
vregToSpillSlot vreg_to_slot_map u
= case lookupFM vreg_to_slot_map u of
Just xx -> xx
- Nothing -> pprPanic "vregToSpillSlot: unmapped vreg" (ppr u)
+ Nothing -> pprPanic "vregToSpillSlot: unmapped vreg" (pprVRegUnique u)
-spillReg, loadReg :: FiniteMap Unique Int -> Int -> Reg -> Reg -> Instr
+spillReg, loadReg :: FiniteMap VRegUnique Int -> Int -> Reg -> Reg -> Instr
spillReg vreg_to_slot_map delta dyn vreg
| isVirtualReg vreg
- = let slot_no = vregToSpillSlot vreg_to_slot_map (getUnique vreg)
+ = let slot_no = vregToSpillSlot vreg_to_slot_map (getVRegUnique vreg)
off = spillSlotToOffset slot_no
in
{-Alpha: spill below the stack pointer (?)-}
loadReg vreg_to_slot_map delta vreg dyn
| isVirtualReg vreg
- = let slot_no = vregToSpillSlot vreg_to_slot_map (getUnique vreg)
+ = let slot_no = vregToSpillSlot vreg_to_slot_map (getVRegUnique vreg)
off = spillSlotToOffset slot_no
in
IF_ARCH_alpha( LD sz dyn (spRel (- (off `div` 8)))