1 {-# OPTIONS -fno-warn-missing-signatures #-}
2 -- | Clean out unneeded spill\/reload instrs
4 -- * Handling of join points
8 -- RELOAD SLOT(0), %r1 RELOAD SLOT(0), %r1
13 -- RELOAD SLOT(0), %r1
17 -- So long as %r1 hasn't been written to in A, B or C then we don't need the
20 -- What we really care about here is that on the entry to B3, %r1 will always
21 -- have the same value that is in SLOT(0) (ie, %r1 is _valid_)
23 -- This also works if the reloads in B1\/B2 were spills instead, because
24 -- spilling %r1 to a slot makes that slot have the same value as %r1.
26 module RegAlloc.Graph.SpillClean (
31 import RegAlloc.Liveness
47 import qualified Data.Map as Map
48 import qualified Data.Set as Set
55 -- | Clean out unneeded spill\/reloads from this top level thing.
-- NOTE(review): this excerpt is missing lines (embedded line numbers jump),
-- so the binder and constraint line of this signature are not visible here.
58 => LiveCmmTop instr -> LiveCmmTop instr
-- Run the iterative cleaner starting at pass 0, in a fresh CleanM state.
61 = evalState (cleanSpin 0 cmm) initCleanS
63 -- | do one pass of cleaning
-- Each pass runs a forward sweep (cleanBlockForward) then a backward sweep
-- (cleanTopBackward), and iterates until a fixed point is reached.
-- NOTE(review): interior lines of this definition are missing from this
-- excerpt (the embedded line numbers jump).
68 -> CleanM (LiveCmmTop instr)
71 cleanSpin spinCount code
72 = do jumpValid <- gets sJumpValid
79 $ cleanSpin' spinCount code
82 cleanSpin spinCount code
84 -- init count of cleaned spills\/reloads
-- reset the per-pass accumulators before cleaning
86 { sCleanedSpillsAcc = 0
87 , sCleanedReloadsAcc = 0
88 , sReloadedBy = emptyUFM }
90 code_forward <- mapBlockTopM cleanBlockForward code
91 code_backward <- cleanTopBackward code_forward
93 -- During the cleaning of each block we collected information about what regs
94 -- were valid across each jump. Based on this, work out whether it will be
95 -- safe to erase reloads after join points for the next pass.
98 -- remember how many spills\/reloads we cleaned in this pass
99 spills <- gets sCleanedSpillsAcc
100 reloads <- gets sCleanedReloadsAcc
-- push this pass's counts onto the front of the history list
102 { sCleanedCount = (spills, reloads) : sCleanedCount s }
104 -- if nothing was cleaned in this pass or the last one
105 -- then we're done and it's time to bail out
-- two consecutive all-zero passes means a fixed point has been reached
106 cleanedCount <- gets sCleanedCount
107 if take 2 cleanedCount == [(0, 0), (0, 0)]
110 -- otherwise go around again
111 else cleanSpin (spinCount + 1) code_backward
114 -- | Clean one basic block
-- Seeds the forward walk with the association known to hold on entry to
-- this block (computed from the previous pass's jump information).
117 => LiveBasicBlock instr
118 -> CleanM (LiveBasicBlock instr)
120 cleanBlockForward (BasicBlock blockId instrs)
122 -- see if we have a valid association for the entry to this block
123 jumpValid <- gets sJumpValid
124 let assoc = case lookupUFM jumpValid blockId of
-- no join-point info for this block: start with no associations
126 Nothing -> emptyAssoc
128 instrs_reload <- cleanForward blockId assoc [] instrs
129 return $ BasicBlock blockId instrs_reload
133 -- | Clean out unneeded reload instructions.
134 -- Walking forwards across the code
135 -- On a reload, if we know a reg already has the same value as a slot
136 -- then we don't need to do the reload.
-- NOTE(review): several guards and lines of this definition are missing
-- from this excerpt (the embedded line numbers jump).
140 => BlockId -- ^ the block that we're currently in
141 -> Assoc Store -- ^ two store locations are associated if they have the same value
142 -> [LiveInstr instr] -- ^ acc
143 -> [LiveInstr instr] -- ^ instrs to clean (in backwards order)
144 -> CleanM [LiveInstr instr] -- ^ cleaned instrs (in forward order)
146 cleanForward _ _ acc []
149 -- write out live range joins via spill slots to just a spill and a reg-reg move
150 -- hopefully the spill will also be cleaned in the next pass
152 cleanForward blockId assoc acc (li1 : li2 : instrs)
-- a SPILL immediately followed by a RELOAD; the reload is replaced by a
-- reg-reg move below. NOTE(review): the guard requiring slot1 == slot2 is
-- presumably on a missing line — confirm against the full source.
154 | LiveInstr (SPILL reg1 slot1) _ <- li1
155 , LiveInstr (RELOAD slot2 reg2) _ <- li2
158 modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
159 cleanForward blockId assoc acc
160 (li1 : LiveInstr (mkRegRegMoveInstr reg1 reg2) Nothing : instrs)
163 cleanForward blockId assoc acc (li@(LiveInstr i1 _) : instrs)
164 | Just (r1, r2) <- takeRegRegMoveInstr i1
166 -- erase any left over nop reg reg moves while we're here
167 -- this will also catch any nop moves that the "write out live range joins" case above
-- may have introduced
169 then cleanForward blockId assoc acc instrs
171 -- if r1 has the same value as some slots and we copy r1 to r2,
172 -- then r2 is now associated with those slots instead
173 else do let assoc' = addAssoc (SReg r1) (SReg r2)
177 cleanForward blockId assoc' (li : acc) instrs
180 cleanForward blockId assoc acc (li : instrs)
182 -- update association due to the spill
-- the spill makes reg and slot hold the same value; any previous
-- associations of the slot are dropped first
183 | LiveInstr (SPILL reg slot) _ <- li
184 = let assoc' = addAssoc (SReg reg) (SSlot slot)
185 $ delAssoc (SSlot slot)
187 in cleanForward blockId assoc' (li : acc) instrs
189 -- clean a reload instr
190 | LiveInstr (RELOAD{}) _ <- li
191 = do (assoc', mli) <- cleanReload blockId assoc li
-- Nothing: the reload was erased entirely; Just: keep the (possibly
-- rewritten) instruction
193 Nothing -> cleanForward blockId assoc' acc instrs
194 Just li' -> cleanForward blockId assoc' (li' : acc) instrs
196 -- remember the association over a jump
197 | LiveInstr instr _ <- li
198 , targets <- jumpDestsOfInstr instr
-- record the current association for each jump target, to be collated
-- into per-block entry info for the next pass
200 = do mapM_ (accJumpValid assoc) targets
201 cleanForward blockId assoc (li : acc) instrs
203 -- writing to a reg changes its value.
204 | LiveInstr instr _ <- li
205 , RU _ written <- regUsageOfInstr instr
206 = let assoc' = foldr delAssoc assoc (map SReg $ nub written)
207 in cleanForward blockId assoc' (li : acc) instrs
211 -- | Try and rewrite a reload instruction to something more pleasing
-- Returns the updated association and Nothing if the reload was erased,
-- or Just the (possibly rewritten) instruction to keep.
218 -> CleanM (Assoc Store, Maybe (LiveInstr instr))
220 cleanReload blockId assoc li@(LiveInstr (RELOAD slot reg) _)
222 -- if the reg we're reloading already has the same value as the slot
223 -- then we can erase the instruction outright
224 | elemAssoc (SSlot slot) (SReg reg) assoc
225 = do modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
226 return (assoc, Nothing)
228 -- if we can find another reg with the same value as this slot then
229 -- do a move instead of a reload.
230 | Just reg2 <- findRegOfSlot assoc slot
231 = do modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
-- after the move, reg holds the same value as reg2
233 let assoc' = addAssoc (SReg reg) (SReg reg2)
234 $ delAssoc (SReg reg)
237 return (assoc', Just $ LiveInstr (mkRegRegMoveInstr reg2 reg) Nothing)
239 -- gotta keep this instr
241 = do -- update the association
242 let assoc' = addAssoc (SReg reg) (SSlot slot) -- doing the reload makes reg and slot the same value
243 $ delAssoc (SReg reg) -- reg value changes on reload
246 -- remember that this block reloads from this slot
247 accBlockReloadsSlot blockId slot
249 return (assoc', Just li)
-- fallback equation for non-RELOAD instructions.
-- NOTE(review): the LHS of this equation is on a line missing from this
-- excerpt.
252 = panic "RegSpillClean.cleanReload: unhandled instr"
255 -- | Clean out unneeded spill instructions.
257 -- If there were no reloads from a slot between a spill and the last one
258 -- then the slot was never read and we don't need the spill.
262 -- SPILL r3 -> s1 <--- don't need this spill
267 -- "slots which were spilled to but not reloaded from yet"
269 -- Walking backwards across the code:
270 -- a) On a reload from a slot, remove it from the set.
272 -- b) On a spill from a slot
273 -- If the slot is in set then we can erase the spill,
274 -- because it won't be reloaded from until after the next spill.
277 -- keep the spill and add the slot to the set
279 -- TODO: This is mostly inter-block
280 -- we should really be updating the noReloads set as we cross jumps also.
282 -- TODO: generate noReloads from liveSlotsOnEntry
287 -> CleanM (LiveCmmTop instr)
-- Only CmmProcs carry code; the slot-liveness map is pulled out of the
-- proc's LiveInfo and threaded through the per-block cleaner.
-- NOTE(review): the equation head and any other cases are on lines missing
-- from this excerpt.
294 CmmProc info label params sccs
295 | LiveInfo _ _ _ liveSlotsOnEntry <- info
296 -> do sccs' <- mapM (mapSCCM (cleanBlockBackward liveSlotsOnEntry)) sccs
297 return $ CmmProc info label params sccs'
-- | Run the backward spill-cleaning walk over one basic block, starting
-- with an empty "spilled but not yet reloaded" slot set.
302 => Map BlockId (Set Int)
303 -> LiveBasicBlock instr
304 -> CleanM (LiveBasicBlock instr)
306 cleanBlockBackward liveSlotsOnEntry (BasicBlock blockId instrs)
307 = do instrs_spill <- cleanBackward liveSlotsOnEntry emptyUniqSet [] instrs
308 return $ BasicBlock blockId instrs_spill
-- NOTE(review): the binder line of this signature and some interior lines
-- are missing from this excerpt (the embedded line numbers jump).
314 => Map BlockId (Set Int) -- ^ Slots live on entry to each block
315 -> UniqSet Int -- ^ slots that have been spilled, but not reloaded from
316 -> [LiveInstr instr] -- ^ acc
317 -> [LiveInstr instr] -- ^ instrs to clean (in forwards order)
318 -> CleanM [LiveInstr instr] -- ^ cleaned instrs (in backwards order)
-- cleanBackward just fetches the reload map once and delegates to
-- cleanBackward', which does the real walk.
321 cleanBackward liveSlotsOnEntry noReloads acc lis
322 = do reloadedBy <- gets sReloadedBy
323 cleanBackward' liveSlotsOnEntry reloadedBy noReloads acc lis
325 cleanBackward' _ _ _ acc []
328 cleanBackward' liveSlotsOnEntry reloadedBy noReloads acc (li : instrs)
330 -- if nothing ever reloads from this slot then we don't need the spill
331 | LiveInstr (SPILL _ slot) _ <- li
332 , Nothing <- lookupUFM reloadedBy (SSlot slot)
333 = do modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
334 cleanBackward liveSlotsOnEntry noReloads acc instrs
336 | LiveInstr (SPILL _ slot) _ <- li
337 = if elementOfUniqSet slot noReloads
339 -- we can erase this spill because the slot won't be read until after the next one
341 modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
342 cleanBackward liveSlotsOnEntry noReloads acc instrs
345 -- this slot is being spilled to, but we haven't seen any reloads yet.
-- keep the spill and mark the slot as not-reloaded-from
346 let noReloads' = addOneToUniqSet noReloads slot
347 cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs
349 -- if we reload from a slot then it's no longer unused
350 | LiveInstr (RELOAD slot _) _ <- li
351 , noReloads' <- delOneFromUniqSet noReloads slot
352 = cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs
354 -- If a slot is live in a jump target then assume it's reloaded there.
355 -- TODO: A real dataflow analysis would do a better job here.
356 -- If the target block _ever_ used the slot then we assume it always does,
357 -- but if those reloads are cleaned the slot liveness map doesn't get updated.
358 | LiveInstr instr _ <- li
359 , targets <- jumpDestsOfInstr instr
-- collect the slots live on entry to any target, and treat them as
-- reloaded: remove them from the noReloads set
361 let slotsReloadedByTargets
364 $ map (flip Map.lookup liveSlotsOnEntry)
367 let noReloads' = foldl' delOneFromUniqSet noReloads
368 $ Set.toList slotsReloadedByTargets
370 cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs
372 -- some other instruction
374 = cleanBackward liveSlotsOnEntry noReloads (li : acc) instrs
377 -- collateJoinPoints:
379 -- | combine the associations from all the inward control flow edges.
-- An association is valid at a join point only if it holds on every
-- in-edge, so the per-edge Assocs in sJumpValidAcc are intersected;
-- the accumulator is emptied ready for the next pass.
381 collateJoinPoints :: CleanM ()
384 { sJumpValid = mapUFM intersects (sJumpValidAcc s)
385 , sJumpValidAcc = emptyUFM }
387 intersects :: [Assoc Store] -> Assoc Store
-- no recorded in-edges: nothing is known to be valid
388 intersects [] = emptyAssoc
389 intersects assocs = foldl1' intersectAssoc assocs
392 -- | See if we have a reg with the same value as this slot in the association table.
-- Takes the transitive closure from the slot and looks for any register
-- store within it.
-- NOTE(review): the result lines of this definition (and any fallback
-- equation) are missing from this excerpt.
393 findRegOfSlot :: Assoc Store -> Int -> Maybe Reg
394 findRegOfSlot assoc slot
395 | close <- closeAssoc (SSlot slot) assoc
396 , Just (SReg reg) <- find isStoreReg $ uniqSetToList close
-- | The cleaner runs in a pure State monad over CleanS.
404 type CleanM = State CleanS
-- NOTE(review): the 'data CleanS' header line and the 'initCleanS' binder
-- are on lines missing from this excerpt.
407 { -- regs which are valid at the start of each block.
408 sJumpValid :: UniqFM (Assoc Store)
410 -- collecting up what regs were valid across each jump.
411 -- in the next pass we can collate these and write the results
413 , sJumpValidAcc :: UniqFM [Assoc Store]
415 -- map of (slot -> blocks which reload from this slot)
416 -- used to decide whether a slot spilled to will ever be
417 -- reloaded from on this path.
418 , sReloadedBy :: UniqFM [BlockId]
420 -- spills\/reloads cleaned each pass (latest at front)
421 , sCleanedCount :: [(Int, Int)]
423 -- spills\/reloads that have been cleaned in this pass so far.
424 , sCleanedSpillsAcc :: Int
425 , sCleanedReloadsAcc :: Int }
-- initial state: nothing known, nothing cleaned
430 { sJumpValid = emptyUFM
431 , sJumpValidAcc = emptyUFM
433 , sReloadedBy = emptyUFM
437 , sCleanedSpillsAcc = 0
438 , sCleanedReloadsAcc = 0 }
441 -- | Remember the associations before a jump
-- Accumulates one Assoc per in-edge of the target block into
-- sJumpValidAcc; collateJoinPoints intersects them between passes.
442 accJumpValid :: Assoc Store -> BlockId -> CleanM ()
443 accJumpValid assocs target
445 sJumpValidAcc = addToUFM_C (++)
-- | Record that this block contains a reload from this slot, so the
-- backward pass knows the slot's spills are not dead.
451 accBlockReloadsSlot :: BlockId -> Slot -> CleanM ()
452 accBlockReloadsSlot blockId slot
454 sReloadedBy = addToUFM_C (++)
461 -- A store location can be a stack slot or a register
-- NOTE(review): the 'data Store' declaration itself is on lines missing
-- from this excerpt, as is the body of isStoreReg.
467 -- | Check if this is a reg store
468 isStoreReg :: Store -> Bool
474 -- spill cleaning is only done once all virtuals have been allocated to realRegs
-- Uniques for Stores so they can key UniqFM\/UniqSet: real regs and slots
-- are mapped into disjoint unique ranges via the mkReg*Unique helpers.
476 instance Uniquable Store where
478 | RegReal (RealRegSingle i) <- r
479 = mkRegSingleUnique i
481 | RegReal (RealRegPair r1 r2) <- r
-- pack the pair into one Int; 65535 keeps the two halves disjoint
482 = mkRegPairUnique (r1 * 65535 + r2)
-- virtual regs should have been allocated away before spill cleaning
485 = error "RegSpillClean.getUnique: found virtual reg during spill clean, only real regs expected."
487 getUnique (SSlot i) = mkRegSubUnique i -- [SLPJ] I hope "SubUnique" is ok
-- Pretty printing for debug output.
-- NOTE(review): the ppr equation for SReg is on a line missing from this
-- excerpt.
489 instance Outputable Store where
490 ppr (SSlot i) = text "slot" <> int i
495 -- Association graphs.
496 -- In the spill cleaner, two store locations are associated if they are known
497 -- to hold the same value.
-- Represented as an undirected graph: each node maps to the set of its
-- neighbours.
499 type Assoc a = UniqFM (UniqSet a)
501 -- | an empty association
502 emptyAssoc :: Assoc a
503 emptyAssoc = emptyUFM
506 -- | add an association between these two things
-- Inserts the edge in both directions, since the relation is symmetric.
-- NOTE(review): the equation head and the final 'in' result line are on
-- lines missing from this excerpt.
507 addAssoc :: Uniquable a
508 => a -> a -> Assoc a -> Assoc a
511 = let m1 = addToUFM_C unionUniqSets m a (unitUniqSet b)
512 m2 = addToUFM_C unionUniqSets m1 b (unitUniqSet a)
516 -- | delete all associations to a node
-- Removes the node's own entry, then strips the reverse edges from each
-- former neighbour via delAssoc1.
-- NOTE(review): the equation head and the otherwise-case are on lines
-- missing from this excerpt.
517 delAssoc :: (Outputable a, Uniquable a)
518 => a -> Assoc a -> Assoc a
521 | Just aSet <- lookupUFM m a
522 , m1 <- delFromUFM m a
523 = foldUniqSet (\x m -> delAssoc1 x a m) m1 aSet
528 -- | delete a single association edge (a -> b)
-- NOTE(review): the equation head and the no-entry fallback are on lines
-- missing from this excerpt.
529 delAssoc1 :: Uniquable a
530 => a -> a -> Assoc a -> Assoc a
533 | Just aSet <- lookupUFM m a
534 = addToUFM m a (delOneFromUniqSet aSet b)
539 -- | check if these two things are associated
-- Membership is via the transitive closure, not just direct edges.
-- NOTE(review): the equation head is on a line missing from this excerpt.
540 elemAssoc :: (Outputable a, Uniquable a)
541 => a -> a -> Assoc a -> Bool
544 = elementOfUniqSet b (closeAssoc a m)
546 -- | find the refl. trans. closure of the association from this point
-- Standard worklist graph traversal: 'visited' accumulates the closure,
-- 'toVisit' is the frontier.
-- NOTE(review): several lines of this definition are missing from this
-- excerpt (the embedded line numbers jump).
547 closeAssoc :: (Outputable a, Uniquable a)
548 => a -> Assoc a -> UniqSet a
551 = closeAssoc' assoc emptyUniqSet (unitUniqSet a)
553 closeAssoc' assoc visited toVisit
554 = case uniqSetToList toVisit of
556 -- nothing else to visit, we're done
561 -- we've already seen this node
562 | elementOfUniqSet x visited
563 -> closeAssoc' assoc visited (delOneFromUniqSet toVisit x)
565 -- haven't seen this node before,
566 -- remember to visit all its neighbors
569 = case lookupUFM assoc x of
570 Nothing -> emptyUniqSet
574 (addOneToUniqSet visited x)
575 (unionUniqSets toVisit neighbors)
-- | Intersect two association graphs: keep only edges present in both.
-- NOTE(review): the name\/constraint line of this signature and the
-- equation head are on lines missing from this excerpt.
580 => Assoc a -> Assoc a -> Assoc a
583 = intersectUFM_C (intersectUniqSets) a b