author     benl@ouroborus.net <unknown>     2010-10-13 01:54:14 +0000
committer  benl@ouroborus.net <unknown>     2010-10-13 01:54:14 +0000
commit     09732d3c8ba3b8ab3ebfc5596cc8fdd7f2bb100f (patch)
tree       4d64c2197aeb2fe7df8369724cb87e20f4bdb3e8 /compiler/nativeGen/RegAlloc/Graph
parent     2ea237998122126f092e3d39482b2f0a95fe0a99 (diff)
RegAlloc: Track slot liveness over jumps in spill cleaner
Diffstat (limited to 'compiler/nativeGen/RegAlloc/Graph')
-rw-r--r--  compiler/nativeGen/RegAlloc/Graph/Spill.hs       | 121
-rw-r--r--  compiler/nativeGen/RegAlloc/Graph/SpillClean.hs  |  94
-rw-r--r--  compiler/nativeGen/RegAlloc/Graph/SpillCost.hs   |   2
3 files changed, 168 insertions(+), 49 deletions(-)
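
The summary line above amounts to this: the spiller now records, for every basic block, which stack slots may still be read on entry to that block, and the spill cleaner's backward pass consults that map when it crosses a jump instead of giving up at block boundaries. Below is a small illustrative value of such a map, with plain Ints standing in for GHC's BlockId and slot types; this is a sketch for orientation, not code from the patch.

    import           Data.Map (Map)
    import qualified Data.Map as Map
    import           Data.Set (Set)
    import qualified Data.Set as Set

    type BlockId = Int   -- stand-in for GHC's BlockId
    type Slot    = Int   -- stack slot number

    -- Example contents: no slots are live on entry to block 1, while block 2
    -- may reload slots 3 and 5. A SPILL to slot 3 just before a jump to
    -- block 2 must therefore be kept, while a SPILL to slot 7 before the
    -- same jump is a candidate for erasure, since no target of that jump
    -- can read it.
    liveSlotsOnEntryExample :: Map BlockId (Set Slot)
    liveSlotsOnEntryExample = Map.fromList
        [ (1, Set.empty)
        , (2, Set.fromList [3, 5]) ]
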
diff --git a/compiler/nativeGen/RegAlloc/Graph/Spill.hs b/compiler/nativeGen/RegAlloc/Graph/Spill.hs
index 10bd669054..d82e8a8fec 100644
--- a/compiler/nativeGen/RegAlloc/Graph/Spill.hs
+++ b/compiler/nativeGen/RegAlloc/Graph/Spill.hs
@@ -1,18 +1,19 @@
-
{-# OPTIONS -fno-warn-missing-signatures #-}
+-- | When there aren't enough registers to hold all the vregs, we have to spill some of those
+-- vregs to slots on the stack. This module is used to modify the code to use those slots.
+--
module RegAlloc.Graph.Spill (
regSpill,
SpillStats(..),
accSpillSL
)
-
where
-
import RegAlloc.Liveness
import Instruction
import Reg
-import Cmm
+import Cmm hiding (RegSet)
+import BlockId
import State
import Unique
@@ -22,15 +23,21 @@ import UniqSupply
import Outputable
import Data.List
+import Data.Maybe
+import Data.Map (Map)
+import Data.Set (Set)
+import qualified Data.Map as Map
+import qualified Data.Set as Set
--- | Spill all these virtual regs to memory
--- TODO: see if we can split some of the live ranges instead of just globally
--- spilling the virtual reg.
+-- | Spill all these virtual regs to stack slots.
+--
+-- TODO: See if we can split some of the live ranges instead of just globally
+-- spilling the virtual reg. This might make the spill cleaner's job easier.
--
--- TODO: On ciscy x86 and x86_64 we don't nessesarally have to add a mov instruction
--- when making spills. If an instr is using a spilled virtual we may be able to
--- address the spill slot directly.
+-- TODO: On CISCy x86 and x86_64 we don't necessarily have to add a mov instruction
+-- when making spills. If an instr is using a spilled virtual we may be able to
+-- address the spill slot directly.
--
regSpill
:: Instruction instr
@@ -38,7 +45,7 @@ regSpill
-> UniqSet Int -- ^ available stack slots
-> UniqSet VirtualReg -- ^ the regs to spill
-> UniqSM
- ([LiveCmmTop instr] -- code will spill instructions
+ ([LiveCmmTop instr] -- code with SPILL and RELOAD meta instructions added.
, UniqSet Int -- left over slots
, SpillStats ) -- stats about what happened during spilling
@@ -62,7 +69,7 @@ regSpill code slotsFree regs
-- run the spiller on all the blocks
let (code', state') =
- runState (mapM (mapBlockTopM (regSpill_block regSlotMap)) code)
+ runState (mapM (regSpill_top regSlotMap) code)
(initSpillS us)
return ( code'
@@ -70,15 +77,84 @@ regSpill code slotsFree regs
, makeSpillStats state')
+-- | Spill some registers to stack slots in a top-level thing.
+regSpill_top
+ :: Instruction instr
+ => RegMap Int -- ^ map of vregs to slots they're being spilled to.
+ -> LiveCmmTop instr -- ^ the top level thing.
+ -> SpillM (LiveCmmTop instr)
+
+regSpill_top regSlotMap cmm
+ = case cmm of
+ CmmData{}
+ -> return cmm
+
+ CmmProc info label params sccs
+ | LiveInfo static firstId mLiveVRegsOnEntry liveSlotsOnEntry <- info
+ -> do
+ -- We should only be passed Cmms with the liveness maps filled in, but we'll
+ -- create empty ones if they're not there just in case.
+ let liveVRegsOnEntry = fromMaybe emptyBlockEnv mLiveVRegsOnEntry
+
+ -- The liveVRegsOnEntry map contains the set of vregs that are live on entry to
+ -- each basic block. If we spill one of those vregs we remove it from that
+ -- set and add the corresponding slot number to the liveSlotsOnEntry set.
+ -- The spill cleaner needs this information to erase unneeded spill and
+ -- reload instructions after we've done a successful allocation.
+ let liveSlotsOnEntry' :: Map BlockId (Set Int)
+ liveSlotsOnEntry'
+ = foldBlockEnv patchLiveSlot liveSlotsOnEntry liveVRegsOnEntry
+
+ let info'
+ = LiveInfo static firstId
+ (Just liveVRegsOnEntry)
+ liveSlotsOnEntry'
+
+ -- Apply the spiller to all the basic blocks in the CmmProc.
+ sccs' <- mapM (mapSCCM (regSpill_block regSlotMap)) sccs
+
+ return $ CmmProc info' label params sccs'
+
+ where -- | Given a BlockId and the set of registers live in it,
+ -- if registers in this block are being spilled to stack slots,
+ -- then record the fact that these slots are now live in those blocks
+ -- in the given slotmap.
+ patchLiveSlot :: BlockId -> RegSet -> Map BlockId (Set Int) -> Map BlockId (Set Int)
+ patchLiveSlot blockId regsLive slotMap
+ = let curSlotsLive = fromMaybe Set.empty
+ $ Map.lookup blockId slotMap
+
+ moreSlotsLive = Set.fromList
+ $ catMaybes
+ $ map (lookupUFM regSlotMap)
+ $ uniqSetToList regsLive
+
+ slotMap' = Map.insert blockId (Set.union curSlotsLive moreSlotsLive) slotMap
+
+ in slotMap'
+
+
+
+-- | Spill some registers to stack slots in a basic block.
+regSpill_block
+ :: Instruction instr
+ => UniqFM Int -- ^ map of vregs to slots they're being spilled to.
+ -> LiveBasicBlock instr
+ -> SpillM (LiveBasicBlock instr)
+
regSpill_block regSlotMap (BasicBlock i instrs)
= do instrss' <- mapM (regSpill_instr regSlotMap) instrs
return $ BasicBlock i (concat instrss')
+-- | Spill some registers to stack slots in a single instruction. If the instruction
+-- uses registers that need to be spilled, then it is prefixed (or postfixed) with
+-- the appropriate RELOAD or SPILL meta instructions.
regSpill_instr
:: Instruction instr
- => UniqFM Int
- -> LiveInstr instr -> SpillM [LiveInstr instr]
+ => UniqFM Int -- ^ map of vregs to slots they're being spilled to.
+ -> LiveInstr instr
+ -> SpillM [LiveInstr instr]
regSpill_instr _ li@(LiveInstr _ Nothing)
= do return [li]
@@ -174,7 +250,7 @@ spillModify regSlotMap instr reg
--- | rewrite uses of this virtual reg in an instr to use a different virtual reg
+-- | Rewrite uses of this virtual reg in an instr to use a different virtual reg
patchInstr
:: Instruction instr
=> Reg -> instr -> SpillM (instr, Reg)
@@ -198,13 +274,14 @@ patchReg1 old new instr
in patchRegsOfInstr instr patchF
-------------------------------------------------------
--- Spiller monad
-
+-- Spiller monad --------------------------------------------------------------
data SpillS
= SpillS
- { stateUS :: UniqSupply
- , stateSpillSL :: UniqFM (Reg, Int, Int) } -- ^ spilled reg vs number of times vreg was loaded, stored
+ { -- | unique supply for generating fresh vregs.
+ stateUS :: UniqSupply
+
+ -- | spilled vreg vs the number of times it was loaded, stored
+ , stateSpillSL :: UniqFM (Reg, Int, Int) }
initSpillS uniqueSupply
= SpillS
@@ -226,9 +303,7 @@ accSpillSL (r1, s1, l1) (_, s2, l2)
= (r1, s1 + s2, l1 + l2)
-----------------------------------------------------
--- Spiller stats
-
+-- Spiller stats --------------------------------------------------------------
data SpillStats
= SpillStats
{ spillStoreLoad :: UniqFM (Reg, Int, Int) }
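
An aside on the Spill.hs changes above: the new regSpill_top derives the per-block slot liveness by folding patchLiveSlot over the vreg liveness map. Here is a minimal standalone sketch of that step, using plain Data.Map/Data.Set and Int stand-ins for GHC's BlockId, vreg and slot types; the name patchLiveSlots and its parameters are illustrative, not the patch's API.

    import           Data.Map (Map)
    import qualified Data.Map as Map
    import           Data.Maybe (mapMaybe)
    import           Data.Set (Set)
    import qualified Data.Set as Set

    type BlockId = Int
    type VReg    = Int
    type Slot    = Int

    -- Fold the vreg liveness map into the slot liveness map: a slot becomes
    -- live on entry to a block whenever a vreg live there is spilled to it.
    patchLiveSlots
        :: Map VReg Slot            -- which slot each spilled vreg is assigned
        -> Map BlockId (Set VReg)   -- vregs live on entry to each block
        -> Map BlockId (Set Slot)   -- existing slot liveness, to be extended
        -> Map BlockId (Set Slot)
    patchLiveSlots regSlotMap liveVRegsOnEntry liveSlotsOnEntry
        = Map.foldrWithKey patchOne liveSlotsOnEntry liveVRegsOnEntry
      where
        patchOne blockId regs acc
            = Map.insertWith Set.union blockId (slotsOf regs) acc

        slotsOf regs
            = Set.fromList (mapMaybe (`Map.lookup` regSlotMap) (Set.toList regs))
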
diff --git a/compiler/nativeGen/RegAlloc/Graph/SpillClean.hs b/compiler/nativeGen/RegAlloc/Graph/SpillClean.hs
index 11e3cef10f..253cb70c9e 100644
--- a/compiler/nativeGen/RegAlloc/Graph/SpillClean.hs
+++ b/compiler/nativeGen/RegAlloc/Graph/SpillClean.hs
@@ -23,7 +23,6 @@
-- This also works if the reloads in B1\/B2 were spills instead, because
-- spilling %r1 to a slot makes that slot have the same value as %r1.
--
-
module RegAlloc.Graph.SpillClean (
cleanSpills
)
@@ -42,7 +41,13 @@ import State
import Outputable
import Util
-import Data.List ( find, nub )
+import Data.List
+import Data.Maybe
+import Data.Map (Map)
+import Data.Set (Set)
+import qualified Data.Map as Map
+import qualified Data.Set as Set
+
--
type Slot = Int
@@ -84,8 +89,8 @@ cleanSpin spinCount code
, sReloadedBy = emptyUFM }
code_forward <- mapBlockTopM cleanBlockForward code
- code_backward <- mapBlockTopM cleanBlockBackward code_forward
-
+ code_backward <- cleanTopBackward code_forward
+
-- During the cleaning of each block we collected information about what regs
-- were valid across each jump. Based on this, work out whether it will be
-- safe to erase reloads after join points for the next pass.
@@ -125,17 +130,6 @@ cleanBlockForward (BasicBlock blockId instrs)
return $ BasicBlock blockId instrs_reload
-cleanBlockBackward
- :: Instruction instr
- => LiveBasicBlock instr
- -> CleanM (LiveBasicBlock instr)
-
-cleanBlockBackward (BasicBlock blockId instrs)
- = do instrs_spill <- cleanBackward emptyUniqSet [] instrs
- return $ BasicBlock blockId instrs_spill
-
-
-
-- | Clean out unneeded reload instructions.
-- Walking forwards across the code
@@ -286,27 +280,59 @@ cleanReload _ _ _
-- TODO: This is mostly inter-block
-- we should really be updating the noReloads set as we cross jumps also.
--
+-- TODO: generate noReloads from liveSlotsOnEntry
+--
+cleanTopBackward
+ :: Instruction instr
+ => LiveCmmTop instr
+ -> CleanM (LiveCmmTop instr)
+
+cleanTopBackward cmm
+ = case cmm of
+ CmmData{}
+ -> return cmm
+
+ CmmProc info label params sccs
+ | LiveInfo _ _ _ liveSlotsOnEntry <- info
+ -> do sccs' <- mapM (mapSCCM (cleanBlockBackward liveSlotsOnEntry)) sccs
+ return $ CmmProc info label params sccs'
+
+
+cleanBlockBackward
+ :: Instruction instr
+ => Map BlockId (Set Int)
+ -> LiveBasicBlock instr
+ -> CleanM (LiveBasicBlock instr)
+
+cleanBlockBackward liveSlotsOnEntry (BasicBlock blockId instrs)
+ = do instrs_spill <- cleanBackward liveSlotsOnEntry emptyUniqSet [] instrs
+ return $ BasicBlock blockId instrs_spill
+
+
+
cleanBackward
- :: UniqSet Int -- ^ slots that have been spilled, but not reloaded from
+ :: Instruction instr
+ => Map BlockId (Set Int) -- ^ Slots live on entry to each block
+ -> UniqSet Int -- ^ slots that have been spilled, but not reloaded from
-> [LiveInstr instr] -- ^ acc
-> [LiveInstr instr] -- ^ instrs to clean (in forwards order)
-> CleanM [LiveInstr instr] -- ^ cleaned instrs (in backwards order)
-cleanBackward noReloads acc lis
+cleanBackward liveSlotsOnEntry noReloads acc lis
= do reloadedBy <- gets sReloadedBy
- cleanBackward' reloadedBy noReloads acc lis
+ cleanBackward' liveSlotsOnEntry reloadedBy noReloads acc lis
-cleanBackward' _ _ acc []
+cleanBackward' _ _ _ acc []
= return acc
-cleanBackward' reloadedBy noReloads acc (li : instrs)
+cleanBackward' liveSlotsOnEntry reloadedBy noReloads acc (li : instrs)
-- if nothing ever reloads from this slot then we don't need the spill
| LiveInstr (SPILL _ slot) _ <- li
, Nothing <- lookupUFM reloadedBy (SSlot slot)
= do modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
- cleanBackward noReloads acc instrs
+ cleanBackward liveSlotsOnEntry noReloads acc instrs
| LiveInstr (SPILL _ slot) _ <- li
= if elementOfUniqSet slot noReloads
@@ -314,21 +340,39 @@ cleanBackward' reloadedBy noReloads acc (li : instrs)
-- we can erase this spill because the slot won't be read until after the next one
then do
modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
- cleanBackward noReloads acc instrs
+ cleanBackward liveSlotsOnEntry noReloads acc instrs
else do
-- this slot is being spilled to, but we haven't seen any reloads yet.
let noReloads' = addOneToUniqSet noReloads slot
- cleanBackward noReloads' (li : acc) instrs
+ cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs
-- if we reload from a slot then it's no longer unused
| LiveInstr (RELOAD slot _) _ <- li
, noReloads' <- delOneFromUniqSet noReloads slot
- = cleanBackward noReloads' (li : acc) instrs
+ = cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs
+
+ -- If a slot is live in a jump target then assume it's reloaded there.
+ -- TODO: A real dataflow analysis would do a better job here.
+ -- If the target block _ever_ used the slot then we assume it always does,
+ -- but if those reloads are cleaned the slot liveness map doesn't get updated.
+ | LiveInstr instr _ <- li
+ , targets <- jumpDestsOfInstr instr
+ = do
+ let slotsReloadedByTargets
+ = Set.unions
+ $ catMaybes
+ $ map (flip Map.lookup liveSlotsOnEntry)
+ $ targets
+
+ let noReloads' = foldl' delOneFromUniqSet noReloads
+ $ Set.toList slotsReloadedByTargets
+
+ cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs
-- some other instruction
| otherwise
- = cleanBackward noReloads (li : acc) instrs
+ = cleanBackward liveSlotsOnEntry noReloads (li : acc) instrs
-- collateJoinPoints:
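
To round off the SpillClean.hs changes: the new jump case in cleanBackward' above is the heart of the patch. The following is a standalone sketch of just that step, with Int stand-ins for GHC's BlockId and slot types; crossJump is an illustrative name, not a function from the patch.

    import           Data.Map (Map)
    import qualified Data.Map as Map
    import           Data.Set (Set)
    import qualified Data.Set as Set

    type BlockId = Int
    type Slot    = Int

    -- Walking backwards, 'noReloads' holds slots that have been spilled to
    -- but not yet seen being reloaded. On crossing a jump, every slot that
    -- is live on entry to one of the jump's targets may be reloaded there,
    -- so it is dropped from the set and its spill is kept.
    crossJump
        :: Map BlockId (Set Slot)   -- slots live on entry to each block
        -> [BlockId]                -- destinations of the jump being crossed
        -> Set Slot                 -- spilled-but-not-reloaded slots so far
        -> Set Slot
    crossJump liveSlotsOnEntry targets noReloads
        = noReloads `Set.difference` slotsReloadedByTargets
      where
        slotsReloadedByTargets
            = Set.unions [ Map.findWithDefault Set.empty b liveSlotsOnEntry
                         | b <- targets ]
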
diff --git a/compiler/nativeGen/RegAlloc/Graph/SpillCost.hs b/compiler/nativeGen/RegAlloc/Graph/SpillCost.hs
index 152d70b966..0dc25f58d2 100644
--- a/compiler/nativeGen/RegAlloc/Graph/SpillCost.hs
+++ b/compiler/nativeGen/RegAlloc/Graph/SpillCost.hs
@@ -78,7 +78,7 @@ slurpSpillCostInfo cmm
-- lookup the regs that are live on entry to this block in
-- the info table from the CmmProc
countBlock info (BasicBlock blockId instrs)
- | LiveInfo _ _ (Just blockLive) <- info
+ | LiveInfo _ _ (Just blockLive) _ <- info
, Just rsLiveEntry <- lookupBlockEnv blockLive blockId
, rsLiveEntry_virt <- takeVirtuals rsLiveEntry
= countLIs rsLiveEntry_virt instrs
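
Finally, the SpillCost.hs hunk only widens an existing pattern: LiveInfo gained a fourth field carrying the slot liveness map, so every match on the constructor needs one more argument. A heavily simplified, assumed sketch of that shape follows; the field types are placeholders, and the real definition lives in RegAlloc.Liveness, which is not part of this diff.

    import Data.Map (Map)
    import Data.Set (Set)

    type BlockId    = Int
    type VReg       = Int
    type Slot       = Int
    type StaticData = ()   -- placeholder for the proc's static data

    -- Assumed, simplified shape of the liveness annotation after this patch;
    -- only the number of fields and their rough meaning are taken from the diff.
    data LiveInfo
        = LiveInfo
            StaticData                          -- static data of the proc
            BlockId                             -- id of the first block
            (Maybe (Map BlockId (Set VReg)))    -- vregs live on entry to each block
            (Map BlockId (Set Slot))            -- slots live on entry to each block

    -- Consumers that only need vreg liveness, like slurpSpillCostInfo above,
    -- now match the extra slot-liveness field with a wildcard.
    vregsLiveOnEntry :: LiveInfo -> Maybe (Map BlockId (Set VReg))
    vregsLiveOnEntry (LiveInfo _ _ mLive _) = mLive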