author     Simon Marlow <marlowsd@gmail.com>   2012-01-25 10:08:20 +0000
committer  Simon Marlow <marlowsd@gmail.com>   2012-01-25 10:08:20 +0000
commit     19be2021689f9134316ba567e0a7c8198f0487ae (patch)
tree       64e097873283e593f67105284e8d35b16c359456
parent     9b6dbdea12e607a7012c73c38f1e876d43cf1274 (diff)
Different implementation of MkGraph
-rw-r--r--   compiler/cmm/MkGraph.hs           | 444
-rw-r--r--   compiler/codeGen/StgCmmBind.hs    |  18
-rw-r--r--   compiler/codeGen/StgCmmExpr.hs    |  31
-rw-r--r--   compiler/codeGen/StgCmmForeign.hs |   9
-rw-r--r--   compiler/codeGen/StgCmmHeap.hs    |  44
-rw-r--r--   compiler/codeGen/StgCmmLayout.hs  |  10
-rw-r--r--   compiler/codeGen/StgCmmMonad.hs   |  95
-rw-r--r--   compiler/codeGen/StgCmmPrim.hs    |  44
-rw-r--r--   compiler/codeGen/StgCmmProf.hs    |   8
-rw-r--r--   compiler/codeGen/StgCmmTicky.hs   |   4
-rw-r--r--   compiler/codeGen/StgCmmUtils.hs   | 207
11 files changed, 466 insertions(+), 448 deletions(-)
diff --git a/compiler/cmm/MkGraph.hs b/compiler/cmm/MkGraph.hs
index 2561eed35b..0d75235a52 100644
--- a/compiler/cmm/MkGraph.hs
+++ b/compiler/cmm/MkGraph.hs
@@ -1,35 +1,17 @@
{-# LANGUAGE GADTs #-}
-{-# OPTIONS -fno-warn-tabs #-}
--- The above warning supression flag is a temporary kludge.
--- While working on this module you are encouraged to remove it and
--- detab the module (please do the detabbing in a separate patch). See
--- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
--- for details
-
--- ToDo: remove -fno-warn-warnings-deprecations
-{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
--- ToDo: remove -fno-warn-incomplete-patterns
-{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
-
--- Module for building CmmAGraphs.
-
--- As the CmmAGraph is a wrapper over Graph CmmNode O x, it is different
--- from Hoopl's AGraph. The current clients expect functions with the
--- same names Hoopl uses, so this module cannot be in the same namespace
--- as Compiler.Hoopl.
module MkGraph
- ( CmmAGraph
- , emptyAGraph, (<*>), catAGraphs, outOfLine
+ ( CmmAGraph, CgStmt(..)
+ , (<*>), catAGraphs
, mkLabel, mkMiddle, mkLast
- , withFreshLabel, withUnique, lgraphOfAGraph, labelAGraph
+ , lgraphOfAGraph, labelAGraph
, stackStubExpr
- , mkNop, mkAssign, mkStore, mkCall, mkCmmCall, mkSafeCall, mkUnsafeCall, mkFinalCall
- , mkJump, mkDirectJump, mkForeignJump, mkJumpGC, mkCbranch, mkSwitch
- , mkReturn, mkReturnSimple, mkComment, mkCallEntry
- , mkBranch, mkCmmIfThenElse, mkCmmIfThen, mkCmmWhileDo
- , copyInOflow, copyInSlot, copyOutOflow, copyOutSlot
+ , mkNop, mkAssign, mkStore, mkUnsafeCall, mkFinalCall, lastWithArgs
+ , mkJump, mkDirectJump, mkForeignJump, mkJumpGC, mkCbranch, mkSwitch
+ , mkReturn, mkReturnSimple, mkComment, mkCallEntry, mkBranch
+ , copyInOflow, copyInSlot, copyOutOflow, copyOutSlot
+ , toCall, Transfer(..)
)
where
@@ -37,249 +19,214 @@ import BlockId
import Cmm
import CmmCallConv (assignArgumentsPos, ParamLocation(..))
+
import Compiler.Hoopl hiding (Unique, (<*>), mkFirst, mkMiddle, mkLast, mkLabel, mkBranch, Shape(..))
-import qualified Compiler.Hoopl as H
-import Compiler.Hoopl.GHC (uniqueToLbl)
import FastString
import ForeignCall
import Outputable
import Prelude hiding (succ)
import SMRep (ByteOff)
-import StaticFlags
-import Unique
import UniqSupply
+import OrdList
#include "HsVersions.h"
-{-
-A 'CmmAGraph' is an abstract version of a 'Graph CmmNode O x' from module
-'Cmm'. The difference is that the 'CmmAGraph' can be eigher open of closed at
-exit and it can supply fresh Labels and Uniques.
-
-It also supports a splicing operation <*>, which is different from the Hoopl's
-<*>, because it splices two CmmAGraphs. Specifically, it can splice Graph
-O C and Graph O x. In this case, the open beginning of the second graph is
-thrown away. In the debug mode this sequence is checked to be empty or
-containing a branch (see note [Branch follows branch]).
-
-When an CmmAGraph open at exit is being converted to a CmmGraph, the output
-exit sequence is considered unreachable. If the graph consist of one block
-only, if it not the case and we crash. Otherwise we just throw the exit
-sequence away (and in debug mode we test that it really was unreachable).
--}
-
-{-
-Node [Branch follows branch]
-============================
-Why do we say it's ok for a Branch to follow a Branch?
-Because the standard constructor mkLabel has fall-through
-semantics. So if you do a mkLabel, you finish the current block,
-giving it a label, and start a new one that branches to that label.
-Emitting a Branch at this point is fine:
- goto L1; L2: ...stuff...
--}
-
-data CmmGraphOC = Opened (Graph CmmNode O O)
- | Closed (Graph CmmNode O C)
-type CmmAGraph = UniqSM CmmGraphOC -- Graph open at entry
-
-{-
-MS: I began with
- newtype CmmAGraph = forall x. AG (UniqSM (Graph CmmNode O x))
-but that does not work well, because we cannot take the graph
-out of the monad -- we do not know the type of what we would take
-out and pattern matching does not help, as we cannot pattern match
-on a graph inside the monad.
--}
-data Transfer = Call | Jump | Ret deriving Eq
+-----------------------------------------------------------------------------
+-- Building Graphs
+
+
+-- | CmmAGraph is a chunk of code consisting of:
+--
+-- * ordinary statements (assignments, stores etc.)
+-- * jumps
+-- * labels
+-- * out-of-line labelled blocks
+--
+-- The semantics is that control falls through labels and out-of-line
+-- blocks. Everything after a jump up to the next label is by
+-- definition unreachable code, and will be discarded.
+--
+-- Two CmmAGraphs can be stuck together with <*>, with the meaning that
+-- control flows from the first to the second.
+--
+-- A 'CmmAGraph' can be turned into a 'CmmGraph' (closed at both ends)
+-- by providing a label for the entry point; see 'labelAGraph'.
+--
+type CmmAGraph = OrdList CgStmt
+
+data CgStmt
+ = CgLabel BlockId
+ | CgStmt (CmmNode O O)
+ | CgLast (CmmNode O C)
+ | CgFork BlockId CmmAGraph
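
A minimal sketch of how these pieces fit together (the register `r`, expression `e` and entry label are placeholders, not taken from this patch): statements, jumps and labels are glued with <*>, and labelAGraph seals the chunk into a CmmGraph with the given entry point.

    exampleGraph :: BlockId -> CmmReg -> CmmExpr -> UniqSM CmmGraph
    exampleGraph entry r e =
      labelAGraph entry $
            mkAssign r e                    -- ordinary statement (CgStmt)
        <*> mkBranch entry                  -- jump (CgLast) ends the block
        <*> mkComment (fsLit "unreachable") -- code after a jump, before any
                                            -- label, is dropped by the flattener
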
+
+flattenCmmAGraph :: BlockId -> CmmAGraph -> CmmGraph
+flattenCmmAGraph id stmts =
+ CmmGraph { g_entry = id,
+ g_graph = GMany NothingO body NothingO }
+ where
+ (block, blocks) = flatten (fromOL stmts)
+ entry = blockJoinHead (CmmEntry id) block
+ body = foldr addBlock emptyBody (entry:blocks)
+
+ flatten :: [CgStmt] -> (Block CmmNode O C, [Block CmmNode C C])
+ flatten [] = panic "flatten []"
+
+ -- A label at the end of a function or fork: this label must not be reachable,
+ -- but it might be referred to from another BB that also isn't reachable.
+ -- Eliminating these has to be done with a dead-code analysis. For now,
+ -- we just make it into a well-formed block by adding a recursive jump.
+ flatten [CgLabel id]
+ = (goto_id, [blockJoinHead (CmmEntry id) goto_id] )
+ where goto_id = blockJoinTail emptyBlock (CmmBranch id)
+
+ -- A jump/branch: throw away all the code up to the next label, because
+ -- it is unreachable. Be careful to keep forks that we find on the way.
+ flatten (CgLast stmt : stmts)
+ = case dropWhile isOrdinaryStmt stmts of
+ [] ->
+ ( sing, [] )
+ [CgLabel id] ->
+ ( sing, [blockJoin (CmmEntry id) emptyBlock (CmmBranch id)] )
+ (CgLabel id : stmts) ->
+ ( sing, blockJoinHead (CmmEntry id) block : blocks )
+ where (block,blocks) = flatten stmts
+ (CgFork fork_id stmts : ss) ->
+ flatten (CgFork fork_id stmts : CgLast stmt : ss)
+ _ -> panic "MkGraph.flatten"
+ where
+ sing = blockJoinTail emptyBlock stmt
+
+ flatten (s:ss) =
+ case s of
+ CgStmt stmt -> (blockCons stmt block, blocks)
+ CgLabel id -> (blockJoinTail emptyBlock (CmmBranch id),
+ blockJoinHead (CmmEntry id) block : blocks)
+ CgFork fork_id stmts ->
+ (block, blockJoinHead (CmmEntry fork_id) fork_block : fork_blocks ++ blocks)
+ where (fork_block, fork_blocks) = flatten (fromOL stmts)
+ _ -> panic "MkGraph.flatten"
+ where (block,blocks) = flatten ss
+
+isOrdinaryStmt :: CgStmt -> Bool
+isOrdinaryStmt (CgStmt _) = True
+isOrdinaryStmt (CgLast _) = True
+isOrdinaryStmt _ = False
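
An out-of-line block is now just a CgFork sitting in the statement stream; a rough sketch (label and sub-graphs are placeholders) of what was previously written with outOfLine:

    withSlowPath :: BlockId -> CmmAGraph -> CmmAGraph -> CmmAGraph
    withSlowPath slow_lbl fast_code slow_code =
      fast_code <*> unitOL (CgFork slow_lbl slow_code)
      -- control falls straight through fast_code; flattenCmmAGraph peels the
      -- labelled slow_code off to one side.  slow_code should end in a jump
      -- (a CgLast), otherwise flatten has no well-formed block to build.
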
+
+
---------- AGraph manipulation
-emptyAGraph :: CmmAGraph
(<*>) :: CmmAGraph -> CmmAGraph -> CmmAGraph
+(<*>) = appOL
+
catAGraphs :: [CmmAGraph] -> CmmAGraph
+catAGraphs = concatOL
+
+-- | creates a sequence "goto id; id:" as an AGraph
+mkLabel :: BlockId -> CmmAGraph
+mkLabel bid = unitOL (CgLabel bid)
-mkLabel :: BlockId -> CmmAGraph -- created a sequence "goto id; id:" as an AGraph
-mkMiddle :: CmmNode O O -> CmmAGraph -- creates an open AGraph from a given node
-mkLast :: CmmNode O C -> CmmAGraph -- created a closed AGraph from a given node
+-- | creates an open AGraph from a given node
+mkMiddle :: CmmNode O O -> CmmAGraph
+mkMiddle middle = unitOL (CgStmt middle)
-withFreshLabel :: String -> (BlockId -> CmmAGraph) -> CmmAGraph
-withUnique :: (Unique -> CmmAGraph) -> CmmAGraph
+-- | creates a closed AGraph from a given node
+mkLast :: CmmNode O C -> CmmAGraph
+mkLast last = unitOL (CgLast last)
+
+-- | allocate a fresh label for the entry point
lgraphOfAGraph :: CmmAGraph -> UniqSM CmmGraph
- -- ^ allocate a fresh label for the entry point
+lgraphOfAGraph g = do u <- getUniqueM
+ return (flattenCmmAGraph (mkBlockId u) g)
+
+-- | use the given BlockId as the label of the entry point
labelAGraph :: BlockId -> CmmAGraph -> UniqSM CmmGraph
- -- ^ use the given BlockId as the label of the entry point
+labelAGraph lbl ag = return (flattenCmmAGraph lbl ag)
---------- No-ops
mkNop :: CmmAGraph
+mkNop = nilOL
+
mkComment :: FastString -> CmmAGraph
+#ifdef DEBUG
+-- SDM: generating all those comments takes time, this saved about 4% for me
+mkComment fs = mkMiddle $ CmmComment fs
+#else
+mkComment _ = nilOL
+#endif
---------- Assignment and store
mkAssign :: CmmReg -> CmmExpr -> CmmAGraph
-mkStore :: CmmExpr -> CmmExpr -> CmmAGraph
+mkAssign l r = mkMiddle $ CmmAssign l r
----------- Calls
-mkCall :: CmmExpr -> (Convention, Convention) -> [CmmFormal] -> [CmmActual] ->
- UpdFrameOffset -> CmmAGraph
-mkCmmCall :: CmmExpr -> [CmmFormal] -> [CmmActual] ->
- UpdFrameOffset -> CmmAGraph
- -- Native C-- calling convention
-mkSafeCall :: ForeignTarget -> [CmmFormal] -> [CmmActual] -> UpdFrameOffset -> Bool -> CmmAGraph
-mkUnsafeCall :: ForeignTarget -> [CmmFormal] -> [CmmActual] -> CmmAGraph
-mkFinalCall :: CmmExpr -> CCallConv -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
- -- Never returns; like exit() or barf()
+mkStore :: CmmExpr -> CmmExpr -> CmmAGraph
+mkStore l r = mkMiddle $ CmmStore l r
---------- Control transfer
-mkJump :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
-mkDirectJump :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
-mkJumpGC :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
-mkForeignJump :: Convention -> CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
-mkCbranch :: CmmExpr -> BlockId -> BlockId -> CmmAGraph
-mkSwitch :: CmmExpr -> [Maybe BlockId] -> CmmAGraph
+mkJump :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
+mkJump e actuals updfr_off =
+ lastWithArgs Jump old NativeNodeCall actuals updfr_off $
+ toCall e Nothing updfr_off 0
+
+mkDirectJump :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
+mkDirectJump e actuals updfr_off =
+ lastWithArgs Jump old NativeDirectCall actuals updfr_off $
+ toCall e Nothing updfr_off 0
+
+mkJumpGC :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
+mkJumpGC e actuals updfr_off =
+ lastWithArgs Jump old GC actuals updfr_off $
+ toCall e Nothing updfr_off 0
+
+mkForeignJump :: Convention -> CmmExpr -> [CmmActual] -> UpdFrameOffset
+ -> CmmAGraph
+mkForeignJump conv e actuals updfr_off =
+ lastWithArgs Jump old conv actuals updfr_off $
+ toCall e Nothing updfr_off 0
+
+mkCbranch :: CmmExpr -> BlockId -> BlockId -> CmmAGraph
+mkCbranch pred ifso ifnot = mkLast (CmmCondBranch pred ifso ifnot)
+
+mkSwitch :: CmmExpr -> [Maybe BlockId] -> CmmAGraph
+mkSwitch e tbl = mkLast $ CmmSwitch e tbl
+
mkReturn :: CmmExpr -> [CmmActual] -> UpdFrameOffset -> CmmAGraph
+mkReturn e actuals updfr_off =
+ lastWithArgs Ret old NativeReturn actuals updfr_off $
+ toCall e Nothing updfr_off 0
+ -- where e = CmmLoad (CmmStackSlot (CallArea Old) updfr_off) gcWord
+
mkReturnSimple :: [CmmActual] -> UpdFrameOffset -> CmmAGraph
+mkReturnSimple actuals updfr_off =
+ lastWithArgs Ret old NativeReturn actuals updfr_off $
+ toCall e Nothing updfr_off 0
+ where e = CmmLoad (CmmStackSlot (CallArea Old) updfr_off) gcWord
mkBranch :: BlockId -> CmmAGraph
-mkCmmIfThenElse :: CmmExpr -> CmmAGraph -> CmmAGraph -> CmmAGraph
-mkCmmIfThen :: CmmExpr -> CmmAGraph -> CmmAGraph
-mkCmmWhileDo :: CmmExpr -> CmmAGraph -> CmmAGraph
-
-outOfLine :: CmmAGraph -> CmmAGraph
--- ^ The argument is an CmmAGraph that must have an
--- empty entry sequence and be closed at the end.
--- The result is a new CmmAGraph that is open at the
--- end and goes directly from entry to exit, with the
--- original graph sitting to the side out-of-line.
---
--- Example: mkMiddle (x = 3)
--- <*> outOfLine (mkLabel L <*> ...stuff...)
--- <*> mkMiddle (y = x)
--- Control will flow directly from x=3 to y=x;
--- the block starting with L is "on the side".
---
--- N.B. algebraically forall g g' : g <*> outOfLine g' == outOfLine g' <*> g
+mkBranch bid = mkLast (CmmBranch bid)
+
+mkFinalCall :: CmmExpr -> CCallConv -> [CmmActual] -> UpdFrameOffset
+ -> CmmAGraph
+mkFinalCall f _ actuals updfr_off =
+ lastWithArgs Call old NativeDirectCall actuals updfr_off $
+ toCall f Nothing updfr_off 0
+
+mkUnsafeCall :: ForeignTarget -> [CmmFormal] -> [CmmActual] -> CmmAGraph
+mkUnsafeCall t fs as = mkMiddle $ CmmUnsafeForeignCall t fs as
+
--------------------------------------------------------------------------
--- ================ IMPLEMENTATION ================--
-
---------------------------------------------------
--- Raw CmmAGraph handling
-
-emptyAGraph = return $ Opened emptyGraph
-ag <*> ah = do g <- ag
- h <- ah
- return (case (g, h) of
- (Opened g, Opened h) -> Opened $ g H.<*> h
- (Opened g, Closed h) -> Closed $ g H.<*> h
- (Closed g, Opened GNil) -> Closed g
- (Closed g, Opened (GUnit e)) -> note_unreachable e $ Closed g
- (Closed g, Opened (GMany (JustO e) b x)) -> note_unreachable e $ Opened $ g H.|*><*| GMany NothingO b x
- (Closed g, Closed (GMany (JustO e) b x)) -> note_unreachable e $ Closed $ g H.|*><*| GMany NothingO b x
- :: CmmGraphOC)
-catAGraphs = foldl (<*>) emptyAGraph
-
-outOfLine ag = withFreshLabel "outOfLine" $ \l ->
- do g <- ag
- return (case g of
- Closed (GMany (JustO e) b _) -> note_unreachable e $ Opened $
- GMany (JustO $ BlockOC BNil (CmmBranch l)) b (JustO $ BlockCO (CmmEntry l) BNil)
- _ -> panic "outOfLine"
- :: CmmGraphOC)
-
-note_unreachable :: Block CmmNode O x -> a -> a
-note_unreachable block graph =
- ASSERT (block_is_empty_or_label) -- Note [Branch follows branch]
- graph
- where block_is_empty_or_label :: Bool
- block_is_empty_or_label = case blockToNodeList block of
- (NothingC, [], NothingC) -> True
- (NothingC, [], JustC (CmmBranch _)) -> True
- _ -> False
-
-mkLabel bid = return $ Opened $ H.mkLast (CmmBranch bid) |*><*| H.mkFirst (CmmEntry bid)
-mkMiddle middle = return $ Opened $ H.mkMiddle middle
-mkLast last = return $ Closed $ H.mkLast last
-
-withUnique f = getUniqueM >>= f
-withFreshLabel _name f = getUniqueM >>= f . uniqueToLbl . intToUnique . getKey
-lgraphOfAGraph g = do u <- getUniqueM
- labelAGraph (mkBlockId u) g
-
-labelAGraph lbl ag = do g <- ag
- return $ CmmGraph {g_entry=lbl, g_graph=H.mkFirst (CmmEntry lbl) H.<*> closed g}
- where closed :: CmmGraphOC -> Graph CmmNode O C
- closed (Closed g) = g
- closed (Opened g@(GMany entry body (JustO exit))) =
- ASSERT (entryLabel exit `notElem` map entryLabel (postorder_dfs g))
- GMany entry body NothingO
- closed (Opened _) = panic "labelAGraph"
-
---------------------------------------------------
--- CmmAGraph constructions
-
-mkNop = emptyAGraph
-mkComment fs = mkMiddle $ CmmComment fs
-mkStore l r = mkMiddle $ CmmStore l r
-
--- NEED A COMPILER-DEBUGGING FLAG HERE
--- Sanity check: any value assigned to a pointer must be non-zero.
--- If it's 0, cause a crash immediately.
-mkAssign l r = if opt_StubDeadValues then assign l r <*> check l else assign l r
- where assign l r = mkMiddle (CmmAssign l r)
- check (CmmGlobal _) = mkNop
- check l@(CmmLocal reg) = -- if a ptr arg is NULL, cause a crash!
- if isGcPtrType ty then
- mkCmmIfThen (CmmMachOp (MO_Eq w) [r, stackStubExpr w])
- (assign l (CmmLoad (CmmLit (CmmInt 0 w)) ty))
- else mkNop
- where ty = localRegType reg
- w = typeWidth ty
- r = CmmReg l
-- Why are we inserting extra blocks that simply branch to the successors?
-- Because in addition to the branch instruction, @mkBranch@ will insert
-- a necessary adjustment to the stack pointer.
-mkCbranch pred ifso ifnot = mkLast (CmmCondBranch pred ifso ifnot)
-mkSwitch e tbl = mkLast $ CmmSwitch e tbl
-mkSafeCall t fs as upd i = withFreshLabel "safe call" $ body
- where
- body k =
- ( mkStore (CmmStackSlot (CallArea (Young k)) (widthInBytes wordWidth))
- (CmmLit (CmmBlock k))
- <*> mkLast (CmmForeignCall {tgt=t, res=fs, args=as, succ=k, updfr=upd, intrbl=i})
- <*> mkLabel k)
-mkUnsafeCall t fs as = mkMiddle $ CmmUnsafeForeignCall t fs as
-
-mkBranch bid = mkLast (CmmBranch bid)
-
-mkCmmIfThenElse e tbranch fbranch =
- withFreshLabel "end of if" $ \endif ->
- withFreshLabel "start of then" $ \tid ->
- withFreshLabel "start of else" $ \fid ->
- mkCbranch e tid fid <*>
- mkLabel tid <*> tbranch <*> mkBranch endif <*>
- mkLabel fid <*> fbranch <*> mkLabel endif
-
-mkCmmIfThen e tbranch
- = withFreshLabel "end of if" $ \endif ->
- withFreshLabel "start of then" $ \tid ->
- mkCbranch e tid endif <*>
- mkLabel tid <*> tbranch <*> mkLabel endif
-
-mkCmmWhileDo e body =
- withFreshLabel "loop test" $ \test ->
- withFreshLabel "loop head" $ \head ->
- withFreshLabel "end while" $ \endwhile ->
- -- Forrest Baskett's while-loop layout
- mkBranch test <*> mkLabel head <*> body
- <*> mkLabel test <*> mkCbranch e head endwhile
- <*> mkLabel endwhile
-- For debugging purposes, we can stub out dead stack slots:
stackStubExpr :: Width -> CmmExpr
@@ -333,17 +280,22 @@ oneCopySlotI _ (reg, _) (n, ms) =
-- Factoring out the common parts of the copyout functions yielded something
-- more complicated:
+data Transfer = Call | Jump | Ret deriving Eq
+
copyOutOflow :: Convention -> Transfer -> Area -> [CmmActual] -> UpdFrameOffset ->
(Int, CmmAGraph)
+
-- Generate code to move the actual parameters into the locations
--- required by the calling convention. This includes a store for the return address.
+-- required by the calling convention. This includes a store for the
+-- return address.
--
--- The argument layout function ignores the pointer to the info table, so we slot that
--- in here. When copying-out to a young area, we set the info table for return
--- and adjust the offsets of the other parameters.
--- If this is a call instruction, we adjust the offsets of the other parameters.
+-- The argument layout function ignores the pointer to the info table,
+-- so we slot that in here. When copying-out to a young area, we set
+-- the info table for return and adjust the offsets of the other
+-- parameters. If this is a call instruction, we adjust the offsets
+-- of the other parameters.
copyOutOflow conv transfer area@(CallArea a) actuals updfr_off
- = foldr co (init_offset, emptyAGraph) args'
+ = foldr co (init_offset, mkNop) args'
where
co (v, RegisterParam r) (n, ms) = (n, mkAssign (CmmGlobal r) v <*> ms)
co (v, StackParam off) (n, ms) = (max n off, mkStore (CmmStackSlot area off) v <*> ms)
@@ -387,34 +339,8 @@ lastWithArgs transfer area conv actuals updfr_off last =
-- procedure entry.
old :: Area
old = CallArea Old
-toCall :: CmmExpr -> Maybe BlockId -> UpdFrameOffset -> ByteOff -> ByteOff -> CmmAGraph
+
+toCall :: CmmExpr -> Maybe BlockId -> UpdFrameOffset -> ByteOff -> ByteOff
+ -> CmmAGraph
toCall e cont updfr_off res_space arg_space =
mkLast $ CmmCall e cont arg_space res_space updfr_off
-mkJump e actuals updfr_off =
- lastWithArgs Jump old NativeNodeCall actuals updfr_off $ toCall e Nothing updfr_off 0
-mkDirectJump e actuals updfr_off =
- lastWithArgs Jump old NativeDirectCall actuals updfr_off $ toCall e Nothing updfr_off 0
-mkJumpGC e actuals updfr_off =
- lastWithArgs Jump old GC actuals updfr_off $ toCall e Nothing updfr_off 0
-mkForeignJump conv e actuals updfr_off =
- lastWithArgs Jump old conv actuals updfr_off $ toCall e Nothing updfr_off 0
-mkReturn e actuals updfr_off =
- lastWithArgs Ret old NativeReturn actuals updfr_off $ toCall e Nothing updfr_off 0
- -- where e = CmmLoad (CmmStackSlot (CallArea Old) updfr_off) gcWord
-mkReturnSimple actuals updfr_off =
- lastWithArgs Ret old NativeReturn actuals updfr_off $ toCall e Nothing updfr_off 0
- where e = CmmLoad (CmmStackSlot (CallArea Old) updfr_off) gcWord
-
-mkFinalCall f _ actuals updfr_off =
- lastWithArgs Call old NativeDirectCall actuals updfr_off $ toCall f Nothing updfr_off 0
-
-mkCmmCall f results actuals = mkCall f (NativeDirectCall, NativeReturn) results actuals
-
--- I'm dropping the SRT, but that should be okay: we plan to reconstruct it later.
-mkCall f (callConv, retConv) results actuals updfr_off =
- withFreshLabel "call successor" $ \k ->
- let area = CallArea $ Young k
- (off, copyin) = copyInOflow retConv area results
- copyout = lastWithArgs Call area callConv actuals updfr_off
- (toCall f (Just k) updfr_off off)
- in (copyout <*> mkLabel k <*> copyin)
diff --git a/compiler/codeGen/StgCmmBind.hs b/compiler/codeGen/StgCmmBind.hs
index 9bf57b1cb4..724f28d142 100644
--- a/compiler/codeGen/StgCmmBind.hs
+++ b/compiler/codeGen/StgCmmBind.hs
@@ -109,7 +109,7 @@ cgBind (StgNonRec name rhs)
; emit (init <*> body) }
cgBind (StgRec pairs)
- = do { ((new_binds, inits), body) <- getCodeR $ fixC (\ new_binds_inits ->
+ = do { ((new_binds, inits), body) <- getCodeR $ fixC (\ new_binds_inits ->
do { addBindsC $ fst new_binds_inits -- avoid premature deconstruction
; liftM unzip $ listFCs [ cgRhs b e | (b,e) <- pairs ] })
; addBindsC new_binds
@@ -547,10 +547,10 @@ emitBlackHoleCode is_single_entry = do
whenC eager_blackholing $ do
tickyBlackHole (not is_single_entry)
- emit (mkStore (cmmOffsetW (CmmReg nodeReg) fixedHdrSize)
- (CmmReg (CmmGlobal CurrentTSO)))
+ emitStore (cmmOffsetW (CmmReg nodeReg) fixedHdrSize)
+ (CmmReg (CmmGlobal CurrentTSO))
emitPrimCall [] MO_WriteBarrier []
- emit (mkStore (CmmReg nodeReg) (CmmReg (CmmGlobal EagerBlackholeInfo)))
+ emitStore (CmmReg nodeReg) (CmmReg (CmmGlobal EagerBlackholeInfo))
setupUpdate :: ClosureInfo -> LocalReg -> FCode () -> FCode ()
-- Nota Bene: this function does not change Node (even if it's a CAF),
@@ -596,7 +596,7 @@ pushUpdateFrame es body
offset <- foldM push updfr es
withUpdFrameOff offset body
where push off e =
- do emit (mkStore (CmmStackSlot (CallArea Old) base) e)
+ do emitStore (CmmStackSlot (CallArea Old) base) e
return base
where base = off + widthInBytes (cmmExprWidth e)
@@ -664,13 +664,13 @@ link_caf _is_upd = do
-- node is live, so save it.
-- see Note [atomic CAF entry] in rts/sm/Storage.c
- ; emit $ mkCmmIfThen
- (CmmMachOp mo_wordEq [ CmmReg (CmmLocal ret), CmmLit zeroCLit]) $
+ ; emit =<< mkCmmIfThen
+ (CmmMachOp mo_wordEq [ CmmReg (CmmLocal ret), CmmLit zeroCLit])
-- re-enter R1. Doing this directly is slightly dodgy; we're
-- assuming lots of things, like the stack pointer hasn't
-- moved since we entered the CAF.
- let target = entryCode (closureInfoPtr (CmmReg nodeReg)) in
- mkJump target [] 0
+ (let target = entryCode (closureInfoPtr (CmmReg nodeReg)) in
+ mkJump target [] 0)
; return hp_rel }
diff --git a/compiler/codeGen/StgCmmExpr.hs b/compiler/codeGen/StgCmmExpr.hs
index 5ea935984d..0c5dcb5f6a 100644
--- a/compiler/codeGen/StgCmmExpr.hs
+++ b/compiler/codeGen/StgCmmExpr.hs
@@ -77,7 +77,7 @@ cgExpr (StgLetNoEscape _ _ binds expr) =
; let join_id = mkBlockId (uniqFromSupply us)
; cgLneBinds join_id binds
; cgExpr expr
- ; emit $ mkLabel join_id}
+ ; emitLabel join_id}
cgExpr (StgCase expr _live_vars _save_vars bndr srt alt_type alts) =
cgCase expr bndr srt alt_type alts
@@ -130,7 +130,7 @@ cgLetNoEscapeRhs
cgLetNoEscapeRhs join_id local_cc bndr rhs =
do { (info, rhs_body) <- getCodeR $ cgLetNoEscapeRhsBody local_cc bndr rhs
; let (bid, _) = expectJust "cgLetNoEscapeRhs" $ maybeLetNoEscape info
- ; emit (outOfLine $ mkLabel bid <*> rhs_body <*> mkBranch join_id)
+ ; emitOutOfLine bid $ rhs_body <*> mkBranch join_id
; return info
}
@@ -319,7 +319,7 @@ cgCase (StgApp v []) bndr _ alt_type@(PrimAlt _) alts
do { when (not reps_compatible) $
panic "cgCase: reps do not match, perhaps a dodgy unsafeCoerce?"
; v_info <- getCgIdInfo v
- ; emit (mkAssign (CmmLocal (idToReg (NonVoid bndr))) (idInfoToAmode v_info))
+ ; emitAssign (CmmLocal (idToReg (NonVoid bndr))) (idInfoToAmode v_info)
; _ <- bindArgsToRegs [NonVoid bndr]
; cgAlts NoGcInAlts (NonVoid bndr) alt_type alts }
where
@@ -330,8 +330,11 @@ cgCase scrut@(StgApp v []) _ _ (PrimAlt _) _
do { mb_cc <- maybeSaveCostCentre True
; withSequel (AssignTo [idToReg (NonVoid v)] False) (cgExpr scrut)
; restoreCurrentCostCentre mb_cc
- ; emit $ mkComment $ mkFastString "should be unreachable code"
- ; emit $ withFreshLabel "l" (\l -> mkLabel l <*> mkBranch l)}
+ ; emitComment $ mkFastString "should be unreachable code"
+ ; l <- newLabelC
+ ; emitLabel l
+ ; emit (mkBranch l)
+ }
{-
case seq# a s of v
@@ -433,7 +436,7 @@ cgAlts gc_plan bndr (PrimAlt _) alts
tagged_cmms' = [(lit,code)
| (LitAlt lit, code) <- tagged_cmms]
- ; emit (mkCmmLitSwitch (CmmReg bndr_reg) tagged_cmms' deflt) }
+ ; emitCmmLitSwitch (CmmReg bndr_reg) tagged_cmms' deflt }
cgAlts gc_plan bndr (AlgAlt tycon) alts
= do { tagged_cmms <- cgAltRhss gc_plan bndr alts
@@ -517,8 +520,8 @@ cgIdApp fun_id args
cgLneJump :: BlockId -> [LocalReg] -> [StgArg] -> FCode ()
cgLneJump blk_id lne_regs args -- Join point; discard sequel
= do { cmm_args <- getNonVoidArgAmodes args
- ; emit (mkMultiAssign lne_regs cmm_args
- <*> mkBranch blk_id) }
+ ; emitMultiAssign lne_regs cmm_args
+ ; emit (mkBranch blk_id) }
cgTailCall :: Id -> CgIdInfo -> [StgArg] -> FCode ()
cgTailCall fun_id fun_info args = do
@@ -532,24 +535,24 @@ cgTailCall fun_id fun_info args = do
do { let fun' = CmmLoad fun (cmmExprType fun)
; [ret,call] <- forkAlts [
getCode $ emitReturn [fun], -- Is tagged; no need to untag
- getCode $ do -- emit (mkAssign nodeReg fun)
+ getCode $ do -- emitAssign nodeReg fun
emitCall (NativeNodeCall, NativeReturn)
(entryCode fun') [fun]] -- Not tagged
- ; emit (mkCmmIfThenElse (cmmIsTagged fun) ret call) }
+ ; emit =<< mkCmmIfThenElse (cmmIsTagged fun) ret call }
SlowCall -> do -- A slow function call via the RTS apply routines
{ tickySlowCall lf_info args
- ; emit $ mkComment $ mkFastString "slowCall"
+ ; emitComment $ mkFastString "slowCall"
; slowCall fun args }
-- A direct function call (possibly with some left-over arguments)
DirectEntry lbl arity -> do
{ tickyDirectCall arity args
; if node_points then
- do emit $ mkComment $ mkFastString "directEntry"
- emit (mkAssign nodeReg fun)
+ do emitComment $ mkFastString "directEntry"
+ emitAssign nodeReg fun
directCall lbl arity args
- else do emit $ mkComment $ mkFastString "directEntry else"
+ else do emitComment $ mkFastString "directEntry else"
directCall lbl arity args }
JumpToIt {} -> panic "cgTailCall" -- ???
diff --git a/compiler/codeGen/StgCmmForeign.hs b/compiler/codeGen/StgCmmForeign.hs
index 7c739c61b6..f4be622092 100644
--- a/compiler/codeGen/StgCmmForeign.hs
+++ b/compiler/codeGen/StgCmmForeign.hs
@@ -127,7 +127,8 @@ emitForeignCall safety results target args _srt _ret
| otherwise = do
updfr_off <- getUpdFrameOff
temp_target <- load_target_into_temp target
- emit $ mkSafeCall temp_target results args updfr_off (playInterruptible safety)
+ emit =<< mkSafeCall temp_target results args updfr_off
+ (playInterruptible safety)
{-
@@ -160,7 +161,7 @@ maybe_assign_temp e
-- expressions, which are wrong here.
-- this is a NonPtr because it only duplicates an existing
reg <- newTemp (cmmExprType e) --TODO FIXME NOW
- emit (mkAssign (CmmLocal reg) e)
+ emitAssign (CmmLocal reg) e
return (CmmReg (CmmLocal reg))
-- -----------------------------------------------------------------------------
@@ -182,12 +183,12 @@ saveThreadState =
emitSaveThreadState :: BlockId -> FCode ()
emitSaveThreadState bid = do
-- CurrentTSO->stackobj->sp = Sp;
- emit $ mkStore (cmmOffset (CmmLoad (cmmOffset stgCurrentTSO tso_stackobj) bWord) stack_SP)
+ emitStore (cmmOffset (CmmLoad (cmmOffset stgCurrentTSO tso_stackobj) bWord) stack_SP)
(CmmStackSlot (CallArea (Young bid)) (widthInBytes (typeWidth gcWord)))
emit closeNursery
-- and save the current cost centre stack in the TSO when profiling:
when opt_SccProfilingOn $
- emit (mkStore (cmmOffset stgCurrentTSO tso_CCCS) curCCS)
+ emitStore (cmmOffset stgCurrentTSO tso_CCCS) curCCS
-- CurrentNursery->free = Hp+1;
closeNursery :: CmmAGraph
diff --git a/compiler/codeGen/StgCmmHeap.hs b/compiler/codeGen/StgCmmHeap.hs
index 690b0a9622..2b0b6f895e 100644
--- a/compiler/codeGen/StgCmmHeap.hs
+++ b/compiler/codeGen/StgCmmHeap.hs
@@ -109,7 +109,7 @@ allocDynClosureCmm info_tbl lf_info use_cc _blame_cc amodes_w_offsets
-- ALLOCATE THE OBJECT
; base <- getHpRelOffset info_offset
- ; emit (mkComment $ mkFastString "allocDynClosure")
+ ; emitComment $ mkFastString "allocDynClosure"
; emitSetDynHdr base info_ptr use_cc
; let (cmm_args, offsets) = unzip amodes_w_offsets
; hpStore base cmm_args offsets
@@ -410,7 +410,8 @@ entryHeapCheck cl_info offset nodeSet arity args code
altHeapCheck :: [LocalReg] -> FCode a -> FCode a
altHeapCheck regs code
= do updfr_sz <- getUpdFrameOff
- heapCheck False (gc_call updfr_sz) code
+ gc_call_code <- gc_call updfr_sz
+ heapCheck False gc_call_code code
where
reg_exprs = map (CmmReg . CmmLocal) regs
@@ -451,7 +452,7 @@ heapCheck checkStack do_gc code
= getHeapUsage $ \ hpHw ->
-- Emit heap checks, but be sure to do it lazily so
-- that the conditionals on hpHw don't cause a black hole
- do { emit $ do_checks checkStack hpHw do_gc
+ do { codeOnly $ do_checks checkStack hpHw do_gc
; tickyAllocHeap hpHw
; doGranAllocate hpHw
; setRealHp hpHw
@@ -460,22 +461,27 @@ heapCheck checkStack do_gc code
do_checks :: Bool -- Should we check the stack?
-> WordOff -- Heap headroom
-> CmmAGraph -- What to do on failure
- -> CmmAGraph
-do_checks checkStack alloc do_gc
- = withFreshLabel "gc" $ \ loop_id ->
- withFreshLabel "gc" $ \ gc_id ->
- mkLabel loop_id
- <*> (let hpCheck = if alloc == 0 then mkNop
- else mkAssign hpReg bump_hp <*>
- mkCmmIfThen hp_oflo (alloc_n <*> mkBranch gc_id)
- in if checkStack
- then mkCmmIfThenElse sp_oflo (mkBranch gc_id) hpCheck
- else hpCheck)
- <*> mkComment (mkFastString "outOfLine should follow:")
- <*> outOfLine (mkLabel gc_id
- <*> mkComment (mkFastString "outOfLine here")
- <*> do_gc
- <*> mkBranch loop_id)
+ -> FCode ()
+do_checks checkStack alloc do_gc = do
+ loop_id <- newLabelC
+ gc_id <- newLabelC
+ emitLabel loop_id
+ hp_check <- if alloc == 0
+ then return mkNop
+ else do
+ ifthen <- mkCmmIfThen hp_oflo (alloc_n <*> mkBranch gc_id)
+ return (mkAssign hpReg bump_hp <*> ifthen)
+
+ if checkStack
+ then emit =<< mkCmmIfThenElse sp_oflo (mkBranch gc_id) hp_check
+ else emit hp_check
+
+ emit $ mkComment (mkFastString "outOfLine should follow:")
+
+ emitOutOfLine gc_id $
+ mkComment (mkFastString "outOfLine here") <*>
+ do_gc <*>
+ mkBranch loop_id
-- Test for stack pointer exhaustion, then
-- bump heap pointer, and test for heap exhaustion
-- Note that we don't move the heap pointer unless the
diff --git a/compiler/codeGen/StgCmmLayout.hs b/compiler/codeGen/StgCmmLayout.hs
index 9afcd029a4..0299bc0f96 100644
--- a/compiler/codeGen/StgCmmLayout.hs
+++ b/compiler/codeGen/StgCmmLayout.hs
@@ -74,14 +74,14 @@ emitReturn :: [CmmExpr] -> FCode ()
emitReturn results
= do { sequel <- getSequel;
; updfr_off <- getUpdFrameOff
- ; emit $ mkComment $ mkFastString ("emitReturn: " ++ show sequel)
+ ; emitComment $ mkFastString ("emitReturn: " ++ show sequel)
; case sequel of
Return _ ->
do { adjustHpBackwards
; emit (mkReturnSimple results updfr_off) }
AssignTo regs adjust ->
do { if adjust then adjustHpBackwards else return ()
- ; emit (mkMultiAssign regs results) }
+ ; emitMultiAssign regs results }
}
emitCall :: (Convention, Convention) -> CmmExpr -> [CmmExpr] -> FCode ()
@@ -91,10 +91,10 @@ emitCall convs@(callConv, _) fun args
= do { adjustHpBackwards
; sequel <- getSequel
; updfr_off <- getUpdFrameOff
- ; emit $ mkComment $ mkFastString ("emitCall: " ++ show sequel)
+ ; emitComment $ mkFastString ("emitCall: " ++ show sequel)
; case sequel of
Return _ -> emit (mkForeignJump callConv fun args updfr_off)
- AssignTo res_regs _ -> emit (mkCall fun convs res_regs args updfr_off)
+ AssignTo res_regs _ -> emit =<< mkCall fun convs res_regs args updfr_off
}
adjustHpBackwards :: FCode ()
@@ -179,7 +179,7 @@ slow_call fun args reps
= do dflags <- getDynFlags
let platform = targetPlatform dflags
call <- getCode $ direct_call "slow_call" (mkRtsApFastLabel rts_fun) arity args reps
- emit $ mkComment $ mkFastString ("slow_call for " ++ showSDoc (pprPlatform platform fun) ++
+ emitComment $ mkFastString ("slow_call for " ++ showSDoc (pprPlatform platform fun) ++
" with pat " ++ showSDoc (ftext rts_fun))
emit (mkAssign nodeReg fun <*> call)
where
diff --git a/compiler/codeGen/StgCmmMonad.hs b/compiler/codeGen/StgCmmMonad.hs
index cab0897fe8..8001edc5d8 100644
--- a/compiler/codeGen/StgCmmMonad.hs
+++ b/compiler/codeGen/StgCmmMonad.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE GADTs #-}
-----------------------------------------------------------------------------
--
-- Monad for Stg to C-- code generation
@@ -20,12 +21,17 @@ module StgCmmMonad (
returnFC, fixC, fixC_, nopC, whenC,
newUnique, newUniqSupply,
+ newLabelC, emitLabel,
+
emit, emitDecl, emitProc, emitProcWithConvention, emitSimpleProc,
+ emitOutOfLine, emitAssign, emitStore, emitComment,
getCmm, cgStmtsToBlocks,
getCodeR, getCode, getHeapUsage,
- forkClosureBody, forkStatics, forkAlts, forkProc, codeOnly,
+ mkCmmIfThenElse, mkCmmIfThen, mkCall, mkCmmCall, mkSafeCall,
+
+ forkClosureBody, forkStatics, forkAlts, forkProc, codeOnly,
ConTagZ,
@@ -69,12 +75,14 @@ import VarEnv
import OrdList
import Unique
import UniqSupply
-import FastString(sLit)
+import FastString
import Outputable
+import Compiler.Hoopl hiding (Unique, (<*>), mkLabel, mkBranch, mkLast)
+
import Control.Monad
import Data.List
-import Prelude hiding( sequence )
+import Prelude hiding( sequence, succ )
import qualified Prelude( sequence )
infixr 9 `thenC` -- Right-associative!
@@ -270,6 +278,8 @@ data HeapUsage =
type VirtualHpOffset = WordOff
+
+
initCgState :: UniqSupply -> CgState
initCgState uniqs
= MkCgState { cgs_stmts = mkNop, cgs_tops = nilOL,
@@ -308,7 +318,6 @@ initHpUsage = HeapUsage { virtHp = 0, realHp = 0 }
maxHpHw :: HeapUsage -> VirtualHpOffset -> HeapUsage
hp_usg `maxHpHw` hw = hp_usg { virtHp = virtHp hp_usg `max` hw }
-
--------------------------------------------------------
-- Operators for getting and setting the state and "info_down".
--------------------------------------------------------
@@ -591,6 +600,33 @@ getHeapUsage fcode
-- ----------------------------------------------------------------------------
-- Combinators for emitting code
+emitCgStmt :: CgStmt -> FCode ()
+emitCgStmt stmt
+ = do { state <- getState
+ ; setState $ state { cgs_stmts = cgs_stmts state `snocOL` stmt }
+ }
+
+emitLabel :: BlockId -> FCode ()
+emitLabel id = emitCgStmt (CgLabel id)
+
+emitComment :: FastString -> FCode ()
+#ifdef DEBUG
+emitComment s = emitCgStmt (CgStmt (CmmComment s))
+#else
+emitComment s = return ()
+#endif
+
+emitAssign :: CmmReg -> CmmExpr -> FCode ()
+emitAssign l r = emitCgStmt (CgStmt (CmmAssign l r))
+
+emitStore :: CmmExpr -> CmmExpr -> FCode ()
+emitStore l r = emitCgStmt (CgStmt (CmmStore l r))
+
+
+newLabelC :: FCode BlockId
+newLabelC = do { u <- newUnique
+ ; return $ mkBlockId u }
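
These helpers let call sites build code directly in FCode instead of assembling a CmmAGraph first; a small illustrative sketch (the register `r` and expression `e` are placeholders, not from this patch):

    exampleEmit :: CmmReg -> CmmExpr -> FCode ()
    exampleEmit r e = do
      k <- newLabelC       -- fresh BlockId from the FCode unique supply
      emitAssign r e       -- appends a CgStmt to cgs_stmts
      emit (mkBranch k)    -- a CgLast ends the current block
      emitLabel k          -- control falls through the new label
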
+
emit :: CmmAGraph -> FCode ()
emit ag
= do { state <- getState
@@ -601,6 +637,9 @@ emitDecl decl
= do { state <- getState
; setState $ state { cgs_tops = cgs_tops state `snocOL` decl } }
+emitOutOfLine :: BlockId -> CmmAGraph -> FCode ()
+emitOutOfLine l stmts = emitCgStmt (CgFork l stmts)
+
emitProcWithConvention :: Convention -> CmmInfoTable -> CLabel -> [CmmFormal] ->
CmmAGraph -> FCode ()
emitProcWithConvention conv info lbl args blocks
@@ -629,6 +668,53 @@ getCmm code
; setState $ state2 { cgs_tops = cgs_tops state1 }
; return (fromOL (cgs_tops state2)) }
+
+mkCmmIfThenElse :: CmmExpr -> CmmAGraph -> CmmAGraph -> FCode CmmAGraph
+mkCmmIfThenElse e tbranch fbranch = do
+ endif <- newLabelC
+ tid <- newLabelC
+ fid <- newLabelC
+ return $ mkCbranch e tid fid <*>
+ mkLabel tid <*> tbranch <*> mkBranch endif <*>
+ mkLabel fid <*> fbranch <*> mkLabel endif
+
+mkCmmIfThen :: CmmExpr -> CmmAGraph -> FCode CmmAGraph
+mkCmmIfThen e tbranch = do
+ endif <- newLabelC
+ tid <- newLabelC
+ return $ mkCbranch e tid endif <*>
+ mkLabel tid <*> tbranch <*> mkLabel endif
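
Because these combinators now allocate their labels in FCode, call sites bind the result and emit it; the pattern used throughout this patch, with `cond`, `tcode` and `fcode` standing in for real arguments:

    emitIfThenElse :: CmmExpr -> CmmAGraph -> CmmAGraph -> FCode ()
    emitIfThenElse cond tcode fcode = emit =<< mkCmmIfThenElse cond tcode fcode
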
+
+
+mkCall :: CmmExpr -> (Convention, Convention) -> [CmmFormal] -> [CmmActual]
+ -> UpdFrameOffset -> FCode CmmAGraph
+mkCall f (callConv, retConv) results actuals updfr_off = do
+ k <- newLabelC
+ let area = CallArea $ Young k
+ (off, copyin) = copyInOflow retConv area results
+ copyout = lastWithArgs Call area callConv actuals updfr_off
+ (toCall f (Just k) updfr_off off)
+ return (copyout <*> mkLabel k <*> copyin)
+
+
+mkCmmCall :: CmmExpr -> [CmmFormal] -> [CmmActual] -> UpdFrameOffset
+ -> FCode CmmAGraph
+mkCmmCall f results actuals
+ = mkCall f (NativeDirectCall, NativeReturn) results actuals
+
+
+mkSafeCall :: ForeignTarget -> [CmmFormal] -> [CmmActual]
+ -> UpdFrameOffset -> Bool
+ -> FCode CmmAGraph
+mkSafeCall t fs as upd i = do
+ k <- newLabelC
+ return
+ ( mkStore (CmmStackSlot (CallArea (Young k)) (widthInBytes wordWidth))
+ (CmmLit (CmmBlock k))
+ <*> mkLast (CmmForeignCall {tgt=t, res=fs, args=as, succ=k, updfr=upd, intrbl=i})
+ <*> mkLabel k)
+
+
-- ----------------------------------------------------------------------------
-- CgStmts
@@ -640,4 +726,3 @@ cgStmtsToBlocks :: CmmAGraph -> FCode CmmGraph
cgStmtsToBlocks stmts
= do { us <- newUniqSupply
; return (initUs_ us (lgraphOfAGraph stmts)) }
-
diff --git a/compiler/codeGen/StgCmmPrim.hs b/compiler/codeGen/StgCmmPrim.hs
index 1d5a5b3cda..5927faa78e 100644
--- a/compiler/codeGen/StgCmmPrim.hs
+++ b/compiler/codeGen/StgCmmPrim.hs
@@ -228,23 +228,23 @@ emitPrimOp [res] SparkOp [arg]
[(tmp2,NoHint)]
(CmmLit (CmmLabel (mkCmmCodeLabel rtsPackageId (fsLit "newSpark"))))
[(CmmReg (CmmGlobal BaseReg), AddrHint), ((CmmReg (CmmLocal tmp)), AddrHint)]
- emit (mkAssign (CmmLocal res) (CmmReg (CmmLocal tmp)))
+ emitAssign (CmmLocal res) (CmmReg (CmmLocal tmp))
emitPrimOp [res] GetCCSOfOp [arg]
- = emit (mkAssign (CmmLocal res) val)
+ = emitAssign (CmmLocal res) val
where
val | opt_SccProfilingOn = costCentreFrom (cmmUntag arg)
| otherwise = CmmLit zeroCLit
emitPrimOp [res] GetCurrentCCSOp [_dummy_arg]
- = emit (mkAssign (CmmLocal res) curCCS)
+ = emitAssign (CmmLocal res) curCCS
emitPrimOp [res] ReadMutVarOp [mutv]
- = emit (mkAssign (CmmLocal res) (cmmLoadIndexW mutv fixedHdrSize gcWord))
+ = emitAssign (CmmLocal res) (cmmLoadIndexW mutv fixedHdrSize gcWord)
emitPrimOp [] WriteMutVarOp [mutv,var]
= do
- emit (mkStore (cmmOffsetW mutv fixedHdrSize) var)
+ emitStore (cmmOffsetW mutv fixedHdrSize) var
emitCCall
[{-no results-}]
(CmmLit (CmmLabel mkDirty_MUT_VAR_Label))
@@ -268,32 +268,32 @@ emitPrimOp res@[] TouchOp args@[_arg]
-- #define byteArrayContentszh(r,a) r = BYTE_ARR_CTS(a)
emitPrimOp [res] ByteArrayContents_Char [arg]
- = emit (mkAssign (CmmLocal res) (cmmOffsetB arg arrWordsHdrSize))
+ = emitAssign (CmmLocal res) (cmmOffsetB arg arrWordsHdrSize)
-- #define stableNameToIntzh(r,s) (r = ((StgStableName *)s)->sn)
emitPrimOp [res] StableNameToIntOp [arg]
- = emit (mkAssign (CmmLocal res) (cmmLoadIndexW arg fixedHdrSize bWord))
+ = emitAssign (CmmLocal res) (cmmLoadIndexW arg fixedHdrSize bWord)
-- #define eqStableNamezh(r,sn1,sn2) \
-- (r = (((StgStableName *)sn1)->sn == ((StgStableName *)sn2)->sn))
emitPrimOp [res] EqStableNameOp [arg1,arg2]
- = emit (mkAssign (CmmLocal res) (CmmMachOp mo_wordEq [
+ = emitAssign (CmmLocal res) (CmmMachOp mo_wordEq [
cmmLoadIndexW arg1 fixedHdrSize bWord,
cmmLoadIndexW arg2 fixedHdrSize bWord
- ]))
+ ])
emitPrimOp [res] ReallyUnsafePtrEqualityOp [arg1,arg2]
- = emit (mkAssign (CmmLocal res) (CmmMachOp mo_wordEq [arg1,arg2]))
+ = emitAssign (CmmLocal res) (CmmMachOp mo_wordEq [arg1,arg2])
-- #define addrToHValuezh(r,a) r=(P_)a
emitPrimOp [res] AddrToAnyOp [arg]
- = emit (mkAssign (CmmLocal res) arg)
+ = emitAssign (CmmLocal res) arg
-- #define dataToTagzh(r,a) r=(GET_TAG(((StgClosure *)a)->header.info))
-- Note: argument may be tagged!
emitPrimOp [res] DataToTagOp [arg]
- = emit (mkAssign (CmmLocal res) (getConstrTag (cmmUntag arg)))
+ = emitAssign (CmmLocal res) (getConstrTag (cmmUntag arg))
{- Freezing arrays-of-ptrs requires changing an info table, for the
benefit of the generational collector. It needs to scavenge mutable
@@ -316,7 +316,7 @@ emitPrimOp [res] UnsafeFreezeArrayArrayOp [arg]
-- #define unsafeFreezzeByteArrayzh(r,a) r=(a)
emitPrimOp [res] UnsafeFreezeByteArrayOp [arg]
- = emit (mkAssign (CmmLocal res) arg)
+ = emitAssign (CmmLocal res) arg
-- Copying pointer arrays
@@ -474,11 +474,11 @@ emitPrimOp [res] PopCntOp [w] = emitPopCntCall res w wordWidth
-- The rest just translate straightforwardly
emitPrimOp [res] op [arg]
| nopOp op
- = emit (mkAssign (CmmLocal res) arg)
+ = emitAssign (CmmLocal res) arg
| Just (mop,rep) <- narrowOp op
- = emit (mkAssign (CmmLocal res) $
- CmmMachOp (mop rep wordWidth) [CmmMachOp (mop wordWidth rep) [arg]])
+ = emitAssign (CmmLocal res) $
+ CmmMachOp (mop rep wordWidth) [CmmMachOp (mop wordWidth rep) [arg]]
emitPrimOp r@[res] op args
| Just prim <- callishOp op
@@ -723,15 +723,15 @@ loadArrPtrsSize addr = CmmLoad (cmmOffsetB addr off) bWord
mkBasicIndexedRead :: ByteOff -> Maybe MachOp -> CmmType
-> LocalReg -> CmmExpr -> CmmExpr -> FCode ()
mkBasicIndexedRead off Nothing read_rep res base idx
- = emit (mkAssign (CmmLocal res) (cmmLoadIndexOffExpr off read_rep base idx))
+ = emitAssign (CmmLocal res) (cmmLoadIndexOffExpr off read_rep base idx)
mkBasicIndexedRead off (Just cast) read_rep res base idx
- = emit (mkAssign (CmmLocal res) (CmmMachOp cast [
- cmmLoadIndexOffExpr off read_rep base idx]))
+ = emitAssign (CmmLocal res) (CmmMachOp cast [
+ cmmLoadIndexOffExpr off read_rep base idx])
mkBasicIndexedWrite :: ByteOff -> Maybe MachOp
-> CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
mkBasicIndexedWrite off Nothing base idx val
- = emit (mkStore (cmmIndexOffExpr off (typeWidth (cmmExprType val)) base idx) val)
+ = emitStore (cmmIndexOffExpr off (typeWidth (cmmExprType val)) base idx) val
mkBasicIndexedWrite off (Just cast) base idx val
= mkBasicIndexedWrite off Nothing base idx (CmmMachOp cast [val])
@@ -782,7 +782,7 @@ doCopyMutableByteArrayOp = emitCopyByteArray copy
getCode $ emitMemmoveCall dst_p src_p bytes (CmmLit (mkIntCLit 1)),
getCode $ emitMemcpyCall dst_p src_p bytes (CmmLit (mkIntCLit 1))
]
- emit $ mkCmmIfThenElse (cmmEqWord src dst) moveCall cpyCall
+ emit =<< mkCmmIfThenElse (cmmEqWord src dst) moveCall cpyCall
emitCopyByteArray :: (CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr
-> FCode ())
@@ -840,7 +840,7 @@ doCopyMutableArrayOp = emitCopyArray copy
getCode $ emitMemmoveCall dst_p src_p bytes (CmmLit (mkIntCLit wORD_SIZE)),
getCode $ emitMemcpyCall dst_p src_p bytes (CmmLit (mkIntCLit wORD_SIZE))
]
- emit $ mkCmmIfThenElse (cmmEqWord src dst) moveCall cpyCall
+ emit =<< mkCmmIfThenElse (cmmEqWord src dst) moveCall cpyCall
emitCopyArray :: (CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr -> CmmExpr
-> FCode ())
diff --git a/compiler/codeGen/StgCmmProf.hs b/compiler/codeGen/StgCmmProf.hs
index 6d16f012b3..c147708cef 100644
--- a/compiler/codeGen/StgCmmProf.hs
+++ b/compiler/codeGen/StgCmmProf.hs
@@ -103,7 +103,7 @@ initUpdFrameProf :: CmmExpr -> FCode ()
-- Initialise the profiling field of an update frame
initUpdFrameProf frame_amode
= ifProfiling $ -- frame->header.prof.ccs = CCCS
- emit (mkStore (cmmOffsetB frame_amode oFFSET_StgHeader_ccs) curCCS)
+ emitStore (cmmOffsetB frame_amode oFFSET_StgHeader_ccs) curCCS
-- frame->header.prof.hp.rs = NULL (or frame-header.prof.hp.ldvw = 0)
-- is unnecessary because it is not used anyhow.
@@ -143,7 +143,7 @@ saveCurrentCostCentre
= return Nothing
| otherwise
= do { local_cc <- newTemp ccType
- ; emit (mkAssign (CmmLocal local_cc) curCCS)
+ ; emitAssign (CmmLocal local_cc) curCCS
; return (Just local_cc) }
restoreCurrentCostCentre :: Maybe LocalReg -> FCode ()
@@ -337,9 +337,9 @@ ldvEnter cl_ptr
-- if (era > 0) {
-- LDVW((c)) = (LDVW((c)) & LDV_CREATE_MASK) |
-- era | LDV_STATE_USE }
- emit (mkCmmIfThenElse (CmmMachOp mo_wordUGt [loadEra, CmmLit zeroCLit])
+ emit =<< mkCmmIfThenElse (CmmMachOp mo_wordUGt [loadEra, CmmLit zeroCLit])
(mkStore ldv_wd new_ldv_wd)
- mkNop)
+ mkNop
where
-- don't forget to substract node's tag
ldv_wd = ldvWord cl_ptr
diff --git a/compiler/codeGen/StgCmmTicky.hs b/compiler/codeGen/StgCmmTicky.hs
index a6c592cfd8..ea74a03e1e 100644
--- a/compiler/codeGen/StgCmmTicky.hs
+++ b/compiler/codeGen/StgCmmTicky.hs
@@ -181,7 +181,7 @@ registerTickyCtr :: CLabel -> FCode ()
-- ticky_entry_ctrs = & (f_ct); /* mark it as "registered" */
-- f_ct.registeredp = 1 }
registerTickyCtr ctr_lbl
- = emit (mkCmmIfThen test (catAGraphs register_stmts))
+ = emit =<< mkCmmIfThen test (catAGraphs register_stmts)
where
-- krc: code generator doesn't handle Not, so we test for Eq 0 instead
test = CmmMachOp (MO_Eq wordWidth)
@@ -353,7 +353,7 @@ bumpHistogram _lbl _n
bumpHistogramE :: LitString -> CmmExpr -> FCode ()
bumpHistogramE lbl n
= do t <- newTemp cLong
- emit (mkAssign (CmmLocal t) n)
+ emitAssign (CmmLocal t) n
emit (mkCmmIfThen (CmmMachOp (MO_U_Le cLongWidth) [CmmReg (CmmLocal t), eight])
(mkAssign (CmmLocal t) eight))
emit (addToMem cLong
diff --git a/compiler/codeGen/StgCmmUtils.hs b/compiler/codeGen/StgCmmUtils.hs
index c3327138b3..93a8bf317b 100644
--- a/compiler/codeGen/StgCmmUtils.hs
+++ b/compiler/codeGen/StgCmmUtils.hs
@@ -18,12 +18,11 @@ module StgCmmUtils (
emitDataLits, mkDataLits,
emitRODataLits, mkRODataLits,
emitRtsCall, emitRtsCallWithVols, emitRtsCallWithResult, emitRtsCallGen,
- assignTemp, newTemp, withTemp,
+ assignTemp, newTemp,
newUnboxedTupleRegs,
- mkMultiAssign, mkCmmSwitch, mkCmmLitSwitch,
- emitSwitch,
+ emitMultiAssign, emitCmmLitSwitch, emitSwitch,
tagToClosure, mkTaggedObjectLoad,
@@ -202,14 +201,14 @@ emitRtsCallGen
emitRtsCallGen res pkg fun args _vols safe
= do { updfr_off <- getUpdFrameOff
; emit caller_save
- ; emit $ call updfr_off
+ ; call updfr_off
; emit caller_load }
where
call updfr_off =
if safe then
- mkCmmCall fun_expr res' args' updfr_off
+ emit =<< mkCmmCall fun_expr res' args' updfr_off
else
- mkUnsafeCall (ForeignTarget fun_expr
+ emit $ mkUnsafeCall (ForeignTarget fun_expr
(ForeignConvention CCallConv arg_hints res_hints)) res' args'
(args', arg_hints) = unzip args
(res', res_hints) = unzip res
@@ -439,7 +438,7 @@ assignTemp :: CmmExpr -> FCode LocalReg
assignTemp (CmmReg (CmmLocal reg)) = return reg
assignTemp e = do { uniq <- newUnique
; let reg = LocalReg uniq (cmmExprType e)
- ; emit (mkAssign (CmmLocal reg) e)
+ ; emitAssign (CmmLocal reg) e
; return reg }
newTemp :: CmmType -> FCode LocalReg
@@ -469,10 +468,10 @@ newUnboxedTupleRegs res_ty
-------------------------------------------------------------------------
--- mkMultiAssign
+-- emitMultiAssign
-------------------------------------------------------------------------
-mkMultiAssign :: [LocalReg] -> [CmmExpr] -> CmmAGraph
+emitMultiAssign :: [LocalReg] -> [CmmExpr] -> FCode ()
-- Emit code to perform the assignments in the
-- input simultaneously, using temporary variables when necessary.
@@ -487,14 +486,13 @@ type Stmt = (LocalReg, CmmExpr) -- r := e
-- s1 assigns to something s2 uses
-- that is, if s1 should *follow* s2 in the final order
-mkMultiAssign [] [] = mkNop
-mkMultiAssign [reg] [rhs] = mkAssign (CmmLocal reg) rhs
-mkMultiAssign regs rhss = ASSERT( equalLength regs rhss )
- unscramble ([1..] `zip` (regs `zip` rhss))
+emitMultiAssign [] [] = return ()
+emitMultiAssign [reg] [rhs] = emitAssign (CmmLocal reg) rhs
+emitMultiAssign regs rhss = ASSERT( equalLength regs rhss )
+ unscramble ([1..] `zip` (regs `zip` rhss))
-unscramble :: [Vrtx] -> CmmAGraph
-unscramble vertices
- = catAGraphs (map do_component components)
+unscramble :: [Vrtx] -> FCode ()
+unscramble vertices = mapM_ do_component components
where
edges :: [ (Vrtx, Key, [Key]) ]
edges = [ (vertex, key1, edges_from stmt1)
@@ -509,19 +507,19 @@ unscramble vertices
-- do_components deal with one strongly-connected component
-- Not cyclic, or singleton? Just do it
- do_component :: SCC Vrtx -> CmmAGraph
- do_component (AcyclicSCC (_,stmt)) = mk_graph stmt
+ do_component :: SCC Vrtx -> FCode ()
+ do_component (AcyclicSCC (_,stmt)) = mk_graph stmt
do_component (CyclicSCC []) = panic "do_component"
do_component (CyclicSCC [(_,stmt)]) = mk_graph stmt
-- Cyclic? Then go via temporaries. Pick one to
-- break the loop and try again with the rest.
- do_component (CyclicSCC ((_,first_stmt) : rest))
- = withUnique $ \u ->
+ do_component (CyclicSCC ((_,first_stmt) : rest)) = do
+ u <- newUnique
let (to_tmp, from_tmp) = split u first_stmt
- in mk_graph to_tmp
- <*> unscramble rest
- <*> mk_graph from_tmp
+ mk_graph to_tmp
+ unscramble rest
+ mk_graph from_tmp
split :: Unique -> Stmt -> (Stmt, Stmt)
split uniq (reg, rhs)
@@ -530,8 +528,8 @@ unscramble vertices
rep = cmmExprType rhs
tmp = LocalReg uniq rep
- mk_graph :: Stmt -> CmmAGraph
- mk_graph (reg, rhs) = mkAssign (CmmLocal reg) rhs
+ mk_graph :: Stmt -> FCode ()
+ mk_graph (reg, rhs) = emitAssign (CmmLocal reg) rhs
mustFollow :: Stmt -> Stmt -> Bool
(reg, _) `mustFollow` (_, rhs) = CmmLocal reg `regUsedIn` rhs
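
For instance (with illustrative LocalRegs r1 and r2 only), a parallel swap cannot be expressed as two sequential assignments; emitMultiAssign spots the cycle and routes one value through a fresh temporary:

    swapRegs :: LocalReg -> LocalReg -> FCode ()
    swapRegs r1 r2 =
      emitMultiAssign [r1, r2] [CmmReg (CmmLocal r2), CmmReg (CmmLocal r1)]
      -- emits, roughly: tmp := r2; r2 := r1; r1 := tmp
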
@@ -549,7 +547,7 @@ emitSwitch :: CmmExpr -- Tag to switch on
-> FCode ()
emitSwitch tag_expr branches mb_deflt lo_tag hi_tag
= do { dflags <- getDynFlags
- ; emit (mkCmmSwitch (via_C dflags) tag_expr branches mb_deflt lo_tag hi_tag) }
+ ; mkCmmSwitch (via_C dflags) tag_expr branches mb_deflt lo_tag hi_tag }
where
via_C dflags | HscC <- hscTarget dflags = True
| otherwise = False
@@ -561,23 +559,25 @@ mkCmmSwitch :: Bool -- True <=> never generate a conditional tree
-> Maybe CmmAGraph -- Default branch (if any)
-> ConTagZ -> ConTagZ -- Min and Max possible values; behaviour
-- outside this range is undefined
- -> CmmAGraph
+ -> FCode ()
-- First, two rather common cases in which there is no work to do
-mkCmmSwitch _ _ [] (Just code) _ _ = code
-mkCmmSwitch _ _ [(_,code)] Nothing _ _ = code
+mkCmmSwitch _ _ [] (Just code) _ _ = emit code
+mkCmmSwitch _ _ [(_,code)] Nothing _ _ = emit code
-- Right, off we go
-mkCmmSwitch via_C tag_expr branches mb_deflt lo_tag hi_tag
- = withFreshLabel "switch join" $ \ join_lbl ->
- label_default join_lbl mb_deflt $ \ mb_deflt ->
- label_branches join_lbl branches $ \ branches ->
- assignTemp' tag_expr $ \tag_expr' ->
+mkCmmSwitch via_C tag_expr branches mb_deflt lo_tag hi_tag = do
+ join_lbl <- newLabelC
+ mb_deflt_lbl <- label_default join_lbl mb_deflt
+ branches_lbls <- label_branches join_lbl branches
+ tag_expr' <- assignTemp' tag_expr
- mk_switch tag_expr' (sortLe le branches) mb_deflt
- lo_tag hi_tag via_C
- -- Sort the branches before calling mk_switch
- <*> mkLabel join_lbl
+ emit =<< mk_switch tag_expr' (sortLe le branches_lbls) mb_deflt_lbl
+ lo_tag hi_tag via_C
+
+ -- Sort the branches before calling mk_switch
+
+ emitLabel join_lbl
where
(t1,_) `le` (t2,_) = t1 <= t2
@@ -585,17 +585,17 @@ mkCmmSwitch via_C tag_expr branches mb_deflt lo_tag hi_tag
mk_switch :: CmmExpr -> [(ConTagZ, BlockId)]
-> Maybe BlockId
-> ConTagZ -> ConTagZ -> Bool
- -> CmmAGraph
+ -> FCode CmmAGraph
-- SINGLETON TAG RANGE: no case analysis to do
mk_switch _tag_expr [(tag, lbl)] _ lo_tag hi_tag _via_C
| lo_tag == hi_tag
= ASSERT( tag == lo_tag )
- mkBranch lbl
+ return (mkBranch lbl)
-- SINGLETON BRANCH, NO DEFAULT: no case analysis to do
mk_switch _tag_expr [(_tag,lbl)] Nothing _ _ _
- = mkBranch lbl
+ = return (mkBranch lbl)
-- The simplifier might have eliminated a case
-- so we may have e.g. case xs of
-- [] -> e
@@ -604,7 +604,7 @@ mk_switch _tag_expr [(_tag,lbl)] Nothing _ _ _
-- SINGLETON BRANCH: one equality check to do
mk_switch tag_expr [(tag,lbl)] (Just deflt) _ _ _
- = mkCbranch cond deflt lbl
+ = return (mkCbranch cond deflt lbl)
where
cond = cmmNeWord tag_expr (CmmLit (mkIntCLit tag))
-- We have lo_tag < hi_tag, but there's only one branch,
@@ -637,30 +637,34 @@ mk_switch tag_expr branches mb_deflt lo_tag hi_tag via_C
arms :: [Maybe BlockId]
arms = [ find_branch i | i <- [real_lo_tag..real_hi_tag]]
in
- mkSwitch (cmmOffset tag_expr (- real_lo_tag)) arms
+ return (mkSwitch (cmmOffset tag_expr (- real_lo_tag)) arms)
-- if we can knock off a bunch of default cases with one if, then do so
| Just deflt <- mb_deflt, (lowest_branch - lo_tag) >= n_branches
- = mkCmmIfThenElse
+ = do stmts <- mk_switch tag_expr branches mb_deflt
+ lowest_branch hi_tag via_C
+ mkCmmIfThenElse
(cmmULtWord tag_expr (CmmLit (mkIntCLit lowest_branch)))
(mkBranch deflt)
- (mk_switch tag_expr branches mb_deflt
- lowest_branch hi_tag via_C)
+ stmts
| Just deflt <- mb_deflt, (hi_tag - highest_branch) >= n_branches
- = mkCmmIfThenElse
+ = do stmts <- mk_switch tag_expr branches mb_deflt
+ lo_tag highest_branch via_C
+ mkCmmIfThenElse
(cmmUGtWord tag_expr (CmmLit (mkIntCLit highest_branch)))
(mkBranch deflt)
- (mk_switch tag_expr branches mb_deflt
- lo_tag highest_branch via_C)
+ stmts
| otherwise -- Use an if-tree
- = mkCmmIfThenElse
+ = do lo_stmts <- mk_switch tag_expr lo_branches mb_deflt
+ lo_tag (mid_tag-1) via_C
+ hi_stmts <- mk_switch tag_expr hi_branches mb_deflt
+ mid_tag hi_tag via_C
+ mkCmmIfThenElse
(cmmUGeWord tag_expr (CmmLit (mkIntCLit mid_tag)))
- (mk_switch tag_expr hi_branches mb_deflt
- mid_tag hi_tag via_C)
- (mk_switch tag_expr lo_branches mb_deflt
- lo_tag (mid_tag-1) via_C)
+ hi_stmts
+ lo_stmts
-- we test (e >= mid_tag) rather than (e < mid_tag), because
-- the former works better when e is a comparison, and there
-- are two tags 0 & 1 (mid_tag == 1). In this case, the code
@@ -715,32 +719,32 @@ mk_switch tag_expr branches mb_deflt lo_tag hi_tag via_C
is_lo (t,_) = t < mid_tag
--------------
-mkCmmLitSwitch :: CmmExpr -- Tag to switch on
+emitCmmLitSwitch :: CmmExpr -- Tag to switch on
-> [(Literal, CmmAGraph)] -- Tagged branches
-> CmmAGraph -- Default branch (always)
- -> CmmAGraph -- Emit the code
+ -> FCode () -- Emit the code
-- Used for general literals, whose size might not be a word,
-- where there is always a default case, and where we don't know
-- the range of values for certain. For simplicity we always generate a tree.
--
-- ToDo: for integers we could do better here, perhaps by generalising
-- mk_switch and using that. --SDM 15/09/2004
-mkCmmLitSwitch _scrut [] deflt = deflt
-mkCmmLitSwitch scrut branches deflt
- = assignTemp' scrut $ \ scrut' ->
- withFreshLabel "switch join" $ \ join_lbl ->
- label_code join_lbl deflt $ \ deflt ->
- label_branches join_lbl branches $ \ branches ->
- mk_lit_switch scrut' deflt (sortLe le branches)
- <*> mkLabel join_lbl
+emitCmmLitSwitch _scrut [] deflt = emit deflt
+emitCmmLitSwitch scrut branches deflt = do
+ scrut' <- assignTemp' scrut
+ join_lbl <- newLabelC
+ deflt_lbl <- label_code join_lbl deflt
+ branches_lbls <- label_branches join_lbl branches
+ emit =<< mk_lit_switch scrut' deflt_lbl (sortLe le branches_lbls)
+ emitLabel join_lbl
where
le (t1,_) (t2,_) = t1 <= t2
mk_lit_switch :: CmmExpr -> BlockId
-> [(Literal,BlockId)]
- -> CmmAGraph
+ -> FCode CmmAGraph
mk_lit_switch scrut deflt [(lit,blk)]
- = mkCbranch (CmmMachOp ne [scrut, CmmLit cmm_lit]) deflt blk
+ = return (mkCbranch (CmmMachOp ne [scrut, CmmLit cmm_lit]) deflt blk)
where
cmm_lit = mkSimpleLit lit
cmm_ty = cmmLitType cmm_lit
@@ -748,9 +752,9 @@ mk_lit_switch scrut deflt [(lit,blk)]
ne = if isFloatType cmm_ty then MO_F_Ne rep else MO_Ne rep
mk_lit_switch scrut deflt_blk_id branches
- = mkCmmIfThenElse cond
- (mk_lit_switch scrut deflt_blk_id lo_branches)
- (mk_lit_switch scrut deflt_blk_id hi_branches)
+ = do hi_blk <- mk_lit_switch scrut deflt_blk_id lo_branches
+ lo_blk <- mk_lit_switch scrut deflt_blk_id hi_branches
+ mkCmmIfThenElse cond lo_blk hi_blk
where
n_branches = length branches
(mid_lit,_) = branches !! (n_branches `div` 2)
@@ -764,49 +768,42 @@ mk_lit_switch scrut deflt_blk_id branches
--------------
-label_default :: BlockId -> Maybe CmmAGraph
- -> (Maybe BlockId -> CmmAGraph)
- -> CmmAGraph
-label_default _ Nothing thing_inside
- = thing_inside Nothing
-label_default join_lbl (Just code) thing_inside
- = label_code join_lbl code $ \ lbl ->
- thing_inside (Just lbl)
+label_default :: BlockId -> Maybe CmmAGraph -> FCode (Maybe BlockId)
+label_default _ Nothing
+ = return Nothing
+label_default join_lbl (Just code)
+ = do lbl <- label_code join_lbl code
+ return (Just lbl)
--------------
-label_branches :: BlockId -> [(a,CmmAGraph)]
- -> ([(a,BlockId)] -> CmmAGraph)
- -> CmmAGraph
-label_branches _join_lbl [] thing_inside
- = thing_inside []
-label_branches join_lbl ((tag,code):branches) thing_inside
- = label_code join_lbl code $ \ lbl ->
- label_branches join_lbl branches $ \ branches' ->
- thing_inside ((tag,lbl):branches')
+label_branches :: BlockId -> [(a,CmmAGraph)] -> FCode [(a,BlockId)]
+label_branches _join_lbl []
+ = return []
+label_branches join_lbl ((tag,code):branches)
+ = do lbl <- label_code join_lbl code
+ branches' <- label_branches join_lbl branches
+ return ((tag,lbl):branches')
--------------
-label_code :: BlockId -> CmmAGraph -> (BlockId -> CmmAGraph) -> CmmAGraph
--- (label_code J code fun)
+label_code :: BlockId -> CmmAGraph -> FCode BlockId
+-- label_code J code
-- generates
--- [L: code; goto J] fun L
-label_code join_lbl code thing_inside
- = withFreshLabel "switch" $ \lbl ->
- outOfLine (mkLabel lbl <*> code <*> mkBranch join_lbl)
- <*> thing_inside lbl
-
+-- [L: code; goto J]
+-- and returns L
+label_code join_lbl code = do
+ lbl <- newLabelC
+ emitOutOfLine lbl (code <*> mkBranch join_lbl)
+ return lbl
--------------
-assignTemp' :: CmmExpr -> (CmmExpr -> CmmAGraph) -> CmmAGraph
-assignTemp' e thing_inside
- | isTrivialCmmExpr e = thing_inside e
- | otherwise = withTemp (cmmExprType e) $ \ lreg ->
- let reg = CmmLocal lreg in
- mkAssign reg e <*> thing_inside (CmmReg reg)
-
-withTemp :: CmmType -> (LocalReg -> CmmAGraph) -> CmmAGraph
-withTemp rep thing_inside
- = withUnique $ \uniq -> thing_inside (LocalReg uniq rep)
-
+assignTemp' :: CmmExpr -> FCode CmmExpr
+assignTemp' e
+ | isTrivialCmmExpr e = return e
+ | otherwise = do
+ lreg <- newTemp (cmmExprType e)
+ let reg = CmmLocal lreg
+ emitAssign reg e
+ return (CmmReg reg)
-------------------------------------------------------------------------
--