| author | Iavor S. Diatchki <iavor.diatchki@gmail.com> | 2014-07-19 14:29:57 -0700 |
|---|---|---|
| committer | Iavor S. Diatchki <iavor.diatchki@gmail.com> | 2014-07-19 14:29:57 -0700 |
| commit | 524634641c61ab42c555452f6f87119b27f6c331 (patch) | |
| tree | f78d17bb6b09fb3b2e22cb4d93c2a3d45accc2d9 | |
| parent | 79ad1d20c5500e17ce5daaf93b171131669bddad (diff) | |
| parent | c41b716d82b1722f909979d02a76e21e9b68886c (diff) | |
| download | haskell-wip/ext-solver.tar.gz | |
Merge branch 'master' into wip/ext-solver
847 files changed, 16923 insertions, 19435 deletions
diff --git a/.arcconfig b/.arcconfig new file mode 100644 index 0000000000..b9c39321ed --- /dev/null +++ b/.arcconfig @@ -0,0 +1,5 @@ +{ + "project.name" : "ghc", + "repository.callsign" : "GHC", + "phabricator.uri" : "https://phabricator.haskell.org" +} diff --git a/.arclint b/.arclint new file mode 100644 index 0000000000..21ca5f08c0 --- /dev/null +++ b/.arclint @@ -0,0 +1,31 @@ +{ + "linters": { + "filename": { + "type": "filename" + }, + "generated": { + "type": "generated" + }, + "merge-conflict": { + "type": "merge-conflict" + }, + "nolint": { + "type": "nolint" + }, + "text": { + "type": "text", + "exclude": [ "(\\.xml$)" ], + "severity": { + "5": "disabled" + } + }, + "text-xml": { + "type": "text", + "include": "(\\.xml$)", + "severity": { + "5": "disabled", + "3": "disabled" + } + } + } +} diff --git a/.gitignore b/.gitignore index 93fb88153d..99bf3a6bab 100644 --- a/.gitignore +++ b/.gitignore @@ -46,27 +46,6 @@ _darcs/ # sub-repositories /ghc-tarballs/ -/libffi-tarballs/ -/libraries/array/ -/libraries/deepseq/ -/libraries/directory/ -/libraries/dph/ -/libraries/extensible-exceptions/ -/libraries/filepath/ -/libraries/haskell2010/ -/libraries/haskell98/ -/libraries/hoopl/ -/libraries/hpc/ -/libraries/mtl/ -/libraries/old-locale/ -/libraries/old-time/ -/libraries/parallel/ -/libraries/process/ -/libraries/stm/ -/libraries/unix/ -/libraries/utf8-string/ -/nofib/ -/utils/hsc2hs/ # ----------------------------------------------------------------------------- # Cabal dist directories @@ -167,6 +146,7 @@ _darcs/ /utils/runghc/runghc.cabal /extra-gcc-opts +/sdistprep .tm_properties VERSION diff --git a/.gitmodules b/.gitmodules index 99893a4ab9..b5e29b9e61 100644 --- a/.gitmodules +++ b/.gitmodules @@ -54,6 +54,79 @@ path = libraries/random url = ../packages/random.git ignore = untracked +[submodule "libraries/array"] + path = libraries/array + url = ../packages/array.git + ignore = none +[submodule "libraries/deepseq"] + path = libraries/deepseq + url = ../packages/deepseq.git + ignore = none +[submodule "libraries/directory"] + path = libraries/directory + url = ../packages/directory.git + ignore = none +[submodule "libraries/filepath"] + path = libraries/filepath + url = ../packages/filepath.git + ignore = none +[submodule "libraries/haskell98"] + path = libraries/haskell98 + url = ../packages/haskell98.git + ignore = none +[submodule "libraries/haskell2010"] + path = libraries/haskell2010 + url = ../packages/haskell2010.git + ignore = none +[submodule "libraries/hoopl"] + path = libraries/hoopl + url = ../packages/hoopl.git + ignore = none +[submodule "libraries/hpc"] + path = libraries/hpc + url = ../packages/hpc.git + ignore = none +[submodule "libraries/old-locale"] + path = libraries/old-locale + url = ../packages/old-locale.git + ignore = none +[submodule "libraries/old-time"] + path = libraries/old-time + url = ../packages/old-time.git + ignore = none +[submodule "libraries/process"] + path = libraries/process + url = ../packages/process.git + ignore = none +[submodule "libraries/unix"] + path = libraries/unix + url = ../packages/unix.git + ignore = none +[submodule "libraries/parallel"] + path = libraries/parallel + url = ../packages/parallel.git + ignore = none +[submodule "libraries/stm"] + path = libraries/stm + url = ../packages/stm.git + ignore = none +[submodule "libraries/dph"] + path = libraries/dph + url = ../packages/dph.git + ignore = none [submodule "utils/haddock"] path = utils/haddock url = ../haddock.git + ignore = none +[submodule "nofib"] + path = nofib + 
url = ../nofib.git + ignore = none +[submodule "utils/hsc2hs"] + path = utils/hsc2hs + url = ../hsc2hs.git + ignore = none +[submodule "libffi-tarballs"] + path = libffi-tarballs + url = ../libffi-tarballs.git + ignore = none diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000000..8b64940960 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,36 @@ +git: + submodules: false + +notifications: + email: + - mail@joachim-breitner.de + - ghc-builds@haskell.org + +env: + - DEBUG_STAGE2=YES + - DEBUG_STAGE2=NO + +before_install: + - git config --global url."git://github.com/ghc/packages-".insteadOf git://github.com/ghc/packages/ + - git config --global url."http://github.com/ghc/packages-".insteadOf http://github.com/ghc/packages/ + - git config --global url."https://github.com/ghc/packages-".insteadOf https://github.com/ghc/packages/ + - git config --global url."ssh://git@github.com/ghc/packages-".insteadOf ssh://git@github.com/ghc/packages/ + - git config --global url."git@github.com:/ghc/packages-".insteadOf git@github.com:/ghc/packages/ + - git submodule update --init --recursive +install: + - sudo apt-get update + - sudo apt-get install haskell-platform autoconf libtool make ncurses-dev g++ dblatex docbook-xsl docbook-utils + - sudo cabal update + - sudo cabal install --global happy alex process +script: + - echo 'V = 0' >> mk/validate.mk # otherwise we hit log file limits on travis. + # do not build docs + - echo 'HADDOCK_DOCS = NO' >> mk/validate.mk + - echo 'BUILD_DOCBOOK_HTML = NO' >> mk/validate.mk + - echo 'BUILD_DOCBOOK_PS = NO' >> mk/validate.mk + - echo 'BUILD_DOCBOOK_PDF = NO' >> mk/validate.mk + # do not build dynamic libraries + - echo 'DYNAMIC_GHC_PROGRAMS = NO' >> mk/validate.mk + - echo 'GhcLibWays = v' >> mk/validate.mk + - if [ "$DEBUG_STAGE" = "YES" ]; then echo 'GhcStage2HcOpts += -DDEBUG' >> mk/validate.mk; fi + - CPUS=2 SKIP_PERF_TESTS=YES PATH=~/.cabal/bin:$PATH ./validate --fast --no-dph @@ -33,7 +33,7 @@ default : all help: @cat MAKEHELP -ifneq "$(filter clean help,$(MAKECMDGOALS))" "" +ifneq "$(filter maintainer-clean distclean clean help,$(MAKECMDGOALS))" "" -include mk/config.mk else include mk/config.mk @@ -1,6 +1,8 @@ The Glasgow Haskell Compiler ============================ +[](http://travis-ci.org/ghc/ghc) + This is the source tree for [GHC][1], a compiler and interactive environment for the Haskell functional programming language. 
diff --git a/aclocal.m4 b/aclocal.m4 index f9b574b360..42f760c2ed 100644 --- a/aclocal.m4 +++ b/aclocal.m4 @@ -454,6 +454,8 @@ AC_DEFUN([FP_SETTINGS], then mingw_bin_prefix=mingw/bin/ SettingsCCompilerCommand="\$topdir/../${mingw_bin_prefix}gcc.exe" + SettingsHaskellCPPCommand="\$topdir/../${mingw_bin_prefix}gcc.exe" + SettingsHaskellCPPFlags="$HaskellCPPArgs" SettingsLdCommand="\$topdir/../${mingw_bin_prefix}ld.exe" SettingsArCommand="\$topdir/../${mingw_bin_prefix}ar.exe" SettingsPerlCommand='$topdir/../perl/perl.exe' @@ -462,6 +464,8 @@ AC_DEFUN([FP_SETTINGS], SettingsTouchCommand='$topdir/touchy.exe' else SettingsCCompilerCommand="$WhatGccIsCalled" + SettingsHaskellCPPCommand="$HaskellCPPCmd" + SettingsHaskellCPPFlags="$HaskellCPPArgs" SettingsLdCommand="$LdCmd" SettingsArCommand="$ArCmd" SettingsPerlCommand="$PerlCmd" @@ -486,6 +490,8 @@ AC_DEFUN([FP_SETTINGS], SettingsCCompilerLinkFlags="$CONF_GCC_LINKER_OPTS_STAGE2" SettingsLdFlags="$CONF_LD_LINKER_OPTS_STAGE2" AC_SUBST(SettingsCCompilerCommand) + AC_SUBST(SettingsHaskellCPPCommand) + AC_SUBST(SettingsHaskellCPPFlags) AC_SUBST(SettingsCCompilerFlags) AC_SUBST(SettingsCCompilerLinkFlags) AC_SUBST(SettingsLdCommand) @@ -520,6 +526,9 @@ AC_DEFUN([FPTOOLS_SET_C_LD_FLAGS], esac case $$1 in + i386-unknown-mingw32) + $2="$$2 -march=i686" + ;; i386-apple-darwin) $2="$$2 -m32" $3="$$3 -m32" @@ -532,6 +541,12 @@ AC_DEFUN([FPTOOLS_SET_C_LD_FLAGS], $4="$$4 -arch x86_64" $5="$$5 -m64" ;; + x86_64-unknown-solaris2) + $2="$$2 -m64" + $3="$$3 -m64" + $4="$$4 -m64" + $5="$$5 -m64" + ;; alpha-*) # For now, to suppress the gcc warning "call-clobbered # register used for global register variable", we simply @@ -706,6 +721,8 @@ AC_ARG_WITH($2, ) ]) # FP_ARG_WITH_PATH_GNU_PROG_OPTIONAL + + # FP_PROG_CONTEXT_DIFF # -------------------- # Figure out how to do context diffs. Sets the output variable ContextDiffCmd. @@ -875,7 +892,7 @@ else fi; changequote([, ])dnl ]) -if test ! -f compiler/parser/Parser.hs || test ! -f compiler/cmm/CmmParse.hs || test ! -f compiler/parser/ParserCore.hs +if test ! -f compiler/parser/Parser.hs || test ! -f compiler/cmm/CmmParse.hs then FP_COMPARE_VERSIONS([$fptools_cv_happy_version],[-lt],[1.19], [AC_MSG_ERROR([Happy version 1.19 or later is required to compile GHC.])])[] @@ -2101,4 +2118,6 @@ AC_DEFUN([MAYBE_OVERRIDE_STAGE0],[ fi ]) + + # LocalWords: fi @@ -1,5 +1,6 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl +use warnings; use strict; use Cwd; diff --git a/compiler/basicTypes/BasicTypes.lhs b/compiler/basicTypes/BasicTypes.lhs index 9a92b003bc..f4a7aaf335 100644 --- a/compiler/basicTypes/BasicTypes.lhs +++ b/compiler/basicTypes/BasicTypes.lhs @@ -41,7 +41,7 @@ module BasicTypes( TopLevelFlag(..), isTopLevel, isNotTopLevel, - OverlapFlag(..), + OverlapFlag(..), OverlapMode(..), setOverlapModeMaybe, Boxity(..), isBoxed, @@ -447,9 +447,19 @@ instance Outputable Origin where -- | The semantics allowed for overlapping instances for a particular -- instance. See Note [Safe Haskell isSafeOverlap] (in `InstEnv.lhs`) for a -- explanation of the `isSafeOverlap` field. 
-data OverlapFlag +data OverlapFlag = OverlapFlag + { overlapMode :: OverlapMode + , isSafeOverlap :: Bool + } deriving (Eq, Data, Typeable) + +setOverlapModeMaybe :: OverlapFlag -> Maybe OverlapMode -> OverlapFlag +setOverlapModeMaybe f Nothing = f +setOverlapModeMaybe f (Just m) = f { overlapMode = m } + + +data OverlapMode -- | This instance must not overlap another - = NoOverlap { isSafeOverlap :: Bool } + = NoOverlap -- | Silently ignore this instance if you find a -- more specific one that matches the constraint @@ -461,7 +471,7 @@ data OverlapFlag -- Since the second instance has the OverlapOk flag, -- the first instance will be chosen (otherwise -- its ambiguous which to choose) - | OverlapOk { isSafeOverlap :: Bool } + | OverlapOk -- | Silently ignore this instance if you find any other that matches the -- constraing you are trying to resolve, including when checking if there are @@ -473,13 +483,16 @@ data OverlapFlag -- Without the Incoherent flag, we'd complain that -- instantiating 'b' would change which instance -- was chosen. See also note [Incoherent instances] - | Incoherent { isSafeOverlap :: Bool } + | Incoherent deriving (Eq, Data, Typeable) instance Outputable OverlapFlag where - ppr (NoOverlap b) = empty <+> pprSafeOverlap b - ppr (OverlapOk b) = ptext (sLit "[overlap ok]") <+> pprSafeOverlap b - ppr (Incoherent b) = ptext (sLit "[incoherent]") <+> pprSafeOverlap b + ppr flag = ppr (overlapMode flag) <+> pprSafeOverlap (isSafeOverlap flag) + +instance Outputable OverlapMode where + ppr NoOverlap = empty + ppr OverlapOk = ptext (sLit "[overlap ok]") + ppr Incoherent = ptext (sLit "[incoherent]") pprSafeOverlap :: Bool -> SDoc pprSafeOverlap True = ptext $ sLit "[safe]" diff --git a/compiler/basicTypes/ConLike.lhs b/compiler/basicTypes/ConLike.lhs index de10d0fb0a..3414aa4230 100644 --- a/compiler/basicTypes/ConLike.lhs +++ b/compiler/basicTypes/ConLike.lhs @@ -5,6 +5,7 @@ \section[ConLike]{@ConLike@: Constructor-like things} \begin{code} +{-# LANGUAGE CPP, DeriveDataTypeable #-} module ConLike ( ConLike(..) diff --git a/compiler/basicTypes/DataCon.lhs b/compiler/basicTypes/DataCon.lhs index ad56290694..0dcf98f6c5 100644 --- a/compiler/basicTypes/DataCon.lhs +++ b/compiler/basicTypes/DataCon.lhs @@ -5,7 +5,8 @@ \section[DataCon]{@DataCon@: Data Constructors} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, DeriveDataTypeable #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/basicTypes/Demand.lhs b/compiler/basicTypes/Demand.lhs index 8a082b98ad..ed055b5808 100644 --- a/compiler/basicTypes/Demand.lhs +++ b/compiler/basicTypes/Demand.lhs @@ -5,6 +5,7 @@ \section[Demand]{@Demand@: A decoupled implementation of a demand domain} \begin{code} +{-# LANGUAGE CPP, FlexibleInstances, TypeSynonymInstances #-} module Demand ( StrDmd, UseDmd(..), Count(..), @@ -41,7 +42,7 @@ module Demand ( deferAfterIO, postProcessUnsat, postProcessDmdTypeM, - splitProdDmd, splitProdDmd_maybe, peelCallDmd, mkCallDmd, + splitProdDmd_maybe, peelCallDmd, mkCallDmd, dmdTransformSig, dmdTransformDataConSig, dmdTransformDictSelSig, argOneShots, argsOneShots, trimToType, TypeShape(..), @@ -65,7 +66,7 @@ import BasicTypes import Binary import Maybes ( orElse ) -import Type ( Type ) +import Type ( Type, isUnLiftedType ) import TyCon ( isNewTyCon, isClassTyCon ) import DataCon ( splitDataProductType_maybe ) import FastString @@ -200,11 +201,13 @@ seqMaybeStr Lazy = () seqMaybeStr (Str s) = seqStrDmd s -- Splitting polymorphic demands -splitStrProdDmd :: Int -> StrDmd -> [MaybeStr] -splitStrProdDmd n HyperStr = replicate n strBot -splitStrProdDmd n HeadStr = replicate n strTop -splitStrProdDmd n (SProd ds) = ASSERT( ds `lengthIs` n) ds -splitStrProdDmd _ d@(SCall {}) = pprPanic "attempt to prod-split strictness call demand" (ppr d) +splitStrProdDmd :: Int -> StrDmd -> Maybe [MaybeStr] +splitStrProdDmd n HyperStr = Just (replicate n strBot) +splitStrProdDmd n HeadStr = Just (replicate n strTop) +splitStrProdDmd n (SProd ds) = ASSERT( ds `lengthIs` n) Just ds +splitStrProdDmd _ (SCall {}) = Nothing + -- This can happen when the programmer uses unsafeCoerce, + -- and we don't then want to crash the compiler (Trac #9208) \end{code} %************************************************************************ @@ -441,13 +444,15 @@ seqMaybeUsed (Use c u) = c `seq` seqUseDmd u seqMaybeUsed _ = () -- Splitting polymorphic Maybe-Used demands -splitUseProdDmd :: Int -> UseDmd -> [MaybeUsed] -splitUseProdDmd n Used = replicate n useTop -splitUseProdDmd n UHead = replicate n Abs -splitUseProdDmd n (UProd ds) = ASSERT2( ds `lengthIs` n, text "splitUseProdDmd" $$ ppr n $$ ppr ds ) ds -splitUseProdDmd _ d@(UCall _ _) = pprPanic "attempt to prod-split usage call demand" (ppr d) +splitUseProdDmd :: Int -> UseDmd -> Maybe [MaybeUsed] +splitUseProdDmd n Used = Just (replicate n useTop) +splitUseProdDmd n UHead = Just (replicate n Abs) +splitUseProdDmd n (UProd ds) = ASSERT2( ds `lengthIs` n, text "splitUseProdDmd" $$ ppr n $$ ppr ds ) + Just ds +splitUseProdDmd _ (UCall _ _) = Nothing + -- This can happen when the programmer uses unsafeCoerce, + -- and we don't then want to crash the compiler (Trac #9208) \end{code} - %************************************************************************ %* * \subsection{Joint domain for Strictness and Absence} @@ -719,26 +724,18 @@ can be expanded to saturate a callee's arity. \begin{code} -splitProdDmd :: Arity -> JointDmd -> [JointDmd] -splitProdDmd n (JD {strd = s, absd = u}) - = mkJointDmds (split_str s) (split_abs u) - where - split_str Lazy = replicate n Lazy - split_str (Str s) = splitStrProdDmd n s - - split_abs Abs = replicate n Abs - split_abs (Use _ u) = splitUseProdDmd n u - splitProdDmd_maybe :: JointDmd -> Maybe [JointDmd] -- Split a product into its components, iff there is any -- useful information to be extracted thereby -- The demand is not necessarily strict! 
splitProdDmd_maybe (JD {strd = s, absd = u}) = case (s,u) of - (Str (SProd sx), Use _ u) -> Just (mkJointDmds sx (splitUseProdDmd (length sx) u)) - (Str s, Use _ (UProd ux)) -> Just (mkJointDmds (splitStrProdDmd (length ux) s) ux) - (Lazy, Use _ (UProd ux)) -> Just (mkJointDmds (replicate (length ux) Lazy) ux) - _ -> Nothing + (Str (SProd sx), Use _ u) | Just ux <- splitUseProdDmd (length sx) u + -> Just (mkJointDmds sx ux) + (Str s, Use _ (UProd ux)) | Just sx <- splitStrProdDmd (length ux) s + -> Just (mkJointDmds sx ux) + (Lazy, Use _ (UProd ux)) -> Just (mkJointDmds (replicate (length ux) Lazy) ux) + _ -> Nothing \end{code} %************************************************************************ @@ -1204,13 +1201,18 @@ type DeferAndUse -- Describes how to degrade a result type type DeferAndUseM = Maybe DeferAndUse -- Nothing <=> absent-ify the result type; it will never be used -toCleanDmd :: Demand -> (CleanDemand, DeferAndUseM) --- See Note [Analyzing with lazy demand and lambdas] -toCleanDmd (JD { strd = s, absd = u }) +toCleanDmd :: Demand -> Type -> (CleanDemand, DeferAndUseM) +toCleanDmd (JD { strd = s, absd = u }) expr_ty = case (s,u) of - (Str s', Use c u') -> (CD { sd = s', ud = u' }, Just (False, c)) - (Lazy, Use c u') -> (CD { sd = HeadStr, ud = u' }, Just (True, c)) - (_, Abs) -> (CD { sd = HeadStr, ud = Used }, Nothing) + (Str s', Use c u') -> -- The normal case + (CD { sd = s', ud = u' }, Just (False, c)) + + (Lazy, Use c u') -> -- See Note [Analyzing with lazy demand and lambdas] + (CD { sd = HeadStr, ud = u' }, Just (True, c)) + + (_, Abs) -- See Note [Analysing with absent demand] + | isUnLiftedType expr_ty -> (CD { sd = HeadStr, ud = Used }, Just (False, One)) + | otherwise -> (CD { sd = HeadStr, ud = Used }, Nothing) -- This is used in dmdAnalStar when post-processing -- a function's argument demand. So we only care about what @@ -1385,13 +1387,13 @@ cardinality analysis of the following example: {-# NOINLINE build #-} build g = (g (:) [], g (:) []) -h c z = build (\x -> - let z1 = z ++ z +h c z = build (\x -> + let z1 = z ++ z in if c then \y -> x (y ++ z1) else \y -> x (z1 ++ y)) -One can see that `build` assigns to `g` demand <L,C(C1(U))>. +One can see that `build` assigns to `g` demand <L,C(C1(U))>. Therefore, when analyzing the lambda `(\x -> ...)`, we expect each lambda \y -> ... to be annotated as "one-shot" one. Therefore (\x -> \y -> x (y ++ z)) should be analyzed with a @@ -1400,6 +1402,46 @@ demand <C(C(..), C(C1(U))>. This is achieved by, first, converting the lazy demand L into the strict S by the second clause of the analysis. +Note [Analysing with absent demand] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Suppose we analyse an expression with demand <L,A>. The "A" means +"absent", so this expression will never be needed. What should happen? +There are several wrinkles: + +* We *do* want to analyse the expression regardless. + Reason: Note [Always analyse in virgin pass] + + But we can post-process the results to ignore all the usage + demands coming back. This is done by postProcessDmdTypeM. + +* But in the case of an *unlifted type* we must be extra careful, + because unlifted values are evaluated even if they are not used. + Example (see Trac #9254): + f :: (() -> (# Int#, () #)) -> () + -- Strictness signature is + -- <C(S(LS)), 1*C1(U(A,1*U()))> + -- I.e. 
calls k, but discards first component of result + f k = case k () of (# _, r #) -> r + + g :: Int -> () + g y = f (\n -> (# case y of I# y2 -> y2, n #)) + + Here f's strictness signature says (correctly) that it calls its + argument function and ignores the first component of its result. + This is correct in the sense that it'd be fine to (say) modify the + function so that always returned 0# in the first component. + + But in function g, we *will* evaluate the 'case y of ...', because + it has type Int#. So 'y' will be evaluated. So we must record this + usage of 'y', else 'g' will say 'y' is absent, and will w/w so that + 'y' is bound to an aBSENT_ERROR thunk. + + An alternative would be to replace the 'case y of ...' with (say) 0#, + but I have not tried that. It's not a common situation, but it is + not theoretical: unsafePerformIO's implementation is very very like + 'f' above. + + %************************************************************************ %* * Demand signatures @@ -1521,12 +1563,12 @@ dmdTransformDataConSig arity (StrictSig (DmdType _ _ con_res)) | otherwise -- Not saturated = nopDmdType where - go_str 0 dmd = Just (splitStrProdDmd arity dmd) + go_str 0 dmd = splitStrProdDmd arity dmd go_str n (SCall s') = go_str (n-1) s' go_str n HyperStr = go_str (n-1) HyperStr go_str _ _ = Nothing - go_abs 0 dmd = Just (splitUseProdDmd arity dmd) + go_abs 0 dmd = splitUseProdDmd arity dmd go_abs n (UCall One u') = go_abs (n-1) u' go_abs _ _ = Nothing diff --git a/compiler/basicTypes/Id.lhs b/compiler/basicTypes/Id.lhs index aada6dccc2..85e9b3083a 100644 --- a/compiler/basicTypes/Id.lhs +++ b/compiler/basicTypes/Id.lhs @@ -5,6 +5,8 @@ \section[Id]{@Ids@: Value and constructor identifiers} \begin{code} +{-# LANGUAGE CPP #-} + -- | -- #name_types# -- GHC uses several kinds of name internally: @@ -252,8 +254,9 @@ mkLocalIdWithInfo name ty info = Var.mkLocalVar VanillaId name ty info -- | Create a local 'Id' that is marked as exported. -- This prevents things attached to it from being removed as dead code. -mkExportedLocalId :: Name -> Type -> Id -mkExportedLocalId name ty = Var.mkExportedLocalVar VanillaId name ty vanillaIdInfo +-- See Note [Exported LocalIds] +mkExportedLocalId :: IdDetails -> Name -> Type -> Id +mkExportedLocalId details name ty = Var.mkExportedLocalVar details name ty vanillaIdInfo -- Note [Free type variables] @@ -305,6 +308,40 @@ mkTemplateLocalsNum :: Int -> [Type] -> [Id] mkTemplateLocalsNum n tys = zipWith mkTemplateLocal [n..] tys \end{code} +Note [Exported LocalIds] +~~~~~~~~~~~~~~~~~~~~~~~~ +We use mkExportedLocalId for things like + - Dictionary functions (DFunId) + - Wrapper and matcher Ids for pattern synonyms + - Default methods for classes + - etc + +They marked as "exported" in the sense that they should be kept alive +even if apparently unused in other bindings, and not dropped as dead +code by the occurrence analyser. (But "exported" here does not mean +"brought into lexical scope by an import declaration". Indeed these +things are always internal Ids that the user never sees.) + +It's very important that they are *LocalIds*, not GlobalIs, for lots +of reasons: + + * We want to treat them as free variables for the purpose of + dependency analysis (e.g. CoreFVs.exprFreeVars). + + * Look them up in the current substitution when we come across + occurrences of them (in Subst.lookupIdSubst) + + * Ensure that for dfuns that the specialiser does not float dict uses + above their defns, which would prevent good simplifications happening. 
+ + * The strictness analyser treats a occurrence of a GlobalId as + imported and assumes it contains strictness in its IdInfo, which + isn't true if the thing is bound in the same module as the + occurrence. + +In CoreTidy we must make all these LocalIds into GlobalIds, so that in +importing modules (in --make mode) we treat them as properly global. +That is what is happening in, say tidy_insts in TidyPgm. %************************************************************************ %* * diff --git a/compiler/basicTypes/IdInfo.lhs b/compiler/basicTypes/IdInfo.lhs index 94b3d2a71e..d9bce17def 100644 --- a/compiler/basicTypes/IdInfo.lhs +++ b/compiler/basicTypes/IdInfo.lhs @@ -8,7 +8,7 @@ Haskell. [WDP 94/11]) \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/basicTypes/Literal.lhs b/compiler/basicTypes/Literal.lhs index c77915fef6..13fbb4d46d 100644 --- a/compiler/basicTypes/Literal.lhs +++ b/compiler/basicTypes/Literal.lhs @@ -5,7 +5,7 @@ \section[Literal]{@Literal@: Machine literals (unboxed, of course)} \begin{code} -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE CPP, DeriveDataTypeable #-} module Literal ( diff --git a/compiler/basicTypes/MkId.lhs b/compiler/basicTypes/MkId.lhs index 38922fcd00..7816ad9005 100644 --- a/compiler/basicTypes/MkId.lhs +++ b/compiler/basicTypes/MkId.lhs @@ -12,7 +12,8 @@ have a standard form, namely: - primitive operations \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -20,7 +21,7 @@ have a standard form, namely: -- for details module MkId ( - mkDictFunId, mkDictFunTy, mkDictSelId, + mkDictFunId, mkDictFunTy, mkDictSelId, mkDictSelRhs, mkPrimOpId, mkFCallId, @@ -66,7 +67,6 @@ import PrimOp import ForeignCall import DataCon import Id -import Var ( mkExportedLocalVar ) import IdInfo import Demand import CoreSyn @@ -272,39 +272,36 @@ at the outside. When dealing with classes it's very convenient to recover the original type signature from the class op selector. 
\begin{code} -mkDictSelId :: DynFlags - -> Bool -- True <=> don't include the unfolding - -- Little point on imports without -O, because the - -- dictionary itself won't be visible - -> Name -- Name of one of the *value* selectors +mkDictSelId :: Name -- Name of one of the *value* selectors -- (dictionary superclass or method) -> Class -> Id -mkDictSelId dflags no_unf name clas +mkDictSelId name clas = mkGlobalId (ClassOpId clas) name sel_ty info where - sel_ty = mkForAllTys tyvars (mkFunTy (idType dict_id) (idType the_arg_id)) - -- We can't just say (exprType rhs), because that would give a type - -- C a -> C a - -- for a single-op class (after all, the selector is the identity) - -- But it's type must expose the representation of the dictionary - -- to get (say) C a -> (a -> a) + tycon = classTyCon clas + sel_names = map idName (classAllSelIds clas) + new_tycon = isNewTyCon tycon + [data_con] = tyConDataCons tycon + tyvars = dataConUnivTyVars data_con + arg_tys = dataConRepArgTys data_con -- Includes the dictionary superclasses + val_index = assoc "MkId.mkDictSelId" (sel_names `zip` [0..]) name + + sel_ty = mkForAllTys tyvars (mkFunTy (mkClassPred clas (mkTyVarTys tyvars)) + (getNth arg_tys val_index)) base_info = noCafIdInfo `setArityInfo` 1 `setStrictnessInfo` strict_sig - `setUnfoldingInfo` (if no_unf then noUnfolding - else mkImplicitUnfolding dflags rhs) - -- In module where class op is defined, we must add - -- the unfolding, even though it'll never be inlined - -- because we use that to generate a top-level binding - -- for the ClassOp - - info | new_tycon = base_info `setInlinePragInfo` alwaysInlinePragma + + info | new_tycon + = base_info `setInlinePragInfo` alwaysInlinePragma + `setUnfoldingInfo` mkInlineUnfolding (Just 1) (mkDictSelRhs clas val_index) -- See Note [Single-method classes] in TcInstDcls -- for why alwaysInlinePragma - | otherwise = base_info `setSpecInfo` mkSpecInfo [rule] - `setInlinePragInfo` neverInlinePragma - -- Add a magic BuiltinRule, and never inline it + + | otherwise + = base_info `setSpecInfo` mkSpecInfo [rule] + -- Add a magic BuiltinRule, but no unfolding -- so that the rule is always available to fire. -- See Note [ClassOp/DFun selection] in TcInstDcls @@ -326,25 +323,26 @@ mkDictSelId dflags no_unf name clas strict_sig = mkClosedStrictSig [arg_dmd] topRes arg_dmd | new_tycon = evalDmd | otherwise = mkManyUsedDmd $ - mkProdDmd [ if the_arg_id == id then evalDmd else absDmd - | id <- arg_ids ] - + mkProdDmd [ if name == sel_name then evalDmd else absDmd + | sel_name <- sel_names ] + +mkDictSelRhs :: Class + -> Int -- 0-indexed selector among (superclasses ++ methods) + -> CoreExpr +mkDictSelRhs clas val_index + = mkLams tyvars (Lam dict_id rhs_body) + where tycon = classTyCon clas new_tycon = isNewTyCon tycon [data_con] = tyConDataCons tycon tyvars = dataConUnivTyVars data_con arg_tys = dataConRepArgTys data_con -- Includes the dictionary superclasses - -- 'index' is a 0-index into the *value* arguments of the dictionary - val_index = assoc "MkId.mkDictSelId" sel_index_prs name - sel_index_prs = map idName (classAllSelIds clas) `zip` [0..] 
- the_arg_id = getNth arg_ids val_index pred = mkClassPred clas (mkTyVarTys tyvars) dict_id = mkTemplateLocal 1 pred arg_ids = mkTemplateLocalsNum 2 arg_tys - rhs = mkLams tyvars (Lam dict_id rhs_body) rhs_body | new_tycon = unwrapNewTypeBody tycon (map mkTyVarTy tyvars) (Var dict_id) | otherwise = Case (Var dict_id) dict_id (idType the_arg_id) [(DataAlt data_con, arg_ids, varToCoreExpr the_arg_id)] @@ -956,29 +954,13 @@ mkFCallId dflags uniq fcall ty %* * %************************************************************************ -Important notes about dict funs and default methods -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Note [Dict funs and default methods] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Dict funs and default methods are *not* ImplicitIds. Their definition involves user-written code, so we can't figure out their strictness etc based on fixed info, as we can for constructors and record selectors (say). -We build them as LocalIds, but with External Names. This ensures that -they are taken to account by free-variable finding and dependency -analysis (e.g. CoreFVs.exprFreeVars). - -Why shouldn't they be bound as GlobalIds? Because, in particular, if -they are globals, the specialiser floats dict uses above their defns, -which prevents good simplifications happening. Also the strictness -analyser treats a occurrence of a GlobalId as imported and assumes it -contains strictness in its IdInfo, which isn't true if the thing is -bound in the same module as the occurrence. - -It's OK for dfuns to be LocalIds, because we form the instance-env to -pass on to the next module (md_insts) in CoreTidy, afer tidying -and globalising the top-level Ids. - -BUT make sure they are *exported* LocalIds (mkExportedLocalId) so -that they aren't discarded by the occurrence analyser. +NB: See also Note [Exported LocalIds] in Id \begin{code} mkDictFunId :: Name -- Name to use for the dict fun; @@ -988,12 +970,12 @@ mkDictFunId :: Name -- Name to use for the dict fun; -> [Type] -> Id -- Implements the DFun Superclass Invariant (see TcInstDcls) +-- See Note [Dict funs and default methods] mkDictFunId dfun_name tvs theta clas tys - = mkExportedLocalVar (DFunId n_silent is_nt) - dfun_name - dfun_ty - vanillaIdInfo + = mkExportedLocalId (DFunId n_silent is_nt) + dfun_name + dfun_ty where is_nt = isNewTyCon (classTyCon clas) (n_silent, dfun_ty) = mkDictFunTy tvs theta clas tys diff --git a/compiler/basicTypes/Module.lhs b/compiler/basicTypes/Module.lhs index 90bf717a85..080ae47ac9 100644 --- a/compiler/basicTypes/Module.lhs +++ b/compiler/basicTypes/Module.lhs @@ -9,6 +9,7 @@ These are Uniquable, hence we can build Maps with Modules as the keys. \begin{code} +{-# LANGUAGE DeriveDataTypeable #-} module Module ( diff --git a/compiler/basicTypes/Name.lhs b/compiler/basicTypes/Name.lhs index e2742bb3a8..c2e7aeabdc 100644 --- a/compiler/basicTypes/Name.lhs +++ b/compiler/basicTypes/Name.lhs @@ -5,6 +5,8 @@ \section[Name]{@Name@: to transmit name info from renamer to typechecker} \begin{code} +{-# LANGUAGE DeriveDataTypeable #-} + -- | -- #name_types# -- GHC uses several kinds of name internally: diff --git a/compiler/basicTypes/NameEnv.lhs b/compiler/basicTypes/NameEnv.lhs index 292ee3d1ec..f39627706d 100644 --- a/compiler/basicTypes/NameEnv.lhs +++ b/compiler/basicTypes/NameEnv.lhs @@ -5,7 +5,8 @@ \section[NameEnv]{@NameEnv@: name environments} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/basicTypes/NameSet.lhs b/compiler/basicTypes/NameSet.lhs index ed42c2b1aa..9cd9fcef93 100644 --- a/compiler/basicTypes/NameSet.lhs +++ b/compiler/basicTypes/NameSet.lhs @@ -4,7 +4,8 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/basicTypes/OccName.lhs b/compiler/basicTypes/OccName.lhs index b41d711f69..d942362db7 100644 --- a/compiler/basicTypes/OccName.lhs +++ b/compiler/basicTypes/OccName.lhs @@ -4,6 +4,8 @@ % \begin{code} +{-# LANGUAGE DeriveDataTypeable #-} + -- | -- #name_types# -- GHC uses several kinds of name internally: @@ -20,7 +22,7 @@ -- -- * 'Var.Var': see "Var#name_types" -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -30,6 +32,8 @@ module OccName ( -- * The 'NameSpace' type NameSpace, -- Abstract + + nameSpacesRelated, -- ** Construction -- $real_vs_source_data_constructors @@ -86,7 +90,7 @@ module OccName ( lookupOccEnv, mkOccEnv, mkOccEnv_C, extendOccEnvList, elemOccEnv, occEnvElts, foldOccEnv, plusOccEnv, plusOccEnv_C, extendOccEnv_C, extendOccEnv_Acc, filterOccEnv, delListFromOccEnv, delFromOccEnv, - alterOccEnv, + alterOccEnv, pprOccEnv, -- * The 'OccSet' type OccSet, emptyOccSet, unitOccSet, mkOccSet, extendOccSet, @@ -100,7 +104,10 @@ module OccName ( -- * Lexical characteristics of Haskell names isLexCon, isLexVar, isLexId, isLexSym, isLexConId, isLexConSym, isLexVarId, isLexVarSym, - startsVarSym, startsVarId, startsConSym, startsConId + startsVarSym, startsVarId, startsConSym, startsConId, + + -- FsEnv + FastStringEnv, emptyFsEnv, lookupFsEnv, extendFsEnv, mkFsEnv ) where import Util @@ -117,6 +124,29 @@ import Data.Data %************************************************************************ %* * + FastStringEnv +%* * +%************************************************************************ + +FastStringEnv can't be in FastString because the env depends on UniqFM + +\begin{code} +type FastStringEnv a = UniqFM a -- Keyed by FastString + + +emptyFsEnv :: FastStringEnv a +lookupFsEnv :: FastStringEnv a -> FastString -> Maybe a +extendFsEnv :: FastStringEnv a -> FastString -> a -> FastStringEnv a +mkFsEnv :: [(FastString,a)] -> FastStringEnv a + +emptyFsEnv = emptyUFM +lookupFsEnv = lookupUFM +extendFsEnv = addToUFM +mkFsEnv = listToUFM +\end{code} + +%************************************************************************ +%* * \subsection{Name space} %* * %************************************************************************ @@ -244,6 +274,9 @@ instance Data OccName where toConstr _ = abstractConstr "OccName" gunfold _ _ = error "gunfold" dataTypeOf _ = mkNoRepType "OccName" + +instance HasOccName OccName where + occName = id \end{code} @@ -339,7 +372,20 @@ demoteOccName (OccName space name) = do space' <- demoteNameSpace space return $ OccName space' name -{- | Other names in the compiler add aditional information to an OccName. 
+-- Name spaces are related if there is a chance to mean the one when one writes +-- the other, i.e. variables <-> data constructors and type variables <-> type constructors +nameSpacesRelated :: NameSpace -> NameSpace -> Bool +nameSpacesRelated ns1 ns2 = ns1 == ns2 || otherNameSpace ns1 == ns2 + +otherNameSpace :: NameSpace -> NameSpace +otherNameSpace VarName = DataName +otherNameSpace DataName = VarName +otherNameSpace TvName = TcClsName +otherNameSpace TcClsName = TvName + + + +{- | Other names in the compiler add additional information to an OccName. This class provides a consistent way to access the underlying OccName. -} class HasOccName name where occName :: name -> OccName @@ -416,7 +462,10 @@ filterOccEnv x (A y) = A $ filterUFM x y alterOccEnv fn (A y) k = A $ alterUFM fn y k instance Outputable a => Outputable (OccEnv a) where - ppr (A x) = ppr x + ppr x = pprOccEnv ppr x + +pprOccEnv :: (a -> SDoc) -> OccEnv a -> SDoc +pprOccEnv ppr_elt (A env) = pprUniqFM ppr_elt env type OccSet = UniqSet OccName @@ -852,9 +901,12 @@ isLexConSym cs -- Infix type or data constructors | otherwise = startsConSym (headFS cs) isLexVarSym fs -- Infix identifiers e.g. "+" + | fs == (fsLit "~R#") = True + | otherwise = case (if nullFS fs then [] else unpackFS fs) of [] -> False (c:cs) -> startsVarSym c && all isVarSymChar cs + -- See Note [Classification of generated names] ------------- startsVarSym, startsVarId, startsConSym, startsConId :: Char -> Bool diff --git a/compiler/basicTypes/PatSyn.lhs b/compiler/basicTypes/PatSyn.lhs index 9285b3c365..cba8427292 100644 --- a/compiler/basicTypes/PatSyn.lhs +++ b/compiler/basicTypes/PatSyn.lhs @@ -5,21 +5,25 @@ \section[PatSyn]{@PatSyn@: Pattern synonyms} \begin{code} +{-# LANGUAGE CPP, DeriveDataTypeable #-} module PatSyn ( -- * Main data types PatSyn, mkPatSyn, -- ** Type deconstruction - patSynId, patSynType, patSynArity, patSynIsInfix, - patSynArgs, patSynArgTys, patSynTyDetails, + patSynName, patSynArity, patSynIsInfix, + patSynArgs, patSynTyDetails, patSynType, patSynWrapper, patSynMatcher, - patSynExTyVars, patSynSig, patSynInstArgTys + patSynExTyVars, patSynSig, + patSynInstArgTys, patSynInstResTy, + tidyPatSynIds, patSynIds ) where #include "HsVersions.h" import Type +import TcType( mkSigmaTy ) import Name import Outputable import Unique @@ -27,8 +31,6 @@ import Util import BasicTypes import FastString import Var -import Id -import TcType import HsBinds( HsPatSynDetails(..) ) import qualified Data.Data as Data @@ -37,8 +39,8 @@ import Data.Function \end{code} -Pattern synonym representation -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Note [Pattern synonym representation] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider the following pattern synonym declaration pattern P x = MkT [x] (Just 42) @@ -58,15 +60,49 @@ with the following typeclass constraints: In this case, the fields of MkPatSyn will be set as follows: - psArgs = [x :: b] + psArgs = [b] psArity = 1 psInfix = False psUnivTyVars = [t] psExTyVars = [b] - psTheta = ((Show (Maybe t), Ord b), (Eq t, Num t)) + psProvTheta = (Show (Maybe t), Ord b) + psReqTheta = (Eq t, Num t) psOrigResTy = T (Maybe t) +Note [Matchers and wrappers for pattern synonyms] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +For each pattern synonym, we generate a single matcher function which +implements the actual matching. For the above example, the matcher +will have type: + + $mP :: forall r t. (Eq t, Num t) + => T (Maybe t) + -> (forall b. 
(Show (Maybe t), Ord b) => b -> r) + -> r + -> r + +with the following implementation: + + $mP @r @t $dEq $dNum scrut cont fail = case scrut of + MkT @b $dShow $dOrd [x] (Just 42) -> cont @b $dShow $dOrd x + _ -> fail + +For *bidirectional* pattern synonyms, we also generate a single wrapper +function which implements the pattern synonym in an expression +context. For our running example, it will be: + + $WP :: forall t b. (Show (Maybe t), Ord b, Eq t, Num t) + => b -> T (Maybe t) + $WP x = MkT [x] (Just 42) + +NB: the existential/universal and required/provided split does not +apply to the wrapper since you are only putting stuff in, not getting +stuff out. + +Injectivity of bidirectional pattern synonyms is checked in +tcPatToExpr which walks the pattern and returns its corresponding +expression when available. %************************************************************************ %* * @@ -76,21 +112,36 @@ In this case, the fields of MkPatSyn will be set as follows: \begin{code} -- | A pattern synonym +-- See Note [Pattern synonym representation] data PatSyn = MkPatSyn { - psId :: Id, - psUnique :: Unique, -- Cached from Name - psMatcher :: Id, - psWrapper :: Maybe Id, + psName :: Name, + psUnique :: Unique, -- Cached from Name - psArgs :: [Var], - psArity :: Arity, -- == length psArgs - psInfix :: Bool, -- True <=> declared infix + psArgs :: [Type], + psArity :: Arity, -- == length psArgs + psInfix :: Bool, -- True <=> declared infix - psUnivTyVars :: [TyVar], -- Universially-quantified type variables - psExTyVars :: [TyVar], -- Existentially-quantified type vars - psTheta :: (ThetaType, ThetaType), -- Provided and required dictionaries - psOrigResTy :: Type + psUnivTyVars :: [TyVar], -- Universially-quantified type variables + psExTyVars :: [TyVar], -- Existentially-quantified type vars + psProvTheta :: ThetaType, -- Provided dictionaries + psReqTheta :: ThetaType, -- Required dictionaries + psOrigResTy :: Type, -- Mentions only psUnivTyVars + + -- See Note [Matchers and wrappers for pattern synonyms] + psMatcher :: Id, + -- Matcher function, of type + -- forall r univ_tvs. req_theta + -- => res_ty + -- -> (forall ex_tvs. prov_theta -> arg_tys -> r) + -- -> r -> r + + psWrapper :: Maybe Id + -- Nothing => uni-directional pattern synonym + -- Just wid => bi-direcitonal + -- Wrapper function, of type + -- forall univ_tvs, ex_tvs. (prov_theta, req_theta) + -- => arg_tys -> res_ty } deriving Data.Typeable.Typeable \end{code} @@ -117,7 +168,7 @@ instance Uniquable PatSyn where getUnique = psUnique instance NamedThing PatSyn where - getName = getName . psId + getName = patSynName instance Outputable PatSyn where ppr = ppr . getName @@ -144,7 +195,7 @@ instance Data.Data PatSyn where -- | Build a new pattern synonym mkPatSyn :: Name -> Bool -- ^ Is the pattern synonym declared infix? 
- -> [Var] -- ^ Original arguments + -> [Type] -- ^ Original arguments -> [TyVar] -- ^ Universially-quantified type variables -> [TyVar] -- ^ Existentially-quantified type variables -> ThetaType -- ^ Wanted dicts @@ -158,29 +209,30 @@ mkPatSyn name declared_infix orig_args prov_theta req_theta orig_res_ty matcher wrapper - = MkPatSyn {psId = id, psUnique = getUnique name, + = MkPatSyn {psName = name, psUnique = getUnique name, psUnivTyVars = univ_tvs, psExTyVars = ex_tvs, - psTheta = (prov_theta, req_theta), + psProvTheta = prov_theta, psReqTheta = req_theta, psInfix = declared_infix, psArgs = orig_args, psArity = length orig_args, psOrigResTy = orig_res_ty, psMatcher = matcher, psWrapper = wrapper } - where - pat_ty = mkSigmaTy univ_tvs req_theta $ - mkSigmaTy ex_tvs prov_theta $ - mkFunTys (map varType orig_args) orig_res_ty - id = mkLocalId name pat_ty \end{code} \begin{code} -- | The 'Name' of the 'PatSyn', giving it a unique, rooted identification -patSynId :: PatSyn -> Id -patSynId = psId +patSynName :: PatSyn -> Name +patSynName = psName patSynType :: PatSyn -> Type -patSynType = psOrigResTy +-- The full pattern type, used only in error messages +patSynType (MkPatSyn { psUnivTyVars = univ_tvs, psReqTheta = req_theta + , psExTyVars = ex_tvs, psProvTheta = prov_theta + , psArgs = orig_args, psOrigResTy = orig_res_ty }) + = mkSigmaTy univ_tvs req_theta $ + mkSigmaTy ex_tvs prov_theta $ + mkFunTys orig_args orig_res_ty -- | Should the 'PatSyn' be presented infix? patSynIsInfix :: PatSyn -> Bool @@ -190,22 +242,24 @@ patSynIsInfix = psInfix patSynArity :: PatSyn -> Arity patSynArity = psArity -patSynArgs :: PatSyn -> [Var] +patSynArgs :: PatSyn -> [Type] patSynArgs = psArgs -patSynArgTys :: PatSyn -> [Type] -patSynArgTys = map varType . patSynArgs - patSynTyDetails :: PatSyn -> HsPatSynDetails Type -patSynTyDetails ps = case (patSynIsInfix ps, patSynArgTys ps) of - (True, [left, right]) -> InfixPatSyn left right - (_, tys) -> PrefixPatSyn tys +patSynTyDetails (MkPatSyn { psInfix = is_infix, psArgs = arg_tys }) + | is_infix, [left,right] <- arg_tys + = InfixPatSyn left right + | otherwise + = PrefixPatSyn arg_tys patSynExTyVars :: PatSyn -> [TyVar] patSynExTyVars = psExTyVars -patSynSig :: PatSyn -> ([TyVar], [TyVar], (ThetaType, ThetaType)) -patSynSig ps = (psUnivTyVars ps, psExTyVars ps, psTheta ps) +patSynSig :: PatSyn -> ([TyVar], [TyVar], ThetaType, ThetaType, [Type], Type) +patSynSig (MkPatSyn { psUnivTyVars = univ_tvs, psExTyVars = ex_tvs + , psProvTheta = prov, psReqTheta = req + , psArgs = arg_tys, psOrigResTy = res_ty }) + = (univ_tvs, ex_tvs, prov, req, arg_tys, res_ty) patSynWrapper :: PatSyn -> Maybe Id patSynWrapper = psWrapper @@ -213,13 +267,43 @@ patSynWrapper = psWrapper patSynMatcher :: PatSyn -> Id patSynMatcher = psMatcher +patSynIds :: PatSyn -> [Id] +patSynIds (MkPatSyn { psMatcher = match_id, psWrapper = mb_wrap_id }) + = case mb_wrap_id of + Nothing -> [match_id] + Just wrap_id -> [match_id, wrap_id] + +tidyPatSynIds :: (Id -> Id) -> PatSyn -> PatSyn +tidyPatSynIds tidy_fn ps@(MkPatSyn { psMatcher = match_id, psWrapper = mb_wrap_id }) + = ps { psMatcher = tidy_fn match_id, psWrapper = fmap tidy_fn mb_wrap_id } + patSynInstArgTys :: PatSyn -> [Type] -> [Type] -patSynInstArgTys ps inst_tys +-- Return the types of the argument patterns +-- e.g. data D a = forall b. MkD a b (b->a) +-- pattern P f x y = MkD (x,True) y f +-- D :: forall a. forall b. a -> b -> (b->a) -> D a +-- P :: forall c. forall b. 
(b->(c,Bool)) -> c -> b -> P c +-- patSynInstArgTys P [Int,bb] = [bb->(Int,Bool), Int, bb] +-- NB: the inst_tys should be both universal and existential +patSynInstArgTys (MkPatSyn { psName = name, psUnivTyVars = univ_tvs + , psExTyVars = ex_tvs, psArgs = arg_tys }) + inst_tys = ASSERT2( length tyvars == length inst_tys - , ptext (sLit "patSynInstArgTys") <+> ppr ps $$ ppr tyvars $$ ppr inst_tys ) + , ptext (sLit "patSynInstArgTys") <+> ppr name $$ ppr tyvars $$ ppr inst_tys ) map (substTyWith tyvars inst_tys) arg_tys where - (univ_tvs, ex_tvs, _) = patSynSig ps - arg_tys = map varType (psArgs ps) tyvars = univ_tvs ++ ex_tvs + +patSynInstResTy :: PatSyn -> [Type] -> Type +-- Return the type of whole pattern +-- E.g. pattern P x y = Just (x,x,y) +-- P :: a -> b -> Just (a,a,b) +-- (patSynInstResTy P [Int,Bool] = Maybe (Int,Int,Bool) +-- NB: unlikepatSynInstArgTys, the inst_tys should be just the *universal* tyvars +patSynInstResTy (MkPatSyn { psName = name, psUnivTyVars = univ_tvs + , psOrigResTy = res_ty }) + inst_tys + = ASSERT2( length univ_tvs == length inst_tys + , ptext (sLit "patSynInstResTy") <+> ppr name $$ ppr univ_tvs $$ ppr inst_tys ) + substTyWith univ_tvs inst_tys res_ty \end{code} diff --git a/compiler/basicTypes/RdrName.lhs b/compiler/basicTypes/RdrName.lhs index 3ff771f0fe..ebfb71aa65 100644 --- a/compiler/basicTypes/RdrName.lhs +++ b/compiler/basicTypes/RdrName.lhs @@ -4,7 +4,7 @@ % \begin{code} -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE CPP, DeriveDataTypeable #-} -- | -- #name_types# @@ -331,49 +331,71 @@ instance Ord RdrName where -- It is keyed by OccName, because we never use it for qualified names -- We keep the current mapping, *and* the set of all Names in scope -- Reason: see Note [Splicing Exact Names] in RnEnv -type LocalRdrEnv = (OccEnv Name, NameSet) +data LocalRdrEnv = LRE { lre_env :: OccEnv Name + , lre_in_scope :: NameSet } + +instance Outputable LocalRdrEnv where + ppr (LRE {lre_env = env, lre_in_scope = ns}) + = hang (ptext (sLit "LocalRdrEnv {")) + 2 (vcat [ ptext (sLit "env =") <+> pprOccEnv ppr_elt env + , ptext (sLit "in_scope =") <+> braces (pprWithCommas ppr (nameSetToList ns)) + ] <+> char '}') + where + ppr_elt name = parens (ppr (getUnique (nameOccName name))) <+> ppr name + -- So we can see if the keys line up correctly emptyLocalRdrEnv :: LocalRdrEnv -emptyLocalRdrEnv = (emptyOccEnv, emptyNameSet) +emptyLocalRdrEnv = LRE { lre_env = emptyOccEnv, lre_in_scope = emptyNameSet } extendLocalRdrEnv :: LocalRdrEnv -> Name -> LocalRdrEnv -- The Name should be a non-top-level thing -extendLocalRdrEnv (env, ns) name +extendLocalRdrEnv (LRE { lre_env = env, lre_in_scope = ns }) name = WARN( isExternalName name, ppr name ) - ( extendOccEnv env (nameOccName name) name - , addOneToNameSet ns name - ) + LRE { lre_env = extendOccEnv env (nameOccName name) name + , lre_in_scope = addOneToNameSet ns name } extendLocalRdrEnvList :: LocalRdrEnv -> [Name] -> LocalRdrEnv -extendLocalRdrEnvList (env, ns) names +extendLocalRdrEnvList (LRE { lre_env = env, lre_in_scope = ns }) names = WARN( any isExternalName names, ppr names ) - ( extendOccEnvList env [(nameOccName n, n) | n <- names] - , addListToNameSet ns names - ) + LRE { lre_env = extendOccEnvList env [(nameOccName n, n) | n <- names] + , lre_in_scope = addListToNameSet ns names } lookupLocalRdrEnv :: LocalRdrEnv -> RdrName -> Maybe Name -lookupLocalRdrEnv (env, _) (Unqual occ) = lookupOccEnv env occ -lookupLocalRdrEnv _ _ = Nothing +lookupLocalRdrEnv (LRE { lre_env = env }) (Unqual occ) = lookupOccEnv 
env occ +lookupLocalRdrEnv _ _ = Nothing lookupLocalRdrOcc :: LocalRdrEnv -> OccName -> Maybe Name -lookupLocalRdrOcc (env, _) occ = lookupOccEnv env occ +lookupLocalRdrOcc (LRE { lre_env = env }) occ = lookupOccEnv env occ elemLocalRdrEnv :: RdrName -> LocalRdrEnv -> Bool -elemLocalRdrEnv rdr_name (env, _) - | isUnqual rdr_name = rdrNameOcc rdr_name `elemOccEnv` env - | otherwise = False +elemLocalRdrEnv rdr_name (LRE { lre_env = env, lre_in_scope = ns }) + = case rdr_name of + Unqual occ -> occ `elemOccEnv` env + Exact name -> name `elemNameSet` ns -- See Note [Local bindings with Exact Names] + Qual {} -> False + Orig {} -> False localRdrEnvElts :: LocalRdrEnv -> [Name] -localRdrEnvElts (env, _) = occEnvElts env +localRdrEnvElts (LRE { lre_env = env }) = occEnvElts env inLocalRdrEnvScope :: Name -> LocalRdrEnv -> Bool -- This is the point of the NameSet -inLocalRdrEnvScope name (_, ns) = name `elemNameSet` ns +inLocalRdrEnvScope name (LRE { lre_in_scope = ns }) = name `elemNameSet` ns delLocalRdrEnvList :: LocalRdrEnv -> [OccName] -> LocalRdrEnv -delLocalRdrEnvList (env, ns) occs = (delListFromOccEnv env occs, ns) +delLocalRdrEnvList (LRE { lre_env = env, lre_in_scope = ns }) occs + = LRE { lre_env = delListFromOccEnv env occs + , lre_in_scope = ns } \end{code} +Note [Local bindings with Exact Names] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +With Template Haskell we can make local bindings that have Exact Names. +Computing shadowing etc may use elemLocalRdrEnv (at least it certainly +does so in RnTpes.bindHsTyVars), so for an Exact Name we must consult +the in-scope-name-set. + + %************************************************************************ %* * GlobalRdrEnv diff --git a/compiler/basicTypes/SrcLoc.lhs b/compiler/basicTypes/SrcLoc.lhs index d53ac2b0ea..ab58a4f9f5 100644 --- a/compiler/basicTypes/SrcLoc.lhs +++ b/compiler/basicTypes/SrcLoc.lhs @@ -3,6 +3,7 @@ % \begin{code} +{-# LANGUAGE DeriveDataTypeable #-} {-# OPTIONS_GHC -fno-omit-interface-pragmas #-} -- Workaround for Trac #5252 crashes the bootstrap compiler without -O -- When the earliest compiler we want to boostrap with is diff --git a/compiler/basicTypes/UniqSupply.lhs b/compiler/basicTypes/UniqSupply.lhs index fea1489efb..6ceee20793 100644 --- a/compiler/basicTypes/UniqSupply.lhs +++ b/compiler/basicTypes/UniqSupply.lhs @@ -4,6 +4,8 @@ % \begin{code} +{-# LANGUAGE UnboxedTuples #-} + module UniqSupply ( -- * Main data type UniqSupply, -- Abstractly diff --git a/compiler/basicTypes/Unique.lhs b/compiler/basicTypes/Unique.lhs index 037aed0641..897b093e39 100644 --- a/compiler/basicTypes/Unique.lhs +++ b/compiler/basicTypes/Unique.lhs @@ -16,7 +16,7 @@ Some of the other hair in this code is to be able to use a Haskell). \begin{code} -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE CPP, BangPatterns, MagicHash #-} module Unique ( -- * Main data types diff --git a/compiler/basicTypes/Var.lhs b/compiler/basicTypes/Var.lhs index 70c5d4491a..1f20d4adec 100644 --- a/compiler/basicTypes/Var.lhs +++ b/compiler/basicTypes/Var.lhs @@ -5,7 +5,8 @@ \section{@Vars@: Variables} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, DeriveDataTypeable #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/basicTypes/VarSet.lhs b/compiler/basicTypes/VarSet.lhs index b756283b91..8b7f755dcd 100644 --- a/compiler/basicTypes/VarSet.lhs +++ b/compiler/basicTypes/VarSet.lhs @@ -4,7 +4,8 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/cmm/Bitmap.hs b/compiler/cmm/Bitmap.hs index 54db1a9a67..e7aa072063 100644 --- a/compiler/cmm/Bitmap.hs +++ b/compiler/cmm/Bitmap.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- -- (c) The University of Glasgow 2003-2006 -- diff --git a/compiler/cmm/BlockId.hs b/compiler/cmm/BlockId.hs index 8a46aed8f0..e4cc0bccb7 100644 --- a/compiler/cmm/BlockId.hs +++ b/compiler/cmm/BlockId.hs @@ -1,5 +1,7 @@ -{- BlockId module should probably go away completely, being superseded by Label -} +{-# LANGUAGE TypeSynonymInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} + +{- BlockId module should probably go away completely, being superseded by Label -} module BlockId ( BlockId, mkBlockId -- ToDo: BlockId should be abstract, but it isn't yet , BlockSet, BlockEnv diff --git a/compiler/cmm/CLabel.hs b/compiler/cmm/CLabel.hs index 407002f1c7..9dccd29135 100644 --- a/compiler/cmm/CLabel.hs +++ b/compiler/cmm/CLabel.hs @@ -888,6 +888,8 @@ labelDynamic dflags this_pkg this_mod lbl = PlainModuleInitLabel m -> not (gopt Opt_Static dflags) && this_pkg /= (modulePackageId m) + HpcTicksLabel m -> not (gopt Opt_Static dflags) && this_pkg /= (modulePackageId m) + -- Note that DynamicLinkerLabels do NOT require dynamic linking themselves. _ -> False where os = platformOS (targetPlatform dflags) diff --git a/compiler/cmm/Cmm.hs b/compiler/cmm/Cmm.hs index fadce0b5eb..e21efc13af 100644 --- a/compiler/cmm/Cmm.hs +++ b/compiler/cmm/Cmm.hs @@ -1,5 +1,5 @@ -- Cmm representations using Hoopl's Graph CmmNode e x. -{-# LANGUAGE GADTs #-} +{-# LANGUAGE CPP, GADTs #-} module Cmm ( -- * Cmm top-level datatypes diff --git a/compiler/cmm/CmmBuildInfoTables.hs b/compiler/cmm/CmmBuildInfoTables.hs index 16ace5245f..e10716a2ac 100644 --- a/compiler/cmm/CmmBuildInfoTables.hs +++ b/compiler/cmm/CmmBuildInfoTables.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE GADTs #-} +{-# LANGUAGE CPP, GADTs #-} -- See Note [Deprecations in Hoopl] in Hoopl module {-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} diff --git a/compiler/cmm/CmmCallConv.hs b/compiler/cmm/CmmCallConv.hs index 60e2c8c8f7..f36fc0bae5 100644 --- a/compiler/cmm/CmmCallConv.hs +++ b/compiler/cmm/CmmCallConv.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CmmCallConv ( ParamLocation(..), diff --git a/compiler/cmm/CmmExpr.hs b/compiler/cmm/CmmExpr.hs index 0c0c9714ec..1d6c97f41e 100644 --- a/compiler/cmm/CmmExpr.hs +++ b/compiler/cmm/CmmExpr.hs @@ -1,4 +1,7 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE UndecidableInstances #-} module CmmExpr diff --git a/compiler/cmm/CmmInfo.hs b/compiler/cmm/CmmInfo.hs index 42c9e6ba53..aae3ea1c71 100644 --- a/compiler/cmm/CmmInfo.hs +++ b/compiler/cmm/CmmInfo.hs @@ -1,4 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/cmm/CmmLayoutStack.hs b/compiler/cmm/CmmLayoutStack.hs index bdc947829d..db22deb639 100644 --- a/compiler/cmm/CmmLayoutStack.hs +++ b/compiler/cmm/CmmLayoutStack.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE RecordWildCards, GADTs #-} +{-# LANGUAGE CPP, RecordWildCards, GADTs #-} module CmmLayoutStack ( cmmLayoutStack, setInfoTableStackMap ) where diff --git a/compiler/cmm/CmmLive.hs b/compiler/cmm/CmmLive.hs index 24202cbe8c..dfacd139b6 100644 --- a/compiler/cmm/CmmLive.hs +++ b/compiler/cmm/CmmLive.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE BangPatterns #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE ScopedTypeVariables #-} diff --git a/compiler/cmm/CmmMachOp.hs b/compiler/cmm/CmmMachOp.hs index 684a4b9729..d8ce492de1 100644 --- a/compiler/cmm/CmmMachOp.hs +++ b/compiler/cmm/CmmMachOp.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CmmMachOp ( MachOp(..) @@ -18,6 +19,9 @@ module CmmMachOp -- CallishMachOp , CallishMachOp(..), callishMachOpHints , pprCallishMachOp + + -- Atomic read-modify-write + , AtomicMachOp(..) ) where @@ -546,8 +550,24 @@ data CallishMachOp | MO_PopCnt Width | MO_BSwap Width + + -- Atomic read-modify-write. + | MO_AtomicRMW Width AtomicMachOp + | MO_AtomicRead Width + | MO_AtomicWrite Width + | MO_Cmpxchg Width deriving (Eq, Show) +-- | The operation to perform atomically. +data AtomicMachOp = + AMO_Add + | AMO_Sub + | AMO_And + | AMO_Nand + | AMO_Or + | AMO_Xor + deriving (Eq, Show) + pprCallishMachOp :: CallishMachOp -> SDoc pprCallishMachOp mo = text (show mo) diff --git a/compiler/cmm/CmmNode.hs b/compiler/cmm/CmmNode.hs index 5c520d3899..7eb2b61d9a 100644 --- a/compiler/cmm/CmmNode.hs +++ b/compiler/cmm/CmmNode.hs @@ -1,9 +1,14 @@ --- CmmNode type for representation using Hoopl graphs. +{-# LANGUAGE CPP #-} +{-# LANGUAGE ExplicitForAll #-} {-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} +{-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE UndecidableInstances #-} +-- CmmNode type for representation using Hoopl graphs. + module CmmNode ( CmmNode(..), CmmFormal, CmmActual, UpdFrameOffset, Convention(..), diff --git a/compiler/cmm/CmmOpt.hs b/compiler/cmm/CmmOpt.hs index 54dbbebae6..84499b97de 100644 --- a/compiler/cmm/CmmOpt.hs +++ b/compiler/cmm/CmmOpt.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Cmm optimisation diff --git a/compiler/cmm/CmmPipeline.hs b/compiler/cmm/CmmPipeline.hs index 1447f6d8cd..4314695201 100644 --- a/compiler/cmm/CmmPipeline.hs +++ b/compiler/cmm/CmmPipeline.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE BangPatterns #-} + module CmmPipeline ( -- | Converts C-- with an implicit stack and native C-- calls into -- optimized, CPS converted and native-call-less C--. 
The latter @@ -36,8 +38,6 @@ cmmPipeline :: HscEnv -- Compilation env including cmmPipeline hsc_env topSRT prog = do let dflags = hsc_dflags hsc_env - showPass dflags "CPSZ" - tops <- {-# SCC "tops" #-} mapM (cpsTop hsc_env) prog (topSRT, cmms) <- {-# SCC "doSRTs" #-} doSRTs dflags topSRT tops diff --git a/compiler/cmm/CmmSink.hs b/compiler/cmm/CmmSink.hs index 187f4c47df..4dced9afd2 100644 --- a/compiler/cmm/CmmSink.hs +++ b/compiler/cmm/CmmSink.hs @@ -9,6 +9,7 @@ import BlockId import CmmLive import CmmUtils import Hoopl +import CodeGen.Platform import DynFlags import UniqFM @@ -16,6 +17,7 @@ import PprCmm () import Data.List (partition) import qualified Data.Set as Set +import Data.Maybe -- ----------------------------------------------------------------------------- -- Sinking and inlining @@ -197,7 +199,7 @@ cmmSink dflags graph = ofBlockList (g_entry graph) $ sink mapEmpty $ blocks drop_if a@(r,rhs,_) live_sets = (should_drop, live_sets') where should_drop = conflicts dflags a final_last - || not (isTrivial rhs) && live_in_multi live_sets r + || not (isTrivial dflags rhs) && live_in_multi live_sets r || r `Set.member` live_in_joins live_sets' | should_drop = live_sets @@ -219,26 +221,24 @@ cmmSink dflags graph = ofBlockList (g_entry graph) $ sink mapEmpty $ blocks -- small: an expression we don't mind duplicating isSmall :: CmmExpr -> Bool -isSmall (CmmReg (CmmLocal _)) = True -- not globals, we want to coalesce them instead* See below +isSmall (CmmReg (CmmLocal _)) = True -- isSmall (CmmLit _) = True isSmall (CmmMachOp (MO_Add _) [x,y]) = isTrivial x && isTrivial y isSmall (CmmRegOff (CmmLocal _) _) = True isSmall _ = False - -Coalesce global registers? What does that mean? We observed no decrease -in performance comming from inlining of global registers, hence we do it now -(see isTrivial function). Ideally we'd like to measure performance using -some tool like perf or VTune and make decisions what to inline based on that. -} -- -- We allow duplication of trivial expressions: registers (both local and -- global) and literals. -- -isTrivial :: CmmExpr -> Bool -isTrivial (CmmReg _) = True -isTrivial (CmmLit _) = True -isTrivial _ = False +isTrivial :: DynFlags -> CmmExpr -> Bool +isTrivial _ (CmmReg (CmmLocal _)) = True +isTrivial dflags (CmmReg (CmmGlobal r)) = -- see Note [Inline GlobalRegs?] + isJust (globalRegMaybe (targetPlatform dflags) r) + -- GlobalRegs that are loads from BaseReg are not trivial +isTrivial _ (CmmLit _) = True +isTrivial _ _ = False -- -- annotate each node with the set of registers live *after* the node @@ -401,7 +401,7 @@ tryToInline dflags live node assigs = go usages node [] assigs | cannot_inline = dont_inline | occurs_none = discard -- Note [discard during inlining] | occurs_once = inline_and_discard - | isTrivial rhs = inline_and_keep + | isTrivial dflags rhs = inline_and_keep | otherwise = dont_inline where inline_and_discard = go usages' inl_node skipped rest @@ -650,6 +650,10 @@ data AbsMem -- perhaps we ought to have a special annotation for calls that can -- modify heap/stack memory. For now we just use the conservative -- definition here. +-- +-- Some CallishMachOp imply a memory barrier e.g. AtomicRMW and +-- therefore we should never float any memory operations across one of +-- these calls. bothMems :: AbsMem -> AbsMem -> AbsMem @@ -695,3 +699,91 @@ regAddr _ (CmmGlobal Hp) _ _ = HeapMem regAddr _ (CmmGlobal CurrentTSO) _ _ = HeapMem -- important for PrimOps regAddr dflags r _ _ | isGcPtrType (cmmRegType dflags r) = HeapMem -- yay! 
GCPtr pays for itself regAddr _ _ _ _ = AnyMem + +{- +Note [Inline GlobalRegs?] + +Should we freely inline GlobalRegs? + +Actually it doesn't make a huge amount of difference either way, so we +*do* currently treat GlobalRegs as "trivial" and inline them +everywhere, but for what it's worth, here is what I discovered when I +(SimonM) looked into this: + +Common sense says we should not inline GlobalRegs, because when we +have + + x = R1 + +the register allocator will coalesce this assignment, generating no +code, and simply record the fact that x is bound to $rbx (or +whatever). Furthermore, if we were to sink this assignment, then the +range of code over which R1 is live increases, and the range of code +over which x is live decreases. All things being equal, it is better +for x to be live than R1, because R1 is a fixed register whereas x can +live in any register. So we should neither sink nor inline 'x = R1'. + +However, not inlining GlobalRegs can have surprising +consequences. e.g. (cgrun020) + + c3EN: + _s3DB::P64 = R1; + _c3ES::P64 = _s3DB::P64 & 7; + if (_c3ES::P64 >= 2) goto c3EU; else goto c3EV; + c3EU: + _s3DD::P64 = P64[_s3DB::P64 + 6]; + _s3DE::P64 = P64[_s3DB::P64 + 14]; + I64[Sp - 8] = c3F0; + R1 = _s3DE::P64; + P64[Sp] = _s3DD::P64; + +inlining the GlobalReg gives: + + c3EN: + if (R1 & 7 >= 2) goto c3EU; else goto c3EV; + c3EU: + I64[Sp - 8] = c3F0; + _s3DD::P64 = P64[R1 + 6]; + R1 = P64[R1 + 14]; + P64[Sp] = _s3DD::P64; + +but if we don't inline the GlobalReg, instead we get: + + _s3DB::P64 = R1; + if (_s3DB::P64 & 7 >= 2) goto c3EU; else goto c3EV; + c3EU: + I64[Sp - 8] = c3F0; + R1 = P64[_s3DB::P64 + 14]; + P64[Sp] = P64[_s3DB::P64 + 6]; + +This looks better - we managed to inline _s3DD - but in fact it +generates an extra reg-reg move: + +.Lc3EU: + movq $c3F0_info,-8(%rbp) + movq %rbx,%rax + movq 14(%rbx),%rbx + movq 6(%rax),%rax + movq %rax,(%rbp) + +because _s3DB is now live across the R1 assignment, we lost the +benefit of coalescing. + +Who is at fault here? Perhaps if we knew that _s3DB was an alias for +R1, then we would not sink a reference to _s3DB past the R1 +assignment. Or perhaps we *should* do that - we might gain by sinking +it, despite losing the coalescing opportunity. + +Sometimes not inlining global registers wins by virtue of the rule +about not inlining into arguments of a foreign call, e.g. 
(T7163) this +is what happens when we inlined F1: + + _s3L2::F32 = F1; + _c3O3::F32 = %MO_F_Mul_W32(F1, 10.0 :: W32); + (_s3L7::F32) = call "ccall" arg hints: [] result hints: [] rintFloat(_c3O3::F32); + +but if we don't inline F1: + + (_s3L7::F32) = call "ccall" arg hints: [] result hints: [] rintFloat(%MO_F_Mul_W32(_s3L2::F32, + 10.0 :: W32)); +-} diff --git a/compiler/cmm/CmmType.hs b/compiler/cmm/CmmType.hs index d03c2dc0b9..37d92c207d 100644 --- a/compiler/cmm/CmmType.hs +++ b/compiler/cmm/CmmType.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CmmType ( CmmType -- Abstract diff --git a/compiler/cmm/CmmUtils.hs b/compiler/cmm/CmmUtils.hs index afba245fbc..1f6d1ac0e3 100644 --- a/compiler/cmm/CmmUtils.hs +++ b/compiler/cmm/CmmUtils.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE GADTs #-} +{-# LANGUAGE CPP, GADTs, RankNTypes #-} ----------------------------------------------------------------------------- -- diff --git a/compiler/cmm/Hoopl.hs b/compiler/cmm/Hoopl.hs index 2d7139af9f..4b3717288f 100644 --- a/compiler/cmm/Hoopl.hs +++ b/compiler/cmm/Hoopl.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE RankNTypes, ScopedTypeVariables #-} + module Hoopl ( module Compiler.Hoopl, module Hoopl.Dataflow, diff --git a/compiler/cmm/Hoopl/Dataflow.hs b/compiler/cmm/Hoopl/Dataflow.hs index 78b930a20f..f5511515a9 100644 --- a/compiler/cmm/Hoopl/Dataflow.hs +++ b/compiler/cmm/Hoopl/Dataflow.hs @@ -1,3 +1,12 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE Trustworthy #-} +{-# LANGUAGE TypeFamilies #-} +{-# OPTIONS_GHC -fprof-auto-top #-} + -- -- Copyright (c) 2010, João Dias, Simon Marlow, Simon Peyton Jones, -- and Norman Ramsey @@ -9,10 +18,6 @@ -- specialised to the UniqSM monad. -- -{-# LANGUAGE RankNTypes, ScopedTypeVariables, GADTs, EmptyDataDecls, PatternGuards, TypeFamilies, MultiParamTypeClasses #-} -{-# OPTIONS_GHC -fprof-auto-top #-} -{-# LANGUAGE Trustworthy #-} - module Hoopl.Dataflow ( DataflowLattice(..), OldFact(..), NewFact(..), Fact, mkFactBase , ChangeFlag(..) diff --git a/compiler/cmm/MkGraph.hs b/compiler/cmm/MkGraph.hs index 6f9bbf8872..9bc2bd9ddc 100644 --- a/compiler/cmm/MkGraph.hs +++ b/compiler/cmm/MkGraph.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE GADTs #-} +{-# LANGUAGE BangPatterns, CPP, GADTs #-} module MkGraph ( CmmAGraph, CgStmt(..) 
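The CmmMachOp hunk above adds an atomic read-modify-write vocabulary to Cmm (MO_AtomicRMW with AMO_Add/Sub/And/Nand/Or/Xor, plus MO_AtomicRead, MO_AtomicWrite and MO_Cmpxchg), and the new comment on AbsMem in CmmSink notes that such calls imply a memory barrier, so no memory operation should be floated across them. At the Haskell level these machine ops back the new byte-array primops that the StgCmmPrim hunk further down wires up through doAtomicRMW and friends. A minimal usage sketch, not part of the patch, assuming the surface primop is named fetchAddIntArray# and exported from GHC.Exts (the diff itself only shows the Cmm-level plumbing):

{-# LANGUAGE MagicHash, UnboxedTuples #-}

-- Sketch: an atomic counter over the fetch-and-add operation that
-- MO_AtomicRMW width AMO_Add ultimately implements.
module AtomicCounterSketch (Counter, newCounter, incCounter) where

import GHC.Exts
import GHC.IO (IO (..))

-- One machine word held in a MutableByteArray#.
data Counter = Counter (MutableByteArray# RealWorld)

newCounter :: IO Counter
newCounter = IO $ \s0 ->
  case newByteArray# 8# s0 of                 -- assumes an 8-byte word
    (# s1, mba #) ->
      case writeIntArray# mba 0# 0# s1 of
        s2 -> (# s2, Counter mba #)

-- Atomically add one, returning the value the cell held beforehand
-- (the "previous value" convention documented for doAtomicRMW later
-- in this diff).
incCounter :: Counter -> IO Int
incCounter (Counter mba) = IO $ \s0 ->
  case fetchAddIntArray# mba 0# 1# s0 of
    (# s1, old #) -> (# s1, I# old #)

The increment is one indivisible read-modify-write with barrier semantics, which is the property the AbsMem comment above is careful to call out.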
diff --git a/compiler/cmm/PprC.hs b/compiler/cmm/PprC.hs index 23989811dd..455c79ba02 100644 --- a/compiler/cmm/PprC.hs +++ b/compiler/cmm/PprC.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, GADTs #-} + ----------------------------------------------------------------------------- -- -- Pretty-printing of Cmm as C, suitable for feeding gcc @@ -16,7 +18,6 @@ -- ----------------------------------------------------------------------------- -{-# LANGUAGE GADTs #-} module PprC ( writeCs, pprStringInCStyle @@ -752,6 +753,10 @@ pprCallishMachOp_for_C mop MO_Memmove -> ptext (sLit "memmove") (MO_BSwap w) -> ptext (sLit $ bSwapLabel w) (MO_PopCnt w) -> ptext (sLit $ popCntLabel w) + (MO_AtomicRMW w amop) -> ptext (sLit $ atomicRMWLabel w amop) + (MO_Cmpxchg w) -> ptext (sLit $ cmpxchgLabel w) + (MO_AtomicRead w) -> ptext (sLit $ atomicReadLabel w) + (MO_AtomicWrite w) -> ptext (sLit $ atomicWriteLabel w) (MO_UF_Conv w) -> ptext (sLit $ word2FloatLabel w) MO_S_QuotRem {} -> unsupported diff --git a/compiler/cmm/PprCmm.hs b/compiler/cmm/PprCmm.hs index 46257b4188..b5beb07ae9 100644 --- a/compiler/cmm/PprCmm.hs +++ b/compiler/cmm/PprCmm.hs @@ -1,3 +1,6 @@ +{-# LANGUAGE GADTs, TypeFamilies, FlexibleContexts, FlexibleInstances #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + ---------------------------------------------------------------------------- -- -- Pretty-printing of Cmm as (a superset of) C-- @@ -30,8 +33,6 @@ -- -- A useful example pass over Cmm is in nativeGen/MachCodeGen.hs -{-# OPTIONS_GHC -fno-warn-orphans #-} -{-# LANGUAGE GADTs, TypeFamilies, FlexibleContexts #-} module PprCmm ( module PprCmmDecl , module PprCmmExpr diff --git a/compiler/cmm/PprCmmDecl.hs b/compiler/cmm/PprCmmDecl.hs index 354a3d4563..dd80f5cd56 100644 --- a/compiler/cmm/PprCmmDecl.hs +++ b/compiler/cmm/PprCmmDecl.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ---------------------------------------------------------------------------- -- -- Pretty-printing of common Cmm types diff --git a/compiler/cmm/SMRep.lhs b/compiler/cmm/SMRep.lhs index 704c22db6a..b23bcc11ce 100644 --- a/compiler/cmm/SMRep.lhs +++ b/compiler/cmm/SMRep.lhs @@ -6,7 +6,7 @@ Storage manager representation of closures \begin{code} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE CPP,GeneralizedNewtypeDeriving #-} module SMRep ( -- * Words and bytes diff --git a/compiler/codeGen/CgUtils.hs b/compiler/codeGen/CgUtils.hs index 6b36ab09cd..51b8ed9ec8 100644 --- a/compiler/codeGen/CgUtils.hs +++ b/compiler/codeGen/CgUtils.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, GADTs #-} + ----------------------------------------------------------------------------- -- -- Code generator utilities; mostly monadic @@ -6,7 +8,6 @@ -- ----------------------------------------------------------------------------- -{-# LANGUAGE GADTs #-} module CgUtils ( fixStgRegisters ) where #include "HsVersions.h" diff --git a/compiler/codeGen/CodeGen/Platform/ARM.hs b/compiler/codeGen/CodeGen/Platform/ARM.hs index 727a43561f..5d1148496c 100644 --- a/compiler/codeGen/CodeGen/Platform/ARM.hs +++ b/compiler/codeGen/CodeGen/Platform/ARM.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CodeGen.Platform.ARM where diff --git a/compiler/codeGen/CodeGen/Platform/NoRegs.hs b/compiler/codeGen/CodeGen/Platform/NoRegs.hs index c4c63b7572..0c85ffbda7 100644 --- a/compiler/codeGen/CodeGen/Platform/NoRegs.hs +++ b/compiler/codeGen/CodeGen/Platform/NoRegs.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CodeGen.Platform.NoRegs where diff --git a/compiler/codeGen/CodeGen/Platform/PPC.hs 
b/compiler/codeGen/CodeGen/Platform/PPC.hs index bcbdfe244b..76a2b020ac 100644 --- a/compiler/codeGen/CodeGen/Platform/PPC.hs +++ b/compiler/codeGen/CodeGen/Platform/PPC.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CodeGen.Platform.PPC where diff --git a/compiler/codeGen/CodeGen/Platform/PPC_Darwin.hs b/compiler/codeGen/CodeGen/Platform/PPC_Darwin.hs index 42bf22f26c..a98e558cc1 100644 --- a/compiler/codeGen/CodeGen/Platform/PPC_Darwin.hs +++ b/compiler/codeGen/CodeGen/Platform/PPC_Darwin.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CodeGen.Platform.PPC_Darwin where diff --git a/compiler/codeGen/CodeGen/Platform/SPARC.hs b/compiler/codeGen/CodeGen/Platform/SPARC.hs index b49af14409..991f515eaf 100644 --- a/compiler/codeGen/CodeGen/Platform/SPARC.hs +++ b/compiler/codeGen/CodeGen/Platform/SPARC.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CodeGen.Platform.SPARC where diff --git a/compiler/codeGen/CodeGen/Platform/X86.hs b/compiler/codeGen/CodeGen/Platform/X86.hs index 6dd74df130..e74807ff88 100644 --- a/compiler/codeGen/CodeGen/Platform/X86.hs +++ b/compiler/codeGen/CodeGen/Platform/X86.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CodeGen.Platform.X86 where diff --git a/compiler/codeGen/CodeGen/Platform/X86_64.hs b/compiler/codeGen/CodeGen/Platform/X86_64.hs index 190d642ea6..102132d679 100644 --- a/compiler/codeGen/CodeGen/Platform/X86_64.hs +++ b/compiler/codeGen/CodeGen/Platform/X86_64.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module CodeGen.Platform.X86_64 where diff --git a/compiler/codeGen/StgCmm.hs b/compiler/codeGen/StgCmm.hs index a92f80439b..efc89fe04a 100644 --- a/compiler/codeGen/StgCmm.hs +++ b/compiler/codeGen/StgCmm.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Stg to C-- code generation @@ -37,7 +39,6 @@ import DataCon import Name import TyCon import Module -import ErrUtils import Outputable import Stream import BasicTypes @@ -60,9 +61,7 @@ codeGen :: DynFlags codeGen dflags this_mod data_tycons cost_centre_info stg_binds hpc_info - = do { liftIO $ showPass dflags "New CodeGen" - - -- cg: run the code generator, and yield the resulting CmmGroup + = do { -- cg: run the code generator, and yield the resulting CmmGroup -- Using an IORef to store the state is a bit crude, but otherwise -- we would need to add a state monad layer. 
; cgref <- liftIO $ newIORef =<< initC diff --git a/compiler/codeGen/StgCmmBind.hs b/compiler/codeGen/StgCmmBind.hs index 06e17164dd..4631b2dc14 100644 --- a/compiler/codeGen/StgCmmBind.hs +++ b/compiler/codeGen/StgCmmBind.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Stg to C-- code generation: bindings diff --git a/compiler/codeGen/StgCmmClosure.hs b/compiler/codeGen/StgCmmClosure.hs index c9302f21a1..b65d56bae2 100644 --- a/compiler/codeGen/StgCmmClosure.hs +++ b/compiler/codeGen/StgCmmClosure.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, RecordWildCards #-} + ----------------------------------------------------------------------------- -- -- Stg to C-- code generation: @@ -9,8 +11,6 @@ -- ----------------------------------------------------------------------------- -{-# LANGUAGE RecordWildCards #-} - module StgCmmClosure ( DynTag, tagForCon, isSmallFamily, ConTagZ, dataConTagZ, diff --git a/compiler/codeGen/StgCmmCon.hs b/compiler/codeGen/StgCmmCon.hs index a02a5da616..1a69927b5c 100644 --- a/compiler/codeGen/StgCmmCon.hs +++ b/compiler/codeGen/StgCmmCon.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Stg to C--: code generation for constructors diff --git a/compiler/codeGen/StgCmmEnv.hs b/compiler/codeGen/StgCmmEnv.hs index 2b8677c408..4127b67401 100644 --- a/compiler/codeGen/StgCmmEnv.hs +++ b/compiler/codeGen/StgCmmEnv.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Stg to C-- code generation: the binding environment diff --git a/compiler/codeGen/StgCmmExpr.hs b/compiler/codeGen/StgCmmExpr.hs index 9b9d6397c4..ad34b5ba19 100644 --- a/compiler/codeGen/StgCmmExpr.hs +++ b/compiler/codeGen/StgCmmExpr.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Stg to C-- code generation: expressions diff --git a/compiler/codeGen/StgCmmForeign.hs b/compiler/codeGen/StgCmmForeign.hs index bf88f1ccb3..6937c85d01 100644 --- a/compiler/codeGen/StgCmmForeign.hs +++ b/compiler/codeGen/StgCmmForeign.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Code generation for foreign calls. diff --git a/compiler/codeGen/StgCmmHeap.hs b/compiler/codeGen/StgCmmHeap.hs index a3a47a65e7..d00dc6ec84 100644 --- a/compiler/codeGen/StgCmmHeap.hs +++ b/compiler/codeGen/StgCmmHeap.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Stg to C--: heap management functions diff --git a/compiler/codeGen/StgCmmLayout.hs b/compiler/codeGen/StgCmmLayout.hs index a56248dcb9..99e926c987 100644 --- a/compiler/codeGen/StgCmmLayout.hs +++ b/compiler/codeGen/StgCmmLayout.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Building info tables. 
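A little further down, the StgCmmPrim hunk connects the new surface primops — FetchAddByteArrayOp_Int and the other Fetch*ByteArrayOp_Int operations, AtomicReadByteArrayOp_Int, AtomicWriteByteArrayOp_Int and CasByteArrayOp_Int — to those machine ops through doAtomicRMW, doAtomicReadByteArray, doAtomicWriteByteArray and doCasByteArray. As a usage-level sketch of the compare-and-swap path (again not part of the patch; the names casIntArray# and atomicReadIntArray#, and their export from GHC.Exts, are assumptions), a retry loop that atomically applies a pure function to a word of a MutableByteArray# looks like this:

{-# LANGUAGE MagicHash, UnboxedTuples #-}

-- Sketch: the classic CAS retry loop over the primop that
-- doCasByteArray lowers to MO_Cmpxchg.
module CasSketch (atomicModifyWord) where

import GHC.Exts
import GHC.IO (IO (..))

-- Atomically replace the Int at index 0 with 'f' applied to it,
-- returning the value that was finally overwritten.
atomicModifyWord :: MutableByteArray# RealWorld -> (Int -> Int) -> IO Int
atomicModifyWord mba f = IO loop
  where
    loop s0 =
      case atomicReadIntArray# mba 0# s0 of          -- barriered read
        (# s1, old #) ->
          case f (I# old) of
            I# new ->
              case casIntArray# mba 0# old new s1 of
                (# s2, prev #)
                  | isTrue# (prev ==# old) -> (# s2, I# old #)  -- CAS won
                  | otherwise              -> loop s2           -- raced; retry

Each attempt re-reads the cell with barrier semantics before trying the exchange, so a lost race shows up as a mismatch between prev and old and is simply retried.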
diff --git a/compiler/codeGen/StgCmmMonad.hs b/compiler/codeGen/StgCmmMonad.hs index 348b7b9299..cad261bcfb 100644 --- a/compiler/codeGen/StgCmmMonad.hs +++ b/compiler/codeGen/StgCmmMonad.hs @@ -1,4 +1,5 @@ -{-# LANGUAGE GADTs #-} +{-# LANGUAGE CPP, GADTs, UnboxedTuples #-} + ----------------------------------------------------------------------------- -- -- Monad for Stg to C-- code generation diff --git a/compiler/codeGen/StgCmmPrim.hs b/compiler/codeGen/StgCmmPrim.hs index 5c75acba5a..e4c682bf02 100644 --- a/compiler/codeGen/StgCmmPrim.hs +++ b/compiler/codeGen/StgCmmPrim.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ---------------------------------------------------------------------------- -- -- Stg to C--: primitive operations @@ -767,6 +769,25 @@ emitPrimOp _ res PrefetchByteArrayOp0 args = doPrefetchByteArrayOp 0 res emitPrimOp _ res PrefetchMutableByteArrayOp0 args = doPrefetchByteArrayOp 0 res args emitPrimOp _ res PrefetchAddrOp0 args = doPrefetchAddrOp 0 res args +-- Atomic read-modify-write +emitPrimOp dflags [res] FetchAddByteArrayOp_Int [mba, ix, n] = + doAtomicRMW res AMO_Add mba ix (bWord dflags) n +emitPrimOp dflags [res] FetchSubByteArrayOp_Int [mba, ix, n] = + doAtomicRMW res AMO_Sub mba ix (bWord dflags) n +emitPrimOp dflags [res] FetchAndByteArrayOp_Int [mba, ix, n] = + doAtomicRMW res AMO_And mba ix (bWord dflags) n +emitPrimOp dflags [res] FetchNandByteArrayOp_Int [mba, ix, n] = + doAtomicRMW res AMO_Nand mba ix (bWord dflags) n +emitPrimOp dflags [res] FetchOrByteArrayOp_Int [mba, ix, n] = + doAtomicRMW res AMO_Or mba ix (bWord dflags) n +emitPrimOp dflags [res] FetchXorByteArrayOp_Int [mba, ix, n] = + doAtomicRMW res AMO_Xor mba ix (bWord dflags) n +emitPrimOp dflags [res] AtomicReadByteArrayOp_Int [mba, ix] = + doAtomicReadByteArray res mba ix (bWord dflags) +emitPrimOp dflags [] AtomicWriteByteArrayOp_Int [mba, ix, val] = + doAtomicWriteByteArray mba ix (bWord dflags) val +emitPrimOp dflags [res] CasByteArrayOp_Int [mba, ix, old, new] = + doCasByteArray res mba ix (bWord dflags) old new -- The rest just translate straightforwardly emitPrimOp dflags [res] op [arg] @@ -1931,6 +1952,81 @@ doWriteSmallPtrArrayOp addr idx val = do emit (setInfo addr (CmmLit (CmmLabel mkSMAP_DIRTY_infoLabel))) ------------------------------------------------------------------------------ +-- Atomic read-modify-write + +-- | Emit an atomic modification to a byte array element. The result +-- reg contains that previous value of the element. Implies a full +-- memory barrier. +doAtomicRMW :: LocalReg -- ^ Result reg + -> AtomicMachOp -- ^ Atomic op (e.g. add) + -> CmmExpr -- ^ MutableByteArray# + -> CmmExpr -- ^ Index + -> CmmType -- ^ Type of element by which we are indexing + -> CmmExpr -- ^ Op argument (e.g. amount to add) + -> FCode () +doAtomicRMW res amop mba idx idx_ty n = do + dflags <- getDynFlags + let width = typeWidth idx_ty + addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags) + width mba idx + emitPrimCall + [ res ] + (MO_AtomicRMW width amop) + [ addr, n ] + +-- | Emit an atomic read to a byte array that acts as a memory barrier. 
+doAtomicReadByteArray + :: LocalReg -- ^ Result reg + -> CmmExpr -- ^ MutableByteArray# + -> CmmExpr -- ^ Index + -> CmmType -- ^ Type of element by which we are indexing + -> FCode () +doAtomicReadByteArray res mba idx idx_ty = do + dflags <- getDynFlags + let width = typeWidth idx_ty + addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags) + width mba idx + emitPrimCall + [ res ] + (MO_AtomicRead width) + [ addr ] + +-- | Emit an atomic write to a byte array that acts as a memory barrier. +doAtomicWriteByteArray + :: CmmExpr -- ^ MutableByteArray# + -> CmmExpr -- ^ Index + -> CmmType -- ^ Type of element by which we are indexing + -> CmmExpr -- ^ Value to write + -> FCode () +doAtomicWriteByteArray mba idx idx_ty val = do + dflags <- getDynFlags + let width = typeWidth idx_ty + addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags) + width mba idx + emitPrimCall + [ {- no results -} ] + (MO_AtomicWrite width) + [ addr, val ] + +doCasByteArray + :: LocalReg -- ^ Result reg + -> CmmExpr -- ^ MutableByteArray# + -> CmmExpr -- ^ Index + -> CmmType -- ^ Type of element by which we are indexing + -> CmmExpr -- ^ Old value + -> CmmExpr -- ^ New value + -> FCode () +doCasByteArray res mba idx idx_ty old new = do + dflags <- getDynFlags + let width = (typeWidth idx_ty) + addr = cmmIndexOffExpr dflags (arrWordsHdrSize dflags) + width mba idx + emitPrimCall + [ res ] + (MO_Cmpxchg width) + [ addr, old, new ] + +------------------------------------------------------------------------------ -- Helpers for emitting function calls -- | Emit a call to @memcpy@. diff --git a/compiler/codeGen/StgCmmProf.hs b/compiler/codeGen/StgCmmProf.hs index f858c5a0b6..1aa08a1e58 100644 --- a/compiler/codeGen/StgCmmProf.hs +++ b/compiler/codeGen/StgCmmProf.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Code generation for profiling diff --git a/compiler/codeGen/StgCmmTicky.hs b/compiler/codeGen/StgCmmTicky.hs index b1218201a6..6913c9ec15 100644 --- a/compiler/codeGen/StgCmmTicky.hs +++ b/compiler/codeGen/StgCmmTicky.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE BangPatterns, CPP #-} + ----------------------------------------------------------------------------- -- -- Code generation for ticky-ticky profiling diff --git a/compiler/codeGen/StgCmmUtils.hs b/compiler/codeGen/StgCmmUtils.hs index 1c6c3f2eae..bc1a15fe3c 100644 --- a/compiler/codeGen/StgCmmUtils.hs +++ b/compiler/codeGen/StgCmmUtils.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Code generator utilities; mostly monadic diff --git a/compiler/coreSyn/CoreArity.lhs b/compiler/coreSyn/CoreArity.lhs index ca7216fe29..26669b6d32 100644 --- a/compiler/coreSyn/CoreArity.lhs +++ b/compiler/coreSyn/CoreArity.lhs @@ -6,7 +6,8 @@ Arity and eta expansion \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/coreSyn/CoreFVs.lhs b/compiler/coreSyn/CoreFVs.lhs index 636c049c42..4011191d75 100644 --- a/compiler/coreSyn/CoreFVs.lhs +++ b/compiler/coreSyn/CoreFVs.lhs @@ -5,6 +5,8 @@ Taken quite directly from the Peyton Jones/Lester paper. \begin{code} +{-# LANGUAGE CPP #-} + -- | A module concerned with finding the free variables of an expression. 
module CoreFVs ( -- * Free variables of expressions and binding groups diff --git a/compiler/coreSyn/CoreLint.lhs b/compiler/coreSyn/CoreLint.lhs index b5c79855f2..a5868108d9 100644 --- a/compiler/coreSyn/CoreLint.lhs +++ b/compiler/coreSyn/CoreLint.lhs @@ -7,12 +7,7 @@ A ``lint'' pass to check for Core correctness \begin{code} --- The above warning supression flag is a temporary kludge. --- While working on this module you are encouraged to remove it and --- detab the module (please do the detabbing in a separate patch). See --- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces --- for details - +{-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fprof-auto #-} module CoreLint ( lintCoreBindings, lintUnfolding, lintExpr ) where @@ -856,6 +851,9 @@ lintCoercion co@(TyConAppCo r tc cos) ; checkRole co2 r r2 ; return (rk, mkFunTy s1 s2, mkFunTy t1 t2, r) } + | Just {} <- synTyConDefn_maybe tc + = failWithL (ptext (sLit "Synonym in TyConAppCo:") <+> ppr co) + | otherwise = do { (ks,ss,ts,rs) <- mapAndUnzip4M lintCoercion cos ; rk <- lint_co_app co (tyConKind tc) (ss `zip` ks) diff --git a/compiler/coreSyn/CorePrep.lhs b/compiler/coreSyn/CorePrep.lhs index 5e0cd6599d..c754aae4e7 100644 --- a/compiler/coreSyn/CorePrep.lhs +++ b/compiler/coreSyn/CorePrep.lhs @@ -5,7 +5,7 @@ Core pass to saturate constructors and PrimOps \begin{code} -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE BangPatterns, CPP #-} module CorePrep ( corePrepPgm, corePrepExpr, cvtLitInteger, @@ -196,6 +196,7 @@ corePrepTopBinds initialCorePrepEnv binds mkDataConWorkers :: [TyCon] -> [CoreBind] -- See Note [Data constructor workers] +-- c.f. Note [Injecting implicit bindings] in TidyPgm mkDataConWorkers data_tycons = [ NonRec id (Var id) -- The ice is thin here, but it works | tycon <- data_tycons, -- CorePrep will eta-expand it diff --git a/compiler/coreSyn/CoreSubst.lhs b/compiler/coreSyn/CoreSubst.lhs index ef601a2a09..f3215094df 100644 --- a/compiler/coreSyn/CoreSubst.lhs +++ b/compiler/coreSyn/CoreSubst.lhs @@ -6,7 +6,8 @@ Utility functions on @Core@ syntax \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/coreSyn/CoreSyn.lhs b/compiler/coreSyn/CoreSyn.lhs index defd669a78..b36cb6d8a6 100644 --- a/compiler/coreSyn/CoreSyn.lhs +++ b/compiler/coreSyn/CoreSyn.lhs @@ -4,9 +4,8 @@ % \begin{code} -{-# LANGUAGE DeriveDataTypeable, DeriveFunctor #-} - -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/coreSyn/CoreTidy.lhs b/compiler/coreSyn/CoreTidy.lhs index 8c0ae4a65a..4754aa5afb 100644 --- a/compiler/coreSyn/CoreTidy.lhs +++ b/compiler/coreSyn/CoreTidy.lhs @@ -7,7 +7,8 @@ This module contains "tidying" code for *nested* expressions, bindings, rules. The code for *top-level* bindings is in TidyPgm. \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -33,7 +34,6 @@ import Name hiding (tidyNameOcc) import SrcLoc import Maybes import Data.List -import Outputable \end{code} @@ -141,18 +141,48 @@ tidyBndr env var tidyBndrs :: TidyEnv -> [Var] -> (TidyEnv, [Var]) tidyBndrs env vars = mapAccumL tidyBndr env vars +-- Non-top-level variables +tidyIdBndr :: TidyEnv -> Id -> (TidyEnv, Id) +tidyIdBndr env@(tidy_env, var_env) id + = -- Do this pattern match strictly, otherwise we end up holding on to + -- stuff in the OccName. + case tidyOccName tidy_env (getOccName id) of { (tidy_env', occ') -> + let + -- Give the Id a fresh print-name, *and* rename its type + -- The SrcLoc isn't important now, + -- though we could extract it from the Id + -- + ty' = tidyType env (idType id) + name' = mkInternalName (idUnique id) occ' noSrcSpan + id' = mkLocalIdWithInfo name' ty' new_info + var_env' = extendVarEnv var_env id id' + + -- Note [Tidy IdInfo] + new_info = vanillaIdInfo `setOccInfo` occInfo old_info + `setUnfoldingInfo` new_unf + old_info = idInfo id + old_unf = unfoldingInfo old_info + new_unf | isEvaldUnfolding old_unf = evaldUnfolding + | otherwise = noUnfolding + -- See Note [Preserve evaluatedness] + in + ((tidy_env', var_env'), id') + } + tidyLetBndr :: TidyEnv -- Knot-tied version for unfoldings -> TidyEnv -- The one to extend -> (Id, CoreExpr) -> (TidyEnv, Var) -- Used for local (non-top-level) let(rec)s -tidyLetBndr rec_tidy_env env (id,rhs) - = ((tidy_occ_env,new_var_env), final_id) - where - ((tidy_occ_env,var_env), new_id) = tidyIdBndr env id - new_var_env = extendVarEnv var_env id final_id - -- Override the env we get back from tidyId with the - -- new IdInfo so it gets propagated to the usage sites. +-- Just like tidyIdBndr above, but with more IdInfo +tidyLetBndr rec_tidy_env env@(tidy_env, var_env) (id,rhs) + = case tidyOccName tidy_env (getOccName id) of { (tidy_env', occ') -> + let + ty' = tidyType env (idType id) + name' = mkInternalName (idUnique id) occ' noSrcSpan + id' = mkLocalIdWithInfo name' ty' new_info + var_env' = extendVarEnv var_env id id' + -- Note [Tidy IdInfo] -- We need to keep around any interesting strictness and -- demand info because later on we may need to use it when -- converting to A-normal form. @@ -161,48 +191,27 @@ tidyLetBndr rec_tidy_env env (id,rhs) -- into case (g x) of z -> f z by CorePrep, but only if f still -- has its strictness info. -- - -- Similarly for the demand info - on a let binder, this tells + -- Similarly for the demand info - on a let binder, this tells -- CorePrep to turn the let into a case. -- -- Similarly arity info for eta expansion in CorePrep - -- - -- Set inline-prag info so that we preseve it across + -- + -- Set inline-prag info so that we preseve it across -- separate compilation boundaries - final_id = new_id `setIdInfo` new_info - idinfo = idInfo id - new_info = idInfo new_id - `setArityInfo` exprArity rhs - `setStrictnessInfo` strictnessInfo idinfo - `setDemandInfo` demandInfo idinfo - `setInlinePragInfo` inlinePragInfo idinfo - `setUnfoldingInfo` new_unf - - new_unf | isStableUnfolding unf = tidyUnfolding rec_tidy_env unf (panic "tidy_unf") - | otherwise = noUnfolding - unf = unfoldingInfo idinfo - --- Non-top-level variables -tidyIdBndr :: TidyEnv -> Id -> (TidyEnv, Id) -tidyIdBndr env@(tidy_env, var_env) id - = -- Do this pattern match strictly, otherwise we end up holding on to - -- stuff in the OccName. 
- case tidyOccName tidy_env (getOccName id) of { (tidy_env', occ') -> - let - -- Give the Id a fresh print-name, *and* rename its type - -- The SrcLoc isn't important now, - -- though we could extract it from the Id - -- - ty' = tidyType env (idType id) - name' = mkInternalName (idUnique id) occ' noSrcSpan - id' = mkLocalIdWithInfo name' ty' new_info - var_env' = extendVarEnv var_env id id' - - -- Note [Tidy IdInfo] - new_info = vanillaIdInfo `setOccInfo` occInfo old_info old_info = idInfo id + new_info = vanillaIdInfo + `setOccInfo` occInfo old_info + `setArityInfo` exprArity rhs + `setStrictnessInfo` strictnessInfo old_info + `setDemandInfo` demandInfo old_info + `setInlinePragInfo` inlinePragInfo old_info + `setUnfoldingInfo` new_unf + + new_unf | isStableUnfolding old_unf = tidyUnfolding rec_tidy_env old_unf old_unf + | otherwise = noUnfolding + old_unf = unfoldingInfo old_info in - ((tidy_env', var_env'), id') - } + ((tidy_env', var_env'), id') } ------------ Unfolding -------------- tidyUnfolding :: TidyEnv -> Unfolding -> Unfolding -> Unfolding @@ -234,9 +243,26 @@ two reasons: the benefit of that occurrence analysis when we use the rule or or inline the function. In particular, it's vital not to lose loop-breaker info, else we get an infinite inlining loop - + Note that tidyLetBndr puts more IdInfo back. +Note [Preserve evaluatedness] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Consider + data T = MkT !Bool + ....(case v of MkT y -> + let z# = case y of + True -> 1# + False -> 2# + in ...) + +The z# binding is ok because the RHS is ok-for-speculation, +but Lint will complain unless it can *see* that. So we +preserve the evaluated-ness on 'y' in tidyBndr. + +(Another alternative would be to tidy unboxed lets into cases, +but that seems more indirect and surprising.) + \begin{code} (=:) :: a -> (a -> b) -> b diff --git a/compiler/coreSyn/CoreUnfold.lhs b/compiler/coreSyn/CoreUnfold.lhs index 3a2c237602..fa9259a005 100644 --- a/compiler/coreSyn/CoreUnfold.lhs +++ b/compiler/coreSyn/CoreUnfold.lhs @@ -15,7 +15,8 @@ literal''). In the corner of a @CoreUnfolding@ unfolding, you will find, unsurprisingly, a Core expression. \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/coreSyn/CoreUtils.lhs b/compiler/coreSyn/CoreUtils.lhs index ea2e17fb04..3bf07febf3 100644 --- a/compiler/coreSyn/CoreUtils.lhs +++ b/compiler/coreSyn/CoreUtils.lhs @@ -6,6 +6,8 @@ Utility functions on @Core@ syntax \begin{code} +{-# LANGUAGE CPP #-} + -- | Commonly useful utilites for manipulating the Core language module CoreUtils ( -- * Constructing expressions @@ -215,7 +217,7 @@ mkCast expr co -- if to_ty `eqType` from_ty -- then expr -- else - WARN(not (from_ty `eqType` exprType expr), text "Trying to coerce" <+> text "(" <> ppr expr $$ text "::" <+> ppr (exprType expr) <> text ")" $$ ppr co $$ pprEqPred (coercionKind co)) + WARN(not (from_ty `eqType` exprType expr), text "Trying to coerce" <+> text "(" <> ppr expr $$ text "::" <+> ppr (exprType expr) <> text ")" $$ ppr co $$ ppr (coercionType co)) (Cast expr co) \end{code} @@ -1222,7 +1224,7 @@ dataConInstPat :: [FastString] -- A long enough list of FSs to use for -> [Unique] -- An equally long list of uniques, at least one for each binder -> DataCon -> [Type] -- Types to instantiate the universally quantified tyvars - -> ([TyVar], [Id]) -- Return instantiated variables + -> ([TyVar], [Id]) -- Return instantiated variables -- dataConInstPat arg_fun fss us con inst_tys returns a triple -- (ex_tvs, arg_ids), -- @@ -1250,14 +1252,14 @@ dataConInstPat :: [FastString] -- A long enough list of FSs to use for -- -- where the double-primed variables are created with the FastStrings and -- Uniques given as fss and us -dataConInstPat fss uniqs con inst_tys +dataConInstPat fss uniqs con inst_tys = ASSERT( univ_tvs `equalLength` inst_tys ) (ex_bndrs, arg_ids) - where + where univ_tvs = dataConUnivTyVars con ex_tvs = dataConExTyVars con arg_tys = dataConRepArgTys con - + arg_strs = dataConRepStrictness con -- 1-1 with arg_tys n_ex = length ex_tvs -- split the Uniques and FastStrings @@ -1268,7 +1270,7 @@ dataConInstPat fss uniqs con inst_tys univ_subst = zipOpenTvSubst univ_tvs inst_tys -- Make existential type variables, applyingn and extending the substitution - (full_subst, ex_bndrs) = mapAccumL mk_ex_var univ_subst + (full_subst, ex_bndrs) = mapAccumL mk_ex_var univ_subst (zip3 ex_tvs ex_fss ex_uniqs) mk_ex_var :: TvSubst -> (TyVar, FastString, Unique) -> (TvSubst, TyVar) @@ -1280,11 +1282,30 @@ dataConInstPat fss uniqs con inst_tys kind = Type.substTy subst (tyVarKind tv) -- Make value vars, instantiating types - arg_ids = zipWith3 mk_id_var id_uniqs id_fss arg_tys - mk_id_var uniq fs ty = mkUserLocal (mkVarOccFS fs) uniq - (Type.substTy full_subst ty) noSrcSpan + arg_ids = zipWith4 mk_id_var id_uniqs id_fss arg_tys arg_strs + mk_id_var uniq fs ty str + = mkLocalIdWithInfo name (Type.substTy full_subst ty) info + where + name = mkInternalName uniq (mkVarOccFS fs) noSrcSpan + info | isMarkedStrict str = vanillaIdInfo `setUnfoldingInfo` evaldUnfolding + | otherwise = vanillaIdInfo + -- See Note [Mark evaluated arguments] \end{code} +Note [Mark evaluated arguments] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +When pattern matching on a constructor with strict fields, the binder +can have an 'evaldUnfolding'. Moreover, it *should* have one, so that +when loading an interface file unfolding like: + data T = MkT !Int + f x = case x of { MkT y -> let v::Int# = case y of I# n -> n+1 + in ... } +we don't want Lint to complain. The 'y' is evaluated, so the +case in the RHS of the binding for 'v' is fine. But only if we +*know* that 'y' is evaluated. + +c.f. 
add_evals in Simplify.simplAlt + %************************************************************************ %* * Equality diff --git a/compiler/coreSyn/ExternalCore.lhs b/compiler/coreSyn/ExternalCore.lhs deleted file mode 100644 index ecc24b1155..0000000000 --- a/compiler/coreSyn/ExternalCore.lhs +++ /dev/null @@ -1,118 +0,0 @@ -% -% (c) The University of Glasgow 2001-2006 -% -\begin{code} -module ExternalCore where - -import Data.Word - -data Module - = Module Mname [Tdef] [Vdefg] - -data Tdef - = Data (Qual Tcon) [Tbind] [Cdef] - | Newtype (Qual Tcon) (Qual Tcon) [Tbind] Ty - -data Cdef - = Constr (Qual Dcon) [Tbind] [Ty] - | GadtConstr (Qual Dcon) Ty - -data Vdefg - = Rec [Vdef] - | Nonrec Vdef - --- Top-level bindings are qualified, so that the printer doesn't have to pass --- around the module name. -type Vdef = (Bool,Qual Var,Ty,Exp) - -data Exp - = Var (Qual Var) - | Dcon (Qual Dcon) - | Lit Lit - | App Exp Exp - | Appt Exp Ty - | Lam Bind Exp - | Let Vdefg Exp - | Case Exp Vbind Ty [Alt] {- non-empty list -} - | Cast Exp Coercion - | Tick String Exp {- XXX probably wrong -} - | External String String Ty {- target name, convention, and type -} - | DynExternal String Ty {- convention and type (incl. Addr# of target as first arg) -} - | Label String - -data Bind - = Vb Vbind - | Tb Tbind - -data Alt - = Acon (Qual Dcon) [Tbind] [Vbind] Exp - | Alit Lit Exp - | Adefault Exp - -type Vbind = (Var,Ty) -type Tbind = (Tvar,Kind) - -data Ty - = Tvar Tvar - | Tcon (Qual Tcon) - | Tapp Ty Ty - | Tforall Tbind Ty - -data Coercion --- We distinguish primitive coercions because External Core treats --- them specially, so we have to print them out with special syntax. - = ReflCoercion Role Ty - | SymCoercion Coercion - | TransCoercion Coercion Coercion - | TyConAppCoercion Role (Qual Tcon) [Coercion] - | AppCoercion Coercion Coercion - | ForAllCoercion Tbind Coercion - | CoVarCoercion Var - | UnivCoercion Role Ty Ty - | InstCoercion Coercion Ty - | NthCoercion Int Coercion - | AxiomCoercion (Qual Tcon) Int [Coercion] - | LRCoercion LeftOrRight Coercion - | SubCoercion Coercion - -data Role = Nominal | Representational | Phantom - -data LeftOrRight = CLeft | CRight - -data Kind - = Klifted - | Kunlifted - | Kunboxed - | Kopen - | Karrow Kind Kind - -data Lit - = Lint Integer Ty - | Lrational Rational Ty - | Lchar Char Ty - | Lstring [Word8] Ty - - -type Mname = Id -type Var = Id -type Tvar = Id -type Tcon = Id -type Dcon = Id - -type Qual t = (Mname,t) - -type Id = String - -primMname :: Mname --- For truly horrible reasons, this must be z-encoded. --- With any hope, the z-encoding will die soon. -primMname = "ghczmprim:GHCziPrim" - -tcArrow :: Qual Tcon -tcArrow = (primMname, "(->)") - -\end{code} - - - - diff --git a/compiler/coreSyn/MkCore.lhs b/compiler/coreSyn/MkCore.lhs index f71b4b4ff6..5213f92bac 100644 --- a/compiler/coreSyn/MkCore.lhs +++ b/compiler/coreSyn/MkCore.lhs @@ -1,5 +1,6 @@ \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See @@ -303,9 +304,9 @@ mkStringExprFS str mkEqBox :: Coercion -> CoreExpr mkEqBox co = ASSERT2( typeKind ty2 `eqKind` k, ppr co $$ ppr ty1 $$ ppr ty2 $$ ppr (typeKind ty1) $$ ppr (typeKind ty2) ) Var (dataConWorkId datacon) `mkTyApps` [k, ty1, ty2] `App` Coercion co - where Pair ty1 ty2 = coercionKind co + where (Pair ty1 ty2, role) = coercionKindRole co k = typeKind ty1 - datacon = case coercionRole co of + datacon = case role of Nominal -> eqBoxDataCon Representational -> coercibleDataCon Phantom -> pprPanic "mkEqBox does not support boxing phantom coercions" diff --git a/compiler/coreSyn/MkExternalCore.lhs b/compiler/coreSyn/MkExternalCore.lhs deleted file mode 100644 index 6a6f0551ed..0000000000 --- a/compiler/coreSyn/MkExternalCore.lhs +++ /dev/null @@ -1,360 +0,0 @@ - -% (c) The University of Glasgow 2001-2006 -% -\begin{code} -module MkExternalCore ( - emitExternalCore -) where - -#include "HsVersions.h" - -import qualified ExternalCore as C -import Module -import CoreSyn -import HscTypes -import TyCon -import CoAxiom --- import Class -import TypeRep -import Type -import Kind -import PprExternalCore () -- Instances -import DataCon -import Coercion -import Var -import IdInfo -import Literal -import Name -import Outputable -import Encoding -import ForeignCall -import DynFlags -import FastString -import Exception - -import Control.Applicative (Applicative(..)) -import Control.Monad -import qualified Data.ByteString as BS -import Data.Char -import System.IO - -emitExternalCore :: DynFlags -> FilePath -> CgGuts -> IO () -emitExternalCore dflags extCore_filename cg_guts - | gopt Opt_EmitExternalCore dflags - = (do handle <- openFile extCore_filename WriteMode - hPutStrLn handle (show (mkExternalCore dflags cg_guts)) - hClose handle) - `catchIO` (\_ -> pprPanic "Failed to open or write external core output file" - (text extCore_filename)) -emitExternalCore _ _ _ - | otherwise - = return () - --- Reinventing the Reader monad; whee. -newtype CoreM a = CoreM (CoreState -> (CoreState, a)) -data CoreState = CoreState { - cs_dflags :: DynFlags, - cs_module :: Module - } - -instance Functor CoreM where - fmap = liftM - -instance Applicative CoreM where - pure = return - (<*>) = ap - -instance Monad CoreM where - (CoreM m) >>= f = CoreM (\ s -> case m s of - (s',r) -> case f r of - CoreM f' -> f' s') - return x = CoreM (\ s -> (s, x)) -runCoreM :: CoreM a -> CoreState -> a -runCoreM (CoreM f) s = snd $ f s -ask :: CoreM CoreState -ask = CoreM (\ s -> (s,s)) - -instance HasDynFlags CoreM where - getDynFlags = liftM cs_dflags ask - -mkExternalCore :: DynFlags -> CgGuts -> C.Module --- The ModGuts has been tidied, but the implicit bindings have --- not been injected, so we have to add them manually here --- We don't include the strange data-con *workers* because they are --- implicit in the data type declaration itself -mkExternalCore dflags (CgGuts {cg_module=this_mod, cg_tycons = tycons, - cg_binds = binds}) -{- Note that modules can be mutually recursive, but even so, we - print out dependency information within each module. 
-} - = C.Module (mname dflags) tdefs (runCoreM (mapM (make_vdef True) binds) initialState) - where - initialState = CoreState { - cs_dflags = dflags, - cs_module = this_mod - } - mname dflags = make_mid dflags this_mod - tdefs = foldr (collect_tdefs dflags) [] tycons - -collect_tdefs :: DynFlags -> TyCon -> [C.Tdef] -> [C.Tdef] -collect_tdefs dflags tcon tdefs - | isAlgTyCon tcon = tdef: tdefs - where - tdef | isNewTyCon tcon = - C.Newtype (qtc dflags tcon) - (qcc dflags (newTyConCo tcon)) - (map make_tbind tyvars) - (make_ty dflags (snd (newTyConRhs tcon))) - | otherwise = - C.Data (qtc dflags tcon) (map make_tbind tyvars) - (map (make_cdef dflags) (tyConDataCons tcon)) - tyvars = tyConTyVars tcon - -collect_tdefs _ _ tdefs = tdefs - -qtc :: DynFlags -> TyCon -> C.Qual C.Tcon -qtc dflags = make_con_qid dflags . tyConName - -qcc :: DynFlags -> CoAxiom br -> C.Qual C.Tcon -qcc dflags = make_con_qid dflags . co_ax_name - -make_cdef :: DynFlags -> DataCon -> C.Cdef -make_cdef dflags dcon = C.Constr dcon_name existentials tys - where - dcon_name = make_qid dflags False False (dataConName dcon) - existentials = map make_tbind ex_tyvars - ex_tyvars = dataConExTyVars dcon - tys = map (make_ty dflags) (dataConRepArgTys dcon) - -make_tbind :: TyVar -> C.Tbind -make_tbind tv = (make_var_id (tyVarName tv), make_kind (tyVarKind tv)) - -make_vbind :: DynFlags -> Var -> C.Vbind -make_vbind dflags v = (make_var_id (Var.varName v), make_ty dflags (varType v)) - -make_vdef :: Bool -> CoreBind -> CoreM C.Vdefg -make_vdef topLevel b = - case b of - NonRec v e -> f (v,e) >>= (return . C.Nonrec) - Rec ves -> mapM f ves >>= (return . C.Rec) - where - f :: (CoreBndr,CoreExpr) -> CoreM C.Vdef - f (v,e) = do - localN <- isALocal vName - let local = not topLevel || localN - rhs <- make_exp e - -- use local flag to determine where to add the module name - dflags <- getDynFlags - return (local, make_qid dflags local True vName, make_ty dflags (varType v),rhs) - where vName = Var.varName v - -make_exp :: CoreExpr -> CoreM C.Exp -make_exp (Var v) = do - let vName = Var.varName v - isLocal <- isALocal vName - dflags <- getDynFlags - return $ - case idDetails v of - FCallId (CCall (CCallSpec (StaticTarget nm _ True) callconv _)) - -> C.External (unpackFS nm) (showPpr dflags callconv) (make_ty dflags (varType v)) - FCallId (CCall (CCallSpec (StaticTarget _ _ False) _ _)) -> - panic "make_exp: FFI values not supported" - FCallId (CCall (CCallSpec DynamicTarget callconv _)) - -> C.DynExternal (showPpr dflags callconv) (make_ty dflags (varType v)) - -- Constructors are always exported, so make sure to declare them - -- with qualified names - DataConWorkId _ -> C.Var (make_var_qid dflags False vName) - DataConWrapId _ -> C.Var (make_var_qid dflags False vName) - _ -> C.Var (make_var_qid dflags isLocal vName) -make_exp (Lit (MachLabel s _ _)) = return $ C.Label (unpackFS s) -make_exp (Lit l) = do dflags <- getDynFlags - return $ C.Lit (make_lit dflags l) -make_exp (App e (Type t)) = do b <- make_exp e - dflags <- getDynFlags - return $ C.Appt b (make_ty dflags t) -make_exp (App _e (Coercion _co)) = error "make_exp (App _ (Coercion _))" -- TODO -make_exp (App e1 e2) = do - rator <- make_exp e1 - rand <- make_exp e2 - return $ C.App rator rand -make_exp (Lam v e) | isTyVar v = make_exp e >>= (\ b -> - return $ C.Lam (C.Tb (make_tbind v)) b) -make_exp (Lam v e) | otherwise = do b <- make_exp e - dflags <- getDynFlags - return $ C.Lam (C.Vb (make_vbind dflags v)) b -make_exp (Cast e co) = do b <- make_exp e - dflags <- getDynFlags 
- return $ C.Cast b (make_co dflags co) -make_exp (Let b e) = do - vd <- make_vdef False b - body <- make_exp e - return $ C.Let vd body -make_exp (Case e v ty alts) = do - scrut <- make_exp e - newAlts <- mapM make_alt alts - dflags <- getDynFlags - return $ C.Case scrut (make_vbind dflags v) (make_ty dflags ty) newAlts -make_exp (Tick _ e) = make_exp e >>= (return . C.Tick "SCC") -- temporary -make_exp _ = error "MkExternalCore died: make_exp" - -make_alt :: CoreAlt -> CoreM C.Alt -make_alt (DataAlt dcon, vs, e) = do - newE <- make_exp e - dflags <- getDynFlags - return $ C.Acon (make_con_qid dflags (dataConName dcon)) - (map make_tbind tbs) - (map (make_vbind dflags) vbs) - newE - where (tbs,vbs) = span isTyVar vs -make_alt (LitAlt l,_,e) = do x <- make_exp e - dflags <- getDynFlags - return $ C.Alit (make_lit dflags l) x -make_alt (DEFAULT,[],e) = make_exp e >>= (return . C.Adefault) --- This should never happen, as the DEFAULT alternative binds no variables, --- but we might as well check for it: -make_alt a@(DEFAULT,_ ,_) = pprPanic ("MkExternalCore: make_alt: DEFAULT " - ++ "alternative had a non-empty var list") (ppr a) - - -make_lit :: DynFlags -> Literal -> C.Lit -make_lit dflags l = - case l of - -- Note that we need to check whether the character is "big". - -- External Core only allows character literals up to '\xff'. - MachChar i | i <= chr 0xff -> C.Lchar i t - -- For a character bigger than 0xff, we represent it in ext-core - -- as an int lit with a char type. - MachChar i -> C.Lint (fromIntegral $ ord i) t - MachStr s -> C.Lstring (BS.unpack s) t - MachNullAddr -> C.Lint 0 t - MachInt i -> C.Lint i t - MachInt64 i -> C.Lint i t - MachWord i -> C.Lint i t - MachWord64 i -> C.Lint i t - MachFloat r -> C.Lrational r t - MachDouble r -> C.Lrational r t - LitInteger i _ -> C.Lint i t - _ -> pprPanic "MkExternalCore died: make_lit" (ppr l) - where - t = make_ty dflags (literalType l) - --- Expand type synonyms, then convert. -make_ty :: DynFlags -> Type -> C.Ty -- Be sure to expand types recursively! - -- example: FilePath ~> String ~> [Char] -make_ty dflags t | Just expanded <- tcView t = make_ty dflags expanded -make_ty dflags t = make_ty' dflags t - --- note calls to make_ty so as to expand types recursively -make_ty' :: DynFlags -> Type -> C.Ty -make_ty' _ (TyVarTy tv) = C.Tvar (make_var_id (tyVarName tv)) -make_ty' dflags (AppTy t1 t2) = C.Tapp (make_ty dflags t1) (make_ty dflags t2) -make_ty' dflags (FunTy t1 t2) = make_ty dflags (TyConApp funTyCon [t1,t2]) -make_ty' dflags (ForAllTy tv t) = C.Tforall (make_tbind tv) (make_ty dflags t) -make_ty' dflags (TyConApp tc ts) = make_tyConApp dflags tc ts -make_ty' _ (LitTy {}) = panic "MkExernalCore can't do literal types yet" - --- Newtypes are treated just like any other type constructor; not expanded --- Reason: predTypeRep does substitution and, while substitution deals --- correctly with name capture, it's only correct if you see the uniques! --- If you just see occurrence names, name capture may occur. --- Example: newtype A a = A (forall b. b -> a) --- test :: forall q b. q -> A b --- test _ = undefined --- Here the 'a' gets substituted by 'b', which is captured. --- Another solution would be to expand newtypes before tidying; but that would --- expose the representation in interface files, which definitely isn't right. --- Maybe CoreTidy should know whether to expand newtypes or not? 
- -make_tyConApp :: DynFlags -> TyCon -> [Type] -> C.Ty -make_tyConApp dflags tc ts = - foldl C.Tapp (C.Tcon (qtc dflags tc)) - (map (make_ty dflags) ts) - -make_kind :: Kind -> C.Kind -make_kind (FunTy k1 k2) = C.Karrow (make_kind k1) (make_kind k2) -make_kind k - | isLiftedTypeKind k = C.Klifted - | isUnliftedTypeKind k = C.Kunlifted - | isOpenTypeKind k = C.Kopen -make_kind _ = error "MkExternalCore died: make_kind" - -{- Id generation. -} - -make_id :: Bool -> Name -> C.Id --- include uniques for internal names in order to avoid name shadowing -make_id _is_var nm = ((occNameString . nameOccName) nm) - ++ (if isInternalName nm then (show . nameUnique) nm else "") - -make_var_id :: Name -> C.Id -make_var_id = make_id True - --- It's important to encode the module name here, because in External Core, --- base:GHC.Base => base:GHCziBase --- We don't do this in pprExternalCore because we --- *do* want to keep the package name (we don't want baseZCGHCziBase, --- because that would just be ugly.) --- SIGH. --- We encode the package name as well. -make_mid :: DynFlags -> Module -> C.Id --- Super ugly code, but I can't find anything else that does quite what I --- want (encodes the hierarchical module name without encoding the colon --- that separates the package name from it.) -make_mid dflags m - = showSDoc dflags $ - (text $ zEncodeString $ packageIdString $ modulePackageId m) - <> text ":" - <> (pprEncoded $ pprModuleName $ moduleName m) - where pprEncoded = pprCode CStyle - -make_qid :: DynFlags -> Bool -> Bool -> Name -> C.Qual C.Id -make_qid dflags force_unqual is_var n = (mname,make_id is_var n) - where mname = - case nameModule_maybe n of - Just m | not force_unqual -> make_mid dflags m - _ -> "" - -make_var_qid :: DynFlags -> Bool -> Name -> C.Qual C.Id -make_var_qid dflags force_unqual = make_qid dflags force_unqual True - -make_con_qid :: DynFlags -> Name -> C.Qual C.Id -make_con_qid dflags = make_qid dflags False False - -make_co :: DynFlags -> Coercion -> C.Coercion -make_co dflags (Refl r ty) = C.ReflCoercion (make_role r) $ make_ty dflags ty -make_co dflags (TyConAppCo r tc cos) = C.TyConAppCoercion (make_role r) (qtc dflags tc) (map (make_co dflags) cos) -make_co dflags (AppCo c1 c2) = C.AppCoercion (make_co dflags c1) (make_co dflags c2) -make_co dflags (ForAllCo tv co) = C.ForAllCoercion (make_tbind tv) (make_co dflags co) -make_co _ (CoVarCo cv) = C.CoVarCoercion (make_var_id (coVarName cv)) -make_co dflags (AxiomInstCo cc ind cos) = C.AxiomCoercion (qcc dflags cc) ind (map (make_co dflags) cos) -make_co dflags (UnivCo r t1 t2) = C.UnivCoercion (make_role r) (make_ty dflags t1) (make_ty dflags t2) -make_co dflags (SymCo co) = C.SymCoercion (make_co dflags co) -make_co dflags (TransCo c1 c2) = C.TransCoercion (make_co dflags c1) (make_co dflags c2) -make_co dflags (NthCo d co) = C.NthCoercion d (make_co dflags co) -make_co dflags (LRCo lr co) = C.LRCoercion (make_lr lr) (make_co dflags co) -make_co dflags (InstCo co ty) = C.InstCoercion (make_co dflags co) (make_ty dflags ty) -make_co dflags (SubCo co) = C.SubCoercion (make_co dflags co) -make_co _ (AxiomRuleCo {}) = panic "make_co AxiomRuleCo: not yet implemented" - - -make_lr :: LeftOrRight -> C.LeftOrRight -make_lr CLeft = C.CLeft -make_lr CRight = C.CRight - -make_role :: Role -> C.Role -make_role Nominal = C.Nominal -make_role Representational = C.Representational -make_role Phantom = C.Phantom - -------- -isALocal :: Name -> CoreM Bool -isALocal vName = do - modName <- liftM cs_module ask - return $ case 
nameModule_maybe vName of - -- Not sure whether isInternalName corresponds to "local"ness - -- in the External Core sense; need to re-read the spec. - Just m | m == modName -> isInternalName vName - _ -> False -\end{code} - - - - diff --git a/compiler/coreSyn/PprCore.lhs b/compiler/coreSyn/PprCore.lhs index 35c0630736..f86a911ede 100644 --- a/compiler/coreSyn/PprCore.lhs +++ b/compiler/coreSyn/PprCore.lhs @@ -121,7 +121,7 @@ ppr_expr add_par (Cast expr co) if gopt Opt_SuppressCoercions dflags then ptext (sLit "...") else parens $ - sep [ppr co, dcolon <+> pprEqPred (coercionKind co)] + sep [ppr co, dcolon <+> ppr (coercionType co)] ppr_expr add_par expr@(Lam _ _) diff --git a/compiler/coreSyn/PprExternalCore.lhs b/compiler/coreSyn/PprExternalCore.lhs deleted file mode 100644 index 7fd3ac1d65..0000000000 --- a/compiler/coreSyn/PprExternalCore.lhs +++ /dev/null @@ -1,260 +0,0 @@ -% -% (c) The University of Glasgow 2001-2006 -% - -\begin{code} -{-# OPTIONS_GHC -fno-warn-orphans #-} - -module PprExternalCore () where - -import Encoding -import ExternalCore - -import Pretty -import Data.Char -import Data.Ratio - -instance Show Module where - showsPrec _ m = shows (pmodule m) - -instance Show Tdef where - showsPrec _ t = shows (ptdef t) - -instance Show Cdef where - showsPrec _ c = shows (pcdef c) - -instance Show Vdefg where - showsPrec _ v = shows (pvdefg v) - -instance Show Exp where - showsPrec _ e = shows (pexp e) - -instance Show Alt where - showsPrec _ a = shows (palt a) - -instance Show Ty where - showsPrec _ t = shows (pty t) - -instance Show Kind where - showsPrec _ k = shows (pkind k) - -instance Show Lit where - showsPrec _ l = shows (plit l) - - -indent :: Doc -> Doc -indent = nest 2 - -pmodule :: Module -> Doc -pmodule (Module mname tdefs vdefgs) = - (text "%module" <+> text mname) - $$ indent ((vcat (map ((<> char ';') . ptdef) tdefs)) - $$ (vcat (map ((<> char ';') . pvdefg) vdefgs))) - -ptdef :: Tdef -> Doc -ptdef (Data tcon tbinds cdefs) = - (text "%data" <+> pqname tcon <+> (hsep (map ptbind tbinds)) <+> char '=') - $$ indent (braces ((vcat (punctuate (char ';') (map pcdef cdefs))))) - -ptdef (Newtype tcon coercion tbinds rep) = - text "%newtype" <+> pqname tcon <+> pqname coercion - <+> (hsep (map ptbind tbinds)) $$ indent repclause - where repclause = char '=' <+> pty rep - -pcdef :: Cdef -> Doc -pcdef (Constr dcon tbinds tys) = - (pqname dcon) <+> (sep [hsep (map pattbind tbinds),sep (map paty tys)]) -pcdef (GadtConstr dcon ty) = - (pqname dcon) <+> text "::" <+> pty ty - -pname :: Id -> Doc -pname id = text (zEncodeString id) - -pqname :: Qual Id -> Doc -pqname ("",id) = pname id -pqname (m,id) = text m <> char '.' <> pname id - -ptbind, pattbind :: Tbind -> Doc -ptbind (t,Klifted) = pname t -ptbind (t,k) = parens (pname t <> text "::" <> pkind k) - -pattbind (t,k) = char '@' <> ptbind (t,k) - -pakind, pkind :: Kind -> Doc -pakind (Klifted) = char '*' -pakind (Kunlifted) = char '#' -pakind (Kopen) = char '?' 
-pakind k = parens (pkind k) - -pkind (Karrow k1 k2) = parens (pakind k1 <> text "->" <> pkind k2) -pkind k = pakind k - -paty, pbty, pty :: Ty -> Doc --- paty: print in parens, if non-atomic (like a name) --- pbty: print in parens, if arrow (used only for lhs of arrow) --- pty: not in parens -paty (Tvar n) = pname n -paty (Tcon c) = pqname c -paty t = parens (pty t) - -pbty (Tapp(Tapp(Tcon tc) t1) t2) | tc == tcArrow = parens(fsep [pbty t1, text "->",pty t2]) -pbty t = paty t - -pty (Tapp(Tapp(Tcon tc) t1) t2) | tc == tcArrow = fsep [pbty t1, text "->",pty t2] -pty (Tforall tb t) = text "%forall" <+> pforall [tb] t -pty ty@(Tapp {}) = pappty ty [] -pty ty@(Tvar {}) = paty ty -pty ty@(Tcon {}) = paty ty - -pappty :: Ty -> [Ty] -> Doc -pappty (Tapp t1 t2) ts = pappty t1 (t2:ts) -pappty t ts = sep (map paty (t:ts)) - -pforall :: [Tbind] -> Ty -> Doc -pforall tbs (Tforall tb t) = pforall (tbs ++ [tb]) t -pforall tbs t = hsep (map ptbind tbs) <+> char '.' <+> pty t - -paco, pbco, pco :: Coercion -> Doc -paco (ReflCoercion r ty) = char '<' <> pty ty <> text ">_" <> prole r -paco (TyConAppCoercion r qtc []) = pqname qtc <> char '_' <> prole r -paco (AxiomCoercion qtc i []) = pqname qtc <> char '[' <> int i <> char ']' -paco (CoVarCoercion cv) = pname cv -paco c = parens (pco c) - -pbco (TyConAppCoercion _ arr [co1, co2]) - | arr == tcArrow - = parens (fsep [pbco co1, text "->", pco co2]) -pbco co = paco co - -pco c@(ReflCoercion {}) = paco c -pco (SymCoercion co) = sep [text "%sub", paco co] -pco (TransCoercion co1 co2) = sep [text "%trans", paco co1, paco co2] -pco (TyConAppCoercion _ arr [co1, co2]) - | arr == tcArrow = fsep [pbco co1, text "->", pco co2] -pco (TyConAppCoercion r qtc cos) = parens (pqname qtc <+> sep (map paco cos)) <> char '_' <> prole r -pco co@(AppCoercion {}) = pappco co [] -pco (ForAllCoercion tb co) = text "%forall" <+> pforallco [tb] co -pco co@(CoVarCoercion {}) = paco co -pco (UnivCoercion r ty1 ty2) = sep [text "%univ", prole r, paty ty1, paty ty2] -pco (InstCoercion co ty) = sep [text "%inst", paco co, paty ty] -pco (NthCoercion i co) = sep [text "%nth", int i, paco co] -pco (AxiomCoercion qtc i cos) = pqname qtc <> char '[' <> int i <> char ']' <+> sep (map paco cos) -pco (LRCoercion CLeft co) = sep [text "%left", paco co] -pco (LRCoercion CRight co) = sep [text "%right", paco co] -pco (SubCoercion co) = sep [text "%sub", paco co] - -pappco :: Coercion -> [Coercion ] -> Doc -pappco (AppCoercion co1 co2) cos = pappco co1 (co2:cos) -pappco co cos = sep (map paco (co:cos)) - -pforallco :: [Tbind] -> Coercion -> Doc -pforallco tbs (ForAllCoercion tb co) = pforallco (tbs ++ [tb]) co -pforallco tbs co = hsep (map ptbind tbs) <+> char '.' <+> pco co - -prole :: Role -> Doc -prole Nominal = char 'N' -prole Representational = char 'R' -prole Phantom = char 'P' - -pvdefg :: Vdefg -> Doc -pvdefg (Rec vdefs) = text "%rec" $$ braces (indent (vcat (punctuate (char ';') (map pvdef vdefs)))) -pvdefg (Nonrec vdef) = pvdef vdef - -pvdef :: Vdef -> Doc --- TODO: Think about whether %local annotations are actually needed. --- Right now, the local flag is never used, because the Core doc doesn't --- explain the meaning of %local. 
-pvdef (_l,v,t,e) = sep [(pqname v <+> text "::" <+> pty t <+> char '='), - indent (pexp e)] - -paexp, pfexp, pexp :: Exp -> Doc -paexp (Var x) = pqname x -paexp (Dcon x) = pqname x -paexp (Lit l) = plit l -paexp e = parens(pexp e) - -plamexp :: [Bind] -> Exp -> Doc -plamexp bs (Lam b e) = plamexp (bs ++ [b]) e -plamexp bs e = sep [sep (map pbind bs) <+> text "->", - indent (pexp e)] - -pbind :: Bind -> Doc -pbind (Tb tb) = char '@' <+> ptbind tb -pbind (Vb vb) = pvbind vb - -pfexp (App e1 e2) = pappexp e1 [Left e2] -pfexp (Appt e t) = pappexp e [Right t] -pfexp e = paexp e - -pappexp :: Exp -> [Either Exp Ty] -> Doc -pappexp (App e1 e2) as = pappexp e1 (Left e2:as) -pappexp (Appt e t) as = pappexp e (Right t:as) -pappexp e as = fsep (paexp e : map pa as) - where pa (Left e) = paexp e - pa (Right t) = char '@' <+> paty t - -pexp (Lam b e) = char '\\' <+> plamexp [b] e -pexp (Let vd e) = (text "%let" <+> pvdefg vd) $$ (text "%in" <+> pexp e) -pexp (Case e vb ty alts) = sep [text "%case" <+> paty ty <+> paexp e, - text "%of" <+> pvbind vb] - $$ (indent (braces (vcat (punctuate (char ';') (map palt alts))))) -pexp (Cast e co) = (text "%cast" <+> parens (pexp e)) $$ paco co -pexp (Tick s e) = (text "%source" <+> pstring s) $$ pexp e -pexp (External n cc t) = (text "%external" <+> text cc <+> pstring n) $$ paty t -pexp (DynExternal cc t) = (text "%dynexternal" <+> text cc) $$ paty t -pexp (Label n) = (text "%label" <+> pstring n) -pexp e = pfexp e - -pvbind :: Vbind -> Doc -pvbind (x,t) = parens(pname x <> text "::" <> pty t) - -palt :: Alt -> Doc -palt (Acon c tbs vbs e) = - sep [pqname c, - sep (map pattbind tbs), - sep (map pvbind vbs) <+> text "->"] - $$ indent (pexp e) -palt (Alit l e) = - (plit l <+> text "->") - $$ indent (pexp e) -palt (Adefault e) = - (text "%_ ->") - $$ indent (pexp e) - -plit :: Lit -> Doc -plit (Lint i t) = parens (integer i <> text "::" <> pty t) --- we use (text (show (numerator r))) (and the same for denominator) --- because "(rational r)" was printing out things like "2.0e-2" (which --- isn't External Core), and (text (show r)) was printing out things --- like "((-1)/5)" which isn't either (it should be "(-1/5)"). -plit (Lrational r t) = parens (text (show (numerator r)) <+> char '%' - <+> text (show (denominator r)) <> text "::" <> pty t) -plit (Lchar c t) = parens (text ("\'" ++ escape [c] ++ "\'") <> text "::" <> pty t) --- This is a little messy. We shouldn't really be going via String. -plit (Lstring bs t) = parens (pstring str <> text "::" <> pty t) - where str = map (chr . fromIntegral) bs - -pstring :: String -> Doc -pstring s = doubleQuotes(text (escape s)) - -escape :: String -> String -escape s = foldr f [] (map ord s) - where - f cv rest - | cv > 0xFF = '\\':'x':hs ++ rest - | (cv < 0x20 || cv > 0x7e || cv == 0x22 || cv == 0x27 || cv == 0x5c) = - '\\':'x':h1:h0:rest - where (q1,r1) = quotRem cv 16 - h1 = intToDigit q1 - h0 = intToDigit r1 - hs = dropWhile (=='0') $ reverse $ mkHex cv - mkHex 0 = "" - mkHex cv = intToDigit r : mkHex q - where (q,r) = quotRem cv 16 - f cv rest = (chr cv):rest - -\end{code} - - - - diff --git a/compiler/coreSyn/TrieMap.lhs b/compiler/coreSyn/TrieMap.lhs index ac04adab1b..2744c5d0b8 100644 --- a/compiler/coreSyn/TrieMap.lhs +++ b/compiler/coreSyn/TrieMap.lhs @@ -4,14 +4,14 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See -- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces -- for details -{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE RankNTypes, TypeFamilies #-} module TrieMap( CoreMap, emptyCoreMap, extendCoreMap, lookupCoreMap, foldCoreMap, TypeMap, emptyTypeMap, extendTypeMap, lookupTypeMap, foldTypeMap, diff --git a/compiler/deSugar/Check.lhs b/compiler/deSugar/Check.lhs index c0fe9c03e3..e07a70fc65 100644 --- a/compiler/deSugar/Check.lhs +++ b/compiler/deSugar/Check.lhs @@ -5,6 +5,8 @@ % Author: Juan J. Quintela <quintela@krilin.dc.fi.udc.es> \begin{code} +{-# LANGUAGE CPP #-} + module Check ( check , ExhaustivePat ) where #include "HsVersions.h" @@ -21,7 +23,6 @@ import Name import TysWiredIn import PrelNames import TyCon -import Type import SrcLoc import UniqSet import Util @@ -123,7 +124,7 @@ untidy_message :: (Name, [HsLit]) -> (Name, [HsLit]) untidy_message (string, lits) = (string, map untidy_lit lits) \end{code} -The function @untidy@ does the reverse work of the @tidy_pat@ funcion. +The function @untidy@ does the reverse work of the @tidy_pat@ function. \begin{code} @@ -144,7 +145,7 @@ untidy b (L loc p) = L loc (untidy' b p) untidy' _ p@(ConPatIn _ (PrefixCon [])) = p untidy' b (ConPatIn name ps) = pars b (L loc (ConPatIn name (untidy_con ps))) untidy' _ (ListPat pats ty Nothing) = ListPat (map untidy_no_pars pats) ty Nothing - untidy' _ (TuplePat pats box ty) = TuplePat (map untidy_no_pars pats) box ty + untidy' _ (TuplePat pats box tys) = TuplePat (map untidy_no_pars pats) box tys untidy' _ (ListPat _ _ (Just _)) = panic "Check.untidy: Overloaded ListPat" untidy' _ (PArrPat _ _) = panic "Check.untidy: Shouldn't get a parallel array here!" 
untidy' _ (SigPatIn _ _) = panic "Check.untidy: SigPat" @@ -468,8 +469,8 @@ get_unused_cons used_cons = ASSERT( not (null used_cons) ) unused_cons where used_set :: UniqSet DataCon used_set = mkUniqSet [d | ConPatOut{ pat_con = L _ (RealDataCon d) } <- used_cons] - (ConPatOut { pat_ty = ty }) = head used_cons - Just (ty_con, inst_tys) = splitTyConApp_maybe ty + (ConPatOut { pat_con = L _ (RealDataCon con1), pat_arg_tys = inst_tys }) = head used_cons + ty_con = dataConTyCon con1 unused_cons = filterOut is_used (tyConDataCons ty_con) is_used con = con `elementOfUniqSet` used_set || dataConCannotMatch inst_tys con @@ -593,9 +594,9 @@ make_con (ConPatOut{ pat_con = L _ (RealDataCon id) }) (lp:lq:ps, constraints) | isInfixCon id = (nlInfixConPat (getName id) lp lq : ps, constraints) where q = unLoc lq -make_con (ConPatOut{ pat_con = L _ (RealDataCon id), pat_args = PrefixCon pats, pat_ty = ty }) (ps, constraints) - | isTupleTyCon tc = (noLoc (TuplePat pats_con (tupleTyConBoxity tc) ty) : rest_pats, constraints) - | isPArrFakeCon id = (noLoc (PArrPat pats_con placeHolderType) : rest_pats, constraints) +make_con (ConPatOut{ pat_con = L _ (RealDataCon id), pat_args = PrefixCon pats, pat_arg_tys = tys }) (ps, constraints) + | isTupleTyCon tc = (noLoc (TuplePat pats_con (tupleTyConBoxity tc) tys) : rest_pats, constraints) + | isPArrFakeCon id = (noLoc (PArrPat pats_con placeHolderType) : rest_pats, constraints) | otherwise = (nlConPat name pats_con : rest_pats, constraints) where name = getName id @@ -696,17 +697,16 @@ tidy_pat (CoPat _ pat _) = tidy_pat pat tidy_pat (NPlusKPat id _ _ _) = WildPat (idType (unLoc id)) tidy_pat (ViewPat _ _ ty) = WildPat ty tidy_pat (ListPat _ _ (Just (ty,_))) = WildPat ty -tidy_pat (ConPatOut { pat_con = L _ PatSynCon{}, pat_ty = ty }) - = WildPat ty +tidy_pat (ConPatOut { pat_con = L _ (PatSynCon syn), pat_arg_tys = tys }) + = WildPat (patSynInstResTy syn tys) tidy_pat pat@(ConPatOut { pat_con = L _ con, pat_args = ps }) = pat { pat_args = tidy_con con ps } tidy_pat (ListPat ps ty Nothing) - = unLoc $ foldr (\ x y -> mkPrefixConPat consDataCon [x,y] list_ty) - (mkNilPat list_ty) + = unLoc $ foldr (\ x y -> mkPrefixConPat consDataCon [x,y] [ty]) + (mkNilPat ty) (map tidy_lpat ps) - where list_ty = mkListTy ty -- introduce fake parallel array constructors to be able to handle parallel -- arrays with the existing machinery for constructor pattern @@ -714,11 +714,11 @@ tidy_pat (ListPat ps ty Nothing) tidy_pat (PArrPat ps ty) = unLoc $ mkPrefixConPat (parrFakeCon (length ps)) (map tidy_lpat ps) - (mkPArrTy ty) + [ty] -tidy_pat (TuplePat ps boxity ty) +tidy_pat (TuplePat ps boxity tys) = unLoc $ mkPrefixConPat (tupleCon (boxityNormalTupleSort boxity) arity) - (map tidy_lpat ps) ty + (map tidy_lpat ps) tys where arity = length ps @@ -735,8 +735,8 @@ tidy_lit_pat :: HsLit -> Pat Id -- overlap with each other, or even explicit lists of Chars. 
tidy_lit_pat lit | HsString s <- lit - = unLoc $ foldr (\c pat -> mkPrefixConPat consDataCon [mkCharLitPat c, pat] stringTy) - (mkPrefixConPat nilDataCon [] stringTy) (unpackFS s) + = unLoc $ foldr (\c pat -> mkPrefixConPat consDataCon [mkCharLitPat c, pat] [charTy]) + (mkPrefixConPat nilDataCon [] [charTy]) (unpackFS s) | otherwise = tidyLitPat lit diff --git a/compiler/deSugar/Coverage.lhs b/compiler/deSugar/Coverage.lhs index 6bdc61d9c2..e646667651 100644 --- a/compiler/deSugar/Coverage.lhs +++ b/compiler/deSugar/Coverage.lhs @@ -3,6 +3,8 @@ % (c) University of Glasgow, 2007 % \begin{code} +{-# LANGUAGE NondecreasingIndentation #-} + module Coverage (addTicksToBinds, hpcInitCode) where import Type diff --git a/compiler/deSugar/Desugar.lhs b/compiler/deSugar/Desugar.lhs index cd75de9a3a..3160b35f15 100644 --- a/compiler/deSugar/Desugar.lhs +++ b/compiler/deSugar/Desugar.lhs @@ -6,6 +6,8 @@ The Desugarer: turning HsSyn into Core. \begin{code} +{-# LANGUAGE CPP #-} + module Desugar ( deSugar, deSugarExpr ) where import DynFlags @@ -50,8 +52,6 @@ import OrdList import Data.List import Data.IORef import Control.Monad( when ) -import Data.Maybe ( mapMaybe ) -import UniqFM \end{code} %************************************************************************ @@ -123,27 +123,20 @@ deSugar hsc_env ; let hpc_init | gopt Opt_Hpc dflags = hpcInitCode mod ds_hpc_info | otherwise = empty - ; let patsyn_defs = [(patSynId ps, ps) | ps <- patsyns] ; return ( ds_ev_binds , foreign_prs `appOL` core_prs `appOL` spec_prs , spec_rules ++ ds_rules, ds_vects - , ds_fords `appendStubC` hpc_init - , patsyn_defs) } + , ds_fords `appendStubC` hpc_init) } ; case mb_res of { Nothing -> return (msgs, Nothing) ; - Just (ds_ev_binds, all_prs, all_rules, vects0, ds_fords, patsyn_defs) -> do + Just (ds_ev_binds, all_prs, all_rules, vects0, ds_fords) -> do do { -- Add export flags to bindings keep_alive <- readIORef keep_var - ; let (rules_for_locals, rules_for_imps) - = partition isLocalRule all_rules - final_patsyns = addExportFlagsAndRules target export_set keep_alive [] patsyn_defs - exp_patsyn_wrappers = mapMaybe (patSynWrapper . snd) final_patsyns - exp_patsyn_matchers = map (patSynMatcher . snd) final_patsyns - keep_alive' = addListToUFM keep_alive (map (\x -> (x, getName x)) (exp_patsyn_wrappers ++ exp_patsyn_matchers)) - final_prs = addExportFlagsAndRules target - export_set keep_alive' rules_for_locals (fromOL all_prs) + ; let (rules_for_locals, rules_for_imps) = partition isLocalRule all_rules + final_prs = addExportFlagsAndRules target export_set keep_alive + rules_for_locals (fromOL all_prs) final_pgm = combineEvBinds ds_ev_binds final_prs -- Notice that we put the whole lot in a big Rec, even the foreign binds @@ -187,7 +180,7 @@ deSugar hsc_env mg_fam_insts = fam_insts, mg_inst_env = inst_env, mg_fam_inst_env = fam_inst_env, - mg_patsyns = map snd . filter (isExportedId . fst) $ final_patsyns, + mg_patsyns = filter ((`elemNameSet` export_set) . patSynName) patsyns, mg_rules = ds_rules_for_imps, mg_binds = ds_binds, mg_foreign = ds_fords, diff --git a/compiler/deSugar/DsArrows.lhs b/compiler/deSugar/DsArrows.lhs index f87877681c..1bbcc05e40 100644 --- a/compiler/deSugar/DsArrows.lhs +++ b/compiler/deSugar/DsArrows.lhs @@ -6,7 +6,8 @@ Desugaring arrow commands \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/deSugar/DsBinds.lhs b/compiler/deSugar/DsBinds.lhs index 1dbf530123..9691b99975 100644 --- a/compiler/deSugar/DsBinds.lhs +++ b/compiler/deSugar/DsBinds.lhs @@ -10,7 +10,8 @@ in that the @Rec@/@NonRec@/etc structure is thrown away (whereas at lower levels it is preserved with @let@/@letrec@s). \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/deSugar/DsCCall.lhs b/compiler/deSugar/DsCCall.lhs index 80f2ec525f..217a4ce7c9 100644 --- a/compiler/deSugar/DsCCall.lhs +++ b/compiler/deSugar/DsCCall.lhs @@ -6,7 +6,8 @@ Desugaring foreign calls \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/deSugar/DsExpr.lhs b/compiler/deSugar/DsExpr.lhs index 859309d592..4eadef69b8 100644 --- a/compiler/deSugar/DsExpr.lhs +++ b/compiler/deSugar/DsExpr.lhs @@ -6,6 +6,8 @@ Desugaring exporessions. \begin{code} +{-# LANGUAGE CPP #-} + module DsExpr ( dsExpr, dsLExpr, dsLocalBinds, dsValBinds, dsLit ) where #include "HsVersions.h" @@ -548,7 +550,7 @@ dsExpr expr@(RecordUpd record_expr (HsRecFields { rec_flds = fields }) , pat_dicts = eqs_vars ++ theta_vars , pat_binds = emptyTcEvBinds , pat_args = PrefixCon $ map nlVarPat arg_ids - , pat_ty = in_ty + , pat_arg_tys = in_inst_tys , pat_wrap = idHsWrapper } ; let wrapped_rhs | null eq_spec = rhs | otherwise = mkLHsWrap (mkWpCast (mkTcSubCo wrap_co)) rhs diff --git a/compiler/deSugar/DsForeign.lhs b/compiler/deSugar/DsForeign.lhs index e2f4f4ff3c..0654ebc983 100644 --- a/compiler/deSugar/DsForeign.lhs +++ b/compiler/deSugar/DsForeign.lhs @@ -6,6 +6,8 @@ Desugaring foreign declarations (see also DsCCall). 
\begin{code} +{-# LANGUAGE CPP #-} + module DsForeign ( dsForeigns , dsForeigns' , dsFImport, dsCImport, dsFCall, dsPrimCall diff --git a/compiler/deSugar/DsGRHSs.lhs b/compiler/deSugar/DsGRHSs.lhs index 4573e54ce0..a571e807d4 100644 --- a/compiler/deSugar/DsGRHSs.lhs +++ b/compiler/deSugar/DsGRHSs.lhs @@ -6,6 +6,8 @@ Matching guarded right-hand-sides (GRHSs) \begin{code} +{-# LANGUAGE CPP #-} + module DsGRHSs ( dsGuarded, dsGRHSs, dsGRHS ) where #include "HsVersions.h" @@ -61,10 +63,8 @@ dsGRHSs hs_ctx _ (GRHSs grhss binds) rhs_ty = ASSERT( notNull grhss ) do { match_results <- mapM (dsGRHS hs_ctx rhs_ty) grhss ; let match_result1 = foldr1 combineMatchResults match_results - match_result2 = adjustMatchResultDs - (\e -> dsLocalBinds binds e) - match_result1 - -- NB: nested dsLet inside matchResult + match_result2 = adjustMatchResultDs (dsLocalBinds binds) match_result1 + -- NB: nested dsLet inside matchResult ; return match_result2 } dsGRHS :: HsMatchContext Name -> Type -> LGRHS Id (LHsExpr Id) -> DsM MatchResult diff --git a/compiler/deSugar/DsListComp.lhs b/compiler/deSugar/DsListComp.lhs index a1131a8126..2111c95f82 100644 --- a/compiler/deSugar/DsListComp.lhs +++ b/compiler/deSugar/DsListComp.lhs @@ -6,7 +6,7 @@ Desugaring list comprehensions, monad comprehensions and array comprehensions \begin{code} -{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE CPP, NamedFieldPuns #-} module DsListComp ( dsListComp, dsPArrComp, dsMonadComp ) where diff --git a/compiler/deSugar/DsMeta.hs b/compiler/deSugar/DsMeta.hs index 73c1adfdc8..adfc0f688f 100644 --- a/compiler/deSugar/DsMeta.hs +++ b/compiler/deSugar/DsMeta.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow 2006 @@ -394,10 +396,10 @@ repTyFamInstD decl@(TyFamInstDecl { tfid_eqn = eqn }) ; repTySynInst tc eqn1 } repTyFamEqn :: LTyFamInstEqn Name -> DsM (Core TH.TySynEqnQ) -repTyFamEqn (L loc (TyFamInstEqn { tfie_pats = HsWB { hswb_cts = tys - , hswb_kvs = kv_names - , hswb_tvs = tv_names } - , tfie_rhs = rhs })) +repTyFamEqn (L loc (TyFamEqn { tfe_pats = HsWB { hswb_cts = tys + , hswb_kvs = kv_names + , hswb_tvs = tv_names } + , tfe_rhs = rhs })) = do { let hs_tvs = HsQTvs { hsq_kvs = kv_names , hsq_tvs = userHsTyVarBndrs loc tv_names } -- Yuk ; addTyClTyVarBinds hs_tvs $ \ _ -> @@ -705,12 +707,14 @@ addTyVarBinds :: LHsTyVarBndrs Name -- the binders to -- the computations passed as the second argument is executed in that extended -- meta environment and gets the *new* names on Core-level as an argument -addTyVarBinds tvs m - = do { freshNames <- mkGenSyms (hsLKiTyVarNames tvs) - ; term <- addBinds freshNames $ - do { kbs <- repList tyVarBndrTyConName mk_tv_bndr (hsQTvBndrs tvs `zip` freshNames) +addTyVarBinds (HsQTvs { hsq_kvs = kvs, hsq_tvs = tvs }) m + = do { fresh_kv_names <- mkGenSyms kvs + ; fresh_tv_names <- mkGenSyms (map hsLTyVarName tvs) + ; let fresh_names = fresh_kv_names ++ fresh_tv_names + ; term <- addBinds fresh_names $ + do { kbs <- repList tyVarBndrTyConName mk_tv_bndr (tvs `zip` fresh_tv_names) ; m kbs } - ; wrapGenSyms freshNames term } + ; wrapGenSyms fresh_names term } where mk_tv_bndr (tv, (_,v)) = repTyVarBndrWithKind tv (coreVar v) diff --git a/compiler/deSugar/DsMonad.lhs b/compiler/deSugar/DsMonad.lhs index b590f4b2d2..c017a7cc01 100644 --- a/compiler/deSugar/DsMonad.lhs +++ b/compiler/deSugar/DsMonad.lhs @@ -6,6 +6,8 @@ @DsMonad@: monadery used in desugaring \begin{code} +{-# LANGUAGE FlexibleInstances #-} + 
module DsMonad ( DsM, mapM, mapAndUnzipM, initDs, initDsTc, fixDs, diff --git a/compiler/deSugar/DsUtils.lhs b/compiler/deSugar/DsUtils.lhs index 2ad70c67d3..c52b917efd 100644 --- a/compiler/deSugar/DsUtils.lhs +++ b/compiler/deSugar/DsUtils.lhs @@ -8,7 +8,8 @@ Utilities for desugaring This module exports some utility functions of no great interest. \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -64,7 +65,6 @@ import ConLike import DataCon import PatSyn import Type -import Coercion import TysPrim import TysWiredIn import BasicTypes @@ -638,12 +638,13 @@ mkSelectorBinds ticks pat val_expr -- efficient too. -- For the error message we make one error-app, to avoid duplication. - -- But we need it at different types... so we use coerce for that - ; err_expr <- mkErrorAppDs iRREFUT_PAT_ERROR_ID unitTy (ppr pat) - ; err_var <- newSysLocalDs unitTy - ; binds <- zipWithM (mk_bind val_var err_var) ticks' binders - ; return ( (val_var, val_expr) : - (err_var, err_expr) : + -- But we need it at different types, so we make it polymorphic: + -- err_var = /\a. iRREFUT_PAT_ERR a "blah blah blah" + ; err_app <- mkErrorAppDs iRREFUT_PAT_ERROR_ID alphaTy (ppr pat) + ; err_var <- newSysLocalDs (mkForAllTy alphaTyVar alphaTy) + ; binds <- zipWithM (mk_bind val_var err_var) ticks' binders + ; return ( (val_var, val_expr) : + (err_var, Lam alphaTyVar err_app) : binds ) } | otherwise @@ -665,14 +666,13 @@ mkSelectorBinds ticks pat val_expr mk_bind scrut_var err_var tick bndr_var = do -- (mk_bind sv err_var) generates - -- bv = case sv of { pat -> bv; other -> coerce (type-of-bv) err_var } + -- bv = case sv of { pat -> bv; other -> err_var @ type-of-bv } -- Remember, pat binds bv rhs_expr <- matchSimply (Var scrut_var) PatBindRhs pat (Var bndr_var) error_expr return (bndr_var, mkOptTickBox tick rhs_expr) where - error_expr = mkCast (Var err_var) co - co = mkUnsafeCo (exprType (Var err_var)) (idType bndr_var) + error_expr = Var err_var `App` Type (idType bndr_var) is_simple_lpat p = is_simple_pat (unLoc p) @@ -709,8 +709,7 @@ mkLHsVarPatTup bs = mkLHsPatTup (map nlVarPat bs) mkVanillaTuplePat :: [OutPat Id] -> Boxity -> Pat Id -- A vanilla tuple pattern simply gets its type from its sub-patterns -mkVanillaTuplePat pats box - = TuplePat pats box (mkTupleTy (boxityNormalTupleSort box) (map hsLPatType pats)) +mkVanillaTuplePat pats box = TuplePat pats box (map hsLPatType pats) -- The Big equivalents for the source tuple expressions mkBigLHsVarTup :: [Id] -> LHsExpr Id diff --git a/compiler/deSugar/Match.lhs b/compiler/deSugar/Match.lhs index b42a720c32..a14027862a 100644 --- a/compiler/deSugar/Match.lhs +++ b/compiler/deSugar/Match.lhs @@ -6,6 +6,8 @@ The @match@ function \begin{code} +{-# LANGUAGE CPP #-} + module Match ( match, matchEquations, matchWrapper, matchSimply, matchSinglePat ) where #include "HsVersions.h" @@ -552,9 +554,8 @@ tidy1 v (LazyPat pat) tidy1 _ (ListPat pats ty Nothing) = return (idDsWrapper, unLoc list_ConPat) where - list_ty = mkListTy ty - list_ConPat = foldr (\ x y -> mkPrefixConPat consDataCon [x, y] list_ty) - (mkNilPat list_ty) + list_ConPat = foldr (\ x y -> mkPrefixConPat consDataCon [x, y] [ty]) + (mkNilPat ty) pats -- Introduce fake parallel array constructors to be able to handle parallel @@ -563,13 +564,13 @@ tidy1 _ (PArrPat 
pats ty) = return (idDsWrapper, unLoc parrConPat) where arity = length pats - parrConPat = mkPrefixConPat (parrFakeCon arity) pats (mkPArrTy ty) + parrConPat = mkPrefixConPat (parrFakeCon arity) pats [ty] -tidy1 _ (TuplePat pats boxity ty) +tidy1 _ (TuplePat pats boxity tys) = return (idDsWrapper, unLoc tuple_ConPat) where arity = length pats - tuple_ConPat = mkPrefixConPat (tupleCon (boxityNormalTupleSort boxity) arity) pats ty + tuple_ConPat = mkPrefixConPat (tupleCon (boxityNormalTupleSort boxity) arity) pats tys -- LitPats: we *might* be able to replace these w/ a simpler form tidy1 _ (LitPat lit) diff --git a/compiler/deSugar/MatchCon.lhs b/compiler/deSugar/MatchCon.lhs index 2b51638bf3..8e581f66e2 100644 --- a/compiler/deSugar/MatchCon.lhs +++ b/compiler/deSugar/MatchCon.lhs @@ -6,7 +6,8 @@ Pattern-matching constructors \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -124,7 +125,7 @@ matchOneConLike :: [Id] -> [EquationInfo] -> DsM (CaseAlt ConLike) matchOneConLike vars ty (eqn1 : eqns) -- All eqns for a single constructor - = do { arg_vars <- selectConMatchVars arg_tys args1 + = do { arg_vars <- selectConMatchVars val_arg_tys args1 -- Use the first equation as a source of -- suggestions for the new variables @@ -140,27 +141,24 @@ matchOneConLike vars ty (eqn1 : eqns) -- All eqns for a single constructor alt_wrapper = wrapper1, alt_result = foldr1 combineMatchResults match_results } } where - ConPatOut { pat_con = L _ con1, pat_ty = pat_ty1, pat_wrap = wrapper1, + ConPatOut { pat_con = L _ con1, pat_arg_tys = arg_tys, pat_wrap = wrapper1, pat_tvs = tvs1, pat_dicts = dicts1, pat_args = args1 } = firstPat eqn1 fields1 = case con1 of - RealDataCon dcon1 -> dataConFieldLabels dcon1 - PatSynCon{} -> [] - - arg_tys = inst inst_tys - where - inst = case con1 of - RealDataCon dcon1 -> dataConInstOrigArgTys dcon1 - PatSynCon psyn1 -> patSynInstArgTys psyn1 - inst_tys = tcTyConAppArgs pat_ty1 ++ - mkTyVarTys (takeList exVars tvs1) - -- Newtypes opaque, hence tcTyConAppArgs + RealDataCon dcon1 -> dataConFieldLabels dcon1 + PatSynCon{} -> [] + + val_arg_tys = case con1 of + RealDataCon dcon1 -> dataConInstOrigArgTys dcon1 inst_tys + PatSynCon psyn1 -> patSynInstArgTys psyn1 inst_tys + inst_tys = ASSERT( tvs1 `equalLength` ex_tvs ) + arg_tys ++ mkTyVarTys tvs1 -- dataConInstOrigArgTys takes the univ and existential tyvars -- and returns the types of the *value* args, which is what we want - where - exVars = case con1 of - RealDataCon dcon1 -> dataConExTyVars dcon1 - PatSynCon psyn1 -> patSynExTyVars psyn1 + + ex_tvs = case con1 of + RealDataCon dcon1 -> dataConExTyVars dcon1 + PatSynCon psyn1 -> patSynExTyVars psyn1 match_group :: [Id] -> [(ConArgPats, EquationInfo)] -> DsM MatchResult -- All members of the group have compatible ConArgPats @@ -178,7 +176,7 @@ matchOneConLike vars ty (eqn1 : eqns) -- All eqns for a single constructor return ( wrapBinds (tvs `zip` tvs1) . wrapBinds (ds `zip` dicts1) . 
mkCoreLets ds_bind - , eqn { eqn_pats = conArgPats arg_tys args ++ pats } + , eqn { eqn_pats = conArgPats val_arg_tys args ++ pats } ) shift (_, (EqnInfo { eqn_pats = ps })) = pprPanic "matchOneCon/shift" (ppr ps) diff --git a/compiler/deSugar/MatchLit.lhs b/compiler/deSugar/MatchLit.lhs index 7429a613d9..350ed22d69 100644 --- a/compiler/deSugar/MatchLit.lhs +++ b/compiler/deSugar/MatchLit.lhs @@ -6,6 +6,8 @@ Pattern-matching literal patterns \begin{code} +{-# LANGUAGE CPP, ScopedTypeVariables #-} + module MatchLit ( dsLit, dsOverLit, hsLitKey, hsOverLitKey , tidyLitPat, tidyNPat , matchLiterals, matchNPlusKPats, matchNPats @@ -264,8 +266,8 @@ tidyLitPat :: HsLit -> Pat Id tidyLitPat (HsChar c) = unLoc (mkCharLitPat c) tidyLitPat (HsString s) | lengthFS s <= 1 -- Short string literals only - = unLoc $ foldr (\c pat -> mkPrefixConPat consDataCon [mkCharLitPat c, pat] stringTy) - (mkNilPat stringTy) (unpackFS s) + = unLoc $ foldr (\c pat -> mkPrefixConPat consDataCon [mkCharLitPat c, pat] [charTy]) + (mkNilPat charTy) (unpackFS s) -- The stringTy is the type of the whole pattern, not -- the type to instantiate (:) or [] with! tidyLitPat lit = LitPat lit @@ -297,7 +299,7 @@ tidyNPat tidy_lit_pat (OverLit val False _ ty) mb_neg _ | isStringTy ty, Just str_lit <- mb_str_lit = tidy_lit_pat (HsString str_lit) where mk_con_pat :: DataCon -> HsLit -> Pat Id - mk_con_pat con lit = unLoc (mkPrefixConPat con [noLoc $ LitPat lit] ty) + mk_con_pat con lit = unLoc (mkPrefixConPat con [noLoc $ LitPat lit] []) mb_int_lit :: Maybe Integer mb_int_lit = case (mb_neg, val) of diff --git a/compiler/ghc.cabal.in b/compiler/ghc.cabal.in index bf62ac3996..e6f86c97d9 100644 --- a/compiler/ghc.cabal.in +++ b/compiler/ghc.cabal.in @@ -18,7 +18,7 @@ Description: through this package. Category: Development Build-Type: Simple -Cabal-Version: >= 1.2.3 +Cabal-Version: >=1.10 Flag ghci Description: Build GHCi support. @@ -41,6 +41,7 @@ Flag stage3 Manual: True Library + Default-Language: Haskell2010 Exposed: False Build-Depends: base >= 4 && < 5, @@ -53,7 +54,9 @@ Library filepath >= 1 && < 1.4, Cabal, hpc, - transformers + transformers, + bin-package-db, + hoopl if flag(stage1) && impl(ghc < 7.5) Build-Depends: old-time >= 1 && < 1.2 @@ -70,16 +73,34 @@ Library CPP-Options: -DGHCI Include-Dirs: ../rts/dist/build @FFIIncludeDir@ - Build-Depends: bin-package-db - Build-Depends: hoopl - - Extensions: CPP, MagicHash, UnboxedTuples, PatternGuards, - ForeignFunctionInterface, EmptyDataDecls, - TypeSynonymInstances, MultiParamTypeClasses, - FlexibleInstances, RankNTypes, ScopedTypeVariables, - DeriveDataTypeable, BangPatterns - if impl(ghc >= 7.1) - Extensions: NondecreasingIndentation + Other-Extensions: + BangPatterns + CPP + DataKinds + DeriveDataTypeable + DeriveFoldable + DeriveFunctor + DeriveTraversable + DisambiguateRecordFields + ExplicitForAll + FlexibleContexts + FlexibleInstances + GADTs + GeneralizedNewtypeDeriving + MagicHash + MultiParamTypeClasses + NamedFieldPuns + NondecreasingIndentation + RankNTypes + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + Trustworthy + TupleSections + TypeFamilies + TypeSynonymInstances + UnboxedTuples + UndecidableInstances Include-Dirs: . 
parser utils @@ -96,8 +117,6 @@ Library c-sources: parser/cutils.c - - c-sources: ghci/keepCAFsForGHCi.c cbits/genSym.c @@ -232,11 +251,8 @@ Library CoreTidy CoreUnfold CoreUtils - ExternalCore MkCore - MkExternalCore PprCore - PprExternalCore Check Coverage Desugar @@ -303,12 +319,9 @@ Library TidyPgm Ctype HaddockUtils - LexCore Lexer OptCoercion Parser - ParserCore - ParserCoreUtils RdrHsSyn ForeignCall PrelInfo diff --git a/compiler/ghc.mk b/compiler/ghc.mk index 4977e28769..c236bcf7ff 100644 --- a/compiler/ghc.mk +++ b/compiler/ghc.mk @@ -99,8 +99,6 @@ endif @echo 'cGhcEnableTablesNextToCode = "$(GhcEnableTablesNextToCode)"' >> $@ @echo 'cLeadingUnderscore :: String' >> $@ @echo 'cLeadingUnderscore = "$(LeadingUnderscore)"' >> $@ - @echo 'cRAWCPP_FLAGS :: String' >> $@ - @echo 'cRAWCPP_FLAGS = "$(RAWCPP_FLAGS)"' >> $@ @echo 'cGHC_UNLIT_PGM :: String' >> $@ @echo 'cGHC_UNLIT_PGM = "$(utils/unlit_dist_PROG)"' >> $@ @echo 'cGHC_SPLIT_PGM :: String' >> $@ @@ -667,9 +665,9 @@ compiler_stage2_CONFIGURE_OPTS += --disable-library-for-ghci compiler_stage3_CONFIGURE_OPTS += --disable-library-for-ghci # after build-package, because that sets compiler_stage1_HC_OPTS: -compiler_stage1_HC_OPTS += $(GhcStage1HcOpts) -compiler_stage2_HC_OPTS += $(GhcStage2HcOpts) -compiler_stage3_HC_OPTS += $(GhcStage3HcOpts) +compiler_stage1_HC_OPTS += $(GhcHcOpts) $(GhcStage1HcOpts) +compiler_stage2_HC_OPTS += $(GhcHcOpts) $(GhcStage2HcOpts) +compiler_stage3_HC_OPTS += $(GhcHcOpts) $(GhcStage3HcOpts) ifneq "$(BINDIST)" "YES" diff --git a/compiler/ghci/ByteCodeAsm.lhs b/compiler/ghci/ByteCodeAsm.lhs index 9ec783a40d..52d6adde86 100644 --- a/compiler/ghci/ByteCodeAsm.lhs +++ b/compiler/ghci/ByteCodeAsm.lhs @@ -5,8 +5,8 @@ ByteCodeLink: Bytecode assembler and linker \begin{code} -{-# OPTIONS -optc-DNON_POSIX_SOURCE #-} -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE BangPatterns, CPP, MagicHash #-} +{-# OPTIONS_GHC -optc-DNON_POSIX_SOURCE #-} module ByteCodeAsm ( assembleBCOs, assembleBCO, diff --git a/compiler/ghci/ByteCodeGen.lhs b/compiler/ghci/ByteCodeGen.lhs index 6dfee5629a..d4a58044f5 100644 --- a/compiler/ghci/ByteCodeGen.lhs +++ b/compiler/ghci/ByteCodeGen.lhs @@ -5,7 +5,8 @@ ByteCodeGen: Generate bytecode from Core \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, MagicHash #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/ghci/ByteCodeInstr.lhs b/compiler/ghci/ByteCodeInstr.lhs index 005a430cd9..548c29f514 100644 --- a/compiler/ghci/ByteCodeInstr.lhs +++ b/compiler/ghci/ByteCodeInstr.lhs @@ -4,7 +4,8 @@ ByteCodeInstrs: Bytecode instruction definitions \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, MagicHash #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/ghci/ByteCodeItbls.lhs b/compiler/ghci/ByteCodeItbls.lhs index ce6bd01f16..7a7a62d980 100644 --- a/compiler/ghci/ByteCodeItbls.lhs +++ b/compiler/ghci/ByteCodeItbls.lhs @@ -4,7 +4,8 @@ ByteCodeItbls: Generate infotables for interpreter-made bytecodes \begin{code} -{-# OPTIONS -optc-DNON_POSIX_SOURCE #-} +{-# LANGUAGE CPP, MagicHash #-} +{-# OPTIONS_GHC -optc-DNON_POSIX_SOURCE #-} module ByteCodeItbls ( ItblEnv, ItblPtr(..), itblCode, mkITbls, peekItbl , StgInfoTable(..) diff --git a/compiler/ghci/ByteCodeLink.lhs b/compiler/ghci/ByteCodeLink.lhs index 4c484097f0..d508a1c5aa 100644 --- a/compiler/ghci/ByteCodeLink.lhs +++ b/compiler/ghci/ByteCodeLink.lhs @@ -5,7 +5,12 @@ ByteCodeLink: Bytecode assembler and linker \begin{code} {-# LANGUAGE BangPatterns #-} -{-# OPTIONS -optc-DNON_POSIX_SOURCE #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE MagicHash #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE UnboxedTuples #-} +{-# OPTIONS_GHC -optc-DNON_POSIX_SOURCE #-} module ByteCodeLink ( ClosureEnv, emptyClosureEnv, extendClosureEnv, diff --git a/compiler/ghci/Debugger.hs b/compiler/ghci/Debugger.hs index 0807bf17b5..4966714181 100644 --- a/compiler/ghci/Debugger.hs +++ b/compiler/ghci/Debugger.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE MagicHash #-} + ----------------------------------------------------------------------------- -- -- GHCi Interactive debugging commands diff --git a/compiler/ghci/DebuggerUtils.hs b/compiler/ghci/DebuggerUtils.hs index 3d73e69e2b..67767e41b9 100644 --- a/compiler/ghci/DebuggerUtils.hs +++ b/compiler/ghci/DebuggerUtils.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + module DebuggerUtils ( dataConInfoPtrToName, ) where diff --git a/compiler/ghci/Linker.lhs b/compiler/ghci/Linker.lhs index 274f2fbd44..0dbab24de7 100644 --- a/compiler/ghci/Linker.lhs +++ b/compiler/ghci/Linker.lhs @@ -2,15 +2,16 @@ % (c) The University of Glasgow 2005-2012 % \begin{code} +{-# LANGUAGE CPP, NondecreasingIndentation #-} +{-# OPTIONS_GHC -fno-cse #-} +-- -fno-cse is needed for GLOBAL_VAR's to behave properly + -- | The dynamic linker for GHCi. -- -- This module deals with the top-level issues of dynamic linking, -- calling the object-code linker and the byte-code linker where -- necessary. 
-{-# OPTIONS -fno-cse #-} --- -fno-cse is needed for GLOBAL_VAR's to behave properly - module Linker ( getHValue, showLinkerState, linkExpr, linkDecls, unload, withExtendedLinkEnv, extendLinkEnv, deleteFromLinkEnv, @@ -1208,7 +1209,9 @@ locateLib dflags is_hs dirs lib mk_hs_dyn_lib_path dir = dir </> mkHsSOName platform hs_dyn_lib_name so_name = mkSOName platform lib - mk_dyn_lib_path dir = dir </> so_name + mk_dyn_lib_path dir = case (arch, os) of + (ArchX86_64, OSSolaris2) -> dir </> ("64/" ++ so_name) + _ -> dir </> so_name findObject = liftM (fmap Object) $ findFile mk_obj_path dirs findDynObject = liftM (fmap Object) $ findFile mk_dyn_obj_path dirs @@ -1225,6 +1228,8 @@ locateLib dflags is_hs dirs lib Nothing -> g platform = targetPlatform dflags + arch = platformArch platform + os = platformOS platform searchForLibUsingGcc :: DynFlags -> String -> [FilePath] -> IO (Maybe FilePath) searchForLibUsingGcc dflags so dirs = do diff --git a/compiler/ghci/RtClosureInspect.hs b/compiler/ghci/RtClosureInspect.hs index 5e9bddca88..a2f9af92f1 100644 --- a/compiler/ghci/RtClosureInspect.hs +++ b/compiler/ghci/RtClosureInspect.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, ScopedTypeVariables, MagicHash, UnboxedTuples #-} + ----------------------------------------------------------------------------- -- -- GHC Interactive support for inspecting arbitrary closures at runtime @@ -6,7 +8,7 @@ -- ----------------------------------------------------------------------------- -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/hsSyn/Convert.lhs b/compiler/hsSyn/Convert.lhs index bcea29bea2..e22af3b947 100644 --- a/compiler/hsSyn/Convert.lhs +++ b/compiler/hsSyn/Convert.lhs @@ -6,6 +6,8 @@ This module converts Template Haskell syntax into HsSyn \begin{code} +{-# LANGUAGE MagicHash #-} + module Convert( convertToHsExpr, convertToPat, convertToHsDecls, convertToHsType, thRdrNameGuesses ) where @@ -199,13 +201,20 @@ cvtDec (ClassD ctxt cl tvs fds decs) ; unless (null adts') (failWith $ (ptext (sLit "Default data instance declarations are not allowed:")) $$ (Outputable.ppr adts')) + ; at_defs <- mapM cvt_at_def ats' ; returnL $ TyClD $ ClassDecl { tcdCtxt = cxt', tcdLName = tc', tcdTyVars = tvs' , tcdFDs = fds', tcdSigs = sigs', tcdMeths = binds' - , tcdATs = fams', tcdATDefs = ats', tcdDocs = [] + , tcdATs = fams', tcdATDefs = at_defs, tcdDocs = [] , tcdFVs = placeHolderNames } -- no docs in TH ^^ } + where + cvt_at_def :: LTyFamInstDecl RdrName -> CvtM (LTyFamDefltEqn RdrName) + -- Very similar to what happens in RdrHsSyn.mkClassDecl + cvt_at_def decl = case RdrHsSyn.mkATDefault decl of + Right def -> return def + Left (_, msg) -> failWith msg cvtDec (InstanceD ctxt ty decs) = do { let doc = ptext (sLit "an instance declaration") @@ -214,7 +223,7 @@ cvtDec (InstanceD ctxt ty decs) ; ctxt' <- cvtContext ctxt ; L loc ty' <- cvtType ty ; let inst_ty' = L loc $ mkImplicitHsForAllTy ctxt' $ L loc ty' - ; returnL $ InstD (ClsInstD (ClsInstDecl inst_ty' binds' sigs' ats' adts')) } + ; returnL $ InstD (ClsInstD (ClsInstDecl inst_ty' binds' sigs' ats' adts' Nothing)) } cvtDec (ForeignD ford) = do { ford' <- cvtForD ford @@ -278,9 +287,9 @@ cvtTySynEqn :: Located RdrName -> TySynEqn -> CvtM (LTyFamInstEqn RdrName) cvtTySynEqn tc (TySynEqn lhs rhs) = do { lhs' <- mapM cvtType lhs ; rhs' <- 
cvtType rhs - ; returnL $ TyFamInstEqn { tfie_tycon = tc - , tfie_pats = mkHsWithBndrs lhs' - , tfie_rhs = rhs' } } + ; returnL $ TyFamEqn { tfe_tycon = tc + , tfe_pats = mkHsWithBndrs lhs' + , tfe_rhs = rhs' } } ---------------- cvt_ci_decs :: MsgDoc -> [TH.Dec] @@ -828,8 +837,8 @@ cvtp (TH.LitP l) | otherwise = do { l' <- cvtLit l; return $ Hs.LitPat l' } cvtp (TH.VarP s) = do { s' <- vName s; return $ Hs.VarPat s' } cvtp (TupP [p]) = do { p' <- cvtPat p; return $ ParPat p' } -- Note [Dropping constructors] -cvtp (TupP ps) = do { ps' <- cvtPats ps; return $ TuplePat ps' Boxed void } -cvtp (UnboxedTupP ps) = do { ps' <- cvtPats ps; return $ TuplePat ps' Unboxed void } +cvtp (TupP ps) = do { ps' <- cvtPats ps; return $ TuplePat ps' Boxed [] } +cvtp (UnboxedTupP ps) = do { ps' <- cvtPats ps; return $ TuplePat ps' Unboxed [] } cvtp (ConP s ps) = do { s' <- cNameL s; ps' <- cvtPats ps ; return $ ConPatIn s' (PrefixCon ps') } cvtp (InfixP p1 s p2) = do { s' <- cNameL s; p1' <- cvtPat p1; p2' <- cvtPat p2 @@ -1156,7 +1165,7 @@ Consider this TH term construction: ; x3 <- TH.newName "x" ; let x = mkName "x" -- mkName :: String -> TH.Name - -- Builds a NameL + -- Builds a NameS ; return (LamE (..pattern [x1,x2]..) $ LamE (VarPat x3) $ diff --git a/compiler/hsSyn/HsDecls.lhs b/compiler/hsSyn/HsDecls.lhs index bae804eb07..845c05296c 100644 --- a/compiler/hsSyn/HsDecls.lhs +++ b/compiler/hsSyn/HsDecls.lhs @@ -29,7 +29,7 @@ module HsDecls ( InstDecl(..), LInstDecl, NewOrData(..), FamilyInfo(..), TyFamInstDecl(..), LTyFamInstDecl, instDeclDataFamInsts, DataFamInstDecl(..), LDataFamInstDecl, pprDataFamInstFlavour, - TyFamInstEqn(..), LTyFamInstEqn, + TyFamEqn(..), TyFamInstEqn, LTyFamInstEqn, TyFamDefltEqn, LTyFamDefltEqn, LClsInstDecl, ClsInstDecl(..), -- ** Standalone deriving declarations @@ -472,7 +472,7 @@ data TyClDecl name tcdSigs :: [LSig name], -- ^ Methods' signatures tcdMeths :: LHsBinds name, -- ^ Default methods tcdATs :: [LFamilyDecl name], -- ^ Associated types; ie - tcdATDefs :: [LTyFamInstDecl name], -- ^ Associated type defaults + tcdATDefs :: [LTyFamDefltEqn name], -- ^ Associated type defaults tcdDocs :: [LDocDecl], -- ^ Haddock docs tcdFVs :: NameSet } @@ -573,7 +573,7 @@ tyFamInstDeclName = unLoc . 
tyFamInstDeclLName tyFamInstDeclLName :: OutputableBndr name => TyFamInstDecl name -> Located name tyFamInstDeclLName (TyFamInstDecl { tfid_eqn = - (L _ (TyFamInstEqn { tfie_tycon = ln })) }) + (L _ (TyFamEqn { tfe_tycon = ln })) }) = ln tyClDeclLName :: TyClDecl name -> Located name @@ -632,7 +632,7 @@ instance OutputableBndr name | otherwise -- Laid out = vcat [ top_matter <+> ptext (sLit "where") , nest 2 $ pprDeclList (map ppr ats ++ - map ppr at_defs ++ + map ppr_fam_deflt_eqn at_defs ++ pprLHsBindsForUser methods sigs) ] where top_matter = ptext (sLit "class") @@ -657,7 +657,7 @@ instance (OutputableBndr name) => Outputable (FamilyDecl name) where ClosedTypeFamily eqns -> ( ptext (sLit "where") , if null eqns then ptext (sLit "..") - else vcat $ map ppr eqns ) + else vcat $ map ppr_fam_inst_eqn eqns ) _ -> (empty, empty) pprFlavour :: FamilyInfo name -> SDoc @@ -678,7 +678,7 @@ pp_vanilla_decl_head thing tyvars context pp_fam_inst_lhs :: OutputableBndr name => Located name - -> HsWithBndrs [LHsType name] + -> HsTyPats name -> HsContext name -> SDoc pp_fam_inst_lhs thing (HsWB { hswb_cts = typats }) context -- explicit type patterns @@ -686,12 +686,13 @@ pp_fam_inst_lhs thing (HsWB { hswb_cts = typats }) context -- explicit type patt , hsep (map (pprParendHsType.unLoc) typats)] pprTyClDeclFlavour :: TyClDecl a -> SDoc -pprTyClDeclFlavour (ClassDecl {}) = ptext (sLit "class") -pprTyClDeclFlavour (FamDecl {}) = ptext (sLit "family") -pprTyClDeclFlavour (SynDecl {}) = ptext (sLit "type") -pprTyClDeclFlavour (DataDecl { tcdDataDefn = (HsDataDefn { dd_ND = nd }) }) - = ppr nd +pprTyClDeclFlavour (ClassDecl {}) = ptext (sLit "class") +pprTyClDeclFlavour (SynDecl {}) = ptext (sLit "type") pprTyClDeclFlavour (ForeignType {}) = ptext (sLit "foreign type") +pprTyClDeclFlavour (FamDecl { tcdFam = FamilyDecl { fdInfo = info }}) + = pprFlavour info +pprTyClDeclFlavour (DataDecl { tcdDataDefn = HsDataDefn { dd_ND = nd } }) + = ppr nd \end{code} %************************************************************************ @@ -893,25 +894,49 @@ pprConDecl decl@(ConDecl { con_details = InfixCon ty1 ty2, con_res = ResTyGADT { %* * %************************************************************************ +Note [Type family instance declarations in HsSyn] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The data type TyFamEqn represents one equation of a type family instance. +It is parameterised over its tfe_pats field: + + * An ordinary type family instance declaration looks like this in source Haskell + type instance T [a] Int = a -> a + (or something similar for a closed family) + It is represented by a TyFamInstEqn, with *type* in the tfe_pats field. + + * On the other hand, the *default instance* of an associated type looksl like + this in source Haskell + class C a where + type T a b + type T a b = a -> b -- The default instance + It is represented by a TyFamDefltEqn, with *type variables8 in the tfe_pats field. 
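To make the Note above concrete, here is a minimal, self-contained sketch of the idea of parameterising one equation type over its LHS field. The names (FamEqn, Ty, InstEqn, DefltEqn) are invented for illustration and deliberately drop GHC's Located/HsWithBndrs wrappers; the real definitions follow in the code just below.

\begin{code}
module FamEqnSketch where

-- A tiny stand-in for a surface type; not GHC's LHsType.
data Ty = TyVar String | TyCon String [Ty] | TyFun Ty Ty
  deriving Show

type TyVarBndr = String            -- a plain type-variable binder

-- One family equation, parameterised over what sits left of the '='.
data FamEqn pats = FamEqn
  { feTycon :: String              -- the family name, e.g. "T"
  , fePats  :: pats                -- instance: type patterns; default: tyvars
  , feRhs   :: Ty                  -- the right-hand side
  } deriving Show

type InstEqn  = FamEqn [Ty]        -- ordinary instance: full type patterns
type DefltEqn = FamEqn [TyVarBndr] -- associated-type default: only tyvars

-- type instance T [a] Int = a -> a
exInst :: InstEqn
exInst = FamEqn "T" [TyCon "[]" [TyVar "a"], TyCon "Int" []]
                    (TyFun (TyVar "a") (TyVar "a"))

-- class C a where { type T a b; type T a b = a -> b }
exDeflt :: DefltEqn
exDeflt = FamEqn "T" ["a", "b"] (TyFun (TyVar "a") (TyVar "b"))
\end{code}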
+ \begin{code} ----------------- Type synonym family instances ------------- +type LTyFamInstEqn name = Located (TyFamInstEqn name) +type LTyFamDefltEqn name = Located (TyFamDefltEqn name) -type LTyFamInstEqn name = Located (TyFamInstEqn name) - --- | One equation in a type family instance declaration -data TyFamInstEqn name - = TyFamInstEqn - { tfie_tycon :: Located name - , tfie_pats :: HsWithBndrs [LHsType name] +type HsTyPats name = HsWithBndrs [LHsType name] -- ^ Type patterns (with kind and type bndrs) -- See Note [Family instance declaration binders] - , tfie_rhs :: LHsType name } + +type TyFamInstEqn name = TyFamEqn name (HsTyPats name) +type TyFamDefltEqn name = TyFamEqn name (LHsTyVarBndrs name) + -- See Note [Type family instance declarations in HsSyn] + +-- | One equation in a type family instance declaration +-- See Note [Type family instance declarations in HsSyn] +data TyFamEqn name pats + = TyFamEqn + { tfe_tycon :: Located name + , tfe_pats :: pats + , tfe_rhs :: LHsType name } deriving( Typeable, Data ) type LTyFamInstDecl name = Located (TyFamInstDecl name) -data TyFamInstDecl name +data TyFamInstDecl name = TyFamInstDecl - { tfid_eqn :: LTyFamInstEqn name + { tfid_eqn :: LTyFamInstEqn name , tfid_fvs :: NameSet } deriving( Typeable, Data ) @@ -921,11 +946,9 @@ type LDataFamInstDecl name = Located (DataFamInstDecl name) data DataFamInstDecl name = DataFamInstDecl { dfid_tycon :: Located name - , dfid_pats :: HsWithBndrs [LHsType name] -- lhs - -- ^ Type patterns (with kind and type bndrs) - -- See Note [Family instance declaration binders] - , dfid_defn :: HsDataDefn name -- rhs - , dfid_fvs :: NameSet } -- free vars for dependency analysis + , dfid_pats :: HsTyPats name -- LHS + , dfid_defn :: HsDataDefn name -- RHS + , dfid_fvs :: NameSet } -- Rree vars for dependency analysis deriving( Typeable, Data ) @@ -937,10 +960,11 @@ data ClsInstDecl name { cid_poly_ty :: LHsType name -- Context => Class Instance-type -- Using a polytype means that the renamer conveniently -- figures out the quantified type variables for us. 
- , cid_binds :: LHsBinds name - , cid_sigs :: [LSig name] -- User-supplied pragmatic info - , cid_tyfam_insts :: [LTyFamInstDecl name] -- type family instances - , cid_datafam_insts :: [LDataFamInstDecl name] -- data family instances + , cid_binds :: LHsBinds name -- Class methods + , cid_sigs :: [LSig name] -- User-supplied pragmatic info + , cid_tyfam_insts :: [LTyFamInstDecl name] -- Type family instances + , cid_datafam_insts :: [LDataFamInstDecl name] -- Data family instances + , cid_overlap_mode :: Maybe OverlapMode } deriving (Data, Typeable) @@ -983,17 +1007,23 @@ instance (OutputableBndr name) => Outputable (TyFamInstDecl name) where pprTyFamInstDecl :: OutputableBndr name => TopLevelFlag -> TyFamInstDecl name -> SDoc pprTyFamInstDecl top_lvl (TyFamInstDecl { tfid_eqn = eqn }) - = ptext (sLit "type") <+> ppr_instance_keyword top_lvl <+> (ppr eqn) + = ptext (sLit "type") <+> ppr_instance_keyword top_lvl <+> ppr_fam_inst_eqn eqn ppr_instance_keyword :: TopLevelFlag -> SDoc ppr_instance_keyword TopLevel = ptext (sLit "instance") ppr_instance_keyword NotTopLevel = empty -instance (OutputableBndr name) => Outputable (TyFamInstEqn name) where - ppr (TyFamInstEqn { tfie_tycon = tycon - , tfie_pats = pats - , tfie_rhs = rhs }) - = (pp_fam_inst_lhs tycon pats []) <+> equals <+> (ppr rhs) +ppr_fam_inst_eqn :: OutputableBndr name => LTyFamInstEqn name -> SDoc +ppr_fam_inst_eqn (L _ (TyFamEqn { tfe_tycon = tycon + , tfe_pats = pats + , tfe_rhs = rhs })) + = pp_fam_inst_lhs tycon pats [] <+> equals <+> ppr rhs + +ppr_fam_deflt_eqn :: OutputableBndr name => LTyFamDefltEqn name -> SDoc +ppr_fam_deflt_eqn (L _ (TyFamEqn { tfe_tycon = tycon + , tfe_pats = tvs + , tfe_rhs = rhs })) + = pp_vanilla_decl_head tycon tvs [] <+> equals <+> ppr rhs instance (OutputableBndr name) => Outputable (DataFamInstDecl name) where ppr = pprDataFamInstDecl TopLevel @@ -1013,6 +1043,7 @@ pprDataFamInstFlavour (DataFamInstDecl { dfid_defn = (HsDataDefn { dd_ND = nd }) instance (OutputableBndr name) => Outputable (ClsInstDecl name) where ppr (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = binds , cid_sigs = sigs, cid_tyfam_insts = ats + , cid_overlap_mode = mbOverlap , cid_datafam_insts = adts }) | null sigs, null ats, null adts, isEmptyBag binds -- No "where" part = top_matter @@ -1024,7 +1055,19 @@ instance (OutputableBndr name) => Outputable (ClsInstDecl name) where map (pprDataFamInstDecl NotTopLevel . 
unLoc) adts ++ pprLHsBindsForUser binds sigs ] where - top_matter = ptext (sLit "instance") <+> ppr inst_ty + top_matter = ptext (sLit "instance") <+> ppOveralapPragma mbOverlap + <+> ppr inst_ty + +ppOveralapPragma :: Maybe OverlapMode -> SDoc +ppOveralapPragma mb = + case mb of + Nothing -> empty + Just NoOverlap -> ptext (sLit "{-# NO_OVERLAP #-}") + Just OverlapOk -> ptext (sLit "{-# OVERLAP #-}") + Just Incoherent -> ptext (sLit "{-# INCOHERENT #-}") + + + instance (OutputableBndr name) => Outputable (InstDecl name) where ppr (ClsInstD { cid_inst = decl }) = ppr decl @@ -1052,12 +1095,14 @@ instDeclDataFamInsts inst_decls \begin{code} type LDerivDecl name = Located (DerivDecl name) -data DerivDecl name = DerivDecl { deriv_type :: LHsType name } +data DerivDecl name = DerivDecl { deriv_type :: LHsType name + , deriv_overlap_mode :: Maybe OverlapMode + } deriving (Data, Typeable) instance (OutputableBndr name) => Outputable (DerivDecl name) where - ppr (DerivDecl ty) - = hsep [ptext (sLit "deriving instance"), ppr ty] + ppr (DerivDecl ty o) + = hsep [ptext (sLit "deriving instance"), ppOveralapPragma o, ppr ty] \end{code} %************************************************************************ @@ -1236,7 +1281,7 @@ instance OutputableBndr name => Outputable (RuleDecl name) where nest 4 (equals <+> pprExpr (unLoc rhs) <+> text "#-}") ] where pp_forall | null ns = empty - | otherwise = text "forall" <+> fsep (map ppr ns) <> dot + | otherwise = forAllLit <+> fsep (map ppr ns) <> dot instance OutputableBndr name => Outputable (RuleBndr name) where ppr (RuleBndr name) = ppr name diff --git a/compiler/hsSyn/HsDoc.hs b/compiler/hsSyn/HsDoc.hs index 2cb28540f9..72bf0e56a4 100644 --- a/compiler/hsSyn/HsDoc.hs +++ b/compiler/hsSyn/HsDoc.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE CPP, DeriveDataTypeable #-} module HsDoc ( HsDocString(..), diff --git a/compiler/hsSyn/HsExpr.lhs b/compiler/hsSyn/HsExpr.lhs index f5ba1903ee..69b6df64ec 100644 --- a/compiler/hsSyn/HsExpr.lhs +++ b/compiler/hsSyn/HsExpr.lhs @@ -3,7 +3,7 @@ % (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 % \begin{code} -{-# LANGUAGE DeriveDataTypeable, ScopedTypeVariables #-} +{-# LANGUAGE CPP, DeriveDataTypeable, ScopedTypeVariables #-} -- | Abstract Haskell syntax for expressions. module HsExpr where @@ -79,8 +79,6 @@ noSyntaxExpr = HsLit (HsString (fsLit "noSyntaxExpr")) type CmdSyntaxTable id = [(Name, SyntaxExpr id)] -- See Note [CmdSyntaxTable] -noSyntaxTable :: CmdSyntaxTable id -noSyntaxTable = [] \end{code} Note [CmdSyntaxtable] @@ -88,7 +86,7 @@ Note [CmdSyntaxtable] Used only for arrow-syntax stuff (HsCmdTop), the CmdSyntaxTable keeps track of the methods needed for a Cmd. 
-* Before the renamer, this list is 'noSyntaxTable' +* Before the renamer, this list is an empty list * After the renamer, it takes the form @[(std_name, HsVar actual_name)]@ For example, for the 'arr' method @@ -630,13 +628,13 @@ ppr_expr (HsTickPragma externalSrcLoc exp) ptext (sLit ")")] ppr_expr (HsArrApp arrow arg _ HsFirstOrderApp True) - = hsep [ppr_lexpr arrow, ptext (sLit "-<"), ppr_lexpr arg] + = hsep [ppr_lexpr arrow, larrowt, ppr_lexpr arg] ppr_expr (HsArrApp arrow arg _ HsFirstOrderApp False) - = hsep [ppr_lexpr arg, ptext (sLit ">-"), ppr_lexpr arrow] + = hsep [ppr_lexpr arg, arrowt, ppr_lexpr arrow] ppr_expr (HsArrApp arrow arg _ HsHigherOrderApp True) - = hsep [ppr_lexpr arrow, ptext (sLit "-<<"), ppr_lexpr arg] + = hsep [ppr_lexpr arrow, larrowtt, ppr_lexpr arg] ppr_expr (HsArrApp arrow arg _ HsHigherOrderApp False) - = hsep [ppr_lexpr arg, ptext (sLit ">>-"), ppr_lexpr arrow] + = hsep [ppr_lexpr arg, arrowtt, ppr_lexpr arrow] ppr_expr (HsArrForm (L _ (HsVar v)) (Just _) [arg1, arg2]) = sep [pprCmdArg (unLoc arg1), hsep [pprInfixOcc v, pprCmdArg (unLoc arg2)]] @@ -849,13 +847,13 @@ ppr_cmd (HsCmdCast co cmd) = sep [ ppr_cmd cmd , ptext (sLit "|>") <+> ppr co ] ppr_cmd (HsCmdArrApp arrow arg _ HsFirstOrderApp True) - = hsep [ppr_lexpr arrow, ptext (sLit "-<"), ppr_lexpr arg] + = hsep [ppr_lexpr arrow, larrowt, ppr_lexpr arg] ppr_cmd (HsCmdArrApp arrow arg _ HsFirstOrderApp False) - = hsep [ppr_lexpr arg, ptext (sLit ">-"), ppr_lexpr arrow] + = hsep [ppr_lexpr arg, arrowt, ppr_lexpr arrow] ppr_cmd (HsCmdArrApp arrow arg _ HsHigherOrderApp True) - = hsep [ppr_lexpr arrow, ptext (sLit "-<<"), ppr_lexpr arg] + = hsep [ppr_lexpr arrow, larrowtt, ppr_lexpr arg] ppr_cmd (HsCmdArrApp arrow arg _ HsHigherOrderApp False) - = hsep [ppr_lexpr arg, ptext (sLit ">>-"), ppr_lexpr arrow] + = hsep [ppr_lexpr arg, arrowtt, ppr_lexpr arrow] ppr_cmd (HsCmdArrForm (L _ (HsVar v)) (Just _) [arg1, arg2]) = sep [pprCmdArg (unLoc arg1), hsep [pprInfixOcc v, pprCmdArg (unLoc arg2)]] @@ -1300,7 +1298,7 @@ instance (OutputableBndr idL, OutputableBndr idR, Outputable body) pprStmt :: (OutputableBndr idL, OutputableBndr idR, Outputable body) => (StmtLR idL idR body) -> SDoc pprStmt (LastStmt expr _) = ifPprDebug (ptext (sLit "[last]")) <+> ppr expr -pprStmt (BindStmt pat expr _ _) = hsep [ppr pat, ptext (sLit "<-"), ppr expr] +pprStmt (BindStmt pat expr _ _) = hsep [ppr pat, larrow, ppr expr] pprStmt (LetStmt binds) = hsep [ptext (sLit "let"), pprBinds binds] pprStmt (BodyStmt expr _ _ _) = ppr expr pprStmt (ParStmt stmtss _ _) = sep (punctuate (ptext (sLit " | ")) (map ppr stmtss)) diff --git a/compiler/hsSyn/HsLit.lhs b/compiler/hsSyn/HsLit.lhs index 9565acbc8f..a4749dd730 100644 --- a/compiler/hsSyn/HsLit.lhs +++ b/compiler/hsSyn/HsLit.lhs @@ -5,14 +5,14 @@ \section[HsLit]{Abstract syntax: source-language literals} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See -- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces -- for details -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE CPP, DeriveDataTypeable #-} module HsLit where diff --git a/compiler/hsSyn/HsPat.lhs b/compiler/hsSyn/HsPat.lhs index ef888fe5a8..4b8fcdaae7 100644 --- a/compiler/hsSyn/HsPat.lhs +++ b/compiler/hsSyn/HsPat.lhs @@ -75,10 +75,13 @@ data Pat id -- overall type of the pattern, and the toList -- function to convert the scrutinee to a list value - | TuplePat [LPat id] -- Tuple - Boxity -- UnitPat is TuplePat [] - PostTcType - -- You might think that the PostTcType was redundant, but it's essential + | TuplePat [LPat id] -- Tuple sub-patterns + Boxity -- UnitPat is TuplePat [] + [PostTcType] -- [] before typechecker, filled in afterwards with + -- the types of the tuple components + -- You might think that the PostTcType was redundant, because we can + -- get the pattern type by getting the types of the sub-patterns. + -- But it's essential -- data T a where -- T1 :: Int -> T Int -- f :: (T a, a) -> Int @@ -89,6 +92,8 @@ data Pat id -- Note the (w::a), NOT (w::Int), because we have not yet -- refined 'a' to Int. So we must know that the second component -- of the tuple is of type 'a' not Int. See selectMatchVar + -- (June 14: I'm not sure this comment is right; the sub-patterns + -- will be wrapped in CoPats, no?) | PArrPat [LPat id] -- Syntactic parallel array PostTcType -- The type of the elements @@ -98,14 +103,18 @@ data Pat id (HsConPatDetails id) | ConPatOut { - pat_con :: Located ConLike, + pat_con :: Located ConLike, + pat_arg_tys :: [Type], -- The univeral arg types, 1-1 with the universal + -- tyvars of the constructor/pattern synonym + -- Use (conLikeResTy pat_con pat_arg_tys) to get + -- the type of the pattern + pat_tvs :: [TyVar], -- Existentially bound type variables (tyvars only) pat_dicts :: [EvVar], -- Ditto *coercion variables* and *dictionaries* -- One reason for putting coercion variable here, I think, -- is to ensure their kinds are zonked pat_binds :: TcEvBinds, -- Bindings involving those dictionaries pat_args :: HsConPatDetails id, - pat_ty :: Type, -- The type of the pattern pat_wrap :: HsWrapper -- Extra wrapper to pass to the matcher } @@ -313,18 +322,18 @@ instance (OutputableBndr id, Outputable arg) %************************************************************************ \begin{code} -mkPrefixConPat :: DataCon -> [OutPat id] -> Type -> OutPat id +mkPrefixConPat :: DataCon -> [OutPat id] -> [Type] -> OutPat id -- Make a vanilla Prefix constructor pattern -mkPrefixConPat dc pats ty +mkPrefixConPat dc pats tys = noLoc $ ConPatOut { pat_con = noLoc (RealDataCon dc), pat_tvs = [], pat_dicts = [], pat_binds = emptyTcEvBinds, pat_args = PrefixCon pats, - pat_ty = ty, pat_wrap = idHsWrapper } + pat_arg_tys = tys, pat_wrap = idHsWrapper } mkNilPat :: Type -> OutPat id -mkNilPat ty = mkPrefixConPat nilDataCon [] ty +mkNilPat ty = mkPrefixConPat nilDataCon [] [ty] mkCharLitPat :: Char -> OutPat id -mkCharLitPat c = mkPrefixConPat charDataCon [noLoc $ LitPat (HsCharPrim c)] charTy +mkCharLitPat c = mkPrefixConPat charDataCon [noLoc $ LitPat (HsCharPrim c)] [] \end{code} diff --git a/compiler/hsSyn/HsSyn.lhs b/compiler/hsSyn/HsSyn.lhs index e9c3a5eeee..72cbac1487 100644 --- a/compiler/hsSyn/HsSyn.lhs +++ b/compiler/hsSyn/HsSyn.lhs @@ -23,7 +23,7 @@ module HsSyn ( module HsDoc, Fixity, - HsModule(..), HsExtCore(..), + HsModule(..) 
) where -- friends: @@ -40,10 +40,9 @@ import HsDoc -- others: import OccName ( HasOccName ) -import IfaceSyn ( IfaceBinding ) import Outputable import SrcLoc -import Module ( Module, ModuleName ) +import Module ( ModuleName ) import FastString -- libraries: @@ -77,13 +76,6 @@ data HsModule name hsmodHaddockModHeader :: Maybe LHsDocString -- ^ Haddock module info and description, unparsed } deriving (Data, Typeable) - -data HsExtCore name -- Read from Foo.hcr - = HsExtCore - Module - [TyClDecl name] -- Type declarations only; just as in Haskell source, - -- so that we can infer kinds etc - [IfaceBinding] -- And the bindings \end{code} diff --git a/compiler/hsSyn/HsTypes.lhs b/compiler/hsSyn/HsTypes.lhs index 28c6a2b89c..08a0eef498 100644 --- a/compiler/hsSyn/HsTypes.lhs +++ b/compiler/hsSyn/HsTypes.lhs @@ -35,7 +35,7 @@ module HsTypes ( splitHsAppTys, hsTyGetAppHead_maybe, mkHsAppTys, mkHsOpTy, -- Printing - pprParendHsType, pprHsForAll, pprHsContext, pprHsContextNoArrow, ppr_hs_context, + pprParendHsType, pprHsForAll, pprHsContext, pprHsContextNoArrow, ) where import {-# SOURCE #-} HsExpr ( HsSplice, pprUntypedSplice ) @@ -45,6 +45,7 @@ import HsLit import Name( Name ) import RdrName( RdrName ) import DataCon( HsBang(..) ) +import TysPrim( funTyConName ) import Type import HsDoc import BasicTypes @@ -162,7 +163,7 @@ mkHsWithBndrs x = HsWB { hswb_cts = x, hswb_kvs = panic "mkHsTyWithBndrs:kvs" , hswb_tvs = panic "mkHsTyWithBndrs:tvs" } --- | These names are used eary on to store the names of implicit +-- | These names are used early on to store the names of implicit -- parameters. They completely disappear after type-checking. newtype HsIPName = HsIPName FastString-- ?x deriving( Eq, Data, Typeable ) @@ -506,15 +507,31 @@ splitLHsClassTy_maybe ty HsKindSig ty _ -> checkl ty args _ -> Nothing --- Splits HsType into the (init, last) parts +-- splitHsFunType decomposes a type (t1 -> t2 ... 
-> tn) -- Breaks up any parens in the result type: -- splitHsFunType (a -> (b -> c)) = ([a,b], c) -splitHsFunType :: LHsType name -> ([LHsType name], LHsType name) -splitHsFunType (L _ (HsFunTy x y)) = (x:args, res) - where - (args, res) = splitHsFunType y -splitHsFunType (L _ (HsParTy ty)) = splitHsFunType ty -splitHsFunType other = ([], other) +-- Also deals with (->) t1 t2; that is why it only works on LHsType Name +-- (see Trac #9096) +splitHsFunType :: LHsType Name -> ([LHsType Name], LHsType Name) +splitHsFunType (L _ (HsParTy ty)) + = splitHsFunType ty + +splitHsFunType (L _ (HsFunTy x y)) + | (args, res) <- splitHsFunType y + = (x:args, res) + +splitHsFunType orig_ty@(L _ (HsAppTy t1 t2)) + = go t1 [t2] + where -- Look for (->) t1 t2, possibly with parenthesisation + go (L _ (HsTyVar fn)) tys | fn == funTyConName + , [t1,t2] <- tys + , (args, res) <- splitHsFunType t2 + = (t1:args, res) + go (L _ (HsAppTy t1 t2)) tys = go t1 (t2:tys) + go (L _ (HsParTy ty)) tys = go ty tys + go _ _ = ([], orig_ty) -- Failure to match + +splitHsFunType other = ([], other) \end{code} @@ -550,7 +567,7 @@ pprHsForAll exp qtvs cxt show_forall = opt_PprStyle_Debug || (not (null (hsQTvBndrs qtvs)) && is_explicit) is_explicit = case exp of {Explicit -> True; Implicit -> False} - forall_part = ptext (sLit "forall") <+> ppr qtvs <> dot + forall_part = forAllLit <+> ppr qtvs <> dot pprHsContext :: (OutputableBndr name) => HsContext name -> SDoc pprHsContext [] = empty @@ -558,12 +575,8 @@ pprHsContext cxt = pprHsContextNoArrow cxt <+> darrow pprHsContextNoArrow :: (OutputableBndr name) => HsContext name -> SDoc pprHsContextNoArrow [] = empty -pprHsContextNoArrow [L _ pred] = ppr pred -pprHsContextNoArrow cxt = ppr_hs_context cxt - -ppr_hs_context :: (OutputableBndr name) => HsContext name -> SDoc -ppr_hs_context [] = empty -ppr_hs_context cxt = parens (interpp'SP cxt) +pprHsContextNoArrow [L _ pred] = ppr_mono_ty FunPrec pred +pprHsContextNoArrow cxt = parens (interpp'SP cxt) pprConDeclFields :: OutputableBndr name => [ConDeclField name] -> SDoc pprConDeclFields fields = braces (sep (punctuate comma (map ppr_fld fields))) @@ -585,27 +598,12 @@ and the problem doesn't show up; but having the flag on a KindedTyVar seems like the Right Thing anyway.) 
\begin{code} -pREC_TOP, pREC_FUN, pREC_OP, pREC_CON :: Int -pREC_TOP = 0 -- type in ParseIface.y -pREC_FUN = 1 -- btype in ParseIface.y - -- Used for LH arg of (->) -pREC_OP = 2 -- Used for arg of any infix operator - -- (we don't keep their fixities around) -pREC_CON = 3 -- Used for arg of type applicn: - -- always parenthesise unless atomic - -maybeParen :: Int -- Precedence of context - -> Int -- Precedence of top-level operator - -> SDoc -> SDoc -- Wrap in parens if (ctxt >= op) -maybeParen ctxt_prec op_prec p | ctxt_prec >= op_prec = parens p - | otherwise = p - --- printing works more-or-less as for Types +-- Printing works more-or-less as for Types pprHsType, pprParendHsType :: (OutputableBndr name) => HsType name -> SDoc -pprHsType ty = getPprStyle $ \sty -> ppr_mono_ty pREC_TOP (prepare sty ty) -pprParendHsType ty = ppr_mono_ty pREC_CON ty +pprHsType ty = getPprStyle $ \sty -> ppr_mono_ty TopPrec (prepare sty ty) +pprParendHsType ty = ppr_mono_ty TyConPrec ty -- Before printing a type -- (a) Remove outermost HsParTy parens @@ -615,15 +613,15 @@ prepare :: PprStyle -> HsType name -> HsType name prepare sty (HsParTy ty) = prepare sty (unLoc ty) prepare _ ty = ty -ppr_mono_lty :: (OutputableBndr name) => Int -> LHsType name -> SDoc +ppr_mono_lty :: (OutputableBndr name) => TyPrec -> LHsType name -> SDoc ppr_mono_lty ctxt_prec ty = ppr_mono_ty ctxt_prec (unLoc ty) -ppr_mono_ty :: (OutputableBndr name) => Int -> HsType name -> SDoc +ppr_mono_ty :: (OutputableBndr name) => TyPrec -> HsType name -> SDoc ppr_mono_ty ctxt_prec (HsForAllTy exp tvs ctxt ty) - = maybeParen ctxt_prec pREC_FUN $ - sep [pprHsForAll exp tvs ctxt, ppr_mono_lty pREC_TOP ty] + = maybeParen ctxt_prec FunPrec $ + sep [pprHsForAll exp tvs ctxt, ppr_mono_lty TopPrec ty] -ppr_mono_ty _ (HsBangTy b ty) = ppr b <> ppr_mono_lty pREC_CON ty +ppr_mono_ty _ (HsBangTy b ty) = ppr b <> ppr_mono_lty TyConPrec ty ppr_mono_ty _ (HsQuasiQuoteTy qq) = ppr qq ppr_mono_ty _ (HsRecTy flds) = pprConDeclFields flds ppr_mono_ty _ (HsTyVar name) = pprPrefixOcc name @@ -632,10 +630,10 @@ ppr_mono_ty _ (HsTupleTy con tys) = tupleParens std_con (interpp'SP tys) where std_con = case con of HsUnboxedTuple -> UnboxedTuple _ -> BoxedTuple -ppr_mono_ty _ (HsKindSig ty kind) = parens (ppr_mono_lty pREC_TOP ty <+> dcolon <+> ppr kind) -ppr_mono_ty _ (HsListTy ty) = brackets (ppr_mono_lty pREC_TOP ty) -ppr_mono_ty _ (HsPArrTy ty) = paBrackets (ppr_mono_lty pREC_TOP ty) -ppr_mono_ty prec (HsIParamTy n ty) = maybeParen prec pREC_FUN (ppr n <+> dcolon <+> ppr_mono_lty pREC_TOP ty) +ppr_mono_ty _ (HsKindSig ty kind) = parens (ppr_mono_lty TopPrec ty <+> dcolon <+> ppr kind) +ppr_mono_ty _ (HsListTy ty) = brackets (ppr_mono_lty TopPrec ty) +ppr_mono_ty _ (HsPArrTy ty) = paBrackets (ppr_mono_lty TopPrec ty) +ppr_mono_ty prec (HsIParamTy n ty) = maybeParen prec FunPrec (ppr n <+> dcolon <+> ppr_mono_lty TopPrec ty) ppr_mono_ty _ (HsSpliceTy s _) = pprUntypedSplice s ppr_mono_ty _ (HsCoreTy ty) = ppr ty ppr_mono_ty _ (HsExplicitListTy _ tys) = quote $ brackets (interpp'SP tys) @@ -651,45 +649,45 @@ ppr_mono_ty ctxt_prec (HsWrapTy (WpKiApps _kis) ty) where go ctxt_prec [] ty = ppr_mono_ty ctxt_prec ty go ctxt_prec (ki:kis) ty - = maybeParen ctxt_prec pREC_CON $ - hsep [ go pREC_FUN kis ty + = maybeParen ctxt_prec TyConPrec $ + hsep [ go FunPrec kis ty , ptext (sLit "@") <> pprParendKind ki ] -} ppr_mono_ty ctxt_prec (HsEqTy ty1 ty2) - = maybeParen ctxt_prec pREC_OP $ - ppr_mono_lty pREC_OP ty1 <+> char '~' <+> ppr_mono_lty pREC_OP ty2 + = maybeParen ctxt_prec 
TyOpPrec $ + ppr_mono_lty TyOpPrec ty1 <+> char '~' <+> ppr_mono_lty TyOpPrec ty2 ppr_mono_ty ctxt_prec (HsAppTy fun_ty arg_ty) - = maybeParen ctxt_prec pREC_CON $ - hsep [ppr_mono_lty pREC_FUN fun_ty, ppr_mono_lty pREC_CON arg_ty] + = maybeParen ctxt_prec TyConPrec $ + hsep [ppr_mono_lty FunPrec fun_ty, ppr_mono_lty TyConPrec arg_ty] ppr_mono_ty ctxt_prec (HsOpTy ty1 (_wrapper, L _ op) ty2) - = maybeParen ctxt_prec pREC_OP $ - sep [ ppr_mono_lty pREC_OP ty1 - , sep [pprInfixOcc op, ppr_mono_lty pREC_OP ty2 ] ] + = maybeParen ctxt_prec TyOpPrec $ + sep [ ppr_mono_lty TyOpPrec ty1 + , sep [pprInfixOcc op, ppr_mono_lty TyOpPrec ty2 ] ] -- Don't print the wrapper (= kind applications) -- c.f. HsWrapTy ppr_mono_ty _ (HsParTy ty) - = parens (ppr_mono_lty pREC_TOP ty) + = parens (ppr_mono_lty TopPrec ty) -- Put the parens in where the user did -- But we still use the precedence stuff to add parens because -- toHsType doesn't put in any HsParTys, so we may still need them ppr_mono_ty ctxt_prec (HsDocTy ty doc) - = maybeParen ctxt_prec pREC_OP $ - ppr_mono_lty pREC_OP ty <+> ppr (unLoc doc) + = maybeParen ctxt_prec TyOpPrec $ + ppr_mono_lty TyOpPrec ty <+> ppr (unLoc doc) -- we pretty print Haddock comments on types as if they were -- postfix operators -------------------------- -ppr_fun_ty :: (OutputableBndr name) => Int -> LHsType name -> LHsType name -> SDoc +ppr_fun_ty :: (OutputableBndr name) => TyPrec -> LHsType name -> LHsType name -> SDoc ppr_fun_ty ctxt_prec ty1 ty2 - = let p1 = ppr_mono_lty pREC_FUN ty1 - p2 = ppr_mono_lty pREC_TOP ty2 + = let p1 = ppr_mono_lty FunPrec ty1 + p2 = ppr_mono_lty TopPrec ty2 in - maybeParen ctxt_prec pREC_FUN $ + maybeParen ctxt_prec FunPrec $ sep [p1, ptext (sLit "->") <+> p2] -------------------------- diff --git a/compiler/hsSyn/HsUtils.lhs b/compiler/hsSyn/HsUtils.lhs index eff67df3cf..42838ef93f 100644 --- a/compiler/hsSyn/HsUtils.lhs +++ b/compiler/hsSyn/HsUtils.lhs @@ -4,7 +4,7 @@ Here we collect a variety of helper functions that construct or analyse HsSyn. All these functions deal with generic HsSyn; functions -which deal with the intantiated versions are located elsewhere: +which deal with the instantiated versions are located elsewhere: Parameterised by Module ---------------- ------------- @@ -13,7 +13,8 @@ which deal with the intantiated versions are located elsewhere: Id typecheck/TcHsSyn \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See @@ -382,7 +383,7 @@ mkLHsVarTuple :: [a] -> LHsExpr a mkLHsVarTuple ids = mkLHsTupleExpr (map nlHsVar ids) nlTuplePat :: [LPat id] -> Boxity -> LPat id -nlTuplePat pats box = noLoc (TuplePat pats box placeHolderType) +nlTuplePat pats box = noLoc (TuplePat pats box []) missingTupArg :: HsTupArg a missingTupArg = Missing placeHolderType diff --git a/compiler/iface/BinIface.hs b/compiler/iface/BinIface.hs index 9fd0c33423..9dd95fc0f2 100644 --- a/compiler/iface/BinIface.hs +++ b/compiler/iface/BinIface.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- -- (c) The University of Glasgow 2002-2006 -- diff --git a/compiler/iface/BuildTyCl.lhs b/compiler/iface/BuildTyCl.lhs index e412d7ef30..f2d6f7e39a 100644 --- a/compiler/iface/BuildTyCl.lhs +++ b/compiler/iface/BuildTyCl.lhs @@ -4,7 +4,8 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -15,7 +16,7 @@ module BuildTyCl ( buildSynTyCon, buildAlgTyCon, buildDataCon, - buildPatSyn, mkPatSynMatcherId, mkPatSynWrapperId, + buildPatSyn, TcMethInfo, buildClass, distinctAbstractTyConRhs, totallyAbstractTyConRhs, mkNewTyConRhs, mkDataTyConRhs, @@ -36,10 +37,9 @@ import MkId import Class import TyCon import Type -import TypeRep -import TcType import Id import Coercion +import TcType import DynFlags import TcRnMonad @@ -184,67 +184,34 @@ mkDataConStupidTheta tycon arg_tys univ_tvs ------------------------------------------------------ -buildPatSyn :: Name -> Bool -> Bool - -> [Var] +buildPatSyn :: Name -> Bool + -> Id -> Maybe Id + -> [Type] -> [TyVar] -> [TyVar] -- Univ and ext -> ThetaType -> ThetaType -- Prov and req -> Type -- Result type - -> TyVar - -> TcRnIf m n PatSyn -buildPatSyn src_name declared_infix has_wrapper args univ_tvs ex_tvs prov_theta req_theta pat_ty tv - = do { (matcher, _, _) <- mkPatSynMatcherId src_name args - univ_tvs ex_tvs - prov_theta req_theta - pat_ty tv - ; wrapper <- case has_wrapper of - False -> return Nothing - True -> fmap Just $ - mkPatSynWrapperId src_name args - (univ_tvs ++ ex_tvs) (prov_theta ++ req_theta) - pat_ty - ; return $ mkPatSyn src_name declared_infix - args - univ_tvs ex_tvs - prov_theta req_theta - pat_ty - matcher - wrapper } - -mkPatSynMatcherId :: Name - -> [Var] - -> [TyVar] - -> [TyVar] - -> ThetaType -> ThetaType - -> Type - -> TyVar - -> TcRnIf n m (Id, Type, Type) -mkPatSynMatcherId name args univ_tvs ex_tvs prov_theta req_theta pat_ty res_tv - = do { matcher_name <- newImplicitBinder name mkMatcherOcc - - ; let res_ty = TyVarTy res_tv - cont_ty = mkSigmaTy ex_tvs prov_theta $ - mkFunTys (map varType args) res_ty - - ; let matcher_tau = mkFunTys [pat_ty, cont_ty, res_ty] res_ty - matcher_sigma = mkSigmaTy (res_tv:univ_tvs) req_theta matcher_tau - matcher_id = mkVanillaGlobal matcher_name matcher_sigma - ; return (matcher_id, res_ty, cont_ty) } - -mkPatSynWrapperId :: Name - -> [Var] - -> [TyVar] - -> ThetaType - -> Type - -> TcRnIf n m Id -mkPatSynWrapperId name args qtvs theta pat_ty - = do { wrapper_name <- newImplicitBinder name mkDataConWrapperOcc - - ; let wrapper_tau = mkFunTys (map varType args) pat_ty - wrapper_sigma = mkSigmaTy qtvs theta wrapper_tau - - ; let wrapper_id = mkVanillaGlobal wrapper_name wrapper_sigma - ; return wrapper_id } - + -> PatSyn +buildPatSyn src_name declared_infix matcher wrapper + args univ_tvs ex_tvs 
prov_theta req_theta pat_ty + = ASSERT((and [ univ_tvs == univ_tvs' + , ex_tvs == ex_tvs' + , pat_ty `eqType` pat_ty' + , prov_theta `eqTypes` prov_theta' + , req_theta `eqTypes` req_theta' + , args `eqTypes` args' + ])) + mkPatSyn src_name declared_infix + args + univ_tvs ex_tvs + prov_theta req_theta + pat_ty + matcher + wrapper + where + ((_:univ_tvs'), req_theta', tau) = tcSplitSigmaTy $ idType matcher + ([pat_ty', cont_sigma, _], _) = tcSplitFunTys tau + (ex_tvs', prov_theta', cont_tau) = tcSplitSigmaTy cont_sigma + (args', _) = tcSplitFunTys cont_tau \end{code} @@ -254,10 +221,7 @@ type TcMethInfo = (Name, DefMethSpec, Type) -- A temporary intermediate, to communicate between -- tcClassSigs and buildClass. -buildClass :: Bool -- True <=> do not include unfoldings - -- on dict selectors - -- Used when importing a class without -O - -> Name -> [TyVar] -> [Role] -> ThetaType +buildClass :: Name -> [TyVar] -> [Role] -> ThetaType -> [FunDep TyVar] -- Functional dependencies -> [ClassATItem] -- Associated types -> [TcMethInfo] -- Method info @@ -265,10 +229,9 @@ buildClass :: Bool -- True <=> do not include unfoldings -> RecFlag -- Info for type constructor -> TcRnIf m n Class -buildClass no_unf tycon_name tvs roles sc_theta fds at_items sig_stuff mindef tc_isrec +buildClass tycon_name tvs roles sc_theta fds at_items sig_stuff mindef tc_isrec = fixM $ \ rec_clas -> -- Only name generation inside loop do { traceIf (text "buildClass") - ; dflags <- getDynFlags ; datacon_name <- newImplicitBinder tycon_name mkClassDataConOcc -- The class name is the 'parent' for this datacon, not its tycon, @@ -282,7 +245,7 @@ buildClass no_unf tycon_name tvs roles sc_theta fds at_items sig_stuff mindef tc -- Make selectors for the superclasses ; sc_sel_names <- mapM (newImplicitBinder tycon_name . mkSuperDictSelOcc) [1..length sc_theta] - ; let sc_sel_ids = [ mkDictSelId dflags no_unf sc_name rec_clas + ; let sc_sel_ids = [ mkDictSelId sc_name rec_clas | sc_name <- sc_sel_names] -- We number off the Dict superclass selectors, 1, 2, 3 etc so that we -- can construct names for the selectors. Thus @@ -348,14 +311,13 @@ buildClass no_unf tycon_name tvs roles sc_theta fds at_items sig_stuff mindef tc where mk_op_item :: Class -> TcMethInfo -> TcRnIf n m ClassOpItem mk_op_item rec_clas (op_name, dm_spec, _) - = do { dflags <- getDynFlags - ; dm_info <- case dm_spec of + = do { dm_info <- case dm_spec of NoDM -> return NoDefMeth GenericDM -> do { dm_name <- newImplicitBinder op_name mkGenDefMethodOcc ; return (GenDefMeth dm_name) } VanillaDM -> do { dm_name <- newImplicitBinder op_name mkDefaultMethodOcc ; return (DefMeth dm_name) } - ; return (mkDictSelId dflags no_unf op_name rec_clas, dm_info) } + ; return (mkDictSelId op_name rec_clas, dm_info) } \end{code} Note [Class newtypes and equality predicates] diff --git a/compiler/iface/IfaceEnv.lhs b/compiler/iface/IfaceEnv.lhs index 4a00c91381..c29778dc23 100644 --- a/compiler/iface/IfaceEnv.lhs +++ b/compiler/iface/IfaceEnv.lhs @@ -1,7 +1,8 @@ (c) The University of Glasgow 2002-2006 \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, RankNTypes #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/iface/IfaceSyn.lhs b/compiler/iface/IfaceSyn.lhs index 1283b095fd..935b8eda93 100644 --- a/compiler/iface/IfaceSyn.lhs +++ b/compiler/iface/IfaceSyn.lhs @@ -4,7 +4,8 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -15,13 +16,14 @@ module IfaceSyn ( module IfaceType, IfaceDecl(..), IfaceSynTyConRhs(..), IfaceClassOp(..), IfaceAT(..), - IfaceConDecl(..), IfaceConDecls(..), + IfaceConDecl(..), IfaceConDecls(..), IfaceEqSpec, IfaceExpr(..), IfaceAlt, IfaceLetBndr(..), IfaceBinding(..), IfaceConAlt(..), IfaceIdInfo(..), IfaceIdDetails(..), IfaceUnfolding(..), IfaceInfoItem(..), IfaceRule(..), IfaceAnnotation(..), IfaceAnnTarget, IfaceClsInst(..), IfaceFamInst(..), IfaceTickish(..), IfaceBang(..), IfaceAxBranch(..), + IfaceTyConParent(..), -- Misc ifaceDeclImplicitBndrs, visibleIfConDecls, @@ -31,7 +33,9 @@ module IfaceSyn ( freeNamesIfDecl, freeNamesIfRule, freeNamesIfFamInst, -- Pretty printing - pprIfaceExpr + pprIfaceExpr, + pprIfaceDecl, + ShowSub(..), ShowHowMuch(..) ) where #include "HsVersions.h" @@ -51,14 +55,17 @@ import BasicTypes import Outputable import FastString import Module -import TysWiredIn ( eqTyConName ) import Fingerprint import Binary import BooleanFormula ( BooleanFormula ) import HsBinds +import TyCon (Role (..)) +import StaticFlags (opt_PprStyle_Debug) +import Util( filterOut ) import Control.Monad import System.IO.Unsafe +import Data.Maybe (isJust) infixl 3 &&& \end{code} @@ -66,18 +73,27 @@ infixl 3 &&& %************************************************************************ %* * - Data type declarations + Declarations %* * %************************************************************************ \begin{code} +type IfaceTopBndr = OccName + -- It's convenient to have an OccName in the IfaceSyn, altough in each + -- case the namespace is implied by the context. However, having an + -- OccNames makes things like ifaceDeclImplicitBndrs and ifaceDeclFingerprints + -- very convenient. + -- + -- We don't serialise the namespace onto the disk though; rather we + -- drop it when serialising and add it back in when deserialising. + data IfaceDecl - = IfaceId { ifName :: OccName, + = IfaceId { ifName :: IfaceTopBndr, ifType :: IfaceType, ifIdDetails :: IfaceIdDetails, ifIdInfo :: IfaceIdInfo } - | IfaceData { ifName :: OccName, -- Type constructor + | IfaceData { ifName :: IfaceTopBndr, -- Type constructor ifCType :: Maybe CType, -- C type for CAPI FFI ifTyVars :: [IfaceTvBndr], -- Type variables ifRoles :: [Role], -- Roles @@ -87,355 +103,115 @@ data IfaceDecl ifPromotable :: Bool, -- Promotable to kind level? ifGadtSyntax :: Bool, -- True <=> declared using -- GADT syntax - ifAxiom :: Maybe IfExtName -- The axiom, for a newtype, - -- or data/newtype family instance + ifParent :: IfaceTyConParent -- The axiom, for a newtype, + -- or data/newtype family instance } - | IfaceSyn { ifName :: OccName, -- Type constructor + | IfaceSyn { ifName :: IfaceTopBndr, -- Type constructor ifTyVars :: [IfaceTvBndr], -- Type variables ifRoles :: [Role], -- Roles ifSynKind :: IfaceKind, -- Kind of the *rhs* (not of the tycon) ifSynRhs :: IfaceSynTyConRhs } - | IfaceClass { ifCtxt :: IfaceContext, -- Context... 
- ifName :: OccName, -- Name of the class TyCon - ifTyVars :: [IfaceTvBndr], -- Type variables - ifRoles :: [Role], -- Roles - ifFDs :: [FunDep FastString], -- Functional dependencies - ifATs :: [IfaceAT], -- Associated type families - ifSigs :: [IfaceClassOp], -- Method signatures - ifMinDef :: BooleanFormula OccName, -- Minimal complete definition - ifRec :: RecFlag -- Is newtype/datatype associated - -- with the class recursive? + | IfaceClass { ifCtxt :: IfaceContext, -- Context... + ifName :: IfaceTopBndr, -- Name of the class TyCon + ifTyVars :: [IfaceTvBndr], -- Type variables + ifRoles :: [Role], -- Roles + ifFDs :: [FunDep FastString], -- Functional dependencies + ifATs :: [IfaceAT], -- Associated type families + ifSigs :: [IfaceClassOp], -- Method signatures + ifMinDef :: BooleanFormula IfLclName, -- Minimal complete definition + ifRec :: RecFlag -- Is newtype/datatype associated + -- with the class recursive? } - | IfaceAxiom { ifName :: OccName, -- Axiom name + | IfaceAxiom { ifName :: IfaceTopBndr, -- Axiom name ifTyCon :: IfaceTyCon, -- LHS TyCon ifRole :: Role, -- Role of axiom ifAxBranches :: [IfaceAxBranch] -- Branches } - | IfaceForeign { ifName :: OccName, -- Needs expanding when we move + | IfaceForeign { ifName :: IfaceTopBndr, -- Needs expanding when we move -- beyond .NET ifExtName :: Maybe FastString } - | IfacePatSyn { ifName :: OccName, -- Name of the pattern synonym - ifPatHasWrapper :: Bool, + | IfacePatSyn { ifName :: IfaceTopBndr, -- Name of the pattern synonym ifPatIsInfix :: Bool, + ifPatMatcher :: IfExtName, + ifPatWrapper :: Maybe IfExtName, + -- Everything below is redundant, + -- but needed to implement pprIfaceDecl ifPatUnivTvs :: [IfaceTvBndr], ifPatExTvs :: [IfaceTvBndr], ifPatProvCtxt :: IfaceContext, ifPatReqCtxt :: IfaceContext, - ifPatArgs :: [IfaceIdBndr], + ifPatArgs :: [IfaceType], ifPatTy :: IfaceType } --- A bit of magic going on here: there's no need to store the OccName --- for a decl on the disk, since we can infer the namespace from the --- context; however it is useful to have the OccName in the IfaceDecl --- to avoid re-building it in various places. So we build the OccName --- when de-serialising. - -instance Binary IfaceDecl where - put_ bh (IfaceId name ty details idinfo) = do - putByte bh 0 - put_ bh (occNameFS name) - put_ bh ty - put_ bh details - put_ bh idinfo - - put_ _ (IfaceForeign _ _) = - error "Binary.put_(IfaceDecl): IfaceForeign" - - put_ bh (IfaceData a1 a2 a3 a4 a5 a6 a7 a8 a9 a10) = do - putByte bh 2 - put_ bh (occNameFS a1) - put_ bh a2 - put_ bh a3 - put_ bh a4 - put_ bh a5 - put_ bh a6 - put_ bh a7 - put_ bh a8 - put_ bh a9 - put_ bh a10 - - put_ bh (IfaceSyn a1 a2 a3 a4 a5) = do - putByte bh 3 - put_ bh (occNameFS a1) - put_ bh a2 - put_ bh a3 - put_ bh a4 - put_ bh a5 - - put_ bh (IfaceClass a1 a2 a3 a4 a5 a6 a7 a8 a9) = do - putByte bh 4 - put_ bh a1 - put_ bh (occNameFS a2) - put_ bh a3 - put_ bh a4 - put_ bh a5 - put_ bh a6 - put_ bh a7 - put_ bh a8 - put_ bh a9 - - put_ bh (IfaceAxiom a1 a2 a3 a4) = do - putByte bh 5 - put_ bh (occNameFS a1) - put_ bh a2 - put_ bh a3 - put_ bh a4 - put_ bh (IfacePatSyn name a2 a3 a4 a5 a6 a7 a8 a9) = do - putByte bh 6 - put_ bh (occNameFS name) - put_ bh a2 - put_ bh a3 - put_ bh a4 - put_ bh a5 - put_ bh a6 - put_ bh a7 - put_ bh a8 - put_ bh a9 - - get bh = do - h <- getByte bh - case h of - 0 -> do name <- get bh - ty <- get bh - details <- get bh - idinfo <- get bh - occ <- return $! 
mkOccNameFS varName name - return (IfaceId occ ty details idinfo) - 1 -> error "Binary.get(TyClDecl): ForeignType" - 2 -> do a1 <- get bh - a2 <- get bh - a3 <- get bh - a4 <- get bh - a5 <- get bh - a6 <- get bh - a7 <- get bh - a8 <- get bh - a9 <- get bh - a10 <- get bh - occ <- return $! mkOccNameFS tcName a1 - return (IfaceData occ a2 a3 a4 a5 a6 a7 a8 a9 a10) - 3 -> do a1 <- get bh - a2 <- get bh - a3 <- get bh - a4 <- get bh - a5 <- get bh - occ <- return $! mkOccNameFS tcName a1 - return (IfaceSyn occ a2 a3 a4 a5) - 4 -> do a1 <- get bh - a2 <- get bh - a3 <- get bh - a4 <- get bh - a5 <- get bh - a6 <- get bh - a7 <- get bh - a8 <- get bh - a9 <- get bh - occ <- return $! mkOccNameFS clsName a2 - return (IfaceClass a1 occ a3 a4 a5 a6 a7 a8 a9) - 5 -> do a1 <- get bh - a2 <- get bh - a3 <- get bh - a4 <- get bh - occ <- return $! mkOccNameFS tcName a1 - return (IfaceAxiom occ a2 a3 a4) - 6 -> do a1 <- get bh - a2 <- get bh - a3 <- get bh - a4 <- get bh - a5 <- get bh - a6 <- get bh - a7 <- get bh - a8 <- get bh - a9 <- get bh - occ <- return $! mkOccNameFS dataName a1 - return (IfacePatSyn occ a2 a3 a4 a5 a6 a7 a8 a9) - _ -> panic (unwords ["Unknown IfaceDecl tag:", show h]) +data IfaceTyConParent + = IfNoParent + | IfDataInstance IfExtName + IfaceTyCon + IfaceTcArgs data IfaceSynTyConRhs = IfaceOpenSynFamilyTyCon - | IfaceClosedSynFamilyTyCon IfExtName -- name of associated axiom + | IfaceClosedSynFamilyTyCon IfExtName -- name of associated axiom + [IfaceAxBranch] -- for pretty printing purposes only | IfaceAbstractClosedSynFamilyTyCon | IfaceSynonymTyCon IfaceType + | IfaceBuiltInSynFamTyCon -- for pretty printing purposes only -instance Binary IfaceSynTyConRhs where - put_ bh IfaceOpenSynFamilyTyCon = putByte bh 0 - put_ bh (IfaceClosedSynFamilyTyCon ax) = putByte bh 1 >> put_ bh ax - put_ bh IfaceAbstractClosedSynFamilyTyCon = putByte bh 2 - put_ bh (IfaceSynonymTyCon ty) = putByte bh 3 >> put_ bh ty - - get bh = do { h <- getByte bh - ; case h of - 0 -> return IfaceOpenSynFamilyTyCon - 1 -> do { ax <- get bh - ; return (IfaceClosedSynFamilyTyCon ax) } - 2 -> return IfaceAbstractClosedSynFamilyTyCon - _ -> do { ty <- get bh - ; return (IfaceSynonymTyCon ty) } } - -data IfaceClassOp = IfaceClassOp OccName DefMethSpec IfaceType +data IfaceClassOp = IfaceClassOp IfaceTopBndr DefMethSpec IfaceType -- Nothing => no default method -- Just False => ordinary polymorphic default method -- Just True => generic default method -instance Binary IfaceClassOp where - put_ bh (IfaceClassOp n def ty) = do - put_ bh (occNameFS n) - put_ bh def - put_ bh ty - get bh = do - n <- get bh - def <- get bh - ty <- get bh - occ <- return $! 
mkOccNameFS varName n - return (IfaceClassOp occ def ty) +data IfaceAT = IfaceAT -- See Class.ClassATItem + IfaceDecl -- The associated type declaration + (Maybe IfaceType) -- Default associated type instance, if any -data IfaceAT = IfaceAT - IfaceDecl -- The associated type declaration - [IfaceAxBranch] -- Default associated type instances, if any -instance Binary IfaceAT where - put_ bh (IfaceAT dec defs) = do - put_ bh dec - put_ bh defs - get bh = do - dec <- get bh - defs <- get bh - return (IfaceAT dec defs) - -instance Outputable IfaceAxBranch where - ppr = pprAxBranch Nothing - -pprAxBranch :: Maybe IfaceTyCon -> IfaceAxBranch -> SDoc -pprAxBranch mtycon (IfaceAxBranch { ifaxbTyVars = tvs - , ifaxbLHS = pat_tys - , ifaxbRHS = ty - , ifaxbIncomps = incomps }) - = ppr tvs <+> ppr_lhs <+> char '=' <+> ppr ty $+$ - nest 2 maybe_incomps - where - ppr_lhs - | Just tycon <- mtycon - = ppr (IfaceTyConApp tycon pat_tys) - | otherwise - = hsep (map ppr pat_tys) - - maybe_incomps - | [] <- incomps - = empty - - | otherwise - = parens (ptext (sLit "incompatible indices:") <+> ppr incomps) - --- this is just like CoAxBranch +-- This is just like CoAxBranch data IfaceAxBranch = IfaceAxBranch { ifaxbTyVars :: [IfaceTvBndr] - , ifaxbLHS :: [IfaceType] + , ifaxbLHS :: IfaceTcArgs , ifaxbRoles :: [Role] , ifaxbRHS :: IfaceType , ifaxbIncomps :: [BranchIndex] } -- See Note [Storing compatibility] in CoAxiom -instance Binary IfaceAxBranch where - put_ bh (IfaceAxBranch a1 a2 a3 a4 a5) = do - put_ bh a1 - put_ bh a2 - put_ bh a3 - put_ bh a4 - put_ bh a5 - get bh = do - a1 <- get bh - a2 <- get bh - a3 <- get bh - a4 <- get bh - a5 <- get bh - return (IfaceAxBranch a1 a2 a3 a4 a5) - data IfaceConDecls = IfAbstractTyCon Bool -- c.f TyCon.AbstractTyCon | IfDataFamTyCon -- Data family | IfDataTyCon [IfaceConDecl] -- Data type decls | IfNewTyCon IfaceConDecl -- Newtype decls -instance Binary IfaceConDecls where - put_ bh (IfAbstractTyCon d) = putByte bh 0 >> put_ bh d - put_ bh IfDataFamTyCon = putByte bh 1 - put_ bh (IfDataTyCon cs) = putByte bh 2 >> put_ bh cs - put_ bh (IfNewTyCon c) = putByte bh 3 >> put_ bh c - get bh = do - h <- getByte bh - case h of - 0 -> liftM IfAbstractTyCon $ get bh - 1 -> return IfDataFamTyCon - 2 -> liftM IfDataTyCon $ get bh - _ -> liftM IfNewTyCon $ get bh - -visibleIfConDecls :: IfaceConDecls -> [IfaceConDecl] -visibleIfConDecls (IfAbstractTyCon {}) = [] -visibleIfConDecls IfDataFamTyCon = [] -visibleIfConDecls (IfDataTyCon cs) = cs -visibleIfConDecls (IfNewTyCon c) = [c] - data IfaceConDecl = IfCon { - ifConOcc :: OccName, -- Constructor name + ifConOcc :: IfaceTopBndr, -- Constructor name ifConWrapper :: Bool, -- True <=> has a wrapper ifConInfix :: Bool, -- True <=> declared infix - ifConUnivTvs :: [IfaceTvBndr], -- Universal tyvars + + -- The universal type variables are precisely those + -- of the type constructor of this data constructor + -- This is *easy* to guarantee when creating the IfCon + -- but it's not so easy for the original TyCon/DataCon + -- So this guarantee holds for IfaceConDecl, but *not* for DataCon + ifConExTvs :: [IfaceTvBndr], -- Existential tyvars - ifConEqSpec :: [(OccName,IfaceType)], -- Equality constraints + ifConEqSpec :: IfaceEqSpec, -- Equality constraints ifConCtxt :: IfaceContext, -- Non-stupid context ifConArgTys :: [IfaceType], -- Arg types - ifConFields :: [OccName], -- ...ditto... (field labels) + ifConFields :: [IfaceTopBndr], -- ...ditto... 
(field labels) ifConStricts :: [IfaceBang]} -- Empty (meaning all lazy), -- or 1-1 corresp with arg tys -instance Binary IfaceConDecl where - put_ bh (IfCon a1 a2 a3 a4 a5 a6 a7 a8 a9 a10) = do - put_ bh a1 - put_ bh a2 - put_ bh a3 - put_ bh a4 - put_ bh a5 - put_ bh a6 - put_ bh a7 - put_ bh a8 - put_ bh a9 - put_ bh a10 - get bh = do - a1 <- get bh - a2 <- get bh - a3 <- get bh - a4 <- get bh - a5 <- get bh - a6 <- get bh - a7 <- get bh - a8 <- get bh - a9 <- get bh - a10 <- get bh - return (IfCon a1 a2 a3 a4 a5 a6 a7 a8 a9 a10) +type IfaceEqSpec = [(IfLclName,IfaceType)] data IfaceBang = IfNoBang | IfStrict | IfUnpack | IfUnpackCo IfaceCoercion -instance Binary IfaceBang where - put_ bh IfNoBang = putByte bh 0 - put_ bh IfStrict = putByte bh 1 - put_ bh IfUnpack = putByte bh 2 - put_ bh (IfUnpackCo co) = putByte bh 3 >> put_ bh co - - get bh = do - h <- getByte bh - case h of - 0 -> do return IfNoBang - 1 -> do return IfStrict - 2 -> do return IfUnpack - _ -> do { a <- get bh; return (IfUnpackCo a) } - data IfaceClsInst = IfaceClsInst { ifInstCls :: IfExtName, -- See comments with ifInstTys :: [Maybe IfaceTyCon], -- the defn of ClsInst @@ -449,21 +225,6 @@ data IfaceClsInst -- If this instance decl is *used*, we'll record a usage on the dfun; -- and if the head does not change it won't be used if it wasn't before -instance Binary IfaceClsInst where - put_ bh (IfaceClsInst cls tys dfun flag orph) = do - put_ bh cls - put_ bh tys - put_ bh dfun - put_ bh flag - put_ bh orph - get bh = do - cls <- get bh - tys <- get bh - dfun <- get bh - flag <- get bh - orph <- get bh - return (IfaceClsInst cls tys dfun flag orph) - -- The ifFamInstTys field of IfaceFamInst contains a list of the rough -- match types data IfaceFamInst @@ -473,19 +234,6 @@ data IfaceFamInst , ifFamInstOrph :: Maybe OccName -- Just like IfaceClsInst } -instance Binary IfaceFamInst where - put_ bh (IfaceFamInst fam tys name orph) = do - put_ bh fam - put_ bh tys - put_ bh name - put_ bh orph - get bh = do - fam <- get bh - tys <- get bh - name <- get bh - orph <- get bh - return (IfaceFamInst fam tys name orph) - data IfaceRule = IfaceRule { ifRuleName :: RuleName, @@ -498,82 +246,14 @@ data IfaceRule ifRuleOrph :: Maybe OccName -- Just like IfaceClsInst } -instance Binary IfaceRule where - put_ bh (IfaceRule a1 a2 a3 a4 a5 a6 a7 a8) = do - put_ bh a1 - put_ bh a2 - put_ bh a3 - put_ bh a4 - put_ bh a5 - put_ bh a6 - put_ bh a7 - put_ bh a8 - get bh = do - a1 <- get bh - a2 <- get bh - a3 <- get bh - a4 <- get bh - a5 <- get bh - a6 <- get bh - a7 <- get bh - a8 <- get bh - return (IfaceRule a1 a2 a3 a4 a5 a6 a7 a8) - data IfaceAnnotation = IfaceAnnotation { ifAnnotatedTarget :: IfaceAnnTarget, ifAnnotatedValue :: AnnPayload } -instance Outputable IfaceAnnotation where - ppr (IfaceAnnotation target value) = ppr target <+> colon <+> ppr value - -instance Binary IfaceAnnotation where - put_ bh (IfaceAnnotation a1 a2) = do - put_ bh a1 - put_ bh a2 - get bh = do - a1 <- get bh - a2 <- get bh - return (IfaceAnnotation a1 a2) - type IfaceAnnTarget = AnnTarget OccName --- We only serialise the IdDetails of top-level Ids, and even then --- we only need a very limited selection. 
Notably, none of the --- implicit ones are needed here, because they are not put it --- interface files - -data IfaceIdDetails - = IfVanillaId - | IfRecSelId IfaceTyCon Bool - | IfDFunId Int -- Number of silent args - -instance Binary IfaceIdDetails where - put_ bh IfVanillaId = putByte bh 0 - put_ bh (IfRecSelId a b) = putByte bh 1 >> put_ bh a >> put_ bh b - put_ bh (IfDFunId n) = do { putByte bh 2; put_ bh n } - get bh = do - h <- getByte bh - case h of - 0 -> return IfVanillaId - 1 -> do { a <- get bh; b <- get bh; return (IfRecSelId a b) } - _ -> do { n <- get bh; return (IfDFunId n) } - -data IfaceIdInfo - = NoInfo -- When writing interface file without -O - | HasInfo [IfaceInfoItem] -- Has info, and here it is - -instance Binary IfaceIdInfo where - put_ bh NoInfo = putByte bh 0 - put_ bh (HasInfo i) = putByte bh 1 >> lazyPut bh i -- NB lazyPut - - get bh = do - h <- getByte bh - case h of - 0 -> return NoInfo - _ -> liftM HasInfo $ lazyGet bh -- NB lazyGet - -- Here's a tricky case: -- * Compile with -O module A, and B which imports A.f -- * Change function f in A, and recompile without -O @@ -584,6 +264,10 @@ instance Binary IfaceIdInfo where -- * The version comparison sees that new (=NoInfo) differs from old (=HasInfo *) -- and so gives a new version. +data IfaceIdInfo + = NoInfo -- When writing interface file without -O + | HasInfo [IfaceInfoItem] -- Has info, and here it is + data IfaceInfoItem = HsArity Arity | HsStrictness StrictSig @@ -592,23 +276,6 @@ data IfaceInfoItem IfaceUnfolding -- See Note [Expose recursive functions] | HsNoCafRefs -instance Binary IfaceInfoItem where - put_ bh (HsArity aa) = putByte bh 0 >> put_ bh aa - put_ bh (HsStrictness ab) = putByte bh 1 >> put_ bh ab - put_ bh (HsUnfold lb ad) = putByte bh 2 >> put_ bh lb >> put_ bh ad - put_ bh (HsInline ad) = putByte bh 3 >> put_ bh ad - put_ bh HsNoCafRefs = putByte bh 4 - get bh = do - h <- getByte bh - case h of - 0 -> liftM HsArity $ get bh - 1 -> liftM HsStrictness $ get bh - 2 -> do lb <- get bh - ad <- get bh - return (HsUnfold lb ad) - 3 -> liftM HsInline $ get bh - _ -> return HsNoCafRefs - -- NB: Specialisations and rules come in separately and are -- only later attached to the Id. Partial reason: some are orphans. 
@@ -626,253 +293,18 @@ data IfaceUnfolding | IfDFunUnfold [IfaceBndr] [IfaceExpr] -instance Binary IfaceUnfolding where - put_ bh (IfCoreUnfold s e) = do - putByte bh 0 - put_ bh s - put_ bh e - put_ bh (IfInlineRule a b c d) = do - putByte bh 1 - put_ bh a - put_ bh b - put_ bh c - put_ bh d - put_ bh (IfDFunUnfold as bs) = do - putByte bh 2 - put_ bh as - put_ bh bs - put_ bh (IfCompulsory e) = do - putByte bh 3 - put_ bh e - get bh = do - h <- getByte bh - case h of - 0 -> do s <- get bh - e <- get bh - return (IfCoreUnfold s e) - 1 -> do a <- get bh - b <- get bh - c <- get bh - d <- get bh - return (IfInlineRule a b c d) - 2 -> do as <- get bh - bs <- get bh - return (IfDFunUnfold as bs) - _ -> do e <- get bh - return (IfCompulsory e) - --------------------------------- -data IfaceExpr - = IfaceLcl IfLclName - | IfaceExt IfExtName - | IfaceType IfaceType - | IfaceCo IfaceCoercion - | IfaceTuple TupleSort [IfaceExpr] -- Saturated; type arguments omitted - | IfaceLam IfaceBndr IfaceExpr - | IfaceApp IfaceExpr IfaceExpr - | IfaceCase IfaceExpr IfLclName [IfaceAlt] - | IfaceECase IfaceExpr IfaceType -- See Note [Empty case alternatives] - | IfaceLet IfaceBinding IfaceExpr - | IfaceCast IfaceExpr IfaceCoercion - | IfaceLit Literal - | IfaceFCall ForeignCall IfaceType - | IfaceTick IfaceTickish IfaceExpr -- from Tick tickish E -instance Binary IfaceExpr where - put_ bh (IfaceLcl aa) = do - putByte bh 0 - put_ bh aa - put_ bh (IfaceType ab) = do - putByte bh 1 - put_ bh ab - put_ bh (IfaceCo ab) = do - putByte bh 2 - put_ bh ab - put_ bh (IfaceTuple ac ad) = do - putByte bh 3 - put_ bh ac - put_ bh ad - put_ bh (IfaceLam ae af) = do - putByte bh 4 - put_ bh ae - put_ bh af - put_ bh (IfaceApp ag ah) = do - putByte bh 5 - put_ bh ag - put_ bh ah - put_ bh (IfaceCase ai aj ak) = do - putByte bh 6 - put_ bh ai - put_ bh aj - put_ bh ak - put_ bh (IfaceLet al am) = do - putByte bh 7 - put_ bh al - put_ bh am - put_ bh (IfaceTick an ao) = do - putByte bh 8 - put_ bh an - put_ bh ao - put_ bh (IfaceLit ap) = do - putByte bh 9 - put_ bh ap - put_ bh (IfaceFCall as at) = do - putByte bh 10 - put_ bh as - put_ bh at - put_ bh (IfaceExt aa) = do - putByte bh 11 - put_ bh aa - put_ bh (IfaceCast ie ico) = do - putByte bh 12 - put_ bh ie - put_ bh ico - put_ bh (IfaceECase a b) = do - putByte bh 13 - put_ bh a - put_ bh b - get bh = do - h <- getByte bh - case h of - 0 -> do aa <- get bh - return (IfaceLcl aa) - 1 -> do ab <- get bh - return (IfaceType ab) - 2 -> do ab <- get bh - return (IfaceCo ab) - 3 -> do ac <- get bh - ad <- get bh - return (IfaceTuple ac ad) - 4 -> do ae <- get bh - af <- get bh - return (IfaceLam ae af) - 5 -> do ag <- get bh - ah <- get bh - return (IfaceApp ag ah) - 6 -> do ai <- get bh - aj <- get bh - ak <- get bh - return (IfaceCase ai aj ak) - 7 -> do al <- get bh - am <- get bh - return (IfaceLet al am) - 8 -> do an <- get bh - ao <- get bh - return (IfaceTick an ao) - 9 -> do ap <- get bh - return (IfaceLit ap) - 10 -> do as <- get bh - at <- get bh - return (IfaceFCall as at) - 11 -> do aa <- get bh - return (IfaceExt aa) - 12 -> do ie <- get bh - ico <- get bh - return (IfaceCast ie ico) - 13 -> do a <- get bh - b <- get bh - return (IfaceECase a b) - _ -> panic ("get IfaceExpr " ++ show h) - -data IfaceTickish - = IfaceHpcTick Module Int -- from HpcTick x - | IfaceSCC CostCentre Bool Bool -- from ProfNote - -- no breakpoints: we never export these into interface files - -instance Binary IfaceTickish where - put_ bh (IfaceHpcTick m ix) = do - putByte bh 0 - put_ bh m - 
put_ bh ix - put_ bh (IfaceSCC cc tick push) = do - putByte bh 1 - put_ bh cc - put_ bh tick - put_ bh push - - get bh = do - h <- getByte bh - case h of - 0 -> do m <- get bh - ix <- get bh - return (IfaceHpcTick m ix) - 1 -> do cc <- get bh - tick <- get bh - push <- get bh - return (IfaceSCC cc tick push) - _ -> panic ("get IfaceTickish " ++ show h) - -type IfaceAlt = (IfaceConAlt, [IfLclName], IfaceExpr) - -- Note: IfLclName, not IfaceBndr (and same with the case binder) - -- We reconstruct the kind/type of the thing from the context - -- thus saving bulk in interface files - -data IfaceConAlt = IfaceDefault - | IfaceDataAlt IfExtName - | IfaceLitAlt Literal - -instance Binary IfaceConAlt where - put_ bh IfaceDefault = putByte bh 0 - put_ bh (IfaceDataAlt aa) = putByte bh 1 >> put_ bh aa - put_ bh (IfaceLitAlt ac) = putByte bh 2 >> put_ bh ac - get bh = do - h <- getByte bh - case h of - 0 -> return IfaceDefault - 1 -> liftM IfaceDataAlt $ get bh - _ -> liftM IfaceLitAlt $ get bh - -data IfaceBinding - = IfaceNonRec IfaceLetBndr IfaceExpr - | IfaceRec [(IfaceLetBndr, IfaceExpr)] - -instance Binary IfaceBinding where - put_ bh (IfaceNonRec aa ab) = putByte bh 0 >> put_ bh aa >> put_ bh ab - put_ bh (IfaceRec ac) = putByte bh 1 >> put_ bh ac - get bh = do - h <- getByte bh - case h of - 0 -> do { aa <- get bh; ab <- get bh; return (IfaceNonRec aa ab) } - _ -> do { ac <- get bh; return (IfaceRec ac) } - --- IfaceLetBndr is like IfaceIdBndr, but has IdInfo too --- It's used for *non-top-level* let/rec binders --- See Note [IdInfo on nested let-bindings] -data IfaceLetBndr = IfLetBndr IfLclName IfaceType IfaceIdInfo +-- We only serialise the IdDetails of top-level Ids, and even then +-- we only need a very limited selection. Notably, none of the +-- implicit ones are needed here, because they are not put it +-- interface files -instance Binary IfaceLetBndr where - put_ bh (IfLetBndr a b c) = do - put_ bh a - put_ bh b - put_ bh c - get bh = do a <- get bh - b <- get bh - c <- get bh - return (IfLetBndr a b c) +data IfaceIdDetails + = IfVanillaId + | IfRecSelId IfaceTyCon Bool + | IfDFunId Int -- Number of silent args \end{code} -Note [Empty case alternatives] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In IfaceSyn an IfaceCase does not record the types of the alternatives, -unlike CorSyn Case. But we need this type if the alternatives are empty. -Hence IfaceECase. See Note [Empty case alternatives] in CoreSyn. - -Note [Expose recursive functions] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -For supercompilation we want to put *all* unfoldings in the interface -file, even for functions that are recursive (or big). So we need to -know when an unfolding belongs to a loop-breaker so that we can refrain -from inlining it (except during supercompilation). - -Note [IdInfo on nested let-bindings] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Occasionally we want to preserve IdInfo on nested let bindings. The one -that came up was a NOINLINE pragma on a let-binding inside an INLINE -function. The user (Duncan Coutts) really wanted the NOINLINE control -to cross the separate compilation boundary. 
- -In general we retain all info that is left by CoreTidy.tidyLetBndr, since -that is what is seen by importing module with --make Note [Orphans]: the ifInstOrph and ifRuleOrph fields ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -949,10 +381,22 @@ Note [Versioning of instances] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ See [http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/RecompilationAvoidance#Instances] + +%************************************************************************ +%* * + Functions over declarations +%* * +%************************************************************************ + \begin{code} --- ----------------------------------------------------------------------------- --- Utils on IfaceSyn +visibleIfConDecls :: IfaceConDecls -> [IfaceConDecl] +visibleIfConDecls (IfAbstractTyCon {}) = [] +visibleIfConDecls IfDataFamTyCon = [] +visibleIfConDecls (IfDataTyCon cs) = cs +visibleIfConDecls (IfNewTyCon c) = [c] +\end{code} +\begin{code} ifaceDeclImplicitBndrs :: IfaceDecl -> [OccName] -- *Excludes* the 'main' name, but *includes* the implicitly-bound names -- Deeply revolting, because it has to predict what gets bound, @@ -1015,11 +459,6 @@ ifaceDeclImplicitBndrs (IfaceClass {ifCtxt = sc_ctxt, ifName = cls_tc_occ, dc_occ = mkClassDataConOcc cls_tc_occ is_newtype = n_sigs + n_ctxt == 1 -- Sigh -ifaceDeclImplicitBndrs (IfacePatSyn{ ifName = ps_occ, ifPatHasWrapper = has_wrapper }) - = [wrap_occ | has_wrapper] - where - wrap_occ = mkDataConWrapperOcc ps_occ -- Id namespace - ifaceDeclImplicitBndrs _ = [] -- ----------------------------------------------------------------------------- @@ -1038,80 +477,308 @@ ifaceDeclFingerprints hash decl computeFingerprint' = unsafeDupablePerformIO . computeFingerprint (panic "ifaceDeclFingerprints") +\end{code} ------------------------------ Printing IfaceDecl ------------------------------ +%************************************************************************ +%* * + Expressions +%* * +%************************************************************************ -instance Outputable IfaceDecl where - ppr = pprIfaceDecl +\begin{code} +data IfaceExpr + = IfaceLcl IfLclName + | IfaceExt IfExtName + | IfaceType IfaceType + | IfaceCo IfaceCoercion + | IfaceTuple TupleSort [IfaceExpr] -- Saturated; type arguments omitted + | IfaceLam IfaceBndr IfaceExpr + | IfaceApp IfaceExpr IfaceExpr + | IfaceCase IfaceExpr IfLclName [IfaceAlt] + | IfaceECase IfaceExpr IfaceType -- See Note [Empty case alternatives] + | IfaceLet IfaceBinding IfaceExpr + | IfaceCast IfaceExpr IfaceCoercion + | IfaceLit Literal + | IfaceFCall ForeignCall IfaceType + | IfaceTick IfaceTickish IfaceExpr -- from Tick tickish E + +data IfaceTickish + = IfaceHpcTick Module Int -- from HpcTick x + | IfaceSCC CostCentre Bool Bool -- from ProfNote + -- no breakpoints: we never export these into interface files + +type IfaceAlt = (IfaceConAlt, [IfLclName], IfaceExpr) + -- Note: IfLclName, not IfaceBndr (and same with the case binder) + -- We reconstruct the kind/type of the thing from the context + -- thus saving bulk in interface files -pprIfaceDecl :: IfaceDecl -> SDoc -pprIfaceDecl (IfaceId {ifName = var, ifType = ty, - ifIdDetails = details, ifIdInfo = info}) - = sep [ pprPrefixOcc var <+> dcolon <+> ppr ty, - nest 2 (ppr details), - nest 2 (ppr info) ] +data IfaceConAlt = IfaceDefault + | IfaceDataAlt IfExtName + | IfaceLitAlt Literal -pprIfaceDecl (IfaceForeign {ifName = tycon}) - = hsep [ptext (sLit "foreign import type dotnet"), ppr tycon] +data IfaceBinding + = IfaceNonRec 
IfaceLetBndr IfaceExpr + | IfaceRec [(IfaceLetBndr, IfaceExpr)] + +-- IfaceLetBndr is like IfaceIdBndr, but has IdInfo too +-- It's used for *non-top-level* let/rec binders +-- See Note [IdInfo on nested let-bindings] +data IfaceLetBndr = IfLetBndr IfLclName IfaceType IfaceIdInfo +\end{code} + +Note [Empty case alternatives] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +In IfaceSyn an IfaceCase does not record the types of the alternatives, +unlike CorSyn Case. But we need this type if the alternatives are empty. +Hence IfaceECase. See Note [Empty case alternatives] in CoreSyn. + +Note [Expose recursive functions] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +For supercompilation we want to put *all* unfoldings in the interface +file, even for functions that are recursive (or big). So we need to +know when an unfolding belongs to a loop-breaker so that we can refrain +from inlining it (except during supercompilation). + +Note [IdInfo on nested let-bindings] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Occasionally we want to preserve IdInfo on nested let bindings. The one +that came up was a NOINLINE pragma on a let-binding inside an INLINE +function. The user (Duncan Coutts) really wanted the NOINLINE control +to cross the separate compilation boundary. -pprIfaceDecl (IfaceSyn {ifName = tycon, - ifTyVars = tyvars, - ifSynRhs = IfaceSynonymTyCon mono_ty}) - = hang (ptext (sLit "type") <+> pprIfaceDeclHead [] tycon tyvars) - 2 (vcat [equals <+> ppr mono_ty]) +In general we retain all info that is left by CoreTidy.tidyLetBndr, since +that is what is seen by importing module with --make + + +%************************************************************************ +%* * + Printing IfaceDecl +%* * +%************************************************************************ -pprIfaceDecl (IfaceSyn {ifName = tycon, ifTyVars = tyvars, - ifSynRhs = rhs, ifSynKind = kind }) - = hang (ptext (sLit "type family") <+> pprIfaceDeclHead [] tycon tyvars) - 2 (sep [dcolon <+> ppr kind, parens (pp_rhs rhs)]) +\begin{code} +pprAxBranch :: SDoc -> IfaceAxBranch -> SDoc +-- The TyCon might be local (just an OccName), or this might +-- be a branch for an imported TyCon, so it would be an ExtName +-- So it's easier to take an SDoc here +pprAxBranch pp_tc (IfaceAxBranch { ifaxbTyVars = tvs + , ifaxbLHS = pat_tys + , ifaxbRHS = rhs + , ifaxbIncomps = incomps }) + = hang (pprUserIfaceForAll tvs) + 2 (hang pp_lhs 2 (equals <+> ppr rhs)) + $+$ + nest 2 maybe_incomps where - pp_rhs IfaceOpenSynFamilyTyCon = ptext (sLit "open") - pp_rhs (IfaceClosedSynFamilyTyCon ax) = ptext (sLit "closed, axiom") <+> ppr ax - pp_rhs IfaceAbstractClosedSynFamilyTyCon = ptext (sLit "closed, abstract") - pp_rhs _ = panic "pprIfaceDecl syn" + pp_lhs = hang pp_tc 2 (pprParendIfaceTcArgs pat_tys) + maybe_incomps = ppUnless (null incomps) $ parens $ + ptext (sLit "incompatible indices:") <+> ppr incomps + +instance Outputable IfaceAnnotation where + ppr (IfaceAnnotation target value) = ppr target <+> colon <+> ppr value + +instance HasOccName IfaceClassOp where + occName (IfaceClassOp n _ _) = n + +instance HasOccName IfaceConDecl where + occName = ifConOcc -pprIfaceDecl (IfaceData {ifName = tycon, ifCType = cType, - ifCtxt = context, - ifTyVars = tyvars, ifRoles = roles, ifCons = condecls, - ifRec = isrec, ifPromotable = is_prom, - ifAxiom = mbAxiom}) - = hang (pp_nd <+> pprIfaceDeclHead context tycon tyvars) - 2 (vcat [ pprCType cType - , pprRoles roles - , pprRec isrec <> comma <+> pp_prom - , pp_condecls tycon condecls - , pprAxiom mbAxiom]) +instance HasOccName 
IfaceDecl where + occName = ifName + +instance Outputable IfaceDecl where + ppr = pprIfaceDecl showAll + +data ShowSub + = ShowSub + { ss_ppr_bndr :: OccName -> SDoc -- Pretty-printer for binders in IfaceDecl + -- See Note [Printing IfaceDecl binders] + , ss_how_much :: ShowHowMuch } + +data ShowHowMuch + = ShowHeader -- Header information only, not rhs + | ShowSome [OccName] -- [] <=> Print all sub-components + -- (n:ns) <=> print sub-component 'n' with ShowSub=ns + -- elide other sub-components to "..." + -- May 14: the list is max 1 element long at the moment + | ShowIface -- Everything including GHC-internal information (used in --show-iface) + +showAll :: ShowSub +showAll = ShowSub { ss_how_much = ShowIface, ss_ppr_bndr = ppr } + +ppShowIface :: ShowSub -> SDoc -> SDoc +ppShowIface (ShowSub { ss_how_much = ShowIface }) doc = doc +ppShowIface _ _ = empty + +ppShowRhs :: ShowSub -> SDoc -> SDoc +ppShowRhs (ShowSub { ss_how_much = ShowHeader }) _ = empty +ppShowRhs _ doc = doc + +showSub :: HasOccName n => ShowSub -> n -> Bool +showSub (ShowSub { ss_how_much = ShowHeader }) _ = False +showSub (ShowSub { ss_how_much = ShowSome (n:_) }) thing = n == occName thing +showSub (ShowSub { ss_how_much = _ }) _ = True +\end{code} + +Note [Printing IfaceDecl binders] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The binders in an IfaceDecl are just OccNames, so we don't know what module they +come from. But when we pretty-print a TyThing by converting to an IfaceDecl +(see PprTyThing), the TyThing may come from some other module so we really need +the module qualifier. We solve this by passing in a pretty-printer for the +binders. + +When printing an interface file (--show-iface), we want to print +everything unqualified, so we can just print the OccName directly. + +\begin{code} +ppr_trim :: [Maybe SDoc] -> [SDoc] +-- Collapse a group of Nothings to a single "..." +ppr_trim xs + = snd (foldr go (False, []) xs) where - pp_prom | is_prom = ptext (sLit "Promotable") - | otherwise = ptext (sLit "Not promotable") + go (Just doc) (_, so_far) = (False, doc : so_far) + go Nothing (True, so_far) = (True, so_far) + go Nothing (False, so_far) = (True, ptext (sLit "...") : so_far) + +isIfaceDataInstance :: IfaceTyConParent -> Bool +isIfaceDataInstance IfNoParent = False +isIfaceDataInstance _ = True + +pprIfaceDecl :: ShowSub -> IfaceDecl -> SDoc +-- NB: pprIfaceDecl is also used for pretty-printing TyThings in GHCi +-- See Note [Pretty-printing TyThings] in PprTyThing +pprIfaceDecl ss (IfaceData { ifName = tycon, ifCType = ctype, + ifCtxt = context, ifTyVars = tc_tyvars, + ifRoles = roles, ifCons = condecls, + ifParent = parent, ifRec = isrec, + ifGadtSyntax = gadt, + ifPromotable = is_prom }) + + | gadt_style = vcat [ pp_roles + , pp_nd <+> pp_lhs <+> pp_where + , nest 2 (vcat pp_cons) + , nest 2 $ ppShowIface ss pp_extra ] + | otherwise = vcat [ pp_roles + , hang (pp_nd <+> pp_lhs) 2 (add_bars pp_cons) + , nest 2 $ ppShowIface ss pp_extra ] + where + is_data_instance = isIfaceDataInstance parent + + gadt_style = gadt || any (not . 
isVanillaIfaceConDecl) cons + cons = visibleIfConDecls condecls + pp_where = ppWhen (gadt_style && not (null cons)) $ ptext (sLit "where") + pp_cons = ppr_trim (map show_con cons) :: [SDoc] + + pp_lhs = case parent of + IfNoParent -> pprIfaceDeclHead context ss tycon tc_tyvars + _ -> ptext (sLit "instance") <+> pprIfaceTyConParent parent + + pp_roles + | is_data_instance = empty + | otherwise = pprRoles (== Representational) (pprPrefixIfDeclBndr ss tycon) + tc_tyvars roles + -- Don't display roles for data family instances (yet) + -- See discussion on Trac #8672. + + add_bars [] = empty + add_bars (c:cs) = sep ((equals <+> c) : map (char '|' <+>) cs) + + ok_con dc = showSub ss dc || any (showSub ss) (ifConFields dc) + + show_con dc + | ok_con dc = Just $ pprIfaceConDecl ss gadt_style mk_user_con_res_ty dc + | otherwise = Nothing + + mk_user_con_res_ty :: IfaceEqSpec -> ([IfaceTvBndr], SDoc) + -- See Note [Result type of a data family GADT] + mk_user_con_res_ty eq_spec + | IfDataInstance _ tc tys <- parent + = (con_univ_tvs, pprIfaceType (IfaceTyConApp tc (substIfaceTcArgs gadt_subst tys))) + | otherwise + = (con_univ_tvs, sdocWithDynFlags (ppr_tc_app gadt_subst)) + where + gadt_subst = mkFsEnv eq_spec + done_univ_tv (tv,_) = isJust (lookupFsEnv gadt_subst tv) + con_univ_tvs = filterOut done_univ_tv tc_tyvars + + ppr_tc_app gadt_subst dflags + = pprPrefixIfDeclBndr ss tycon + <+> sep [ pprParendIfaceType (substIfaceTyVar gadt_subst tv) + | (tv,_kind) <- stripIfaceKindVars dflags tc_tyvars ] + pp_nd = case condecls of - IfAbstractTyCon dis -> ptext (sLit "abstract") <> parens (ppr dis) - IfDataFamTyCon -> ptext (sLit "data family") - IfDataTyCon _ -> ptext (sLit "data") - IfNewTyCon _ -> ptext (sLit "newtype") - -pprIfaceDecl (IfaceClass {ifCtxt = context, ifName = clas, ifTyVars = tyvars, - ifRoles = roles, ifFDs = fds, ifATs = ats, ifSigs = sigs, - ifRec = isrec}) - = hang (ptext (sLit "class") <+> pprIfaceDeclHead context clas tyvars <+> pprFundeps fds) - 2 (vcat [pprRoles roles, - pprRec isrec, - sep (map ppr ats), - sep (map ppr sigs)]) - -pprIfaceDecl (IfaceAxiom {ifName = name, ifTyCon = tycon, ifAxBranches = branches }) - = hang (ptext (sLit "axiom") <+> ppr name <> dcolon) - 2 (vcat $ map (pprAxBranch $ Just tycon) branches) - -pprIfaceDecl (IfacePatSyn { ifName = name, ifPatHasWrapper = has_wrap, - ifPatIsInfix = is_infix, - ifPatUnivTvs = _univ_tvs, ifPatExTvs = _ex_tvs, - ifPatProvCtxt = prov_ctxt, ifPatReqCtxt = req_ctxt, - ifPatArgs = args, - ifPatTy = ty }) + IfAbstractTyCon d -> ptext (sLit "abstract") <> ppShowIface ss (parens (ppr d)) + IfDataFamTyCon -> ptext (sLit "data family") + IfDataTyCon _ -> ptext (sLit "data") + IfNewTyCon _ -> ptext (sLit "newtype") + + pp_extra = vcat [pprCType ctype, pprRec isrec, pp_prom] + + pp_prom | is_prom = ptext (sLit "Promotable") + | otherwise = empty + + +pprIfaceDecl ss (IfaceClass { ifATs = ats, ifSigs = sigs, ifRec = isrec + , ifCtxt = context, ifName = clas + , ifTyVars = tyvars, ifRoles = roles + , ifFDs = fds }) + = vcat [ pprRoles (== Nominal) (pprPrefixIfDeclBndr ss clas) tyvars roles + , ptext (sLit "class") <+> pprIfaceDeclHead context ss clas tyvars + <+> pprFundeps fds <+> pp_where + , nest 2 (vcat [vcat asocs, vcat dsigs, pprec])] + where + pp_where = ppShowRhs ss $ ppUnless (null sigs && null ats) (ptext (sLit "where")) + + asocs = ppr_trim $ map maybeShowAssoc ats + dsigs = ppr_trim $ map maybeShowSig sigs + pprec = ppShowIface ss (pprRec isrec) + + maybeShowAssoc :: IfaceAT -> Maybe SDoc + maybeShowAssoc asc@(IfaceAT d 
_) + | showSub ss d = Just $ pprIfaceAT ss asc + | otherwise = Nothing + + maybeShowSig :: IfaceClassOp -> Maybe SDoc + maybeShowSig sg + | showSub ss sg = Just $ pprIfaceClassOp ss sg + | otherwise = Nothing + +pprIfaceDecl ss (IfaceSyn { ifName = tc + , ifTyVars = tv + , ifSynRhs = IfaceSynonymTyCon mono_ty }) + = hang (ptext (sLit "type") <+> pprIfaceDeclHead [] ss tc tv <+> equals) + 2 (sep [pprIfaceForAll tvs, pprIfaceContextArr theta, ppr tau]) + where + (tvs, theta, tau) = splitIfaceSigmaTy mono_ty + +pprIfaceDecl ss (IfaceSyn { ifName = tycon, ifTyVars = tyvars + , ifSynRhs = rhs, ifSynKind = kind }) + = vcat [ hang (text "type family" <+> pprIfaceDeclHead [] ss tycon tyvars <+> dcolon) + 2 (ppr kind <+> ppShowRhs ss (pp_rhs rhs)) + , ppShowRhs ss (nest 2 (pp_branches rhs)) ] + where + pp_rhs IfaceOpenSynFamilyTyCon = ppShowIface ss (ptext (sLit "open")) + pp_rhs IfaceAbstractClosedSynFamilyTyCon = ppShowIface ss (ptext (sLit "closed, abstract")) + pp_rhs (IfaceClosedSynFamilyTyCon _ (_:_)) = ptext (sLit "where") + pp_rhs IfaceBuiltInSynFamTyCon = ppShowIface ss (ptext (sLit "built-in")) + pp_rhs _ = panic "pprIfaceDecl syn" + + pp_branches (IfaceClosedSynFamilyTyCon ax brs) + = vcat (map (pprAxBranch (pprPrefixIfDeclBndr ss tycon)) brs) + $$ ppShowIface ss (ptext (sLit "axiom") <+> ppr ax) + pp_branches _ = empty + +pprIfaceDecl _ (IfacePatSyn { ifName = name, ifPatWrapper = wrapper, + ifPatIsInfix = is_infix, + ifPatUnivTvs = _univ_tvs, ifPatExTvs = _ex_tvs, + ifPatProvCtxt = prov_ctxt, ifPatReqCtxt = req_ctxt, + ifPatArgs = args, + ifPatTy = ty }) = pprPatSynSig name has_wrap args' ty' (pprCtxt prov_ctxt) (pprCtxt req_ctxt) where - args' = case (is_infix, map snd args) of + has_wrap = isJust wrapper + args' = case (is_infix, args) of (True, [left_ty, right_ty]) -> InfixPatSyn (pprParendIfaceType left_ty) (pprParendIfaceType right_ty) (_, tys) -> @@ -1122,70 +789,105 @@ pprIfaceDecl (IfacePatSyn { ifName = name, ifPatHasWrapper = has_wrap, pprCtxt [] = Nothing pprCtxt ctxt = Just $ pprIfaceContext ctxt +pprIfaceDecl ss (IfaceId { ifName = var, ifType = ty, + ifIdDetails = details, ifIdInfo = info }) + = vcat [ hang (pprPrefixIfDeclBndr ss var <+> dcolon) + 2 (pprIfaceSigmaType ty) + , ppShowIface ss (ppr details) + , ppShowIface ss (ppr info) ] + +pprIfaceDecl _ (IfaceForeign {ifName = tycon}) + = hsep [ptext (sLit "foreign import type dotnet"), ppr tycon] + +pprIfaceDecl _ (IfaceAxiom { ifName = name, ifTyCon = tycon + , ifAxBranches = branches }) + = hang (ptext (sLit "axiom") <+> ppr name <> dcolon) + 2 (vcat $ map (pprAxBranch (ppr tycon)) branches) + + pprCType :: Maybe CType -> SDoc -pprCType Nothing = ptext (sLit "No C type associated") +pprCType Nothing = empty pprCType (Just cType) = ptext (sLit "C type:") <+> ppr cType -pprRoles :: [Role] -> SDoc -pprRoles [] = empty -pprRoles roles = text "Roles:" <+> ppr roles +-- if, for each role, suppress_if role is True, then suppress the role +-- output +pprRoles :: (Role -> Bool) -> SDoc -> [IfaceTvBndr] -> [Role] -> SDoc +pprRoles suppress_if tyCon tyvars roles + = sdocWithDynFlags $ \dflags -> + let froles = suppressIfaceKinds dflags tyvars roles + in ppUnless (all suppress_if roles || null froles) $ + ptext (sLit "type role") <+> tyCon <+> hsep (map ppr froles) pprRec :: RecFlag -> SDoc -pprRec isrec = ptext (sLit "RecFlag") <+> ppr isrec +pprRec NonRecursive = empty +pprRec Recursive = ptext (sLit "RecFlag: Recursive") -pprAxiom :: Maybe Name -> SDoc -pprAxiom Nothing = ptext (sLit "FamilyInstance: none") -pprAxiom (Just ax) 
= ptext (sLit "FamilyInstance:") <+> ppr ax +pprInfixIfDeclBndr, pprPrefixIfDeclBndr :: ShowSub -> OccName -> SDoc +pprInfixIfDeclBndr (ShowSub { ss_ppr_bndr = ppr_bndr }) occ + = pprInfixVar (isSymOcc occ) (ppr_bndr occ) +pprPrefixIfDeclBndr (ShowSub { ss_ppr_bndr = ppr_bndr }) occ + = parenSymOcc occ (ppr_bndr occ) instance Outputable IfaceClassOp where - ppr (IfaceClassOp n dm ty) = ppr n <+> ppr dm <+> dcolon <+> ppr ty + ppr = pprIfaceClassOp showAll + +pprIfaceClassOp :: ShowSub -> IfaceClassOp -> SDoc +pprIfaceClassOp ss (IfaceClassOp n dm ty) = hang opHdr 2 (pprIfaceSigmaType ty) + where opHdr = pprPrefixIfDeclBndr ss n + <+> ppShowIface ss (ppr dm) <+> dcolon instance Outputable IfaceAT where - ppr (IfaceAT d defs) - = vcat [ ppr d - , ppUnless (null defs) $ nest 2 $ - ptext (sLit "Defaults:") <+> vcat (map ppr defs) ] - -pprIfaceDeclHead :: IfaceContext -> OccName -> [IfaceTvBndr] -> SDoc -pprIfaceDeclHead context thing tyvars - = hsep [pprIfaceContextArr context, parenSymOcc thing (ppr thing), - pprIfaceTvBndrs tyvars] - -pp_condecls :: OccName -> IfaceConDecls -> SDoc -pp_condecls _ (IfAbstractTyCon {}) = empty -pp_condecls _ IfDataFamTyCon = empty -pp_condecls tc (IfNewTyCon c) = equals <+> pprIfaceConDecl tc c -pp_condecls tc (IfDataTyCon cs) = equals <+> sep (punctuate (ptext (sLit " |")) - (map (pprIfaceConDecl tc) cs)) - -mkIfaceEqPred :: IfaceType -> IfaceType -> IfacePredType --- IA0_NOTE: This is wrong, but only used for pretty-printing. -mkIfaceEqPred ty1 ty2 = IfaceTyConApp (IfaceTc eqTyConName) [ty1, ty2] - -pprIfaceConDecl :: OccName -> IfaceConDecl -> SDoc -pprIfaceConDecl tc - (IfCon { ifConOcc = name, ifConInfix = is_infix, ifConWrapper = has_wrap, - ifConUnivTvs = univ_tvs, ifConExTvs = ex_tvs, + ppr = pprIfaceAT showAll + +pprIfaceAT :: ShowSub -> IfaceAT -> SDoc +pprIfaceAT ss (IfaceAT d mb_def) + = vcat [ pprIfaceDecl ss d + , case mb_def of + Nothing -> empty + Just rhs -> nest 2 $ + ptext (sLit "Default:") <+> ppr rhs ] + +instance Outputable IfaceTyConParent where + ppr p = pprIfaceTyConParent p + +pprIfaceTyConParent :: IfaceTyConParent -> SDoc +pprIfaceTyConParent IfNoParent + = empty +pprIfaceTyConParent (IfDataInstance _ tc tys) + = sdocWithDynFlags $ \dflags -> + let ftys = stripKindArgs dflags tys + in pprIfaceTypeApp tc ftys + +pprIfaceDeclHead :: IfaceContext -> ShowSub -> OccName -> [IfaceTvBndr] -> SDoc +pprIfaceDeclHead context ss tc_occ tv_bndrs + = sdocWithDynFlags $ \ dflags -> + sep [ pprIfaceContextArr context + , pprPrefixIfDeclBndr ss tc_occ + <+> pprIfaceTvBndrs (stripIfaceKindVars dflags tv_bndrs) ] + +isVanillaIfaceConDecl :: IfaceConDecl -> Bool +isVanillaIfaceConDecl (IfCon { ifConExTvs = ex_tvs + , ifConEqSpec = eq_spec + , ifConCtxt = ctxt }) + = (null ex_tvs) && (null eq_spec) && (null ctxt) + +pprIfaceConDecl :: ShowSub -> Bool + -> (IfaceEqSpec -> ([IfaceTvBndr], SDoc)) + -> IfaceConDecl -> SDoc +pprIfaceConDecl ss gadt_style mk_user_con_res_ty + (IfCon { ifConOcc = name, ifConInfix = is_infix, + ifConExTvs = ex_tvs, ifConEqSpec = eq_spec, ifConCtxt = ctxt, ifConArgTys = arg_tys, - ifConStricts = strs, ifConFields = fields }) - = sep [main_payload, - if is_infix then ptext (sLit "Infix") else empty, - if has_wrap then ptext (sLit "HasWrapper") else empty, - ppUnless (null strs) $ - nest 2 (ptext (sLit "Stricts:") <+> hsep (map ppr_bang strs)), - ppUnless (null fields) $ - nest 2 (ptext (sLit "Fields:") <+> hsep (map ppr fields))] + ifConStricts = stricts, ifConFields = labels }) + | gadt_style = pp_prefix_con <+> dcolon <+> 
ppr_ty + | otherwise = ppr_fields tys_w_strs where - ppr_bang IfNoBang = char '_' -- Want to see these - ppr_bang IfStrict = char '!' - ppr_bang IfUnpack = ptext (sLit "!!") - ppr_bang (IfUnpackCo co) = ptext (sLit "!!") <> pprParendIfaceCoercion co - - main_payload = ppr name <+> dcolon <+> - pprIfaceForAllPart (univ_tvs ++ ex_tvs) (eq_ctxt ++ ctxt) pp_tau + tys_w_strs :: [(IfaceBang, IfaceType)] + tys_w_strs = zip stricts arg_tys + pp_prefix_con = pprPrefixIfDeclBndr ss name - eq_ctxt = [(mkIfaceEqPred (IfaceTyVar (occNameFS tv)) ty) - | (tv,ty) <- eq_spec] + (univ_tvs, pp_res_ty) = mk_user_con_res_ty eq_spec + ppr_ty = pprIfaceForAllPart (univ_tvs ++ ex_tvs) ctxt pp_tau -- A bit gruesome this, but we can't form the full con_tau, and ppr it, -- because we don't have a Name for the tycon, only an OccName @@ -1193,7 +895,26 @@ pprIfaceConDecl tc (t:ts) -> fsep (t : map (arrow <+>) ts) [] -> panic "pp_con_taus" - pp_res_ty = ppr tc <+> fsep [ppr tv | (tv,_) <- univ_tvs] + ppr_bang IfNoBang = ppWhen opt_PprStyle_Debug $ char '_' + ppr_bang IfStrict = char '!' + ppr_bang IfUnpack = ptext (sLit "{-# UNPACK #-}") + ppr_bang (IfUnpackCo co) = ptext (sLit "! {-# UNPACK #-}") <> + pprParendIfaceCoercion co + + pprParendBangTy (bang, ty) = ppr_bang bang <> pprParendIfaceType ty + pprBangTy (bang, ty) = ppr_bang bang <> ppr ty + + maybe_show_label (lbl,bty) + | showSub ss lbl = Just (pprPrefixIfDeclBndr ss lbl <+> dcolon <+> pprBangTy bty) + | otherwise = Nothing + + ppr_fields [ty1, ty2] + | is_infix && null labels + = sep [pprParendBangTy ty1, pprInfixIfDeclBndr ss name, pprParendBangTy ty2] + ppr_fields fields + | null labels = pp_prefix_con <+> sep (map pprParendBangTy fields) + | otherwise = pp_prefix_con <+> (braces $ sep $ punctuate comma $ ppr_trim $ + map maybe_show_label (zip labels fields)) instance Outputable IfaceRule where ppr (IfaceRule { ifRuleName = name, ifActivation = act, ifRuleBndrs = bndrs, @@ -1205,15 +926,15 @@ instance Outputable IfaceRule where ] instance Outputable IfaceClsInst where - ppr (IfaceClsInst {ifDFun = dfun_id, ifOFlag = flag, - ifInstCls = cls, ifInstTys = mb_tcs}) + ppr (IfaceClsInst { ifDFun = dfun_id, ifOFlag = flag + , ifInstCls = cls, ifInstTys = mb_tcs}) = hang (ptext (sLit "instance") <+> ppr flag <+> ppr cls <+> brackets (pprWithCommas ppr_rough mb_tcs)) 2 (equals <+> ppr dfun_id) instance Outputable IfaceFamInst where - ppr (IfaceFamInst {ifFamInstFam = fam, ifFamInstTys = mb_tcs, - ifFamInstAxiom = tycon_ax}) + ppr (IfaceFamInst { ifFamInstFam = fam, ifFamInstTys = mb_tcs + , ifFamInstAxiom = tycon_ax}) = hang (ptext (sLit "family instance") <+> ppr fam <+> pprWithCommas (brackets . ppr_rough) mb_tcs) 2 (equals <+> ppr tycon_ax) @@ -1223,6 +944,26 @@ ppr_rough Nothing = dot ppr_rough (Just tc) = ppr tc \end{code} +Note [Result type of a data family GADT] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Consider + data family T a + data instance T (p,q) where + T1 :: T (Int, Maybe c) + T2 :: T (Bool, q) + +The IfaceDecl actually looks like + + data TPr p q where + T1 :: forall p q. forall c. (p~Int,q~Maybe c) => TPr p q + T2 :: forall p q. (p~Bool) => TPr p q + +To reconstruct the result types for T1 and T2 that we +want to pretty print, we substitute the eq-spec +[p->Int, q->Maybe c] in the arg pattern (p,q) to give + T (Int, Maybe c) +Remember that in IfaceSyn, the TyCon and DataCon share the same +universal type variables. 
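
To make the substitution in this Note concrete, here is a minimal, self-contained sketch. The Ty type, applyEqSpec, and the string names are invented stand-ins for illustration only; the real machinery is mkIfaceTySubst/substIfaceType in IfaceType.

\begin{code}
import qualified Data.Map as M

data Ty = TyVar String | TyConApp String [Ty]
  deriving Show

-- The eq-spec of T1 above: [p -> Int, q -> Maybe c]
eqSpecT1 :: M.Map String Ty
eqSpecT1 = M.fromList
  [ ("p", TyConApp "Int" [])
  , ("q", TyConApp "Maybe" [TyVar "c"]) ]

-- Apply the eq-spec; the arg pattern contains no binders, so plain
-- structural replacement is enough.
applyEqSpec :: M.Map String Ty -> Ty -> Ty
applyEqSpec env (TyVar v)       = M.findWithDefault (TyVar v) v env
applyEqSpec env (TyConApp c ts) = TyConApp c (map (applyEqSpec env) ts)

main :: IO ()
main = print (applyEqSpec eqSpecT1 (TyConApp "T" [TyConApp "(,)" [TyVar "p", TyVar "q"]]))
  -- prints the model of T (Int, Maybe c), the result type we want for T1
\end{code}
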
----------------------------- Printing IfaceExpr ------------------------------------ @@ -1230,6 +971,9 @@ ppr_rough (Just tc) = ppr tc instance Outputable IfaceExpr where ppr e = pprIfaceExpr noParens e +noParens :: SDoc -> SDoc +noParens pp = pp + pprParendIfaceExpr :: IfaceExpr -> SDoc pprParendIfaceExpr = pprIfaceExpr parens @@ -1355,17 +1099,22 @@ instance Outputable IfaceUnfolding where pprParendIfaceExpr e] ppr (IfDFunUnfold bs es) = hang (ptext (sLit "DFun:") <+> sep (map ppr bs) <> dot) 2 (sep (map pprParendIfaceExpr es)) +\end{code} --- ----------------------------------------------------------------------------- --- | Finding the Names in IfaceSyn +%************************************************************************ +%* * + Finding the Names in IfaceSyn +%* * +%************************************************************************ --- This is used for dependency analysis in MkIface, so that we --- fingerprint a declaration before the things that depend on it. It --- is specific to interface-file fingerprinting in the sense that we --- don't collect *all* Names: for example, the DFun of an instance is --- recorded textually rather than by its fingerprint when --- fingerprinting the instance, so DFuns are not dependencies. +This is used for dependency analysis in MkIface, so that we +fingerprint a declaration before the things that depend on it. It +is specific to interface-file fingerprinting in the sense that we +don't collect *all* Names: for example, the DFun of an instance is +recorded textually rather than by its fingerprint when +fingerprinting the instance, so DFuns are not dependencies. +\begin{code} freeNamesIfDecl :: IfaceDecl -> NameSet freeNamesIfDecl (IfaceId _s t d i) = freeNamesIfType t &&& @@ -1375,7 +1124,7 @@ freeNamesIfDecl IfaceForeign{} = emptyNameSet freeNamesIfDecl d@IfaceData{} = freeNamesIfTvBndrs (ifTyVars d) &&& - maybe emptyNameSet unitNameSet (ifAxiom d) &&& + freeNamesIfaceTyConParent (ifParent d) &&& freeNamesIfContext (ifCtxt d) &&& freeNamesIfConDecls (ifCons d) freeNamesIfDecl d@IfaceSyn{} = @@ -1392,11 +1141,13 @@ freeNamesIfDecl d@IfaceAxiom{} = freeNamesIfTc (ifTyCon d) &&& fnList freeNamesIfAxBranch (ifAxBranches d) freeNamesIfDecl d@IfacePatSyn{} = + unitNameSet (ifPatMatcher d) &&& + maybe emptyNameSet unitNameSet (ifPatWrapper d) &&& freeNamesIfTvBndrs (ifPatUnivTvs d) &&& freeNamesIfTvBndrs (ifPatExTvs d) &&& freeNamesIfContext (ifPatProvCtxt d) &&& freeNamesIfContext (ifPatReqCtxt d) &&& - fnList freeNamesIfType (map snd (ifPatArgs d)) &&& + fnList freeNamesIfType (ifPatArgs d) &&& freeNamesIfType (ifPatTy d) freeNamesIfAxBranch :: IfaceAxBranch -> NameSet @@ -1404,7 +1155,7 @@ freeNamesIfAxBranch (IfaceAxBranch { ifaxbTyVars = tyvars , ifaxbLHS = lhs , ifaxbRHS = rhs }) = freeNamesIfTvBndrs tyvars &&& - fnList freeNamesIfType lhs &&& + freeNamesIfTcArgs lhs &&& freeNamesIfType rhs freeNamesIfIdDetails :: IfaceIdDetails -> NameSet @@ -1415,16 +1166,20 @@ freeNamesIfIdDetails _ = emptyNameSet freeNamesIfSynRhs :: IfaceSynTyConRhs -> NameSet freeNamesIfSynRhs (IfaceSynonymTyCon ty) = freeNamesIfType ty freeNamesIfSynRhs IfaceOpenSynFamilyTyCon = emptyNameSet -freeNamesIfSynRhs (IfaceClosedSynFamilyTyCon ax) = unitNameSet ax +freeNamesIfSynRhs (IfaceClosedSynFamilyTyCon ax br) + = unitNameSet ax &&& fnList freeNamesIfAxBranch br freeNamesIfSynRhs IfaceAbstractClosedSynFamilyTyCon = emptyNameSet +freeNamesIfSynRhs IfaceBuiltInSynFamTyCon = emptyNameSet freeNamesIfContext :: IfaceContext -> NameSet freeNamesIfContext = fnList 
freeNamesIfType freeNamesIfAT :: IfaceAT -> NameSet -freeNamesIfAT (IfaceAT decl defs) +freeNamesIfAT (IfaceAT decl mb_def) = freeNamesIfDecl decl &&& - fnList freeNamesIfAxBranch defs + case mb_def of + Nothing -> emptyNameSet + Just rhs -> freeNamesIfType rhs freeNamesIfClsSig :: IfaceClassOp -> NameSet freeNamesIfClsSig (IfaceClassOp _n _dm ty) = freeNamesIfType ty @@ -1435,25 +1190,30 @@ freeNamesIfConDecls (IfNewTyCon c) = freeNamesIfConDecl c freeNamesIfConDecls _ = emptyNameSet freeNamesIfConDecl :: IfaceConDecl -> NameSet -freeNamesIfConDecl c = - freeNamesIfTvBndrs (ifConUnivTvs c) &&& - freeNamesIfTvBndrs (ifConExTvs c) &&& - freeNamesIfContext (ifConCtxt c) &&& - fnList freeNamesIfType (ifConArgTys c) &&& - fnList freeNamesIfType (map snd (ifConEqSpec c)) -- equality constraints +freeNamesIfConDecl c + = freeNamesIfTvBndrs (ifConExTvs c) &&& + freeNamesIfContext (ifConCtxt c) &&& + fnList freeNamesIfType (ifConArgTys c) &&& + fnList freeNamesIfType (map snd (ifConEqSpec c)) -- equality constraints freeNamesIfKind :: IfaceType -> NameSet freeNamesIfKind = freeNamesIfType +freeNamesIfTcArgs :: IfaceTcArgs -> NameSet +freeNamesIfTcArgs (ITC_Type t ts) = freeNamesIfType t &&& freeNamesIfTcArgs ts +freeNamesIfTcArgs (ITC_Kind k ks) = freeNamesIfKind k &&& freeNamesIfTcArgs ks +freeNamesIfTcArgs ITC_Nil = emptyNameSet + freeNamesIfType :: IfaceType -> NameSet freeNamesIfType (IfaceTyVar _) = emptyNameSet freeNamesIfType (IfaceAppTy s t) = freeNamesIfType s &&& freeNamesIfType t freeNamesIfType (IfaceTyConApp tc ts) = - freeNamesIfTc tc &&& fnList freeNamesIfType ts + freeNamesIfTc tc &&& freeNamesIfTcArgs ts freeNamesIfType (IfaceLitTy _) = emptyNameSet freeNamesIfType (IfaceForAllTy tv t) = freeNamesIfTvBndr tv &&& freeNamesIfType t freeNamesIfType (IfaceFunTy s t) = freeNamesIfType s &&& freeNamesIfType t +freeNamesIfType (IfaceDFunTy s t) = freeNamesIfType s &&& freeNamesIfType t freeNamesIfCoercion :: IfaceCoercion -> NameSet freeNamesIfCoercion (IfaceReflCo _ t) = freeNamesIfType t @@ -1535,8 +1295,7 @@ freeNamesIfExpr (IfaceCast e co) = freeNamesIfExpr e &&& freeNamesIfCoercion co freeNamesIfExpr (IfaceTick _ e) = freeNamesIfExpr e freeNamesIfExpr (IfaceECase e ty) = freeNamesIfExpr e &&& freeNamesIfType ty freeNamesIfExpr (IfaceCase s _ alts) - = freeNamesIfExpr s - &&& fnList fn_alt alts &&& fn_cons alts + = freeNamesIfExpr s &&& fnList fn_alt alts &&& fn_cons alts where fn_alt (_con,_bs,r) = freeNamesIfExpr r @@ -1558,7 +1317,7 @@ freeNamesIfExpr (IfaceLet (IfaceRec as) x) freeNamesIfExpr _ = emptyNameSet freeNamesIfTc :: IfaceTyCon -> NameSet -freeNamesIfTc (IfaceTc tc) = unitNameSet tc +freeNamesIfTc tc = unitNameSet (ifaceTyConName tc) -- ToDo: shouldn't we include IfaceIntTc & co.? 
freeNamesIfRule :: IfaceRule -> NameSet @@ -1568,13 +1327,18 @@ freeNamesIfRule (IfaceRule { ifRuleBndrs = bs, ifRuleHead = f fnList freeNamesIfBndr bs &&& fnList freeNamesIfExpr es &&& freeNamesIfExpr rhs - + freeNamesIfFamInst :: IfaceFamInst -> NameSet freeNamesIfFamInst (IfaceFamInst { ifFamInstFam = famName , ifFamInstAxiom = axName }) = unitNameSet famName &&& unitNameSet axName +freeNamesIfaceTyConParent :: IfaceTyConParent -> NameSet +freeNamesIfaceTyConParent IfNoParent = emptyNameSet +freeNamesIfaceTyConParent (IfDataInstance ax tc tys) + = unitNameSet ax &&& freeNamesIfTc tc &&& freeNamesIfTcArgs tys + -- helpers (&&&) :: NameSet -> NameSet -> NameSet (&&&) = unionNameSets @@ -1608,3 +1372,538 @@ Now, lookupModule depends on DynFlags, but the transitive dependency on the *locally-defined* type PackageState is not visible. We need to take account of the use of the data constructor PS in the pattern match. + +%************************************************************************ +%* * + Binary instances +%* * +%************************************************************************ + +\begin{code} +instance Binary IfaceDecl where + put_ bh (IfaceId name ty details idinfo) = do + putByte bh 0 + put_ bh (occNameFS name) + put_ bh ty + put_ bh details + put_ bh idinfo + + put_ _ (IfaceForeign _ _) = + error "Binary.put_(IfaceDecl): IfaceForeign" + + put_ bh (IfaceData a1 a2 a3 a4 a5 a6 a7 a8 a9 a10) = do + putByte bh 2 + put_ bh (occNameFS a1) + put_ bh a2 + put_ bh a3 + put_ bh a4 + put_ bh a5 + put_ bh a6 + put_ bh a7 + put_ bh a8 + put_ bh a9 + put_ bh a10 + + put_ bh (IfaceSyn a1 a2 a3 a4 a5) = do + putByte bh 3 + put_ bh (occNameFS a1) + put_ bh a2 + put_ bh a3 + put_ bh a4 + put_ bh a5 + + put_ bh (IfaceClass a1 a2 a3 a4 a5 a6 a7 a8 a9) = do + putByte bh 4 + put_ bh a1 + put_ bh (occNameFS a2) + put_ bh a3 + put_ bh a4 + put_ bh a5 + put_ bh a6 + put_ bh a7 + put_ bh a8 + put_ bh a9 + + put_ bh (IfaceAxiom a1 a2 a3 a4) = do + putByte bh 5 + put_ bh (occNameFS a1) + put_ bh a2 + put_ bh a3 + put_ bh a4 + + put_ bh (IfacePatSyn name a2 a3 a4 a5 a6 a7 a8 a9 a10) = do + putByte bh 6 + put_ bh (occNameFS name) + put_ bh a2 + put_ bh a3 + put_ bh a4 + put_ bh a5 + put_ bh a6 + put_ bh a7 + put_ bh a8 + put_ bh a9 + put_ bh a10 + + get bh = do + h <- getByte bh + case h of + 0 -> do name <- get bh + ty <- get bh + details <- get bh + idinfo <- get bh + occ <- return $! mkVarOccFS name + return (IfaceId occ ty details idinfo) + 1 -> error "Binary.get(TyClDecl): ForeignType" + 2 -> do a1 <- get bh + a2 <- get bh + a3 <- get bh + a4 <- get bh + a5 <- get bh + a6 <- get bh + a7 <- get bh + a8 <- get bh + a9 <- get bh + a10 <- get bh + occ <- return $! mkTcOccFS a1 + return (IfaceData occ a2 a3 a4 a5 a6 a7 a8 a9 a10) + 3 -> do a1 <- get bh + a2 <- get bh + a3 <- get bh + a4 <- get bh + a5 <- get bh + occ <- return $! mkTcOccFS a1 + return (IfaceSyn occ a2 a3 a4 a5) + 4 -> do a1 <- get bh + a2 <- get bh + a3 <- get bh + a4 <- get bh + a5 <- get bh + a6 <- get bh + a7 <- get bh + a8 <- get bh + a9 <- get bh + occ <- return $! mkClsOccFS a2 + return (IfaceClass a1 occ a3 a4 a5 a6 a7 a8 a9) + 5 -> do a1 <- get bh + a2 <- get bh + a3 <- get bh + a4 <- get bh + occ <- return $! mkTcOccFS a1 + return (IfaceAxiom occ a2 a3 a4) + 6 -> do a1 <- get bh + a2 <- get bh + a3 <- get bh + a4 <- get bh + a5 <- get bh + a6 <- get bh + a7 <- get bh + a8 <- get bh + a9 <- get bh + a10 <- get bh + occ <- return $! 
mkDataOccFS a1 + return (IfacePatSyn occ a2 a3 a4 a5 a6 a7 a8 a9 a10) + _ -> panic (unwords ["Unknown IfaceDecl tag:", show h]) + +instance Binary IfaceSynTyConRhs where + put_ bh IfaceOpenSynFamilyTyCon = putByte bh 0 + put_ bh (IfaceClosedSynFamilyTyCon ax br) = putByte bh 1 >> put_ bh ax + >> put_ bh br + put_ bh IfaceAbstractClosedSynFamilyTyCon = putByte bh 2 + put_ bh (IfaceSynonymTyCon ty) = putByte bh 3 >> put_ bh ty + put_ _ IfaceBuiltInSynFamTyCon + = pprPanic "Cannot serialize IfaceBuiltInSynFamTyCon, used for pretty-printing only" empty + + get bh = do { h <- getByte bh + ; case h of + 0 -> return IfaceOpenSynFamilyTyCon + 1 -> do { ax <- get bh + ; br <- get bh + ; return (IfaceClosedSynFamilyTyCon ax br) } + 2 -> return IfaceAbstractClosedSynFamilyTyCon + _ -> do { ty <- get bh + ; return (IfaceSynonymTyCon ty) } } + +instance Binary IfaceClassOp where + put_ bh (IfaceClassOp n def ty) = do + put_ bh (occNameFS n) + put_ bh def + put_ bh ty + get bh = do + n <- get bh + def <- get bh + ty <- get bh + occ <- return $! mkVarOccFS n + return (IfaceClassOp occ def ty) + +instance Binary IfaceAT where + put_ bh (IfaceAT dec defs) = do + put_ bh dec + put_ bh defs + get bh = do + dec <- get bh + defs <- get bh + return (IfaceAT dec defs) + +instance Binary IfaceAxBranch where + put_ bh (IfaceAxBranch a1 a2 a3 a4 a5) = do + put_ bh a1 + put_ bh a2 + put_ bh a3 + put_ bh a4 + put_ bh a5 + get bh = do + a1 <- get bh + a2 <- get bh + a3 <- get bh + a4 <- get bh + a5 <- get bh + return (IfaceAxBranch a1 a2 a3 a4 a5) + +instance Binary IfaceConDecls where + put_ bh (IfAbstractTyCon d) = putByte bh 0 >> put_ bh d + put_ bh IfDataFamTyCon = putByte bh 1 + put_ bh (IfDataTyCon cs) = putByte bh 2 >> put_ bh cs + put_ bh (IfNewTyCon c) = putByte bh 3 >> put_ bh c + get bh = do + h <- getByte bh + case h of + 0 -> liftM IfAbstractTyCon $ get bh + 1 -> return IfDataFamTyCon + 2 -> liftM IfDataTyCon $ get bh + _ -> liftM IfNewTyCon $ get bh + +instance Binary IfaceConDecl where + put_ bh (IfCon a1 a2 a3 a4 a5 a6 a7 a8 a9) = do + put_ bh a1 + put_ bh a2 + put_ bh a3 + put_ bh a4 + put_ bh a5 + put_ bh a6 + put_ bh a7 + put_ bh a8 + put_ bh a9 + get bh = do + a1 <- get bh + a2 <- get bh + a3 <- get bh + a4 <- get bh + a5 <- get bh + a6 <- get bh + a7 <- get bh + a8 <- get bh + a9 <- get bh + return (IfCon a1 a2 a3 a4 a5 a6 a7 a8 a9) + +instance Binary IfaceBang where + put_ bh IfNoBang = putByte bh 0 + put_ bh IfStrict = putByte bh 1 + put_ bh IfUnpack = putByte bh 2 + put_ bh (IfUnpackCo co) = putByte bh 3 >> put_ bh co + + get bh = do + h <- getByte bh + case h of + 0 -> do return IfNoBang + 1 -> do return IfStrict + 2 -> do return IfUnpack + _ -> do { a <- get bh; return (IfUnpackCo a) } + +instance Binary IfaceClsInst where + put_ bh (IfaceClsInst cls tys dfun flag orph) = do + put_ bh cls + put_ bh tys + put_ bh dfun + put_ bh flag + put_ bh orph + get bh = do + cls <- get bh + tys <- get bh + dfun <- get bh + flag <- get bh + orph <- get bh + return (IfaceClsInst cls tys dfun flag orph) + +instance Binary IfaceFamInst where + put_ bh (IfaceFamInst fam tys name orph) = do + put_ bh fam + put_ bh tys + put_ bh name + put_ bh orph + get bh = do + fam <- get bh + tys <- get bh + name <- get bh + orph <- get bh + return (IfaceFamInst fam tys name orph) + +instance Binary IfaceRule where + put_ bh (IfaceRule a1 a2 a3 a4 a5 a6 a7 a8) = do + put_ bh a1 + put_ bh a2 + put_ bh a3 + put_ bh a4 + put_ bh a5 + put_ bh a6 + put_ bh a7 + put_ bh a8 + get bh = do + a1 <- get bh + a2 <- get bh + a3 <- get bh 
+ a4 <- get bh + a5 <- get bh + a6 <- get bh + a7 <- get bh + a8 <- get bh + return (IfaceRule a1 a2 a3 a4 a5 a6 a7 a8) + +instance Binary IfaceAnnotation where + put_ bh (IfaceAnnotation a1 a2) = do + put_ bh a1 + put_ bh a2 + get bh = do + a1 <- get bh + a2 <- get bh + return (IfaceAnnotation a1 a2) + +instance Binary IfaceIdDetails where + put_ bh IfVanillaId = putByte bh 0 + put_ bh (IfRecSelId a b) = putByte bh 1 >> put_ bh a >> put_ bh b + put_ bh (IfDFunId n) = do { putByte bh 2; put_ bh n } + get bh = do + h <- getByte bh + case h of + 0 -> return IfVanillaId + 1 -> do { a <- get bh; b <- get bh; return (IfRecSelId a b) } + _ -> do { n <- get bh; return (IfDFunId n) } + +instance Binary IfaceIdInfo where + put_ bh NoInfo = putByte bh 0 + put_ bh (HasInfo i) = putByte bh 1 >> lazyPut bh i -- NB lazyPut + + get bh = do + h <- getByte bh + case h of + 0 -> return NoInfo + _ -> liftM HasInfo $ lazyGet bh -- NB lazyGet + +instance Binary IfaceInfoItem where + put_ bh (HsArity aa) = putByte bh 0 >> put_ bh aa + put_ bh (HsStrictness ab) = putByte bh 1 >> put_ bh ab + put_ bh (HsUnfold lb ad) = putByte bh 2 >> put_ bh lb >> put_ bh ad + put_ bh (HsInline ad) = putByte bh 3 >> put_ bh ad + put_ bh HsNoCafRefs = putByte bh 4 + get bh = do + h <- getByte bh + case h of + 0 -> liftM HsArity $ get bh + 1 -> liftM HsStrictness $ get bh + 2 -> do lb <- get bh + ad <- get bh + return (HsUnfold lb ad) + 3 -> liftM HsInline $ get bh + _ -> return HsNoCafRefs + +instance Binary IfaceUnfolding where + put_ bh (IfCoreUnfold s e) = do + putByte bh 0 + put_ bh s + put_ bh e + put_ bh (IfInlineRule a b c d) = do + putByte bh 1 + put_ bh a + put_ bh b + put_ bh c + put_ bh d + put_ bh (IfDFunUnfold as bs) = do + putByte bh 2 + put_ bh as + put_ bh bs + put_ bh (IfCompulsory e) = do + putByte bh 3 + put_ bh e + get bh = do + h <- getByte bh + case h of + 0 -> do s <- get bh + e <- get bh + return (IfCoreUnfold s e) + 1 -> do a <- get bh + b <- get bh + c <- get bh + d <- get bh + return (IfInlineRule a b c d) + 2 -> do as <- get bh + bs <- get bh + return (IfDFunUnfold as bs) + _ -> do e <- get bh + return (IfCompulsory e) + + +instance Binary IfaceExpr where + put_ bh (IfaceLcl aa) = do + putByte bh 0 + put_ bh aa + put_ bh (IfaceType ab) = do + putByte bh 1 + put_ bh ab + put_ bh (IfaceCo ab) = do + putByte bh 2 + put_ bh ab + put_ bh (IfaceTuple ac ad) = do + putByte bh 3 + put_ bh ac + put_ bh ad + put_ bh (IfaceLam ae af) = do + putByte bh 4 + put_ bh ae + put_ bh af + put_ bh (IfaceApp ag ah) = do + putByte bh 5 + put_ bh ag + put_ bh ah + put_ bh (IfaceCase ai aj ak) = do + putByte bh 6 + put_ bh ai + put_ bh aj + put_ bh ak + put_ bh (IfaceLet al am) = do + putByte bh 7 + put_ bh al + put_ bh am + put_ bh (IfaceTick an ao) = do + putByte bh 8 + put_ bh an + put_ bh ao + put_ bh (IfaceLit ap) = do + putByte bh 9 + put_ bh ap + put_ bh (IfaceFCall as at) = do + putByte bh 10 + put_ bh as + put_ bh at + put_ bh (IfaceExt aa) = do + putByte bh 11 + put_ bh aa + put_ bh (IfaceCast ie ico) = do + putByte bh 12 + put_ bh ie + put_ bh ico + put_ bh (IfaceECase a b) = do + putByte bh 13 + put_ bh a + put_ bh b + get bh = do + h <- getByte bh + case h of + 0 -> do aa <- get bh + return (IfaceLcl aa) + 1 -> do ab <- get bh + return (IfaceType ab) + 2 -> do ab <- get bh + return (IfaceCo ab) + 3 -> do ac <- get bh + ad <- get bh + return (IfaceTuple ac ad) + 4 -> do ae <- get bh + af <- get bh + return (IfaceLam ae af) + 5 -> do ag <- get bh + ah <- get bh + return (IfaceApp ag ah) + 6 -> do ai <- get bh + aj 
<- get bh + ak <- get bh + return (IfaceCase ai aj ak) + 7 -> do al <- get bh + am <- get bh + return (IfaceLet al am) + 8 -> do an <- get bh + ao <- get bh + return (IfaceTick an ao) + 9 -> do ap <- get bh + return (IfaceLit ap) + 10 -> do as <- get bh + at <- get bh + return (IfaceFCall as at) + 11 -> do aa <- get bh + return (IfaceExt aa) + 12 -> do ie <- get bh + ico <- get bh + return (IfaceCast ie ico) + 13 -> do a <- get bh + b <- get bh + return (IfaceECase a b) + _ -> panic ("get IfaceExpr " ++ show h) + +instance Binary IfaceTickish where + put_ bh (IfaceHpcTick m ix) = do + putByte bh 0 + put_ bh m + put_ bh ix + put_ bh (IfaceSCC cc tick push) = do + putByte bh 1 + put_ bh cc + put_ bh tick + put_ bh push + + get bh = do + h <- getByte bh + case h of + 0 -> do m <- get bh + ix <- get bh + return (IfaceHpcTick m ix) + 1 -> do cc <- get bh + tick <- get bh + push <- get bh + return (IfaceSCC cc tick push) + _ -> panic ("get IfaceTickish " ++ show h) + +instance Binary IfaceConAlt where + put_ bh IfaceDefault = putByte bh 0 + put_ bh (IfaceDataAlt aa) = putByte bh 1 >> put_ bh aa + put_ bh (IfaceLitAlt ac) = putByte bh 2 >> put_ bh ac + get bh = do + h <- getByte bh + case h of + 0 -> return IfaceDefault + 1 -> liftM IfaceDataAlt $ get bh + _ -> liftM IfaceLitAlt $ get bh + +instance Binary IfaceBinding where + put_ bh (IfaceNonRec aa ab) = putByte bh 0 >> put_ bh aa >> put_ bh ab + put_ bh (IfaceRec ac) = putByte bh 1 >> put_ bh ac + get bh = do + h <- getByte bh + case h of + 0 -> do { aa <- get bh; ab <- get bh; return (IfaceNonRec aa ab) } + _ -> do { ac <- get bh; return (IfaceRec ac) } + +instance Binary IfaceLetBndr where + put_ bh (IfLetBndr a b c) = do + put_ bh a + put_ bh b + put_ bh c + get bh = do a <- get bh + b <- get bh + c <- get bh + return (IfLetBndr a b c) + +instance Binary IfaceTyConParent where + put_ bh IfNoParent = putByte bh 0 + put_ bh (IfDataInstance ax pr ty) = do + putByte bh 1 + put_ bh ax + put_ bh pr + put_ bh ty + get bh = do + h <- getByte bh + case h of + 0 -> return IfNoParent + _ -> do + ax <- get bh + pr <- get bh + ty <- get bh + return $ IfDataInstance ax pr ty +\end{code}
\ No newline at end of file diff --git a/compiler/iface/IfaceType.lhs b/compiler/iface/IfaceType.lhs index e4a789f0f5..c55edc6185 100644 --- a/compiler/iface/IfaceType.lhs +++ b/compiler/iface/IfaceType.lhs @@ -6,17 +6,22 @@ This module defines interface types and binders \begin{code} +{-# LANGUAGE CPP #-} module IfaceType ( IfExtName, IfLclName, IfaceType(..), IfacePredType, IfaceKind, IfaceTyCon(..), IfaceCoercion(..), - IfaceTyLit(..), - IfaceContext, IfaceBndr(..), IfaceTvBndr, IfaceIdBndr, + IfaceTyLit(..), IfaceTcArgs(..), + IfaceContext, IfaceBndr(..), IfaceTvBndr, IfaceIdBndr, -- Conversion from Type -> IfaceType - toIfaceType, toIfaceKind, toIfaceContext, - toIfaceBndr, toIfaceIdBndr, toIfaceTvBndrs, - toIfaceTyCon, toIfaceTyCon_name, + toIfaceType, toIfaceTypes, toIfaceKind, toIfaceTyVar, + toIfaceContext, toIfaceBndr, toIfaceIdBndr, + toIfaceTvBndrs, toIfaceTyCon, toIfaceTyCon_name, + toIfaceTcArgs, + + -- Conversion from IfaceTcArgs -> IfaceType + tcArgsIfaceTypes, -- Conversion from Coercion -> IfaceCoercion toIfaceCoercion, @@ -24,31 +29,40 @@ module IfaceType ( -- Printing pprIfaceType, pprParendIfaceType, pprIfaceContext, pprIfaceContextArr, pprIfaceIdBndr, pprIfaceTvBndr, pprIfaceTvBndrs, - pprIfaceBndrs, - tOP_PREC, tYCON_PREC, noParens, maybeParen, pprIfaceForAllPart, - pprIfaceCoercion, pprParendIfaceCoercion - + pprIfaceBndrs, pprIfaceTcArgs, pprParendIfaceTcArgs, + pprIfaceForAllPart, pprIfaceForAll, pprIfaceSigmaType, + pprIfaceCoercion, pprParendIfaceCoercion, + splitIfaceSigmaTy, pprIfaceTypeApp, pprUserIfaceForAll, + + suppressIfaceKinds, + stripIfaceKindVars, + stripKindArgs, + substIfaceType, substIfaceTyVar, substIfaceTcArgs, mkIfaceTySubst ) where +#include "HsVersions.h" + import Coercion +import DataCon ( dataConTyCon ) import TcType import DynFlags -import TypeRep hiding( maybeParen ) +import TypeRep import Unique( hasKey ) -import TyCon +import Util ( filterOut, lengthIs, zipWithEqual ) +import TyCon hiding ( pprPromotionQuote ) import CoAxiom import Id import Var +-- import RnEnv( FastStringEnv, mkFsEnv, lookupFsEnv ) import TysWiredIn import TysPrim -import PrelNames( funTyConKey ) +import PrelNames( funTyConKey, ipClassName ) import Name import BasicTypes import Binary import Outputable import FastString - -import Control.Monad +import UniqSet \end{code} %************************************************************************ @@ -77,8 +91,9 @@ data IfaceType -- A kind of universal type, used for types and kinds = IfaceTyVar IfLclName -- Type/coercion variable only, not tycon | IfaceAppTy IfaceType IfaceType | IfaceFunTy IfaceType IfaceType + | IfaceDFunTy IfaceType IfaceType | IfaceForAllTy IfaceTvBndr IfaceType - | IfaceTyConApp IfaceTyCon [IfaceType] -- Not necessarily saturated + | IfaceTyConApp IfaceTyCon IfaceTcArgs -- Not necessarily saturated -- Includes newtypes, synonyms, tuples | IfaceLitTy IfaceTyLit @@ -89,9 +104,24 @@ data IfaceTyLit = IfaceNumTyLit Integer | IfaceStrTyLit FastString --- Encodes type constructors, kind constructors --- coercion constructors, the lot -newtype IfaceTyCon = IfaceTc { ifaceTyConName :: IfExtName } +-- See Note [Suppressing kinds] +-- We use a new list type (rather than [(IfaceType,Bool)], because +-- it'll be more compact and faster to parse in interface +-- files. Rather than two bytes and two decisions (nil/cons, and +-- type/kind) there'll just be one. 
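
A rough illustration of the space argument, using an invented toy encoding rather than GHC's real Binary format (Toy, encodePairs and encodeArgs are made up for this sketch): a pair-list spends a spine tag plus a type/kind flag on every element, while a three-constructor list spends a single tag per cell, matching the 0/1/2 tags of the Binary IfaceTcArgs instance later in this patch.

\begin{code}
import Data.Word (Word8)

data Toy = Toy   -- stands in for a serialised IfaceType/IfaceKind payload

-- Encoding 1: a plain list of (is-kind?, payload) pairs:
-- one spine tag (nil/cons) plus one type/kind flag per element.
encodePairs :: [(Bool, Toy)] -> [Word8]
encodePairs []               = [0]
encodePairs ((is_kind,_):xs) = 1 : (if is_kind then 1 else 0) : encodePairs xs

-- Encoding 2: the ITC_Nil/ITC_Type/ITC_Kind shape: one tag per cell
-- (ITC_Type = 0, ITC_Kind = 1, ITC_Nil = 2).
encodeArgs :: [(Bool, Toy)] -> [Word8]
encodeArgs []               = [2]
encodeArgs ((is_kind,_):xs) = (if is_kind then 1 else 0) : encodeArgs xs

main :: IO ()
main = do
  let args = [(True, Toy), (False, Toy), (False, Toy)]
  print (length (encodePairs args))  -- 7 tag bytes
  print (length (encodeArgs  args))  -- 4 tag bytes
\end{code}
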
+data IfaceTcArgs + = ITC_Nil + | ITC_Type IfaceType IfaceTcArgs + | ITC_Kind IfaceKind IfaceTcArgs + +-- Encodes type constructors, kind constructors, +-- coercion constructors, the lot. +-- We have to tag them in order to pretty print them +-- properly. +data IfaceTyCon + = IfaceTc { ifaceTyConName :: IfExtName } + | IfacePromotedDataCon { ifaceTyConName :: IfExtName } + | IfacePromotedTyCon { ifaceTyConName :: IfExtName } data IfaceCoercion = IfaceReflCo Role IfaceType @@ -131,40 +161,167 @@ splitIfaceSigmaTy ty = case split_foralls ty of { (tvs, rho) -> (tv:tvs, rho) } split_foralls rho = ([], rho) - split_rho (IfaceFunTy ty1 ty2) - | isIfacePredTy ty1 = case split_rho ty2 of { (ps, tau) -> (ty1:ps, tau) } + split_rho (IfaceDFunTy ty1 ty2) + = case split_rho ty2 of { (ps, tau) -> (ty1:ps, tau) } split_rho tau = ([], tau) + +suppressIfaceKinds :: DynFlags -> [IfaceTvBndr] -> [a] -> [a] +suppressIfaceKinds dflags tys xs + | gopt Opt_PrintExplicitKinds dflags = xs + | otherwise = suppress tys xs + where + suppress _ [] = [] + suppress [] a = a + suppress (k:ks) a@(_:xs) + | isIfaceKindVar k = suppress ks xs + | otherwise = a + +stripIfaceKindVars :: DynFlags -> [IfaceTvBndr] -> [IfaceTvBndr] +stripIfaceKindVars dflags tyvars + | gopt Opt_PrintExplicitKinds dflags = tyvars + | otherwise = filterOut isIfaceKindVar tyvars + +isIfaceKindVar :: IfaceTvBndr -> Bool +isIfaceKindVar (_, IfaceTyConApp tc _) = ifaceTyConName tc == superKindTyConName +isIfaceKindVar _ = False + +ifTyVarsOfType :: IfaceType -> UniqSet IfLclName +ifTyVarsOfType ty + = case ty of + IfaceTyVar v -> unitUniqSet v + IfaceAppTy fun arg + -> ifTyVarsOfType fun `unionUniqSets` ifTyVarsOfType arg + IfaceFunTy arg res + -> ifTyVarsOfType arg `unionUniqSets` ifTyVarsOfType res + IfaceDFunTy arg res + -> ifTyVarsOfType arg `unionUniqSets` ifTyVarsOfType res + IfaceForAllTy (var,t) ty + -> delOneFromUniqSet (ifTyVarsOfType ty) var `unionUniqSets` + ifTyVarsOfType t + IfaceTyConApp _ args -> ifTyVarsOfArgs args + IfaceLitTy _ -> emptyUniqSet + +ifTyVarsOfArgs :: IfaceTcArgs -> UniqSet IfLclName +ifTyVarsOfArgs args = argv emptyUniqSet args + where + argv vs (ITC_Type t ts) = argv (vs `unionUniqSets` (ifTyVarsOfType t)) ts + argv vs (ITC_Kind k ks) = argv (vs `unionUniqSets` (ifTyVarsOfType k)) ks + argv vs ITC_Nil = vs +\end{code} + +Substitutions on IfaceType. This is only used during pretty-printing to construct +the result type of a GADT, and does not deal with binders (eg IfaceForAll), so +it doesn't need fancy capture stuff. 
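
As an aside on why no capture machinery is needed: the toy sketch below (invented T type, not GHC code) shows the capture problem a naive substitution hits as soon as it walks under a binder; the substitution defined next never meets that case, and indeed panics on IfaceForAllTy.

\begin{code}
data T = V String | ForAll String T
  deriving Show

-- A deliberately naive substitution that walks straight under binders.
naiveSubst :: String -> T -> T -> T
naiveSubst x s (V v)
  | v == x    = s
  | otherwise = V v
naiveSubst x s (ForAll v b) = ForAll v (naiveSubst x s b)  -- captures if v is free in s

main :: IO ()
main = print (naiveSubst "x" (V "a") (ForAll "a" (V "x")))
  -- prints ForAll "a" (V "a"): the free 'a' has been captured
\end{code}
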
+ +\begin{code} +type IfaceTySubst = FastStringEnv IfaceType + +mkIfaceTySubst :: [IfaceTvBndr] -> [IfaceType] -> IfaceTySubst +mkIfaceTySubst tvs tys = mkFsEnv $ zipWithEqual "mkIfaceTySubst" (\(fs,_) ty -> (fs,ty)) tvs tys + +substIfaceType :: IfaceTySubst -> IfaceType -> IfaceType +substIfaceType env ty + = go ty + where + go (IfaceTyVar tv) = substIfaceTyVar env tv + go (IfaceAppTy t1 t2) = IfaceAppTy (go t1) (go t2) + go (IfaceFunTy t1 t2) = IfaceFunTy (go t1) (go t2) + go (IfaceDFunTy t1 t2) = IfaceDFunTy (go t1) (go t2) + go ty@(IfaceLitTy {}) = ty + go (IfaceTyConApp tc tys) = IfaceTyConApp tc (substIfaceTcArgs env tys) + go (IfaceForAllTy {}) = pprPanic "substIfaceType" (ppr ty) + +substIfaceTcArgs :: IfaceTySubst -> IfaceTcArgs -> IfaceTcArgs +substIfaceTcArgs env args + = go args + where + go ITC_Nil = ITC_Nil + go (ITC_Type ty tys) = ITC_Type (substIfaceType env ty) (go tys) + go (ITC_Kind ty tys) = ITC_Kind (substIfaceType env ty) (go tys) + +substIfaceTyVar :: IfaceTySubst -> IfLclName -> IfaceType +substIfaceTyVar env tv + | Just ty <- lookupFsEnv env tv = ty + | otherwise = IfaceTyVar tv \end{code} %************************************************************************ %* * - Pretty-printing + Functions over IFaceTcArgs +%* * +%************************************************************************ + + +\begin{code} +stripKindArgs :: DynFlags -> IfaceTcArgs -> IfaceTcArgs +stripKindArgs dflags tys + | gopt Opt_PrintExplicitKinds dflags = tys + | otherwise = suppressKinds tys + where + suppressKinds c + = case c of + ITC_Kind _ ts -> suppressKinds ts + _ -> c + +toIfaceTcArgs :: TyCon -> [Type] -> IfaceTcArgs +-- See Note [Suppressing kinds] +toIfaceTcArgs tc ty_args + = go (tyConKind tc) ty_args + where + go _ [] = ITC_Nil + go (ForAllTy _ res) (t:ts) = ITC_Kind (toIfaceKind t) (go res ts) + go (FunTy _ res) (t:ts) = ITC_Type (toIfaceType t) (go res ts) + go kind (t:ts) = WARN( True, ppr tc $$ ppr (tyConKind tc) $$ ppr ty_args ) + ITC_Type (toIfaceType t) (go kind ts) -- Ill-kinded + +tcArgsIfaceTypes :: IfaceTcArgs -> [IfaceType] +tcArgsIfaceTypes ITC_Nil = [] +tcArgsIfaceTypes (ITC_Kind t ts) = t : tcArgsIfaceTypes ts +tcArgsIfaceTypes (ITC_Type t ts) = t : tcArgsIfaceTypes ts +\end{code} + +Note [Suppressing kinds] +~~~~~~~~~~~~~~~~~~~~~~~~ +We use the IfaceTcArgs to specify which of the arguments to a type +constructor instantiate a for-all, and which are regular kind args. +This in turn used to control kind-suppression when printing types, +under the control of -fprint-explicit-kinds. See also TypeRep.suppressKinds. +For example, given + T :: forall k. (k->*) -> k -> * -- Ordinary kind polymorphism + 'Just :: forall k. k -> 'Maybe k -- Promoted +we want + T * Tree Int prints as T Tree Int + 'Just * prints as Just * + + +%************************************************************************ +%* * + Functions over IFaceTyCon %* * %************************************************************************ -Precedence -~~~~~~~~~~ -@ppr_ty@ takes an @Int@ that is the precedence of the context. -The precedence levels are: -\begin{description} -\item[tOP_PREC] No parens required. -\item[fUN_PREC] Left hand argument of a function arrow. -\item[tYCON_PREC] Argument of a type constructor. 
-\end{description} +\begin{code} +--isPromotedIfaceTyCon :: IfaceTyCon -> Bool +--isPromotedIfaceTyCon (IfacePromotedTyCon _) = True +--isPromotedIfaceTyCon _ = False +\end{code} +%************************************************************************ +%* * + Pretty-printing +%* * +%************************************************************************ \begin{code} -tOP_PREC, fUN_PREC, tYCON_PREC :: Int -tOP_PREC = 0 -- type in ParseIface.y -fUN_PREC = 1 -- btype in ParseIface.y -tYCON_PREC = 2 -- atype in ParseIface.y - -noParens :: SDoc -> SDoc -noParens pp = pp - -maybeParen :: Int -> Int -> SDoc -> SDoc -maybeParen ctxt_prec inner_prec pretty - | ctxt_prec < inner_prec = pretty - | otherwise = parens pretty +pprIfaceInfixApp :: (TyPrec -> a -> SDoc) -> TyPrec -> SDoc -> a -> a -> SDoc +pprIfaceInfixApp pp p pp_tc ty1 ty2 + = maybeParen p FunPrec $ + sep [pp FunPrec ty1, pprInfixVar True pp_tc <+> pp FunPrec ty2] + +pprIfacePrefixApp :: TyPrec -> SDoc -> [SDoc] -> SDoc +pprIfacePrefixApp p pp_fun pp_tys + | null pp_tys = pp_fun + | otherwise = maybeParen p TyConPrec $ + hang pp_fun 2 (sep pp_tys) \end{code} @@ -182,9 +339,9 @@ pprIfaceIdBndr :: (IfLclName, IfaceType) -> SDoc pprIfaceIdBndr (name, ty) = hsep [ppr name, dcolon, ppr ty] pprIfaceTvBndr :: IfaceTvBndr -> SDoc -pprIfaceTvBndr (tv, IfaceTyConApp tc []) +pprIfaceTvBndr (tv, IfaceTyConApp tc ITC_Nil) | ifaceTyConName tc == liftedTypeKindTyConName = ppr tv -pprIfaceTvBndr (tv, kind) = parens (ppr tv <> dcolon <> ppr kind) +pprIfaceTvBndr (tv, kind) = parens (ppr tv <+> dcolon <+> ppr kind) pprIfaceTvBndrs :: [IfaceTvBndr] -> SDoc pprIfaceTvBndrs tyvars = sep (map pprIfaceTvBndr tyvars) @@ -213,109 +370,200 @@ instance Outputable IfaceType where ppr ty = pprIfaceType ty pprIfaceType, pprParendIfaceType ::IfaceType -> SDoc -pprIfaceType = ppr_ty tOP_PREC -pprParendIfaceType = ppr_ty tYCON_PREC - -isIfacePredTy :: IfaceType -> Bool -isIfacePredTy _ = False --- FIXME: fix this to print iface pred tys correctly --- isIfacePredTy ty = isConstraintKind (ifaceTypeKind ty) +pprIfaceType = ppr_ty TopPrec +pprParendIfaceType = ppr_ty TyConPrec -ppr_ty :: Int -> IfaceType -> SDoc +ppr_ty :: TyPrec -> IfaceType -> SDoc ppr_ty _ (IfaceTyVar tyvar) = ppr tyvar -ppr_ty ctxt_prec (IfaceTyConApp tc tys) = ppr_tc_app ppr_ty ctxt_prec tc tys - -ppr_ty _ (IfaceLitTy n) = ppr_tylit n - +ppr_ty ctxt_prec (IfaceTyConApp tc tys) = sdocWithDynFlags (pprTyTcApp ctxt_prec tc tys) +ppr_ty _ (IfaceLitTy n) = ppr_tylit n -- Function types ppr_ty ctxt_prec (IfaceFunTy ty1 ty2) = -- We don't want to lose synonyms, so we mustn't use splitFunTys here. 
- maybeParen ctxt_prec fUN_PREC $ - sep (ppr_ty fUN_PREC ty1 : ppr_fun_tail ty2) + maybeParen ctxt_prec FunPrec $ + sep [ppr_ty FunPrec ty1, sep (ppr_fun_tail ty2)] where - arr | isIfacePredTy ty1 = darrow - | otherwise = arrow - ppr_fun_tail (IfaceFunTy ty1 ty2) - = (arr <+> ppr_ty fUN_PREC ty1) : ppr_fun_tail ty2 + = (arrow <+> ppr_ty FunPrec ty1) : ppr_fun_tail ty2 ppr_fun_tail other_ty - = [arr <+> pprIfaceType other_ty] + = [arrow <+> pprIfaceType other_ty] ppr_ty ctxt_prec (IfaceAppTy ty1 ty2) - = maybeParen ctxt_prec tYCON_PREC $ - ppr_ty fUN_PREC ty1 <+> pprParendIfaceType ty2 + = maybeParen ctxt_prec TyConPrec $ + ppr_ty FunPrec ty1 <+> pprParendIfaceType ty2 -ppr_ty ctxt_prec ty@(IfaceForAllTy _ _) - = maybeParen ctxt_prec fUN_PREC (pprIfaceForAllPart tvs theta (pprIfaceType tau)) - where - (tvs, theta, tau) = splitIfaceSigmaTy ty +ppr_ty ctxt_prec ty + = maybeParen ctxt_prec FunPrec (ppr_iface_sigma_type True ty) + +instance Outputable IfaceTcArgs where + ppr tca = pprIfaceTcArgs tca + +pprIfaceTcArgs, pprParendIfaceTcArgs :: IfaceTcArgs -> SDoc +pprIfaceTcArgs = ppr_tc_args TopPrec +pprParendIfaceTcArgs = ppr_tc_args TyConPrec + +ppr_tc_args :: TyPrec -> IfaceTcArgs -> SDoc +ppr_tc_args ctx_prec args + = let pprTys t ts = ppr_ty ctx_prec t <+> ppr_tc_args ctx_prec ts + in case args of + ITC_Nil -> empty + ITC_Type t ts -> pprTys t ts + ITC_Kind t ts -> pprTys t ts ------------------- --- needs to handle type contexts and coercion contexts, hence the --- generality -pprIfaceForAllPart :: Outputable a => [IfaceTvBndr] -> [a] -> SDoc -> SDoc -pprIfaceForAllPart tvs ctxt doc - = sep [ppr_tvs, pprIfaceContextArr ctxt, doc] +ppr_iface_sigma_type :: Bool -> IfaceType -> SDoc +ppr_iface_sigma_type show_foralls_unconditionally ty + = ppr_iface_forall_part show_foralls_unconditionally tvs theta (ppr tau) where - ppr_tvs | null tvs = empty - | otherwise = sdocWithDynFlags $ \ dflags -> - if gopt Opt_PrintExplicitForalls dflags - then ptext (sLit "forall") <+> pprIfaceTvBndrs tvs <> dot - else empty + (tvs, theta, tau) = splitIfaceSigmaTy ty +pprIfaceForAllPart :: Outputable a => [IfaceTvBndr] -> [a] -> SDoc -> SDoc +pprIfaceForAllPart tvs ctxt sdoc = ppr_iface_forall_part False tvs ctxt sdoc + +ppr_iface_forall_part :: Outputable a + => Bool -> [IfaceTvBndr] -> [a] -> SDoc -> SDoc +ppr_iface_forall_part show_foralls_unconditionally tvs ctxt sdoc + = sep [ if show_foralls_unconditionally + then pprIfaceForAll tvs + else pprUserIfaceForAll tvs + , pprIfaceContextArr ctxt + , sdoc] + +pprIfaceForAll :: [IfaceTvBndr] -> SDoc +pprIfaceForAll [] = empty +pprIfaceForAll tvs = ptext (sLit "forall") <+> pprIfaceTvBndrs tvs <> dot + +pprIfaceSigmaType :: IfaceType -> SDoc +pprIfaceSigmaType ty = ppr_iface_sigma_type False ty + +pprUserIfaceForAll :: [IfaceTvBndr] -> SDoc +pprUserIfaceForAll tvs + = sdocWithDynFlags $ \dflags -> + ppWhen (any tv_has_kind_var tvs || gopt Opt_PrintExplicitForalls dflags) $ + pprIfaceForAll tvs + where + tv_has_kind_var (_,t) = not (isEmptyUniqSet (ifTyVarsOfType t)) ------------------- -ppr_tc_app :: (Int -> a -> SDoc) -> Int -> IfaceTyCon -> [a] -> SDoc -ppr_tc_app _ _ tc [] = ppr_tc tc - - -ppr_tc_app pp _ (IfaceTc n) [ty] - | n == listTyConName - = brackets (pp tOP_PREC ty) - | n == parrTyConName - = paBrackets (pp tOP_PREC ty) -ppr_tc_app pp _ (IfaceTc n) tys - | Just (ATyCon tc) <- wiredInNameTyThing_maybe n - , Just sort <- tyConTuple_maybe tc - , tyConArity tc == length tys - = tupleParens sort (sep (punctuate comma (map (pp tOP_PREC) tys))) -ppr_tc_app pp 
ctxt_prec tc tys - = maybeParen ctxt_prec tYCON_PREC - (sep [ppr_tc tc, nest 4 (sep (map (pp tYCON_PREC) tys))]) - -ppr_tc :: IfaceTyCon -> SDoc --- Wrap infix type constructors in parens -ppr_tc tc = wrap (ifaceTyConName tc) (ppr tc) + +-- See equivalent function in TypeRep.lhs +pprIfaceTyList :: TyPrec -> IfaceType -> IfaceType -> SDoc +-- Given a type-level list (t1 ': t2), see if we can print +-- it in list notation [t1, ...]. +-- Precondition: Opt_PrintExplicitKinds is off +pprIfaceTyList ctxt_prec ty1 ty2 + = case gather ty2 of + (arg_tys, Nothing) + -> char '\'' <> brackets (fsep (punctuate comma + (map (ppr_ty TopPrec) (ty1:arg_tys)))) + (arg_tys, Just tl) + -> maybeParen ctxt_prec FunPrec $ hang (ppr_ty FunPrec ty1) + 2 (fsep [ colon <+> ppr_ty FunPrec ty | ty <- arg_tys ++ [tl]]) + where + gather :: IfaceType -> ([IfaceType], Maybe IfaceType) + -- (gather ty) = (tys, Nothing) means ty is a list [t1, .., tn] + -- = (tys, Just tl) means ty is of form t1:t2:...tn:tl + gather (IfaceTyConApp tc tys) + | tcname == consDataConName + , (ITC_Kind _ (ITC_Type ty1 (ITC_Type ty2 ITC_Nil))) <- tys + , (args, tl) <- gather ty2 + = (ty1:args, tl) + | tcname == nilDataConName + = ([], Nothing) + where tcname = ifaceTyConName tc + gather ty = ([], Just ty) + +pprIfaceTypeApp :: IfaceTyCon -> IfaceTcArgs -> SDoc +pprIfaceTypeApp tc args = sdocWithDynFlags (pprTyTcApp TopPrec tc args) + +pprTyTcApp :: TyPrec -> IfaceTyCon -> IfaceTcArgs -> DynFlags -> SDoc +pprTyTcApp ctxt_prec tc tys dflags + | ifaceTyConName tc == ipClassName + , ITC_Type (IfaceLitTy (IfaceStrTyLit n)) (ITC_Type ty ITC_Nil) <- tys + = char '?' <> ftext n <> ptext (sLit "::") <> ppr_ty TopPrec ty + + | ifaceTyConName tc == consDataConName + , not (gopt Opt_PrintExplicitKinds dflags) + , ITC_Kind _ (ITC_Type ty1 (ITC_Type ty2 ITC_Nil)) <- tys + = pprIfaceTyList ctxt_prec ty1 ty2 + + | otherwise + = ppr_iface_tc_app ppr_ty ctxt_prec tc tys_wo_kinds + where + tys_wo_kinds = tcArgsIfaceTypes $ stripKindArgs dflags tys + +pprIfaceCoTcApp :: TyPrec -> IfaceTyCon -> [IfaceCoercion] -> SDoc +pprIfaceCoTcApp ctxt_prec tc tys = ppr_iface_tc_app ppr_co ctxt_prec tc tys + +ppr_iface_tc_app :: (TyPrec -> a -> SDoc) -> TyPrec -> IfaceTyCon -> [a] -> SDoc +ppr_iface_tc_app pp _ tc [ty] + | n == listTyConName = pprPromotionQuote tc <> brackets (pp TopPrec ty) + | n == parrTyConName = pprPromotionQuote tc <> paBrackets (pp TopPrec ty) + where + n = ifaceTyConName tc + +ppr_iface_tc_app pp ctxt_prec tc tys + | Just (tup_sort, tup_args) <- is_tuple + = pprPromotionQuote tc <> + tupleParens tup_sort (sep (punctuate comma (map (pp TopPrec) tup_args))) + + | not (isSymOcc (nameOccName tc_name)) + = pprIfacePrefixApp ctxt_prec (ppr tc) (map (pp TyConPrec) tys) + + | [ty1,ty2] <- tys -- Infix, two arguments; + -- we know nothing of precedence though + = pprIfaceInfixApp pp ctxt_prec (ppr tc) ty1 ty2 + + | tc_name == liftedTypeKindTyConName || tc_name == unliftedTypeKindTyConName + = ppr tc -- Do not wrap *, # in parens + + | otherwise + = pprIfacePrefixApp ctxt_prec (parens (ppr tc)) (map (pp TyConPrec) tys) where - -- The kind * does not get wrapped in parens. 
- wrap name | name == liftedTypeKindTyConName = id - wrap name = parenSymOcc (getOccName name) + tc_name = ifaceTyConName tc + + is_tuple = case wiredInNameTyThing_maybe tc_name of + Just (ATyCon tc) + | Just sort <- tyConTuple_maybe tc + , tyConArity tc == length tys + -> Just (sort, tys) + + | Just dc <- isPromotedDataCon_maybe tc + , let dc_tc = dataConTyCon dc + , isTupleTyCon dc_tc + , let arity = tyConArity dc_tc + ty_args = drop arity tys + , ty_args `lengthIs` arity + -> Just (tupleTyConSort tc, ty_args) + + _ -> Nothing + ppr_tylit :: IfaceTyLit -> SDoc ppr_tylit (IfaceNumTyLit n) = integer n ppr_tylit (IfaceStrTyLit n) = text (show n) pprIfaceCoercion, pprParendIfaceCoercion :: IfaceCoercion -> SDoc -pprIfaceCoercion = ppr_co tOP_PREC -pprParendIfaceCoercion = ppr_co tYCON_PREC +pprIfaceCoercion = ppr_co TopPrec +pprParendIfaceCoercion = ppr_co TyConPrec -ppr_co :: Int -> IfaceCoercion -> SDoc +ppr_co :: TyPrec -> IfaceCoercion -> SDoc ppr_co _ (IfaceReflCo r ty) = angleBrackets (ppr ty) <> ppr_role r ppr_co ctxt_prec (IfaceFunCo r co1 co2) - = maybeParen ctxt_prec fUN_PREC $ - sep (ppr_co fUN_PREC co1 : ppr_fun_tail co2) + = maybeParen ctxt_prec FunPrec $ + sep (ppr_co FunPrec co1 : ppr_fun_tail co2) where ppr_fun_tail (IfaceFunCo r co1 co2) - = (arrow <> ppr_role r <+> ppr_co fUN_PREC co1) : ppr_fun_tail co2 + = (arrow <> ppr_role r <+> ppr_co FunPrec co1) : ppr_fun_tail co2 ppr_fun_tail other_co = [arrow <> ppr_role r <+> pprIfaceCoercion other_co] ppr_co _ (IfaceTyConAppCo r tc cos) - = parens (ppr_tc_app ppr_co tOP_PREC tc cos) <> ppr_role r + = parens (pprIfaceCoTcApp TopPrec tc cos) <> ppr_role r ppr_co ctxt_prec (IfaceAppCo co1 co2) - = maybeParen ctxt_prec tYCON_PREC $ - ppr_co fUN_PREC co1 <+> pprParendIfaceCoercion co2 + = maybeParen ctxt_prec TyConPrec $ + ppr_co FunPrec co1 <+> pprParendIfaceCoercion co2 ppr_co ctxt_prec co@(IfaceForAllCo _ _) - = maybeParen ctxt_prec fUN_PREC (sep [ppr_tvs, pprIfaceCoercion inner_co]) + = maybeParen ctxt_prec FunPrec (sep [ppr_tvs, pprIfaceCoercion inner_co]) where (tvs, inner_co) = split_co co ppr_tvs = ptext (sLit "forall") <+> pprIfaceTvBndrs tvs <> dot @@ -327,16 +575,16 @@ ppr_co ctxt_prec co@(IfaceForAllCo _ _) ppr_co _ (IfaceCoVarCo covar) = ppr covar ppr_co ctxt_prec (IfaceUnivCo r ty1 ty2) - = maybeParen ctxt_prec tYCON_PREC $ + = maybeParen ctxt_prec TyConPrec $ ptext (sLit "UnivCo") <+> ppr r <+> pprParendIfaceType ty1 <+> pprParendIfaceType ty2 ppr_co ctxt_prec (IfaceInstCo co ty) - = maybeParen ctxt_prec tYCON_PREC $ + = maybeParen ctxt_prec TyConPrec $ ptext (sLit "Inst") <+> pprParendIfaceCoercion co <+> pprParendIfaceType ty ppr_co ctxt_prec (IfaceAxiomRuleCo tc tys cos) - = maybeParen ctxt_prec tYCON_PREC + = maybeParen ctxt_prec TyConPrec (sep [ppr tc, nest 4 (sep (map pprParendIfaceType tys ++ map pprParendIfaceCoercion cos))]) ppr_co ctxt_prec co @@ -351,9 +599,9 @@ ppr_co ctxt_prec co ; IfaceSubCo co -> (ptext (sLit "Sub"), [co]) ; _ -> panic "pprIfaceCo" } -ppr_special_co :: Int -> SDoc -> [IfaceCoercion] -> SDoc +ppr_special_co :: TyPrec -> SDoc -> [IfaceCoercion] -> SDoc ppr_special_co ctxt_prec doc cos - = maybeParen ctxt_prec tYCON_PREC + = maybeParen ctxt_prec TyConPrec (sep [doc, nest 4 (sep (map pprParendIfaceCoercion cos))]) ppr_role :: Role -> SDoc @@ -365,14 +613,30 @@ ppr_role r = underscore <> pp_role ------------------- instance Outputable IfaceTyCon where - ppr = ppr . 
ifaceTyConName + ppr tc = pprPromotionQuote tc <> ppr (ifaceTyConName tc) + +pprPromotionQuote :: IfaceTyCon -> SDoc +pprPromotionQuote (IfacePromotedDataCon _ ) = char '\'' +pprPromotionQuote (IfacePromotedTyCon _) = ifPprDebug (char '\'') +pprPromotionQuote _ = empty instance Outputable IfaceCoercion where ppr = pprIfaceCoercion instance Binary IfaceTyCon where - put_ bh (IfaceTc ext) = put_ bh ext - get bh = liftM IfaceTc (get bh) + put_ bh tc = + case tc of + IfaceTc n -> putByte bh 0 >> put_ bh n + IfacePromotedDataCon n -> putByte bh 1 >> put_ bh n + IfacePromotedTyCon n -> putByte bh 2 >> put_ bh n + + get bh = + do tc <- getByte bh + case tc of + 0 -> get bh >>= return . IfaceTc + 1 -> get bh >>= return . IfacePromotedDataCon + 2 -> get bh >>= return . IfacePromotedTyCon + _ -> panic ("get IfaceTyCon " ++ show tc) instance Outputable IfaceTyLit where ppr = ppr_tylit @@ -390,6 +654,27 @@ instance Binary IfaceTyLit where ; return (IfaceStrTyLit n) } _ -> panic ("get IfaceTyLit " ++ show tag) +instance Binary IfaceTcArgs where + put_ bh tk = + case tk of + ITC_Type t ts -> putByte bh 0 >> put_ bh t >> put_ bh ts + ITC_Kind t ts -> putByte bh 1 >> put_ bh t >> put_ bh ts + ITC_Nil -> putByte bh 2 + + get bh = + do c <- getByte bh + case c of + 0 -> do + t <- get bh + ts <- get bh + return $! ITC_Type t ts + 1 -> do + t <- get bh + ts <- get bh + return $! ITC_Kind t ts + 2 -> return ITC_Nil + _ -> panic ("get IfaceTcArgs " ++ show c) + ------------------- pprIfaceContextArr :: Outputable a => [a] -> SDoc -- Prints "(C a, D b) =>", including the arrow @@ -398,7 +683,7 @@ pprIfaceContextArr theta = pprIfaceContext theta <+> darrow pprIfaceContext :: Outputable a => [a] -> SDoc pprIfaceContext [pred] = ppr pred -- No parens -pprIfaceContext preds = parens (sep (punctuate comma (map ppr preds))) +pprIfaceContext preds = parens (fsep (punctuate comma (map ppr preds))) instance Binary IfaceType where put_ bh (IfaceForAllTy aa ab) = do @@ -416,6 +701,10 @@ instance Binary IfaceType where putByte bh 3 put_ bh ag put_ bh ah + put_ bh (IfaceDFunTy ag ah) = do + putByte bh 4 + put_ bh ag + put_ bh ah put_ bh (IfaceTyConApp tc tys) = do { putByte bh 5; put_ bh tc; put_ bh tys } @@ -436,9 +725,11 @@ instance Binary IfaceType where 3 -> do ag <- get bh ah <- get bh return (IfaceFunTy ag ah) + 4 -> do ag <- get bh + ah <- get bh + return (IfaceDFunTy ag ah) 5 -> do { tc <- get bh; tys <- get bh ; return (IfaceTyConApp tc tys) } - 30 -> do n <- get bh return (IfaceLitTy n) @@ -558,7 +849,7 @@ instance Binary IfaceCoercion where b <- get bh c <- get bh return $ IfaceAxiomRuleCo a b c - _ -> panic ("get IfaceCoercion " ++ show tag) + _ -> panic ("get IfaceCoercion " ++ show tag) \end{code} @@ -590,8 +881,10 @@ toIfaceType :: Type -> IfaceType -- Synonyms are retained in the interface type toIfaceType (TyVarTy tv) = IfaceTyVar (toIfaceTyVar tv) toIfaceType (AppTy t1 t2) = IfaceAppTy (toIfaceType t1) (toIfaceType t2) -toIfaceType (FunTy t1 t2) = IfaceFunTy (toIfaceType t1) (toIfaceType t2) -toIfaceType (TyConApp tc tys) = IfaceTyConApp (toIfaceTyCon tc) (toIfaceTypes tys) +toIfaceType (FunTy t1 t2) + | isPredTy t1 = IfaceDFunTy (toIfaceType t1) (toIfaceType t2) + | otherwise = IfaceFunTy (toIfaceType t1) (toIfaceType t2) +toIfaceType (TyConApp tc tys) = IfaceTyConApp (toIfaceTyCon tc) (toIfaceTcArgs tc tys) toIfaceType (LitTy n) = IfaceLitTy (toIfaceTyLit n) toIfaceType (ForAllTy tv t) = IfaceForAllTy (toIfaceTvBndr tv) (toIfaceType t) @@ -603,7 +896,11 @@ toIfaceCoVar = occNameFS . 
getOccName ---------------- toIfaceTyCon :: TyCon -> IfaceTyCon -toIfaceTyCon = toIfaceTyCon_name . tyConName +toIfaceTyCon tc + | isPromotedDataCon tc = IfacePromotedDataCon tc_name + | isPromotedTyCon tc = IfacePromotedTyCon tc_name + | otherwise = IfaceTc tc_name + where tc_name = tyConName tc toIfaceTyCon_name :: Name -> IfaceTyCon toIfaceTyCon_name = IfaceTc @@ -652,4 +949,3 @@ toIfaceCoercion (AxiomRuleCo co ts cs) = IfaceAxiomRuleCo (map toIfaceType ts) (map toIfaceCoercion cs) \end{code} - diff --git a/compiler/iface/LoadIface.lhs b/compiler/iface/LoadIface.lhs index d787794326..03ce53fff8 100644 --- a/compiler/iface/LoadIface.lhs +++ b/compiler/iface/LoadIface.lhs @@ -6,6 +6,7 @@ Loading interface files \begin{code} +{-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module LoadIface ( -- RnM/TcM functions @@ -391,7 +392,7 @@ compiler expects. -- the declaration itself, will find the fully-glorious Name -- -- We handle ATs specially. They are not main declarations, but also not --- implict things (in particular, adding them to `implicitTyThings' would mess +-- implicit things (in particular, adding them to `implicitTyThings' would mess -- things up in the renaming/type checking of source programs). ----------------------------------------------------- @@ -416,7 +417,6 @@ loadDecl ignore_prags mod (_version, decl) = do { -- Populate the name cache with final versions of all -- the names associated with the decl main_name <- lookupOrig mod (ifName decl) --- ; traceIf (text "Loading decl for " <> ppr main_name) -- Typecheck the thing, lazily -- NB. Firstly, the laziness is there in case we never need the @@ -445,11 +445,11 @@ loadDecl ignore_prags mod (_version, decl) -- [ "MkT" -> <datacon MkT>, "x" -> <selector x>, ... ] -- (where the "MkT" is the *Name* associated with MkT, etc.) -- - -- We do this by mapping the implict_names to the associated + -- We do this by mapping the implicit_names to the associated -- TyThings. By the invariant on ifaceDeclImplicitBndrs and -- implicitTyThings, we can use getOccName on the implicit -- TyThings to make this association: each Name's OccName should - -- be the OccName of exactly one implictTyThing. So the key is + -- be the OccName of exactly one implicitTyThing. So the key is -- to define a "mini-env" -- -- [ 'MkT' -> <datacon MkT>, 'x' -> <selector x>, ... ] @@ -457,7 +457,7 @@ loadDecl ignore_prags mod (_version, decl) -- -- However, there is a subtlety: due to how type checking needs -- to be staged, we can't poke on the forkM'd thunks inside the - -- implictTyThings while building this mini-env. + -- implicitTyThings while building this mini-env. 
-- If we poke these thunks too early, two problems could happen: -- (1) When processing mutually recursive modules across -- hs-boot boundaries, poking too early will do the @@ -490,9 +490,11 @@ loadDecl ignore_prags mod (_version, decl) pprPanic "loadDecl" (ppr main_name <+> ppr n $$ ppr (decl)) ; implicit_names <- mapM (lookupOrig mod) (ifaceDeclImplicitBndrs decl) + +-- ; traceIf (text "Loading decl for " <> ppr main_name $$ ppr implicit_names) ; return $ (main_name, thing) : -- uses the invariant that implicit_names and - -- implictTyThings are bijective + -- implicitTyThings are bijective [(n, lookup n) | n <- implicit_names] } where @@ -751,7 +753,7 @@ pprModIface iface , vcat (map pprUsage (mi_usages iface)) , vcat (map pprIfaceAnnotation (mi_anns iface)) , pprFixities (mi_fixities iface) - , vcat (map pprIfaceDecl (mi_decls iface)) + , vcat [ppr ver $$ nest 2 (ppr decl) | (ver,decl) <- mi_decls iface] , vcat (map ppr (mi_insts iface)) , vcat (map ppr (mi_fam_insts iface)) , vcat (map ppr (mi_rules iface)) @@ -817,10 +819,6 @@ pprDeps (Deps { dep_mods = mods, dep_pkgs = pkgs, dep_orphs = orphs, ppr_boot True = text "[boot]" ppr_boot False = empty -pprIfaceDecl :: (Fingerprint, IfaceDecl) -> SDoc -pprIfaceDecl (ver, decl) - = ppr ver $$ nest 2 (ppr decl) - pprFixities :: [(OccName, Fixity)] -> SDoc pprFixities [] = empty pprFixities fixes = ptext (sLit "fixities") <+> pprWithCommas pprFix fixes diff --git a/compiler/iface/MkIface.lhs b/compiler/iface/MkIface.lhs index bb51cdae9d..460c6076ba 100644 --- a/compiler/iface/MkIface.lhs +++ b/compiler/iface/MkIface.lhs @@ -4,6 +4,8 @@ % \begin{code} +{-# LANGUAGE CPP, NondecreasingIndentation #-} + -- | Module for constructing @ModIface@ values (interface files), -- writing them to disk and comparing two versions to see if -- recompilation is required. @@ -78,6 +80,7 @@ import DataCon import PatSyn import Type import TcType +import TysPrim ( alphaTyVars ) import InstEnv import FamInstEnv import TcRnMonad @@ -876,6 +879,13 @@ instOrphWarn :: DynFlags -> PrintUnqualified -> ClsInst -> WarnMsg instOrphWarn dflags unqual inst = mkWarnMsg dflags (getSrcSpan inst) unqual $ hang (ptext (sLit "Orphan instance:")) 2 (pprInstanceHdr inst) + $$ text "To avoid this" + $$ nest 4 (vcat possibilities) + where + possibilities = + text "move the instance declaration to the module of the class or of the type, or" : + text "wrap the type with a newtype and declare the instance on the new type." : + [] ruleOrphWarn :: DynFlags -> PrintUnqualified -> Module -> IfaceRule -> WarnMsg ruleOrphWarn dflags unqual mod rule @@ -1131,27 +1141,35 @@ recompileRequired _ = True -- first element is a bool saying if we should recompile the object file -- and the second is maybe the interface file, where Nothng means to -- rebuild the interface file not use the exisitng one. 
-checkOldIface :: HscEnv - -> ModSummary - -> SourceModified - -> Maybe ModIface -- Old interface from compilation manager, if any - -> IO (RecompileRequired, Maybe ModIface) +checkOldIface + :: HscEnv + -> ModSummary + -> SourceModified + -> Maybe ModIface -- Old interface from compilation manager, if any + -> IO (RecompileRequired, Maybe ModIface) checkOldIface hsc_env mod_summary source_modified maybe_iface = do let dflags = hsc_dflags hsc_env showPass dflags $ - "Checking old interface for " ++ (showPpr dflags $ ms_mod mod_summary) + "Checking old interface for " ++ + (showPpr dflags $ ms_mod mod_summary) initIfaceCheck hsc_env $ check_old_iface hsc_env mod_summary source_modified maybe_iface -check_old_iface :: HscEnv -> ModSummary -> SourceModified -> Maybe ModIface - -> IfG (RecompileRequired, Maybe ModIface) +check_old_iface + :: HscEnv + -> ModSummary + -> SourceModified + -> Maybe ModIface + -> IfG (RecompileRequired, Maybe ModIface) + check_old_iface hsc_env mod_summary src_modified maybe_iface = let dflags = hsc_dflags hsc_env getIface = case maybe_iface of Just _ -> do - traceIf (text "We already have the old interface for" <+> ppr (ms_mod mod_summary)) + traceIf (text "We already have the old interface for" <+> + ppr (ms_mod mod_summary)) return maybe_iface Nothing -> loadIface @@ -1458,7 +1476,7 @@ checkList (check:checks) = do recompile <- check \begin{code} tyThingToIfaceDecl :: TyThing -> IfaceDecl tyThingToIfaceDecl (AnId id) = idToIfaceDecl id -tyThingToIfaceDecl (ATyCon tycon) = tyConToIfaceDecl emptyTidyEnv tycon +tyThingToIfaceDecl (ATyCon tycon) = snd (tyConToIfaceDecl emptyTidyEnv tycon) tyThingToIfaceDecl (ACoAxiom ax) = coAxiomToIfaceDecl ax tyThingToIfaceDecl (AConLike cl) = case cl of RealDataCon dc -> dataConToIfaceDecl dc -- for ppr purposes only @@ -1488,25 +1506,24 @@ dataConToIfaceDecl dataCon patSynToIfaceDecl :: PatSyn -> IfaceDecl patSynToIfaceDecl ps = IfacePatSyn { ifName = getOccName . 
getName $ ps - , ifPatHasWrapper = isJust $ patSynWrapper ps + , ifPatMatcher = matcher + , ifPatWrapper = wrapper , ifPatIsInfix = patSynIsInfix ps , ifPatUnivTvs = toIfaceTvBndrs univ_tvs' , ifPatExTvs = toIfaceTvBndrs ex_tvs' , ifPatProvCtxt = tidyToIfaceContext env2 prov_theta , ifPatReqCtxt = tidyToIfaceContext env2 req_theta - , ifPatArgs = map toIfaceArg args + , ifPatArgs = map (tidyToIfaceType env2) args , ifPatTy = tidyToIfaceType env2 rhs_ty } where - toIfaceArg var = (occNameFS (getOccName var), - tidyToIfaceType env2 (varType var)) - - (univ_tvs, ex_tvs, (prov_theta, req_theta)) = patSynSig ps - args = patSynArgs ps - rhs_ty = patSynType ps + (univ_tvs, ex_tvs, prov_theta, req_theta, args, rhs_ty) = patSynSig ps (env1, univ_tvs') = tidyTyVarBndrs emptyTidyEnv univ_tvs (env2, ex_tvs') = tidyTyVarBndrs env1 ex_tvs + matcher = idName (patSynMatcher ps) + wrapper = fmap idName (patSynWrapper ps) + -------------------------- coAxiomToIfaceDecl :: CoAxiom br -> IfaceDecl @@ -1517,19 +1534,19 @@ coAxiomToIfaceDecl ax@(CoAxiom { co_ax_tc = tycon, co_ax_branches = branches = IfaceAxiom { ifName = name , ifTyCon = toIfaceTyCon tycon , ifRole = role - , ifAxBranches = brListMap (coAxBranchToIfaceBranch - emptyTidyEnv - (brListMap coAxBranchLHS branches)) branches } + , ifAxBranches = brListMap (coAxBranchToIfaceBranch tycon + (brListMap coAxBranchLHS branches)) + branches } where name = getOccName ax -- 2nd parameter is the list of branch LHSs, for conversion from incompatible branches -- to incompatible indices -- See Note [Storing compatibility] in CoAxiom -coAxBranchToIfaceBranch :: TidyEnv -> [[Type]] -> CoAxBranch -> IfaceAxBranch -coAxBranchToIfaceBranch env0 lhs_s +coAxBranchToIfaceBranch :: TyCon -> [[Type]] -> CoAxBranch -> IfaceAxBranch +coAxBranchToIfaceBranch tc lhs_s branch@(CoAxBranch { cab_incomps = incomps }) - = (coAxBranchToIfaceBranch' env0 branch) { ifaxbIncomps = iface_incomps } + = (coAxBranchToIfaceBranch' tc branch) { ifaxbIncomps = iface_incomps } where iface_incomps = map (expectJust "iface_incomps" . (flip findIndex lhs_s @@ -1537,63 +1554,91 @@ coAxBranchToIfaceBranch env0 lhs_s . 
coAxBranchLHS) incomps -- use this one for standalone branches without incompatibles -coAxBranchToIfaceBranch' :: TidyEnv -> CoAxBranch -> IfaceAxBranch -coAxBranchToIfaceBranch' env0 - (CoAxBranch { cab_tvs = tvs, cab_lhs = lhs - , cab_roles = roles, cab_rhs = rhs }) +coAxBranchToIfaceBranch' :: TyCon -> CoAxBranch -> IfaceAxBranch +coAxBranchToIfaceBranch' tc (CoAxBranch { cab_tvs = tvs, cab_lhs = lhs + , cab_roles = roles, cab_rhs = rhs }) = IfaceAxBranch { ifaxbTyVars = toIfaceTvBndrs tv_bndrs - , ifaxbLHS = map (tidyToIfaceType env1) lhs + , ifaxbLHS = tidyToIfaceTcArgs env1 tc lhs , ifaxbRoles = roles , ifaxbRHS = tidyToIfaceType env1 rhs , ifaxbIncomps = [] } where - (env1, tv_bndrs) = tidyTyClTyVarBndrs env0 tvs + (env1, tv_bndrs) = tidyTyClTyVarBndrs emptyTidyEnv tvs -- Don't re-bind in-scope tyvars -- See Note [CoAxBranch type variables] in CoAxiom ----------------- -tyConToIfaceDecl :: TidyEnv -> TyCon -> IfaceDecl +tyConToIfaceDecl :: TidyEnv -> TyCon -> (TidyEnv, IfaceDecl) -- We *do* tidy TyCons, because they are not (and cannot -- conveniently be) built in tidy form +-- The returned TidyEnv is the one after tidying the tyConTyVars tyConToIfaceDecl env tycon | Just clas <- tyConClass_maybe tycon = classToIfaceDecl env clas | Just syn_rhs <- synTyConRhs_maybe tycon - = IfaceSyn { ifName = getOccName tycon, - ifTyVars = toIfaceTvBndrs tyvars, - ifRoles = tyConRoles tycon, - ifSynRhs = to_ifsyn_rhs syn_rhs, - ifSynKind = tidyToIfaceType env1 (synTyConResKind tycon) } + = ( tc_env1 + , IfaceSyn { ifName = getOccName tycon, + ifTyVars = if_tc_tyvars, + ifRoles = tyConRoles tycon, + ifSynRhs = to_ifsyn_rhs syn_rhs, + ifSynKind = tidyToIfaceType tc_env1 (synTyConResKind tycon) }) | isAlgTyCon tycon - = IfaceData { ifName = getOccName tycon, - ifCType = tyConCType tycon, - ifTyVars = toIfaceTvBndrs tyvars, - ifRoles = tyConRoles tycon, - ifCtxt = tidyToIfaceContext env1 (tyConStupidTheta tycon), - ifCons = ifaceConDecls (algTyConRhs tycon), - ifRec = boolToRecFlag (isRecursiveTyCon tycon), - ifGadtSyntax = isGadtSyntaxTyCon tycon, - ifPromotable = isJust (promotableTyCon_maybe tycon), - ifAxiom = fmap coAxiomName (tyConFamilyCoercion_maybe tycon) } + = ( tc_env1 + , IfaceData { ifName = getOccName tycon, + ifCType = tyConCType tycon, + ifTyVars = if_tc_tyvars, + ifRoles = tyConRoles tycon, + ifCtxt = tidyToIfaceContext tc_env1 (tyConStupidTheta tycon), + ifCons = ifaceConDecls (algTyConRhs tycon), + ifRec = boolToRecFlag (isRecursiveTyCon tycon), + ifGadtSyntax = isGadtSyntaxTyCon tycon, + ifPromotable = isJust (promotableTyCon_maybe tycon), + ifParent = parent }) | isForeignTyCon tycon - = IfaceForeign { ifName = getOccName tycon, - ifExtName = tyConExtName tycon } - - | otherwise = pprPanic "toIfaceDecl" (ppr tycon) + = (env, IfaceForeign { ifName = getOccName tycon, + ifExtName = tyConExtName tycon }) + + | otherwise -- FunTyCon, PrimTyCon, promoted TyCon/DataCon + -- For pretty printing purposes only. 
+ = ( env + , IfaceData { ifName = getOccName tycon, + ifCType = Nothing, + ifTyVars = funAndPrimTyVars, + ifRoles = tyConRoles tycon, + ifCtxt = [], + ifCons = IfDataTyCon [], + ifRec = boolToRecFlag False, + ifGadtSyntax = False, + ifPromotable = False, + ifParent = IfNoParent }) where - (env1, tyvars) = tidyTyClTyVarBndrs env (tyConTyVars tycon) + (tc_env1, tc_tyvars) = tidyTyClTyVarBndrs env (tyConTyVars tycon) + if_tc_tyvars = toIfaceTvBndrs tc_tyvars + + funAndPrimTyVars = toIfaceTvBndrs $ take (tyConArity tycon) alphaTyVars + + parent = case tyConFamInstSig_maybe tycon of + Just (tc, ty, ax) -> IfDataInstance (coAxiomName ax) + (toIfaceTyCon tc) + (tidyToIfaceTcArgs tc_env1 tc ty) + Nothing -> IfNoParent + + to_ifsyn_rhs OpenSynFamilyTyCon = IfaceOpenSynFamilyTyCon + to_ifsyn_rhs (ClosedSynFamilyTyCon ax) = IfaceClosedSynFamilyTyCon axn ibr + where defs = fromBranchList $ coAxiomBranches ax + ibr = map (coAxBranchToIfaceBranch' tycon) defs + axn = coAxiomName ax + to_ifsyn_rhs AbstractClosedSynFamilyTyCon + = IfaceAbstractClosedSynFamilyTyCon - to_ifsyn_rhs OpenSynFamilyTyCon = IfaceOpenSynFamilyTyCon - to_ifsyn_rhs (ClosedSynFamilyTyCon ax) - = IfaceClosedSynFamilyTyCon (coAxiomName ax) - to_ifsyn_rhs AbstractClosedSynFamilyTyCon = IfaceAbstractClosedSynFamilyTyCon to_ifsyn_rhs (SynonymTyCon ty) - = IfaceSynonymTyCon (tidyToIfaceType env1 ty) + = IfaceSynonymTyCon (tidyToIfaceType tc_env1 ty) - to_ifsyn_rhs (BuiltInSynFamTyCon {}) = pprPanic "toIfaceDecl: BuiltInFamTyCon" (ppr tycon) + to_ifsyn_rhs (BuiltInSynFamTyCon {}) + = IfaceBuiltInSynFamTyCon ifaceConDecls (NewTyCon { data_con = con }) = IfNewTyCon (ifaceConDecl con) @@ -1609,23 +1654,28 @@ tyConToIfaceDecl env tycon = IfCon { ifConOcc = getOccName (dataConName data_con), ifConInfix = dataConIsInfix data_con, ifConWrapper = isJust (dataConWrapId_maybe data_con), - ifConUnivTvs = toIfaceTvBndrs univ_tvs', ifConExTvs = toIfaceTvBndrs ex_tvs', - ifConEqSpec = to_eq_spec eq_spec, - ifConCtxt = tidyToIfaceContext env2 theta, - ifConArgTys = map (tidyToIfaceType env2) arg_tys, + ifConEqSpec = map to_eq_spec eq_spec, + ifConCtxt = tidyToIfaceContext con_env2 theta, + ifConArgTys = map (tidyToIfaceType con_env2) arg_tys, ifConFields = map getOccName (dataConFieldLabels data_con), - ifConStricts = map (toIfaceBang env2) (dataConRepBangs data_con) } + ifConStricts = map (toIfaceBang con_env2) (dataConRepBangs data_con) } where (univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _) = dataConFullSig data_con - -- Start with 'emptyTidyEnv' not 'env1', because the type of the - -- data constructor is fully standalone - (env1, univ_tvs') = tidyTyVarBndrs emptyTidyEnv univ_tvs - (env2, ex_tvs') = tidyTyVarBndrs env1 ex_tvs - to_eq_spec spec = [ (getOccName (tidyTyVar env2 tv), tidyToIfaceType env2 ty) - | (tv,ty) <- spec] + -- Tidy the univ_tvs of the data constructor to be identical + -- to the tyConTyVars of the type constructor. This means + -- (a) we don't need to redundantly put them into the interface file + -- (b) when pretty-printing an Iface data declaration in H98-style syntax, + -- we know that the type variables will line up + -- The latter (b) is important because we pretty-print type construtors + -- by converting to IfaceSyn and pretty-printing that + con_env1 = (fst tc_env1, mkVarEnv (zipEqual "ifaceConDecl" univ_tvs tc_tyvars)) + -- A bit grimy, perhaps, but it's simple! 
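-- Editor's aside (not part of this patch): a minimal standalone sketch of the
-- idea behind con_env1 above. Instead of tidying the data constructor's
-- universal tyvars from scratch, they are mapped one-to-one onto the names the
-- parent TyCon was already tidied to, so a pretty-printed H98-style
-- declaration lines up. Every name below (tidyParent, shareUnivs, the example
-- strings) is invented for illustration; this is not GHC code.
import qualified Data.Map as Map

-- Tidy a parent's tyvars to canonical names and remember the renaming.
tidyParent :: [String] -> (Map.Map String String, [String])
tidyParent tvs = (Map.fromList (zip tvs fresh), fresh)
  where fresh = take (length tvs) [ 'a' : show i | i <- [1 :: Int ..] ]

-- The constructor's universals are not re-tidied: they are simply paired with
-- the parent's already-tidy names, mirroring the zipEqual used for con_env1.
shareUnivs :: [String] -> [String] -> Map.Map String String
shareUnivs univTvs parentTidied = Map.fromList (zip univTvs parentTidied)

main :: IO ()
main = do
  let (_, tidied) = tidyParent ["k", "f"]     -- ["a1","a2"]
  print (shareUnivs ["u_k", "u_f"] tidied)    -- fromList [("u_f","a2"),("u_k","a1")]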
+ + (con_env2, ex_tvs') = tidyTyVarBndrs con_env1 ex_tvs + to_eq_spec (tv,ty) = (toIfaceTyVar (tidyTyVar con_env2 tv), tidyToIfaceType con_env2 ty) toIfaceBang :: TidyEnv -> HsBang -> IfaceBang toIfaceBang _ HsNoBang = IfNoBang @@ -1634,17 +1684,18 @@ toIfaceBang env (HsUnpack (Just co)) = IfUnpackCo (toIfaceCoercion (tidyCo env c toIfaceBang _ HsStrict = IfStrict toIfaceBang _ (HsUserBang {}) = panic "toIfaceBang" -classToIfaceDecl :: TidyEnv -> Class -> IfaceDecl +classToIfaceDecl :: TidyEnv -> Class -> (TidyEnv, IfaceDecl) classToIfaceDecl env clas - = IfaceClass { ifCtxt = tidyToIfaceContext env1 sc_theta, - ifName = getOccName (classTyCon clas), - ifTyVars = toIfaceTvBndrs clas_tyvars', - ifRoles = tyConRoles (classTyCon clas), - ifFDs = map toIfaceFD clas_fds, - ifATs = map toIfaceAT clas_ats, - ifSigs = map toIfaceClassOp op_stuff, - ifMinDef = fmap getOccName (classMinimalDef clas), - ifRec = boolToRecFlag (isRecursiveTyCon tycon) } + = ( env1 + , IfaceClass { ifCtxt = tidyToIfaceContext env1 sc_theta, + ifName = getOccName (classTyCon clas), + ifTyVars = toIfaceTvBndrs clas_tyvars', + ifRoles = tyConRoles (classTyCon clas), + ifFDs = map toIfaceFD clas_fds, + ifATs = map toIfaceAT clas_ats, + ifSigs = map toIfaceClassOp op_stuff, + ifMinDef = fmap getFS (classMinimalDef clas), + ifRec = boolToRecFlag (isRecursiveTyCon tycon) }) where (clas_tyvars, clas_fds, sc_theta, _, clas_ats, op_stuff) = classExtraBigSig clas @@ -1653,8 +1704,10 @@ classToIfaceDecl env clas (env1, clas_tyvars') = tidyTyVarBndrs env clas_tyvars toIfaceAT :: ClassATItem -> IfaceAT - toIfaceAT (tc, defs) - = IfaceAT (tyConToIfaceDecl env1 tc) (map (coAxBranchToIfaceBranch' env1) defs) + toIfaceAT (ATI tc def) + = IfaceAT if_decl (fmap (tidyToIfaceType env2) def) + where + (env2, if_decl) = tyConToIfaceDecl env1 tc toIfaceClassOp (sel_id, def_meth) = ASSERT(sel_tyvars == clas_tyvars) @@ -1680,6 +1733,9 @@ classToIfaceDecl env clas tidyToIfaceType :: TidyEnv -> Type -> IfaceType tidyToIfaceType env ty = toIfaceType (tidyType env ty) +tidyToIfaceTcArgs :: TidyEnv -> TyCon -> [Type] -> IfaceTcArgs +tidyToIfaceTcArgs env tc tys = toIfaceTcArgs tc (tidyTypes env tys) + tidyToIfaceContext :: TidyEnv -> ThetaType -> IfaceContext tidyToIfaceContext env theta = map (tidyToIfaceType env) theta diff --git a/compiler/iface/TcIface.lhs b/compiler/iface/TcIface.lhs index cc45648ea2..68f9e8fd65 100644 --- a/compiler/iface/TcIface.lhs +++ b/compiler/iface/TcIface.lhs @@ -6,14 +6,15 @@ Type checking of type signatures in interface files \begin{code} +{-# LANGUAGE CPP #-} + module TcIface ( tcLookupImported_maybe, importDecl, checkWiredInTyCon, tcHiBootIface, typecheckIface, tcIfaceDecl, tcIfaceInst, tcIfaceFamInst, tcIfaceRules, tcIfaceVectInfo, tcIfaceAnnotations, tcIfaceExpr, -- Desired by HERMIT (Trac #7683) - tcIfaceGlobal, - tcExtCoreBindings + tcIfaceGlobal ) where #include "HsVersions.h" @@ -343,26 +344,34 @@ tcHiBootIface hsc_src mod else do -- OK, so we're in one-shot mode. - -- In that case, we're read all the direct imports by now, - -- so eps_is_boot will record if any of our imports mention us by - -- way of hi-boot file - { eps <- getEps - ; case lookupUFM (eps_is_boot eps) (moduleName mod) of { - Nothing -> return emptyModDetails ; -- The typical case + -- Re #9245, we always check if there is an hi-boot interface + -- to check consistency against, rather than just when we notice + -- that an hi-boot is necessary due to a circular import. 
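-- Editor's aside (not part of this patch): the new control flow above can be
-- summarised as "always try to read the hi-boot interface first; only consult
-- the eps_is_boot bookkeeping to explain its absence". A standalone sketch of
-- that decision structure; every type and name below is invented for
-- illustration and none of it is the GHC code itself.
import qualified Data.Map as Map

data BootCheck = TypecheckBoot String   -- hi-boot found: typecheck it
               | NoBootNeeded           -- the typical case
               | BootError String       -- module loop, or a vanished hi-boot
               deriving Show

checkHiBoot :: Maybe String             -- result of trying to read the hi-boot iface
            -> Map.Map String Bool      -- importers of us: was it a SOURCE import?
            -> String                   -- this module's name
            -> BootCheck
checkHiBoot (Just iface) _ _ = TypecheckBoot iface
checkHiBoot Nothing imps m =
  case Map.lookup m imps of
    Nothing    -> NoBootNeeded
    Just False -> BootError "module loop with no SOURCE import"
    Just True  -> BootError "hi-boot file mysteriously disappeared"

main :: IO ()
main = do
  print (checkHiBoot (Just "iface") Map.empty "M")
  print (checkHiBoot Nothing Map.empty "M")
  print (checkHiBoot Nothing (Map.fromList [("M", True)]) "M")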
+ { read_result <- findAndReadIface + need mod + True -- Hi-boot file - Just (_, False) -> failWithTc moduleLoop ; + ; case read_result of { + Succeeded (iface, _path) -> typecheckIface iface ; + Failed err -> + + -- There was no hi-boot file. But if there is circularity in + -- the module graph, there really should have been one. + -- Since we've read all the direct imports by now, + -- eps_is_boot will record if any of our imports mention the + -- current module, which either means a module loop (not + -- a SOURCE import) or that our hi-boot file has mysteriously + -- disappeared. + do { eps <- getEps + ; case lookupUFM (eps_is_boot eps) (moduleName mod) of + Nothing -> return emptyModDetails -- The typical case + + Just (_, False) -> failWithTc moduleLoop -- Someone below us imported us! -- This is a loop with no hi-boot in the way - Just (_mod, True) -> -- There's a hi-boot interface below us - - do { read_result <- findAndReadIface - need mod - True -- Hi-boot file - - ; case read_result of - Failed err -> failWithTc (elaborate err) - Succeeded (iface, _path) -> typecheckIface iface + Just (_mod, True) -> failWithTc (elaborate err) + -- The hi-boot file has mysteriously disappeared. }}}} where need = ptext (sLit "Need the hi-boot interface for") <+> ppr mod @@ -451,41 +460,26 @@ tc_iface_decl parent _ (IfaceData {ifName = occ_name, ifCtxt = ctxt, ifGadtSyntax = gadt_syn, ifCons = rdr_cons, ifRec = is_rec, ifPromotable = is_prom, - ifAxiom = mb_axiom_name }) + ifParent = mb_parent }) = bindIfaceTyVars_AT tv_bndrs $ \ tyvars -> do { tc_name <- lookupIfaceTop occ_name ; tycon <- fixM $ \ tycon -> do { stupid_theta <- tcIfaceCtxt ctxt - ; parent' <- tc_parent tyvars mb_axiom_name + ; parent' <- tc_parent mb_parent ; cons <- tcIfaceDataCons tc_name tycon tyvars rdr_cons ; return (buildAlgTyCon tc_name tyvars roles cType stupid_theta cons is_rec is_prom gadt_syn parent') } ; traceIf (text "tcIfaceDecl4" <+> ppr tycon) ; return (ATyCon tycon) } where - tc_parent :: [TyVar] -> Maybe Name -> IfL TyConParent - tc_parent _ Nothing = return parent - tc_parent tyvars (Just ax_name) + tc_parent :: IfaceTyConParent -> IfL TyConParent + tc_parent IfNoParent = return parent + tc_parent (IfDataInstance ax_name _ arg_tys) = ASSERT( isNoParent parent ) do { ax <- tcIfaceCoAxiom ax_name - ; let fam_tc = coAxiomTyCon ax + ; let fam_tc = coAxiomTyCon ax ax_unbr = toUnbranchedAxiom ax - -- data families don't have branches: - branch = coAxiomSingleBranch ax_unbr - ax_tvs = coAxBranchTyVars branch - ax_lhs = coAxBranchLHS branch - tycon_tys = mkTyVarTys tyvars - subst = mkTopTvSubst (ax_tvs `zip` tycon_tys) - -- The subst matches the tyvar of the TyCon - -- with those from the CoAxiom. 
They aren't - -- necessarily the same, since the two may be - -- gotten from separate interface-file declarations - -- NB: ax_tvs may be shorter because of eta-reduction - -- See Note [Eta reduction for data family axioms] in TcInstDcls - lhs_tys = substTys subst ax_lhs `chkAppend` - dropList ax_tvs tycon_tys - -- The 'lhs_tys' should be 1-1 with the 'tyvars' - -- but ax_tvs maybe shorter because of eta-reduction + ; lhs_tys <- tcIfaceTcArgs arg_tys ; return (FamInstTyCon ax_unbr fam_tc lhs_tys) } tc_iface_decl parent _ (IfaceSyn {ifName = occ_name, ifTyVars = tv_bndrs, @@ -502,12 +496,14 @@ tc_iface_decl parent _ (IfaceSyn {ifName = occ_name, ifTyVars = tv_bndrs, where mk_doc n = ptext (sLit "Type syonym") <+> ppr n tc_syn_rhs IfaceOpenSynFamilyTyCon = return OpenSynFamilyTyCon - tc_syn_rhs (IfaceClosedSynFamilyTyCon ax_name) + tc_syn_rhs (IfaceClosedSynFamilyTyCon ax_name _) = do { ax <- tcIfaceCoAxiom ax_name ; return (ClosedSynFamilyTyCon ax) } tc_syn_rhs IfaceAbstractClosedSynFamilyTyCon = return AbstractClosedSynFamilyTyCon tc_syn_rhs (IfaceSynonymTyCon ty) = do { rhs_ty <- tcIfaceType ty ; return (SynonymTyCon rhs_ty) } + tc_syn_rhs IfaceBuiltInSynFamTyCon = pprPanic "tc_iface_decl" + (ptext (sLit "IfaceBuiltInSynFamTyCon in interface file")) tc_iface_decl _parent ignore_prags (IfaceClass {ifCtxt = rdr_ctxt, ifName = tc_occ, @@ -524,11 +520,11 @@ tc_iface_decl _parent ignore_prags ; sigs <- mapM tc_sig rdr_sigs ; fds <- mapM tc_fd rdr_fds ; traceIf (text "tc-iface-class3" <+> ppr tc_occ) - ; mindef <- traverse lookupIfaceTop mindef_occ + ; mindef <- traverse (lookupIfaceTop . mkVarOccFS) mindef_occ ; cls <- fixM $ \ cls -> do { ats <- mapM (tc_at cls) rdr_ats ; traceIf (text "tc-iface-class4" <+> ppr tc_occ) - ; buildClass ignore_prags tc_name tyvars roles ctxt fds ats sigs mindef tc_isrec } + ; buildClass tc_name tyvars roles ctxt fds ats sigs mindef tc_isrec } ; return (ATyCon (classTyCon cls)) } where tc_sc pred = forkM (mk_sc_doc pred) (tcIfaceType pred) @@ -548,13 +544,18 @@ tc_iface_decl _parent ignore_prags -- it mentions unless it's necessary to do so ; return (op_name, dm, op_ty) } - tc_at cls (IfaceAT tc_decl defs_decls) + tc_at cls (IfaceAT tc_decl if_def) = do ATyCon tc <- tc_iface_decl (AssocFamilyTyCon cls) ignore_prags tc_decl - defs <- forkM (mk_at_doc tc) (tc_ax_branches tc defs_decls) + mb_def <- case if_def of + Nothing -> return Nothing + Just def -> forkM (mk_at_doc tc) $ + extendIfaceTyVarEnv (tyConTyVars tc) $ + do { tc_def <- tcIfaceType def + ; return (Just tc_def) } -- Must be done lazily in case the RHS of the defaults mention -- the type constructor being defined here -- e.g. 
type AT a; type AT b = AT [b] Trac #8002 - return (tc, defs) + return (ATI tc mb_def) mk_sc_doc pred = ptext (sLit "Superclass") <+> ppr pred mk_at_doc tc = ptext (sLit "Associated type") <+> ppr tc @@ -573,7 +574,7 @@ tc_iface_decl _ _ (IfaceAxiom { ifName = ax_occ, ifTyCon = tc , ifAxBranches = branches, ifRole = role }) = do { tc_name <- lookupIfaceTop ax_occ ; tc_tycon <- tcIfaceTyCon tc - ; tc_branches <- tc_ax_branches tc_tycon branches + ; tc_branches <- tc_ax_branches branches ; let axiom = CoAxiom { co_ax_unique = nameUnique tc_name , co_ax_name = tc_name , co_ax_tc = tc_tycon @@ -583,7 +584,8 @@ tc_iface_decl _ _ (IfaceAxiom { ifName = ax_occ, ifTyCon = tc ; return (ACoAxiom axiom) } tc_iface_decl _ _ (IfacePatSyn{ ifName = occ_name - , ifPatHasWrapper = has_wrapper + , ifPatMatcher = matcher_name + , ifPatWrapper = wrapper_name , ifPatIsInfix = is_infix , ifPatUnivTvs = univ_tvs , ifPatExTvs = ex_tvs @@ -593,31 +595,35 @@ tc_iface_decl _ _ (IfacePatSyn{ ifName = occ_name , ifPatTy = pat_ty }) = do { name <- lookupIfaceTop occ_name ; traceIf (ptext (sLit "tc_iface_decl") <+> ppr name) + ; matcher <- tcExt "Matcher" matcher_name + ; wrapper <- case wrapper_name of + Nothing -> return Nothing + Just wn -> do { wid <- tcExt "Wrapper" wn + ; return (Just wid) } ; bindIfaceTyVars univ_tvs $ \univ_tvs -> do { bindIfaceTyVars ex_tvs $ \ex_tvs -> do - { bindIfaceIdVars args $ \args -> do - { ~(prov_theta, req_theta, pat_ty) <- forkM (mk_doc name) $ + { patsyn <- forkM (mk_doc name) $ do { prov_theta <- tcIfaceCtxt prov_ctxt ; req_theta <- tcIfaceCtxt req_ctxt ; pat_ty <- tcIfaceType pat_ty - ; return (prov_theta, req_theta, pat_ty) } - ; bindIfaceTyVar (fsLit "r", toIfaceKind liftedTypeKind) $ \tv -> do - { patsyn <- buildPatSyn name is_infix has_wrapper args univ_tvs ex_tvs prov_theta req_theta pat_ty tv - ; return (AConLike (PatSynCon patsyn)) }}}}} + ; arg_tys <- mapM tcIfaceType args + ; return $ buildPatSyn name is_infix matcher wrapper + arg_tys univ_tvs ex_tvs prov_theta req_theta pat_ty } + ; return $ AConLike . 
PatSynCon $ patsyn }}} where mk_doc n = ptext (sLit "Pattern synonym") <+> ppr n + tcExt s name = forkM (ptext (sLit s) <+> ppr name) $ tcIfaceExtId name +tc_ax_branches :: [IfaceAxBranch] -> IfL [CoAxBranch] +tc_ax_branches if_branches = foldlM tc_ax_branch [] if_branches -tc_ax_branches :: TyCon -> [IfaceAxBranch] -> IfL [CoAxBranch] -tc_ax_branches tc if_branches = foldlM (tc_ax_branch (tyConKind tc)) [] if_branches - -tc_ax_branch :: Kind -> [CoAxBranch] -> IfaceAxBranch -> IfL [CoAxBranch] -tc_ax_branch tc_kind prev_branches +tc_ax_branch :: [CoAxBranch] -> IfaceAxBranch -> IfL [CoAxBranch] +tc_ax_branch prev_branches (IfaceAxBranch { ifaxbTyVars = tv_bndrs, ifaxbLHS = lhs, ifaxbRHS = rhs , ifaxbRoles = roles, ifaxbIncomps = incomps }) = bindIfaceTyVars_AT tv_bndrs $ \ tvs -> do -- The _AT variant is needed here; see Note [CoAxBranch type variables] in CoAxiom - { tc_lhs <- tcIfaceTcArgs tc_kind lhs -- See Note [Checking IfaceTypes vs IfaceKinds] + { tc_lhs <- tcIfaceTcArgs lhs -- See Note [Checking IfaceTypes vs IfaceKinds] ; tc_rhs <- tcIfaceType rhs ; let br = CoAxBranch { cab_loc = noSrcSpan , cab_tvs = tvs @@ -628,7 +634,7 @@ tc_ax_branch tc_kind prev_branches ; return (prev_branches ++ [br]) } tcIfaceDataCons :: Name -> TyCon -> [TyVar] -> IfaceConDecls -> IfL AlgTyConRhs -tcIfaceDataCons tycon_name tycon _ if_cons +tcIfaceDataCons tycon_name tycon tc_tyvars if_cons = case if_cons of IfAbstractTyCon dis -> return (AbstractTyCon dis) IfDataFamTyCon -> return DataFamilyTyCon @@ -638,11 +644,12 @@ tcIfaceDataCons tycon_name tycon _ if_cons ; mkNewTyConRhs tycon_name tycon data_con } where tc_con_decl (IfCon { ifConInfix = is_infix, - ifConUnivTvs = univ_tvs, ifConExTvs = ex_tvs, + ifConExTvs = ex_tvs, ifConOcc = occ, ifConCtxt = ctxt, ifConEqSpec = spec, ifConArgTys = args, ifConFields = field_lbls, ifConStricts = if_stricts}) - = bindIfaceTyVars univ_tvs $ \ univ_tyvars -> do + = -- Universally-quantified tyvars are shared with + -- parent TyCon, and are alrady in scope bindIfaceTyVars ex_tvs $ \ ex_tyvars -> do { traceIf (text "Start interface-file tc_con_decl" <+> ppr occ) ; name <- lookupIfaceTop occ @@ -664,12 +671,12 @@ tcIfaceDataCons tycon_name tycon _ if_cons -- Remember, tycon is the representation tycon ; let orig_res_ty = mkFamilyTyConApp tycon - (substTyVars (mkTopTvSubst eq_spec) univ_tyvars) + (substTyVars (mkTopTvSubst eq_spec) tc_tyvars) ; con <- buildDataCon (pprPanic "tcIfaceDataCons: FamInstEnvs" (ppr name)) name is_infix stricts lbl_names - univ_tyvars ex_tyvars + tc_tyvars ex_tyvars eq_spec theta arg_tys orig_res_ty tycon ; traceIf (text "Done interface-file tc_con_decl" <+> ppr name) @@ -682,11 +689,11 @@ tcIfaceDataCons tycon_name tycon _ if_cons tc_strict (IfUnpackCo if_co) = do { co <- tcIfaceCo if_co ; return (HsUnpack (Just co)) } -tcIfaceEqSpec :: [(OccName, IfaceType)] -> IfL [(TyVar, Type)] +tcIfaceEqSpec :: IfaceEqSpec -> IfL [(TyVar, Type)] tcIfaceEqSpec spec = mapM do_item spec where - do_item (occ, if_ty) = do { tv <- tcIfaceTyVar (occNameFS occ) + do_item (occ, if_ty) = do { tv <- tcIfaceTyVar occ ; ty <- tcIfaceType if_ty ; return (tv,ty) } \end{code} @@ -957,25 +964,38 @@ tcIfaceType :: IfaceType -> IfL Type tcIfaceType (IfaceTyVar n) = do { tv <- tcIfaceTyVar n; return (TyVarTy tv) } tcIfaceType (IfaceAppTy t1 t2) = do { t1' <- tcIfaceType t1; t2' <- tcIfaceType t2; return (AppTy t1' t2') } tcIfaceType (IfaceLitTy l) = do { l1 <- tcIfaceTyLit l; return (LitTy l1) } -tcIfaceType (IfaceFunTy t1 t2) = do { t1' <- tcIfaceType t1; t2' <- 
tcIfaceType t2; return (FunTy t1' t2') } +tcIfaceType (IfaceFunTy t1 t2) = tcIfaceTypeFun t1 t2 +tcIfaceType (IfaceDFunTy t1 t2) = tcIfaceTypeFun t1 t2 tcIfaceType (IfaceTyConApp tc tks) = do { tc' <- tcIfaceTyCon tc - ; tks' <- tcIfaceTcArgs (tyConKind tc') tks + ; tks' <- tcIfaceTcArgs tks ; return (mkTyConApp tc' tks') } tcIfaceType (IfaceForAllTy tv t) = bindIfaceTyVar tv $ \ tv' -> do { t' <- tcIfaceType t; return (ForAllTy tv' t') } -tcIfaceTypes :: [IfaceType] -> IfL [Type] -tcIfaceTypes tys = mapM tcIfaceType tys - -tcIfaceTcArgs :: Kind -> [IfaceType] -> IfL [Type] -tcIfaceTcArgs _ [] - = return [] -tcIfaceTcArgs kind (tk:tks) - = case splitForAllTy_maybe kind of - Nothing -> tcIfaceTypes (tk:tks) - Just (_, kind') -> do { k' <- tcIfaceKind tk - ; tks' <- tcIfaceTcArgs kind' tks - ; return (k':tks') } - +tcIfaceTypeFun :: IfaceType -> IfaceType -> IfL Type +tcIfaceTypeFun t1 t2 = do { t1' <- tcIfaceType t1; t2' <- tcIfaceType t2; return (FunTy t1' t2') } + +tcIfaceKind :: IfaceKind -> IfL Type +tcIfaceKind (IfaceAppTy t1 t2) = do { t1' <- tcIfaceKind t1; t2' <- tcIfaceKind t2; return (AppTy t1' t2') } +tcIfaceKind (IfaceFunTy t1 t2) = tcIfaceKindFun t1 t2 +tcIfaceKind (IfaceDFunTy t1 t2) = tcIfaceKindFun t1 t2 +tcIfaceKind (IfaceLitTy l) = pprPanic "tcIfaceKind" (ppr l) +tcIfaceKind k = tcIfaceType k + +tcIfaceKindFun :: IfaceKind -> IfaceKind -> IfL Type +tcIfaceKindFun t1 t2 = do { t1' <- tcIfaceKind t1; t2' <- tcIfaceKind t2; return (FunTy t1' t2') } + +tcIfaceTcArgs :: IfaceTcArgs -> IfL [Type] +tcIfaceTcArgs args + = case args of + ITC_Type t ts -> + do { t' <- tcIfaceType t + ; ts' <- tcIfaceTcArgs ts + ; return (t':ts') } + ITC_Kind k ks -> + do { k' <- tcIfaceKind k + ; ks' <- tcIfaceTcArgs ks + ; return (k':ks') } + ITC_Nil -> return [] ----------------------------------------- tcIfaceCtxt :: IfaceContext -> IfL ThetaType tcIfaceCtxt sts = mapM tcIfaceType sts @@ -984,43 +1004,8 @@ tcIfaceCtxt sts = mapM tcIfaceType sts tcIfaceTyLit :: IfaceTyLit -> IfL TyLit tcIfaceTyLit (IfaceNumTyLit n) = return (NumTyLit n) tcIfaceTyLit (IfaceStrTyLit n) = return (StrTyLit n) - ------------------------------------------ -tcIfaceKind :: IfaceKind -> IfL Kind -- See Note [Checking IfaceTypes vs IfaceKinds] -tcIfaceKind (IfaceTyVar n) = do { tv <- tcIfaceTyVar n; return (TyVarTy tv) } -tcIfaceKind (IfaceAppTy t1 t2) = do { t1' <- tcIfaceKind t1; t2' <- tcIfaceKind t2; return (AppTy t1' t2') } -tcIfaceKind (IfaceFunTy t1 t2) = do { t1' <- tcIfaceKind t1; t2' <- tcIfaceKind t2; return (FunTy t1' t2') } -tcIfaceKind (IfaceTyConApp tc ts) = do { tc' <- tcIfaceKindCon tc; ts' <- tcIfaceKinds ts; return (mkTyConApp tc' ts') } -tcIfaceKind (IfaceForAllTy tv t) = bindIfaceTyVar tv $ \ tv' -> do { t' <- tcIfaceKind t; return (ForAllTy tv' t') } -tcIfaceKind t = pprPanic "tcIfaceKind" (ppr t) -- IfaceCoApp, IfaceLitTy - -tcIfaceKinds :: [IfaceKind] -> IfL [Kind] -tcIfaceKinds tys = mapM tcIfaceKind tys \end{code} -Note [Checking IfaceTypes vs IfaceKinds] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -We need to know whether we are checking a *type* or a *kind*. -Consider module M where - Proxy :: forall k. k -> * - data T = T -and consider the two IfaceTypes - M.Proxy * M.T{tc} - M.Proxy 'M.T{tc} 'M.T(d} -The first is conventional, but in the latter we use the promoted -type constructor (as a kind) and data constructor (as a type). However, -the Name of the promoted type constructor is just M.T; it's the *same name* -as the ordinary type constructor. 
- -We could add a "promoted" flag to an IfaceTyCon, but that's a bit heavy. -Instead we use context to distinguish, as in the source language. - - When checking a kind, we look up M.T{tc} and promote it - - When checking a type, we look up M.T{tc} and don't promote it - and M.T{d} and promote it - See tcIfaceKindCon and tcIfaceKTyCon respectively - -This context business is why we need tcIfaceTcArgs, and tcIfaceApps - %************************************************************************ %* * @@ -1186,7 +1171,7 @@ tcIfaceApps fun arg go_up fun _ [] = return fun go_up fun fun_ty (IfaceType t : args) | Just (tv,body_ty) <- splitForAllTy_maybe fun_ty - = do { t' <- if isKindVar tv -- See Note [Checking IfaceTypes vs IfaceKinds] + = do { t' <- if isKindVar tv then tcIfaceKind t else tcIfaceType t ; let fun_ty' = substTyWith [tv] [t'] body_ty @@ -1251,30 +1236,6 @@ tcIfaceDataAlt con inst_tys arg_strs rhs \end{code} -\begin{code} -tcExtCoreBindings :: [IfaceBinding] -> IfL CoreProgram -- Used for external core -tcExtCoreBindings [] = return [] -tcExtCoreBindings (b:bs) = do_one b (tcExtCoreBindings bs) - -do_one :: IfaceBinding -> IfL [CoreBind] -> IfL [CoreBind] -do_one (IfaceNonRec bndr rhs) thing_inside - = do { rhs' <- tcIfaceExpr rhs - ; bndr' <- newExtCoreBndr bndr - ; extendIfaceIdEnv [bndr'] $ do - { core_binds <- thing_inside - ; return (NonRec bndr' rhs' : core_binds) }} - -do_one (IfaceRec pairs) thing_inside - = do { bndrs' <- mapM newExtCoreBndr bndrs - ; extendIfaceIdEnv bndrs' $ do - { rhss' <- mapM tcIfaceExpr rhss - ; core_binds <- thing_inside - ; return (Rec (bndrs' `zip` rhss') : core_binds) }} - where - (bndrs,rhss) = unzip pairs -\end{code} - - %************************************************************************ %* * IdInfo @@ -1457,26 +1418,19 @@ tcIfaceGlobal name -- emasculated form (e.g. lacking data constructors). tcIfaceTyCon :: IfaceTyCon -> IfL TyCon -tcIfaceTyCon (IfaceTc name) - = do { thing <- tcIfaceGlobal name - ; case thing of -- A "type constructor" can be a promoted data constructor - -- c.f. Trac #5881 - ATyCon tc -> return tc - AConLike (RealDataCon dc) -> return (promoteDataCon dc) - _ -> pprPanic "tcIfaceTyCon" (ppr name $$ ppr thing) } - -tcIfaceKindCon :: IfaceTyCon -> IfL TyCon -tcIfaceKindCon (IfaceTc name) - = do { thing <- tcIfaceGlobal name - ; case thing of -- A "type constructor" here is a promoted type constructor - -- c.f. 
Trac #5881 - ATyCon tc - | isSuperKind (tyConKind tc) - -> return tc -- Mainly just '*' or 'AnyK' - | Just prom_tc <- promotableTyCon_maybe tc - -> return prom_tc - - _ -> pprPanic "tcIfaceKindCon" (ppr name $$ ppr thing) } +tcIfaceTyCon itc + = do { + ; thing <- tcIfaceGlobal (ifaceTyConName itc) + ; case itc of + IfaceTc _ -> return $ tyThingTyCon thing + IfacePromotedDataCon _ -> return $ promoteDataCon $ tyThingDataCon thing + IfacePromotedTyCon name -> + let ktycon tc + | isSuperKind (tyConKind tc) = return tc + | Just prom_tc <- promotableTyCon_maybe tc = return prom_tc + | otherwise = pprPanic "tcIfaceTyCon" (ppr name $$ ppr thing) + in ktycon (tyThingTyCon thing) + } tcIfaceCoAxiom :: Name -> IfL (CoAxiom Branched) tcIfaceCoAxiom name = do { thing <- tcIfaceGlobal name @@ -1519,14 +1473,6 @@ bindIfaceBndrs (b:bs) thing_inside thing_inside (b':bs') ----------------------- -newExtCoreBndr :: IfaceLetBndr -> IfL Id -newExtCoreBndr (IfLetBndr var ty _) -- Ignoring IdInfo for now - = do { mod <- getIfModule - ; name <- newGlobalBinder mod (mkVarOccFS var) noSrcSpan - ; ty' <- tcIfaceType ty - ; return (mkLocalId name ty') } - ------------------------ bindIfaceTyVar :: IfaceTvBndr -> (TyVar -> IfL a) -> IfL a bindIfaceTyVar (occ,kind) thing_inside = do { name <- newIfaceName (mkTyVarOccFS occ) @@ -1547,22 +1493,8 @@ bindIfaceTyVars bndrs thing_inside where (occs,kinds) = unzip bndrs -bindIfaceIdVar :: IfaceIdBndr -> (Id -> IfL a) -> IfL a -bindIfaceIdVar (occ, ty) thing_inside - = do { name <- newIfaceName (mkVarOccFS occ) - ; ty' <- tcIfaceType ty - ; let id = mkLocalId name ty' - ; extendIfaceIdEnv [id] (thing_inside id) } - -bindIfaceIdVars :: [IfaceIdBndr] -> ([Id] -> IfL a) -> IfL a -bindIfaceIdVars [] thing_inside = thing_inside [] -bindIfaceIdVars (v:vs) thing_inside - = bindIfaceIdVar v $ \ v' -> - bindIfaceIdVars vs $ \ vs' -> - thing_inside (v':vs') - isSuperIfaceKind :: IfaceKind -> Bool -isSuperIfaceKind (IfaceTyConApp (IfaceTc n) []) = n == superKindTyConName +isSuperIfaceKind (IfaceTyConApp tc ITC_Nil) = ifaceTyConName tc == superKindTyConName isSuperIfaceKind _ = False mk_iface_tyvar :: Name -> IfaceKind -> IfL TyVar diff --git a/compiler/llvmGen/Llvm/AbsSyn.hs b/compiler/llvmGen/Llvm/AbsSyn.hs index f92bd89c5c..24d0856ea3 100644 --- a/compiler/llvmGen/Llvm/AbsSyn.hs +++ b/compiler/llvmGen/Llvm/AbsSyn.hs @@ -65,6 +65,8 @@ data LlvmFunction = LlvmFunction { type LlvmFunctions = [LlvmFunction] +type SingleThreaded = Bool + -- | LLVM ordering types for synchronization purposes. (Introduced in LLVM -- 3.0). Please see the LLVM documentation for a better description. data LlvmSyncOrdering @@ -224,6 +226,11 @@ data LlvmExpression | Load LlvmVar {- | + Atomic load of the value at location ptr + -} + | ALoad LlvmSyncOrdering SingleThreaded LlvmVar + + {- | Navigate in an structure, selecting elements * inbound: Is the pointer inbounds? (computed pointer doesn't overflow) * ptr: Location of the structure diff --git a/compiler/llvmGen/Llvm/PpLlvm.hs b/compiler/llvmGen/Llvm/PpLlvm.hs index b8343ceff3..73077257f8 100644 --- a/compiler/llvmGen/Llvm/PpLlvm.hs +++ b/compiler/llvmGen/Llvm/PpLlvm.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -------------------------------------------------------------------------------- -- | Pretty print LLVM IR Code. 
-- @@ -237,6 +239,7 @@ ppLlvmExpression expr Insert vec elt idx -> ppInsert vec elt idx GetElemPtr inb ptr indexes -> ppGetElementPtr inb ptr indexes Load ptr -> ppLoad ptr + ALoad ord st ptr -> ppALoad ord st ptr Malloc tp amount -> ppMalloc tp amount Phi tp precessors -> ppPhi tp precessors Asm asm c ty v se sk -> ppAsm asm c ty v se sk @@ -325,13 +328,18 @@ ppSyncOrdering SyncSeqCst = text "seq_cst" -- of specifying alignment. ppLoad :: LlvmVar -> SDoc -ppLoad var - | isVecPtrVar var = text "load" <+> ppr var <> - comma <+> text "align 1" - | otherwise = text "load" <+> ppr var +ppLoad var = text "load" <+> ppr var <> align where - isVecPtrVar :: LlvmVar -> Bool - isVecPtrVar = isVector . pLower . getVarType + align | isVector . pLower . getVarType $ var = text ", align 1" + | otherwise = empty + +ppALoad :: LlvmSyncOrdering -> SingleThreaded -> LlvmVar -> SDoc +ppALoad ord st var = sdocWithDynFlags $ \dflags -> + let alignment = (llvmWidthInBits dflags $ getVarType var) `quot` 8 + align = text ", align" <+> ppr alignment + sThreaded | st = text " singlethread" + | otherwise = empty + in text "load atomic" <+> ppr var <> sThreaded <+> ppSyncOrdering ord <> align ppStore :: LlvmVar -> LlvmVar -> SDoc ppStore val dst diff --git a/compiler/llvmGen/Llvm/Types.hs b/compiler/llvmGen/Llvm/Types.hs index 6b9c8c181a..89b0e4e141 100644 --- a/compiler/llvmGen/Llvm/Types.hs +++ b/compiler/llvmGen/Llvm/Types.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE CPP, GeneralizedNewtypeDeriving #-} -------------------------------------------------------------------------------- -- | The LLVM Type System. diff --git a/compiler/llvmGen/LlvmCodeGen.hs b/compiler/llvmGen/LlvmCodeGen.hs index 61e7e39a49..dd16e52868 100644 --- a/compiler/llvmGen/LlvmCodeGen.hs +++ b/compiler/llvmGen/LlvmCodeGen.hs @@ -1,8 +1,8 @@ +{-# LANGUAGE CPP, TypeFamilies #-} + -- ----------------------------------------------------------------------------- -- | This is the top-level module in the LLVM code generator. -- - -{-# LANGUAGE TypeFamilies #-} module LlvmCodeGen ( llvmCodeGen, llvmFixupAsm ) where #include "HsVersions.h" diff --git a/compiler/llvmGen/LlvmCodeGen/Base.hs b/compiler/llvmGen/LlvmCodeGen/Base.hs index 5d5f385ade..686b352c2a 100644 --- a/compiler/llvmGen/LlvmCodeGen/Base.hs +++ b/compiler/llvmGen/LlvmCodeGen/Base.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- ---------------------------------------------------------------------------- -- | Base LLVM Code Generation module -- diff --git a/compiler/llvmGen/LlvmCodeGen/CodeGen.hs b/compiler/llvmGen/LlvmCodeGen/CodeGen.hs index 808c591d92..4a56600937 100644 --- a/compiler/llvmGen/LlvmCodeGen/CodeGen.hs +++ b/compiler/llvmGen/LlvmCodeGen/CodeGen.hs @@ -1,9 +1,8 @@ -{-# OPTIONS -fno-warn-type-defaults #-} +{-# LANGUAGE CPP, GADTs #-} +{-# OPTIONS_GHC -fno-warn-type-defaults #-} -- ---------------------------------------------------------------------------- -- | Handle conversion of CmmProc to LLVM code. 
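-- Editor's aside (not part of this patch): a toy rendering of the atomic load
-- syntax emitted by the ppALoad printer added above in Llvm/PpLlvm.hs, using
-- plain Strings instead of SDoc/LlvmVar. The variable text and the 64-bit
-- width in the example are made up; as in ppALoad, the alignment is the value
-- width in bits divided by 8, and "singlethread" appears only when requested.
data SyncOrd = Monotonic | SeqCst

ppSyncOrd :: SyncOrd -> String
ppSyncOrd Monotonic = "monotonic"
ppSyncOrd SeqCst    = "seq_cst"

ppALoadToy :: SyncOrd -> Bool -> String -> Int -> String
ppALoadToy ord singleThreaded var widthInBits =
  "load atomic " ++ var
    ++ (if singleThreaded then " singlethread" else "")
    ++ " " ++ ppSyncOrd ord
    ++ ", align " ++ show (widthInBits `quot` 8)

main :: IO ()
main = putStrLn (ppALoadToy SeqCst False "i64* %Sp_Arg" 64)
-- prints: load atomic i64* %Sp_Arg seq_cst, align 8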
-- - -{-# LANGUAGE GADTs #-} module LlvmCodeGen.CodeGen ( genLlvmProc ) where #include "HsVersions.h" @@ -16,6 +15,7 @@ import BlockId import CodeGen.Platform ( activeStgRegs, callerSaves ) import CLabel import Cmm +import CPrim import PprCmm import CmmUtils import Hoopl @@ -33,6 +33,7 @@ import Unique import Data.List ( nub ) import Data.Maybe ( catMaybes ) +type Atomic = Bool type LlvmStatements = OrdList LlvmStatement -- ----------------------------------------------------------------------------- @@ -229,6 +230,17 @@ genCall t@(PrimTarget (MO_PopCnt w)) dsts args = genCall t@(PrimTarget (MO_BSwap w)) dsts args = genCallSimpleCast w t dsts args +genCall (PrimTarget (MO_AtomicRead _)) [dst] [addr] = do + dstV <- getCmmReg (CmmLocal dst) + (v1, stmts, top) <- genLoad True addr (localRegType dst) + let stmt1 = Store v1 dstV + return (stmts `snocOL` stmt1, top) + +-- TODO: implement these properly rather than calling to RTS functions. +-- genCall t@(PrimTarget (MO_AtomicWrite width)) [] [addr, val] = undefined +-- genCall t@(PrimTarget (MO_AtomicRMW width amop)) [dst] [addr, n] = undefined +-- genCall t@(PrimTarget (MO_Cmpxchg width)) [dst] [addr, old, new] = undefined + -- Handle memcpy function specifically since llvm's intrinsic version takes -- some extra parameters. genCall t@(PrimTarget op) [] args' @@ -549,7 +561,6 @@ cmmPrimOpFunctions mop = do (MO_Prefetch_Data _ )-> fsLit "llvm.prefetch" - MO_S_QuotRem {} -> unsupported MO_U_QuotRem {} -> unsupported MO_U_QuotRem2 {} -> unsupported @@ -559,6 +570,12 @@ cmmPrimOpFunctions mop = do MO_Touch -> unsupported MO_UF_Conv _ -> unsupported + MO_AtomicRead _ -> unsupported + + MO_AtomicRMW w amop -> fsLit $ atomicRMWLabel w amop + MO_Cmpxchg w -> fsLit $ cmpxchgLabel w + MO_AtomicWrite w -> fsLit $ atomicWriteLabel w + -- | Tail function calls genJump :: CmmExpr -> [GlobalReg] -> LlvmM StmtData @@ -805,7 +822,7 @@ genSwitch cond maybe_ids = do let pairs = [ (ix, id) | (ix,Just id) <- zip [0..] maybe_ids ] let labels = map (\(ix, b) -> (mkIntLit ty ix, blockIdToLlvm b)) pairs - -- out of range is undefied, so lets just branch to first label + -- out of range is undefined, so let's just branch to first label let (_, defLbl) = head labels let s1 = Switch vc defLbl labels @@ -850,7 +867,7 @@ exprToVarOpt opt e = case e of -> genLit opt lit CmmLoad e' ty - -> genLoad e' ty + -> genLoad False e' ty -- Cmmreg in expression is the value, so must load. If you want actual -- reg pointer, call getCmmReg directly. @@ -1002,8 +1019,8 @@ genMachOp _ op [x] = case op of sameConv from ty reduce expand = do x'@(vx, stmts, top) <- exprToVar x let sameConv' op = do - (v1, s1) <- doExpr ty $ Cast op vx ty - return (v1, stmts `snocOL` s1, top) + (v1, s1) <- doExpr ty $ Cast op vx ty + return (v1, stmts `snocOL` s1, top) dflags <- getDynFlags let toWidth = llvmWidthInBits dflags ty -- LLVM doesn't like trying to convert to same width, so @@ -1269,41 +1286,41 @@ genMachOp_slow _ _ _ = panic "genMachOp: More then 2 expressions in MachOp!" -- | Handle CmmLoad expression. -genLoad :: CmmExpr -> CmmType -> LlvmM ExprData +genLoad :: Atomic -> CmmExpr -> CmmType -> LlvmM ExprData -- First we try to detect a few common cases and produce better code for -- these then the default case. 
We are mostly trying to detect Cmm code -- like I32[Sp + n] and use 'getelementptr' operations instead of the -- generic case that uses casts and pointer arithmetic -genLoad e@(CmmReg (CmmGlobal r)) ty - = genLoad_fast e r 0 ty +genLoad atomic e@(CmmReg (CmmGlobal r)) ty + = genLoad_fast atomic e r 0 ty -genLoad e@(CmmRegOff (CmmGlobal r) n) ty - = genLoad_fast e r n ty +genLoad atomic e@(CmmRegOff (CmmGlobal r) n) ty + = genLoad_fast atomic e r n ty -genLoad e@(CmmMachOp (MO_Add _) [ +genLoad atomic e@(CmmMachOp (MO_Add _) [ (CmmReg (CmmGlobal r)), (CmmLit (CmmInt n _))]) ty - = genLoad_fast e r (fromInteger n) ty + = genLoad_fast atomic e r (fromInteger n) ty -genLoad e@(CmmMachOp (MO_Sub _) [ +genLoad atomic e@(CmmMachOp (MO_Sub _) [ (CmmReg (CmmGlobal r)), (CmmLit (CmmInt n _))]) ty - = genLoad_fast e r (negate $ fromInteger n) ty + = genLoad_fast atomic e r (negate $ fromInteger n) ty -- generic case -genLoad e ty +genLoad atomic e ty = do other <- getTBAAMeta otherN - genLoad_slow e ty other + genLoad_slow atomic e ty other -- | Handle CmmLoad expression. -- This is a special case for loading from a global register pointer -- offset such as I32[Sp+8]. -genLoad_fast :: CmmExpr -> GlobalReg -> Int -> CmmType - -> LlvmM ExprData -genLoad_fast e r n ty = do +genLoad_fast :: Atomic -> CmmExpr -> GlobalReg -> Int -> CmmType + -> LlvmM ExprData +genLoad_fast atomic e r n ty = do dflags <- getDynFlags (gv, grt, s1) <- getCmmRegVal (CmmGlobal r) meta <- getTBAARegMeta r @@ -1316,7 +1333,7 @@ genLoad_fast e r n ty = do case grt == ty' of -- were fine True -> do - (var, s3) <- doExpr ty' (MExpr meta $ Load ptr) + (var, s3) <- doExpr ty' (MExpr meta $ loadInstr ptr) return (var, s1 `snocOL` s2 `snocOL` s3, []) @@ -1324,32 +1341,34 @@ genLoad_fast e r n ty = do False -> do let pty = pLift ty' (ptr', s3) <- doExpr pty $ Cast LM_Bitcast ptr pty - (var, s4) <- doExpr ty' (MExpr meta $ Load ptr') + (var, s4) <- doExpr ty' (MExpr meta $ loadInstr ptr') return (var, s1 `snocOL` s2 `snocOL` s3 `snocOL` s4, []) -- If its a bit type then we use the slow method since -- we can't avoid casting anyway. - False -> genLoad_slow e ty meta - + False -> genLoad_slow atomic e ty meta + where + loadInstr ptr | atomic = ALoad SyncSeqCst False ptr + | otherwise = Load ptr -- | Handle Cmm load expression. -- Generic case. Uses casts and pointer arithmetic if needed. -genLoad_slow :: CmmExpr -> CmmType -> [MetaAnnot] -> LlvmM ExprData -genLoad_slow e ty meta = do +genLoad_slow :: Atomic -> CmmExpr -> CmmType -> [MetaAnnot] -> LlvmM ExprData +genLoad_slow atomic e ty meta = do (iptr, stmts, tops) <- exprToVar e dflags <- getDynFlags case getVarType iptr of LMPointer _ -> do (dvar, load) <- doExpr (cmmToLlvmType ty) - (MExpr meta $ Load iptr) + (MExpr meta $ loadInstr iptr) return (dvar, stmts `snocOL` load, tops) i@(LMInt _) | i == llvmWord dflags -> do let pty = LMPointer $ cmmToLlvmType ty (ptr, cast) <- doExpr pty $ Cast LM_Inttoptr iptr pty (dvar, load) <- doExpr (cmmToLlvmType ty) - (MExpr meta $ Load ptr) + (MExpr meta $ loadInstr ptr) return (dvar, stmts `snocOL` cast `snocOL` load, tops) other -> do dflags <- getDynFlags @@ -1358,6 +1377,9 @@ genLoad_slow e ty meta = do "Size of Ptr: " ++ show (llvmPtrBits dflags) ++ ", Size of var: " ++ show (llvmWidthInBits dflags other) ++ ", Var: " ++ showSDoc dflags (ppr iptr))) + where + loadInstr ptr | atomic = ALoad SyncSeqCst False ptr + | otherwise = Load ptr -- | Handle CmmReg expression. 
This will return a pointer to the stack diff --git a/compiler/llvmGen/LlvmCodeGen/Data.hs b/compiler/llvmGen/LlvmCodeGen/Data.hs index 6212cfc9fb..1dbfb4b527 100644 --- a/compiler/llvmGen/LlvmCodeGen/Data.hs +++ b/compiler/llvmGen/LlvmCodeGen/Data.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} -- ---------------------------------------------------------------------------- -- | Handle conversion of CmmData to LLVM code. -- diff --git a/compiler/llvmGen/LlvmCodeGen/Ppr.hs b/compiler/llvmGen/LlvmCodeGen/Ppr.hs index 202e685c0e..9c6a719613 100644 --- a/compiler/llvmGen/LlvmCodeGen/Ppr.hs +++ b/compiler/llvmGen/LlvmCodeGen/Ppr.hs @@ -1,7 +1,8 @@ +{-# LANGUAGE CPP #-} + -- ---------------------------------------------------------------------------- -- | Pretty print helpers for the LLVM Code generator. -- - module LlvmCodeGen.Ppr ( pprLlvmHeader, pprLlvmCmmDecl, pprLlvmData, infoSection, iTableSuf ) where diff --git a/compiler/llvmGen/LlvmCodeGen/Regs.hs b/compiler/llvmGen/LlvmCodeGen/Regs.hs index 9f20aa5de5..0048659069 100644 --- a/compiler/llvmGen/LlvmCodeGen/Regs.hs +++ b/compiler/llvmGen/LlvmCodeGen/Regs.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -------------------------------------------------------------------------------- -- | Deal with Cmm registers -- diff --git a/compiler/llvmGen/LlvmMangler.hs b/compiler/llvmGen/LlvmMangler.hs index a9054174e1..7084a2e727 100644 --- a/compiler/llvmGen/LlvmMangler.hs +++ b/compiler/llvmGen/LlvmMangler.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- ----------------------------------------------------------------------------- -- | GHC LLVM Mangler -- diff --git a/compiler/main/BreakArray.hs b/compiler/main/BreakArray.hs index d16d6f229d..6455912b67 100644 --- a/compiler/main/BreakArray.hs +++ b/compiler/main/BreakArray.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-} + ------------------------------------------------------------------------------- -- -- | Break Arrays in the IO monad diff --git a/compiler/main/CmdLineParser.hs b/compiler/main/CmdLineParser.hs index 22811d44cc..5ee7086cbc 100644 --- a/compiler/main/CmdLineParser.hs +++ b/compiler/main/CmdLineParser.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ------------------------------------------------------------------------------- -- -- | Command-line parser diff --git a/compiler/main/CodeOutput.lhs b/compiler/main/CodeOutput.lhs index b8b187241b..c0a609ba2e 100644 --- a/compiler/main/CodeOutput.lhs +++ b/compiler/main/CodeOutput.lhs @@ -4,6 +4,8 @@ \section{Code output phase} \begin{code} +{-# LANGUAGE CPP #-} + module CodeOutput( codeOutput, outputForeignStubs ) where #include "HsVersions.h" @@ -72,7 +74,6 @@ codeOutput dflags this_mod filenm location foreign_stubs pkg_deps cmm_stream ; return cmm } - ; showPass dflags "CodeOutput" ; stubs_exist <- outputForeignStubs dflags this_mod location foreign_stubs ; case hscTarget dflags of { HscAsm -> outputAsm dflags this_mod filenm linted_cmm_stream; @@ -190,11 +191,8 @@ outputForeignStubs dflags mod location stubs stub_c <- newTempName dflags "c" case stubs of - NoStubs -> do - -- When compiling External Core files, may need to use stub - -- files from a previous compilation - stub_h_exists <- doesFileExist stub_h - return (stub_h_exists, Nothing) + NoStubs -> + return (False, Nothing) ForeignStubs h_code c_code -> do let diff --git a/compiler/main/DriverMkDepend.hs b/compiler/main/DriverMkDepend.hs index cda0b4729f..03545d4828 100644 --- a/compiler/main/DriverMkDepend.hs +++ b/compiler/main/DriverMkDepend.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE 
CPP #-} + ----------------------------------------------------------------------------- -- -- Makefile Dependency Generation diff --git a/compiler/main/DriverPhases.hs b/compiler/main/DriverPhases.hs index 2981269d54..fa8b2d060f 100644 --- a/compiler/main/DriverPhases.hs +++ b/compiler/main/DriverPhases.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- $Id: DriverPhases.hs,v 1.38 2005/05/17 11:01:59 simonmar Exp $ -- @@ -18,7 +20,6 @@ module DriverPhases ( isHaskellSrcSuffix, isObjectSuffix, isCishSuffix, - isExtCoreSuffix, isDynLibSuffix, isHaskellUserSrcSuffix, isSourceSuffix, @@ -27,7 +28,6 @@ module DriverPhases ( isHaskellSrcFilename, isObjectFilename, isCishFilename, - isExtCoreFilename, isDynLibFilename, isHaskellUserSrcFilename, isSourceFilename @@ -56,7 +56,7 @@ import System.FilePath -} data HscSource - = HsSrcFile | HsBootFile | ExtCoreFile + = HsSrcFile | HsBootFile deriving( Eq, Ord, Show ) -- Ord needed for the finite maps we build in CompManager @@ -64,7 +64,6 @@ data HscSource hscSourceString :: HscSource -> String hscSourceString HsSrcFile = "" hscSourceString HsBootFile = "[boot]" -hscSourceString ExtCoreFile = "[ext core]" isHsBoot :: HscSource -> Bool isHsBoot HsBootFile = True @@ -175,7 +174,6 @@ startPhase "hs" = Cpp HsSrcFile startPhase "hs-boot" = Cpp HsBootFile startPhase "hscpp" = HsPp HsSrcFile startPhase "hspp" = Hsc HsSrcFile -startPhase "hcr" = Hsc ExtCoreFile startPhase "hc" = HCc startPhase "c" = Cc startPhase "cpp" = Ccpp @@ -202,7 +200,6 @@ startPhase _ = StopLn -- all unknown file types phaseInputExt :: Phase -> String phaseInputExt (Unlit HsSrcFile) = "lhs" phaseInputExt (Unlit HsBootFile) = "lhs-boot" -phaseInputExt (Unlit ExtCoreFile) = "lhcr" phaseInputExt (Cpp _) = "lpp" -- intermediate only phaseInputExt (HsPp _) = "hscpp" -- intermediate only phaseInputExt (Hsc _) = "hspp" -- intermediate only @@ -227,13 +224,12 @@ phaseInputExt MergeStub = "o" phaseInputExt StopLn = "o" haskellish_src_suffixes, haskellish_suffixes, cish_suffixes, - extcoreish_suffixes, haskellish_user_src_suffixes + haskellish_user_src_suffixes :: [String] haskellish_src_suffixes = haskellish_user_src_suffixes ++ [ "hspp", "hscpp", "hcr", "cmm", "cmmcpp" ] haskellish_suffixes = haskellish_src_suffixes ++ ["hc", "raw_s"] cish_suffixes = [ "c", "cpp", "C", "cc", "cxx", "s", "S", "ll", "bc", "lm_s", "m", "M", "mm" ] -extcoreish_suffixes = [ "hcr" ] -- Will not be deleted as temp files: haskellish_user_src_suffixes = [ "hs", "lhs", "hs-boot", "lhs-boot" ] @@ -250,13 +246,12 @@ dynlib_suffixes platform = case platformOS platform of OSDarwin -> ["dylib", "so"] _ -> ["so"] -isHaskellishSuffix, isHaskellSrcSuffix, isCishSuffix, isExtCoreSuffix, +isHaskellishSuffix, isHaskellSrcSuffix, isCishSuffix, isHaskellUserSrcSuffix :: String -> Bool isHaskellishSuffix s = s `elem` haskellish_suffixes isHaskellSrcSuffix s = s `elem` haskellish_src_suffixes isCishSuffix s = s `elem` cish_suffixes -isExtCoreSuffix s = s `elem` extcoreish_suffixes isHaskellUserSrcSuffix s = s `elem` haskellish_user_src_suffixes isObjectSuffix, isDynLibSuffix :: Platform -> String -> Bool @@ -267,13 +262,12 @@ isSourceSuffix :: String -> Bool isSourceSuffix suff = isHaskellishSuffix suff || isCishSuffix suff isHaskellishFilename, isHaskellSrcFilename, isCishFilename, - isExtCoreFilename, isHaskellUserSrcFilename, isSourceFilename + isHaskellUserSrcFilename, isSourceFilename :: FilePath -> Bool -- takeExtension return .foo, so we drop 1 to get rid 
of the . isHaskellishFilename f = isHaskellishSuffix (drop 1 $ takeExtension f) isHaskellSrcFilename f = isHaskellSrcSuffix (drop 1 $ takeExtension f) isCishFilename f = isCishSuffix (drop 1 $ takeExtension f) -isExtCoreFilename f = isExtCoreSuffix (drop 1 $ takeExtension f) isHaskellUserSrcFilename f = isHaskellUserSrcSuffix (drop 1 $ takeExtension f) isSourceFilename f = isSourceSuffix (drop 1 $ takeExtension f) diff --git a/compiler/main/DriverPipeline.hs b/compiler/main/DriverPipeline.hs index b93cef1fba..11427e27cf 100644 --- a/compiler/main/DriverPipeline.hs +++ b/compiler/main/DriverPipeline.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-cse #-} -{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE CPP, NamedFieldPuns, NondecreasingIndentation #-} +{-# OPTIONS_GHC -fno-cse #-} -- -fno-cse is needed for GLOBAL_VAR's to behave properly ----------------------------------------------------------------------------- @@ -54,7 +54,6 @@ import Util import StringBuffer ( hGetStringBuffer ) import BasicTypes ( SuccessFlag(..) ) import Maybes ( expectJust ) -import ParserCoreUtils ( getCoreModuleName ) import SrcLoc import FastString import LlvmCodeGen ( llvmFixupAsm ) @@ -169,8 +168,6 @@ compileOne' m_tc_result mHscMessage output_fn <- getOutputFilename next_phase Temporary basename dflags next_phase (Just location) - let extCore_filename = basename ++ ".hcr" - -- -fforce-recomp should also work with --make let force_recomp = gopt Opt_ForceRecomp dflags source_modified @@ -207,7 +204,7 @@ compileOne' m_tc_result mHscMessage hm_linkable = maybe_old_linkable }) _ -> do guts0 <- hscDesugar hsc_env summary tc_result guts <- hscSimplify hsc_env guts0 - (iface, _changed, details, cgguts) <- hscNormalIface hsc_env extCore_filename guts mb_old_hash + (iface, _changed, details, cgguts) <- hscNormalIface hsc_env guts mb_old_hash (hasStub, comp_bc, modBreaks) <- hscInteractive hsc_env cgguts summary stub_o <- case hasStub of @@ -231,7 +228,9 @@ compileOne' m_tc_result mHscMessage hm_iface = iface, hm_linkable = Just linkable }) HscNothing -> - do (iface, _changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash + do (iface, changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash + when (gopt Opt_WriteInterface dflags) $ + hscWriteIface dflags iface changed summary let linkable = if isHsBoot src_flavour then maybe_old_linkable else Just (LM (ms_hs_date summary) this_mod []) @@ -251,7 +250,7 @@ compileOne' m_tc_result mHscMessage _ -> do guts0 <- hscDesugar hsc_env summary tc_result guts <- hscSimplify hsc_env guts0 - (iface, changed, details, cgguts) <- hscNormalIface hsc_env extCore_filename guts mb_old_hash + (iface, changed, details, cgguts) <- hscNormalIface hsc_env guts mb_old_hash hscWriteIface dflags iface changed summary -- We're in --make mode: finish the compilation pipeline. @@ -892,16 +891,11 @@ runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0 setDynFlags dflags -- gather the imports and module name - (hspp_buf,mod_name,imps,src_imps) <- liftIO $ - case src_flavour of - ExtCoreFile -> do -- no explicit imports in ExtCore input. 
- m <- getCoreModuleName input_fn - return (Nothing, mkModuleName m, [], []) - - _ -> do - buf <- hGetStringBuffer input_fn - (src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff) - return (Just buf, mod_name, imps, src_imps) + (hspp_buf,mod_name,imps,src_imps) <- liftIO $ do + do + buf <- hGetStringBuffer input_fn + (src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff) + return (Just buf, mod_name, imps, src_imps) -- Take -o into account if present -- Very like -ohi, but we must *only* do this if we aren't linking @@ -936,8 +930,6 @@ runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0 then return SourceUnmodified else return SourceModified - let extCore_filename = basename ++ ".hcr" - PipeState{hsc_env=hsc_env'} <- getPipeState -- Tell the finder cache about this module @@ -957,7 +949,7 @@ runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0 ms_srcimps = src_imps } -- run the compiler! - result <- liftIO $ hscCompileOneShot hsc_env' extCore_filename + result <- liftIO $ hscCompileOneShot hsc_env' mod_summary source_unchanged return (HscOut src_flavour mod_name result, @@ -1216,6 +1208,7 @@ runPhase (RealPhase (As with_cpp)) input_fn dflags -- might be a hierarchical module. liftIO $ createDirectoryIfMissing True (takeDirectory output_fn) + ccInfo <- liftIO $ getCompilerInfo dflags let runAssembler inputFilename outputFilename = liftIO $ as_prog dflags ([ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ] @@ -1230,7 +1223,9 @@ runPhase (RealPhase (As with_cpp)) input_fn dflags ++ (if platformArch (targetPlatform dflags) == ArchSPARC then [SysTools.Option "-mcpu=v9"] else []) - + ++ (if any (ccInfo ==) [Clang, AppleClang, AppleClang51] + then [SysTools.Option "-Qunused-arguments"] + else []) ++ [ SysTools.Option "-x" , if with_cpp then SysTools.Option "assembler-with-cpp" @@ -2139,26 +2134,27 @@ joinObjectFiles dflags o_files output_fn = do let mySettings = settings dflags ldIsGnuLd = sLdIsGnuLd mySettings osInfo = platformOS (targetPlatform dflags) - ld_r args ccInfo = SysTools.runLink dflags ([ - SysTools.Option "-nostdlib", - SysTools.Option "-Wl,-r" - ] - ++ (if ccInfo == Clang then [] - else [SysTools.Option "-nodefaultlibs"]) - ++ (if osInfo == OSFreeBSD - then [SysTools.Option "-L/usr/lib"] - else []) - -- gcc on sparc sets -Wl,--relax implicitly, but - -- -r and --relax are incompatible for ld, so - -- disable --relax explicitly. - ++ (if platformArch (targetPlatform dflags) == ArchSPARC - && ldIsGnuLd - then [SysTools.Option "-Wl,-no-relax"] - else []) - ++ map SysTools.Option ld_build_id - ++ [ SysTools.Option "-o", - SysTools.FileOption "" output_fn ] - ++ args) + ld_r args cc = SysTools.runLink dflags ([ + SysTools.Option "-nostdlib", + SysTools.Option "-Wl,-r" + ] + ++ (if any (cc ==) [Clang, AppleClang, AppleClang51] + then [] + else [SysTools.Option "-nodefaultlibs"]) + ++ (if osInfo == OSFreeBSD + then [SysTools.Option "-L/usr/lib"] + else []) + -- gcc on sparc sets -Wl,--relax implicitly, but + -- -r and --relax are incompatible for ld, so + -- disable --relax explicitly. 
+ ++ (if platformArch (targetPlatform dflags) == ArchSPARC + && ldIsGnuLd + then [SysTools.Option "-Wl,-no-relax"] + else []) + ++ map SysTools.Option ld_build_id + ++ [ SysTools.Option "-o", + SysTools.FileOption "" output_fn ] + ++ args) -- suppress the generation of the .note.gnu.build-id section, -- which we don't need and sometimes causes ld to emit a diff --git a/compiler/main/DynFlags.hs b/compiler/main/DynFlags.hs index 72ebb38fc2..122eafff19 100644 --- a/compiler/main/DynFlags.hs +++ b/compiler/main/DynFlags.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ------------------------------------------------------------------------------- -- -- | Dynamic flags @@ -11,7 +13,7 @@ -- ------------------------------------------------------------------------------- -{-# OPTIONS -fno-cse #-} +{-# OPTIONS_GHC -fno-cse #-} -- -fno-cse is needed for GLOBAL_VAR's to behave properly module DynFlags ( @@ -30,6 +32,7 @@ module DynFlags ( wopt, wopt_set, wopt_unset, xopt, xopt_set, xopt_unset, lang_set, + useUnicodeSyntax, whenGeneratingDynamicToo, ifGeneratingDynamicToo, whenCannotGenerateDynamicToo, dynamicTooMkDynamicDynFlags, @@ -330,6 +333,7 @@ data GeneralFlag | Opt_IgnoreInterfacePragmas | Opt_OmitInterfacePragmas | Opt_ExposeAllUnfoldings + | Opt_WriteInterface -- forces .hi files to be written even with -fno-code -- profiling opts | Opt_AutoSccsOnIndividualCafs @@ -403,8 +407,6 @@ data GeneralFlag | Opt_SuppressUniques -- temporary flags - | Opt_RunCPS - | Opt_RunCPSZ | Opt_AutoLinkPackages | Opt_ImplicitImportQualified @@ -580,6 +582,7 @@ data ExtensionFlag | Opt_TraditionalRecordSyntax | Opt_LambdaCase | Opt_MultiWayIf + | Opt_BinaryLiterals | Opt_NegativeLiterals | Opt_EmptyCase | Opt_PatternSynonyms @@ -774,7 +777,7 @@ data DynFlags = DynFlags { pprCols :: Int, traceLevel :: Int, -- Standard level is 1. Less verbose is 0. 
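Both the assembler invocation and the ld -r wrapper above now branch on the whole Clang family rather than on Clang alone. A minimal sketch of that predicate, assuming the CompilerInfo constructors this patch adds to DynFlags below; the helper name is illustrative, and the patch itself inlines the equivalent any (cc ==) [...] test:

    import DynFlags (CompilerInfo (..))  -- GCC | Clang | AppleClang | AppleClang51 | UnknownCC, Eq is derived

    -- Hypothetical helper mirroring the checks in the assembler phase and joinObjectFiles.
    isClangFamily :: CompilerInfo -> Bool
    isClangFamily cc = cc `elem` [Clang, AppleClang, AppleClang51]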
- useUnicodeQuotes :: Bool, + useUnicode :: Bool, -- | what kind of {-# SCC #-} to add automatically profAuto :: ProfAuto, @@ -1292,12 +1295,12 @@ initDynFlags dflags = do refRtldInfo <- newIORef Nothing refRtccInfo <- newIORef Nothing wrapperNum <- newIORef emptyModuleEnv - canUseUnicodeQuotes <- do let enc = localeEncoding - str = "‘’" - (withCString enc str $ \cstr -> - do str' <- peekCString enc cstr - return (str == str')) - `catchIOError` \_ -> return False + canUseUnicode <- do let enc = localeEncoding + str = "‘’" + (withCString enc str $ \cstr -> + do str' <- peekCString enc cstr + return (str == str')) + `catchIOError` \_ -> return False return dflags{ canGenerateDynamicToo = refCanGenerateDynamicToo, nextTempSuffix = refNextTempSuffix, @@ -1307,7 +1310,7 @@ initDynFlags dflags = do generatedDumps = refGeneratedDumps, llvmVersion = refLlvmVersion, nextWrapperNum = wrapperNum, - useUnicodeQuotes = canUseUnicodeQuotes, + useUnicode = canUseUnicode, rtldInfo = refRtldInfo, rtccInfo = refRtccInfo } @@ -1446,7 +1449,7 @@ defaultDynFlags mySettings = flushErr = defaultFlushErr, pprUserLength = 5, pprCols = 100, - useUnicodeQuotes = False, + useUnicode = False, traceLevel = 1, profAuto = NoProfAuto, llvmVersion = panic "defaultDynFlags: No llvmVersion", @@ -1682,6 +1685,9 @@ lang_set dflags lang = extensionFlags = flattenExtensionFlags lang (extensions dflags) } +useUnicodeSyntax :: DynFlags -> Bool +useUnicodeSyntax = xopt Opt_UnicodeSyntax + -- | Set the Haskell language standard to use setLanguage :: Language -> DynP () setLanguage l = upd (`lang_set` Just l) @@ -2187,16 +2193,9 @@ dynamic_flags = [ -------- ghc -M ----------------------------------------------------- , Flag "dep-suffix" (hasArg addDepSuffix) - , Flag "optdep-s" (hasArgDF addDepSuffix "Use -dep-suffix instead") , Flag "dep-makefile" (hasArg setDepMakefile) - , Flag "optdep-f" (hasArgDF setDepMakefile "Use -dep-makefile instead") - , Flag "optdep-w" (NoArg (deprecate "doesn't do anything")) , Flag "include-pkg-deps" (noArg (setDepIncludePkgDeps True)) - , Flag "optdep--include-prelude" (noArgDF (setDepIncludePkgDeps True) "Use -include-pkg-deps instead") - , Flag "optdep--include-pkg-deps" (noArgDF (setDepIncludePkgDeps True) "Use -include-pkg-deps instead") , Flag "exclude-module" (hasArg addDepExcludeMod) - , Flag "optdep--exclude-module" (hasArgDF addDepExcludeMod "Use -exclude-module instead") - , Flag "optdep-x" (hasArgDF addDepExcludeMod "Use -exclude-module instead") -------- Linking ---------------------------------------------------- , Flag "no-link" (noArg (\d -> d{ ghcLink=NoLink })) @@ -2650,6 +2649,7 @@ fFlags = [ ( "pedantic-bottoms", Opt_PedanticBottoms, nop ), ( "ignore-interface-pragmas", Opt_IgnoreInterfacePragmas, nop ), ( "omit-interface-pragmas", Opt_OmitInterfacePragmas, nop ), + ( "write-interface", Opt_WriteInterface, nop ), ( "expose-all-unfoldings", Opt_ExposeAllUnfoldings, nop ), ( "do-lambda-eta-expansion", Opt_DoLambdaEtaExpansion, nop ), ( "ignore-asserts", Opt_IgnoreAsserts, nop ), @@ -2669,8 +2669,6 @@ fFlags = [ ( "break-on-error", Opt_BreakOnError, nop ), ( "print-evld-with-show", Opt_PrintEvldWithShow, nop ), ( "print-bind-contents", Opt_PrintBindContents, nop ), - ( "run-cps", Opt_RunCPS, nop ), - ( "run-cpsz", Opt_RunCPSZ, nop ), ( "vectorise", Opt_Vectorise, nop ), ( "vectorisation-avoidance", Opt_VectorisationAvoidance, nop ), ( "regs-graph", Opt_RegsGraph, nop ), @@ -2685,7 +2683,8 @@ fFlags = [ ( "fun-to-thunk", Opt_FunToThunk, nop ), ( "gen-manifest", Opt_GenManifest, nop ), ( 
"embed-manifest", Opt_EmbedManifest, nop ), - ( "ext-core", Opt_EmitExternalCore, nop ), + ( "ext-core", Opt_EmitExternalCore, + \_ -> deprecate "it has no effect, and will be removed in GHC 7.12" ), ( "shared-implib", Opt_SharedImplib, nop ), ( "ghci-sandbox", Opt_GhciSandbox, nop ), ( "ghci-history", Opt_GhciHistory, nop ), @@ -2869,13 +2868,15 @@ xFlags = [ ( "FlexibleInstances", Opt_FlexibleInstances, nop ), ( "ConstrainedClassMethods", Opt_ConstrainedClassMethods, nop ), ( "MultiParamTypeClasses", Opt_MultiParamTypeClasses, nop ), - ( "NullaryTypeClasses", Opt_NullaryTypeClasses, nop ), + ( "NullaryTypeClasses", Opt_NullaryTypeClasses, + deprecatedForExtension "MultiParamTypeClasses" ), ( "FunctionalDependencies", Opt_FunctionalDependencies, nop ), ( "GeneralizedNewtypeDeriving", Opt_GeneralizedNewtypeDeriving, setGenDeriving ), ( "OverlappingInstances", Opt_OverlappingInstances, nop ), ( "UndecidableInstances", Opt_UndecidableInstances, nop ), ( "IncoherentInstances", Opt_IncoherentInstances, nop ), ( "PackageImports", Opt_PackageImports, nop ), + ( "BinaryLiterals", Opt_BinaryLiterals, nop ), ( "NegativeLiterals", Opt_NegativeLiterals, nop ), ( "EmptyCase", Opt_EmptyCase, nop ), ( "PatternSynonyms", Opt_PatternSynonyms, nop ) @@ -2960,6 +2961,9 @@ impliedFlags , (Opt_ImplicitParams, turnOn, Opt_FlexibleInstances) , (Opt_JavaScriptFFI, turnOn, Opt_InterruptibleFFI) + + , (Opt_DeriveTraversable, turnOn, Opt_DeriveFunctor) + , (Opt_DeriveTraversable, turnOn, Opt_DeriveFoldable) ] optLevelFlags :: [([Int], GeneralFlag)] @@ -3187,16 +3191,9 @@ noArg fn = NoArg (upd fn) noArgM :: (DynFlags -> DynP DynFlags) -> OptKind (CmdLineP DynFlags) noArgM fn = NoArg (updM fn) -noArgDF :: (DynFlags -> DynFlags) -> String -> OptKind (CmdLineP DynFlags) -noArgDF fn deprec = NoArg (upd fn >> deprecate deprec) - hasArg :: (String -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags) hasArg fn = HasArg (upd . fn) -hasArgDF :: (String -> DynFlags -> DynFlags) -> String -> OptKind (CmdLineP DynFlags) -hasArgDF fn deprec = HasArg (\s -> do upd (fn s) - deprecate deprec) - sepArg :: (String -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags) sepArg fn = SepArg (upd . fn) @@ -3764,6 +3761,8 @@ data LinkerInfo data CompilerInfo = GCC | Clang + | AppleClang + | AppleClang51 | UnknownCC deriving Eq diff --git a/compiler/main/DynFlags.hs-boot b/compiler/main/DynFlags.hs-boot index 04ec5a4e7d..5cf21669bd 100644 --- a/compiler/main/DynFlags.hs-boot +++ b/compiler/main/DynFlags.hs-boot @@ -9,4 +9,5 @@ targetPlatform :: DynFlags -> Platform pprUserLength :: DynFlags -> Int pprCols :: DynFlags -> Int unsafeGlobalDynFlags :: DynFlags -useUnicodeQuotes :: DynFlags -> Bool +useUnicode :: DynFlags -> Bool +useUnicodeSyntax :: DynFlags -> Bool diff --git a/compiler/main/DynamicLoading.hs b/compiler/main/DynamicLoading.hs index ffafc78216..046d13cee5 100644 --- a/compiler/main/DynamicLoading.hs +++ b/compiler/main/DynamicLoading.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, MagicHash #-} + -- | Dynamically lookup up values from modules and loading them. 
module DynamicLoading ( #ifdef GHCI diff --git a/compiler/main/ErrUtils.lhs b/compiler/main/ErrUtils.lhs index 12b6bad68a..02f731d3c2 100644 --- a/compiler/main/ErrUtils.lhs +++ b/compiler/main/ErrUtils.lhs @@ -4,6 +4,7 @@ \section[ErrsUtils]{Utilities for error reporting} \begin{code} +{-# LANGUAGE CPP #-} module ErrUtils ( ErrMsg, WarnMsg, Severity(..), diff --git a/compiler/main/Finder.lhs b/compiler/main/Finder.lhs index 60683b2289..cbfd4e4f1c 100644 --- a/compiler/main/Finder.lhs +++ b/compiler/main/Finder.lhs @@ -4,6 +4,8 @@ \section[Finder]{Module Finder} \begin{code} +{-# LANGUAGE CPP #-} + module Finder ( flushFinderCaches, FindResult(..), @@ -432,8 +434,8 @@ mkHomeModLocation2 :: DynFlags mkHomeModLocation2 dflags mod src_basename ext = do let mod_basename = moduleNameSlashes mod - obj_fn <- mkObjPath dflags src_basename mod_basename - hi_fn <- mkHiPath dflags src_basename mod_basename + obj_fn = mkObjPath dflags src_basename mod_basename + hi_fn = mkHiPath dflags src_basename mod_basename return (ModLocation{ ml_hs_file = Just (src_basename <.> ext), ml_hi_file = hi_fn, @@ -443,7 +445,7 @@ mkHiOnlyModLocation :: DynFlags -> Suffix -> FilePath -> String -> IO ModLocation mkHiOnlyModLocation dflags hisuf path basename = do let full_basename = path </> basename - obj_fn <- mkObjPath dflags full_basename basename + obj_fn = mkObjPath dflags full_basename basename return ModLocation{ ml_hs_file = Nothing, ml_hi_file = full_basename <.> hisuf, -- Remove the .hi-boot suffix from @@ -459,16 +461,15 @@ mkObjPath :: DynFlags -> FilePath -- the filename of the source file, minus the extension -> String -- the module name with dots replaced by slashes - -> IO FilePath -mkObjPath dflags basename mod_basename - = do let + -> FilePath +mkObjPath dflags basename mod_basename = obj_basename <.> osuf + where odir = objectDir dflags osuf = objectSuf dflags obj_basename | Just dir <- odir = dir </> mod_basename | otherwise = basename - return (obj_basename <.> osuf) -- | Constructs the filename of a .hi file for a given source file. 
-- Does /not/ check whether the .hi file exists @@ -476,16 +477,15 @@ mkHiPath :: DynFlags -> FilePath -- the filename of the source file, minus the extension -> String -- the module name with dots replaced by slashes - -> IO FilePath -mkHiPath dflags basename mod_basename - = do let + -> FilePath +mkHiPath dflags basename mod_basename = hi_basename <.> hisuf + where hidir = hiDir dflags hisuf = hiSuf dflags hi_basename | Just dir <- hidir = dir </> mod_basename | otherwise = basename - return (hi_basename <.> hisuf) -- ----------------------------------------------------------------------------- diff --git a/compiler/main/GHC.hs b/compiler/main/GHC.hs index 7694bc9821..13d4f87009 100644 --- a/compiler/main/GHC.hs +++ b/compiler/main/GHC.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, NondecreasingIndentation, ScopedTypeVariables #-} + -- ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow, 2005-2012 @@ -53,7 +55,6 @@ module GHC ( -- ** Compiling to Core CoreModule(..), compileToCoreModule, compileToCoreSimplified, - compileCoreToObj, -- * Inspecting the module structure of the program ModuleGraph, ModSummary(..), ms_mod_name, ModLocation(..), @@ -261,6 +262,7 @@ import InteractiveEval import TcRnDriver ( runTcInteractive ) #endif +import PprTyThing ( pprFamInst ) import HscMain import GhcMake import DriverPipeline ( compileOne' ) @@ -283,7 +285,7 @@ import DataCon import Name hiding ( varName ) import Avail import InstEnv -import FamInstEnv +import FamInstEnv ( FamInst ) import SrcLoc import CoreSyn import TidyPgm @@ -310,7 +312,7 @@ import FastString import qualified Parser import Lexer -import System.Directory ( doesFileExist, getCurrentDirectory ) +import System.Directory ( doesFileExist ) import Data.Maybe import Data.List ( find ) import Data.Time @@ -925,43 +927,6 @@ compileToCoreModule = compileCore False -- as to return simplified and tidied Core. compileToCoreSimplified :: GhcMonad m => FilePath -> m CoreModule compileToCoreSimplified = compileCore True --- | Takes a CoreModule and compiles the bindings therein --- to object code. The first argument is a bool flag indicating --- whether to run the simplifier. --- The resulting .o, .hi, and executable files, if any, are stored in the --- current directory, and named according to the module name. --- This has only so far been tested with a single self-contained module. -compileCoreToObj :: GhcMonad m - => Bool -> CoreModule -> FilePath -> FilePath -> m () -compileCoreToObj simplify cm@(CoreModule{ cm_module = mName }) - output_fn extCore_filename = do - dflags <- getSessionDynFlags - currentTime <- liftIO $ getCurrentTime - cwd <- liftIO $ getCurrentDirectory - modLocation <- liftIO $ mkHiOnlyModLocation dflags (hiSuf dflags) cwd - ((moduleNameSlashes . moduleName) mName) - - let modSum = ModSummary { ms_mod = mName, - ms_hsc_src = ExtCoreFile, - ms_location = modLocation, - -- By setting the object file timestamp to Nothing, - -- we always force recompilation, which is what we - -- want. (Thus it doesn't matter what the timestamp - -- for the (nonexistent) source file is.) - ms_hs_date = currentTime, - ms_obj_date = Nothing, - -- Only handling the single-module case for now, so no imports. 
- ms_srcimps = [], - ms_textual_imps = [], - -- No source file - ms_hspp_file = "", - ms_hspp_opts = dflags, - ms_hspp_buf = Nothing - } - - hsc_env <- getSession - liftIO $ hscCompileCore hsc_env simplify (cm_safe cm) modSum (cm_binds cm) output_fn extCore_filename - compileCore :: GhcMonad m => Bool -> FilePath -> m CoreModule compileCore simplify fn = do diff --git a/compiler/main/GhcMake.hs b/compiler/main/GhcMake.hs index b7a1282f5c..694778115d 100644 --- a/compiler/main/GhcMake.hs +++ b/compiler/main/GhcMake.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE BangPatterns, CPP, NondecreasingIndentation, ScopedTypeVariables #-} {-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} -- NB: we specifically ignore deprecations. GHC 7.6 marks the .QSem module as -- deprecated, although it became un-deprecated later. As a result, using 7.6 diff --git a/compiler/main/GhcMonad.hs b/compiler/main/GhcMonad.hs index 68b4e2b2a2..5fa6452d58 100644 --- a/compiler/main/GhcMonad.hs +++ b/compiler/main/GhcMonad.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE RankNTypes #-} {-# OPTIONS_GHC -funbox-strict-fields #-} -- ----------------------------------------------------------------------------- -- diff --git a/compiler/main/HeaderInfo.hs b/compiler/main/HeaderInfo.hs index a083f4fcd8..fcf235bd23 100644 --- a/compiler/main/HeaderInfo.hs +++ b/compiler/main/HeaderInfo.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- | Parsing the top of a Haskell source file to get its module name, @@ -185,8 +187,8 @@ lazyGetToks dflags filename handle = do -- large module names (#5981) nextbuf <- hGetStringBufferBlock handle new_size if (len nextbuf == 0) then lazyLexBuf handle state True new_size else do - newbuf <- appendStringBuffers (buffer state) nextbuf - unsafeInterleaveIO $ lazyLexBuf handle state{buffer=newbuf} False new_size + newbuf <- appendStringBuffers (buffer state) nextbuf + unsafeInterleaveIO $ lazyLexBuf handle state{buffer=newbuf} False new_size getToks :: DynFlags -> FilePath -> StringBuffer -> [Located Token] diff --git a/compiler/main/Hooks.lhs b/compiler/main/Hooks.lhs index 3bd9643dc6..63aaafa2a7 100644 --- a/compiler/main/Hooks.lhs +++ b/compiler/main/Hooks.lhs @@ -63,7 +63,7 @@ data Hooks = Hooks , tcForeignImportsHook :: Maybe ([LForeignDecl Name] -> TcM ([Id], [LForeignDecl Id], Bag GlobalRdrElt)) , tcForeignExportsHook :: Maybe ([LForeignDecl Name] -> TcM (LHsBinds TcId, [LForeignDecl TcId], Bag GlobalRdrElt)) , hscFrontendHook :: Maybe (ModSummary -> Hsc TcGblEnv) - , hscCompileOneShotHook :: Maybe (HscEnv -> FilePath -> ModSummary -> SourceModified -> IO HscStatus) + , hscCompileOneShotHook :: Maybe (HscEnv -> ModSummary -> SourceModified -> IO HscStatus) , hscCompileCoreExprHook :: Maybe (HscEnv -> SrcSpan -> CoreExpr -> IO HValue) , ghcPrimIfaceHook :: Maybe ModIface , runPhaseHook :: Maybe (PhasePlus -> FilePath -> DynFlags -> CompPipeline (PhasePlus, FilePath)) diff --git a/compiler/main/HscMain.hs b/compiler/main/HscMain.hs index 748f7480ec..aef6007fb7 100644 --- a/compiler/main/HscMain.hs +++ b/compiler/main/HscMain.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE BangPatterns, CPP, MagicHash, NondecreasingIndentation #-} + ------------------------------------------------------------------------------- -- -- | Main API for compiling plain Haskell source code. 
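With compileCoreToObj removed from the GHC API above, compileToCoreModule and compileToCoreSimplified remain the Core-level entry points. A hedged usage sketch, not part of this patch: GHC.Paths.libdir is an assumption from the ghc-paths package, everything else is the standard GHC API as exported by the GHC module.

    import GHC
    import GHC.Paths (libdir)  -- assumption: ghc-paths supplies the installed libdir

    -- Minimal sketch: load one file and return its simplified, tidied Core.
    getSimplifiedCore :: FilePath -> IO CoreModule
    getSimplifiedCore file = runGhc (Just libdir) $ do
      dflags <- getSessionDynFlags
      _pkgs  <- setSessionDynFlags dflags   -- initialise the package state
      compileToCoreSimplified file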
@@ -146,7 +148,6 @@ import ErrUtils import Outputable import HscStats ( ppSourceStats ) import HscTypes -import MkExternalCore ( emitExternalCore ) import FastString import UniqFM ( emptyUFM ) import UniqSupply @@ -516,8 +517,9 @@ genericHscCompileGetFrontendResult :: -> (Int,Int) -- (i,n) = module i of n (for msgs) -> IO (Either ModIface (TcGblEnv, Maybe Fingerprint)) -genericHscCompileGetFrontendResult always_do_basic_recompilation_check m_tc_result - mHscMessage hsc_env mod_summary source_modified mb_old_iface mod_index +genericHscCompileGetFrontendResult + always_do_basic_recompilation_check m_tc_result + mHscMessage hsc_env mod_summary source_modified mb_old_iface mod_index = do let msg what = case mHscMessage of @@ -553,16 +555,19 @@ genericHscCompileGetFrontendResult always_do_basic_recompilation_check m_tc_resu case mb_checked_iface of Just iface | not (recompileRequired recomp_reqd) -> - -- If the module used TH splices when it was last compiled, - -- then the recompilation check is not accurate enough (#481) - -- and we must ignore it. However, if the module is stable - -- (none of the modules it depends on, directly or indirectly, - -- changed), then we *can* skip recompilation. This is why - -- the SourceModified type contains SourceUnmodifiedAndStable, - -- and it's pretty important: otherwise ghc --make would - -- always recompile TH modules, even if nothing at all has - -- changed. Stability is just the same check that make is - -- doing for us in one-shot mode. + -- If the module used TH splices when it was last + -- compiled, then the recompilation check is not + -- accurate enough (#481) and we must ignore + -- it. However, if the module is stable (none of + -- the modules it depends on, directly or + -- indirectly, changed), then we *can* skip + -- recompilation. This is why the SourceModified + -- type contains SourceUnmodifiedAndStable, and + -- it's pretty important: otherwise ghc --make + -- would always recompile TH modules, even if + -- nothing at all has changed. Stability is just + -- the same check that make is doing for us in + -- one-shot mode. case m_tc_result of Nothing | mi_used_th iface && not stable -> @@ -580,31 +585,25 @@ genericHscFrontend mod_summary = getHooked hscFrontendHook genericHscFrontend' >>= ($ mod_summary) genericHscFrontend' :: ModSummary -> Hsc TcGblEnv -genericHscFrontend' mod_summary - | ExtCoreFile <- ms_hsc_src mod_summary = - panic "GHC does not currently support reading External Core files" - | otherwise = - hscFileFrontEnd mod_summary +genericHscFrontend' mod_summary = hscFileFrontEnd mod_summary -------------------------------------------------------------- -- Compilers -------------------------------------------------------------- hscCompileOneShot :: HscEnv - -> FilePath -> ModSummary -> SourceModified -> IO HscStatus hscCompileOneShot env = lookupHook hscCompileOneShotHook hscCompileOneShot' (hsc_dflags env) env --- Compile Haskell, boot and extCore in OneShot mode. +-- Compile Haskell/boot in OneShot mode. hscCompileOneShot' :: HscEnv - -> FilePath -> ModSummary -> SourceModified -> IO HscStatus -hscCompileOneShot' hsc_env extCore_filename mod_summary src_changed +hscCompileOneShot' hsc_env mod_summary src_changed = do -- One-shot mode needs a knot-tying mutable variable for interface -- files. See TcRnTypes.TcGblEnv.tcg_type_env_var. 
@@ -624,7 +623,11 @@ hscCompileOneShot' hsc_env extCore_filename mod_summary src_changed guts0 <- hscDesugar' (ms_location mod_summary) tc_result dflags <- getDynFlags case hscTarget dflags of - HscNothing -> return HscNotGeneratingCode + HscNothing -> do + when (gopt Opt_WriteInterface dflags) $ liftIO $ do + (iface, changed, _details) <- hscSimpleIface hsc_env tc_result mb_old_hash + hscWriteIface dflags iface changed mod_summary + return HscNotGeneratingCode _ -> case ms_hsc_src mod_summary of HsBootFile -> @@ -633,7 +636,7 @@ hscCompileOneShot' hsc_env extCore_filename mod_summary src_changed return HscUpdateBoot _ -> do guts <- hscSimplify' guts0 - (iface, changed, _details, cgguts) <- hscNormalIface' extCore_filename guts mb_old_hash + (iface, changed, _details, cgguts) <- hscNormalIface' guts mb_old_hash liftIO $ hscWriteIface dflags iface changed mod_summary return $ HscRecomp cgguts mod_summary @@ -1070,18 +1073,16 @@ hscSimpleIface' tc_result mb_old_iface = do return (new_iface, no_change, details) hscNormalIface :: HscEnv - -> FilePath -> ModGuts -> Maybe Fingerprint -> IO (ModIface, Bool, ModDetails, CgGuts) -hscNormalIface hsc_env extCore_filename simpl_result mb_old_iface = - runHsc hsc_env $ hscNormalIface' extCore_filename simpl_result mb_old_iface +hscNormalIface hsc_env simpl_result mb_old_iface = + runHsc hsc_env $ hscNormalIface' simpl_result mb_old_iface -hscNormalIface' :: FilePath - -> ModGuts +hscNormalIface' :: ModGuts -> Maybe Fingerprint -> Hsc (ModIface, Bool, ModDetails, CgGuts) -hscNormalIface' extCore_filename simpl_result mb_old_iface = do +hscNormalIface' simpl_result mb_old_iface = do hsc_env <- getHscEnv (cg_guts, details) <- {-# SCC "CoreTidy" #-} liftIO $ tidyProgram hsc_env simpl_result @@ -1096,11 +1097,6 @@ hscNormalIface' extCore_filename simpl_result mb_old_iface = do ioMsgMaybe $ mkIface hsc_env mb_old_iface details simpl_result - -- Emit external core - -- This should definitely be here and not after CorePrep, - -- because CorePrep produces unqualified constructor wrapper declarations, - -- so its output isn't valid External Core (without some preprocessing). - liftIO $ emitExternalCore (hsc_dflags hsc_env) extCore_filename cg_guts liftIO $ dumpIfaceStats hsc_env -- Return the prepared code. @@ -1158,8 +1154,15 @@ hscGenHardCode hsc_env cgguts mod_summary output_filename = do ------------------ Code generation ------------------ - cmms <- {-# SCC "NewCodeGen" #-} - tryNewCodeGen hsc_env this_mod data_tycons + -- The back-end is streamed: each top-level function goes + -- from Stg all the way to asm before dealing with the next + -- top-level function, so showPass isn't very useful here. + -- Hence we have one showPass for the whole backend, the + -- next showPass after this will be "Assembler". + showPass dflags "CodeGen" + + cmms <- {-# SCC "StgCmm" #-} + doCodeGen hsc_env this_mod data_tycons cost_centre_info stg_binds hpc_info @@ -1236,15 +1239,15 @@ hscCompileCmmFile hsc_env filename output_filename = runHsc hsc_env $ do -------------------- Stuff for new code gen --------------------- -tryNewCodeGen :: HscEnv -> Module -> [TyCon] - -> CollectedCCs - -> [StgBinding] - -> HpcInfo - -> IO (Stream IO CmmGroup ()) +doCodeGen :: HscEnv -> Module -> [TyCon] + -> CollectedCCs + -> [StgBinding] + -> HpcInfo + -> IO (Stream IO CmmGroup ()) -- Note we produce a 'Stream' of CmmGroups, so that the -- backend can be run incrementally. Otherwise it generates all -- the C-- up front, which has a significant space cost. 
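The HscNothing branch in hscCompileOneShot' above and the parallel one in compileOne' earlier now share the same shape: build the simple interface and write it out only when -fwrite-interface is set. A condensed, hedged sketch of that shape, using only names that appear in this patch (the wrapper function itself is hypothetical):

    import Control.Monad (when)
    -- HscEnv, ModSummary, TcGblEnv, gopt, hscSimpleIface, hscWriteIface come from the
    -- GHC modules touched in this patch (HscTypes, DynFlags, HscMain).

    -- Under -fno-code, still emit a .hi file when Opt_WriteInterface is on.
    writeIfaceIfRequested :: HscEnv -> ModSummary -> TcGblEnv -> Maybe Fingerprint -> IO ()
    writeIfaceIfRequested hsc_env summary tc_result mb_old_hash = do
      let dflags = hsc_dflags hsc_env
      when (gopt Opt_WriteInterface dflags) $ do
        (iface, changed, _details) <- hscSimpleIface hsc_env tc_result mb_old_hash
        hscWriteIface dflags iface changed summary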
-tryNewCodeGen hsc_env this_mod data_tycons +doCodeGen hsc_env this_mod data_tycons cost_centre_info stg_binds hpc_info = do let dflags = hsc_dflags hsc_env @@ -1533,11 +1536,11 @@ hscParseThingWithLocation source linenumber parser str return thing hscCompileCore :: HscEnv -> Bool -> SafeHaskellMode -> ModSummary - -> CoreProgram -> FilePath -> FilePath -> IO () -hscCompileCore hsc_env simplify safe_mode mod_summary binds output_filename extCore_filename + -> CoreProgram -> FilePath -> IO () +hscCompileCore hsc_env simplify safe_mode mod_summary binds output_filename = runHsc hsc_env $ do guts <- maybe_simplify (mkModGuts (ms_mod mod_summary) safe_mode binds) - (iface, changed, _details, cgguts) <- hscNormalIface' extCore_filename guts Nothing + (iface, changed, _details, cgguts) <- hscNormalIface' guts Nothing liftIO $ hscWriteIface (hsc_dflags hsc_env) iface changed mod_summary _ <- liftIO $ hscGenHardCode hsc_env cgguts mod_summary output_filename return () diff --git a/compiler/main/HscTypes.lhs b/compiler/main/HscTypes.lhs index 6fcf8e24a7..9738f590b6 100644 --- a/compiler/main/HscTypes.lhs +++ b/compiler/main/HscTypes.lhs @@ -4,6 +4,7 @@ \section[HscTypes]{Types for the per-module compiler} \begin{code} +{-# LANGUAGE CPP, DeriveDataTypeable, ScopedTypeVariables #-} -- | Types for the per-module compiler module HscTypes ( @@ -71,7 +72,7 @@ module HscTypes ( TypeEnv, lookupType, lookupTypeHscEnv, mkTypeEnv, emptyTypeEnv, typeEnvFromEntities, mkTypeEnvWithImplicits, extendTypeEnv, extendTypeEnvList, - extendTypeEnvWithIds, extendTypeEnvWithPatSyns, + extendTypeEnvWithIds, lookupTypeEnv, typeEnvElts, typeEnvTyCons, typeEnvIds, typeEnvPatSyns, typeEnvDataCons, typeEnvCoAxioms, typeEnvClasses, @@ -951,7 +952,8 @@ data ModDetails -- The next two fields are created by the typechecker md_exports :: [AvailInfo], md_types :: !TypeEnv, -- ^ Local type environment for this particular module - md_insts :: ![ClsInst], -- ^ 'DFunId's for the instances in this module + -- Includes Ids, TyCons, PatSyns + md_insts :: ![ClsInst], -- ^ 'DFunId's for the instances in this module md_fam_insts :: ![FamInst], md_rules :: ![CoreRule], -- ^ Domain may include 'Id's from other modules md_anns :: ![Annotation], -- ^ Annotations present in this module: currently @@ -1483,7 +1485,7 @@ Examples: IfaceClass decl happens to use IfaceDecl recursively for the associated types, but that's irrelevant here.) - * Dictionary function Ids are not implict. + * Dictionary function Ids are not implicit. * Axioms for newtypes are implicit (same as above), but axioms for data/type family instances are *not* implicit (like DFunIds). 
@@ -1504,15 +1506,17 @@ implicitTyThings :: TyThing -> [TyThing] implicitTyThings (AnId _) = [] implicitTyThings (ACoAxiom _cc) = [] implicitTyThings (ATyCon tc) = implicitTyConThings tc -implicitTyThings (AConLike cl) = case cl of - RealDataCon dc -> - -- For data cons add the worker and (possibly) wrapper - map AnId (dataConImplicitIds dc) - PatSynCon ps -> - -- For bidirectional pattern synonyms, add the wrapper - case patSynWrapper ps of - Nothing -> [] - Just id -> [AnId id] +implicitTyThings (AConLike cl) = implicitConLikeThings cl + +implicitConLikeThings :: ConLike -> [TyThing] +implicitConLikeThings (RealDataCon dc) + = map AnId (dataConImplicitIds dc) + -- For data cons add the worker and (possibly) wrapper + +implicitConLikeThings (PatSynCon {}) + = [] -- Pattern synonyms have no implicit Ids; the wrapper and matcher + -- are not "implicit"; they are simply new top-level bindings, + -- and they have their own declaration in an interface fiel implicitClassThings :: Class -> [TyThing] implicitClassThings cl @@ -1561,8 +1565,8 @@ implicitCoTyCon tc -- other declaration. isImplicitTyThing :: TyThing -> Bool isImplicitTyThing (AConLike cl) = case cl of - RealDataCon{} -> True - PatSynCon ps -> isImplicitId (patSynId ps) + RealDataCon {} -> True + PatSynCon {} -> False isImplicitTyThing (AnId id) = isImplicitId id isImplicitTyThing (ATyCon tc) = isImplicitTyCon tc isImplicitTyThing (ACoAxiom ax) = isImplicitCoAxiom ax @@ -1678,17 +1682,6 @@ extendTypeEnvList env things = foldl extendTypeEnv env things extendTypeEnvWithIds :: TypeEnv -> [Id] -> TypeEnv extendTypeEnvWithIds env ids = extendNameEnvList env [(getName id, AnId id) | id <- ids] - -extendTypeEnvWithPatSyns :: TypeEnv -> [PatSyn] -> TypeEnv -extendTypeEnvWithPatSyns env patsyns - = extendNameEnvList env $ concatMap pat_syn_things patsyns - where - pat_syn_things :: PatSyn -> [(Name, TyThing)] - pat_syn_things ps = (getName ps, AConLike (PatSynCon ps)): - case patSynWrapper ps of - Just wrap_id -> [(getName wrap_id, AnId wrap_id)] - Nothing -> [] - \end{code} \begin{code} @@ -2207,37 +2200,50 @@ type ModuleGraph = [ModSummary] emptyMG :: ModuleGraph emptyMG = [] --- | A single node in a 'ModuleGraph. The nodes of the module graph are one of: +-- | A single node in a 'ModuleGraph'. 
The nodes of the module graph +-- are one of: -- -- * A regular Haskell source module --- -- * A hi-boot source module --- -- * An external-core source module +-- data ModSummary = ModSummary { - ms_mod :: Module, -- ^ Identity of the module - ms_hsc_src :: HscSource, -- ^ The module source either plain Haskell, hs-boot or external core - ms_location :: ModLocation, -- ^ Location of the various files belonging to the module - ms_hs_date :: UTCTime, -- ^ Timestamp of source file - ms_obj_date :: Maybe UTCTime, -- ^ Timestamp of object, if we have one - ms_srcimps :: [Located (ImportDecl RdrName)], -- ^ Source imports of the module - ms_textual_imps :: [Located (ImportDecl RdrName)], -- ^ Non-source imports of the module from the module *text* - ms_hspp_file :: FilePath, -- ^ Filename of preprocessed source file - ms_hspp_opts :: DynFlags, -- ^ Cached flags from @OPTIONS@, @INCLUDE@ - -- and @LANGUAGE@ pragmas in the modules source code - ms_hspp_buf :: Maybe StringBuffer -- ^ The actual preprocessed source, if we have it + ms_mod :: Module, + -- ^ Identity of the module + ms_hsc_src :: HscSource, + -- ^ The module source either plain Haskell, hs-boot or external core + ms_location :: ModLocation, + -- ^ Location of the various files belonging to the module + ms_hs_date :: UTCTime, + -- ^ Timestamp of source file + ms_obj_date :: Maybe UTCTime, + -- ^ Timestamp of object, if we have one + ms_srcimps :: [Located (ImportDecl RdrName)], + -- ^ Source imports of the module + ms_textual_imps :: [Located (ImportDecl RdrName)], + -- ^ Non-source imports of the module from the module *text* + ms_hspp_file :: FilePath, + -- ^ Filename of preprocessed source file + ms_hspp_opts :: DynFlags, + -- ^ Cached flags from @OPTIONS@, @INCLUDE@ and @LANGUAGE@ + -- pragmas in the modules source code + ms_hspp_buf :: Maybe StringBuffer + -- ^ The actual preprocessed source, if we have it } ms_mod_name :: ModSummary -> ModuleName ms_mod_name = moduleName . 
ms_mod ms_imps :: ModSummary -> [Located (ImportDecl RdrName)] -ms_imps ms = ms_textual_imps ms ++ map mk_additional_import (dynFlagDependencies (ms_hspp_opts ms)) +ms_imps ms = + ms_textual_imps ms ++ + map mk_additional_import (dynFlagDependencies (ms_hspp_opts ms)) where - -- This is a not-entirely-satisfactory means of creating an import that corresponds to an - -- import that did not occur in the program text, such as those induced by the use of - -- plugins (the -plgFoo flag) + -- This is a not-entirely-satisfactory means of creating an import + -- that corresponds to an import that did not occur in the program + -- text, such as those induced by the use of plugins (the -plgFoo + -- flag) mk_additional_import mod_nm = noLoc $ ImportDecl { ideclName = noLoc mod_nm, ideclPkgQual = Nothing, diff --git a/compiler/main/InteractiveEval.hs b/compiler/main/InteractiveEval.hs index ede519982a..cfcc076235 100644 --- a/compiler/main/InteractiveEval.hs +++ b/compiler/main/InteractiveEval.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, MagicHash, NondecreasingIndentation, UnboxedTuples #-} + -- ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow, 2005-2007 diff --git a/compiler/main/InteractiveEvalTypes.hs b/compiler/main/InteractiveEvalTypes.hs index e3324a39a1..6ea1a25648 100644 --- a/compiler/main/InteractiveEvalTypes.hs +++ b/compiler/main/InteractiveEvalTypes.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow, 2005-2007 diff --git a/compiler/main/PackageConfig.hs b/compiler/main/PackageConfig.hs index d34d9e1f5c..514a2e004f 100644 --- a/compiler/main/PackageConfig.hs +++ b/compiler/main/PackageConfig.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- | -- Package configuration information: essentially the interface to Cabal, with -- some utilities @@ -45,16 +47,11 @@ defaultPackageConfig = emptyInstalledPackageInfo -- $package_naming -- #package_naming# --- Mostly the compiler deals in terms of 'PackageName's, which don't --- have the version suffix. This is so that we don't need to know the --- version for the @-package-name@ flag, or know the versions of --- wired-in packages like @base@ & @rts@. Versions are confined to the --- package sub-system. --- --- This means that in theory you could have multiple base packages installed --- (for example), and switch between them using @-package@\/@-hide-package@. --- --- A 'PackageId' is a string of the form @<pkg>-<version>@. +-- Mostly the compiler deals in terms of 'PackageId's, which have the +-- form @<pkg>-<version>@. You're expected to pass in the version for +-- the @-package-name@ flag. However, for wired-in packages like @base@ +-- & @rts@, we don't necessarily know what the version is, so these are +-- handled specially; see #wired_in_packages#. 
-- | Turn a Cabal 'PackageIdentifier' into a GHC 'PackageId' mkPackageId :: PackageIdentifier -> PackageId diff --git a/compiler/main/Packages.lhs b/compiler/main/Packages.lhs index a13b3599b8..bb2e048cc3 100644 --- a/compiler/main/Packages.lhs +++ b/compiler/main/Packages.lhs @@ -2,13 +2,15 @@ % (c) The University of Glasgow, 2006 % \begin{code} +{-# LANGUAGE CPP #-} + -- | Package manipulation module Packages ( module PackageConfig, -- * The PackageConfigMap PackageConfigMap, emptyPackageConfigMap, lookupPackage, - extendPackageConfigMap, dumpPackages, + extendPackageConfigMap, dumpPackages, simpleDumpPackages, -- * Reading the package config, and processing cmdline args PackageState(..), @@ -1078,12 +1080,26 @@ isDllName dflags _this_pkg this_mod name -- ----------------------------------------------------------------------------- -- Displaying packages --- | Show package info on console, if verbosity is >= 3 +-- | Show (very verbose) package info on console, if verbosity is >= 5 dumpPackages :: DynFlags -> IO () -dumpPackages dflags +dumpPackages = dumpPackages' showInstalledPackageInfo + +dumpPackages' :: (InstalledPackageInfo -> String) -> DynFlags -> IO () +dumpPackages' showIPI dflags = do let pkg_map = pkgIdMap (pkgState dflags) putMsg dflags $ - vcat (map (text . showInstalledPackageInfo + vcat (map (text . showIPI . packageConfigToInstalledPackageInfo) (eltsUFM pkg_map)) + +-- | Show simplified package info on console, if verbosity == 4. +-- The idea is to only print package id, and any information that might +-- be different from the package databases (exposure, trust) +simpleDumpPackages :: DynFlags -> IO () +simpleDumpPackages = dumpPackages' showIPI + where showIPI ipi = let InstalledPackageId i = installedPackageId ipi + e = if exposed ipi then "E" else " " + t = if trusted ipi then "T" else " " + in e ++ t ++ " " ++ i + \end{code} diff --git a/compiler/main/PlatformConstants.hs b/compiler/main/PlatformConstants.hs index 03e146ca7c..b2ca32be68 100644 --- a/compiler/main/PlatformConstants.hs +++ b/compiler/main/PlatformConstants.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ------------------------------------------------------------------------------- -- -- | Platform constants diff --git a/compiler/main/PprTyThing.hs b/compiler/main/PprTyThing.hs index 1fd5d0cbcf..d993ab87c8 100644 --- a/compiler/main/PprTyThing.hs +++ b/compiler/main/PprTyThing.hs @@ -6,7 +6,8 @@ -- ----------------------------------------------------------------------------- -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -19,51 +20,83 @@ module PprTyThing ( pprTyThingLoc, pprTyThingInContextLoc, pprTyThingHdr, - pprTypeForUser + pprTypeForUser, + pprFamInst ) where +#include "HsVersions.h" + import TypeRep ( TyThing(..) ) -import DataCon -import Id -import TyCon -import Class -import Coercion( pprCoAxBranch ) -import CoAxiom( CoAxiom(..), brListMap ) +import CoAxiom ( coAxiomTyCon ) import HscTypes( tyThingParent_maybe ) -import Type( tidyTopType, tidyOpenType, splitForAllTys, funResultTy ) -import Kind( synTyConResKind ) -import TypeRep( pprTvBndrs, pprForAll, suppressKinds ) -import TysPrim( alphaTyVars ) import MkIface ( tyThingToIfaceDecl ) +import Type ( tidyOpenType ) +import IfaceSyn ( pprIfaceDecl, ShowSub(..), ShowHowMuch(..) 
) +import FamInstEnv( FamInst( .. ), FamFlavor(..) ) import TcType import Name import VarEnv( emptyTidyEnv ) -import StaticFlags( opt_PprStyle_Debug ) -import DynFlags import Outputable import FastString -- ----------------------------------------------------------------------------- -- Pretty-printing entities that we get from the GHC API --- This should be a good source of sample code for using the GHC API to --- inspect source code entities. - -type ShowSub = [Name] --- [] <=> print all sub-components of the current thing --- (n:ns) <=> print sub-component 'n' with ShowSub=ns --- elide other sub-components to "..." -showAll :: ShowSub -showAll = [] +{- Note [Pretty-printing TyThings] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +We pretty-print a TyThing by converting it to an IfaceDecl, +and pretty-printing that (see ppr_ty_thing below). +Here is why: + +* When pretty-printing (a type, say), the idiomatic solution is not to + "rename type variables on the fly", but rather to "tidy" the type + (which gives each variable a distinct print-name), and then + pretty-print it (without renaming). Separate the two + concerns. Functions like tidyType do this. + +* Alas, for type constructors, TyCon, tidying does not work well, + because a TyCon includes DataCons which include Types, which mention + TyCons. And tidying can't tidy a mutually recursive data structure + graph, only trees. + +* One alternative would be to ensure that TyCons get type variables + with distinct print-names. That's ok for type variables but less + easy for kind variables. Processing data type declarations is + already so complicated that I don't think it's sensible to add the + extra requirement that it generates only "pretty" types and kinds. + +* One place the non-pretty names can show up is in GHCi. But another + is in interface files. Look at MkIface.tyThingToIfaceDecl which + converts a TyThing (i.e. TyCon, Class etc) to an IfaceDecl. And it + already does tidying as part of that conversion! Why? Because + interface files contains fast-strings, not uniques, so the names + must at least be distinct. + +So if we convert to IfaceDecl, we get a nice tidy IfaceDecl, and can +print that. Of course, that means that pretty-printing IfaceDecls +must be careful to display nice user-friendly results, but that's ok. + +See #7730, #8776 for details -} + +-------------------- +-- | Pretty-prints a 'FamInst' (type/data family instance) with its defining location. 
+pprFamInst :: FamInst -> SDoc +-- * For data instances we go via pprTyThing of the represntational TyCon, +-- because there is already much cleverness associated with printing +-- data type declarations that I don't want to duplicate +-- * For type instances we print directly here; there is no TyCon +-- to give to pprTyThing +-- +-- FamInstEnv.pprFamInst does a more quick-and-dirty job for internal purposes -showSub :: NamedThing n => ShowSub -> n -> Bool -showSub [] _ = True -showSub (n:_) thing = n == getName thing +pprFamInst (FamInst { fi_flavor = DataFamilyInst rep_tc }) + = pprTyThingInContextLoc (ATyCon rep_tc) -showSub_maybe :: NamedThing n => ShowSub -> n -> Maybe ShowSub -showSub_maybe [] _ = Just [] -showSub_maybe (n:ns) thing = if n == getName thing then Just ns - else Nothing +pprFamInst (FamInst { fi_flavor = SynFamilyInst, fi_axiom = axiom + , fi_tys = lhs_tys, fi_rhs = rhs }) + = showWithLoc (pprDefinedAt (getName axiom)) $ + hang (ptext (sLit "type instance") <+> pprTypeApp (coAxiomTyCon axiom) lhs_tys) + 2 (equals <+> ppr rhs) ---------------------------- -- | Pretty-prints a 'TyThing' with its defining location. @@ -73,7 +106,13 @@ pprTyThingLoc tyThing -- | Pretty-prints a 'TyThing'. pprTyThing :: TyThing -> SDoc -pprTyThing thing = ppr_ty_thing (Just showAll) thing +pprTyThing = ppr_ty_thing False [] + +-- | Pretty-prints the 'TyThing' header. For functions and data constructors +-- the function is equivalent to 'pprTyThing' but for type constructors +-- and classes it prints only the header part of the declaration. +pprTyThingHdr :: TyThing -> SDoc +pprTyThingHdr = ppr_ty_thing True [] -- | Pretty-prints a 'TyThing' in context: that is, if the entity -- is a data constructor, record selector, or class method, then @@ -84,8 +123,8 @@ pprTyThingInContext thing = go [] thing where go ss thing = case tyThingParent_maybe thing of - Just parent -> go (getName thing : ss) parent - Nothing -> ppr_ty_thing (Just ss) thing + Just parent -> go (getOccName thing : ss) parent + Nothing -> ppr_ty_thing False ss thing -- | Like 'pprTyThingInContext', but adds the defining location. pprTyThingInContextLoc :: TyThing -> SDoc @@ -93,256 +132,49 @@ pprTyThingInContextLoc tyThing = showWithLoc (pprDefinedAt (getName tyThing)) (pprTyThingInContext tyThing) --- | Pretty-prints the 'TyThing' header. For functions and data constructors --- the function is equivalent to 'pprTyThing' but for type constructors --- and classes it prints only the header part of the declaration. -pprTyThingHdr :: TyThing -> SDoc -pprTyThingHdr = ppr_ty_thing Nothing - ------------------------ --- NOTE: We pretty-print 'TyThing' via 'IfaceDecl' so that we can reuse the --- 'TyCon' tidying happening in 'tyThingToIfaceDecl'. See #8776 for details. 
-ppr_ty_thing :: Maybe ShowSub -> TyThing -> SDoc -ppr_ty_thing mss tyThing = case tyThing of - AnId id -> pprId id - ATyCon tyCon -> case mss of - Nothing -> pprTyConHdr tyCon - Just ss -> pprTyCon ss tyCon - _ -> ppr $ tyThingToIfaceDecl tyThing - -pprTyConHdr :: TyCon -> SDoc -pprTyConHdr tyCon - | Just (fam_tc, tys) <- tyConFamInst_maybe tyCon - = ptext keyword <+> ptext (sLit "instance") <+> pprTypeApp fam_tc tys - | Just cls <- tyConClass_maybe tyCon - = pprClassHdr cls - | otherwise - = sdocWithDynFlags $ \dflags -> - ptext keyword <+> opt_family <+> opt_stupid <+> ppr_bndr tyCon - <+> pprTvBndrs (suppressKinds dflags (tyConKind tyCon) vars) - where - vars | isPrimTyCon tyCon || - isFunTyCon tyCon = take (tyConArity tyCon) alphaTyVars - | otherwise = tyConTyVars tyCon - - keyword | isSynTyCon tyCon = sLit "type" - | isNewTyCon tyCon = sLit "newtype" - | otherwise = sLit "data" - - opt_family - | isFamilyTyCon tyCon = ptext (sLit "family") - | otherwise = empty - - opt_stupid -- The "stupid theta" part of the declaration - | isAlgTyCon tyCon = pprThetaArrowTy (tyConStupidTheta tyCon) - | otherwise = empty -- Returns 'empty' if null theta - -pprClassHdr :: Class -> SDoc -pprClassHdr cls - = sdocWithDynFlags $ \dflags -> - ptext (sLit "class") <+> - sep [ pprThetaArrowTy (classSCTheta cls) - , ppr_bndr cls - <+> pprTvBndrs (suppressKinds dflags (tyConKind (classTyCon cls)) tvs) - , pprFundeps funDeps ] +ppr_ty_thing :: Bool -> [OccName] -> TyThing -> SDoc +-- We pretty-print 'TyThing' via 'IfaceDecl' +-- See Note [Pretty-pringint TyThings] +ppr_ty_thing hdr_only path ty_thing + = pprIfaceDecl ss (tyThingToIfaceDecl ty_thing) where - (tvs, funDeps) = classTvsFds cls - -pprId :: Var -> SDoc -pprId ident - = hang (ppr_bndr ident <+> dcolon) - 2 (pprTypeForUser (idType ident)) + ss = ShowSub { ss_how_much = how_much, ss_ppr_bndr = ppr_bndr } + how_much | hdr_only = ShowHeader + | otherwise = ShowSome path + name = getName ty_thing + ppr_bndr :: OccName -> SDoc + ppr_bndr | isBuiltInSyntax name + = ppr + | otherwise + = case nameModule_maybe name of + Just mod -> \ occ -> getPprStyle $ \sty -> + pprModulePrefix sty mod occ <> ppr occ + Nothing -> WARN( True, ppr name ) ppr + -- Nothing is unexpected here; TyThings have External names pprTypeForUser :: Type -> SDoc -- We do two things here. -- a) We tidy the type, regardless --- b) If Opt_PrintExplicitForAlls is True, we discard the foralls --- but we do so `deeply' +-- b) Swizzle the foralls to the top, so that without +-- -fprint-explicit-foralls we'll suppress all the foralls -- Prime example: a class op might have type -- forall a. C a => forall b. 
Ord b => stuff -- Then we want to display -- (C a, Ord b) => stuff pprTypeForUser ty - = sdocWithDynFlags $ \ dflags -> - if gopt Opt_PrintExplicitForalls dflags - then ppr tidy_ty - else ppr (mkPhiTy ctxt ty') + = pprSigmaType (mkSigmaTy tvs ctxt tau) where - (_, ctxt, ty') = tcSplitSigmaTy tidy_ty - (_, tidy_ty) = tidyOpenType emptyTidyEnv ty + (tvs, ctxt, tau) = tcSplitSigmaTy tidy_ty + (_, tidy_ty) = tidyOpenType emptyTidyEnv ty -- Often the types/kinds we print in ghci are fully generalised -- and have no free variables, but it turns out that we sometimes -- print un-generalised kinds (eg when doing :k T), so it's -- better to use tidyOpenType here -pprTyCon :: ShowSub -> TyCon -> SDoc -pprTyCon ss tyCon - | Just syn_rhs <- synTyConRhs_maybe tyCon - = case syn_rhs of - OpenSynFamilyTyCon -> pp_tc_with_kind - BuiltInSynFamTyCon {} -> pp_tc_with_kind - - ClosedSynFamilyTyCon (CoAxiom { co_ax_branches = branches }) - -> hang closed_family_header - 2 (vcat (brListMap (pprCoAxBranch tyCon) branches)) - - AbstractClosedSynFamilyTyCon - -> closed_family_header <+> ptext (sLit "..") - - SynonymTyCon rhs_ty - -> hang (pprTyConHdr tyCon <+> equals) - 2 (ppr rhs_ty) -- Don't suppress foralls on RHS type! - - -- e.g. type T = forall a. a->a - | Just cls <- tyConClass_maybe tyCon - = (pp_roles (== Nominal)) $$ pprClass ss cls - - | otherwise - = (pp_roles (== Representational)) $$ pprAlgTyCon ss tyCon - - where - -- if, for each role, suppress_if role is True, then suppress the role - -- output - pp_roles :: (Role -> Bool) -> SDoc - pp_roles suppress_if - = sdocWithDynFlags $ \dflags -> - let roles = suppressKinds dflags (tyConKind tyCon) (tyConRoles tyCon) - in ppUnless (isFamInstTyCon tyCon || all suppress_if roles) $ - -- Don't display roles for data family instances (yet) - -- See discussion on Trac #8672. - ptext (sLit "type role") <+> ppr tyCon <+> hsep (map ppr roles) - - pp_tc_with_kind = vcat [ pp_roles (const True) - , pprTyConHdr tyCon <+> dcolon - <+> pprTypeForUser (synTyConResKind tyCon) ] - closed_family_header - = pp_tc_with_kind <+> ptext (sLit "where") - -pprAlgTyCon :: ShowSub -> TyCon -> SDoc -pprAlgTyCon ss tyCon - | gadt = pprTyConHdr tyCon <+> ptext (sLit "where") $$ - nest 2 (vcat (ppr_trim (map show_con datacons))) - | otherwise = hang (pprTyConHdr tyCon) - 2 (add_bars (ppr_trim (map show_con datacons))) - where - datacons = tyConDataCons tyCon - gadt = any (not . 
isVanillaDataCon) datacons - - ok_con dc = showSub ss dc || any (showSub ss) (dataConFieldLabels dc) - show_con dc - | ok_con dc = Just (pprDataConDecl ss gadt dc) - | otherwise = Nothing - -pprDataConDecl :: ShowSub -> Bool -> DataCon -> SDoc -pprDataConDecl ss gadt_style dataCon - | not gadt_style = ppr_fields tys_w_strs - | otherwise = ppr_bndr dataCon <+> dcolon <+> - sep [ pp_foralls, pprThetaArrowTy theta, pp_tau ] - -- Printing out the dataCon as a type signature, in GADT style - where - (forall_tvs, theta, tau) = tcSplitSigmaTy (dataConUserType dataCon) - (arg_tys, res_ty) = tcSplitFunTys tau - labels = dataConFieldLabels dataCon - stricts = dataConStrictMarks dataCon - tys_w_strs = zip (map user_ify stricts) arg_tys - pp_foralls = sdocWithDynFlags $ \dflags -> - ppWhen (gopt Opt_PrintExplicitForalls dflags) - (pprForAll forall_tvs) - - pp_tau = foldr add (ppr res_ty) tys_w_strs - add str_ty pp_ty = pprParendBangTy str_ty <+> arrow <+> pp_ty - - pprParendBangTy (bang,ty) = ppr bang <> pprParendType ty - pprBangTy (bang,ty) = ppr bang <> ppr ty - - -- See Note [Printing bangs on data constructors] - user_ify :: HsBang -> HsBang - user_ify bang | opt_PprStyle_Debug = bang - user_ify HsStrict = HsUserBang Nothing True - user_ify (HsUnpack {}) = HsUserBang (Just True) True - user_ify bang = bang - - maybe_show_label (lbl,bty) - | showSub ss lbl = Just (ppr_bndr lbl <+> dcolon <+> pprBangTy bty) - | otherwise = Nothing - - ppr_fields [ty1, ty2] - | dataConIsInfix dataCon && null labels - = sep [pprParendBangTy ty1, pprInfixName dataCon, pprParendBangTy ty2] - ppr_fields fields - | null labels - = ppr_bndr dataCon <+> sep (map pprParendBangTy fields) - | otherwise - = ppr_bndr dataCon - <+> (braces $ sep $ punctuate comma $ ppr_trim $ - map maybe_show_label (zip labels fields)) - -pprClass :: ShowSub -> Class -> SDoc -pprClass ss cls - | null methods && null assoc_ts - = pprClassHdr cls - | otherwise - = vcat [ pprClassHdr cls <+> ptext (sLit "where") - , nest 2 (vcat $ ppr_trim $ - map show_at assoc_ts ++ map show_meth methods)] - where - methods = classMethods cls - assoc_ts = classATs cls - show_meth id | showSub ss id = Just (pprClassMethod id) - | otherwise = Nothing - show_at tc = case showSub_maybe ss tc of - Just ss' -> Just (pprTyCon ss' tc) - Nothing -> Nothing - -pprClassMethod :: Id -> SDoc -pprClassMethod id - = hang (ppr_bndr id <+> dcolon) 2 (pprTypeForUser op_ty) - where - -- Here's the magic incantation to strip off the dictionary - -- from the class op type. Stolen from IfaceSyn.tyThingToIfaceDecl. - -- - -- It's important to tidy it *before* splitting it up, so that if - -- we have class C a b where - -- op :: forall a. a -> b - -- then the inner forall on op gets renamed to a1, and we print - -- (when dropping foralls) - -- class C a b where - -- op :: a1 -> b - - tidy_sel_ty = tidyTopType (idType id) - (_sel_tyvars, rho_ty) = splitForAllTys tidy_sel_ty - op_ty = funResultTy rho_ty - -ppr_trim :: [Maybe SDoc] -> [SDoc] --- Collapse a group of Nothings to a single "..." 
-ppr_trim xs - = snd (foldr go (False, []) xs) - where - go (Just doc) (_, so_far) = (False, doc : so_far) - go Nothing (True, so_far) = (True, so_far) - go Nothing (False, so_far) = (True, ptext (sLit "...") : so_far) - -add_bars :: [SDoc] -> SDoc -add_bars [] = empty -add_bars [c] = equals <+> c -add_bars (c:cs) = sep ((equals <+> c) : map (char '|' <+>) cs) - --- Wrap operators in () -ppr_bndr :: NamedThing a => a -> SDoc -ppr_bndr a = parenSymOcc (getOccName a) (ppr (getName a)) - showWithLoc :: SDoc -> SDoc -> SDoc showWithLoc loc doc = hang doc 2 (char '\t' <> comment <+> loc) -- The tab tries to make them line up a bit where comment = ptext (sLit "--") - -{- -Note [Printing bangs on data constructors] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -For imported data constructors the dataConStrictMarks are the -representation choices (see Note [Bangs on data constructor arguments] -in DataCon.lhs). So we have to fiddle a little bit here to turn them -back into user-printable form. --} diff --git a/compiler/main/StaticFlags.hs b/compiler/main/StaticFlags.hs index 01dc3b7275..eb7ede00c6 100644 --- a/compiler/main/StaticFlags.hs +++ b/compiler/main/StaticFlags.hs @@ -1,4 +1,5 @@ -{-# OPTIONS -fno-cse #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-cse #-} -- -fno-cse is needed for GLOBAL_VAR's to behave properly ----------------------------------------------------------------------------- diff --git a/compiler/main/SysTools.lhs b/compiler/main/SysTools.lhs index 53240faf48..641b0cb12f 100644 --- a/compiler/main/SysTools.lhs +++ b/compiler/main/SysTools.lhs @@ -7,6 +7,8 @@ ----------------------------------------------------------------------------- \begin{code} +{-# LANGUAGE CPP, ScopedTypeVariables #-} + module SysTools ( -- Initialisation initSysTools, @@ -233,6 +235,8 @@ initSysTools mbMinusB -- to make that possible, so for now you can't. gcc_prog <- getSetting "C compiler command" gcc_args_str <- getSetting "C compiler flags" + cpp_prog <- getSetting "Haskell CPP command" + cpp_args_str <- getSetting "Haskell CPP flags" let unreg_gcc_args = if targetUnregisterised then ["-DNO_REGS", "-DUSE_MINIINTERPRETER"] else [] @@ -241,6 +245,7 @@ initSysTools mbMinusB | mkTablesNextToCode targetUnregisterised = ["-DTABLES_NEXT_TO_CODE"] | otherwise = [] + cpp_args= map Option (words cpp_args_str) gcc_args = map Option (words gcc_args_str ++ unreg_gcc_args ++ tntc_gcc_args) @@ -283,10 +288,7 @@ initSysTools mbMinusB -- cpp is derived from gcc on all platforms -- HACK, see setPgmP below. We keep 'words' here to remember to fix -- Config.hs one day. - let cpp_prog = gcc_prog - cpp_args = Option "-E" - : map Option (words cRAWCPP_FLAGS) - ++ gcc_args + -- Other things being equal, as and ld are simply gcc gcc_link_args_str <- getSetting "C compiler link flags" @@ -727,7 +729,7 @@ getLinkerInfo' dflags = do -- that doesn't support --version. We can just assume that's -- what we're using. return $ DarwinLD [] - OSiOS -> + OSiOS -> -- Ditto for iOS return $ DarwinLD [] OSMinGW32 -> @@ -786,12 +788,15 @@ getCompilerInfo' dflags = do -- Regular clang | any ("clang version" `isPrefixOf`) stde = return Clang + -- XCode 5.1 clang + | any ("Apple LLVM version 5.1" `isPrefixOf`) stde = + return AppleClang51 -- XCode 5 clang | any ("Apple LLVM version" `isPrefixOf`) stde = - return Clang + return AppleClang -- XCode 4.1 clang | any ("Apple clang version" `isPrefixOf`) stde = - return Clang + return AppleClang -- Unknown linker. 
| otherwise = fail "invalid -v output, or compiler is unsupported" diff --git a/compiler/main/TidyPgm.lhs b/compiler/main/TidyPgm.lhs index b20658b073..7d47330044 100644 --- a/compiler/main/TidyPgm.lhs +++ b/compiler/main/TidyPgm.lhs @@ -4,6 +4,8 @@ \section{Tidying up Core} \begin{code} +{-# LANGUAGE CPP #-} + module TidyPgm ( mkBootModDetailsTc, tidyProgram, globaliseAndTidyId ) where @@ -21,11 +23,14 @@ import CorePrep import CoreUtils import Literal import Rules +import PatSyn +import ConLike import CoreArity ( exprArity, exprBotStrictness_maybe ) import VarEnv import VarSet import Var import Id +import MkId ( mkDictSelRhs ) import IdInfo import InstEnv import FamInstEnv @@ -129,18 +134,20 @@ mkBootModDetailsTc hsc_env TcGblEnv{ tcg_exports = exports, tcg_type_env = type_env, -- just for the Ids tcg_tcs = tcs, + tcg_patsyns = pat_syns, tcg_insts = insts, tcg_fam_insts = fam_insts } = do { let dflags = hsc_dflags hsc_env ; showPass dflags CoreTidy - ; let { insts' = map (tidyClsInstDFun globaliseAndTidyId) insts - ; dfun_ids = map instanceDFunId insts' + ; let { insts' = map (tidyClsInstDFun globaliseAndTidyId) insts + ; pat_syns' = map (tidyPatSynIds globaliseAndTidyId) pat_syns + ; dfun_ids = map instanceDFunId insts' + ; pat_syn_ids = concatMap patSynIds pat_syns' ; type_env1 = mkBootTypeEnv (availsToNameSet exports) - (typeEnvIds type_env) tcs fam_insts - ; type_env2 = extendTypeEnvWithPatSyns type_env1 (typeEnvPatSyns type_env) - ; type_env' = extendTypeEnvWithIds type_env2 dfun_ids + (typeEnvIds type_env) tcs fam_insts + ; type_env' = extendTypeEnvWithIds type_env1 (pat_syn_ids ++ dfun_ids) } ; return (ModDetails { md_types = type_env' , md_insts = insts' @@ -333,19 +340,13 @@ tidyProgram hsc_env (ModGuts { mg_module = mod ; let { final_ids = [ id | id <- bindersOfBinds tidy_binds, isExternalName (idName id)] - ; final_patsyns = filter (isExternalName . getName) patsyns - - ; type_env' = extendTypeEnvWithIds type_env final_ids - ; type_env'' = extendTypeEnvWithPatSyns type_env' final_patsyns - - ; tidy_type_env = tidyTypeEnv omit_prags type_env'' + ; type_env1 = extendTypeEnvWithIds type_env final_ids - ; tidy_insts = map (tidyClsInstDFun (lookup_dfun tidy_type_env)) insts - -- A DFunId will have a binding in tidy_binds, and so - -- will now be in final_env, replete with IdInfo - -- Its name will be unchanged since it was born, but - -- we want Global, IdInfo-rich (or not) DFunId in the - -- tidy_insts + ; tidy_insts = map (tidyClsInstDFun (lookup_aux_id tidy_type_env)) insts + -- A DFunId will have a binding in tidy_binds, and so will now be in + -- tidy_type_env, replete with IdInfo. Its name will be unchanged since + -- it was born, but we want Global, IdInfo-rich (or not) DFunId in the + -- tidy_insts. Similarly the Ids inside a PatSyn. 
; tidy_rules = tidyRules tidy_env ext_rules -- You might worry that the tidy_env contains IdInfo-rich stuff @@ -354,6 +355,16 @@ tidyProgram hsc_env (ModGuts { mg_module = mod ; tidy_vect_info = tidyVectInfo tidy_env vect_info + -- Tidy the Ids inside each PatSyn, very similarly to DFunIds + -- and then override the PatSyns in the type_env with the new tidy ones + -- This is really the only reason we keep mg_patsyns at all; otherwise + -- they could just stay in type_env + ; tidy_patsyns = map (tidyPatSynIds (lookup_aux_id tidy_type_env)) patsyns + ; type_env2 = extendTypeEnvList type_env1 + [AConLike (PatSynCon ps) | ps <- tidy_patsyns ] + + ; tidy_type_env = tidyTypeEnv omit_prags type_env2 + -- See Note [Injecting implicit bindings] ; all_tidy_binds = implicit_binds ++ tidy_binds @@ -405,11 +416,11 @@ tidyProgram hsc_env (ModGuts { mg_module = mod }) } -lookup_dfun :: TypeEnv -> Var -> Id -lookup_dfun type_env dfun_id - = case lookupTypeEnv type_env (idName dfun_id) of - Just (AnId dfun_id') -> dfun_id' - _other -> pprPanic "lookup_dfun" (ppr dfun_id) +lookup_aux_id :: TypeEnv -> Var -> Id +lookup_aux_id type_env id + = case lookupTypeEnv type_env (idName id) of + Just (AnId id') -> id' + _other -> pprPanic "lookup_axu_id" (ppr id) -------------------------- tidyTypeEnv :: Bool -- Compiling without -O, so omit prags @@ -517,7 +528,7 @@ of exceptions, and finally I gave up the battle: Note [Injecting implicit bindings] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -We inject the implict bindings right at the end, in CoreTidy. +We inject the implicit bindings right at the end, in CoreTidy. Some of these bindings, notably record selectors, are not constructed in an optimised form. E.g. record selector for data T = MkT { x :: {-# UNPACK #-} !Int } @@ -559,14 +570,16 @@ Oh: two other reasons for injecting them late: There is one sort of implicit binding that is injected still later, namely those for data constructor workers. Reason (I think): it's really just a code generation trick.... binding itself makes no sense. -See CorePrep Note [Data constructor workers]. +See Note [Data constructor workers] in CorePrep. \begin{code} getTyConImplicitBinds :: TyCon -> [CoreBind] getTyConImplicitBinds tc = map get_defn (mapMaybe dataConWrapId_maybe (tyConDataCons tc)) getClassImplicitBinds :: Class -> [CoreBind] -getClassImplicitBinds cls = map get_defn (classAllSelIds cls) +getClassImplicitBinds cls + = [ NonRec op (mkDictSelRhs cls val_index) + | (op, val_index) <- classAllSelIds cls `zip` [0..] 
] get_defn :: Id -> CoreBind get_defn id = NonRec id (unfoldingTemplate (realIdUnfolding id)) diff --git a/compiler/nativeGen/AsmCodeGen.lhs b/compiler/nativeGen/AsmCodeGen.lhs index 09a3bf7ec8..e53bb11cc3 100644 --- a/compiler/nativeGen/AsmCodeGen.lhs +++ b/compiler/nativeGen/AsmCodeGen.lhs @@ -7,7 +7,8 @@ -- ----------------------------------------------------------------------------- \begin{code} -{-# LANGUAGE GADTs #-} +{-# LANGUAGE BangPatterns, CPP, GADTs, ScopedTypeVariables, UnboxedTuples #-} + module AsmCodeGen ( nativeCodeGen ) where #include "HsVersions.h" @@ -605,7 +606,7 @@ makeImportsDoc dflags imports then text ".section .note.GNU-stack,\"\",@progbits" else empty) $$ - -- And just because every other compiler does, lets stick in + -- And just because every other compiler does, let's stick in -- an identifier directive: .ident "GHC x.y.z" (if platformHasIdentDirective platform then let compilerIdent = text "GHC" <+> text cProjectVersion diff --git a/compiler/nativeGen/CPrim.hs b/compiler/nativeGen/CPrim.hs index a6f4cab7bd..34782dfc1c 100644 --- a/compiler/nativeGen/CPrim.hs +++ b/compiler/nativeGen/CPrim.hs @@ -1,11 +1,16 @@ -- | Generating C symbol names emitted by the compiler. module CPrim - ( popCntLabel + ( atomicReadLabel + , atomicWriteLabel + , atomicRMWLabel + , cmpxchgLabel + , popCntLabel , bSwapLabel , word2FloatLabel ) where import CmmType +import CmmMachOp import Outputable popCntLabel :: Width -> String @@ -31,3 +36,46 @@ word2FloatLabel w = "hs_word2float" ++ pprWidth w pprWidth W32 = "32" pprWidth W64 = "64" pprWidth w = pprPanic "word2FloatLabel: Unsupported word width " (ppr w) + +atomicRMWLabel :: Width -> AtomicMachOp -> String +atomicRMWLabel w amop = "hs_atomic_" ++ pprFunName amop ++ pprWidth w + where + pprWidth W8 = "8" + pprWidth W16 = "16" + pprWidth W32 = "32" + pprWidth W64 = "64" + pprWidth w = pprPanic "atomicRMWLabel: Unsupported word width " (ppr w) + + pprFunName AMO_Add = "add" + pprFunName AMO_Sub = "sub" + pprFunName AMO_And = "and" + pprFunName AMO_Nand = "nand" + pprFunName AMO_Or = "or" + pprFunName AMO_Xor = "xor" + +cmpxchgLabel :: Width -> String +cmpxchgLabel w = "hs_cmpxchg" ++ pprWidth w + where + pprWidth W8 = "8" + pprWidth W16 = "16" + pprWidth W32 = "32" + pprWidth W64 = "64" + pprWidth w = pprPanic "cmpxchgLabel: Unsupported word width " (ppr w) + +atomicReadLabel :: Width -> String +atomicReadLabel w = "hs_atomicread" ++ pprWidth w + where + pprWidth W8 = "8" + pprWidth W16 = "16" + pprWidth W32 = "32" + pprWidth W64 = "64" + pprWidth w = pprPanic "atomicReadLabel: Unsupported word width " (ppr w) + +atomicWriteLabel :: Width -> String +atomicWriteLabel w = "hs_atomicwrite" ++ pprWidth w + where + pprWidth W8 = "8" + pprWidth W16 = "16" + pprWidth W32 = "32" + pprWidth W64 = "64" + pprWidth w = pprPanic "atomicWriteLabel: Unsupported word width " (ppr w) diff --git a/compiler/nativeGen/NCGMonad.hs b/compiler/nativeGen/NCGMonad.hs index 3ee3af2ea9..a4c9f74df7 100644 --- a/compiler/nativeGen/NCGMonad.hs +++ b/compiler/nativeGen/NCGMonad.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow 1993-2004 diff --git a/compiler/nativeGen/PPC/CodeGen.hs b/compiler/nativeGen/PPC/CodeGen.hs index 3f0e7632f8..014117dd4c 100644 --- a/compiler/nativeGen/PPC/CodeGen.hs +++ b/compiler/nativeGen/PPC/CodeGen.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP, GADTs #-} ----------------------------------------------------------------------------- -- @@ 
-12,7 +13,6 @@ -- (c) the #if blah_TARGET_ARCH} things, the -- structure should not be too overwhelming. -{-# LANGUAGE GADTs #-} module PPC.CodeGen ( cmmTopCodeGen, generateJumpTableForInstr, @@ -813,15 +813,6 @@ genBranch = return . toOL . mkJumpInstr Conditional jumps are always to local labels, so we can use branch instructions. We peek at the arguments to decide what kind of comparison to do. - -SPARC: First, we have to ensure that the condition codes are set -according to the supplied comparison operation. We generate slightly -different code for floating point comparisons, because a floating -point operation cannot directly precede a @BF@. We assume the worst -and fill that slot with a @NOP@. - -SPARC: Do not fill the delay slots here; you will confuse the register -allocator. -} @@ -1160,6 +1151,10 @@ genCCall' dflags gcp target dest_regs args0 MO_BSwap w -> (fsLit $ bSwapLabel w, False) MO_PopCnt w -> (fsLit $ popCntLabel w, False) + MO_AtomicRMW w amop -> (fsLit $ atomicRMWLabel w amop, False) + MO_Cmpxchg w -> (fsLit $ cmpxchgLabel w, False) + MO_AtomicRead w -> (fsLit $ atomicReadLabel w, False) + MO_AtomicWrite w -> (fsLit $ atomicWriteLabel w, False) MO_S_QuotRem {} -> unsupported MO_U_QuotRem {} -> unsupported diff --git a/compiler/nativeGen/PPC/Cond.hs b/compiler/nativeGen/PPC/Cond.hs index b8c5208c66..2568da5249 100644 --- a/compiler/nativeGen/PPC/Cond.hs +++ b/compiler/nativeGen/PPC/Cond.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/PPC/Instr.hs b/compiler/nativeGen/PPC/Instr.hs index ddb9c51c7b..3756c649bb 100644 --- a/compiler/nativeGen/PPC/Instr.hs +++ b/compiler/nativeGen/PPC/Instr.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Machine-dependent assembly language diff --git a/compiler/nativeGen/PPC/RegInfo.hs b/compiler/nativeGen/PPC/RegInfo.hs index 8b35d87573..bffa9ea63f 100644 --- a/compiler/nativeGen/PPC/RegInfo.hs +++ b/compiler/nativeGen/PPC/RegInfo.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Machine-specific parts of the register allocator @@ -6,7 +8,7 @@ -- ----------------------------------------------------------------------------- -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/PPC/Regs.hs b/compiler/nativeGen/PPC/Regs.hs index f92351bd22..0f636bf64c 100644 --- a/compiler/nativeGen/PPC/Regs.hs +++ b/compiler/nativeGen/PPC/Regs.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow 1994-2004 diff --git a/compiler/nativeGen/Reg.hs b/compiler/nativeGen/Reg.hs index fee74be355..77ca7480d6 100644 --- a/compiler/nativeGen/Reg.hs +++ b/compiler/nativeGen/Reg.hs @@ -5,7 +5,7 @@ -- by all architectures. -- -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
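The PPC hunk above (and the SPARC one further down) lowers the new atomic MachOps to out-of-line C calls whose symbol names come from the CPrim helpers added earlier in this patch. Reading the expected names straight off those definitions, a throwaway check that would sit inside the compiler tree and import CPrim, CmmType and CmmMachOp might look like this (illustrative only):

expectedAtomicLabels :: [String]
expectedAtomicLabels =
  [ atomicRMWLabel   W32 AMO_Add    -- "hs_atomic_add32"
  , atomicRMWLabel   W64 AMO_Nand   -- "hs_atomic_nand64"
  , cmpxchgLabel     W16            -- "hs_cmpxchg16"
  , atomicReadLabel  W8             -- "hs_atomicread8"
  , atomicWriteLabel W64            -- "hs_atomicwrite64"
  ]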
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/RegAlloc/Graph/Main.hs b/compiler/nativeGen/RegAlloc/Graph/Main.hs index dbaf5098ce..05db68dd46 100644 --- a/compiler/nativeGen/RegAlloc/Graph/Main.hs +++ b/compiler/nativeGen/RegAlloc/Graph/Main.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE ScopedTypeVariables #-} + -- | Graph coloring register allocator. module RegAlloc.Graph.Main ( regAlloc diff --git a/compiler/nativeGen/RegAlloc/Graph/Stats.hs b/compiler/nativeGen/RegAlloc/Graph/Stats.hs index 7bc842d1c9..8fada96ee2 100644 --- a/compiler/nativeGen/RegAlloc/Graph/Stats.hs +++ b/compiler/nativeGen/RegAlloc/Graph/Stats.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE BangPatterns, CPP #-} -- | Carries interesting info for debugging / profiling of the -- graph coloring register allocator. diff --git a/compiler/nativeGen/RegAlloc/Graph/TrivColorable.hs b/compiler/nativeGen/RegAlloc/Graph/TrivColorable.hs index 2d58ed9981..eba2e43149 100644 --- a/compiler/nativeGen/RegAlloc/Graph/TrivColorable.hs +++ b/compiler/nativeGen/RegAlloc/Graph/TrivColorable.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE BangPatterns, CPP #-} module RegAlloc.Graph.TrivColorable ( trivColorable, diff --git a/compiler/nativeGen/RegAlloc/Linear/FreeRegs.hs b/compiler/nativeGen/RegAlloc/Linear/FreeRegs.hs index 0247c9dfae..a1a00ba582 100644 --- a/compiler/nativeGen/RegAlloc/Linear/FreeRegs.hs +++ b/compiler/nativeGen/RegAlloc/Linear/FreeRegs.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP #-} module RegAlloc.Linear.FreeRegs ( FR(..), diff --git a/compiler/nativeGen/RegAlloc/Linear/Main.hs b/compiler/nativeGen/RegAlloc/Linear/Main.hs index 46d5309f70..ee43d25aa3 100644 --- a/compiler/nativeGen/RegAlloc/Linear/Main.hs +++ b/compiler/nativeGen/RegAlloc/Linear/Main.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, ScopedTypeVariables #-} + ----------------------------------------------------------------------------- -- -- The register allocator diff --git a/compiler/nativeGen/RegAlloc/Linear/PPC/FreeRegs.hs b/compiler/nativeGen/RegAlloc/Linear/PPC/FreeRegs.hs index 0bdb49fb2e..b76fe79d7d 100644 --- a/compiler/nativeGen/RegAlloc/Linear/PPC/FreeRegs.hs +++ b/compiler/nativeGen/RegAlloc/Linear/PPC/FreeRegs.hs @@ -1,4 +1,3 @@ - -- | Free regs map for PowerPC module RegAlloc.Linear.PPC.FreeRegs where diff --git a/compiler/nativeGen/RegAlloc/Linear/State.hs b/compiler/nativeGen/RegAlloc/Linear/State.hs index dc499c9c1f..39b5777ef3 100644 --- a/compiler/nativeGen/RegAlloc/Linear/State.hs +++ b/compiler/nativeGen/RegAlloc/Linear/State.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE UnboxedTuples #-} + -- | State monad for the linear register allocator. 
-- Here we keep all the state that the register allocator keeps track diff --git a/compiler/nativeGen/RegAlloc/Liveness.hs b/compiler/nativeGen/RegAlloc/Liveness.hs index b0e763a6f0..1cb6dc8268 100644 --- a/compiler/nativeGen/RegAlloc/Liveness.hs +++ b/compiler/nativeGen/RegAlloc/Liveness.hs @@ -1,3 +1,8 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeFamilies #-} + ----------------------------------------------------------------------------- -- -- The register liveness determinator @@ -5,7 +10,7 @@ -- (c) The University of Glasgow 2004-2013 -- ----------------------------------------------------------------------------- -{-# LANGUAGE FlexibleContexts, TypeFamilies #-} + module RegAlloc.Liveness ( RegSet, RegMap, emptyRegMap, @@ -666,14 +671,20 @@ sccBlocks sccBlocks blocks entries = map (fmap get_node) sccs where - sccs = stronglyConnCompFromG graph roots - - graph = graphFromEdgedVertices nodes - -- nodes :: [(NatBasicBlock instr, Unique, [Unique])] nodes = [ (block, id, getOutEdges instrs) | block@(BasicBlock id instrs) <- blocks ] + g1 = graphFromEdgedVertices nodes + + reachable :: BlockSet + reachable = setFromList [ id | (_,id,_) <- reachablesG g1 roots ] + + g2 = graphFromEdgedVertices [ node | node@(_,id,_) <- nodes + , id `setMember` reachable ] + + sccs = stronglyConnCompG g2 + get_node (n, _, _) = n getOutEdges :: Instruction instr => [instr] -> [BlockId] diff --git a/compiler/nativeGen/RegClass.hs b/compiler/nativeGen/RegClass.hs index 7ccc0c1bec..cac4e64221 100644 --- a/compiler/nativeGen/RegClass.hs +++ b/compiler/nativeGen/RegClass.hs @@ -1,4 +1,4 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/CodeGen.hs b/compiler/nativeGen/SPARC/CodeGen.hs index 5d65b427e1..51f89d629f 100644 --- a/compiler/nativeGen/SPARC/CodeGen.hs +++ b/compiler/nativeGen/SPARC/CodeGen.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Generating machine code (instruction selection) @@ -652,6 +654,10 @@ outOfLineMachOp_table mop MO_BSwap w -> fsLit $ bSwapLabel w MO_PopCnt w -> fsLit $ popCntLabel w + MO_AtomicRMW w amop -> fsLit $ atomicRMWLabel w amop + MO_Cmpxchg w -> fsLit $ cmpxchgLabel w + MO_AtomicRead w -> fsLit $ atomicReadLabel w + MO_AtomicWrite w -> fsLit $ atomicWriteLabel w MO_S_QuotRem {} -> unsupported MO_U_QuotRem {} -> unsupported diff --git a/compiler/nativeGen/SPARC/CodeGen/Amode.hs b/compiler/nativeGen/SPARC/CodeGen/Amode.hs index 324eda94e7..f0aed0d02e 100644 --- a/compiler/nativeGen/SPARC/CodeGen/Amode.hs +++ b/compiler/nativeGen/SPARC/CodeGen/Amode.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/nativeGen/SPARC/CodeGen/Base.hs b/compiler/nativeGen/SPARC/CodeGen/Base.hs index 03b31e016a..45b7801960 100644 --- a/compiler/nativeGen/SPARC/CodeGen/Base.hs +++ b/compiler/nativeGen/SPARC/CodeGen/Base.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/CodeGen/CondCode.hs b/compiler/nativeGen/SPARC/CodeGen/CondCode.hs index 375a9e1b33..2c3dbe6fc0 100644 --- a/compiler/nativeGen/SPARC/CodeGen/CondCode.hs +++ b/compiler/nativeGen/SPARC/CodeGen/CondCode.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/CodeGen/Expand.hs b/compiler/nativeGen/SPARC/CodeGen/Expand.hs index 03f571c20b..7ebc2f6630 100644 --- a/compiler/nativeGen/SPARC/CodeGen/Expand.hs +++ b/compiler/nativeGen/SPARC/CodeGen/Expand.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/CodeGen/Gen32.hs b/compiler/nativeGen/SPARC/CodeGen/Gen32.hs index df876b4622..43a26e525a 100644 --- a/compiler/nativeGen/SPARC/CodeGen/Gen32.hs +++ b/compiler/nativeGen/SPARC/CodeGen/Gen32.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/CodeGen/Sanity.hs b/compiler/nativeGen/SPARC/CodeGen/Sanity.hs index d4cdaf2b16..5dff9ce704 100644 --- a/compiler/nativeGen/SPARC/CodeGen/Sanity.hs +++ b/compiler/nativeGen/SPARC/CodeGen/Sanity.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/Cond.hs b/compiler/nativeGen/SPARC/Cond.hs index b8919a72a2..198e4a7627 100644 --- a/compiler/nativeGen/SPARC/Cond.hs +++ b/compiler/nativeGen/SPARC/Cond.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/Imm.hs b/compiler/nativeGen/SPARC/Imm.hs index 4c2bb5a481..844a08824b 100644 --- a/compiler/nativeGen/SPARC/Imm.hs +++ b/compiler/nativeGen/SPARC/Imm.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/nativeGen/SPARC/Instr.hs b/compiler/nativeGen/SPARC/Instr.hs index 601e04787a..8e4a2b32df 100644 --- a/compiler/nativeGen/SPARC/Instr.hs +++ b/compiler/nativeGen/SPARC/Instr.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Machine-dependent assembly language @@ -6,7 +8,7 @@ -- ----------------------------------------------------------------------------- -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/Ppr.hs b/compiler/nativeGen/SPARC/Ppr.hs index 601b5288a0..654179e077 100644 --- a/compiler/nativeGen/SPARC/Ppr.hs +++ b/compiler/nativeGen/SPARC/Ppr.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Pretty-printing assembly language diff --git a/compiler/nativeGen/SPARC/Regs.hs b/compiler/nativeGen/SPARC/Regs.hs index 55b6ac9156..01db0ed3ac 100644 --- a/compiler/nativeGen/SPARC/Regs.hs +++ b/compiler/nativeGen/SPARC/Regs.hs @@ -4,7 +4,7 @@ -- -- ----------------------------------------------------------------------------- -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/ShortcutJump.hs b/compiler/nativeGen/SPARC/ShortcutJump.hs index 7f978c17c5..142ec6e65d 100644 --- a/compiler/nativeGen/SPARC/ShortcutJump.hs +++ b/compiler/nativeGen/SPARC/ShortcutJump.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/SPARC/Stack.hs b/compiler/nativeGen/SPARC/Stack.hs index 4a6f4c1335..3560a0fe82 100644 --- a/compiler/nativeGen/SPARC/Stack.hs +++ b/compiler/nativeGen/SPARC/Stack.hs @@ -1,5 +1,5 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/Size.hs b/compiler/nativeGen/Size.hs index 45a39645cc..1b95ceb98b 100644 --- a/compiler/nativeGen/Size.hs +++ b/compiler/nativeGen/Size.hs @@ -1,4 +1,4 @@ -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/TargetReg.hs b/compiler/nativeGen/TargetReg.hs index 1f7f4e0db0..daf1e254c8 100644 --- a/compiler/nativeGen/TargetReg.hs +++ b/compiler/nativeGen/TargetReg.hs @@ -1,5 +1,5 @@ - -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/nativeGen/X86/CodeGen.hs b/compiler/nativeGen/X86/CodeGen.hs index e659488fe0..8e9b49d78d 100644 --- a/compiler/nativeGen/X86/CodeGen.hs +++ b/compiler/nativeGen/X86/CodeGen.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, GADTs, NondecreasingIndentation #-} + ----------------------------------------------------------------------------- -- -- Generating machine code (instruction selection) @@ -10,7 +12,6 @@ -- (a) the sectioning, and (b) the type signatures, the -- structure should not be too overwhelming. -{-# LANGUAGE GADTs #-} module X86.CodeGen ( cmmTopCodeGen, generateJumpTableForInstr, @@ -804,6 +805,8 @@ getRegister' _ is32Bit (CmmMachOp mop [x, y]) = do -- dyadic MachOps | is32BitInteger y = add_int rep x y add_code rep x y = trivialCode rep (ADD size) (Just (ADD size)) x y where size = intSize rep + -- TODO: There are other interesting patterns we want to replace + -- with a LEA, e.g. `(x + offset) + (y << shift)`. -------------------- sub_code :: Width -> CmmExpr -> CmmExpr -> NatM Register @@ -1024,6 +1027,13 @@ getAmode' is32Bit (CmmMachOp (MO_Add rep) [a@(CmmMachOp (MO_Shl _) _), b@(CmmLit _)]) = getAmode' is32Bit (CmmMachOp (MO_Add rep) [b,a]) +-- Matches: (x + offset) + (y << shift) +getAmode' _ (CmmMachOp (MO_Add _) [CmmRegOff x offset, + CmmMachOp (MO_Shl _) + [y, CmmLit (CmmInt shift _)]]) + | shift == 0 || shift == 1 || shift == 2 || shift == 3 + = x86_complex_amode (CmmReg x) y shift (fromIntegral offset) + getAmode' _ (CmmMachOp (MO_Add _) [x, CmmMachOp (MO_Shl _) [y, CmmLit (CmmInt shift _)]]) | shift == 0 || shift == 1 || shift == 2 || shift == 3 @@ -1047,6 +1057,18 @@ getAmode' _ expr = do (reg,code) <- getSomeReg expr return (Amode (AddrBaseIndex (EABaseReg reg) EAIndexNone (ImmInt 0)) code) +-- | Like 'getAmode', but on 32-bit use simple register addressing +-- (i.e. no index register). This stops us from running out of +-- registers on x86 when using instructions such as cmpxchg, which can +-- use up to three virtual registers and one fixed register. +getSimpleAmode :: DynFlags -> Bool -> CmmExpr -> NatM Amode +getSimpleAmode dflags is32Bit addr + | is32Bit = do + addr_code <- getAnyReg addr + addr_r <- getNewRegNat (intSize (wordWidth dflags)) + let amode = AddrBaseIndex (EABaseReg addr_r) EAIndexNone (ImmInt 0) + return $! 
Amode amode (addr_code addr_r) + | otherwise = getAmode addr x86_complex_amode :: CmmExpr -> CmmExpr -> Integer -> Integer -> NatM Amode x86_complex_amode base index shift offset @@ -1751,6 +1773,99 @@ genCCall dflags is32Bit (PrimTarget (MO_UF_Conv width)) dest_regs args = do where lbl = mkCmmCodeLabel primPackageId (fsLit (word2FloatLabel width)) +genCCall dflags is32Bit (PrimTarget (MO_AtomicRMW width amop)) [dst] [addr, n] = do + Amode amode addr_code <- + if amop `elem` [AMO_Add, AMO_Sub] + then getAmode addr + else getSimpleAmode dflags is32Bit addr -- See genCCall for MO_Cmpxchg + arg <- getNewRegNat size + arg_code <- getAnyReg n + use_sse2 <- sse2Enabled + let platform = targetPlatform dflags + dst_r = getRegisterReg platform use_sse2 (CmmLocal dst) + code <- op_code dst_r arg amode + return $ addr_code `appOL` arg_code arg `appOL` code + where + -- Code for the operation + op_code :: Reg -- Destination reg + -> Reg -- Register containing argument + -> AddrMode -- Address of location to mutate + -> NatM (OrdList Instr) + op_code dst_r arg amode = case amop of + -- In the common case where dst_r is a virtual register the + -- final move should go away, because it's the last use of arg + -- and the first use of dst_r. + AMO_Add -> return $ toOL [ LOCK + , XADD size (OpReg arg) (OpAddr amode) + , MOV size (OpReg arg) (OpReg dst_r) + ] + AMO_Sub -> return $ toOL [ NEGI size (OpReg arg) + , LOCK + , XADD size (OpReg arg) (OpAddr amode) + , MOV size (OpReg arg) (OpReg dst_r) + ] + AMO_And -> cmpxchg_code (\ src dst -> unitOL $ AND size src dst) + AMO_Nand -> cmpxchg_code (\ src dst -> toOL [ AND size src dst + , NOT size dst + ]) + AMO_Or -> cmpxchg_code (\ src dst -> unitOL $ OR size src dst) + AMO_Xor -> cmpxchg_code (\ src dst -> unitOL $ XOR size src dst) + where + -- Simulate operation that lacks a dedicated instruction using + -- cmpxchg. + cmpxchg_code :: (Operand -> Operand -> OrdList Instr) + -> NatM (OrdList Instr) + cmpxchg_code instrs = do + lbl <- getBlockIdNat + tmp <- getNewRegNat size + return $ toOL + [ MOV size (OpAddr amode) (OpReg eax) + , JXX ALWAYS lbl + , NEWBLOCK lbl + -- Keep old value so we can return it: + , MOV size (OpReg eax) (OpReg dst_r) + , MOV size (OpReg eax) (OpReg tmp) + ] + `appOL` instrs (OpReg arg) (OpReg tmp) `appOL` toOL + [ LOCK + , CMPXCHG size (OpReg tmp) (OpAddr amode) + , JXX NE lbl + ] + + size = intSize width + +genCCall dflags _ (PrimTarget (MO_AtomicRead width)) [dst] [addr] = do + load_code <- intLoadCode (MOV (intSize width)) addr + let platform = targetPlatform dflags + use_sse2 <- sse2Enabled + return (load_code (getRegisterReg platform use_sse2 (CmmLocal dst))) + +genCCall _ _ (PrimTarget (MO_AtomicWrite width)) [] [addr, val] = do + assignMem_IntCode (intSize width) addr val + +genCCall dflags is32Bit (PrimTarget (MO_Cmpxchg width)) [dst] [addr, old, new] = do + -- On x86 we don't have enough registers to use cmpxchg with a + -- complicated addressing mode, so on that architecture we + -- pre-compute the address first. 
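The cmpxchg_code helper above compiles AMO_And, AMO_Nand, AMO_Or and AMO_Xor, which have no single x86 read-modify-write instruction, into a LOCK CMPXCHG retry loop. In ordinary Haskell terms the emitted loop behaves like the sketch below, where the cas argument is a hypothetical stand-in for the hardware compare-and-swap rather than any GHC API:

-- Conceptual model of the emitted instruction sequence: read the old
-- value, apply the operation, and try to publish the result with a
-- compare-and-swap; retry if another thread changed the cell meanwhile.
-- The old value is returned, matching MO_AtomicRMW's result convention.
atomicRMWSketch :: (Word -> Word)              -- the AMO operation
                -> IO Word                     -- read the shared cell
                -> (Word -> Word -> IO Bool)   -- cas: expected -> new -> success?
                -> IO Word
atomicRMWSketch op readCell cas = loop
  where
    loop = do
      old <- readCell
      ok  <- cas old (op old)
      if ok then return old else loop

The MO_Cmpxchg case that follows pre-computes a simple address with getSimpleAmode for exactly the register-pressure reason given in the comment above.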
+ Amode amode addr_code <- getSimpleAmode dflags is32Bit addr + newval <- getNewRegNat size + newval_code <- getAnyReg new + oldval <- getNewRegNat size + oldval_code <- getAnyReg old + use_sse2 <- sse2Enabled + let platform = targetPlatform dflags + dst_r = getRegisterReg platform use_sse2 (CmmLocal dst) + code = toOL + [ MOV size (OpReg oldval) (OpReg eax) + , LOCK + , CMPXCHG size (OpReg newval) (OpAddr amode) + , MOV size (OpReg eax) (OpReg dst_r) + ] + return $ addr_code `appOL` newval_code newval `appOL` oldval_code oldval + `appOL` code + where + size = intSize width + genCCall _ is32Bit target dest_regs args | is32Bit = genCCall32 target dest_regs args | otherwise = genCCall64 target dest_regs args @@ -2375,6 +2490,11 @@ outOfLineCmmOp mop res args MO_PopCnt _ -> fsLit "popcnt" MO_BSwap _ -> fsLit "bswap" + MO_AtomicRMW _ _ -> fsLit "atomicrmw" + MO_AtomicRead _ -> fsLit "atomicread" + MO_AtomicWrite _ -> fsLit "atomicwrite" + MO_Cmpxchg _ -> fsLit "cmpxchg" + MO_UF_Conv _ -> unsupported MO_S_QuotRem {} -> unsupported diff --git a/compiler/nativeGen/X86/Instr.hs b/compiler/nativeGen/X86/Instr.hs index 75e5b9e737..ac91747171 100644 --- a/compiler/nativeGen/X86/Instr.hs +++ b/compiler/nativeGen/X86/Instr.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP, TypeFamilies #-} + ----------------------------------------------------------------------------- -- -- Machine-dependent assembly language @@ -6,16 +8,15 @@ -- ----------------------------------------------------------------------------- -#include "HsVersions.h" -#include "nativeGen/NCG.h" - -{-# LANGUAGE TypeFamilies #-} module X86.Instr (Instr(..), Operand(..), PrefetchVariant(..), JumpDest, getJumpDestBlockId, canShortcut, shortcutStatics, shortcutJump, i386_insert_ffrees, allocMoreStack, maxSpillSlots, archWordSize) where +#include "HsVersions.h" +#include "nativeGen/NCG.h" + import X86.Cond import X86.Regs import Instruction @@ -326,6 +327,10 @@ data Instr | PREFETCH PrefetchVariant Size Operand -- prefetch Variant, addr size, address to prefetch -- variant can be NTA, Lvl0, Lvl1, or Lvl2 + | LOCK -- lock prefix + | XADD Size Operand Operand -- src (r), dst (r/m) + | CMPXCHG Size Operand Operand -- src (r), dst (r/m), eax implicit + data PrefetchVariant = NTA | Lvl0 | Lvl1 | Lvl2 @@ -336,6 +341,8 @@ data Operand +-- | Returns which registers are read and written as a (read, written) +-- pair. x86_regUsageOfInstr :: Platform -> Instr -> RegUsage x86_regUsageOfInstr platform instr = case instr of @@ -427,10 +434,21 @@ x86_regUsageOfInstr platform instr -- note: might be a better way to do this PREFETCH _ _ src -> mkRU (use_R src []) [] + LOCK -> noUsage + XADD _ src dst -> usageMM src dst + CMPXCHG _ src dst -> usageRMM src dst (OpReg eax) _other -> panic "regUsage: unrecognised instr" - where + -- # Definitions + -- + -- Written: If the operand is a register, it's written. If it's an + -- address, registers mentioned in the address are read. + -- + -- Modified: If the operand is a register, it's both read and + -- written. If it's an address, registers mentioned in the address + -- are read. + -- 2 operand form; first operand Read; second Written usageRW :: Operand -> Operand -> RegUsage usageRW op (OpReg reg) = mkRU (use_R op []) [reg] @@ -443,6 +461,18 @@ x86_regUsageOfInstr platform instr usageRM op (OpAddr ea) = mkRUR (use_R op $! 
use_EA ea []) usageRM _ _ = panic "X86.RegInfo.usageRM: no match" + -- 2 operand form; first operand Modified; second Modified + usageMM :: Operand -> Operand -> RegUsage + usageMM (OpReg src) (OpReg dst) = mkRU [src, dst] [src, dst] + usageMM (OpReg src) (OpAddr ea) = mkRU (use_EA ea [src]) [src] + usageMM _ _ = panic "X86.RegInfo.usageMM: no match" + + -- 3 operand form; first operand Read; second Modified; third Modified + usageRMM :: Operand -> Operand -> Operand -> RegUsage + usageRMM (OpReg src) (OpReg dst) (OpReg reg) = mkRU [src, dst, reg] [dst, reg] + usageRMM (OpReg src) (OpAddr ea) (OpReg reg) = mkRU (use_EA ea [src, reg]) [reg] + usageRMM _ _ _ = panic "X86.RegInfo.usageRMM: no match" + -- 1 operand form; operand Modified usageM :: Operand -> RegUsage usageM (OpReg reg) = mkRU [reg] [reg] @@ -475,6 +505,7 @@ x86_regUsageOfInstr platform instr where src' = filter (interesting platform) src dst' = filter (interesting platform) dst +-- | Is this register interesting for the register allocator? interesting :: Platform -> Reg -> Bool interesting _ (RegVirtual _) = True interesting platform (RegReal (RealRegSingle i)) = isFastTrue (freeReg platform i) @@ -482,6 +513,8 @@ interesting _ (RegReal (RealRegPair{})) = panic "X86.interesting: no re +-- | Applies the supplied function to all registers in instructions. +-- Typically used to change virtual registers to real registers. x86_patchRegsOfInstr :: Instr -> (Reg -> Reg) -> Instr x86_patchRegsOfInstr instr env = case instr of @@ -570,6 +603,10 @@ x86_patchRegsOfInstr instr env PREFETCH lvl size src -> PREFETCH lvl size (patchOp src) + LOCK -> instr + XADD sz src dst -> patch2 (XADD sz) src dst + CMPXCHG sz src dst -> patch2 (CMPXCHG sz) src dst + _other -> panic "patchRegs: unrecognised instr" where diff --git a/compiler/nativeGen/X86/Ppr.hs b/compiler/nativeGen/X86/Ppr.hs index f38a04d069..7771c02512 100644 --- a/compiler/nativeGen/X86/Ppr.hs +++ b/compiler/nativeGen/X86/Ppr.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Pretty-printing assembly language @@ -884,6 +886,14 @@ pprInstr GFREE ptext (sLit "\tffree %st(4) ;ffree %st(5)") ] +-- Atomics + +pprInstr LOCK = ptext (sLit "\tlock") + +pprInstr (XADD size src dst) = pprSizeOpOp (sLit "xadd") size src dst + +pprInstr (CMPXCHG size src dst) = pprSizeOpOp (sLit "cmpxchg") size src dst + pprInstr _ = panic "X86.Ppr.pprInstr: no match" diff --git a/compiler/nativeGen/X86/RegInfo.hs b/compiler/nativeGen/X86/RegInfo.hs index 8c63933c5b..0303295bc6 100644 --- a/compiler/nativeGen/X86/RegInfo.hs +++ b/compiler/nativeGen/X86/RegInfo.hs @@ -1,5 +1,5 @@ - -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/nativeGen/X86/Regs.hs b/compiler/nativeGen/X86/Regs.hs index 127a811831..4162e2b703 100644 --- a/compiler/nativeGen/X86/Regs.hs +++ b/compiler/nativeGen/X86/Regs.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + module X86.Regs ( -- squeese functions for the graph allocator virtualRegSqueeze, diff --git a/compiler/parser/Ctype.lhs b/compiler/parser/Ctype.lhs index b5173b2612..c024ebe45a 100644 --- a/compiler/parser/Ctype.lhs +++ b/compiler/parser/Ctype.lhs @@ -1,7 +1,8 @@ Character classification \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -18,7 +19,7 @@ module Ctype , is_digit -- Char# -> Bool , is_alphanum -- Char# -> Bool - , is_decdigit, is_hexdigit, is_octdigit + , is_decdigit, is_hexdigit, is_octdigit, is_bindigit , hexDigit, octDecDigit ) where @@ -86,6 +87,9 @@ is_hexdigit c is_octdigit :: Char -> Bool is_octdigit c = c >= '0' && c <= '7' +is_bindigit :: Char -> Bool +is_bindigit c = c == '0' || c == '1' + to_lower :: Char -> Char to_lower c | c >= 'A' && c <= 'Z' = chr (ord c - (ord 'A' - ord 'a')) diff --git a/compiler/parser/LexCore.hs b/compiler/parser/LexCore.hs deleted file mode 100644 index 861fffb7f6..0000000000 --- a/compiler/parser/LexCore.hs +++ /dev/null @@ -1,115 +0,0 @@ -module LexCore where - -import ParserCoreUtils -import Panic -import Data.Char -import Numeric - -isNameChar :: Char -> Bool -isNameChar c = isAlpha c || isDigit c || (c == '_') || (c == '\'') - || (c == '$') || (c == '-') || (c == '.') - -isKeywordChar :: Char -> Bool -isKeywordChar c = isAlpha c || (c == '_') - -lexer :: (Token -> P a) -> P a -lexer cont [] = cont TKEOF [] -lexer cont ('\n':cs) = \line -> lexer cont cs (line+1) -lexer cont ('-':'>':cs) = cont TKrarrow cs - -lexer cont (c:cs) - | isSpace c = lexer cont cs - | isLower c || (c == '_') = lexName cont TKname (c:cs) - | isUpper c = lexName cont TKcname (c:cs) - | isDigit c || (c == '-') = lexNum cont (c:cs) - -lexer cont ('%':cs) = lexKeyword cont cs -lexer cont ('\'':cs) = lexChar cont cs -lexer cont ('\"':cs) = lexString [] cont cs -lexer cont ('#':cs) = cont TKhash cs -lexer cont ('(':cs) = cont TKoparen cs -lexer cont (')':cs) = cont TKcparen cs -lexer cont ('{':cs) = cont TKobrace cs -lexer cont ('}':cs) = cont TKcbrace cs -lexer cont ('=':cs) = cont TKeq cs -lexer cont (':':'=':':':cs) = cont TKcoloneqcolon cs -lexer cont (':':':':cs) = cont TKcoloncolon cs -lexer cont ('*':cs) = cont TKstar cs -lexer cont ('.':cs) = cont TKdot cs -lexer cont ('\\':cs) = cont TKlambda cs -lexer cont ('@':cs) = cont TKat cs -lexer cont ('?':cs) = cont TKquestion cs -lexer cont (';':cs) = cont TKsemicolon cs --- 20060420 GHC spits out constructors with colon in them nowadays. jds --- 20061103 but it's easier to parse if we split on the colon, and treat them --- as several tokens -lexer cont (':':cs) = cont TKcolon cs --- 20060420 Likewise does it create identifiers starting with dollar. 
jds -lexer cont ('$':cs) = lexName cont TKname ('$':cs) -lexer _ (c:_) = failP "invalid character" [c] - -lexChar :: (Token -> String -> Int -> ParseResult a) -> String -> Int - -> ParseResult a -lexChar cont ('\\':'x':h1:h0:'\'':cs) - | isHexEscape [h1,h0] = cont (TKchar (hexToChar h1 h0)) cs -lexChar _ ('\\':cs) = failP "invalid char character" ('\\':(take 10 cs)) -lexChar _ ('\'':_) = failP "invalid char character" ['\''] -lexChar _ ('\"':_) = failP "invalid char character" ['\"'] -lexChar cont (c:'\'':cs) = cont (TKchar c) cs -lexChar _ cs = panic ("lexChar: " ++ show cs) - -lexString :: String -> (Token -> [Char] -> Int -> ParseResult a) - -> String -> Int -> ParseResult a -lexString s cont ('\\':'x':h1:h0:cs) - | isHexEscape [h1,h0] = lexString (s++[hexToChar h1 h0]) cont cs -lexString _ _ ('\\':_) = failP "invalid string character" ['\\'] -lexString _ _ ('\'':_) = failP "invalid string character" ['\''] -lexString s cont ('\"':cs) = cont (TKstring s) cs -lexString s cont (c:cs) = lexString (s++[c]) cont cs -lexString _ _ [] = panic "lexString []" - -isHexEscape :: String -> Bool -isHexEscape = all (\c -> isHexDigit c && (isDigit c || isLower c)) - -hexToChar :: Char -> Char -> Char -hexToChar h1 h0 = chr (digitToInt h1 * 16 + digitToInt h0) - -lexNum :: (Token -> String -> a) -> String -> a -lexNum cont cs = - case cs of - ('-':cs) -> f (-1) cs - _ -> f 1 cs - where f sgn cs = - case span isDigit cs of - (digits,'.':c:rest) - | isDigit c -> cont (TKrational (fromInteger sgn * r)) rest' - where ((r,rest'):_) = readFloat (digits ++ ('.':c:rest)) - -- When reading a floating-point number, which is - -- a bit complicated, use the standard library function - -- "readFloat" - (digits,rest) -> cont (TKinteger (sgn * (read digits))) rest - -lexName :: (a -> String -> b) -> (String -> a) -> String -> b -lexName cont cstr cs = cont (cstr name) rest - where (name,rest) = span isNameChar cs - -lexKeyword :: (Token -> [Char] -> Int -> ParseResult a) -> String -> Int - -> ParseResult a -lexKeyword cont cs = - case span isKeywordChar cs of - ("module",rest) -> cont TKmodule rest - ("data",rest) -> cont TKdata rest - ("newtype",rest) -> cont TKnewtype rest - ("forall",rest) -> cont TKforall rest - ("rec",rest) -> cont TKrec rest - ("let",rest) -> cont TKlet rest - ("in",rest) -> cont TKin rest - ("case",rest) -> cont TKcase rest - ("of",rest) -> cont TKof rest - ("cast",rest) -> cont TKcast rest - ("note",rest) -> cont TKnote rest - ("external",rest) -> cont TKexternal rest - ("local",rest) -> cont TKlocal rest - ("_",rest) -> cont TKwild rest - _ -> failP "invalid keyword" ('%':cs) - diff --git a/compiler/parser/Lexer.x b/compiler/parser/Lexer.x index 3d02393d17..fe3d6a5d2b 100644 --- a/compiler/parser/Lexer.x +++ b/compiler/parser/Lexer.x @@ -117,6 +117,7 @@ $small = [$ascsmall $unismall \_] $unigraphic = \x06 -- Trick Alex into handling Unicode. See alexGetChar. $graphic = [$small $large $symbol $digit $special $unigraphic \:\"\'] +$binit = 0-1 $octit = 0-7 $hexit = [$decdigit A-F a-f] $symchar = [$symbol \:] @@ -134,6 +135,7 @@ $docsym = [\| \^ \* \$] @consym = \: $symchar* @decimal = $decdigit+ +@binary = $binit+ @octal = $octit+ @hexadecimal = $hexit+ @exponent = [eE] [\-\+]? 
@decimal @@ -401,9 +403,12 @@ $tab+ { warn Opt_WarnTabs (text "Tab character") } <0> { -- Normal integral literals (:: Num a => a, from Integer) @decimal { tok_num positive 0 0 decimal } + 0[bB] @binary / { ifExtension binaryLiteralsEnabled } { tok_num positive 2 2 binary } 0[oO] @octal { tok_num positive 2 2 octal } 0[xX] @hexadecimal { tok_num positive 2 2 hexadecimal } @negative @decimal / { ifExtension negativeLiteralsEnabled } { tok_num negative 1 1 decimal } + @negative 0[bB] @binary / { ifExtension negativeLiteralsEnabled `alexAndPred` + ifExtension binaryLiteralsEnabled } { tok_num negative 3 3 binary } @negative 0[oO] @octal / { ifExtension negativeLiteralsEnabled } { tok_num negative 3 3 octal } @negative 0[xX] @hexadecimal / { ifExtension negativeLiteralsEnabled } { tok_num negative 3 3 hexadecimal } @@ -417,13 +422,19 @@ $tab+ { warn Opt_WarnTabs (text "Tab character") } -- It's simpler (and faster?) to give separate cases to the negatives, -- especially considering octal/hexadecimal prefixes. @decimal \# / { ifExtension magicHashEnabled } { tok_primint positive 0 1 decimal } + 0[bB] @binary \# / { ifExtension magicHashEnabled `alexAndPred` + ifExtension binaryLiteralsEnabled } { tok_primint positive 2 3 binary } 0[oO] @octal \# / { ifExtension magicHashEnabled } { tok_primint positive 2 3 octal } 0[xX] @hexadecimal \# / { ifExtension magicHashEnabled } { tok_primint positive 2 3 hexadecimal } @negative @decimal \# / { ifExtension magicHashEnabled } { tok_primint negative 1 2 decimal } + @negative 0[bB] @binary \# / { ifExtension magicHashEnabled `alexAndPred` + ifExtension binaryLiteralsEnabled } { tok_primint negative 3 4 binary } @negative 0[oO] @octal \# / { ifExtension magicHashEnabled } { tok_primint negative 3 4 octal } @negative 0[xX] @hexadecimal \# / { ifExtension magicHashEnabled } { tok_primint negative 3 4 hexadecimal } @decimal \# \# / { ifExtension magicHashEnabled } { tok_primword 0 2 decimal } + 0[bB] @binary \# \# / { ifExtension magicHashEnabled `alexAndPred` + ifExtension binaryLiteralsEnabled } { tok_primword 2 4 binary } 0[oO] @octal \# \# / { ifExtension magicHashEnabled } { tok_primword 2 4 octal } 0[xX] @hexadecimal \# \# / { ifExtension magicHashEnabled } { tok_primword 2 4 hexadecimal } @@ -516,6 +527,9 @@ data Token | ITvect_scalar_prag | ITnovect_prag | ITminimal_prag + | ITno_overlap_prag -- instance overlap mode + | IToverlap_prag -- instance overlap mode + | ITincoherent_prag -- instance overlap mode | ITctype | ITdotdot -- reserved symbols @@ -635,7 +649,7 @@ data Token -- facilitates using a keyword in two different extensions that can be -- activated independently) -- -reservedWordsFM :: UniqFM (Token, Int) +reservedWordsFM :: UniqFM (Token, ExtsBitmap) reservedWordsFM = listToUFM $ map (\(x, y, z) -> (mkFastString x, (y, z))) [( "_", ITunderscore, 0 ), @@ -664,34 +678,34 @@ reservedWordsFM = listToUFM $ ( "type", ITtype, 0 ), ( "where", ITwhere, 0 ), - ( "forall", ITforall, bit explicitForallBit .|. - bit inRulePragBit), - ( "mdo", ITmdo, bit recursiveDoBit), + ( "forall", ITforall, xbit ExplicitForallBit .|. 
+ xbit InRulePragBit), + ( "mdo", ITmdo, xbit RecursiveDoBit), -- See Note [Lexing type pseudo-keywords] ( "family", ITfamily, 0 ), ( "role", ITrole, 0 ), - ( "pattern", ITpattern, bit patternSynonymsBit), - ( "group", ITgroup, bit transformComprehensionsBit), - ( "by", ITby, bit transformComprehensionsBit), - ( "using", ITusing, bit transformComprehensionsBit), - - ( "foreign", ITforeign, bit ffiBit), - ( "export", ITexport, bit ffiBit), - ( "label", ITlabel, bit ffiBit), - ( "dynamic", ITdynamic, bit ffiBit), - ( "safe", ITsafe, bit ffiBit .|. - bit safeHaskellBit), - ( "interruptible", ITinterruptible, bit interruptibleFfiBit), - ( "unsafe", ITunsafe, bit ffiBit), - ( "stdcall", ITstdcallconv, bit ffiBit), - ( "ccall", ITccallconv, bit ffiBit), - ( "capi", ITcapiconv, bit cApiFfiBit), - ( "prim", ITprimcallconv, bit ffiBit), - ( "javascript", ITjavascriptcallconv, bit ffiBit), - - ( "rec", ITrec, bit arrowsBit .|. - bit recursiveDoBit), - ( "proc", ITproc, bit arrowsBit) + ( "pattern", ITpattern, xbit PatternSynonymsBit), + ( "group", ITgroup, xbit TransformComprehensionsBit), + ( "by", ITby, xbit TransformComprehensionsBit), + ( "using", ITusing, xbit TransformComprehensionsBit), + + ( "foreign", ITforeign, xbit FfiBit), + ( "export", ITexport, xbit FfiBit), + ( "label", ITlabel, xbit FfiBit), + ( "dynamic", ITdynamic, xbit FfiBit), + ( "safe", ITsafe, xbit FfiBit .|. + xbit SafeHaskellBit), + ( "interruptible", ITinterruptible, xbit InterruptibleFfiBit), + ( "unsafe", ITunsafe, xbit FfiBit), + ( "stdcall", ITstdcallconv, xbit FfiBit), + ( "ccall", ITccallconv, xbit FfiBit), + ( "capi", ITcapiconv, xbit CApiFfiBit), + ( "prim", ITprimcallconv, xbit FfiBit), + ( "javascript", ITjavascriptcallconv, xbit FfiBit), + + ( "rec", ITrec, xbit ArrowsBit .|. + xbit RecursiveDoBit), + ( "proc", ITproc, xbit ArrowsBit) ] {----------------------------------- @@ -711,7 +725,7 @@ Also, note that these are included in the `varid` production in the parser -- a key detail to make all this work. -------------------------------------} -reservedSymsFM :: UniqFM (Token, Int -> Bool) +reservedSymsFM :: UniqFM (Token, ExtsBitmap -> Bool) reservedSymsFM = listToUFM $ map (\ (x,y,z) -> (mkFastString x,(y,z))) [ ("..", ITdotdot, always) @@ -822,11 +836,11 @@ nextCharIs buf p = not (atEnd buf) && p (currentChar buf) nextCharIsNot :: StringBuffer -> (Char -> Bool) -> Bool nextCharIsNot buf p = not (nextCharIs buf p) -notFollowedBy :: Char -> AlexAccPred Int +notFollowedBy :: Char -> AlexAccPred ExtsBitmap notFollowedBy char _ _ _ (AI _ buf) = nextCharIsNot buf (== char) -notFollowedBySymbol :: AlexAccPred Int +notFollowedBySymbol :: AlexAccPred ExtsBitmap notFollowedBySymbol _ _ _ (AI _ buf) = nextCharIsNot buf (`elem` "!#$%&*+./<=>?@\\^|-~") @@ -835,7 +849,7 @@ notFollowedBySymbol _ _ _ (AI _ buf) -- maximal munch, but not always, because the nested comment rule is -- valid in all states, but the doc-comment rules are only valid in -- the non-layout states. 
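This and the following lexer hunks replace the hand-numbered Int bit positions with an ExtBits enumeration indexed into a Word64 ExtsBitmap through xbit and xtest. The pattern in isolation, using toy flag names rather than GHC's (sketch only):

import Data.Bits (bit, testBit, (.|.))
import Data.Word (Word64)

data Flag = FlagA | FlagB | FlagC deriving Enum   -- stand-in for ExtBits

type FlagMap = Word64                             -- stand-in for ExtsBitmap

fbit :: Flag -> FlagMap                           -- cf. xbit
fbit = bit . fromEnum

ftest :: Flag -> FlagMap -> Bool                  -- cf. xtest
ftest f m = testBit m (fromEnum f)

-- ftest FlagB (fbit FlagA .|. fbit FlagB) == True, and adding a new
-- flag is just a new constructor: no manual renumbering to keep in sync.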
-isNormalComment :: AlexAccPred Int +isNormalComment :: AlexAccPred ExtsBitmap isNormalComment bits _ _ (AI _ buf) | haddockEnabled bits = notFollowedByDocOrPragma | otherwise = nextCharIsNot buf (== '#') @@ -849,10 +863,10 @@ afterOptionalSpace buf p then p (snd (nextChar buf)) else p buf -atEOL :: AlexAccPred Int +atEOL :: AlexAccPred ExtsBitmap atEOL _ _ _ (AI _ buf) = atEnd buf || currentChar buf == '\n' -ifExtension :: (Int -> Bool) -> AlexAccPred Int +ifExtension :: (ExtsBitmap -> Bool) -> AlexAccPred ExtsBitmap ifExtension pred bits _ _ _ = pred bits multiline_doc_comment :: Action @@ -954,12 +968,12 @@ withLexedDocType lexDocComment = do -- off again at the end of the pragma. rulePrag :: Action rulePrag span _buf _len = do - setExts (.|. bit inRulePragBit) + setExts (.|. xbit InRulePragBit) return (L span ITrules_prag) endPrag :: Action endPrag span _buf _len = do - setExts (.&. complement (bit inRulePragBit)) + setExts (.&. complement (xbit InRulePragBit)) return (L span ITclose_prag) -- docCommentEnd @@ -1112,6 +1126,7 @@ positive = id negative = negate decimal, octal, hexadecimal :: (Integer, Char -> Int) decimal = (10,octDecDigit) +binary = (2,octDecDigit) octal = (8,octDecDigit) hexadecimal = (16,hexDigit) @@ -1410,6 +1425,7 @@ lex_escape = do 'x' -> readNum is_hexdigit 16 hexDigit 'o' -> readNum is_octdigit 8 octDecDigit + 'b' -> readNum is_bindigit 2 octDecDigit x | is_decdigit x -> readNum2 is_decdigit 10 octDecDigit (octDecDigit x) c1 -> do @@ -1592,7 +1608,7 @@ data PState = PState { last_loc :: RealSrcSpan, -- pos of previous token last_len :: !Int, -- len of previous token loc :: RealSrcLoc, -- current loc (end of prev token + 1) - extsBitmap :: !Int, -- bitmap that determines permitted + extsBitmap :: !ExtsBitmap, -- bitmap that determines permitted -- extensions context :: [LayoutContext], lex_state :: [Int], @@ -1669,13 +1685,13 @@ withThisPackage f = do pkg <- liftM thisPackage getDynFlags return $ f pkg -extension :: (Int -> Bool) -> P Bool +extension :: (ExtsBitmap -> Bool) -> P Bool extension p = P $ \s -> POk s (p $! extsBitmap s) -getExts :: P Int +getExts :: P ExtsBitmap getExts = P $ \s -> POk s (extsBitmap s) -setExts :: (Int -> Int) -> P () +setExts :: (ExtsBitmap -> ExtsBitmap) -> P () setExts f = P $ \s -> POk s{ extsBitmap = f (extsBitmap s) } () setSrcLoc :: RealSrcLoc -> P () @@ -1855,130 +1871,110 @@ setAlrExpectingOCurly b = P $ \s -> POk (s {alr_expecting_ocurly = b}) () -- for reasons of efficiency, flags indicating language extensions (eg, -- -fglasgow-exts or -XParallelArrays) are represented by a bitmap --- stored in an unboxed Int - -ffiBit :: Int -ffiBit= 0 -interruptibleFfiBit :: Int -interruptibleFfiBit = 1 -cApiFfiBit :: Int -cApiFfiBit = 2 -parrBit :: Int -parrBit = 3 -arrowsBit :: Int -arrowsBit = 4 -thBit :: Int -thBit = 5 -ipBit :: Int -ipBit = 6 -explicitForallBit :: Int -explicitForallBit = 7 -- the 'forall' keyword and '.' 
symbol -bangPatBit :: Int -bangPatBit = 8 -- Tells the parser to understand bang-patterns - -- (doesn't affect the lexer) -patternSynonymsBit :: Int -patternSynonymsBit = 9 -- pattern synonyms -haddockBit :: Int -haddockBit = 10 -- Lex and parse Haddock comments -magicHashBit :: Int -magicHashBit = 11 -- "#" in both functions and operators -kindSigsBit :: Int -kindSigsBit = 12 -- Kind signatures on type variables -recursiveDoBit :: Int -recursiveDoBit = 13 -- mdo -unicodeSyntaxBit :: Int -unicodeSyntaxBit = 14 -- the forall symbol, arrow symbols, etc -unboxedTuplesBit :: Int -unboxedTuplesBit = 15 -- (# and #) -datatypeContextsBit :: Int -datatypeContextsBit = 16 -transformComprehensionsBit :: Int -transformComprehensionsBit = 17 -qqBit :: Int -qqBit = 18 -- enable quasiquoting -inRulePragBit :: Int -inRulePragBit = 19 -rawTokenStreamBit :: Int -rawTokenStreamBit = 20 -- producing a token stream with all comments included -sccProfilingOnBit :: Int -sccProfilingOnBit = 21 -hpcBit :: Int -hpcBit = 22 -alternativeLayoutRuleBit :: Int -alternativeLayoutRuleBit = 23 -relaxedLayoutBit :: Int -relaxedLayoutBit = 24 -nondecreasingIndentationBit :: Int -nondecreasingIndentationBit = 25 -safeHaskellBit :: Int -safeHaskellBit = 26 -traditionalRecordSyntaxBit :: Int -traditionalRecordSyntaxBit = 27 -typeLiteralsBit :: Int -typeLiteralsBit = 28 -explicitNamespacesBit :: Int -explicitNamespacesBit = 29 -lambdaCaseBit :: Int -lambdaCaseBit = 30 -negativeLiteralsBit :: Int -negativeLiteralsBit = 31 - - -always :: Int -> Bool +-- stored in an unboxed Word64 +type ExtsBitmap = Word64 + +xbit :: ExtBits -> ExtsBitmap +xbit = bit . fromEnum + +xtest :: ExtBits -> ExtsBitmap -> Bool +xtest ext xmap = testBit xmap (fromEnum ext) + +data ExtBits + = FfiBit + | InterruptibleFfiBit + | CApiFfiBit + | ParrBit + | ArrowsBit + | ThBit + | IpBit + | ExplicitForallBit -- the 'forall' keyword and '.' 
symbol + | BangPatBit -- Tells the parser to understand bang-patterns + -- (doesn't affect the lexer) + | PatternSynonymsBit -- pattern synonyms + | HaddockBit-- Lex and parse Haddock comments + | MagicHashBit -- "#" in both functions and operators + | KindSigsBit -- Kind signatures on type variables + | RecursiveDoBit -- mdo + | UnicodeSyntaxBit -- the forall symbol, arrow symbols, etc + | UnboxedTuplesBit -- (# and #) + | DatatypeContextsBit + | TransformComprehensionsBit + | QqBit -- enable quasiquoting + | InRulePragBit + | RawTokenStreamBit -- producing a token stream with all comments included + | SccProfilingOnBit + | HpcBit + | AlternativeLayoutRuleBit + | RelaxedLayoutBit + | NondecreasingIndentationBit + | SafeHaskellBit + | TraditionalRecordSyntaxBit + | TypeLiteralsBit + | ExplicitNamespacesBit + | LambdaCaseBit + | BinaryLiteralsBit + | NegativeLiteralsBit + deriving Enum + + +always :: ExtsBitmap -> Bool always _ = True -parrEnabled :: Int -> Bool -parrEnabled flags = testBit flags parrBit -arrowsEnabled :: Int -> Bool -arrowsEnabled flags = testBit flags arrowsBit -thEnabled :: Int -> Bool -thEnabled flags = testBit flags thBit -ipEnabled :: Int -> Bool -ipEnabled flags = testBit flags ipBit -explicitForallEnabled :: Int -> Bool -explicitForallEnabled flags = testBit flags explicitForallBit -bangPatEnabled :: Int -> Bool -bangPatEnabled flags = testBit flags bangPatBit -haddockEnabled :: Int -> Bool -haddockEnabled flags = testBit flags haddockBit -magicHashEnabled :: Int -> Bool -magicHashEnabled flags = testBit flags magicHashBit --- kindSigsEnabled :: Int -> Bool --- kindSigsEnabled flags = testBit flags kindSigsBit -unicodeSyntaxEnabled :: Int -> Bool -unicodeSyntaxEnabled flags = testBit flags unicodeSyntaxBit -unboxedTuplesEnabled :: Int -> Bool -unboxedTuplesEnabled flags = testBit flags unboxedTuplesBit -datatypeContextsEnabled :: Int -> Bool -datatypeContextsEnabled flags = testBit flags datatypeContextsBit -qqEnabled :: Int -> Bool -qqEnabled flags = testBit flags qqBit -inRulePrag :: Int -> Bool -inRulePrag flags = testBit flags inRulePragBit -rawTokenStreamEnabled :: Int -> Bool -rawTokenStreamEnabled flags = testBit flags rawTokenStreamBit -alternativeLayoutRule :: Int -> Bool -alternativeLayoutRule flags = testBit flags alternativeLayoutRuleBit -hpcEnabled :: Int -> Bool -hpcEnabled flags = testBit flags hpcBit -relaxedLayout :: Int -> Bool -relaxedLayout flags = testBit flags relaxedLayoutBit -nondecreasingIndentation :: Int -> Bool -nondecreasingIndentation flags = testBit flags nondecreasingIndentationBit -sccProfilingOn :: Int -> Bool -sccProfilingOn flags = testBit flags sccProfilingOnBit -traditionalRecordSyntaxEnabled :: Int -> Bool -traditionalRecordSyntaxEnabled flags = testBit flags traditionalRecordSyntaxBit -typeLiteralsEnabled :: Int -> Bool -typeLiteralsEnabled flags = testBit flags typeLiteralsBit - -explicitNamespacesEnabled :: Int -> Bool -explicitNamespacesEnabled flags = testBit flags explicitNamespacesBit -lambdaCaseEnabled :: Int -> Bool -lambdaCaseEnabled flags = testBit flags lambdaCaseBit -negativeLiteralsEnabled :: Int -> Bool -negativeLiteralsEnabled flags = testBit flags negativeLiteralsBit -patternSynonymsEnabled :: Int -> Bool -patternSynonymsEnabled flags = testBit flags patternSynonymsBit +parrEnabled :: ExtsBitmap -> Bool +parrEnabled = xtest ParrBit +arrowsEnabled :: ExtsBitmap -> Bool +arrowsEnabled = xtest ArrowsBit +thEnabled :: ExtsBitmap -> Bool +thEnabled = xtest ThBit +ipEnabled :: ExtsBitmap -> Bool +ipEnabled = xtest 
IpBit +explicitForallEnabled :: ExtsBitmap -> Bool +explicitForallEnabled = xtest ExplicitForallBit +bangPatEnabled :: ExtsBitmap -> Bool +bangPatEnabled = xtest BangPatBit +haddockEnabled :: ExtsBitmap -> Bool +haddockEnabled = xtest HaddockBit +magicHashEnabled :: ExtsBitmap -> Bool +magicHashEnabled = xtest MagicHashBit +-- kindSigsEnabled :: ExtsBitmap -> Bool +-- kindSigsEnabled = xtest KindSigsBit +unicodeSyntaxEnabled :: ExtsBitmap -> Bool +unicodeSyntaxEnabled = xtest UnicodeSyntaxBit +unboxedTuplesEnabled :: ExtsBitmap -> Bool +unboxedTuplesEnabled = xtest UnboxedTuplesBit +datatypeContextsEnabled :: ExtsBitmap -> Bool +datatypeContextsEnabled = xtest DatatypeContextsBit +qqEnabled :: ExtsBitmap -> Bool +qqEnabled = xtest QqBit +inRulePrag :: ExtsBitmap -> Bool +inRulePrag = xtest InRulePragBit +rawTokenStreamEnabled :: ExtsBitmap -> Bool +rawTokenStreamEnabled = xtest RawTokenStreamBit +alternativeLayoutRule :: ExtsBitmap -> Bool +alternativeLayoutRule = xtest AlternativeLayoutRuleBit +hpcEnabled :: ExtsBitmap -> Bool +hpcEnabled = xtest HpcBit +relaxedLayout :: ExtsBitmap -> Bool +relaxedLayout = xtest RelaxedLayoutBit +nondecreasingIndentation :: ExtsBitmap -> Bool +nondecreasingIndentation = xtest NondecreasingIndentationBit +sccProfilingOn :: ExtsBitmap -> Bool +sccProfilingOn = xtest SccProfilingOnBit +traditionalRecordSyntaxEnabled :: ExtsBitmap -> Bool +traditionalRecordSyntaxEnabled = xtest TraditionalRecordSyntaxBit +typeLiteralsEnabled :: ExtsBitmap -> Bool +typeLiteralsEnabled = xtest TypeLiteralsBit + +explicitNamespacesEnabled :: ExtsBitmap -> Bool +explicitNamespacesEnabled = xtest ExplicitNamespacesBit +lambdaCaseEnabled :: ExtsBitmap -> Bool +lambdaCaseEnabled = xtest LambdaCaseBit +binaryLiteralsEnabled :: ExtsBitmap -> Bool +binaryLiteralsEnabled = xtest BinaryLiteralsBit +negativeLiteralsEnabled :: ExtsBitmap -> Bool +negativeLiteralsEnabled = xtest NegativeLiteralsBit +patternSynonymsEnabled :: ExtsBitmap -> Bool +patternSynonymsEnabled = xtest PatternSynonymsBit -- PState for parsing options pragmas -- @@ -1999,7 +1995,7 @@ mkPState flags buf loc = last_loc = mkRealSrcSpan loc loc, last_len = 0, loc = loc, - extsBitmap = fromIntegral bitmap, + extsBitmap = bitmap, context = [], lex_state = [bol, 0], srcfiles = [], @@ -2011,41 +2007,42 @@ mkPState flags buf loc = alr_justClosedExplicitLetBlock = False } where - bitmap = ffiBit `setBitIf` xopt Opt_ForeignFunctionInterface flags - .|. interruptibleFfiBit `setBitIf` xopt Opt_InterruptibleFFI flags - .|. cApiFfiBit `setBitIf` xopt Opt_CApiFFI flags - .|. parrBit `setBitIf` xopt Opt_ParallelArrays flags - .|. arrowsBit `setBitIf` xopt Opt_Arrows flags - .|. thBit `setBitIf` xopt Opt_TemplateHaskell flags - .|. qqBit `setBitIf` xopt Opt_QuasiQuotes flags - .|. ipBit `setBitIf` xopt Opt_ImplicitParams flags - .|. explicitForallBit `setBitIf` xopt Opt_ExplicitForAll flags - .|. bangPatBit `setBitIf` xopt Opt_BangPatterns flags - .|. haddockBit `setBitIf` gopt Opt_Haddock flags - .|. magicHashBit `setBitIf` xopt Opt_MagicHash flags - .|. kindSigsBit `setBitIf` xopt Opt_KindSignatures flags - .|. recursiveDoBit `setBitIf` xopt Opt_RecursiveDo flags - .|. unicodeSyntaxBit `setBitIf` xopt Opt_UnicodeSyntax flags - .|. unboxedTuplesBit `setBitIf` xopt Opt_UnboxedTuples flags - .|. datatypeContextsBit `setBitIf` xopt Opt_DatatypeContexts flags - .|. transformComprehensionsBit `setBitIf` xopt Opt_TransformListComp flags - .|. transformComprehensionsBit `setBitIf` xopt Opt_MonadComprehensions flags - .|. 
rawTokenStreamBit `setBitIf` gopt Opt_KeepRawTokenStream flags - .|. hpcBit `setBitIf` gopt Opt_Hpc flags - .|. alternativeLayoutRuleBit `setBitIf` xopt Opt_AlternativeLayoutRule flags - .|. relaxedLayoutBit `setBitIf` xopt Opt_RelaxedLayout flags - .|. sccProfilingOnBit `setBitIf` gopt Opt_SccProfilingOn flags - .|. nondecreasingIndentationBit `setBitIf` xopt Opt_NondecreasingIndentation flags - .|. safeHaskellBit `setBitIf` safeImportsOn flags - .|. traditionalRecordSyntaxBit `setBitIf` xopt Opt_TraditionalRecordSyntax flags - .|. typeLiteralsBit `setBitIf` xopt Opt_DataKinds flags - .|. explicitNamespacesBit `setBitIf` xopt Opt_ExplicitNamespaces flags - .|. lambdaCaseBit `setBitIf` xopt Opt_LambdaCase flags - .|. negativeLiteralsBit `setBitIf` xopt Opt_NegativeLiterals flags - .|. patternSynonymsBit `setBitIf` xopt Opt_PatternSynonyms flags + bitmap = FfiBit `setBitIf` xopt Opt_ForeignFunctionInterface flags + .|. InterruptibleFfiBit `setBitIf` xopt Opt_InterruptibleFFI flags + .|. CApiFfiBit `setBitIf` xopt Opt_CApiFFI flags + .|. ParrBit `setBitIf` xopt Opt_ParallelArrays flags + .|. ArrowsBit `setBitIf` xopt Opt_Arrows flags + .|. ThBit `setBitIf` xopt Opt_TemplateHaskell flags + .|. QqBit `setBitIf` xopt Opt_QuasiQuotes flags + .|. IpBit `setBitIf` xopt Opt_ImplicitParams flags + .|. ExplicitForallBit `setBitIf` xopt Opt_ExplicitForAll flags + .|. BangPatBit `setBitIf` xopt Opt_BangPatterns flags + .|. HaddockBit `setBitIf` gopt Opt_Haddock flags + .|. MagicHashBit `setBitIf` xopt Opt_MagicHash flags + .|. KindSigsBit `setBitIf` xopt Opt_KindSignatures flags + .|. RecursiveDoBit `setBitIf` xopt Opt_RecursiveDo flags + .|. UnicodeSyntaxBit `setBitIf` xopt Opt_UnicodeSyntax flags + .|. UnboxedTuplesBit `setBitIf` xopt Opt_UnboxedTuples flags + .|. DatatypeContextsBit `setBitIf` xopt Opt_DatatypeContexts flags + .|. TransformComprehensionsBit `setBitIf` xopt Opt_TransformListComp flags + .|. TransformComprehensionsBit `setBitIf` xopt Opt_MonadComprehensions flags + .|. RawTokenStreamBit `setBitIf` gopt Opt_KeepRawTokenStream flags + .|. HpcBit `setBitIf` gopt Opt_Hpc flags + .|. AlternativeLayoutRuleBit `setBitIf` xopt Opt_AlternativeLayoutRule flags + .|. RelaxedLayoutBit `setBitIf` xopt Opt_RelaxedLayout flags + .|. SccProfilingOnBit `setBitIf` gopt Opt_SccProfilingOn flags + .|. NondecreasingIndentationBit `setBitIf` xopt Opt_NondecreasingIndentation flags + .|. SafeHaskellBit `setBitIf` safeImportsOn flags + .|. TraditionalRecordSyntaxBit `setBitIf` xopt Opt_TraditionalRecordSyntax flags + .|. TypeLiteralsBit `setBitIf` xopt Opt_DataKinds flags + .|. ExplicitNamespacesBit `setBitIf` xopt Opt_ExplicitNamespaces flags + .|. LambdaCaseBit `setBitIf` xopt Opt_LambdaCase flags + .|. BinaryLiteralsBit `setBitIf` xopt Opt_BinaryLiterals flags + .|. NegativeLiteralsBit `setBitIf` xopt Opt_NegativeLiterals flags + .|. 
PatternSynonymsBit `setBitIf` xopt Opt_PatternSynonyms flags -- - setBitIf :: Int -> Bool -> Int - b `setBitIf` cond | cond = bit b + setBitIf :: ExtBits -> Bool -> ExtsBitmap + b `setBitIf` cond | cond = xbit b | otherwise = 0 addWarning :: WarningFlag -> SrcSpan -> SDoc -> P () @@ -2434,6 +2431,9 @@ oneWordPrags = Map.fromList([("rules", rulePrag), ("vectorize", token ITvect_prag), ("novectorize", token ITnovect_prag), ("minimal", token ITminimal_prag), + ("no_overlap", token ITno_overlap_prag), + ("overlap", token IToverlap_prag), + ("incoherent", token ITincoherent_prag), ("ctype", token ITctype)]) twoWordPrags = Map.fromList([("inline conlike", token (ITinline_prag Inline ConLike)), @@ -2447,7 +2447,7 @@ dispatch_pragmas prags span buf len = case Map.lookup (clean_pragma (lexemeToStr Just found -> found span buf len Nothing -> lexError "unknown pragma" -known_pragma :: Map String Action -> AlexAccPred Int +known_pragma :: Map String Action -> AlexAccPred ExtsBitmap known_pragma prags _ (AI _ startbuf) _ (AI _ curbuf) = isKnown && nextCharIsNot curbuf pragmaNameChar where l = lexemeToString startbuf (byteDiff startbuf curbuf) diff --git a/compiler/parser/Parser.y.pp b/compiler/parser/Parser.y.pp index 4f4ec0b123..a3c68c3e59 100644 --- a/compiler/parser/Parser.y.pp +++ b/compiler/parser/Parser.y.pp @@ -269,6 +269,9 @@ incorrect. '{-# NOVECTORISE' { L _ ITnovect_prag } '{-# MINIMAL' { L _ ITminimal_prag } '{-# CTYPE' { L _ ITctype } + '{-# NO_OVERLAP' { L _ ITno_overlap_prag } + '{-# OVERLAP' { L _ IToverlap_prag } + '{-# INCOHERENT' { L _ ITincoherent_prag } '#-}' { L _ ITclose_prag } '..' { L _ ITdotdot } -- reserved symbols @@ -654,12 +657,13 @@ ty_decl :: { LTyClDecl RdrName } {% mkFamDecl (comb3 $1 $2 $4) DataFamily $3 (unLoc $4) } inst_decl :: { LInstDecl RdrName } - : 'instance' inst_type where_inst - { let (binds, sigs, _, ats, adts, _) = cvBindsAndSigs (unLoc $3) in - let cid = ClsInstDecl { cid_poly_ty = $2, cid_binds = binds + : 'instance' overlap_pragma inst_type where_inst + { let (binds, sigs, _, ats, adts, _) = cvBindsAndSigs (unLoc $4) in + let cid = ClsInstDecl { cid_poly_ty = $3, cid_binds = binds , cid_sigs = sigs, cid_tyfam_insts = ats + , cid_overlap_mode = $2 , cid_datafam_insts = adts } - in L (comb3 $1 $2 $3) (ClsInstD { cid_inst = cid }) } + in L (comb3 $1 $3 $4) (ClsInstD { cid_inst = cid }) } -- type instance declarations | 'type' 'instance' ty_fam_inst_eqn @@ -677,6 +681,13 @@ inst_decl :: { LInstDecl RdrName } {% mkDataFamInst (comb4 $1 $4 $6 $7) (unLoc $1) $3 $4 (unLoc $5) (unLoc $6) (unLoc $7) } +overlap_pragma :: { Maybe OverlapMode } + : '{-# OVERLAP' '#-}' { Just OverlapOk } + | '{-# INCOHERENT' '#-}' { Just Incoherent } + | '{-# NO_OVERLAP' '#-}' { Just NoOverlap } + | {- empty -} { Nothing } + + -- Closed type families where_type_family :: { Located (FamilyInfo RdrName) } @@ -783,7 +794,7 @@ capi_ctype : '{-# CTYPE' STRING STRING '#-}' { Just (CType (Just (Header (getSTR -- Glasgow extension: stand-alone deriving declarations stand_alone_deriving :: { LDerivDecl RdrName } - : 'deriving' 'instance' inst_type { LL (DerivDecl $3) } + : 'deriving' 'instance' overlap_pragma inst_type { LL (DerivDecl $4 $3) } ----------------------------------------------------------------------------- -- Role annotations diff --git a/compiler/parser/ParserCore.y b/compiler/parser/ParserCore.y deleted file mode 100644 index 4e7f48c6fc..0000000000 --- a/compiler/parser/ParserCore.y +++ /dev/null @@ -1,397 +0,0 @@ -{ -{-# LANGUAGE BangPatterns #-} -- required for versions of 
Happy before 1.18.6 -{-# OPTIONS -Wwarn -w #-} --- The above warning supression flag is a temporary kludge. --- While working on this module you are encouraged to remove it and fix --- any warnings in the module. See --- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#Warnings --- for details - -module ParserCore ( parseCore ) where - -import IfaceSyn -import ForeignCall -import RdrHsSyn -import HsSyn hiding (toHsType, toHsKind) -import RdrName -import OccName -import TypeRep ( TyThing(..) ) -import Type ( Kind, - liftedTypeKindTyCon, openTypeKindTyCon, unliftedTypeKindTyCon, - mkTyConApp - ) -import Kind( mkArrowKind ) -import Name( Name, nameOccName, nameModule, mkExternalName, wiredInNameTyThing_maybe ) -import Module -import ParserCoreUtils -import LexCore -import Literal -import SrcLoc -import PrelNames -import TysPrim -import TyCon ( TyCon, tyConName ) -import FastString -import Outputable -import Data.Char -import Unique - -#include "../HsVersions.h" - -} - -%name parseCore -%expect 0 -%tokentype { Token } - -%token - '%module' { TKmodule } - '%data' { TKdata } - '%newtype' { TKnewtype } - '%forall' { TKforall } - '%rec' { TKrec } - '%let' { TKlet } - '%in' { TKin } - '%case' { TKcase } - '%of' { TKof } - '%cast' { TKcast } - '%note' { TKnote } - '%external' { TKexternal } - '%local' { TKlocal } - '%_' { TKwild } - '(' { TKoparen } - ')' { TKcparen } - '{' { TKobrace } - '}' { TKcbrace } - '#' { TKhash} - '=' { TKeq } - ':' { TKcolon } - '::' { TKcoloncolon } - ':=:' { TKcoloneqcolon } - '*' { TKstar } - '->' { TKrarrow } - '\\' { TKlambda} - '@' { TKat } - '.' { TKdot } - '?' { TKquestion} - ';' { TKsemicolon } - NAME { TKname $$ } - CNAME { TKcname $$ } - INTEGER { TKinteger $$ } - RATIONAL { TKrational $$ } - STRING { TKstring $$ } - CHAR { TKchar $$ } - -%monad { P } { thenP } { returnP } -%lexer { lexer } { TKEOF } - -%% - -module :: { HsExtCore RdrName } - -- : '%module' modid tdefs vdefgs { HsExtCore $2 $3 $4 } - : '%module' modid tdefs vdefgs { HsExtCore $2 [] [] } - - -------------------------------------------------------------- --- Names: the trickiest bit in here - --- A name of the form A.B.C could be: --- module A.B.C --- dcon C in module A.B --- tcon C in module A.B -modid :: { Module } - : NAME ':' mparts { undefined } - -q_dc_name :: { Name } - : NAME ':' mparts { undefined } - -q_tc_name :: { Name } - : NAME ':' mparts { undefined } - -q_var_occ :: { Name } - : NAME ':' vparts { undefined } - -mparts :: { [String] } - : CNAME { [$1] } - | CNAME '.' mparts { $1:$3 } - -vparts :: { [String] } - : var_occ { [$1] } - | CNAME '.' 
vparts { $1:$3 } - -------------------------------------------------------------- --- Type and newtype declarations are in HsSyn syntax - -tdefs :: { [TyClDecl RdrName] } - : {- empty -} {[]} - | tdef tdefs {$1:$2} - -tdef :: { TyClDecl RdrName } - : '%data' q_tc_name tv_bndrs '=' '{' cons '}' ';' - { DataDecl { tcdLName = noLoc (ifaceExtRdrName $2) - , tcdTyVars = mkHsQTvs (map toHsTvBndr $3) - , tcdDataDefn = HsDataDefn { dd_ND = DataType, dd_ctxt = noLoc [] - , dd_kindSig = Nothing - , dd_cons = $6, dd_derivs = Nothing } } } - | '%newtype' q_tc_name tv_bndrs trep ';' - { let tc_rdr = ifaceExtRdrName $2 in - DataDecl { tcdLName = noLoc tc_rdr - , tcdTyVars = mkHsQTvs (map toHsTvBndr $3) - , tcdDataDefn = HsDataDefn { dd_ND = NewType, dd_ctxt = noLoc [] - , dd_kindSig = Nothing - , dd_cons = $4 (rdrNameOcc tc_rdr), dd_derivs = Nothing } } } - --- For a newtype we have to invent a fake data constructor name --- It doesn't matter what it is, because it won't be used -trep :: { OccName -> [LConDecl RdrName] } - : {- empty -} { (\ tc_occ -> []) } - | '=' ty { (\ tc_occ -> let { dc_name = mkRdrUnqual (setOccNameSpace dataName tc_occ) ; - con_info = PrefixCon [toHsType $2] } - in [noLoc $ mkSimpleConDecl (noLoc dc_name) [] - (noLoc []) con_info]) } - -cons :: { [LConDecl RdrName] } - : {- empty -} { [] } -- 20060420 Empty data types allowed. jds - | con { [$1] } - | con ';' cons { $1:$3 } - -con :: { LConDecl RdrName } - : d_pat_occ attv_bndrs hs_atys - { noLoc $ mkSimpleConDecl (noLoc (mkRdrUnqual $1)) $2 (noLoc []) (PrefixCon $3) } --- ToDo: parse record-style declarations - -attv_bndrs :: { [LHsTyVarBndr RdrName] } - : {- empty -} { [] } - | '@' tv_bndr attv_bndrs { toHsTvBndr $2 : $3 } - -hs_atys :: { [LHsType RdrName] } - : atys { map toHsType $1 } - - ---------------------------------------- --- Types ---------------------------------------- - -atys :: { [IfaceType] } - : {- empty -} { [] } - | aty atys { $1:$2 } - -aty :: { IfaceType } - : fs_var_occ { IfaceTyVar $1 } - | q_tc_name { IfaceTyConApp (IfaceTc $1) [] } - | '(' ty ')' { $2 } - -bty :: { IfaceType } - : fs_var_occ atys { foldl IfaceAppTy (IfaceTyVar $1) $2 } - | q_var_occ atys { undefined } - | q_tc_name atys { IfaceTyConApp (IfaceTc $1) $2 } - | '(' ty ')' { $2 } - -ty :: { IfaceType } - : bty { $1 } - | bty '->' ty { IfaceFunTy $1 $3 } - | '%forall' tv_bndrs '.' ty { foldr IfaceForAllTy $4 $2 } - ----------------------------------------------- --- Bindings are in Iface syntax - -vdefgs :: { [IfaceBinding] } - : {- empty -} { [] } - | let_bind ';' vdefgs { $1 : $3 } - -let_bind :: { IfaceBinding } - : '%rec' '{' vdefs1 '}' { IfaceRec $3 } -- Can be empty. Do we care? - | vdef { let (b,r) = $1 - in IfaceNonRec b r } - -vdefs1 :: { [(IfaceLetBndr, IfaceExpr)] } - : vdef { [$1] } - | vdef ';' vdefs1 { $1:$3 } - -vdef :: { (IfaceLetBndr, IfaceExpr) } - : fs_var_occ '::' ty '=' exp { (IfLetBndr $1 $3 NoInfo, $5) } - | '%local' vdef { $2 } - - -- NB: qd_occ includes data constructors, because - -- we allow data-constructor wrappers at top level - -- But we discard the module name, because it must be the - -- same as the module being compiled, and Iface syntax only - -- has OccNames in binding positions. Ah, but it has Names now! 
- ---------------------------------------- --- Binders -bndr :: { IfaceBndr } - : '@' tv_bndr { IfaceTvBndr $2 } - | id_bndr { IfaceIdBndr $1 } - -bndrs :: { [IfaceBndr] } - : bndr { [$1] } - | bndr bndrs { $1:$2 } - -id_bndr :: { IfaceIdBndr } - : '(' fs_var_occ '::' ty ')' { ($2,$4) } - -tv_bndr :: { IfaceTvBndr } - : fs_var_occ { ($1, ifaceLiftedTypeKind) } - | '(' fs_var_occ '::' akind ')' { ($2, $4) } - -tv_bndrs :: { [IfaceTvBndr] } - : {- empty -} { [] } - | tv_bndr tv_bndrs { $1:$2 } - -akind :: { IfaceKind } - : '*' { ifaceLiftedTypeKind } - | '#' { ifaceUnliftedTypeKind } - | '?' { ifaceOpenTypeKind } - | '(' kind ')' { $2 } - -kind :: { IfaceKind } - : akind { $1 } - | akind '->' kind { ifaceArrow $1 $3 } - ------------------------------------------ --- Expressions - -aexp :: { IfaceExpr } - : fs_var_occ { IfaceLcl $1 } - | q_var_occ { IfaceExt $1 } - | q_dc_name { IfaceExt $1 } - | lit { IfaceLit $1 } - | '(' exp ')' { $2 } - -fexp :: { IfaceExpr } - : fexp aexp { IfaceApp $1 $2 } - | fexp '@' aty { IfaceApp $1 (IfaceType $3) } - | aexp { $1 } - -exp :: { IfaceExpr } - : fexp { $1 } - | '\\' bndrs '->' exp { foldr IfaceLam $4 $2 } - | '%let' let_bind '%in' exp { IfaceLet $2 $4 } --- gaw 2004 - | '%case' '(' ty ')' aexp '%of' id_bndr - '{' alts1 '}' { IfaceCase $5 (fst $7) $9 } --- The following line is broken and is hard to fix. Not fixing now --- because this whole parser is bitrotten anyway. --- Richard Eisenberg, July 2013 --- | '%cast' aexp aty { IfaceCast $2 $3 } --- No InlineMe any more --- | '%note' STRING exp --- { case $2 of --- --"SCC" -> IfaceNote (IfaceSCC "scc") $3 --- "InlineMe" -> IfaceNote IfaceInlineMe $3 --- } - | '%external' STRING aty { IfaceFCall (ForeignCall.CCall - (CCallSpec (StaticTarget (mkFastString $2) Nothing True) - CCallConv PlaySafe)) - $3 } - -alts1 :: { [IfaceAlt] } - : alt { [$1] } - | alt ';' alts1 { $1:$3 } - -alt :: { IfaceAlt } - : q_dc_name bndrs '->' exp - { (IfaceDataAlt $1, map ifaceBndrName $2, $4) } - -- The external syntax currently includes the types of the - -- the args, but they aren't needed internally - -- Nor is the module qualifier - | q_dc_name '->' exp - { (IfaceDataAlt $1, [], $3) } - | lit '->' exp - { (IfaceLitAlt $1, [], $3) } - | '%_' '->' exp - { (IfaceDefault, [], $3) } - -lit :: { Literal } - : '(' INTEGER '::' aty ')' { convIntLit $2 $4 } - | '(' RATIONAL '::' aty ')' { convRatLit $2 $4 } - | '(' CHAR '::' aty ')' { MachChar $2 } - | '(' STRING '::' aty ')' { MachStr (fastStringToByteString (mkFastString $2)) } - -fs_var_occ :: { FastString } - : NAME { mkFastString $1 } - -var_occ :: { String } - : NAME { $1 } - - --- Data constructor in a pattern or data type declaration; use the dataName, --- because that's what we expect in Core case patterns -d_pat_occ :: { OccName } - : CNAME { mkOccName dataName $1 } - -{ - -ifaceKind kc = IfaceTyConApp kc [] - -ifaceBndrName (IfaceIdBndr (n,_)) = n -ifaceBndrName (IfaceTvBndr (n,_)) = n - -convIntLit :: Integer -> IfaceType -> Literal -convIntLit i (IfaceTyConApp tc []) - | tc `eqTc` intPrimTyCon = MachInt i - | tc `eqTc` wordPrimTyCon = MachWord i - | tc `eqTc` charPrimTyCon = MachChar (chr (fromInteger i)) - | tc `eqTc` addrPrimTyCon && i == 0 = MachNullAddr -convIntLit i aty - = pprPanic "Unknown integer literal type" (ppr aty) - -convRatLit :: Rational -> IfaceType -> Literal -convRatLit r (IfaceTyConApp tc []) - | tc `eqTc` floatPrimTyCon = MachFloat r - | tc `eqTc` doublePrimTyCon = MachDouble r -convRatLit i aty - = pprPanic "Unknown rational literal type" (ppr aty) 
- -eqTc :: IfaceTyCon -> TyCon -> Bool -- Ugh! -eqTc (IfaceTc name) tycon = name == tyConName tycon - --- Tiresomely, we have to generate both HsTypes (in type/class decls) --- and IfaceTypes (in Core expressions). So we parse them as IfaceTypes, --- and convert to HsTypes here. But the IfaceTypes we can see here --- are very limited (see the productions for 'ty'), so the translation --- isn't hard -toHsType :: IfaceType -> LHsType RdrName -toHsType (IfaceTyVar v) = noLoc $ HsTyVar (mkRdrUnqual (mkTyVarOccFS v)) -toHsType (IfaceAppTy t1 t2) = noLoc $ HsAppTy (toHsType t1) (toHsType t2) -toHsType (IfaceFunTy t1 t2) = noLoc $ HsFunTy (toHsType t1) (toHsType t2) -toHsType (IfaceTyConApp (IfaceTc tc) ts) = foldl mkHsAppTy (noLoc $ HsTyVar (ifaceExtRdrName tc)) (map toHsType ts) -toHsType (IfaceForAllTy tv t) = add_forall (toHsTvBndr tv) (toHsType t) - --- Only a limited form of kind will be encountered... hopefully -toHsKind :: IfaceKind -> LHsKind RdrName --- IA0_NOTE: Shouldn't we add kind variables? -toHsKind (IfaceFunTy ifK1 ifK2) = noLoc $ HsFunTy (toHsKind ifK1) (toHsKind ifK2) -toHsKind (IfaceTyConApp ifKc []) = noLoc $ HsTyVar (nameRdrName (tyConName (toKindTc ifKc))) -toHsKind other = pprPanic "toHsKind" (ppr other) - -toKindTc :: IfaceTyCon -> TyCon -toKindTc (IfaceTc n) | Just (ATyCon tc) <- wiredInNameTyThing_maybe n = tc -toKindTc other = pprPanic "toKindTc" (ppr other) - -ifaceTcType ifTc = IfaceTyConApp ifTc [] - -ifaceLiftedTypeKind = ifaceTcType (IfaceTc liftedTypeKindTyConName) -ifaceOpenTypeKind = ifaceTcType (IfaceTc openTypeKindTyConName) -ifaceUnliftedTypeKind = ifaceTcType (IfaceTc unliftedTypeKindTyConName) - -ifaceArrow ifT1 ifT2 = IfaceFunTy ifT1 ifT2 - -toHsTvBndr :: IfaceTvBndr -> LHsTyVarBndr RdrName -toHsTvBndr (tv,k) = noLoc $ KindedTyVar (mkRdrUnqual (mkTyVarOccFS tv)) bsig - where - bsig = toHsKind k - -ifaceExtRdrName :: Name -> RdrName -ifaceExtRdrName name = mkOrig (nameModule name) (nameOccName name) -ifaceExtRdrName other = pprPanic "ParserCore.ifaceExtRdrName" (ppr other) - -add_forall tv (L _ (HsForAllTy exp tvs cxt t)) - = noLoc $ HsForAllTy exp (mkHsQTvs (tv : hsQTvBndrs tvs)) cxt t -add_forall tv t - = noLoc $ HsForAllTy Explicit (mkHsQTvs [tv]) (noLoc []) t - -happyError :: P a -happyError s l = failP (show l ++ ": Parse error\n") (take 100 s) l -} - diff --git a/compiler/parser/ParserCoreUtils.hs b/compiler/parser/ParserCoreUtils.hs deleted file mode 100644 index 8f67d96239..0000000000 --- a/compiler/parser/ParserCoreUtils.hs +++ /dev/null @@ -1,77 +0,0 @@ -module ParserCoreUtils where - -import Exception -import System.IO - -data ParseResult a = OkP a | FailP String -type P a = String -> Int -> ParseResult a - -thenP :: P a -> (a -> P b) -> P b -m `thenP` k = \ s l -> - case m s l of - OkP a -> k a s l - FailP s -> FailP s - -returnP :: a -> P a -returnP m _ _ = OkP m - -failP :: String -> P a -failP s s' _ = FailP (s ++ ":" ++ s') - -getCoreModuleName :: FilePath -> IO String -getCoreModuleName fpath = - catchIO (do - h <- openFile fpath ReadMode - ls <- hGetContents h - let mo = findMod (words ls) - -- make sure we close up the file right away. 
- (length mo) `seq` return () - hClose h - return mo) - (\ _ -> return "Main") - where - findMod [] = "Main" - -- TODO: this should just return the module name, without the package name - findMod ("%module":m:_) = m - findMod (_:xs) = findMod xs - - -data Token = - TKmodule - | TKdata - | TKnewtype - | TKforall - | TKrec - | TKlet - | TKin - | TKcase - | TKof - | TKcast - | TKnote - | TKexternal - | TKlocal - | TKwild - | TKoparen - | TKcparen - | TKobrace - | TKcbrace - | TKhash - | TKeq - | TKcolon - | TKcoloncolon - | TKcoloneqcolon - | TKstar - | TKrarrow - | TKlambda - | TKat - | TKdot - | TKquestion - | TKsemicolon - | TKname String - | TKcname String - | TKinteger Integer - | TKrational Rational - | TKstring String - | TKchar Char - | TKEOF - diff --git a/compiler/parser/RdrHsSyn.lhs b/compiler/parser/RdrHsSyn.lhs index 03ec622223..93a98d068e 100644 --- a/compiler/parser/RdrHsSyn.lhs +++ b/compiler/parser/RdrHsSyn.lhs @@ -4,6 +4,8 @@ o% Functions over HsSyn specialised to RdrName. \begin{code} +{-# LANGUAGE CPP #-} + module RdrHsSyn ( mkHsOpApp, mkHsIntegral, mkHsFractional, mkHsIsString, @@ -32,6 +34,7 @@ module RdrHsSyn ( mkGadtDecl, -- [Located RdrName] -> LHsType RdrName -> ConDecl RdrName mkSimpleConDecl, mkDeprecatedGadtRecordDecl, + mkATDefault, -- Bunch of functions in the parser monad for -- checking and constructing values @@ -71,7 +74,7 @@ import TysWiredIn ( unitTyCon, unitDataCon ) import ForeignCall import OccName ( srcDataName, varName, isDataOcc, isTcOcc, occNameString ) -import PrelNames ( forall_tv_RDR ) +import PrelNames ( forall_tv_RDR, allNameStrings ) import DynFlags import SrcLoc import OrdList ( OrdList, fromOL ) @@ -122,16 +125,31 @@ mkClassDecl :: SrcSpan -> P (LTyClDecl RdrName) mkClassDecl loc (L _ (mcxt, tycl_hdr)) fds where_cls - = do { let (binds, sigs, ats, at_defs, _, docs) = cvBindsAndSigs (unLoc where_cls) + = do { let (binds, sigs, ats, at_insts, _, docs) = cvBindsAndSigs (unLoc where_cls) cxt = fromMaybe (noLoc []) mcxt ; (cls, tparams) <- checkTyClHdr tycl_hdr - ; tyvars <- checkTyVars (ptext (sLit "class")) whereDots - cls tparams -- Only type vars allowed + ; tyvars <- checkTyVarsP (ptext (sLit "class")) whereDots cls tparams + ; at_defs <- mapM (eitherToP . 
mkATDefault) at_insts ; return (L loc (ClassDecl { tcdCtxt = cxt, tcdLName = cls, tcdTyVars = tyvars, tcdFDs = unLoc fds, tcdSigs = sigs, tcdMeths = binds, tcdATs = ats, tcdATDefs = at_defs, tcdDocs = docs, tcdFVs = placeHolderNames })) } +mkATDefault :: LTyFamInstDecl RdrName + -> Either (SrcSpan, SDoc) (LTyFamDefltEqn RdrName) +-- Take a type-family instance declaration and turn it into +-- a type-family default equation for a class declaration +-- We parse things as the former and use this function to convert to the latter +-- +-- We use the Either monad because this also called +-- from Convert.hs +mkATDefault (L loc (TyFamInstDecl { tfid_eqn = L _ e })) + | TyFamEqn { tfe_tycon = tc, tfe_pats = pats, tfe_rhs = rhs } <- e + = do { tvs <- checkTyVars (ptext (sLit "default")) equalsDots tc (hswb_cts pats) + ; return (L loc (TyFamEqn { tfe_tycon = tc + , tfe_pats = tvs + , tfe_rhs = rhs })) } + mkTyData :: SrcSpan -> NewOrData -> Maybe CType @@ -142,7 +160,7 @@ mkTyData :: SrcSpan -> P (LTyClDecl RdrName) mkTyData loc new_or_data cType (L _ (mcxt, tycl_hdr)) ksig data_cons maybe_deriv = do { (tc, tparams) <- checkTyClHdr tycl_hdr - ; tyvars <- checkTyVars (ppr new_or_data) equalsDots tc tparams + ; tyvars <- checkTyVarsP (ppr new_or_data) equalsDots tc tparams ; defn <- mkDataDefn new_or_data cType mcxt ksig data_cons maybe_deriv ; return (L loc (DataDecl { tcdLName = tc, tcdTyVars = tyvars, tcdDataDefn = defn, @@ -170,7 +188,7 @@ mkTySynonym :: SrcSpan -> P (LTyClDecl RdrName) mkTySynonym loc lhs rhs = do { (tc, tparams) <- checkTyClHdr lhs - ; tyvars <- checkTyVars (ptext (sLit "type")) equalsDots tc tparams + ; tyvars <- checkTyVarsP (ptext (sLit "type")) equalsDots tc tparams ; return (L loc (SynDecl { tcdLName = tc, tcdTyVars = tyvars , tcdRhs = rhs, tcdFVs = placeHolderNames })) } @@ -179,9 +197,9 @@ mkTyFamInstEqn :: LHsType RdrName -> P (TyFamInstEqn RdrName) mkTyFamInstEqn lhs rhs = do { (tc, tparams) <- checkTyClHdr lhs - ; return (TyFamInstEqn { tfie_tycon = tc - , tfie_pats = mkHsWithBndrs tparams - , tfie_rhs = rhs }) } + ; return (TyFamEqn { tfe_tycon = tc + , tfe_pats = mkHsWithBndrs tparams + , tfe_rhs = rhs }) } mkDataFamInst :: SrcSpan -> NewOrData @@ -212,7 +230,7 @@ mkFamDecl :: SrcSpan -> P (LTyClDecl RdrName) mkFamDecl loc info lhs ksig = do { (tc, tparams) <- checkTyClHdr lhs - ; tyvars <- checkTyVars (ppr info) equals_or_where tc tparams + ; tyvars <- checkTyVarsP (ppr info) equals_or_where tc tparams ; return (L loc (FamDecl (FamilyDecl { fdInfo = info, fdLName = tc , fdTyVars = tyvars, fdKindSig = ksig }))) } where @@ -500,26 +518,42 @@ we can bring x,y into scope. So: * For RecCon we do not \begin{code} -checkTyVars :: SDoc -> SDoc -> Located RdrName -> [LHsType RdrName] -> P (LHsTyVarBndrs RdrName) +checkTyVarsP :: SDoc -> SDoc -> Located RdrName -> [LHsType RdrName] -> P (LHsTyVarBndrs RdrName) +-- Same as checkTyVars, but in the P monad +checkTyVarsP pp_what equals_or_where tc tparms + = eitherToP $ checkTyVars pp_what equals_or_where tc tparms + +eitherToP :: Either (SrcSpan, SDoc) a -> P a +-- Adapts the Either monad to the P monad +eitherToP (Left (loc, doc)) = parseErrorSDoc loc doc +eitherToP (Right thing) = return thing +checkTyVars :: SDoc -> SDoc -> Located RdrName -> [LHsType RdrName] + -> Either (SrcSpan, SDoc) (LHsTyVarBndrs RdrName) -- Check whether the given list of type parameters are all type variables --- (possibly with a kind signature). 
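-- Editorial sketch, not part of the patch: the shape of the checkTyVars /
-- eitherToP refactoring above.  The check itself lives in the plain Either
-- monad so that non-parser code (Convert.hs) can reuse it; parser callers
-- adapt the result with a one-line eliminator such as eitherToP.  Every name
-- below is an illustrative stand-in, not a GHC definition.
type CheckM a = Either String a        -- stands in for Either (SrcSpan, SDoc) a

checkAllVars :: [String] -> CheckM [String]
checkAllVars ts
  | all looksLikeVar ts = Right ts
  | otherwise           = Left ("unexpected non-variable among: " ++ unwords ts)
  where
    looksLikeVar (c:_) = c `elem` ['a'..'z']
    looksLikeVar _     = False

-- A parser-monad caller would then write, mirroring checkTyVarsP above:
--   tyvars <- eitherToP (checkAllVars tparams)
-- where eitherToP turns a Left into a parse error via parseErrorSDoc.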
-checkTyVars pp_what equals_or_where tc tparms = do { tvs <- mapM chk tparms - ; return (mkHsQTvs tvs) } +-- (possibly with a kind signature) +-- We use the Either monad because it's also called (via mkATDefault) from +-- Convert.hs +checkTyVars pp_what equals_or_where tc tparms + = do { tvs <- mapM chk tparms + ; return (mkHsQTvs tvs) } where + -- Check that the name space is correct! chk (L l (HsKindSig (L _ (HsTyVar tv)) k)) | isRdrTyVar tv = return (L l (KindedTyVar tv k)) chk (L l (HsTyVar tv)) | isRdrTyVar tv = return (L l (UserTyVar tv)) - chk t@(L l _) - = parseErrorSDoc l $ - vcat [ ptext (sLit "Unexpected type") <+> quotes (ppr t) - , ptext (sLit "In the") <+> pp_what <+> ptext (sLit "declaration for") <+> quotes (ppr tc) - , vcat[ (ptext (sLit "A") <+> pp_what <+> ptext (sLit "declaration should have form")) - , nest 2 (pp_what <+> ppr tc <+> ptext (sLit "a b c") - <+> equals_or_where) ] ] + chk t@(L loc _) + = Left (loc, + vcat [ ptext (sLit "Unexpected type") <+> quotes (ppr t) + , ptext (sLit "In the") <+> pp_what <+> ptext (sLit "declaration for") <+> quotes (ppr tc) + , vcat[ (ptext (sLit "A") <+> pp_what <+> ptext (sLit "declaration should have form")) + , nest 2 (pp_what <+> ppr tc + <+> hsep (map text (takeList tparms allNameStrings)) + <+> equals_or_where) ] ]) whereDots, equalsDots :: SDoc +-- Second argument to checkTyVars whereDots = ptext (sLit "where ...") equalsDots = ptext (sLit "= ...") @@ -666,7 +700,7 @@ checkAPat msg loc e0 = do ExplicitTuple es b | all tupArgPresent es -> do ps <- mapM (checkLPat msg) [e | Present e <- es] - return (TuplePat ps b placeHolderType) + return (TuplePat ps b []) | otherwise -> parseErrorSDoc loc (text "Illegal tuple section in pattern:" $$ ppr e0) RecordCon c _ (HsRecFields fs dd) diff --git a/compiler/prelude/PrelInfo.lhs b/compiler/prelude/PrelInfo.lhs index 014e0e7483..829b5e3bf9 100644 --- a/compiler/prelude/PrelInfo.lhs +++ b/compiler/prelude/PrelInfo.lhs @@ -4,7 +4,8 @@ \section[PrelInfo]{The @PrelInfo@ interface to the compiler's prelude knowledge} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/prelude/PrelNames.lhs b/compiler/prelude/PrelNames.lhs index 1d54726f2f..01c5764fd3 100644 --- a/compiler/prelude/PrelNames.lhs +++ b/compiler/prelude/PrelNames.lhs @@ -102,6 +102,8 @@ This is accomplished through a combination of mechanisms: See also Note [Built-in syntax and the OrigNameCache] \begin{code} +{-# LANGUAGE CPP #-} + module PrelNames ( Unique, Uniquable(..), hasKey, -- Re-exported for convenience @@ -128,6 +130,19 @@ import FastString %************************************************************************ %* * + allNameStrings +%* * +%************************************************************************ + +\begin{code} +allNameStrings :: [String] +-- Infinite list of a,b,c...z, aa, ab, ac, ... 
etc +allNameStrings = [ c:cs | cs <- "" : allNameStrings, c <- ['a'..'z'] ] +\end{code} + + +%************************************************************************ +%* * \subsection{Local Names} %* * %************************************************************************ @@ -817,20 +832,20 @@ inlineIdName = varQual gHC_MAGIC (fsLit "inline") inlineIdKey -- Base classes (Eq, Ord, Functor) fmapName, eqClassName, eqName, ordClassName, geName, functorClassName :: Name -eqClassName = clsQual gHC_CLASSES (fsLit "Eq") eqClassKey -eqName = methName gHC_CLASSES (fsLit "==") eqClassOpKey -ordClassName = clsQual gHC_CLASSES (fsLit "Ord") ordClassKey -geName = methName gHC_CLASSES (fsLit ">=") geClassOpKey -functorClassName = clsQual gHC_BASE (fsLit "Functor") functorClassKey -fmapName = methName gHC_BASE (fsLit "fmap") fmapClassOpKey +eqClassName = clsQual gHC_CLASSES (fsLit "Eq") eqClassKey +eqName = varQual gHC_CLASSES (fsLit "==") eqClassOpKey +ordClassName = clsQual gHC_CLASSES (fsLit "Ord") ordClassKey +geName = varQual gHC_CLASSES (fsLit ">=") geClassOpKey +functorClassName = clsQual gHC_BASE (fsLit "Functor") functorClassKey +fmapName = varQual gHC_BASE (fsLit "fmap") fmapClassOpKey -- Class Monad monadClassName, thenMName, bindMName, returnMName, failMName :: Name -monadClassName = clsQual gHC_BASE (fsLit "Monad") monadClassKey -thenMName = methName gHC_BASE (fsLit ">>") thenMClassOpKey -bindMName = methName gHC_BASE (fsLit ">>=") bindMClassOpKey -returnMName = methName gHC_BASE (fsLit "return") returnMClassOpKey -failMName = methName gHC_BASE (fsLit "fail") failMClassOpKey +monadClassName = clsQual gHC_BASE (fsLit "Monad") monadClassKey +thenMName = varQual gHC_BASE (fsLit ">>") thenMClassOpKey +bindMName = varQual gHC_BASE (fsLit ">>=") bindMClassOpKey +returnMName = varQual gHC_BASE (fsLit "return") returnMClassOpKey +failMName = varQual gHC_BASE (fsLit "fail") failMClassOpKey -- Classes (Applicative, Foldable, Traversable) applicativeClassName, foldableClassName, traversableClassName :: Name @@ -843,10 +858,10 @@ traversableClassName = clsQual dATA_TRAVERSABLE (fsLit "Traversable") trave -- AMP additions joinMName, apAName, pureAName, alternativeClassName :: Name -joinMName = methName mONAD (fsLit "join") joinMIdKey -apAName = methName cONTROL_APPLICATIVE (fsLit "<*>") apAClassOpKey -pureAName = methName cONTROL_APPLICATIVE (fsLit "pure") pureAClassOpKey -alternativeClassName = clsQual cONTROL_APPLICATIVE (fsLit "Alternative") alternativeClassKey +joinMName = varQual mONAD (fsLit "join") joinMIdKey +apAName = varQual cONTROL_APPLICATIVE (fsLit "<*>") apAClassOpKey +pureAName = varQual cONTROL_APPLICATIVE (fsLit "pure") pureAClassOpKey +alternativeClassName = clsQual cONTROL_APPLICATIVE (fsLit "Alternative") alternativeClassKey joinMIdKey, apAClassOpKey, pureAClassOpKey, alternativeClassKey :: Unique joinMIdKey = mkPreludeMiscIdUnique 750 @@ -864,7 +879,7 @@ fromStringName, otherwiseIdName, foldrName, buildName, augmentName, mapName, appendName, assertName, breakpointName, breakpointCondName, breakpointAutoName, opaqueTyConName :: Name -fromStringName = methName dATA_STRING (fsLit "fromString") fromStringClassOpKey +fromStringName = varQual dATA_STRING (fsLit "fromString") fromStringClassOpKey otherwiseIdName = varQual gHC_BASE (fsLit "otherwise") otherwiseIdKey foldrName = varQual gHC_BASE (fsLit "foldr") foldrIdKey buildName = varQual gHC_BASE (fsLit "build") buildIdKey @@ -875,7 +890,7 @@ assertName = varQual gHC_BASE (fsLit "assert") assertIdKey breakpointName = varQual gHC_BASE 
(fsLit "breakpoint") breakpointIdKey breakpointCondName= varQual gHC_BASE (fsLit "breakpointCond") breakpointCondIdKey breakpointAutoName= varQual gHC_BASE (fsLit "breakpointAuto") breakpointAutoIdKey -opaqueTyConName = tcQual gHC_BASE (fsLit "Opaque") opaqueTyConKey +opaqueTyConName = tcQual gHC_BASE (fsLit "Opaque") opaqueTyConKey breakpointJumpName :: Name breakpointJumpName @@ -903,10 +918,10 @@ sndName = varQual dATA_TUPLE (fsLit "snd") sndIdKey -- Module GHC.Num numClassName, fromIntegerName, minusName, negateName :: Name -numClassName = clsQual gHC_NUM (fsLit "Num") numClassKey -fromIntegerName = methName gHC_NUM (fsLit "fromInteger") fromIntegerClassOpKey -minusName = methName gHC_NUM (fsLit "-") minusClassOpKey -negateName = methName gHC_NUM (fsLit "negate") negateClassOpKey +numClassName = clsQual gHC_NUM (fsLit "Num") numClassKey +fromIntegerName = varQual gHC_NUM (fsLit "fromInteger") fromIntegerClassOpKey +minusName = varQual gHC_NUM (fsLit "-") minusClassOpKey +negateName = varQual gHC_NUM (fsLit "negate") negateClassOpKey integerTyConName, mkIntegerName, integerToWord64Name, integerToInt64Name, @@ -973,23 +988,23 @@ rationalTyConName, ratioTyConName, ratioDataConName, realClassName, integralClassName, realFracClassName, fractionalClassName, fromRationalName, toIntegerName, toRationalName, fromIntegralName, realToFracName :: Name -rationalTyConName = tcQual gHC_REAL (fsLit "Rational") rationalTyConKey -ratioTyConName = tcQual gHC_REAL (fsLit "Ratio") ratioTyConKey -ratioDataConName = conName gHC_REAL (fsLit ":%") ratioDataConKey -realClassName = clsQual gHC_REAL (fsLit "Real") realClassKey -integralClassName = clsQual gHC_REAL (fsLit "Integral") integralClassKey -realFracClassName = clsQual gHC_REAL (fsLit "RealFrac") realFracClassKey -fractionalClassName = clsQual gHC_REAL (fsLit "Fractional") fractionalClassKey -fromRationalName = methName gHC_REAL (fsLit "fromRational") fromRationalClassOpKey -toIntegerName = methName gHC_REAL (fsLit "toInteger") toIntegerClassOpKey -toRationalName = methName gHC_REAL (fsLit "toRational") toRationalClassOpKey -fromIntegralName = varQual gHC_REAL (fsLit "fromIntegral") fromIntegralIdKey -realToFracName = varQual gHC_REAL (fsLit "realToFrac") realToFracIdKey +rationalTyConName = tcQual gHC_REAL (fsLit "Rational") rationalTyConKey +ratioTyConName = tcQual gHC_REAL (fsLit "Ratio") ratioTyConKey +ratioDataConName = conName gHC_REAL (fsLit ":%") ratioDataConKey +realClassName = clsQual gHC_REAL (fsLit "Real") realClassKey +integralClassName = clsQual gHC_REAL (fsLit "Integral") integralClassKey +realFracClassName = clsQual gHC_REAL (fsLit "RealFrac") realFracClassKey +fractionalClassName = clsQual gHC_REAL (fsLit "Fractional") fractionalClassKey +fromRationalName = varQual gHC_REAL (fsLit "fromRational") fromRationalClassOpKey +toIntegerName = varQual gHC_REAL (fsLit "toInteger") toIntegerClassOpKey +toRationalName = varQual gHC_REAL (fsLit "toRational") toRationalClassOpKey +fromIntegralName = varQual gHC_REAL (fsLit "fromIntegral")fromIntegralIdKey +realToFracName = varQual gHC_REAL (fsLit "realToFrac") realToFracIdKey -- PrelFloat classes floatingClassName, realFloatClassName :: Name -floatingClassName = clsQual gHC_FLOAT (fsLit "Floating") floatingClassKey -realFloatClassName = clsQual gHC_FLOAT (fsLit "RealFloat") realFloatClassKey +floatingClassName = clsQual gHC_FLOAT (fsLit "Floating") floatingClassKey +realFloatClassName = clsQual gHC_FLOAT (fsLit "RealFloat") realFloatClassKey -- other GHC.Float functions rationalToFloatName, 
rationalToDoubleName :: Name @@ -1005,7 +1020,7 @@ typeableClassName, oldTypeableClassName, oldTypeable1ClassName, oldTypeable2ClassName, oldTypeable3ClassName, oldTypeable4ClassName, oldTypeable5ClassName, oldTypeable6ClassName, oldTypeable7ClassName :: Name -typeableClassName = clsQual tYPEABLE_INTERNAL (fsLit "Typeable") typeableClassKey +typeableClassName = clsQual tYPEABLE_INTERNAL (fsLit "Typeable") typeableClassKey oldTypeableClassName = clsQual oLDTYPEABLE_INTERNAL (fsLit "Typeable") oldTypeableClassKey oldTypeable1ClassName = clsQual oLDTYPEABLE_INTERNAL (fsLit "Typeable1") oldTypeable1ClassKey oldTypeable2ClassName = clsQual oLDTYPEABLE_INTERNAL (fsLit "Typeable2") oldTypeable2ClassKey @@ -1031,33 +1046,33 @@ assertErrorName = varQual gHC_IO_Exception (fsLit "assertError") assertErrorId -- Enum module (Enum, Bounded) enumClassName, enumFromName, enumFromToName, enumFromThenName, enumFromThenToName, boundedClassName :: Name -enumClassName = clsQual gHC_ENUM (fsLit "Enum") enumClassKey -enumFromName = methName gHC_ENUM (fsLit "enumFrom") enumFromClassOpKey -enumFromToName = methName gHC_ENUM (fsLit "enumFromTo") enumFromToClassOpKey -enumFromThenName = methName gHC_ENUM (fsLit "enumFromThen") enumFromThenClassOpKey -enumFromThenToName = methName gHC_ENUM (fsLit "enumFromThenTo") enumFromThenToClassOpKey -boundedClassName = clsQual gHC_ENUM (fsLit "Bounded") boundedClassKey +enumClassName = clsQual gHC_ENUM (fsLit "Enum") enumClassKey +enumFromName = varQual gHC_ENUM (fsLit "enumFrom") enumFromClassOpKey +enumFromToName = varQual gHC_ENUM (fsLit "enumFromTo") enumFromToClassOpKey +enumFromThenName = varQual gHC_ENUM (fsLit "enumFromThen") enumFromThenClassOpKey +enumFromThenToName = varQual gHC_ENUM (fsLit "enumFromThenTo") enumFromThenToClassOpKey +boundedClassName = clsQual gHC_ENUM (fsLit "Bounded") boundedClassKey -- List functions concatName, filterName, zipName :: Name concatName = varQual gHC_LIST (fsLit "concat") concatIdKey filterName = varQual gHC_LIST (fsLit "filter") filterIdKey -zipName = varQual gHC_LIST (fsLit "zip") zipIdKey +zipName = varQual gHC_LIST (fsLit "zip") zipIdKey -- Overloaded lists isListClassName, fromListName, fromListNName, toListName :: Name -isListClassName = clsQual gHC_EXTS (fsLit "IsList") isListClassKey -fromListName = methName gHC_EXTS (fsLit "fromList") fromListClassOpKey -fromListNName = methName gHC_EXTS (fsLit "fromListN") fromListNClassOpKey -toListName = methName gHC_EXTS (fsLit "toList") toListClassOpKey +isListClassName = clsQual gHC_EXTS (fsLit "IsList") isListClassKey +fromListName = varQual gHC_EXTS (fsLit "fromList") fromListClassOpKey +fromListNName = varQual gHC_EXTS (fsLit "fromListN") fromListNClassOpKey +toListName = varQual gHC_EXTS (fsLit "toList") toListClassOpKey -- Class Show showClassName :: Name -showClassName = clsQual gHC_SHOW (fsLit "Show") showClassKey +showClassName = clsQual gHC_SHOW (fsLit "Show") showClassKey -- Class Read readClassName :: Name -readClassName = clsQual gHC_READ (fsLit "Read") readClassKey +readClassName = clsQual gHC_READ (fsLit "Read") readClassKey -- Classes Generic and Generic1, Datatype, Constructor and Selector genClassName, gen1ClassName, datatypeClassName, constructorClassName, @@ -1065,24 +1080,24 @@ genClassName, gen1ClassName, datatypeClassName, constructorClassName, genClassName = clsQual gHC_GENERICS (fsLit "Generic") genClassKey gen1ClassName = clsQual gHC_GENERICS (fsLit "Generic1") gen1ClassKey -datatypeClassName = clsQual gHC_GENERICS (fsLit "Datatype") datatypeClassKey 
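-- Editorial sketch, not part of the patch: the allNameStrings list added
-- earlier in this file is an infinite supply of distinct names -- "a" .. "z"
-- followed by two-letter names, and so on.  checkTyVars in RdrHsSyn uses it
-- (via takeList) to print one placeholder name per type parameter in its
-- "declaration should have form" error message, e.g. "data T a b c = ...".
namesForParams :: [param] -> [String]
namesForParams params = zipWith const allNameStrings params
  where
    -- same definition as in the hunk above
    allNameStrings = [ c:cs | cs <- "" : allNameStrings, c <- ['a'..'z'] ]

-- namesForParams ["x", "y", "z"] == ["a", "b", "c"]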
+datatypeClassName = clsQual gHC_GENERICS (fsLit "Datatype") datatypeClassKey constructorClassName = clsQual gHC_GENERICS (fsLit "Constructor") constructorClassKey -selectorClassName = clsQual gHC_GENERICS (fsLit "Selector") selectorClassKey +selectorClassName = clsQual gHC_GENERICS (fsLit "Selector") selectorClassKey -- GHCi things ghciIoClassName, ghciStepIoMName :: Name ghciIoClassName = clsQual gHC_GHCI (fsLit "GHCiSandboxIO") ghciIoClassKey -ghciStepIoMName = methName gHC_GHCI (fsLit "ghciStepIO") ghciStepIoMClassOpKey +ghciStepIoMName = varQual gHC_GHCI (fsLit "ghciStepIO") ghciStepIoMClassOpKey -- IO things ioTyConName, ioDataConName, thenIOName, bindIOName, returnIOName, failIOName :: Name -ioTyConName = tcQual gHC_TYPES (fsLit "IO") ioTyConKey -ioDataConName = conName gHC_TYPES (fsLit "IO") ioDataConKey -thenIOName = varQual gHC_BASE (fsLit "thenIO") thenIOIdKey -bindIOName = varQual gHC_BASE (fsLit "bindIO") bindIOIdKey -returnIOName = varQual gHC_BASE (fsLit "returnIO") returnIOIdKey -failIOName = varQual gHC_IO (fsLit "failIO") failIOIdKey +ioTyConName = tcQual gHC_TYPES (fsLit "IO") ioTyConKey +ioDataConName = conName gHC_TYPES (fsLit "IO") ioDataConKey +thenIOName = varQual gHC_BASE (fsLit "thenIO") thenIOIdKey +bindIOName = varQual gHC_BASE (fsLit "bindIO") bindIOIdKey +returnIOName = varQual gHC_BASE (fsLit "returnIO") returnIOIdKey +failIOName = varQual gHC_IO (fsLit "failIO") failIOIdKey -- IO things printName :: Name @@ -1090,7 +1105,7 @@ printName = varQual sYSTEM_IO (fsLit "print") printIdKey -- Int, Word, and Addr things int8TyConName, int16TyConName, int32TyConName, int64TyConName :: Name -int8TyConName = tcQual gHC_INT (fsLit "Int8") int8TyConKey +int8TyConName = tcQual gHC_INT (fsLit "Int8") int8TyConKey int16TyConName = tcQual gHC_INT (fsLit "Int16") int16TyConKey int32TyConName = tcQual gHC_INT (fsLit "Int32") int32TyConKey int64TyConName = tcQual gHC_INT (fsLit "Int64") int64TyConKey @@ -1104,12 +1119,12 @@ word64TyConName = tcQual gHC_WORD (fsLit "Word64") word64TyConKey -- PrelPtr module ptrTyConName, funPtrTyConName :: Name -ptrTyConName = tcQual gHC_PTR (fsLit "Ptr") ptrTyConKey +ptrTyConName = tcQual gHC_PTR (fsLit "Ptr") ptrTyConKey funPtrTyConName = tcQual gHC_PTR (fsLit "FunPtr") funPtrTyConKey -- Foreign objects and weak pointers stablePtrTyConName, newStablePtrName :: Name -stablePtrTyConName = tcQual gHC_STABLE (fsLit "StablePtr") stablePtrTyConKey +stablePtrTyConName = tcQual gHC_STABLE (fsLit "StablePtr") stablePtrTyConKey newStablePtrName = varQual gHC_STABLE (fsLit "newStablePtr") newStablePtrIdKey -- PrelST module @@ -1119,21 +1134,21 @@ runSTRepName = varQual gHC_ST (fsLit "runSTRep") runSTRepIdKey -- Recursive-do notation monadFixClassName, mfixName :: Name monadFixClassName = clsQual mONAD_FIX (fsLit "MonadFix") monadFixClassKey -mfixName = methName mONAD_FIX (fsLit "mfix") mfixIdKey +mfixName = varQual mONAD_FIX (fsLit "mfix") mfixIdKey -- Arrow notation arrAName, composeAName, firstAName, appAName, choiceAName, loopAName :: Name -arrAName = varQual aRROW (fsLit "arr") arrAIdKey +arrAName = varQual aRROW (fsLit "arr") arrAIdKey composeAName = varQual gHC_DESUGAR (fsLit ">>>") composeAIdKey -firstAName = varQual aRROW (fsLit "first") firstAIdKey -appAName = varQual aRROW (fsLit "app") appAIdKey -choiceAName = varQual aRROW (fsLit "|||") choiceAIdKey -loopAName = varQual aRROW (fsLit "loop") loopAIdKey +firstAName = varQual aRROW (fsLit "first") firstAIdKey +appAName = varQual aRROW (fsLit "app") appAIdKey +choiceAName = varQual aRROW (fsLit 
"|||") choiceAIdKey +loopAName = varQual aRROW (fsLit "loop") loopAIdKey -- Monad comprehensions guardMName, liftMName, mzipName :: Name -guardMName = varQual mONAD (fsLit "guard") guardMIdKey -liftMName = varQual mONAD (fsLit "liftM") liftMIdKey +guardMName = varQual mONAD (fsLit "guard") guardMIdKey +liftMName = varQual mONAD (fsLit "liftM") liftMIdKey mzipName = varQual mONAD_ZIP (fsLit "mzip") mzipIdKey @@ -1144,9 +1159,9 @@ toAnnotationWrapperName = varQual gHC_DESUGAR (fsLit "toAnnotationWrapper") toAn -- Other classes, needed for type defaulting monadPlusClassName, randomClassName, randomGenClassName, isStringClassName :: Name -monadPlusClassName = clsQual mONAD (fsLit "MonadPlus") monadPlusClassKey -randomClassName = clsQual rANDOM (fsLit "Random") randomClassKey -randomGenClassName = clsQual rANDOM (fsLit "RandomGen") randomGenClassKey +monadPlusClassName = clsQual mONAD (fsLit "MonadPlus") monadPlusClassKey +randomClassName = clsQual rANDOM (fsLit "Random") randomClassKey +randomGenClassName = clsQual rANDOM (fsLit "RandomGen") randomGenClassKey isStringClassName = clsQual dATA_STRING (fsLit "IsString") isStringClassKey -- Type-level naturals @@ -1202,10 +1217,6 @@ mk_known_key_name space modu str unique conName :: Module -> FastString -> Unique -> Name conName modu occ unique = mkExternalName unique modu (mkOccNameFS dataName occ) noSrcSpan - -methName :: Module -> FastString -> Unique -> Name -methName modu occ unique - = mkExternalName unique modu (mkVarOccFS occ) noSrcSpan \end{code} %************************************************************************ diff --git a/compiler/prelude/PrelRules.lhs b/compiler/prelude/PrelRules.lhs index 786780654e..d2e648f382 100644 --- a/compiler/prelude/PrelRules.lhs +++ b/compiler/prelude/PrelRules.lhs @@ -12,8 +12,8 @@ ToDo: (i1 + i2) only if it results in a valid Float. \begin{code} -{-# LANGUAGE RankNTypes #-} -{-# OPTIONS -optc-DNON_POSIX_SOURCE #-} +{-# LANGUAGE CPP, RankNTypes #-} +{-# OPTIONS_GHC -optc-DNON_POSIX_SOURCE #-} module PrelRules ( primOpRules, builtinRules ) where diff --git a/compiler/prelude/PrimOp.lhs b/compiler/prelude/PrimOp.lhs index 12f71c2230..4155a541ba 100644 --- a/compiler/prelude/PrimOp.lhs +++ b/compiler/prelude/PrimOp.lhs @@ -4,6 +4,8 @@ \section[PrimOp]{Primitive operations (machine-level)} \begin{code} +{-# LANGUAGE CPP #-} + module PrimOp ( PrimOp(..), PrimOpVecCat(..), allThePrimOps, primOpType, primOpSig, diff --git a/compiler/prelude/TysPrim.lhs b/compiler/prelude/TysPrim.lhs index 789d121519..de151fd92f 100644 --- a/compiler/prelude/TysPrim.lhs +++ b/compiler/prelude/TysPrim.lhs @@ -6,7 +6,8 @@ \section[TysPrim]{Wired-in knowledge about primitive types} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See @@ -158,7 +159,15 @@ mkPrimTc fs unique tycon = mkWiredInName gHC_PRIM (mkTcOccFS fs) unique (ATyCon tycon) -- Relevant TyCon - UserSyntax -- None are built-in syntax + UserSyntax + +mkBuiltInPrimTc :: FastString -> Unique -> TyCon -> Name +mkBuiltInPrimTc fs unique tycon + = mkWiredInName gHC_PRIM (mkTcOccFS fs) + unique + (ATyCon tycon) -- Relevant TyCon + BuiltInSyntax + charPrimTyConName, intPrimTyConName, int32PrimTyConName, int64PrimTyConName, wordPrimTyConName, word32PrimTyConName, word64PrimTyConName, addrPrimTyConName, floatPrimTyConName, doublePrimTyConName, statePrimTyConName, proxyPrimTyConName, realWorldTyConName, arrayPrimTyConName, arrayArrayPrimTyConName, smallArrayPrimTyConName, byteArrayPrimTyConName, mutableArrayPrimTyConName, mutableByteArrayPrimTyConName, mutableArrayArrayPrimTyConName, smallMutableArrayPrimTyConName, mutVarPrimTyConName, mVarPrimTyConName, tVarPrimTyConName, stablePtrPrimTyConName, stableNamePrimTyConName, bcoPrimTyConName, weakPrimTyConName, threadIdPrimTyConName, eqPrimTyConName, eqReprPrimTyConName, voidPrimTyConName :: Name charPrimTyConName = mkPrimTc (fsLit "Char#") charPrimTyConKey charPrimTyCon @@ -175,7 +184,7 @@ statePrimTyConName = mkPrimTc (fsLit "State#") statePrimTyConKey stat voidPrimTyConName = mkPrimTc (fsLit "Void#") voidPrimTyConKey voidPrimTyCon proxyPrimTyConName = mkPrimTc (fsLit "Proxy#") proxyPrimTyConKey proxyPrimTyCon eqPrimTyConName = mkPrimTc (fsLit "~#") eqPrimTyConKey eqPrimTyCon -eqReprPrimTyConName = mkPrimTc (fsLit "~R#") eqReprPrimTyConKey eqReprPrimTyCon +eqReprPrimTyConName = mkBuiltInPrimTc (fsLit "~R#") eqReprPrimTyConKey eqReprPrimTyCon realWorldTyConName = mkPrimTc (fsLit "RealWorld") realWorldTyConKey realWorldTyCon arrayPrimTyConName = mkPrimTc (fsLit "Array#") arrayPrimTyConKey arrayPrimTyCon byteArrayPrimTyConName = mkPrimTc (fsLit "ByteArray#") byteArrayPrimTyConKey byteArrayPrimTyCon @@ -700,7 +709,7 @@ threadIdPrimTyCon = pcPrimTyCon0 threadIdPrimTyConName PtrRep Note [Any types] ~~~~~~~~~~~~~~~~ -The type constructor Any of kind forall k. k -> k has these properties: +The type constructor Any of kind forall k. k has these properties: * It is defined in module GHC.Prim, and exported so that it is available to users. For this reason it's treated like any other @@ -713,7 +722,7 @@ The type constructor Any of kind forall k. k -> k has these properties: g :: ty ~ (Fst ty, Snd ty) If Any was a *data* type, then we'd get inconsistency because 'ty' could be (Any '(k1,k2)) and then we'd have an equality with Any on - one side and '(,) on the other + one side and '(,) on the other. See also #9097. * It is lifted, and hence represented by a pointer @@ -770,20 +779,12 @@ anyTy :: Type anyTy = mkTyConTy anyTyCon anyTyCon :: TyCon -anyTyCon = mkLiftedPrimTyCon anyTyConName kind [Nominal] PtrRep - where kind = ForAllTy kKiVar (mkTyVarTy kKiVar) - -{- Can't do this yet without messing up kind proxies --- RAE: I think you can now. 
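-- Editorial sketch, not part of the patch: what the Note above means in
-- practice.  Any inhabits every kind and, at kind *, any lifted value can be
-- unsafely coerced to it and back.  This assumes a GHC where GHC.Exts
-- re-exports Any, which it has done for a long time.
import GHC.Exts      (Any)
import Unsafe.Coerce (unsafeCoerce)

toAny :: a -> Any
toAny = unsafeCoerce

fromAny :: Any -> a          -- only sound if the Any really wraps an 'a'
fromAny = unsafeCoerce

roundTrip :: Bool
roundTrip = fromAny (toAny (42 :: Int)) == (42 :: Int)   -- True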
-anyTyCon :: TyCon -anyTyCon = mkSynTyCon anyTyConName kind [kKiVar] +anyTyCon = mkSynTyCon anyTyConName kind [kKiVar] [Nominal] syn_rhs NoParentTyCon where kind = ForAllTy kKiVar (mkTyVarTy kKiVar) - syn_rhs = SynFamilyTyCon { synf_open = False, synf_injective = True } - -- NB Closed, injective --} + syn_rhs = AbstractClosedSynFamilyTyCon anyTypeOfKind :: Kind -> Type anyTypeOfKind kind = TyConApp anyTyCon [kind] diff --git a/compiler/prelude/TysWiredIn.lhs b/compiler/prelude/TysWiredIn.lhs index dc4c775e3a..4586b90cb2 100644 --- a/compiler/prelude/TysWiredIn.lhs +++ b/compiler/prelude/TysWiredIn.lhs @@ -4,6 +4,8 @@ \section[TysWiredIn]{Wired-in knowledge about {\em non-primitive} types} \begin{code} +{-# LANGUAGE CPP #-} + -- | This module is about types that can be defined in Haskell, but which -- must be wired into the compiler nonetheless. C.f module TysPrim module TysWiredIn ( diff --git a/compiler/prelude/primops.txt.pp b/compiler/prelude/primops.txt.pp index 10dd19d4bb..4faa585246 100644 --- a/compiler/prelude/primops.txt.pp +++ b/compiler/prelude/primops.txt.pp @@ -843,8 +843,22 @@ primop CasArrayOp "casArray#" GenPrimOp section "Small Arrays" {Operations on {\tt SmallArray\#}. A {\tt SmallArray\#} works - just like an {\tt Array\#}, except that its implementation is - optimized for small arrays (i.e. no more than 128 elements.)} + just like an {\tt Array\#}, but with different space use and + performance characteristics (that are often useful with small + arrays). The {\tt SmallArray\#} and {\tt SmallMutableArray#} + lack a `card table'. The purpose of a card table is to avoid + having to scan every element of the array on each GC by + keeping track of which elements have changed since the last GC + and only scanning those that have changed. So the consequence + of there being no card table is that the representation is + somewhat smaller and the writes are somewhat faster (because + the card table does not need to be updated). The disadvantage + of course is that for a {\tt SmallMutableArray#} the whole + array has to be scanned on each GC. Thus it is best suited for + use cases where the mutable array is not long lived, e.g. + where a mutable array is initialised quickly and then frozen + to become an immutable {\tt SmallArray\#}. 
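{-# LANGUAGE MagicHash, UnboxedTuples #-}
-- Editorial sketch, not part of the patch: the "initialise quickly, then
-- freeze" pattern that the documentation above recommends for small arrays,
-- written against the raw primops (assuming a GHC that has them, 7.10+).
-- The boxed SmallArray wrapper below is the usual trick (cf. the 'primitive'
-- package); it is not defined by this patch.
import GHC.Exts
import GHC.ST (ST(..), runST)

data SmallArray a = SmallArray (SmallArray# a)

-- The SmallMutableArray# is short-lived: filled once and frozen immediately,
-- so the missing card table never forces repeated whole-array GC scans.
replicateSmall :: Int -> a -> SmallArray a
replicateSmall (I# n#) x = runST (ST (\s0 ->
  case newSmallArray# n# x s0 of
    (# s1, marr# #) ->
      case unsafeFreezeSmallArray# marr# s1 of
        (# s2, arr# #) -> (# s2, SmallArray arr# #)))

indexSmall :: SmallArray a -> Int -> a
indexSmall (SmallArray arr#) (I# i#) =
  case indexSmallArray# arr# i# of
    (# x #) -> x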
+ } ------------------------------------------------------------------------ @@ -1082,34 +1096,42 @@ primop IndexByteArrayOp_StablePtr "indexStablePtrArray#" GenPrimOp primop IndexByteArrayOp_Int8 "indexInt8Array#" GenPrimOp ByteArray# -> Int# -> Int# + {Read 8-bit integer; offset in bytes.} with can_fail = True primop IndexByteArrayOp_Int16 "indexInt16Array#" GenPrimOp ByteArray# -> Int# -> Int# + {Read 16-bit integer; offset in 16-bit words.} with can_fail = True primop IndexByteArrayOp_Int32 "indexInt32Array#" GenPrimOp ByteArray# -> Int# -> INT32 + {Read 32-bit integer; offset in 32-bit words.} with can_fail = True primop IndexByteArrayOp_Int64 "indexInt64Array#" GenPrimOp ByteArray# -> Int# -> INT64 + {Read 64-bit integer; offset in 64-bit words.} with can_fail = True primop IndexByteArrayOp_Word8 "indexWord8Array#" GenPrimOp ByteArray# -> Int# -> Word# + {Read 8-bit word; offset in bytes.} with can_fail = True primop IndexByteArrayOp_Word16 "indexWord16Array#" GenPrimOp ByteArray# -> Int# -> Word# + {Read 16-bit word; offset in 16-bit words.} with can_fail = True primop IndexByteArrayOp_Word32 "indexWord32Array#" GenPrimOp ByteArray# -> Int# -> WORD32 + {Read 32-bit word; offset in 32-bit words.} with can_fail = True primop IndexByteArrayOp_Word64 "indexWord64Array#" GenPrimOp ByteArray# -> Int# -> WORD64 + {Read 64-bit word; offset in 64-bit words.} with can_fail = True primop ReadByteArrayOp_Char "readCharArray#" GenPrimOp @@ -1126,11 +1148,13 @@ primop ReadByteArrayOp_WideChar "readWideCharArray#" GenPrimOp primop ReadByteArrayOp_Int "readIntArray#" GenPrimOp MutableByteArray# s -> Int# -> State# s -> (# State# s, Int# #) + {Read intger; offset in words.} with has_side_effects = True can_fail = True primop ReadByteArrayOp_Word "readWordArray#" GenPrimOp MutableByteArray# s -> Int# -> State# s -> (# State# s, Word# #) + {Read word; offset in words.} with has_side_effects = True can_fail = True @@ -1339,19 +1363,79 @@ primop SetByteArrayOp "setByteArray#" GenPrimOp code_size = { primOpCodeSizeForeignCall + 4 } can_fail = True +-- Atomic operations + +primop AtomicReadByteArrayOp_Int "atomicReadIntArray#" GenPrimOp + MutableByteArray# s -> Int# -> State# s -> (# State# s, Int# #) + {Given an array and an offset in Int units, read an element. The + index is assumed to be in bounds. Implies a full memory barrier.} + with has_side_effects = True + can_fail = True + +primop AtomicWriteByteArrayOp_Int "atomicWriteIntArray#" GenPrimOp + MutableByteArray# s -> Int# -> Int# -> State# s -> State# s + {Given an array and an offset in Int units, write an element. The + index is assumed to be in bounds. Implies a full memory barrier.} + with has_side_effects = True + can_fail = True + primop CasByteArrayOp_Int "casIntArray#" GenPrimOp MutableByteArray# s -> Int# -> Int# -> Int# -> State# s -> (# State# s, Int# #) - {Machine-level atomic compare and swap on a word within a ByteArray.} - with - out_of_line = True - has_side_effects = True + {Given an array, an offset in Int units, the expected old value, and + the new value, perform an atomic compare and swap i.e. write the new + value if the current value matches the provided old value. Returns + the value of the element before the operation. 
Implies a full memory + barrier.} + with has_side_effects = True + can_fail = True primop FetchAddByteArrayOp_Int "fetchAddIntArray#" GenPrimOp MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s, Int# #) - {Machine-level word-sized fetch-and-add within a ByteArray.} - with - out_of_line = True - has_side_effects = True + {Given an array, and offset in Int units, and a value to add, + atomically add the value to the element. Returns the value of the + element before the operation. Implies a full memory barrier.} + with has_side_effects = True + can_fail = True + +primop FetchSubByteArrayOp_Int "fetchSubIntArray#" GenPrimOp + MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s, Int# #) + {Given an array, and offset in Int units, and a value to subtract, + atomically substract the value to the element. Returns the value of + the element before the operation. Implies a full memory barrier.} + with has_side_effects = True + can_fail = True + +primop FetchAndByteArrayOp_Int "fetchAndIntArray#" GenPrimOp + MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s, Int# #) + {Given an array, and offset in Int units, and a value to AND, + atomically AND the value to the element. Returns the value of the + element before the operation. Implies a full memory barrier.} + with has_side_effects = True + can_fail = True + +primop FetchNandByteArrayOp_Int "fetchNandIntArray#" GenPrimOp + MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s, Int# #) + {Given an array, and offset in Int units, and a value to NAND, + atomically NAND the value to the element. Returns the value of the + element before the operation. Implies a full memory barrier.} + with has_side_effects = True + can_fail = True + +primop FetchOrByteArrayOp_Int "fetchOrIntArray#" GenPrimOp + MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s, Int# #) + {Given an array, and offset in Int units, and a value to OR, + atomically OR the value to the element. Returns the value of the + element before the operation. Implies a full memory barrier.} + with has_side_effects = True + can_fail = True + +primop FetchXorByteArrayOp_Int "fetchXorIntArray#" GenPrimOp + MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s, Int# #) + {Given an array, and offset in Int units, and a value to XOR, + atomically XOR the value to the element. Returns the value of the + element before the operation. Implies a full memory barrier.} + with has_side_effects = True + can_fail = True ------------------------------------------------------------------------ @@ -2413,7 +2497,7 @@ pseudoop "seq" { Evaluates its first argument to head normal form, and then returns its second argument as the result. } -primtype Any k +primtype Any { The type constructor {\tt Any} is type to which you can unsafely coerce any lifted type, and back. @@ -2438,8 +2522,11 @@ primtype Any k {\tt length (Any *) ([] (Any *))} - Note that {\tt Any} is kind polymorphic, and takes a kind {\tt k} as its - first argument. The kind of {\tt Any} is thus {\tt forall k. k -> k}.} + Above, we print kinds explicitly, as if with + {\tt -fprint-explicit-kinds}. + + Note that {\tt Any} is kind polymorphic; its kind is thus + {\tt forall k. k}.} primtype AnyK { The kind {\tt AnyK} is the kind level counterpart to {\tt Any}. 
In a diff --git a/compiler/profiling/CostCentre.lhs b/compiler/profiling/CostCentre.lhs index fffd6462b2..4a7a063897 100644 --- a/compiler/profiling/CostCentre.lhs +++ b/compiler/profiling/CostCentre.lhs @@ -1,5 +1,5 @@ \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/profiling/SCCfinal.lhs b/compiler/profiling/SCCfinal.lhs index fdcf7447eb..4a6da2417e 100644 --- a/compiler/profiling/SCCfinal.lhs +++ b/compiler/profiling/SCCfinal.lhs @@ -2,6 +2,8 @@ % (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 % \begin{code} +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- Modify and collect code generation for final STG program diff --git a/compiler/rename/RnBinds.lhs b/compiler/rename/RnBinds.lhs index 7251492ccf..e65d3173d6 100644 --- a/compiler/rename/RnBinds.lhs +++ b/compiler/rename/RnBinds.lhs @@ -9,7 +9,7 @@ type-synonym declarations; those cannot be done at this stage because they may be affected by renaming (which isn't fully worked out yet). \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -476,8 +476,9 @@ rnBind _ bind@(PatBind { pat_lhs = pat bndrs = collectPatBinders pat bind' = bind { pat_rhs = grhss', bind_fvs = fvs' } is_wild_pat = case pat of - L _ (WildPat {}) -> True - _ -> False + L _ (WildPat {}) -> True + L _ (BangPat (L _ (WildPat {}))) -> True -- #9127 + _ -> False -- Warn if the pattern binds no variables, except for the -- entirely-explicit idiom _ = rhs diff --git a/compiler/rename/RnEnv.lhs b/compiler/rename/RnEnv.lhs index 178f722d99..f333a239a1 100644 --- a/compiler/rename/RnEnv.lhs +++ b/compiler/rename/RnEnv.lhs @@ -4,6 +4,8 @@ \section[RnEnv]{Environment manipulation for the renamer monad} \begin{code} +{-# LANGUAGE CPP #-} + module RnEnv ( newTopSrcBinder, lookupLocatedTopBndrRn, lookupTopBndrRn, @@ -38,10 +40,7 @@ module RnEnv ( warnUnusedMatches, warnUnusedTopBinds, warnUnusedLocalBinds, dataTcOccs, kindSigErr, perhapsForallMsg, - HsDocContext(..), docOfHsDocContext, - - -- FsEnv - FastStringEnv, emptyFsEnv, lookupFsEnv, extendFsEnv, mkFsEnv + HsDocContext(..), docOfHsDocContext ) where #include "HsVersions.h" @@ -59,7 +58,6 @@ import NameSet import NameEnv import Avail import Module -import UniqFM import ConLike import DataCon ( dataConFieldLabels, dataConTyCon ) import TyCon ( isTupleTyCon, tyConArity ) @@ -270,22 +268,29 @@ lookupExactOcc name ; return name } - (gre:_) -> return (gre_name gre) } + [gre] -> return (gre_name gre) + (gre:_) -> do {addErr dup_nm_err + ; return (gre_name gre) + } -- We can get more than one GRE here, if there are multiple - -- bindings for the same name; but there will already be a - -- reported error for the duplicate. (If we add the error - -- rather than stopping when we encounter it.) - -- So all we need do here is not crash. - -- Example is Trac #8932: + -- bindings for the same name. 
Sometimes they are caught later + -- by findLocalDupsRdrEnv, like in this example (Trac #8932): -- $( [d| foo :: a->a; foo x = x |]) -- foo = True - -- Here the 'foo' in the splice turns into an Exact Name + -- But when the names are totally identical, we panic (Trac #7241): + -- $(newName "Foo" >>= \o -> return [DataD [] o [] [RecC o []] [''Show]]) + -- So, let's emit an error here, even if it will lead to duplication in some cases. + } where exact_nm_err = hang (ptext (sLit "The exact Name") <+> quotes (ppr name) <+> ptext (sLit "is not in scope")) 2 (vcat [ ptext (sLit "Probable cause: you used a unique Template Haskell name (NameU), ") , ptext (sLit "perhaps via newName, but did not bind it") , ptext (sLit "If that's it, then -ddump-splices might be useful") ]) + dup_nm_err = hang (ptext (sLit "Duplicate exact Name") <+> quotes (ppr $ nameOccName name)) + 2 (vcat [ ptext (sLit "Probable cause: you used a unique Template Haskell name (NameU), ") + , ptext (sLit "perhaps via newName, but bound it multiple times") + , ptext (sLit "If that's it, then -ddump-splices might be useful") ]) ----------------------------------------------- lookupInstDeclBndr :: Name -> SDoc -> RdrName -> RnM Name @@ -1080,20 +1085,6 @@ deprecation declarations, and lookup of names in GHCi. \begin{code} -------------------------------- -type FastStringEnv a = UniqFM a -- Keyed by FastString - - -emptyFsEnv :: FastStringEnv a -lookupFsEnv :: FastStringEnv a -> FastString -> Maybe a -extendFsEnv :: FastStringEnv a -> FastString -> a -> FastStringEnv a -mkFsEnv :: [(FastString,a)] -> FastStringEnv a - -emptyFsEnv = emptyUFM -lookupFsEnv = lookupUFM -extendFsEnv = addToUFM -mkFsEnv = listToUFM - --------------------------------- type MiniFixityEnv = FastStringEnv (Located Fixity) -- Mini fixity env for the names we're about -- to bind, in a single binding group @@ -1461,7 +1452,7 @@ unknownNameSuggestErr where_look tried_rdr_name all_possibilities = [ (showPpr dflags r, (r, Left loc)) | (r,loc) <- local_possibilities local_env ] - ++ [ (showPpr dflags r, rp) | (r,rp) <- global_possibilities global_env ] + ++ [ (showPpr dflags r, rp) | (r, rp) <- global_possibilities global_env ] suggest = fuzzyLookup (showPpr dflags tried_rdr_name) all_possibilities perhaps = ptext (sLit "Perhaps you meant") @@ -1473,19 +1464,24 @@ unknownNameSuggestErr where_look tried_rdr_name ; return extra_err } where pp_item :: (RdrName, HowInScope) -> SDoc - pp_item (rdr, Left loc) = quotes (ppr rdr) <+> loc' -- Locally defined + pp_item (rdr, Left loc) = pp_ns rdr <+> quotes (ppr rdr) <+> loc' -- Locally defined where loc' = case loc of UnhelpfulSpan l -> parens (ppr l) RealSrcSpan l -> parens (ptext (sLit "line") <+> int (srcSpanStartLine l)) - pp_item (rdr, Right is) = quotes (ppr rdr) <+> -- Imported + pp_item (rdr, Right is) = pp_ns rdr <+> quotes (ppr rdr) <+> -- Imported parens (ptext (sLit "imported from") <+> ppr (is_mod is)) + pp_ns :: RdrName -> SDoc + pp_ns rdr | ns /= tried_ns = pprNameSpace ns + | otherwise = empty + where ns = rdrNameSpace rdr + tried_occ = rdrNameOcc tried_rdr_name tried_is_sym = isSymOcc tried_occ tried_ns = occNameSpace tried_occ tried_is_qual = isQual tried_rdr_name - correct_name_space occ = occNameSpace occ == tried_ns + correct_name_space occ = nameSpacesRelated (occNameSpace occ) tried_ns && isSymOcc occ == tried_is_sym -- Treat operator and non-operators as non-matching -- This heuristic avoids things like diff --git a/compiler/rename/RnExpr.lhs b/compiler/rename/RnExpr.lhs index 01e8a4492d..d680292a25 
100644 --- a/compiler/rename/RnExpr.lhs +++ b/compiler/rename/RnExpr.lhs @@ -10,6 +10,8 @@ general, all of these functions return a renamed thing, and a set of free variables. \begin{code} +{-# LANGUAGE CPP, ScopedTypeVariables #-} + module RnExpr ( rnLExpr, rnExpr, rnStmts ) where @@ -45,16 +47,6 @@ import Control.Monad import TysWiredIn ( nilDataConName ) \end{code} - -\begin{code} --- XXX -thenM :: Monad a => a b -> (b -> a c) -> a c -thenM = (>>=) - -thenM_ :: Monad a => a b -> a c -> a c -thenM_ = (>>) -\end{code} - %************************************************************************ %* * \subsubsection{Expressions} @@ -66,16 +58,13 @@ rnExprs :: [LHsExpr RdrName] -> RnM ([LHsExpr Name], FreeVars) rnExprs ls = rnExprs' ls emptyUniqSet where rnExprs' [] acc = return ([], acc) - rnExprs' (expr:exprs) acc - = rnLExpr expr `thenM` \ (expr', fvExpr) -> - + rnExprs' (expr:exprs) acc = + do { (expr', fvExpr) <- rnLExpr expr -- Now we do a "seq" on the free vars because typically it's small -- or empty, especially in very long lists of constants - let - acc' = acc `plusFV` fvExpr - in - acc' `seq` rnExprs' exprs acc' `thenM` \ (exprs', fvExprs) -> - return (expr':exprs', fvExprs) + ; let acc' = acc `plusFV` fvExpr + ; (exprs', fvExprs) <- acc' `seq` rnExprs' exprs acc' + ; return (expr':exprs', fvExprs) } \end{code} Variables. We look up the variable and return the resulting name. @@ -120,27 +109,25 @@ rnExpr (HsIPVar v) = return (HsIPVar v, emptyFVs) rnExpr (HsLit lit@(HsString s)) - = do { - opt_OverloadedStrings <- xoptM Opt_OverloadedStrings + = do { opt_OverloadedStrings <- xoptM Opt_OverloadedStrings ; if opt_OverloadedStrings then rnExpr (HsOverLit (mkHsIsString s placeHolderType)) - else -- Same as below - rnLit lit `thenM_` - return (HsLit lit, emptyFVs) - } + else do { + ; rnLit lit + ; return (HsLit lit, emptyFVs) } } rnExpr (HsLit lit) - = rnLit lit `thenM_` - return (HsLit lit, emptyFVs) + = do { rnLit lit + ; return (HsLit lit, emptyFVs) } rnExpr (HsOverLit lit) - = rnOverLit lit `thenM` \ (lit', fvs) -> - return (HsOverLit lit', fvs) + = do { (lit', fvs) <- rnOverLit lit + ; return (HsOverLit lit', fvs) } rnExpr (HsApp fun arg) - = rnLExpr fun `thenM` \ (fun',fvFun) -> - rnLExpr arg `thenM` \ (arg',fvArg) -> - return (HsApp fun' arg', fvFun `plusFV` fvArg) + = do { (fun',fvFun) <- rnLExpr fun + ; (arg',fvArg) <- rnLExpr arg + ; return (HsApp fun' arg', fvFun `plusFV` fvArg) } rnExpr (OpApp e1 (L op_loc (HsVar op_rdr)) _ e2) = do { (e1', fv_e1) <- rnLExpr e1 @@ -163,10 +150,10 @@ rnExpr (OpApp _ other_op _ _) , ptext (sLit "(Probably resulting from a Template Haskell splice)") ]) rnExpr (NegApp e _) - = rnLExpr e `thenM` \ (e', fv_e) -> - lookupSyntaxName negateName `thenM` \ (neg_name, fv_neg) -> - mkNegAppRn e' neg_name `thenM` \ final_e -> - return (final_e, fv_e `plusFV` fv_neg) + = do { (e', fv_e) <- rnLExpr e + ; (neg_name, fv_neg) <- lookupSyntaxName negateName + ; final_e <- mkNegAppRn e' neg_name + ; return (final_e, fv_e `plusFV` fv_neg) } ------------------------------------------ -- Template Haskell extensions @@ -178,10 +165,10 @@ rnExpr (HsSpliceE is_typed splice) = rnSpliceExpr is_typed splice rnExpr (HsQuasiQuoteE qq) - = runQuasiQuoteExpr qq `thenM` \ lexpr' -> - -- Wrap the result of the quasi-quoter in parens so that we don't - -- lose the outermost location set by runQuasiQuote (#7918) - rnExpr (HsPar lexpr') + = do { lexpr' <- runQuasiQuoteExpr qq + -- Wrap the result of the quasi-quoter in parens so that we don't + -- lose the outermost location set by 
runQuasiQuote (#7918) + ; rnExpr (HsPar lexpr') } --------------------------------------------- -- Sections @@ -205,33 +192,33 @@ rnExpr expr@(SectionR {}) --------------------------------------------- rnExpr (HsCoreAnn ann expr) - = rnLExpr expr `thenM` \ (expr', fvs_expr) -> - return (HsCoreAnn ann expr', fvs_expr) + = do { (expr', fvs_expr) <- rnLExpr expr + ; return (HsCoreAnn ann expr', fvs_expr) } rnExpr (HsSCC lbl expr) - = rnLExpr expr `thenM` \ (expr', fvs_expr) -> - return (HsSCC lbl expr', fvs_expr) + = do { (expr', fvs_expr) <- rnLExpr expr + ; return (HsSCC lbl expr', fvs_expr) } rnExpr (HsTickPragma info expr) - = rnLExpr expr `thenM` \ (expr', fvs_expr) -> - return (HsTickPragma info expr', fvs_expr) + = do { (expr', fvs_expr) <- rnLExpr expr + ; return (HsTickPragma info expr', fvs_expr) } rnExpr (HsLam matches) - = rnMatchGroup LambdaExpr rnLExpr matches `thenM` \ (matches', fvMatch) -> - return (HsLam matches', fvMatch) + = do { (matches', fvMatch) <- rnMatchGroup LambdaExpr rnLExpr matches + ; return (HsLam matches', fvMatch) } rnExpr (HsLamCase arg matches) - = rnMatchGroup CaseAlt rnLExpr matches `thenM` \ (matches', fvs_ms) -> - return (HsLamCase arg matches', fvs_ms) + = do { (matches', fvs_ms) <- rnMatchGroup CaseAlt rnLExpr matches + ; return (HsLamCase arg matches', fvs_ms) } rnExpr (HsCase expr matches) - = rnLExpr expr `thenM` \ (new_expr, e_fvs) -> - rnMatchGroup CaseAlt rnLExpr matches `thenM` \ (new_matches, ms_fvs) -> - return (HsCase new_expr new_matches, e_fvs `plusFV` ms_fvs) + = do { (new_expr, e_fvs) <- rnLExpr expr + ; (new_matches, ms_fvs) <- rnMatchGroup CaseAlt rnLExpr matches + ; return (HsCase new_expr new_matches, e_fvs `plusFV` ms_fvs) } rnExpr (HsLet binds expr) - = rnLocalBindsAndThen binds $ \ binds' -> - rnLExpr expr `thenM` \ (expr',fvExpr) -> - return (HsLet binds' expr', fvExpr) + = rnLocalBindsAndThen binds $ \binds' -> do + { (expr',fvExpr) <- rnLExpr expr + ; return (HsLet binds' expr', fvExpr) } rnExpr (HsDo do_or_lc stmts _) = do { ((stmts', _), fvs) <- rnStmts do_or_lc rnLExpr stmts (\ _ -> return ((), emptyFVs)) @@ -248,8 +235,8 @@ rnExpr (ExplicitList _ _ exps) return (ExplicitList placeHolderType Nothing exps', fvs) } rnExpr (ExplicitPArr _ exps) - = rnExprs exps `thenM` \ (exps', fvs) -> - return (ExplicitPArr placeHolderType exps', fvs) + = do { (exps', fvs) <- rnExprs exps + ; return (ExplicitPArr placeHolderType exps', fvs) } rnExpr (ExplicitTuple tup_args boxity) = do { checkTupleSection tup_args @@ -290,8 +277,8 @@ rnExpr (HsMultiIf ty alts) ; return (HsMultiIf ty alts', fvs) } rnExpr (HsType a) - = rnLHsType HsTypeCtx a `thenM` \ (t, fvT) -> - return (HsType t, fvT) + = do { (t, fvT) <- rnLHsType HsTypeCtx a + ; return (HsType t, fvT) } rnExpr (ArithSeq _ _ seq) = do { opt_OverloadedLists <- xoptM Opt_OverloadedLists @@ -304,8 +291,8 @@ rnExpr (ArithSeq _ _ seq) return (ArithSeq noPostTcExpr Nothing new_seq, fvs) } rnExpr (PArrSeq _ seq) - = rnArithSeq seq `thenM` \ (new_seq, fvs) -> - return (PArrSeq noPostTcExpr new_seq, fvs) + = do { (new_seq, fvs) <- rnArithSeq seq + ; return (PArrSeq noPostTcExpr new_seq, fvs) } \end{code} These three are pattern syntax appearing in expressions. 
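Most of the RnExpr changes in this patch are a mechanical rewrite from the now-deleted local thenM/thenM_ combinators (plain synonyms for (>>=) and (>>)) into do-notation. A standalone sketch of the pattern, using hypothetical names rather than code from the patch:

\begin{code}
-- Not part of the patch: the refactoring pattern applied throughout RnExpr.
thenM :: Monad m => m a -> (a -> m b) -> m b
thenM = (>>=)

-- Old style, as it appeared before the patch:
renamePairOld :: Monad m => m a -> m b -> m (a, b)
renamePairOld rn1 rn2 =
  rn1 `thenM` \ x ->
  rn2 `thenM` \ y ->
  return (x, y)

-- New style, as introduced by the patch:
renamePairNew :: Monad m => m a -> m b -> m (a, b)
renamePairNew rn1 rn2
  = do { x <- rn1
       ; y <- rn2
       ; return (x, y) }
\end{code}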
@@ -332,9 +319,9 @@ rnExpr e@(ELazyPat {}) = patSynErr e \begin{code} rnExpr (HsProc pat body) = newArrowScope $ - rnPat ProcExpr pat $ \ pat' -> - rnCmdTop body `thenM` \ (body',fvBody) -> - return (HsProc pat' body', fvBody) + rnPat ProcExpr pat $ \ pat' -> do + { (body',fvBody) <- rnCmdTop body + ; return (HsProc pat' body', fvBody) } -- Ideally, these would be done in parsing, but to keep parsing simple, we do it here. rnExpr e@(HsArrApp {}) = arrowFail e @@ -402,9 +389,9 @@ rnHsRecBinds ctxt rec_binds@(HsRecFields { rec_dotdot = dd }) rnCmdArgs :: [LHsCmdTop RdrName] -> RnM ([LHsCmdTop Name], FreeVars) rnCmdArgs [] = return ([], emptyFVs) rnCmdArgs (arg:args) - = rnCmdTop arg `thenM` \ (arg',fvArg) -> - rnCmdArgs args `thenM` \ (args',fvArgs) -> - return (arg':args', fvArg `plusFV` fvArgs) + = do { (arg',fvArg) <- rnCmdTop arg + ; (args',fvArgs) <- rnCmdArgs args + ; return (arg':args', fvArg `plusFV` fvArgs) } rnCmdTop :: LHsCmdTop RdrName -> RnM (LHsCmdTop Name, FreeVars) rnCmdTop = wrapLocFstM rnCmdTop' @@ -425,10 +412,10 @@ rnLCmd = wrapLocFstM rnCmd rnCmd :: HsCmd RdrName -> RnM (HsCmd Name, FreeVars) rnCmd (HsCmdArrApp arrow arg _ ho rtl) - = select_arrow_scope (rnLExpr arrow) `thenM` \ (arrow',fvArrow) -> - rnLExpr arg `thenM` \ (arg',fvArg) -> - return (HsCmdArrApp arrow' arg' placeHolderType ho rtl, - fvArrow `plusFV` fvArg) + = do { (arrow',fvArrow) <- select_arrow_scope (rnLExpr arrow) + ; (arg',fvArg) <- rnLExpr arg + ; return (HsCmdArrApp arrow' arg' placeHolderType ho rtl, + fvArrow `plusFV` fvArg) } where select_arrow_scope tc = case ho of HsHigherOrderApp -> tc @@ -441,42 +428,37 @@ rnCmd (HsCmdArrApp arrow arg _ ho rtl) -- infix form rnCmd (HsCmdArrForm op (Just _) [arg1, arg2]) - = escapeArrowScope (rnLExpr op) - `thenM` \ (op',fv_op) -> - let L _ (HsVar op_name) = op' in - rnCmdTop arg1 `thenM` \ (arg1',fv_arg1) -> - rnCmdTop arg2 `thenM` \ (arg2',fv_arg2) -> - + = do { (op',fv_op) <- escapeArrowScope (rnLExpr op) + ; let L _ (HsVar op_name) = op' + ; (arg1',fv_arg1) <- rnCmdTop arg1 + ; (arg2',fv_arg2) <- rnCmdTop arg2 -- Deal with fixity - - lookupFixityRn op_name `thenM` \ fixity -> - mkOpFormRn arg1' op' fixity arg2' `thenM` \ final_e -> - - return (final_e, - fv_arg1 `plusFV` fv_op `plusFV` fv_arg2) + ; fixity <- lookupFixityRn op_name + ; final_e <- mkOpFormRn arg1' op' fixity arg2' + ; return (final_e, fv_arg1 `plusFV` fv_op `plusFV` fv_arg2) } rnCmd (HsCmdArrForm op fixity cmds) - = escapeArrowScope (rnLExpr op) `thenM` \ (op',fvOp) -> - rnCmdArgs cmds `thenM` \ (cmds',fvCmds) -> - return (HsCmdArrForm op' fixity cmds', fvOp `plusFV` fvCmds) + = do { (op',fvOp) <- escapeArrowScope (rnLExpr op) + ; (cmds',fvCmds) <- rnCmdArgs cmds + ; return (HsCmdArrForm op' fixity cmds', fvOp `plusFV` fvCmds) } rnCmd (HsCmdApp fun arg) - = rnLCmd fun `thenM` \ (fun',fvFun) -> - rnLExpr arg `thenM` \ (arg',fvArg) -> - return (HsCmdApp fun' arg', fvFun `plusFV` fvArg) + = do { (fun',fvFun) <- rnLCmd fun + ; (arg',fvArg) <- rnLExpr arg + ; return (HsCmdApp fun' arg', fvFun `plusFV` fvArg) } rnCmd (HsCmdLam matches) - = rnMatchGroup LambdaExpr rnLCmd matches `thenM` \ (matches', fvMatch) -> - return (HsCmdLam matches', fvMatch) + = do { (matches', fvMatch) <- rnMatchGroup LambdaExpr rnLCmd matches + ; return (HsCmdLam matches', fvMatch) } rnCmd (HsCmdPar e) = do { (e', fvs_e) <- rnLCmd e ; return (HsCmdPar e', fvs_e) } rnCmd (HsCmdCase expr matches) - = rnLExpr expr `thenM` \ (new_expr, e_fvs) -> - rnMatchGroup CaseAlt rnLCmd matches `thenM` \ (new_matches, ms_fvs) -> - return 
(HsCmdCase new_expr new_matches, e_fvs `plusFV` ms_fvs) + = do { (new_expr, e_fvs) <- rnLExpr expr + ; (new_matches, ms_fvs) <- rnMatchGroup CaseAlt rnLCmd matches + ; return (HsCmdCase new_expr new_matches, e_fvs `plusFV` ms_fvs) } rnCmd (HsCmdIf _ p b1 b2) = do { (p', fvP) <- rnLExpr p @@ -486,9 +468,9 @@ rnCmd (HsCmdIf _ p b1 b2) ; return (HsCmdIf mb_ite p' b1' b2', plusFVs [fvITE, fvP, fvB1, fvB2]) } rnCmd (HsCmdLet binds cmd) - = rnLocalBindsAndThen binds $ \ binds' -> - rnLCmd cmd `thenM` \ (cmd',fvExpr) -> - return (HsCmdLet binds' cmd', fvExpr) + = rnLocalBindsAndThen binds $ \ binds' -> do + { (cmd',fvExpr) <- rnLCmd cmd + ; return (HsCmdLet binds' cmd', fvExpr) } rnCmd (HsCmdDo stmts _) = do { ((stmts', _), fvs) <- rnStmts ArrowExpr rnLCmd stmts (\ _ -> return ((), emptyFVs)) @@ -578,25 +560,25 @@ methodNamesStmt (TransStmt {}) = emptyFVs \begin{code} rnArithSeq :: ArithSeqInfo RdrName -> RnM (ArithSeqInfo Name, FreeVars) rnArithSeq (From expr) - = rnLExpr expr `thenM` \ (expr', fvExpr) -> - return (From expr', fvExpr) + = do { (expr', fvExpr) <- rnLExpr expr + ; return (From expr', fvExpr) } rnArithSeq (FromThen expr1 expr2) - = rnLExpr expr1 `thenM` \ (expr1', fvExpr1) -> - rnLExpr expr2 `thenM` \ (expr2', fvExpr2) -> - return (FromThen expr1' expr2', fvExpr1 `plusFV` fvExpr2) + = do { (expr1', fvExpr1) <- rnLExpr expr1 + ; (expr2', fvExpr2) <- rnLExpr expr2 + ; return (FromThen expr1' expr2', fvExpr1 `plusFV` fvExpr2) } rnArithSeq (FromTo expr1 expr2) - = rnLExpr expr1 `thenM` \ (expr1', fvExpr1) -> - rnLExpr expr2 `thenM` \ (expr2', fvExpr2) -> - return (FromTo expr1' expr2', fvExpr1 `plusFV` fvExpr2) + = do { (expr1', fvExpr1) <- rnLExpr expr1 + ; (expr2', fvExpr2) <- rnLExpr expr2 + ; return (FromTo expr1' expr2', fvExpr1 `plusFV` fvExpr2) } rnArithSeq (FromThenTo expr1 expr2 expr3) - = rnLExpr expr1 `thenM` \ (expr1', fvExpr1) -> - rnLExpr expr2 `thenM` \ (expr2', fvExpr2) -> - rnLExpr expr3 `thenM` \ (expr3', fvExpr3) -> - return (FromThenTo expr1' expr2' expr3', - plusFVs [fvExpr1, fvExpr2, fvExpr3]) + = do { (expr1', fvExpr1) <- rnLExpr expr1 + ; (expr2', fvExpr2) <- rnLExpr expr2 + ; (expr3', fvExpr3) <- rnLExpr expr3 + ; return (FromThenTo expr1' expr2' expr3', + plusFVs [fvExpr1, fvExpr2, fvExpr3]) } \end{code} %************************************************************************ @@ -959,21 +941,19 @@ rn_rec_stmt rnBody _ (L loc (LastStmt body _)) _ L loc (LastStmt body' ret_op))] } rn_rec_stmt rnBody _ (L loc (BodyStmt body _ _ _)) _ - = rnBody body `thenM` \ (body', fvs) -> - lookupSyntaxName thenMName `thenM` \ (then_op, fvs1) -> - return [(emptyNameSet, fvs `plusFV` fvs1, emptyNameSet, - L loc (BodyStmt body' then_op noSyntaxExpr placeHolderType))] + = do { (body', fvs) <- rnBody body + ; (then_op, fvs1) <- lookupSyntaxName thenMName + ; return [(emptyNameSet, fvs `plusFV` fvs1, emptyNameSet, + L loc (BodyStmt body' then_op noSyntaxExpr placeHolderType))] } rn_rec_stmt rnBody _ (L loc (BindStmt pat' body _ _)) fv_pat - = rnBody body `thenM` \ (body', fv_expr) -> - lookupSyntaxName bindMName `thenM` \ (bind_op, fvs1) -> - lookupSyntaxName failMName `thenM` \ (fail_op, fvs2) -> - let - bndrs = mkNameSet (collectPatBinders pat') - fvs = fv_expr `plusFV` fv_pat `plusFV` fvs1 `plusFV` fvs2 - in - return [(bndrs, fvs, bndrs `intersectNameSet` fvs, - L loc (BindStmt pat' body' bind_op fail_op))] + = do { (body', fv_expr) <- rnBody body + ; (bind_op, fvs1) <- lookupSyntaxName bindMName + ; (fail_op, fvs2) <- lookupSyntaxName failMName + ; let bndrs = mkNameSet 
(collectPatBinders pat') + fvs = fv_expr `plusFV` fv_pat `plusFV` fvs1 `plusFV` fvs2 + ; return [(bndrs, fvs, bndrs `intersectNameSet` fvs, + L loc (BindStmt pat' body' bind_op fail_op))] } rn_rec_stmt _ _ (L _ (LetStmt binds@(HsIPBinds _))) _ = failWith (badIpBinds (ptext (sLit "an mdo expression")) binds) @@ -1003,9 +983,9 @@ rn_rec_stmts :: Outputable (body RdrName) => -> [Name] -> [(LStmtLR Name RdrName (Located (body RdrName)), FreeVars)] -> RnM [Segment (LStmt Name (Located (body Name)))] -rn_rec_stmts rnBody bndrs stmts = - mapM (uncurry (rn_rec_stmt rnBody bndrs)) stmts `thenM` \ segs_s -> - return (concat segs_s) +rn_rec_stmts rnBody bndrs stmts + = do { segs_s <- mapM (uncurry (rn_rec_stmt rnBody bndrs)) stmts + ; return (concat segs_s) } --------------------------------------------- segmentRecStmts :: HsStmtContext Name diff --git a/compiler/rename/RnNames.lhs b/compiler/rename/RnNames.lhs index 7f6a840295..db4258607a 100644 --- a/compiler/rename/RnNames.lhs +++ b/compiler/rename/RnNames.lhs @@ -4,6 +4,8 @@ \section[RnNames]{Extracting imported and top-level names in scope} \begin{code} +{-# LANGUAGE CPP, NondecreasingIndentation #-} + module RnNames ( rnImports, getLocalNonValBinders, rnExports, extendGlobalRdrEnvRn, @@ -1301,11 +1303,14 @@ type ImportDeclUsage warnUnusedImportDecls :: TcGblEnv -> RnM () warnUnusedImportDecls gbl_env = do { uses <- readMutVar (tcg_used_rdrnames gbl_env) - ; let imports = filter explicit_import (tcg_rn_imports gbl_env) + ; let user_imports = filterOut (ideclImplicit . unLoc) (tcg_rn_imports gbl_env) + -- This whole function deals only with *user* imports + -- both for warning about unnecessary ones, and for + -- deciding the minimal ones rdr_env = tcg_rdr_env gbl_env ; let usage :: [ImportDeclUsage] - usage = findImportUsage imports rdr_env (Set.elems uses) + usage = findImportUsage user_imports rdr_env (Set.elems uses) ; traceRn (vcat [ ptext (sLit "Uses:") <+> ppr (Set.elems uses) , ptext (sLit "Import usage") <+> ppr usage]) @@ -1314,10 +1319,6 @@ warnUnusedImportDecls gbl_env ; whenGOptM Opt_D_dump_minimal_imports $ printMinimalImports usage } - where - explicit_import (L _ decl) = not (ideclImplicit decl) - -- Filter out the implicit Prelude import - -- which we do not want to bleat about \end{code} @@ -1433,6 +1434,11 @@ warnUnusedImport :: ImportDeclUsage -> RnM () warnUnusedImport (L loc decl, used, unused) | Just (False,[]) <- ideclHiding decl = return () -- Do not warn for 'import M()' + + | Just (True, hides) <- ideclHiding decl + , not (null hides) + , pRELUDE_NAME == unLoc (ideclName decl) + = return () -- Note [Do not warn about Prelude hiding] | null used = addWarnAt loc msg1 -- Nothing used; drop entire decl | null unused = return () -- Everything imported is used; nop | otherwise = addWarnAt loc msg2 -- Some imports are unused @@ -1452,6 +1458,19 @@ warnUnusedImport (L loc decl, used, unused) pp_not_used = text "is redundant" \end{code} +Note [Do not warn about Prelude hiding] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +We do not warn about + import Prelude hiding( x, y ) +because even if nothing else from Prelude is used, it may be essential to hide +x,y to avoid name-shadowing warnings. Example (Trac #9061) + import Prelude hiding( log ) + f x = log where log = () + + + +Note [Printing minimal imports] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To print the minimal imports we walk over the user-supplied import decls, and simply trim their import lists. NB that @@ -1462,6 +1481,7 @@ decls, and simply trim their import lists. 
NB that \begin{code} printMinimalImports :: [ImportDeclUsage] -> RnM () +-- See Note [Printing minimal imports] printMinimalImports imports_w_usage = do { imports' <- mapM mk_minimal imports_w_usage ; this_mod <- getModule diff --git a/compiler/rename/RnPat.lhs b/compiler/rename/RnPat.lhs index 3c48f34032..48fffce374 100644 --- a/compiler/rename/RnPat.lhs +++ b/compiler/rename/RnPat.lhs @@ -10,13 +10,8 @@ general, all of these functions return a renamed thing, and a set of free variables. \begin{code} --- The above warning supression flag is a temporary kludge. --- While working on this module you are encouraged to remove it and --- detab the module (please do the detabbing in a separate patch). See --- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces --- for details +{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-} -{-# LANGUAGE ScopedTypeVariables #-} module RnPat (-- main entry points rnPat, rnPats, rnBindPat, rnPatAndThen, @@ -439,7 +434,7 @@ rnPatAndThen mk (PArrPat pats _) rnPatAndThen mk (TuplePat pats boxed _) = do { liftCps $ checkTupSize (length pats) ; pats' <- rnLPatsAndThen mk pats - ; return (TuplePat pats' boxed placeHolderType) } + ; return (TuplePat pats' boxed []) } rnPatAndThen _ (SplicePat splice) = do { -- XXX How to deal with free variables? diff --git a/compiler/rename/RnSource.lhs b/compiler/rename/RnSource.lhs index fbc22c0c28..9bc0e44780 100644 --- a/compiler/rename/RnSource.lhs +++ b/compiler/rename/RnSource.lhs @@ -4,6 +4,8 @@ \section[RnSource]{Main pass of renamer} \begin{code} +{-# LANGUAGE CPP, ScopedTypeVariables #-} + module RnSource ( rnSrcDecls, addTcgDUs, rnTyClDecls, findSplice ) where @@ -443,12 +445,14 @@ rnSrcInstDecl (ClsInstD { cid_inst = cid }) rnClsInstDecl :: ClsInstDecl RdrName -> RnM (ClsInstDecl Name, FreeVars) rnClsInstDecl (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = mbinds , cid_sigs = uprags, cid_tyfam_insts = ats + , cid_overlap_mode = oflag , cid_datafam_insts = adts }) -- Used for both source and interface file decls = do { (inst_ty', inst_fvs) <- rnLHsInstType (text "In an instance declaration") inst_ty ; case splitLHsInstDeclTy_maybe inst_ty' of { Nothing -> return (ClsInstDecl { cid_poly_ty = inst_ty', cid_binds = emptyLHsBinds , cid_sigs = [], cid_tyfam_insts = [] + , cid_overlap_mode = oflag , cid_datafam_insts = [] } , inst_fvs) ; Just (inst_tyvars, _, L _ cls,_) -> @@ -461,7 +465,7 @@ rnClsInstDecl (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = mbinds ; traceRn (text "rnSrcInstDecl" <+> ppr inst_ty' $$ ppr inst_tyvars $$ ppr ktv_names) ; ((ats', adts', other_sigs'), more_fvs) <- extendTyVarEnvFVRn ktv_names $ - do { (ats', at_fvs) <- rnATInstDecls rnTyFamInstDecl cls inst_tyvars ats + do { (ats', at_fvs) <- rnATInstDecls rnTyFamInstDecl cls inst_tyvars ats ; (adts', adt_fvs) <- rnATInstDecls rnDataFamInstDecl cls inst_tyvars adts ; (other_sigs', sig_fvs) <- renameSigs (InstDeclCtxt cls) other_sigs ; return ( (ats', adts', other_sigs') @@ -491,6 +495,7 @@ rnClsInstDecl (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = mbinds `plusFV` inst_fvs ; return (ClsInstDecl { cid_poly_ty = inst_ty', cid_binds = mbinds' , cid_sigs = uprags', cid_tyfam_insts = ats' + , cid_overlap_mode = oflag , cid_datafam_insts = adts' }, all_fvs) } } } -- We return the renamed associated data type declarations so @@ -559,14 +564,29 @@ rnTyFamInstDecl mb_cls (TyFamInstDecl { tfid_eqn = L loc eqn }) rnTyFamInstEqn :: Maybe (Name, [Name]) -> TyFamInstEqn RdrName -> RnM (TyFamInstEqn Name, FreeVars) -rnTyFamInstEqn mb_cls 
(TyFamInstEqn { tfie_tycon = tycon - , tfie_pats = HsWB { hswb_cts = pats } - , tfie_rhs = rhs }) +rnTyFamInstEqn mb_cls (TyFamEqn { tfe_tycon = tycon + , tfe_pats = HsWB { hswb_cts = pats } + , tfe_rhs = rhs }) = do { (tycon', pats', rhs', fvs) <- rnFamInstDecl (TySynCtx tycon) mb_cls tycon pats rhs rnTySyn - ; return (TyFamInstEqn { tfie_tycon = tycon' - , tfie_pats = pats' - , tfie_rhs = rhs' }, fvs) } + ; return (TyFamEqn { tfe_tycon = tycon' + , tfe_pats = pats' + , tfe_rhs = rhs' }, fvs) } + +rnTyFamDefltEqn :: Name + -> TyFamDefltEqn RdrName + -> RnM (TyFamDefltEqn Name, FreeVars) +rnTyFamDefltEqn cls (TyFamEqn { tfe_tycon = tycon + , tfe_pats = tyvars + , tfe_rhs = rhs }) + = bindHsTyVars ctx (Just cls) [] tyvars $ \ tyvars' -> + do { tycon' <- lookupFamInstName (Just cls) tycon + ; (rhs', fvs) <- rnLHsType ctx rhs + ; return (TyFamEqn { tfe_tycon = tycon' + , tfe_pats = tyvars' + , tfe_rhs = rhs' }, fvs) } + where + ctx = TyFamilyCtx tycon rnDataFamInstDecl :: Maybe (Name, [Name]) -> DataFamInstDecl RdrName @@ -585,7 +605,7 @@ rnDataFamInstDecl mb_cls (DataFamInstDecl { dfid_tycon = tycon Renaming of the associated types in instances. \begin{code} --- rename associated type family decl in class +-- Rename associated type family decl in class rnATDecls :: Name -- Class -> [LFamilyDecl RdrName] -> RnM ([LFamilyDecl Name], FreeVars) @@ -635,11 +655,11 @@ extendTyVarEnvForMethodBinds ktv_names thing_inside \begin{code} rnSrcDerivDecl :: DerivDecl RdrName -> RnM (DerivDecl Name, FreeVars) -rnSrcDerivDecl (DerivDecl ty) +rnSrcDerivDecl (DerivDecl ty overlap) = do { standalone_deriv_ok <- xoptM Opt_StandaloneDeriving ; unless standalone_deriv_ok (addErr standaloneDerivErr) ; (ty', fvs) <- rnLHsInstType (text "In a deriving declaration") ty - ; return (DerivDecl ty', fvs) } + ; return (DerivDecl ty' overlap, fvs) } standaloneDerivErr :: SDoc standaloneDerivErr @@ -936,7 +956,7 @@ rnTyClDecl (SynDecl { tcdLName = tycon, tcdTyVars = tyvars, tcdRhs = rhs }) do { (rhs', fvs) <- rnTySyn doc rhs ; return ((tyvars', rhs'), fvs) } ; return (SynDecl { tcdLName = tycon', tcdTyVars = tyvars' - , tcdRhs = rhs', tcdFVs = fvs }, fvs) } + , tcdRhs = rhs', tcdFVs = fvs }, fvs) } -- "data", "newtype" declarations -- both top level and (for an associated type) in an instance decl @@ -961,20 +981,20 @@ rnTyClDecl (ClassDecl {tcdCtxt = context, tcdLName = lcls, -- kind signatures on the tyvars -- Tyvars scope over superclass context and method signatures - ; ((tyvars', context', fds', ats', at_defs', sigs'), stuff_fvs) + ; ((tyvars', context', fds', ats', sigs'), stuff_fvs) <- bindHsTyVars cls_doc Nothing kvs tyvars $ \ tyvars' -> do -- Checks for distinct tyvars { (context', cxt_fvs) <- rnContext cls_doc context - ; fds' <- rnFds (docOfHsDocContext cls_doc) fds + ; fds' <- rnFds fds -- The fundeps have no free variables ; (ats', fv_ats) <- rnATDecls cls' ats - ; (at_defs', fv_at_defs) <- rnATInstDecls rnTyFamInstDecl cls' tyvars' at_defs ; (sigs', sig_fvs) <- renameSigs (ClsDeclCtxt cls') sigs ; let fvs = cxt_fvs `plusFV` sig_fvs `plusFV` - fv_ats `plusFV` - fv_at_defs - ; return ((tyvars', context', fds', ats', at_defs', sigs'), fvs) } + fv_ats + ; return ((tyvars', context', fds', ats', sigs'), fvs) } + + ; (at_defs', fv_at_defs) <- rnList (rnTyFamDefltEqn cls') at_defs -- No need to check for duplicate associated type decls -- since that is done by RnNames.extendGlobalRdrEnvRn @@ -1006,7 +1026,7 @@ rnTyClDecl (ClassDecl {tcdCtxt = context, tcdLName = lcls, -- Haddock docs ; docs' <- mapM (wrapLocM 
rnDocDecl) docs - ; let all_fvs = meth_fvs `plusFV` stuff_fvs + ; let all_fvs = meth_fvs `plusFV` stuff_fvs `plusFV` fv_at_defs ; return (ClassDecl { tcdCtxt = context', tcdLName = lcls', tcdTyVars = tyvars', tcdFDs = fds', tcdSigs = sigs', tcdMeths = mbinds', tcdATs = ats', tcdATDefs = at_defs', @@ -1404,21 +1424,20 @@ extendRecordFieldEnv tycl_decls inst_decls %********************************************************* \begin{code} -rnFds :: SDoc -> [Located (FunDep RdrName)] -> RnM [Located (FunDep Name)] - -rnFds doc fds +rnFds :: [Located (FunDep RdrName)] -> RnM [Located (FunDep Name)] +rnFds fds = mapM (wrapLocM rn_fds) fds where rn_fds (tys1, tys2) - = do { tys1' <- rnHsTyVars doc tys1 - ; tys2' <- rnHsTyVars doc tys2 + = do { tys1' <- rnHsTyVars tys1 + ; tys2' <- rnHsTyVars tys2 ; return (tys1', tys2') } -rnHsTyVars :: SDoc -> [RdrName] -> RnM [Name] -rnHsTyVars doc tvs = mapM (rnHsTyVar doc) tvs +rnHsTyVars :: [RdrName] -> RnM [Name] +rnHsTyVars tvs = mapM rnHsTyVar tvs -rnHsTyVar :: SDoc -> RdrName -> RnM Name -rnHsTyVar _doc tyvar = lookupOccRn tyvar +rnHsTyVar :: RdrName -> RnM Name +rnHsTyVar tyvar = lookupOccRn tyvar \end{code} diff --git a/compiler/rename/RnSplice.lhs b/compiler/rename/RnSplice.lhs index e0614d4248..3c0c145e6b 100644 --- a/compiler/rename/RnSplice.lhs +++ b/compiler/rename/RnSplice.lhs @@ -1,4 +1,6 @@ \begin{code} +{-# LANGUAGE CPP #-} + module RnSplice ( rnTopSpliceDecls, rnSpliceType, rnSpliceExpr, rnSplicePat, rnSpliceDecl, diff --git a/compiler/rename/RnTypes.lhs b/compiler/rename/RnTypes.lhs index 23c54c3bed..2f9bfdd653 100644 --- a/compiler/rename/RnTypes.lhs +++ b/compiler/rename/RnTypes.lhs @@ -4,6 +4,8 @@ \section[RnSource]{Main pass of renamer} \begin{code} +{-# LANGUAGE CPP #-} + module RnTypes ( -- Type related stuff rnHsType, rnLHsType, rnLHsTypes, rnContext, @@ -360,8 +362,9 @@ bindHsTyVars doc mb_assoc kv_bndrs tv_bndrs thing_inside kvs_from_tv_bndrs = [ kv | L _ (KindedTyVar _ kind) <- tvs , let (_, kvs) = extractHsTyRdrTyVars kind , kv <- kvs ] - all_kvs = filterOut (`elemLocalRdrEnv` rdr_env) $ - nub (kv_bndrs ++ kvs_from_tv_bndrs) + all_kvs' = nub (kv_bndrs ++ kvs_from_tv_bndrs) + all_kvs = filterOut (`elemLocalRdrEnv` rdr_env) all_kvs' + overlap_kvs = [ kv | kv <- all_kvs, any ((==) kv . hsLTyVarName) tvs ] -- These variables appear both as kind and type variables -- in the same declaration; eg type family T (x :: *) (y :: x) @@ -395,8 +398,12 @@ bindHsTyVars doc mb_assoc kv_bndrs tv_bndrs thing_inside ; (tv_bndrs', fvs1) <- mapFvRn rn_tv_bndr tvs ; (res, fvs2) <- bindLocalNamesFV (map hsLTyVarName tv_bndrs') $ - do { env <- getLocalRdrEnv - ; traceRn (text "bhtv" <+> (ppr tvs $$ ppr all_kvs $$ ppr env)) + do { inner_rdr_env <- getLocalRdrEnv + ; traceRn (text "bhtv" <+> vcat + [ ppr tvs, ppr kv_bndrs, ppr kvs_from_tv_bndrs + , ppr $ map (`elemLocalRdrEnv` rdr_env) all_kvs' + , ppr $ map (getUnique . 
rdrNameOcc) all_kvs' + , ppr all_kvs, ppr rdr_env, ppr inner_rdr_env ]) ; thing_inside (HsQTvs { hsq_tvs = tv_bndrs', hsq_kvs = kv_names }) } ; return (res, fvs1 `plusFV` fvs2) } } diff --git a/compiler/simplCore/CSE.lhs b/compiler/simplCore/CSE.lhs index 691f883d02..90715737c2 100644 --- a/compiler/simplCore/CSE.lhs +++ b/compiler/simplCore/CSE.lhs @@ -4,6 +4,8 @@ \section{Common subexpression} \begin{code} +{-# LANGUAGE CPP #-} + module CSE (cseProgram) where #include "HsVersions.h" diff --git a/compiler/simplCore/CoreMonad.lhs b/compiler/simplCore/CoreMonad.lhs index b2f697a632..c06036044d 100644 --- a/compiler/simplCore/CoreMonad.lhs +++ b/compiler/simplCore/CoreMonad.lhs @@ -4,15 +4,14 @@ \section[CoreMonad]{The core pipeline monad} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, UndecidableInstances #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See -- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces -- for details -{-# LANGUAGE UndecidableInstances #-} - module CoreMonad ( -- * Configuration of the core-to-core passes CoreToDo(..), runWhen, runMaybe, diff --git a/compiler/simplCore/FloatIn.lhs b/compiler/simplCore/FloatIn.lhs index 8a35749c67..2cf886c5c6 100644 --- a/compiler/simplCore/FloatIn.lhs +++ b/compiler/simplCore/FloatIn.lhs @@ -12,7 +12,8 @@ case, so that we don't allocate things, save them on the stack, and then discover that they aren't needed in the chosen branch. \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/simplCore/FloatOut.lhs b/compiler/simplCore/FloatOut.lhs index fbe8a3eb8a..dbab552431 100644 --- a/compiler/simplCore/FloatOut.lhs +++ b/compiler/simplCore/FloatOut.lhs @@ -6,8 +6,9 @@ ``Long-distance'' floating of bindings towards the top level. \begin{code} +{-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-orphans #-} -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/simplCore/LiberateCase.lhs b/compiler/simplCore/LiberateCase.lhs index a89396b782..2593ab159c 100644 --- a/compiler/simplCore/LiberateCase.lhs +++ b/compiler/simplCore/LiberateCase.lhs @@ -4,7 +4,8 @@ \section[LiberateCase]{Unroll recursion to allow evals to be lifted from a loop} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/simplCore/OccurAnal.lhs b/compiler/simplCore/OccurAnal.lhs index 2487787c8d..c9323359c5 100644 --- a/compiler/simplCore/OccurAnal.lhs +++ b/compiler/simplCore/OccurAnal.lhs @@ -12,7 +12,8 @@ The occurrence analyser re-typechecks a core expression, returning a new core expression with (hopefully) improved usage information. 
\begin{code} -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE CPP, BangPatterns #-} + module OccurAnal ( occurAnalysePgm, occurAnalyseExpr, occurAnalyseExpr_NoBinderSwap ) where diff --git a/compiler/simplCore/SAT.lhs b/compiler/simplCore/SAT.lhs index bc1ce42cd6..92ebdfe389 100644 --- a/compiler/simplCore/SAT.lhs +++ b/compiler/simplCore/SAT.lhs @@ -49,7 +49,8 @@ essential to make this work well! \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/simplCore/SetLevels.lhs b/compiler/simplCore/SetLevels.lhs index 6edadb8bd9..225d5d612e 100644 --- a/compiler/simplCore/SetLevels.lhs +++ b/compiler/simplCore/SetLevels.lhs @@ -42,7 +42,8 @@ the scrutinee of the case, and we can inline it. \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/simplCore/SimplCore.lhs b/compiler/simplCore/SimplCore.lhs index 436d1b63aa..59b39a9c60 100644 --- a/compiler/simplCore/SimplCore.lhs +++ b/compiler/simplCore/SimplCore.lhs @@ -4,6 +4,8 @@ \section[SimplCore]{Driver for simplifying @Core@ programs} \begin{code} +{-# LANGUAGE CPP #-} + module SimplCore ( core2core, simplifyExpr ) where #include "HsVersions.h" diff --git a/compiler/simplCore/SimplEnv.lhs b/compiler/simplCore/SimplEnv.lhs index 5f1013def8..1c5ebc501b 100644 --- a/compiler/simplCore/SimplEnv.lhs +++ b/compiler/simplCore/SimplEnv.lhs @@ -4,6 +4,8 @@ \section[SimplMonad]{The simplifier Monad} \begin{code} +{-# LANGUAGE CPP #-} + module SimplEnv ( InId, InBind, InExpr, InAlt, InArg, InType, InBndr, InVar, OutId, OutTyVar, OutBind, OutExpr, OutAlt, OutArg, OutType, OutBndr, OutVar, diff --git a/compiler/simplCore/SimplUtils.lhs b/compiler/simplCore/SimplUtils.lhs index 59e5d4adc1..14789c44a4 100644 --- a/compiler/simplCore/SimplUtils.lhs +++ b/compiler/simplCore/SimplUtils.lhs @@ -4,6 +4,8 @@ \section[SimplUtils]{The simplifier utilities} \begin{code} +{-# LANGUAGE CPP #-} + module SimplUtils ( -- Rebuilding mkLam, mkCase, prepareAlts, tryEtaExpandRhs, diff --git a/compiler/simplCore/Simplify.lhs b/compiler/simplCore/Simplify.lhs index 02470be050..1125c2e883 100644 --- a/compiler/simplCore/Simplify.lhs +++ b/compiler/simplCore/Simplify.lhs @@ -4,6 +4,8 @@ \section[Simplify]{The main module of the simplifier} \begin{code} +{-# LANGUAGE CPP #-} + module Simplify ( simplTopBinds, simplExpr ) where #include "HsVersions.h" @@ -219,9 +221,7 @@ simplTopBinds env0 binds0 -- It's rather as if the top-level binders were imported. -- See note [Glomming] in OccurAnal. ; env1 <- simplRecBndrs env0 (bindersOfBinds binds0) - ; dflags <- getDynFlags - ; let dump_flag = dopt Opt_D_verbose_core2core dflags - ; env2 <- simpl_binds dump_flag env1 binds0 + ; env2 <- simpl_binds env1 binds0 ; freeTick SimplifierDone ; return env2 } where @@ -229,16 +229,10 @@ simplTopBinds env0 binds0 -- they should have their fragile IdInfo zapped (notably occurrence info) -- That's why we run down binds and bndrs' simultaneously. 
-- - -- The dump-flag emits a trace for each top-level binding, which - -- helps to locate the tracing for inlining and rule firing - simpl_binds :: Bool -> SimplEnv -> [InBind] -> SimplM SimplEnv - simpl_binds _ env [] = return env - simpl_binds dump env (bind:binds) = do { env' <- trace_bind dump bind $ - simpl_bind env bind - ; simpl_binds dump env' binds } - - trace_bind True bind = pprTrace "SimplBind" (ppr (bindersOf bind)) - trace_bind False _ = \x -> x + simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv + simpl_binds env [] = return env + simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind + ; simpl_binds env' binds } simpl_bind env (Rec pairs) = simplRecBind env TopLevel pairs simpl_bind env (NonRec b r) = simplRecOrTopPair env' TopLevel NonRecursive b b' r @@ -293,12 +287,21 @@ simplRecOrTopPair :: SimplEnv -> SimplM SimplEnv -- Returns an env that includes the binding simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs - = do dflags <- getDynFlags - -- Check for unconditional inline - if preInlineUnconditionally dflags env top_lvl old_bndr rhs + = do { dflags <- getDynFlags + ; trace_bind dflags $ + if preInlineUnconditionally dflags env top_lvl old_bndr rhs + -- Check for unconditional inline then do tick (PreInlineUnconditionally old_bndr) return (extendIdSubst env old_bndr (mkContEx env rhs)) - else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env + else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env } + where + trace_bind dflags thing_inside + | not (dopt Opt_D_verbose_core2core dflags) + = thing_inside + | otherwise + = pprTrace "SimplBind" (ppr old_bndr) thing_inside + -- trace_bind emits a trace for each top-level binding, which + -- helps to locate the tracing for inlining and rule firing \end{code} diff --git a/compiler/simplStg/SimplStg.lhs b/compiler/simplStg/SimplStg.lhs index c43b6526b5..4d33e3392e 100644 --- a/compiler/simplStg/SimplStg.lhs +++ b/compiler/simplStg/SimplStg.lhs @@ -4,6 +4,8 @@ \section[SimplStg]{Driver for simplifying @STG@ programs} \begin{code} +{-# LANGUAGE CPP #-} + module SimplStg ( stg2stg ) where #include "HsVersions.h" diff --git a/compiler/simplStg/StgStats.lhs b/compiler/simplStg/StgStats.lhs index 5424495468..2a776757da 100644 --- a/compiler/simplStg/StgStats.lhs +++ b/compiler/simplStg/StgStats.lhs @@ -21,6 +21,8 @@ The program gather statistics about \end{enumerate} \begin{code} +{-# LANGUAGE CPP #-} + module StgStats ( showStgStats ) where #include "HsVersions.h" diff --git a/compiler/simplStg/UnariseStg.lhs b/compiler/simplStg/UnariseStg.lhs index b1717ad120..1f121f71fd 100644 --- a/compiler/simplStg/UnariseStg.lhs +++ b/compiler/simplStg/UnariseStg.lhs @@ -27,6 +27,8 @@ which is the Arity taking into account any expanded arguments, and corresponds t the number of (possibly-void) *registers* arguments will arrive in. \begin{code} +{-# LANGUAGE CPP #-} + module UnariseStg (unarise) where #include "HsVersions.h" diff --git a/compiler/specialise/Rules.lhs b/compiler/specialise/Rules.lhs index 70fc09a2ef..2abf7fbdca 100644 --- a/compiler/specialise/Rules.lhs +++ b/compiler/specialise/Rules.lhs @@ -4,6 +4,8 @@ \section[CoreRules]{Transformation rules} \begin{code} +{-# LANGUAGE CPP #-} + -- | Functions for collecting together and applying rewrite rules to a module. -- The 'CoreRule' datatype itself is declared elsewhere. 
module Rules ( diff --git a/compiler/specialise/SpecConstr.lhs b/compiler/specialise/SpecConstr.lhs index 86a56f4013..24820eba40 100644 --- a/compiler/specialise/SpecConstr.lhs +++ b/compiler/specialise/SpecConstr.lhs @@ -9,6 +9,8 @@ ToDo [Oct 2013] \section[SpecConstr]{Specialise over constructors} \begin{code} +{-# LANGUAGE CPP #-} + module SpecConstr( specConstrProgram #ifdef GHCI @@ -396,16 +398,19 @@ use the calls in the un-specialised RHS as seeds. We call these Note [Top-level recursive groups] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -If all the bindings in a top-level recursive group are not exported, -all the calls are in the rest of the top-level bindings. -This means we can specialise with those call patterns instead of with the RHSs -of the recursive group. +If all the bindings in a top-level recursive group are local (not +exported), then all the calls are in the rest of the top-level +bindings. This means we can specialise with those call patterns +instead of with the RHSs of the recursive group. + +(Question: maybe we should *also* use calls in the rest of the +top-level bindings as seeds? -To get the call usage information, we work backwards through the top-level bindings -so we see the usage before we get to the binding of the function. -Before we can collect the usage though, we go through all the bindings and add them -to the environment. This is necessary because usage is only tracked for functions -in the environment. +To get the call usage information, we work backwards through the +top-level bindings so we see the usage before we get to the binding of +the function. Before we can collect the usage though, we go through +all the bindings and add them to the environment. This is necessary +because usage is only tracked for functions in the environment. The actual seeding of the specialisation is very similar to Note [Local recursive group]. @@ -1323,16 +1328,14 @@ scTopBind env usage (Rec prs) = do { (rhs_usgs, rhss') <- mapAndUnzipM (scExpr env) rhss ; return (usage `combineUsage` (combineUsages rhs_usgs), Rec (bndrs `zip` rhss')) } | otherwise -- Do specialisation - = do { (rhs_usgs, rhs_infos) <- mapAndUnzipM (scRecRhs env) (bndrs `zip` rhss) + = do { (rhs_usgs, rhs_infos) <- mapAndUnzipM (scRecRhs env) prs -- ; pprTrace "scTopBind" (ppr bndrs $$ ppr (map (lookupVarEnv (scu_calls usage)) bndrs)) (return ()) -- Note [Top-level recursive groups] - ; let (usg,rest) = if all (not . isExportedId) bndrs - then -- pprTrace "scTopBind-T" (ppr bndrs $$ ppr (map (fmap (map snd) . 
lookupVarEnv (scu_calls usage)) bndrs)) - ( usage - , [SI [] 0 (Just us) | us <- rhs_usgs] ) - else ( combineUsages rhs_usgs - , [SI [] 0 Nothing | _ <- rhs_usgs] ) + ; let (usg,rest) | any isExportedId bndrs -- Seed from RHSs + = ( combineUsages rhs_usgs, [SI [] 0 Nothing | _ <- rhs_usgs] ) + | otherwise -- Seed from body only + = ( usage, [SI [] 0 (Just us) | us <- rhs_usgs] ) ; (usage', specs) <- specLoop (scForce env force_spec) (scu_calls usg) rhs_infos nullUsage rest @@ -1446,11 +1449,6 @@ specialise env bind_calls (RI fn _ arg_bndrs body arg_occs) , notNull arg_bndrs -- Only specialise functions , Just all_calls <- lookupVarEnv bind_calls fn = do { (boring_call, pats) <- callsToPats env specs arg_occs all_calls --- ; pprTrace "specialise" (vcat [ ppr fn <+> text "with" <+> int (length pats) <+> text "good patterns" --- , text "arg_occs" <+> ppr arg_occs --- , text "calls" <+> ppr all_calls --- , text "good pats" <+> ppr pats]) $ --- return () -- Bale out if too many specialisations ; let n_pats = length pats @@ -1473,12 +1471,25 @@ specialise env bind_calls (RI fn _ arg_bndrs body arg_occs) _normal_case -> do { - let spec_env = decreaseSpecCount env n_pats +-- ; if (not (null pats) || isJust mb_unspec) then +-- pprTrace "specialise" (vcat [ ppr fn <+> text "with" <+> int (length pats) <+> text "good patterns" +-- , text "mb_unspec" <+> ppr (isJust mb_unspec) +-- , text "arg_occs" <+> ppr arg_occs +-- , text "good pats" <+> ppr pats]) $ +-- return () +-- else return () + + ; let spec_env = decreaseSpecCount env n_pats ; (spec_usgs, new_specs) <- mapAndUnzipM (spec_one spec_env fn arg_bndrs body) (pats `zip` [spec_count..]) -- See Note [Specialise original body] ; let spec_usg = combineUsages spec_usgs + + -- If there were any boring calls among the seeds (= all_calls), then those + -- calls will call the un-specialised function. So we should use the seeds + -- from the _unspecialised_ function's RHS, which are in mb_unspec, by returning + -- then in new_usg. (new_usg, mb_unspec') = case mb_unspec of Just rhs_usg | boring_call -> (spec_usg `combineUsage` rhs_usg, Nothing) diff --git a/compiler/specialise/Specialise.lhs b/compiler/specialise/Specialise.lhs index 3191ae946e..baa5d1971f 100644 --- a/compiler/specialise/Specialise.lhs +++ b/compiler/specialise/Specialise.lhs @@ -4,6 +4,8 @@ \section[Specialise]{Stamping out overloading, and (optionally) polymorphism} \begin{code} +{-# LANGUAGE CPP #-} + module Specialise ( specProgram ) where #include "HsVersions.h" diff --git a/compiler/stgSyn/CoreToStg.lhs b/compiler/stgSyn/CoreToStg.lhs index 0c47042b4d..7807d895dc 100644 --- a/compiler/stgSyn/CoreToStg.lhs +++ b/compiler/stgSyn/CoreToStg.lhs @@ -1,4 +1,6 @@ \begin{code} +{-# LANGUAGE CPP #-} + -- -- (c) The GRASP/AQUA Project, Glasgow University, 1993-1998 -- diff --git a/compiler/stgSyn/StgLint.lhs b/compiler/stgSyn/StgLint.lhs index 04349db3df..ec9f6fa9d6 100644 --- a/compiler/stgSyn/StgLint.lhs +++ b/compiler/stgSyn/StgLint.lhs @@ -4,6 +4,8 @@ \section[StgLint]{A ``lint'' pass to check for Stg correctness} \begin{code} +{-# LANGUAGE CPP #-} + module StgLint ( lintStgBindings ) where import StgSyn diff --git a/compiler/stgSyn/StgSyn.lhs b/compiler/stgSyn/StgSyn.lhs index 3fa8c68c16..2ecd573133 100644 --- a/compiler/stgSyn/StgSyn.lhs +++ b/compiler/stgSyn/StgSyn.lhs @@ -9,6 +9,7 @@ being one that happens to be ideally suited to spineless tagless code generation. 
\begin{code} +{-# LANGUAGE CPP #-} module StgSyn ( GenStgArg(..), diff --git a/compiler/stranal/DmdAnal.lhs b/compiler/stranal/DmdAnal.lhs index 72137c7b4b..a3b7c0b72a 100644 --- a/compiler/stranal/DmdAnal.lhs +++ b/compiler/stranal/DmdAnal.lhs @@ -7,7 +7,8 @@ ----------------- \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} module DmdAnal ( dmdAnalProgram ) where @@ -114,7 +115,7 @@ dmdAnalStar :: AnalEnv -> Demand -- This one takes a *Demand* -> CoreExpr -> (BothDmdArg, CoreExpr) dmdAnalStar env dmd e - | (cd, defer_and_use) <- toCleanDmd dmd + | (cd, defer_and_use) <- toCleanDmd dmd (exprType e) , (dmd_ty, e') <- dmdAnal env cd e = (postProcessDmdTypeM defer_and_use dmd_ty, e') @@ -595,7 +596,16 @@ dmdAnalRhs :: TopLevelFlag dmdAnalRhs top_lvl rec_flag env id rhs | Just fn <- unpackTrivial rhs -- See Note [Demand analysis for trivial right-hand sides] , let fn_str = getStrictness env fn - = (fn_str, emptyDmdEnv, set_idStrictness env id fn_str, rhs) + fn_fv | isLocalId fn = unitVarEnv fn topDmd + | otherwise = emptyDmdEnv + -- Note [Remember to demand the function itself] + -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + -- fn_fv: don't forget to produce a demand for fn itself + -- Lacking this caused Trac #9128 + -- The demand is very conservative (topDmd), but that doesn't + -- matter; trivial bindings are usually inlined, so it only + -- kicks in for top-level bindings and NOINLINE bindings + = (fn_str, fn_fv, set_idStrictness env id fn_str, rhs) | otherwise = (sig_ty, lazy_fv, id', mkLams bndrs' body') diff --git a/compiler/stranal/WorkWrap.lhs b/compiler/stranal/WorkWrap.lhs index df7edae991..5b9d0a3083 100644 --- a/compiler/stranal/WorkWrap.lhs +++ b/compiler/stranal/WorkWrap.lhs @@ -4,7 +4,8 @@ \section[WorkWrap]{Worker/wrapper-generating back-end of strictness analyser} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/stranal/WwLib.lhs b/compiler/stranal/WwLib.lhs index 4610b58734..7a9845b3d7 100644 --- a/compiler/stranal/WwLib.lhs +++ b/compiler/stranal/WwLib.lhs @@ -4,6 +4,8 @@ \section[WwLib]{A library for the ``worker\/wrapper'' back-end to the strictness analyser} \begin{code} +{-# LANGUAGE CPP #-} + module WwLib ( mkWwBodies, mkWWstr, mkWorkerArgs , deepSplitProductType_maybe, findTypeShape ) where diff --git a/compiler/typecheck/FamInst.lhs b/compiler/typecheck/FamInst.lhs index 572874b875..d0b2d0da5a 100644 --- a/compiler/typecheck/FamInst.lhs +++ b/compiler/typecheck/FamInst.lhs @@ -1,8 +1,8 @@ The @FamInst@ type: family instance heads \begin{code} -{-# LANGUAGE GADTs #-} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, GADTs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See @@ -217,9 +217,12 @@ tcLookupFamInst tycon tys | otherwise = do { instEnv <- tcGetFamInstEnvs ; let mb_match = lookupFamInstEnv instEnv tycon tys - ; traceTc "lookupFamInst" ((ppr tycon <+> ppr tys) $$ - pprTvBndrs (varSetElems (tyVarsOfTypes tys)) $$ - ppr mb_match $$ ppr instEnv) + ; traceTc "lookupFamInst" $ + vcat [ ppr tycon <+> ppr tys + , pprTvBndrs (varSetElems (tyVarsOfTypes tys)) + , ppr mb_match + -- , ppr instEnv + ] ; case mb_match of [] -> return Nothing (match:_) @@ -297,8 +300,11 @@ checkForConflicts :: FamInstEnvs -> FamInst -> TcM Bool checkForConflicts inst_envs fam_inst = do { let conflicts = lookupFamInstEnvConflicts inst_envs fam_inst no_conflicts = null conflicts - ; traceTc "checkForConflicts" (ppr (map fim_instance conflicts) $$ - ppr fam_inst $$ ppr inst_envs) + ; traceTc "checkForConflicts" $ + vcat [ ppr (map fim_instance conflicts) + , ppr fam_inst + -- , ppr inst_envs + ] ; unless no_conflicts $ conflictInstErr fam_inst conflicts ; return no_conflicts } diff --git a/compiler/typecheck/FunDeps.lhs b/compiler/typecheck/FunDeps.lhs index 1dc96aa037..e5cd356712 100644 --- a/compiler/typecheck/FunDeps.lhs +++ b/compiler/typecheck/FunDeps.lhs @@ -8,6 +8,8 @@ FunDeps - functional dependencies It's better to read it as: "if we know these, then we're going to know these" \begin{code} +{-# LANGUAGE CPP #-} + module FunDeps ( FDEq (..), Equation(..), pprEquation, @@ -559,7 +561,7 @@ if s1 matches \begin{code} checkFunDeps :: (InstEnv, InstEnv) -> ClsInst -> Maybe [ClsInst] -- Nothing <=> ok - -- Just dfs <=> conflict with dfs + -- Just dfs <=> conflict with dfs -- Check wheher adding DFunId would break functional-dependency constraints -- Used only for instance decls defined in the module being compiled checkFunDeps inst_envs ispec diff --git a/compiler/typecheck/Inst.lhs b/compiler/typecheck/Inst.lhs index e934984383..dac522803f 100644 --- a/compiler/typecheck/Inst.lhs +++ b/compiler/typecheck/Inst.lhs @@ -6,7 +6,8 @@ The @Inst@ type: dictionaries or method instances \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See @@ -382,14 +383,15 @@ syntaxNameCtxt name orig ty tidy_env \begin{code} getOverlapFlag :: TcM OverlapFlag -getOverlapFlag +getOverlapFlag = do { dflags <- getDynFlags ; let overlap_ok = xopt Opt_OverlappingInstances dflags incoherent_ok = xopt Opt_IncoherentInstances dflags - safeOverlap = safeLanguageOn dflags - overlap_flag | incoherent_ok = Incoherent safeOverlap - | overlap_ok = OverlapOk safeOverlap - | otherwise = NoOverlap safeOverlap + use x = OverlapFlag { isSafeOverlap = safeLanguageOn dflags + , overlapMode = x } + overlap_flag | incoherent_ok = use Incoherent + | overlap_ok = use OverlapOk + | otherwise = use NoOverlap ; return overlap_flag } @@ -461,10 +463,10 @@ addLocalInst home_ie ispec False -> case dup_ispecs of dup : _ -> dupInstErr ispec dup >> return (extendInstEnv home_ie ispec) [] -> return (extendInstEnv home_ie ispec) - True -> case (dup_ispecs, home_ie_matches, unifs, overlapFlag) of + True -> case (dup_ispecs, home_ie_matches, unifs, overlapMode overlapFlag) of (_, _:_, _, _) -> return (overwriteInstEnv home_ie ispec) (dup:_, [], _, _) -> dupInstErr ispec dup >> return (extendInstEnv home_ie ispec) - ([], _, u:_, NoOverlap _) -> overlappingInstErr ispec u >> return (extendInstEnv home_ie ispec) + ([], _, u:_, NoOverlap) -> overlappingInstErr ispec u >> return (extendInstEnv home_ie ispec) _ -> return (extendInstEnv home_ie ispec) where (homematches, _) = lookupInstEnv' home_ie cls tys home_ie_matches = [ dup_ispec @@ -476,7 +478,8 @@ traceDFuns :: [ClsInst] -> TcRn () traceDFuns ispecs = traceTc "Adding instances:" (vcat (map pp ispecs)) where - pp ispec = ppr (instanceDFunId ispec) <+> colon <+> ppr ispec + pp ispec = hang (ppr (instanceDFunId ispec) <+> colon) + 2 (ppr ispec) -- Print the dfun name itself too funDepErr :: ClsInst -> [ClsInst] -> TcRn () diff --git a/compiler/typecheck/TcAnnotations.lhs b/compiler/typecheck/TcAnnotations.lhs index e12552f419..cbd19cf8f3 100644 --- a/compiler/typecheck/TcAnnotations.lhs +++ b/compiler/typecheck/TcAnnotations.lhs @@ -5,6 +5,8 @@ \section[TcAnnotations]{Typechecking annotations} \begin{code} +{-# LANGUAGE CPP #-} + module TcAnnotations ( tcAnnotations, annCtxt ) where #ifdef GHCI diff --git a/compiler/typecheck/TcArrows.lhs b/compiler/typecheck/TcArrows.lhs index 407e1725ff..eab8941956 100644 --- a/compiler/typecheck/TcArrows.lhs +++ b/compiler/typecheck/TcArrows.lhs @@ -5,16 +5,11 @@ Typecheck arrow notation \begin{code} -{-# OPTIONS -fno-warn-tabs #-} --- The above warning supression flag is a temporary kludge. --- While working on this module you are encouraged to remove it and --- detab the module (please do the detabbing in a separate patch). 
See --- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces --- for details +{-# LANGUAGE RankNTypes #-} module TcArrows ( tcProc ) where -import {-# SOURCE #-} TcExpr( tcMonoExpr, tcInferRho, tcSyntaxOp, tcCheckId, tcPolyExpr ) +import {-# SOURCE #-} TcExpr( tcMonoExpr, tcInferRho, tcSyntaxOp, tcCheckId, tcPolyExpr ) import HsSyn import TcMatches @@ -77,32 +72,32 @@ Note that %************************************************************************ -%* * - Proc -%* * +%* * + Proc +%* * %************************************************************************ \begin{code} -tcProc :: InPat Name -> LHsCmdTop Name -- proc pat -> expr - -> TcRhoType -- Expected type of whole proc expression +tcProc :: InPat Name -> LHsCmdTop Name -- proc pat -> expr + -> TcRhoType -- Expected type of whole proc expression -> TcM (OutPat TcId, LHsCmdTop TcId, TcCoercion) tcProc pat cmd exp_ty = newArrowScope $ - do { (co, (exp_ty1, res_ty)) <- matchExpectedAppTy exp_ty - ; (co1, (arr_ty, arg_ty)) <- matchExpectedAppTy exp_ty1 - ; let cmd_env = CmdEnv { cmd_arr = arr_ty } + do { (co, (exp_ty1, res_ty)) <- matchExpectedAppTy exp_ty + ; (co1, (arr_ty, arg_ty)) <- matchExpectedAppTy exp_ty1 + ; let cmd_env = CmdEnv { cmd_arr = arr_ty } ; (pat', cmd') <- tcPat ProcExpr pat arg_ty $ - tcCmdTop cmd_env cmd (unitTy, res_ty) + tcCmdTop cmd_env cmd (unitTy, res_ty) ; let res_co = mkTcTransCo co (mkTcAppCo co1 (mkTcNomReflCo res_ty)) ; return (pat', cmd', res_co) } \end{code} %************************************************************************ -%* * - Commands -%* * +%* * + Commands +%* * %************************************************************************ \begin{code} @@ -112,7 +107,7 @@ type CmdArgType = TcTauType -- carg_type, a nested tuple data CmdEnv = CmdEnv { - cmd_arr :: TcType -- arrow type constructor, of kind *->*->* + cmd_arr :: TcType -- arrow type constructor, of kind *->*->* } mkCmdArrTy :: CmdEnv -> TcTauType -> TcTauType -> TcTauType @@ -126,27 +121,27 @@ tcCmdTop :: CmdEnv tcCmdTop env (L loc (HsCmdTop cmd _ _ names)) cmd_ty@(cmd_stk, res_ty) = setSrcSpan loc $ - do { cmd' <- tcCmd env cmd cmd_ty - ; names' <- mapM (tcSyntaxName ProcOrigin (cmd_arr env)) names - ; return (L loc $ HsCmdTop cmd' cmd_stk res_ty names') } + do { cmd' <- tcCmd env cmd cmd_ty + ; names' <- mapM (tcSyntaxName ProcOrigin (cmd_arr env)) names + ; return (L loc $ HsCmdTop cmd' cmd_stk res_ty names') } ---------------------------------------- tcCmd :: CmdEnv -> LHsCmd Name -> CmdType -> TcM (LHsCmd TcId) - -- The main recursive function + -- The main recursive function tcCmd env (L loc cmd) res_ty = setSrcSpan loc $ do - { cmd' <- tc_cmd env cmd res_ty - ; return (L loc cmd') } + { cmd' <- tc_cmd env cmd res_ty + ; return (L loc cmd') } tc_cmd :: CmdEnv -> HsCmd Name -> CmdType -> TcM (HsCmd TcId) tc_cmd env (HsCmdPar cmd) res_ty - = do { cmd' <- tcCmd env cmd res_ty - ; return (HsCmdPar cmd') } + = do { cmd' <- tcCmd env cmd res_ty + ; return (HsCmdPar cmd') } tc_cmd env (HsCmdLet binds (L body_loc body)) res_ty - = do { (binds', body') <- tcLocalBinds binds $ - setSrcSpan body_loc $ - tc_cmd env body res_ty - ; return (HsCmdLet binds' (L body_loc body')) } + = do { (binds', body') <- tcLocalBinds binds $ + setSrcSpan body_loc $ + tc_cmd env body res_ty + ; return (HsCmdLet binds' (L body_loc body')) } tc_cmd env in_cmd@(HsCmdCase scrut matches) (stk, res_ty) = addErrCtxt (cmdCtxt in_cmd) $ do @@ -166,25 +161,25 @@ tc_cmd env (HsCmdIf Nothing pred b1 b2) res_ty -- Ordinary 'if' } tc_cmd env (HsCmdIf 
(Just fun) pred b1 b2) res_ty -- Rebindable syntax for if - = do { pred_ty <- newFlexiTyVarTy openTypeKind + = do { pred_ty <- newFlexiTyVarTy openTypeKind -- For arrows, need ifThenElse :: forall r. T -> r -> r -> r -- because we're going to apply it to the environment, not -- the return value. ; (_, [r_tv]) <- tcInstSkolTyVars [alphaTyVar] - ; let r_ty = mkTyVarTy r_tv + ; let r_ty = mkTyVarTy r_tv ; let if_ty = mkFunTys [pred_ty, r_ty, r_ty] r_ty ; checkTc (not (r_tv `elemVarSet` tyVarsOfType pred_ty)) (ptext (sLit "Predicate type of `ifThenElse' depends on result type")) - ; fun' <- tcSyntaxOp IfOrigin fun if_ty - ; pred' <- tcMonoExpr pred pred_ty - ; b1' <- tcCmd env b1 res_ty - ; b2' <- tcCmd env b2 res_ty + ; fun' <- tcSyntaxOp IfOrigin fun if_ty + ; pred' <- tcMonoExpr pred pred_ty + ; b1' <- tcCmd env b1 res_ty + ; b2' <- tcCmd env b2 res_ty ; return (HsCmdIf (Just fun') pred' b1' b2') } ------------------------------------------- --- Arrow application --- (f -< a) or (f -<< a) +-- Arrow application +-- (f -< a) or (f -<< a) -- -- D |- fun :: a t1 t2 -- D,G |- arg :: t1 @@ -199,16 +194,16 @@ tc_cmd env (HsCmdIf (Just fun) pred b1 b2) res_ty -- Rebindable syntax for if -- (plus -<< requires ArrowApply) tc_cmd env cmd@(HsCmdArrApp fun arg _ ho_app lr) (_, res_ty) - = addErrCtxt (cmdCtxt cmd) $ + = addErrCtxt (cmdCtxt cmd) $ do { arg_ty <- newFlexiTyVarTy openTypeKind - ; let fun_ty = mkCmdArrTy env arg_ty res_ty - ; fun' <- select_arrow_scope (tcMonoExpr fun fun_ty) + ; let fun_ty = mkCmdArrTy env arg_ty res_ty + ; fun' <- select_arrow_scope (tcMonoExpr fun fun_ty) -- ToDo: There should be no need for the escapeArrowScope stuff -- See Note [Escaping the arrow scope] in TcRnTypes - ; arg' <- tcMonoExpr arg arg_ty + ; arg' <- tcMonoExpr arg arg_ty - ; return (HsCmdArrApp fun' arg' fun_ty ho_app lr) } + ; return (HsCmdArrApp fun' arg' fun_ty ho_app lr) } where -- Before type-checking f, use the environment of the enclosing -- proc for the (-<) case. 
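As a minimal sketch of the syntax these rules accept (not taken from the patch; the module and binding names are invented for illustration): with plain (-<) the arrow being applied is checked in the scope of the enclosing proc, so it may not mention proc-bound variables, whereas (-<<) additionally requires ArrowApply.

\begin{code}
{-# LANGUAGE Arrows #-}
module ArrowAppExample where

import Control.Arrow

-- The arrow being applied: it cannot refer to the proc-bound 'x',
-- but the argument expression to the right of (-<) can.
addOne :: Arrow a => a Int Int
addOne = arr (+ 1)

example :: Arrow a => a Int Int
example = proc x -> addOne -< x + 1
\end{code}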
@@ -219,7 +214,7 @@ tc_cmd env cmd@(HsCmdArrApp fun arg _ ho_app lr) (_, res_ty) HsFirstOrderApp -> escapeArrowScope tc ------------------------------------------- --- Command application +-- Command application -- -- D,G |- exp : t -- D;G |-a cmd : (t,stk) --> res @@ -227,14 +222,14 @@ tc_cmd env cmd@(HsCmdArrApp fun arg _ ho_app lr) (_, res_ty) -- D;G |-a cmd exp : stk --> res tc_cmd env cmd@(HsCmdApp fun arg) (cmd_stk, res_ty) - = addErrCtxt (cmdCtxt cmd) $ + = addErrCtxt (cmdCtxt cmd) $ do { arg_ty <- newFlexiTyVarTy openTypeKind - ; fun' <- tcCmd env fun (mkPairTy arg_ty cmd_stk, res_ty) - ; arg' <- tcMonoExpr arg arg_ty - ; return (HsCmdApp fun' arg') } + ; fun' <- tcCmd env fun (mkPairTy arg_ty cmd_stk, res_ty) + ; arg' <- tcMonoExpr arg arg_ty + ; return (HsCmdApp fun' arg') } ------------------------------------------- --- Lambda +-- Lambda -- -- D;G,x:t |-a cmd : stk --> res -- ------------------------------ @@ -243,60 +238,60 @@ tc_cmd env cmd@(HsCmdApp fun arg) (cmd_stk, res_ty) tc_cmd env (HsCmdLam (MG { mg_alts = [L mtch_loc (match@(Match pats _maybe_rhs_sig grhss))], mg_origin = origin })) (cmd_stk, res_ty) - = addErrCtxt (pprMatchInCtxt match_ctxt match) $ - do { (co, arg_tys, cmd_stk') <- matchExpectedCmdArgs n_pats cmd_stk + = addErrCtxt (pprMatchInCtxt match_ctxt match) $ + do { (co, arg_tys, cmd_stk') <- matchExpectedCmdArgs n_pats cmd_stk - -- Check the patterns, and the GRHSs inside - ; (pats', grhss') <- setSrcSpan mtch_loc $ + -- Check the patterns, and the GRHSs inside + ; (pats', grhss') <- setSrcSpan mtch_loc $ tcPats LambdaExpr pats arg_tys $ tc_grhss grhss cmd_stk' res_ty - ; let match' = L mtch_loc (Match pats' Nothing grhss') + ; let match' = L mtch_loc (Match pats' Nothing grhss') arg_tys = map hsLPatType pats' cmd' = HsCmdLam (MG { mg_alts = [match'], mg_arg_tys = arg_tys , mg_res_ty = res_ty, mg_origin = origin }) - ; return (mkHsCmdCast co cmd') } + ; return (mkHsCmdCast co cmd') } where n_pats = length pats - match_ctxt = (LambdaExpr :: HsMatchContext Name) -- Maybe KappaExpr? + match_ctxt = (LambdaExpr :: HsMatchContext Name) -- Maybe KappaExpr? pg_ctxt = PatGuard match_ctxt tc_grhss (GRHSs grhss binds) stk_ty res_ty - = do { (binds', grhss') <- tcLocalBinds binds $ - mapM (wrapLocM (tc_grhs stk_ty res_ty)) grhss - ; return (GRHSs grhss' binds') } + = do { (binds', grhss') <- tcLocalBinds binds $ + mapM (wrapLocM (tc_grhs stk_ty res_ty)) grhss + ; return (GRHSs grhss' binds') } tc_grhs stk_ty res_ty (GRHS guards body) - = do { (guards', rhs') <- tcStmtsAndThen pg_ctxt tcGuardStmt guards res_ty $ - \ res_ty -> tcCmd env body (stk_ty, res_ty) - ; return (GRHS guards' rhs') } + = do { (guards', rhs') <- tcStmtsAndThen pg_ctxt tcGuardStmt guards res_ty $ + \ res_ty -> tcCmd env body (stk_ty, res_ty) + ; return (GRHS guards' rhs') } ------------------------------------------- --- Do notation +-- Do notation tc_cmd env (HsCmdDo stmts _) (cmd_stk, res_ty) - = do { co <- unifyType unitTy cmd_stk -- Expecting empty argument stack - ; stmts' <- tcStmts ArrowExpr (tcArrDoStmt env) stmts res_ty - ; return (mkHsCmdCast co (HsCmdDo stmts' res_ty)) } + = do { co <- unifyType unitTy cmd_stk -- Expecting empty argument stack + ; stmts' <- tcStmts ArrowExpr (tcArrDoStmt env) stmts res_ty + ; return (mkHsCmdCast co (HsCmdDo stmts' res_ty)) } ----------------------------------------------------------------- --- Arrow ``forms'' (| e c1 .. cn |) +-- Arrow ``forms'' (| e c1 .. cn |) -- --- D; G |-a1 c1 : stk1 --> r1 --- ... 
--- D; G |-an cn : stkn --> rn --- D |- e :: forall e. a1 (e, stk1) t1 +-- D; G |-a1 c1 : stk1 --> r1 +-- ... +-- D; G |-an cn : stkn --> rn +-- D |- e :: forall e. a1 (e, stk1) t1 -- ... -- -> an (e, stkn) tn -- -> a (e, stk) t --- e \not\in (stk, stk1, ..., stkm, t, t1, ..., tn) --- ---------------------------------------------- --- D; G |-a (| e c1 ... cn |) : stk --> t +-- e \not\in (stk, stk1, ..., stkm, t, t1, ..., tn) +-- ---------------------------------------------- +-- D; G |-a (| e c1 ... cn |) : stk --> t -tc_cmd env cmd@(HsCmdArrForm expr fixity cmd_args) (cmd_stk, res_ty) - = addErrCtxt (cmdCtxt cmd) $ - do { (cmd_args', cmd_tys) <- mapAndUnzipM tc_cmd_arg cmd_args +tc_cmd env cmd@(HsCmdArrForm expr fixity cmd_args) (cmd_stk, res_ty) + = addErrCtxt (cmdCtxt cmd) $ + do { (cmd_args', cmd_tys) <- mapAndUnzipM tc_cmd_arg cmd_args ; let e_ty = mkForAllTy alphaTyVar $ -- We use alphaTyVar for 'w' mkFunTys cmd_tys $ mkCmdArrTy env (mkPairTy alphaTy cmd_stk) res_ty @@ -307,19 +302,19 @@ tc_cmd env cmd@(HsCmdArrForm expr fixity cmd_args) (cmd_stk, res_ty) tc_cmd_arg :: LHsCmdTop Name -> TcM (LHsCmdTop TcId, TcType) tc_cmd_arg cmd = do { arr_ty <- newFlexiTyVarTy arrowTyConKind - ; stk_ty <- newFlexiTyVarTy liftedTypeKind - ; res_ty <- newFlexiTyVarTy liftedTypeKind - ; let env' = env { cmd_arr = arr_ty } - ; cmd' <- tcCmdTop env' cmd (stk_ty, res_ty) - ; return (cmd', mkCmdArrTy env' (mkPairTy alphaTy stk_ty) res_ty) } + ; stk_ty <- newFlexiTyVarTy liftedTypeKind + ; res_ty <- newFlexiTyVarTy liftedTypeKind + ; let env' = env { cmd_arr = arr_ty } + ; cmd' <- tcCmdTop env' cmd (stk_ty, res_ty) + ; return (cmd', mkCmdArrTy env' (mkPairTy alphaTy stk_ty) res_ty) } ----------------------------------------------------------------- --- Base case for illegal commands +-- Base case for illegal commands -- This is where expressions that aren't commands get rejected tc_cmd _ cmd _ = failWithTc (vcat [ptext (sLit "The expression"), nest 2 (ppr cmd), - ptext (sLit "was found where an arrow command was expected")]) + ptext (sLit "was found where an arrow command was expected")]) matchExpectedCmdArgs :: Arity -> TcType -> TcM (TcCoercion, [TcType], TcType) @@ -333,34 +328,34 @@ matchExpectedCmdArgs n ty %************************************************************************ -%* * - Stmts -%* * +%* * + Stmts +%* * %************************************************************************ \begin{code} -------------------------------- --- Mdo-notation +-- Mdo-notation -- The distinctive features here are --- (a) RecStmts, and --- (b) no rebindable syntax +-- (a) RecStmts, and +-- (b) no rebindable syntax tcArrDoStmt :: CmdEnv -> TcCmdStmtChecker tcArrDoStmt env _ (LastStmt rhs _) res_ty thing_inside - = do { rhs' <- tcCmd env rhs (unitTy, res_ty) - ; thing <- thing_inside (panic "tcArrDoStmt") - ; return (LastStmt rhs' noSyntaxExpr, thing) } + = do { rhs' <- tcCmd env rhs (unitTy, res_ty) + ; thing <- thing_inside (panic "tcArrDoStmt") + ; return (LastStmt rhs' noSyntaxExpr, thing) } tcArrDoStmt env _ (BodyStmt rhs _ _ _) res_ty thing_inside - = do { (rhs', elt_ty) <- tc_arr_rhs env rhs - ; thing <- thing_inside res_ty - ; return (BodyStmt rhs' noSyntaxExpr noSyntaxExpr elt_ty, thing) } + = do { (rhs', elt_ty) <- tc_arr_rhs env rhs + ; thing <- thing_inside res_ty + ; return (BodyStmt rhs' noSyntaxExpr noSyntaxExpr elt_ty, thing) } tcArrDoStmt env ctxt (BindStmt pat rhs _ _) res_ty thing_inside - = do { (rhs', pat_ty) <- tc_arr_rhs env rhs - ; (pat', thing) <- tcPat (StmtCtxt ctxt) pat pat_ty $ + = 
do { (rhs', pat_ty) <- tc_arr_rhs env rhs + ; (pat', thing) <- tcPat (StmtCtxt ctxt) pat pat_ty $ thing_inside res_ty - ; return (BindStmt pat' rhs' noSyntaxExpr noSyntaxExpr, thing) } + ; return (BindStmt pat' rhs' noSyntaxExpr noSyntaxExpr, thing) } tcArrDoStmt env ctxt (RecStmt { recS_stmts = stmts, recS_later_ids = later_names , recS_rec_ids = rec_names }) res_ty thing_inside @@ -369,15 +364,15 @@ tcArrDoStmt env ctxt (RecStmt { recS_stmts = stmts, recS_later_ids = later_names ; let tup_ids = zipWith mkLocalId tup_names tup_elt_tys ; tcExtendIdEnv tup_ids $ do { (stmts', tup_rets) - <- tcStmtsAndThen ctxt (tcArrDoStmt env) stmts res_ty $ \ _res_ty' -> - -- ToDo: res_ty not really right + <- tcStmtsAndThen ctxt (tcArrDoStmt env) stmts res_ty $ \ _res_ty' -> + -- ToDo: res_ty not really right zipWithM tcCheckId tup_names tup_elt_tys ; thing <- thing_inside res_ty - -- NB: The rec_ids for the recursive things - -- already scope over this part. This binding may shadow - -- some of them with polymorphic things with the same Name - -- (see note [RecStmt] in HsExpr) + -- NB: The rec_ids for the recursive things + -- already scope over this part. This binding may shadow + -- some of them with polymorphic things with the same Name + -- (see note [RecStmt] in HsExpr) ; let rec_ids = takeList rec_names tup_ids ; later_ids <- tcLookupLocalIds later_names @@ -390,22 +385,22 @@ tcArrDoStmt env ctxt (RecStmt { recS_stmts = stmts, recS_later_ids = later_names , recS_later_rets = later_rets , recS_rec_ids = rec_ids, recS_rec_rets = rec_rets , recS_ret_ty = res_ty }, thing) - }} + }} tcArrDoStmt _ _ stmt _ _ = pprPanic "tcArrDoStmt: unexpected Stmt" (ppr stmt) tc_arr_rhs :: CmdEnv -> LHsCmd Name -> TcM (LHsCmd TcId, TcType) tc_arr_rhs env rhs = do { ty <- newFlexiTyVarTy liftedTypeKind - ; rhs' <- tcCmd env rhs (unitTy, ty) - ; return (rhs', ty) } + ; rhs' <- tcCmd env rhs (unitTy, ty) + ; return (rhs', ty) } \end{code} %************************************************************************ -%* * - Helpers -%* * +%* * + Helpers +%* * %************************************************************************ @@ -413,15 +408,15 @@ tc_arr_rhs env rhs = do { ty <- newFlexiTyVarTy liftedTypeKind mkPairTy :: Type -> Type -> Type mkPairTy t1 t2 = mkTyConApp pairTyCon [t1,t2] -arrowTyConKind :: Kind -- *->*->* +arrowTyConKind :: Kind -- *->*->* arrowTyConKind = mkArrowKinds [liftedTypeKind, liftedTypeKind] liftedTypeKind \end{code} %************************************************************************ -%* * - Errors -%* * +%* * + Errors +%* * %************************************************************************ \begin{code} diff --git a/compiler/typecheck/TcBinds.lhs b/compiler/typecheck/TcBinds.lhs index 17f124b0d8..887e41c0d5 100644 --- a/compiler/typecheck/TcBinds.lhs +++ b/compiler/typecheck/TcBinds.lhs @@ -5,6 +5,8 @@ \section[TcBinds]{TcBinds} \begin{code} +{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-} + module TcBinds ( tcLocalBinds, tcTopBinds, tcRecSelBinds, tcHsBootSigs, tcPolyCheck, PragFun, tcSpecPrags, tcVectDecls, mkPragFun, @@ -37,6 +39,7 @@ import TysPrim import Id import Var import VarSet +import VarEnv( TidyEnv ) import Module import Name import NameSet @@ -54,7 +57,7 @@ import FastString import Type(mkStrLitTy) import Class(classTyCon) import PrelNames(ipClassName) -import TcValidity (checkValidTheta) +import TcValidity (checkValidType) import Control.Monad @@ -271,6 +274,30 @@ time by defaulting. No no no. 
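Looking back at the tcArrDoStmt checker above: it handles the statements of a command do-block with no rebindable syntax, and each right-hand side is checked against an empty argument stack (unitTy), as tc_arr_rhs arranges. A small sketch of the syntax it accepts, not taken from the patch; the names are invented for illustration:

\begin{code}
{-# LANGUAGE Arrows #-}
module ArrowDoExample where

import Control.Arrow

-- Each statement body is a command taking an empty argument stack,
-- matching the (unitTy, ty) pair that tc_arr_rhs constructs.
doubleAndAdd :: Arrow a => a Int Int
doubleAndAdd = proc x -> do
  y <- arr (* 2) -< x
  returnA -< x + y
\end{code}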
However [Oct 10] this is all handled automatically by the untouchable-range idea. +Note [Placeholder PatSyn kinds] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Consider this (Trac #9161) + + {-# LANGUAGE PatternSynonyms, DataKinds #-} + pattern A = () + b :: A + b = undefined + +Here, the type signature for b mentions A. But A is a pattern +synonym, which is typechecked (for very good reasons; a view pattern +in the RHS may mention a value binding) as part of a group of +bindings. It is entirely resonable to reject this, but to do so +we need A to be in the kind environment when kind-checking the signature for B. + +Hence the tcExtendKindEnv2 patsyn_placeholder_kinds, which adds a binding + A -> AGlobal (AConLike (PatSynCon _|_)) +to the environment. Then TcHsType.tcTyVar will find A in the kind environment, +and will give a 'wrongThingErr' as a result. But the lookup of A won't fail. + +The _|_ (= panic "fakePatSynCon") works because the wrongThingErr call, in +tcTyVar, doesn't look inside the TcTyThing. + + \begin{code} tcValBinds :: TopLevelFlag -> [(RecFlag, LHsBinds Name)] -> [LSig Name] @@ -278,19 +305,26 @@ tcValBinds :: TopLevelFlag -> TcM ([(RecFlag, LHsBinds TcId)], thing) tcValBinds top_lvl binds sigs thing_inside - = do { -- Typecheck the signature - (poly_ids, sig_fn) <- tcTySigs sigs + = do { -- Typecheck the signature + ; (poly_ids, sig_fn) <- tcExtendKindEnv2 patsyn_placeholder_kinds $ + -- See Note [Placeholder PatSyn kinds] + tcTySigs sigs ; let prag_fn = mkPragFun sigs (foldr (unionBags . snd) emptyBag binds) -- Extend the envt right away with all -- the Ids declared with type signatures -- Use tcExtendIdEnv2 to avoid extending the TcIdBinder stack - ; (binds', thing) <- tcExtendIdEnv2 [(idName id, id) | id <- poly_ids] $ - tcBindGroups top_lvl sig_fn prag_fn - binds thing_inside - - ; return (binds', thing) } + ; tcExtendIdEnv2 [(idName id, id) | id <- poly_ids] $ + tcBindGroups top_lvl sig_fn prag_fn + binds thing_inside } + where + patsyn_placeholder_kinds -- See Note [Placeholder PatSyn kinds] + = [ (name, placeholder_patsyn_tything) + | (_, lbinds) <- binds + , L _ (PatSynBind{ patsyn_id = L _ name }) <- bagToList lbinds ] + placeholder_patsyn_tything + = AGlobal $ AConLike $ PatSynCon $ panic "fakePatSynCon" ------------------------ tcBindGroups :: TopLevelFlag -> TcSigFun -> PragFun @@ -559,16 +593,11 @@ tcPolyInfer rec_tc prag_fn tc_sig_fn mono closed bind_list ; let name_taus = [(name, idType mono_id) | (name, _, mono_id) <- mono_infos] ; traceTc "simplifyInfer call" (ppr name_taus $$ ppr wanted) - ; (qtvs, givens, mr_bites, ev_binds) <- - simplifyInfer closed mono name_taus wanted - - ; theta <- zonkTcThetaType (map evVarPred givens) - -- We need to check inferred theta for validity. The reason is that we - -- might have inferred theta that requires language extension that is - -- not turned on. See #8883. Example can be found in the T8883 testcase. - ; checkValidTheta (InfSigCtxt (fst . 
head $ name_taus)) theta - ; exports <- checkNoErrs $ mapM (mkExport prag_fn qtvs theta) mono_infos + ; (qtvs, givens, mr_bites, ev_binds) + <- simplifyInfer closed mono name_taus wanted + ; theta <- zonkTcThetaType (map evVarPred givens) + ; exports <- checkNoErrs $ mapM (mkExport prag_fn qtvs theta) mono_infos ; loc <- getSrcSpanM ; let poly_ids = map abe_poly exports final_closed | closed && not mr_bites = TopLevel @@ -603,20 +632,12 @@ mkExport :: PragFun mkExport prag_fn qtvs theta (poly_name, mb_sig, mono_id) = do { mono_ty <- zonkTcType (idType mono_id) - ; let poly_id = case mb_sig of - Nothing -> mkLocalId poly_name inferred_poly_ty - Just sig -> sig_id sig - -- poly_id has a zonked type - - -- In the inference case (no signature) this stuff figures out - -- the right type variables and theta to quantify over - -- See Note [Impedence matching] - my_tvs2 = closeOverKinds (growThetaTyVars theta (tyVarsOfType mono_ty)) - -- Include kind variables! Trac #7916 - my_tvs = filter (`elemVarSet` my_tvs2) qtvs -- Maintain original order - my_theta = filter (quantifyPred my_tvs2) theta - inferred_poly_ty = mkSigmaTy my_tvs my_theta mono_ty + ; poly_id <- case mb_sig of + Just sig -> return (sig_id sig) + Nothing -> mkInferredPolyId poly_name qtvs theta mono_ty + + -- NB: poly_id has a zonked type ; poly_id <- addInlinePrags poly_id prag_sigs ; spec_prags <- tcSpecPrags poly_id prag_sigs -- tcPrags requires a zonked poly_id @@ -632,7 +653,7 @@ mkExport prag_fn qtvs theta (poly_name, mb_sig, mono_id) -- closed (unless we are doing NoMonoLocalBinds in which case all bets -- are off) -- See Note [Impedence matching] - ; (wrap, wanted) <- addErrCtxtM (mk_msg poly_id) $ + ; (wrap, wanted) <- addErrCtxtM (mk_bind_msg inferred True poly_name (idType poly_id)) $ captureConstraints $ tcSubType origin sig_ctxt sel_poly_ty (idType poly_id) ; ev_binds <- simplifyTop wanted @@ -643,24 +664,58 @@ mkExport prag_fn qtvs theta (poly_name, mb_sig, mono_id) , abe_prags = SpecPrags spec_prags }) } where inferred = isNothing mb_sig - - mk_msg poly_id tidy_env - = return (tidy_env', msg) - where - msg | inferred = hang (ptext (sLit "When checking that") <+> pp_name) - 2 (ptext (sLit "has the inferred type") <+> pp_ty) - $$ ptext (sLit "Probable cause: the inferred type is ambiguous") - | otherwise = hang (ptext (sLit "When checking that") <+> pp_name) - 2 (ptext (sLit "has the specified type") <+> pp_ty) - pp_name = quotes (ppr poly_name) - pp_ty = quotes (ppr tidy_ty) - (tidy_env', tidy_ty) = tidyOpenType tidy_env (idType poly_id) - prag_sigs = prag_fn poly_name origin = AmbigOrigin sig_ctxt sig_ctxt = InfSigCtxt poly_name + +mkInferredPolyId :: Name -> [TyVar] -> TcThetaType -> TcType -> TcM Id +-- In the inference case (no signature) this stuff figures out +-- the right type variables and theta to quantify over +-- See Note [Validity of inferred types] +mkInferredPolyId poly_name qtvs theta mono_ty + = addErrCtxtM (mk_bind_msg True False poly_name inferred_poly_ty) $ + do { checkValidType (InfSigCtxt poly_name) inferred_poly_ty + ; return (mkLocalId poly_name inferred_poly_ty) } + where + my_tvs2 = closeOverKinds (growThetaTyVars theta (tyVarsOfType mono_ty)) + -- Include kind variables! 
Trac #7916 + my_tvs = filter (`elemVarSet` my_tvs2) qtvs -- Maintain original order + my_theta = filter (quantifyPred my_tvs2) theta + inferred_poly_ty = mkSigmaTy my_tvs my_theta mono_ty + +mk_bind_msg :: Bool -> Bool -> Name -> TcType -> TidyEnv -> TcM (TidyEnv, SDoc) +mk_bind_msg inferred want_ambig poly_name poly_ty tidy_env + = return (tidy_env', msg) + where + msg = vcat [ ptext (sLit "When checking that") <+> quotes (ppr poly_name) + <+> ptext (sLit "has the") <+> what <+> ptext (sLit "type") + , nest 2 (ppr poly_name <+> dcolon <+> ppr tidy_ty) + , ppWhen want_ambig $ + ptext (sLit "Probable cause: the inferred type is ambiguous") ] + what | inferred = ptext (sLit "inferred") + | otherwise = ptext (sLit "specified") + (tidy_env', tidy_ty) = tidyOpenType tidy_env poly_ty \end{code} +Note [Validity of inferred types] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +We need to check inferred type for validity, in case it uses language +extensions that are not turned on. The principle is that if the user +simply adds the inferred type to the program source, it'll compile fine. +See #8883. + +Examples that might fail: + - an inferred theta that requires type equalities e.g. (F a ~ G b) + or multi-parameter type classes + - an inferred type that includes unboxed tuples + +However we don't do the ambiguity check (checkValidType omits it for +InfSigCtxt) because the impedence-matching stage, which follows +immediately, will do it and we don't want two error messages. +Moreover, because of the impedence matching stage, the ambiguity-check +suggestion of -XAllowAmbiguiousTypes will not work. + + Note [Impedence matching] ~~~~~~~~~~~~~~~~~~~~~~~~~ Consider diff --git a/compiler/typecheck/TcCanonical.lhs b/compiler/typecheck/TcCanonical.lhs index 5784d81ce4..43cbb2c49d 100644 --- a/compiler/typecheck/TcCanonical.lhs +++ b/compiler/typecheck/TcCanonical.lhs @@ -1,4 +1,6 @@ \begin{code} +{-# LANGUAGE CPP #-} + module TcCanonical( canonicalize, emitWorkNC, StopOrContinue (..) @@ -1260,7 +1262,7 @@ checkKind new_ev s1 k1 s2 k2 -- See Note [Equalities with incompatible kinds] do { traceTcS "canEqLeaf: incompatible kinds" (vcat [ppr k1, ppr k2]) -- Create a derived kind-equality, and solve it - ; mw <- newDerived kind_co_loc (mkEqPred k1 k2) + ; mw <- newDerived kind_co_loc (mkTcEqPred k1 k2) ; case mw of Nothing -> return () Just kev -> emitWorkNC [kev] diff --git a/compiler/typecheck/TcClassDcl.lhs b/compiler/typecheck/TcClassDcl.lhs index 187aea5083..be5a74f294 100644 --- a/compiler/typecheck/TcClassDcl.lhs +++ b/compiler/typecheck/TcClassDcl.lhs @@ -6,7 +6,8 @@ Typechecking class declarations \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/typecheck/TcDefaults.lhs b/compiler/typecheck/TcDefaults.lhs index a096e506ed..7b5bd27321 100644 --- a/compiler/typecheck/TcDefaults.lhs +++ b/compiler/typecheck/TcDefaults.lhs @@ -5,7 +5,7 @@ \section[TcDefaults]{Typechecking \tr{default} declarations} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See diff --git a/compiler/typecheck/TcDeriv.lhs b/compiler/typecheck/TcDeriv.lhs index 71fd25c557..d18c21c9de 100644 --- a/compiler/typecheck/TcDeriv.lhs +++ b/compiler/typecheck/TcDeriv.lhs @@ -6,6 +6,8 @@ Handles @deriving@ clauses on @data@ declarations. \begin{code} +{-# LANGUAGE CPP #-} + module TcDeriv ( tcDeriving ) where #include "HsVersions.h" @@ -18,7 +20,7 @@ import FamInst import TcErrors( reportAllUnsolved ) import TcValidity( validDerivPred ) import TcEnv -import TcTyClsDecls( tcFamTyPats, tcAddDataFamInstCtxt ) +import TcTyClsDecls( tcFamTyPats, famTyConShape, tcAddDataFamInstCtxt ) import TcClassDcl( tcAddDeclCtxt ) -- Small helper import TcGenDeriv -- Deriv stuff import TcGenGenerics @@ -91,6 +93,7 @@ data DerivSpec theta = DS { ds_loc :: SrcSpan , ds_tys :: [Type] , ds_tc :: TyCon , ds_tc_args :: [Type] + , ds_overlap :: Maybe OverlapMode , ds_newtype :: Bool } -- This spec implies a dfun declaration of the form -- df :: forall tvs. theta => C tys @@ -565,6 +568,7 @@ deriveAutoTypeable auto_typeable done_specs tycl_decls do_one cls (L _ decl) = do { tc <- tcLookupTyCon (tcdName decl) ; if (isSynTyCon tc || tyConName tc `elemNameSet` done_tcs) + -- Do not derive Typeable for type synonyms or type families then return [] else mkPolyKindedTypeableEqn cls tc } @@ -597,7 +601,7 @@ deriveFamInst decl@(DataFamInstDecl { dfid_tycon = L _ tc_name, dfid_pats = pats , dfid_defn = HsDataDefn { dd_derivs = Just preds } }) = tcAddDataFamInstCtxt decl $ do { fam_tc <- tcLookupTyCon tc_name - ; tcFamTyPats tc_name (tyConKind fam_tc) pats (\_ -> return ()) $ + ; tcFamTyPats (famTyConShape fam_tc) pats (\_ -> return ()) $ \ tvs' pats' _ -> concatMapM (deriveTyData True tvs' fam_tc pats') preds } -- Tiresomely we must figure out the "lhs", which is awkward for type families @@ -615,7 +619,7 @@ deriveStandalone :: LDerivDecl Name -> TcM [EarlyDerivSpec] -- Standalone deriving declarations -- e.g. deriving instance Show a => Show (T a) -- Rather like tcLocalInstDecl -deriveStandalone (L loc (DerivDecl deriv_ty)) +deriveStandalone (L loc (DerivDecl deriv_ty overlap_mode)) = setSrcSpan loc $ addErrCtxt (standaloneCtxt deriv_ty) $ do { traceTc "Standalone deriving decl for" (ppr deriv_ty) @@ -644,7 +648,7 @@ deriveStandalone (L loc (DerivDecl deriv_ty)) ; mkPolyKindedTypeableEqn cls tc } | isAlgTyCon tc -- All other classes - -> do { spec <- mkEqnHelp tvs cls cls_tys tc tc_args (Just theta) + -> do { spec <- mkEqnHelp overlap_mode tvs cls cls_tys tc tc_args (Just theta) ; return [spec] } _ -> -- Complain about functions, primitive types, etc, @@ -702,8 +706,9 @@ deriveTyData :: Bool -- False <=> data/newtype -- I.e. not standalone deriving deriveTyData is_instance tvs tc tc_args (L loc deriv_pred) = setSrcSpan loc $ -- Use the location of the 'deriving' item - do { (deriv_tvs, cls, cls_tys) <- tcExtendTyVarEnv tvs $ - tcHsDeriv deriv_pred + do { (deriv_tvs, cls, cls_tys, cls_arg_kind) + <- tcExtendTyVarEnv tvs $ + tcHsDeriv deriv_pred -- Deriving preds may (now) mention -- the type variables for the type constructor, hence tcExtendTyVarenv -- The "deriv_pred" is a LHsType to take account of the fact that for @@ -717,12 +722,8 @@ deriveTyData is_instance tvs tc tc_args (L loc deriv_pred) else do { -- Given data T a b c = ... 
deriving( C d ), - -- we want to drop type variables from T so that (C d (T a)) is well-kinded - ; let cls_tyvars = classTyVars cls - ; checkTc (not (null cls_tyvars)) derivingNullaryErr - - ; let cls_arg_kind = tyVarKind (last cls_tyvars) - (arg_kinds, _) = splitKindFunTys cls_arg_kind + -- we want to drop type variables from T so that (C d (T a)) is well-kinded + let (arg_kinds, _) = splitKindFunTys cls_arg_kind n_args_to_drop = length arg_kinds n_args_to_keep = tyConArity tc - n_args_to_drop args_to_drop = drop n_args_to_keep tc_args @@ -734,9 +735,9 @@ deriveTyData is_instance tvs tc tc_args (L loc deriv_pred) -- to the types. See Note [Unify kinds in deriving] -- We are assuming the tycon tyvars and the class tyvars are distinct mb_match = tcUnifyTy inst_ty_kind cls_arg_kind - Just kind_subst = mb_match + Just kind_subst = mb_match (univ_kvs, univ_tvs) = partition isKindVar $ varSetElems $ - mkVarSet deriv_tvs `unionVarSet` + mkVarSet deriv_tvs `unionVarSet` tyVarsOfTypes tc_args_to_keep univ_kvs' = filter (`notElemTvSubst` kind_subst) univ_kvs (subst', univ_tvs') = mapAccumL substTyVarBndr kind_subst univ_tvs @@ -769,7 +770,7 @@ deriveTyData is_instance tvs tc tc_args (L loc deriv_pred) -- newtype T a s = ... deriving( ST s ) -- newtype K a a = ... deriving( Monad ) - ; spec <- mkEqnHelp (univ_kvs' ++ univ_tvs') + ; spec <- mkEqnHelp Nothing (univ_kvs' ++ univ_tvs') cls final_cls_tys tc final_tc_args Nothing ; return [spec] } } @@ -851,7 +852,8 @@ and occurrence sites. \begin{code} -mkEqnHelp :: [TyVar] +mkEqnHelp :: Maybe OverlapMode + -> [TyVar] -> Class -> [Type] -> TyCon -> [Type] -> DerivContext -- Just => context supplied (standalone deriving) @@ -862,7 +864,7 @@ mkEqnHelp :: [TyVar] -- where the 'theta' is optional (that's the Maybe part) -- Assumes that this declaration is well-kinded -mkEqnHelp tvs cls cls_tys tycon tc_args mtheta +mkEqnHelp overlap_mode tvs cls cls_tys tycon tc_args mtheta | className cls `elem` oldTypeableClassNames = do { dflags <- getDynFlags ; case checkOldTypeableConditions (dflags, tycon, tc_args) of @@ -898,10 +900,10 @@ mkEqnHelp tvs cls cls_tys tycon tc_args mtheta ; dflags <- getDynFlags ; if isDataTyCon rep_tc then - mkDataTypeEqn dflags tvs cls cls_tys + mkDataTypeEqn dflags overlap_mode tvs cls cls_tys tycon tc_args rep_tc rep_tc_args mtheta else - mkNewTypeEqn dflags tvs cls cls_tys + mkNewTypeEqn dflags overlap_mode tvs cls cls_tys tycon tc_args rep_tc rep_tc_args mtheta } where bale_out msg = failWithTc (derivingThingErr False cls cls_tys (mkTyConApp tycon tc_args) msg) @@ -991,6 +993,7 @@ See Note [Eta reduction for data family axioms] in TcInstDcls. 
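The argument-dropping logic in deriveTyData above is what lets a deriving clause on a multi-parameter type produce a well-kinded instance head. As a sketch (not from the patch; the type and module names are invented), for the declaration below GHC drops the trailing type variable and derives instance Functor (T a b) rather than the ill-kinded Functor (T a b c):

\begin{code}
{-# LANGUAGE DeriveFunctor #-}
module DeriveDropExample where

-- deriveTyData keeps only as many arguments of T as leave the
-- instance head well-kinded: the derived instance is Functor (T a b).
data T a b c = MkT a b c
  deriving Functor
\end{code}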
\begin{code} mkDataTypeEqn :: DynFlags + -> Maybe OverlapMode -> [Var] -- Universally quantified type variables in the instance -> Class -- Class for which we need to derive an instance -> [Type] -- Other parameters to the class except the last @@ -1002,7 +1005,7 @@ mkDataTypeEqn :: DynFlags -> DerivContext -- Context of the instance, for standalone deriving -> TcRn EarlyDerivSpec -- Return 'Nothing' if error -mkDataTypeEqn dflags tvs cls cls_tys +mkDataTypeEqn dflags overlap_mode tvs cls cls_tys tycon tc_args rep_tc rep_tc_args mtheta = case checkSideConditions dflags mtheta cls cls_tys rep_tc rep_tc_args of -- NB: pass the *representation* tycon to checkSideConditions @@ -1010,13 +1013,13 @@ mkDataTypeEqn dflags tvs cls cls_tys NonDerivableClass -> bale_out (nonStdErr cls) DerivableClassError msg -> bale_out msg where - go_for_it = mk_data_eqn tvs cls tycon tc_args rep_tc rep_tc_args mtheta + go_for_it = mk_data_eqn overlap_mode tvs cls tycon tc_args rep_tc rep_tc_args mtheta bale_out msg = failWithTc (derivingThingErr False cls cls_tys (mkTyConApp tycon tc_args) msg) -mk_data_eqn :: [TyVar] -> Class +mk_data_eqn :: Maybe OverlapMode -> [TyVar] -> Class -> TyCon -> [TcType] -> TyCon -> [TcType] -> DerivContext -> TcM EarlyDerivSpec -mk_data_eqn tvs cls tycon tc_args rep_tc rep_tc_args mtheta +mk_data_eqn overlap_mode tvs cls tycon tc_args rep_tc rep_tc_args mtheta = do loc <- getSrcSpanM dfun_name <- new_dfun_name cls tycon case mtheta of @@ -1028,6 +1031,7 @@ mk_data_eqn tvs cls tycon tc_args rep_tc rep_tc_args mtheta , ds_cls = cls, ds_tys = inst_tys , ds_tc = rep_tc, ds_tc_args = rep_tc_args , ds_theta = inferred_constraints + , ds_overlap = overlap_mode , ds_newtype = False } Just theta -> do -- Specified context return $ GivenTheta $ DS @@ -1036,6 +1040,7 @@ mk_data_eqn tvs cls tycon tc_args rep_tc rep_tc_args mtheta , ds_cls = cls, ds_tys = inst_tys , ds_tc = rep_tc, ds_tc_args = rep_tc_args , ds_theta = theta + , ds_overlap = overlap_mode , ds_newtype = False } where inst_tys = [mkTyConApp tycon tc_args] @@ -1073,7 +1078,9 @@ mkOldTypeableEqn tvs cls tycon tc_args mtheta DS { ds_loc = loc, ds_name = dfun_name, ds_tvs = [] , ds_cls = cls, ds_tys = [mkTyConApp tycon []] , ds_tc = tycon, ds_tc_args = [] - , ds_theta = mtheta `orElse` [], ds_newtype = False }) } + , ds_theta = mtheta `orElse` [] + , ds_overlap = Nothing -- Or, Just NoOverlap? + , ds_newtype = False }) } mkPolyKindedTypeableEqn :: Class -> TyCon -> TcM [EarlyDerivSpec] -- We can arrive here from a 'deriving' clause @@ -1098,6 +1105,9 @@ mkPolyKindedTypeableEqn cls tc -- so we must instantiate it appropiately , ds_tc = tc, ds_tc_args = tc_args , ds_theta = [] -- Context is empty for polykinded Typeable + , ds_overlap = Nothing + -- Perhaps this should be `Just NoOverlap`? 
+ , ds_newtype = False } } where (kvs,tc_app_kind) = splitForAllTys (tyConKind tc) @@ -1121,21 +1131,23 @@ inferConstraints cls inst_tys rep_tc rep_tc_args | otherwise -- The others are a bit more complicated = ASSERT2( equalLength rep_tc_tvs all_rep_tc_args, ppr cls <+> ppr rep_tc ) - return (stupid_constraints ++ extra_constraints - ++ sc_constraints - ++ con_arg_constraints cls get_std_constrained_tys) - + do { traceTc "inferConstraints" (vcat [ppr cls <+> ppr inst_tys, ppr arg_constraints]) + ; return (stupid_constraints ++ extra_constraints + ++ sc_constraints + ++ arg_constraints) } where + arg_constraints = con_arg_constraints cls get_std_constrained_tys + -- Constraints arising from the arguments of each constructor con_arg_constraints cls' get_constrained_tys - = [ mkPredOrigin (DerivOriginDC data_con arg_n) (mkClassPred cls' [arg_ty]) - | data_con <- tyConDataCons rep_tc, - (arg_n, arg_ty) <- - ASSERT( isVanillaDataCon data_con ) - zip [1..] $ -- ASSERT is precondition of dataConInstOrigArgTys - get_constrained_tys $ - dataConInstOrigArgTys data_con all_rep_tc_args, - not (isUnLiftedType arg_ty) ] + = [ mkPredOrigin (DerivOriginDC data_con arg_n) (mkClassPred cls' [inner_ty]) + | data_con <- tyConDataCons rep_tc + , (arg_n, arg_ty) <- ASSERT( isVanillaDataCon data_con ) + zip [1..] $ -- ASSERT is precondition of dataConInstOrigArgTys + dataConInstOrigArgTys data_con all_rep_tc_args + , not (isUnLiftedType arg_ty) + , inner_ty <- get_constrained_tys arg_ty ] + -- No constraints for unlifted types -- See Note [Deriving and unboxed types] @@ -1145,10 +1157,10 @@ inferConstraints cls inst_tys rep_tc rep_tc_args -- (b) The rep_tc_args will be one short is_functor_like = getUnique cls `elem` functorLikeClassKeys - get_std_constrained_tys :: [Type] -> [Type] - get_std_constrained_tys tys - | is_functor_like = concatMap (deepSubtypesContaining last_tv) tys - | otherwise = tys + get_std_constrained_tys :: Type -> [Type] + get_std_constrained_tys ty + | is_functor_like = deepSubtypesContaining last_tv ty + | otherwise = [ty] rep_tc_tvs = tyConTyVars rep_tc last_tv = last rep_tc_tvs @@ -1442,16 +1454,6 @@ cond_functorOK allowFunctions (_, rep_tc, _) functions = ptext (sLit "must not contain function types") wrong_arg = ptext (sLit "must use the type variable only as the last argument of a data type") -allDistinctTyVars :: [KindOrType] -> Bool -allDistinctTyVars tkvs = go emptyVarSet tkvs - where - go _ [] = True - go so_far (ty : tys) - = case getTyVar_maybe ty of - Nothing -> False - Just tv | tv `elemVarSet` so_far -> False - | otherwise -> go (so_far `extendVarSet` tv) tys - checkFlag :: ExtensionFlag -> Condition checkFlag flag (dflags, _, _) | xopt flag dflags = Nothing @@ -1553,14 +1555,15 @@ a context for the Data instances: %************************************************************************ \begin{code} -mkNewTypeEqn :: DynFlags -> [Var] -> Class +mkNewTypeEqn :: DynFlags -> Maybe OverlapMode -> [Var] -> Class -> [Type] -> TyCon -> [Type] -> TyCon -> [Type] -> DerivContext -> TcRn EarlyDerivSpec -mkNewTypeEqn dflags tvs +mkNewTypeEqn dflags overlap_mode tvs cls cls_tys tycon tc_args rep_tycon rep_tc_args mtheta -- Want: instance (...) => cls (cls_tys ++ [tycon tc_args]) where ... 
- | might_derive_via_coercible && (newtype_deriving || std_class_via_coercible cls) + | ASSERT( length cls_tys + 1 == classArity cls ) + might_derive_via_coercible && (newtype_deriving || std_class_via_coercible cls) = do traceTc "newtype deriving:" (ppr tycon <+> ppr rep_tys <+> ppr all_preds) dfun_name <- new_dfun_name cls tycon loc <- getSrcSpanM @@ -1571,6 +1574,7 @@ mkNewTypeEqn dflags tvs , ds_cls = cls, ds_tys = inst_tys , ds_tc = rep_tycon, ds_tc_args = rep_tc_args , ds_theta = theta + , ds_overlap = overlap_mode , ds_newtype = True } Nothing -> return $ InferTheta $ DS { ds_loc = loc @@ -1578,6 +1582,7 @@ mkNewTypeEqn dflags tvs , ds_cls = cls, ds_tys = inst_tys , ds_tc = rep_tycon, ds_tc_args = rep_tc_args , ds_theta = all_preds + , ds_overlap = overlap_mode , ds_newtype = True } | otherwise = case checkSideConditions dflags mtheta cls cls_tys rep_tycon rep_tc_args of @@ -1591,7 +1596,7 @@ mkNewTypeEqn dflags tvs | otherwise -> bale_out non_std where newtype_deriving = xopt Opt_GeneralizedNewtypeDeriving dflags - go_for_it = mk_data_eqn tvs cls tycon tc_args rep_tycon rep_tc_args mtheta + go_for_it = mk_data_eqn overlap_mode tvs cls tycon tc_args rep_tycon rep_tc_args mtheta bale_out msg = failWithTc (derivingThingErr newtype_deriving cls cls_tys inst_ty msg) non_std = nonStdErr cls @@ -1687,15 +1692,10 @@ mkNewTypeEqn dflags tvs -- See Note [Determining whether newtype-deriving is appropriate] might_derive_via_coercible = not (non_coercible_class cls) - && arity_ok && eta_ok && ats_ok -- && not (isRecursiveTyCon tycon) -- Note [Recursive newtypes] - arity_ok = length cls_tys + 1 == classArity cls - -- Well kinded; eg not: newtype T ... deriving( ST ) - -- because ST needs *2* type params - -- Check that eta reduction is OK eta_ok = nt_eta_arity <= length rep_tc_args -- The newtype can be eta-reduced to match the number @@ -1711,13 +1711,10 @@ mkNewTypeEqn dflags tvs -- so for 'data' instance decls cant_derive_err - = vcat [ ppUnless arity_ok arity_msg - , ppUnless eta_ok eta_msg + = vcat [ ppUnless eta_ok eta_msg , ppUnless ats_ok ats_msg ] - arity_msg = quotes (ppr (mkClassPred cls cls_tys)) <+> ptext (sLit "does not have arity 1") eta_msg = ptext (sLit "cannot eta-reduce the representation type enough") ats_msg = ptext (sLit "the class has associated types") - \end{code} Note [Recursive newtypes] @@ -2058,9 +2055,10 @@ genInst :: Bool -- True <=> standalone deriving -> OverlapFlag -> CommonAuxiliaries -> DerivSpec ThetaType -> TcM (InstInfo RdrName, BagDerivStuff, Maybe Name) -genInst standalone_deriv oflag comauxs +genInst standalone_deriv default_oflag comauxs spec@(DS { ds_tvs = tvs, ds_tc = rep_tycon, ds_tc_args = rep_tc_args , ds_theta = theta, ds_newtype = is_newtype, ds_tys = tys + , ds_overlap = overlap_mode , ds_name = name, ds_cls = clas, ds_loc = loc }) | is_newtype = do { inst_spec <- mkInstance oflag theta spec @@ -2091,6 +2089,7 @@ genInst standalone_deriv oflag comauxs , ib_standalone_deriving = standalone_deriv } } ; return ( inst_info, deriv_stuff, Nothing ) } where + oflag = setOverlapModeMaybe default_oflag overlap_mode rhs_ty = newTyConInstRhs rep_tycon rep_tc_args genDerivStuff :: SrcSpan -> FixityEnv -> Class -> Name -> TyCon diff --git a/compiler/typecheck/TcEnv.lhs b/compiler/typecheck/TcEnv.lhs index f3d754640f..6020797449 100644 --- a/compiler/typecheck/TcEnv.lhs +++ b/compiler/typecheck/TcEnv.lhs @@ -3,7 +3,9 @@ % \begin{code} +{-# LANGUAGE CPP, FlexibleInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} + module TcEnv( TyThing(..), TcTyThing(..), 
TcId, @@ -66,6 +68,7 @@ import TcIface import PrelNames import TysWiredIn import Id +import IdInfo( IdDetails(VanillaId) ) import Var import VarSet import RdrName @@ -801,7 +804,7 @@ mkStableIdFromString str sig_ty loc occ_wrapper = do name <- mkWrapperName "stable" str let occ = mkVarOccFS name :: OccName gnm = mkExternalName uniq mod (occ_wrapper occ) loc :: Name - id = mkExportedLocalId gnm sig_ty :: Id + id = mkExportedLocalId VanillaId gnm sig_ty :: Id return id mkStableIdFromName :: Name -> Type -> SrcSpan -> (OccName -> OccName) -> TcM TcId @@ -864,13 +867,16 @@ notFound name ptext (sLit "is not in scope during type checking, but it passed the renamer"), ptext (sLit "tcl_env of environment:") <+> ppr (tcl_env lcl_env)] -- Take case: printing the whole gbl env can - -- cause an infnite loop, in the case where we + -- cause an infinite loop, in the case where we -- are in the middle of a recursive TyCon/Class group; -- so let's just not print it! Getting a loop here is -- very unhelpful, because it hides one compiler bug with another } wrongThingErr :: String -> TcTyThing -> Name -> TcM a +-- It's important that this only calls pprTcTyThingCategory, which in +-- turn does not look at the details of the TcTyThing. +-- See Note [Placeholder PatSyn kinds] in TcBinds wrongThingErr expected thing name = failWithTc (pprTcTyThingCategory thing <+> quotes (ppr name) <+> ptext (sLit "used as a") <+> text expected) diff --git a/compiler/typecheck/TcErrors.lhs b/compiler/typecheck/TcErrors.lhs index 3ca1319a9d..8fe97519e1 100644 --- a/compiler/typecheck/TcErrors.lhs +++ b/compiler/typecheck/TcErrors.lhs @@ -1,6 +1,6 @@ \begin{code} -{-# LANGUAGE ScopedTypeVariables #-} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, ScopedTypeVariables #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See @@ -668,10 +668,11 @@ mkTyVarEqErr :: DynFlags -> ReportErrCtxt -> SDoc -> Ct -- tv1 and ty2 are already tidied mkTyVarEqErr dflags ctxt extra ct oriented tv1 ty2 | isUserSkolem ctxt tv1 -- ty2 won't be a meta-tyvar, or else the thing would - -- be oriented the other way round; see TcCanonical.reOrient + -- be oriented the other way round; + -- see TcCanonical.canEqTyVarTyVar || isSigTyVar tv1 && not (isTyVarTy ty2) = mkErrorMsg ctxt ct (vcat [ misMatchOrCND ctxt ct oriented ty1 ty2 - , extraTyVarInfo ctxt ty1 ty2 + , extraTyVarInfo ctxt tv1 ty2 , extra ]) -- So tv is a meta tyvar (or started that way before we @@ -701,7 +702,7 @@ mkTyVarEqErr dflags ctxt extra ct oriented tv1 ty2 , Implic { ic_skols = skols } <- implic , tv1 `elem` skols = mkErrorMsg ctxt ct (vcat [ misMatchMsg oriented ty1 ty2 - , extraTyVarInfo ctxt ty1 ty2 + , extraTyVarInfo ctxt tv1 ty2 , extra ]) -- Check for skolem escape @@ -734,7 +735,7 @@ mkTyVarEqErr dflags ctxt extra ct oriented tv1 ty2 , nest 2 $ ptext (sLit "inside the constraints") <+> pprEvVarTheta given , nest 2 $ ptext (sLit "bound by") <+> ppr skol_info , nest 2 $ ptext (sLit "at") <+> ppr (tcl_loc env) ] - tv_extra = extraTyVarInfo ctxt ty1 ty2 + tv_extra = extraTyVarInfo ctxt tv1 ty2 add_sig = suggestAddSig ctxt ty1 ty2 ; mkErrorMsg ctxt ct (vcat [msg, untch_extra, tv_extra, add_sig, extra]) } @@ -793,7 +794,7 @@ misMatchOrCND ctxt ct oriented ty1 ty2 -- or there is no context, don't report the context = misMatchMsg oriented ty1 ty2 | otherwise - = couldNotDeduce givens ([mkEqPred ty1 ty2], orig) + = couldNotDeduce givens ([mkTcEqPred ty1 ty2], orig) where givens = getUserGivens ctxt orig = TypeEqOrigin { uo_actual = ty1, uo_expected = ty2 } @@ -815,15 +816,18 @@ pp_givens givens 2 (sep [ ptext (sLit "bound by") <+> ppr skol_info , ptext (sLit "at") <+> ppr loc]) -extraTyVarInfo :: ReportErrCtxt -> TcType -> TcType -> SDoc +extraTyVarInfo :: ReportErrCtxt -> TcTyVar -> TcType -> SDoc -- Add on extra info about skolem constants -- NB: The types themselves are already tidied -extraTyVarInfo ctxt ty1 ty2 - = nest 2 (tv_extra ty1 $$ tv_extra ty2) +extraTyVarInfo ctxt tv1 ty2 + = nest 2 (tv_extra tv1 $$ ty_extra ty2) where implics = cec_encl ctxt - tv_extra ty | Just tv <- tcGetTyVar_maybe ty - , isTcTyVar tv, isSkolemTyVar tv + ty_extra ty = case tcGetTyVar_maybe ty of + Just tv -> tv_extra tv + Nothing -> empty + + tv_extra tv | isTcTyVar tv, isSkolemTyVar tv , let pp_tv = quotes (ppr tv) = case tcTyVarDetails tv of SkolemTv {} -> pp_tv <+> pprSkol (getSkolemInfo implics tv) (getSrcLoc tv) @@ -1285,29 +1289,51 @@ flattening any further. After all, there can be no instance declarations that match such things. And flattening under a for-all is problematic anyway; consider C (forall a. F a) +Note [Suggest -fprint-explicit-kinds] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +It can be terribly confusing to get an error message like (Trac #9171) + Couldn't match expected type ‘GetParam Base (GetParam Base Int)’ + with actual type ‘GetParam Base (GetParam Base Int)’ +The reason may be that the kinds don't match up. Typically you'll get +more useful information, but not when it's as a result of ambiguity. +This test suggests -fprint-explicit-kinds when all the ambiguous type +variables are kind variables. 
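A sketch of what the suggested flag reveals (not from the patch; the module and binding names are invented, and the exact GHCi rendering may vary between versions): -fprint-explicit-kinds makes GHC print the kind arguments it normally suppresses, which is precisely the information missing when two reported types differ only in their kinds.

\begin{code}
module ExplicitKindsExample where

import Data.Proxy

-- Approximate GHCi session:
--   ghci> :set -fprint-explicit-kinds
--   ghci> :t p
--   p :: Proxy * Int        -- the kind argument '*' is now shown
p :: Proxy Int
p = Proxy
\end{code}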
+ \begin{code} mkAmbigMsg :: Ct -> (Bool, SDoc) mkAmbigMsg ct - | isEmptyVarSet ambig_tv_set = (False, empty) - | otherwise = (True, msg) + | null ambig_tkvs = (False, empty) + | otherwise = (True, msg) where - ambig_tv_set = filterVarSet isAmbiguousTyVar (tyVarsOfCt ct) - ambig_tvs = varSetElems ambig_tv_set - - is_or_are | isSingleton ambig_tvs = text "is" - | otherwise = text "are" - - msg | any isRuntimeUnkSkol ambig_tvs -- See Note [Runtime skolems] + ambig_tkv_set = filterVarSet isAmbiguousTyVar (tyVarsOfCt ct) + ambig_tkvs = varSetElems ambig_tkv_set + (ambig_kvs, ambig_tvs) = partition isKindVar ambig_tkvs + + msg | any isRuntimeUnkSkol ambig_tkvs -- See Note [Runtime skolems] = vcat [ ptext (sLit "Cannot resolve unknown runtime type") <> plural ambig_tvs <+> pprQuotedList ambig_tvs , ptext (sLit "Use :print or :force to determine these types")] - | otherwise - = vcat [ text "The type variable" <> plural ambig_tvs - <+> pprQuotedList ambig_tvs - <+> is_or_are <+> text "ambiguous" ] + + | not (null ambig_tvs) + = pp_ambig (ptext (sLit "type")) ambig_tvs + + | otherwise -- All ambiguous kind variabes; suggest -fprint-explicit-kinds + = vcat [ pp_ambig (ptext (sLit "kind")) ambig_kvs + , sdocWithDynFlags suggest_explicit_kinds ] + + pp_ambig what tkvs + = ptext (sLit "The") <+> what <+> ptext (sLit "variable") <> plural tkvs + <+> pprQuotedList tkvs <+> is_or_are tkvs <+> ptext (sLit "ambiguous") + + is_or_are [_] = text "is" + is_or_are _ = text "are" + + suggest_explicit_kinds dflags -- See Note [Suggest -fprint-explicit-kinds] + | gopt Opt_PrintExplicitKinds dflags = empty + | otherwise = ptext (sLit "Use -fprint-explicit-kinds to see the kind arguments") pprSkol :: SkolemInfo -> SrcLoc -> SDoc -pprSkol UnkSkol _ +pprSkol UnkSkol _ = ptext (sLit "is an unknown type variable") pprSkol skol_info tv_loc = sep [ ptext (sLit "is a rigid type variable bound by"), diff --git a/compiler/typecheck/TcEvidence.lhs b/compiler/typecheck/TcEvidence.lhs index a31f66adaa..7fc6194b8f 100644 --- a/compiler/typecheck/TcEvidence.lhs +++ b/compiler/typecheck/TcEvidence.lhs @@ -3,6 +3,8 @@ % \begin{code} +{-# LANGUAGE CPP, DeriveDataTypeable #-} + module TcEvidence ( -- HsWrapper @@ -351,7 +353,7 @@ pprTcCo, pprParendTcCo :: TcCoercion -> SDoc pprTcCo co = ppr_co TopPrec co pprParendTcCo co = ppr_co TyConPrec co -ppr_co :: Prec -> TcCoercion -> SDoc +ppr_co :: TyPrec -> TcCoercion -> SDoc ppr_co _ (TcRefl r ty) = angleBrackets (ppr ty) <> ppr_role r ppr_co p co@(TcTyConAppCo _ tc [_,_]) @@ -404,7 +406,7 @@ ppr_role r = underscore <> pp_role Representational -> char 'R' Phantom -> char 'P' -ppr_fun_co :: Prec -> TcCoercion -> SDoc +ppr_fun_co :: TyPrec -> TcCoercion -> SDoc ppr_fun_co p co = pprArrowChain p (split co) where split :: TcCoercion -> [SDoc] @@ -413,7 +415,7 @@ ppr_fun_co p co = pprArrowChain p (split co) = ppr_co FunPrec arg : split res split co = [ppr_co TopPrec co] -ppr_forall_co :: Prec -> TcCoercion -> SDoc +ppr_forall_co :: TyPrec -> TcCoercion -> SDoc ppr_forall_co p ty = maybeParen p FunPrec $ sep [pprForAll tvs, ppr_co TopPrec rho] @@ -594,7 +596,7 @@ data EvTerm -- dictionaries, even though the former have no -- selector Id. We count up from _0_ - | EvLit EvLit -- Dictionary for KnownNat and KnownLit classes. + | EvLit EvLit -- Dictionary for KnownNat and KnownSymbol classes. -- Note [KnownNat & KnownSymbol and EvLit] deriving( Data.Data, Data.Typeable) @@ -651,7 +653,7 @@ Conclusion: a new wanted coercion variable should be made mutable. 
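The EvLit evidence touched here (and described in the Note that follows) is what backs the KnownNat and KnownSymbol classes of GHC.TypeLits. A user-level sketch of the API that evidence serves, not taken from the patch; the binding names are invented:

\begin{code}
{-# LANGUAGE DataKinds #-}
module TypeLitsExample where

import Data.Proxy
import GHC.TypeLits

-- Each call is witnessed by an EvLit dictionary: the underlying
-- Integer or String wrapped in the SNat/SSymbol singleton newtype.
five :: Integer
five = natVal (Proxy :: Proxy 5)

greeting :: String
greeting = symbolVal (Proxy :: Proxy "hello")
\end{code}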
Note [KnownNat & KnownSymbol and EvLit] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A part of the type-level literals implementation are the classes -"KnownNat" and "KnownLit", which provide a "smart" constructor for +"KnownNat" and "KnownSymbol", which provide a "smart" constructor for defining singleton values. Here is the key stuff from GHC.TypeLits class KnownNat (n :: Nat) where @@ -692,7 +694,7 @@ especialy when the `KnowNat` evidence is packaged up in an existential. The story for kind `Symbol` is analogous: * class KnownSymbol - * newypte SSymbol + * newtype SSymbol * Evidence: EvLit (EvStr n) diff --git a/compiler/typecheck/TcExpr.lhs b/compiler/typecheck/TcExpr.lhs index 3397b0836a..7e6c495506 100644 --- a/compiler/typecheck/TcExpr.lhs +++ b/compiler/typecheck/TcExpr.lhs @@ -5,6 +5,8 @@ c% \section[TcExpr]{Typecheck an expression} \begin{code} +{-# LANGUAGE CPP #-} + module TcExpr ( tcPolyExpr, tcPolyExprNC, tcMonoExpr, tcMonoExprNC, tcInferRho, tcInferRhoNC, tcSyntaxOp, tcCheckId, @@ -74,7 +76,7 @@ import qualified Data.Set as Set \begin{code} tcPolyExpr, tcPolyExprNC :: LHsExpr Name -- Expression to type check - -> TcSigmaType -- Expected type (could be a polytpye) + -> TcSigmaType -- Expected type (could be a polytype) -> TcM (LHsExpr TcId) -- Generalised expr with expected type -- tcPolyExpr is a convenient place (frequent but not too frequent) @@ -200,7 +202,7 @@ tcExpr (HsIPVar x) res_ty ; ip_var <- emitWanted origin (mkClassPred ipClass [ip_name, ip_ty]) ; tcWrapResult (fromDict ipClass ip_name ip_ty (HsVar ip_var)) ip_ty res_ty } where - -- Coerces a dictionry for `IP "x" t` into `t`. + -- Coerces a dictionary for `IP "x" t` into `t`. fromDict ipClass x ty = case unwrapNewTyCon_maybe (classTyCon ipClass) of Just (_,_,ax) -> HsWrap $ mkWpCast $ mkTcUnbranchedAxInstCo Representational ax [x,ty] @@ -498,7 +500,8 @@ for conditionals: to support expressions like this: ifThenElse :: Maybe a -> (a -> b) -> b -> b - ifThenElse (Just a) f _ = f a ifThenElse Nothing _ e = e + ifThenElse (Just a) f _ = f a + ifThenElse Nothing _ e = e example :: String example = if Just 2 @@ -562,7 +565,7 @@ Note that because MkT3 doesn't contain all the fields being updated, its RHS is simply an error, so it doesn't impose any type constraints. Hence the use of 'relevant_cont'. -Note [Implict type sharing] +Note [Implicit type sharing] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ We also take into account any "implicit" non-update fields. For example data T a b where { MkT { f::a } :: T a a; ... } @@ -748,7 +751,7 @@ tcExpr (RecordUpd record_expr rbinds _ _ _) res_ty -- Universally-quantified tyvars that -- appear in any of the *implicit* -- arguments to the constructor are fixed - -- See Note [Implict type sharing] + -- See Note [Implicit type sharing] fixed_tys = [ty | (fld,ty) <- zip flds arg_tys , not (fld `elem` upd_fld_names)] @@ -804,7 +807,7 @@ tcExpr (PArrSeq _ _) _ \begin{code} tcExpr (HsSpliceE is_ty splice) res_ty - = ASSERT( is_ty ) -- Untyped splices are expanced by the renamer + = ASSERT( is_ty ) -- Untyped splices are expanded by the renamer tcSpliceExpr splice res_ty tcExpr (HsBracket brack) res_ty = tcTypedBracket brack res_ty @@ -963,7 +966,7 @@ tcInferFun fun -- Zonk the function type carefully, to expose any polymorphism -- E.g. (( \(x::forall a. a->a). 
blah ) e) - -- We can see the rank-2 type of the lambda in time to genrealise e + -- We can see the rank-2 type of the lambda in time to generalise e ; fun_ty' <- zonkTcType fun_ty ; (wrap, rho) <- deeplyInstantiate AppOrigin fun_ty' diff --git a/compiler/typecheck/TcForeign.lhs b/compiler/typecheck/TcForeign.lhs index 63eb020ff1..8370e0aa06 100644 --- a/compiler/typecheck/TcForeign.lhs +++ b/compiler/typecheck/TcForeign.lhs @@ -12,6 +12,8 @@ is restricted to what the outside world understands (read C), and this module checks to see if a foreign declaration has got a legal type. \begin{code} +{-# LANGUAGE CPP #-} + module TcForeign ( tcForeignImports , tcForeignExports @@ -92,6 +94,20 @@ parameters. Similarly, we don't need to look in AppTy's, because nothing headed by an AppTy will be marshalable. +Note [FFI type roles] +~~~~~~~~~~~~~~~~~~~~~ +The 'go' helper function within normaliseFfiType' always produces +representational coercions. But, in the "children_only" case, we need to +use these coercions in a TyConAppCo. Accordingly, the roles on the coercions +must be twiddled to match the expectation of the enclosing TyCon. However, +we cannot easily go from an R coercion to an N one, so we forbid N roles +on FFI type constructors. Currently, only two such type constructors exist: +IO and FunPtr. Thus, this is not an onerous burden. + +If we ever want to lift this restriction, we would need to make 'go' take +the target role as a parameter. This wouldn't be hard, but it's a complication +not yet necessary and so is not yet implemented. + \begin{code} -- normaliseFfiType takes the type from an FFI declaration, and -- evaluates any type synonyms, type functions, and newtypes. However, @@ -114,7 +130,8 @@ normaliseFfiType' env ty0 = go initRecTc ty0 -- We don't want to look through the IO newtype, even if it is -- in scope, so we have a special case for it: | tc_key `elem` [ioTyConKey, funPtrTyConKey] - -- Those *must* have R roles on their parameters! + -- These *must not* have nominal roles on their parameters! + -- See Note [FFI type roles] = children_only | isNewTyCon tc -- Expand newtypes @@ -141,10 +158,14 @@ normaliseFfiType' env ty0 = go initRecTc ty0 = nothing -- see Note [Don't recur in normaliseFfiType'] where tc_key = getUnique tc - children_only + children_only = do xs <- mapM (go rec_nts) tys let (cos, tys', gres) = unzip3 xs - return ( mkTyConAppCo Representational tc cos + -- the (repeat Representational) is because 'go' always + -- returns R coercions + cos' = zipWith3 downgradeRole (tyConRoles tc) + (repeat Representational) cos + return ( mkTyConAppCo Representational tc cos' , mkTyConApp tc tys', unionManyBags gres) nt_co = mkUnbranchedAxInstCo Representational (newTyConCo tc) tys nt_rhs = newTyConInstRhs tc tys diff --git a/compiler/typecheck/TcGenDeriv.lhs b/compiler/typecheck/TcGenDeriv.lhs index 7031e54f6f..960e3faaa3 100644 --- a/compiler/typecheck/TcGenDeriv.lhs +++ b/compiler/typecheck/TcGenDeriv.lhs @@ -11,7 +11,7 @@ This module is nominally ``subordinate'' to @TcDeriv@, which is the This is where we do all the grimy bindings' generation. 
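The role adjustment in normaliseFfiType' and the new Note [FFI type roles] above concern how newtypes are looked through at FFI boundaries while IO and FunPtr stay opaque. A sketch of the kind of declaration that gets normalised (not from the patch; Fd and c_close are invented here, though the bound C function is the usual POSIX close):

\begin{code}
{-# LANGUAGE ForeignFunctionInterface #-}
module FfiNewtypeExample where

import Foreign.C.Types (CInt (..))

-- Because the Fd constructor is in scope, normaliseFfiType' unwraps
-- the newtype and the imported type reduces to CInt -> IO CInt.
newtype Fd = Fd CInt

foreign import ccall unsafe "close"
  c_close :: Fd -> IO CInt
\end{code}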
\begin{code} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE CPP, ScopedTypeVariables #-} module TcGenDeriv ( BagDerivStuff, DerivStuff(..), diff --git a/compiler/typecheck/TcGenGenerics.lhs b/compiler/typecheck/TcGenGenerics.lhs index d9d92ba2ea..385fc37306 100644 --- a/compiler/typecheck/TcGenGenerics.lhs +++ b/compiler/typecheck/TcGenGenerics.lhs @@ -6,13 +6,7 @@ The deriving code for the Generic class (equivalent to the code in TcGenDeriv, for other classes) \begin{code} -{-# LANGUAGE ScopedTypeVariables #-} -{-# OPTIONS -fno-warn-tabs #-} --- The above warning supression flag is a temporary kludge. --- While working on this module you are encouraged to remove it and --- detab the module (please do the detabbing in a separate patch). See --- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces --- for details +{-# LANGUAGE CPP, ScopedTypeVariables #-} module TcGenGenerics (canDoGenerics, canDoGenerics1, @@ -46,7 +40,7 @@ import BuildTyCl import SrcLoc import Bag import VarSet (elemVarSet) -import Outputable +import Outputable import FastString import Util @@ -64,7 +58,7 @@ import Control.Monad (mplus,forM) For the generic representation we need to generate: \begin{itemize} \item A Generic instance -\item A Rep type instance +\item A Rep type instance \item Many auxiliary datatypes and instances for them (for the meta-information) \end{itemize} @@ -90,7 +84,7 @@ genGenericMetaTyCons tc mod = mkTyCon name = ASSERT( isExternalName name ) buildAlgTyCon name [] [] Nothing [] distinctAbstractTyConRhs - NonRecursive + NonRecursive False -- Not promotable False -- Not GADT syntax NoParentTyCon @@ -121,21 +115,21 @@ metaTyConsToDerivStuff tc metaDts = cClas <- tcLookupClass constructorClassName c_dfun_names <- sequence [ new_dfun_name cClas tc | _ <- metaC metaDts ] sClas <- tcLookupClass selectorClassName - s_dfun_names <- sequence (map sequence [ [ new_dfun_name sClas tc - | _ <- x ] + s_dfun_names <- sequence (map sequence [ [ new_dfun_name sClas tc + | _ <- x ] | x <- metaS metaDts ]) fix_env <- getFixityEnv let - safeOverlap = safeLanguageOn dflags (dBinds,cBinds,sBinds) = mkBindsMetaD fix_env tc - mk_inst clas tc dfun_name + mk_inst clas tc dfun_name = mkLocalInstance (mkDictFunId dfun_name [] [] clas tys) - (NoOverlap safeOverlap) + OverlapFlag { overlapMode = NoOverlap + , isSafeOverlap = safeLanguageOn dflags } [] clas tys where tys = [mkTyConTy tc] - + -- Datatype d_metaTycon = metaD metaDts d_inst = mk_inst dClas d_metaTycon d_dfun_name @@ -144,7 +138,7 @@ metaTyConsToDerivStuff tc metaDts = , ib_extensions = [] , ib_standalone_deriving = False } d_mkInst = DerivInst (InstInfo { iSpec = d_inst, iBinds = d_binds }) - + -- Constructor c_metaTycons = metaC metaDts c_insts = [ mk_inst cClas c ds @@ -156,7 +150,7 @@ metaTyConsToDerivStuff tc metaDts = | c <- cBinds ] c_mkInst = [ DerivInst (InstInfo { iSpec = is, iBinds = bs }) | (is,bs) <- myZip1 c_insts c_binds ] - + -- Selector s_metaTycons = metaS metaDts s_insts = map (map (\(s,ds) -> mk_inst sClas s ds)) @@ -169,15 +163,15 @@ metaTyConsToDerivStuff tc metaDts = s_mkInst = map (map (\(is,bs) -> DerivInst (InstInfo { iSpec = is , iBinds = bs}))) (myZip2 s_insts s_binds) - + myZip1 :: [a] -> [b] -> [(a,b)] myZip1 l1 l2 = ASSERT(length l1 == length l2) zip l1 l2 - + myZip2 :: [[a]] -> [[b]] -> [[(a,b)]] myZip2 l1 l2 = ASSERT(and (zipWith (>=) (map length l1) (map length l2))) [ zip x1 x2 | (x1,x2) <- zip l1 l2 ] - + return $ mapBag DerivTyCon (metaTyCons2TyCons metaDts) `unionBags` listToBag (d_mkInst : c_mkInst 
++ concat s_mkInst) \end{code} @@ -189,14 +183,13 @@ metaTyConsToDerivStuff tc metaDts = %************************************************************************ \begin{code} -get_gen1_constrained_tys :: TyVar -> [Type] -> [Type] +get_gen1_constrained_tys :: TyVar -> Type -> [Type] -- called by TcDeriv.inferConstraints; generates a list of types, each of which -- must be a Functor in order for the Generic1 instance to work. -get_gen1_constrained_tys argVar = - concatMap $ argTyFold argVar $ ArgTyAlg { - ata_rec0 = const [], - ata_par1 = [], ata_rec1 = const [], - ata_comp = (:)} +get_gen1_constrained_tys argVar + = argTyFold argVar $ ArgTyAlg { ata_rec0 = const [] + , ata_par1 = [], ata_rec1 = const [] + , ata_comp = (:) } {- @@ -287,8 +280,8 @@ canDoGenerics tc tc_args then (Just (ppr dc <+> text "must be a vanilla data constructor")) else Nothing) - -- Nor can we do the job if it's an existential data constructor, - -- Nor if the args are polymorphic types (I don't think) + -- Nor can we do the job if it's an existential data constructor, + -- Nor if the args are polymorphic types (I don't think) bad_arg_type ty = isUnLiftedType ty || not (isTauTy ty) mergeErrors :: [Maybe SDoc] -> Maybe SDoc @@ -402,13 +395,13 @@ canDoGenerics1 rep_tc tc_args = \end{code} %************************************************************************ -%* * +%* * \subsection{Generating the RHS of a generic default method} -%* * +%* * %************************************************************************ \begin{code} -type US = Int -- Local unique supply, just a plain Int +type US = Int -- Local unique supply, just a plain Int type Alt = (LPat RdrName, LHsExpr RdrName) -- GenericKind serves to mark if a datatype derives Generic (Gen0) or @@ -435,7 +428,7 @@ gk2gkDC Gen1_{} d = Gen1_DC $ last $ dataConUnivTyVars d -- Bindings for the Generic instance mkBindsRep :: GenericKind -> TyCon -> LHsBinds RdrName -mkBindsRep gk tycon = +mkBindsRep gk tycon = unitBag (mkRdrFunBind (L loc from01_RDR) from_matches) `unionBags` unitBag (mkRdrFunBind (L loc to01_RDR) to_matches) @@ -457,7 +450,7 @@ mkBindsRep gk tycon = Gen1 -> ASSERT(length tyvars >= 1) Gen1_ (last tyvars) where tyvars = tyConTyVars tycon - + -------------------------------------------------------------------------------- -- The type synonym instance and synonym -- type instance Rep (D a b) = Rep_D a b @@ -469,7 +462,7 @@ tc_mkRepFamInsts :: GenericKind -- Gen0 or Gen1 -> MetaTyCons -- Metadata datatypes to refer to -> Module -- Used as the location of the new RepTy -> TcM (FamInst) -- Generated representation0 coercion -tc_mkRepFamInsts gk tycon metaDts mod = +tc_mkRepFamInsts gk tycon metaDts mod = -- Consider the example input tycon `D`, where data D a b = D_ a -- Also consider `R:DInt`, where { data family D x y :: * -> * -- ; data instance D Int a b = D_ a } @@ -502,7 +495,7 @@ tc_mkRepFamInsts gk tycon metaDts mod = -- `repTy` = D1 ... (C1 ... (S1 ... 
(Rec0 a))) :: * -> * ; repTy <- tc_mkRepTy gk_ tycon metaDts - + -- `rep_name` is a name we generate for the synonym ; rep_name <- let mkGen = case gk of Gen0 -> mkGenR; Gen1 -> mkGen1R in newGlobalBinder mod (mkGen (nameOccName (tyConName tycon))) @@ -585,10 +578,10 @@ tc_mkRepTy :: -- Gen0_ or Gen1_, for Rep or Rep1 -- The type to generate representation for -> TyCon -- Metadata datatypes to refer to - -> MetaTyCons + -> MetaTyCons -- Generated representation0 type -> TcM Type -tc_mkRepTy gk_ tycon metaDts = +tc_mkRepTy gk_ tycon metaDts = do d1 <- tcLookupTyCon d1TyConName c1 <- tcLookupTyCon c1TyConName @@ -602,7 +595,7 @@ tc_mkRepTy gk_ tycon metaDts = plus <- tcLookupTyCon sumTyConName times <- tcLookupTyCon prodTyConName comp <- tcLookupTyCon compTyConName - + let mkSum' a b = mkTyConApp plus [a,b] mkProd a b = mkTyConApp times [a,b] mkComp a b = mkTyConApp comp [a,b] @@ -616,7 +609,7 @@ tc_mkRepTy gk_ tycon metaDts = mkS True _ a = mkTyConApp s1 [mkTyConTy nS1, a] -- This field has a label mkS False d a = mkTyConApp s1 [d, a] - + -- Sums and products are done in the same way for both Rep and Rep1 sumP [] = mkTyConTy v1 sumP l = ASSERT(length metaCTyCons == length l) @@ -631,9 +624,9 @@ tc_mkRepTy gk_ tycon metaDts = ASSERT(length l == length (metaSTyCons !! i)) foldBal mkProd [ arg d t b | (d,t) <- zip (metaSTyCons !! i) l ] - + arg :: Type -> Type -> Bool -> Type - arg d t b = mkS b d $ case gk_ of + arg d t b = mkS b d $ case gk_ of -- Here we previously used Par0 if t was a type variable, but we -- realized that we can't always guarantee that we are wrapping-up -- all type variables in Par0. So we decided to stop using Par0 @@ -646,40 +639,40 @@ tc_mkRepTy gk_ tycon metaDts = argPar argVar = argTyFold argVar $ ArgTyAlg {ata_rec0 = mkRec0, ata_par1 = mkPar1, ata_rec1 = mkRec1, ata_comp = mkComp} - - + + metaDTyCon = mkTyConTy (metaD metaDts) metaCTyCons = map mkTyConTy (metaC metaDts) metaSTyCons = map (map mkTyConTy) (metaS metaDts) - + return (mkD tycon) -------------------------------------------------------------------------------- -- Meta-information -------------------------------------------------------------------------------- -data MetaTyCons = MetaTyCons { -- One meta datatype per dataype +data MetaTyCons = MetaTyCons { -- One meta datatype per datatype metaD :: TyCon -- One meta datatype per constructor , metaC :: [TyCon] -- One meta datatype per selector per constructor , metaS :: [[TyCon]] } - + instance Outputable MetaTyCons where ppr (MetaTyCons d c s) = ppr d $$ vcat (map ppr c) $$ vcat (map ppr (concat s)) - + metaTyCons2TyCons :: MetaTyCons -> Bag TyCon metaTyCons2TyCons (MetaTyCons d c s) = listToBag (d : c ++ concat s) -- Bindings for Datatype, Constructor, and Selector instances -mkBindsMetaD :: FixityEnv -> TyCon +mkBindsMetaD :: FixityEnv -> TyCon -> ( LHsBinds RdrName -- Datatype instance , [LHsBinds RdrName] -- Constructor instances , [[LHsBinds RdrName]]) -- Selector instances mkBindsMetaD fix_env tycon = (dtBinds, allConBinds, allSelBinds) where - mkBag l = foldr1 unionBags + mkBag l = foldr1 unionBags [ unitBag (mkRdrFunBind (L loc name) matches) | (name, matches) <- l ] dtBinds = mkBag ( [ (datatypeName_RDR, dtName_matches) @@ -717,7 +710,7 @@ mkBindsMetaD fix_env tycon = (dtBinds, allConBinds, allSelBinds) dtName_matches = mkStringLHS . occNameString . nameOccName $ tyConName_user - moduleName_matches = mkStringLHS . moduleNameString . moduleName + moduleName_matches = mkStringLHS . moduleNameString . moduleName . nameModule . 
tyConName $ tycon isNewtype_matches = [mkSimpleHsAlt nlWildPat (nlHsVar true_RDR)] @@ -778,10 +771,10 @@ mk1Sum gk_ us i n datacon = (from_alt, to_alt) us' = us + n_args datacon_rdr = getRdrName datacon - + from_alt = (nlConVarPat datacon_rdr datacon_vars, from_alt_rhs) from_alt_rhs = mkM1_E (genLR_E i n (mkProd_E gk_ us' datacon_varTys)) - + to_alt = (mkM1_P (genLR_P i n (mkProd_P gk us' datacon_vars)), to_alt_rhs) -- These M1s are meta-information for the datatype to_alt_rhs = case gk_ of @@ -822,9 +815,9 @@ genLR_E i n e -- Build a product expression mkProd_E :: GenericKind_DC -- Generic or Generic1? - -> US -- Base for unique names + -> US -- Base for unique names -> [(RdrName, Type)] -- List of variables matched on the lhs and their types - -> LHsExpr RdrName -- Resulting product expression + -> LHsExpr RdrName -- Resulting product expression mkProd_E _ _ [] = mkM1_E (nlHsVar u1DataCon_RDR) mkProd_E gk_ _ varTys = mkM1_E (foldBal prod appVars) -- These M1s are meta-information for the constructor @@ -848,9 +841,9 @@ wrapArg_E (Gen1_DC argVar) (var, ty) = mkM1_E $ converter ty `nlHsApp` nlHsVar v -- Build a product pattern mkProd_P :: GenericKind -- Gen0 or Gen1 - -> US -- Base for unique names - -> [RdrName] -- List of variables to match - -> LPat RdrName -- Resulting product pattern + -> US -- Base for unique names + -> [RdrName] -- List of variables to match + -> LPat RdrName -- Resulting product pattern mkProd_P _ _ [] = mkM1_P (nlNullaryConPat u1DataCon_RDR) mkProd_P gk _ vars = mkM1_P (foldBal prod appVars) -- These M1s are meta-information for the constructor diff --git a/compiler/typecheck/TcHsSyn.lhs b/compiler/typecheck/TcHsSyn.lhs index 59b42ea673..f90cfca317 100644 --- a/compiler/typecheck/TcHsSyn.lhs +++ b/compiler/typecheck/TcHsSyn.lhs @@ -9,12 +9,15 @@ This module is an extension of @HsSyn@ syntax, for use in the type checker. 
\begin{code} +{-# LANGUAGE CPP #-} + module TcHsSyn ( mkHsConApp, mkHsDictLet, mkHsApp, hsLitType, hsLPatType, hsPatType, mkHsAppTy, mkSimpleHsAlt, nlHsIntLit, shortCutLit, hsOverLitName, + conLikeResTy, -- re-exported from TcMonad TcId, TcIdSet, @@ -38,7 +41,9 @@ import TcEvidence import TysPrim import TysWiredIn import Type +import ConLike import DataCon +import PatSyn( patSynInstResTy ) import Name import NameSet import Var @@ -80,14 +85,19 @@ hsPatType (ViewPat _ _ ty) = ty hsPatType (ListPat _ ty Nothing) = mkListTy ty hsPatType (ListPat _ _ (Just (ty,_))) = ty hsPatType (PArrPat _ ty) = mkPArrTy ty -hsPatType (TuplePat _ _ ty) = ty -hsPatType (ConPatOut { pat_ty = ty }) = ty +hsPatType (TuplePat _ bx tys) = mkTupleTy (boxityNormalTupleSort bx) tys +hsPatType (ConPatOut { pat_con = L _ con, pat_arg_tys = tys }) + = conLikeResTy con tys hsPatType (SigPatOut _ ty) = ty hsPatType (NPat lit _ _) = overLitType lit hsPatType (NPlusKPat id _ _ _) = idType (unLoc id) hsPatType (CoPat _ _ ty) = ty hsPatType p = pprPanic "hsPatType" (ppr p) +conLikeResTy :: ConLike -> [Type] -> Type +conLikeResTy (RealDataCon con) tys = mkTyConApp (dataConTyCon con) tys +conLikeResTy (PatSynCon ps) tys = patSynInstResTy ps tys + hsLitType :: HsLit -> TcType hsLitType (HsChar _) = charTy hsLitType (HsCharPrim _) = charPrimTy @@ -1025,16 +1035,16 @@ zonk_pat env (PArrPat pats ty) ; (env', pats') <- zonkPats env pats ; return (env', PArrPat pats' ty') } -zonk_pat env (TuplePat pats boxed ty) - = do { ty' <- zonkTcTypeToType env ty +zonk_pat env (TuplePat pats boxed tys) + = do { tys' <- mapM (zonkTcTypeToType env) tys ; (env', pats') <- zonkPats env pats - ; return (env', TuplePat pats' boxed ty') } + ; return (env', TuplePat pats' boxed tys') } -zonk_pat env p@(ConPatOut { pat_ty = ty, pat_tvs = tyvars +zonk_pat env p@(ConPatOut { pat_arg_tys = tys, pat_tvs = tyvars , pat_dicts = evs, pat_binds = binds , pat_args = args, pat_wrap = wrapper }) = ASSERT( all isImmutableTyVar tyvars ) - do { new_ty <- zonkTcTypeToType env ty + do { new_tys <- mapM (zonkTcTypeToType env) tys ; (env0, new_tyvars) <- zonkTyBndrsX env tyvars -- Must zonk the existential variables, because their -- /kind/ need potential zonking. @@ -1043,7 +1053,7 @@ zonk_pat env p@(ConPatOut { pat_ty = ty, pat_tvs = tyvars ; (env2, new_binds) <- zonkTcEvBinds env1 binds ; (env3, new_wrapper) <- zonkCoFn env2 wrapper ; (env', new_args) <- zonkConStuff env3 args - ; return (env', p { pat_ty = new_ty, + ; return (env', p { pat_arg_tys = new_tys, pat_tvs = new_tyvars, pat_dicts = new_evs, pat_binds = new_binds, diff --git a/compiler/typecheck/TcHsType.lhs b/compiler/typecheck/TcHsType.lhs index eed906898b..cdeb191489 100644 --- a/compiler/typecheck/TcHsType.lhs +++ b/compiler/typecheck/TcHsType.lhs @@ -5,7 +5,8 @@ \section[TcMonoType]{Typechecking user-specified @MonoTypes@} \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -53,6 +54,7 @@ import TcType import Type import TypeRep( Type(..) 
) -- For the mkNakedXXX stuff import Kind +import RdrName( lookupLocalRdrOcc ) import Var import VarSet import TyCon @@ -72,8 +74,9 @@ import Outputable import FastString import Util +import Data.Maybe( isNothing ) import Control.Monad ( unless, when, zipWithM ) -import PrelNames( ipClassName, funTyConKey ) +import PrelNames( ipClassName, funTyConKey, allNameStrings ) \end{code} @@ -207,18 +210,22 @@ tc_inst_head hs_ty = tc_hs_type hs_ty ekConstraint ----------------- -tcHsDeriv :: HsType Name -> TcM ([TyVar], Class, [Type]) --- Like tcHsSigTypeNC, but for the ...deriving( ty ) clause -tcHsDeriv hs_ty - = do { kind <- newMetaKindVar - ; ty <- tcCheckHsTypeAndGen hs_ty kind - -- Funny newtype deriving form - -- forall a. C [a] - -- where C has arity 2. Hence any-kinded result - ; ty <- zonkSigType ty +tcHsDeriv :: HsType Name -> TcM ([TyVar], Class, [Type], Kind) +-- Like tcHsSigTypeNC, but for the ...deriving( C t1 ty2 ) clause +-- Returns the C, [ty1, ty2, and the kind of C's *next* argument +-- E.g. class C (a::*) (b::k->k) +-- data T a b = ... deriving( C Int ) +-- returns ([k], C, [k, Int], k->k) +-- Also checks that (C ty1 ty2 arg) :: Constraint +-- if arg has a suitable kind +tcHsDeriv hs_ty + = do { arg_kind <- newMetaKindVar + ; ty <- tcCheckHsTypeAndGen hs_ty (mkArrowKind arg_kind constraintKind) + ; ty <- zonkSigType ty + ; arg_kind <- zonkSigType arg_kind ; let (tvs, pred) = splitForAllTys ty ; case getClassPredTys_maybe pred of - Just (cls, tys) -> return (tvs, cls, tys) + Just (cls, tys) -> return (tvs, cls, tys, arg_kind) Nothing -> failWithTc (ptext (sLit "Illegal deriving item") <+> quotes (ppr hs_ty)) } -- Used for 'VECTORISE [SCALAR] instance' declarations @@ -389,13 +396,17 @@ tc_hs_type hs_ty@(HsAppTy ty1 ty2) exp_kind (fun_ty, arg_tys) = splitHsAppTys ty1 [ty2] --------- Foralls -tc_hs_type hs_ty@(HsForAllTy _ hs_tvs context ty) exp_kind - = tcHsTyVarBndrs hs_tvs $ \ tvs' -> +tc_hs_type hs_ty@(HsForAllTy _ hs_tvs context ty) exp_kind@(EK exp_k _) + | isConstraintKind exp_k + = failWithTc (hang (ptext (sLit "Illegal constraint:")) 2 (ppr hs_ty)) + + | otherwise + = tcHsTyVarBndrs hs_tvs $ \ tvs' -> -- Do not kind-generalise here! See Note [Kind generalisation] do { ctxt' <- tcHsContext context ; ty' <- if null (unLoc context) then -- Plain forall, no context tc_lhs_type ty exp_kind -- Why exp_kind? See Note [Body kind of forall] - else + else -- If there is a context, then this forall is really a -- _function_, so the kind of the result really is * -- The body kind (result of the function can be * or #, hence ekOpen @@ -614,7 +625,6 @@ tcTyVar :: Name -> TcM (TcType, TcKind) tcTyVar name -- Could be a tyvar, a tycon, or a datacon = do { traceTc "lk1" (ppr name) ; thing <- tcLookup name - ; traceTc "lk2" (ppr name <+> ppr thing) ; case thing of ATyVar _ tv | isKindVar tv @@ -724,17 +734,17 @@ mkNakedAppTys ty1 tys2 = foldl AppTy ty1 tys2 zonkSigType :: TcType -> TcM TcType -- Zonk the result of type-checking a user-written type signature --- It may have kind varaibles in it, but no meta type variables +-- It may have kind variables in it, but no meta type variables -- Because of knot-typing (see Note [Zonking inside the knot]) --- it may need to establish the Type invariants; +-- it may need to establish the Type invariants; -- hence the use of mkTyConApp and mkAppTy zonkSigType ty = go ty where go (TyConApp tc tys) = do tys' <- mapM go tys return (mkTyConApp tc tys') - -- Key point: establish Type invariants! 
- -- See Note [Zonking inside the knot] + -- Key point: establish Type invariants! + -- See Note [Zonking inside the knot] go (LitTy n) = return (LitTy n) @@ -1297,6 +1307,11 @@ tcTyClTyVars tycon (HsQTvs { hsq_kvs = hs_kvs, hsq_tvs = hs_tvs }) thing_inside ; tvs <- zipWithM tc_hs_tv hs_tvs kinds ; tcExtendTyVarEnv tvs (thing_inside (kvs ++ tvs) res) } where + -- In the case of associated types, the renamer has + -- ensured that the names are in commmon + -- e.g. class C a_29 where + -- type T b_30 a_29 :: * + -- Here the a_29 is shared tc_hs_tv (L _ (UserTyVar n)) kind = return (mkTyVar n kind) tc_hs_tv (L _ (KindedTyVar n hs_k)) kind = do { tc_kind <- tcLHsKind hs_k ; checkKind kind tc_kind @@ -1313,21 +1328,20 @@ tcDataKindSig kind = do { checkTc (isLiftedTypeKind res_kind) (badKindSig kind) ; span <- getSrcSpanM ; us <- newUniqueSupply + ; rdr_env <- getLocalRdrEnv ; let uniqs = uniqsFromSupply us - ; return [ mk_tv span uniq str kind - | ((kind, str), uniq) <- arg_kinds `zip` dnames `zip` uniqs ] } + occs = [ occ | str <- allNameStrings + , let occ = mkOccName tvName str + , isNothing (lookupLocalRdrOcc rdr_env occ) ] + -- Note [Avoid name clashes for associated data types] + + ; return [ mk_tv span uniq occ kind + | ((kind, occ), uniq) <- arg_kinds `zip` occs `zip` uniqs ] } where (arg_kinds, res_kind) = splitKindFunTys kind - mk_tv loc uniq str kind = mkTyVar name kind - where - name = mkInternalName uniq occ loc - occ = mkOccName tvName str + mk_tv loc uniq occ kind + = mkTyVar (mkInternalName uniq occ loc) kind - dnames = map ('$' :) names -- Note [Avoid name clashes for associated data types] - - names :: [String] - names = [ c:cs | cs <- "" : names, c <- ['a'..'z'] ] - badKindSig :: Kind -> SDoc badKindSig kind = hang (ptext (sLit "Kind signature on data type declaration has non-* return kind")) @@ -1338,19 +1352,17 @@ Note [Avoid name clashes for associated data types] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider class C a b where data D b :: * -> * -When typechecking the decl for D, we'll invent an extra type variable for D, -to fill out its kind. We *don't* want this type variable to be 'a', because -in an .hi file we'd get +When typechecking the decl for D, we'll invent an extra type variable +for D, to fill out its kind. Ideally we don't want this type variable +to be 'a', because when pretty printing we'll get class C a b where - data D b a -which makes it look as if there are *two* type indices. But there aren't! -So we use $a instead, which cannot clash with a user-written type variable. -Remember that type variable binders in interface files are just FastStrings, -not proper Names. - -(The tidying phase can't help here because we don't tidy TyCons. Another -alternative would be to record the number of indexing parameters in the -interface file.) + data D b a0 +(NB: the tidying happens in the conversion to IfaceSyn, which happens +as part of pretty-printing a TyThing.) + +That's why we look in the LocalRdrEnv to see what's in scope. This is +important only to get nice-looking output when doing ":info C" in GHCi. +It isn't essential for correctness. 
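As a concrete illustration of the situation this Note describes, here is a small, hypothetical program (not part of this patch; the module name is invented). The associated data family 'D' is declared with a single index 'b' but has kind * -> * -> *, so GHC must invent a name for the missing second index when it prints the class; with the change above it now picks an ordinary in-scope-free name rather than a '$'-prefixed one:

    {-# LANGUAGE MultiParamTypeClasses, TypeFamilies #-}
    module AssocTyVarNames where

    class C a b where
      data D b :: * -> *

    -- In GHCi, ":info C" would now display the invented variable along
    -- the lines of the Note's example, e.g.   data D b a0
    -- (the exact name depends on what is already in scope).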
%************************************************************************ diff --git a/compiler/typecheck/TcInstDcls.lhs b/compiler/typecheck/TcInstDcls.lhs index fc1842908d..c3ba825cd5 100644 --- a/compiler/typecheck/TcInstDcls.lhs +++ b/compiler/typecheck/TcInstDcls.lhs @@ -6,7 +6,8 @@ TcInstDecls: Typechecking instance declarations \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -37,6 +38,7 @@ import TcDeriv import TcEnv import TcHsType import TcUnify +import Coercion ( pprCoAxiom ) import MkCore ( nO_METHOD_BINDING_ERROR_ID ) import Type import TcEvidence @@ -68,6 +70,7 @@ import BooleanFormula ( isUnsatisfied, pprBooleanFormulaNice ) import Control.Monad import Maybes ( isNothing, isJust, whenIsJust ) +import Data.List ( mapAccumL ) \end{code} Typechecking instance declarations is done in two passes. The first @@ -504,6 +507,7 @@ tcLocalInstDecl (L loc (ClsInstD { cid_inst = decl })) tcClsInstDecl :: LClsInstDecl Name -> TcM ([InstInfo Name], [FamInst]) tcClsInstDecl (L loc (ClsInstDecl { cid_poly_ty = poly_ty, cid_binds = binds , cid_sigs = uprags, cid_tyfam_insts = ats + , cid_overlap_mode = overlap_mode , cid_datafam_insts = adts })) = setSrcSpan loc $ addErrCtxt (instDeclCtxt1 poly_ty) $ @@ -525,44 +529,20 @@ tcClsInstDecl (L loc (ClsInstDecl { cid_poly_ty = poly_ty, cid_binds = binds -- Check for missing associated types and build them -- from their defaults (if available) - ; let defined_ats = mkNameSet $ map (tyFamInstDeclName . unLoc) ats - defined_adts = mkNameSet $ map (unLoc . dfid_tycon . unLoc) adts - - mk_deflt_at_instances :: ClassATItem -> TcM [FamInst] - mk_deflt_at_instances (fam_tc, defs) - -- User supplied instances ==> everything is OK - | tyConName fam_tc `elemNameSet` defined_ats - || tyConName fam_tc `elemNameSet` defined_adts - = return [] - - -- No defaults ==> generate a warning - | null defs - = do { warnMissingMethodOrAT "associated type" (tyConName fam_tc) - ; return [] } - - -- No user instance, have defaults ==> instatiate them - -- Example: class C a where { type F a b :: *; type F a b = () } - -- instance C [x] - -- Then we want to generate the decl: type F [x] b = () - | otherwise - = forM defs $ \(CoAxBranch { cab_lhs = pat_tys, cab_rhs = rhs }) -> - do { let pat_tys' = substTys mini_subst pat_tys - rhs' = substTy mini_subst rhs - tv_set' = tyVarsOfTypes pat_tys' - tvs' = varSetElems tv_set' - ; rep_tc_name <- newFamInstTyConName (noLoc (tyConName fam_tc)) pat_tys' - ; let axiom = mkSingleCoAxiom rep_tc_name tvs' fam_tc pat_tys' rhs' - ; ASSERT( tyVarsOfType rhs' `subVarSet` tv_set' ) - newFamInst SynFamilyInst axiom } - - ; tyfam_insts1 <- mapM mk_deflt_at_instances (classATItems clas) + ; let defined_ats = mkNameSet (map (tyFamInstDeclName . unLoc) ats) + `unionNameSets` + mkNameSet (map (unLoc . dfid_tycon . unLoc) adts) + ; tyfam_insts1 <- mapM (tcATDefault mini_subst defined_ats) + (classATItems clas) -- Finally, construct the Core representation of the instance. -- (This no longer includes the associated types.) 
; dfun_name <- newDFunName clas inst_tys (getLoc poly_ty) -- Dfun location is that of instance *header* - ; overlap_flag <- getOverlapFlag + ; overlap_flag <- + do defaultOverlapFlag <- getOverlapFlag + return $ setOverlapModeMaybe defaultOverlapFlag overlap_mode ; (subst, tyvars') <- tcInstSkolTyVars tyvars ; let dfun = mkDictFunId dfun_name tyvars theta clas inst_tys ispec = mkLocalInstance dfun overlap_flag tyvars' clas (substTys subst inst_tys) @@ -577,6 +557,48 @@ tcClsInstDecl (L loc (ClsInstDecl { cid_poly_ty = poly_ty, cid_binds = binds ; return ( [inst_info], tyfam_insts0 ++ concat tyfam_insts1 ++ datafam_insts) } + +tcATDefault :: TvSubst -> NameSet -> ClassATItem -> TcM [FamInst] +-- ^ Construct default instances for any associated types that +-- aren't given a user definition +-- Returns [] or singleton +tcATDefault inst_subst defined_ats (ATI fam_tc defs) + -- User supplied instances ==> everything is OK + | tyConName fam_tc `elemNameSet` defined_ats + = return [] + + -- No user instance, have defaults ==> instatiate them + -- Example: class C a where { type F a b :: *; type F a b = () } + -- instance C [x] + -- Then we want to generate the decl: type F [x] b = () + | Just rhs_ty <- defs + = do { let (subst', pat_tys') = mapAccumL subst_tv inst_subst + (tyConTyVars fam_tc) + rhs' = substTy subst' rhs_ty + tv_set' = tyVarsOfTypes pat_tys' + tvs' = varSetElemsKvsFirst tv_set' + ; rep_tc_name <- newFamInstTyConName (noLoc (tyConName fam_tc)) pat_tys' + ; let axiom = mkSingleCoAxiom rep_tc_name tvs' fam_tc pat_tys' rhs' + ; traceTc "mk_deflt_at_instance" (vcat [ ppr fam_tc, ppr rhs_ty + , pprCoAxiom axiom ]) + ; fam_inst <- ASSERT( tyVarsOfType rhs' `subVarSet` tv_set' ) + newFamInst SynFamilyInst axiom + ; return [fam_inst] } + + -- No defaults ==> generate a warning + | otherwise -- defs = Nothing + = do { warnMissingMethodOrAT "associated type" (tyConName fam_tc) + ; return [] } + where + subst_tv subst tc_tv + | Just ty <- lookupVarEnv (getTvSubstEnv subst) tc_tv + = (subst, ty) + | otherwise + = (extendTvSubst subst tc_tv ty', ty') + where + ty' = mkTyVarTy (updateTyVarKind (substTy subst) tc_tv) + + -------------- tcAssocTyDecl :: Class -- Class of associated type -> VarEnv Type -- Instantiation of class TyVars @@ -625,24 +647,22 @@ tcTyFamInstDecl :: Maybe (Class, VarEnv Type) -- the class & mini_env if applica tcTyFamInstDecl mb_clsinfo (L loc decl@(TyFamInstDecl { tfid_eqn = eqn })) = setSrcSpan loc $ tcAddTyFamInstCtxt decl $ - do { let fam_lname = tfie_tycon (unLoc eqn) + do { let fam_lname = tfe_tycon (unLoc eqn) ; fam_tc <- tcFamInstDeclCombined mb_clsinfo fam_lname -- (0) Check it's an open type family - ; checkTc (isFamilyTyCon fam_tc) (notFamily fam_tc) - ; checkTc (isSynTyCon fam_tc) (wrongKindOfFamily fam_tc) - ; checkTc (isOpenSynFamilyTyCon fam_tc) - (notOpenFamily fam_tc) + ; checkTc (isFamilyTyCon fam_tc) (notFamily fam_tc) + ; checkTc (isSynFamilyTyCon fam_tc) (wrongKindOfFamily fam_tc) + ; checkTc (isOpenSynFamilyTyCon fam_tc) (notOpenFamily fam_tc) -- (1) do the work of verifying the synonym group - ; co_ax_branch <- tcSynFamInstDecl fam_tc decl + ; co_ax_branch <- tcTyFamInstEqn (famTyConShape fam_tc) eqn -- (2) check for validity ; checkValidTyFamInst mb_clsinfo fam_tc co_ax_branch -- (3) construct coercion axiom - ; rep_tc_name <- newFamInstAxiomName loc - (tyFamInstDeclName decl) + ; rep_tc_name <- newFamInstAxiomName loc (unLoc fam_lname) [co_ax_branch] ; let axiom = mkUnbranchedCoAxiom rep_tc_name fam_tc co_ax_branch ; newFamInst SynFamilyInst axiom 
} @@ -665,7 +685,7 @@ tcDataFamInstDecl mb_clsinfo ; checkTc (isAlgTyCon fam_tc) (wrongKindOfFamily fam_tc) -- Kind check type patterns - ; tcFamTyPats (unLoc fam_tc_name) (tyConKind fam_tc) pats + ; tcFamTyPats (famTyConShape fam_tc) pats (kcDataDefn defn) $ \tvs' pats' res_kind -> do @@ -680,7 +700,7 @@ tcDataFamInstDecl mb_clsinfo ; checkTc (isLiftedTypeKind res_kind) $ tooFewParmsErr (tyConArity fam_tc) ; stupid_theta <- tcHsContext ctxt - ; h98_syntax <- dataDeclChecks (tyConName fam_tc) new_or_data stupid_theta cons + ; gadt_syntax <- dataDeclChecks (tyConName fam_tc) new_or_data stupid_theta cons -- Construct representation tycon ; rep_tc_name <- newFamInstTyConName fam_tc_name pats' @@ -703,7 +723,7 @@ tcDataFamInstDecl mb_clsinfo rep_tc = buildAlgTyCon rep_tc_name tvs' roles cType stupid_theta tc_rhs Recursive False -- No promotable to the kind level - h98_syntax parent + gadt_syntax parent -- We always assume that indexed types are recursive. Why? -- (1) Due to their open nature, we can never be sure that a -- further instance might not introduce a new recursive diff --git a/compiler/typecheck/TcInteract.lhs b/compiler/typecheck/TcInteract.lhs index 3d057ae2d7..02c5866018 100644 --- a/compiler/typecheck/TcInteract.lhs +++ b/compiler/typecheck/TcInteract.lhs @@ -1,4 +1,6 @@ \begin{code} +{-# LANGUAGE CPP #-} + module TcInteract ( solveInteractGiven, -- Solves [EvVar],GivenLoc solveInteract, -- Solves Cts @@ -103,6 +105,7 @@ solveInteractGiven loc old_fsks givens , ctev_loc = loc } | ev_id <- givens ] + -- See Note [Given flatten-skolems] in TcSMonad fsk_bag = listToBag [ mkNonCanonical $ CtGiven { ctev_evtm = EvCoercion (mkTcNomReflCo tv_ty) , ctev_pred = pred , ctev_loc = loc } @@ -1584,7 +1587,9 @@ doTopReactDict inerts fl cls xis = do { instEnvs <- getInstEnvs ; let fd_eqns = improveFromInstEnv instEnvs pred ; fd_work <- rewriteWithFunDeps fd_eqns loc - ; unless (null fd_work) (updWorkListTcS (extendWorkListEqs fd_work)) + ; unless (null fd_work) $ + do { traceTcS "Addig FD work" (ppr pred $$ vcat (map pprEquation fd_eqns) $$ ppr fd_work) + ; updWorkListTcS (extendWorkListEqs fd_work) } ; return NoTopInt } -------------------- @@ -2032,6 +2037,8 @@ getCoercibleInst loc ty1 ty2 = do where go :: FamInstEnvs -> GlobalRdrEnv -> TcS LookupInstResult go famenv rdr_env + -- Also see [Order of Coercible Instances] + -- Coercible a a (see case 1 in [Coercible Instances]) | ty1 `tcEqType` ty2 = do return $ GenInst [] @@ -2047,7 +2054,19 @@ getCoercibleInst loc ty1 ty2 = do ev_term <- deferTcSForAllEq Representational loc (tvs1,body1) (tvs2,body2) return $ GenInst [] ev_term - -- Coercible (D ty1 ty2) (D ty1' ty2') (see case 3 in [Coercible Instances]) + -- Coercible NT a (see case 4 in [Coercible Instances]) + | Just (tc,tyArgs) <- splitTyConApp_maybe ty1, + Just (concTy, ntCo) <- instNewTyConTF_maybe famenv tc tyArgs, + dataConsInScope rdr_env tc -- Do not look at all tyConsOfTyCon + = do markDataConsAsUsed rdr_env tc + ct_ev <- requestCoercible loc concTy ty2 + local_var <- mkSysLocalM (fsLit "coev") $ mkCoerciblePred concTy ty2 + let binds = EvBinds (unitBag (EvBind local_var (getEvTerm ct_ev))) + tcCo = TcLetCo binds $ + coercionToTcCoercion ntCo `mkTcTransCo` mkTcCoVarCo local_var + return $ GenInst (freshGoals [ct_ev]) (EvCoercion tcCo) + + -- Coercible (D ty1 ty2) (D ty1' ty2') (see case 2 in [Coercible Instances]) | Just (tc1,tyArgs1) <- splitTyConApp_maybe ty1, Just (tc2,tyArgs2) <- splitTyConApp_maybe ty2, tc1 == tc2, @@ -2078,19 +2097,7 @@ getCoercibleInst loc ty1 ty2 = do 
tcCo = TcLetCo binds (mkTcTyConAppCo Representational tc1 arg_cos) return $ GenInst (catMaybes arg_new) (EvCoercion tcCo) - -- Coercible NT a (see case 4 in [Coercible Instances]) - | Just (tc,tyArgs) <- splitTyConApp_maybe ty1, - Just (concTy, ntCo) <- instNewTyConTF_maybe famenv tc tyArgs, - dataConsInScope rdr_env tc -- Do not look at all tyConsOfTyCon - = do markDataConsAsUsed rdr_env tc - ct_ev <- requestCoercible loc concTy ty2 - local_var <- mkSysLocalM (fsLit "coev") $ mkCoerciblePred concTy ty2 - let binds = EvBinds (unitBag (EvBind local_var (getEvTerm ct_ev))) - tcCo = TcLetCo binds $ - coercionToTcCoercion ntCo `mkTcTransCo` mkTcCoVarCo local_var - return $ GenInst (freshGoals [ct_ev]) (EvCoercion tcCo) - - -- Coercible a NT (see case 4 in [Coercible Instances]) + -- Coercible a NT (see case 3 in [Coercible Instances]) | Just (tc,tyArgs) <- splitTyConApp_maybe ty2, Just (concTy, ntCo) <- instNewTyConTF_maybe famenv tc tyArgs, dataConsInScope rdr_env tc -- Do not look at all tyConsOfTyCon @@ -2141,7 +2148,7 @@ Note [Coercible Instances] The class Coercible is special: There are no regular instances, and the user cannot even define them (it is listed as an `abstractClass` in TcValidity). Instead, the type checker will create instances and their evidence out of thin -air, in getCoercibleInst. The following “instances” are present: +air, in getCoercibleInst. The following "instances" are present: 1. instance Coercible a a for any type a at any kind k. @@ -2150,26 +2157,14 @@ air, in getCoercibleInst. The following “instances” are present: (which would be illegal to write like that in the source code, but we have it nevertheless). - - 3. instance (Coercible t1_r t1'_r, Coercible t2_r t2_r',...) => - Coercible (C t1_r t2_r ... t1_p t2_p ... t1_n t2_n ...) - (C t1_r' t2_r' ... t1_p' t2_p' ... t1_n t2_n ...) - for a type constructor C where - * the nominal type arguments are not changed, - * the phantom type arguments may change arbitrarily - * the representational type arguments are again Coercible - - The type constructor can be used undersaturated; then the Coercible - instance is at a higher kind. This does not cause problems. - - 4. instance Coercible r b => Coercible (NT t1 t2 ...) b + 3. instance Coercible r b => Coercible (NT t1 t2 ...) b instance Coercible a r => Coercible a (NT t1 t2 ...) for a newtype constructor NT (or data family instance that resolves to a newtype) where * r is the concrete type of NT, instantiated with the arguments t1 t2 ... - * the constructor of NT are in scope. + * the constructor of NT is in scope. - Again, the newtype TyCon can appear undersaturated, but only if it has + The newtype TyCon can appear undersaturated, but only if it has enough arguments to apply the newtype coercion (which is eta-reduced). Examples: newtype NT a = NT (Either a Int) Coercible (NT Int) (Either Int Int) -- ok @@ -2177,12 +2172,24 @@ air, in getCoercibleInst. The following “instances” are present: newtype NT3 a b = NT3 (b -> a) Coercible (NT2 Int) (NT3 Int) -- cannot be derived + 4. instance (Coercible t1_r t1'_r, Coercible t2_r t2_r',...) => + Coercible (C t1_r t2_r ... t1_p t2_p ... t1_n t2_n ...) + (C t1_r' t2_r' ... t1_p' t2_p' ... t1_n t2_n ...) + for a type constructor C where + * the nominal type arguments are not changed, + * the phantom type arguments may change arbitrarily + * the representational type arguments are again Coercible + + The type constructor can be used undersaturated; then the Coercible + instance is at a higher kind. 
This does not cause problems. + + The type checker generates evidence in the form of EvCoercion, but the TcCoercion therein has role Representational, which are turned into Core coercions by dsEvTerm in DsBinds. -The evidence for the first three instance is generated here by -getCoercibleInst, for the second instance deferTcSForAllEq is used. +The evidence for the second case is created by deferTcSForAllEq, for the other +cases by getCoercibleInst. When the constraint cannot be solved, it is treated as any other unsolved constraint, i.e. it can turn up in an inferred type signature, or reported to @@ -2191,6 +2198,33 @@ coercible_msg in TcErrors gives additional explanations of why GHC could not find a Coercible instance, so it duplicates some of the logic from getCoercibleInst (in negated form). +Note [Order of Coercible Instances] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +At first glance, the order of the various coercible instances doesn't matter, as +incoherence is no issue here: We do not care how the evidence is constructed, +as long as it is. + +But because of role annotations, the order *can* matter: + + newtype T a = MkT [a] + type role T nominal + + type family F a + type instance F Int = Bool + +Here T's declared role is more restrictive than its inferred role +(representational) would be. If MkT is not in scope, so that the +newtype-unwrapping instance is not available, then this coercible +instance would fail: + Coercible (T Bool) (T (F Int) +But MkT was in scope, *and* if we used it before decomposing on T, +we'd unwrap the newtype (on both sides) to get + Coercible Bool (F Int) +whic succeeds. + +So our current decision is to apply case 3 (newtype-unwrapping) first, +followed by decomposition (case 4). This is strictly more powerful +if the newtype constructor is in scope. See Trac #9117 for a discussion. Note [Instance and Given overlap] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/compiler/typecheck/TcMType.lhs b/compiler/typecheck/TcMType.lhs index f646305e39..65bc0b7653 100644 --- a/compiler/typecheck/TcMType.lhs +++ b/compiler/typecheck/TcMType.lhs @@ -9,7 +9,8 @@ This module contains monadic operations over types that contain mutable type variables \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/typecheck/TcMatches.lhs b/compiler/typecheck/TcMatches.lhs index 5859e7b810..32b6d1e326 100644 --- a/compiler/typecheck/TcMatches.lhs +++ b/compiler/typecheck/TcMatches.lhs @@ -6,7 +6,8 @@ TcMatches: Typecheck some @Matches@ \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, RankNTypes #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/typecheck/TcPat.lhs b/compiler/typecheck/TcPat.lhs index 0b2a200867..cfc76d6538 100644 --- a/compiler/typecheck/TcPat.lhs +++ b/compiler/typecheck/TcPat.lhs @@ -6,7 +6,8 @@ TcPat: Typechecking patterns \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, RankNTypes #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -531,9 +532,9 @@ tc_pat penv (TuplePat pats boxity _) pat_ty thing_inside -- so that we can experiment with lazy tuple-matching. -- This is a pretty odd place to make the switch, but -- it was easy to do. - ; let pat_ty' = mkTyConApp tc arg_tys - -- pat_ty /= pat_ty iff coi /= IdCo - unmangled_result = TuplePat pats' boxity pat_ty' + ; let + unmangled_result = TuplePat pats' boxity arg_tys + -- pat_ty /= pat_ty iff coi /= IdCo possibly_mangled_result | gopt Opt_IrrefutableTuples dflags && isBoxed boxity = LazyPat (noLoc unmangled_result) @@ -730,14 +731,14 @@ tcDataConPat penv (L con_span con_name) data_con pat_ty arg_pats thing_inside (zipTopTvSubst univ_tvs ctxt_res_tys) ex_tvs -- Get location from monad, not from ex_tvs - ; let pat_ty' = mkTyConApp tycon ctxt_res_tys + ; let -- pat_ty' = mkTyConApp tycon ctxt_res_tys -- pat_ty' is type of the actual constructor application -- pat_ty' /= pat_ty iff coi /= IdCo arg_tys' = substTys tenv arg_tys ; traceTc "tcConPat" (vcat [ ppr con_name, ppr univ_tvs, ppr ex_tvs, ppr eq_spec - , ppr ex_tvs', ppr pat_ty', ppr arg_tys' ]) + , ppr ex_tvs', ppr ctxt_res_tys, ppr arg_tys' ]) ; if null ex_tvs && null eq_spec && null theta then do { -- The common case; no class bindings etc -- (see Note [Arrows and patterns]) @@ -747,7 +748,7 @@ tcDataConPat penv (L con_span con_name) data_con pat_ty arg_pats thing_inside pat_tvs = [], pat_dicts = [], pat_binds = emptyTcEvBinds, pat_args = arg_pats', - pat_ty = pat_ty', + pat_arg_tys = ctxt_res_tys, pat_wrap = idHsWrapper } ; return (mkHsWrapPat wrap res_pat pat_ty, res) } @@ -780,7 +781,7 @@ tcDataConPat penv (L con_span con_name) data_con pat_ty arg_pats thing_inside pat_dicts = given, pat_binds = ev_binds, pat_args = arg_pats', - pat_ty = pat_ty', + pat_arg_tys = ctxt_res_tys, pat_wrap = idHsWrapper } ; return (mkHsWrapPat wrap res_pat pat_ty, res) } } @@ -790,11 +791,9 @@ tcPatSynPat :: PatEnv -> Located Name -> PatSyn -> HsConPatDetails Name -> TcM a -> TcM (Pat TcId, a) tcPatSynPat penv (L con_span _) pat_syn pat_ty arg_pats thing_inside - = do { let (univ_tvs, ex_tvs, (prov_theta, req_theta)) = patSynSig pat_syn - arg_tys = patSynArgTys pat_syn - ty = patSynType pat_syn + = do { let (univ_tvs, ex_tvs, prov_theta, req_theta, arg_tys, ty) = patSynSig pat_syn - ; (_univ_tvs', inst_tys, subst) <- tcInstTyVars univ_tvs + ; (univ_tvs', inst_tys, subst) <- tcInstTyVars univ_tvs ; checkExistentials ex_tvs penv ; (tenv, ex_tvs') <- tcInstSuperSkolTyVarsX subst ex_tvs @@ -838,7 +837,7 @@ tcPatSynPat penv (L con_span _) pat_syn pat_ty arg_pats thing_inside pat_dicts = prov_dicts', pat_binds = ev_binds, pat_args = arg_pats', - pat_ty = ty', + pat_arg_tys = mkTyVarTys univ_tvs', pat_wrap = req_wrap } ; return (mkHsWrapPat wrap res_pat pat_ty, res) } diff --git a/compiler/typecheck/TcPatSyn.lhs b/compiler/typecheck/TcPatSyn.lhs index 0b3b4e4858..82fa999f34 100644 --- a/compiler/typecheck/TcPatSyn.lhs +++ b/compiler/typecheck/TcPatSyn.lhs @@ -5,6 +5,8 @@ \section[TcPatSyn]{Typechecking pattern synonym declarations} \begin{code} +{-# LANGUAGE CPP #-} + module TcPatSyn (tcPatSynDecl) where import HsSyn @@ -22,6 +24,7 @@ import Outputable import FastString import Var import Id +import IdInfo( IdDetails( VanillaId ) ) import TcBinds import BasicTypes import TcSimplify @@ -31,31 +34,11 @@ import Data.Monoid import Bag import TcEvidence import BuildTyCl +import TypeRep #include 
"HsVersions.h" \end{code} -Note [Pattern synonym typechecking] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Consider the following pattern synonym declaration - - pattern P x = MkT [x] (Just 42) - -where - data T a where - MkT :: (Show a, Ord b) => [b] -> a -> T a - -The pattern synonym's type is described with five axes, given here for -the above example: - - Pattern type: T (Maybe t) - Arguments: [x :: b] - Universal type variables: [t] - Required theta: (Eq t, Num t) - Existential type variables: [b] - Provided theta: (Show (Maybe t), Ord b) - \begin{code} tcPatSynDecl :: Located Name -> HsPatSynDetails (Located Name) @@ -118,7 +101,7 @@ tcPatSynDecl lname@(L _ name) details lpat dir ; traceTc "tcPatSynDecl }" $ ppr name ; let patSyn = mkPatSyn name is_infix - args + (map varType args) univ_tvs ex_tvs prov_theta req_theta pat_ty @@ -127,40 +110,6 @@ tcPatSynDecl lname@(L _ name) details lpat dir \end{code} -Note [Matchers and wrappers for pattern synonyms] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -For each pattern synonym, we generate a single matcher function which -implements the actual matching. For the above example, the matcher -will have type: - - $mP :: forall r t. (Eq t, Num t) - => T (Maybe t) - -> (forall b. (Show (Maybe t), Ord b) => b -> r) - -> r - -> r - -with the following implementation: - - $mP @r @t $dEq $dNum scrut cont fail = case scrut of - MkT @b $dShow $dOrd [x] (Just 42) -> cont @b $dShow $dOrd x - _ -> fail - -For bidirectional pattern synonyms, we also generate a single wrapper -function which implements the pattern synonym in an expression -context. For our running example, it will be: - - $WP :: forall t b. (Show (Maybe t), Ord b, Eq t, Num t) - => b -> T (Maybe t) - $WP x = MkT [x] (Just 42) - -N.b. the existential/universal and required/provided split does not -apply to the wrapper since you are only putting stuff in, not getting -stuff out. - -Injectivity of bidirectional pattern synonyms is checked in -tcPatToExpr which walks the pattern and returns its corresponding -expression when available. 
\begin{code} tcPatSynMatcher :: Located Name @@ -172,12 +121,18 @@ tcPatSynMatcher :: Located Name -> ThetaType -> ThetaType -> TcType -> TcM (Id, LHsBinds Id) +-- See Note [Matchers and wrappers for pattern synonyms] in PatSyn tcPatSynMatcher (L loc name) lpat args univ_tvs ex_tvs ev_binds prov_dicts req_dicts prov_theta req_theta pat_ty = do { res_tv <- zonkQuantifiedTyVar =<< newFlexiTyVar liftedTypeKind - ; (matcher_id, res_ty, cont_ty) <- mkPatSynMatcherId name args - univ_tvs ex_tvs - prov_theta req_theta - pat_ty res_tv + ; matcher_name <- newImplicitBinder name mkMatcherOcc + ; let res_ty = TyVarTy res_tv + cont_ty = mkSigmaTy ex_tvs prov_theta $ + mkFunTys (map varType args) res_ty + + ; let matcher_tau = mkFunTys [pat_ty, cont_ty, res_ty] res_ty + matcher_sigma = mkSigmaTy (res_tv:univ_tvs) req_theta matcher_tau + matcher_id = mkExportedLocalId VanillaId matcher_name matcher_sigma + ; traceTc "tcPatSynMatcher" (ppr name $$ ppr (idType matcher_id)) ; let matcher_lid = L loc matcher_id @@ -241,6 +196,7 @@ tcPatSynWrapper :: Located Name -> ThetaType -> TcType -> TcM (Maybe (Id, LHsBinds Id)) +-- See Note [Matchers and wrappers for pattern synonyms] in PatSyn tcPatSynWrapper lname lpat dir args univ_tvs ex_tvs theta pat_ty = do { let argNames = mkNameSet (map Var.varName args) ; case (dir, tcPatToExpr argNames lpat) of @@ -260,18 +216,16 @@ tc_pat_syn_wrapper_from_expr :: Located Name -> TcM (Id, LHsBinds Id) tc_pat_syn_wrapper_from_expr (L loc name) lexpr args univ_tvs ex_tvs theta pat_ty = do { let qtvs = univ_tvs ++ ex_tvs - ; (subst, qtvs') <- tcInstSkolTyVars qtvs - ; let theta' = substTheta subst theta + ; (subst, wrapper_tvs) <- tcInstSkolTyVars qtvs + ; let wrapper_theta = substTheta subst theta pat_ty' = substTy subst pat_ty args' = map (\arg -> setVarType arg $ substTy subst (varType arg)) args - - ; wrapper_id <- mkPatSynWrapperId name args qtvs theta pat_ty - ; let wrapper_name = getName wrapper_id - wrapper_lname = L loc wrapper_name - -- (wrapper_tvs, wrapper_theta, wrapper_tau) = tcSplitSigmaTy (idType wrapper_id) - wrapper_tvs = qtvs' - wrapper_theta = theta' wrapper_tau = mkFunTys (map varType args') pat_ty' + wrapper_sigma = mkSigmaTy wrapper_tvs wrapper_theta wrapper_tau + + ; wrapper_name <- newImplicitBinder name mkDataConWrapperOcc + ; let wrapper_lname = L loc wrapper_name + wrapper_id = mkExportedLocalId VanillaId wrapper_name wrapper_sigma ; let wrapper_args = map (noLoc . VarPat . Var.varName) args' wrapper_match = mkMatch wrapper_args lexpr EmptyLocalBinds diff --git a/compiler/typecheck/TcRnDriver.lhs b/compiler/typecheck/TcRnDriver.lhs index 5b39132254..281db25620 100644 --- a/compiler/typecheck/TcRnDriver.lhs +++ b/compiler/typecheck/TcRnDriver.lhs @@ -5,6 +5,8 @@ \section[TcMovectle]{Typechecking a whole module} \begin{code} +{-# LANGUAGE CPP, NondecreasingIndentation #-} + module TcRnDriver ( #ifdef GHCI tcRnStmt, tcRnExpr, tcRnType, @@ -18,8 +20,7 @@ module TcRnDriver ( tcRnLookupName, tcRnGetInfo, tcRnModule, tcRnModuleTcRnM, - tcTopSrcDecls, - tcRnExtCore + tcTopSrcDecls ) where #ifdef GHCI @@ -58,10 +59,9 @@ import LoadIface import RnNames import RnEnv import RnSource -import PprCore -import CoreSyn import ErrUtils import Id +import IdInfo( IdDetails( VanillaId ) ) import VarEnv import Module import UniqFM @@ -82,7 +82,6 @@ import CoAxiom import Inst ( tcGetInstEnvs ) import Annotations import Data.List ( sortBy ) -import Data.IORef ( readIORef ) import Data.Ord #ifdef GHCI import BasicTypes hiding( SuccessFlag(..) 
) @@ -306,107 +305,6 @@ tcRnImports hsc_env import_decls %************************************************************************ %* * - Type-checking external-core modules -%* * -%************************************************************************ - -\begin{code} -tcRnExtCore :: HscEnv - -> HsExtCore RdrName - -> IO (Messages, Maybe ModGuts) - -- Nothing => some error occurred - -tcRnExtCore hsc_env (HsExtCore this_mod decls src_binds) - -- The decls are IfaceDecls; all names are original names - = do { showPass (hsc_dflags hsc_env) "Renamer/typechecker" ; - - initTc hsc_env ExtCoreFile False this_mod $ do { - - let { ldecls = map noLoc decls } ; - - -- Bring the type and class decls into scope - -- ToDo: check that this doesn't need to extract the val binds. - -- It seems that only the type and class decls need to be in scope below because - -- (a) tcTyAndClassDecls doesn't need the val binds, and - -- (b) tcExtCoreBindings doesn't need anything - -- (in fact, it might not even need to be in the scope of - -- this tcg_env at all) - (tc_envs, _bndrs) <- getLocalNonValBinders emptyFsEnv {- no fixity decls -} - (mkFakeGroup ldecls) ; - setEnvs tc_envs $ do { - - (rn_decls, _fvs) <- checkNoErrs $ rnTyClDecls [] [mkTyClGroup ldecls] ; - -- The empty list is for extra dependencies coming from .hs-boot files - -- See Note [Extra dependencies from .hs-boot files] in RnSource - - -- Dump trace of renaming part - rnDump (ppr rn_decls) ; - - -- Typecheck them all together so that - -- any mutually recursive types are done right - -- Just discard the auxiliary bindings; they are generated - -- only for Haskell source code, and should already be in Core - tcg_env <- tcTyAndClassDecls emptyModDetails rn_decls ; - safe_mode <- liftIO $ finalSafeMode (hsc_dflags hsc_env) tcg_env ; - dep_files <- liftIO $ readIORef (tcg_dependent_files tcg_env) ; - - setGblEnv tcg_env $ do { - -- Make the new type env available to stuff slurped from interface files - - -- Now the core bindings - core_binds <- initIfaceExtCore (tcExtCoreBindings src_binds) ; - - - -- Wrap up - let { - bndrs = bindersOfBinds core_binds ; - my_exports = map (Avail . idName) bndrs ; - -- ToDo: export the data types also? - - mod_guts = ModGuts { mg_module = this_mod, - mg_boot = False, - mg_used_names = emptyNameSet, -- ToDo: compute usage - mg_used_th = False, - mg_dir_imps = emptyModuleEnv, -- ?? - mg_deps = noDependencies, -- ?? 
- mg_exports = my_exports, - mg_tcs = tcg_tcs tcg_env, - mg_insts = tcg_insts tcg_env, - mg_fam_insts = tcg_fam_insts tcg_env, - mg_inst_env = tcg_inst_env tcg_env, - mg_fam_inst_env = tcg_fam_inst_env tcg_env, - mg_patsyns = [], -- TODO - mg_rules = [], - mg_vect_decls = [], - mg_anns = [], - mg_binds = core_binds, - - -- Stubs - mg_rdr_env = emptyGlobalRdrEnv, - mg_fix_env = emptyFixityEnv, - mg_warns = NoWarnings, - mg_foreign = NoStubs, - mg_hpc_info = emptyHpcInfo False, - mg_modBreaks = emptyModBreaks, - mg_vect_info = noVectInfo, - mg_safe_haskell = safe_mode, - mg_trust_pkg = False, - mg_dependent_files = dep_files - } } ; - - tcCoreDump mod_guts ; - - return mod_guts - }}}} - -mkFakeGroup :: [LTyClDecl a] -> HsGroup a -mkFakeGroup decls -- Rather clumsy; lots of unused fields - = emptyRdrGroup { hs_tyclds = [mkTyClGroup decls] } -\end{code} - - -%************************************************************************ -%* * Type-checking the top level of a module %* * %************************************************************************ @@ -647,12 +545,35 @@ checkHiBootIface tcg_env@(TcGblEnv { tcg_src = hs_src, tcg_binds = binds, tcg_insts = local_insts, tcg_type_env = local_type_env, tcg_exports = local_exports }) - (ModDetails { md_insts = boot_insts, md_fam_insts = boot_fam_insts, - md_types = boot_type_env, md_exports = boot_exports }) + boot_details | isHsBoot hs_src -- Current module is already a hs-boot file! = return tcg_env | otherwise + = do { mb_dfun_prs <- checkHiBootIface' local_insts local_type_env + local_exports boot_details + ; let dfun_prs = catMaybes mb_dfun_prs + boot_dfuns = map fst dfun_prs + dfun_binds = listToBag [ mkVarBind boot_dfun (nlHsVar dfun) + | (boot_dfun, dfun) <- dfun_prs ] + type_env' = extendTypeEnvWithIds local_type_env boot_dfuns + tcg_env' = tcg_env { tcg_binds = binds `unionBags` dfun_binds } + + ; setGlobalTypeEnv tcg_env' type_env' } + -- Update the global type env *including* the knot-tied one + -- so that if the source module reads in an interface unfolding + -- mentioning one of the dfuns from the boot module, then it + -- can "see" that boot dfun. See Trac #4003 + +checkHiBootIface' :: [ClsInst] -> TypeEnv -> [AvailInfo] + -> ModDetails -> TcM [Maybe (Id, Id)] +-- Variant which doesn't require a full TcGblEnv; you could get the +-- local components from another ModDetails. + +checkHiBootIface' + local_insts local_type_env local_exports + (ModDetails { md_insts = boot_insts, md_fam_insts = boot_fam_insts, + md_types = boot_type_env, md_exports = boot_exports }) = do { traceTc "checkHiBootIface" $ vcat [ ppr boot_type_env, ppr boot_insts, ppr boot_exports] @@ -669,19 +590,11 @@ checkHiBootIface -- Check instance declarations ; mb_dfun_prs <- mapM check_inst boot_insts - ; let dfun_prs = catMaybes mb_dfun_prs - boot_dfuns = map fst dfun_prs - dfun_binds = listToBag [ mkVarBind boot_dfun (nlHsVar dfun) - | (boot_dfun, dfun) <- dfun_prs ] - type_env' = extendTypeEnvWithIds local_type_env boot_dfuns - tcg_env' = tcg_env { tcg_binds = binds `unionBags` dfun_binds } ; failIfErrsM - ; setGlobalTypeEnv tcg_env' type_env' } - -- Update the global type env *including* the knot-tied one - -- so that if the source module reads in an interface unfolding - -- mentioning one of the dfuns from the boot module, then it - -- can "see" that boot dfun. 
See Trac #4003 + + ; return mb_dfun_prs } + where check_export boot_avail -- boot_avail is exported by the boot iface | name `elem` dfun_names = return () @@ -735,7 +648,7 @@ checkHiBootIface where boot_dfun = instanceDFunId boot_inst boot_inst_ty = idType boot_dfun - local_boot_dfun = Id.mkExportedLocalId (idName boot_dfun) boot_inst_ty + local_boot_dfun = Id.mkExportedLocalId VanillaId (idName boot_dfun) boot_inst_ty -- This has to compare the TyThing from the .hi-boot file to the TyThing @@ -783,17 +696,14 @@ checkBootTyCon tc1 tc2 (_, rho_ty2) = splitForAllTys (idType id2) op_ty2 = funResultTy rho_ty2 - eqAT (tc1, def_ats1) (tc2, def_ats2) + eqAT (ATI tc1 def_ats1) (ATI tc2 def_ats2) = checkBootTyCon tc1 tc2 && - eqListBy eqATDef def_ats1 def_ats2 + eqATDef def_ats1 def_ats2 -- Ignore the location of the defaults - eqATDef (CoAxBranch { cab_tvs = tvs1, cab_lhs = ty_pats1, cab_rhs = ty1 }) - (CoAxBranch { cab_tvs = tvs2, cab_lhs = ty_pats2, cab_rhs = ty2 }) - | Just env <- eqTyVarBndrs emptyRnEnv2 tvs1 tvs2 - = eqListBy (eqTypeX env) ty_pats1 ty_pats2 && - eqTypeX env ty1 ty2 - | otherwise = False + eqATDef Nothing Nothing = True + eqATDef (Just ty1) (Just ty2) = eqTypeX env ty1 ty2 + eqATDef _ _ = False eqFD (as1,bs1) (as2,bs2) = eqListBy (eqTypeX env) (mkTyVarTys as1) (mkTyVarTys as2) && @@ -1148,7 +1058,7 @@ check_main dflags tcg_env ; let { root_main_name = mkExternalName rootMainKey rOOT_MAIN (mkVarOccFS (fsLit "main")) (getSrcSpan main_name) - ; root_main_id = Id.mkExportedLocalId root_main_name + ; root_main_id = Id.mkExportedLocalId VanillaId root_main_name (mkTyConApp ioTyCon [res_ty]) ; co = mkWpTyApps [res_ty] ; rhs = nlHsApp (mkLHsWrap co (nlHsVar run_main_id)) main_expr @@ -1864,17 +1774,6 @@ tcDump env -- NB: foreign x-d's have undefined's in their types; -- hence can't show the tc_fords -tcCoreDump :: ModGuts -> TcM () -tcCoreDump mod_guts - = do { dflags <- getDynFlags ; - when (dopt Opt_D_dump_types dflags || dopt Opt_D_dump_tc dflags) - (dumpTcRn (pprModGuts mod_guts)) ; - - -- Dump bindings if -ddump-tc - dumpOptTcRn Opt_D_dump_tc (mkDumpDoc "Typechecker" full_dump) } - where - full_dump = pprCoreBindings (mg_binds mod_guts) - -- It's unpleasant having both pprModGuts and pprModDetails here pprTcGblEnv :: TcGblEnv -> SDoc pprTcGblEnv (TcGblEnv { tcg_type_env = type_env, @@ -1900,12 +1799,6 @@ pprTcGblEnv (TcGblEnv { tcg_type_env = type_env, `thenCmp` (is_boot1 `compare` is_boot2) -pprModGuts :: ModGuts -> SDoc -pprModGuts (ModGuts { mg_tcs = tcs - , mg_rules = rules }) - = vcat [ ppr_types [] (mkTypeEnv (map ATyCon tcs)), - ppr_rules rules ] - ppr_types :: [ClsInst] -> TypeEnv -> SDoc ppr_types insts type_env = text "TYPE SIGNATURES" $$ nest 2 (ppr_sigs ids) @@ -1956,13 +1849,5 @@ ppr_tydecls tycons -- Print type constructor info; sort by OccName = vcat (map ppr_tycon (sortBy (comparing getOccName) tycons)) where - ppr_tycon tycon = vcat [ ppr (tyConName tycon) <+> dcolon <+> ppr (tyConKind tycon) - -- Temporarily print the kind signature too - , ppr (tyThingToIfaceDecl (ATyCon tycon)) ] - -ppr_rules :: [CoreRule] -> SDoc -ppr_rules [] = empty -ppr_rules rs = vcat [ptext (sLit "{-# RULES"), - nest 2 (pprRules rs), - ptext (sLit "#-}")] + ppr_tycon tycon = vcat [ ppr (tyThingToIfaceDecl (ATyCon tycon)) ] \end{code} diff --git a/compiler/typecheck/TcRnMonad.lhs b/compiler/typecheck/TcRnMonad.lhs index 01c9d36cf3..17700e77ce 100644 --- a/compiler/typecheck/TcRnMonad.lhs +++ b/compiler/typecheck/TcRnMonad.lhs @@ -5,7 +5,9 @@ Functions for working with the typechecker 
environment (setters, getters...). \begin{code} +{-# LANGUAGE CPP, ExplicitForAll, FlexibleInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} + module TcRnMonad( module TcRnMonad, module TcRnTypes, @@ -1245,17 +1247,6 @@ initIfaceTcRn thing_inside ; get_type_env = readTcRef (tcg_type_env_var tcg_env) } ; setEnvs (if_env, ()) thing_inside } -initIfaceExtCore :: IfL a -> TcRn a -initIfaceExtCore thing_inside - = do { tcg_env <- getGblEnv - ; let { mod = tcg_mod tcg_env - ; doc = ptext (sLit "External Core file for") <+> quotes (ppr mod) - ; if_env = IfGblEnv { - if_rec_types = Just (mod, return (tcg_type_env tcg_env)) } - ; if_lenv = mkIfLclEnv mod doc - } - ; setEnvs (if_env, if_lenv) thing_inside } - initIfaceCheck :: HscEnv -> IfG a -> IO a -- Used when checking the up-to-date-ness of the old Iface -- Initialise the environment with no useful info at all diff --git a/compiler/typecheck/TcRnTypes.lhs b/compiler/typecheck/TcRnTypes.lhs index 0355dab9c7..bc536c17a8 100644 --- a/compiler/typecheck/TcRnTypes.lhs +++ b/compiler/typecheck/TcRnTypes.lhs @@ -16,6 +16,8 @@ For state that is global and should be returned at the end (e.g not part of the stack mechanism), you should use an TcRef (= IORef) to store them. \begin{code} +{-# LANGUAGE CPP #-} + module TcRnTypes( TcRnIf, TcRn, TcM, RnM, IfM, IfL, IfG, -- The monad is opaque outside this module TcRef, @@ -92,7 +94,7 @@ import Class ( Class ) import TyCon ( TyCon ) import ConLike ( ConLike(..) ) import DataCon ( DataCon, dataConUserType, dataConOrigArgTys ) -import PatSyn ( PatSyn, patSynId ) +import PatSyn ( PatSyn, patSynType ) import TcType import Annotations import InstEnv @@ -294,7 +296,7 @@ data TcGblEnv -- ^ Allows us to choose unique DFun names. -- The next fields accumulate the payload of the module - -- The binds, rules and foreign-decl fiels are collected + -- The binds, rules and foreign-decl fields are collected -- initially in un-zonked form and are finally zonked in tcRnSrcDecls tcg_rn_exports :: Maybe [Located (IE Name)], @@ -1282,6 +1284,8 @@ data Implication ic_fsks :: [TcTyVar], -- Extra flatten-skolems introduced by -- by flattening the givens + -- See Note [Given flatten-skolems] + ic_no_eqs :: Bool, -- True <=> ic_givens have no equalities, for sure -- False <=> ic_givens might have equalities @@ -1741,11 +1745,14 @@ pprSkolInfo ArrowSkol = ptext (sLit "the arrow form") pprSkolInfo (PatSkol cl mc) = case cl of RealDataCon dc -> sep [ ptext (sLit "a pattern with constructor") , nest 2 $ ppr dc <+> dcolon - <+> ppr (dataConUserType dc) <> comma + <+> pprType (dataConUserType dc) <> comma + -- pprType prints forall's regardless of -fprint-explict-foralls + -- which is what we want here, since we might be saying + -- type variable 't' is bound by ... , ptext (sLit "in") <+> pprMatchContext mc ] PatSynCon ps -> sep [ ptext (sLit "a pattern with pattern synonym") , nest 2 $ ppr ps <+> dcolon - <+> ppr (varType (patSynId ps)) <> comma + <+> pprType (patSynType ps) <> comma , ptext (sLit "in") <+> pprMatchContext mc ] pprSkolInfo (InferSkol ids) = sep [ ptext (sLit "the inferred type of") , vcat [ ppr name <+> dcolon <+> ppr ty diff --git a/compiler/typecheck/TcRules.lhs b/compiler/typecheck/TcRules.lhs index c2f3b6b302..47b38f114b 100644 --- a/compiler/typecheck/TcRules.lhs +++ b/compiler/typecheck/TcRules.lhs @@ -6,7 +6,7 @@ TcRules: Typechecking transformation rules \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. 
-- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/typecheck/TcSMonad.lhs b/compiler/typecheck/TcSMonad.lhs index ad3e5cbcb7..60ff5d26c8 100644 --- a/compiler/typecheck/TcSMonad.lhs +++ b/compiler/typecheck/TcSMonad.lhs @@ -1,6 +1,7 @@ \begin{code} +{-# LANGUAGE CPP, TypeFamilies #-} + -- Type definitions for the constraint solver -{-# LANGUAGE TypeFamilies #-} module TcSMonad ( -- Canonical constraints, definition is now in TcRnTypes @@ -461,6 +462,7 @@ data InertSet , inert_fsks :: [TcTyVar] -- Rigid flatten-skolems (arising from givens) -- allocated in this local scope + -- See Note [Given flatten-skolems] , inert_solved_funeqs :: FunEqMap (CtEvidence, TcType) -- See Note [Type family equations] @@ -478,8 +480,29 @@ data InertSet -- - Stored not necessarily as fully rewritten -- (ToDo: rewrite lazily when we lookup) } +\end{code} +Note [Given flatten-skolems] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Suppose we simplify the implication + forall b. C (F a) b => (C (F a) beta, blah) +We'll flatten the givens, introducing a flatten-skolem, so the +givens effectively look like + (C fsk b, F a ~ fsk) +Then we simplify the wanteds, transforming (C (F a) beta) to (C fsk beta). +Now, if we don't solve that wanted, we'll put it back into the residual +implication. But where is fsk bound? + +We solve this by recording the given flatten-skolems in the implication +(the ic_fsks field), so it's as if we change the implication to + forall b, fsk. (C fsk b, F a ~ fsk) => (C fsk beta, blah) + +We don't need to explicitly record the (F a ~ fsk) constraint in the implication +because we can recover it from inside the fsk TyVar itself. But we do need +to treat that (F a ~ fsk) as a new given. See the fsk_bag stuff in +TcInteract.solveInteractGiven. +\begin{code} instance Outputable InertCans where ppr ics = vcat [ ptext (sLit "Equalities:") <+> vcat (map ppr (varEnvElts (inert_eqs ics))) @@ -506,9 +529,9 @@ emptyInert , inert_funeqs = emptyFunEqs , inert_irreds = emptyCts , inert_insols = emptyCts - , inert_no_eqs = True + , inert_no_eqs = True -- See Note [inert_fsks and inert_no_eqs] } - , inert_fsks = [] + , inert_fsks = [] -- See Note [inert_fsks and inert_no_eqs] , inert_flat_cache = emptyFunEqs , inert_solved_funeqs = emptyFunEqs , inert_solved_dicts = emptyDictMap } @@ -521,10 +544,12 @@ addInertCan ics item@(CTyEqCan { cc_ev = ev }) (inert_eqs ics) (cc_tyvar item) [item] , inert_no_eqs = isFlatSkolEv ev && inert_no_eqs ics } + -- See Note [When does an implication have given equalities?] in TcSimplify addInertCan ics item@(CFunEqCan { cc_fun = tc, cc_tyargs = tys, cc_ev = ev }) = ics { inert_funeqs = addFunEq (inert_funeqs ics) tc tys item , inert_no_eqs = isFlatSkolEv ev && inert_no_eqs ics } + -- See Note [When does an implication have given equalities?] 
in TcSimplify addInertCan ics item@(CIrredEvCan {}) = ics { inert_irreds = inert_irreds ics `Bag.snocBag` item @@ -601,7 +626,7 @@ prepareInertsForImplications is , inert_irreds = Bag.filterBag isGivenCt irreds , inert_dicts = filterDicts isGivenCt dicts , inert_insols = emptyCts - , inert_no_eqs = True -- Ready for each implication + , inert_no_eqs = True -- See Note [inert_fsks and inert_no_eqs] } is_given_eq :: [Ct] -> Bool @@ -1125,8 +1150,8 @@ nestImplicTcS ref inner_untch inerts (TcS thing_inside) , tcs_ty_binds = ty_binds , tcs_count = count , tcs_inerts = new_inert_var - , tcs_worklist = panic "nextImplicTcS: worklist" - , tcs_implics = panic "nextImplicTcS: implics" + , tcs_worklist = panic "nestImplicTcS: worklist" + , tcs_implics = panic "nestImplicTcS: implics" -- NB: Both these are initialised by withWorkList } ; res <- TcM.setUntouchables inner_untch $ @@ -1154,8 +1179,8 @@ nestTcS (TcS thing_inside) do { inerts <- TcM.readTcRef inerts_var ; new_inert_var <- TcM.newTcRef inerts ; let nest_env = env { tcs_inerts = new_inert_var - , tcs_worklist = panic "nextImplicTcS: worklist" - , tcs_implics = panic "nextImplicTcS: implics" } + , tcs_worklist = panic "nestTcS: worklist" + , tcs_implics = panic "nestTcS: implics" } ; thing_inside nest_env } tryTcS :: TcS a -> TcS a @@ -1173,8 +1198,8 @@ tryTcS (TcS thing_inside) ; let nest_env = env { tcs_ev_binds = ev_binds_var , tcs_ty_binds = ty_binds_var , tcs_inerts = is_var - , tcs_worklist = panic "nextImplicTcS: worklist" - , tcs_implics = panic "nextImplicTcS: implics" } + , tcs_worklist = panic "tryTcS: worklist" + , tcs_implics = panic "tryTcS: implics" } ; thing_inside nest_env } -- Getters and setters of TcEnv fields @@ -1257,19 +1282,36 @@ getUntouchables :: TcS Untouchables getUntouchables = wrapTcS TcM.getUntouchables getGivenInfo :: TcS a -> TcS (Bool, [TcTyVar], a) --- Run thing_inside, returning info on --- a) whether we got any new equalities --- b) which new (given) flatten skolems were generated +-- See Note [inert_fsks and inert_no_eqs] getGivenInfo thing_inside - = do { updInertTcS reset_vars - ; res <- thing_inside - ; is <- getTcSInerts + = do { + ; updInertTcS reset_vars -- Set inert_fsks and inert_no_eqs to initial values + ; res <- thing_inside -- Run thing_inside + ; is <- getTcSInerts -- Get new values of inert_fsks and inert_no_eqs ; return (inert_no_eqs (inert_cans is), inert_fsks is, res) } where reset_vars :: InertSet -> InertSet reset_vars is = is { inert_cans = (inert_cans is) { inert_no_eqs = True } , inert_fsks = [] } +\end{code} +Note [inert_fsks and inert_no_eqs] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The function getGivenInfo runs thing_inside to see what new flatten-skolems +and equalities are generated by thing_inside. To that end, + * it initialises inert_fsks, inert_no_eqs + * runs thing_inside + * reads out inert_fsks, inert_no_eqs +This is the only place where it matters what inert_fsks and inert_no_eqs +are initialised to. In other places (eg emptyInert), we need to set them +to something (because they are strict) but they will never be looked at. + +See Note [When does an implication have given equalities?] in TcSimplify +for more details about inert_no_eqs. + +See Note [Given flatten-skolems] for more details about inert_fsks. + +\begin{code} getTcSTyBinds :: TcS (IORef (Bool, TyVarEnv (TcTyVar, TcType))) getTcSTyBinds = TcS (return .
tcs_ty_binds) @@ -1354,7 +1396,7 @@ checkWellStagedDFun pred dfun_id loc bind_lvl = TcM.topIdLvl dfun_id pprEq :: TcType -> TcType -> SDoc -pprEq ty1 ty2 = pprType $ mkEqPred ty1 ty2 +pprEq ty1 ty2 = pprParendType ty1 <+> char '~' <+> pprParendType ty2 isTouchableMetaTyVarTcS :: TcTyVar -> TcS Bool isTouchableMetaTyVarTcS tv @@ -1794,7 +1836,7 @@ rewriteEqEvidence :: CtEvidence -- Old evidence :: olhs ~ orhs (not swap -- It's all a form of rewwriteEvidence, specialised for equalities rewriteEqEvidence old_ev swapped nlhs nrhs lhs_co rhs_co | CtDerived { ctev_loc = loc } <- old_ev - = newDerived loc (mkEqPred nlhs nrhs) + = newDerived loc (mkTcEqPred nlhs nrhs) | NotSwapped <- swapped , isTcReflCo lhs_co -- See Note [Rewriting with Refl] @@ -1821,7 +1863,7 @@ rewriteEqEvidence old_ev swapped nlhs nrhs lhs_co rhs_co | otherwise = panic "rewriteEvidence" where - new_pred = mkEqPred nlhs nrhs + new_pred = mkTcEqPred nlhs nrhs maybeSym :: SwapFlag -> TcCoercion -> TcCoercion maybeSym IsSwapped co = mkTcSymCo co diff --git a/compiler/typecheck/TcSimplify.lhs b/compiler/typecheck/TcSimplify.lhs index 64ef3fed4b..dde5902ccc 100644 --- a/compiler/typecheck/TcSimplify.lhs +++ b/compiler/typecheck/TcSimplify.lhs @@ -1,4 +1,6 @@ \begin{code} +{-# LANGUAGE CPP #-} + module TcSimplify( simplifyInfer, quantifyPred, simplifyAmbiguityCheck, @@ -95,10 +97,9 @@ simpl_top wanteds try_class_defaulting :: WantedConstraints -> TcS WantedConstraints try_class_defaulting wc - | isEmptyWC wc || insolubleWC wc - = return wc -- Don't do type-class defaulting if there are insolubles - -- Doing so is not going to solve the insolubles - | otherwise + | isEmptyWC wc + = return wc + | otherwise -- See Note [When to do type-class defaulting] = do { something_happened <- applyDefaultingRules (approximateWC wc) -- See Note [Top-level Defaulting Plan] ; if something_happened @@ -107,6 +108,33 @@ simpl_top wanteds else return wc } \end{code} +Note [When to do type-class defaulting] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +In GHC 7.6 and 7.8.2, we did type-class defaulting only if insolubleWC +was false, on the grounds that defaulting can't help solve insoluble +constraints. But if we *don't* do defaulting we may report a whole +lot of errors that would be solved by defaulting; these errors are +quite spurious because fixing the single insoluble error means that +defaulting happens again, which makes all the other errors go away. +This is jolly confusing: Trac #9033. + +So it seems better to always do type-class defaulting. + +However, always doing defaulting does mean that we'll do it in +situations like this (Trac #5934): + run :: (forall s. GenST s) -> Int + run = fromInteger 0 +We don't unify the return type of fromInteger with the given function +type, because the latter involves foralls. So we're left with + (Num alpha, alpha ~ (forall s. GenST s) -> Int) +Now we do defaulting, get alpha := Integer, and report that we can't +match Integer with (forall s. GenST s) -> Int. That's not totally +stupid, but perhaps a little strange. + +Another potential alternative would be to suppress *all* non-insoluble +errors if there are *any* insoluble errors, anywhere, but that seems +too drastic. + Note [Must simplify after defaulting] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We may have a deeply buried constraint @@ -815,39 +843,6 @@ Consider floated_eqs (all wanted or derived): simpl_loop. 
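The following small module is not part of the patch; it is a hypothetical sketch (module and binding names are invented) of the user-visible effect described in Note [When to do type-class defaulting] above. It is deliberately ill-typed: 'oops' carries the single insoluble constraint, while the other bindings need ordinary type-class defaulting.

    module DefaultingDemo where

    oops :: Bool
    oops = 'x'                        -- the one insoluble constraint (Char vs Bool)

    size :: String
    size = show (2 + 3)               -- (Show a, Num a) is ambiguous; defaulting picks a := Integer

    big :: Bool
    big = 2 ^ 10 > (1000 :: Integer)  -- the exponent's (Integral b) constraint is defaulted

With defaulting always attempted, only the Char/Bool mismatch is reported; under the old behaviour the insoluble constraint would switch defaulting off, and 'size' and 'big' would produce extra, spurious errors about the otherwise-defaultable Num/Show/Integral constraints.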
So we iterate if there any of these \begin{code} -floatEqualities :: [TcTyVar] -> Bool -> WantedConstraints - -> TcS (Cts, WantedConstraints) --- Post: The returned floated constraints (Cts) are only Wanted or Derived --- and come from the input wanted ev vars or deriveds --- Also performs some unifications, adding to monadically-carried ty_binds --- These will be used when processing floated_eqs later -floatEqualities skols no_given_eqs wanteds@(WC { wc_flat = flats }) - | not no_given_eqs -- There are some given equalities, so don't float - = return (emptyBag, wanteds) -- Note [Float Equalities out of Implications] - | otherwise - = do { let (float_eqs, remaining_flats) = partitionBag is_floatable flats - ; untch <- TcS.getUntouchables - ; mapM_ (promoteTyVar untch) (varSetElems (tyVarsOfCts float_eqs)) - -- See Note [Promoting unification variables] - ; ty_binds <- getTcSTyBindsMap - ; traceTcS "floatEqualities" (vcat [ text "Flats =" <+> ppr flats - , text "Floated eqs =" <+> ppr float_eqs - , text "Ty binds =" <+> ppr ty_binds]) - ; return (float_eqs, wanteds { wc_flat = remaining_flats }) } - where - -- See Note [Float equalities from under a skolem binding] - skol_set = fixVarSet mk_next (mkVarSet skols) - mk_next tvs = foldrBag grow_one tvs flats - grow_one (CFunEqCan { cc_tyargs = xis, cc_rhs = rhs }) tvs - | intersectsVarSet tvs (tyVarsOfTypes xis) - = tvs `unionVarSet` tyVarsOfType rhs - grow_one _ tvs = tvs - - is_floatable :: Ct -> Bool - is_floatable ct = isEqPred pred && skol_set `disjointVarSet` tyVarsOfType pred - where - pred = ctPred ct - promoteTyVar :: Untouchables -> TcTyVar -> TcS () -- When we float a constraint out of an implication we must restore -- invariant (MetaTvInv) in Note [Untouchable type variables] in TcType @@ -1008,6 +1003,80 @@ should! If we don't solve the constraint, we'll stupidly quantify over (b:*) instead of (a:OpenKind), which can lead to disaster; see Trac #7332. Trac #7641 is a simpler example. +Note [Promoting unification variables] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +When we float an equality out of an implication we must "promote" free +unification variables of the equality, in order to maintain Invariant +(MetaTvInv) from Note [Untouchable type variables] in TcType. for the +leftover implication. + +This is absolutely necessary. Consider the following example. We start +with two implications and a class with a functional dependency. + + class C x y | x -> y + instance C [a] [a] + + (I1) [untch=beta]forall b. 0 => F Int ~ [beta] + (I2) [untch=beta]forall c. 0 => F Int ~ [[alpha]] /\ C beta [c] + +We float (F Int ~ [beta]) out of I1, and we float (F Int ~ [[alpha]]) out of I2. +They may react to yield that (beta := [alpha]) which can then be pushed inwards +the leftover of I2 to get (C [alpha] [a]) which, using the FunDep, will mean that +(alpha := a). In the end we will have the skolem 'b' escaping in the untouchable +beta! Concrete example is in indexed_types/should_fail/ExtraTcsUntch.hs: + + class C x y | x -> y where + op :: x -> y -> () + + instance C [a] [a] + + type family F a :: * + + h :: F Int -> () + h = undefined + + data TEx where + TEx :: a -> TEx + + + f (x::beta) = + let g1 :: forall b. 
b -> () + g1 _ = h [x] + g2 z = case z of TEx y -> (h [[undefined]], op x [y]) + in (g1 '3', g2 undefined) + + + +Note [Solving Family Equations] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +After we are done with simplification we may be left with constraints of the form: + [Wanted] F xis ~ beta +If 'beta' is a touchable unification variable not already bound in the TyBinds +then we'd like to create a binding for it, effectively "defaulting" it to be 'F xis'. + +When is it ok to do so? + 1) 'beta' must not already be defaulted to something. Example: + + [Wanted] F Int ~ beta <~ Will default [beta := F Int] + [Wanted] F Char ~ beta <~ Already defaulted, can't default again. We + have to report this as unsolved. + + 2) However, we must still do an occurs check when defaulting (F xis ~ beta), to + set [beta := F xis] only if beta is not among the free variables of xis. + + 3) Notice that 'beta' can't be bound in ty binds already because we rewrite RHS + of type family equations. See Inert Set invariants in TcInteract. + +This solving is now happening during zonking, see Note [Unflattening while zonking] +in TcMType. + + +********************************************************************************* +* * +* Floating equalities * +* * +********************************************************************************* + Note [Float Equalities out of Implications] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ For ordinary pattern matches (including existentials) we float @@ -1053,8 +1122,59 @@ Consequence: classes with functional dependencies don't matter (since there is no evidence for a fundep equality), but equality superclasses do matter (since they carry evidence). +\begin{code} +floatEqualities :: [TcTyVar] -> Bool -> WantedConstraints + -> TcS (Cts, WantedConstraints) +-- Main idea: see Note [Float Equalities out of Implications] +-- +-- Post: The returned floated constraints (Cts) are only Wanted or Derived +-- and come from the input wanted ev vars or deriveds +-- Also performs some unifications (via promoteTyVar), adding to +-- monadically-carried ty_binds. These will be used when processing +-- floated_eqs later +-- +-- Subtleties: Note [Float equalities from under a skolem binding] +-- Note [Skolem escape] +floatEqualities skols no_given_eqs wanteds@(WC { wc_flat = flats }) + | not no_given_eqs -- There are some given equalities, so don't float + = return (emptyBag, wanteds) -- Note [Float Equalities out of Implications] + | otherwise + = do { let (float_eqs, remaining_flats) = partitionBag is_floatable flats + ; untch <- TcS.getUntouchables + ; mapM_ (promoteTyVar untch) (varSetElems (tyVarsOfCts float_eqs)) + -- See Note [Promoting unification variables] + ; ty_binds <- getTcSTyBindsMap + ; traceTcS "floatEqualities" (vcat [ text "Skols =" <+> ppr skols + , text "Flats =" <+> ppr flats + , text "Skol set =" <+> ppr skol_set + , text "Floated eqs =" <+> ppr float_eqs + , text "Ty binds =" <+> ppr ty_binds]) + ; return (float_eqs, wanteds { wc_flat = remaining_flats }) } + where + is_floatable :: Ct -> Bool + is_floatable ct + = case classifyPredType (ctPred ct) of + EqPred ty1 ty2 -> skol_set `disjointVarSet` tyVarsOfType ty1 + && skol_set `disjointVarSet` tyVarsOfType ty2 + _ -> False + + skol_set = fixVarSet mk_next (mkVarSet skols) + mk_next tvs = foldr grow_one tvs flat_eqs + flat_eqs :: [(TcTyVarSet, TcTyVarSet)] + flat_eqs = [ (tyVarsOfType ty1, tyVarsOfType ty2) + | EqPred ty1 ty2 <- map (classifyPredType . 
ctPred) (bagToList flats)] + grow_one (tvs1,tvs2) tvs + | intersectsVarSet tvs tvs1 = tvs `unionVarSet` tvs2 + | intersectsVarSet tvs tvs2 = tvs `unionVarSet` tvs1 + | otherwise = tvs +\end{code} + Note [When does an implication have given equalities?] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + NB: This note is mainly referred to from TcSMonad + but it relates to floating equalities, so I've + left it here + Consider an implication beta => alpha ~ Int where beta is a unification variable that has already been unified @@ -1096,118 +1216,97 @@ An alternative we considered was to equalities mentions any of the ic_givens of this implication. This seems like the Right Thing, but it's more code, and more work at runtime, so we are using the FlatSkolOrigin idea instead. It's less -obvious that it works, but I htink it does, and it's simple and efficient. - +obvious that it works, but I think it does, and it's simple and efficient. Note [Float equalities from under a skolem binding] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You might worry about skolem escape with all this floating. -For example, consider - [2] forall a. (a ~ F beta[2] delta, - Maybe beta[2] ~ gamma[1]) - -The (Maybe beta ~ gamma) doesn't mention 'a', so we float it, and -solve with gamma := beta. But what if later delta:=Int, and - F b Int = b. -Then we'd get a ~ beta[2], and solve to get beta:=a, and now the -skolem has escaped! - -But it's ok: when we float (Maybe beta[2] ~ gamma[1]), we promote beta[2] -to beta[1], and that means the (a ~ beta[1]) will be stuck, as it should be. - -Previously we tried to "grow" the skol_set with the constraints, to get -all the tyvars that could *conceivably* unify with the skolems, but that -was far too conservative (Trac #7804). Example: this should be fine: - f :: (forall a. a -> Proxy x -> Proxy (F x)) -> Int - f = error "Urk" :: (forall a. a -> Proxy x -> Proxy (F x)) -> Int - -BUT (sigh) we have to be careful. Here are some edge cases: +Which of the flat equalities can we float out? Obviously, only +ones that don't mention the skolem-bound variables. But that is +over-eager. Consider + [2] forall a. F a beta[1] ~ gamma[2], G beta[1] gamma[2] ~ Int +The second constraint doesn't mention 'a'. But if we float it +we'll promote gamma to gamma'[1]. Now suppose that we learn that +beta := Bool, and F a Bool = a, and G Bool _ = Int. Then we'll +be left with the constraint + [2] forall a. a ~ gamma'[1] +which is insoluble because gamma became untouchable. + +Solution: only promote a constraint if its free variables cannot +possibly be connected with the skolems. Procedurally, start with +the skolems and "grow" that set as follows: + * For each flat equality F ts ~ s, or tv ~ s, + if the current set intersects with the LHS of the equality, + add the free vars of the RHS, and vice versa +That gives us a grown skolem set. Now float an equality if its free +vars don't intersect the grown skolem set. + +This seems very ad hoc (sigh). But here are some tricky edge cases: a) [2]forall a. (F a delta[1] ~ beta[2], delta[1] ~ Maybe beta[2]) -b) [2]forall a. (F b ty ~ beta[2], G beta[2] ~ gamma[2]) +b1) [2]forall a. (F a ty ~ beta[2], G beta[2] ~ gamma[2]) +b2) [2]forall a. (a ~ beta[2], G beta[2] ~ gamma[2]) c) [2]forall a. (F a ty ~ beta[2], delta[1] ~ Maybe beta[2]) +d) [2]forall a. (gamma[1] ~ Tree beta[2], F ty ~ beta[2]) In (a) we *must* float out the second equality, else we can't solve at all (Trac #7804). In (b) we *must not* float out the second equality.
- It will ultimately be solved (by flattening) in situ, but if we - float it we'll promote beta,gamma, and render the first equality insoluble. +In (b1, b2) we *must not* float out the second equality. + It will ultimately be solved (by flattening) in situ, but if we float + it we'll promote beta,gamma, and render the first equality insoluble. + + Trac #9316 was an example of (b2). You may wonder why (a ~ beta[2]) isn't + solved; in #9316 it wasn't solved because (a:*) and (beta:kappa[1]), so the + equality was kind-mismatched, and hence was a CIrredEvCan. There was + another equality alongside, (kappa[1] ~ *). We must first float *that* + one out and *then* we can solve (a ~ beta). In (c) it would be OK to float the second equality but better not to. If we flatten we see (delta[1] ~ Maybe (F a ty)), which is a - skolem-escape problem. If we float the secodn equality we'll + skolem-escape problem. If we float the second equality we'll end up with (F a ty ~ beta'[1]), which is a less explicable error. -Hence we start with the skolems, grow them by the CFunEqCans, and -float ones that don't mention the grown variables. Seems very ad hoc. - -Note [Promoting unification variables] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -When we float an equality out of an implication we must "promote" free -unification variables of the equality, in order to maintain Invariant -(MetaTvInv) from Note [Untouchable type variables] in TcType. for the -leftover implication. - -This is absolutely necessary. Consider the following example. We start -with two implications and a class with a functional dependency. - - class C x y | x -> y - instance C [a] [a] - - (I1) [untch=beta]forall b. 0 => F Int ~ [beta] - (I2) [untch=beta]forall c. 0 => F Int ~ [[alpha]] /\ C beta [c] - -We float (F Int ~ [beta]) out of I1, and we float (F Int ~ [[alpha]]) out of I2. -They may react to yield that (beta := [alpha]) which can then be pushed inwards -the leftover of I2 to get (C [alpha] [a]) which, using the FunDep, will mean that -(alpha := a). In the end we will have the skolem 'b' escaping in the untouchable -beta! Concrete example is in indexed_types/should_fail/ExtraTcsUntch.hs: - - class C x y | x -> y where - op :: x -> y -> () - - instance C [a] [a] +In (d) we must float the first equality, so that we can unify gamma. + But that promotes beta, so we must float the second equality too, + Trac #7196 exhibits this case - type family F a :: * +Some notes - h :: F Int -> () - h = undefined +* When "growing", do not simply take the free vars of the predicate! + Example [2]forall a. (a:* ~ beta[2]:kappa[1]), (kappa[1] ~ *) + We must float the second, and we must not float the first. + But the first actually looks like ((~) kappa a beta), so if we just + look at its free variables we'll see {a,kappa,beta), and that might + make us think kappa should be in the grown skol set. - data TEx where - TEx :: a -> TEx + (In any case, the kind argument for a kind-mis-matched equality like + this one doesn't really make sense anyway.) + That's why we use classifyPred when growing. - f (x::beta) = - let g1 :: forall b. 
b -> () - g1 _ = h [x] - g2 z = case z of TEx y -> (h [[undefined]], op x [y]) - in (g1 '3', g2 undefined) - - - -Note [Solving Family Equations] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -After we are done with simplification we may be left with constraints of the form: - [Wanted] F xis ~ beta -If 'beta' is a touchable unification variable not already bound in the TyBinds -then we'd like to create a binding for it, effectively "defaulting" it to be 'F xis'. +* Previously we tried to "grow" the skol_set with *all* the + constraints (not just equalities), to get all the tyvars that could + *conceivably* unify with the skolems, but that was far too + conservative (Trac #7804). Example: this should be fine: + f :: (forall a. a -> Proxy x -> Proxy (F x)) -> Int + f = error "Urk" :: (forall a. a -> Proxy x -> Proxy (F x)) -> Int -When is it ok to do so? - 1) 'beta' must not already be defaulted to something. Example: - [Wanted] F Int ~ beta <~ Will default [beta := F Int] - [Wanted] F Char ~ beta <~ Already defaulted, can't default again. We - have to report this as unsolved. - - 2) However, we must still do an occurs check when defaulting (F xis ~ beta), to - set [beta := F xis] only if beta is not among the free variables of xis. +Note [Skolem escape] +~~~~~~~~~~~~~~~~~~~~ +You might worry about skolem escape with all this floating. +For example, consider + [2] forall a. (a ~ F beta[2] delta, + Maybe beta[2] ~ gamma[1]) - 3) Notice that 'beta' can't be bound in ty binds already because we rewrite RHS - of type family equations. See Inert Set invariants in TcInteract. +The (Maybe beta ~ gamma) doesn't mention 'a', so we float it, and +solve with gamma := beta. But what if later delta:=Int, and + F b Int = b. +Then we'd get a ~ beta[2], and solve to get beta:=a, and now the +skolem has escaped! -This solving is now happening during zonking, see Note [Unflattening while zonking] -in TcMType. +But it's ok: when we float (Maybe beta[2] ~ gamma[1]), we promote beta[2] +to beta[1], and that means the (a ~ beta[1]) will be stuck, as it should be. ********************************************************************************* diff --git a/compiler/typecheck/TcSplice.lhs b/compiler/typecheck/TcSplice.lhs index 7fce241edb..de3fbdbe89 100644 --- a/compiler/typecheck/TcSplice.lhs +++ b/compiler/typecheck/TcSplice.lhs @@ -7,8 +7,9 @@ TcSplice: Template Haskell splices \begin{code} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE CPP, FlexibleInstances, MagicHash, ScopedTypeVariables #-} {-# OPTIONS_GHC -fno-warn-orphans #-} + module TcSplice( -- These functions are defined in stage1 and stage2 -- The raise civilised errors in stage1 @@ -70,7 +71,7 @@ import Class import Inst import TyCon import CoAxiom -import PatSyn ( patSynId ) +import PatSyn ( patSynName ) import ConLike import DataCon import TcEvidence( TcEvBinds(..) 
) @@ -1183,7 +1184,7 @@ reifyThing (AGlobal (AConLike (RealDataCon dc))) (reifyName (dataConOrigTyCon dc)) fix) } reifyThing (AGlobal (AConLike (PatSynCon ps))) - = noTH (sLit "pattern synonyms") (ppr $ patSynId ps) + = noTH (sLit "pattern synonyms") (ppr $ patSynName ps) reifyThing (ATcId {tct_id = id}) = do { ty1 <- zonkTcType (idType id) -- Make use of all the info we have, even @@ -1507,13 +1508,14 @@ lookupThAnnLookup (TH.AnnLookupModule (TH.Module pn mn)) mkModule (stringToPackageId $ TH.pkgString pn) (mkModuleName $ TH.modString mn) reifyAnnotations :: Data a => TH.AnnLookup -> TcM [a] -reifyAnnotations th_nm - = do { name <- lookupThAnnLookup th_nm - ; eps <- getEps +reifyAnnotations th_name + = do { name <- lookupThAnnLookup th_name + ; topEnv <- getTopEnv + ; epsHptAnns <- liftIO $ prepareAnnotations topEnv Nothing ; tcg <- getGblEnv - ; let epsAnns = findAnns deserializeWithData (eps_ann_env eps) name - ; let envAnns = findAnns deserializeWithData (tcg_ann_env tcg) name - ; return (envAnns ++ epsAnns) } + ; let selectedEpsHptAnns = findAnns deserializeWithData epsHptAnns name + ; let selectedTcgAnns = findAnns deserializeWithData (tcg_ann_env tcg) name + ; return (selectedEpsHptAnns ++ selectedTcgAnns) } ------------------------------ modToTHMod :: Module -> TH.Module diff --git a/compiler/typecheck/TcSplice.lhs-boot b/compiler/typecheck/TcSplice.lhs-boot index c496aed798..ea3848db18 100644 --- a/compiler/typecheck/TcSplice.lhs-boot +++ b/compiler/typecheck/TcSplice.lhs-boot @@ -1,4 +1,6 @@ \begin{code} +{-# LANGUAGE CPP #-} + module TcSplice where import HsSyn ( HsSplice, HsBracket, HsQuasiQuote, HsExpr, LHsType, LHsExpr, LPat, LHsDecl ) diff --git a/compiler/typecheck/TcTyClsDecls.lhs b/compiler/typecheck/TcTyClsDecls.lhs index f11295a7d0..f09bef8081 100644 --- a/compiler/typecheck/TcTyClsDecls.lhs +++ b/compiler/typecheck/TcTyClsDecls.lhs @@ -6,7 +6,7 @@ TcTyClsDecls: Typecheck type and class declarations \begin{code} -{-# LANGUAGE TupleSections #-} +{-# LANGUAGE CPP, TupleSections #-} module TcTyClsDecls ( tcTyAndClassDecls, tcAddImplicits, @@ -14,7 +14,7 @@ module TcTyClsDecls ( -- Functions used by TcInstDcls to check -- data/type family instance declarations kcDataDefn, tcConDecls, dataDeclChecks, checkValidTyCon, - tcSynFamInstDecl, tcFamTyPats, + tcFamTyPats, tcTyFamInstEqn, famTyConShape, tcAddTyFamInstCtxt, tcAddDataFamInstCtxt, wrongKindOfFamily, dataConCtxt, badDataConTyCon ) where @@ -502,10 +502,12 @@ kcTyClDecl (ForeignType {}) = return () -- closed type families look at their equations, but other families don't -- do anything here -kcTyClDecl (FamDecl (FamilyDecl { fdLName = L _ fam_tc_name - , fdInfo = ClosedTypeFamily eqns })) - = do { k <- kcLookupKind fam_tc_name - ; mapM_ (kcTyFamInstEqn fam_tc_name k) eqns } +kcTyClDecl (FamDecl (FamilyDecl { fdLName = L _ fam_tc_name + , fdTyVars = hs_tvs + , fdInfo = ClosedTypeFamily eqns })) + = do { tc_kind <- kcLookupKind fam_tc_name + ; let fam_tc_shape = ( fam_tc_name, length (hsQTvBndrs hs_tvs), tc_kind) + ; mapM_ (kcTyFamInstEqn fam_tc_shape) eqns } kcTyClDecl (FamDecl {}) = return () ------------------- @@ -638,13 +640,13 @@ tcTyClDecl1 _parent rec_info ; (sig_stuff, gen_dm_env) <- tcClassSigs class_name sigs meths ; at_stuff <- tcClassATs class_name (AssocFamilyTyCon clas) ats at_defs ; mindef <- tcClassMinimalDef class_name sigs sig_stuff - ; clas <- buildClass False {- Must include unfoldings for selectors -} + ; clas <- buildClass class_name tvs' roles ctxt' fds' at_stuff sig_stuff mindef tc_isrec ; traceTc 
"tcClassDecl" (ppr fundeps $$ ppr tvs' $$ ppr fds') ; return (clas, tvs', gen_dm_env) } - ; let { gen_dm_ids = [ AnId (mkExportedLocalId gen_dm_name gen_dm_ty) + ; let { gen_dm_ids = [ AnId (mkExportedLocalId VanillaId gen_dm_name gen_dm_ty) | (sel_id, GenDefMeth gen_dm_name) <- classOpItems clas , let gen_dm_tau = expectJust "tcTyClDecl1" $ lookupNameEnv gen_dm_env (idName sel_id) @@ -699,14 +701,11 @@ tcFamDecl1 parent ; checkFamFlag tc_name -- make sure we have -XTypeFamilies - -- check to make sure all the names used in the equations are - -- consistent - ; let names = map (tfie_tycon . unLoc) eqns - ; tcSynFamInstNames lname names - - -- process the equations, creating CoAxBranches - ; tycon_kind <- kcLookupKind tc_name - ; branches <- mapM (tcTyFamInstEqn tc_name tycon_kind) eqns + -- Process the equations, creating CoAxBranches + ; tc_kind <- kcLookupKind tc_name + ; let fam_tc_shape = (tc_name, length (hsQTvBndrs tvs), tc_kind) + + ; branches <- mapM (tcTyFamInstEqn fam_tc_shape) eqns -- we need the tycon that we will be creating, but it's in scope. -- just look it up. @@ -793,7 +792,7 @@ tcDataDefn rec_info tc_name tvs kind ; checkKind kind tc_kind ; return () } - ; h98_syntax <- dataDeclChecks tc_name new_or_data stupid_theta cons + ; gadt_syntax <- dataDeclChecks tc_name new_or_data stupid_theta cons ; tycon <- fixM $ \ tycon -> do { let res_ty = mkTyConApp tycon (mkTyVarTys final_tvs) @@ -808,7 +807,7 @@ tcDataDefn rec_info tc_name tvs kind ; return (buildAlgTyCon tc_name final_tvs roles cType stupid_theta tc_rhs (rti_is_rec rec_info tc_name) (rti_promotable rec_info) - (not h98_syntax) NoParentTyCon) } + gadt_syntax NoParentTyCon) } ; return [ATyCon tycon] } \end{code} @@ -836,76 +835,90 @@ Note that: - We can get default definitions only for type families, not data families \begin{code} -tcClassATs :: Name -- The class name (not knot-tied) - -> TyConParent -- The class parent of this associated type - -> [LFamilyDecl Name] -- Associated types. - -> [LTyFamInstDecl Name] -- Associated type defaults. +tcClassATs :: Name -- The class name (not knot-tied) + -> TyConParent -- The class parent of this associated type + -> [LFamilyDecl Name] -- Associated types. + -> [LTyFamDefltEqn Name] -- Associated type defaults. -> TcM [ClassATItem] tcClassATs class_name parent ats at_defs = do { -- Complain about associated type defaults for non associated-types sequence_ [ failWithTc (badATErr class_name n) - | n <- map (tyFamInstDeclName . unLoc) at_defs + | n <- map at_def_tycon at_defs , not (n `elemNameSet` at_names) ] ; mapM tc_at ats } where - at_names = mkNameSet (map (unLoc . fdLName . 
unLoc) ats) + at_def_tycon :: LTyFamDefltEqn Name -> Name + at_def_tycon (L _ eqn) = unLoc (tfe_tycon eqn) + + at_fam_name :: LFamilyDecl Name -> Name + at_fam_name (L _ decl) = unLoc (fdLName decl) + + at_names = mkNameSet (map at_fam_name ats) - at_defs_map :: NameEnv [LTyFamInstDecl Name] + at_defs_map :: NameEnv [LTyFamDefltEqn Name] -- Maps an AT in 'ats' to a list of all its default defs in 'at_defs' at_defs_map = foldr (\at_def nenv -> extendNameEnv_C (++) nenv - (tyFamInstDeclName (unLoc at_def)) [at_def]) + (at_def_tycon at_def) [at_def]) emptyNameEnv at_defs tc_at at = do { [ATyCon fam_tc] <- addLocM (tcFamDecl1 parent) at - ; let at_defs = lookupNameEnv at_defs_map (unLoc $ fdLName $ unLoc at) - `orElse` [] - ; atd <- mapM (tcDefaultAssocDecl fam_tc) at_defs - ; return (fam_tc, atd) } + ; let at_defs = lookupNameEnv at_defs_map (at_fam_name at) + `orElse` [] + ; atd <- tcDefaultAssocDecl fam_tc at_defs + ; return (ATI fam_tc atd) } ------------------------- -tcDefaultAssocDecl :: TyCon -- ^ Family TyCon - -> LTyFamInstDecl Name -- ^ RHS - -> TcM CoAxBranch -- ^ Type checked RHS and free TyVars -tcDefaultAssocDecl fam_tc (L loc decl) +tcDefaultAssocDecl :: TyCon -- ^ Family TyCon + -> [LTyFamDefltEqn Name] -- ^ Defaults + -> TcM (Maybe Type) -- ^ Type checked RHS +tcDefaultAssocDecl _ [] + = return Nothing -- No default declaration + +tcDefaultAssocDecl _ (d1:_:_) + = failWithTc (ptext (sLit "More than one default declaration for") + <+> ppr (tfe_tycon (unLoc d1))) + +tcDefaultAssocDecl fam_tc [L loc (TyFamEqn { tfe_tycon = L _ tc_name + , tfe_pats = hs_tvs + , tfe_rhs = rhs })] = setSrcSpan loc $ - tcAddTyFamInstCtxt decl $ - do { traceTc "tcDefaultAssocDecl" (ppr decl) - ; tcSynFamInstDecl fam_tc decl } + tcAddFamInstCtxt (ptext (sLit "default type instance")) tc_name $ + tcTyClTyVars tc_name hs_tvs $ \ tvs rhs_kind -> + do { traceTc "tcDefaultAssocDecl" (ppr tc_name) + ; checkTc (isSynFamilyTyCon fam_tc) (wrongKindOfFamily fam_tc) + ; let (fam_name, fam_pat_arity, _) = famTyConShape fam_tc + ; ASSERT( fam_name == tc_name ) + checkTc (length (hsQTvBndrs hs_tvs) == fam_pat_arity) + (wrongNumberOfParmsErr fam_pat_arity) + ; rhs_ty <- tcCheckLHsType rhs rhs_kind + ; rhs_ty <- zonkTcTypeToType emptyZonkEnv rhs_ty + ; let fam_tc_tvs = tyConTyVars fam_tc + subst = zipTopTvSubst tvs (mkTyVarTys fam_tc_tvs) + ; return ( ASSERT( equalLength fam_tc_tvs tvs ) + Just (substTy subst rhs_ty) ) } -- We check for well-formedness and validity later, in checkValidClass ------------------------- -tcSynFamInstDecl :: TyCon -> TyFamInstDecl Name -> TcM CoAxBranch --- Placed here because type family instances appear as --- default decls in class declarations -tcSynFamInstDecl fam_tc (TyFamInstDecl { tfid_eqn = eqn }) - = do { checkTc (isSynTyCon fam_tc) (wrongKindOfFamily fam_tc) - ; tcTyFamInstEqn (tyConName fam_tc) (tyConKind fam_tc) eqn } - --- Checks to make sure that all the names in an instance group are the same -tcSynFamInstNames :: Located Name -> [Located Name] -> TcM () -tcSynFamInstNames (L _ first) names - = do { let badNames = filter ((/= first) . 
unLoc) names - ; mapM_ (failLocated (wrongNamesInInstGroup first)) badNames } - where - failLocated :: (Name -> SDoc) -> Located Name -> TcM () - failLocated msg_fun (L loc name) - = setSrcSpan loc $ - failWithTc (msg_fun name) - -kcTyFamInstEqn :: Name -> Kind -> LTyFamInstEqn Name -> TcM () -kcTyFamInstEqn fam_tc_name kind - (L loc (TyFamInstEqn { tfie_pats = pats, tfie_rhs = hs_ty })) +kcTyFamInstEqn :: FamTyConShape -> LTyFamInstEqn Name -> TcM () +kcTyFamInstEqn fam_tc_shape + (L loc (TyFamEqn { tfe_pats = pats, tfe_rhs = hs_ty })) = setSrcSpan loc $ discardResult $ - tc_fam_ty_pats fam_tc_name kind pats (discardResult . (tcCheckLHsType hs_ty)) - -tcTyFamInstEqn :: Name -> Kind -> LTyFamInstEqn Name -> TcM CoAxBranch -tcTyFamInstEqn fam_tc_name kind - (L loc (TyFamInstEqn { tfie_pats = pats, tfie_rhs = hs_ty })) + tc_fam_ty_pats fam_tc_shape pats (discardResult . (tcCheckLHsType hs_ty)) + +tcTyFamInstEqn :: FamTyConShape -> LTyFamInstEqn Name -> TcM CoAxBranch +-- Needs to be here, not in TcInstDcls, because closed families +-- (typechecked here) have TyFamInstEqns +tcTyFamInstEqn fam_tc_shape@(fam_tc_name,_,_) + (L loc (TyFamEqn { tfe_tycon = L _ eqn_tc_name + , tfe_pats = pats + , tfe_rhs = hs_ty })) = setSrcSpan loc $ - tcFamTyPats fam_tc_name kind pats (discardResult . (tcCheckLHsType hs_ty)) $ + tcFamTyPats fam_tc_shape pats (discardResult . (tcCheckLHsType hs_ty)) $ \tvs' pats' res_kind -> - do { rhs_ty <- tcCheckLHsType hs_ty res_kind + do { checkTc (fam_tc_name == eqn_tc_name) + (wrongTyFamName fam_tc_name eqn_tc_name) + ; rhs_ty <- tcCheckLHsType hs_ty res_kind ; rhs_ty <- zonkTcTypeToType emptyZonkEnv rhs_ty ; traceTc "tcTyFamInstEqn" (ppr fam_tc_name <+> ppr tvs') -- don't print out the pats here, as they might be zonked inside the knot @@ -947,6 +960,19 @@ type families. tcFamTyPats type checks the patterns, zonks, and then calls thing_inside to generate a desugaring. It is used during type-checking (not kind-checking). +Note [Type-checking type patterns] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +When typechecking the patterns of a family instance declaration, we can't +rely on using the family TyCon, because this is sometimes called +from within a type-checking knot. (Specifically for closed type families.) +The type FamTyConShape gives just enough information to do the job. + +The "arity" field of FamTyConShape is the *visible* arity of the family +type constructor, i.e. what the users sees and writes, not including kind +arguments. + +See also Note [tc_fam_ty_pats vs tcFamTyPats] + Note [Failing early in kcDataDefn] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We need to use checkNoErrs when calling kcConDecl. This is because kcConDecl @@ -961,15 +987,18 @@ two bad things could happen: \begin{code} ----------------- --- Note that we can't use the family TyCon, because this is sometimes called --- from within a type-checking knot. So, we ask our callers to do a little more --- work. 
--- See Note [tc_fam_ty_pats vs tcFamTyPats] -tc_fam_ty_pats :: Name -- of the family TyCon - -> Kind -- of the family TyCon +type FamTyConShape = (Name, Arity, Kind) -- See Note [Type-checking type patterns] + +famTyConShape :: TyCon -> FamTyConShape +famTyConShape fam_tc + = ( tyConName fam_tc + , length (filterOut isKindVar (tyConTyVars fam_tc)) + , tyConKind fam_tc ) + +tc_fam_ty_pats :: FamTyConShape -> HsWithBndrs [LHsType Name] -- Patterns - -> (TcKind -> TcM ()) -- Kind checker for RHS - -- result is ignored + -> (TcKind -> TcM ()) -- Kind checker for RHS + -- result is ignored -> TcM ([Kind], [Type], Kind) -- Check the type patterns of a type or data family instance -- type instance F <pat1> <pat2> = <type> @@ -982,7 +1011,7 @@ tc_fam_ty_pats :: Name -- of the family TyCon -- In that case, the type variable 'a' will *already be in scope* -- (and, if C is poly-kinded, so will its kind parameter). -tc_fam_ty_pats fam_tc_name kind +tc_fam_ty_pats (name, arity, kind) (HsWB { hswb_cts = arg_pats, hswb_kvs = kvars, hswb_tvs = tvars }) kind_checker = do { let (fam_kvs, fam_body) = splitForAllTys kind @@ -994,9 +1023,8 @@ tc_fam_ty_pats fam_tc_name kind -- Note that we don't have enough information at hand to do a full check, -- as that requires the full declared arity of the family, which isn't -- nearby. - ; let max_args = length (fst $ splitKindFunTys fam_body) - ; checkTc (length arg_pats <= max_args) $ - wrongNumberOfParmsErrTooMany max_args + ; checkTc (length arg_pats == arity) $ + wrongNumberOfParmsErr arity -- Instantiate with meta kind vars ; fam_arg_kinds <- mapM (const newMetaKindVar) fam_kvs @@ -1011,22 +1039,21 @@ tc_fam_ty_pats fam_tc_name kind -- See Note [Quantifying over family patterns] ; typats <- tcHsTyVarBndrs hs_tvs $ \ _ -> do { kind_checker res_kind - ; tcHsArgTys (quotes (ppr fam_tc_name)) arg_pats arg_kinds } + ; tcHsArgTys (quotes (ppr name)) arg_pats arg_kinds } ; return (fam_arg_kinds, typats, res_kind) } -- See Note [tc_fam_ty_pats vs tcFamTyPats] -tcFamTyPats :: Name -- of the family ToCon - -> Kind -- of the family TyCon +tcFamTyPats :: FamTyConShape -> HsWithBndrs [LHsType Name] -- patterns -> (TcKind -> TcM ()) -- kind-checker for RHS -> ([TKVar] -- Kind and type variables -> [TcType] -- Kind and type arguments -> Kind -> TcM a) -> TcM a -tcFamTyPats fam_tc_name kind pats kind_checker thing_inside +tcFamTyPats fam_shape@(name,_,_) pats kind_checker thing_inside = do { (fam_arg_kinds, typats, res_kind) - <- tc_fam_ty_pats fam_tc_name kind pats kind_checker + <- tc_fam_ty_pats fam_shape pats kind_checker ; let all_args = fam_arg_kinds ++ typats -- Find free variables (after zonking) and turn @@ -1040,7 +1067,7 @@ tcFamTyPats fam_tc_name kind pats kind_checker thing_inside ; all_args' <- zonkTcTypeToTypes ze all_args ; res_kind' <- zonkTcTypeToType ze res_kind - ; traceTc "tcFamTyPats" (ppr fam_tc_name) + ; traceTc "tcFamTyPats" (ppr name) -- don't print out too much, as we might be in the knot ; tcExtendTyVarEnv qtkvs' $ thing_inside qtkvs' all_args' res_kind' } @@ -1101,11 +1128,11 @@ dataDeclChecks :: Name -> NewOrData -> ThetaType -> [LConDecl Name] -> TcM Bool dataDeclChecks tc_name new_or_data stupid_theta cons = do { -- Check that we don't use GADT syntax in H98 world gadtSyntax_ok <- xoptM Opt_GADTSyntax - ; let h98_syntax = consUseH98Syntax cons - ; checkTc (gadtSyntax_ok || h98_syntax) (badGadtDecl tc_name) + ; let gadt_syntax = consUseGadtSyntax cons + ; checkTc (gadtSyntax_ok || not gadt_syntax) (badGadtDecl tc_name) -- Check that the stupid 
theta is empty for a GADT-style declaration - ; checkTc (null stupid_theta || h98_syntax) (badStupidTheta tc_name) + ; checkTc (null stupid_theta || not gadt_syntax) (badStupidTheta tc_name) -- Check that a newtype has exactly one constructor -- Do this before checking for empty data decls, so that @@ -1119,13 +1146,13 @@ dataDeclChecks tc_name new_or_data stupid_theta cons ; is_boot <- tcIsHsBoot -- Are we compiling an hs-boot file? ; checkTc (not (null cons) || empty_data_decls || is_boot) (emptyConDeclsErr tc_name) - ; return h98_syntax } + ; return gadt_syntax } ----------------------------------- -consUseH98Syntax :: [LConDecl a] -> Bool -consUseH98Syntax (L _ (ConDecl { con_res = ResTyGADT _ }) : _) = False -consUseH98Syntax _ = True +consUseGadtSyntax :: [LConDecl a] -> Bool +consUseGadtSyntax (L _ (ConDecl { con_res = ResTyGADT _ }) : _) = True +consUseGadtSyntax _ = False -- All constructors have same shape ----------------------------------- @@ -1466,8 +1493,8 @@ checkValidClosedCoAxiom (CoAxiom { co_ax_branches = branches, co_ax_tc = tc }) -- ones and hence is inaccessible check_accessibility prev_branches cur_branch = do { when (cur_branch `isDominatedBy` prev_branches) $ - setSrcSpan (coAxBranchSpan cur_branch) $ - addErrTc $ inaccessibleCoAxBranch tc cur_branch + addWarnAt (coAxBranchSpan cur_branch) $ + inaccessibleCoAxBranch tc cur_branch ; return (cur_branch : prev_branches) } checkFieldCompat :: Name -> DataCon -> DataCon -> TyVarSet @@ -1484,16 +1511,19 @@ checkValidDataCon :: DynFlags -> Bool -> TyCon -> DataCon -> TcM () checkValidDataCon dflags existential_ok tc con = setSrcSpan (srcLocSpan (getSrcLoc con)) $ addErrCtxt (dataConCtxt con) $ - do { traceTc "checkValidDataCon" (ppr con $$ ppr tc) - - -- Check that the return type of the data constructor + do { -- Check that the return type of the data constructor -- matches the type constructor; eg reject this: -- data T a where { MkT :: Bogus a } -- c.f. 
Note [Check role annotations in a second pass] -- and Note [Checking GADT return types] - ; let tc_tvs = tyConTyVars tc + let tc_tvs = tyConTyVars tc res_ty_tmpl = mkFamilyTyConApp tc (mkTyVarTys tc_tvs) orig_res_ty = dataConOrigResTy con + ; traceTc "checkValidDataCon" (vcat + [ ppr con, ppr tc, ppr tc_tvs + , ppr res_ty_tmpl <+> dcolon <+> ppr (typeKind res_ty_tmpl) + , ppr orig_res_ty <+> dcolon <+> ppr (typeKind orig_res_ty)]) + ; checkTc (isJust (tcMatchTy (mkVarSet tc_tvs) res_ty_tmpl orig_res_ty)) @@ -1581,10 +1611,12 @@ checkValidClass cls ; nullary_type_classes <- xoptM Opt_NullaryTypeClasses ; fundep_classes <- xoptM Opt_FunctionalDependencies - -- Check that the class is unary, unless multiparameter or - -- nullary type classes are enabled - ; checkTc (nullary_type_classes || notNull tyvars) (nullaryClassErr cls) - ; checkTc (multi_param_type_classes || arity <= 1) (classArityErr cls) + -- Check that the class is unary, unless multiparameter type classes + -- are enabled; also recognize deprecated nullary type classes + -- extension (subsumed by multiparameter type classes, Trac #8993) + ; checkTc (multi_param_type_classes || arity == 1 || + (nullary_type_classes && arity == 0)) + (classArityErr arity cls) ; checkTc (fundep_classes || null fundeps) (classFunDepsErr cls) -- Check the super-classes @@ -1621,7 +1653,7 @@ checkValidClass cls -- since there is no possible ambiguity ; let grown_tyvars = growThetaTyVars theta (mkVarSet tyvars) ; checkTc (arity == 0 || tyVarsOfType tau `intersectsVarSet` grown_tyvars) - (noClassTyVarErr cls sel_id) + (noClassTyVarErr cls (ptext (sLit "class method") <+> quotes (ppr sel_id))) ; case dm of GenDefMeth dm_name -> do { dm_id <- tcLookupId dm_name @@ -1643,11 +1675,10 @@ checkValidClass cls -- in the context of a for-all must mention at least one quantified -- type variable. What a mess! 
- check_at_defs (fam_tc, defs) - = tcAddDefaultAssocDeclCtxt (tyConName fam_tc) $ - mapM_ (checkValidTyFamInst mb_clsinfo fam_tc) defs - - mb_clsinfo = Just (cls, mkVarEnv [ (tv, mkTyVarTy tv) | tv <- tyvars ]) + check_at_defs (ATI fam_tc _) + = do { traceTc "check-at" (ppr fam_tc $$ ppr (tyConTyVars fam_tc) $$ ppr tyvars) + ; checkTc (any (`elem` tyvars) (tyConTyVars fam_tc)) + (noClassTyVarErr cls (ptext (sLit "associated type") <+> quotes (ppr fam_tc))) } checkFamFlag :: Name -> TcM () -- Check that we don't use families without -XTypeFamilies @@ -1672,9 +1703,9 @@ checkValidRoleAnnots :: RoleAnnots -> TyThing -> TcM () checkValidRoleAnnots role_annots thing = case thing of { ATyCon tc - | isSynTyCon tc -> check_no_roles - | isFamilyTyCon tc -> check_no_roles - | isAlgTyCon tc -> check_roles + | isTypeSynonymTyCon tc -> check_no_roles + | isFamilyTyCon tc -> check_no_roles + | isAlgTyCon tc -> check_roles where name = tyConName tc @@ -1798,7 +1829,7 @@ checkValidRoles tc mkDefaultMethodIds :: [TyThing] -> [Id] -- See Note [Default method Ids and Template Haskell] mkDefaultMethodIds things - = [ mkExportedLocalId dm_name (idType sel_id) + = [ mkExportedLocalId VanillaId dm_name (idType sel_id) | ATyCon tc <- things , Just cls <- [tyConClass_maybe tc] , (sel_id, DefMeth dm_name) <- classOpItems cls ] @@ -1838,8 +1869,7 @@ mkRecSelBind (tycon, sel_name) = (L loc (IdSig sel_id), unitBag (L loc sel_bind)) where loc = getSrcSpan sel_name - sel_id = Var.mkExportedLocalVar rec_details sel_name - sel_ty vanillaIdInfo + sel_id = mkExportedLocalId rec_details sel_name sel_ty rec_details = RecSelId { sel_tycon = tycon, sel_naughty = is_naughty } -- Find a representative constructor, con1 @@ -2005,13 +2035,6 @@ gotten by appying the eq_spec to the univ_tvs of the data con. 
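As a quick illustration of the class-validity checks touched above (the reworked class-arity error and the new requirement, in check_at_defs, that an associated type mention at least one class variable), here is a hypothetical module, not taken from the patch or its test suite, in which every declaration is expected to be rejected:

    {-# LANGUAGE TypeFamilies #-}
    module ClassValidityDemo where

    class C a where
      op :: Int -> Bool   -- rejected: the class method 'op' mentions none of the
                          -- type or kind variables of class 'C'

    class D a where
      type T b            -- rejected by the new associated-type check: 'T' mentions
                          -- none of the type variables of class 'D'

    class E               -- rejected unless MultiParamTypeClasses (or the deprecated
                          -- NullaryTypeClasses) is enabled: no parameters for class 'E'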
%************************************************************************ \begin{code} -tcAddDefaultAssocDeclCtxt :: Name -> TcM a -> TcM a -tcAddDefaultAssocDeclCtxt name thing_inside - = addErrCtxt ctxt thing_inside - where - ctxt = hsep [ptext (sLit "In the type synonym instance default declaration for"), - quotes (ppr name)] - tcAddTyFamInstCtxt :: TyFamInstDecl Name -> TcM a -> TcM a tcAddTyFamInstCtxt decl = tcAddFamInstCtxt (ptext (sLit "type instance")) (tyFamInstDeclName decl) @@ -2054,26 +2077,26 @@ classOpCtxt :: Var -> Type -> SDoc classOpCtxt sel_id tau = sep [ptext (sLit "When checking the class method:"), nest 2 (pprPrefixOcc sel_id <+> dcolon <+> ppr tau)] -nullaryClassErr :: Class -> SDoc -nullaryClassErr cls - = vcat [ptext (sLit "No parameters for class") <+> quotes (ppr cls), - parens (ptext (sLit "Use NullaryTypeClasses to allow no-parameter classes"))] - -classArityErr :: Class -> SDoc -classArityErr cls - = vcat [ptext (sLit "Too many parameters for class") <+> quotes (ppr cls), - parens (ptext (sLit "Use MultiParamTypeClasses to allow multi-parameter classes"))] +classArityErr :: Int -> Class -> SDoc +classArityErr n cls + | n == 0 = mkErr "No" "no-parameter" + | otherwise = mkErr "Too many" "multi-parameter" + where + mkErr howMany allowWhat = + vcat [ptext (sLit $ howMany ++ " parameters for class") <+> quotes (ppr cls), + parens (ptext (sLit $ "Use MultiParamTypeClasses to allow " + ++ allowWhat ++ " classes"))] classFunDepsErr :: Class -> SDoc classFunDepsErr cls = vcat [ptext (sLit "Fundeps in class") <+> quotes (ppr cls), parens (ptext (sLit "Use FunctionalDependencies to allow fundeps"))] -noClassTyVarErr :: Class -> Var -> SDoc -noClassTyVarErr clas op - = sep [ptext (sLit "The class method") <+> quotes (ppr op), - ptext (sLit "mentions none of the type variables of the class") <+> - ppr clas <+> hsep (map ppr (classTyVars clas))] +noClassTyVarErr :: Class -> SDoc -> SDoc +noClassTyVarErr clas what + = sep [ptext (sLit "The") <+> what, + ptext (sLit "mentions none of the type or kind variables of the class") <+> + quotes (ppr clas <+> hsep (map ppr (classTyVars clas)))] recSynErr :: [LTyClDecl Name] -> TcRn () recSynErr syn_decls @@ -2152,20 +2175,20 @@ wrongKindOfFamily family | isAlgTyCon family = ptext (sLit "data type") | otherwise = pprPanic "wrongKindOfFamily" (ppr family) -wrongNumberOfParmsErrTooMany :: Arity -> SDoc -wrongNumberOfParmsErrTooMany max_args - = ptext (sLit "Number of parameters must match family declaration; expected no more than") +wrongNumberOfParmsErr :: Arity -> SDoc +wrongNumberOfParmsErr max_args + = ptext (sLit "Number of parameters must match family declaration; expected") <+> ppr max_args -wrongNamesInInstGroup :: Name -> Name -> SDoc -wrongNamesInInstGroup first cur - = ptext (sLit "Mismatched type names in closed type family declaration.") $$ - ptext (sLit "First name was") <+> - (ppr first) <> (ptext (sLit "; this one is")) <+> (ppr cur) +wrongTyFamName :: Name -> Name -> SDoc +wrongTyFamName fam_tc_name eqn_tc_name + = hang (ptext (sLit "Mismatched type name in type family instance.")) + 2 (vcat [ ptext (sLit "Expected:") <+> ppr fam_tc_name + , ptext (sLit " Actual:") <+> ppr eqn_tc_name ]) inaccessibleCoAxBranch :: TyCon -> CoAxBranch -> SDoc inaccessibleCoAxBranch tc fi - = ptext (sLit "Inaccessible family instance equation:") $$ + = ptext (sLit "Overlapped type family instance equation:") $$ (pprCoAxBranch tc fi) badRoleAnnot :: Name -> Role -> Role -> SDoc @@ -2206,12 +2229,12 @@ addTyThingCtxt thing name = getName 
thing flav = case thing of ATyCon tc - | isClassTyCon tc -> ptext (sLit "class") - | isSynFamilyTyCon tc -> ptext (sLit "type family") - | isDataFamilyTyCon tc -> ptext (sLit "data family") - | isSynTyCon tc -> ptext (sLit "type") - | isNewTyCon tc -> ptext (sLit "newtype") - | isDataTyCon tc -> ptext (sLit "data") + | isClassTyCon tc -> ptext (sLit "class") + | isSynFamilyTyCon tc -> ptext (sLit "type family") + | isDataFamilyTyCon tc -> ptext (sLit "data family") + | isTypeSynonymTyCon tc -> ptext (sLit "type") + | isNewTyCon tc -> ptext (sLit "newtype") + | isDataTyCon tc -> ptext (sLit "data") _ -> pprTrace "addTyThingCtxt strange" (ppr thing) empty diff --git a/compiler/typecheck/TcTyDecls.lhs b/compiler/typecheck/TcTyDecls.lhs index ed9a5b7661..262aa519b3 100644 --- a/compiler/typecheck/TcTyDecls.lhs +++ b/compiler/typecheck/TcTyDecls.lhs @@ -9,7 +9,8 @@ This stuff is only used for source-code decls; it's recorded in interface files for imported data types. \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -120,7 +121,7 @@ synTyConsOfType ty mkSynEdges :: [LTyClDecl Name] -> [(LTyClDecl Name, Name, [Name])] mkSynEdges syn_decls = [ (ldecl, name, nameSetToList fvs) | ldecl@(L _ (SynDecl { tcdLName = L _ name - , tcdFVs = fvs })) <- syn_decls ] + , tcdFVs = fvs })) <- syn_decls ] calcSynCycles :: [LTyClDecl Name] -> [SCC (LTyClDecl Name)] calcSynCycles = stronglyConnCompFromEdgedVertices . mkSynEdges @@ -263,7 +264,7 @@ this for all newtypes, we'd get infinite types. So we figure out for each newtype whether it is "recursive", and add a coercion if so. In effect, we are trying to "cut the loops" by identifying a loop-breaker. -2. Avoid infinite unboxing. This is nothing to do with newtypes. +2. Avoid infinite unboxing. This has nothing to do with newtypes. Suppose we have data T = MkT Int T f (MkT x t) = f t @@ -672,10 +673,10 @@ initialRoleEnv is_boot annots = extendNameEnvList emptyNameEnv . initialRoleEnv1 :: Bool -> RoleAnnots -> TyCon -> (Name, [Role]) initialRoleEnv1 is_boot annots_env tc - | isFamilyTyCon tc = (name, map (const Nominal) tyvars) - | isAlgTyCon tc - || isSynTyCon tc = (name, default_roles) - | otherwise = pprPanic "initialRoleEnv1" (ppr tc) + | isFamilyTyCon tc = (name, map (const Nominal) tyvars) + | isAlgTyCon tc = (name, default_roles) + | isTypeSynonymTyCon tc = (name, default_roles) + | otherwise = pprPanic "initialRoleEnv1" (ppr tc) where name = tyConName tc tyvars = tyConTyVars tc (kvs, tvs) = span isKindVar tyvars diff --git a/compiler/typecheck/TcType.lhs b/compiler/typecheck/TcType.lhs index 08c7a627ce..a952ce702e 100644 --- a/compiler/typecheck/TcType.lhs +++ b/compiler/typecheck/TcType.lhs @@ -15,6 +15,8 @@ The "tc" prefix is for "TypeChecker", because the type checker is the principal client. 
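The following is likewise only an illustrative sketch, not a test case from the patch, of the behaviour change to check_accessibility and inaccessibleCoAxBranch above: an equation of a closed type family that can never fire now elicits the "Overlapped type family instance equation" warning rather than an error.

    {-# LANGUAGE TypeFamilies #-}
    module OverlapDemo where

    type family F a where
      F a    = Int
      F Bool = Char   -- dominated by the first equation; reported as overlapped
                      -- (with this patch, a warning rather than an error)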
\begin{code} +{-# LANGUAGE CPP #-} + module TcType ( -------------------------------- -- Types @@ -478,7 +480,7 @@ pprTcTyVarDetails (MetaTv { mtv_info = info, mtv_untch = untch }) pprUserTypeCtxt :: UserTypeCtxt -> SDoc pprUserTypeCtxt (InfSigCtxt n) = ptext (sLit "the inferred type for") <+> quotes (ppr n) pprUserTypeCtxt (FunSigCtxt n) = ptext (sLit "the type signature for") <+> quotes (ppr n) -pprUserTypeCtxt (RuleSigCtxt n) = ptext (sLit "a RULE for") <+> quotes (ppr n) +pprUserTypeCtxt (RuleSigCtxt n) = ptext (sLit "a RULE for") <+> quotes (ppr n) pprUserTypeCtxt ExprSigCtxt = ptext (sLit "an expression type signature") pprUserTypeCtxt (ConArgCtxt c) = ptext (sLit "the type of the constructor") <+> quotes (ppr c) pprUserTypeCtxt (TySynCtxt c) = ptext (sLit "the RHS of the type synonym") <+> quotes (ppr c) @@ -734,7 +736,7 @@ mkTcEqPred :: TcType -> TcType -> Type mkTcEqPred ty1 ty2 = mkTyConApp eqTyCon [k, ty1, ty2] where - k = defaultKind (typeKind ty1) + k = typeKind ty1 \end{code} @isTauTy@ tests for nested for-alls. It should not be called on a boxy type. @@ -961,7 +963,7 @@ tcInstHeadTyNotSynonym :: Type -> Bool -- are transparent, so we need a special function here tcInstHeadTyNotSynonym ty = case ty of - TyConApp tc _ -> not (isSynTyCon tc) + TyConApp tc _ -> not (isTypeSynonymTyCon tc) _ -> True tcInstHeadTyAppAllTyVars :: Type -> Bool diff --git a/compiler/typecheck/TcUnify.lhs b/compiler/typecheck/TcUnify.lhs index 1447448973..ef06ddd263 100644 --- a/compiler/typecheck/TcUnify.lhs +++ b/compiler/typecheck/TcUnify.lhs @@ -6,7 +6,8 @@ Type subsumption and unification \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/typecheck/TcValidity.lhs b/compiler/typecheck/TcValidity.lhs index 84453eb700..b5e6d64522 100644 --- a/compiler/typecheck/TcValidity.lhs +++ b/compiler/typecheck/TcValidity.lhs @@ -4,6 +4,8 @@ % \begin{code} +{-# LANGUAGE CPP #-} + module TcValidity ( Rank, UserTypeCtxt(..), checkValidType, checkValidMonoType, expectedKindInCtxt, @@ -44,7 +46,6 @@ import ListSetOps import SrcLoc import Outputable import FastString -import BasicTypes ( Arity ) import Control.Monad import Data.Maybe @@ -67,13 +68,21 @@ checkAmbiguity ctxt ty -- Then :k T should work in GHCi, not complain that -- (T k) is ambiguous! + | InfSigCtxt {} <- ctxt -- See Note [Validity of inferred types] in TcBinds + = return () + | otherwise = do { traceTc "Ambiguity check for" (ppr ty) - ; (subst, _tvs) <- tcInstSkolTyVars (varSetElems (tyVarsOfType ty)) + ; let free_tkvs = varSetElemsKvsFirst (closeOverKinds (tyVarsOfType ty)) + ; (subst, _tvs) <- tcInstSkolTyVars free_tkvs ; let ty' = substTy subst ty - -- The type might have free TyVars, - -- so we skolemise them as TcTyVars + -- The type might have free TyVars, esp when the ambiguity check + -- happens during a call to checkValidType, + -- so we skolemise them as TcTyVars. -- Tiresome; but the type inference engine expects TcTyVars + -- NB: The free tyvar might be (a::k), so k is also free + -- and we must skolemise it as well. Hence closeOverKinds. + -- (Trac #9222) -- Solve the constraints eagerly because an ambiguous type -- can cause a cascade of further errors. 
Since the free @@ -285,7 +294,7 @@ check_type ctxt rank (AppTy ty1 ty2) ; check_arg_type ctxt rank ty2 } check_type ctxt rank ty@(TyConApp tc tys) - | isSynTyCon tc = check_syn_tc_app ctxt rank ty tc tys + | isTypeSynonymTyCon tc = check_syn_tc_app ctxt rank ty tc tys | isUnboxedTupleTyCon tc = check_ubx_tuple ctxt ty tys | otherwise = mapM_ (check_arg_type ctxt rank) tys @@ -506,7 +515,7 @@ okIPCtxt (SpecInstCtxt {}) = False okIPCtxt _ = True badIPPred :: PredType -> SDoc -badIPPred pred = ptext (sLit "Illegal implict parameter") <+> quotes (ppr pred) +badIPPred pred = ptext (sLit "Illegal implicit parameter") <+> quotes (ppr pred) check_eq_pred :: DynFlags -> UserTypeCtxt -> PredType -> TcType -> TcType -> TcM () @@ -650,7 +659,7 @@ unambiguous. See Note [Impedence matching] in TcBinds. This test is very conveniently implemented by calling tcSubType <type> <type> This neatly takes account of the functional dependecy stuff above, -and implict parameter (see Note [Implicit parameters and ambiguity]). +and implicit parameter (see Note [Implicit parameters and ambiguity]). What about this, though? g :: C [a] => Int @@ -765,11 +774,10 @@ checkValidInstHead ctxt clas cls_args ; checkTc (xopt Opt_FlexibleInstances dflags || all tcInstHeadTyAppAllTyVars ty_args) (instTypeErr clas cls_args head_type_args_tyvars_msg) - ; checkTc (xopt Opt_NullaryTypeClasses dflags || - not (null ty_args)) - (instTypeErr clas cls_args head_no_type_msg) ; checkTc (xopt Opt_MultiParamTypeClasses dflags || - length ty_args <= 1) -- Only count type arguments + length ty_args == 1 || -- Only count type arguments + (xopt Opt_NullaryTypeClasses dflags && + null ty_args)) (instTypeErr clas cls_args head_one_type_msg) } -- May not contain type family applications @@ -799,11 +807,7 @@ checkValidInstHead ctxt clas cls_args head_one_type_msg = parens ( text "Only one type can be given in an instance head." $$ - text "Use MultiParamTypeClasses if you want to allow more.") - - head_no_type_msg = parens ( - text "No parameters in the instance head." $$ - text "Use NullaryTypeClasses if you want to allow this.") + text "Use MultiParamTypeClasses if you want to allow more, or zero.") abstract_class_msg = text "The class is abstract, manual instances are not permitted." @@ -1160,26 +1164,18 @@ checkValidFamPats :: TyCon -> [TyVar] -> [Type] -> TcM () -- type instance F (T a) = a -- c) Have the right number of patterns checkValidFamPats fam_tc tvs ty_pats - = do { -- A family instance must have exactly the same number of type - -- parameters as the family declaration. You can't write - -- type family F a :: * -> * - -- type instance F Int y = y - -- because then the type (F Int) would be like (\y.y) - checkTc (length ty_pats == fam_arity) $ - wrongNumberOfParmsErr (fam_arity - length fam_kvs) -- report only types - ; mapM_ checkTyFamFreeness ty_pats + = ASSERT( length ty_pats == tyConArity fam_tc ) + -- A family instance must have exactly the same number of type + -- parameters as the family declaration. 
You can't write + -- type family F a :: * -> * + -- type instance F Int y = y + -- because then the type (F Int) would be like (\y.y) + -- But this is checked at the time the axiom is created + do { mapM_ checkTyFamFreeness ty_pats ; let unbound_tvs = filterOut (`elemVarSet` exactTyVarsOfTypes ty_pats) tvs ; checkTc (null unbound_tvs) (famPatErr fam_tc unbound_tvs ty_pats) } - where fam_arity = tyConArity fam_tc - (fam_kvs, _) = splitForAllTys (tyConKind fam_tc) - -wrongNumberOfParmsErr :: Arity -> SDoc -wrongNumberOfParmsErr exp_arity - = ptext (sLit "Number of parameters must match family declaration; expected") - <+> ppr exp_arity -- Ensure that no type family instances occur in a type. --- checkTyFamFreeness :: Type -> TcM () checkTyFamFreeness ty = checkTc (isTyFamFree ty) $ diff --git a/compiler/types/Class.lhs b/compiler/types/Class.lhs index 2d145683bf..9863b8d98f 100644 --- a/compiler/types/Class.lhs +++ b/compiler/types/Class.lhs @@ -6,7 +6,8 @@ The @Class@ datatype \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, DeriveDataTypeable #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -16,7 +17,7 @@ The @Class@ datatype module Class ( Class, ClassOpItem, DefMeth (..), - ClassATItem, + ClassATItem(..), ClassMinimalDef, defMethSpecOfDefMeth, @@ -31,8 +32,7 @@ module Class ( #include "HsVersions.h" import {-# SOURCE #-} TyCon ( TyCon, tyConName, tyConUnique ) -import {-# SOURCE #-} TypeRep ( PredType ) -import CoAxiom +import {-# SOURCE #-} TypeRep ( Type, PredType ) import Var import Name import BasicTypes @@ -99,10 +99,10 @@ data DefMeth = NoDefMeth -- No default method | GenDefMeth Name -- A generic default method deriving Eq -type ClassATItem = (TyCon, -- See Note [Associated type tyvar names] - [CoAxBranch]) -- Default associated types from these templates - -- We can have more than one default per type; see - -- Note [Associated type defaults] in TcTyClsDecls +data ClassATItem + = ATI TyCon -- See Note [Associated type tyvar names] + (Maybe Type) -- Default associated type (if any) from this template + -- Note [Associated type defaults] type ClassMinimalDef = BooleanFormula Name -- Required methods @@ -114,9 +114,39 @@ defMethSpecOfDefMeth meth NoDefMeth -> NoDM DefMeth _ -> VanillaDM GenDefMeth _ -> GenericDM - \end{code} +Note [Associated type defaults] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The following is an example of associated type defaults: + class C a where + data D a r + + type F x a b :: * + type F p q r = (p,q)->r -- Default + +Note that + + * The TyCons for the associated types *share type variables* with the + class, so that we can tell which argument positions should be + instantiated in an instance decl. (The first for 'D', the second + for 'F'.) + + * We can have default definitions only for *type* families, + not data families + + * In the default decl, the "patterns" should all be type variables, + but (in the source language) they don't need to be the same as in + the 'type' decl signature or the class. It's more like a + free-standing 'type instance' declaration. + + * HOWEVER, in the internal ClassATItem we rename the RHS to match the + tyConTyVars of the family TyCon. 
So in the example above we'd get + a ClassATItem of + ATI F ((x,a) -> b) + So the tyConTyVars of the family TyCon bind the free vars of + the default Type rhs + The @mkClass@ function fills in the indirect superclasses. \begin{code} @@ -197,7 +227,7 @@ classOpItems = classOpStuff classATs :: Class -> [TyCon] classATs (Class { classATStuff = at_stuff }) - = [tc | (tc, _) <- at_stuff] + = [tc | ATI tc _ <- at_stuff] classATItems :: Class -> [ClassATItem] classATItems = classATStuff diff --git a/compiler/types/CoAxiom.lhs b/compiler/types/CoAxiom.lhs index d6122b21e6..06b74a43f0 100644 --- a/compiler/types/CoAxiom.lhs +++ b/compiler/types/CoAxiom.lhs @@ -4,7 +4,7 @@ \begin{code} -{-# LANGUAGE GADTs, ScopedTypeVariables #-} +{-# LANGUAGE CPP, DeriveDataTypeable, GADTs, ScopedTypeVariables #-} -- | Module for coercion axioms, used to represent type family instances -- and newtypes diff --git a/compiler/types/Coercion.lhs b/compiler/types/Coercion.lhs index af2b2fa483..2f499b704b 100644 --- a/compiler/types/Coercion.lhs +++ b/compiler/types/Coercion.lhs @@ -3,6 +3,8 @@ % \begin{code} +{-# LANGUAGE CPP, DeriveDataTypeable #-} + -- | Module for (a) type kinds and (b) type coercions, -- as used in System FC. See 'CoreSyn.Expr' for -- more on System FC and how coercions fit into it. @@ -16,7 +18,7 @@ module Coercion ( -- ** Functions over coercions coVarKind, coVarRole, coercionType, coercionKind, coercionKinds, isReflCo, - isReflCo_maybe, coercionRole, + isReflCo_maybe, coercionRole, coercionKindRole, mkCoercionType, -- ** Constructing coercions @@ -27,7 +29,7 @@ module Coercion ( mkSymCo, mkTransCo, mkNthCo, mkNthCoRole, mkLRCo, mkInstCo, mkAppCo, mkAppCoFlexible, mkTyConAppCo, mkFunCo, mkForAllCo, mkUnsafeCo, mkUnivCo, mkSubCo, mkPhantomCo, - mkNewTypeCo, maybeSubCo, maybeSubCo2, + mkNewTypeCo, downgradeRole, mkAxiomRuleCo, -- ** Decomposition @@ -38,7 +40,7 @@ module Coercion ( splitAppCo_maybe, splitForAllCo_maybe, nthRole, tyConRolesX, - nextRole, + nextRole, setNominalRole_maybe, -- ** Coercion variables mkCoVar, isCoVar, isCoVarType, coVarName, setCoVarName, setCoVarUnique, @@ -102,8 +104,10 @@ import PrelNames ( funTyConKey, eqPrimTyConKey, eqReprPrimTyConKey ) import Control.Applicative import Data.Traversable (traverse, sequenceA) import FastString +import ListSetOps import qualified Data.Data as Data hiding ( TyCon ) +import Control.Arrow ( first ) \end{code} %************************************************************************ @@ -632,7 +636,7 @@ pprCo, pprParendCo :: Coercion -> SDoc pprCo co = ppr_co TopPrec co pprParendCo co = ppr_co TyConPrec co -ppr_co :: Prec -> Coercion -> SDoc +ppr_co :: TyPrec -> Coercion -> SDoc ppr_co _ (Refl r ty) = angleBrackets (ppr ty) <> ppr_role r ppr_co p co@(TyConAppCo _ tc [_,_]) @@ -695,7 +699,7 @@ instance Outputable LeftOrRight where ppr CLeft = ptext (sLit "Left") ppr CRight = ptext (sLit "Right") -ppr_fun_co :: Prec -> Coercion -> SDoc +ppr_fun_co :: TyPrec -> Coercion -> SDoc ppr_fun_co p co = pprArrowChain p (split co) where split :: Coercion -> [SDoc] @@ -704,7 +708,7 @@ ppr_fun_co p co = pprArrowChain p (split co) = ppr_co FunPrec arg : split res split co = [ppr_co TopPrec co] -ppr_forall_co :: Prec -> Coercion -> SDoc +ppr_forall_co :: TyPrec -> Coercion -> SDoc ppr_forall_co p ty = maybeParen p FunPrec $ sep [pprForAll tvs, ppr_co TopPrec rho] @@ -724,7 +728,7 @@ pprCoAxBranch :: TyCon -> CoAxBranch -> SDoc pprCoAxBranch fam_tc (CoAxBranch { cab_tvs = tvs , cab_lhs = lhs , cab_rhs = rhs }) - = hang (ifPprDebug (pprForAll tvs)) + = hang 
(pprUserForAll tvs) 2 (hang (pprTypeApp fam_tc lhs) 2 (equals <+> (ppr rhs))) pprCoAxBranchHdr :: CoAxiom br -> BranchIndex -> SDoc @@ -770,7 +774,7 @@ splitAppCo_maybe (AppCo co1 co2) = Just (co1, co2) splitAppCo_maybe (TyConAppCo r tc cos) | isDecomposableTyCon tc || cos `lengthExceeds` tyConArity tc , Just (cos', co') <- snocView cos - , Just co'' <- unSubCo_maybe co' + , Just co'' <- setNominalRole_maybe co' = Just (mkTyConAppCo r tc cos', co'') -- Never create unsaturated type family apps! -- Use mkTyConAppCo to preserve the invariant -- that identity coercions are always represented by Refl @@ -829,6 +833,55 @@ isReflCo_maybe _ = Nothing %* * %************************************************************************ +Note [Role twiddling functions] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +There are a plethora of functions for twiddling roles: + +mkSubCo: Requires a nominal input coercion and always produces a +representational output. This is used when you (the programmer) are sure you +know exactly that role you have and what you want. + +setRole_maybe: This function takes both the input role and the output role +as parameters. (The *output* role comes first!) It can only *downgrade* a +role -- that is, change it from N to R or P, or from R to P. This one-way +behavior is why there is the "_maybe". If an upgrade is requested, this +function produces Nothing. This is used when you need to change the role of a +coercion, but you're not sure (as you're writing the code) of which roles are +involved. + +This function could have been written using coercionRole to ascertain the role +of the input. But, that function is recursive, and the caller of setRole_maybe +often knows the input role. So, this is more efficient. + +downgradeRole: This is just like setRole_maybe, but it panics if the conversion +isn't a downgrade. + +setNominalRole_maybe: This is the only function that can *upgrade* a coercion. The result +(if it exists) is always Nominal. The input can be at any role. It works on a +"best effort" basis, as it should never be strictly necessary to upgrade a coercion +during compilation. It is currently only used within GHC in splitAppCo_maybe. In order +to be a proper inverse of mkAppCo, the second coercion that splitAppCo_maybe returns +must be nominal. But, it's conceivable that splitAppCo_maybe is operating over a +TyConAppCo that uses a representational coercion. Hence the need for setNominalRole_maybe. +splitAppCo_maybe, in turn, is used only within coercion optimization -- thus, it is +not absolutely critical that setNominalRole_maybe be complete. + +Note that setNominalRole_maybe will never upgrade a phantom UnivCo. Phantom +UnivCos are perfectly type-safe, whereas representational and nominal ones are +not. Indeed, `unsafeCoerce` is implemented via a representational UnivCo. +(Nominal ones are no worse than representational ones, so this function *will* +change a UnivCo Representational to a UnivCo Nominal.) + +Conal Elliott also came across a need for this function while working with the GHC +API, as he was decomposing Core casts. The Core casts use representational coercions, +as they must, but his use case required nominal coercions (he was building a GADT). +So, that's why this function is exported from this module. + +One might ask: shouldn't setRole_maybe just use setNominalRole_maybe as appropriate? +I (Richard E.) have decided not to do this, because upgrading a role is bizarre and +a caller should have to ask for this behavior explicitly. 
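As a rough, self-contained illustration of the one-way "downgrade" order described
above (purely editorial: GHC's setRole_maybe, downgradeRole and setNominalRole_maybe
act on Coercions, not on bare Roles, and the names below are invented):

    data RoleSketch = Nominal | Representational | Phantom
      deriving (Eq, Ord, Show)            -- Nominal < Representational < Phantom

    -- Succeeds only when the desired role is at or below the current one
    downgradeSketch :: RoleSketch -> RoleSketch -> Maybe RoleSketch
    downgradeSketch desired current
      | desired >= current = Just desired -- N->R, N->P, R->P, or no change
      | otherwise          = Nothing      -- upgrades are refused

For example, downgradeSketch Representational Nominal is Just Representational,
while downgradeSketch Nominal Representational is Nothing, mirroring the Nothing
cases of setRole_maybe.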
+ \begin{code} mkCoVarCo :: CoVar -> Coercion -- cv :: s ~# t @@ -845,9 +898,9 @@ mkAxInstCo :: Role -> CoAxiom br -> BranchIndex -> [Type] -> Coercion -- mkAxInstCo can legitimately be called over-staturated; -- i.e. with more type arguments than the coercion requires mkAxInstCo role ax index tys - | arity == n_tys = maybeSubCo2 role ax_role $ AxiomInstCo ax_br index rtys + | arity == n_tys = downgradeRole role ax_role $ AxiomInstCo ax_br index rtys | otherwise = ASSERT( arity < n_tys ) - maybeSubCo2 role ax_role $ + downgradeRole role ax_role $ foldl AppCo (AxiomInstCo ax_br index (take arity rtys)) (drop arity rtys) where @@ -899,10 +952,12 @@ mkAppCo co1 co2 = mkAppCoFlexible co1 Nominal co2 mkAppCoFlexible :: Coercion -> Role -> Coercion -> Coercion mkAppCoFlexible (Refl r ty1) _ (Refl _ ty2) = Refl r (mkAppTy ty1 ty2) -mkAppCoFlexible (Refl r (TyConApp tc tys)) r2 co2 +mkAppCoFlexible (Refl r ty1) r2 co2 + | Just (tc, tys) <- splitTyConApp_maybe ty1 + -- Expand type synonyms; a TyConAppCo can't have a type synonym (Trac #9102) = TyConAppCo r tc (zip_roles (tyConRolesX r tc) tys) where - zip_roles (r1:_) [] = [maybeSubCo2 r1 r2 co2] + zip_roles (r1:_) [] = [downgradeRole r1 r2 co2] zip_roles (r1:rs) (ty1:tys) = mkReflCo r1 ty1 : zip_roles rs tys zip_roles _ _ = panic "zip_roles" -- but the roles are infinite... mkAppCoFlexible (TyConAppCo r tc cos) r2 co @@ -911,7 +966,7 @@ mkAppCoFlexible (TyConAppCo r tc cos) r2 co TyConAppCo Nominal tc (cos ++ [co]) Representational -> TyConAppCo Representational tc (cos ++ [co']) where new_role = (tyConRolesX Representational tc) !! (length cos) - co' = maybeSubCo2 new_role r2 co + co' = downgradeRole new_role r2 co Phantom -> TyConAppCo Phantom tc (cos ++ [mkPhantomCo co]) mkAppCoFlexible co1 _r2 co2 = ASSERT( _r2 == Nominal ) @@ -970,7 +1025,7 @@ mkTransCo co1 co2 = TransCo co1 co2 -- sure this request is reasonable mkNthCoRole :: Role -> Int -> Coercion -> Coercion mkNthCoRole role n co - = maybeSubCo2 role nth_role $ nth_co + = downgradeRole role nth_role $ nth_co where nth_co = mkNthCo n co nth_role = coercionRole nth_co @@ -999,10 +1054,9 @@ ok_tc_app ty n = case splitTyConApp_maybe ty of mkInstCo :: Coercion -> Type -> Coercion mkInstCo co ty = InstCo co ty --- | Manufacture a coercion from thin air. Needless to say, this is --- not usually safe, but it is used when we know we are dealing with --- bottom, which is one case in which it is safe. This is also used --- to implement the @unsafeCoerce#@ primitive. Optimise by pushing +-- | Manufacture an unsafe coercion from thin air. +-- Currently (May 14) this is used only to implement the +-- @unsafeCoerce#@ primitive. Optimise by pushing -- down through type constructors. 
mkUnsafeCo :: Type -> Type -> Coercion mkUnsafeCo = mkUnivCo Representational @@ -1015,7 +1069,7 @@ mkUnivCo role ty1 ty2 mkAxiomRuleCo :: CoAxiomRule -> [Type] -> [Coercion] -> Coercion mkAxiomRuleCo = AxiomRuleCo --- input coercion is Nominal +-- input coercion is Nominal; see also Note [Role twiddling functions] mkSubCo :: Coercion -> Coercion mkSubCo (Refl Nominal ty) = Refl Representational ty mkSubCo (TyConAppCo Nominal tc cos) @@ -1024,44 +1078,51 @@ mkSubCo (UnivCo Nominal ty1 ty2) = UnivCo Representational ty1 ty2 mkSubCo co = ASSERT2( coercionRole co == Nominal, ppr co <+> ppr (coercionRole co) ) SubCo co - --- takes a Nominal coercion and possibly casts it into a Representational one -maybeSubCo :: Role -> Coercion -> Coercion -maybeSubCo Nominal = id -maybeSubCo Representational = mkSubCo -maybeSubCo Phantom = pprPanic "maybeSubCo Phantom" . ppr - -maybeSubCo2_maybe :: Role -- desired role - -> Role -- current role - -> Coercion -> Maybe Coercion -maybeSubCo2_maybe Representational Nominal = Just . mkSubCo -maybeSubCo2_maybe Nominal Representational = const Nothing -maybeSubCo2_maybe Phantom Phantom = Just -maybeSubCo2_maybe Phantom _ = Just . mkPhantomCo -maybeSubCo2_maybe _ Phantom = const Nothing -maybeSubCo2_maybe _ _ = Just - -maybeSubCo2 :: Role -- desired role - -> Role -- current role - -> Coercion -> Coercion -maybeSubCo2 r1 r2 co - = case maybeSubCo2_maybe r1 r2 co of +-- only *downgrades* a role. See Note [Role twiddling functions] +setRole_maybe :: Role -- desired role + -> Role -- current role + -> Coercion -> Maybe Coercion +setRole_maybe Representational Nominal = Just . mkSubCo +setRole_maybe Nominal Representational = const Nothing +setRole_maybe Phantom Phantom = Just +setRole_maybe Phantom _ = Just . mkPhantomCo +setRole_maybe _ Phantom = const Nothing +setRole_maybe _ _ = Just + +-- panics if the requested conversion is not a downgrade. +-- See also Note [Role twiddling functions] +downgradeRole :: Role -- desired role + -> Role -- current role + -> Coercion -> Coercion +downgradeRole r1 r2 co + = case setRole_maybe r1 r2 co of Just co' -> co' - Nothing -> pprPanic "maybeSubCo2" (ppr co) - --- if co is Nominal, returns it; otherwise, unwraps a SubCo; otherwise, fails -unSubCo_maybe :: Coercion -> Maybe Coercion -unSubCo_maybe (SubCo co) = Just co -unSubCo_maybe (Refl _ ty) = Just $ Refl Nominal ty -unSubCo_maybe (TyConAppCo Representational tc cos) - = do { cos' <- mapM unSubCo_maybe cos + Nothing -> pprPanic "downgradeRole" (ppr co) + +-- Converts a coercion to be nominal, if possible. +-- See also Note [Role twiddling functions] +setNominalRole_maybe :: Coercion -> Maybe Coercion +setNominalRole_maybe co + | Nominal <- coercionRole co = Just co +setNominalRole_maybe (SubCo co) = Just co +setNominalRole_maybe (Refl _ ty) = Just $ Refl Nominal ty +setNominalRole_maybe (TyConAppCo Representational tc coes) + = do { cos' <- mapM setNominalRole_maybe coes ; return $ TyConAppCo Nominal tc cos' } -unSubCo_maybe (UnivCo Representational ty1 ty2) = Just $ UnivCo Nominal ty1 ty2 +setNominalRole_maybe (UnivCo Representational ty1 ty2) = Just $ UnivCo Nominal ty1 ty2 -- We do *not* promote UnivCo Phantom, as that's unsafe. 
-- UnivCo Nominal is no more unsafe than UnivCo Representational -unSubCo_maybe co - | Nominal <- coercionRole co = Just co -unSubCo_maybe _ = Nothing +setNominalRole_maybe (TransCo co1 co2) + = TransCo <$> setNominalRole_maybe co1 <*> setNominalRole_maybe co2 +setNominalRole_maybe (AppCo co1 co2) + = AppCo <$> setNominalRole_maybe co1 <*> pure co2 +setNominalRole_maybe (ForAllCo tv co) + = ForAllCo tv <$> setNominalRole_maybe co +setNominalRole_maybe (NthCo n co) + = NthCo n <$> setNominalRole_maybe co +setNominalRole_maybe (InstCo co ty) + = InstCo <$> setNominalRole_maybe co <*> pure ty +setNominalRole_maybe _ = Nothing -- takes any coercion and turns it into a Phantom coercion mkPhantomCo :: Coercion -> Coercion @@ -1556,7 +1617,7 @@ failing for reason 2) is fine. matchAxiom is trying to find a set of coercions that match, but it may fail, and this is healthy behavior. Bottom line: if you find that liftCoSubst is doing weird things (like leaving out-of-scope variables lying around), disable coercion optimization (bypassing matchAxiom) -and use maybeSubCo2 instead of maybeSubCo2_maybe. The panic will then happen, +and use downgradeRole instead of setRole_maybe. The panic will then happen, and you may learn something useful. \begin{code} @@ -1566,7 +1627,7 @@ liftCoSubstTyVar (LCS _ cenv) r tv = do { co <- lookupVarEnv cenv tv ; let co_role = coercionRole co -- could theoretically take this as -- a parameter, but painful - ; maybeSubCo2_maybe r co_role co } -- see Note [liftCoSubstTyVar] + ; setRole_maybe r co_role co } -- see Note [liftCoSubstTyVar] liftCoSubstTyVarBndr :: LiftCoSubst -> TyVar -> (LiftCoSubst, TyVar) liftCoSubstTyVarBndr subst@(LCS in_scope cenv) old_var @@ -1733,10 +1794,23 @@ seqCos (co:cos) = seqCo co `seq` seqCos cos %* * %************************************************************************ +Note [Computing a coercion kind and role] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +To compute a coercion's kind is straightforward: see coercionKind. +But to compute a coercion's role, in the case for NthCo we need +its kind as well. So if we have two separate functions (one for kinds +and one for roles) we can get exponentially bad behaviour, sinc each +NthCo node makes a seaprate call to coercionKind, which traverses the +sub-tree again. This was part of the problem in Trac #9233. + +Solution: compute both together; hence coercionKindRole. We keep a +separate coercionKind function because it's a bit more efficient if +the kind is all you wan. + \begin{code} coercionType :: Coercion -> Type -coercionType co = case coercionKind co of - Pair ty1 ty2 -> mkCoercionType (coercionRole co) ty1 ty2 +coercionType co = case coercionKindRole co of + (Pair ty1 ty2, r) -> mkCoercionType r ty1 ty2 ------------------ -- | If it is the case that @@ -1768,11 +1842,10 @@ coercionKind co = go co go (InstCo aco ty) = go_app aco [ty] go (SubCo co) = go co go (AxiomRuleCo ax tys cos) = - case coaxrProves ax tys (map coercionKind cos) of + case coaxrProves ax tys (map go cos) of Just res -> res Nothing -> panic "coercionKind: Malformed coercion" - go_app :: Coercion -> [Type] -> Pair Type -- Collect up all the arguments and apply all at once -- See Note [Nested InstCos] @@ -1783,25 +1856,54 @@ coercionKind co = go co coercionKinds :: [Coercion] -> Pair [Type] coercionKinds tys = sequenceA $ map coercionKind tys -coercionRole :: Coercion -> Role -coercionRole = go +-- | Get a coercion's kind and role. +-- Why both at once? 
See Note [Computing a coercion kind and role] +coercionKindRole :: Coercion -> (Pair Type, Role) +coercionKindRole = go where - go (Refl r _) = r - go (TyConAppCo r _ _) = r - go (AppCo co _) = go co - go (ForAllCo _ co) = go co - go (CoVarCo cv) = coVarRole cv - go (AxiomInstCo ax _ _) = coAxiomRole ax - go (UnivCo r _ _) = r - go (SymCo co) = go co - go (TransCo co1 _) = go co1 -- same as go co2 - go (NthCo n co) = let Pair ty1 _ = coercionKind co - (tc, _) = splitTyConApp ty1 - in nthRole (coercionRole co) tc n - go (LRCo _ _) = Nominal - go (InstCo co _) = go co - go (SubCo _) = Representational - go (AxiomRuleCo c _ _) = coaxrRole c + go (Refl r ty) = (Pair ty ty, r) + go (TyConAppCo r tc cos) + = (mkTyConApp tc <$> (sequenceA $ map coercionKind cos), r) + go (AppCo co1 co2) + = let (tys1, r1) = go co1 in + (mkAppTy <$> tys1 <*> coercionKind co2, r1) + go (ForAllCo tv co) + = let (tys, r) = go co in + (mkForAllTy tv <$> tys, r) + go (CoVarCo cv) = (toPair $ coVarKind cv, coVarRole cv) + go co@(AxiomInstCo ax _ _) = (coercionKind co, coAxiomRole ax) + go (UnivCo r ty1 ty2) = (Pair ty1 ty2, r) + go (SymCo co) = first swap $ go co + go (TransCo co1 co2) + = let (tys1, r) = go co1 in + (Pair (pFst tys1) (pSnd $ coercionKind co2), r) + go (NthCo d co) + = let (Pair t1 t2, r) = go co + (tc1, args1) = splitTyConApp t1 + (_tc2, args2) = splitTyConApp t2 + in + ASSERT( tc1 == _tc2 ) + ((`getNth` d) <$> Pair args1 args2, nthRole r tc1 d) + go co@(LRCo {}) = (coercionKind co, Nominal) + go (InstCo co ty) = go_app co [ty] + go (SubCo co) = (coercionKind co, Representational) + go co@(AxiomRuleCo ax _ _) = (coercionKind co, coaxrRole ax) + + go_app :: Coercion -> [Type] -> (Pair Type, Role) + -- Collect up all the arguments and apply all at once + -- See Note [Nested InstCos] + go_app (InstCo co ty) tys = go_app co (ty:tys) + go_app co tys + = let (pair, r) = go co in + ((`applyTys` tys) <$> pair, r) + +-- | Retrieve the role from a coercion. +coercionRole :: Coercion -> Role +coercionRole = snd . coercionKindRole + -- There's not a better way to do this, because NthCo needs the *kind* + -- and role of its argument. Luckily, laziness should generally avoid + -- the need for computing kinds in other cases. 
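-- Editorial sketch, not from this patch: the saving that coercionKindRole
-- buys can be reproduced on a toy tree.  'sizeT' plays the part of
-- coercionKind and 'roleSlow'/'roleFast' the part of coercionRole; every
-- name below is invented and nothing here is GHC's own code.

data CoSketch = LeafS | NthS CoSketch | TransS CoSketch CoSketch

sizeT :: CoSketch -> Int                    -- stands in for coercionKind
sizeT LeafS        = 1
sizeT (NthS c)     = 1 + sizeT c
sizeT (TransS a b) = 1 + sizeT a + sizeT b

roleSlow :: CoSketch -> Int                 -- re-walks the tree at each NthS
roleSlow LeafS        = 0
roleSlow (NthS c)     = sizeT c `max` roleSlow c   -- needs both facts about c
roleSlow (TransS a _) = roleSlow a

roleFast :: CoSketch -> (Int, Int)          -- one pass, like coercionKindRole
roleFast LeafS        = (1, 0)
roleFast (NthS c)     = case roleFast c of (s, r) -> (1 + s, s `max` r)
roleFast (TransS a b) = case (roleFast a, roleFast b) of
  ((sa, ra), (sb, _)) -> (1 + sa + sb, ra)

-- On a chain of nested NthS nodes roleSlow re-traverses every sub-tree,
-- while roleFast visits each node once: the same shape of saving that
-- coercionKindRole gives for Trac #9233.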
+ \end{code} Note [Nested InstCos] diff --git a/compiler/types/FamInstEnv.lhs b/compiler/types/FamInstEnv.lhs index 50ced7d323..fcf7cb443f 100644 --- a/compiler/types/FamInstEnv.lhs +++ b/compiler/types/FamInstEnv.lhs @@ -5,13 +5,12 @@ FamInstEnv: Type checked family instance declarations \begin{code} - -{-# LANGUAGE GADTs #-} +{-# LANGUAGE CPP, GADTs #-} module FamInstEnv ( FamInst(..), FamFlavor(..), famInstAxiom, famInstTyCon, famInstRHS, famInstsRepTyCons, famInstRepTyCon_maybe, dataFamInstRepTyCon, - pprFamInst, pprFamInstHdr, pprFamInsts, + pprFamInst, pprFamInsts, mkImportedFamInst, FamInstEnvs, FamInstEnv, emptyFamInstEnv, emptyFamInstEnvs, @@ -167,12 +166,13 @@ instance Outputable FamInst where ppr = pprFamInst -- Prints the FamInst as a family instance declaration +-- NB: FamInstEnv.pprFamInst is used only for internal, debug printing +-- See pprTyThing.pprFamInst for printing for the user pprFamInst :: FamInst -> SDoc pprFamInst famInst = hang (pprFamInstHdr famInst) 2 (vcat [ ifPprDebug (ptext (sLit "Coercion axiom:") <+> ppr ax) - , ifPprDebug (ptext (sLit "RHS:") <+> ppr (famInstRHS famInst)) - , ptext (sLit "--") <+> pprDefinedAt (getName famInst)]) + , ifPprDebug (ptext (sLit "RHS:") <+> ppr (famInstRHS famInst)) ]) where ax = fi_axiom famInst @@ -199,6 +199,9 @@ pprFamInstHdr fi@(FamInst {fi_flavor = flavor}) else pprTypeApp fam_tc (etad_lhs_tys ++ mkTyVarTys extra_tvs) -- Without -dppr-debug, eta-expand -- See Trac #8674 + -- (This is probably over the top now that we use this + -- only for internal debug printing; PprTyThing.pprFamInst + -- is used for user-level printing.) | otherwise = vanilla_pp_head diff --git a/compiler/types/InstEnv.lhs b/compiler/types/InstEnv.lhs index 826537db17..be1cdb1e44 100644 --- a/compiler/types/InstEnv.lhs +++ b/compiler/types/InstEnv.lhs @@ -7,13 +7,16 @@ The bits common to TcInstDcls and TcDeriv. 
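Editorial note, not part of the patch: in the InstEnv hunk below, the lookup code
stops pattern-matching on OverlapFlag constructors and instead consults an
overlapMode accessor, while keeping the rule from Trac #3877 that overlap is
permitted when either instance asks for it.  A minimal stand-alone sketch of that
rule (constructor and function names invented, not GHC's):

    data ModeSketch = NoOverlapS | OverlapsS | IncoherentS
      deriving Eq

    -- overlap is allowed unless *both* instances forbid it
    overlapOkSketch :: ModeSketch -> ModeSketch -> Bool
    overlapOkSketch a b = not (a == NoOverlapS && b == NoOverlapS)

    -- A displaces B only if overlap is allowed and A is more specific than B
    beatsSketch :: ModeSketch -> Bool -> ModeSketch -> Bool
    beatsSketch modeA aMoreSpecific modeB =
      overlapOkSketch modeA modeB && aMoreSpecific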
\begin{code} +{-# LANGUAGE CPP, DeriveDataTypeable #-} + module InstEnv ( - DFunId, OverlapFlag(..), InstMatch, ClsInstLookupResult, - ClsInst(..), DFunInstType, pprInstance, pprInstanceHdr, pprInstances, + DFunId, InstMatch, ClsInstLookupResult, + OverlapFlag(..), OverlapMode(..), setOverlapModeMaybe, + ClsInst(..), DFunInstType, pprInstance, pprInstanceHdr, pprInstances, instanceHead, instanceSig, mkLocalInstance, mkImportedInstance, instanceDFunId, tidyClsInstDFun, instanceRoughTcs, - InstEnv, emptyInstEnv, extendInstEnv, overwriteInstEnv, + InstEnv, emptyInstEnv, extendInstEnv, overwriteInstEnv, extendInstEnvList, lookupUniqueInstEnv, lookupInstEnv', lookupInstEnv, instEnvElts, classInstances, orphNamesOfClsInst, instanceBindFun, instanceCantMatch, roughMatchTcs @@ -164,15 +167,13 @@ pprInstanceHdr :: ClsInst -> SDoc -- Prints the ClsInst as an instance declaration pprInstanceHdr (ClsInst { is_flag = flag, is_dfun = dfun }) = getPprStyle $ \ sty -> - let theta_to_print - | debugStyle sty = theta - | otherwise = drop (dfunNSilent dfun) theta + let dfun_ty = idType dfun + (tvs, theta, res_ty) = tcSplitSigmaTy dfun_ty + theta_to_print = drop (dfunNSilent dfun) theta -- See Note [Silent superclass arguments] in TcInstDcls - in ptext (sLit "instance") <+> ppr flag - <+> sep [pprThetaArrowTy theta_to_print, ppr res_ty] - where - (_, theta, res_ty) = tcSplitSigmaTy (idType dfun) - -- Print without the for-all, which the programmer doesn't write + ty_to_print | debugStyle sty = dfun_ty + | otherwise = mkSigmaTy tvs theta_to_print res_ty + in ptext (sLit "instance") <+> ppr flag <+> pprSigmaType ty_to_print pprInstances :: [ClsInst] -> SDoc pprInstances ispecs = vcat (map pprInstance ispecs) @@ -536,7 +537,7 @@ lookupInstEnv' ie cls tys -- Does not match, so next check whether the things unify -- See Note [Overlapping instances] and Note [Incoherent Instances] - | Incoherent _ <- oflag + | Incoherent <- overlapMode oflag = find ms us rest | otherwise @@ -635,11 +636,10 @@ insert_overlapping new_item (item:items) new_beats_old = new_item `beats` item old_beats_new = item `beats` new_item - incoherent (inst, _) = case is_flag inst of Incoherent _ -> True - _ -> False + incoherent (inst, _) = overlapMode (is_flag inst) == Incoherent (instA, _) `beats` (instB, _) - = overlap_ok && + = overlap_ok && isJust (tcMatchTys (mkVarSet (is_tvs instB)) (is_tys instB) (is_tys instA)) -- A beats B if A is more specific than B, -- (ie. if B can be instantiated to match A) @@ -648,9 +648,10 @@ insert_overlapping new_item (item:items) -- Overlap permitted if *either* instance permits overlap -- This is a change (Trac #3877, Dec 10). It used to -- require that instB (the less specific one) permitted overlap. - overlap_ok = case (is_flag instA, is_flag instB) of - (NoOverlap _, NoOverlap _) -> False - _ -> True + overlap_ok = case (overlapMode (is_flag instA), + overlapMode (is_flag instB)) of + (NoOverlap, NoOverlap) -> False + _ -> True \end{code} Note [Incoherent instances] diff --git a/compiler/types/Kind.lhs b/compiler/types/Kind.lhs index 793aa4a761..e4dc783124 100644 --- a/compiler/types/Kind.lhs +++ b/compiler/types/Kind.lhs @@ -3,7 +3,8 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See @@ -62,6 +63,7 @@ import PrelNames import Outputable import Maybes( orElse ) import Util +import FastString \end{code} %************************************************************************ @@ -96,14 +98,19 @@ during type inference. Hence cmpTc treats them as equal. \begin{code} -- | Essentially 'funResultTy' on kinds handling pi-types too -kindFunResult :: Kind -> KindOrType -> Kind -kindFunResult (FunTy _ res) _ = res -kindFunResult (ForAllTy kv res) arg = substKiWith [kv] [arg] res -kindFunResult k _ = pprPanic "kindFunResult" (ppr k) - -kindAppResult :: Kind -> [Type] -> Kind -kindAppResult k [] = k -kindAppResult k (a:as) = kindAppResult (kindFunResult k a) as +kindFunResult :: SDoc -> Kind -> KindOrType -> Kind +kindFunResult _ (FunTy _ res) _ = res +kindFunResult _ (ForAllTy kv res) arg = substKiWith [kv] [arg] res +#ifdef DEBUG +kindFunResult doc k _ = pprPanic "kindFunResult" (ppr k $$ doc) +#else +-- Without DEBUG, doc becomes an unsed arg, and will be optimised away +kindFunResult _ _ _ = panic "kindFunResult" +#endif + +kindAppResult :: SDoc -> Kind -> [Type] -> Kind +kindAppResult _ k [] = k +kindAppResult doc k (a:as) = kindAppResult doc (kindFunResult doc k a) as -- | Essentially 'splitFunTys' on kinds splitKindFunTys :: Kind -> ([Kind],Kind) @@ -127,7 +134,8 @@ splitKindFunTysN n k = pprPanic "splitKindFunTysN" (ppr n <+> ppr k) -- Actually this function works fine on data types too, -- but they'd always return '*', so we never need to ask synTyConResKind :: TyCon -> Kind -synTyConResKind tycon = kindAppResult (tyConKind tycon) (map mkTyVarTy (tyConTyVars tycon)) +synTyConResKind tycon = kindAppResult (ptext (sLit "synTyConResKind") <+> ppr tycon) + (tyConKind tycon) (map mkTyVarTy (tyConTyVars tycon)) -- | See "Type#kind_subtyping" for details of the distinction between these 'Kind's isOpenTypeKind, isUnliftedTypeKind, diff --git a/compiler/types/OptCoercion.lhs b/compiler/types/OptCoercion.lhs index bb2b9f888b..6eccf42588 100644 --- a/compiler/types/OptCoercion.lhs +++ b/compiler/types/OptCoercion.lhs @@ -3,7 +3,8 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -26,7 +27,6 @@ import VarEnv import StaticFlags ( opt_NoOptCoercion ) import Outputable import Pair -import Maybes import FastString import Util import Unify @@ -58,13 +58,29 @@ because now the co_B1 (which is really free) has been captured, and subsequent substitutions will go wrong. That's why we can't use mkCoPredTy in the ForAll case, where this note appears. +Note [Optimising coercion optimisation] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Looking up a coercion's role or kind is linear in the size of the +coercion. Thus, doing this repeatedly during the recursive descent +of coercion optimisation is disastrous. We must be careful to avoid +doing this if at all possible. + +Because it is generally easy to know a coercion's components' roles +from the role of the outer coercion, we pass down the known role of +the input in the algorithm below. We also keep functions opt_co2 +and opt_co3 separate from opt_co4, so that the former two do Phantom +checks that opt_co4 can avoid. This is a big win because Phantom coercions +rarely appear within non-phantom coercions -- only in some TyConAppCos +and some AxiomInstCos. 
We handle these cases specially by calling +opt_co2. + \begin{code} optCoercion :: CvSubst -> Coercion -> NormalCo -- ^ optCoercion applies a substitution to a coercion, -- *and* optimises it to reduce its size optCoercion env co | opt_NoOptCoercion = substCo env co - | otherwise = opt_co env False Nothing co + | otherwise = opt_co1 env False co type NormalCo = Coercion -- Invariants: @@ -75,20 +91,24 @@ type NormalCo = Coercion type NormalNonIdCo = NormalCo -- Extra invariant: not the identity -opt_co, opt_co' :: CvSubst - -> Bool -- True <=> return (sym co) - -> Maybe Role -- Nothing <=> don't change; otherwise, change - -- INVARIANT: the change is always a *downgrade* - -> Coercion - -> NormalCo -opt_co = opt_co' +-- | Do we apply a @sym@ to the result? +type SymFlag = Bool + +-- | Do we force the result to be representational? +type ReprFlag = Bool + +-- | Optimize a coercion, making no assumptions. +opt_co1 :: CvSubst + -> SymFlag + -> Coercion -> NormalCo +opt_co1 env sym co = opt_co2 env sym (coercionRole co) co {- opt_co env sym co = pprTrace "opt_co {" (ppr sym <+> ppr co $$ ppr env) $ co1 `seq` pprTrace "opt_co done }" (ppr co1) $ - (WARN( not same_co_kind, ppr co <+> dcolon <+> pprEqPred (Pair s1 t1) - $$ ppr co1 <+> dcolon <+> pprEqPred (Pair s2 t2) ) + (WARN( not same_co_kind, ppr co <+> dcolon <+> ppr (coercionType co) + $$ ppr co1 <+> dcolon <+> ppr (coercionType co1) ) WARN( not (coreEqCoercion co1 simple_result), (text "env=" <+> ppr env) $$ (text "input=" <+> ppr co) $$ @@ -107,111 +127,123 @@ opt_co env sym co | otherwise = substCo env co -} -opt_co' env _ mrole (Refl r ty) = Refl (mrole `orElse` r) (substTy env ty) -opt_co' env sym mrole co - | mrole == Just Phantom - || coercionRole co == Phantom - , Pair ty1 ty2 <- coercionKind co - = if sym - then opt_univ env Phantom ty2 ty1 - else opt_univ env Phantom ty1 ty2 - -opt_co' env sym mrole (SymCo co) = opt_co env (not sym) mrole co -opt_co' env sym mrole (TyConAppCo r tc cos) - = case mrole of - Nothing -> mkTyConAppCo r tc (map (opt_co env sym Nothing) cos) - Just r' -> mkTyConAppCo r' tc (zipWith (opt_co env sym) - (map Just (tyConRolesX r' tc)) cos) -opt_co' env sym mrole (AppCo co1 co2) = mkAppCo (opt_co env sym mrole co1) - (opt_co env sym Nothing co2) -opt_co' env sym mrole (ForAllCo tv co) +-- See Note [Optimising coercion optimisation] +-- | Optimize a coercion, knowing the coercion's role. No other assumptions. +opt_co2 :: CvSubst + -> SymFlag + -> Role -- ^ The role of the input coercion + -> Coercion -> NormalCo +opt_co2 env sym Phantom co = opt_phantom env sym co +opt_co2 env sym r co = opt_co3 env sym Nothing r co + +-- See Note [Optimising coercion optimisation] +-- | Optimize a coercion, knowing the coercion's non-Phantom role. +opt_co3 :: CvSubst -> SymFlag -> Maybe Role -> Role -> Coercion -> NormalCo +opt_co3 env sym (Just Phantom) _ co = opt_phantom env sym co +opt_co3 env sym (Just Representational) r co = opt_co4 env sym True r co + -- if mrole is Just Nominal, that can't be a downgrade, so we can ignore +opt_co3 env sym _ r co = opt_co4 env sym False r co + + +-- See Note [Optimising coercion optimisation] +-- | Optimize a non-phantom coercion. 
+opt_co4 :: CvSubst -> SymFlag -> ReprFlag -> Role -> Coercion -> NormalCo + +opt_co4 env _ rep r (Refl _r ty) + = ASSERT( r == _r ) + Refl (chooseRole rep r) (substTy env ty) + +opt_co4 env sym rep r (SymCo co) = opt_co4 env (not sym) rep r co + +opt_co4 env sym rep r g@(TyConAppCo _r tc cos) + = ASSERT( r == _r ) + case (rep, r) of + (True, Nominal) -> + mkTyConAppCo Representational tc + (zipWith3 (opt_co3 env sym) + (map Just (tyConRolesX Representational tc)) + (repeat Nominal) + cos) + (False, Nominal) -> + mkTyConAppCo Nominal tc (map (opt_co4 env sym False Nominal) cos) + (_, Representational) -> + -- must use opt_co2 here, because some roles may be P + -- See Note [Optimising coercion optimisation] + mkTyConAppCo r tc (zipWith (opt_co2 env sym) + (tyConRolesX r tc) -- the current roles + cos) + (_, Phantom) -> pprPanic "opt_co4 sees a phantom!" (ppr g) + +opt_co4 env sym rep r (AppCo co1 co2) = mkAppCo (opt_co4 env sym rep r co1) + (opt_co4 env sym False Nominal co2) +opt_co4 env sym rep r (ForAllCo tv co) = case substTyVarBndr env tv of - (env', tv') -> mkForAllCo tv' (opt_co env' sym mrole co) + (env', tv') -> mkForAllCo tv' (opt_co4 env' sym rep r co) -- Use the "mk" functions to check for nested Refls -opt_co' env sym mrole (CoVarCo cv) +opt_co4 env sym rep r (CoVarCo cv) | Just co <- lookupCoVar env cv - = opt_co (zapCvSubstEnv env) sym mrole co + = opt_co4 (zapCvSubstEnv env) sym rep r co | Just cv1 <- lookupInScope (getCvInScope env) cv - = ASSERT( isCoVar cv1 ) wrapRole mrole cv_role $ wrapSym sym (CoVarCo cv1) + = ASSERT( isCoVar cv1 ) wrapRole rep r $ wrapSym sym (CoVarCo cv1) -- cv1 might have a substituted kind! | otherwise = WARN( True, ptext (sLit "opt_co: not in scope:") <+> ppr cv $$ ppr env) ASSERT( isCoVar cv ) - wrapRole mrole cv_role $ wrapSym sym (CoVarCo cv) - where cv_role = coVarRole cv + wrapRole rep r $ wrapSym sym (CoVarCo cv) -opt_co' env sym mrole (AxiomInstCo con ind cos) +opt_co4 env sym rep r (AxiomInstCo con ind cos) -- Do *not* push sym inside top-level axioms -- e.g. if g is a top-level axiom -- g a : f a ~ a -- then (sym (g ty)) /= g (sym ty) !! - = wrapRole mrole (coAxiomRole con) $ + = ASSERT( r == coAxiomRole con ) + wrapRole rep (coAxiomRole con) $ wrapSym sym $ - AxiomInstCo con ind (map (opt_co env False Nothing) cos) + -- some sub-cos might be P: use opt_co2 + -- See Note [Optimising coercion optimisation] + AxiomInstCo con ind (zipWith (opt_co2 env False) + (coAxBranchRoles (coAxiomNthBranch con ind)) + cos) -- Note that the_co does *not* have sym pushed into it -opt_co' env sym mrole (UnivCo r oty1 oty2) - = opt_univ env role a b +opt_co4 env sym rep r (UnivCo _r oty1 oty2) + = ASSERT( r == _r ) + opt_univ env (chooseRole rep r) a b where (a,b) = if sym then (oty2,oty1) else (oty1,oty2) - role = mrole `orElse` r -opt_co' env sym mrole (TransCo co1 co2) - | sym = opt_trans in_scope opt_co2 opt_co1 -- sym (g `o` h) = sym h `o` sym g - | otherwise = opt_trans in_scope opt_co1 opt_co2 +opt_co4 env sym rep r (TransCo co1 co2) + -- sym (g `o` h) = sym h `o` sym g + | sym = opt_trans in_scope co2' co1' + | otherwise = opt_trans in_scope co1' co2' where - opt_co1 = opt_co env sym mrole co1 - opt_co2 = opt_co env sym mrole co2 + co1' = opt_co4 env sym rep r co1 + co2' = opt_co4 env sym rep r co2 in_scope = getCvInScope env --- NthCo roles are fiddly! 
-opt_co' env sym mrole (NthCo n (TyConAppCo _ _ cos)) - = opt_co env sym mrole (getNth cos n) -opt_co' env sym mrole (NthCo n co) - | TyConAppCo _ _tc cos <- co' - , isDecomposableTyCon tc -- Not synonym families - = ASSERT( n < length cos ) - ASSERT( _tc == tc ) - let resultCo = cos !! n - resultRole = coercionRole resultCo in - case (mrole, resultRole) of - -- if we just need an R coercion, try to propagate the SubCo again: - (Just Representational, Nominal) -> opt_co (zapCvSubstEnv env) False mrole resultCo - _ -> resultCo - - | otherwise - = wrap_role $ NthCo n co' - - where - wrap_role wrapped = wrapRole mrole (coercionRole wrapped) wrapped - - tc = tyConAppTyCon $ pFst $ coercionKind co - co' = opt_co env sym mrole' co - mrole' = case mrole of - Just Representational - | Representational <- nthRole Representational tc n - -> Just Representational - _ -> Nothing +opt_co4 env sym rep r co@(NthCo {}) = opt_nth_co env sym rep r co -opt_co' env sym mrole (LRCo lr co) +opt_co4 env sym rep r (LRCo lr co) | Just pr_co <- splitAppCo_maybe co - = opt_co env sym mrole (pickLR lr pr_co) + = ASSERT( r == Nominal ) + opt_co4 env sym rep Nominal (pickLR lr pr_co) | Just pr_co <- splitAppCo_maybe co' - = if mrole == Just Representational - then opt_co (zapCvSubstEnv env) False mrole (pickLR lr pr_co) + = ASSERT( r == Nominal ) + if rep + then opt_co4 (zapCvSubstEnv env) False True Nominal (pickLR lr pr_co) else pickLR lr pr_co | otherwise - = wrapRole mrole Nominal $ LRCo lr co' + = wrapRole rep Nominal $ LRCo lr co' where - co' = opt_co env sym Nothing co + co' = opt_co4 env sym False Nominal co -opt_co' env sym mrole (InstCo co ty) +opt_co4 env sym rep r (InstCo co ty) -- See if the first arg is already a forall -- ...then we can just extend the current substitution | Just (tv, co_body) <- splitForAllCo_maybe co - = opt_co (extendTvSubst env tv ty') sym mrole co_body + = opt_co4 (extendTvSubst env tv ty') sym rep r co_body -- See if it is a forall after optimization -- If so, do an inefficient one-variable substitution @@ -220,22 +252,34 @@ opt_co' env sym mrole (InstCo co ty) | otherwise = InstCo co' ty' where - co' = opt_co env sym mrole co + co' = opt_co4 env sym rep r co ty' = substTy env ty -opt_co' env sym _ (SubCo co) = opt_co env sym (Just Representational) co +opt_co4 env sym _ r (SubCo co) + = ASSERT( r == Representational ) + opt_co4 env sym True Nominal co -- XXX: We could add another field to CoAxiomRule that -- would allow us to do custom simplifications. -opt_co' env sym mrole (AxiomRuleCo co ts cs) = - wrapRole mrole (coaxrRole co) $ +opt_co4 env sym rep r (AxiomRuleCo co ts cs) + = ASSERT( r == coaxrRole co ) + wrapRole rep r $ wrapSym sym $ AxiomRuleCo co (map (substTy env) ts) - (zipWith (opt_co env False) (map Just (coaxrAsmpRoles co)) cs) - + (zipWith (opt_co2 env False) (coaxrAsmpRoles co) cs) ------------- +-- | Optimize a phantom coercion. The input coercion may not necessarily +-- be a phantom, but the output sure will be. 
+opt_phantom :: CvSubst -> SymFlag -> Coercion -> NormalCo +opt_phantom env sym co + = if sym + then opt_univ env Phantom ty2 ty1 + else opt_univ env Phantom ty1 ty2 + where + Pair ty1 ty2 = coercionKind co + opt_univ :: CvSubst -> Role -> Type -> Type -> Coercion opt_univ env role oty1 oty2 | Just (tc1, tys1) <- splitTyConApp_maybe oty1 @@ -262,6 +306,45 @@ opt_univ env role oty1 oty2 = mkUnivCo role (substTy env oty1) (substTy env oty2) ------------- +-- NthCo must be handled separately, because it's the one case where we can't +-- tell quickly what the component coercion's role is from the containing +-- coercion. To avoid repeated coercionRole calls as opt_co1 calls opt_co2, +-- we just look for nested NthCo's, which can happen in practice. +opt_nth_co :: CvSubst -> SymFlag -> ReprFlag -> Role -> Coercion -> NormalCo +opt_nth_co env sym rep r = go [] + where + go ns (NthCo n co) = go (n:ns) co + -- previous versions checked if the tycon is decomposable. This + -- is redundant, because a non-decomposable tycon under an NthCo + -- is entirely bogus. See docs/core-spec/core-spec.pdf. + go ns co + = opt_nths ns co + + -- input coercion is *not* yet sym'd or opt'd + opt_nths [] co = opt_co4 env sym rep r co + opt_nths (n:ns) (TyConAppCo _ _ cos) = opt_nths ns (cos `getNth` n) + + -- here, the co isn't a TyConAppCo, so we opt it, hoping to get + -- a TyConAppCo as output. We don't know the role, so we use + -- opt_co1. This is slightly annoying, because opt_co1 will call + -- coercionRole, but as long as we don't have a long chain of + -- NthCo's interspersed with some other coercion former, we should + -- be OK. + opt_nths ns co = opt_nths' ns (opt_co1 env sym co) + + -- input coercion *is* sym'd and opt'd + opt_nths' [] co + = if rep && (r == Nominal) + -- propagate the SubCo: + then opt_co4 (zapCvSubstEnv env) False True r co + else co + opt_nths' (n:ns) (TyConAppCo _ _ cos) = opt_nths' ns (cos `getNth` n) + opt_nths' ns co = wrapRole rep r (mk_nths ns co) + + mk_nths [] co = co + mk_nths (n:ns) co = mk_nths ns (mkNthCo n co) + +------------- opt_transList :: InScopeSet -> [NormalCo] -> [NormalCo] -> [NormalCo] opt_transList is = zipWith (opt_trans is) @@ -426,11 +509,11 @@ opt_trans_rule is co1 co2 role = coercionRole co1 -- should be the same as coercionRole co2! opt_trans_rule _ co1 co2 -- Identity rule - | Pair ty1 _ <- coercionKind co1 + | (Pair ty1 _, r) <- coercionKindRole co1 , Pair _ ty2 <- coercionKind co2 , ty1 `eqType` ty2 = fireTransRule "RedTypeDirRefl" co1 co2 $ - Refl (coercionRole co1) ty2 + Refl r ty2 opt_trans_rule _ _ _ = Nothing @@ -493,16 +576,24 @@ checkAxInstCo (AxiomInstCo ax ind cos) checkAxInstCo _ = Nothing ----------- -wrapSym :: Bool -> Coercion -> Coercion +wrapSym :: SymFlag -> Coercion -> Coercion wrapSym sym co | sym = SymCo co | otherwise = co -wrapRole :: Maybe Role -- desired - -> Role -- current +-- | Conditionally set a role to be representational +wrapRole :: ReprFlag + -> Role -- ^ current role -> Coercion -> Coercion -wrapRole Nothing _ = id -wrapRole (Just desired) current = maybeSubCo2 desired current - +wrapRole False _ = id +wrapRole True current = downgradeRole Representational current + +-- | If we require a representational role, return that. Otherwise, +-- return the "default" role provided. 
+chooseRole :: ReprFlag + -> Role -- ^ "default" role + -> Role +chooseRole True _ = Representational +chooseRole _ r = r ----------- -- takes two tyvars and builds env'ts to map them to the same tyvar substTyVarBndr2 :: CvSubst -> TyVar -> TyVar @@ -569,8 +660,7 @@ etaAppCo_maybe :: Coercion -> Maybe (Coercion,Coercion) etaAppCo_maybe co | Just (co1,co2) <- splitAppCo_maybe co = Just (co1,co2) - | Nominal <- coercionRole co - , Pair ty1 ty2 <- coercionKind co + | (Pair ty1 ty2, Nominal) <- coercionKindRole co , Just (_,t1) <- splitAppTy_maybe ty1 , Just (_,t2) <- splitAppTy_maybe ty2 , typeKind t1 `eqType` typeKind t2 -- Note [Eta for AppCo] diff --git a/compiler/types/TyCon.lhs b/compiler/types/TyCon.lhs index bb489b33e1..c39f9d1729 100644 --- a/compiler/types/TyCon.lhs +++ b/compiler/types/TyCon.lhs @@ -6,6 +6,7 @@ The @TyCon@ datatype \begin{code} +{-# LANGUAGE CPP, DeriveDataTypeable #-} module TyCon( -- * Main TyCon data types @@ -34,14 +35,13 @@ module TyCon( isFunTyCon, isPrimTyCon, isTupleTyCon, isUnboxedTupleTyCon, isBoxedTupleTyCon, - isSynTyCon, + isSynTyCon, isTypeSynonymTyCon, isDecomposableTyCon, isForeignTyCon, isPromotedDataCon, isPromotedTyCon, isPromotedDataCon_maybe, isPromotedTyCon_maybe, promotableTyCon_maybe, promoteTyCon, - isInjectiveTyCon, isDataTyCon, isProductTyCon, isDataProductTyCon_maybe, isEnumerationTyCon, isNewTyCon, isAbstractTyCon, @@ -1187,11 +1187,17 @@ isDataProductTyCon_maybe (TupleTyCon { dataCon = con }) = Just con isDataProductTyCon_maybe _ = Nothing --- | Is this a 'TyCon' representing a type synonym (@type@)? +-- | Is this a 'TyCon' representing a regular H98 type synonym (@type@)? +isTypeSynonymTyCon :: TyCon -> Bool +isTypeSynonymTyCon (SynTyCon { synTcRhs = SynonymTyCon {} }) = True +isTypeSynonymTyCon _ = False + +-- | Is this 'TyCon' a type synonym or type family? isSynTyCon :: TyCon -> Bool isSynTyCon (SynTyCon {}) = True isSynTyCon _ = False + -- As for newtypes, it is in some contexts important to distinguish between -- closed synonyms and synonym families, as synonym families have no unique -- right hand side to which a synonym family application can expand. @@ -1199,7 +1205,14 @@ isSynTyCon _ = False isDecomposableTyCon :: TyCon -> Bool -- True iff we can decompose (T a b c) into ((T a b) c) +-- I.e. is it injective? -- Specifically NOT true of synonyms (open and otherwise) +-- Ultimately we may have injective associated types +-- in which case this test will become more interesting +-- +-- It'd be unusual to call isDecomposableTyCon on a regular H98 +-- type synonym, because you should probably have expanded it first +-- But regardless, it's not decomposable isDecomposableTyCon (SynTyCon {}) = False isDecomposableTyCon _other = True @@ -1259,17 +1272,6 @@ isDataFamilyTyCon :: TyCon -> Bool isDataFamilyTyCon (AlgTyCon {algTcRhs = DataFamilyTyCon {}}) = True isDataFamilyTyCon _ = False --- | Injective 'TyCon's can be decomposed, so that --- T ty1 ~ T ty2 => ty1 ~ ty2 -isInjectiveTyCon :: TyCon -> Bool -isInjectiveTyCon tc = not (isSynTyCon tc) - -- Ultimately we may have injective associated types - -- in which case this test will become more interesting - -- - -- It'd be unusual to call isInjectiveTyCon on a regular H98 - -- type synonym, because you should probably have expanded it first - -- But regardless, it's not injective! - -- | Are we able to extract informationa 'TyVar' to class argument list -- mappping from a given 'TyCon'? 
isTyConAssoc :: TyCon -> Bool @@ -1370,13 +1372,15 @@ isPromotedDataCon_maybe _ = Nothing -- * Family instances are /not/ implicit as they represent the instance body -- (similar to a @dfun@ does that for a class instance). isImplicitTyCon :: TyCon -> Bool -isImplicitTyCon tycon - | isTyConAssoc tycon = True - | isSynTyCon tycon = False - | isAlgTyCon tycon = isTupleTyCon tycon - | otherwise = True - -- 'otherwise' catches: FunTyCon, PrimTyCon, - -- PromotedDataCon, PomotedTypeTyCon +isImplicitTyCon (FunTyCon {}) = True +isImplicitTyCon (TupleTyCon {}) = True +isImplicitTyCon (PrimTyCon {}) = True +isImplicitTyCon (PromotedDataCon {}) = True +isImplicitTyCon (PromotedTyCon {}) = True +isImplicitTyCon (AlgTyCon { algTcParent = AssocFamilyTyCon {} }) = True +isImplicitTyCon (AlgTyCon {}) = False +isImplicitTyCon (SynTyCon { synTcParent = AssocFamilyTyCon {} }) = True +isImplicitTyCon (SynTyCon {}) = False tyConCType_maybe :: TyCon -> Maybe CType tyConCType_maybe tc@(AlgTyCon {}) = tyConCType tc diff --git a/compiler/types/Type.lhs b/compiler/types/Type.lhs index 88054ce38b..ad9e8b517c 100644 --- a/compiler/types/Type.lhs +++ b/compiler/types/Type.lhs @@ -6,6 +6,7 @@ Type - public interface \begin{code} +{-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-orphans #-} -- | Main functions for manipulating types and type-related things @@ -35,7 +36,7 @@ module Type ( mkForAllTy, mkForAllTys, splitForAllTy_maybe, splitForAllTys, mkPiKinds, mkPiType, mkPiTypes, - applyTy, applyTys, applyTysD, isForAllTy, dropForAlls, + applyTy, applyTys, applyTysD, dropForAlls, mkNumLitTy, isNumLitTy, mkStrLitTy, isStrLitTy, @@ -50,7 +51,7 @@ module Type ( isDictLikeTy, mkEqPred, mkCoerciblePred, mkPrimEqPred, mkReprPrimEqPred, mkClassPred, - noParenPred, isClassPred, isEqPred, + isClassPred, isEqPred, isIPPred, isIPPred_maybe, isIPTyCon, isIPClass, -- Deconstructing predicate types @@ -62,7 +63,7 @@ module Type ( funTyCon, -- ** Predicates on types - isTypeVar, isKindVar, + isTypeVar, isKindVar, allDistinctTyVars, isForAllTy, isTyVarTy, isFunTy, isDictTy, isPredTy, isVoidTy, -- (Lifting and boxity) @@ -128,9 +129,10 @@ module Type ( -- * Pretty-printing pprType, pprParendType, pprTypeApp, pprTyThingCategory, pprTyThing, - pprTvBndr, pprTvBndrs, pprForAll, pprSigmaType, - pprEqPred, pprTheta, pprThetaArrowTy, pprClassPred, + pprTvBndr, pprTvBndrs, pprForAll, pprUserForAll, pprSigmaType, + pprTheta, pprThetaArrowTy, pprClassPred, pprKind, pprParendKind, pprSourceTyCon, + TyPrec(..), maybeParen, -- * Tidying type related things up for printing tidyType, tidyTypes, @@ -321,6 +323,15 @@ getTyVar_maybe ty | Just ty' <- coreView ty = getTyVar_maybe ty' getTyVar_maybe (TyVarTy tv) = Just tv getTyVar_maybe _ = Nothing +allDistinctTyVars :: [KindOrType] -> Bool +allDistinctTyVars tkvs = go emptyVarSet tkvs + where + go _ [] = True + go so_far (ty : tys) + = case getTyVar_maybe ty of + Nothing -> False + Just tv | tv `elemVarSet` so_far -> False + | otherwise -> go (so_far `extendVarSet` tv) tys \end{code} @@ -813,7 +824,7 @@ applyTysD doc orig_fun_ty arg_tys = substTyWith (take n_args tvs) arg_tys (mkForAllTys (drop n_args tvs) rho_ty) | otherwise -- Too many type args - = ASSERT2( n_tvs > 0, doc $$ ppr orig_fun_ty ) -- Zero case gives infnite loop! + = ASSERT2( n_tvs > 0, doc $$ ppr orig_fun_ty ) -- Zero case gives infinite loop! 
applyTysD doc (substTyWith tvs (take n_tvs arg_tys) rho_ty) (drop n_tvs arg_tys) where @@ -832,13 +843,6 @@ applyTysD doc orig_fun_ty arg_tys Predicates on PredType \begin{code} -noParenPred :: PredType -> Bool --- A predicate that can appear without parens before a "=>" --- C a => a -> a --- a~b => a -> b --- But (?x::Int) => Int -> Int -noParenPred p = not (isIPPred p) && isClassPred p || isEqPred p - isPredTy :: Type -> Bool -- NB: isPredTy is used when printing types, which can happen in debug printing -- during type checking of not-fully-zonked types. So it's not cool to say @@ -1635,26 +1639,31 @@ type SimpleKind = Kind \begin{code} typeKind :: Type -> Kind -typeKind (TyConApp tc tys) - | isPromotedTyCon tc - = ASSERT( tyConArity tc == length tys ) superKind - | otherwise - = kindAppResult (tyConKind tc) tys - -typeKind (AppTy fun arg) = kindAppResult (typeKind fun) [arg] -typeKind (LitTy l) = typeLiteralKind l -typeKind (ForAllTy _ ty) = typeKind ty -typeKind (TyVarTy tyvar) = tyVarKind tyvar -typeKind _ty@(FunTy _arg res) - -- Hack alert. The kind of (Int -> Int#) is liftedTypeKind (*), - -- not unliftedTypKind (#) - -- The only things that can be after a function arrow are - -- (a) types (of kind openTypeKind or its sub-kinds) - -- (b) kinds (of super-kind TY) (e.g. * -> (* -> *)) - | isSuperKind k = k - | otherwise = ASSERT2( isSubOpenTypeKind k, ppr _ty $$ ppr k ) liftedTypeKind - where - k = typeKind res +typeKind orig_ty = go orig_ty + where + + go ty@(TyConApp tc tys) + | isPromotedTyCon tc + = ASSERT( tyConArity tc == length tys ) superKind + | otherwise + = kindAppResult (ptext (sLit "typeKind 1") <+> ppr ty $$ ppr orig_ty) + (tyConKind tc) tys + + go ty@(AppTy fun arg) = kindAppResult (ptext (sLit "typeKind 2") <+> ppr ty $$ ppr orig_ty) + (go fun) [arg] + go (LitTy l) = typeLiteralKind l + go (ForAllTy _ ty) = go ty + go (TyVarTy tyvar) = tyVarKind tyvar + go _ty@(FunTy _arg res) + -- Hack alert. The kind of (Int -> Int#) is liftedTypeKind (*), + -- not unliftedTypeKind (#) + -- The only things that can be after a function arrow are + -- (a) types (of kind openTypeKind or its sub-kinds) + -- (b) kinds (of super-kind TY) (e.g. * -> (* -> *)) + | isSuperKind k = k + | otherwise = ASSERT2( isSubOpenTypeKind k, ppr _ty $$ ppr k ) liftedTypeKind + where + k = go res typeLiteralKind :: TyLit -> Kind typeLiteralKind l = diff --git a/compiler/types/Type.lhs-boot b/compiler/types/Type.lhs-boot index c2d2dec093..ff9db3e28c 100644 --- a/compiler/types/Type.lhs-boot +++ b/compiler/types/Type.lhs-boot @@ -3,7 +3,6 @@ module Type where import {-# SOURCE #-} TypeRep( Type, Kind ) import Var -noParenPred :: Type -> Bool isPredTy :: Type -> Bool typeKind :: Type -> Kind diff --git a/compiler/types/TypeRep.lhs b/compiler/types/TypeRep.lhs index bea67b4e3b..c8b20e8d93 100644 --- a/compiler/types/TypeRep.lhs +++ b/compiler/types/TypeRep.lhs @@ -15,16 +15,16 @@ Note [The Type-related module hierarchy] Coercion imports Type \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor, DeriveFoldable, DeriveTraversable #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). 
See -- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces -- for details - --- We expose the relevant stuff from this module via the Type module {-# OPTIONS_HADDOCK hide #-} -{-# LANGUAGE DeriveDataTypeable, DeriveFunctor, DeriveFoldable, DeriveTraversable #-} +-- We expose the relevant stuff from this module via the Type module + module TypeRep ( TyThing(..), Type(..), @@ -39,9 +39,10 @@ module TypeRep ( -- Pretty-printing pprType, pprParendType, pprTypeApp, pprTvBndr, pprTvBndrs, pprTyThing, pprTyThingCategory, pprSigmaType, - pprEqPred, pprTheta, pprForAll, pprThetaArrowTy, pprClassPred, + pprTheta, pprForAll, pprUserForAll, + pprThetaArrowTy, pprClassPred, pprKind, pprParendKind, pprTyLit, suppressKinds, - Prec(..), maybeParen, pprTcApp, + TyPrec(..), maybeParen, pprTcApp, pprPrefixApp, pprArrowChain, ppr_type, -- Free variables @@ -65,7 +66,7 @@ module TypeRep ( import {-# SOURCE #-} DataCon( dataConTyCon ) import ConLike ( ConLike(..) ) -import {-# SOURCE #-} Type( noParenPred, isPredTy ) -- Transitively pulls in a LOT of stuff, better to break the loop +import {-# SOURCE #-} Type( isPredTy ) -- Transitively pulls in a LOT of stuff, better to break the loop -- friends: import Var @@ -81,7 +82,6 @@ import CoAxiom import PrelNames import Outputable import FastString -import Pair import Util import DynFlags @@ -491,13 +491,31 @@ defined to use this. @pprParendType@ is the same, except it puts parens around the type, except for the atomic cases. @pprParendType@ works just by setting the initial context precedence very high. +Note [Precedence in types] +~~~~~~~~~~~~~~~~~~~~~~~~~~ +We don't keep the fixity of type operators in the operator. So the pretty printer +operates the following precedene structre: + Type constructor application binds more tightly than + Oerator applications which bind more tightly than + Function arrow + +So we might see a :+: T b -> c +meaning (a :+: (T b)) -> c + +Maybe operator applications should bind a bit less tightly? + +Anyway, that's the current story, and it is used consistently for Type and HsType + \begin{code} -data Prec = TopPrec -- No parens - | FunPrec -- Function args; no parens for tycon apps - | TyConPrec -- Tycon args; no parens for atomic - deriving( Eq, Ord ) +data TyPrec -- See Note [Prededence in types] + + = TopPrec -- No parens + | FunPrec -- Function args; no parens for tycon apps + | TyOpPrec -- Infix operator + | TyConPrec -- Tycon args; no parens for atomic + deriving( Eq, Ord ) -maybeParen :: Prec -> Prec -> SDoc -> SDoc +maybeParen :: TyPrec -> TyPrec -> SDoc -> SDoc maybeParen ctxt_prec inner_prec pretty | ctxt_prec < inner_prec = pretty | otherwise = parens pretty @@ -514,18 +532,6 @@ pprKind, pprParendKind :: Kind -> SDoc pprKind = pprType pprParendKind = pprParendType ------------------- -pprEqPred :: Pair Type -> SDoc --- NB: Maybe move to Coercion? It's only called after coercionKind anyway. -pprEqPred (Pair ty1 ty2) - = sep [ ppr_type FunPrec ty1 - , nest 2 (ptext (sLit "~#")) - , ppr_type FunPrec ty2] - -- Precedence looks like (->) so that we get - -- Maybe a ~ Bool - -- (a->a) ~ Bool - -- Note parens on the latter! 
- ------------ pprClassPred :: Class -> [Type] -> SDoc pprClassPred clas tys = pprTypeApp (classTyCon clas) tys @@ -536,10 +542,9 @@ pprTheta :: ThetaType -> SDoc pprTheta theta = parens (sep (punctuate comma (map (ppr_type TopPrec) theta))) pprThetaArrowTy :: ThetaType -> SDoc -pprThetaArrowTy [] = empty -pprThetaArrowTy [pred] - | noParenPred pred = ppr_type TopPrec pred <+> darrow -pprThetaArrowTy preds = parens (fsep (punctuate comma (map (ppr_type TopPrec) preds))) +pprThetaArrowTy [] = empty +pprThetaArrowTy [pred] = ppr_type FunPrec pred <+> darrow +pprThetaArrowTy preds = parens (fsep (punctuate comma (map (ppr_type TopPrec) preds))) <+> darrow -- Notice 'fsep' here rather that 'sep', so that -- type contexts don't get displayed in a giant column @@ -573,15 +578,9 @@ instance Outputable TyLit where ------------------ -- OK, here's the main printer -ppr_type :: Prec -> Type -> SDoc +ppr_type :: TyPrec -> Type -> SDoc ppr_type _ (TyVarTy tv) = ppr_tvar tv - -ppr_type _ (TyConApp tc [LitTy (StrTyLit n),ty]) - | tc `hasKey` ipClassNameKey - = char '?' <> ftext n <> ptext (sLit "::") <> ppr_type TopPrec ty - ppr_type p (TyConApp tc tys) = pprTyTcApp p tc tys - ppr_type p (LitTy l) = ppr_tylit p l ppr_type p ty@(ForAllTy {}) = ppr_forall_type p ty @@ -600,15 +599,17 @@ ppr_type p fun_ty@(FunTy ty1 ty2) ppr_fun_tail other_ty = [ppr_type TopPrec other_ty] -ppr_forall_type :: Prec -> Type -> SDoc +ppr_forall_type :: TyPrec -> Type -> SDoc ppr_forall_type p ty = maybeParen p FunPrec $ ppr_sigma_type True ty + -- True <=> we always print the foralls on *nested* quantifiers + -- Opt_PrintExplicitForalls only affects top-level quantifiers ppr_tvar :: TyVar -> SDoc ppr_tvar tv -- Note [Infix type variables] = parenSymOcc (getOccName tv) (ppr tv) -ppr_tylit :: Prec -> TyLit -> SDoc +ppr_tylit :: TyPrec -> TyLit -> SDoc ppr_tylit _ tl = case tl of NumTyLit n -> integer n @@ -616,34 +617,38 @@ ppr_tylit _ tl = ------------------- ppr_sigma_type :: Bool -> Type -> SDoc --- Bool <=> Show the foralls -ppr_sigma_type show_foralls ty - = sdocWithDynFlags $ \ dflags -> - let filtered_tvs | gopt Opt_PrintExplicitKinds dflags - = tvs - | otherwise - = filterOut isKindVar tvs - in sep [ ppWhen show_foralls (pprForAll filtered_tvs) - , pprThetaArrowTy ctxt - , pprType tau ] +-- Bool <=> Show the foralls unconditionally +ppr_sigma_type show_foralls_unconditionally ty + = sep [ if show_foralls_unconditionally + then pprForAll tvs + else pprUserForAll tvs + , pprThetaArrowTy ctxt + , pprType tau ] where (tvs, rho) = split1 [] ty (ctxt, tau) = split2 [] rho split1 tvs (ForAllTy tv ty) = split1 (tv:tvs) ty - split1 tvs ty = (reverse tvs, ty) - + split1 tvs ty = (reverse tvs, ty) + split2 ps (ty1 `FunTy` ty2) | isPredTy ty1 = split2 (ty1:ps) ty2 split2 ps ty = (reverse ps, ty) - pprSigmaType :: Type -> SDoc -pprSigmaType ty = sdocWithDynFlags $ \dflags -> - ppr_sigma_type (gopt Opt_PrintExplicitForalls dflags) ty +pprSigmaType ty = ppr_sigma_type False ty + +pprUserForAll :: [TyVar] -> SDoc +-- Print a user-level forall; see Note [WHen to print foralls] +pprUserForAll tvs + = sdocWithDynFlags $ \dflags -> + ppWhen (any tv_has_kind_var tvs || gopt Opt_PrintExplicitForalls dflags) $ + pprForAll tvs + where + tv_has_kind_var tv = not (isEmptyVarSet (tyVarsOfType (tyVarKind tv))) pprForAll :: [TyVar] -> SDoc pprForAll [] = empty -pprForAll tvs = ptext (sLit "forall") <+> pprTvBndrs tvs <> dot +pprForAll tvs = forAllLit <+> pprTvBndrs tvs <> dot pprTvBndrs :: [TyVar] -> SDoc pprTvBndrs tvs = sep (map pprTvBndr tvs) @@ 
-656,6 +661,24 @@ pprTvBndr tv kind = tyVarKind tv \end{code} +Note [When to print foralls] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Mostly we want to print top-level foralls when (and only when) the user specifies +-fprint-explicit-foralls. But when kind polymorphism is at work, that suppresses +too much information; see Trac #9018. + +So I'm trying out this rule: print explicit foralls if + a) User specifies -fprint-explicit-foralls, or + b) Any of the quantified type variables has a kind + that mentions a kind variable + +This catches common situations, such as a type signature + f :: m a +which means + f :: forall k. forall (m :: k->*) (a :: k). m a +We really want to see both the "forall k" and the kind signatures +on m and a. The latter comes from pprTvBndr. + Note [Infix type variables] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ With TypeOperators you can say @@ -680,10 +703,15 @@ pprTypeApp tc tys = pprTyTcApp TopPrec tc tys -- We have to use ppr on the TyCon (not its name) -- so that we get promotion quotes in the right place -pprTyTcApp :: Prec -> TyCon -> [Type] -> SDoc +pprTyTcApp :: TyPrec -> TyCon -> [Type] -> SDoc -- Used for types only; so that we can make a -- special case for type-level lists pprTyTcApp p tc tys + | tc `hasKey` ipClassNameKey + , [LitTy (StrTyLit n),ty] <- tys + = maybeParen p FunPrec $ + char '?' <> ftext n <> ptext (sLit "::") <> ppr_type TopPrec ty + | tc `hasKey` consDataConKey , [_kind,ty1,ty2] <- tys = sdocWithDynFlags $ \dflags -> @@ -693,7 +721,7 @@ pprTyTcApp p tc tys | otherwise = pprTcApp p ppr_type tc tys -pprTcApp :: Prec -> (Prec -> a -> SDoc) -> TyCon -> [a] -> SDoc +pprTcApp :: TyPrec -> (TyPrec -> a -> SDoc) -> TyCon -> [a] -> SDoc -- Used for both types and coercions, hence polymorphism pprTcApp _ pp tc [ty] | tc `hasKey` listTyConKey = pprPromotionQuote tc <> brackets (pp TopPrec ty) @@ -717,7 +745,7 @@ pprTcApp p pp tc tys | otherwise = sdocWithDynFlags (pprTcApp_help p pp tc tys) -pprTcApp_help :: Prec -> (Prec -> a -> SDoc) -> TyCon -> [a] -> DynFlags -> SDoc +pprTcApp_help :: TyPrec -> (TyPrec -> a -> SDoc) -> TyCon -> [a] -> DynFlags -> SDoc -- This one has access to the DynFlags pprTcApp_help p pp tc tys dflags | not (isSymOcc (nameOccName (tyConName tc))) @@ -740,6 +768,7 @@ pprTcApp_help p pp tc tys dflags suppressKinds :: DynFlags -> Kind -> [a] -> [a] -- Given the kind of a TyCon, and the args to which it is applied, -- suppress the args that are kind args +-- C.f. Note [Suppressing kinds] in IfaceType suppressKinds dflags kind xs | gopt Opt_PrintExplicitKinds dflags = xs | otherwise = suppress kind xs @@ -749,7 +778,7 @@ suppressKinds dflags kind xs suppress _ xs = xs ---------------- -pprTyList :: Prec -> Type -> Type -> SDoc +pprTyList :: TyPrec -> Type -> Type -> SDoc -- Given a type-level list (t1 ': t2), see if we can print -- it in list notation [t1, ...]. 
pprTyList p ty1 ty2 @@ -773,19 +802,19 @@ pprTyList p ty1 ty2 gather ty = ([], Just ty) ---------------- -pprInfixApp :: Prec -> (Prec -> a -> SDoc) -> SDoc -> a -> a -> SDoc +pprInfixApp :: TyPrec -> (TyPrec -> a -> SDoc) -> SDoc -> a -> a -> SDoc pprInfixApp p pp pp_tc ty1 ty2 - = maybeParen p FunPrec $ - sep [pp FunPrec ty1, pprInfixVar True pp_tc <+> pp FunPrec ty2] + = maybeParen p TyOpPrec $ + sep [pp TyOpPrec ty1, pprInfixVar True pp_tc <+> pp TyOpPrec ty2] -pprPrefixApp :: Prec -> SDoc -> [SDoc] -> SDoc +pprPrefixApp :: TyPrec -> SDoc -> [SDoc] -> SDoc pprPrefixApp p pp_fun pp_tys | null pp_tys = pp_fun | otherwise = maybeParen p TyConPrec $ hang pp_fun 2 (sep pp_tys) ---------------- -pprArrowChain :: Prec -> [SDoc] -> SDoc +pprArrowChain :: TyPrec -> [SDoc] -> SDoc -- pprArrowChain p [a,b,c] generates a -> b -> c pprArrowChain _ [] = empty pprArrowChain p (arg:args) = maybeParen p FunPrec $ diff --git a/compiler/types/Unify.lhs b/compiler/types/Unify.lhs index d56a3f65fc..f44e260c57 100644 --- a/compiler/types/Unify.lhs +++ b/compiler/types/Unify.lhs @@ -3,7 +3,8 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See @@ -23,7 +24,6 @@ module Unify ( -- Side-effect free unification tcUnifyTy, tcUnifyTys, BindFlag(..), - niFixTvSubst, niSubstTvSet, UnifyResultM(..), UnifyResult, tcUnifyTysFG @@ -205,6 +205,8 @@ match _ subst (LitTy x) (LitTy y) | x == y = return subst match _ _ _ _ = Nothing + + -------------- match_kind :: MatchEnv -> TvSubstEnv -> Kind -> Kind -> Maybe TvSubstEnv -- Match the kind of the template tyvar with the kind of Type @@ -470,19 +472,52 @@ During unification we use a TvSubstEnv that is (a) non-idempotent (b) loop-free; ie repeatedly applying it yields a fixed point +Note [Finding the substitution fixpoint] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Finding the fixpoint of a non-idempotent substitution arising from a +unification is harder than it looks, because of kinds. Consider + T k (H k (f:k)) ~ T * (g:*) +If we unify, we get the substitution + [ k -> * + , g -> H k (f:k) ] +To make it idempotent we don't want to get just + [ k -> * + , g -> H * (f:k) ] +We also want to substitute inside f's kind, to get + [ k -> * + , g -> H k (f:*) ] +If we don't do this, we may apply the substitition to something, +and get an ill-formed type, i.e. one where typeKind will fail. +This happened, for example, in Trac #9106. + +This is the reason for extending env with [f:k -> f:*], in the +definition of env' in niFixTvSubst + \begin{code} niFixTvSubst :: TvSubstEnv -> TvSubst -- Find the idempotent fixed point of the non-idempotent substitution +-- See Note [Finding the substitution fixpoint] -- ToDo: use laziness instead of iteration? niFixTvSubst env = f env where - f e | not_fixpoint = f (mapVarEnv (substTy subst) e) - | otherwise = subst + f env | not_fixpoint = f (mapVarEnv (substTy subst') env) + | otherwise = subst where - range_tvs = foldVarEnv (unionVarSet . tyVarsOfType) emptyVarSet e - subst = mkTvSubst (mkInScopeSet range_tvs) e - not_fixpoint = foldVarSet ((||) . in_domain) False range_tvs - in_domain tv = tv `elemVarEnv` e + not_fixpoint = foldVarSet ((||) . in_domain) False all_range_tvs + in_domain tv = tv `elemVarEnv` env + + range_tvs = foldVarEnv (unionVarSet . 
tyVarsOfType) emptyVarSet env + all_range_tvs = closeOverKinds range_tvs + subst = mkTvSubst (mkInScopeSet all_range_tvs) env + + -- env' extends env by replacing any free type with + -- that same tyvar with a substituted kind + -- See note [Finding the substitution fixpoint] + env' = extendVarEnvList env [ (rtv, mkTyVarTy $ setTyVarKind rtv $ + substTy subst $ tyVarKind rtv) + | rtv <- varSetElems range_tvs + , not (in_domain rtv) ] + subst' = mkTvSubst (mkInScopeSet all_range_tvs) env' niSubstTvSet :: TvSubstEnv -> TyVarSet -> TyVarSet -- Apply the non-idempotent substitution to a set of type variables, @@ -620,6 +655,7 @@ uUnrefined subst tv1 ty2 ty2' -- ty2 is not a type variable -- See Note [Fine-grained unification] | otherwise = do { subst' <- unify subst k1 k2 + -- Note [Kinds Containing Only Literals] ; bindTv subst' tv1 ty2 } -- Bind tyvar to the synonym if poss where k1 = tyVarKind tv1 diff --git a/compiler/utils/Bag.lhs b/compiler/utils/Bag.lhs index 2d823e46bb..65c5b39df1 100644 --- a/compiler/utils/Bag.lhs +++ b/compiler/utils/Bag.lhs @@ -6,6 +6,8 @@ Bag: an unordered collection with duplicates \begin{code} +{-# LANGUAGE DeriveDataTypeable, ScopedTypeVariables #-} + module Bag ( Bag, -- abstract type diff --git a/compiler/utils/Binary.hs b/compiler/utils/Binary.hs index 332bfc8e0c..82d1497ee6 100644 --- a/compiler/utils/Binary.hs +++ b/compiler/utils/Binary.hs @@ -1,4 +1,4 @@ -{-# OPTIONS -cpp #-} +{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-} {-# OPTIONS_GHC -O -funbox-strict-fields #-} -- We always optimise this, otherwise performance of a non-optimised -- compiler is severely affected @@ -707,14 +707,13 @@ getBS bh = do l <- get bh fp <- mallocForeignPtrBytes l withForeignPtr fp $ \ptr -> do - let - go n | n == l = return $ BS.fromForeignPtr fp 0 l + let go n | n == l = return $ BS.fromForeignPtr fp 0 l | otherwise = do b <- getByte bh pokeElemOff ptr n b go (n+1) - -- - go 0 + -- + go 0 instance Binary ByteString where put_ bh f = putBS bh f @@ -834,18 +833,26 @@ instance Binary RecFlag where 0 -> do return Recursive _ -> do return NonRecursive -instance Binary OverlapFlag where - put_ bh (NoOverlap b) = putByte bh 0 >> put_ bh b - put_ bh (OverlapOk b) = putByte bh 1 >> put_ bh b - put_ bh (Incoherent b) = putByte bh 2 >> put_ bh b +instance Binary OverlapMode where + put_ bh NoOverlap = putByte bh 0 + put_ bh OverlapOk = putByte bh 1 + put_ bh Incoherent = putByte bh 2 get bh = do h <- getByte bh - b <- get bh case h of - 0 -> return $ NoOverlap b - 1 -> return $ OverlapOk b - 2 -> return $ Incoherent b - _ -> panic ("get OverlapFlag " ++ show h) + 0 -> return NoOverlap + 1 -> return OverlapOk + 2 -> return Incoherent + _ -> panic ("get OverlapMode" ++ show h) + + +instance Binary OverlapFlag where + put_ bh flag = do put_ bh (overlapMode flag) + put_ bh (isSafeOverlap flag) + get bh = do + h <- get bh + b <- get bh + return OverlapFlag { overlapMode = h, isSafeOverlap = b } instance Binary FixityDirection where put_ bh InfixL = do diff --git a/compiler/utils/BufWrite.hs b/compiler/utils/BufWrite.hs index f85ea8e792..7eba0753fe 100644 --- a/compiler/utils/BufWrite.hs +++ b/compiler/utils/BufWrite.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + ----------------------------------------------------------------------------- -- -- Fast write-buffered Handles @@ -10,7 +12,7 @@ -- ----------------------------------------------------------------------------- -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary 
kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/utils/Digraph.lhs b/compiler/utils/Digraph.lhs index cc684303b6..d22380ff6e 100644 --- a/compiler/utils/Digraph.lhs +++ b/compiler/utils/Digraph.lhs @@ -3,22 +3,22 @@ % \begin{code} -{-# OPTIONS -fno-warn-tabs #-} +{-# LANGUAGE CPP, ScopedTypeVariables #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See -- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces -- for details -{-# LANGUAGE ScopedTypeVariables #-} module Digraph( Graph, graphFromVerticesAndAdjacency, graphFromEdgedVertices, SCC(..), Node, flattenSCC, flattenSCCs, - stronglyConnCompG, stronglyConnCompFromG, + stronglyConnCompG, topologicalSortG, dfsTopSortG, verticesG, edgesG, hasVertexG, - reachableG, transposeG, + reachableG, reachablesG, transposeG, outdegreeG, indegreeG, vertexGroupsG, emptyG, componentsG, @@ -258,14 +258,6 @@ stronglyConnCompG :: Graph node -> [SCC node] stronglyConnCompG graph = decodeSccs graph forest where forest = {-# SCC "Digraph.scc" #-} scc (gr_int_graph graph) --- Find the set of strongly connected components starting from the --- given roots. This is a good way to discard unreachable nodes at --- the same time as computing SCCs. -stronglyConnCompFromG :: Graph node -> [node] -> [SCC node] -stronglyConnCompFromG graph roots = decodeSccs graph forest - where forest = {-# SCC "Digraph.scc" #-} sccFrom (gr_int_graph graph) vs - vs = [ v | Just v <- map (gr_node_to_vertex graph) roots ] - decodeSccs :: Graph node -> Forest Vertex -> [SCC node] decodeSccs Graph { gr_int_graph = graph, gr_vertex_to_node = vertex_fn } forest = map decode forest @@ -315,7 +307,13 @@ dfsTopSortG graph = reachableG :: Graph node -> node -> [node] reachableG graph from = map (gr_vertex_to_node graph) result where from_vertex = expectJust "reachableG" (gr_node_to_vertex graph from) - result = {-# SCC "Digraph.reachable" #-} reachable (gr_int_graph graph) from_vertex + result = {-# SCC "Digraph.reachable" #-} reachable (gr_int_graph graph) [from_vertex] + +reachablesG :: Graph node -> [node] -> [node] +reachablesG graph froms = map (gr_vertex_to_node graph) result + where result = {-# SCC "Digraph.reachable" #-} + reachable (gr_int_graph graph) vs + vs = [ v | Just v <- map (gr_node_to_vertex graph) froms ] hasVertexG :: Graph node -> node -> Bool hasVertexG graph node = isJust $ gr_node_to_vertex graph node @@ -548,9 +546,6 @@ postorderF ts = foldr (.) id $ map postorder ts postOrd :: IntGraph -> [Vertex] postOrd g = postorderF (dff g) [] -postOrdFrom :: IntGraph -> [Vertex] -> [Vertex] -postOrdFrom g vs = postorderF (dfs g vs) [] - topSort :: IntGraph -> [Vertex] topSort = reverse . 
postOrd \end{code} @@ -574,9 +569,6 @@ undirected g = buildG (bounds g) (edges g ++ reverseE g) \begin{code} scc :: IntGraph -> Forest Vertex scc g = dfs g (reverse (postOrd (transpose g))) - -sccFrom :: IntGraph -> [Vertex] -> Forest Vertex -sccFrom g vs = reverse (dfs (transpose g) (reverse (postOrdFrom g vs))) \end{code} ------------------------------------------------------------ @@ -602,11 +594,11 @@ forward g tree pre = mapT select g ------------------------------------------------------------ \begin{code} -reachable :: IntGraph -> Vertex -> [Vertex] -reachable g v = preorderF (dfs g [v]) +reachable :: IntGraph -> [Vertex] -> [Vertex] +reachable g vs = preorderF (dfs g vs) path :: IntGraph -> Vertex -> Vertex -> Bool -path g v w = w `elem` (reachable g v) +path g v w = w `elem` (reachable g [v]) \end{code} ------------------------------------------------------------ diff --git a/compiler/utils/Encoding.hs b/compiler/utils/Encoding.hs index c4a669c134..115703fc69 100644 --- a/compiler/utils/Encoding.hs +++ b/compiler/utils/Encoding.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE BangPatterns, CPP, MagicHash, UnboxedTuples #-} {-# OPTIONS_GHC -O #-} -- We always optimise this, otherwise performance of a non-optimised -- compiler is severely affected diff --git a/compiler/utils/ExtsCompat46.hs b/compiler/utils/ExtsCompat46.hs index da0e67ab93..a33fef57d8 100644 --- a/compiler/utils/ExtsCompat46.hs +++ b/compiler/utils/ExtsCompat46.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE BangPatterns, CPP #-} +{-# LANGUAGE BangPatterns, CPP, MagicHash #-} ----------------------------------------------------------------------------- -- | diff --git a/compiler/utils/FastBool.lhs b/compiler/utils/FastBool.lhs index 32cb7aef3a..9558da7079 100644 --- a/compiler/utils/FastBool.lhs +++ b/compiler/utils/FastBool.lhs @@ -4,6 +4,8 @@ \section{Fast booleans} \begin{code} +{-# LANGUAGE CPP, MagicHash #-} + module FastBool ( --fastBool could be called bBox; isFastTrue, bUnbox; but they're not FastBool, fastBool, isFastTrue, fastOr, fastAnd diff --git a/compiler/utils/FastFunctions.lhs b/compiler/utils/FastFunctions.lhs index b1dacdcd9b..457fcc9c93 100644 --- a/compiler/utils/FastFunctions.lhs +++ b/compiler/utils/FastFunctions.lhs @@ -4,6 +4,7 @@ Z% \section{Fast functions} \begin{code} +{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-} module FastFunctions ( unsafeChr, inlinePerformIO, unsafeDupableInterleaveIO, diff --git a/compiler/utils/FastMutInt.lhs b/compiler/utils/FastMutInt.lhs index 7156cdc9fb..0f0ca78e14 100644 --- a/compiler/utils/FastMutInt.lhs +++ b/compiler/utils/FastMutInt.lhs @@ -1,6 +1,5 @@ \begin{code} -{-# LANGUAGE BangPatterns #-} -{-# OPTIONS -cpp #-} +{-# LANGUAGE CPP, BangPatterns, MagicHash, UnboxedTuples #-} {-# OPTIONS_GHC -O #-} -- We always optimise this, otherwise performance of a non-optimised -- compiler is severely affected diff --git a/compiler/utils/FastString.lhs b/compiler/utils/FastString.lhs index 5a78c0b59b..0396c02749 100644 --- a/compiler/utils/FastString.lhs +++ b/compiler/utils/FastString.lhs @@ -2,7 +2,7 @@ % (c) The University of Glasgow, 1997-2006 % \begin{code} -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE BangPatterns, CPP, DeriveDataTypeable, MagicHash, UnboxedTuples #-} {-# OPTIONS_GHC -O -funbox-strict-fields #-} -- We always optimise this, otherwise performance of a non-optimised -- compiler is severely affected diff --git a/compiler/utils/FastTypes.lhs b/compiler/utils/FastTypes.lhs index 0ef10ade56..36d8e4c4fd 100644 --- a/compiler/utils/FastTypes.lhs +++ 
b/compiler/utils/FastTypes.lhs @@ -4,6 +4,7 @@ \section{Fast integers, etc... booleans moved to FastBool for using panic} \begin{code} +{-# LANGUAGE CPP, MagicHash #-} --Even if the optimizer could handle boxed arithmetic equally well, --this helps automatically check the sources to make sure that diff --git a/compiler/utils/Fingerprint.hsc b/compiler/utils/Fingerprint.hsc index 9a55e385b3..464337b7a9 100644 --- a/compiler/utils/Fingerprint.hsc +++ b/compiler/utils/Fingerprint.hsc @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- ---------------------------------------------------------------------------- -- -- (c) The University of Glasgow 2006 diff --git a/compiler/utils/GraphBase.hs b/compiler/utils/GraphBase.hs index 8cb3acee71..2aa16ae99e 100644 --- a/compiler/utils/GraphBase.hs +++ b/compiler/utils/GraphBase.hs @@ -1,7 +1,7 @@ -- | Types for the general graph colorer. -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/utils/GraphPpr.hs b/compiler/utils/GraphPpr.hs index a896bbbf63..2682c7347e 100644 --- a/compiler/utils/GraphPpr.hs +++ b/compiler/utils/GraphPpr.hs @@ -1,7 +1,7 @@ -- | Pretty printing of graphs. -{-# OPTIONS -fno-warn-tabs #-} +{-# OPTIONS_GHC -fno-warn-tabs #-} -- The above warning supression flag is a temporary kludge. -- While working on this module you are encouraged to remove it and -- detab the module (please do the detabbing in a separate patch). See diff --git a/compiler/utils/IOEnv.hs b/compiler/utils/IOEnv.hs index 6885bbd127..1db15537c7 100644 --- a/compiler/utils/IOEnv.hs +++ b/compiler/utils/IOEnv.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE DeriveDataTypeable, UndecidableInstances #-} + -- -- (c) The University of Glasgow 2002-2006 -- @@ -7,7 +9,6 @@ -- as its in the IO monad, mutable references can be used -- for updating state. 
-- -{-# LANGUAGE UndecidableInstances #-} module IOEnv ( IOEnv, -- Instance of Monad diff --git a/compiler/utils/ListSetOps.lhs b/compiler/utils/ListSetOps.lhs index 5ad402d081..6247dc67f6 100644 --- a/compiler/utils/ListSetOps.lhs +++ b/compiler/utils/ListSetOps.lhs @@ -5,6 +5,7 @@ \section[ListSetOps]{Set-like operations on lists} \begin{code} +{-# LANGUAGE CPP #-} module ListSetOps ( unionLists, minusList, insertList, diff --git a/compiler/utils/Outputable.lhs b/compiler/utils/Outputable.lhs index 85d3d03557..e32261de65 100644 --- a/compiler/utils/Outputable.lhs +++ b/compiler/utils/Outputable.lhs @@ -22,11 +22,12 @@ module Outputable ( char, text, ftext, ptext, ztext, int, intWithCommas, integer, float, double, rational, - parens, cparen, brackets, braces, quotes, quote, + parens, cparen, brackets, braces, quotes, quote, doubleQuotes, angleBrackets, paBrackets, - semi, comma, colon, dcolon, space, equals, dot, arrow, darrow, + semi, comma, colon, dcolon, space, equals, dot, + arrow, larrow, darrow, arrowt, larrowt, arrowtt, larrowtt, lparen, rparen, lbrack, rbrack, lbrace, rbrace, underscore, - blankLine, + blankLine, forAllLit, (<>), (<+>), hcat, hsep, ($$), ($+$), vcat, sep, cat, @@ -73,7 +74,7 @@ module Outputable ( import {-# SOURCE #-} DynFlags( DynFlags, targetPlatform, pprUserLength, pprCols, - useUnicodeQuotes, + useUnicode, useUnicodeSyntax, unsafeGlobalDynFlags ) import {-# SOURCE #-} Module( Module, ModuleName, moduleName ) import {-# SOURCE #-} OccName( OccName ) @@ -458,7 +459,7 @@ cparen b d = SDoc $ Pretty.cparen b . runSDoc d -- so that we don't get `foo''. Instead we just have foo'. quotes d = sdocWithDynFlags $ \dflags -> - if useUnicodeQuotes dflags + if useUnicode dflags then char '‘' <> d <> char '’' else SDoc $ \sty -> let pp_d = runSDoc d sty @@ -468,13 +469,19 @@ quotes d = ('\'' : _, _) -> pp_d _other -> Pretty.quotes pp_d -semi, comma, colon, equals, space, dcolon, arrow, underscore, dot :: SDoc -darrow, lparen, rparen, lbrack, rbrack, lbrace, rbrace, blankLine :: SDoc +semi, comma, colon, equals, space, dcolon, underscore, dot :: SDoc +arrow, larrow, darrow, arrowt, larrowt, arrowtt, larrowtt :: SDoc +lparen, rparen, lbrack, rbrack, lbrace, rbrace, blankLine :: SDoc blankLine = docToSDoc $ Pretty.ptext (sLit "") -dcolon = docToSDoc $ Pretty.ptext (sLit "::") -arrow = docToSDoc $ Pretty.ptext (sLit "->") -darrow = docToSDoc $ Pretty.ptext (sLit "=>") +dcolon = unicodeSyntax (char '∷') (docToSDoc $ Pretty.ptext (sLit "::")) +arrow = unicodeSyntax (char '→') (docToSDoc $ Pretty.ptext (sLit "->")) +larrow = unicodeSyntax (char '←') (docToSDoc $ Pretty.ptext (sLit "<-")) +darrow = unicodeSyntax (char '⇒') (docToSDoc $ Pretty.ptext (sLit "=>")) +arrowt = unicodeSyntax (char '↣') (docToSDoc $ Pretty.ptext (sLit ">-")) +larrowt = unicodeSyntax (char '↢') (docToSDoc $ Pretty.ptext (sLit "-<")) +arrowtt = unicodeSyntax (char '⤜') (docToSDoc $ Pretty.ptext (sLit ">>-")) +larrowtt = unicodeSyntax (char '⤛') (docToSDoc $ Pretty.ptext (sLit "-<<")) semi = docToSDoc $ Pretty.semi comma = docToSDoc $ Pretty.comma colon = docToSDoc $ Pretty.colon @@ -489,6 +496,15 @@ rbrack = docToSDoc $ Pretty.rbrack lbrace = docToSDoc $ Pretty.lbrace rbrace = docToSDoc $ Pretty.rbrace +forAllLit :: SDoc +forAllLit = unicodeSyntax (char '∀') (ptext (sLit "forall")) + +unicodeSyntax :: SDoc -> SDoc -> SDoc +unicodeSyntax unicode plain = sdocWithDynFlags $ \dflags -> + if useUnicode dflags && useUnicodeSyntax dflags + then unicode + else plain + nest :: Int -> SDoc -> SDoc -- ^ Indent 'SDoc' 
some specified amount (<>) :: SDoc -> SDoc -> SDoc diff --git a/compiler/utils/Pair.lhs b/compiler/utils/Pair.lhs index 9e847d6950..ca7c2a7f8e 100644 --- a/compiler/utils/Pair.lhs +++ b/compiler/utils/Pair.lhs @@ -3,6 +3,8 @@ A simple homogeneous pair type with useful Functor, Applicative, and Traversable instances. \begin{code} +{-# LANGUAGE CPP #-} + module Pair ( Pair(..), unPair, toPair, swap ) where #include "HsVersions.h" diff --git a/compiler/utils/Panic.lhs b/compiler/utils/Panic.lhs index fc04668ae1..583174b201 100644 --- a/compiler/utils/Panic.lhs +++ b/compiler/utils/Panic.lhs @@ -8,6 +8,8 @@ It's hard to put these functions anywhere else without causing some unnecessary loops in the module dependency graph. \begin{code} +{-# LANGUAGE CPP, DeriveDataTypeable, ScopedTypeVariables #-} + module Panic ( GhcException(..), showGhcException, throwGhcException, throwGhcExceptionIO, diff --git a/compiler/utils/Pretty.lhs b/compiler/utils/Pretty.lhs index fb7fe2b7fb..f6a5a44e2e 100644 --- a/compiler/utils/Pretty.lhs +++ b/compiler/utils/Pretty.lhs @@ -152,7 +152,7 @@ Relative to John's original paper, there are the following new features: \begin{code} -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE BangPatterns, CPP, MagicHash #-} module Pretty ( Doc, -- Abstract diff --git a/compiler/utils/Serialized.hs b/compiler/utils/Serialized.hs index 902d2feea0..b1576a087f 100644 --- a/compiler/utils/Serialized.hs +++ b/compiler/utils/Serialized.hs @@ -1,9 +1,10 @@ +{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-} + -- -- (c) The University of Glasgow 2002-2006 -- -- Serialized values -{-# LANGUAGE ScopedTypeVariables #-} module Serialized ( -- * Main Serialized data type Serialized, diff --git a/compiler/utils/State.hs b/compiler/utils/State.hs index 0b6a285562..216034fdbf 100644 --- a/compiler/utils/State.hs +++ b/compiler/utils/State.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE UnboxedTuples #-} module State (module State, mapAccumLM {- XXX hack -}) where diff --git a/compiler/utils/StringBuffer.lhs b/compiler/utils/StringBuffer.lhs index 46cce5864d..a54f45ffff 100644 --- a/compiler/utils/StringBuffer.lhs +++ b/compiler/utils/StringBuffer.lhs @@ -6,7 +6,7 @@ Buffers for scanning string input stored in external arrays. \begin{code} -{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE BangPatterns, CPP, MagicHash, UnboxedTuples #-} {-# OPTIONS_GHC -O -funbox-strict-fields #-} -- We always optimise this, otherwise performance of a non-optimised -- compiler is severely affected diff --git a/compiler/utils/UniqFM.lhs b/compiler/utils/UniqFM.lhs index a13a17c412..d8e08f599a 100644 --- a/compiler/utils/UniqFM.lhs +++ b/compiler/utils/UniqFM.lhs @@ -20,9 +20,9 @@ and ``addToUFM\_C'' and ``Data.IntMap.insertWith'' differ in the order of arguments of combining function. 
\begin{code} -{-# LANGUAGE DeriveTraversable, GeneralizedNewtypeDeriving #-} +{-# LANGUAGE DeriveTraversable, DeriveDataTypeable, GeneralizedNewtypeDeriving #-} +{-# OPTIONS_GHC -Wall #-} -{-# OPTIONS -Wall #-} module UniqFM ( -- * Unique-keyed mappings UniqFM, -- abstract type @@ -60,9 +60,10 @@ module UniqFM ( eltsUFM, keysUFM, splitUFM, ufmToSet_Directly, ufmToList, - joinUFM + joinUFM, pprUniqFM ) where +import FastString import Unique ( Uniquable(..), Unique, getKey ) import Outputable @@ -319,5 +320,11 @@ joinUFM eltJoin l (OldFact old) (NewFact new) = foldUFM_Directly add (NoChange, \begin{code} instance Outputable a => Outputable (UniqFM a) where - ppr ufm = ppr (ufmToList ufm) + ppr ufm = pprUniqFM ppr ufm + +pprUniqFM :: (a -> SDoc) -> UniqFM a -> SDoc +pprUniqFM ppr_elt ufm + = brackets $ fsep $ punctuate comma $ + [ ppr uq <+> ptext (sLit ":->") <+> ppr_elt elt + | (uq, elt) <- ufmToList ufm ] \end{code} diff --git a/compiler/utils/Util.lhs b/compiler/utils/Util.lhs index 5c82c757aa..2dcc73fd89 100644 --- a/compiler/utils/Util.lhs +++ b/compiler/utils/Util.lhs @@ -3,6 +3,7 @@ % \begin{code} +{-# LANGUAGE CPP #-} -- | Highly random utility functions -- @@ -46,7 +47,7 @@ module Util ( nTimes, -- * Sorting - sortWith, minWith, + sortWith, minWith, nubSort, -- * Comparisons isEqual, eqListBy, eqMaybeBy, @@ -125,6 +126,7 @@ import Data.Ord ( comparing ) import Data.Bits import Data.Word import qualified Data.IntMap as IM +import qualified Data.Set as Set import Data.Time #if __GLASGOW_HASKELL__ < 705 @@ -489,6 +491,9 @@ sortWith get_key xs = sortBy (comparing get_key) xs minWith :: Ord b => (a -> b) -> [a] -> a minWith get_key xs = ASSERT( not (null xs) ) head (sortWith get_key xs) + +nubSort :: Ord a => [a] -> [a] +nubSort = Set.toAscList . Set.fromList \end{code} %************************************************************************ diff --git a/compiler/vectorise/Vectorise.hs b/compiler/vectorise/Vectorise.hs index 012ae37039..38bd55482a 100644 --- a/compiler/vectorise/Vectorise.hs +++ b/compiler/vectorise/Vectorise.hs @@ -351,6 +351,6 @@ tryConvert var vect_var rhs = fromVect (idType var) (Var vect_var) `orElseErrV` do - { emitVt " Could NOT call vectorised from original version" $ ppr var + { emitVt " Could NOT call vectorised from original version" $ ppr var <+> dcolon <+> ppr (idType var) ; return rhs } diff --git a/compiler/vectorise/Vectorise/Exp.hs b/compiler/vectorise/Vectorise/Exp.hs index fb0c148610..6adb9ec435 100644 --- a/compiler/vectorise/Vectorise/Exp.hs +++ b/compiler/vectorise/Vectorise/Exp.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE TupleSections #-} +{-# LANGUAGE CPP, TupleSections #-} -- |Vectorisation of expressions. 
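The Util.lhs hunk a few hunks above adds a new nubSort helper that sorts and removes duplicates in one pass by going through Data.Set. A minimal, self-contained usage sketch (illustrative only, not part of this patch; the standalone Main wrapper is hypothetical):

import qualified Data.Set as Set

-- Same shape as the nubSort added to compiler/utils/Util.lhs above:
-- build a Set (discarding duplicates), then read it back in ascending order.
nubSort :: Ord a => [a] -> [a]
nubSort = Set.toAscList . Set.fromList

main :: IO ()
main = print (nubSort [3, 1, 2, 3, 1 :: Int])   -- prints [1,2,3]

Going through a Set keeps the cost at O(n log n); unlike nub followed by a sort, it never performs nub's quadratic pairwise comparisons.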
diff --git a/compiler/vectorise/Vectorise/Monad/InstEnv.hs b/compiler/vectorise/Vectorise/Monad/InstEnv.hs index 84b29ceb61..a97f319b4f 100644 --- a/compiler/vectorise/Vectorise/Monad/InstEnv.hs +++ b/compiler/vectorise/Vectorise/Monad/InstEnv.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + module Vectorise.Monad.InstEnv ( existsInst , lookupInst diff --git a/compiler/vectorise/Vectorise/Monad/Naming.hs b/compiler/vectorise/Vectorise/Monad/Naming.hs index def1ffa58c..b53324012f 100644 --- a/compiler/vectorise/Vectorise/Monad/Naming.hs +++ b/compiler/vectorise/Vectorise/Monad/Naming.hs @@ -24,6 +24,7 @@ import Name import SrcLoc import MkId import Id +import IdInfo( IdDetails(VanillaId) ) import FastString import Control.Monad @@ -67,7 +68,7 @@ mkVectId :: Id -> Type -> VM Id mkVectId id ty = do { name <- mkLocalisedName mkVectOcc (getName id) ; let id' | isDFunId id = MkId.mkDictFunId name tvs theta cls tys - | isExportedId id = Id.mkExportedLocalId name ty + | isExportedId id = Id.mkExportedLocalId VanillaId name ty | otherwise = Id.mkLocalId name ty ; return id' } @@ -91,8 +92,8 @@ newExportedVar occ_name ty u <- liftDs newUnique let name = mkExternalName u mod occ_name noSrcSpan - - return $ Id.mkExportedLocalId name ty + + return $ Id.mkExportedLocalId VanillaId name ty -- |Make a fresh local variable with the given type. -- The variable's name is formed using the given string as the prefix. diff --git a/compiler/vectorise/Vectorise/Type/Env.hs b/compiler/vectorise/Vectorise/Type/Env.hs index 34008efbbd..6ee5ca6cd9 100644 --- a/compiler/vectorise/Vectorise/Type/Env.hs +++ b/compiler/vectorise/Vectorise/Type/Env.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + -- Vectorise a modules type and class declarations. -- -- This produces new type constructors and family instances top be included in the module toplevel diff --git a/compiler/vectorise/Vectorise/Type/TyConDecl.hs b/compiler/vectorise/Vectorise/Type/TyConDecl.hs index a8159b09f4..37a07f710d 100644 --- a/compiler/vectorise/Vectorise/Type/TyConDecl.hs +++ b/compiler/vectorise/Vectorise/Type/TyConDecl.hs @@ -59,7 +59,6 @@ vectTyConDecl tycon name' -- NB: 'buildClass' attaches new quantifiers and dictionaries to the method types ; cls' <- liftDs $ buildClass - False -- include unfoldings on dictionary selectors name' -- new name: "V:Class" (tyConTyVars tycon) -- keep original type vars (map (const Nominal) (tyConRoles tycon)) -- all role are N for safety diff --git a/compiler/vectorise/Vectorise/Utils/Base.hs b/compiler/vectorise/Vectorise/Utils/Base.hs index cb7b34e36a..7d4bae3046 100644 --- a/compiler/vectorise/Vectorise/Utils/Base.hs +++ b/compiler/vectorise/Vectorise/Utils/Base.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE CPP #-} + module Vectorise.Utils.Base ( voidType , newLocalVVar diff --git a/configure.ac b/configure.ac index 9f0edaa663..5fc5733bb4 100644 --- a/configure.ac +++ b/configure.ac @@ -34,7 +34,7 @@ fi AC_SUBST([CONFIGURE_ARGS], [$ac_configure_args]) dnl ---------------------------------------------------------- -dnl ** Find unixy sort and find commands, +dnl ** Find unixy sort and find commands, dnl ** which are needed by FP_SETUP_PROJECT_VERSION dnl ** Find find command (for Win32's benefit) @@ -91,7 +91,7 @@ AC_ARG_WITH([ghc], WithGhc="$GHC"]) dnl ** Tell the make system which OS we are using -dnl $OSTYPE is set by the operating system to "msys" or "cygwin" or something +dnl $OSTYPE is set by the operating system to "msys" or "cygwin" or something AC_SUBST(OSTYPE) AC_ARG_ENABLE(bootstrap-with-devel-snapshot, @@ -479,6 +479,61 @@ 
export CC MAYBE_OVERRIDE_STAGE0([gcc],[CC_STAGE0]) MAYBE_OVERRIDE_STAGE0([ar],[AR_STAGE0]) +dnl ** what cpp to use? +dnl -------------------------------------------------------------- +AC_ARG_WITH(hs-cpp, +[AC_HELP_STRING([--with-hs-cpp=ARG], + [Use ARG as the path to cpp [default=autodetect]])], +[ + if test "$HostOS" = "mingw32" + then + AC_MSG_WARN([Request to use $withval will be ignored]) + else + HaskellCPPCmd=$withval + fi +], +[ + HaskellCPPCmd=$WhatGccIsCalled +] +) + + + +dnl ** what cpp flags to use? +dnl ----------------------------------------------------------- +AC_ARG_WITH(hs-cpp-flags, + [AC_HELP_STRING([--with-hs-cpp-flags=ARG], + [Use ARG as the path to hs cpp [default=autodetect]])], + [ + if test "$HostOS" = "mingw32" + then + AC_MSG_WARN([Request to use $withval will be ignored]) + else + HaskellCPPArgs=$withval + fi + ], +[ + $HaskellCPPCmd -x c /dev/null -dM -E > conftest.txt 2>&1 + if grep "__clang__" conftest.txt >/dev/null 2>&1; then + HaskellCPPArgs="-E -undef -traditional -Wno-invalid-pp-token -Wno-unicode -Wno-trigraphs " + else + $HaskellCPPCmd -v > conftest.txt 2>&1 + if grep "gcc" conftest.txt >/dev/null 2>&1; then + HaskellCPPArgs="-E -undef -traditional " + else + $HaskellCPPCmd --version > conftest.txt 2>&1 + if grep "cpphs" conftest.txt >/dev/null 2>&1; then + HaskellCPPArgs="--cpp -traditional" + else + AC_MSG_WARN([configure can't recognize your CPP program, you may need to set --with-hs-cpp-flags=FLAGS explicitly]) + HaskellCPPArgs="" + fi + fi + fi + ] +) + + dnl ** Which ld to use? dnl -------------------------------------------------------------- FP_ARG_WITH_PATH_GNU_PROG([LD], [ld], [ld]) @@ -991,13 +1046,14 @@ echo ["\ Using $CompilerName : $WhatGccIsCalled which is version : $GccVersion Building a cross compiler : $CrossCompiling - - ld : $LdCmd - Happy : $HappyCmd ($HappyVersion) - Alex : $AlexCmd ($AlexVersion) - Perl : $PerlCmd - dblatex : $DblatexCmd - xsltproc : $XsltprocCmd + cpp : $HaskellCPPCmd + cpp-flags : $HaskellCPPArgs + ld : $LdCmd + Happy : $HappyCmd ($HappyVersion) + Alex : $AlexCmd ($AlexVersion) + Perl : $PerlCmd + dblatex : $DblatexCmd + xsltproc : $XsltprocCmd Using LLVM tools llc : $LlcCmd diff --git a/distrib/compare/Makefile b/distrib/compare/Makefile index f65c0419eb..49645783e2 100644 --- a/distrib/compare/Makefile +++ b/distrib/compare/Makefile @@ -2,7 +2,7 @@ GHC = ghc compare: *.hs - "$(GHC)" -O --make -Wall -Werror $@ + "$(GHC)" -O -XHaskell2010 --make -Wall -Werror $@ .PHONY: clean clean: diff --git a/distrib/compare/compare.hs b/distrib/compare/compare.hs index 81055c2826..8653e3f6aa 100644 --- a/distrib/compare/compare.hs +++ b/distrib/compare/compare.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE PatternGuards #-} - module Main (main) where import Control.Monad.State diff --git a/distrib/configure.ac.in b/distrib/configure.ac.in index ed91244d88..c7a8ead9b0 100644 --- a/distrib/configure.ac.in +++ b/distrib/configure.ac.in @@ -63,6 +63,65 @@ FIND_GCC([WhatGccIsCalled], [gcc], [gcc]) CC="$WhatGccIsCalled" export CC + +dnl ** what cpp to use? +dnl -------------------------------------------------------------- +AC_ARG_WITH(hs-cpp, +[AC_HELP_STRING([--with-hs-cpp=ARG], + [Use ARG as the path to cpp [default=autodetect]])], +[ + if test "$HostOS" = "mingw32" + then + AC_MSG_WARN([Request to use $withval will be ignored]) + else + HaskellCPPCmd=$withval + fi +], +[ + if test "$HostOS" != "mingw32" + then + HaskellCPPCmd=$WhatGccIsCalled + fi +] +) + + + +dnl ** what cpp flags to use? 
+dnl ----------------------------------------------------------- +AC_ARG_WITH(hs-cpp-flags, + [AC_HELP_STRING([--with-hs-cpp-flags=ARG], + [Use ARG as the path to hs cpp [default=autodetect]])], + [ + if test "$HostOS" = "mingw32" + then + AC_MSG_WARN([Request to use $withval will be ignored]) + else + HaskellCPPArgs=$withval + fi + ], +[ + $HaskellCPPCmd -x c /dev/null -dM -E > conftest.txt 2>&1 + if grep "__clang__" conftest.txt >/dev/null 2>&1; then + HaskellCPPArgs="-E -undef -traditional -Wno-invalid-pp-token -Wno-unicode -Wno-trigraphs " + else + $HaskellCPPCmd -v > conftest.txt 2>&1 + if grep "gcc" conftest.txt >/dev/null 2>&1; then + HaskellCPPArgs="-E -undef -traditional " + else + $HaskellCPPCmd --version > conftest.txt 2>&1 + if grep "cpphs" conftest.txt >/dev/null 2>&1; then + HaskellCPPArgs="--cpp -traditional" + else + AC_MSG_WARN([configure can't recognize your CPP program, you may need to set --with-hs-cpp-flags=FLAGS explicitly]) + HaskellCPPArgs="" + fi + fi + fi + ] +) + + dnl ** Which ld to use? dnl -------------------------------------------------------------- FP_ARG_WITH_PATH_GNU_PROG([LD], [ld], [ld]) diff --git a/distrib/remilestoning.pl b/distrib/remilestoning.pl index a544ddaf17..60a23af518 100644 --- a/distrib/remilestoning.pl +++ b/distrib/remilestoning.pl @@ -1,5 +1,6 @@ -#!/usr/bin/perl +#!/usr/bin/env perl +use warnings; use strict; use DBI; diff --git a/docs/backpack/.gitignore b/docs/backpack/.gitignore new file mode 100644 index 0000000000..c3eb46ecd6 --- /dev/null +++ b/docs/backpack/.gitignore @@ -0,0 +1,10 @@ +*.aux +*.bak +*.bbl +*.blg +*.dvi +*.fdb_latexmk +*.fls +*.log +*.synctex.gz +backpack-impl.pdf diff --git a/docs/backpack/Makefile b/docs/backpack/Makefile new file mode 100644 index 0000000000..0dd7a9dad5 --- /dev/null +++ b/docs/backpack/Makefile @@ -0,0 +1,2 @@ +backpack-impl.pdf: backpack-impl.tex + latexmk -pdf -latexoption=-halt-on-error -latexoption=-file-line-error -latexoption=-synctex=1 backpack-impl.tex && touch paper.dvi || ! 
rm -f $@ diff --git a/docs/backpack/arch.png b/docs/backpack/arch.png Binary files differnew file mode 100644 index 0000000000..d8b8fd21f9 --- /dev/null +++ b/docs/backpack/arch.png diff --git a/docs/backpack/backpack-impl.bib b/docs/backpack/backpack-impl.bib new file mode 100644 index 0000000000..6bda35a8ea --- /dev/null +++ b/docs/backpack/backpack-impl.bib @@ -0,0 +1,17 @@ +@inproceedings{Kilpatrick:2014:BRH:2535838.2535884, + author = {Kilpatrick, Scott and Dreyer, Derek and Peyton Jones, Simon and Marlow, Simon}, + title = {Backpack: Retrofitting Haskell with Interfaces}, + booktitle = {Proceedings of the 41st ACM SIGPLAN-SIGACT Symposium on Principles of Programming Languages}, + series = {POPL '14}, + year = {2014}, + isbn = {978-1-4503-2544-8}, + location = {San Diego, California, USA}, + pages = {19--31}, + numpages = {13}, + url = {http://doi.acm.org/10.1145/2535838.2535884}, + doi = {10.1145/2535838.2535884}, + acmid = {2535884}, + publisher = {ACM}, + address = {New York, NY, USA}, + keywords = {applicative instantiation, haskell modules, mixin modules, module systems, packages, recursive modules, separate modular development, type systems}, +} diff --git a/docs/backpack/backpack-impl.tex b/docs/backpack/backpack-impl.tex new file mode 100644 index 0000000000..b0b43ba87a --- /dev/null +++ b/docs/backpack/backpack-impl.tex @@ -0,0 +1,1861 @@ +\documentclass{article} + +\usepackage{graphicx} %[pdftex] OR [dvips] +\usepackage{fullpage} +\usepackage{wrapfig} +\usepackage{float} +\usepackage{titling} +\usepackage{hyperref} +\usepackage{tikz} +\usepackage{color} +\usetikzlibrary{arrows} +\usetikzlibrary{positioning} +\setlength{\droptitle}{-6em} + +\input{commands-new-new.tex} + +\newcommand{\nuAA}{\nu_\mathit{AA}} +\newcommand{\nuAB}{\nu_\mathit{AB}} +\newcommand{\nuGA}{\nu_\mathit{GA}} +\newcommand{\nuGB}{\nu_\mathit{GB}} +\newcommand{\betaPL}{\beta_\mathit{PL}} +\newcommand{\betaAA}{\beta_\mathit{AA}} +\newcommand{\betaAS}{\beta_\mathit{AS}} +\newcommand{\thinandalso}{\hspace{.45cm}} +\newcommand{\thinnerandalso}{\hspace{.38cm}} + +\input{commands-rebindings.tex} + +\newcommand{\ghcfile}[1]{\textsl{#1}} + +\title{Implementing Backpack} + +\begin{document} + +\maketitle + +The purpose of this document is to describe an implementation path +for Backpack in GHC\@. + +We start off by outlining the current architecture of GHC, ghc-pkg and Cabal, +which constitute the existing packaging system. We then state what our subgoals +are, since there are many similar sounding but different problems to solve. Next, +we describe the ``probably correct'' implementation plan, and finish off with +some open design questions. This is intended to be an evolving design document, +so please contribute! + +\tableofcontents + +\section{Current packaging architecture} + +The overall architecture is described in Figure~\ref{fig:arch}. + +\begin{figure}[H] + \center{\scalebox{0.8}{\includegraphics{arch.png}}} +\label{fig:arch}\caption{Architecture of GHC, ghc-pkg and Cabal. Green bits indicate additions from upcoming IHG work, red bits indicate additions from Backpack. Orange indicates a Haskell library.} +\end{figure} + +Here, arrows indicate dependencies from one component to another. Color +coding is as follows: orange components are libaries, green components +are to be added with the IHG work, red components are to be added with +Backpack. 
(Thus, black and orange can be considered the current system.) + +\subsection{Installed package database} + +Starting from the bottom, we have the \emph{installed package database} +(actually a collection of such databases), which stores information +about what packages have been installed and are thus available to be +compiled against. There is both a global database (for the system +administrator) and a local database (for end users), which can be +updated independently. One way to think about the package database +is as a \emph{cache of object code}. In principle, one could compile +any piece of code by repeatedly recompiling all of its dependencies; +the installed package database describes when this can be bypassed. + +\begin{figure}[H] + \center{\scalebox{0.8}{\includegraphics{pkgdb.png}}} +\label{fig:pkgdb}\caption{Anatomy of a package database.} +\end{figure} + +In Figure~\ref{fig:pkgdb}, we show the structure of a package database. +The installed packages are created from a Cabal file through the process +of dependency resolution and compilation. In database terms, the primary key +of a package database is the InstalledPackageId +(Figure~\ref{fig:current-pkgid}). This ID uniquely identifies an +instance of an installed package. The PackageId omits the ABI hash and +is used to qualify linker exported symbols: the current value of this +parameter is communicated to GHC using the \verb|-package-id| flag. + +In principle, packages with different PackageIds should be linkable +together in the same compiled program, whereas packages with the same +PackageId are not (even if they have different InstalledPackageIds). In +practice, GHC is currently only able to select one version of a package, +as it clears out all old versions of the package in +\ghcfile{compiler/main/Package.lhs}:applyPackageFlag. + +\begin{figure} + \center{\begin{tabular}{r l} + PackageId & package name, package version \\ + InstalledPackageId & PackageId, ABI hash \\ + \end{tabular}} +\label{fig:current-pkgid}\caption{Current structure of package identifiers.} +\end{figure} + +The database entry itself contains the information from the installed package ID, +as well as information such as what dependencies it was linked against, where +its compiled code and interface files live, its compilation flags, what modules +it exposes, etc. Much of this information is only relevant to Cabal; GHC +uses a subset of the information in the package database. + +\subsection{GHC} + +The two programs which access the package database directly are GHC +proper (for compilation) and ghc-pkg (which is a general purpose +command line tool for manipulating the database). GHC relies on +the package database in the following ways: + +\begin{itemize} + \item It imports the local and global package databases into + its runtime database, and applies modifications to the exposed + and trusted status of the entries via the flags \verb|-package| + and others (\ghcfile{compiler/main/Packages.lhs}). The internal + package state can be seen at \verb|-v4| or higher. + \item It uses this package database to find the location of module + interfaces when it attempts to load the module info of an external + module (\ghcfile{compiler/iface/LoadIface.hs}). +\end{itemize} + +GHC itself performs a type checking phase, which generates an interface +file representing the module (so that later invocations of GHC can load the type +of a module), and then after compilation produces object files and linked archives +for programs to use.
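To make the identifier structure in Figure~\ref{fig:current-pkgid} concrete, here is a small sketch in Haskell. These are illustrative record types only, not the actual GHC or Cabal definitions; every field name here is invented:

-- Hypothetical model of "Current structure of package identifiers".
data PackageId = PackageId
  { pkgName    :: String   -- e.g. "containers"
  , pkgVersion :: String   -- e.g. "0.9"
  } deriving (Eq, Show)

data InstalledPackageId = InstalledPackageId
  { ipiPackageId :: PackageId
  , ipiAbiHash   :: String  -- e.g. "ABCD"; distinguishes otherwise identical builds
  } deriving (Eq, Show)

On this reading, two database entries may share a PackageId (same name and version, different ABI hashes) but never an InstalledPackageId, which is why the latter serves as the primary key of the database.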
+ +\paragraph{Original names} Original names are an important design pattern +in GHC\@. +Sometimes, a name can be exposed in an hi file even if its module +wasn't exposed. Here is an example (compiled in package R): + +\begin{verbatim} +module X where + import Internal (f) + g = f + +module Internal where + import Internal.Total (f) +\end{verbatim} + +Then in X.hi: + +\begin{verbatim} +g = <R.id, Internal.Total, f> (this is the original name) +\end{verbatim} + +(The reason we refer to the package as R.id is because it's the +full package ID, and not just R). + +\subsection{hs-boot} + +\verb|hs-boot| is a special mechanism used to support recursive linking +of modules within a package, today. Suppose I have a recursive module +dependency between modules A and B. I break one of\ldots + +(ToDo: describe how hs-boot mechanism works) + +\subsection{Cabal} + +Cabal is the build system for GHC; we can think of it as parsing a Cabal +file describing a package, and then making (possibly multiple) +invocations to GHC to perform the appropriate compilation. What +information does Cabal pass on to GHC\@? One can get an idea for this by +looking at a prototypical command line that Cabal invokes GHC with: + +\begin{verbatim} +ghc --make + -package-name myapp-0.1 + -hide-all-packages + -package-id containers-0.9-ABCD + Module1 Module2 +\end{verbatim} + +There are a few things going on here. First, Cabal has to tell GHC +the name of the package it's compiling (otherwise, GHC can't appropriately +generate the symbols that other code referring to this package expects). +There are also a number of commands which configure its in-memory view of +the package database (GHC's view of the package database may not directly +correspond to what is on disk). There's also an optimization here: in principle, +GHC can compile each module one-by-one, but instead we use the \verb|--make| flag +because this allows GHC to reuse some data structures, resulting in a nontrivial +speedup. + +(ToDo: describe cabal-install/sandbox) + +\section{Goals} + +Here are some of the high-level goals which motivate our improvements to +the module system. + +\begin{itemize} + \item Solve \emph{Cabal hell}, a situation which occurs when conflicting + version ranges on a wide range of dependencies lead to a situation + where it is impossible to satisfy the constraints. We're seeking + to solve this problem in two ways: first, we want to support + multiple instances of containers-2.9 in the database which are + compiled with different dependencies (and even link them + together), and second, we want to abolish (often inaccurate) + version ranges and move to a regime where packages depend on + signatures. Version ranges may still be used to indicate important + semantic changes (e.g., bugs or bad behavior on the part of package + authors), but they should no longer drive dependency resolution + and often only be recorded after the fact. + + \item Support \emph{hermetic builds with sharing}. A hermetic build + system is one which simulates rebuilding every package whenever + it is built; on the other hand, actually rebuilding every time + is extremely inefficient (but is what happens in practice with + Cabal sandboxes). We seek to solve this problem with the IHG work, + by allowing multiple instances of a package in the database, where + the only difference is compilation parameters. We don't care + about being able to link these together in a single program.
+ + \item Support \emph{module-level pluggability} as an alternative to + existing (poor) usage of type classes. The canonical example are + strings, where a library might want to either use the convenient + but inefficient native strings, or the efficient packed Text data + type, but would really like to avoid having to say \verb|StringLike s => ...| + in all of their type signatures. While we do not plan on supporting + separate compilation, Cabal should understand how to automatically + recompile these ``indefinite'' packages when they are instantiated + with a new plugin. + + \item Support \emph{separate modular development}, where a library and + an application using the library can be developed and type-checked + separately, intermediated by an interface. The canonical example + here is the \verb|ghc-api|, which is a large, complex API that + the library writers (GHC developers) frequently change---the ability + for downstream projects to say, ``Here is the API I'm relying on'' + without requiring these projects to actually be built would greatly + assist in keeping the API stable. This is applicable in + the pluggability example as well, where we want to ensure that all + of the $M \times N$ configurations of libraries versus applications + type check, by only running the typechecker $M + N$ times. A closely + related concern is related toolchain support for extracting a signature + from an existing implementation, as current Haskell culture is averse + to explicitly writing separate signature files. + + \item Subsume existing support for \emph{mutually recursive modules}, + without the double vision problem. +\end{itemize} + +A \emph{non-goal} is to allow users to upgrade upstream libraries +without recompiling downstream. This is an ABI concern and we're not +going to worry about it. + +\section{Module identities} + +We are going to implement module identities slightly differently from +the way it was described from the Backpack paper. Motivated by +implementation considerations, we coarsen the +granularity of dependency tracking, so that it's not necessary to +calculate the transitive dependencies of every module: we only do it per +package. In this next section, we recapitulate Section 3.1 of the +original Backpack paper, but with our new granularity. Comparisons to +original Backpack will be recorded in footnotes. Then we more generally +discuss the differing points of the design space these two occupy, and +how this affects what programs typecheck and how things are actually +implemented. + +\subsection{The new scheme} + +\begin{wrapfigure}{R}{0.5\textwidth} +\begin{myfig} +\[ +\begin{array}{@{}lr@{\;}c@{\;}l@{}} + \text{Package Names (\texttt{PkgName})} & P &\in& \mathit{PkgNames} \\ + \text{Module Path Names (\texttt{ModName})} & p &\in& \mathit{ModPaths} \\ + \text{Module Identity Vars} & \alpha,\beta &\in& \mathit{IdentVars} \\ + \text{Package Key (\texttt{PackageId})} & \K &::=& P(\vec{p\mapsto\nu}) \\ + \text{Module Identities (\texttt{Module})} & \nu &::=& + \alpha ~|~ + \mu\alpha.\K\colon\! 
p \\ + \text{Module Identity Substs} & \phi,\theta &::=& + \{\vec{\alpha \coloneqq \nu}\} \\ +\end{array} +\] +\caption{Module Identities} +\label{fig:mod-idents} +\end{myfig} +\end{wrapfigure} + +Physical module +identities $\nu$, referred to in GHC as \emph{original names}, are either (1) \emph{variables} $\alpha$, which are +used to represent holes; (2) a concrete module $p$ defined in package +$P$, with holes instantiated with other module identities (might be +empty)\footnote{In Paper Backpack, we would refer to just $P$:$p$ as the identity +constructor. However, we've written the subterms specifically next to $P$ to highlight the semantic difference of these terms.}; or (3) \emph{recursive} module identities, defined via +$\mu$-constructors.\footnote{Actually, all concrete modules implicitly + define a $\mu$-constructor, and we plan on using de Bruijn indices + instead of variables in this case, a locally nameless +representation.} + +As in traditional Haskell, every package contains a number of module +files at some module path $p$; within a package these paths are +guaranteed to be unique.\footnote{In Paper Backpack, the module expressions themselves are used to refer to globally unique identifiers for each literal. This makes the metatheory simpler, but for implementation purposes it is convenient to conflate the \emph{original} module path that a module is defined at with its physical identity.} When we write inline module definitions, we assume +that they are immediately assigned to a module path $p$ which is incorporated +into their identity. A module identity $\nu$ simply augments this +with subterms $\vec{p\mapsto\nu}$ representing how \emph{all} holes in the package $P$ +were instantiated.\footnote{In Paper Backpack, we do not distinguish between holes/non-holes, and we consider all imports of the \emph{module}, not the package.} This naming is stable because the current Backpack surface syntax does not allow a logical path in a package +to be undefined. A package key is $P(\vec{p\mapsto\nu})$; it is the entity +that today is internally referred to in GHC as \texttt{PackageId}. + +Here is the very first example from +Section 2 of the original Backpack paper, \pname{ab-1}: + +\begin{example} +\Pdef{ab-1}{ + \Pmod{A}{x = True} + \Pmod{B}{\Mimp{A}; y = not x} + % \Pmodd{C}{\mname{A}} +} +\end{example} + +The identities of \m{A} and \m{B} are +\pname{ab-1}:\mname{A} and \pname{ab-1}:\mname{B}, respectively.\footnote{In Paper Backpack, the identity for \mname{B} records its import of \mname{A}, but since it is definite, this is strictly redundant.} In a package with holes, each +hole gets a fresh variable (within the package definition) as its +identity, and all of the holes associated with package $P$ are recorded. Consider \pname{abcd-holes-1}: + +\begin{example} +\Pdef{abcd-holes-1}{ + \Psig{A}{x :: Bool} % chktex 26 + \Psig{B}{y :: Bool} % chktex 26 + \Pmod{C}{x = False} + \Pmodbig{D}{ + \Mimpq{A}\\ + \Mimpq{C}\\ + % \Mexp{\m{A}.x, z}\\ + z = \m{A}.x \&\& \m{C}.x + } +} +\end{example} + +The identities of the four modules +are, in order, $\alpha_a$, $\alpha_b$, $\pname{abcd-holes-1}(\alpha_a,\alpha_b)$:\mname{C}, and +$\pname{abcd-holes-1}(\alpha_a,\alpha_b)$:\mname{D}.\footnote{In Paper Backpack, the granularity is at the module level, so the subterms of \mname{C} and \mname{D} can differ.} + +Consider now the module identities in the \m{Graph} instantiations in +\pname{multinst}, shown in Figure 2 of the original Backpack paper (we have +omitted it for brevity). 
+In the definition of \pname{structures}, assume that the variables for +\m{Prelude} and \m{Array} are $\alpha_P$ and $\alpha_A$ respectively. +The identity of \m{Graph} is $\pname{structures}(\alpha_P, \alpha_A)$:\m{Graph}. Similarly, the identities of the two array implementations +are $\nu_{AA} = \pname{arrays-a}(\alpha_P)$:\m{Array} and +$\nu_{AB} = \pname{arrays-b}(\alpha_P)$:\m{Array}.\footnote{Notice that the subterms coincide with Paper Backpack! A sign that module level granularity is not necessary for many use-cases.} + +The package \pname{graph-a} is more interesting because it +\emph{links} the packages \pname{arrays-a} and \pname{structures} +together, with the implementation of \m{Array} from \pname{arrays-a} +\emph{instantiating} the hole \m{Array} from \pname{structures}. This +linking is reflected in the identity of the \m{Graph} module in +\pname{graph-a}: whereas in \pname{structures} it was $\nu_G = +\pname{structures}(\alpha_P, \alpha_A)$:\m{Graph}, in \pname{graph-a} it is +$\nu_{GA} = \nu_G[\nu_{AA}/\alpha_A] = \pname{structures}(\alpha_P, \nu_{AA})$:\m{Graph}. Similarly, the identity of \m{Graph} in +\pname{graph-b} is $\nu_{GB} = \nu_G[\nu_{AB}/\alpha_A] = +\pname{structures}(\alpha_P, \nu_{AB})$:\m{Graph}. Thus, linking consists +of substituting the variable identity of a hole by the concrete +identity of the module filling that hole. + +Lastly, \pname{multinst} makes use of both of these \m{Graph} +modules, under the aliases \m{GA} and \m{GB}, respectively. +Consequently, in the \m{Client} module, \code{\m{GA}.G} and +\code{\m{GB}.G} will be correctly viewed as distinct types since they +originate in modules with distinct identities. + +As \pname{multinst} illustrates, module identities effectively encode +dependency graphs at the package level.\footnote{In Paper Backpack, module identities +encode dependency graphs at the module level. In both cases, however, what is being +depended on is always a module.} Like in Paper Backpack, we have an \emph{applicative} +semantics of instantiation, and the applicativity example in Figure 3 of the +Backpack paper still type checks. However, because we are operating at a coarser +granularity, modules may have spurious dependencies on holes that they don't +actually depend on, which means less type equalities may hold. + +Shaping proceeds in the same way as in Paper Backpack, except that the +shaping judgment must also accept the package key +$P(\vec{p\mapsto\alpha})$ so we can create identifiers with +\textsf{mkident}. This implies we must know ahead of time what the holes +of a package are. + +\subsection{Commentary} + +\begin{wrapfigure}{r}{0.4\textwidth} +\begin{verbatim} +package p where + A :: ... + -- B does not import A + B = [ data T = T; f T = T ] + C = [ import A; ... ] +package q where + A1 = [ ... ] + A2 = [ ... ] + include p (A as A1, B as B1) + include p (A as A2, B as B2) + Main = [ + import qualified B1 + import qualified B2 + y = B1.f B2.T + ] +\end{verbatim} +\caption{The difference between package and module granularity}\label{fig:granularity} +\end{wrapfigure} + +\paragraph{The sliding scale of granularity} The scheme we have described +here is coarser-grained than Backpack's, and thus does not accept as many +programs. Figure~\ref{fig:granularity} is a minimal example which doesn't type +check in our new scheme. +In Paper Backpack, the physical module identities of \m{B1} and \m{B2} are +both $\K_B$, and so \m{Main} typechecks. 
However, in GHC Backpack,
+we assign module identities $\pname{p(q:A1)}$:$\m{B}$ and $\pname{p(q:A2)}$:$\m{B}$,
+which are not equal.
+
+Does this mean that Paper Backpack's form of granularity is \emph{better?}
+Not necessarily! First, we can always split packages into further subpackages
+which better reflect the internal hole dependencies, so it is always possible
+to rewrite a program to make it typecheck---just with more packages. Second,
+Paper Backpack's granularity is only one on a sliding scale; it is by no means
+the most extreme! You could imagine a version of Backpack where we desugared
+each \emph{expression} into a separate module.\footnote{Indeed, there are some
+languages which take this stance. (See Bob Harper's work.)} Then, even if \m{B} imported
+\m{A}, as long as it didn't use any types from \m{A} in the definition of
+\verb|T|, we would still consider the types equal. Finally, to understand
+what the physical module identity of a module is, in Paper Backpack I must
+understand the internal dependency structure of the modules in a package. This
+is a lot of work for the developer to think about; a less granular model
+is also easier to reason about.
+
+Nevertheless, finer granularity can be desirable from an end-user perspective.
+Usually, these circumstances arise when library-writers are forced to split their
+components into many separate packages, when they would much rather have written
+a single package. For example, if I define a data type in my library, and would
+like to define a \verb|Lens| instance for it, I would create a new package just
+for the instance, in order to avoid saddling users who aren't interested in lenses
+with an extra dependency. Another example is test suites, which have dependencies
+on various test frameworks that a user won't care about if they are not planning
+on testing the code. (Cabal has a special case for this, allowing the user
+to write effectively multiple packages in a single Cabal file.)
+
+\paragraph{Cabal dependency resolution} Currently, when we compile a Cabal
+package, Cabal goes ahead and resolves \verb|build-depends| entries with actual
+implementations, which we compile against. A planned addition to the package key,
+independent of Backpack, is to record the transitive dependency tree selected
+during this dependency resolution process, so that we can install \pname{libfoo-1.0}
+twice compiled against different versions of its dependencies.
+What is the relationship between this transitive dependency tree of \emph{packages}
+and the subterms of our package identities, which are \emph{modules}? Does one
+subsume the other? In fact, these are separate mechanisms---two levels of indirection,
+so to speak.
+
+To illustrate, suppose I write a Cabal file with \verb|build-depends: foobar|. A reasonable assumption is that this translates into a
+Backpack package which has \verb|include foobar|. However, this is not
+actually a Paper Backpack package: Cabal's dependency solver has to
+rewrite all of these package references into versioned references
+\verb|include foobar-0.1|. For example, this is a pre-package:
+
+\begin{verbatim}
+package foo where
+    include bar
+\end{verbatim}
+
+and this is a Paper Backpack package:
+
+\begin{verbatim}
+package foo-0.3[bar-0.1[baz-0.2]] where
+    include bar-0.1[baz-0.2]
+\end{verbatim}
+
+This tree is very similar to the one tracking dependencies for holes,
+but we must record this tree \emph{even} when our package has no holes.
+As a final example, the full module +identity of \m{B1} in Figure~\ref{fig:granularity} may actually be $\pname{p-0.9(q-1.0[p-0.9]:A1)}$:\m{B}. + +\subsection{Implementation} + +In GHC's current packaging system, a single package compiles into a +single entry in the installed package database, indexed by the package +key. This property is preserved by package-level granularity, as we +assign the same package key to all modules. Package keys provide an +easy mechanism for sharing to occur: when an indefinite package is fully +instantiated, we can check if we already have its package key installed +in the installed package database. (At the end of this section, we'll +briefly discuss some of the problems actually implementing Paper Backpack.) +It is also important to note that we are \emph{willing to duplicate code}; +processes like this already happen in other parts of the compiler +(such as inlining.) + +However, there is one major challenge for this scheme, related to +\emph{dynamically linked libraries}. Consider this example: + +\begin{verbatim} +package p where + A :: [ ... ] + B = [ ... ] +package q where + A = [ ... ] + include p + C = [ import A; import B; ... ] +\end{verbatim} + +When we compile package \pname{q}, we end up compiling package keys +\pname{q} and $\pname{p(q:A)}$, which turn into their respective libraries +in the installed package database. When we need to statically link against +these libraries, it doesn't matter that \pname{q} refers to code in $\pname{p(q:A)}$, +and vice versa: the linker is building an executable and can resolve all +of the symbols in one go. However, when the libraries in question are +dynamic libraries \verb|libHSq.so| and \verb|libHSp(q:A).so|, this is now +a \emph{circular dependency} between the two libraries, and most dynamic +linkers will not be able to load either of these libraries. + +Our plan is to break the circularity by inlining the entire module \m{A} +into $\pname{p(q:A)}$ when it is necessary (perhaps in other situations, +\m{A} will be in another package and no inlining is necessary). The code +in both situations should be exactly the same, so it should be completely +permissible to consider them type-equal. + +\paragraph{Relaxing package selection restrictions} As mentioned +previously, GHC is unable to select multiple packages with the same +package name (but different package keys). This restriction needs to be +lifted. We should add a new flag \verb|-package-key|. GHC also knows +about version numbers and will mask out old versions of a library when +you make another version visible; this behavior needs to be modified. + +\paragraph{Linker symbols} As we increase the amount of information in +PackageId, it's important to be careful about the length of these IDs, +as they are used for exported linker symbols (e.g. +\verb|base_TextziReadziLex_zdwvalDig_info|). Very long symbol names +hurt compile and link time, object file sizes, GHCi startup time, +dynamic linking, and make gdb hard to use. As such, the current plan is +to do away with full package names and versions, and instead use just a +base-62 encoded hash, perhaps with the first four characters of the package +name for user-friendliness. + +\paragraph{Wired-in names} One annoying thing to remember is that GHC +has wired-in names, which refer to packages without any version. These +are specially treated during compilation so that they are built using +a package key that has no version or dependency information. 
One approach
+is to continue treating these libraries specially; alternately we can
+maintain a fixed table from these wired names to
+package IDs.
+
+\section{Shapeless Backpack}\label{sec:simplifying-backpack}
+
+Backpack as currently defined always requires a \emph{shaping} pass,
+which calculates the shapes of all modules defined in a package.
+The shaping pass is critical to the solution of the double-vision problem
+in recursive module linking, but it also presents a number of unpalatable
+implementation problems:
+
+\begin{itemize}
+
+    \item \emph{Shaping is a lot of work.} A module shape specifies the
+        provenance of all data types and identifiers defined by a
+        module. To calculate this, we must preprocess and parse all
+        modules, even before we do the type-checking pass. (Fortunately,
+        shaping doesn't require a full parse of a module, only enough
+        to get identifiers. However, it does have to understand import
+        statements at the same level of detail as GHC's renamer.)
+
+    \item \emph{Shaping must be done upfront.} In the current Backpack
+        design, all shapes must be computed before any typechecking can
+        occur. While performing the shaping pass upfront is necessary
+        in order to solve the double vision problem (where a module
+        identity may be influenced by later definitions), it means
+        that GHC must first do a shaping pass, and then revisit every module and
+        compile them proper. Nor is it (easily) possible to skip the
+        shaping pass when it is unnecessary, as one might expect to be
+        the case in the absence of mutual recursion. Shaping is not
+        a ``pay as you go'' language feature.
+
+    \item \emph{GHC can't compile all programs shaping accepts.} Shaping
+        accepts programs that GHC, with its current hs-boot mechanism, cannot
+        compile. In particular, GHC requires that any data type or function
+        in a signature actually be \emph{defined} in the module corresponding
+        to that file (i.e., an original name can be assigned to these entities
+        immediately.) Shaping permits unrestricted exports to implement
+        modules; this shows up in the formalism as $\beta$ module variables.
+
+    \item \emph{Shaping encourages inefficient program organization.}
+        Shaping is designed to enable mutually recursive modules, but as
+        currently implemented, mutual recursion is less efficient than
+        code without recursive dependencies. Programmers should avoid
+        this code organization, except when it is absolutely necessary.
+
+    \item \emph{GHC is architecturally ill-suited for directly
+        implementing shaping.} Shaping requires that GHC's internal
+        concept of an ``original name'' be extended to accommodate
+        module variables. This is an extremely invasive change to all
+        aspects of GHC, since the original names assumption is baked
+        quite deeply into the compiler. Plausible implementations of
+        shaping require all these variables to be skolemized outside
+        of GHC\@.
+
+\end{itemize}
+
+To be clear, the shaping pass is fundamentally necessary for some
+Backpack packages. Here is the example which convinced Simon:
+
+\begin{verbatim}
+package p where
+    A :: [data T; f :: T -> T]
+    B = [export T(MkT), h; import A(f); data T = MkT; h x = f MkT]
+    A = [export T(MkT), f, h; import B; f MkT = MkT]
+\end{verbatim}
+
+The key to this example is that B \emph{may or may not typecheck} depending
+on the definition of A. Because A reexports B's definition T, B will
+typecheck; but if A defined T on its own, B would not typecheck.
Thus, +we \emph{cannot} typecheck B until we have done some analysis of A (the +shaping analysis!) + +Thus, it is beneficial (from an optimization point of view) to +consider a subset of Backpack for which shaping is not necessary. +Here is a programming discipline which does just that, which we will call the \textbf{linking restriction}: \emph{Module implementations must be declared before +signatures.} Formally, this restriction modifies the rule for merging +polarized module shapes ($\widetilde{\tau}_1^{m_1} \oplus \widetilde{\tau}_2^{m_2}$) so that +$\widetilde{\tau}_1^- \oplus \widetilde{\tau}_2^+$ is always undefined.\footnote{This seemed to be the crispest way of defining the restriction, although this means an error happens a bit later than I'd like it to: I'd prefer if we errored while merging logical contexts, but we don't know what is a hole at that point.} + +Here is an example of the linking restriction. Consider these two packages: + +\begin{verbatim} +package random where + System.Random = [ ... ].hs + +package monte-carlo where + System.Random :: ... + System.MonteCarlo = [ ... ].hs +\end{verbatim} + +Here, random is a definite package which may have been compiled ahead +of time; monte-carlo is an indefinite package with a dependency +on any package which provides \verb|System.Random|. + +Now, to link these two applications together, only one ordering +is permissible: + +\begin{verbatim} +package myapp where + include random + include monte-carlo +\end{verbatim} + +If myapp wants to provide its own random implementation, it can do so: + +\begin{verbatim} +package myapp2 where + System.Random = [ ... ].hs + include monte-carlo +\end{verbatim} + +In both cases, all of \verb|monte-carlo|'s holes have been filled in by the time +it is included. The alternate ordering is not allowed. + +Why does this discipline prevent mutually recursive modules? Intuitively, +a hole is the mechanism by which we can refer to an implementation +before it is defined; otherwise, we can only refer to definitions which +preceed our definition. If there are never any holes \emph{which get filled}, +implementation links can only go backwards, ruling out circularity. + +It's easy to see how mutual recursion can occur if we break this discipline: + +\begin{verbatim} +package myapp2 where + include monte-carlo + System.Random = [ import System.MonteCarlo ].hs +\end{verbatim} + +\subsection{Typechecking of definite modules without shaping} + +If we are not carrying out a shaping pass, we need to be able to calculate +$\widetilde{\Xi}_{\mathsf{pkg}}$ on the fly. In the case that we are +compiling a package---there will be no holes in the final package---we +can show that shaping is unnecessary quite easily, since with the +linking restriction, everything is definite from the get-go. + +Observe the following invariant: at any given step of the module +bindings, the physical context $\widetilde{\Phi}$ contains no +holes. We can thus conclude that there are no module variables in any +type shapes. As the only time a previously calculated package shape can +change is due to unification, the incrementally computed shape is in +fact the true one. + +As far as the implementation is concerned, we never have to worry +about handling module variables; we only need to do extra typechecks +against (renamed) interface files. 
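+
+To make this invariant concrete, here is a minimal sketch (in plain
+Haskell, with deliberately simplified, hypothetical types that do not
+correspond to GHC's actual data structures) of accumulating a package
+shape binding by binding while enforcing the linking restriction: a
+signature may follow an implementation (it is simply checked against
+it), but an implementation may never fill a hole declared earlier.
+Since a hole, once declared, can never be filled, a package whose
+final shape has no holes can never have had any at an intermediate
+step either.
+
+\begin{verbatim}
+module LinkingRestriction where
+
+import qualified Data.Set as Set
+
+type ModName = String
+
+-- A simplified package binding: a module implementation or a signature.
+data Binding = Impl ModName      -- M = [ ... ]
+             | Sig  ModName      -- M :: [ ... ]
+
+-- A (very) simplified package shape: which modules are implemented,
+-- and which are still holes.
+data Shape = Shape { implemented :: Set.Set ModName
+                   , holes       :: Set.Set ModName }
+
+emptyShape :: Shape
+emptyShape = Shape Set.empty Set.empty
+
+-- Extend the shape with one more binding, rejecting programs that
+-- violate the linking restriction.
+extend :: Shape -> Binding -> Either String Shape
+extend sh (Impl m)
+  | m `Set.member` holes sh
+  = Left ("linking restriction violated: implementation of " ++ m
+          ++ " follows its signature")
+  | otherwise
+  = Right sh { implemented = Set.insert m (implemented sh) }
+extend sh (Sig m)
+  | m `Set.member` implemented sh
+  = Right sh   -- just typecheck the signature against the implementation
+  | otherwise
+  = Right sh { holes = Set.insert m (holes sh) }
+\end{verbatim}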
+
+\subsection{Compilation of definite modules}\label{sec:compiling-definite}
+
+Of course, we still have to compile the code, and this includes any
+subpackages which we have mixed in the dependencies to make them fully
+definite. Let's take the following set of packages as an example:
+
+\begin{verbatim}
+package pkg-a where
+    A = [ a = 0; b = 0 ]    -- b is not visible
+    B = ...                 -- this code is ignored
+package pkg-b where -- indefinite package
+    A :: [ a :: Bool ]
+    B = [ import A; b = 1 ]
+package pkg-c where
+    include pkg-a (A)
+    include pkg-b
+    C = [ import B; ... ]
+\end{verbatim}
+
+Note: in the following example, we will assume that we are operating
+under the packaging scheme specified in Section~\ref{sec:one-per-definite-package}
+with the indefinite package refinement.
+
+With the linking invariant, we can simply walk the Backpack package ``tree'',
+compiling each of its dependencies. Let's walk through it explicitly.\footnote{To simplify matters, we assume that there is only one instance of any
+PackageId in the database, so we omit the unique-ifying hashes from the
+ghc-pkg registration commands; we ignore the existence of version numbers
+and Cabal's dependency solver; finally, we do the compilation in
+one-shot mode, even though Cabal in practice will use the Make mode.}
+
+First, we have to build \verb|pkg-a|. This package has no dependencies
+of any kind, so compiling is much like compiling ordinary Haskell. If
+it was already installed in the database, we wouldn't even bother compiling it.
+
+\begin{verbatim}
+ADEPS = # empty!
+ghc -c A.hs -package-name pkg-a-ADEPS
+ghc -c B.hs -package-name pkg-a-ADEPS
+# install and register pkg-a-ADEPS
+\end{verbatim}
+
+Next, we have to build \verb|pkg-b|. This package has a hole \verb|A|;
+intuitively, it depends on some package which provides \verb|A|. This is done in two steps:
+first we check if the signature given for the hole matches up with the
+actual implementation provided. Then we build the module properly.
+
+\begin{verbatim}
+BDEPS = "A -> pkg-a-ADEPS:A"
+ghc -c A.hs-boot -package-name pkg-b-BDEPS -hide-all-packages \
+    -package "pkg-a-ADEPS(A)"
+ghc -c B.hs -package-name pkg-b-BDEPS -hide-all-packages \
+    -package "pkg-a-ADEPS(A)"
+# install and register pkg-b-BDEPS
+\end{verbatim}
+
+These commands mostly resemble the traditional compilation process, but
+with some minor differences. First, the \verb|-package| includes must
+also specify a thinning (and renaming) list. This is because when
+\verb|pkg-b| is compiled, it only sees module \verb|A| from \verb|pkg-a|, not
+module \verb|B| (as it was thinned out.) Conceptually, this package is
+being compiled in the context of some module environment \verb|BDEPS| (a
+logical context, in Backpack lingo) which maps modules to original names
+and is utilized by the module finder to look up the import in
+\verb|B.hs|; we load/thin/rename packages so that the package
+environment accurately reflects the module environment.
+
+Similarly, it is important that the compilation of \verb|B.hs| use \verb|A.hi-boot|
+to determine what entities in the module are visible upon import; this is
+automatically detected by \verb|GHC| when the compilation occurs. Otherwise,
+in module \verb|pkg-b:B|, there would be a name collision between the local
+definition of \verb|b| and the identifier \verb|b| which was
+accidentally pulled in when we compiled against the actual implementation of
+\verb|A|.
It's actually a bit tempting to compile \verb|pkg-b:B| against the
+\verb|hi-boot| generated by the signature, but this would unnecessarily
+lose out on possible optimizations which are stored in the original \verb|hi|
+file, but not evident from \verb|hi-boot|.
+
+Finally, having created all of the necessary subpackages, we can compile
+our package proper.
+
+\begin{verbatim}
+CDEPS = # empty!!
+ghc -c C.hs -package-name pkg-c-CDEPS -hide-all-packages \
+    -package "pkg-a-ADEPS(A)" \
+    -package "pkg-b-BDEPS"
+# install and register package pkg-c-CDEPS
+\end{verbatim}
+
+This command is quite similar, although it's worth mentioning that now,
+the \verb|package| flags directly mirror the syntax in Backpack.
+Additionally, even though \verb|pkg-c| ``depends'' on subpackages, these
+do not show up in its package-name identifier, e.g. CDEPS\@. This is
+because this package \emph{chose} the values of ADEPS and BDEPS
+explicitly (by including the packages in this particular order), so
+there are no degrees of freedom.\footnote{In the presence of a
+    Cabal-style dependency solver which associates a-0.1 with a concrete
+identifier a, these choices need to be recorded in the package ID.}
+
+Overall, there are a few important things to notice about this architecture.
+First, because the \verb|pkg-b-BDEPS| product is installed, if in another package
+build we instantiate the indefinite module B with exactly the same \verb|pkg-a|
+implementation, we can skip the compilation process and reuse the version.
+This is because the calculated \verb|BDEPS| will be the same, and thus the package
+IDs will be the same.
+
+XXX ToDo: actually write down pseudocode algorithm for this
+
+\paragraph{Sometimes you need a module environment instead} In the compilation
+description here, we've implicitly assumed that any external modules you might
+depend on exist in a package somewhere. However, a tricky situation
+occurs when some of these modules come from a parent package:
+\begin{verbatim}
+package pkg-b where
+    A :: [ a :: Bool ]
+    B = [ import A; b = 1 ]
+package pkg-c where
+    A = [ a = 0; b = 0 ]
+    include pkg-b
+    C = [ import B; ... ]
+\end{verbatim}
+
+How this problem gets resolved depends on what our library granularity is (Section~\ref{sec:flatten}).
+
+In the ``only definite packages are compiled'' world
+(Section~\ref{sec:one-per-definite-package}), we need to pass a
+special ``module environment'' to the compilation of the libraries in
+\verb|pkg-b| to say where to find \verb|A| (just as \verb|monte-carlo|
+earlier needed to be told where to find \verb|System.Random|).
+The compilation of \verb|pkg-b| now looks as follows:
+
+\begin{verbatim}
+BDEPS = "A -> pkg-c-CDEPS:A"
+ghc -c A.hs-boot -package-name pkg-c-CDEPS -module-env BDEPS
+ghc -c B.hs -package-name pkg-c-CDEPS -subpackage-name pkg-b-BDEPS -module-env BDEPS
+\end{verbatim}
+
+The most important thing to remember here is that in the ``only definite
+packages are compiled'' world, we must create a \emph{copy} of
+\verb|pkg-b| in order to instantiate its hole with \verb|pkg-c:A|
+(otherwise, there is a circular dependency.) These packages must be
+distinguished from the parent package (\verb|-subpackage-name|), but
+logically, they will be installed in the \verb|pkg-c| library. The
+module environment specifies where the holes can be found, without
+referring to an actual package (since \verb|pkg-c| has, indeed, not been
+installed yet at the time we process \verb|B.hs|).
These files are
+probably looked up in the include paths.\footnote{It's worth remarking
+    that a variant of this was originally proposed as the one true
+    compilation strategy. However, it was pointed out that this gave up
+    applicativity in all cases. Our current refinement of this strategy
+gives up applicativity for modules which have not been placed in an
+external package.}
+
+Things are a bit different in sliced world and physical module identity
+world (Section~\ref{sec:one-per-package-identity}); here, we really do
+compile and install (perhaps to a local database) \verb|pkg-c:A| before
+starting with the compilation of \verb|pkg-b|. So package imports will
+continue to work fine.
+
+\subsection{Restricted recursive modules ala hs-boot}\label{sec:hs-boot-restrict}
+
+It should be possible to support GHC-style mutual recursion in the
+Backpack formalism immediately, using hs-boot files. However, to avoid
+the need for a shaping pass, we must adopt an existing constraint that
+already applies to hs-boot files: \emph{at the time we define a signature,
+we must know what the original name for all data types is}. In practice,
+GHC enforces this by stating that: (1) an hs-boot file must be
+accompanied with an implementation, and (2) the implementation must
+in fact define (and not reexport) all of the declarations in the signature.
+
+Why does this not require a shaping pass? The reason is that the
+signature is not really polymorphic: we require that the $\alpha$ module
+variable be resolved to a concrete module later in the same package, and
+that all the $\beta$ module variables be unified with $\alpha$. Thus, we
+know ahead of time the original names and don't need to deal with any
+renaming.\footnote{This strategy doesn't completely resolve the problem
+of cross-package mutual recursion, because we need to first compile a
+bit of the first package (signatures), then the second package, and then
+the rest of the first package.}
+
+Compiling packages in this way gives the tantalizing possibility
+of true separate compilation: the only thing we don't know is what the actual
+package name of an indefinite package will be, and what the correct references
+to have are. This is a very minor change to the assembly, so one could conceive
+of dynamically rewriting these references at the linking stage. But
+separate compilation achieved in this fashion would not be able to take
+advantage of cross-module optimizations.
+
+\section{Shaped Backpack}
+
+Despite the simplicity of shapeless Backpack with the linking
+restriction in the absence of holes, we will find that when we have
+holes, it will be very difficult to do type-checking without
+some form of shaping. This section is very much a work in progress,
+but the ability to typecheck against holes, even with the linking restriction,
+is a very important part of modular separate development, so we will need
+to support it at some point.
+
+\subsection{Efficient shaping}
+
+(This is Edward's opinion; he hasn't convinced other folks that this is
+the right way to do it.)
+
+In this section, I want to argue that, although shaping constitutes
+a pre-pass which must be run before compilation in earnest, it is only
+about as bad as the dependency resolution analysis that GHC already does
+in \verb|ghc -M| or \verb|ghc --make|.
+
+In Paper Backpack, what information does shaping compute? It looks at
+exports, imports, data declarations and value declarations (but not the
+actual expressions associated with these values.)
As a matter of fact, +GHC already must look at the imports associated with a package in order +to determine the dependency graph, so that it can have some order to compile +modules in. There is a specialized parser which just parses these statements, +and then ignores the rest of the file. + +A bit of background: the \emph{renamer} is responsible for resolving +imports and figuring out where all of these entities actually come from. +SPJ would really like to avoid having to run the renamer in order to perform +a shaping pass. + +\paragraph{Is it necessary to run the Renamer to do shaping?} +Edward and Scott believe the answer is no, well, partially. +Shaping needs to know the original names of all entities exposed by a +module/signature. Then it needs to know (a) which entities a module/signature +defines/declares locally and (b) which entities that module/signature exports. +The former, (a), can be determined by a straightforward inspection of a parse +tree of the source file.\footnote{Note that no expression or type parsing +is necessary. We only need names of local values, data types, and data +constructors.} The latter, (b), is a bit trickier. Right now it's the Renamer +that interprets imports and exports into original names, so we would still +rely on that implementation. However, the Renamer does other, harder things +that we don't need, so ideally we could factor out the import/export +resolution from the Renamer for use in shaping. + +Unfortunately the Renamer's import resolution analyzes \verb|.hi| files, but for +local modules, which haven't yet been typechecked, we don't have those. +Instead, we could use a new file format, \verb|.hsi| files, to store the shape of +a locally defined module. (Defined packages are bundled with their shapes, +so included modules have \verb|.hsi| files as well.) (What about the logical +vs.~physical distinction in file names?) If we refactor the import/export +resolution code, could we rewrite it to generically operate on both +\verb|.hi| files and \verb|.hsi| files? + +Alternatively, rather than storing shapes on a per-source basis, we could +store (in memory) the entire package shape. Similarly, included packages +could have a single shape file for the entire package. Although this approach +would make shaping non-incremental, since an entire package's shape would +be recomputed any time a constituent module's shape changes, we do not expect +shaping to be all that expensive. + +\subsection{Typechecking of indefinite modules}\label{sec:typechecking-indefinite} + +Recall in our argument in the definite case, where we showed there are +no holes in the physical context. With indefinite modules, this is no +longer true. While (with the linking restriction) these holes will never +be linked against a physical implementation, they may be linked against +other signatures. (Note: while disallowing signature linking would +solve our problem, it would disallow a wide array of useful instances of +signature reuse, for example, a package mylib that implements both +mylib-1x-sig and mylib-2x-sig.) + +With holes, we must handle module variables, and we sometimes must unify them: + +\begin{verbatim} +package p where + A :: [ data A ] +package q where + A :: [ data A ] +package r where + include p + include q +\end{verbatim} + +In this package, it is not possible to a priori assign original names to +module A in p and q, because in package r, they should have the same +original name. 
When signature linking occurs, unification may occur, +which means we have to rename all relevant original names. (A similar +situation occurs when a module is typechecked against a signature.) + +An invariant which would be nice to have is this: when typechecking a +signature or including a package, we may apply renaming to the entities +being brought into context. But once we've picked an original name for +our entities, no further renaming should be necessary. (Formally, in the +unification for semantic object shapes, apply the unifier to the second +shape, but not the first one.) + +However, there are plenty of counterexamples here: + +\begin{verbatim} +package p where + A :: [ data A ] + B :: [ data A ] + M = ... + A = B +\end{verbatim} + +In this package, does module M know that A.A and B.A are type equal? In +fact, the shaping pass will have assigned equal module identities to A +and B, so M \emph{equates these types}, despite the aliasing occurring +after the fact. + +We can make this example more sophisticated, by having a later +subpackage which causes the aliasing; now, the decision is not even a +local one (on the other hand, the equality should be evident by inspection +of the package interface associated with q): + +\begin{verbatim} +package p where + A :: [ data A ] + B :: [ data A ] +package q where + A :: [ data A ] + B = A +package r where + include p + include q +\end{verbatim} + +Another possibility is that it might be acceptable to do a mini-shaping +pass, without parsing modules or signatures, \emph{simply} looking at +names and aliases. But logical names are not the only mechanism by +which unification may occur: + +\begin{verbatim} +package p where + C :: [ data A ] + A = [ data A = A ] + B :: [ import A(A) ] + C = B +\end{verbatim} + +It is easy to conclude that the original names of C and B are the same. But +more importantly, C.A must be given the original name of p:A.A. This can only +be discovered by looking at the signature definition for B. In any case, it +is worth noting that this situation parallels the situation with hs-boot +files (although there is no mutual recursion here). + +The conclusion is that you will probably, in fact, have to do real +shaping in order to typecheck all of these examples. + +\paragraph{Hey, these signature imports are kind of tricky\ldots} + +When signatures and modules are interleaved, the interaction can be +complex. Here is an example: + +\begin{verbatim} +package p where + C :: [ data A ] + M = [ import C; ... ] + A = [ import M; data A = A ] + C :: [ import A(A) ] +\end{verbatim} + +Here, the second signature for C refers to a module implementation A +(this is permissible: it simply means that the original name for p:C.A +is p:A.A). But wait! A relies on M, and M relies on C. Circularity? +Fortunately not: a client of package p will find it impossible to have +the hole C implemented in advance, since they will need to get their hands on module +A\ldots but it will not be defined prior to package p. + +In any case, however, it would be good to emit a warning if a package +cannot be compiled without mutual recursion. + +\subsection{Incremental typechecking} +We want to typecheck modules incrementally, i.e., when something changes in +a package, we only want to re-typecheck the modules that care about that +change. GHC already does this today.% +\footnote{\url{https://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/RecompilationAvoidance}} +Is the same mechanism sufficient for Backpack? 
Edward and Scott think that it
+is, mostly. Our conjecture is that a module should be re-typechecked if the
+existing mechanism says it should \emph{or} if the logical shape
+context (which maps logical names to physical names) has changed. The latter
+condition is due to aliases that affect typechecking of modules.
+
+Let's look again at an example from before:
+\begin{verbatim}
+package p where
+    A :: [ data A ]
+    B :: [ data A ]
+    M = [ import A; import B; ... ]
+\end{verbatim}
+Let's say that \verb|M| is typechecked successfully. Now we add an alias binding
+at the end of the package, \verb|A = B|. Does \verb|M| need to be
+re-typechecked? Yes! (Well, it seems so, but let's just assert ``yes'' for now.
+Certainly in the reverse case---if we remove the alias and then ask---this
+is true, since \verb|M| might have depended on the two \verb|A| types
+being the same.)
+The logical shape context changed to say that \verb|A| and
+\verb|B| now map to the same physical module identity. But does the existing
+recompilation avoidance mechanism say that \verb|M| should be re-typechecked?
+It's unclear. The \verb|.hi| file for \verb|M| records that it imported \verb|A| and
+\verb|B| with particular ABIs, but does it also know about the physical module
+identities (or rather, original module names) of these modules?
+
+Scott thinks this highlights the need for us to get our story straight about
+the connection between logical names, physical module identities, and file
+names!
+
+
+\subsection{Installing indefinite packages}\label{sec:installing-indefinite}
+
+If an indefinite package contains no code at all, we only need
+to install the interface file for the signatures. However, if
+it includes code, we must provide all of the
+ingredients necessary to compile it when the holes are linked against
+actual implementations. (Figure~\ref{fig:pkgdb})
+
+\paragraph{Source tarball or preprocessed source?} What representation of
+the source should be saved? There
+are a number of possible choices:
+
+\begin{itemize}
+    \item The original tarballs downloaded from Hackage,
+    \item Preprocessed source files,
+    \item Some sort of internal, type-checked representation of Haskell code (maybe the output of the desugarer).
+\end{itemize}
+
+Storing the tarballs is the simplest and most straightforward mechanism,
+but we will have to be very certain that we can recompile the module
+later in precisely the same way we compiled it originally, to ensure the hi
+files match up (fortunately, it should be simple to perform an optional
+sanity check before proceeding.) The appeal of saving preprocessed
+source, or even the IRs, is that conceptually this is exactly
+what an indefinite package is: we have paused the compilation process
+partway, intending to finish it later. However, our compilation strategy
+for definite packages requires us to run this step using a \emph{different}
+choice of original names, so it's unclear how much work could actually be reused.
+
+\section{Surface syntax}
+
+In the Backpack paper, a brand new module language is presented, with
+syntax for inline modules and signatures. This syntax is probably worth implementing,
+because it makes it easy to specify compatibility packages, whose module
+definitions in general may be very short:
+
+\begin{verbatim}
+package ishake-0.12-shake-0.13 where
+    include shake-0.13
+    Development.Shake.Sys = Development.Shake.Cmd
+    Development.Shake = [ (**>) = (&>) ; (*>>) = (|*>)]
+    Development.Shake.Rule = [ defaultPriority = rule .
priority 0.5 ] + include ishake-0.12 +\end{verbatim} + +However, there are a few things that are less than ideal about the +surface syntax proposed by Paper Backpack: + +\begin{itemize} + \item It's completely different from the current method users + specify packages. There's nothing wrong with this per se + (one simply needs to support both formats) but the smaller + the delta, the easier the new packaging format is to explain + and implement. + + \item Sometimes order matters (relative ordering of signatures and + module implementations), and other times it does not (aliases). + This can be confusing for users. + + \item Users have to order module definitions topologically, + whereas in current Cabal modules can be listed in any order, and + GHC figures out an appropriate order to compile them. +\end{itemize} + +Here is an alternative proposal, closely based on Cabal syntax. Given +the following Backpack definition: + +\begin{verbatim} +package libfoo(A, B, C, Foo) where + include base + -- renaming and thinning + include libfoo (Foo, Bar as Baz) + -- holes + A :: [ a :: Bool ].hsig + A2 :: [ b :: Bool ].hsig + -- normal module + B = [ + import {-# SOURCE #-} A + import Foo + import Baz + ... + ].hs + -- recursively linked pair of modules, one is private + C :: [ data C ].hsig + D = [ import {-# SOURCE #-} C; data D = D C ].hs + C = [ import D; data C = C D ].hs + -- alias + A = A2 +\end{verbatim} + +We can write the following Cabal-like syntax instead (where +all of the signatures and modules are placed in appropriately +named files): + +\begin{verbatim} +package: libfoo +... +build-depends: base, libfoo (Foo, Bar as Baz) +holes: A A2 -- deferred for now +exposed-modules: Foo B C +aliases: A = A2 +other-modules: D +\end{verbatim} + +Notably, all of these lists are \emph{insensitive} to ordering! +The key idea is use of the \verb|{-# SOURCE #-}| pragma, which +is enough to solve the important ordering constraint between +signatures and modules. + +Here is how the elaboration works. For simplicity, in this algorithm +description, we assume all packages being compiled have no holes +(including \verb|build-depends| packages). Later, we'll discuss how to +extend the algorithm to handle holes in both subpackages and the main +package itself. + +\begin{enumerate} + + \item At the top-level with \verb|package| $p$ and + \verb|exposed-modules| $ms$, record \verb|package p (ms) where| + + \item For each package $p$ with thinning/renaming $ms$ in + \verb|build-depends|, record a \verb|include p (ms)| in the + Backpack package. The ordering of these includes does not + matter, since none of these packages have holes. + + \item Take all modules $m$ in \verb|other-modules| and + \verb|exposed-modules| which were not exported by build + dependencies, and create a directed graph where hs and hs-boot + files are nodes and imports are edges (the target of an edge is + an hs file if it is a normal import, and an hs-boot file if it + is a SOURCE import). Topologically sort this graph, erroring if + this graph contains cycles (even with recursive modules, the + cycle should have been broken by an hs-boot file). For each + node, in this order, record \verb|M = [ ... ]| or \verb|M :: [ ... ]| + depending on whether or not it is an hs or hs-boot. If possible, + sort signatures before implementations when there is no constraint + otherwise. + +\end{enumerate} + +Here is a simple example which shows how SOURCE can be used to disambiguate +between two important cases. 
Suppose we have these modules:
+
+\begin{verbatim}
+-- A1.hs
+import {-# SOURCE #-} B
+
+-- A2.hs
+import B
+
+-- B.hs
+x = True
+
+-- B.hs-boot
+x :: Bool
+\end{verbatim}
+
+Then we translate the following packages as follows:
+
+\begin{verbatim}
+exposed-modules: A1 B
+-- translates to
+B :: [ x :: Bool ]
+A1 = [ import B ]
+B = [ x = True ]
+\end{verbatim}
+
+but
+
+\begin{verbatim}
+exposed-modules: A2 B
+-- translates to
+B = [ x = True ]
+B :: [ x :: Bool ]
+A2 = [ import B ]
+\end{verbatim}
+
+The import controls placement between signature and module, and in A1 it
+forces B's signature to be sorted before B's implementation (whereas in
+the second case, there is no constraint, so we preferentially place
+B's implementation first).
+
+\paragraph{Holes in the database} In the presence of holes,
+\verb|build-depends| resolution becomes more complicated. First,
+let's consider the case where the package we are building is
+definite, but the package database contains indefinite packages with holes.
+In order to maintain the linking restriction, we now have to order packages
+from step (2) of the previous elaboration. We can do this by creating
+a directed graph, where nodes are packages and edges are from holes to the
+package which implements them. If there is a cycle, this indicates a mutually
+recursive package. In the absence of cycles, a topological sorting of this
+graph preserves the linking invariant.
+
+One subtlety to consider is the fact that an entry in \verb|build-depends|
+can affect how a hole is instantiated by another entry. This might be a
+bit weird to users, who might like to explicitly say how holes are
+filled when instantiating a package. Food for thought, surface syntax wise.
+
+\paragraph{Holes in the package} Actually, this is quite simple: the
+ordering of includes goes as before, but some indefinite packages in the
+database are less constrained as their ``dependencies'' are fulfilled
+by the holes at the top-level of this package. It's also worth noting
+that some dependencies will go unresolved, since the following package
+is valid:
+
+\begin{verbatim}
+package a where
+    A :: ...
+package b where
+    include a
+\end{verbatim}
+
+\paragraph{Multiple signatures} In Backpack syntax, it's possible to
+define a signature multiple times, which is necessary for mutually
+recursive signatures:
+
+\begin{verbatim}
+package a where
+    A :: [ data A ]
+    B :: [ import A; data B = B A ]
+    A :: [ import B; data A = A B ]
+\end{verbatim}
+
+Critically, notice that we can see the constructors for both module B and A
+after the signatures are linked together. This is not possible in GHC
+today, but could be possible by permitting multiple hs-boot files. Now
+the SOURCE pragma indicating an import must \emph{disambiguate} which
+hs-boot file it intends to include. This might be one way of doing it:
+
+\begin{verbatim}
+-- A.hs-boot2
+data A
+
+-- B.hs-boot
+import {-# SOURCE hs-boot2 #-} A
+
+-- A.hs-boot
+import {-# SOURCE hs-boot #-} B
+\end{verbatim}
+
+\paragraph{Explicit or implicit reexports} One annoying property of
+this proposal is that, looking at the \verb|exposed-modules| list, it is
+not immediately clear what source files one would expect to find in the
+current package. It's not obvious what the proper way to go about doing
+this is.
+
+\paragraph{Better syntax for SOURCE} If we enshrine the SOURCE import
+as a way of solving Backpack ordering problems, it would be nice to have
+some better syntax for it.
One possibility is:
+
+\begin{verbatim}
+abstract import Data.Foo
+\end{verbatim}
+
+which makes it clear that this module is pluggable, typechecking against
+a signature. Note that this only indicates how type checking should be
+done: when actually compiling the module we will compile against the
+interface file for the true implementation of the module.
+
+It's worth noting that the SOURCE annotation was originally made a
+pragma because, in principle, it should have been possible to compile
+some recursive modules without needing the hs-boot file at all. But if
+we're moving towards boot files as signatures, this concern is less
+relevant.
+
+\section{Type classes and type families}
+
+\subsection{Background}
+
+Before we talk about how to support type classes in Backpack, it's first
+worth talking about what we are trying to achieve in the design. Most
+would agree that \emph{type safety} is the cardinal law that should be
+preserved (in the sense that segfaults should not be possible), but
+there are many instances of ``bad behavior'' (top level mutable state,
+weakening of abstraction guarantees, ambiguous instance resolution, etc)
+which various Haskellers may disagree on the necessity of ruling out.
+
+With this in mind, it is worth summarizing what kind of guarantees are
+presently given by GHC with regards to type classes and type families,
+as well as characterizing the \emph{cultural} expectations of the
+Haskell community.
+
+\paragraph{Type classes} When discussing type class systems, there are
+several properties that one may talk about.
+A set of instances is \emph{confluent} if, no matter what order
+constraint solving is performed in, GHC will terminate with a canonical set
+of constraints that must be satisfied for any given use of a type class.
+In other words, confluence says that we won't conclude that a program
+doesn't type check just because we swapped in a different constraint
+solving algorithm.
+
+Confluence's closely related twin is \emph{coherence} (defined in ``Type
+classes: exploring the design space''). This property states that
+``every different valid typing derivation of a program leads to a
+resulting program that has the same dynamic semantics.'' Why could
+differing typing derivations result in different dynamic semantics? The
+answer is that context reduction, which picks out type class instances,
+elaborates into concrete choices of dictionaries in the generated code.
+Confluence is a prerequisite for coherence, since one
+can hardly talk about the dynamic semantics of a program that doesn't
+type check.
+
+In the vernacular, confluence and coherence are often incorrectly used
+to refer to another related property: \emph{global uniqueness of instances},
+which states that in a fully compiled program, for any type, there is at most one
+instance resolution for a given type class. Languages with local type
+class instances such as Scala generally do not have this property, and
+this assumption is frequently used for abstraction.
+
+So, what properties does GHC enforce, in practice?
+In the absence of any type system extensions, GHC employs a set of
+rules (described in ``Exploring the design space'') to ensure that type
+class resolution is confluent and coherent. Intuitively, it achieves
+this by having a very simple constraint solving algorithm (generate
+wanted constraints and solve wanted constraints) and then requiring the
+set of instances to be \emph{nonoverlapping}, ensuring there is only
+ever one way to solve a wanted constraint.
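+
+As a concrete illustration (a hypothetical minimal example, not drawn
+from any particular library), GHC will happily accept both of the
+following instance declarations, but a use site whose constraint is
+matched by both of them is rejected:
+
+\begin{verbatim}
+{-# LANGUAGE FlexibleInstances #-}
+class Pretty a where
+  pretty :: a -> String
+
+instance Pretty [a] where
+  pretty _ = "some list"
+
+instance Pretty [Int] where
+  pretty _ = "a list of Ints"
+
+-- Resolving the constraint (Pretty [Int]), e.g. in
+--   pretty ([1,2,3] :: [Int])
+-- is reported as an overlapping-instances error, because both of the
+-- instances above match, unless the extensions mentioned below are
+-- enabled.
+\end{verbatim}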
+
+Overlap is a more stringent restriction than either confluence or coherence, and
+via the \verb|OverlappingInstances| and \verb|IncoherentInstances| extensions, GHC
+allows a user to relax this restriction ``if they know what they're doing.''
+
+Surprisingly, however, GHC does \emph{not} enforce global uniqueness of
+instances. Imported instances are not checked for overlap until we
+attempt to use them for instance resolution. Consider the following program:
+
+\begin{verbatim}
+-- T.hs
+data T = T
+-- A.hs
+import T
+instance Eq T where
+-- B.hs
+import T
+instance Eq T where
+-- C.hs
+import A
+import B
+\end{verbatim}
+
+When compiled with one-shot compilation, \verb|C| will not report
+overlapping instances unless we actually attempt to use the \verb|Eq|
+instance in C.\footnote{When using batch compilation, GHC reuses the
+    instance database and is actually able to detect the duplicated
+    instance when compiling B. But if you run it again, recompilation
+avoidance skips A, and it finishes compiling! See this bug:
+\url{https://ghc.haskell.org/trac/ghc/ticket/5316}} This is by
+design\footnote{\url{https://ghc.haskell.org/trac/ghc/ticket/2356}}:
+eagerly ensuring that there are no overlapping instances requires
+eagerly reading all the interface files a module may depend on.
+
+We might summarize these three properties in the following manner.
+Culturally, the Haskell community expects \emph{global uniqueness of instances}
+to hold: the implicit global database of instances should be
+confluent and coherent. GHC, however, does not enforce uniqueness of
+instances: instead, it merely guarantees that the \emph{subset} of the
+instance database it uses when it compiles any given module is confluent and coherent. GHC does do some
+tests when an instance is declared to see if it would result in overlap
+with visible instances, but the check is by no means
+perfect\footnote{\url{https://ghc.haskell.org/trac/ghc/ticket/9288}};
+truly, \emph{type-class constraint resolution} has the final word. One
+mitigating factor is that in the absence of \emph{orphan instances}, GHC is
+guaranteed to eagerly notice when the instance database has overlap.\footnote{Assuming that the instance declaration checks actually worked\ldots}
+
+Clearly, the fact that GHC's lazy behavior is surprising to most
+Haskellers means that the lazy check is mostly good enough: a user
+is likely to discover overlapping instances one way or another.
+However, it is relatively simple to construct example programs which
+violate global uniqueness of instances in an observable way:
+
+\begin{verbatim}
+-- A.hs
+module A where
+data U = X | Y deriving (Eq, Show)
+
+-- B.hs
+module B where
+import Data.Set
+import A
+
+instance Ord U where
+    compare X X = EQ
+    compare X Y = LT
+    compare Y X = GT
+    compare Y Y = EQ
+
+ins :: U -> Set U -> Set U
+ins = insert
+
+-- C.hs
+module C where
+import Data.Set
+import A
+
+instance Ord U where
+    compare X X = EQ
+    compare X Y = GT
+    compare Y X = LT
+    compare Y Y = EQ
+
+ins' :: U -> Set U -> Set U
+ins' = insert
+
+-- D.hs
+module Main where
+import Data.Set
+import A
+import B
+import C
+
+test :: Set U
+test = ins' X $ ins X $ ins Y $ empty
+
+main :: IO ()
+main = print test
+
+-- OUTPUT
+$ ghc -Wall -XSafe -fforce-recomp --make D.hs
+[1 of 4] Compiling A ( A.hs, A.o )
+[2 of 4] Compiling B ( B.hs, B.o )
+
+B.hs:5:10: Warning: Orphan instance: instance [safe] Ord U
+[3 of 4] Compiling C ( C.hs, C.o )
+
+C.hs:5:10: Warning: Orphan instance: instance [safe] Ord U
+[4 of 4] Compiling Main ( D.hs, D.o )
+Linking D ...
+$ ./D
+fromList [X,Y,X]
+\end{verbatim}
+
+Locally, all type class resolution was coherent: in the subset of
+instances each module had visible, type class resolution could be done
+unambiguously. Furthermore, the types of \verb|ins| and \verb|ins'|
+discharge type class resolution, so that in \verb|D|, when the database
+is now overlapping, no resolution occurs and the error is never found.
+
+It is easy to dismiss this example as an implementation wart in GHC, and
+continue pretending that global uniqueness of instances holds. However,
+the problem with \emph{global uniqueness of instances} is that it is
+inherently nonmodular: you might find yourself unable to compose two
+components because they accidentally defined the same type class
+instance, even though these instances are plumbed deep in the
+implementation details of the components.
+
+As it turns out, there is already another feature in Haskell which
+must enforce global uniqueness, to prevent segfaults.
+We now turn to type classes' close cousin: type families.
+
+\paragraph{Type families} With type families, confluence is the primary
+property of interest. (Coherence is not of much interest because type
+families are elaborated into coercions, which don't have any
+computational content.) Rather than considering which set of
+constraints we reduce to, confluence for type families considers the
+reduction of type families. The overlap checks for type families
+can be quite sophisticated, especially in the case of closed type
+families.
+
+Unlike type classes, however, GHC \emph{does} check the non-overlap
+of type families eagerly. The analogous program does \emph{not} type check:
+
+\begin{verbatim}
+-- F.hs
+type family F a :: *
+-- A.hs
+import F
+type instance F Bool = Int
+-- B.hs
+import F
+type instance F Bool = Bool
+-- C.hs
+import A
+import B
+\end{verbatim}
+
+The reason is that it is \emph{unsound} to ever allow any overlap
+(unlike in the case of type classes where it just leads to incoherence.)
+Thus, whereas one might imagine dropping the global uniqueness of instances
+invariant for type classes, it is absolutely necessary to perform global
+enforcement here. There's no way around it!
+
+\subsection{Local type classes}
+
+Here, we say \textbf{NO} to global uniqueness.
+
+This design is perhaps best discussed in relation to modular type
+classes, which shares many similar properties.
Instances are now +treated as first class objects (in MTCs, they are simply modules)---we +may explicitly hide or include instances for type class resolution (in +MTCs, this is done via the \verb|using| top-level declaration). This is +essentially what was sketched in Section 5 of the original Backpack +paper. As a simple example: + +\begin{verbatim} +package p where + A :: [ data T = T ] + B = [ import A; instance Eq T where ... ] + +package q where + A = [ data T = T; instance Eq T where ... ] + include p +\end{verbatim} + +Here, \verb|B| does not see the extra instance declared by \verb|A|, +because it was thinned from its signature of \verb|A| (and thus never +declared canonical.) To declare an instance without making it +canonical, it must placed in a separate (unimported) module. + +Like modular type classes, Backpack does not give rise to incoherence, +because instance visibility can only be changed at the top level module +language, where it is already considered best practice to provide +explicit signatures. Here is the example used in the Modular Type +Classes paper to demonstrate the problem: + +\begin{verbatim} +structure A = using EqInt1 in + struct ...fun f x = eq(x,x)... end +structure B = using EqInt2 in + struct ...val y = A.f(3)... end +\end{verbatim} + +Is the type of f \verb|int -> bool|, or does it have a type-class +constraint? Because type checking proceeds over the entire program, ML +could hypothetically pick either. However, ported to Haskell, the +example looks like this: + +\begin{verbatim} +EqInt1 :: [ instance Eq Int ] +EqInt2 :: [ instance Eq Int ] +A = [ + import EqInt1 + f x = x == x +] +B = [ + import EqInt2 + import A hiding (instance Eq Int) + y = f 3 +] +\end{verbatim} + +There may be ambiguity, yes, but it can be easily resolved by the +addition of a top-level type signature to \verb|f|, which is considered +best-practice anyway. Additionally, Haskell users are trained to expect +a particular inference for \verb|f| in any case (the polymorphic one). + +Here is another example which might be considered surprising: + +\begin{verbatim} +package p where + A :: [ data T = T ] + B :: [ data T = T ] + C = [ + import qualified A + import qualified B + instance Show A.T where show T = "A" + instance Show B.T where show T = "B" + x :: String + x = show A.T ++ show B.T + ] +\end{verbatim} + +In the original Backpack paper, it was implied that module \verb|C| +should not type check if \verb|A.T = B.T| (failing at link time). +However, if we set aside, for a moment, the issue that anyone who +imports \verb|C| in such a context will now have overlapping instances, +there is no reason in principle why the module itself should be +problematic. Here is the example in MTCs, which I have good word from +Derek does type check. + +\begin{verbatim} +signature SIG = sig + type t + val mk : t +end +signature SHOW = sig + type t + val show : t -> string +end +functor Example (A : SIG) (B : SIG) = + let structure ShowA : SHOW = struct + type t = A.t + fun show _ = "A" + end in + let structure ShowB : SHOW = struct + type t = B.t + fun show _ = "B" + end in + using ShowA, ShowB in + struct + val x = show A.mk ++ show B.mk + end : sig val x : string end +\end{verbatim} + +The moral of the story is, even though in a later context the instances +are overlapping, inside the functor, the type-class resolution is unambiguous +and should be done (so \verb|x = "AB"|). + +Up until this point, we've argued why MTCs and this Backpack design are similar. 
+However, there is an important sociological difference between modular
+type-classes and this proposed scheme for Backpack. In the presentation
+``Why Applicative Functors Matter'', Derek mentions the canonical example
+of defining a set:
+
+\begin{verbatim}
+signature ORD = sig type t; val cmp : t -> t -> bool end
+signature SET = sig type t; type elem;
+ val empty : t;
+ val insert : elem -> t -> t ...
+end
+functor MkSet (X : ORD) :> SET where type elem = X.t
+ = struct ... end
+\end{verbatim}
+
+This is actually very different from how sets tend to be defined in
+Haskell today. If we directly encoded this in Backpack, it would
+look like this:
+
+\begin{verbatim}
+package mk-set where
+ X :: [
+ data T
+ cmp :: T -> T -> Bool
+ ]
+ Set :: [
+ data Set
+ empty :: Set
+ insert :: T -> Set -> Set
+ ]
+ Set = [
+ import X
+ ...
+ ]
+\end{verbatim}
+
+It's also informative to consider how MTCs would encode a set as it is
+written today in Haskell:
+
+\begin{verbatim}
+signature ORD = sig type t; val cmp : t -> t -> bool end
+signature SET = sig type 'a t;
+ val empty : 'a t;
+ val insert : (X : ORD) => X.t -> X.t t -> X.t t
+end
+structure MkSet :> SET = struct ... end
+\end{verbatim}
+
+Here, it is clear that while functor instantiation occurs for the
+implementation, it does not occur for the types. This is a big limitation
+of the Haskell approach, and it explains why Haskellers, in practice,
+find global uniqueness of instances so desirable.
+
+Implementation-wise, this requires some subtle modifications to how we
+do type class resolution. Type checking of indefinite modules works as
+before, but when we go to actually compile them against explicit
+implementations, we need to ``forget'' that two types are equal when
+doing instance resolution. This could probably be implemented by
+associating type class instances with the original name that was
+utilized when typechecking, so that we can resolve ambiguous matches
+against types which have the same original name now that we are
+compiling.
+
+As we've mentioned previously, this strategy is unsound for type families.
+
+\subsection{Globally unique}
+
+Here, we say \textbf{YES} to global uniqueness.
+
+When we require the global uniqueness of instances (either because
+that's the type class design we chose, or because we're considering
+the problem of type families), we will need to reject declarations like the
+one cited above when \verb|A.T = B.T|:
+
+\begin{verbatim}
+A :: [ data T ]
+B :: [ data T ]
+C :: [
+ import qualified A
+ import qualified B
+ instance Show A.T where show T = "A"
+ instance Show B.T where show T = "B"
+]
+\end{verbatim}
+
+The paper mentions that a link-time check is sufficient to prevent this
+case from arising. While in the previous section we argued that this
+is actually unnecessary when local instances are allowed, the link-time
+check is a good match in the case of global instances, because any
+instance \emph{must} be declared in the signature. The scheme proceeds
+as follows: when some instances are typechecked initially, we type check
+them as if all of the variable module identities were distinct. Then, when
+we perform linking (we \verb|include| or we unify some module
+identities), we check again to see if we've discovered some instance
+overlap. This linking check is akin to the eager check that is
+performed today for type families; it would need to be implemented for
+type classes as well. However, there is a twist: we are \emph{redoing}
+the overlap check now that some identities have been unified.
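+
+To make this redone check concrete, here is a sketch of an instantiation
+it would have to reject; the package name \verb|q|, the assumption that
+the signatures \verb|A|, \verb|B|, and \verb|C| above live in a package
+\verb|p|, and the module alias \verb|B = A| are illustrative only, not
+syntax fixed by this proposal. Filling both holes with the same module
+makes \verb|A.T| and \verb|B.T| equal, so the two \verb|Show| instances
+now overlap and the link must be rejected:
+
+\begin{verbatim}
+package q where
+ A = [ data T = T ]
+ B = A        -- alias: now A.T = B.T
+ include p    -- linking unifies p's A and B with q's; the redone
+              -- overlap check sees two instances of Show A.T and fails
+\end{verbatim}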
+
+As an implementation trick, one could defer the check until \verb|C|
+is compiled, keeping in line with GHC's lazy ``don't check for overlap
+until the use site.'' (Once again, unsound for type families.)
+
+\paragraph{What about module inequalities?} An older proposal was for
+signatures to contain ``module inequalities'', i.e., assertions that two
+modules are not equal. (Technically: we need to be able to apply this
+assertion to $\beta$ module variables, since \verb|A != B| while
+\verb|A.T = B.T|). Currently, Edward thinks that module inequalities
+are of only marginal utility with local instances (i.e., not enough to
+justify the implementation cost) and not useful at all in the world of
+global instances!
+
+With local instances, module inequalities could be useful to statically
+rule out examples like \verb|show A.T ++ show B.T|. Because such uses
+are not necessarily reflected in the signature, it would be a violation
+of separate module development to try to divine the constraint from the
+implementation itself. I claim this is of limited utility, however, because,
+as we mentioned earlier, we can compile these ``incoherent'' modules perfectly
+coherently. With global instances, all instances must be in the signature, so
+while it might be aesthetically displeasing to have the signature impose
+extra restrictions on linking identities, we can carry this out without
+violating the linking restriction.
+
+\section{Bits and bobs}
+
+\subsection{Abstract type synonyms}
+
+In Paper Backpack, abstract type synonyms are not permitted, because GHC doesn't
+understand how to deal with them. The purpose of this section is to describe
+one particular nastiness of abstract type synonyms, by way of the occurs check:
+
+\begin{verbatim}
+A :: [ type T ]
+B :: [ import qualified A; type T = [A.T] ]
+\end{verbatim}
+
+At this point, it is illegal for \verb|A = B|; otherwise, this type synonym would
+fail the occurs check. This seems like pretty bad news, since every instance
+of the occurs check in the type-checker could constitute a module inequality.
+
+\section{Open questions}\label{sec:open-questions}
+
+Here are open problems about the implementation which still require
+hashing out.
+
+\begin{itemize}
+
+ \item In Section~\ref{sec:simplifying-backpack}, we argued that we
+ could implement Backpack without needing a shaping pass. We're
+ pretty certain that this will work for typechecking and
+ compiling fully definite packages with no recursive linking, but
+ in Section~\ref{sec:typechecking-indefinite}, we described some
+ of the prevailing difficulties of supporting signature linking.
+ Renaming is not an insurmountable problem, but backwards flow of
+ shaping information can be, and it is unclear how best to
+ accommodate this. This is probably the most important problem
+ to overcome.
+
+ \item In Section~\ref{sec:installing-indefinite}, a few choices for how to
+ store source code were pitched; however, there is no consensus on which
+ one is best.
+
+ \item What is the impact of the multiplicity of PackageIds on
+ dependency solving in Cabal? Old questions of what to prefer
+ when multiple package-versions are available resurface (Cabal originally
+ only needed to solve this between different versions of the same
+ package, preferring the oldest version), but with signatures,
+ there are more choices. Should there be a complex solver that
+ does all signature solving, or a preprocessing step that puts
+ things back into the original Cabal version?
Authors may want + to suggest policy for what packages should actually link against + signatures (so a crypto library doesn't accidentally link + against a null cipher package). + +\end{itemize} + +\end{document} diff --git a/docs/backpack/commands-new-new.tex b/docs/backpack/commands-new-new.tex new file mode 100644 index 0000000000..1f2466e14c --- /dev/null +++ b/docs/backpack/commands-new-new.tex @@ -0,0 +1,891 @@ +%!TEX root = paper/paper.tex +\usepackage{amsmath} +\usepackage{amssymb} +\usepackage{amsthm} +\usepackage{xspace} +\usepackage{color} +\usepackage{xifthen} +\usepackage{graphicx} +\usepackage{amsbsy} +\usepackage{mathtools} +\usepackage{stmaryrd} +\usepackage{url} +\usepackage{alltt} +\usepackage{varwidth} +% \usepackage{hyperref} +\usepackage{datetime} +\usepackage{subfig} +\usepackage{array} +\usepackage{multirow} +\usepackage{xargs} +\usepackage{marvosym} % for MVAt +\usepackage{bm} % for blackboard bold semicolon + + +%% HYPERREF COLORS +\definecolor{darkred}{rgb}{.7,0,0} +\definecolor{darkgreen}{rgb}{0,.5,0} +\definecolor{darkblue}{rgb}{0,0,.5} +% \hypersetup{ +% linktoc=page, +% colorlinks=true, +% linkcolor=darkred, +% citecolor=darkgreen, +% urlcolor=darkblue, +% } + +% Coloring +\definecolor{hilite}{rgb}{0.7,0,0} +\newcommand{\hilite}[1]{\color{hilite}#1\color{black}} +\definecolor{shade}{rgb}{0.85,0.85,0.85} +\newcommand{\shade}[1]{\colorbox{shade}{\!\ensuremath{#1}\!}} + +% Misc +\newcommand\evalto{\hookrightarrow} +\newcommand\elabto{\rightsquigarrow} +\newcommand\elabtox[1]{\stackrel{#1}\rightsquigarrow} +\newcommand{\yields}{\uparrow} +\newcommand\too{\Rightarrow} +\newcommand{\nil}{\cdot} +\newcommand{\eps}{\epsilon} +\newcommand{\Ups}{\Upsilon} +\newcommand{\avoids}{\mathrel{\#}} + +\renewcommand{\vec}[1]{\overline{#1}} +\newcommand{\rname}[1]{\textsc{#1}} +\newcommand{\infrule}[3][]{% + \vspace{0.5ex} + \frac{\begin{array}{@{}c@{}}#2\end{array}}% + {\mbox{\ensuremath{#3}}}% + \ifthenelse{\isempty{#1}}% + {}% + % {\hspace{1ex}\rlap{(\rname{#1})}}% + {\hspace{1ex}(\rname{#1})}% + \vspace{0.5ex} +} +\newcommand{\infax}[2][]{\infrule[#1]{}{#2}} +\newcommand{\andalso}{\hspace{.5cm}} +\newcommand{\suchthat}{~\mathrm{s.t.}~} +\newenvironment{notes}% + {\vspace{-1.5em}\begin{itemize}\setlength\itemsep{0pt}\small}% + {\end{itemize}} +\newcommand{\macrodef}{\mathbin{\overset{\mathrm{def}}{=}}} +\newcommand{\macroiff}{\mathbin{\overset{\mathrm{def}}{\Leftrightarrow}}} + + +\newcommand{\ttt}[1]{\text{\tt #1}} +\newcommand{\ttul}{\texttt{\char 95}} +\newcommand{\ttcc}{\texttt{:\!:}} +\newcommand{\ttlb}{{\tt {\char '173}}} +\newcommand{\ttrb}{{\tt {\char '175}}} +\newcommand{\tsf}[1]{\textsf{#1}} + +% \newcommand{\secref}[1]{\S\ref{sec:#1}} +% \newcommand{\figref}[1]{Figure~\ref{fig:#1}} +\newcommand{\marginnote}[1]{\marginpar[$\ast$ {\small #1} $\ast$]% + {$\ast$ {\small #1} $\ast$}} +\newcommand{\hschange}{\marginnote{!Haskell}} +\newcommand{\TODO}{\emph{TODO}\marginnote{TODO}} +\newcommand{\parheader}[1]{\textbf{#1}\quad} + +\newcommand{\file}{\ensuremath{\mathit{file}}} +\newcommand{\mapnil}{~\mathord{\not\mapsto}} + +\newcommand{\Ckey}[1]{\textbf{\textsf{#1}}} +\newcommand{\Cent}[1]{\texttt{#1}} +% \newcommand{\Cmod}[1]{\texttt{[#1]}} +% \newcommand{\Csig}[1]{\texttt{[\ttcc{}#1]}} +\newcommand{\Cmod}[1]{=\texttt{[#1]}} +\newcommand{\Csig}[1]{~\ttcc{}~\texttt{[#1]}} +\newcommand{\Cpath}[1]{\ensuremath{\mathsf{#1}}} +\newcommand{\Cvar}[1]{\ensuremath{\mathsf{#1}}} +\newcommand{\Ccb}[1]{\text{\ttlb} {#1} \text{\ttrb}} +\newcommand{\Cpkg}[1]{\texttt{#1}} 
+\newcommand{\Cmv}[1]{\ensuremath{\langle #1 \rangle}} +\newcommand{\Cto}[2]{#1 \mapsto #2} +\newcommand{\Ctoo}[2]{\Cpath{#1} \mapsto \Cpath{#2}} +\newcommand{\Crm}[1]{#1 \mapnil} +\newcommand{\Crmm}[1]{\Cpath{#1} \mapnil} +\newcommand{\Cthin}[1]{\ensuremath{\langle \Ckey{only}~#1 \rangle}} +\newcommand{\Cthinn}[1]{\ensuremath{\langle \Ckey{only}~\Cpath{#1} \rangle}} +\newcommand{\Cinc}[1]{\Ckey{include}~{#1}} +\newcommand{\Cincc}[1]{\Ckey{include}~\Cpkg{#1}} +\newcommand{\Cshar}[2]{~\Ckey{where}~{#1} \equiv {#2}} +\newcommand{\Csharr}[2]{~\Ckey{where}~\Cpath{#1} \equiv \Cpath{#2}} +\newcommand{\Ctshar}[2]{~\Ckey{where}~{#1} \equiv {#2}} +\newcommand{\Ctsharr}[3]{~\Ckey{where}~\Cpath{#1}.\Cent{#3} \equiv \Cpath{#2}.\Cent{#3}} +\newcommand{\Cbinds}[1]{\left\{\!\begin{array}{l} #1 \end{array}\!\right\}} +\newcommand{\Cbindsp}[1]{\left(\!\begin{array}{l} #1 \end{array}\!\right)} +\newcommand{\Cpkgs}[1]{\[\begin{array}{l} #1\end{array}\]} +\newcommand{\Cpkgsl}[1]{\noindent\ensuremath{\begin{array}{@{}l} #1\end{array}}} +\newcommand{\Ccomment}[1]{\ttt{\emph{--~#1}}} +\newcommand{\Cimp}[1]{\Ckey{import}~\Cpkg{#1}} +\newcommand{\Cimpas}[2]{\Ckey{import}~\Cpkg{#1}~\Ckey{as}~\Cvar{#2}} + +\newcommand{\Ctbinds}[1]{\left\{\!\vrule width 0.6pt \begin{array}{l} #1 \end{array} \vrule width 0.6pt \!\right\}} +\newcommand{\Ctbindsx}{\left\{\!\vrule width 0.6pt \; \vrule width 0.6pt \!\right\}} +\newcommand{\Ctbindsxx}{\left\{\!\vrule width 0.6pt \begin{array}{l}\!\!\!\!\\\!\!\!\!\end{array} \vrule width 0.6pt \!\right\}} +\newcommand{\Ctbindsxxx}{\left\{\!\vrule width 0.6pt \begin{array}{l}\!\!\!\!\\\!\!\!\!\\\!\!\!\!\end{array} \vrule width 0.6pt \!\right\}} + + +\newcommand{\Cpkgdef}[2]{% + \ensuremath{ + \begin{array}{l} + \Ckey{package}~\Cpkg{#1}~\Ckey{where}\\ + \hspace{1em}\begin{array}{l} + #2 + \end{array} + \end{array}}} +\newcommand{\Cpkgdefonly}[3]{% + \ensuremath{ + \begin{array}{l} + \Ckey{package}~\Cpkg{#1}\Cvar{(#2)}~\Ckey{where}\\ + \hspace{1em}\begin{array}{l} + #3 + \end{array} + \end{array}}} +\newcommand{\Ccc}{\mathbin{\ttcc{}}} +\newcommand{\Cbinmod}[2]{\Cvar{#1} = \texttt{[#2]}} +\newcommand{\Cbinsig}[2]{\Cvar{#1} \Ccc \texttt{[#2]}} +\newcommand{\Cinconly}[2]{\Ckey{include}~\Cpkg{#1}\Cvar{(#2)}} +\newcommand{\Cimponly}[2]{\Ckey{import}~\Cpkg{#1}\Cvar{(#2)}} +\newcommand{\Cimpmv}[3]{\Ckey{import}~\Cpkg{#1}\langle\Cvar{#2}\mapsto\Cvar{#3}\rangle} + + + + + +\newcommand{\oxb}[1]{\llbracket #1 \rrbracket} +\newcommand{\coxb}[1]{\{\hspace{-.5ex}| #1 |\hspace{-.5ex}\}} +\newcommand{\coxbv}[1]{\coxb{\vec{#1}}} +\newcommand{\angb}[1]{\ensuremath{\boldsymbol\langle #1 \boldsymbol\rangle}\xspace} +\newcommand{\angbv}[1]{\angb{\vec{#1}}} +\newcommand{\aoxbl}{\ensuremath{\boldsymbol\langle\hspace{-.5ex}|}} +\newcommand{\aoxbr}{\ensuremath{|\hspace{-.5ex}\boldsymbol\rangle}\xspace} +\newcommand{\aoxb}[1]{\ensuremath{\aoxbl{#1}\aoxbr}} +\newcommand{\aoxbv}[1]{\aoxb{\vec{#1}}} +\newcommand{\poxb}[1]{\ensuremath{% + (\hspace{-.5ex}|% + #1% + |\hspace{-.5ex})}\xspace} +\newcommand{\stof}[1]{{#1}^{\star}} +% \newcommand{\stof}[1]{\ensuremath{\underline{#1}}} +\newcommand{\sh}[1]{\ensuremath{\tilde{#1}}} + + +% \newenvironment{code}[1][t]% +% {\ignorespaces\begin{varwidth}[#1]{\textwidth}\begin{alltt}}% +% {\end{alltt}\end{varwidth}\ignorespacesafterend} +% \newenvironment{codel}[1][t]% +% {\noindent\begin{varwidth}[#1]{\textwidth}\noindent\begin{alltt}}% +% {\end{alltt}\end{varwidth}\ignorespacesafterend} + + +%% hack for subfloats in subfig ------------- +\makeatletter +\newbox\sf@box 
+\newenvironment{SubFloat}[2][]% + {\def\sf@one{#1}% + \def\sf@two{#2}% + \setbox\sf@box\hbox + \bgroup}% + {\egroup + \ifx\@empty\sf@two\@empty\relax + \def\sf@two{\@empty} + \fi + \ifx\@empty\sf@one\@empty\relax + \subfloat[\sf@two]{\box\sf@box}% + \else + \subfloat[\sf@one][\sf@two]{\box\sf@box}% + \fi} +\makeatother +%% ------------------------------------------ + +%% hack for top-aligned tabular cells ------------- +\newsavebox\topalignbox +\newcolumntype{L}{% + >{\begin{lrbox}\topalignbox + \rule{0pt}{\ht\strutbox}} + l + <{\end{lrbox}% + \raisebox{\dimexpr-\height+\ht\strutbox\relax}% + {\usebox\topalignbox}}} +\newcolumntype{C}{% + >{\begin{lrbox}\topalignbox + \rule{0pt}{\ht\strutbox}} + c + <{\end{lrbox}% + \raisebox{\dimexpr-\height+\ht\strutbox\relax}% + {\usebox\topalignbox}}} +\newcolumntype{R}{% + >{\begin{lrbox}\topalignbox + \rule{0pt}{\ht\strutbox}} + r + <{\end{lrbox}% + \raisebox{\dimexpr-\height+\ht\strutbox\relax}% + {\usebox\topalignbox}}} +%% ------------------------------------------------ + +\newcommand\syn[1]{\textsf{#1}} +\newcommand\bsyn[1]{\textsf{\bfseries #1}} +\newcommand\msyn[1]{\textsf{#1}} +\newcommand{\cc}{\mathop{::}} + +% \newcommand{\Eimp}[1]{\bsyn{import}~{#1}} +% \newcommand{\Eonly}[2]{#1~\bsyn{only}~{#2}} +% \newcommand{\Ehide}[1]{~\bsyn{hide}~{#1}} +% \newcommand{\Enew}[1]{\bsyn{new}~{#1}} +% \newcommand{\Elocal}[2]{\bsyn{local}~{#1}~\bsyn{in}~{#2}} +% \newcommand{\Smv}[3]{\Emv[]{#1}{#2}{#3}} +\newcommand{\Srm}[2]{#1 \mathord{\setminus} #2} + +\newcommand{\cpath}{\varrho} +\newcommand{\fpath}{\rho} + +\newcommand{\ie}{\emph{i.e.},\xspace} +\newcommand{\eg}{\emph{e.g.},~} +\newcommand{\etal}{\emph{et al.}} + +\renewcommand{\P}[1]{\Cpkg{#1}} +\newcommand{\X}[1]{\Cvar{#1}} +\newcommand{\E}{\mathcal{E}} +\newcommand{\C}{\mathcal{C}} +\newcommand{\M}{\mathcal{M}} +\newcommand{\B}{\mathcal{B}} +\newcommand{\R}{\mathcal{R}} +\newcommand{\K}{\mathcal{K}} +\renewcommand{\L}{\mathcal{L}} +\newcommand{\D}{\mathcal{D}} + +%%%% NEW + +\newdateformat{numericdate}{% +\THEYEAR.\twodigit{\THEMONTH}.\twodigit{\THEDAY} +} + +% EL DEFNS +\newcommand{\shal}[1]{\syn{shallow}(#1)} +\newcommand{\exports}[1]{\syn{exports}(#1)} +\newcommand{\Slocals}[1]{\syn{locals}(#1)} +\newcommand{\Slocalsi}[2]{\syn{locals}(#1; #2)} +\newcommand{\specs}[1]{\syn{specs}(#1)} +\newcommand{\ELmkespc}[2]{\syn{mkespc}(#1;#2)} +\newcommand{\Smkeenv}[1]{\syn{mkeenv}(#1)} +\newcommand{\Smklocaleenv}[2]{\syn{mklocaleenv}(#1;#2)} +\newcommand{\Smklocaleenvespcs}[1]{\syn{mklocaleenv}(#1)} +\newcommand{\Smkphnms}[1]{\syn{mkphnms}(#1)} +\newcommand{\Smkphnm}[1]{\syn{mkphnm}(#1)} +\newcommand{\Sfilterespc}[2]{\syn{filterespc}(#1;#2)} +\newcommand{\Sfilterespcs}[2]{\syn{filterespcs}(#1;#2)} +\newcommand{\Simps}[1]{\syn{imps}(#1)} + + + +% IL DEFNS +\newcommand{\dexp}{\mathit{dexp}} +\newcommand{\fexp}{\mathit{fexp}} +\newcommand{\tfexp}{\mathit{tfexp}} +\newcommand{\pexp}{\mathit{pexp}} +\newcommand{\dtyp}{\mathit{dtyp}} +\newcommand{\ftyp}{\mathit{ftyp}} +\newcommand{\hsmod}{\mathit{hsmod}} +\newcommand{\fenv}{\mathit{fenv}} +\newcommand{\ILmkmod}[6]{\syn{mkmod}(#1; #2; #3; #4; #5; #6)} +\newcommand{\ILmkstubs}[3]{\syn{mkstubs}(#1; #2; #3)} +\newcommand{\Smkstubs}[1]{\syn{mkstubs}(#1)} +\newcommand{\ILentnames}[1]{\syn{entnames}(#1)} +\newcommand{\ILmkfenv}[1]{\syn{mkfenv}(#1)} +\newcommand{\ILmkdtyp}[1]{\syn{mkdtyp}(#1)} +\newcommand{\ILmkknd}[1]{\syn{mkknd}(#1)} +\newcommand{\ILmkimpdecl}[2]{\syn{mkimpdecl}(#1;#2)} +\newcommand{\ILmkimpdecls}[2]{\syn{mkimpdecls}(#1;#2)} 
+\newcommand{\ILmkimpspec}[3]{\syn{mkimpspec}(#1;#2;#3)} +\newcommand{\ILmkentimp}[3]{\syn{mkentimp}(#1;#2;#3)} +\newcommand{\ILmkentimpp}[1]{\syn{mkentimp}(#1)} +\newcommand{\ILmkexp}[2]{\syn{mkexp}(#1;#2)} +\newcommand{\ILmkexpdecl}[2]{\syn{mkexpdecl}(#1;#2)} +\newcommand{\ILmkespc}[2]{\syn{mkespc}(#1;#2)} +\newcommand{\ILshal}[1]{\syn{shallow}(#1)} +\newcommand{\ILexports}[1]{\syn{exports}(#1)} +\newcommand{\ILdefns}[1]{\syn{defns}(#1)} +\newcommand{\ILdefnsi}[2]{\syn{defns}(#1;#2)} + +% CORE DEFNS +\newcommand{\Hentref}{\mathit{eref}} +\newcommand{\Hentimp}{\mathit{import}} +\newcommand{\Hentexp}{\mathit{export}} +\newcommand{\Himp}{\mathit{impdecl}} +\newcommand{\Himpspec}{\mathit{impspec}} +\newcommand{\Himps}{\mathit{impdecls}} +\newcommand{\Hexps}{\mathit{expdecl}} +\newcommand{\Hdef}{\mathit{def}} +\newcommand{\Hdefs}{\mathit{defs}} +\newcommand{\Hdecl}{\mathit{decl}} +\newcommand{\Hdecls}{\mathit{decls}} +\newcommand{\Heenv}{\mathit{eenv}} +\newcommand{\Haenv}{\mathit{aenv}} +% \newcommand{\HIL}[1]{{\scriptstyle\downarrow}#1} +\newcommand{\HIL}[1]{\check{#1}} + +\newcommand{\Hcmp}{\sqsubseteq} + +\newcommand{\uexp}{\mathit{uexp}} +\newcommand{\utyp}{\mathit{utyp}} +\newcommand{\typ}{\mathit{typ}} +\newcommand{\knd}{\mathit{knd}} +\newcommand{\kndstar}{\ttt{*}} +\newcommand{\kndarr}[2]{#1\ensuremath{\mathbin{\ttt{->}}}#2} +\newcommand{\kenv}{\mathit{kenv}} +\newcommand{\phnm}{\mathit{phnm}} +\newcommand{\spc}{\mathit{dspc}} +\newcommand{\spcs}{\mathit{dspcs}} +\newcommand{\espc}{\mathit{espc}} +\newcommand{\espcs}{\mathit{espcs}} +\newcommand{\ds}{\mathit{ds}} + +\newcommand{\shctx}{\sh{\Xi}_{\syn{ctx}}} +\newcommand{\shctxsigma}{\sh{\Sigma}_{\syn{ctx}}} + +\newcommand{\vdashsh}{\Vdash} + +% \newcommand{\vdashghc}{\vdash_{\!\!\mathrm{c}}^{\!\!\mathrm{\scriptscriptstyle EL}}} +% \newcommand{\vdashghcil}{\vdash_{\!\!\mathrm{c}}^{\!\!\mathrm{\scriptscriptstyle IL}}} +% \newcommand{\vdashshghc}{\vdashsh_{\!\!\mathrm{c}}^{\!\!\mathrm{\scriptscriptstyle EL}}} +\newcommand{\vdashghc}{\vdash_{\!\!\mathrm{c}}} +\newcommand{\vdashghcil}{\vdash_{\!\!\mathrm{c}}^{\!\!\mathrm{\scriptscriptstyle IL}}} +\newcommand{\vdashshghc}{\vdashsh_{\!\!\mathrm{c}}} + +% CORE STUFF +\newcommandx*{\JCModImp}[5][1=\sh\B, 2=\nu_0, usedefault=@]% + {#1;#2 \vdashshghc #3;#4 \elabto #5} +\newcommandx*{\JIlCModImp}[5][1=\fenv, 2=f_0, usedefault=@]% + {#1;#2 \vdashghcil #3;#4 \elabto #5} +\newcommandx*{\JCSigImp}[5][1=\sh\B, 2=\sh\tau, usedefault=@]% + {#1;#2 \vdashshghc #3;#4 \elabto #5} + +\newcommandx*{\JCImpDecl}[3][1=\sh\B, usedefault=@]% + {#1 \vdashshghc #2 \elabto #3} +\newcommandx*{\JCImp}[4][1=\sh\B, 2=p, usedefault=@]% + {#1;#2 \vdashshghc #3 \elabto #4} +\newcommandx*{\JIlCImpDecl}[3][1=\fenv, usedefault=@]% + {#1 \vdashghcil #2 \elabto #3} +\newcommandx*{\JIlCImp}[4][1=\fenv, 2=f, usedefault=@]% + {#1;#2 \vdashghcil #3 \elabto #4} + +\newcommandx*{\JCModExp}[4][1=\nu_0, 2=\Heenv, usedefault=@]% + {#1;#2 \vdashshghc #3 \elabto #4} +\newcommandx*{\JIlCModExp}[4][1=f_0, 2=\HIL\Heenv, usedefault=@]% + {#1;#2 \vdashghcil #3 \elabto #4} + +\newcommandx*{\JCModDef}[5][1=\Psi, 2=\nu_0, 3=\Heenv, usedefault=@]% + {#1; #2; #3 \vdashghcil #4 : #5} +\newcommandx*{\JIlCModDef}[5][1=\fenv, 2=f_0, 3=\HIL\Heenv, usedefault=@]% + {#1; #2; #3 \vdashghcil #4 : #5} +\newcommandx*{\JCSigDecl}[5][1=\Psi, 2=\sh\tau, 3=\Heenv, usedefault=@]% + {#1; #2; #3 \vdashghcil #4 : #5} + +\newcommandx*{\JCExp}[6][1=\sh\Psi, 2=\nu_0, 3=\Hdefs, 4=\Heenv, usedefault=@]% + {#1;#2;#3;#4 \vdashshghc #5 \elabto #6} 
+\newcommandx*{\JIlCExp}[4][1=f_0, 2=\HIL\Heenv, usedefault=@]% + {#1;#2 \vdashghcil #3 \elabto #4} + +\newcommandx*{\JCRefExp}[7][1=\sh\Psi, 2=\nu_0, 3=\Hdefs, 4=\Heenv, usedefault=@]% + {#1;#2;#3;#4 \vdashshghc #5 \elabto #6:#7} +\newcommandx*{\JIlCRefExp}[7][1=\fenv, 2=f_0, 3=\HIL\Hdefs, 4=\HIL\Heenv, usedefault=@]% + {#1;#2;#3;#4 \vdashghcil #5 \elabto #6:#7} + +\newcommandx*{\JCMod}[4][1=\Gamma, 2=\nu_0, usedefault=@]% + {#1; #2 \vdashghc #3 : #4} +\newcommandx*{\JIlCMod}[3][1=\fenv, usedefault=@]% + {#1 \vdashghcil #2 : #3} +\newcommandx*{\JCSig}[5][1=\Gamma, 2=\sh\tau, usedefault=@]% + {#1; #2 \vdashghc #3 \elabto #4;#5} +\newcommandx*{\JCShSig}[5][1=\Gamma, 2=\sh\tau, usedefault=@]% + {#1; #2 \vdashghc #3 \elabto #4;#5} +\newcommandx*{\JCModElab}[5][1=\Gamma, 2=\nu_0, usedefault=@]% + % {#1; #2 \vdashghc #3 : #4 \elabto #5} + {#1; #2 \vdashghc #3 : #4 \;\shade{\elabto #5}} + +\newcommandx*{\JCWfEenv}[2][1=\Haenv, usedefault=@]% + {#1 \vdashshghc #2~\syn{wf}} +\newcommandx*{\JCWfEenvMap}[2][1=\Haenv, usedefault=@]% + {#1 \vdashshghc #2~\syn{wf}} +\newcommandx*{\JIlCWfEenv}[2][1=\HIL\Haenv, usedefault=@]% + {#1 \vdashghcil #2~\syn{wf}} +\newcommandx*{\JIlCWfEenvMap}[2][1=\HIL\Haenv, usedefault=@]% + {#1 \vdashghcil #2~\syn{wf}} + +\newcommandx*{\JIlTfexp}[3][1=\fenv, 2=f_0, usedefault=@]% + {#1; #2 \vdash #3} + + + + % IL STUFF + +\newcommandx*{\JIlWf}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JIlKnd}[4][1=\fenv, 2=\kenv, usedefault=@]% + {#1;#2 \vdashghcil #3 \mathrel{\cc} #4} +% \newcommandx*{\JIlSub}[4][1=\fenv, 2=f, usedefault=@]% +% {#1;#2 \vdash #3 \le #4} +\newcommandx*{\JIlSub}[2][usedefault=@]% + {\vdash #1 \le #2} +\newcommandx*{\JIlMerge}[3][usedefault=@]% + {\vdash #1 \oplus #2 \Rightarrow #3} + +\newcommandx*{\JIlDexp}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2} +\newcommandx*{\JIlDexpTyp}[3][1=\fenv, usedefault=@]% + {#1 \vdash #2 : #3} + +\newcommandx*{\JIlWfFenv}[2][1=\nil, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JIlWfFtyp}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JIlWfSpc}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JIlWfESpc}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JIlWfSig}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JIlWfFtypSpecs}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2 ~\syn{specs-wf}} +\newcommandx*{\JIlWfFtypExps}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2 ~\syn{exports-wf}} +\newcommandx*{\JIlWfFenvDeps}[2][1=\fenv, usedefault=@]% + {#1 \vdash #2 ~\syn{deps-wf}} + +% WF TYPE STUFF IN EL +\newcommandx*{\JPkgValid}[1]% + {\vdash #1 ~\syn{pkg-valid}} +\newcommandx*{\JWfPkgCtx}[1][1=\Delta, usedefault=@]% + {\vdash #1 ~\syn{wf}} +\newcommandx*{\JWfPhCtx}[2][1=\nil, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JWfModTyp}[2][1=\Psi, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JWfModTypPol}[3][1=\Psi, usedefault=@]% + {#1 \vdash #2^{#3} ~\syn{wf}} +\newcommandx*{\JWfLogSig}[2][1=\Psi, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JWfSpc}[2][1=\Psi, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JWfESpc}[2][1=\Psi, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JWfSig}[2][1=\nil, usedefault=@]% + {#1 \vdash #2 ~\syn{wf}} +\newcommandx*{\JWfModTypSpecs}[2][1=\Psi, usedefault=@]% + {#1 \vdash #2 ~\syn{specs-wf}} +\newcommandx*{\JWfModTypPolSpecs}[3][1=\Psi, usedefault=@]% + {#1 \vdash #2^{#3} ~\syn{specs-wf}} +\newcommandx*{\JWfModTypExps}[2][1=\Psi, 
usedefault=@]% + {#1 \vdash #2 ~\syn{exports-wf}} +\newcommandx*{\JWfPhCtxDeps}[2][1=\Psi, usedefault=@]% + {#1 \vdash #2 ~\syn{deps-wf}} +\newcommandx*{\JWfPhCtxDepsOne}[4][1=\Psi, usedefault=@]% + {#1 \vdash \styp{#2}{#3}{#4} ~\syn{deps-wf}} + +% WF SHAPE STUFF IN EL +\newcommandx*{\JWfShPhCtx}[2][1=\nil, usedefault=@]% + {#1 \vdashsh #2 ~\syn{wf}} +\newcommandx*{\JWfModSh}[2][1=\sh\Psi, usedefault=@]% + {#1 \vdashsh #2 ~\syn{wf}} +\newcommandx*{\JWfModShPol}[3][1=\sh\Psi, usedefault=@]% + {#1 \vdashsh #2^{#3} ~\syn{wf}} +\newcommandx*{\JWfShLogSig}[2][1=\sh\Psi, usedefault=@]% + {#1 \vdashsh #2 ~\syn{wf}} +\newcommandx*{\JWfShSpc}[2][1=\sh\Psi, usedefault=@]% + {#1 \vdashsh #2 ~\syn{wf}} +\newcommandx*{\JWfShESpc}[2][1=\sh\Psi, usedefault=@]% + {#1 \vdashsh #2 ~\syn{wf}} +\newcommandx*{\JWfShSig}[2][1=\nil, usedefault=@]% + {#1 \vdashsh #2 ~\syn{wf}} +\newcommandx*{\JWfModShSpecs}[2][1=\sh\Psi, usedefault=@]% + {#1 \vdashsh #2 ~\syn{specs-wf}} +\newcommandx*{\JWfModShPolSpecs}[3][1=\sh\Psi, usedefault=@]% + {#1 \vdashsh #2^{#3} ~\syn{specs-wf}} +\newcommandx*{\JWfModShExps}[2][1=\sh\Psi, usedefault=@]% + {#1 \vdashsh #2 ~\syn{exports-wf}} +\newcommandx*{\JWfEenv}[4][1=\sh\Psi, 2=\nu_0, 3=\Hdefs, usedefault=@]% + {#1;#2;#3 \vdashshghc #4 ~\syn{wf}} + +\newcommandx*{\JCoreKnd}[4][1=\Psi, 2=\kenv, usedefault=@]% + {#1;#2 \vdashghc #3 \mathrel{\cc} #4} + +\newcommandx*{\JStampEq}[2]% + {\vdash #1 \equiv #2} +\newcommandx*{\JStampNeq}[2]% + {\vdash #1 \not\equiv #2} +\newcommandx*{\JUnif}[3]% + {\syn{unify}(#1 \doteq #2) \elabto #3} +\newcommandx*{\JUnifM}[2]% + {\syn{unify}(#1) \elabto #2} + +\newcommandx*{\JModTypWf}[1]% + {\vdash #1 ~\syn{wf}} + +\newcommandx*{\JModSub}[2]% + {\vdash #1 \le #2} +\newcommandx*{\JModSup}[2]% + {\vdash #1 \ge #2} +\newcommandx*{\JShModSub}[2]% + {\vdashsh #1 \le #2} + +\newcommandx*{\JModEq}[2]% + {\vdash #1 \equiv #2} +% \newcommandx*{\JCShModEq}[3][3=\C]% +% {\vdashsh #1 \equiv #2 \mathbin{|} #3} + +\newcommandx*{\JETyp}[4][1=\Gamma, 2=\shctxsigma, usedefault=@]% + {#1;#2 \vdash #3 : #4} +\newcommandx*{\JETypElab}[5][1=\Gamma, 2=\shctxsigma, usedefault=@]% + {\JETyp[#1][#2]{#3}{#4} \elabto #5} +\newcommandx*{\JESh}[3][1=\sh\Gamma, usedefault=@]% + {#1 \vdashsh #2 \Rightarrow #3} + +\newcommandx*{\JBTyp}[5][1=\Delta, 2=\Gamma, 3=\shctx, usedefault=@]% + {#1;#2;#3 \vdash #4 : #5} +\newcommandx*{\JBTypElab}[6][1=\Delta, 2=\Gamma, 3=\shctx, usedefault=@]% + % {\JBTyp[#1][#2][#3]{#4}{#5} \elabto #6} + {\JBTyp[#1][#2][#3]{#4}{#5} \;\shade{\elabto #6}} +\newcommandx*{\JBSh}[4][1=\Delta, 2=\sh\Gamma, usedefault=@]% + {#1;#2 \vdashsh #3 \Rightarrow #4} + +\newcommandx*{\JBVTyp}[4][1=\Delta, 2=\shctx, usedefault=@]% + {#1;#2 \vdash #3 : #4} +\newcommandx*{\JBVTypElab}[5][1=\Delta, 2=\shctx, usedefault=@]% + % {\JBVTyp[#1][#2]{#3}{#4} \elabto #5} + {\JBVTyp[#1][#2]{#3}{#4} \;\shade{\elabto #5}} +\newcommandx*{\JBVSh}[4][1=\Delta, usedefault=@]% + {#1 \vdashsh #2 \Rightarrow #3;\, #4} + +\newcommandx*{\JImp}[3][1=\Gamma, usedefault=@]% + {#1 \vdashimp #2 \elabto #3} +\newcommandx*{\JShImp}[3][1=\sh\Gamma, usedefault=@]% + {#1 \vdashshimp #2 \elabto #3} + +\newcommandx*{\JGhcMod}[4]% + {#1; #2 \vdashghc #3 : #4} +\newcommandx*{\JShGhcMod}[4]% + {#1; #2 \vdashshghc #3 : #4} + +\newcommandx*{\JGhcSig}[5]% + {#1; #2 \vdashghc #3 \elabto #4;#5} +\newcommandx*{\JShGhcSig}[5]% + {#1; #2 \vdashshghc #3 \elabto #4;#5} + +\newcommandx*{\JThin}[3][1=t, usedefault=@]% + {\vdash #2 \xrightarrow{~#1~} #3} +\newcommandx*{\JShThin}[3][1=t, usedefault=@]% + {\vdashsh #2 
\xrightarrow{~#1~} #3} + +\newcommandx*{\JShMatch}[3][1=\nu, usedefault=@]% + {#1 \vdash #2 \sqsubseteq #3} + +\newcommandx*{\JShTrans}[4]% + {\vdash #1 \le_{#2} #3 \elabto #4} + +\newcommandx*{\JMerge}[3]% + {\vdash #1 + #2 \Rightarrow #3} +\newcommandx*{\JShMerge}[5]% + {\vdashsh #1 + #2 \Rightarrow #3;\, #4;\, #5} +\newcommandx*{\JShMergeNew}[4]% + {\vdashsh #1 + #2 \Rightarrow #3;\, #4} +\newcommandx*{\JShMergeSimple}[3]% + {\vdashsh #1 + #2 \Rightarrow #3} + +\newcommandx*{\JDTyp}[3][1=\Delta, usedefault=@]% + {#1 \vdash #2 : #3} +\newcommandx*{\JDTypElab}[4][1=\Delta, usedefault=@]% + % {#1 \vdash #2 : #3 \elabto #4} + {#1 \vdash #2 : #3 \;\shade{\elabto #4}} + +\newcommandx*{\JTTyp}[2][1=\Delta, usedefault=@]% + {#1 \vdash #2} + +\newcommandx*{\JSound}[3][1=\Psi_\syn{ctx}, usedefault=@]% + {#1 \vdash #2 \sim #3} + +\newcommandx*{\JSoundOne}[4][1=\Psi, 2=\fenv, usedefault=@]% + {\vdash #3 \sim #4} +% \newcommand{\Smodi}[4]{\ensuremath{\oxb{=#2 \cc #3 \imps #4}^{#1}}} +\newcommand{\Smodi}[3]{\ensuremath{\oxb{=#2 \cc #3}^{#1}}} +\newcommand{\Smod}[2]{\Smodi{+}{#1}{#2}} +\newcommand{\Ssig}[2]{\Smodi{-}{#1}{#2}} +\newcommand{\Sreq}[2]{\Smodi{?}{#1}{#2}} +\newcommand{\Shole}[2]{\Smodi{\circ}{#1}{#2}} + +\newcommand{\SSmodi}[2]{\ensuremath{\oxb{=#2}^{#1}}} +\newcommand{\SSmod}[1]{\SSmodi{+}{#1}} +\newcommand{\SSsig}[1]{\SSmodi{-}{#1}} +\newcommand{\SSreq}[1]{\SSmodi{?}{#1}} +\newcommand{\SShole}[1]{\SSmodi{\circ}{#1}} + +% \newcommand{\styp}[3]{\oxb{{#1}\cc{#2}}^{#3}} +\newcommand{\styp}[3]{{#1}{:}{#2}^{#3}} +\newcommand{\stm}[2]{\styp{#1}{#2}{\scriptscriptstyle+}} +\newcommand{\sts}[2]{\styp{#1}{#2}{\scriptscriptstyle-}} + +% \newcommand{\mtypsep}{[\!]} +\newcommand{\mtypsep}{\mbox{$\bm{;}$}} +\newcommand{\mtypsepsp}{\hspace{.3em}} +\newcommand{\msh}[3]{\aoxb{#1 ~\mtypsep~ #2 ~\mtypsep~ #3}} +\newcommand{\mtyp}[3]{ + \aoxb{\mtypsepsp #1 \mtypsepsp\mtypsep\mtypsepsp + #2 \mtypsepsp\mtypsep\mtypsepsp + #3 \mtypsepsp}} +\newcommand{\bigmtyp}[3]{\ensuremath{ + \left\langle\!\vrule \begin{array}{l} + #1 ~\mtypsep \\[0pt] + #2 ~\mtypsep \\ + #3 + \end{array} \vrule\!\right\rangle +}} + + +\newcommand{\mtypm}[2]{\mtyp{#1}{#2}^{\scriptstyle+}} +\newcommand{\mtyps}[2]{\mtyp{#1}{#2}^{\scriptstyle-}} +\newcommand{\bigmtypm}[2]{\bigmtyp{#1}{#2}^{\scriptstyle+}} +\newcommand{\bigmtyps}[2]{\bigmtyp{#1}{#2}^{\scriptstyle-}} + +\newcommand{\mref}{\ensuremath{\mathit{mref}}} +\newcommand{\selfpath}{\msyn{Local}} + +% \newcommand{\Ltyp}[3]{\oxb{#1 \mathbin{\scriptstyle\MVAt} #2}^{#3}} +% \newcommand{\Ltyp}[2]{\poxb{#1 \mathbin{\scriptstyle\MVAt} #2}} +\newcommand{\Ltyp}[2]{#1 {\scriptstyle\MVAt} #2} + +\newcommand{\Sshape}[1]{\ensuremath{\syn{shape}(#1)}} +\newcommand{\Srename}[2]{\ensuremath{\syn{rename}(#1;#2)}} +\newcommand{\Scons}[2]{\ensuremath{\syn{cons}(#1;#2)}} +\newcommand{\Smkreq}[1]{\ensuremath{\syn{hide}(#1)}} +\newcommand{\Sfv}[1]{\ensuremath{\syn{fv}(#1)}} +\newcommand{\Sdom}[1]{\ensuremath{\syn{dom}(#1)}} +\newcommand{\Srng}[1]{\ensuremath{\syn{rng}(#1)}} +\newcommand{\Sdomp}[2]{\ensuremath{\syn{dom}_{#1}(#2)}} +\newcommand{\Sclos}[1]{\ensuremath{\syn{clos}(#1)}} +\newcommand{\Scloss}[2]{\ensuremath{\syn{clos}_{#1}(#2)}} +\newcommand{\Snorm}[1]{\ensuremath{\syn{norm}(#1)}} +\newcommand{\Sident}[1]{\ensuremath{\syn{ident}(#1)}} +\newcommand{\Snec}[2]{\ensuremath{\syn{nec}(#1; #2)}} +\newcommand{\Sprovs}[1]{\ensuremath{\syn{provs}(#1)}} +\newcommand{\Smkstamp}[2]{\ensuremath{\syn{mkident}(#1; #2)}} +\newcommand{\Sname}[1]{\ensuremath{\syn{name}(#1)}} 
+\newcommand{\Snames}[1]{\ensuremath{\syn{names}(#1)}} +\newcommand{\Sallnames}[1]{\ensuremath{\syn{allnames}(#1)}} +\newcommand{\Shassubs}[1]{\ensuremath{\syn{hasSubs}(#1)}} +\newcommand{\Snooverlap}[1]{\ensuremath{\syn{nooverlap}(#1)}} +\newcommand{\Sreduce}[2]{\ensuremath{\syn{apply}(#1; #2)}} +\newcommand{\Smkfenv}[1]{\ensuremath{\syn{mkfenv}(#1)}} +\newcommand{\Svalidspc}[2]{\ensuremath{\syn{validspc}(#1; #2)}} +\newcommand{\Srepath}[2]{\ensuremath{\syn{repath}(#1; #2)}} +\newcommand{\Smksigenv}[2]{\ensuremath{\syn{mksigenv}(#1; #2)}} +\newcommand{\Smksigshenv}[2]{\ensuremath{\syn{mksigshenv}(#1; #2)}} +\newcommand{\Squalify}[2]{\ensuremath{\syn{qualify}(#1; #2)}} +\newcommandx*{\Sdepends}[2][1=\Psi, usedefault=@]% + {\ensuremath{\syn{depends}_{#1}(#2)}} +\newcommandx*{\Sdependss}[3][1=\Psi, 2=N, usedefault=@]% + {\ensuremath{\syn{depends}_{#1;#2}(#3)}} +\newcommandx*{\Sdependsss}[4][1=\Psi, 2=V, 3=\theta, usedefault=@]% + {\ensuremath{\syn{depends}_{#1;#2;#3}(#4)}} +\newcommand{\Snormsubst}[2]{\ensuremath{\syn{norm}(#1; #2)}} + +% \newcommand{\Smergeable}[2]{\ensuremath{\syn{mergeable}(#1; #2)}} +\newcommand{\mdef}{\mathrel{\bot}} +\newcommand{\Smergeable}[2]{\ensuremath{#1 \mdef #2}} + +\newcommand{\Sstamp}[1]{\ensuremath{\syn{stamp}(#1)}} +\newcommand{\Stype}[1]{\ensuremath{\syn{type}(#1)}} + +\newcommand{\Strue}{\ensuremath{\syn{true}}} +\newcommand{\Sfalse}{\ensuremath{\syn{false}}} + +\newcommandx*{\refsstar}[2][1=\nu_0, usedefault=@]% + {\ensuremath{\syn{refs}^{\star}}_{#1}(#2)} + +\renewcommand{\merge}{\boxplus} +\newcommand{\meet}{\sqcap} + +\newcommand{\Shaslocaleenv}[3]{\ensuremath{\syn{haslocaleenv}(#1;#2;#3)}} +\newcommand{\MTvalidnewmod}[3]{\ensuremath{\syn{validnewmod}(#1;#2;#3)}} +\newcommand{\Sdisjoint}[1]{\ensuremath{\syn{disjoint}(#1)}} +\newcommand{\Sconsistent}[1]{\ensuremath{\syn{consistent}(#1)}} +\newcommand{\Slocmatch}[2]{\ensuremath{\syn{locmatch}(#1;#2)}} +\newcommand{\Sctxmatch}[2]{\ensuremath{\syn{ctxmatch}(#1;#2)}} +\newcommand{\Snolocmatch}[2]{\ensuremath{\syn{nolocmatch}(#1;#2)}} +\newcommand{\Snoctxmatch}[2]{\ensuremath{\syn{noctxmatch}(#1;#2)}} +\newcommand{\Sislocal}[2]{\ensuremath{\syn{islocal}(#1;#2)}} +\newcommand{\Slocalespcs}[2]{\ensuremath{\syn{localespcs}(#1;#2)}} + +\newcommand{\Cprod}[1]{\syn{productive}(#1)} +\newcommand{\Cnil}{\nil} +\newcommand{\id}{\syn{id}} + +\newcommand{\nui}{\nu_{\syn{i}}} +\newcommand{\taui}{\tau_{\syn{i}}} +\newcommand{\Psii}{\Psi_{\syn{i}}} + +\newcommand{\vis}{\ensuremath{\mathsf{\scriptstyle V}}} +\newcommand{\hid}{\ensuremath{\mathsf{\scriptstyle H}}} + +\newcommand{\taum}[1]{\ensuremath{\tau_{#1}^{m_{#1}}}} + +\newcommand{\sigmamod}{\sigma_{\syn{m}}} +\newcommand{\sigmaprov}{\sigma_{\syn{p}}} + +\newcommand{\Svalidsubst}[2]{\ensuremath{\syn{validsubst}(#1;#2)}} +\newcommand{\Salias}[1]{\ensuremath{\syn{alias}(#1)}} +\newcommand{\Saliases}[1]{\ensuremath{\syn{aliases}(#1)}} +\newcommand{\Simp}[1]{\ensuremath{\syn{imp}(#1)}} +\newcommand{\Styp}[1]{\ensuremath{\syn{typ}(#1)}} +\newcommand{\Spol}[1]{\ensuremath{\syn{pol}(#1)}} + +\newcommand{\stoff}{\stof{(-)}} +\newcommand{\stheta}{\stof\theta} + + +%%%%%%% FOR THE PAPER! 
+\newcommand{\secref}[1]{Section~\ref{sec:#1}} +\newcommand{\figref}[1]{Figure~\ref{fig:#1}} + +% typesetting for module/path names +\newcommand{\mname}[1]{\textsf{#1}} +\newcommand{\m}[1]{\mname{#1}} + +% typesetting for package names +\newcommand{\pname}[1]{\textsf{#1}} + +\newcommand{\kpm}[2]{\angb{\pname{#1}.#2}} + +% for core entities +\newcommand{\code}[1]{\texttt{#1}} +\newcommand{\core}[1]{\texttt{#1}} + +\newcommand{\req}{\bsyn{req}} +\newcommand{\hiding}[1]{\req~\m{#1}} + +\newcommand{\Emod}[1]{\ensuremath{[#1]}} +\newcommand{\Esig}[1]{\ensuremath{[\cc#1]}} +\newcommand{\Epkg}[2]{\bsyn{package}~\pname{#1}~\bsyn{where}~{#2}} +% \newcommand{\Epkgt}[3]{\bsyn{package}~{#1}~\bsyn{only}~{#2}~\bsyn{where}~{#3}} +\newcommand{\Epkgt}[3]{\bsyn{package}~\pname{#1}~{#2}~\bsyn{where}~{#3}} +\newcommand{\Einc}[1]{\bsyn{include}~\pname{#1}} +% \newcommand{\Einct}[2]{\bsyn{include}~{#1}~\bsyn{only}~{#2}} +% \newcommand{\Einctr}[3]{\bsyn{include}~{#1}~\bsyn{only}~{#2}~{#3}} +\newcommand{\Einct}[2]{\bsyn{include}~\pname{#1}~(#2)} +\newcommand{\Eincr}[2]{\bsyn{include}~\pname{#1}~\angb{#2}} +\newcommand{\Einctr}[3]{\bsyn{include}~\pname{#1}~(#2)~\angb{#3}} +\newcommand{\Emv}[2]{#1 \mapsto #2} +\newcommand{\Emvp}[2]{\m{#1} \mapsto \m{#2}} +\newcommand{\Etr}[3][~]{{#2}{#1}\langle #3 \rangle} +\newcommand{\Erm}[3][~]{{#2}{#1}\langle #3 \mapnil \rangle} +\newcommand{\Ethin}[1]{(#1)} +\newcommand{\Ethinn}[2]{(#1; #2)} + + +% \newcommand{\Pdef}[2]{\ensuremath{\begin{array}{l} \Phead{#1} #2\end{array}}} +% \newcommand{\Phead}[1]{\bsyn{package}~\pname{#1}~\bsyn{where} \\} +% \newcommand{\Pbndd}[2]{\hspace{1em}{#1} = {#2} \\} +% \newcommand{\Pbnd}[2]{\hspace{1em}\mname{#1} = {#2} \\} +% \newcommand{\Pref}[2]{\hspace{1em}\mname{#1} = \mname{#2} \\} +% \newcommand{\Pmod}[2]{\hspace{1em}\mname{#1} = [\code{#2}] \\} +% \newcommand{\Psig}[2]{\hspace{1em}\mname{#1} \cc [\code{#2}] \\} +\newcommand{\Pdef}[2]{\ensuremath{ + \begin{array}{@{\hspace{1em}}L@{\;\;}c@{\;\;}l} + \multicolumn{3}{@{}l}{\Phead{#1}} \\ + #2 + \end{array} +}} +\newcommand{\Pdeft}[3]{\ensuremath{ + \begin{array}{@{\hspace{1em}}L@{\;\;}c@{\;\;}l} + \multicolumn{3}{@{}l}{\Pheadt{#1}{#2}} \\ + #3 + \end{array} +}} +\newcommand{\Phead}[1]{\bsyn{package}~\pname{#1}~\bsyn{where}} +\newcommand{\Pheadt}[2]{\bsyn{package}~\pname{#1}~(#2)~\bsyn{where}} +\newcommand{\Pbnd}[2]{#1 &=& #2 \\} +\newcommand{\Pref}[2]{\mname{#1} &=& \mname{#2} \\} +\newcommand{\Pmod}[2]{\mname{#1} &=& [\code{#2}] \\} +\newcommand{\Pmodd}[2]{\mname{#1} &=& #2 \\} +\newcommand{\Psig}[2]{\mname{#1} &\cc& [\code{#2}] \\} +\newcommand{\Psigg}[2]{\mname{#1} &\cc& #2 \\} +\newcommand{\Pmulti}[1]{\multicolumn{3}{@{\hspace{1em}}l} {#1} \\} +\newcommand{\Pinc}[1]{\Pmulti{\Einc{#1}}} +\newcommand{\Pinct}[2]{\Pmulti{\Einct{#1}{#2}}} +\newcommand{\Pincr}[2]{\Pmulti{\Eincr{#1}{#2}}} +\newcommand{\Pinctr}[3]{\Pmulti{\Einctr{#1}{#2}{#3}}} +\newcommand{\Pmodbig}[2]{\mname{#1} &=& \left[ + \begin{codeblock} + #2 + \end{codeblock} +\right] \\} +\newcommand{\Psigbig}[2]{\mname{#1} &\cc& \left[ + \begin{codeblock} + #2 + \end{codeblock} +\right] \\} + +\newcommand{\Mimp}[1]{\msyn{import}~\mname{#1}} +\newcommand{\Mimpq}[1]{\msyn{import}~\msyn{qualified}~\mname{#1}} +\newcommand{\Mimpas}[2]{\msyn{import}~\mname{#1}~\msyn{as}~\mname{#2}} +\newcommand{\Mimpqas}[2]{\msyn{import}~\msyn{qualified}~\mname{#1}~\msyn{as}~\mname{#2}} +\newcommand{\Mexp}[1]{\msyn{export}~(#1)} + +\newcommand{\illtyped}{\hfill ($\times$) \; ill-typed} + +\newenvironment{example}[1][LL]% + {\ignorespaces 
\begin{flushleft}\begin{tabular}{@{\hspace{1em}}#1} }% + {\end{tabular}\end{flushleft} \ignorespacesafterend} + +\newenvironment{counterexample}[1][LL]% + {\ignorespaces \begin{flushleft}\begin{tabular}{@{\hspace{1em}}#1} }% + {& \text{\illtyped} \end{tabular}\end{flushleft} \ignorespacesafterend} + +\newenvironment{codeblock}% + {\begin{varwidth}{\textwidth}\begin{alltt}}% + {\end{alltt}\end{varwidth}} + +\newcommand{\fighead}{\hrule\vspace{1.5ex}} +\newcommand{\figfoot}{\vspace{1ex}\hrule} +\newenvironment{myfig}{\fighead\small}{\figfoot} + +\newcommand{\Mhead}[2]{\syn{module}~{#1}~\syn{(}{#2}\syn{)}~\syn{where}} +\newcommand{\Mdef}[3]{\ensuremath{ + \begin{array}{@{\hspace{1em}}L} + \multicolumn{1}{@{}L}{\Mhead{#1}{\core{#2}}} \\ + #3 + \end{array} +}} + +\newcommand{\HMstof}[1]{\ensuremath{#1}} +% \newcommand{\HMstof}[1]{\ensuremath{\lfloor #1 \rfloor}} +% \newcommand{\HMstof}[1]{\ensuremath{\underline{#1}}} +% \newcommand{\HMstof}[1]{{#1}^{\star}} +\newcommand{\HMhead}[2]{\syn{module}~\(\HMstof{#1}\)~\syn{(}{#2}\syn{)}~\syn{where}} +\newcommand{\HMdef}[3]{\ensuremath{ + \begin{array}{@{\hspace{1em}}L} + \multicolumn{1}{@{}L}{\HMhead{#1}{\core{#2}}} \\ + #3 + \end{array} +}} +\newcommand{\HMimpas}[3]{% + \msyn{import}~\ensuremath{\HMstof{#1}}~% + \msyn{as}~\mname{#2}~\msyn{(}\core{#3}\msyn{)}} +\newcommand{\HMimpqas}[3]{% + \msyn{import}~\msyn{qualified}~\ensuremath{\HMstof{#1}}~% + \msyn{as}~\mname{#2}~\msyn{(}\core{#3}\msyn{)}} + +\newcommand{\stackedenv}[2][c]{\ensuremath{ + \begin{array}{#1} + #2 + \end{array} +}} + +% \renewcommand{\nil}{\mathsf{nil}} +\renewcommand{\nil}{\mathrel\emptyset} + +% \newcommand{\ee}{\mathit{ee}} +\newcommand{\ee}{\mathit{dent}} + +\renewcommand{\gets}{\mathbin{\coloneqq}}
\ No newline at end of file diff --git a/docs/backpack/commands-rebindings.tex b/docs/backpack/commands-rebindings.tex new file mode 100644 index 0000000000..96ad2bb2cc --- /dev/null +++ b/docs/backpack/commands-rebindings.tex @@ -0,0 +1,57 @@ + + +%% hide the full syntax of shapes/types for the paper +\newcommand{\fullmsh}[3]{\aoxb{#1 ~\mtypsep~ #2 ~\mtypsep~ #3}} +\newcommand{\fullmtyp}[3]{ + \aoxb{\mtypsepsp #1 \mtypsepsp\mtypsep\mtypsepsp + #2 \mtypsepsp\mtypsep\mtypsepsp + #3 \mtypsepsp}} +\newcommand{\fullbigmtyp}[3]{\ensuremath{ + \left\langle\!\vrule \begin{array}{l} + #1 ~\mtypsep \\[0pt] + #2 ~\mtypsep \\ + #3 + \end{array} \vrule\!\right\rangle +}} +\renewcommand{\msh}[2]{\aoxb{#1 \mtypsepsp\mtypsep\mtypsepsp #2}} +\renewcommand{\mtyp}[2]{ + \aoxb{#1 ~\mtypsep~ #2}} +\newcommand{\mtypstretch}[2]{ + \left\langle\!\vrule + \mtypsepsp #1 \mtypsepsp\mtypsep\mtypsepsp #2 \mtypsepsp + \vrule\!\right\rangle +} +\renewcommand{\bigmtyp}[2]{\ensuremath{ + \left\langle\!\vrule \begin{array}{l} + #1 ~\mtypsep \\[0pt] #2 + \end{array} \vrule\!\right\rangle +}} + + + +%% change syntax of signatures +\renewcommand{\Esig}[1]{\ensuremath{\,[#1]}} + +\renewcommandx*{\JBVSh}[3][1=\Delta, usedefault=@]% + {#1 \vdashsh #2 \Rightarrow #3} + + +% JUDGMENTS +\renewcommandx*{\JBTypElab}[6][1=\Delta, 2=\Gamma, 3=\shctx, usedefault=@]% + % {\JBTyp[#1][#2][#3]{#4}{#5} \elabto #6} + {\JBTyp[#1][#2][#3]{#4}{#5} \;\shade{\elabto #6}} +\renewcommandx*{\JBVTypElab}[5][1=\Delta, 2=\shctx, usedefault=@]% + % {\JBVTyp[#1][#2]{#3}{#4} \elabto #5} + {\JBVTyp[#1][#2]{#3}{#4} \;\shade{\elabto #5}} +\renewcommandx*{\JDTypElab}[4][1=\Delta, usedefault=@]% + % {#1 \vdash #2 : #3 \elabto #4} + {#1 \vdash #2 : #3 \;\shade{\elabto #4}} +\renewcommandx*{\JCModElab}[5][1=\Gamma, 2=\nu_0, usedefault=@]% + % {#1; #2 \vdashghc #3 : #4 \elabto #5} + {#1; #2 \vdashghc #3 : #4 \;\shade{\elabto #5}} + + +%%% Local Variables: +%%% mode: latex +%%% TeX-master: "paper" +%%% End: diff --git a/docs/backpack/diagrams.pdf b/docs/backpack/diagrams.pdf Binary files differnew file mode 100644 index 0000000000..a50916b234 --- /dev/null +++ b/docs/backpack/diagrams.pdf diff --git a/docs/backpack/diagrams.xoj b/docs/backpack/diagrams.xoj Binary files differnew file mode 100644 index 0000000000..acec8d02de --- /dev/null +++ b/docs/backpack/diagrams.xoj diff --git a/docs/backpack/pkgdb.png b/docs/backpack/pkgdb.png Binary files differnew file mode 100644 index 0000000000..9779444b42 --- /dev/null +++ b/docs/backpack/pkgdb.png diff --git a/docs/comm/exts/ndp.html b/docs/comm/exts/ndp.html deleted file mode 100644 index 2c79d728d5..0000000000 --- a/docs/comm/exts/ndp.html +++ /dev/null @@ -1,360 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Parallel Arrays</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Parallel Arrays</h1> - <p> - This section describes an experimental extension by high-performance - arrays, which comprises special syntax for array types and array - comprehensions, a set of optimising program transformations, and a set - of special purpose libraries. The extension is currently only partially - implemented, but the development will be tracked here. - <p> - Parallel arrays originally got their name from the aim to provide an - architecture-independent programming model for a range of parallel - computers. 
However, since experiments showed that the approach is also - worthwhile for sequential array code, the emphasis has shifted to their - parallel evaluation semantics: As soon as any element in a parallel - array is demanded, all the other elements are evaluated, too. This - makes parallel arrays more strict than <a - href="http://haskell.org/onlinelibrary/array.html">standard Haskell 98 - arrays</a>, but also opens the door for a loop-based implementation - strategy that leads to significantly more efficient code. - <p> - The programming model as well as the use of the <em>flattening - transformation</em>, which is central to the approach, has its origin in - the programming language <a - href="http://www.cs.cmu.edu/~scandal/nesl.html">Nesl</a>. - - <h2>More Sugar: Special Syntax for Array Comprehensions</h2> - <p> - The option <code>-XParr</code>, which is a dynamic hsc option that can - be reversed with <code>-XNoParr</code>, enables special syntax for - parallel arrays, which is not essential to using parallel arrays, but - makes for significantly more concise programs. The switch works by - making the lexical analyser (located in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/parser/Lex.lhs"><code>Lex.lhs</code></a>) - recognise the tokens <code>[:</code> and <code>:]</code>. Given that - the additional productions in the parser (located in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/parser/Parser.y"><code>Parser.y</code></a>) - cannot be triggered without the lexer generating the necessary tokens, - there is no need to alter the behaviour of the parser. - <p> - The following additional syntax is accepted (the non-terminals are those - from the <a href="http://haskell.org/onlinereport/">Haskell 98 language - definition</a>): - <p> - <blockquote><pre> -atype -> '[:' type ':] (parallel array type) - -aexp -> '[:' exp1 ',' ... ',' expk ':]' (explicit array, k >= 0) - | '[:' exp1 [',' exp2] '..' exp3 ':]' (arithmetic array sequence) - | '[:' exp '|' quals1 '|' ... '|' qualsn ':]' (array comprehension, n >= 1) - -quals -> qual1 ',' ... ',' qualn (qualifier list, n >= 1) - -apat -> '[:' pat1 ',' ... ',' patk ':]' (array pattern, k >= 0) -</pre> - </blockquote> - <p> - Moreover, the extended comprehension syntax that allows for <em>parallel - qualifiers</em> (i.e., qualifiers separated by "<code>|</code>") is also - supported in list comprehensions. In general, the similarity to the - special syntax for list is obvious. The two main differences are that - (a) arithmetic array sequences are always finite and (b) - <code>[::]</code> is not treated as a constructor in expressions and - patterns, but rather as a special case of the explicit array syntax. - The former is a simple consequence of the parallel evaluation semantics - of parallel arrays and the latter is due to arrays not being a - constructor-based data type. - <p> - As a naming convention, types and functions that are concerned with - parallel arrays usually contain the string <code>parr</code> or - <code>PArr</code> (often as a prefix), and where corresponding types or - functions for handling lists exist, the name is identical, except for - containing the substring <code>parr</code> instead of <code>list</code> - (possibly in caps). - <p> - The following implications are worth noting explicitly: - <ul> - <li>As the value and pattern <code>[::]</code> is an empty explicit - parallel array (i.e., something of the form <code>ExplicitPArr ty - []</code> in the AST). 
This is in contrast to lists, which use the - nil-constructor instead. In the case of parallel arrays, using a - constructor would be rather awkward, as it is not a constructor-based - type. (This becomes rather clear in the desugarer.) - <li>As a consequence, array patterns have the general form <code>[:p1, - p2, ..., pn:]</code>, where <code>n</code> >= 0. Thus, two array - patterns overlap iff they have the same length -- an important property - for the pattern matching compiler. - </ul> - - <h2>Prelude Support for Parallel Arrays</h2> - <p> - The Prelude module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/lib/std/PrelPArr.lhs"><code>PrelPArr</code></a> - defines the standard operations and their types on parallel arrays and - provides a basic implementation based on boxed arrays. The interface of - <code>PrelPArr</code> is oriented by H98's <code>PrelList</code>, but - leaving out all functions that require infinite structures and adding - frequently needed array operations, such as permutations. Parallel - arrays are quite unqiue in that they use an entirely different - representation as soon as the flattening transformation is activated, - which is described further below. In particular, <code>PrelPArr</code> - defines the type <code>[::]</code> and operations to create, process, - and inspect parallel arrays. The type as well as the names of some of - the operations are also hardwired in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/TysWiredIn.lhs"><code>TysWiredIn</code></a> - (see the definition of <code>parrTyCon</code> in this module) and <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/PrelNames.lhs"><code>PrelNames</code></a>. - This is again very much like the case of lists, where the type is - defined in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/lib/std/PrelBase.lhs"><code>PrelBase</code></a> - and similarly wired in; however, for lists the entirely - constructor-based definition is exposed to user programs, which is not - the case for parallel arrays. - - <h2>Desugaring Parallel Arrays</h2> - <p> - The parallel array extension requires the desugarer to replace all - occurrences of (1) explicit parallel arrays, (2) array patterns, and (3) - array comprehensions by a suitable combination of invocations of - operations defined in <code>PrelPArr</code>. - - <h4>Explicit Parallel Arrays</h4> - <p> - These are by far the simplest to remove. We simply replace every - occurrence of <code>[:<i>e<sub>1</sub></i>, ..., - <i>e<sub>n</sub></i>:]</code> by - <blockquote> - <code> - toP [<i>e<sub>1</sub></i>, ..., <i>e<sub>n</sub></i>] - </code> - </blockquote> - <p> - i.e., we build a list of the array elements, which is, then, converted - into a parallel array. - - <h4>Parallel Array Patterns</h4> - <p> - Array patterns are much more tricky as element positions may contain - further patterns and the <a - href="../the-beast/desugar.html#patmat">pattern matching compiler</a> - requires us to flatten all those out. But before we turn to the gory - details, here first the basic idea: A flat array pattern matches exactly - iff it's length corresponds to the length of the matched array. Hence, - if we have a set of flat array patterns matching an array value - <code>a</code>, it suffices to generate a Core <code>case</code> - expression that scrutinises <code>lengthP a</code> and has one - alternative for every length of array occuring in one of the patterns. 
- Moreover, there needs to be a default case catching all other array - lengths. In each alternative, array indexing (i.e., the functions - <code>!:</code>) is used to bind array elements to the corresponding - pattern variables. This sounds easy enough and is essentially what the - parallel array equation of the function <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/DsUtils.lhs"><code>DsUtils</code></a><code>.mkCoAlgCaseMatchResult</code> - does. - <p> - Unfortunately, however, the pattern matching compiler expects that it - can turn (almost) any pattern into variable patterns, literals, or - constructor applications by way of the functions <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/Match.lhs"><code>Match</code></a><code>.tidy1</code>. - And to make matters worse, some weird machinery in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/Check.lhs"><code>Check</code></a> - insists on being able to reverse the process (essentially to pretty - print patterns in warnings for incomplete or overlapping patterns). - <p> - The solution to this is an (unlimited) set of <em>fake</em> constructors - for parallel arrays, courtesy of <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/TysWiredIn.lhs"><code>TysWiredIn</code></a><code>.parrFakeCon</code>. - In other words, any pattern of the form <code>[:<i>p<sub>1</sub></i>, - ..., <i>p<sub>n</sub></i>:]</code> is transformed into - <blockquote> - <code> - MkPArray<i>n</i> <i>p<sub>1</sub></i> ... <i>p<sub>n</sub></i> - </code> - </blockquote> - <p> - by <code>Match.tidy1</code>, then, run through the rest of the pattern - matching compiler, and finally, picked up by - <code>DsUtils.mkCoAlgCaseMatchResult</code>, which converts it into a - <code>case</code> expression as outlined above. - <p> - As an example consider the source expression - <blockquote><pre> -case v of - [:x1:] -> e1 - [:x2, x3, x4:] -> e2 - _ -> e3</pre> - </blockquote> - <p> - <code>Match.tidy1</code> converts it into a form that is equivalent to - <blockquote><pre> -case v of { - MkPArr1 x1 -> e1; - MkPArr2 x2 x3 x4 -> e2; - _ -> e3; -}</pre> - </blockquote> - <p> - which <code>DsUtils.mkCoAlgCaseMatchResult</code> turns into the - following Core code: - <blockquote><pre> - case lengthP v of - Int# i# -> - case i# of l { - DFT -> e3 - 1 -> let x1 = v!:0 in e1 - 3 -> let x2 = v!:0; x2 = v!:1; x3 = v!:2 in e2 - }</pre> - </blockquote> - - <h4>Parallel Array Comprehensions</h4> - <p> - The most challenging construct of the three are array comprehensions. - In principle, it would be possible to transform them in essentially the - same way as list comprehensions, but this would lead to abysmally slow - code as desugaring of list comprehensions generates code that is - optimised for sequential, constructor-based structures. In contrast, - array comprehensions need to be transformed into code that solely relies - on collective operations and avoids the creation of many small - intermediate arrays. - <p> - The transformation is implemented by the function <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/DsListComp.lhs"><code>DsListComp</code></a><code>.dsPArrComp</code>. 
- In the following, we denote this transformation function by the form - <code><<<i>e</i>>> pa ea</code>, where <code><i>e</i></code> - is the comprehension to be compiled and the arguments <code>pa</code> - and <code>ea</code> denote a pattern and the currently processed array - expression, respectively. The invariant constraining these two - arguments is that all elements in the array produced by <code>ea</code> - will <em>successfully</em> match against <code>pa</code>. - <p> - Given a source-level comprehensions <code>[:e | qss:]</code>, we compile - it with <code><<[:e | qss:]>> () [:():]</code> using the - rules - <blockquote><pre> -<<[:e' | :]>> pa ea = mapP (\pa -> e') ea -<<[:e' | b , qs:]>> pa ea = <<[:e' | qs:]>> pa (filterP (\pa -> b) ea) -<<[:e' | p <- e, qs:]>> pa ea = - let ef = filterP (\x -> case x of {p -> True; _ -> False}) e - in - <<[:e' | qs:]>> (pa, p) (crossP ea ef) -<<[:e' | let ds, qs:]>> pa ea = - <<[:e' | qs:]>> (pa, (x_1, ..., x_n)) - (mapP (\v@pa -> (v, let ds in (x_1, ..., x_n))) ea) -where - {x_1, ..., x_n} = DV (ds) -- Defined Variables -<<[:e' | qs | qss:]>> pa ea = - <<[:e' | qss:]>> (pa, (x_1, ..., x_n)) - (zipP ea <<[:(x_1, ..., x_n) | qs:]>>) -where - {x_1, ..., x_n} = DV (qs)</pre> - </blockquote> - <p> - We assume the denotation of <code>crossP</code> to be given by - <blockquote><pre> -crossP :: [:a:] -> [:b:] -> [:(a, b):] -crossP a1 a2 = let - len1 = lengthP a1 - len2 = lengthP a2 - x1 = concatP $ mapP (replicateP len2) a1 - x2 = concatP $ replicateP len1 a2 - in - zipP x1 x2</pre> - </blockquote> - <p> - For a more efficient implementation of <code>crossP</code>, see - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/lib/std/PrelPArr.lhs"><code>PrelPArr</code></a>. - <p> - Moreover, the following optimisations are important: - <ul> - <li>In the <code>p <- e</code> rule, if <code>pa == ()</code>, drop - it and simplify the <code>crossP ea e</code> to <code>e</code>. - <li>We assume that fusion will optimise sequences of array processing - combinators. - <li>FIXME: Do we want to have the following function? - <blockquote><pre> -mapFilterP :: (a -> Maybe b) -> [:a:] -> [:b:]</pre> - </blockquote> - <p> - Even with fusion <code>(mapP (\p -> e) . filterP (\p -> - b))</code> may still result in redundant pattern matching - operations. (Let's wait with this until we have seen what the - Simplifier does to the generated code.) - </ul> - - <h2>Doing Away With Nested Arrays: The Flattening Transformation</h2> - <p> - On the quest towards an entirely unboxed representation of parallel - arrays, the flattening transformation is the essential ingredient. GHC - uses a <a - href="http://www.cse.unsw.edu.au/~chak/papers/CK00.html">substantially - improved version</a> of the transformation whose original form was - described by Blelloch & Sabot. The flattening transformation - replaces values of type <code>[:a:]</code> as well as functions - operating on these values by alternative, more efficient data structures - and functions. - <p> - The flattening machinery is activated by the option - <code>-fflatten</code>, which is a static hsc option. It consists of - two steps: function vectorisation and array specialisation. Only the - first of those is implemented so far. If selected, the transformation - is applied to a module in Core form immediately after the <a - href="../the-beast/desugar.html">desugarer,</a> before the <a - href="../the-beast/simplifier.html">Mighty Simplifier</a> gets to do its - job. 
After vectorisation, the Core program can be dumped using the - option <code>-ddump-vect</code>. The is a good reason for us to perform - flattening immediately after the desugarer. In <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/HscMain.lhs"><code>HscMain</code></a><code>.hscRecomp</code> - the so-called <em>persistent compiler state</em> is maintained, which - contains all the information about imported interface files needed to - lookup the details of imported names (e.g., during renaming and type - checking). However, all this information is zapped before the - simplifier is invoked (supposedly to reduce the space-consumption of - GHC). As flattening has to get at all kinds of identifiers from Prelude - modules, we need to do it before the relevant information in the - persistent compiler state is gone. - - <p> - As flattening generally requires all libraries to be compiled for - flattening (just like profiling does), there is a <em>compiler way</em> - <code>"ndp"</code>, which can be selected using the way option code - <code>-ndp</code>. This option will automagically select - <code>-XParr</code> and <code>-fflatten</code>. - - <h4><code>FlattenMonad</code></h4> - <p> - The module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/ndpFlatten/FlattenMonad.lhs"><code>FlattenMonad</code></a> - implements the monad <code>Flatten</code> that is used during - vectorisation to keep track of various sets of bound variables and a - variable substitution map; moreover, it provides a supply of new uniques - and allows us to look up names in the persistent compiler state (i.e., - imported identifiers). - <p> - In order to be able to look up imported identifiers in the persistent - compiler state, it is important that these identifies are included into - the free variable lists computed by the renamer. More precisely, all - names needed by flattening are included in the names produced by - <code>RnEnv.getImplicitModuleFVs</code>. To avoid putting - flattening-dependent lists of names into the renamer, the module - <code>FlattenInfo</code> exports <code>namesNeededForFlattening</code>. - - [FIXME: It might be worthwhile to document in the non-Flattening part of - the Commentary that the persistent compiler state is zapped after - desugaring and how the free variables determined by the renamer imply - which names are imported.] - - <p><small> -<!-- hhmts start --> -Last modified: Tue Feb 12 01:44:21 EST 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/exts/th.html b/docs/comm/exts/th.html deleted file mode 100644 index 539245db74..0000000000 --- a/docs/comm/exts/th.html +++ /dev/null @@ -1,197 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Template Haskell</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Template Haskell</h1> - <p> - The Template Haskell (TH) extension to GHC adds a meta-programming - facility in which all meta-level code is executed at compile time. The - design of this extension is detailed in "Template Meta-programming for - Haskell", Tim Sheard and Simon Peyton Jones, <a - href="http://portal.acm.org/toc.cfm?id=581690&type=proceeding&coll=portal&dl=ACM&part=series&WantType=proceedings&idx=unknown&title=unknown">ACM - SIGPLAN 2002 Haskell Workshop,</a> 2002. However, some of the details - changed after the paper was published. 
- </p> - - <h2>Meta Sugar</h2> - <p> - The extra syntax of TH (quasi-quote brackets, splices, and reification) - is handled in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/DsMeta.hs"><code>DsMeta</code></a>. - In particular, the function <code>dsBracket</code> desugars the four - types of quasi-quote brackets (<code>[|...|]</code>, - <code>[p|...|]</code>, <code>[d|...|]</code>, and <code>[t|...|]</code>) - and <code>dsReify</code> desugars the three types of reification - operations (<code>reifyType</code>, <code>reifyDecl</code>, and - <code>reifyFixity</code>). - </p> - - <h3>Desugaring of Quasi-Quote Brackets</h3> - <p> - A term in quasi-quote brackets needs to be translated into Core code - that, when executed, yields a <em>representation</em> of that term in - the form of the abstract syntax trees defined in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/libraries/template-haskell/Language/Haskell/TH/Syntax.hs"><code>Language.Haskell.TH.Syntax</code></a>. - Within <code>DsMeta</code>, this is achieved by four functions - corresponding to the four types of quasi-quote brackets: - <code>repE</code> (for <code>[|...|]</code>), <code>repP</code> (for - <code>[p|...|]</code>), <code>repTy</code> (for <code>[t|...|]</code>), - and <code>repTopDs</code> (for <code>[d|...|]</code>). All four of - these functions receive as an argument the GHC-internal Haskell AST of - the syntactic form that they quote (i.e., arguments of type <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/hsSyn/HsExpr.lhs"><code>HsExpr</code></a><code>.HsExpr - Name</code>, <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/hsSyn/HsPat.lhs"><code>HsPat</code></a><code>.HsPat Name</code>, - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/hsSyn/HsTypes.lhs"><code>HsType</code></a><code>.HsType - Name</code>, and <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/hsSyn/HsDecls.lhs"><code>HsDecls</code></a><code>.HsGroup - Name</code>, respectively). - </p> - <p> - To increase the static type safety in <code>DsMeta</code>, the functions - constructing representations do not just return plain values of type <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/coreSyn/CoreSyn.lhs"><code>CoreSyn</code></a> - <code>.CoreExpr</code>; instead, <code>DsMeta</code> introduces a - parametrised type <code>Core</code> whose dummy type parameter indicates - the source-level type of the value computed by the corresponding Core - expression. All construction of Core fragments in <code>DsMeta</code> - is performed by smart constructors whose type signatures use the dummy - type parameter to constrain the contexts in which they are applicable. - For example, a function that builds a Core expression that evaluates to - a TH type representation, which has type - <code>Language.Haskell.TH.Syntax.Type</code>, would return a value of - type - </p> - <blockquote> - <pre> -Core Language.Haskell.TH.Syntax.Type</pre> - </blockquote> - - <h3>Desugaring of Reification Operators</h3> - <p> - The TH paper introduces four reification operators: - <code>reifyType</code>, <code>reifyDecl</code>, - <code>reifyFixity</code>, and <code>reifyLocn</code>. Of these, - currently (= 9 Nov 2002), only the former two are implemented. 
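    <p>
    To make this concrete, here is roughly what a quote bracket builds, written
    against today's <code>template-haskell</code> API (a sketch: module and
    constructor names have drifted since 2002, so this mirrors the idea rather
    than the exact code <code>DsMeta</code> emits):
    <blockquote><pre>
{-# LANGUAGE TemplateHaskell #-}
import Language.Haskell.TH

-- The bracket yields a computation producing an abstract syntax tree.
example :: Q Exp
example = [| \x -> x + 1 |]

-- Approximately the representation the desugarer constructs for it;
-- note the freshly generated binder name.
exampleByHand :: Q Exp
exampleByHand = do
  x <- newName "x"
  return (LamE [VarP x]
               (InfixE (Just (VarE x)) (VarE '(+)) (Just (LitE (IntegerL 1)))))</pre>
    </blockquote>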
- </p> - <p> - The operator <code>reifyType</code> receives the name of a function or - data constructor as its argument and yields a representation of this - entity's type in the form of a value of type - <code>TH.Syntax.Type</code>. Similarly, <code>reifyDecl</code> receives - the name of a type and yields a representation of the type's declaration - as a value of type <code>TH.Syntax.Decl</code>. The name of the reified - entity is mapped to the GHC-internal representation of the entity by - using the function <code>lookupOcc</code> on the name. - </p> - - <h3>Representing Binding Forms</h3> - <p> - Care needs to be taken when constructing TH representations of Haskell - terms that include binding forms, such as lambda abstractions or let - bindings. To avoid name clashes, fresh names need to be generated for - all defined identifiers. This is achieved via the routine - <code>DsMeta.mkGenSym</code>, which, given a <code>Name</code>, produces - a <code>Name</code> / <code>Id</code> pair (of type - <code>GenSymBind</code>) that associates the given <code>Name</code> - with a Core identifier that at runtime will be bound to a string that - contains the fresh name. Notice the two-level nature of this - arrangement. It is necessary, as the Core code that constructs the - Haskell term representation may be executed multiple types at runtime - and it must be ensured that different names are generated in each run. - </p> - <p> - Such fresh bindings need to be entered into the meta environment (of - type <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/DsMonad.lhs"><code>DsMonad</code></a><code>.DsMetaEnv</code>), - which is part of the state (of type <code>DsMonad.DsEnv</code>) - maintained in the desugarer monad (of type <code>DsMonad.DsM</code>). - This is done using the function <code>DsMeta.addBinds</code>, which - extends the current environment by a list of <code>GenSymBind</code>s - and executes a subcomputation in this extended environment. Names can - be looked up in the meta environment by way of the functions - <code>DsMeta.lookupOcc</code> and <code>DsMeta.lookupBinder</code>; more - details about the difference between these two functions can be found in - the next subsection. - </p> - <p> - NB: <code>DsMeta</code> uses <code>mkGenSym</code> only when - representing terms that may be embedded into a context where names can - be shadowed. For example, a lambda abstraction embedded into an - expression can potentially shadow names defined in the context it is - being embedded into. In contrast, this can never be the case for - top-level declarations, such as data type declarations; hence, the type - variables that a parametric data type declaration abstracts over are not - being gensym'ed. As a result, variables in defining positions are - handled differently depending on the syntactic construct in which they - appear. - </p> - - <h3>Binders Versus Occurrences</h3> - <p> - Name lookups in the meta environment of the desugarer use two functions - with slightly different behaviour, namely <code>DsMeta.lookupOcc</code> - and <code>lookupBinder</code>. The module <code>DsMeta</code> contains - the following explanation as to the difference of these functions: - </p> - <blockquote> - <pre> -When we desugar [d| data T = MkT |] -we want to get - Data "T" [] [Con "MkT" []] [] -and *not* - Data "Foo:T" [] [Con "Foo:MkT" []] [] -That is, the new data decl should fit into whatever new module it is -asked to fit in. 
We do *not* clone, though; no need for this: - Data "T79" .... - -But if we see this: - data T = MkT - foo = reifyDecl T - -then we must desugar to - foo = Data "Foo:T" [] [Con "Foo:MkT" []] [] - -So in repTopDs we bring the binders into scope with mkGenSyms and addBinds, -but in dsReify we do not. And we use lookupOcc, rather than lookupBinder -in repTyClD and repC.</pre> - </blockquote> - <p> - This implies that <code>lookupOcc</code>, when it does not find the name - in the meta environment, uses the function <code>DsMeta.globalVar</code> - to construct the <em>original name</em> of the entity (cf. the TH paper - for more details regarding original names). This name uniquely - identifies the entity in the whole program and is in scope - <em>independent</em> of whether the user name of the same entity is in - scope or not (i.e., it may be defined in a different module without - being explicitly imported) and has the form <module>:<name>. - <strong>NB:</strong> Incidentally, the current implementation of this - mechanisms facilitates breaking any abstraction barrier. - </p> - - <h3>Known-key Names for Template Haskell</h3> - <p> - During the construction of representations, the desugarer needs to use a - large number of functions defined in the library - <code>Language.Haskell.TH.Syntax</code>. The names of these functions - need to be made available to the compiler in the way outlined <a - href="../the-beast/prelude.html">Primitives and the Prelude.</a> - Unfortunately, any change to <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/PrelNames.lhs"><code>PrelNames</code></a> - triggers a significant amount of recompilation. Hence, the names needed - for TH are defined in <code>DsMeta</code> instead (at the end of the - module). All library functions needed by TH are contained in the name - set <code>DsMeta.templateHaskellNames</code>. - </p> - - <p><small> -<!-- hhmts start --> -Last modified: Wed Nov 13 18:01:48 EST 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/feedback.html b/docs/comm/feedback.html deleted file mode 100644 index 1da8b10f29..0000000000 --- a/docs/comm/feedback.html +++ /dev/null @@ -1,34 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Feedback</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>Feedback</h1> - <p> - <a href="mailto:chak@cse.unsw.edu.au">I</a> welcome any feedback on the - material and in particular would appreciated comments on which parts of - the document are incomprehensible or miss explanation -- e.g., due to - the use of GHC speak that is explained nowhere (words like infotable or - so). Moreover, I would be interested to know which areas of GHC you - would like to see covered here. - <p> - For the moment is probably best if feedback is directed to - <p> - <blockquote> - <a - href="mailto:chak@cse.unsw.edu.au"><code>chak@cse.unsw.edu.au</code></a> - </blockquote> - <p> - However, if there is sufficient interest, we might consider setting up a - mailing list. 
- - <p><small> -<!-- hhmts start --> -Last modified: Wed Aug 8 00:11:42 EST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/genesis/genesis.html b/docs/comm/genesis/genesis.html deleted file mode 100644 index 2ccdf5353a..0000000000 --- a/docs/comm/genesis/genesis.html +++ /dev/null @@ -1,82 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Outline of the Genesis</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Outline of the Genesis</h1> - <p> - Building GHC happens in two stages: First you have to prepare the tree - with <code>make boot</code>; and second, you build the compiler and - associated libraries with <code>make all</code>. The <code>boot</code> - stage builds some tools used during the main build process, generates - parsers and other pre-computed source, and finally computes dependency - information. There is considerable detail on the build process in GHC's - <a - href="http://ghc.haskell.org/trac/ghc/wiki/Building">Building Guide.</a> - - <h4>Debugging the Beast</h4> - <p> - If you are hacking the compiler or like to play with unstable - development versions, chances are that the compiler someday just crashes - on you. Then, it is a good idea to load the <code>core</code> into - <code>gdb</code> as usual, but unfortunately there is usually not too - much useful information. - <p> - The next step, then, is somewhat tedious. You should build a compiler - producing programs with a runtime system that has debugging turned on - and use that to build the crashing compiler. There are many sanity - checks in the RTS, which may detect inconsistency before they lead to a - crash and you may include more debugging information, which helps - <code>gdb.</code> For a RTS with debugging turned on, add the following - to <code>build.mk</code> (see also the comment in - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/mk/config.mk.in"><code>config.mk.in</code></a> that you find when searching for - <code>GhcRtsHcOpts</code>): -<blockquote><pre> -GhcRtsHcOpts+=-optc-DDEBUG -GhcRtsCcOpts+=-g -EXTRA_LD_OPTS=-lbfd -liberty</pre></blockquote> - <p> - Then go into <code>fptools/ghc/rts</code> and <code>make clean boot && - make all</code>. With the resulting runtime system, you have to re-link - the compiler. Go into <code>fptools/ghc/compiler</code>, delete the - file <code>hsc</code> (up to version 4.08) or - <code>ghc-<version></code>, and execute <code>make all</code>. - <p> - The <code>EXTRA_LD_OPTS</code> are necessary as some of the debugging - code uses the BFD library, which in turn requires <code>liberty</code>. - I would also recommend (in 4.11 and from 5.0 upwards) adding these linker - options to the files <code>package.conf</code> and - <code>package.conf.inplace</code> in the directory - <code>fptools/ghc/driver/</code> to the <code>extra_ld_opts</code> entry - of the package <code>RTS</code>. Otherwise, you have to supply them - whenever you compile and link a program with a compiler that uses the - debugging RTS for the programs it produces. - <p> - To run GHC up to version 4.08 in <code>gdb</code>, first invoke the - compiler as usual, but pass it the option <code>-v</code>. This will - show you the exact invocation of the compiler proper <code>hsc</code>. - Run <code>hsc</code> with these options in <code>gdb</code>. 
The - development version 4.11 and stable releases from 5.0 on do no longer - use the Perl driver; so, you can run them directly with gdb. - <p> - <strong>Debugging a compiler during building from HC files.</strong> - If you are boot strapping the compiler on new platform from HC files and - it crashes somewhere during the build (e.g., when compiling the - libraries), do as explained above, but you may have to re-configure the - build system with <code>--enable-hc-boot</code> before re-making the - code in <code>fptools/ghc/driver/</code>. - If you do this with a compiler up to version 4.08, run the build process - with <code>make EXTRA_HC_OPTS=-v</code> to get the exact arguments with - which you have to invoke <code>hsc</code> in <code>gdb</code>. - - <p><small> -<!-- hhmts start --> -Last modified: Sun Apr 24 22:16:30 CEST 2005 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/genesis/makefiles.html b/docs/comm/genesis/makefiles.html deleted file mode 100644 index 7f01fb53ac..0000000000 --- a/docs/comm/genesis/makefiles.html +++ /dev/null @@ -1,51 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Mindboggling Makefiles</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Mindboggling Makefiles</h1> - <p> - The size and structure of GHC's makefiles makes it quite easy to scream - out loud - in pain - during the process of tracking down problems in the - make system or when attempting to alter it. GHC's <a - href="http://ghc.haskell.org/trac/ghc/wiki/Building">Building - Guide</a> has valuable information on <a - href="http://ghc.haskell.org/trac/ghc/wiki/Building/BuildSystem">the - makefile architecture.</a> - - <h4>A maze of twisty little passages, all alike</h4> - <p> - The <code>fptools/</code> toplevel and the various project directories - contain not only a <code>Makefile</code> each, but there are - subdirectories of name <code>mk/</code> at various levels that contain - rules, targets, and so on specific to a project - or, in the case of the - toplevel, the default rules for the whole system. Each <code>mk/</code> - directory contains a file <code>boilerplate.mk</code> that ties the - various other makefiles together. Files called <code>target.mk</code>, - <code>paths.mk</code>, and <code>suffix.mk</code> contain make targets, - definitions of variables containing paths, and suffix rules, - respectively. - <p> - One particularly nasty trick used in this hierarchy of makefiles is the - way in which the variable <code>$(TOP)</code> is used. AFAIK, - <code>$(TOP)</code> always points to a directory containing an - <code>mk/</code> subdirectory; however, it not necessarily points to the - toplevel <code>fptools/</code> directory. For example, within the GHC - subtree, <code>$(TOP)</code> points to <code>fptools/ghc/</code>. - However, some of the makefiles in <code>fptools/ghc/mk/</code> will then - <em>temporarily</em> redefine <code>$(TOP)</code> to point a level - higher (i.e., to <code>fptools/</code>) while they are including the - toplevel boilerplate. After that <code>$(TOP)</code> is redefined to - whatever value it had before including makefiles from higher up in the - hierarchy. 
- - <p><small> -<!-- hhmts start --> -Last modified: Wed Aug 22 16:46:33 GMT Daylight Time 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/genesis/modules.html b/docs/comm/genesis/modules.html deleted file mode 100644 index 10cd7a8490..0000000000 --- a/docs/comm/genesis/modules.html +++ /dev/null @@ -1,164 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Marvellous Module Structure of GHC </title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The Marvellous Module Structure of GHC </h1> - <p> - -GHC is built out of about 245 Haskell modules. It can be quite tricky -to figure out what the module dependency graph looks like. It can be -important, too, because loops in the module dependency graph need to -be broken carefully using <tt>.hi-boot</tt> interface files. -<p> -This section of the commentary documents the subtlest part of -the module dependency graph, namely the part near the bottom. -<ul> -<li> The list is given in compilation order: that is, -module near the top are more primitive, and are compiled earlier. -<li> Each module is listed together with its most critical -dependencies in parentheses; that is, the dependencies that prevent it being -compiled earlier. -<li> Modules in the same bullet don't depend on each other. -<li> Loops are documented by a dependency such as "<tt>loop Type.Type</tt>". -This means tha the module imports <tt>Type.Type</tt>, but module <tt>Type</tt> -has not yet been compiled, so the import comes from <tt>Type.hi-boot</tt>. -</ul> - -Compilation order is as follows: -<ul> -<li> -<strong>First comes a layer of modules that have few interdependencies, -and which implement very basic data types</strong>: -<tt> <ul> -<li> Util -<li> OccName -<li> Pretty -<li> Outputable -<li> StringBuffer -<li> ListSetOps -<li> Maybes -<li> etc -</ul> </tt> - -<p> -<li> <strong> Now comes the main subtle layer, involving types, classes, type constructors -identifiers, expressions, rules, and their operations.</strong> - -<tt> -<ul> -<li> Name <br> PrimRep -<p><li> - PrelNames -<p><li> - Var (Name, loop IdInfo.IdInfo, - loop Type.Type, loop Type.Kind) -<p><li> - VarEnv, VarSet, ThinAir -<p><li> - Class (loop TyCon.TyCon, loop Type.Type) -<p><li> - TyCon (loop Type.Type, loop Type.Kind, loop DataCon.DataCon, loop Generics.GenInfo) -<p><li> - TypeRep (loop DataCon.DataCon, loop Subst.substTyWith) -<p><li> - Type (loop PprType.pprType, loop Subst.substTyWith) -<p><li> - FieldLabel(Type) <br> - TysPrim(Type) <br> -<p><li> - Literal (TysPrim, PprType) <br> - DataCon (loop PprType, loop Subst.substTyWith, FieldLabel.FieldLabel) -<p><li> - TysWiredIn (loop MkId.mkDataConIds) -<p><li> - TcType( lots of TysWiredIn stuff) -<p><li> - PprType( lots of TcType stuff ) -<p><li> - PrimOp (PprType, TysWiredIn) -<p><li> - CoreSyn [does not import Id] -<p><li> - IdInfo (CoreSyn.Unfolding, CoreSyn.CoreRules) -<p><li> - Id (lots from IdInfo) -<p><li> - CoreFVs <br> - PprCore -<p><li> - CoreUtils (PprCore.pprCoreExpr, CoreFVs.exprFreeVars, - CoreSyn.isEvaldUnfolding CoreSyn.maybeUnfoldingTemplate) -<p><li> - CoreLint( CoreUtils ) <br> - OccurAnal (CoreUtils.exprIsTrivial) <br> - CoreTidy (CoreUtils.exprArity ) <br> -<p><li> - CoreUnfold (OccurAnal.occurAnalyseGlobalExpr) -<p><li> - Subst (CoreUnfold.Unfolding, CoreFVs) <br> - Generics (CoreUnfold.mkTopUnfolding) <br> - Rules (CoreUnfold.Unfolding, 
PprCore.pprTidyIdRules) -<p><li> - MkId (CoreUnfold.mkUnfolding, Subst, Rules.addRule) -<p><li> - PrelInfo (MkId) <br> - HscTypes( Rules.RuleBase ) -</ul></tt> - -<p><li> <strong>That is the end of the infrastructure. Now we get the - main layer of modules that perform useful work.</strong> - -<tt><ul> -<p><li> - CoreTidy (HscTypes.PersistentCompilerState) -</ul></tt> -</ul> - -HsSyn stuff -<ul> -<li> HsPat.hs-boot -<li> HsExpr.hs-boot (loop HsPat.LPat) -<li> HsTypes (loop HsExpr.HsSplice) -<li> HsBinds (HsTypes.LHsType, loop HsPat.LPat, HsExpr.pprFunBind and others) - HsLit (HsTypes.SyntaxName) -<li> HsPat (HsBinds, HsLit) - HsDecls (HsBinds) -<li> HsExpr (HsDecls, HsPat) -</ul> - - - -<h2>Library stuff: base package</h2> - -<ul> -<li> GHC.Base -<li> Data.Tuple (GHC.Base), GHC.Ptr (GHC.Base) -<li> GHC.Enum (Data.Tuple) -<li> GHC.Show (GHC.Enum) -<li> GHC.Num (GHC.Show) -<li> GHC.ST (GHC.Num), GHC.Real (GHC.Num) -<li> GHC.Arr (GHC.ST) GHC.STRef (GHC.ST) -<li> GHC.IOBase (GHC.Arr) -<li> Data.Bits (GHC.Real) -<li> Data.HashTable (Data.Bits, Control.Monad) -<li> Data.Typeable (GHC.IOBase, Data.HashTable) -<li> GHC.Weak (Data.Typeable, GHC.IOBase) -</ul> - - - <p><small> -<!-- hhmts start --> -Last modified: Wed Aug 22 16:46:33 GMT Daylight Time 2001 -<!-- hhmts end --> - </small> - </body> -</html> - - - - - diff --git a/docs/comm/index.html b/docs/comm/index.html deleted file mode 100644 index 64b9d81ff1..0000000000 --- a/docs/comm/index.html +++ /dev/null @@ -1,121 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Beast Explained</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The Glasgow Haskell Compiler (GHC) Commentary [v0.17]</h1> - <p> - <!-- Contributors: Whoever makes substantial additions or changes to the - document, please add your name and keep the order alphabetic. Moreover, - please bump the version number for any substantial modification that you - check into CVS. - --> - <strong>Manuel M. T. Chakravarty</strong><br> - <strong>Sigbjorn Finne</strong><br> - <strong>Simon Marlow</strong><br> - <strong>Simon Peyton Jones</strong><br> - <strong>Julian Seward</strong><br> - <strong>Reuben Thomas</strong><br> - <br> - <p> - This document started as a collection of notes describing what <a - href="mailto:chak@cse.unsw.edu.au">I</a> learnt when poking around in - the <a href="http://haskell.org/ghc/">GHC</a> sources. During the - <i>Haskell Implementers Workshop</i> in January 2001, it was decided to - put the commentary into - <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/">GHC's CVS - repository</a> - to allow the whole developer community to add their wizardly insight to - the document. 
- <p> - <strong>The document is still far from being complete - help it - grow!</strong> - - <h2>Before the Show Begins</h2> - <p> - <ul> - <li><a href="feedback.html">Feedback</a> - <li><a href="others.html">Other Sources of Wisdom</a> - </ul> - - <h2>Genesis</h2> - <p> - <ul> - <li><a href="genesis/genesis.html">Outline of the Genesis</a> - <li><a href="genesis/makefiles.html">Mindboggling Makefiles</a> - <li><a href="genesis/modules.html">GHC's Marvellous Module Structure</a> - </ul> - - <h2>The Beast Dissected</h2> - <p> - <ul> - <li><a href="the-beast/coding-style.html">Coding style used in - the compiler</a> - <li><a href="the-beast/driver.html">The Glorious Driver</a> - <li><a href="the-beast/prelude.html">Primitives and the Prelude</a> - <li><a href="the-beast/syntax.html">Just Syntax</a> - <li><a href="the-beast/basicTypes.html">The Basics</a> - <li><a href="the-beast/modules.html">Modules, ModuleNames and - Packages</a> - <li><a href="the-beast/names.html">The truth about names: Names and OccNames</a> - <li><a href="the-beast/vars.html">The Real Story about Variables, Ids, - TyVars, and the like</a> - <li><a href="the-beast/data-types.html">Data types and constructors</a> - <li><a href="the-beast/renamer.html">The Glorious Renamer</a> - <li><a href="the-beast/types.html">Hybrid Types</a> - <li><a href="the-beast/typecheck.html">Checking Types</a> - <li><a href="the-beast/desugar.html">Sugar Free: From Haskell To Core</a> - <li><a href="the-beast/simplifier.html">The Mighty Simplifier</a> - <li><a href="the-beast/mangler.html">The Evil Mangler</a> - <li><a href="the-beast/alien.html">Alien Functions</a> - <li><a href="the-beast/stg.html">You Got Control: The STG-language</a> - <li><a href="the-beast/ncg.html">The Native Code Generator</a> - <li><a href="the-beast/ghci.html">GHCi</a> - <li><a href="the-beast/fexport.html">Implementation of - <code>foreign export</code></a> - <li><a href="the-beast/main.html">Compiling and running the Main module</code></a> - </ul> - - <h2>RTS & Libraries</h2> - <p> - <ul> - <li><a href="http://ghc.haskell.org/trac/ghc/wiki/Commentary/Rts/Conventions">Coding Style Guidelines</a> - <li><a href="rts-libs/stgc.html">Spineless Tagless C</a> - <li><a href="rts-libs/primitives.html">Primitives</a> - <li><a href="rts-libs/prelfound.html">Prelude Foundations</a> - <li><a href="rts-libs/prelude.html">Cunning Prelude Code</a> - <li><a href="rts-libs/foreignptr.html">On why we have <tt>ForeignPtr</tt></a> - <li><a href="rts-libs/non-blocking.html">Non-blocking I/O for Win32</a> - <li><a href="rts-libs/multi-thread.html">Supporting multi-threaded interoperation</a> - </ul> - - <h2>Extensions, or Making a Complicated System More Complicated</h2> - <p> - <ul> - <li><a href="exts/th.html">Template Haskell</a> - <li><a href="exts/ndp.html">Parallel Arrays</a> - </ul> - - <h2>The Source</h2> - <p> - The online master copy of the Commentary is at - <blockquote> - <a href="http://www.cse.unsw.edu.au/~chak/haskell/ghc/comm/">http://www.cse.unsw.edu.au/~chak/haskell/ghc/comm/</a> - </blockquote> - <p> - This online version is updated - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/docs/comm/">from - CVS</a> - daily. 
- - <p><small> -<!-- hhmts start --> -Last modified: Thu May 12 19:03:42 EST 2005 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/others.html b/docs/comm/others.html deleted file mode 100644 index 52d87e9419..0000000000 --- a/docs/comm/others.html +++ /dev/null @@ -1,60 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Other Sources of Wisdom</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>Other Sources of Wisdom</h1> - <p> - Believe it or not, but there are other people besides you who are - masochistic enough to study the innards of the beast. Some of the have - been kind (or cruel?) enough to share their insights with us. Here is a - probably incomplete list: - <p> - <ul> - - <li>The <a - href="http://www.cee.hw.ac.uk/~dsg/gph/docs/StgSurvival.ps.gz">STG - Survival Sheet</a> has -- according to its header -- been written by - `a poor wee soul',<sup><a href="#footnote1">1</a></sup> which - probably has been pushed into the torments of madness by the very - act of contemplating the inner workings of the STG runtime system. - This document discusses GHC's runtime system with a focus on - support for parallel processing (aka GUM). - - <li>Instructions on <a - href="http://www-users.cs.york.ac.uk/~olaf/PUBLICATIONS/extendGHC.html">Adding - an Optimisation Pass to the Glasgow Haskell Compiler</a> - have been compiled by <a - href="http://www-users.cs.york.ac.uk/~olaf/">Olaf Chitil</a>. - Unfortunately, this document is already a little aged. - - <li><a href="http://www.cs.pdx.edu/~apt/">Andrew Tolmach</a> has defined - <a href="http://www.haskell.org/ghc/docs/papers/core.ps.gz">an external - representation of - GHC's <em>Core</em> language</a> and also implemented a GHC pass - that emits the intermediate form into <code>.hcr</code> files. The - option <code>-fext-core</code> triggers GHC to emit Core code after - optimisation; in addition, <code>-fno-code</code> is often used to - stop compilation after Core has been emitted. - - <!-- Add references to other background texts listed on the GHC docu - page - --> - - </ul> - - <p><hr><p> - <sup><a name="footnote1">1</a></sup>Usually reliable sources have it that - the poor soul in question is no one less than GUM hardcore hacker <a - href="http://www.cee.hw.ac.uk/~hwloidl/">Hans-Wolfgang Loidl</a>. - - <p><small> -<!-- hhmts start --> -Last modified: Tue Nov 13 10:56:57 EST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/rts-libs/foreignptr.html b/docs/comm/rts-libs/foreignptr.html deleted file mode 100644 index febe9fe422..0000000000 --- a/docs/comm/rts-libs/foreignptr.html +++ /dev/null @@ -1,68 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - why we have <tt>ForeignPtr</tt></title> - </head> - - <body BGCOLOR="FFFFFF"> - - <h1>On why we have <tt>ForeignPtr</tt></h1> - - <p>Unfortunately it isn't possible to add a finalizer to a normal - <tt>Ptr a</tt>. We already have a generic finalization mechanism: - see the Weak module in package lang. But the only reliable way to - use finalizers is to attach one to an atomic heap object - that - way the compiler's optimiser can't interfere with the lifetime of - the object. 
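    <p>
    For a concrete picture of what this buys you, here is how a finalizer is
    attached through <code>ForeignPtr</code> with today's base library (a
    sketch: it assumes a plain <code>mallocBytes</code>/<code>free</code> pair
    and uses <code>Foreign.Concurrent.newForeignPtr</code>, which accepts an
    arbitrary <code>IO ()</code> finalizer):
    <blockquote><pre>
import Foreign.ForeignPtr      (ForeignPtr, withForeignPtr)
import Foreign.Marshal.Alloc   (mallocBytes, free)
import Foreign.Ptr             (Ptr)
import qualified Foreign.Concurrent as FC

-- Allocate a raw buffer and tie its lifetime to a ForeignPtr; the
-- finalizer runs once the ForeignPtr becomes garbage.
newBuffer :: Int -> IO (ForeignPtr ())
newBuffer n = do
  p <- mallocBytes n
  FC.newForeignPtr p (free p)

-- withForeignPtr keeps the buffer alive for the duration of the action.
withBuffer :: ForeignPtr () -> (Ptr () -> IO a) -> IO a
withBuffer = withForeignPtr</pre>
    </blockquote>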
- - <p>The <tt>Ptr</tt> type is really just a boxed address - it's - defined like - - <pre> -data Ptr a = Ptr Addr# -</pre> - - <p>where <tt>Addr#</tt> is an unboxed native address (just a 32- - or 64- bit word). Putting a finalizer on a <tt>Ptr</tt> is - dangerous, because the compiler's optimiser might remove the box - altogether. - - <p><tt>ForeignPtr</tt> is defined like this - - <pre> -data ForeignPtr a = ForeignPtr ForeignObj# -</pre> - - <p>where <tt>ForeignObj#</tt> is a *boxed* address, it corresponds - to a real heap object. The heap object is primitive from the - point of view of the compiler - it can't be optimised away. So it - works to attach a finalizer to the <tt>ForeignObj#</tt> (but not - to the <tt>ForeignPtr</tt>!). - - <p>There are several primitive objects to which we can attach - finalizers: <tt>MVar#</tt>, <tt>MutVar#</tt>, <tt>ByteArray#</tt>, - etc. We have special functions for some of these: eg. - <tt>MVar.addMVarFinalizer</tt>. - - <p>So a nicer interface might be something like - -<pre> -class Finalizable a where - addFinalizer :: a -> IO () -> IO () - -instance Finalizable (ForeignPtr a) where ... -instance Finalizable (MVar a) where ... -</pre> - - <p>So you might ask why we don't just get rid of <tt>Ptr</tt> and - rename <tt>ForeignPtr</tt> to <tt>Ptr</tt>. The reason for that - is just efficiency, I think. - - <p><small> -<!-- hhmts start --> -Last modified: Wed Sep 26 09:49:37 BST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/rts-libs/multi-thread.html b/docs/comm/rts-libs/multi-thread.html deleted file mode 100644 index 67a544be85..0000000000 --- a/docs/comm/rts-libs/multi-thread.html +++ /dev/null @@ -1,445 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> -<head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> -<title>The GHC Commentary - Supporting multi-threaded interoperation</title> -</head> -<body> -<h1>The GHC Commentary - Supporting multi-threaded interoperation</h1> -<em> -<p> -Authors: sof@galois.com, simonmar@microsoft.com<br> -Date: April 2002 -</p> -</em> -<p> -This document presents the implementation of an extension to -Concurrent Haskell that provides two enhancements: -</p> -<ul> -<li>A Concurrent Haskell thread may call an external (e.g., C) -function in a manner that's transparent to the execution/evaluation of -other Haskell threads. Section <a href="#callout">Calling out"</a> covers this. -</li> -<li> -OS threads may safely call Haskell functions concurrently. Section -<a href="#callin">"Calling in"</a> covers this. -</li> -</ul> - -<!---- *************************************** -----> -<h2 id="callout">The problem: foreign calls that block</h2> -<p> -When a Concurrent Haskell(CH) thread calls a 'foreign import'ed -function, the runtime system(RTS) has to handle this in a manner -transparent to other CH threads. That is, they shouldn't be blocked -from making progress while the CH thread executes the external -call. Presently, all threads will block. -</p> -<p> -Clearly, we have to rely on OS-level threads in order to support this -kind of concurrency. The implementation described here defines the -(abstract) OS threads interface that the RTS assumes. The implementation -currently provides two instances of this interface, one for POSIX -threads (pthreads) and one for the Win32 threads. 
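    <p>
    The user-visible goal can be demonstrated in a few lines of today's Haskell
    (a sketch: it assumes a POSIX <code>sleep(3)</code> and the threaded RTS,
    where the <code>threadsafe</code> attribute described towards the end of
    this page has since been folded into <code>safe</code>):
    <blockquote><pre>
{-# LANGUAGE ForeignFunctionInterface #-}
import Control.Concurrent (forkIO, threadDelay)
import Control.Monad      (forever)
import Foreign.C.Types    (CUInt(..))

-- A blocking external call: POSIX sleep(3).
foreign import ccall safe "unistd.h sleep"
  c_sleep :: CUInt -> IO CUInt

main :: IO ()
main = do
  -- This Haskell thread keeps running while the main thread sits in C.
  _ <- forkIO (forever (putStrLn "still making progress" >> threadDelay 500000))
  _ <- c_sleep 3
  putStrLn "external call returned"</pre>
    </blockquote>
    <p>
    Built with <code>ghc -threaded</code>, the banner keeps printing during the
    three seconds spent inside <code>sleep</code>; with an <code>unsafe</code>
    import it would not.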
-</p> - -<!---- *************************************** -----> -<h3>Multi-threading the RTS</h3> - -<p> -A simple and efficient way to implement non-blocking foreign calls is like this: -<ul> -<li> Invariant: only one OS thread is allowed to -execute code inside of the GHC runtime system. [There are alternate -designs, but I won't go into details on their pros and cons here.] -We'll call the OS thread that is currently running Haskell threads -the <em>Current Haskell Worker Thread</em>. -<p> -The Current Haskell Worker Thread repeatedly grabs a Haskell thread, executes it until its -time-slice expires or it blocks on an MVar, then grabs another, and executes -that, and so on. -</p> -<li> -<p> -When the Current Haskell Worker comes to execute a potentially blocking 'foreign -import', it leaves the RTS and ceases being the Current Haskell Worker, but before doing so it makes certain that -another OS worker thread is available to become the Current Haskell Worker. -Consequently, even if the external call blocks, the new Current Haskell Worker -continues execution of the other Concurrent Haskell threads. -When the external call eventually completes, the Concurrent Haskell -thread that made the call is passed the result and made runnable -again. -</p> -<p> -<li> -A pool of OS threads are constantly trying to become the Current Haskell Worker. -Only one succeeds at any moment. If the pool becomes empty, the RTS creates more workers. -<p><li> -The OS worker threads are regarded as interchangeable. A given Haskell thread -may, during its lifetime, be executed entirely by one OS worker thread, or by more than one. -There's just no way to tell. - -<p><li>If a foreign program wants to call a Haskell function, there is always a thread switch involved. -The foreign program uses thread-safe mechanisms to create a Haskell thread and make it runnable; and -the current Haskell Worker Thread exectutes it. See Section <a href="#callin">Calling in</a>. -</ul> -<p> -The rest of this section describes the mechanics of implementing all -this. There's two parts to it, one that describes how a native (OS) thread -leaves the RTS to service the external call, the other how the same -thread handles returning the result of the external call back to the -Haskell thread. -</p> - -<!---- *************************************** -----> -<h3>Making the external call</h3> - -<p> -Presently, GHC handles 'safe' C calls by effectively emitting the -following code sequence: -</p> - -<pre> - ...save thread state... - t = suspendThread(); - r = foo(arg1,...,argn); - resumeThread(t); - ...restore thread state... - return r; -</pre> - -<p> -After having squirreled away the state of a Haskell thread, -<tt>Schedule.c:suspendThread()</tt> is called which puts the current -thread on a list [<tt>Schedule.c:suspended_ccalling_threads</tt>] -containing threads that are currently blocked waiting for external calls -to complete (this is done for the purposes of finding roots when -garbage collecting). -</p> - -<p> -In addition to putting the Haskell thread on -<tt>suspended_ccalling_threads</tt>, <tt>suspendThread()</tt> now also -does the following: -</p> -<ul> -<li>Instructs the <em>Task Manager</em> to make sure that there's a -another native thread waiting in the wings to take over the execution -of Haskell threads. This might entail creating a new -<em>worker thread</em> or re-using one that's currently waiting for -more work to do. The <a href="#taskman">Task Manager</a> section -presents the functionality provided by this subsystem. 
-</li> - -<li>Releases its capability to execute within the RTS. By doing -so, another worker thread will become unblocked and start executing -code within the RTS. See the <a href="#capability">Capability</a> -section for details. -</li> - -<li><tt>suspendThread()</tt> returns a token which is used to -identify the Haskell thread that was added to -<tt>suspended_ccalling_threads</tt>. This is done so that once the -external call has completed, we know what Haskell thread to pull off -the <tt>suspended_ccalling_threads</tt> list. -</li> -</ul> - -<p> -Upon return from <tt>suspendThread()</tt>, the OS thread is free of -its RTS executing responsibility, and can now invoke the external -call. Meanwhile, the other worker thread that have now gained access -to the RTS will continue executing Concurrent Haskell code. Concurrent -'stuff' is happening! -</p> - -<!---- *************************************** -----> -<h3>Returning the external result</h3> - -<p> -When the native thread eventually returns from the external call, -the result needs to be communicated back to the Haskell thread that -issued the external call. The following steps takes care of this: -</p> - -<ul> -<li>The returning OS thread calls <tt>Schedule.c:resumeThread()</tt>, -passing along the token referring to the Haskell thread that made the -call we're returning from. -</li> - -<li> -The OS thread then tries to grab hold of a <em>returning worker -capability</em>, via <tt>Capability.c:grabReturnCapability()</tt>. -Until granted, the thread blocks waiting for RTS permissions. Clearly we -don't want the thread to be blocked longer than it has to, so whenever -a thread that is executing within the RTS enters the Scheduler (which -is quite often, e.g., when a Haskell thread context switch is made), -it checks to see whether it can give up its RTS capability to a -returning worker, which is done by calling -<tt>Capability.c:yieldToReturningWorker()</tt>. -</li> - -<li> -If a returning worker is waiting (the code in <tt>Capability.c</tt> -keeps a counter of the number of returning workers that are currently -blocked waiting), it is woken up and the given the RTS execution -priviledges/capabilities of the worker thread that gave up its. -</li> - -<li> -The thread that gave up its capability then tries to re-acquire -the capability to execute RTS code; this is done by calling -<tt>Capability.c:waitForWorkCapability()</tt>. -</li> - -<li> -The returning worker that was woken up will continue execution in -<tt>resumeThread()</tt>, removing its associated Haskell thread -from the <tt>suspended_ccalling_threads</tt> list and start evaluating -that thread, passing it the result of the external call. -</li> -</ul> - -<!---- *************************************** -----> -<h3 id="rts-exec">RTS execution</h3> - -<p> -If a worker thread inside the RTS runs out of runnable Haskell -threads, it goes to sleep waiting for the external calls to complete. -It does this by calling <tt>waitForWorkCapability</tt> -</p> - -<p> -The availability of new runnable Haskell threads is signalled when: -</p> - -<ul> -<li>When an external call is set up in <tt>suspendThread()</tt>.</li> -<li>When a new Haskell thread is created (e.g., whenever -<tt>Concurrent.forkIO</tt> is called from within Haskell); this is -signalled in <tt>Schedule.c:scheduleThread_()</tt>. -</li> -<li>Whenever a Haskell thread is removed from a 'blocking queue' -attached to an MVar (only?). 
-</li> -</ul> - -<!---- *************************************** -----> -<h2 id="callin">Calling in</h2> - -Providing robust support for having multiple OS threads calling into -Haskell is not as involved as its dual. - -<ul> -<li>The OS thread issues the call to a Haskell function by going via -the <em>Rts API</em> (as specificed in <tt>RtsAPI.h</tt>). -<li>Making the function application requires the construction of a -closure on the heap. This is done in a thread-safe manner by having -the OS thread lock a designated block of memory (the 'Rts API' block, -which is part of the GC's root set) for the short period of time it -takes to construct the application. -<li>The OS thread then creates a new Haskell thread to execute the -function application, which (eventually) boils down to calling -<tt>Schedule.c:createThread()</tt> -<li> -Evaluation is kicked off by calling <tt>Schedule.c:scheduleExtThread()</tt>, -which asks the Task Manager to possibly create a new worker (OS) -thread to execute the Haskell thread. -<li> -After the OS thread has done this, it blocks waiting for the -Haskell thread to complete the evaluation of the Haskell function. -<p> -The reason why a separate worker thread is made to evaluate the Haskell -function and not the OS thread that made the call-in via the -Rts API, is that we want that OS thread to return as soon as possible. -We wouldn't be able to guarantee that if the OS thread entered the -RTS to (initially) just execute its function application, as the -Scheduler may side-track it and also ask it to evaluate other Haskell threads. -</li> -</ul> - -<p> -<strong>Note:</strong> As of 20020413, the implementation of the RTS API -only serializes access to the allocator between multiple OS threads wanting -to call into Haskell (via the RTS API.) It does not coordinate this access -to the allocator with that of the OS worker thread that's currently executing -within the RTS. This weakness/bug is scheduled to be tackled as part of an -overhaul/reworking of the RTS API itself. - - -<!---- *************************************** -----> -<h2>Subsystems introduced/modified</h2> - -<p> -These threads extensions affect the Scheduler portions of the runtime -system. To make it more manageable to work with, the changes -introduced a couple of new RTS 'sub-systems'. This section presents -the functionality and API of these sub-systems. -</p> - -<!---- *************************************** -----> -<h3 id="#capability">Capabilities</h3> - -<p> -A Capability represent the token required to execute STG code, -and all the state an OS thread/task needs to run Haskell code: -its STG registers, a pointer to its TSO, a nursery etc. During -STG execution, a pointer to the capabilitity is kept in a -register (BaseReg). -</p> -<p> -Only in an SMP build will there be multiple capabilities, for -the threaded RTS and other non-threaded builds, there is only -one global capability, namely <tt>MainCapability</tt>. - -<p> -The Capability API is as follows: -<pre> -/* Capability.h */ -extern void initCapabilities(void); - -extern void grabReturnCapability(Mutex* pMutex, Capability** pCap); -extern void waitForWorkCapability(Mutex* pMutex, Capability** pCap, rtsBool runnable); -extern void releaseCapability(Capability* cap); - -extern void yieldToReturningWorker(Mutex* pMutex, Capability* cap); - -extern void grabCapability(Capability** cap); -</pre> - -<ul> -<li><tt>initCapabilities()</tt> initialises the subsystem. 
- -<li><tt>grabReturnCapability()</tt> is called by worker threads -returning from an external call. It blocks them waiting to gain -permissions to do so. - -<li><tt>waitForWorkCapability()</tt> is called by worker threads -already inside the RTS, but without any work to do. It blocks them -waiting for there to new work to become available. - -<li><tt>releaseCapability()</tt> hands back a capability. If a -'returning worker' is waiting, it is signalled that a capability -has become available. If not, <tt>releaseCapability()</tt> tries -to signal worker threads that are blocked waiting inside -<tt>waitForWorkCapability()</tt> that new work might now be -available. - -<li><tt>yieldToReturningWorker()</tt> is called by the worker thread -that's currently inside the Scheduler. It checks whether there are other -worker threads waiting to return from making an external call. If so, -they're given preference and a capability is transferred between worker -threads. One of the waiting 'returning worker' threads is signalled and made -runnable, with the other, yielding, worker blocking to re-acquire -a capability. -</ul> - -<p> -The condition variables used to implement the synchronisation between -worker consumers and providers are local to the Capability -implementation. See source for details and comments. -</p> - -<!---- *************************************** -----> -<h3 id="taskman">The Task Manager</h3> - -<p> -The Task Manager API is responsible for managing the creation of -OS worker RTS threads. When a Haskell thread wants to make an -external call, the Task Manager is asked to possibly create a -new worker thread to take over the RTS-executing capability of -the worker thread that's exiting the RTS to execute the external call. - -<p> -The Capability subsystem keeps track of idle worker threads, so -making an informed decision about whether or not to create a new OS -worker thread is easy work for the task manager. The Task manager -provides the following API: -</p> - -<pre> -/* Task.h */ -extern void startTaskManager ( nat maxTasks, void (*taskStart)(void) ); -extern void stopTaskManager ( void ); - -extern void startTask ( void (*taskStart)(void) ); -</pre> - -<ul> -<li><tt>startTaskManager()</tt> and <tt>stopTaskManager()</tt> starts -up and shuts down the subsystem. When starting up, you have the option -to limit the overall number of worker threads that can be -created. An unbounded (modulo OS thread constraints) number of threads -is created if you pass '0'. -<li><tt>startTask()</tt> is called when a worker thread calls -<tt>suspendThread()</tt> to service an external call, asking another -worker thread to take over its RTS-executing capability. It is also -called when an external OS thread invokes a Haskell function via the -<em>Rts API</em>. 
-</ul> - -<!---- *************************************** -----> -<h3>Native threads API</h3> - -To hide OS details, the following API is used by the task manager and -the scheduler to interact with an OS' threads API: - -<pre> -/* OSThreads.h */ -typedef <em>..OS specific..</em> Mutex; -extern void initMutex ( Mutex* pMut ); -extern void grabMutex ( Mutex* pMut ); -extern void releaseMutex ( Mutex* pMut ); - -typedef <em>..OS specific..</em> Condition; -extern void initCondition ( Condition* pCond ); -extern void closeCondition ( Condition* pCond ); -extern rtsBool broadcastCondition ( Condition* pCond ); -extern rtsBool signalCondition ( Condition* pCond ); -extern rtsBool waitCondition ( Condition* pCond, - Mutex* pMut ); - -extern OSThreadId osThreadId ( void ); -extern void shutdownThread ( void ); -extern void yieldThread ( void ); -extern int createOSThread ( OSThreadId* tid, - void (*startProc)(void) ); -</pre> - - - -<!---- *************************************** -----> -<h2>User-level interface</h2> - -To signal that you want an external call to be serviced by a separate -OS thread, you have to add the attribute <tt>threadsafe</tt> to -a foreign import declaration, i.e., - -<pre> -foreign import "bigComp" threadsafe largeComputation :: Int -> IO () -</pre> - -<p> -The distinction between 'safe' and thread-safe C calls is made -so that we may call external functions that aren't re-entrant but may -cause a GC to occur. -<p> -The <tt>threadsafe</tt> attribute subsumes <tt>safe</tt>. -</p> - -<!---- *************************************** -----> -<h2>Building the GHC RTS</h2> - -The multi-threaded extension isn't currently enabled by default. To -have it built, you need to run the <tt>fptools</tt> configure script -with the extra option <tt>--enable-threaded-rts</tt> turned on, and -then proceed to build the compiler as per normal. - -<hr> -<small> -<!-- hhmts start --> Last modified: Wed Apr 10 14:21:57 Pacific Daylight Time 2002 <!-- hhmts end --> -</small> -</body> </html> - diff --git a/docs/comm/rts-libs/non-blocking.html b/docs/comm/rts-libs/non-blocking.html deleted file mode 100644 index 627bde8d88..0000000000 --- a/docs/comm/rts-libs/non-blocking.html +++ /dev/null @@ -1,133 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Non-blocking I/O on Win32</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Non-blocking I/O on Win32</h1> - <p> - -This note discusses the implementation of non-blocking I/O on -Win32 platforms. It is not implemented yet (Apr 2002), but it seems worth -capturing the ideas. Thanks to Sigbjorn for writing them. - -<h2> Background</h2> - -GHC has provided non-blocking I/O support for Concurrent Haskell -threads on platforms that provide 'UNIX-style' non-blocking I/O for -quite a while. That is, platforms that let you alter the property of a -file descriptor to instead of having a thread block performing an I/O -operation that cannot be immediately satisfied, the operation returns -back a special error code (EWOULDBLOCK.) When that happens, the CH -thread that made the blocking I/O request is put into a blocked-on-IO -state (see Foreign.C.Error.throwErrnoIfRetryMayBlock). The RTS will -in a timely fashion check to see whether I/O is again possible -(via a call to select()), and if it is, unblock the thread & have it -re-try the I/O operation. 
The result is that other Concurrent Haskell -threads won't be affected, but can continue operating while a thread -is blocked on I/O. -<p> -Non-blocking I/O hasn't been supported by GHC on Win32 platforms, for -the simple reason that it doesn't provide the OS facilities described -above. - -<h2>Win32 non-blocking I/O, attempt 1</h2> - -Win32 does provide something select()-like, namely the -WaitForMultipleObjects() API. It takes an array of kernel object -handles plus a timeout interval, and waits for either one (or all) of -them to become 'signalled'. A handle representing an open file (for -reading) becomes signalled once there is input available. -<p> -So, it is possible to observe that I/O is possible using this -function, but not whether there's "enough" to satisfy the I/O request. -So, if we were to mimic select() usage with WaitForMultipleObjects(), -we'd correctly avoid blocking initially, but a thread may very well -block waiting for their I/O requests to be satisified once the file -handle has become signalled. [There is a fix for this -- only read -and write one byte at a the time -- but I'm not advocating that.] - - -<h2>Win32 non-blocking I/O, attempt 2</h2> - -Asynchronous I/O on Win32 is supported via 'overlapped I/O'; that is, -asynchronous read and write requests can be made via the ReadFile() / -WriteFile () APIs, specifying position and length of the operation. -If the I/O requests cannot be handled right away, the APIs won't -block, but return immediately (and report ERROR_IO_PENDING as their -status code.) -<p> -The completion of the request can be reported in a number of ways: -<ul> - <li> synchronously, by blocking inside Read/WriteFile(). (this is the - non-overlapped case, really.) -<p> - - <li> as part of the overlapped I/O request, pass a HANDLE to an event - object. The I/O system will signal this event once the request - completed, which a waiting thread will then be able to see. -<p> - - <li> by supplying a pointer to a completion routine, which will be - called as an Asynchronous Procedure Call (APC) whenever a thread - calls a select bunch of 'alertable' APIs. -<p> - - <li> by associating the file handle with an I/O completion port. Once - the request completes, the thread servicing the I/O completion - port will be notified. -</ul> -The use of I/O completion port looks the most interesting to GHC, -as it provides a central point where all I/O requests are reported. -<p> -Note: asynchronous I/O is only fully supported by OSes based on -the NT codebase, i.e., Win9x don't permit async I/O on files and -pipes. However, Win9x does support async socket operations, and -I'm currently guessing here, console I/O. In my view, it would -be acceptable to provide non-blocking I/O support for NT-based -OSes only. -<p> -Here's the design I currently have in mind: -<ul> -<li> Upon startup, an RTS helper thread whose only purpose is to service - an I/O completion port, is created. -<p> -<li> All files are opened in 'overlapping' mode, and associated - with an I/O completion port. -<p> -<li> Overlapped I/O requests are used to implement read() and write(). -<p> -<li> If the request cannot be satisified without blocking, the Haskell - thread is put on the blocked-on-I/O thread list & a re-schedule - is made. -<p> -<li> When the completion of a request is signalled via the I/O completion - port, the RTS helper thread will move the associated Haskell thread - from the blocked list onto the runnable list. 
(Clearly, care - is required here to have another OS thread mutate internal Scheduler - data structures.) - -<p> -<li> In the event all Concurrent Haskell threads are blocked waiting on - I/O, the main RTS thread blocks waiting on an event synchronisation - object, which the helper thread will signal whenever it makes - a Haskell thread runnable. - -</ul> - -I might do the communication between the RTS helper thread and the -main RTS thread differently though: rather than have the RTS helper -thread manipluate thread queues itself, thus requiring careful -locking, just have it change a bit on the relevant TSO, which the main -RTS thread can check at regular intervals (in some analog of -awaitEvent(), for example). - - <p><small> -<!-- hhmts start --> -Last modified: Wed Aug 8 19:30:18 EST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/rts-libs/prelfound.html b/docs/comm/rts-libs/prelfound.html deleted file mode 100644 index 25407eed43..0000000000 --- a/docs/comm/rts-libs/prelfound.html +++ /dev/null @@ -1,57 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Prelude Foundations</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Prelude Foundations</h1> - <p> - The standard Haskell Prelude as well as GHC's Prelude extensions are - constructed from GHC's <a href="primitives.html">primitives</a> in a - couple of layers. - - <h4><code>PrelBase.lhs</code></h4> - <p> - Some most elementary Prelude definitions are collected in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/lib/std/PrelBase.lhs"><code>PrelBase.lhs</code></a>. - In particular, it defines the boxed versions of Haskell primitive types - - for example, <code>Int</code> is defined as - <blockquote><pre> -data Int = I# Int#</pre> - </blockquote> - <p> - Saying that a boxed integer <code>Int</code> is formed by applying the - data constructor <code>I#</code> to an <em>unboxed</em> integer of type - <code>Int#</code>. Unboxed types are hardcoded in the compiler and - exported together with the <a href="primitives.html">primitive - operations</a> understood by GHC. - <p> - <code>PrelBase.lhs</code> similarly defines basic types, such as, - boolean values - <blockquote><pre> -data Bool = False | True deriving (Eq, Ord)</pre> - </blockquote> - <p> - the unit type - <blockquote><pre> -data () = ()</pre> - </blockquote> - <p> - and lists - <blockquote><pre> -data [] a = [] | a : [a]</pre> - </blockquote> - <p> - It also contains instance delarations for these types. In addition, - <code>PrelBase.lhs</code> contains some <a href="prelude.html">tricky - machinery</a> for efficient list handling. 
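-    <p>
-      To see this boxed/unboxed split from the user's side, here is a small,
-      self-contained example (in later GHCs the constructor and the primitive
-      operations are re-exported from <code>GHC.Exts</code> rather than
-      <code>PrelBase</code>, so treat the import as illustrative only):
-    <blockquote><pre>
-{-# LANGUAGE MagicHash #-}
-module UnboxedDemo where
-
-import GHC.Exts (Int (I#), Int#, (+#))
-
--- Matching on I# exposes the raw, unboxed Int# payload; (+#) then adds
--- two Int# values directly, without allocating intermediate boxes.
-plusOne :: Int -> Int
-plusOne (I# i#) = I# (i# +# 1#)</pre>
-    </blockquote>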
-
-    <p><small>
-<!-- hhmts start -->
-Last modified: Wed Aug 8 19:30:18 EST 2001
-<!-- hhmts end -->
-    </small>
-  </body>
-</html>
diff --git a/docs/comm/rts-libs/prelude.html b/docs/comm/rts-libs/prelude.html
deleted file mode 100644
index c93e90dddc..0000000000
--- a/docs/comm/rts-libs/prelude.html
+++ /dev/null
@@ -1,121 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
-<html>
-  <head>
-    <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1">
-    <title>The GHC Commentary - Cunning Prelude Code</title>
-  </head>
-
-  <body BGCOLOR="FFFFFF">
-    <h1>The GHC Commentary - Cunning Prelude Code</h1>
-    <p>
-      GHC uses many optimisations and GHC-specific techniques (unboxed
-      values, RULES pragmas, and so on) to make the heavily used Prelude code
-      as fast as possible.
-
-    <hr>
-    <h4>Par, seq, and lazy</h4>
-
-    In GHC.Conc you will find
-<blockquote><pre>
-  pseq  a b = a `seq` lazy b
-</pre></blockquote>
-    What's this "lazy" thing?  Well, <tt>pseq</tt> is a <tt>seq</tt> for a parallel setting.
-    We really mean "evaluate a, then b".  But if the strictness analyser sees that pseq is strict
-    in b, then b might be evaluated <em>before</em> a, which is all wrong.
-<p>
-Solution: wrap the 'b' in a call to <tt>GHC.Base.lazy</tt>.  This function is just the identity function,
-except that it's put into the built-in environment in MkId.lhs.  That is, the MkId.lhs definition overrides the
-inlining and strictness information that comes in from GHC.Base.hi.  And that makes <tt>lazy</tt> look
-lazy, and have no inlining.  So the strictness analyser gets no traction.
-<p>
-In the worker/wrapper phase, after strictness analysis, <tt>lazy</tt> is "manually" inlined (see WorkWrap.lhs),
-so we get all the efficiency back.
-<p>
-This supersedes an earlier scheme involving an even grosser hack in which par# and seq# returned an
-Int#.  Now there is no seq# operator at all.
-
-
-    <hr>
-    <h4>fold/build</h4>
-    <p>
-      There is a lot of magic in <a
-      href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/lib/std/PrelBase.lhs"><code>PrelBase.lhs</code></a>
-      - among other things, the <a
-      href="http://haskell.cs.yale.edu/ghc/docs/latest/set/rewrite-rules.html">RULES
-      pragmas</a> implementing the <a
-      href="http://research.microsoft.com/Users/simonpj/Papers/deforestation-short-cut.ps.Z">fold/build</a>
-      optimisation.  The code for <code>map</code> is
-      a good example of how it all works.  In the prelude code for version
-      5.03 it reads as follows:
-    <blockquote><pre>
-map :: (a -> b) -> [a] -> [b]
-map _ []     = []
-map f (x:xs) = f x : map f xs
-
--- Note eta expanded
-mapFB ::  (elt -> lst -> lst) -> (a -> elt) -> a -> lst -> lst
-{-# INLINE [0] mapFB #-}
-mapFB c f x ys = c (f x) ys
-
-{-# RULES
-"map"       [~1] forall f xs.   map f xs               = build (\c n -> foldr (mapFB c f) n xs)
-"mapList"   [1]  forall f.      foldr (mapFB (:) f) [] = map f
-"mapFB"     forall c f g.       mapFB (mapFB c f) g    = mapFB c (f.g)
-  #-}</pre>
-    </blockquote>
-    <p>
-      Up to (but not including) phase 1, we use the <code>"map"</code> rule to
-      rewrite all saturated applications of <code>map</code> with its
-      build/fold form, hoping for fusion to happen.  In phase 1 and 0, we
-      switch off that rule, inline build, and switch on the
-      <code>"mapList"</code> rule, which rewrites the foldr/mapFB thing back
-      into plain map.
-    <p>
-      It's important that these two rules aren't both active at once
-      (along with build's unfolding), else we'd get an infinite loop
-      in the rules.  Hence the activation control using explicit phase numbers.
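-    <p>
-      If you want to watch this machinery at work, a toy module along the
-      following lines is enough (this is only an illustration; the dump flag
-      spelling and its output format vary between GHC versions):
-    <blockquote><pre>
-module FusionDemo (transform) where
-
--- Two traversals written naively; with -O the "map" rule rewrites both
--- calls into their build/foldr form, foldr/build fusion combines them, and
--- the "mapFB" rule collapses the composed workers, so only one list is
--- ever built.
-transform :: [Int] -> [Int]
-transform xs = map (* 2) (map (+ 1) xs)</pre>
-    </blockquote>
-    <p>
-      Compiling this with <code>-O</code> and a rule-statistics dump (for
-      example <code>-ddump-simpl-stats</code>) should report the
-      <code>"map"</code> and <code>"mapFB"</code> rules firing.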
- <p> - The "mapFB" rule optimises compositions of map. - <p> - The mechanism as described above is new in 5.03 since January 2002, - where the <code>[~</code><i>N</i><code>]</code> syntax for phase number - annotations at rules was introduced. Before that the whole arrangement - was more complicated, as the corresponding prelude code for version - 4.08.1 shows: - <blockquote><pre> -map :: (a -> b) -> [a] -> [b] -map = mapList - --- Note eta expanded -mapFB :: (elt -> lst -> lst) -> (a -> elt) -> a -> lst -> lst -mapFB c f x ys = c (f x) ys - -mapList :: (a -> b) -> [a] -> [b] -mapList _ [] = [] -mapList f (x:xs) = f x : mapList f xs - -{-# RULES -"map" forall f xs. map f xs = build (\c n -> foldr (mapFB c f) n xs) -"mapFB" forall c f g. mapFB (mapFB c f) g = mapFB c (f.g) -"mapList" forall f. foldr (mapFB (:) f) [] = mapList f - #-}</pre> - </blockquote> - <p> - This code is structured as it is, because the "map" rule first - <em>breaks</em> the map <em>open,</em> which exposes it to the various - foldr/build rules, and if no foldr/build rule matches, the "mapList" - rule <em>closes</em> it again in a later phase of optimisation - after - build was inlined. As a consequence, the whole thing depends a bit on - the timing of the various optimisations (the map might be closed again - before any of the foldr/build rules fires). To make the timing - deterministic, <code>build</code> gets a <code>{-# INLINE 2 build - #-}</code> pragma, which delays <code>build</code>'s inlining, and thus, - the closing of the map. [NB: Phase numbering was forward at that time.] - - <p><small> -<!-- hhmts start --> -Last modified: Mon Feb 11 20:00:49 EST 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/rts-libs/primitives.html b/docs/comm/rts-libs/primitives.html deleted file mode 100644 index 28abc79426..0000000000 --- a/docs/comm/rts-libs/primitives.html +++ /dev/null @@ -1,70 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Primitives</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Primitives</h1> - <p> - Most user-level Haskell types and functions provided by GHC (in - particular those from the Prelude and GHC's Prelude extensions) are - internally constructed from even more elementary types and functions. - Most notably, GHC understands a notion of <em>unboxed types,</em> which - are the Haskell representation of primitive bit-level integer, float, - etc. types (as opposed to their boxed, heap allocated counterparts) - - cf. <a - href="http://research.microsoft.com/Users/simonpj/Papers/unboxed-values.ps.Z">"Unboxed - Values as First Class Citizens."</a> - - <h4>The Ultimate Source of Primitives</h4> - <p> - The hardwired types of GHC are brought into scope by the module - <code>PrelGHC</code>. This modules only exists in the form of a - handwritten interface file <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/lib/std/PrelGHC.hi-boot"><code>PrelGHC.hi-boot</code>,</a> - which lists the type and function names, as well as instance - declarations. The actually types of these names as well as their - implementation is hardwired into GHC. Note that the names in this file - are z-encoded, and in particular, identifiers ending on <code>zh</code> - denote user-level identifiers ending in a hash mark (<code>#</code>), - which is used to flag unboxed values or functions operating on unboxed - values. 
For example, we have <code>Char#</code>, <code>ord#</code>, and - so on. - - <h4>The New Primitive Definition Scheme</h4> - <p> - As of (about) the development version 4.11, the types and various - properties of primitive operations are defined in the file <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/primops.txt.pp"><code>primops.txt.pp</code></a>. - (Personally, I don't think that the <code>.txt</code> suffix is really - appropriate, as the file is used for automatic code generation; the - recent addition of <code>.pp</code> means that the file is now mangled - by cpp.) - <p> - The utility <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/utils/genprimopcode/"><code>genprimopcode</code></a> - generates a series of Haskell files from <code>primops.txt</code>, which - encode the types and various properties of the primitive operations as - compiler internal data structures. These Haskell files are not complete - modules, but program fragments, which are included into compiler modules - during the GHC build process. The generated include files can be found - in the directory <code>fptools/ghc/compiler/</code> and carry names - matching the pattern <code>primop-*.hs-incl</code>. They are generate - during the execution of the <code>boot</code> target in the - <code>fptools/ghc/</code> directory. This scheme significantly - simplifies the maintenance of primitive operations. - <p> - As of development version 5.02, the <code>primops.txt</code> file also allows the - recording of documentation about intended semantics of the primitives. This can - be extracted into a latex document (or rather, into latex document fragments) - via an appropriate switch to <code>genprimopcode</code>. In particular, see <code>primops.txt</code> - for full details of how GHC is configured to cope with different machine word sizes. - <p><small> -<!-- hhmts start --> -Last modified: Mon Nov 26 18:03:16 EST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/rts-libs/stgc.html b/docs/comm/rts-libs/stgc.html deleted file mode 100644 index 196ec9150d..0000000000 --- a/docs/comm/rts-libs/stgc.html +++ /dev/null @@ -1,45 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Spineless Tagless C</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Spineless Tagless C</h1> - <p> - The C code generated by GHC doesn't use higher-level features of C to be - able to control as precisely as possible what code is generated. - Moreover, it uses special features of gcc (such as, first class labels) - to produce more efficient code. - <p> - STG C makes ample use of C's macro language to define idioms, which also - reduces the size of the generated C code (thus, reducing I/O times). - These macros are defined in the C headers located in GHC's <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/includes/"><code>includes</code></a> - directory. - - <h4><code>TailCalls.h</code></h4> - <p> - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/includes/TailCalls.h"><code>TailCalls.h</code></a> - defines how tail calls are implemented - and in particular - optimised - in GHC generated code. 
The default case, for an architecture for which - GHC is not optimised, is to use the mini interpreter described in the <a - href="http://research.microsoft.com/copyright/accept.asp?path=/users/simonpj/papers/spineless-tagless-gmachine.ps.gz&pub=34">STG paper.</a> - <p> - For supported architectures, various tricks are used to generate - assembler implementing proper tail calls. On i386, gcc's first class - labels are used to directly jump to a function pointer. Furthermore, - markers of the form <code>--- BEGIN ---</code> and <code>--- END - ---</code> are added to the assembly right after the function prologue - and before the epilogue. These markers are used by <a - href="../the-beast/mangler.html">the Evil Mangler.</a> - - <p><small> -<!-- hhmts start --> -Last modified: Wed Aug 8 19:28:29 EST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/rts-libs/threaded-rts.html b/docs/comm/rts-libs/threaded-rts.html deleted file mode 100644 index 739dc8d58a..0000000000 --- a/docs/comm/rts-libs/threaded-rts.html +++ /dev/null @@ -1,126 +0,0 @@ -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Multi-threaded runtime, and multiprocessor execution</title> - </head> - - <body> - <h1>The GHC Commentary - The Multi-threaded runtime, and multiprocessor execution</h1> - - <p>This section of the commentary explains the structure of the runtime system - when used in threaded or SMP mode.</p> - - <p>The <em>threaded</em> version of the runtime supports - bound threads and non-blocking foreign calls, and an overview of its - design can be found in the paper <a - href="http://www.haskell.org/~simonmar/papers/conc-ffi.pdf">Extending - the Haskell Foreign Function Interface with Concurrency</a>. To - compile the runtime with threaded support, add the line - -<pre>GhcRTSWays += thr</pre> - - to <tt>mk/build.mk</tt>. When building C code in the runtime for the threaded way, - the symbol <tt>THREADED_RTS</tt> is defined (this is arranged by the - build system when building for way <tt>thr</tt>, see - <tt>mk/config.mk</tt>). To build a Haskell program - with the threaded runtime, pass the flag <tt>-threaded</tt> to GHC (this - can be used in conjunction with <tt>-prof</tt>, and possibly - <tt>-debug</tt> and others depending on which versions of the RTS have - been built.</p> - - <p>The <em>SMP</em> version runtime supports the same facilities as the - threaded version, and in addition supports execution of Haskell code by - multiple simultaneous OS threads. For SMP support, both the runtime and - the libraries must be built a special way: add the lines - - <pre> -GhcRTSWays += thr -GhcLibWays += s</pre> - - to <tt>mk/build.mk</tt>. To build Haskell code for - SMP execution, use the flag <tt>-smp</tt> to GHC (this can be used in - conjunction with <tt>-debug</tt>, but no other way-flags at this time). - When building C code in the runtime for SMP - support, the symbol <tt>SMP</tt> is defined (this is arranged by the - compiler when the <tt>-smp</tt> flag is given, see - <tt>ghc/compiler/main/StaticFlags.hs</tt>).</p> - - <p>When building the runtime in either the threaded or SMP ways, the symbol - <tt>RTS_SUPPORTS_THREADS</tt> will be defined (see <tt>Rts.h</tt>).</p> - - <h2>Overall design</h2> - - <p>The system is based around the notion of a <tt>Capability</tt>. A - <tt>Capability</tt> is an object that represents both the permission to - execute some Haskell code, and the state required to do so. 
In order - to execute some Haskell code, a thread must therefore hold a - <tt>Capability</tt>. The available pool of capabilities is managed by - the <tt>Capability</tt> API, described below.</p> - - <p>In the threaded runtime, there is only a single <tt>Capability</tt> in the - system, indicating that only a single thread can be executing Haskell - code at any one time. In the SMP runtime, there can be an arbitrary - number of capabilities selectable at runtime with the <tt>+RTS -N<em>n</em></tt> - flag; in practice the number is best chosen to be the same as the number of - processors on the host machine.</p> - - <p>There are a number of OS threads running code in the runtime. We call - these <em>tasks</em> to avoid confusion with Haskell <em>threads</em>. - Tasks are managed by the <tt>Task</tt> subsystem, which is mainly - concerned with keeping track of statistics such as how much time each - task spends executing Haskell code, and also keeping track of how many - tasks are around when we want to shut down the runtime.</p> - - <p>Some tasks are created by the runtime itself, and some may be here - as a result of a call to Haskell from foreign code (we - call this an in-call). The - runtime can support any number of concurrent foreign in-calls, but the - number of these calls that will actually run Haskell code in parallel is - determined by the number of available capabilities. Each in-call creates - a <em>bound thread</em>, as described in the FFI/Concurrency paper (cited - above).</p> - - <p>In the future we may want to bind a <tt>Capability</tt> to a particular - processor, so that we can support a notion of affinity - avoiding - accidental migration of work from one CPU to another, so that we can make - best use of a CPU's local cache. For now, the design ignores this - issue.</p> - - <h2>The <tt>OSThreads</tt> interface</h2> - - <p>This interface is merely an abstraction layer over the OS-specific APIs - for managing threads. It has two main implementations: Win32 and - POSIX.</p> - - <p>This is the entirety of the interface:</p> - -<pre> -/* Various abstract types */ -typedef Mutex; -typedef Condition; -typedef OSThreadId; - -extern OSThreadId osThreadId ( void ); -extern void shutdownThread ( void ); -extern void yieldThread ( void ); -extern int createOSThread ( OSThreadId* tid, - void (*startProc)(void) ); - -extern void initCondition ( Condition* pCond ); -extern void closeCondition ( Condition* pCond ); -extern rtsBool broadcastCondition ( Condition* pCond ); -extern rtsBool signalCondition ( Condition* pCond ); -extern rtsBool waitCondition ( Condition* pCond, - Mutex* pMut ); - -extern void initMutex ( Mutex* pMut ); - </pre> - - <h2>The Task interface</h2> - - <h2>The Capability interface</h2> - - <h2>Multiprocessor Haskell Execution</h2> - - </body> -</html> diff --git a/docs/comm/the-beast/alien.html b/docs/comm/the-beast/alien.html deleted file mode 100644 index 3d4776ebc9..0000000000 --- a/docs/comm/the-beast/alien.html +++ /dev/null @@ -1,56 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Alien Functions</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Alien Functions</h1> - <p> - GHC implements experimental (by now it is actually quite well tested) - support for access to foreign functions and generally the interaction - between Haskell code and code written in other languages. 
Code - generation in this context can get quite tricky. This section attempts - to cast some light on this aspect of the compiler. - - <h4>FFI Stub Files</h4> - <p> - For each Haskell module that contains a <code>foreign export - dynamic</code> declaration, GHC generates a <code>_stub.c</code> file - that needs to be linked with any program that imports the Haskell - module. When asked about it <a - href="mailto:simonmar@microsoft.com">Simon Marlow</a> justified the - existence of these files as follows: - <blockquote> - The stub files contain the helper function which invokes the Haskell - code when called from C. - <p> - Each time the foreign export dynamic is invoked to create a new - callback function, a small piece of code has to be dynamically - generated (by code in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/rts/Adjustor.c"><code>Adjustor.c</code></a>). It is the address of this dynamically generated bit of - code that is returned as the <code>Addr</code> (or <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/hslibs/lang/Ptr.lhs"><code>Ptr</code></a>). - When called from C, the dynamically generated code must somehow invoke - the Haskell function which was originally passed to the - f.e.d. function -- it does this by invoking the helper function, - passing it a <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/hslibs/lang/StablePtr.lhs"><code>StablePtr</code></a> - to the Haskell function. It's split this way for two reasons: the - same helper function can be used each time the f.e.d. function is - called, and to keep the amount of dynamically generated code to a - minimum. - </blockquote> - <p> - The stub code is generated by <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/DsForeign.lhs"><code>DSForeign</code></a><code>.fexportEntry</code>. - - - <p><small> -<!-- hhmts start --> -Last modified: Fri Aug 10 11:47:41 EST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/basicTypes.html b/docs/comm/the-beast/basicTypes.html deleted file mode 100644 index b411e4c5a9..0000000000 --- a/docs/comm/the-beast/basicTypes.html +++ /dev/null @@ -1,132 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Basics</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The Basics</h1> - <p> - The directory <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/"><code>fptools/ghc/compiler/basicTypes/</code></a> - contains modules that define some of the essential types definition for - the compiler - such as, identifiers, variables, modules, and unique - names. Some of those are discussed in the following. 
See elsewhere for more - detailed information on: - <ul> - <li> <a href="vars.html"><code>Var</code>s, <code>Id</code>s, and <code>TyVar</code>s</a> - <li> <a href="renamer.html"><code>OccName</code>s, <code>RdrName</code>s, and <code>Names</code>s</a> - </ul> - - <h2>Elementary Types</h2> - - <h4><code>Id</code>s</h4> - <p> - An <code>Id</code> (defined in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/Id.lhs"><code>Id.lhs</code></a> - essentially records information about value and data constructor - identifiers -- to be precise, in the case of data constructors, two - <code>Id</code>s are used to represent the worker and wrapper functions - for the data constructor, respectively. The information maintained in - the <code>Id</code> abstraction includes among other items strictness, - occurrence, specialisation, and unfolding information. - <p> - Due to the way <code>Id</code>s are used for data constructors, - all <code>Id</code>s are represented as variables, which contain a - <code>varInfo</code> field of abstract type <code><a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/IdInfo.lhs">IdInfo</a>.IdInfo</code>. - This is where the information about <code>Id</code>s is really stored. - The following is a (currently, partial) list of the various items in an - <code>IdInfo</code>: - <p> - <dl> - <dt><a name="occInfo">Occurrence information</a> - <dd>The <code>OccInfo</code> data type is defined in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/BasicTypes.lhs"><code>BasicTypes.lhs</code></a>. - Apart from the trivial <code>NoOccInfo</code>, it distinguishes - between variables that do not occur at all (<code>IAmDead</code>), - occur just once (<code>OneOcc</code>), or a <a - href="simplifier.html#loopBreaker">loop breakers</a> - (<code>IAmALoopBreaker</code>). - </dl> - - <h2>Sets, Finite Maps, and Environments</h2> - <p> - Sets of variables, or more generally names, which are needed throughout - the compiler, are provided by the modules <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/VarSet.lhs"><code>VarSet.lhs</code></a> - and <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/NameSet.lhs"><code>NameSet.lhs</code></a>, - respectively. Moreover, frequently maps from variables (or names) to - other data is needed. For example, a substitution is represented by a - finite map from variable names to expressions. Jobs like this are - solved by means of variable and name environments implemented by the - modules <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/VarEnv.lhs"><code>VarEnv.lhs</code></a> - and <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/NameEnv.lhs"><code>NameEnv.lhs</code></a>. - - <h4>The Module <code>VarSet</code></h4> - <p> - The Module <code>VarSet</code> provides the types <code>VarSet</code>, - <code>IdSet</code>, and <code>TyVarSet</code>, which are synonyms in the - current implementation, as <code>Var</code>, <code>Id</code>, and - <code>TyVar</code> are synonyms. The module provides all the operations - that one would expect including the creating of sets from individual - variables and lists of variables, union and intersection operations, - element checks, deletion, filter, fold, and map functions. 
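-    <p>
-      To give a feel for the shape of this interface, here is a toy model
-      (emphatically <em>not</em> the real GHC code): a stand-in
-      <code>Var</code> that carries a unique key, with <code>Data.Set</code>
-      playing the role of the underlying set, and operation names that merely
-      mimic the ones described above:
-    <blockquote><pre>
-module VarSetToy where
-
-import qualified Data.Set as Set
-
--- A stand-in for GHC's Var: just a unique key plus a name.
-data Var = Var { varUnique :: Int, varName :: String }
-
-instance Eq  Var where a == b      = varUnique a == varUnique b
-instance Ord Var where compare a b = compare (varUnique a) (varUnique b)
-
-type VarSet = Set.Set Var
-
-emptyVarSet  :: VarSet
-emptyVarSet  = Set.empty
-
-mkVarSet     :: [Var] -> VarSet
-mkVarSet     = Set.fromList
-
-extendVarSet :: VarSet -> Var -> VarSet
-extendVarSet s v = Set.insert v s
-
-elemVarSet   :: Var -> VarSet -> Bool
-elemVarSet   = Set.member
-
-unionVarSet  :: VarSet -> VarSet -> VarSet
-unionVarSet  = Set.union</pre>
-    </blockquote>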
- <p> - The implementation is based on <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/utils/UniqSet.lhs"><code>UniqSet</code></a>s, - which in turn are simply <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/utils/UniqFM.lhs"><code>UniqFM</code></a>s - (i.e., finite maps with uniques as keys) that map each unique to the - variable that it represents. - - <h4>The Module <code>NameSet</code></h4> - <p> - The Module <code>NameSet</code> provides the same functionality as - <code>VarSet</code> only for <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/Name.lhs"><code>Name</code></a>s. - As for the difference between <code>Name</code>s and <code>Var</code>s, - a <code>Var</code> is built from a <code>Name</code> plus additional - information (mostly importantly type information). - - <h4>The Module <code>VarEnv</code></h4> - <p> - The module <code>VarEnv</code> provides the types <code>VarEnv</code>, - <code>IdEnv</code>, and <code>TyVarEnv</code>, which are again - synonyms. The provided base functionality is similar to - <code>VarSet</code> with the main difference that a type <code>VarEnv - T</code> associates a value of type <code>T</code> with each variable in - the environment, thus effectively implementing a finite map from - variables to values of type <code>T</code>. - <p> - The implementation of <code>VarEnv</code> is also by <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/utils/UniqFM.lhs"><code>UniqFM</code></a>, - which entails the slightly surprising implication that it is - <em>not</em> possible to retrieve the domain of a variable environment. - In other words, there is no function corresponding to - <code>VarSet.varSetElems :: VarSet -> [Var]</code> in - <code>VarEnv</code>. This is because the <code>UniqFM</code> used to - implement <code>VarEnv</code> stores only the unique corresponding to a - variable in the environment, but not the entire variable (and there is - no mapping from uniques to variables). - <p> - In addition to plain variable environments, the module also contains - special substitution environments - the type <code>SubstEnv</code> - - that associates variables with a special purpose type - <code>SubstResult</code>. - - <h4>The Module <code>NameEnv</code></h4> - <p> - The type <code>NameEnv.NameEnv</code> is like <code>VarEnv</code> only - for <code>Name</code>s. - - <p><hr><small> -<!-- hhmts start --> -Last modified: Tue Jan 8 18:29:52 EST 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/coding-style.html b/docs/comm/the-beast/coding-style.html deleted file mode 100644 index 41347c6902..0000000000 --- a/docs/comm/the-beast/coding-style.html +++ /dev/null @@ -1,230 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Coding Style Guidelines</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Coding Style Guidelines</h1> - - <p>This is a rough description of some of the coding practices and - style that we use for Haskell code inside <tt>ghc/compiler</tt>. - - <p>The general rule is to stick to the same coding style as is - already used in the file you're editing. If you must make - stylistic changes, commit them separately from functional changes, - so that someone looking back through the change logs can easily - distinguish them. 
- - <h2>To literate or not to literate?</h2> - - <p>In GHC we use a mixture of literate (<tt>.lhs</tt>) and - non-literate (<tt>.hs</tt>) source. I (Simon M.) prefer to use - non-literate style, because I think the - <tt>\begin{code}..\end{code}</tt> clutter up the source too much, - and I like to use Haddock-style comments (we haven't tried - processing the whole of GHC with Haddock yet, though). - - <h2>To CPP or not to CPP?</h2> - - <p>We pass all the compiler sources through CPP. The - <tt>-cpp</tt> flag is always added by the build system. - - <p>The following CPP symbols are used throughout the compiler: - - <dl> - <dt><tt>DEBUG</tt></dt> - - <dd>Used to enables extra checks and debugging output in the - compiler. The <tt>ASSERT</tt> macro (see <tt>HsVersions.h</tt>) - provides assertions which disappear when <tt>DEBUG</tt> is not - defined. - - <p>All debugging output should be placed inside <tt>#ifdef - DEBUG</tt>; we generally use this to provide warnings about - strange cases and things that might warrant investigation. When - <tt>DEBUG</tt> is off, the compiler should normally be silent - unless something goes wrong (exception when the verbosity level - is greater than zero). - - <p>A good rule of thumb is that <tt>DEBUG</tt> shouldn't add - more than about 10-20% to the compilation time. This is the case - at the moment. If it gets too expensive, we won't use it. For - more expensive runtime checks, consider adding a flag - see for - example <tt>-dcore-lint</tt>. - </dd> - - <dt><tt>GHCI</tt></dt> - - <dd>Enables GHCi support, including the byte code generator and - interactive user interface. This isn't the default, because the - compiler needs to be bootstrapped with itself in order for GHCi - to work properly. The reason is that the byte-code compiler and - linker are quite closely tied to the runtime system, so it is - essential that GHCi is linked with the most up-to-date RTS. - Another reason is that the representation of certain datatypes - must be consistent between GHCi and its libraries, and if these - were inconsistent then disaster could follow. - </dd> - - </dl> - - <h2>Platform tests</h2> - - <p>There are three platforms of interest to GHC: - - <ul> - <li>The <b>Build</b> platform. This is the platform on which we - are building GHC.</li> - <li>The <b>Host</b> platform. This is the platform on which we - are going to run this GHC binary, and associated tools.</li> - <li>The <b>Target</b> platform. This is the platform for which - this GHC binary will generate code.</li> - </ul> - - <p>At the moment, there is very limited support for having - different values for buil, host, and target. In particular:</p> - - <ul> - <li>The build platform is currently always the same as the host - platform. The build process needs to use some of the tools in - the source tree, for example <tt>ghc-pkg</tt> and - <tt>hsc2hs</tt>.</li> - - <li>If the target platform differs from the host platform, then - this is generally for the purpose of building <tt>.hc</tt> files - from Haskell source for porting GHC to the target platform. 
- Full cross-compilation isn't supported (yet).</li> - </ul> - - <p>In the compiler's source code, you may make use of the - following CPP symbols:</p> - - <ul> - <li><em>xxx</em><tt>_TARGET_ARCH</tt></li> - <li><em>xxx</em><tt>_TARGET_VENDOR</tt></li> - <li><em>xxx</em><tt>_TARGET_OS</tt></li> - <li><em>xxx</em><tt>_HOST_ARCH</tt></li> - <li><em>xxx</em><tt>_HOST_VENDOR</tt></li> - <li><em>xxx</em><tt>_HOST_OS</tt></li> - </ul> - - <p>where <em>xxx</em> is the appropriate value: - eg. <tt>i386_TARGET_ARCH</tt>. - - <h2>Compiler versions</h2> - - <p>GHC must be compilable by every major version of GHC from 5.02 - onwards, and itself. It isn't necessary for it to be compilable - by every intermediate development version (that includes last - week's CVS sources). - - <p>To maintain compatibility, use <tt>HsVersions.h</tt> (see - below) where possible, and try to avoid using <tt>#ifdef</tt> in - the source itself. - - <h2>The source file</h2> - - <p>We now describe a typical source file, annotating stylistic - choices as we go. - -<pre> -{-# OPTIONS ... #-} -</pre> - - <p>An <tt>OPTIONS</tt> pragma is optional, but if present it - should go right at the top of the file. Things you might want to - put in <tt>OPTIONS</tt> include: - - <ul> - <li><tt>-#include</tt> options to bring into scope prototypes - for FFI declarations</li> - <li><tt>-fvia-C</tt> if you know that - this module won't compile with the native code generator. - </ul> - - <p>Don't bother putting <tt>-cpp</tt> or <tt>-fglasgow-exts</tt> - in the <tt>OPTIONS</tt> pragma; these are already added to the - command line by the build system. - - -<pre> -module Foo ( - T(..), - foo, -- :: T -> T - ) where -</pre> - - <p>We usually (99% of the time) include an export list. The only - exceptions are perhaps where the export list would list absolutely - everything in the module, and even then sometimes we do it anyway. - - <p>It's helpful to give type signatures inside comments in the - export list, but hard to keep them consistent, so we don't always - do that. - -<pre> -#include "HsVersions.h" -</pre> - - <p><tt>HsVersions.h</tt> is a CPP header file containing a number - of macros that help smooth out the differences between compiler - versions. It defines, for example, macros for library module - names which have moved between versions. Take a look. - -<pre> --- friends -import SimplMonad - --- GHC -import CoreSyn -import Id ( idName, idType ) -import BasicTypes - --- libraries -import DATA_IOREF ( newIORef, readIORef ) - --- std -import List ( partition ) -import Maybe ( fromJust ) -</pre> - - <p>List imports in the following order: - - <ul> - <li>Local to this subsystem (or directory) first</li> - - <li>Compiler imports, generally ordered from specific to generic - (ie. modules from <tt>utils/</tt> and <tt>basicTypes/</tt> - usually come last)</li> - - <li>Library imports</li> - - <li>Standard Haskell 98 imports last</li> - </ul> - - <p>Import library modules from the <tt>base</tt> and - <tt>haskell98</tt> packages only. Use <tt>#defines</tt> in - <tt>HsVersions.h</tt> when the modules names differ between - versions of GHC (eg. <tt>DATA_IOREF</tt> in the example above). - For code inside <tt>#ifdef GHCI</tt>, don't need to worry about GHC - versioning (because we are bootstrapped). - - <p>We usually use import specs to give an explicit list of the - entities imported from a module. The main reason for doing this is - so that you can search the file for an entity and find which module - it comes from. 
However, huge import lists can be a pain to - maintain, so we often omit the import specs when they start to get - long (actually I start omitting them when they don't fit on one - line --Simon M.). Tip: use GHC's <tt>-fwarn-unused-imports</tt> - flag so that you get notified when an import isn't being used any - more. - - <p>If the module can be compiled multiple ways (eg. GHCI - vs. non-GHCI), make sure the imports are properly <tt>#ifdefed</tt> - too, so as to avoid spurious unused import warnings. - - <p><em>ToDo: finish this</em> - </body> -</html> diff --git a/docs/comm/the-beast/data-types.html b/docs/comm/the-beast/data-types.html deleted file mode 100644 index 4ec220c937..0000000000 --- a/docs/comm/the-beast/data-types.html +++ /dev/null @@ -1,242 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Data types and data constructors</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Data types and data constructors</h1> - <p> - -This chapter was thoroughly changed Feb 2003. - -<h2>Data types</h2> - -Consider the following data type declaration: - -<pre> - data T a = MkT !(a,a) !(T a) | Nil - - f x = case x of - MkT p q -> MkT p (q+1) - Nil -> Nil -</pre> -The user's source program mentions only the constructors <tt>MkT</tt> -and <tt>Nil</tt>. However, these constructors actually <em>do</em> something -in addition to building a data value. For a start, <tt>MkT</tt> evaluates -its arguments. Secondly, with the flag <tt>-funbox-strict-fields</tt> GHC -will flatten (or unbox) the strict fields. So we may imagine that there's the -<em>source</em> constructor <tt>MkT</tt> and the <em>representation</em> constructor -<tt>MkT</tt>, and things start to get pretty confusing. -<p> -GHC now generates three unique <tt>Name</tt>s for each data constructor: -<pre> - ---- OccName ------ - String Name space Used for - --------------------------------------------------------------------------- - The "source data con" MkT DataName The DataCon itself - The "worker data con" MkT VarName Its worker Id - aka "representation data con" - The "wrapper data con" $WMkT VarName Its wrapper Id (optional) -</pre> -Recall that each occurrence name (OccName) is a pair of a string and a -name space (see <a href="names.html">The truth about names</a>), and -two OccNames are considered the same only if both components match. -That is what distinguishes the name of the name of the DataCon from -the name of its worker Id. To keep things unambiguous, in what -follows we'll write "MkT{d}" for the source data con, and "MkT{v}" for -the worker Id. (Indeed, when you dump stuff with "-ddumpXXX", if you -also add "-dppr-debug" you'll get stuff like "Foo {- d rMv -}". The -"d" part is the name space; the "rMv" is the unique key.) -<p> -Each of these three names gets a distinct unique key in GHC's name cache. - -<h2>The life cycle of a data type</h2> - -Suppose the Haskell source looks like this: -<pre> - data T a = MkT !(a,a) !Int | Nil - - f x = case x of - Nil -> Nil - MkT p q -> MkT p (q+1) -</pre> -When the parser reads it in, it decides which name space each lexeme comes -from, thus: -<pre> - data T a = MkT{d} !(a,a) !Int | Nil{d} - - f x = case x of - Nil{d} -> Nil{d} - MkT{d} p q -> MkT{d} p (q+1) -</pre> -Notice that in the Haskell source <em>all data contructors are named via the "source data con" MkT{d}</em>, -whether in pattern matching or in expressions. 
-<p> -In the translated source produced by the type checker (-ddump-tc), the program looks like this: -<pre> - f x = case x of - Nil{d} -> Nil{v} - MkT{d} p q -> $WMkT p (q+1) - -</pre> -Notice that the type checker replaces the occurrence of MkT by the <em>wrapper</em>, but -the occurrence of Nil by the <em>worker</em>. Reason: Nil doesn't have a wrapper because there is -nothing to do in the wrapper (this is the vastly common case). -<p> -Though they are not printed out by "-ddump-tc", behind the scenes, there are -also the following: the data type declaration and the wrapper function for MkT. -<pre> - data T a = MkT{d} a a Int# | Nil{d} - - $WMkT :: (a,a) -> T a -> T a - $WMkT p t = case p of - (a,b) -> seq t (MkT{v} a b t) -</pre> -Here, the <em>wrapper</em> <tt>$WMkT</tt> evaluates and takes apart the argument <tt>p</tt>, -evaluates the argument <tt>t</tt>, and builds a three-field data value -with the <em>worker</em> constructor <tt>MkT{v}</tt>. (There are more notes below -about the unboxing of strict fields.) The worker $WMkT is called an <em>implicit binding</em>, -because it's introduced implicitly by the data type declaration (record selectors -are also implicit bindings, for example). Implicit bindings are injected into the code -just before emitting code or External Core. -<p> -After desugaring into Core (-ddump-ds), the definition of <tt>f</tt> looks like this: -<pre> - f x = case x of - Nil{d} -> Nil{v} - MkT{d} a b r -> let { p = (a,b); q = I# r } in - $WMkT p (q+1) -</pre> -Notice the way that pattern matching has been desugared to take account of the fact -that the "real" data constructor MkT has three fields. -<p> -By the time the simplifier has had a go at it, <tt>f</tt> will be transformed to: -<pre> - f x = case x of - Nil{d} -> Nil{v} - MkT{d} a b r -> MkT{v} a b (r +# 1#) -</pre> -Which is highly cool. - - -<h2> The constructor wrapper functions </h2> - -The wrapper functions are automatically generated by GHC, and are -really emitted into the result code (albeit only after CorePre; see -<tt>CorePrep.mkImplicitBinds</tt>). -The wrapper functions are inlined very -vigorously, so you will not see many occurrences of the wrapper -functions in an optimised program, but you may see some. For example, -if your Haskell source has -<pre> - map MkT xs -</pre> -then <tt>$WMkT</tt> will not be inlined (because it is not applied to anything). -That is why we generate real top-level bindings for the wrapper functions, -and generate code for them. - - -<h2> The constructor worker functions </h2> - -Saturated applications of the constructor worker function MkT{v} are -treated specially by the code generator; they really do allocation. -However, we do want a single, shared, top-level definition for -top-level nullary constructors (like True and False). Furthermore, -what if the code generator encounters a non-saturated application of a -worker? E.g. <tt>(map Just xs)</tt>. We could declare that to be an -error (CorePrep should saturate them). But instead we currently -generate a top-level definition for each constructor worker, whether -nullary or not. It takes the form: -<pre> - MkT{v} = \ p q r -> MkT{v} p q r -</pre> -This is a real hack. The occurrence on the RHS is saturated, so the code generator (both the -one that generates abstract C and the byte-code generator) treats it as a special case and -allocates a MkT; it does not make a recursive call! So now there's a top-level curried -version of the worker which is available to anyone who wants it. 
-<p> -This strange definition is not emitted into External Core. Indeed, you might argue that -we should instead pass the list of <tt>TyCon</tt>s to the code generator and have it -generate magic bindings directly. As it stands, it's a real hack: see the code in -CorePrep.mkImplicitBinds. - - -<h2> External Core </h2> - -When emitting External Core, we should see this for our running example: - -<pre> - data T a = MkT a a Int# | Nil{d} - - $WMkT :: (a,a) -> T a -> T a - $WMkT p t = case p of - (a,b) -> seq t (MkT a b t) - - f x = case x of - Nil -> Nil - MkT a b r -> MkT a b (r +# 1#) -</pre> -Notice that it makes perfect sense as a program all by itself. Constructors -look like constructors (albeit not identical to the original Haskell ones). -<p> -When reading in External Core, the parser is careful to read it back in just -as it was before it was spat out, namely: -<pre> - data T a = MkT{d} a a Int# | Nil{d} - - $WMkT :: (a,a) -> T a -> T a - $WMkT p t = case p of - (a,b) -> seq t (MkT{v} a b t) - - f x = case x of - Nil{d} -> Nil{v} - MkT{d} a b r -> MkT{v} a b (r +# 1#) -</pre> - - -<h2> Unboxing strict fields </h2> - -If GHC unboxes strict fields (as in the first argument of <tt>MkT</tt> above), -it also transforms -source-language case expressions. Suppose you write this in your Haskell source: -<pre> - case e of - MkT p t -> ..p..t.. -</pre> -GHC will desugar this to the following Core code: -<pre> - case e of - MkT a b t -> let p = (a,b) in ..p..t.. -</pre> -The local let-binding reboxes the pair because it may be mentioned in -the case alternative. This may well be a bad idea, which is why -<tt>-funbox-strict-fields</tt> is an experimental feature. -<p> -It's essential that when importing a type <tt>T</tt> defined in some -external module <tt>M</tt>, GHC knows what representation was used for -that type, and that in turn depends on whether module <tt>M</tt> was -compiled with <tt>-funbox-strict-fields</tt>. So when writing an -interface file, GHC therefore records with each data type whether its -strict fields (if any) should be unboxed. - -<h2> Labels and info tables </h2> - -<em>Quick rough notes: SLPJ March 2003</em>. -<p> -Every data constructor <tt>C</tt>has two info tables: -<ul> -<li> The static info table (label <tt>C_static_info</tt>), used for statically-allocated constructors. - -<li> The dynamic info table (label <tt>C_con_info</tt>), used for dynamically-allocated constructors. -</ul> -Statically-allocated constructors are not moved by the garbage collector, and therefore have a different closure -type from dynamically-allocated constructors; hence they need -a distinct info table. -Both info tables share the same entry code, but since the entry code is phyiscally juxtaposed with the -info table, it must be duplicated (<tt>C_static_entry</tt> and <tt>C_con_entry</tt> respectively). 
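-<p>
-A convenient way to see the story above in action is to compile a small module
-like the following with something like
-<tt>ghc -O -funbox-strict-fields -ddump-ds -ddump-simpl</tt>
-(the exact output varies between GHC versions); the dumps show the wrapper
-<tt>$WMkT</tt>, the worker, and the reboxing let-binding described earlier.
-<pre>
-  module StrictFieldDemo where
-
-  -- The running example of this chapter: a constructor with strict fields,
-  -- one of which (the pair) can be flattened by -funbox-strict-fields.
-  data T a = MkT !(a, a) !Int | Nil
-
-  f :: T a -> T a
-  f x = case x of
-          MkT p q -> MkT p (q + 1)
-          Nil     -> Nil
-</pre>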
-
- </body>
-</html>
-
diff --git a/docs/comm/the-beast/desugar.html b/docs/comm/the-beast/desugar.html
deleted file mode 100644
index a66740259b..0000000000
--- a/docs/comm/the-beast/desugar.html
+++ /dev/null
@@ -1,156 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
-<html>
-  <head>
-    <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1">
-    <title>The GHC Commentary - Sugar Free: From Haskell To Core</title>
-  </head>
-
-  <body BGCOLOR="FFFFFF">
-    <h1>The GHC Commentary - Sugar Free: From Haskell To Core</h1>
-    <p>
-      Up until after type checking, GHC keeps the source program in an
-      abstract representation of Haskell source without removing any of the
-      syntactic sugar (such as list comprehensions) that could easily be
-      represented by more primitive Haskell.  This complicates part of the
-      front-end considerably, as the abstract syntax of Haskell (as exported by
-      the module <a
-      href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/hsSyn/HsSyn.lhs"><code>HsSyn</code></a>)
-      is much more complex than a simplified representation close to, say, the
-      <a href="http://haskell.org/onlinereport/intro.html#sect1.2">Haskell
-      Kernel</a> would be.  However, having a representation that is as close
-      as possible to the surface syntax simplifies the generation of clear
-      error messages.  As GHC (quite in contrast to "conventional" compilers)
-      prints code fragments as part of error messages, the choice of
-      representation is especially important.
-    <p>
-      Nonetheless, as soon as the input has passed all static checks, it is
-      transformed into GHC's principal intermediate language, which goes by the
-      name of <em>Core</em> and whose representation is exported by the
-      module <a
-      href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/coreSyn/CoreSyn.lhs"><code>CoreSyn</code></a>.
-      All following compiler phases, except code generation, operate on Core.
-      Due to Andrew Tolmach's effort, there is also an <a
-      href="http://www.haskell.org/ghc/docs/papers/core.ps.gz">external
-      representation for Core.</a>
-    <p>
-      The conversion of the compiled module from <code>HsSyn</code> into that
-      of <code>CoreSyn</code> is performed by a phase called the
-      <em>desugarer</em>, which is located in
-      <a
-      href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/"><code>fptools/ghc/compiler/deSugar/</code></a>.
-      Its operation is detailed in the following.
-    </p>
-
-    <h2>Auxiliary Functions</h2>
-    <p>
-      The module <a
-      href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/DsMonad.lhs"><code>DsMonad</code></a>
-      defines the desugarer monad (of type <code>DsM</code>), which maintains
-      the environment needed for desugaring.  In particular, it encapsulates a
-      unique supply for generating new variables, a map to look up standard
-      names (such as functions from the prelude), a source location for error
-      messages, and a pool to collect warning messages generated during
-      desugaring.  Initialisation of the environment happens in the function <a
-      href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/Desugar.lhs"><code>Desugar</code></a><code>.desugar</code>,
-      which is also the main entry point into the desugarer.
-    <p>
-      The generation of Core code often involves the use of standard functions
-      for which proper identifiers (i.e., values of type <code>Id</code> that
-      actually refer to the definition in the right Prelude) need to be
-      obtained.
This is supported by the function - <code>DsMonad.dsLookupGlobalValue :: Name -> DsM Id</code>. - - <h2><a name="patmat">Pattern Matching</a></h2> - <p> - Nested pattern matching with guards and everything is translated into - the simple, flat case expressions of Core by the following modules: - <dl> - <dt><a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/Match.lhs"><code>Match</code></a>: - <dd>This modules contains the main pattern-matching compiler in the form - of a function called <code>match</code>. There is some documentation - as to how <code>match</code> works contained in the module itself. - Generally, the implemented algorithm is similar to the one described - in Phil Wadler's Chapter ? of Simon Peyton Jones' <em>The - Implementation of Functional Programming Languages</em>. - <code>Match</code> exports a couple of functions with not really - intuitive names. In particular, it exports <code>match</code>, - <code>matchWrapper</code>, <code>matchExport</code>, and - <code>matchSimply</code>. The function <code>match</code>, which is - the main work horse, is only used by the other matching modules. The - function <code>matchExport</code> - despite it's name - is merely used - internally in <code>Match</code> and handles warning messages (see - below for more details). The actual interface to the outside is - <code>matchWrapper</code>, which converts the output of the type - checker into the form needed by the pattern matching compiler (i.e., a - list of <code>EquationInfo</code>). Similar in function to - <code>matchWrapper</code> is <code>matchSimply</code>, which provides - an interface for the case where a single expression is to be matched - against a single pattern (as, for example, is the case in bindings in - a <code>do</code> expression). - <dt><a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/MatchCon.lhs"><code>MatchCon</code></a>: - <dd>This module generates code for a set of alternative constructor - patterns that belong to a single type by means of the routine - <code>matchConFamily</code>. More precisely, the routine gets a set - of equations where the left-most pattern of each equation is a - constructor pattern with a head symbol from the same type as that of - all the other equations. A Core case expression is generated that - distinguihes between all these constructors. The routine is clever - enough to generate a sparse case expression and to add a catch-all - default case only when needed (i.e., if the case expression isn't - exhaustive already). There is also an explanation at the start of the - modules. - <dt><a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/MatchLit.lhs"><code>MatchLit</code></a>: - <dd>Generates code for a set of alternative literal patterns by means of - the routine <code>matchLiterals</code>. The principle is similar to - that of <code>matchConFamily</code>, but all left-most patterns are - literals of the same type. - <dt><a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/DsUtils.lhs"><code>DsUtils</code></a>: - <dd>This module provides a set of auxilliary definitions as well as the - data types <code>EquationInfo</code> and <code>MatchResult</code> that - form the input and output, respectively, of the pattern matching - compiler. 
- <dt><a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/deSugar/Check.lhs"><code>Check</code></a>: - <dd>This module does not really contribute the compiling pattern - matching, but it inspects sets of equations to find whether there are - any overlapping patterns or non-exhaustive pattern sets. This task is - implemented by the function <code>check</code>, which returns a list of - patterns that are part of a non-exhaustive case distinction as well as a - set of equation labels that can be reached during execution of the code; - thus, the remaining equations are shadowed due to overlapping patterns. - The function <code>check</code> is invoked and its result converted into - suitable warning messages by the function <code>Match.matchExport</code> - (which is a wrapper for <code>Match.match</code>). - </dl> - <p> - The central function <code>match</code>, given a set of equations, - proceeds in a number of steps: - <ol> - <li>It starts by desugaring the left-most pattern of each equation using - the function <code>tidy1</code> (indirectly via - <code>tidyEqnInfo</code>). During this process, non-elementary - pattern (e.g., those using explicit list syntax <code>[x, y, ..., - z]</code>) are converted to a standard constructor pattern and also - irrefutable pattern are removed. - <li>Then, a process called <em>unmixing</em> clusters the equations into - blocks (without re-ordering them), such that the left-most pattern of - all equations in a block are either all variables, all literals, or - all constructors. - <li>Each block is, then, compiled by <code>matchUnmixedEqns</code>, - which forwards the handling of literal pattern blocks to - <code>MatchLit.matchLiterals</code>, of constructor pattern blocks to - <code>MatchCon.matchConFamily</code>, and hands variable pattern - blocks back to <code>match</code>. - </ol> - - <p><hr><small> -<!-- hhmts start --> -Last modified: Mon Feb 11 22:35:25 EST 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/driver.html b/docs/comm/the-beast/driver.html deleted file mode 100644 index fbf65e33e7..0000000000 --- a/docs/comm/the-beast/driver.html +++ /dev/null @@ -1,179 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Glorious Driver</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The Glorious Driver</h1> - <p> - The Glorious Driver (GD) is the part of GHC that orchestrates the - interaction of all the other pieces that make up GHC. It supersedes the - <em>Evil Driver (ED),</em> which was a Perl script that served the same - purpose and was in use until version 4.08.1 of GHC. Simon Marlow - eventually slayed the ED and instated the GD. The GD is usually called - the <em>Compilation Manager</em> these days. - </p> - <p> - The GD has been substantially extended for GHCi, i.e., the interactive - variant of GHC that integrates the compiler with a (meta-circular) - interpreter since version 5.00. Most of the driver is located in the - directory - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/"><code>fptools/ghc/compiler/main/</code></a>. - </p> - - <h2>Command Line Options</h2> - <p> - GHC's many flavours of command line options make the code interpreting - them rather involved. The following provides a brief overview of the - processing of these options. 
Since the addition of the interactive - front-end to GHC, there are two kinds of options: <em>static - options</em> and <em>dynamic options.</em> The former can only be set - when the system is invoked, whereas the latter can be altered in the - course of an interactive session. A brief explanation on the difference - between these options and related matters is at the start of the module - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/CmdLineOpts.lhs"><code>CmdLineOpts</code></a>. - The same module defines the enumeration <code>DynFlag</code>, which - contains all dynamic flags. Moreover, there is the labelled record - <code>DynFlags</code> that collects all the flag-related information - that is passed by the compilation manager to the compiler proper, - <code>hsc</code>, whenever a compilation is triggered. If you like to - find out whether an option is static, use the predicate - <code>isStaticHscFlag</code> in the same module. - <p> - The second module that contains a lot of code related to the management - of flags is <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/DriverFlags.hs"><code>DriverFlags.hs</code></a>. - In particular, the module contains two association lists that map the - textual representation of the various flags to a data structure that - tells the driver how to parse the flag (e.g., whether it has any - arguments) and provides its internal representation. All static flags - are contained in <code>static_flags</code>. A whole range of - <code>-f</code> flags can be negated by adding a <code>-f-no-</code> - prefix. These flags are contained in the association list - <code>fFlags</code>. - <p> - The driver uses a nasty hack based on <code>IORef</code>s that permits - the rest of the compiler to access static flags as CAFs; i.e., there is - a family of toplevel variable definitions in - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/CmdLineOpts.lhs"><code>CmdLineOpts</code></a>, - below the literate section heading <i>Static options</i>, each of which - contains the value of one static option. This is essentially realised - via global variables (in the sense of C-style, updatable, global - variables) defined via an evil pre-processor macro named - <code>GLOBAL_VAR</code>, which is defined in a particularly ugly corner - of GHC, namely the C header file - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/HsVersions.h"><code>HsVersions.h</code></a>. - - <h2>What Happens When</h2> - <p> - Inside the Haskell compiler proper (<code>hsc</code>), a whole series of - stages (``passes'') are executed in order to transform your Haskell program - into C or native code. This process is orchestrated by - <code>main/HscMain.hscMain</code> and its relative - <code>hscReComp</code>. The latter directly invokes, in order, - the parser, the renamer, the typechecker, the desugarer, the - simplifier (Core2Core), the CoreTidy pass, the CorePrep pass, - conversion to STG (CoreToStg), the interface generator - (MkFinalIface), the code generator, and code output. The - simplifier is the most complex of these, and is made up of many - sub-passes. These are controlled by <code>buildCoreToDo</code>, - as described below. - - <h2>Scheduling Optimisations Phases</h2> - <p> - GHC has a large variety of optimisations at its disposal, many of which - have subtle interdependencies. 
The overall plan for program - optimisation is fixed in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/DriverState.hs"><code>DriverState.hs</code></a>. - First of all, there is the variable <code>hsc_minusNoO_flags</code> that - determines the <code>-f</code> options that you get without - <code>-O</code> (aka optimisation level 0) as well as - <code>hsc_minusO_flags</code> and <code>hsc_minusO2_flags</code> for - <code>-O</code> and <code>-O2</code>. - <p> - However, most of the strategic decisions about optimisations on the - intermediate language Core are encoded in the value produced by - <code>buildCoreToDo</code>, which is a list with elements of type - <code>CoreToDo</code>. Each element of this list specifies one step in - the sequence of core optimisations executed by the <a - href="simplifier.html">Mighty Simplifier</a>. The type - <code>CoreToDo</code> is defined in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/CmdLineOpts.lhs"><code>CmdLineOpts.lhs</code></a>. - The actual execution of the optimisation plan produced by - <code>buildCoreToDo</code> is performed by <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/simplCore/SimplCore.lhs"><code>SimpleCore</code></a><code>.doCorePasses</code>. - Core optimisation plans consist of a number of simplification phases - (currently, three for optimisation levels of 1 or higher) with - decreasing phase numbers (the lowest, corresponding to the last phase, - namely 0). Before and after these phases, optimisations such as - specialisation, let floating, worker/wrapper, and so on are executed. - The sequence of phases is such that the synergistic effect of the phases - is maximised -- however, this is a fairly fragile arrangement. - <p> - There is a similar construction for optimisations on STG level stored in - the variable <code>buildStgToDo :: [StgToDo]</code>. However, this is a - lot less complex than the arrangement for Core optimisations. - - <h2>Linking the <code>RTS</code> and <code>libHSstd</code></h2> - <p> - Since the RTS and HSstd refer to each other, there is a Cunning - Hack to avoid putting them each on the command-line twice or - thrice (aside: try asking for `plaice and chips thrice' in a - fish and chip shop; bet you only get two lots). The hack involves - adding - the symbols that the RTS needs from libHSstd, such as - <code>PrelWeak_runFinalizzerBatch_closure</code> and - <code>__stginit_Prelude</code>, to the link line with the - <code>-u</code> flag. The standard library appears before the - RTS on the link line, and these options cause the corresponding - symbols to be picked up even so the linked might not have seen them - being used as the RTS appears later on the link line. As a result, - when the RTS is also scanned, these symbols are already resolved. This - avoids the linker having to read the standard library and RTS - multiple times. - </p> - <p> - This does, however, leads to a complication. Normal Haskell - programs do not have a <code>main()</code> function, so this is - supplied by the RTS (in the file - <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/rts/Main.c"><code>Main.c</code></a>). - It calls <code>startupHaskell</code>, which - itself calls <code>__stginit_PrelMain</code>, which is therefore, - since it occurs in the standard library, one of the symbols - passed to the linker using the <code>-u</code> option. 
This is fine - for standalone Haskell programs, but as soon as the Haskell code is only - used as part of a program implemented in a foreign language, the - <code>main()</code> function of that foreign language should be used - instead of that of the Haskell runtime. In this case, the previously - described arrangement unfortunately fails as - <code>__stginit_PrelMain</code> had better not be linked in, - because it tries to call <code>__stginit_Main</code>, which won't - exist. In other words, the RTS's <code>main()</code> refers to - <code>__stginit_PrelMain</code> which in turn refers to - <code>__stginit_Main</code>. Although the RTS's <code>main()</code> - might not be linked in if the program provides its own, the driver - will normally force <code>__stginit_PrelMain</code> to be linked in anyway, - using <code>-u</code>, because it's a back-reference from the - RTS to HSstd. This case is coped with by the <code>-no-hs-main</code> - flag, which suppresses passing the corresonding <code>-u</code> option - to the linker -- although in some versions of the compiler (e.g., 5.00.2) - it didn't work. In addition, the driver generally places the C program - providing the <code>main()</code> that we want to use before the RTS - on the link line. Therefore, the RTS's main is never used and - without the <code>-u</code> the label <code>__stginit_PrelMain</code> - will not be linked. - </p> - - <p><small> -<!-- hhmts start --> -Last modified: Tue Feb 19 11:09:00 UTC 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/fexport.html b/docs/comm/the-beast/fexport.html deleted file mode 100644 index 956043bafb..0000000000 --- a/docs/comm/the-beast/fexport.html +++ /dev/null @@ -1,231 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - foreign export</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - foreign export</h1> - - The implementation scheme for foreign export, as of 27 Feb 02, is - as follows. There are four cases, of which the first two are easy. - <p> - <b>(1) static export of an IO-typed function from some module <code>MMM</code></b> - <p> - <code>foreign export foo :: Int -> Int -> IO Int</code> - <p> - For this we generate no Haskell code. However, a C stub is - generated, and it looks like this: - <p> - <pre> -extern StgClosure* MMM_foo_closure; - -HsInt foo (HsInt a1, HsInt a2) -{ - SchedulerStatus rc; - HaskellObj ret; - rc = rts_evalIO( - rts_apply(rts_apply(MMM_foo_closure,rts_mkInt(a1)), - rts_mkInt(a2) - ), - &ret - ); - rts_checkSchedStatus("foo",rc); - return(rts_getInt(ret)); -} -</pre> - <p> - This does the obvious thing: builds in the heap the expression - <code>(foo a1 a2)</code>, calls <code>rts_evalIO</code> to run it, - and uses <code>rts_getInt</code> to fish out the result. - - <p> - <b>(2) static export of a non-IO-typed function from some module <code>MMM</code></b> - <p> - <code>foreign export foo :: Int -> Int -> Int</code> - <p> - This is identical to case (1), with the sole difference that the - stub calls <code>rts_eval</code> rather than - <code>rts_evalIO</code>. - <p> - - <b>(3) dynamic export of an IO-typed function from some module <code>MMM</code></b> - <p> - <code>foreign export mkCallback :: (Int -> Int -> IO Int) -> IO (FunPtr a)</code> - <p> - Dynamic exports are a whole lot more complicated than their static - counterparts. 
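      <p>
      In current FFI syntax the same facility is normally written as a
      <code>"wrapper"</code> import rather than a dynamic
      <code>foreign export</code>; as far as the RTS is concerned it uses
      much the same machinery described below.  A minimal sketch -- the
      module and function names here are made up for illustration only:
      <pre>
{-# LANGUAGE ForeignFunctionInterface #-}
module CallbackDemo where

import Foreign.Ptr (FunPtr, freeHaskellFunPtr)

-- A "wrapper" import: a factory that turns a Haskell function into a
-- C-callable code pointer, much like the mkCallback discussed here.
foreign import ccall "wrapper"
  mkCallback :: (Int -> Int -> IO Int) -> IO (FunPtr (Int -> Int -> IO Int))

callMe :: Int -> Int -> IO Int
callMe a b = return (a * b)

demo :: IO ()
demo = do
  fp <- mkCallback callMe    -- allocates the stable pointer + adjustor thunk
  -- ... hand fp out to foreign code and let it call back in ...
  freeHaskellFunPtr fp       -- frees the adjustor thunk when no longer needed
      </pre>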
- <p> - First of all, we get some Haskell code, which, when given a - function <code>callMe :: (Int -> Int -> IO Int)</code> to be made - C-callable, IO-returns a <code>FunPtr a</code>, which is the - address of the resulting C-callable code. This address can now be - handed out to the C-world, and callers to it will get routed - through to <code>callMe</code>. - <p> - The generated Haskell function looks like this: - <p> -<pre> -mkCallback f - = do sp <- mkStablePtr f - r <- ccall "createAdjustorThunk" sp (&"run_mkCallback") - return r -</pre> - <p> - <code>createAdjustorThunk</code> is a gruesome, - architecture-specific function in the RTS. It takes a stable - pointer to the Haskell function to be run, and the address of the - associated C wrapper, and returns a piece of machine code, - which, when called from the outside (C) world, eventually calls - through to <code>f</code>. - <p> - This machine code fragment is called the "Adjustor Thunk" (don't - ask me why). What it does is simply to call onwards to the C - helper - function <code>run_mkCallback</code>, passing all the args given - to it but also conveying <code>sp</code>, which is a stable - pointer - to the Haskell function to run. So: - <p> -<pre> -createAdjustorThunk ( StablePtr sp, CCodeAddress addr_of_helper_C_fn ) -{ - create malloc'd piece of machine code "mc", behaving thusly: - - mc ( args_to_mc ) - { - jump to addr_of_helper_C_fn, passing sp as an additional - argument - } -</pre> - <p> - This is a horrible hack, because there is no portable way, even at - the machine code level, to function which adds one argument and - then transfers onwards to another C function. On x86s args are - pushed R to L onto the stack, so we can just push <code>sp</code>, - fiddle around with return addresses, and jump onwards to the - helper C function. However, on architectures which use register - windows and/or pass args extensively in registers (Sparc, Alpha, - MIPS, IA64), this scheme borders on the unviable. GHC has a - limited <code>createAdjustorThunk</code> implementation for Sparc - and Alpha, which handles only the cases where all args, including - the extra one, fit in registers. - <p> - Anyway: the other lump of code generated as a result of a - f-x-dynamic declaration is the C helper stub. This is basically - the same as in the static case, except that it only ever gets - called from the adjustor thunk, and therefore must accept - as an extra argument, a stable pointer to the Haskell function - to run, naturally enough, as this is not known until run-time. - It then dereferences the stable pointer and does the call in - the same way as the f-x-static case: -<pre> -HsInt Main_d1kv ( StgStablePtr the_stableptr, - void* original_return_addr, - HsInt a1, HsInt a2 ) -{ - SchedulerStatus rc; - HaskellObj ret; - rc = rts_evalIO( - rts_apply(rts_apply((StgClosure*)deRefStablePtr(the_stableptr), - rts_mkInt(a1) - ), - rts_mkInt(a2) - ), - &ret - ); - rts_checkSchedStatus("Main_d1kv",rc); - return(rts_getInt(ret)); -} -</pre> - <p> - Note how this function has a purely made-up name - <code>Main_d1kv</code>, since unlike the f-x-static case, this - function is never called from user code, only from the adjustor - thunk. - <p> - Note also how the function takes a bogus parameter - <code>original_return_addr</code>, which is part of this extra-arg - hack. The usual scheme is to leave the original caller's return - address in place and merely push the stable pointer above that, - hence the spare parameter. 
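      <p>
      The stable-pointer half of this story can be tried out in miniature
      with the ordinary <code>Foreign.StablePtr</code> API
      (<code>newStablePtr</code> being the standard library's counterpart
      of the <code>mkStablePtr</code> used above); a small sketch, with
      illustrative names:
      <pre>
import Foreign.StablePtr (newStablePtr, deRefStablePtr, freeStablePtr)

-- What mkCallback and the C helper do between them: park the callback
-- behind a StablePtr so the GC keeps it alive, then dereference it
-- again at call time.
stablePtrRoundTrip :: IO ()
stablePtrRoundTrip = do
  sp <- newStablePtr ((\a b -> return (a + b)) :: Int -> Int -> IO Int)
  f  <- deRefStablePtr sp    -- the deRefStablePtr done by Main_d1kv above
  r  <- f 3 4
  print r                    -- 7
  freeStablePtr sp           -- otherwise the closure is retained forever
      </pre>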
- <p> - Finally, there is some extra trickery, detailed in - <code>ghc/rts/Adjustor.c</code>, to get round the following - problem: the adjustor thunk lives in mallocville. It is - quite possible that the Haskell code will actually - call <code>free()</code> on the adjustor thunk used to get to it - -- because otherwise there is no way to reclaim the space used - by the adjustor thunk. That's all very well, but it means that - the C helper cannot return to the adjustor thunk in the obvious - way, since we've already given it back using <code>free()</code>. - So we leave, on the C stack, the address of whoever called the - adjustor thunk, and before calling the helper, mess with the stack - such that when the helper returns, it returns directly to the - adjustor thunk's caller. - <p> - That's how the <code>stdcall</code> convention works. If the - adjustor thunk has been called using the <code>ccall</code> - convention, we return indirectly, via a statically-allocated - yet-another-magic-piece-of-code, which takes care of removing the - extra argument that the adjustor thunk pushed onto the stack. - This is needed because in <code>ccall</code>-world, it is the - caller who removes args after the call, and the original caller of - the adjustor thunk has no way to know about the extra arg pushed - by the adjustor thunk. - <p> - You didn't really want to know all this stuff, did you? - <p> - - - - <b>(4) dynamic export of an non-IO-typed function from some module <code>MMM</code></b> - <p> - <code>foreign export mkCallback :: (Int -> Int -> Int) -> IO (FunPtr a)</code> - <p> - (4) relates to (3) as (2) relates to (1), that is, it's identical, - except the C stub uses <code>rts_eval</code> instead of - <code>rts_evalIO</code>. - <p> - - - <h2>Some perspective on f-x-dynamic</h2> - - The only really horrible problem with f-x-dynamic is how the - adjustor thunk should pass to the C helper the stable pointer to - use. Ideally we would like this to be conveyed via some invisible - side channel, since then the adjustor thunk could simply jump - directly to the C helper, with no non-portable stack fiddling. - <p> - Unfortunately there is no obvious candidate for the invisible - side-channel. We've chosen to pass it on the stack, with the - bad consequences detailed above. Another possibility would be to - park it in a global variable, but this is non-reentrant and - non-(OS-)thread-safe. A third idea is to put it into a callee-saves - register, but that has problems too: the C helper may not use that - register and therefore we will have trashed any value placed there - by the caller; and there is no C-level portable way to read from - the register inside the C helper. - <p> - In short, we can't think of a really satisfactory solution. I'd - vote for introducing some kind of OS-thread-local-state and passing - it in there, but that introduces complications of its own. - <p> - <b>OS-thread-safety</b> is of concern in the C stubs, whilst - building up the expressions to run. These need to have exclusive - access to the heap whilst allocating in it. Also, there needs to - be some guarantee that no GC will happen in between the - <code>deRefStablePtr</code> call and when <code>rts_eval[IO]</code> - starts running. At the moment there are no guarantees for - either property. This needs to be sorted out before the - implementation can be regarded as fully safe to use. 
- -<p><small> - -<!-- hhmts start --> -Last modified: Weds 27 Feb 02 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/ghci.html b/docs/comm/the-beast/ghci.html deleted file mode 100644 index b893acdeb4..0000000000 --- a/docs/comm/the-beast/ghci.html +++ /dev/null @@ -1,407 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - GHCi</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - GHCi</h1> - - This isn't a coherent description of how GHCi works, sorry. What - it is (currently) is a dumping ground for various bits of info - pertaining to GHCi, which ought to be recorded somewhere. - - <h2>Debugging the interpreter</h2> - - The usual symptom is that some expression / program crashes when - running on the interpreter (commonly), or gets wierd results - (rarely). Unfortunately, finding out what the problem really is - has proven to be extremely difficult. In retrospect it may be - argued a design flaw that GHC's implementation of the STG - execution mechanism provides only the weakest of support for - automated internal consistency checks. This makes it hard to - debug. - <p> - Execution failures in the interactive system can be due to - problems with the bytecode interpreter, problems with the bytecode - generator, or problems elsewhere. From the bugs seen so far, - the bytecode generator is often the culprit, with the interpreter - usually being correct. - <p> - Here are some tips for tracking down interactive nonsense: - <ul> - <li>Find the smallest source fragment which causes the problem. - <p> - <li>Using an RTS compiled with <code>-DDEBUG</code> (nb, that - means the RTS from the previous stage!), run with <code>+RTS - -D2</code> to get a listing in great detail from the - interpreter. Note that the listing is so voluminous that - this is impractical unless you have been diligent in - the previous step. - <p> - <li>At least in principle, using the trace and a bit of GDB - poking around at the time of death, you can figure out what - the problem is. In practice you quickly get depressed at - the hopelessness of ever making sense of the mass of - details. Well, I do, anyway. - <p> - <li><code>+RTS -D2</code> tries hard to print useful - descriptions of what's on the stack, and often succeeds. - However, it has no way to map addresses to names in - code/data loaded by our runtime linker. So the C function - <code>ghci_enquire</code> is provided. Given an address, it - searches the loaded symbol tables for symbols close to that - address. You can run it from inside GDB: - <pre> - (gdb) p ghci_enquire ( 0x50a406f0 ) - 0x50a406f0 + -48 == `PrelBase_Czh_con_info' - 0x50a406f0 + -12 == `PrelBase_Izh_static_info' - 0x50a406f0 + -48 == `PrelBase_Czh_con_entry' - 0x50a406f0 + -24 == `PrelBase_Izh_con_info' - 0x50a406f0 + 16 == `PrelBase_ZC_con_entry' - 0x50a406f0 + 0 == `PrelBase_ZMZN_static_entry' - 0x50a406f0 + -36 == `PrelBase_Czh_static_entry' - 0x50a406f0 + -24 == `PrelBase_Izh_con_entry' - 0x50a406f0 + 64 == `PrelBase_EQ_static_info' - 0x50a406f0 + 0 == `PrelBase_ZMZN_static_info' - 0x50a406f0 + 48 == `PrelBase_LT_static_entry' - $1 = void - </pre> - In this case the enquired-about address is - <code>PrelBase_ZMZN_static_entry</code>. If no symbols are - close to the given addr, nothing is printed. Not a great - mechanism, but better than nothing. 
- <p> - <li>We have had various problems in the past due to the bytecode - generator (<code>compiler/ghci/ByteCodeGen.lhs</code>) being - confused about the true set of free variables of an - expression. The compilation scheme for <code>let</code>s - applies the BCO for the RHS of the let to its free - variables, so if the free-var annotation is wrong or - misleading, you end up with code which has wrong stack - offsets, which is usually fatal. - <p> - <li>The baseline behaviour of the interpreter is to interpret - BCOs, and hand all other closures back to the scheduler for - evaluation. However, this causes a huge number of expensive - context switches, so the interpreter knows how to enter the - most common non-BCO closure types by itself. - <p> - These optimisations complicate the interpreter. - If you think you have an interpreter problem, re-enable the - define <code>REFERENCE_INTERPRETER</code> in - <code>ghc/rts/Interpreter.c</code>. All optimisations are - thereby disabled, giving the baseline - I-only-know-how-to-enter-BCOs behaviour. - <p> - <li>Following the traces is often problematic because execution - hops back and forth between the interpreter, which is - traced, and compiled code, which you can't see. - Particularly annoying is when the stack looks OK in the - interpreter, then compiled code runs for a while, and later - we arrive back in the interpreter, with the stack corrupted, - and usually in a completely different place from where we - left off. - <p> - If this is biting you baaaad, it may be worth copying - sources for the compiled functions causing the problem, into - your interpreted module, in the hope that you stay in the - interpreter more of the time. Of course this doesn't work - very well if you've defined - <code>REFERENCE_INTERPRETER</code> in - <code>ghc/rts/Interpreter.c</code>. - <p> - <li>There are various commented-out pieces of code in - <code>Interpreter.c</code> which can be used to get the - stack sanity-checked after every entry, and even after after - every bytecode instruction executed. Note that some - bytecodes (<code>PUSH_UBX</code>) leave the stack in - an unwalkable state, so the <code>do_print_stack</code> - local variable is used to suppress the stack walk after - them. - </ul> - - - <h2>Useful stuff to know about the interpreter</h2> - - The code generation scheme is straightforward (naive, in fact). - <code>-ddump-bcos</code> prints each BCO along with the Core it - was generated from, which is very handy. - <ul> - <li>Simple lets are compiled in-line. For the general case, let - v = E in ..., E is compiled into a new BCO which takes as - args its free variables, and v is bound to AP(the new BCO, - free vars of E). - <p> - <li><code>case</code>s as usual, become: push the return - continuation, enter the scrutinee. There is some magic to - make all combinations of compiled/interpreted calls and - returns work, described below. In the interpreted case, all - case alts are compiled into a single big return BCO, which - commences with instructions implementing a switch tree. - </ul> - <p> - <b>ARGCHECK magic</b> - <p> - You may find ARGCHECK instructions at the start of BCOs which - don't appear to need them; case continuations in particular. - These play an important role: they force objects which should - evaluated to BCOs to actually be BCOs. - <p> - Typically, there may be an application node somewhere in the heap. - This is a thunk which when leant on turns into a BCO for a return - continuation. 
The thunk may get entered with an update frame on - top of the stack. This is legitimate since from one viewpoint - this is an AP which simply reduces to a data object, so does not - have functional type. However, once the AP turns itself into a - BCO (so to speak) we cannot simply enter the BCO, because that - expects to see args on top of the stack, not an update frame. - Therefore any BCO which expects something on the stack above an - update frame, even non-function BCOs, start with an ARGCHECK. In - this case it fails, the update is done, the update frame is - removed, and the BCO re-entered. Subsequent entries of the BCO of - course go unhindered. - <p> - The optimised (<code>#undef REFERENCE_INTERPRETER</code>) handles - this case specially, so that a trip through the scheduler is - avoided. When reading traces from <code>+RTS -D2 -RTS</code>, you - may see BCOs which appear to execute their initial ARGCHECK insn - twice. The first time it fails; the interpreter does the update - immediately and re-enters with no further comment. - <p> - This is all a bit ugly, and, as SimonM correctly points out, it - would have been cleaner to make BCOs unpointed (unthunkable) - objects, so that a pointer to something <code>:: BCO#</code> - really points directly at a BCO. - <p> - <b>Stack management</b> - <p> - There isn't any attempt to stub the stack, minimise its growth, or - generally remove unused pointers ahead of time. This is really - due to lazyness on my part, although it does have the minor - advantage that doing something cleverer would almost certainly - increase the number of bytecodes that would have to be executed. - Of course we SLIDE out redundant stuff, to get the stack back to - the sequel depth, before returning a HNF, but that's all. As - usual this is probably a cause of major space leaks. - <p> - <b>Building constructors</b> - <p> - Constructors are built on the stack and then dumped into the heap - with a single PACK instruction, which simply copies the top N - words of the stack verbatim into the heap, adds an info table, and zaps N - words from the stack. The constructor args are pushed onto the - stack one at a time. One upshot of this is that unboxed values - get pushed untaggedly onto the stack (via PUSH_UBX), because that's how they - will be in the heap. That in turn means that the stack is not - always walkable at arbitrary points in BCO execution, although - naturally it is whenever GC might occur. - <p> - Function closures created by the interpreter use the AP-node - (tagged) format, so although their fields are similarly - constructed on the stack, there is never a stack walkability - problem. - <p> - <b>Unpacking constructors</b> - <p> - At the start of a case continuation, the returned constructor is - unpacked onto the stack, which means that unboxed fields have to - be tagged. Rather than burdening all such continuations with a - complex, general mechanism, I split it into two. The - allegedly-common all-pointers case uses a single UNPACK insn - to fish out all fields with no further ado. The slow case uses a - sequence of more complex UPK_TAG insns, one for each field (I - think). This seemed like a good compromise to me. - <p> - <b>Perspective</b> - <p> - I designed the bytecode mechanism with the experience of both STG - hugs and Classic Hugs in mind. The latter has an small - set of bytecodes, a small interpreter loop, and runs amazingly - fast considering the cruddy code it has to interpret. 
The former - had a large interpretative loop with many different opcodes, - including multiple minor variants of the same thing, which - made it difficult to optimise and maintain, yet it performed more - or less comparably with Classic Hugs. - <p> - My design aims were therefore to minimise the interpreter's - complexity whilst maximising performance. This means reducing the - number of opcodes implemented, whilst reducing the number of insns - despatched. In particular there are only two opcodes, PUSH_UBX - and UPK_TAG, which deal with tags. STG Hugs had dozens of opcodes - for dealing with tagged data. In cases where the common - all-pointers case is significantly simpler (UNPACK) I deal with it - specially. Finally, the number of insns executed is reduced a - little by merging multiple pushes, giving PUSH_LL and PUSH_LLL. - These opcode pairings were determined by using the opcode-pair - frequency profiling stuff which is ifdef-d out in - <code>Interpreter.c</code>. These significantly improve - performance without having much effect on the uglyness or - complexity of the interpreter. - <p> - Overall, the interpreter design is something which turned out - well, and I was pleased with it. Unfortunately I cannot say the - same of the bytecode generator. - - <h2><code>case</code> returns between interpreted and compiled code</h2> - - Variants of the following scheme have been drifting around in GHC - RTS documentation for several years. Since what follows is - actually what is implemented, I guess it supersedes all other - documentation. Beware; the following may make your brain melt. - In all the pictures below, the stack grows downwards. - <p> - <b>Returning to interpreted code</b>. - <p> - Interpreted returns employ a set of polymorphic return infotables. - Each element in the set corresponds to one of the possible return - registers (R1, D1, F1) that compiled code will place the returned - value in. In fact this is a bit misleading, since R1 can be used - to return either a pointer or an int, and we need to distinguish - these cases. So, supposing the set of return registers is {R1p, - R1n, D1, F1}, there would be four corresponding infotables, - <code>stg_ctoi_ret_R1p_info</code>, etc. In the pictures below we - call them <code>stg_ctoi_ret_REP_info</code>. - <p> - These return itbls are polymorphic, meaning that all 8 vectored - return codes and the direct return code are identical. - <p> - Before the scrutinee is entered, the stack is arranged like this: - <pre> - | | - +--------+ - | BCO | -------> the return contination BCO - +--------+ - | itbl * | -------> stg_ctoi_ret_REP_info, with all 9 codes as follows: - +--------+ - BCO* bco = Sp[1]; - push R1/F1/D1 depending on REP - push bco - yield to sched - </pre> - On entry, the interpreted contination BCO expects the stack to look - like this: - <pre> - | | - +--------+ - | BCO | -------> the return contination BCO - +--------+ - | itbl * | -------> ret_REP_ctoi_info, with all 9 codes as follows: - +--------+ - : VALUE : (the returned value, shown with : since it may occupy - +--------+ multiple stack words) - </pre> - A machine code return will park the returned value in R1/F1/D1, - and enter the itbl on the top of the stack. Since it's our magic - itbl, this pushes the returned value onto the stack, which is - where the interpreter expects to find it. It then pushes the BCO - (again) and yields. The scheduler removes the BCO from the top, - and enters it, so that the continuation is interpreted with the - stack as shown above. 
- <p> - An interpreted return will create the value to return at the top - of the stack. It then examines the return itbl, which must be - immediately underneath the return value, to see if it is one of - the magic <code>stg_ctoi_ret_REP_info</code> set. Since this is so, - it knows it is returning to an interpreted contination. It - therefore simply enters the BCO which it assumes it immediately - underneath the itbl on the stack. - - <p> - <b>Returning to compiled code</b>. - <p> - Before the scrutinee is entered, the stack is arranged like this: - <pre> - ptr to vec code 8 ------> return vector code 8 - | | .... - +--------+ ptr to vec code 1 ------> return vector code 1 - | itbl * | -- Itbl end - +--------+ \ .... - \ Itbl start - ----> direct return code - </pre> - The scrutinee value is then entered. - The case continuation(s) expect the stack to look the same, with - the returned HNF in a suitable return register, R1, D1, F1 etc. - <p> - A machine code return knows whether it is doing a vectored or - direct return, and, if the former, which vector element it is. - So, for a direct return we jump to <code>Sp[0]</code>, and for a - vectored return, jump to <code>((CodePtr*)(Sp[0]))[ - ITBL_LENGTH - - vector number ]</code>. This is (of course) the scheme that - compiled code has been using all along. - <p> - An interpreted return will, as described just above, have examined - the itbl immediately beneath the return value it has just pushed, - and found it not to be one of the <code>ret_REP_ctoi_info</code> set, - so it knows this must be a return to machine code. It needs to - pop the return value, currently on the stack, into R1/F1/D1, and - jump through the info table. Unfortunately the first part cannot - be accomplished directly since we are not in Haskellised-C world. - <p> - We therefore employ a second family of magic infotables, indexed, - like the first, on the return representation, and therefore with - names of the form <code>stg_itoc_ret_REP_info</code>. (Note: - <code>itoc</code>; the previous bunch were <code>ctoi</code>). - This is pushed onto the stack (note, tagged values have their tag - zapped), giving: - <pre> - | | - +--------+ - | itbl * | -------> arbitrary machine code return itbl - +--------+ - : VALUE : (the returned value, possibly multiple words) - +--------+ - | itbl * | -------> stg_itoc_ret_REP_info, with code: - +--------+ - pop myself (stg_itoc_ret_REP_info) off the stack - pop return value into R1/D1/F1 - do standard machine code return to itbl at t.o.s. - </pre> - We then return to the scheduler, asking it to enter the itbl at - t.o.s. When entered, <code>stg_itoc_ret_REP_info</code> removes - itself from the stack, pops the return value into the relevant - return register, and returns to the itbl to which we were trying - to return in the first place. - <p> - Amazingly enough, this stuff all actually works! Well, mostly ... - <p> - <b>Unboxed tuples: a Right Royal Spanner In The Works</b> - <p> - The above scheme depends crucially on having magic infotables - <code>stg_{itoc,ctoi}_ret_REP_info</code> for each return - representation <code>REP</code>. It unfortunately fails miserably - in the face of unboxed tuple returns, because the set of required - tables would be infinite; this despite the fact that for any given - unboxed tuple return type, the scheme could be made to work fine. 
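      <p>
      For a concrete feel of the problem: every distinct unboxed result
      shape, such as the one below, would need its own pair of magic
      return infotables (the function here is made up for illustration):
      <pre>
{-# LANGUAGE MagicHash, UnboxedTuples #-}
module UnboxedReturn where

import GHC.Exts (Int#, (+#), (*#))

-- One particular unboxed-tuple return shape; (# Double#, Int# #),
-- (# Int#, Int#, Int# #) and so on would each be yet another case.
sumAndProduct :: Int# -> Int# -> (# Int#, Int# #)
sumAndProduct x y = (# x +# y, x *# y #)
      </pre>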
- <p> - This is a serious problem, because it prevents interpreted - code from doing <code>IO</code>-typed returns, since <code>IO - t</code> is implemented as <code>(# t, RealWorld# #)</code> or - thereabouts. This restriction in turn rules out FFI stuff in the - interpreter. Not good. - <p> - Although we have no way to make general unboxed tuples work, we - can at least make <code>IO</code>-types work using the following - ultra-kludgey observation: <code>RealWorld#</code> doesn't really - exist and so has zero size, in compiled code. In turn this means - that a type of the form <code>(# t, RealWorld# #)</code> has the - same representation as plain <code>t</code> does. So the bytecode - generator, whilst rejecting code with general unboxed tuple - returns, recognises and accepts this special case. Which means - that <code>IO</code>-typed stuff works in the interpreter. Just. - <p> - If anyone asks, I will claim I was out of radio contact, on a - 6-month walking holiday to the south pole, at the time this was - ... er ... dreamt up. - - -<p><small> - -<!-- hhmts start --> -Last modified: Thursday February 7 15:33:49 GMT 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/main.html b/docs/comm/the-beast/main.html deleted file mode 100644 index 332ffaa501..0000000000 --- a/docs/comm/the-beast/main.html +++ /dev/null @@ -1,35 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Compiling and running the Main module</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>Compiling and running the Main module</h1> - -GHC allows you to determine which module contains the "main" function, and -what that function is called, via the <code>-fmain-is</code> flag. The trouble is -that the runtime system is fixed, so what symbol should it link to? -<p> -The current solution is this. Suppose the main function is <code>Foo.run</code>. -<ul> -<li> -Then, when compiling module <code>Foo</code>, GHC adds an extra definition: -<pre> - :Main.main = runIO Foo.run -</pre> -Now the RTS can invoke <code>:Main.main</code> to start the program. (This extra -definition is inserted in TcRnDriver.checkMain.) -<p><li> -Before starting the program, though, the RTS also initialises the module tree -by calling <code>init_:Main</code>, so when compiling the main module (Foo in this case), -as well as generating <code>init_Foo</code> as usual, GHC also generates -<pre> - init_zcMain() { init_Foo; } -</pre> -This extra initialisation code is generated in CodeGen.mkModuleInit. -</ul> - - </body> -</html> diff --git a/docs/comm/the-beast/mangler.html b/docs/comm/the-beast/mangler.html deleted file mode 100644 index 1ad80f0d5c..0000000000 --- a/docs/comm/the-beast/mangler.html +++ /dev/null @@ -1,79 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Evil Mangler</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The Evil Mangler</h1> - <p> - The Evil Mangler (EM) is a Perl script invoked by the <a - href="driver.html">Glorious Driver</a> after the C compiler (gcc) has - translated the GHC-produced C code into assembly. Consequently, it is - only of interest if <code>-fvia-C</code> is in effect (either explicitly - or implicitly). 
- - <h4>Its purpose</h4> - <p> - The EM reads the assembly produced by gcc and re-arranges code blocks as - well as nukes instructions that it considers <em>non-essential.</em> It - derives it evilness from its utterly ad hoc, machine, compiler, and - whatnot dependent design and implementation. More precisely, the EM - performs the following tasks: - <ul> - <li>The code executed when a closure is entered is moved adjacent to - that closure's infotable. Moreover, the order of the info table - entries is reversed. Also, SRT pointers are removed from closures that - don't need them (non-FUN, RET and THUNK ones). - <li>Function prologue and epilogue code is removed. (GHC generated code - manages its own stack and uses the system stack only for return - addresses and during calls to C code.) - <li>Certain code patterns are replaced by simpler code (eg, loads of - fast entry points followed by indirect jumps are replaced by direct - jumps to the fast entry point). - </ul> - - <h4>Implementation</h4> - <p> - The EM is located in the Perl script <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/driver/mangler/ghc-asm.lprl"><code>ghc-asm.lprl</code></a>. - The script reads the <code>.s</code> file and chops it up into - <em>chunks</em> (that's how they are actually called in the script) that - roughly correspond to basic blocks. Each chunk is annotated with an - educated guess about what kind of code it contains (e.g., infotable, - fast entry point, slow entry point, etc.). The annotations also contain - the symbol introducing the chunk of assembly and whether that chunk has - already been processed or not. - <p> - The parsing of the input into chunks as well as recognising assembly - instructions that are to be removed or altered is based on a large - number of Perl regular expressions sprinkled over the whole code. These - expressions are rather fragile as they heavily rely on the structure of - the generated code - in fact, they even rely on the right amount of - white space and thus on the formatting of the assembly. - <p> - Afterwards, the chunks are reordered, some of them purged, and some - stripped of some useless instructions. Moreover, some instructions are - manipulated (eg, loads of fast entry points followed by indirect jumps - are replaced by direct jumps to the fast entry point). - <p> - The EM knows which part of the code belongs to function prologues and - epilogues as <a href="../rts-libs/stgc.html">STG C</a> adds tags of the - form <code>--- BEGIN ---</code> and <code>--- END ---</code> the - assembler just before and after the code proper of a function starts. - It adds these tags using gcc's <code>__asm__</code> feature. - <p> - <strong>Update:</strong> Gcc 2.96 upwards performs more aggressive basic - block re-ordering and dead code elimination. This seems to make the - whole <code>--- END ---</code> tag business redundant -- in fact, if - proper code is generated, no <code>--- END ---</code> tags survive gcc - optimiser. 
- - <p><small> -<!-- hhmts start --> -Last modified: Sun Feb 17 17:55:47 EST 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/modules.html b/docs/comm/the-beast/modules.html deleted file mode 100644 index a6655a68a7..0000000000 --- a/docs/comm/the-beast/modules.html +++ /dev/null @@ -1,80 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Modules, ModuleNames and Packages</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>Modules, ModuleNames and Packages</h1> - - <p>This section describes the datatypes <code>ModuleName</code> - <code>Module</code> and <code>PackageName</code> all available - from the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/Module.lhs"><code>Module</code></a>.<p> - - <h2>Packages</h2> - - <p>A package is a collection of (zero or more) Haskell modules, - together with some information about external libraries, extra C - compiler options, and other things that this collection of modules - requires. When using DLLs on windows (or shared libraries on a - Unix system; currently unsupported), a package can consist of only - a single shared library of Haskell code; the reason for this is - described below. - - <p>Packages are further described in the User's Guide <a - href="http://www.haskell.org/ghc/docs/latest/packages.html">here</a>. - - <h2>The ModuleName type</h2> - - <p>At the bottom of the hierarchy is a <code>ModuleName</code>, - which, as its name suggests, is simply the name of a module. It - is represented as a Z-encoded FastString, and is an instance of - <code>Uniquable</code> so we can build <code>FiniteMap</code>s - with <code>ModuleName</code>s as the keys. - - <p>A <code>ModuleName</code> can be built from a - <code>String</code>, using the <code>mkModuleName</code> function. - - <h2>The Module type</h2> - - <p>For a given module, the compiler also needs to know whether the - module is in the <em>home package</em>, or in another package. - This distinction is important for two reasons: - - <ul> - <li><p>When generating code to call a function in another package, - the compiler might have to generate a cross-DLL call, which is - different from an intra-DLL call (hence the restriction that the - code in a package can only reside in a single DLL). - - <li><p>We avoid putting version information in an interface file - for entities defined in another package, on the grounds that other - packages are generally "stable". This also helps keep the size of - interface files down. - </ul> - - <p>The <code>Module</code> type contains a <code>ModuleName</code> - and a <code>PackageInfo</code> field. The - <code>PackageInfo</code> indicates whether the given - <code>Module</code> comes from the current package or from another - package. - - <p>To get the actual package in which a given module resides, you - have to read the interface file for that module, which contains - the package name (actually the value of the - <code>-package-name</code> flag when that module was built). This - information is currently unused inside the compiler, but we might - make use of it in the future, especially with the advent of - hierarchical modules, to allow the compiler to automatically - figure out which packages a program should be linked with, and - thus avoid the need to specify <code>-package</code> options on - the command line. 
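    <p>The shape of these types can be summarised with a toy model (the
    real definitions live in <code>basicTypes/Module.lhs</code> and are
    richer; the constructor names below are illustrative only):
    <pre>
newtype ModuleName = ModuleName String    -- really a Z-encoded FastString
  deriving (Eq, Ord, Show)

data PackageInfo = ThisPackage | AnotherPackage   -- home package or not?
  deriving (Eq, Show)

data Module = Module ModuleName PackageInfo
  deriving (Eq, Show)

mkModuleName :: String -> ModuleName
mkModuleName = ModuleName
    </pre>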
- - <p><code>Module</code>s are also instances of - <code>Uniquable</code>, and indeed the unique of a - <code>Module</code> is the same as the unique of the underlying - <code>ModuleName</code>. - </body> -</html> diff --git a/docs/comm/the-beast/names.html b/docs/comm/the-beast/names.html deleted file mode 100644 index 061fae3ebf..0000000000 --- a/docs/comm/the-beast/names.html +++ /dev/null @@ -1,169 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The truth about names: OccNames, and Names</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The truth about names: OccNames, and Names</h1> - <p> - Every entity (type constructor, class, identifier, type variable) has a - <code>Name</code>. The <code>Name</code> type is pervasive in GHC, and - is defined in <code>basicTypes/Name.lhs</code>. Here is what a Name - looks like, though it is private to the Name module. - </p> - <blockquote> - <pre> -data Name = Name { - n_sort :: NameSort, -- What sort of name it is - n_occ :: !OccName, -- Its occurrence name - n_uniq :: Unique, -- Its identity - n_loc :: !SrcLoc -- Definition site - }</pre> - </blockquote> - <ul> - <li> The <code>n_sort</code> field says what sort of name this is: see - <a href="#sort">NameSort below</a>. - <li> The <code>n_occ</code> field gives the "occurrence name" of the - Name; see - <a href="#occname">OccName below</a>. - <li> The <code>n_uniq</code> field allows fast tests for equality of - Names. - <li> The <code>n_loc</code> field gives some indication of where the - name was bound. - </ul> - - <h2><a name="sort">The <code>NameSort</code> of a <code>Name</code></a></h2> - <p> - There are four flavours of <code>Name</code>: - </p> - <blockquote> - <pre> -data NameSort - = External Module (Maybe Name) - -- (Just parent) => this Name is a subordinate name of 'parent' - -- e.g. data constructor of a data type, method of a class - -- Nothing => not a subordinate - - | WiredIn Module (Maybe Name) TyThing BuiltInSyntax - -- A variant of External, for wired-in things - - | Internal -- A user-defined Id or TyVar - -- defined in the module being compiled - - | System -- A system-defined Id or TyVar. Typically the - -- OccName is very uninformative (like 's')</pre> - </blockquote> - <ul> - <li>Here are the sorts of Name an entity can have: - <ul> - <li> Class, TyCon: External. - <li> Id: External, Internal, or System. - <li> TyVar: Internal, or System. - </ul> - </li> - <li>An <code>External</code> name has a globally-unique - (module name, occurrence name) pair, namely the - <em>original name</em> of the entity, - describing where the thing was originally defined. So for example, - if we have - <blockquote> - <pre> -module M where - f = e1 - g = e2 - -module A where - import qualified M as Q - import M - a = Q.f + g</pre> - </blockquote> - <p> - then the RdrNames for "a", "Q.f" and "g" get replaced (by the - Renamer) by the Names "A.a", "M.f", and "M.g" respectively. - </p> - </li> - <li>An <code>InternalName</code> - has only an occurrence name. Distinct InternalNames may have the same - occurrence name; use the Unique to distinguish them. - </li> - <li>An <code>ExternalName</code> has a unique that never changes. It - is never cloned. This is important, because the simplifier invents - new names pretty freely, but we don't want to lose the connnection - with the type environment (constructed earlier). 
An - <code>InternalName</code> name can be cloned freely. - </li> - <li><strong>Before CoreTidy</strong>: the Ids that were defined at top - level in the original source program get <code>ExternalNames</code>, - whereas extra top-level bindings generated (say) by the type checker - get <code>InternalNames</code>. q This distinction is occasionally - useful for filtering diagnostic output; e.g. for -ddump-types. - </li> - <li><strong>After CoreTidy</strong>: An Id with an - <code>ExternalName</code> will generate symbols that - appear as external symbols in the object file. An Id with an - <code>InternalName</code> cannot be referenced from outside the - module, and so generates a local symbol in the object file. The - CoreTidy pass makes the decision about which names should be External - and which Internal. - </li> - <li>A <code>System</code> name is for the most part the same as an - <code>Internal</code>. Indeed, the differences are purely cosmetic: - <ul> - <li>Internal names usually come from some name the - user wrote, whereas a System name has an OccName like "a", or "t". - Usually there are masses of System names with the same OccName but - different uniques, whereas typically there are only a handful of - distince Internal names with the same OccName. - </li> - <li>Another difference is that when unifying the type checker tries - to unify away type variables with System names, leaving ones with - Internal names (to improve error messages). - </li> - </ul> - </li> - </ul> - - <h2><a name="occname">Occurrence names: <code>OccName</code></a></h2> - <p> - An <code>OccName</code> is more-or-less just a string, like "foo" or - "Tree", giving the (unqualified) name of an entity. - </p> - <p> - Well, not quite just a string, because in Haskell a name like "C" could - mean a type constructor or data constructor, depending on context. So - GHC defines a type <tt>OccName</tt> (defined in - <tt>basicTypes/OccName.lhs</tt>) that is a pair of a <tt>FastString</tt> - and a <tt>NameSpace</tt> indicating which name space the name is drawn - from: - <blockquote> - <pre> -data OccName = OccName NameSpace EncodedFS</pre> - </blockquote> - <p> - The <tt>EncodedFS</tt> is a synonym for <tt>FastString</tt> indicating - that the string is Z-encoded. (Details in <tt>OccName.lhs</tt>.) - Z-encoding encodes funny characters like '%' and '$' into alphabetic - characters, like "zp" and "zd", so that they can be used in object-file - symbol tables without confusing linkers and suchlike. 
- </p> - <p> - The name spaces are: - </p> - <ul> - <li> <tt>VarName</tt>: ordinary variables</li> - <li> <tt>TvName</tt>: type variables</li> - <li> <tt>DataName</tt>: data constructors</li> - <li> <tt>TcClsName</tt>: type constructors and classes (in Haskell they - share a name space) </li> - </ul> - - <small> -<!-- hhmts start --> -Last modified: Wed May 4 14:57:55 EST 2005 -<!-- hhmts end --> - </small> - </body> -</html> - diff --git a/docs/comm/the-beast/ncg.html b/docs/comm/the-beast/ncg.html deleted file mode 100644 index 84beac2d51..0000000000 --- a/docs/comm/the-beast/ncg.html +++ /dev/null @@ -1,749 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Native Code Generator</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The Native Code Generator</h1> - <p> - On some platforms (currently x86 and PowerPC, with bitrotted - support for Sparc and Alpha), GHC can generate assembly code - directly, without having to go via C. This can sometimes almost - halve compilation time, and avoids the fragility and - horribleness of the <a href="mangler.html">mangler</a>. The NCG - is enabled by default for - non-optimising compilation on supported platforms. For most programs - it generates code which runs only 1-3% slower - (depending on platform and type of code) than that - created by gcc on x86s, so it is well worth using even with - optimised compilation. FP-intensive x86 programs see a bigger - slowdown, and all Sparc code runs about 5% slower due to - us not filling branch delay slots. - <p> - The NCG has always been something of a second-class citizen - inside GHC, an unloved child, rather. This means that its - integration into the compiler as a whole is rather clumsy, which - brings some problems described below. That apart, the NCG - proper is fairly cleanly designed, as target-independent as it - reasonably can be, and so should not be difficult to retarget. - <p> - <b>NOTE!</b> The native code generator was largely rewritten as part - of the C-- backend changes, around May 2004. Unfortunately the - rest of this document still refers to the old version, and was written - with relation to the CVS head as of end-Jan 2002. Some of it is relevant, - some of it isn't. - - <h2>Overview</h2> - The top-level code generator fn is - <p> - <code>absCtoNat :: AbstractC -> UniqSM (SDoc, Pretty.Doc)</code> - <p> - The returned <code>SDoc</code> is for debugging, so is empty unless - you specify <code>-ddump-stix</code>. The <code>Pretty.Doc</code> - bit is the final assembly code. Translation involves three main - phases, the first and third of which are target-independent. - <ul> - <li><b>Translation into the <code>Stix</code> representation.</b> Stix - is a simple tree-like RTL-style language, in which you can - mention: - <p> - <ul> - <li>An infinite number of temporary, virtual registers. - <li>The STG "magic" registers (<code>MagicId</code>), such as - the heap and stack pointers. - <li>Literals and low-level machine ops (<code>MachOp</code>). - <li>Simple address computations. - <li>Reads and writes of: memory, virtual regs, and various STG - regs. - <li>Labels and <code>if ... goto ...</code> style control-flow. 
- </ul> - <p> - Stix has two main associated types: - <p> - <ul> - <li><code>StixStmt</code> -- trees executed for their side - effects: assignments, control transfers, and auxiliary junk - such as segment changes and literal data. - <li><code>StixExpr</code> -- trees which denote a value. - </ul> - <p> - Translation into Stix is almost completely - target-independent. Needed dependencies are knowledge of - word size and endianness, used when generating code to do - deal with half-word fields in info tables. This could be - abstracted out easily enough. Also, the Stix translation - needs to know which <code>MagicId</code>s map to registers - on the given target, and which are stored in offsets from - <code>BaseReg</code>. - <p> - After initial Stix generation, the trees are cleaned up with - constant-folding and a little copy-propagation ("Stix - inlining", as the code misleadingly calls it). We take - the opportunity to translate <code>MagicId</code>s which are - stored in memory on the given target, into suitable memory - references. Those which are stored in registers are left - alone. There is also a half-hearted attempt to lift literal - strings to the top level in cases where nested strings have - been observed to give incorrect code in the past. - <p> - Primitive machine-level operations will already be phrased in - terms of <code>MachOp</code>s in the presented Abstract C, and - these are passed through unchanged. We comment only that the - <code>MachOp</code>s have been chosen so as to be easy to - implement on all targets, and their meaning is intended to be - unambiguous, and the same on all targets, regardless of word - size or endianness. - <p> - <b>A note on <code>MagicId</code>s.</b> - Those which are assigned to - registers on the current target are left unmodified. Those - which are not are stored in memory as offsets from - <code>BaseReg</code> (which is assumed to permanently have the - value <code>(&MainCapability.r)</code>), so the constant folder - calculates the offsets and inserts suitable loads/stores. One - complication is that not all archs have <code>BaseReg</code> - itself in a register, so for those (sparc), we instead - generate the address as an offset from the static symbol - <code>MainCapability</code>, since the register table lives in - there. - <p> - Finally, <code>BaseReg</code> does occasionally itself get - mentioned in Stix expression trees, and in this case what is - denoted is precisely <code>(&MainCapability.r)</code>, not, as - in all other cases, the value of memory at some offset from - the start of the register table. Since what it denotes is an - r-value and not an l-value, assigning <code>BaseReg</code> is - meaningless, so the machinery checks to ensure this never - happens. All these details are taken into account by the - constant folder. - <p> - <li><b>Instruction selection.</b> This is the only majorly - target-specific phase. It turns Stix statements and - expressions into sequences of <code>Instr</code>, a data - type which is different for each architecture. - <code>Instr</code>, unsurprisingly, has various supporting - types, such as <code>Reg</code>, <code>Operand</code>, - <code>Imm</code>, etc. The generated instructions may refer - to specific machine registers, or to arbitrary virtual - registers, either those created within the instruction - selector, or those mentioned in the Stix passed to it. - <p> - The instruction selectors live in <code>MachCode.lhs</code>. 
- The core functions, for each target, are: - <p> - <code> - getAmode :: StixExpr -> NatM Amode - <br>getRegister :: StixExpr -> NatM Register - <br>assignMem_IntCode :: PrimRep -> StixExpr -> StixExpr -> NatM InstrBlock - <br>assignReg_IntCode :: PrimRep -> StixReg -> StixExpr -> NatM InstrBlock - </code> - <p> - The insn selectors use the "maximal munch" algorithm. The - bizarrely-misnamed <code>getRegister</code> translates - expressions. A simplified version of its type is: - <p> - <code>getRegister :: StixExpr -> NatM (OrdList Instr, Reg)</code> - <p> - That is: it (monadically) turns a <code>StixExpr</code> into a - sequence of instructions, and a register, with the meaning - that after executing the (possibly empty) sequence of - instructions, the (possibly virtual) register will - hold the resulting value. The real situation is complicated - by the presence of fixed registers, and is detailed below. - <p> - Maximal munch is a greedy algorithm and is known not to give - globally optimal code sequences, but it is good enough, and - fast and simple. Early incarnations of the NCG used something - more sophisticated, but that is long gone now. - <p> - Similarly, <code>getAmode</code> translates a value, intended - to denote an address, into a sequence of insns leading up to - a (processor-specific) addressing mode. This stuff could be - done using the general <code>getRegister</code> selector, but - would necessarily generate poorer code, because the calculated - address would be forced into a register, which might be - unnecessary if it could partially or wholly be calculated - using an addressing mode. - <p> - Finally, <code>assignMem_IntCode</code> and - <code>assignReg_IntCode</code> create instruction sequences to - calculate a value and store it in the given register, or at - the given address. Because these guys translate a statement, - not a value, they just return a sequence of insns and no - associated register. Floating-point and 64-bit integer - assignments have analogous selectors. - <p> - Apart from the complexities of fixed vs floating registers, - discussed below, the instruction selector is as simple - as it can be. It looks long and scary but detailed - examination reveals it to be fairly straightforward. - <p> - <li><b>Register allocation.</b> The register allocator, - <code>AsmRegAlloc.lhs</code> takes sequences of - <code>Instr</code>s which mention a mixture of real and - virtual registers, and returns a modified sequence referring - only to real ones. It is gloriously and entirely - target-independent. Well, not exactly true. Instead it - regards <code>Instr</code> (instructions) and <code>Reg</code> - (virtual and real registers) as abstract types, to which it has - the following interface: - <p> - <code> - insnFuture :: Instr -> InsnFuture - <br>regUsage :: Instr -> RegUsage - <br>patchRegs :: Instr -> (Reg -> Reg) -> Instr - </code> - <p> - <code>insnFuture</code> is used to (re)construct the graph of - all possible control transfers between the insns to be - allocated. <code>regUsage</code> returns the sets of registers - read and written by an instruction. And - <code>patchRegs</code> is used to apply the allocator's final - decision on virtual-to-real reg mapping to an instruction. - <p> - Clearly these 3 fns have to be written anew for each - architecture. They are defined in - <code>RegAllocInfo.lhs</code>. Think twice, no, thrice, - before modifying them: making false claims about insn - behaviour will lead to hard-to-find register allocation - errors. 
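<p>
As a rough picture of how a target-independent allocator can drive this three-function interface, here is a small, self-contained Haskell sketch. The <code>Instr</code> and <code>Reg</code> types and the naive mapping at the end are invented for illustration only; they are not GHC's definitions, and no liveness analysis is attempted.
<pre>
import           Data.List  (nub)
import qualified Data.Map   as M
import           Data.Maybe (fromMaybe)

-- Toy stand-ins for the allocator's view of a target: it never inspects
-- opcodes directly, it only asks which registers are read/written and how
-- to rewrite them.
data Reg   = Virtual Int | Real Int          deriving (Eq, Ord, Show)
data Instr = Mov Reg Reg | Add Reg Reg Reg   deriving Show

-- Analogue of regUsage: (registers read, registers written).
regUsage :: Instr -> ([Reg], [Reg])
regUsage (Mov src dst) = ([src],  [dst])
regUsage (Add a b dst) = ([a, b], [dst])

-- Analogue of patchRegs: apply the allocator's final decision everywhere.
patchRegs :: Instr -> (Reg -> Reg) -> Instr
patchRegs (Mov s d)   f = Mov (f s) (f d)
patchRegs (Add a b d) f = Add (f a) (f b) (f d)

-- A deliberately naive "allocation": hand each virtual register the next
-- real one, with no regard for liveness (doing that well is the whole
-- point of the real allocator).
allocate :: [Instr] -> [Instr]
allocate is = map (`patchRegs` assign) is
  where
    mentioned = nub [ r | i <- is, let (rs, ws) = regUsage i, r <- rs ++ ws ]
    vregs     = [ v | v@(Virtual _) <- mentioned ]
    table     = M.fromList (zip vregs (map Real [0 ..]))
    assign r  = fromMaybe r (M.lookup r table)

main :: IO ()
main = mapM_ print
         (allocate [ Mov (Virtual 1) (Virtual 2)
                   , Add (Virtual 2) (Virtual 1) (Virtual 3) ])
</pre>
The point is only that nothing in <code>allocate</code> pattern-matches on opcodes: everything goes through the <code>regUsage</code>/<code>patchRegs</code>-style interface, which is what keeps the real allocator target-independent.
</p>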
- <p> - <code>AsmRegAlloc.lhs</code> contains detailed comments about - how the allocator works. Here is a summary. The head honcho - <p> - <code>allocUsingTheseRegs :: [Instr] -> [Reg] -> (Bool, [Instr])</code> - <p> - takes a list of instructions and a list of real registers - available for allocation, and maps as many of the virtual regs - in the input into real ones as it can. The returned - <code>Bool</code> indicates whether or not it was - successful. If so, that's the end of it. If not, the caller - of <code>allocUsingTheseRegs</code> will attempt spilling. - More of that later. What <code>allocUsingTheseRegs</code> - does is: - <p> - <ul> - <li>Implicitly number each instruction by its position in the - input list. - <p> - <li>Using <code>insnFuture</code>, create the set of all flow - edges -- possible control transfers -- within this set of - insns. - <p> - <li>Using <code>regUsage</code> and iterating around the flow - graph from the previous step, calculate, for each virtual - register, the set of flow edges on which it is live. - <p> - <li>Make a real-register committment map, which gives the set - of edges for which each real register is committed (in - use). These sets are initially empty. For each virtual - register, attempt to find a real register whose current - committment does not intersect that of the virtual - register -- ie, is uncommitted on all edges that the - virtual reg is live. If successful, this means the vreg - can be assigned to the realreg, so add the vreg's set to - the realreg's committment. - <p> - <li>If all the vregs were assigned to a realreg, use - <code>patchInstr</code> to apply the mapping to the insns themselves. - </ul> - <p> - <b>Spilling</b> - <p> - If <code>allocUsingTheseRegs</code> fails, a baroque - mechanism comes into play. We now know that much simpler - schemes are available to do the same thing and give better - results. - Anyways: - <p> - The logic above <code>allocUsingTheseRegs</code>, in - <code>doGeneralAlloc</code> and <code>runRegAllocate</code>, - observe that allocation has failed with some set R of real - registers. So they apply <code>runRegAllocate</code> a second - time to the code, but remove (typically) two registers from R - before doing so. This naturally fails too, but returns a - partially-allocated sequence. <code>doGeneralAlloc</code> - then inserts spill code into the sequence, and finally re-runs - <code>allocUsingTheseRegs</code>, but supplying the original, - unadulterated R. This is guaranteed to succeed since the two - registers previously removed from R are sufficient to allocate - all the spill/restore instructions added. - <p> - Because x86 is very short of registers, and in the worst case - needs three removed from R, a softly-softly approach is used. - <code>doGeneralAlloc</code> first tries with zero regs removed - from R, then if that fails one, then two, etc. This means - <code>allocUsingTheseRegs</code> may get run several times - before a successful arrangement is arrived at. - <code>findReservedRegs</code> cooks up the sets of spill - registers to try with. - <p> - The resulting machinery is complicated and the generated spill - code is appalling. The saving grace is that spills are very - rare so it doesn't matter much. I did not invent this -- I inherited it. - <p> - <b>Dealing with common cases fast</b> - <p> - The entire reg-alloc mechanism described so far is general and - correct, but expensive overkill for many simple code blocks. 
- So to begin with we use - <code>doSimpleAlloc</code>, which attempts to do something - simple. It exploits the observation that if the total number - of virtual registers does not exceed the number of real ones - available, we can simply dole out a new realreg each time we - see mention of a new vreg, with no regard for control flow. - <code>doSimpleAlloc</code> therefore attempts this in a - single pass over the code. It gives up if it runs out of real - regs or sees any condition which renders the above observation - invalid (fixed reg uses, for example). - <p> - This clever hack handles the majority of code blocks quickly. - It was copied from the previous reg-allocator (the - Mattson/Partain/Marlow/Gill one). - </ul> - -<p> -<h2>Complications, observations, and possible improvements</h2> - -<h3>Real vs virtual registers in the instruction selectors</h3> - -The instruction selectors for expression trees, namely -<code>getRegister</code>, are complicated by the fact that some -expressions can only be computed into a specific register, whereas -the majority can be computed into any register. We take x86 as an -example, but the problem applies to all archs. -<p> -Terminology: <em>rreg</em> means real register, a real machine -register. <em>vreg</em> means one of an infinite set of virtual -registers. The type <code>Reg</code> is the sum of <em>rreg</em> and -<em>vreg</em>. The instruction selector generates sequences with -unconstrained use of vregs, leaving the register allocator to map them -all into rregs. -<p> -Now, where was I ? Oh yes. We return to the type of -<code>getRegister</code>, which despite its name, selects instructions -to compute the value of an expression tree. -<pre> - getRegister :: StixExpr -> NatM Register - - data Register - = Fixed PrimRep Reg InstrBlock - | Any PrimRep (Reg -> InstrBlock) - - type InstrBlock -- sequence of instructions -</pre> -At first this looks eminently reasonable (apart from the stupid -name). <code>getRegister</code>, and nobody else, knows whether or -not a given expression has to be computed into a fixed rreg or can be -computed into any rreg or vreg. In the first case, it returns -<code>Fixed</code> and indicates which rreg the result is in. In the -second case it defers committing to any specific target register by -returning a function from <code>Reg</code> to <code>InstrBlock</code>, -and the caller can specify the target reg as it sees fit. -<p> -Unfortunately, that forces <code>getRegister</code>'s callers (usually -itself) to use a clumsy and confusing idiom in the common case where -they do not care what register the result winds up in. The reason is -that although a value might be computed into a fixed rreg, we are -forbidden (on pain of segmentation fault :) from subsequently -modifying the fixed reg. This and other rules are record in "Rules of -the game" inside <code>MachCode.lhs</code>. -<p> -Why can't fixed registers be modified post-hoc? Consider a simple -expression like <code>Hp+1</code>. Since the heap pointer -<code>Hp</code> is definitely in a fixed register, call it R, -<code>getRegister</code> on subterm <code>Hp</code> will simply return -<code>Fixed</code> with an empty sequence and R. But we can't just -emit an increment instruction for R, because that trashes -<code>Hp</code>; instead we first have to copy it into a fresh vreg -and increment that. -<p> -With all that in mind, consider now writing a <code>getRegister</code> -clause for terms of the form <code>(1 + E)</code>. 
Contrived, yes, -but illustrates the matter. First we do -<code>getRegister</code> on E. Now we are forced to examine what -comes back. -<pre> - getRegister (OnePlus e) - = getRegister e `thenNat` \ e_result -> - case e_result of - Fixed e_code e_fixed - -> returnNat (Any IntRep (\dst -> e_code ++ [MOV e_fixed dst, INC dst])) - Any e_any - -> Any (\dst -> e_any dst ++ [INC dst]) -</pre> -This seems unreasonably cumbersome, yet the instruction selector is -full of such idioms. A good example of the complexities induced by -this scheme is shown by <code>trivialCode</code> for x86 in -<code>MachCode.lhs</code>. This deals with general integer dyadic -operations on x86 and has numerous cases. It was difficult to get -right. -<p> -An alternative suggestion is to simplify the type of -<code>getRegister</code> to this: -<pre> - getRegister :: StixExpr -> NatM (InstrBloc, VReg) - type VReg = .... a vreg ... -</pre> -and then we could safely write -<pre> - getRegister (OnePlus e) - = getRegister e `thenNat` \ (e_code, e_vreg) -> - returnNat (e_code ++ [INC e_vreg], e_vreg) -</pre> -which is about as straightforward as you could hope for. -Unfortunately, it requires <code>getRegister</code> to insert moves of -values which naturally compute into an rreg, into a vreg. Consider: -<pre> - 1 + ccall some-C-fn -</pre> -On x86 the ccall result is returned in rreg <code>%eax</code>. The -resulting sequence, prior to register allocation, would be: -<pre> - # push args - call some-C-fn - # move %esp to nuke args - movl %eax, %vreg - incl %vreg -</pre> -If, as is likely, <code>%eax</code> is not held live beyond this point -for any other purpose, the move into a fresh register is pointless; -we'd have been better off leaving the value in <code>%eax</code> as -long as possible. -<p> -The simplified <code>getRegister</code> story is attractive. It would -clean up the instruction selectors significantly and make it simpler -to write new ones. The only drawback is that it generates redundant -register moves. I suggest that eliminating these should be the job -of the register allocator. Indeed: -<ul> -<li>There has been some work on this already ("Iterated register - coalescing" ?), so this isn't a new idea. -<p> -<li>You could argue that the existing scheme inappropriately blurs the - boundary between the instruction selector and the register - allocator. The instruction selector should .. well .. just - select instructions, without having to futz around worrying about - what kind of registers subtrees get generated into. Register - allocation should be <em>entirely</em> the domain of the register - allocator, with the proviso that it should endeavour to allocate - registers so as to minimise the number of non-redundant reg-reg - moves in the final output. -</ul> - - -<h3>Selecting insns for 64-bit values/loads/stores on 32-bit platforms</h3> - -Note that this stuff doesn't apply on 64-bit archs, since the -<code>getRegister</code> mechanism applies there. - -The relevant functions are: -<pre> - assignMem_I64Code :: StixExpr -> StixExpr -> NatM InstrBlock - assignReg_I64Code :: StixReg -> StixExpr -> NatM InstrBlock - iselExpr64 :: StixExpr -> NatM ChildCode64 - - data ChildCode64 -- a.k.a "Register64" - = ChildCode64 - InstrBlock -- code - VRegUnique -- unique for the lower 32-bit temporary -</pre> -<code>iselExpr64</code> is the 64-bit, plausibly-named analogue of -<code>getRegister</code>, and <code>ChildCode64</code> is the analogue -of <code>Register</code>. 
The aim here was to generate working 64 -bit code as simply as possible. To this end, I used the -simplified <code>getRegister</code> scheme described above, in which -<code>iselExpr64</code>generates its results into two vregs which -can always safely be modified afterwards. -<p> -Virtual registers are, unsurprisingly, distinguished by their -<code>Unique</code>s. There is a small difficulty in how to -know what the vreg for the upper 32 bits of a value is, given the vreg -for the lower 32 bits. The simple solution adopted is to say that -any low-32 vreg may also have a hi-32 counterpart which shares the -same unique, but is otherwise regarded as a separate entity. -<code>getHiVRegFromLo</code> gets one from the other. -<pre> - data VRegUnique - = VRegUniqueLo Unique -- lower part of a split quantity - | VRegUniqueHi Unique -- upper part thereof -</pre> -Apart from that, 64-bit code generation is really simple. The sparc -and x86 versions are almost copy-n-pastes of each other, with minor -adjustments for endianness. The generated code isn't wonderful but -is certainly acceptable, and it works. - - - -<h3>Shortcomings and inefficiencies in the register allocator</h3> - -<h4>Redundant reconstruction of the control flow graph</h4> - -The allocator goes to considerable computational expense to construct -all the flow edges in the group of instructions it's allocating for, -by using the <code>insnFuture</code> function in the -<code>Instr</code> pseudo-abstract type. -<p> -This is really silly, because all that information is present at the -abstract C stage, but is thrown away in the translation to Stix. -So a good thing to do is to modify that translation to -produce a directed graph of Stix straight-line code blocks, -and to preserve that structure through the insn selector, so the -allocator can see it. -<p> -This would eliminate the fragile, hacky, arch-specific -<code>insnFuture</code> mechanism, and probably make the whole -compiler run measurably faster. Register allocation is a fair chunk -of the time of non-optimising compilation (10% or more), and -reconstructing the flow graph is an expensive part of reg-alloc. -It would probably accelerate the vreg liveness computation too. - -<h4>Really ridiculous method for doing spilling</h4> - -This is a more ambitious suggestion, but ... reg-alloc should be -reimplemented, using the scheme described in "Quality and speed in -linear-scan register allocation." (Traub?) For straight-line code -blocks, this gives an elegant one-pass algorithm for assigning -registers and creating the minimal necessary spill code, without the -need for reserving spill registers ahead of time. -<p> -I tried it in Rigr, replacing the previous spiller which used the -current GHC scheme described above, and it cut the number of spill -loads and stores by a factor of eight. Not to mention being simpler, -easier to understand and very fast. -<p> -The Traub paper also describes how to extend their method to multiple -basic blocks, which will be needed for GHC. It comes down to -reconciling multiple vreg-to-rreg mappings at points where control -flow merges. - -<h4>Redundant-move support for revised instruction selector suggestion</h4> - -As mentioned above, simplifying the instruction selector will require -the register allocator to try and allocate source and destination -vregs to the same rreg in reg-reg moves, so as to make as many as -possible go away. Without that, the revised insn selector would -generate worse code than at present. 
I know this stuff has been done -but know nothing about it. The Linear-scan reg-alloc paper mentioned -above does indeed mention a bit about it in the context of single -basic blocks, but I don't know if that's sufficient. - - - -<h3>x86 arcana that you should know about</h3> - -The main difficulty with x86 is that many instructions have fixed -register constraints, which can occasionally make reg-alloc fail -completely. And the FPU doesn't have the flat register model which -the reg-alloc abstraction (implicitly) assumes. -<p> -Our strategy is: do a good job for the common small subset, that is -integer loads, stores, address calculations, basic ALU ops (+, -, -and, or, xor), and jumps. That covers the vast majority of -executed insns. And indeed we do a good job, with a loss of -less than 2% compared with gcc. -<p> -Initially we tried to handle integer instructions with awkward -register constraints (mul, div, shifts by non-constant amounts) via -various jigglings of the spiller et al. This never worked robustly, -and putting platform-specific tweaks in the generic infrastructure is -a big No-No. (Not quite true; shifts by a non-constant amount are -still done by a giant kludge, and should be moved into this new -framework.) -<p> -Fortunately, all such insns are rare. So the current scheme is to -pretend that they don't have any such constraints. This fiction is -carried all the way through the register allocator. When the insn -finally comes to be printed, we emit a sequence which copies the -operands through memory (<code>%esp</code>-relative), satisfying the -constraints of the real instruction. This localises the gruesomeness -to just one place. Here, for example, is the code generated for -integer divison of <code>%esi</code> by <code>%ecx</code>: -<pre> - # BEGIN IQUOT %ecx, %esi - pushl $0 - pushl %eax - pushl %edx - pushl %ecx - movl %esi,% eax - cltd - idivl 0(%esp) - movl %eax, 12(%esp) - popl %edx - popl %edx - popl %eax - popl %esi - # END IQUOT %ecx, %esi -</pre> -This is not quite as appalling as it seems, if you consider that the -division itself typically takes 16+ cycles, whereas the rest of the -insns probably go through in about 1 cycle each. -<p> -This trick is taken to extremes for FP operations. -<p> -All notions of the x86 FP stack and its insns have been removed. -Instead, we pretend, to the instruction selector and register -allocator, that x86 has six floating point registers, -<code>%fake0</code> .. <code>%fake5</code>, which can be used in the -usual flat manner. We further claim that x86 has floating point -instructions very similar to SPARC and Alpha, that is, a simple -3-operand register-register arrangement. Code generation and register -allocation proceed on this basis. -<p> -When we come to print out the final assembly, our convenient fiction -is converted to dismal reality. Each fake instruction is -independently converted to a series of real x86 instructions. -<code>%fake0</code> .. <code>%fake5</code> are mapped to -<code>%st(0)</code> .. <code>%st(5)</code>. To do reg-reg arithmetic -operations, the two operands are pushed onto the top of the FP stack, -the operation done, and the result copied back into the relevant -register. When one of the operands is also the destination, we emit a -slightly less scummy translation. There are only six -<code>%fake</code> registers because 2 are needed for the translation, -and x86 has 8 in total. -<p> -The translation is inefficient but is simple and it works. 
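<p>
To give a flavour of what converting the fiction to reality means, here is a schematic Haskell sketch of expanding one flat, three-operand fake FP instruction. It is illustrative only: the data type and names are invented, and the NCG's pretty-printer handles many more cases (loads, stores, comparisons, and the less scummy in-place variants).
<pre>
-- %fake0 .. %fake5 map onto %st(0) .. %st(5).  A flat "dst := src1 + src2"
-- becomes: push both operands, add them (popping one), then pop the sum
-- into the destination.  Each push shifts the original stack slots down by
-- one, which is where the "+ 1" index adjustments come from.
data FakeInstr = GADD Int Int Int   -- GADD dst src1 src2

st :: Int -> String
st n = "%st(" ++ show n ++ ")"

expand :: FakeInstr -> [String]
expand (GADD dst src1 src2) =
  [ "fld  " ++ st src1         -- push the first operand
  , "fld  " ++ st (src2 + 1)   -- push the second (slots shifted by the first push)
  , "faddp"                    -- add the top two, popping one; the sum is now on top
  , "fstp " ++ st (dst + 1)    -- pop the sum into the destination slot
  ]

main :: IO ()
main = mapM_ putStrLn (expand (GADD 0 1 2))
</pre>
Even in this tiny case the redundant stack traffic is plain to see.
</p>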
A cleverer -translation would handle a sequence of insns, simulating the FP stack -contents, would not impose a fixed mapping from <code>%fake</code> to -<code>%st</code> regs, and hopefully could avoid most of the redundant -reg-reg moves of the current translation. -<p> -There are, however, two unforeseen bad side effects: -<ul> -<li>This doesn't work properly, because it doesn't observe the normal - conventions for x86 FP code generation. It turns out that each of - the 8 elements in the x86 FP register stack has a tag bit which - indicates whether or not that register is notionally in use or - not. If you do a FPU operation which happens to read a - tagged-as-empty register, you get an x87 FPU (stack invalid) - exception, which is normally handled by the FPU without passing it - to the OS: the program keeps going, but the resulting FP values - are garbage. The OS can ask for the FPU to pass it FP - stack-invalid exceptions, but it usually doesn't. - <p> - Anyways: inside NCG created x86 FP code this all works fine. - However, the NCG's fiction of a flat register set does not operate - the x87 register stack in the required stack-like way. When - control returns to a gcc-generated world, the stack tag bits soon - cause stack exceptions, and thus garbage results. - <p> - The only fix I could think of -- and it is horrible -- is to clear - all the tag bits just before the next STG-level entry, in chunks - of code which use FP insns. <code>i386_insert_ffrees</code> - inserts the relevant <code>ffree</code> insns into such code - blocks. It depends critically on <code>is_G_instr</code> to - detect such blocks. -<p> -<li>It's very difficult to read the generated assembly and - reason about it when debugging, because there's so much clutter. - We print the fake insns as comments in the output, and that helps - a bit. -</ul> - - - -<h3>Generating code for ccalls</h3> - -For reasons I don't really understand, the instruction selectors for -generating calls to C (<code>genCCall</code>) have proven surprisingly -difficult to get right, and soaked up a lot of debugging time. As a -result, I have once again opted for schemes which are simple and not -too difficult to argue as correct, even if they don't generate -excellent code. -<p> -The sparc ccall generator in particular forces all arguments into -temporary virtual registers before moving them to the final -out-registers (<code>%o0</code> .. <code>%o5</code>). This creates -some unnecessary reg-reg moves. The reason is explained in a -comment in the code. - - -<h3>Duplicate implementation for many STG macros</h3> - -This has been discussed at length already. It has caused a couple of -nasty bugs due to subtle untracked divergence in the macro -translations. The macro-expander really should be pushed up into the -Abstract C phase, so the problem can't happen. -<p> -Doing so would have the added benefit that the NCG could be used to -compile more "ways" -- well, at least the 'p' profiling way. - - -<h3>How to debug the NCG without losing your sanity/hair/cool</h3> - -Last, but definitely not least ... -<p> -The usual syndrome is that some program, when compiled via C, works, -but not when compiled via the NCG. Usually the problem is fairly -simple to fix, once you find the specific code block which has been -mistranslated. But the latter can be nearly impossible, since most -modules generate at least hundreds and often thousands of them. -<p> -My solution: cheat. 
-<p> -Because the via-C and native routes diverge only late in the day, -it is not difficult to construct a 1-1 correspondence between basic -blocks on the two routes. So, if the program works via C but not on -the NCG, do the following: -<ul> -<li>Recompile <code>AsmCodeGen.lhs</code> in the afflicted compiler - with <code>-DDEBUG_NCG</code>, so that it inserts - <code>___ncg_debug_marker</code>s - into the assembly it emits. -<p> -<li>Using a binary search on modules, find the module which is causing - the problem. -<p> -<li>Compile that module to assembly code, with identical flags, twice, - once via C and once via NCG. - Call the outputs <code>ModuleName.s-gcc</code> and - <code>ModuleName.s-nat</code>. Check that the latter does indeed have - <code>___ncg_debug_marker</code>s in it; otherwise the next steps fail. -<p> -<li>Build (with a working compiler) the program - <code>fptools/ghc/utils/debugNCG/diff_gcc_nat</code>. -<p> -<li>Run: <code>diff_gcc_nat ModuleName.s</code>. This will - construct the 1-1 correspondence, and emits on stdout - a cppable assembly output. Place this in a file -- I always - call it <code>synth.S</code>. Note, the capital S is important; - otherwise it won't get cpp'd. You can feed this file directly to - ghc and it will automatically get cpp'd; you don't have to do so - yourself. -<p> -<li>By messing with the <code>#define</code>s at the top of - <code>synth.S</code>, do a binary search to find the incorrect - block. Keep a careful record of where you are in the search; it - is easy to get confused. Remember also that multiple blocks may - be wrong, which also confuses matters. Finally, I usually start - off by re-checking that I can build the executable with all the - <code>#define</code>s set to 0 and then all to 1. This ensures - you won't get halfway through the search and then get stuck due to - some snafu with gcc-specific literals. Usually I set - <code>UNMATCHED_GCC</code> to 1 all the time, and this bit should - contain only literal data. - <code>UNMATCHED_NAT</code> should be empty. -</ul> -<p> -<code>diff_gcc_nat</code> was known to work correctly last time I used -it, in December 01, for both x86 and sparc. If it doesn't work, due -to changes in assembly syntax, or whatever, make it work. The -investment is well worth it. Searching for the incorrect block(s) any -other way is a total time waster. - - - -</ul> - - - - - <p><small> -<!-- hhmts start --> -Last modified: Mon Aug 19 11:41:43 CEST 2013 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/optimistic.html b/docs/comm/the-beast/optimistic.html deleted file mode 100644 index 4d158022e8..0000000000 --- a/docs/comm/the-beast/optimistic.html +++ /dev/null @@ -1,65 +0,0 @@ -<h2> Architectural stuff </h2> - -New fields in the TSO: -<ul> -<li> New global speculation-depth register; always counts the number of specuation frames -on the stack; incremented when -starting speculation, decremented when finishing. -<li> Profiling stuff -</ul> - - -<h2> Speculation frames </h2> - -The info table for a speculation frame points to the static spec-depth configuration -for that speculation point. (Points to, because the config is mutable, and the info -table has to be adjacent to the (immutable) code.) - - - -<h2> Abortion</h2> - -Abortion is modelled by a special asynchronous exception ThreadAbort. - -<ul> -<li> In the scheduler, if a thread returns with ThreadBlocked, and non-zero SpecDepth, send it -an asynchronous exception. 
- -<li> In the implementation of the <tt>catch#</tt> primop, raise an asynchonous exception if -SpecDepth is nonzero. - -<li> Timeout, administered by scheduler. Current story: abort if a speculation frame lasts from -one minor GC to the next. We detect this by seeing if there's a profiling frame on the stack --- a -profiling frame is added at a minor GC in place of a speculation frame (see Online Profiling). -</ul> - - -When tearing frames off the stack, we start a new chunk at every speculation frame, as well as every -update frame. We proceed down to the deepest speculation frame. -<p> -The <tt>AP_STACK</tt> closure built for a speculation frame must be careful <em>not</em> to enter the -next <tt>AP_STACK</tt> closure up, because that would re-enter a possible loop. -<p> -Delivering an asynch exception to a thread that is speculating. Invariant: there can be no catch frames -inside speculation (we abort in <tt>catch#</tt> when speculating. So the asynch exception just -tears off frames in the standard way until it gets to a catch frame, just as it would usually do. -<p> -Abortion can punish one or more of the speculation frames by decrementing their static config variables. - -<h3>Synchronous exceptions</h3> - -Synchronous exceptions are treated similarly as before. The stack is discarded up to an update frame; the -thunk to be updated is overwritten with "raise x", and the process continues. Until a catch frame. -<p> -When we find a spec frame, we allocate a "raise x" object, and resume execution with the return address -in the spec frame. In that way the spec frame is like a catch frame; it stops the unwinding process. -<p> -It's essential that every hard failure is caught, else speculation is unsafe. In particular, divide by zero -is hard to catch using OS support, so we test explicitly in library code. You can shoot yourself in the foot -by writing <tt>x `div#` 0</tt>, side-stepping the test. - - -<h3> Online profiling </h3> - -Sampling can be more frequent than minor GC (by jiggling the end-of-block code) but cannot -be less frequent, because GC doesn't expect to see profiling frames.
\ No newline at end of file diff --git a/docs/comm/the-beast/prelude.html b/docs/comm/the-beast/prelude.html deleted file mode 100644 index 64b607def5..0000000000 --- a/docs/comm/the-beast/prelude.html +++ /dev/null @@ -1,207 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Primitives and the Prelude</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Primitives and the Prelude</h1> - <p> - One of the trickiest aspects of GHC is the delicate interplay - between what knowledge is baked into the compiler, and what - knowledge it gets by reading the interface files of library - modules. In general, the less that is baked in, the better. -<p> - Most of what the compiler has to have wired in about primitives and - prelude definitions is in - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/"><code>fptools/ghc/compiler/prelude/</code></a>. - </p> - -GHC recognises these main classes of baked-in-ness: -<dl> -<dt><strong>Primitive types.</strong> -<dd>Primitive types cannot be defined in Haskell, and are utterly baked into the compiler. -They are notionally defined in the fictional module <tt>GHC.Prim</tt>. The <tt>TyCon</tt>s for these types are all defined -in module <tt>TysPrim</tt>; for example, -<pre> - intPrimTyCon :: TyCon - intPrimTyCon = .... -</pre> -Examples: -<tt>Int#, Float#, Addr#, State#</tt>. -<p> -<dt><strong>Wired-in types.</strong> -<dd>Wired-in types can be defined in Haskell, and indeed are (many are defined in </tt>GHC.Base</tt>). -However, it's very convenient for GHC to be able to use the type constructor for (say) <tt>Int</tt> -without looking it up in any environment. So module <tt>TysWiredIn</tt> contains many definitions -like this one: -<pre> - intTyCon :: TyCon - intTyCon = .... - - intDataCon :: DataCon - intDataCon = .... -</pre> -However, since a <tt>TyCon</tt> value contains the entire type definition inside it, it follows -that the complete definition of <tt>Int</tt> is thereby baked into the compiler. -<p> -Nevertheless, the library module <tt>GHC.Base</tt> still contains a definition for <tt>Int</tt> -just so that its info table etc get generated somewhere. Chaos will result if the wired-in definition -in <tt>TysWiredIn</tt> differs from that in <tt>GHC.Base</tt>. -<p> -The rule is that only very simple types should be wired in (for example, <tt>Ratio</tt> is not, -and <tt>IO</tt> is certainly not). No class is wired in: classes are just too complicated. -<p> -Examples: <tt>Int</tt>, <tt>Float</tt>, <tt>List</tt>, tuples. - -<p> -<dt><strong>Known-key things.</strong> -<dd>GHC knows of the existence of many, many other types, classes and values. <em>But all it knows is -their <tt>Name</tt>.</em> Remember, a <tt>Name</tt> includes a unique key that identifies the -thing, plus its defining module and occurrence name -(see <a href="names.html">The truth about Names</a>). Knowing a <tt>Name</tt>, therefore, GHC can -run off to the interface file for the module and find out everything else it might need. -<p> -Most of these known-key names are defined in module <tt>PrelNames</tt>; a further swathe concerning -Template Haskell are defined in <tt>DsMeta</tt>. The allocation of unique keys is done manually; -chaotic things happen if you make a mistake here, which is why they are all together. 
-</dl> - -All the <tt>Name</tt>s from all the above categories are used to initialise the global name cache, -which maps (module,occurrence-name) pairs to the globally-unique <tt>Name</tt> for that -thing. (See <tt>HscMain.initOrigNames</tt>.) - -<p> -The next sections elaborate these three classes a bit. - - - <h2>Primitives (module <tt>TysPrim</tt>)</h2> - <p> - Some types and functions have to be hardwired into the compiler as they - are atomic; all other code is essentially built around this primitive - functionality. This includes basic arithmetic types, such as integers, - and their elementary operations as well as pointer types. Primitive - types and functions often receive special treatment in the code - generator, which means that these entities have to be explicitly - represented in the compiler. Moreover, many of these types receive some - explicit treatment in the runtime system, and so, there is some further - information about <a href="../rts-libs/primitives.html">primitives in - the RTS section</a> of this document. - <p> - The module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/TysPrim.lhs"><code>TysPrim</code></a> - exports a list of all primitive type constructors as <code>primTyCons :: - [TyCon]</code>. All of these type constructors (of type - <code>TyCon</code>) are also exported as <code>intPrimTyCon</code>, - <code>stablePtrPrimTyCon</code>, and so on. In addition, for each - nullary type constructor the corresponding type (of type - <code>Type</code>) is also exported; for example, we have - <code>intPrimTy :: Type</code>. For all other type constructors, a - function is exported that constructs the type obtained by applying the - type constructors to an argument type (of type <code>Type</code>); for - example, we have <code>mkStablePtrPrimTy :: Type -> Type</code>. - <p> - As it is inconvenient to identify type that receive a special treatment - by the code generator by looking at their name, the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/PrimRep.lhs"><code>PrimRep</code></a> - exports a data type <code>PrimRep</code>, which lists all - machine-manipulable implementation types. The module also exports a set - of query functions on <code>PrimRep</code> that define properties, such - as a type's byte size or whether a primitive type is a pointer type. - Moreover, the function <code>TysPrim.primRepTyCon :: PrimRep -> - TyCon</code> converts <code>PrimRep</code> values into the corresponding - type constructor. - - <h2>Wired in types (module <tt>TysWiredIn</tt>)</h2> - <p> - In addition to entities that are primitive, as the compiler has to treat - them specially in the backend, there is a set of types, functions, - etc. that the Haskell language definition flags as essential to the - language by placing them into the special module <code>Prelude</code> - that is implicitly imported into each Haskell module. For some of these - entities it suffices to define them (by standard Haskell definitions) in - a <code>Prelude</code> module and ensuring that this module is treated - specially by being always imported . 
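<p>
It may help at this point to see the three degrees of baked-in-ness from the list above as a toy Haskell data type. Nothing here is GHC's code; it merely records what the compiler knows up front in each case (and the unique keys are made up).
<pre>
type Module  = String
type OccName = String

-- A Name pins a thing down completely: defining module, occurrence name,
-- and a unique key (see "The truth about Names").
data Name = Name { nameModule :: Module, nameOcc :: OccName, nameUnique :: Int }

-- Stand-in for a full type-constructor definition.
data TyConInfo = TyConInfo { tyConName :: Name, tyConArity :: Int }

data BakedInThing
  = PrimitiveTy TyConInfo   -- e.g. Int#: full definition baked in, notionally in GHC.Prim
  | WiredInTy   TyConInfo   -- e.g. Int: full definition baked in, but also defined in GHC.Base
  | KnownKey    Name        -- e.g. Ratio: only the Name; the rest comes from the interface file

intPrimThing, intThing, ratioThing :: BakedInThing
intPrimThing = PrimitiveTy (TyConInfo (Name "GHC.Prim" "Int#" 1) 0)
intThing     = WiredInTy   (TyConInfo (Name "GHC.Base" "Int"  2) 0)
ratioThing   = KnownKey    (Name "GHC.Real" "Ratio" 3)
</pre>
For many Prelude entities, of course, an ordinary library definition of the kind just mentioned is all that is needed.
</p>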
- <p> - However, there is a set of entities (such as, for example, the list type - and the corresponding data constructors) that have an inbetween status: - They are not truly primitive (lists, for example, can easily be defined - by a <code>data</code> declaration), but the compiler has to have extra - knowledge about them, as they are associated with some particular - features of the language (in the case of lists, there is special syntax, - such as list comprehensions, associated with the type). Another - example, for a special kind of entity are type classes that can be used - in a <code>deriving</code> clause. All types that are not-primitive, - but about which the compiler nonetheless has to have some extra - knowledge are defined in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/TysWiredIn.lhs"><code>TysWiredIn</code></a>. - <p> - All wired in type constructors are contained in <code>wiredInTyCons :: - [TyCon]</code>. In addition to that list, <code>TysWiredIn</code> - exports variables bound to representations of all listed type - constructors and their data constructors. So, for example, we have - <code>listTyCon</code> together with <code>nilDataCon</cons> and - </code>consDataCon</code>. There are also convenience functions, such - as <code>mkListTy</code> and <code>mkTupleTy</code>, which construct - compound types. - <p> - - <h2>Known-key names (module <tt>PrelNames</tt>)</h2> - - All names of types, functions, etc. known to the compiler are defined in - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/PrelNames.lhs"><code>PrelNames</code></a>. - This includes the names of types and functions exported from - <code>TysWiredIn</code>, but also others. In particular, this module - also fixes the names of all prelude modules; i.e., of the modules whose - name starts with <code>Prel</code>, which GHC's library uses to bring - some structure into the quite large number of <code>Prelude</code> - definitions. - <p> - <code>PrelNames.knownKeyNames :: [Name]</code> contains all names known - to the compiler, but the elements of the list are also exported - individually as variables, such as <code>floatTyConName</code> (having - the lexeme <code>Float</code>) and <code>floatDataConName</code> (having - the lexeme <code>F#</code>). For each of these names, - <code>PrelNames</code> derfines a unique key with a definition, such as - <p> -<blockquote><pre> -floatPrimTyConKey = mkPreludeTyConUnique 11</pre> -</blockquote> - <p> - that is, all unique keys for known prelude names are hardcoded into - <code>PrelNames</code> (and uniqueness has to be manually ensured in - that module). To simplify matching the types of important groups of - type constructors, <code>PrelNames</code> also exports lists, such as - <code>numericTyKeys</code> (keys of all numeric types), that contain the - unique keys of all names in that group. In addition, derivable type - classes and their structure is defined by - <code>derivableClassKeys</code> and related definitions. - <p> - In addition to names that have unique keys, <code>PrelNames</code> also - defines a set of names without uniqueness information. These names end - on the suffix <code>_RDR</code> and are of type <code>RdrName</code> (an - example, is <code>times_RDR</code>, which represents the lexeme - <code>*</code>). 
The names are used in locations where they pass - through the renamer anyway (e.g., special constructors encountered by - the parser, such as [], and code generated from deriving clauses), which - will take care of adding uniqueness information. - <p> - -<h2>Gathering it all together (module <tt>PrelInfo</tt>)</h2> - The module - <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/prelude/PrelInfo.lhs"><code>PrelInfo</code></a> - in some sense ties all the above together and provides a reasonably - restricted interface to these definition to the rest of the compiler. - However, from what I have seen, this doesn't quite work out and the - earlier mentioned modules are directly imported in many places. - - <p><small> -<!-- hhmts start --> -Last modified: Tue Dec 11 17:54:07 EST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/renamer.html b/docs/comm/the-beast/renamer.html deleted file mode 100644 index 878e82b370..0000000000 --- a/docs/comm/the-beast/renamer.html +++ /dev/null @@ -1,249 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Glorious Renamer</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The Glorious Renamer</h1> - <p> - The <em>renamer</em> sits between the parser and the typechecker. - However, its operation is quite tightly interwoven with the - typechecker. This is partially due to support for Template Haskell, - where spliced code has to be renamed and type checked. In particular, - top-level splices lead to multiple rounds of renaming and type - checking. - </p> - <p> - The main externally used functions of the renamer are provided by the - module <code>rename/RnSource.lhs</code>. In particular, we have - </p> - <blockquote> - <pre> -rnSrcDecls :: HsGroup RdrName -> RnM (TcGblEnv, HsGroup Name) -rnTyClDecls :: [LTyClDecl RdrName] -> RnM [LTyClDecl Name] -rnSplice :: HsSplice RdrName -> RnM (HsSplice Name, FreeVars)</pre> - </blockquote> - <p> - All of which execute in the renamer monad <code>RnM</code>. The first - function, <code>rnSrcDecls</code> renames a binding group; the second, - <code>rnTyClDecls</code> renames a list of (toplevel) type and class - declarations; and the third, <code>rnSplice</code> renames a Template - Haskell splice. As the types indicate, the main task of the renamer is - to convert converts all the <tt>RdrNames</tt> to <a - href="names.html"><tt>Names</tt></a>, which includes a number of - well-formedness checks (no duplicate declarations, all names are in - scope, and so on). In addition, the renamer performs other, not - strictly name-related, well-formedness checks, which includes checking - that the appropriate flags have been supplied whenever language - extensions are used in the source. - </p> - - <h2>RdrNames</h2> - <p> - A <tt>RdrName.RdrName</tt> is pretty much just a string (for an - unqualified name like "<tt>f</tt>") or a pair of strings (for a - qualified name like "<tt>M.f</tt>"): - </p> - <blockquote> - <pre> -data RdrName - = Unqual OccName - -- Used for ordinary, unqualified occurrences - - | Qual Module OccName - -- A qualified name written by the user in - -- *source* code. The module isn't necessarily - -- the module where the thing is defined; - -- just the one from which it is imported - - | Orig Module OccName - -- An original name; the module is the *defining* module. 
- -- This is used when GHC generates code that will be fed - -- into the renamer (e.g. from deriving clauses), but where - -- we want to say "Use Prelude.map dammit". - - | Exact Name - -- We know exactly the Name. This is used - -- (a) when the parser parses built-in syntax like "[]" - -- and "(,)", but wants a RdrName from it - -- (b) when converting names to the RdrNames in IfaceTypes - -- Here an Exact RdrName always contains an External Name - -- (Internal Names are converted to simple Unquals) - -- (c) by Template Haskell, when TH has generated a unique name</pre> - </blockquote> - <p> - The OccName type is described in <a href="names.html#occname">The - truth about names</a>. - </p> - - <h2>The Renamer Monad</h2> - <p> - Due to the tight integration of the renamer with the typechecker, both - use the same monad in recent versions of GHC. So, we have - </p> - <blockquote> - <pre> -type RnM a = TcRn a -- Historical -type TcM a = TcRn a -- Historical</pre> - </blockquote> - <p> - with the combined monad defined as - </p> - <blockquote> - <pre> -type TcRn a = TcRnIf TcGblEnv TcLclEnv a -type TcRnIf a b c = IOEnv (Env a b) c - -data Env gbl lcl -- Changes as we move into an expression - = Env { - env_top :: HscEnv, -- Top-level stuff that never changes - -- Includes all info about imported things - - env_us :: TcRef UniqSupply, -- Unique supply for local varibles - - env_gbl :: gbl, -- Info about things defined at the top level - -- of the module being compiled - - env_lcl :: lcl -- Nested stuff; changes as we go into - -- an expression - }</pre> - </blockquote> - <p> - the details of the global environment type <code>TcGblEnv</code> and - local environment type <code>TcLclEnv</code> are also defined in the - module <code>typecheck/TcRnTypes.lhs</code>. The monad - <code>IOEnv</code> is defined in <code>utils/IOEnv.hs</code> and extends - the vanilla <code>IO</code> monad with an additional state parameter - <code>env</code> that is treated as in a reader monad. (Side effecting - operations, such as updating the unique supply, are done with - <code>TcRef</code>s, which are simply a synonym for <code>IORef</code>s.) - </p> - - <h2>Name Space Management</h2> - <p> - As anticipated by the variants <code>Orig</code> and <code>Exact</code> - of <code>RdrName</code> some names should not change during renaming, - whereas others need to be turned into unique names. In this context, - the two functions <code>RnEnv.newTopSrcBinder</code> and - <code>RnEnv.newLocals</code> are important: - </p> - <blockquote> - <pre> -newTopSrcBinder :: Module -> Maybe Name -> Located RdrName -> RnM Name -newLocalsRn :: [Located RdrName] -> RnM [Name]</pre> - </blockquote> - <p> - The two functions introduces new toplevel and new local names, - respectively, where the first two arguments to - <code>newTopSrcBinder</code> determine the currently compiled module and - the parent construct of the newly defined name. Both functions create - new names only for <code>RdrName</code>s that are neither exact nor - original. - </p> - - <h3>Introduction of Toplevel Names: Global RdrName Environment</h3> - <p> - A global <code>RdrName</code> environment - <code>RdrName.GlobalRdrEnv</code> is a map from <code>OccName</code>s to - lists of qualified names. 
More precisely, the latter are - <code>Name</code>s with an associated <code>Provenance</code>: - </p> - <blockquote> - <pre> -data Provenance - = LocalDef -- Defined locally - Module - - | Imported -- Imported - [ImportSpec] -- INVARIANT: non-empty - Bool -- True iff the thing was named *explicitly* - -- in *any* of the import specs rather than being - -- imported as part of a group; - -- e.g. - -- import B - -- import C( T(..) ) - -- Here, everything imported by B, and the constructors of T - -- are not named explicitly; only T is named explicitly. - -- This info is used when warning of unused names.</pre> - </blockquote> - <p> - The part of the global <code>RdrName</code> environment for a module - that contains the local definitions is created by the function - <code>RnNames.importsFromLocalDecls</code>, which also computes a data - structure recording all imported declarations in the form of a value of - type <code>TcRnTypes.ImportAvails</code>. - </p> - <p> - The function <code>importsFromLocalDecls</code>, in turn, makes use of - <code>RnNames.getLocalDeclBinders :: Module -> HsGroup RdrName -> RnM - [AvailInfo]</code> to extract all declared names from a binding group, - where <code>HscTypes.AvailInfo</code> is essentially a collection of - <code>Name</code>s; i.e., <code>getLocalDeclBinders</code>, on the fly, - generates <code>Name</code>s from the <code>RdrName</code>s of all - top-level binders of the module represented by the <code>HsGroup - RdrName</code> argument. - </p> - <p> - It is important to note that all this happens before the renamer - actually descends into the toplevel bindings of a module. In other - words, before <code>TcRnDriver.rnTopSrcDecls</code> performs the - renaming of a module by way of <code>RnSource.rnSrcDecls</code>, it uses - <code>importsFromLocalDecls</code> to set up the global - <code>RdrName</code> environment, which contains <code>Name</code>s for - all imported <em>and</em> all locally defined toplevel binders. Hence, - when the helpers of <code>rnSrcDecls</code> come across the - <em>defining</em> occurrences of a toplevel <code>RdrName</code>, they - don't rename it by generating a new name, but they simply look up its - name in the global <code>RdrName</code> environment. - </p> - - <h2>Rebindable syntax</h2> - <p> - In Haskell when one writes "3" one gets "fromInteger 3", where - "fromInteger" comes from the Prelude (regardless of whether the - Prelude is in scope). If you want to completely redefine numbers, - that becomes inconvenient. So GHC lets you say - "-fno-implicit-prelude"; in that case, the "fromInteger" comes from - whatever is in scope. (This is documented in the User Guide.) - </p> - <p> - This feature is implemented as follows (I always forget). - <ul> - <li>Names that are implicitly bound by the Prelude, are marked by the - type <code>HsExpr.SyntaxExpr</code>. Moreover, the association list - <code>HsExpr.SyntaxTable</code> is set up by the renamer to map - rebindable names to the value they are bound to. - </li> - <li>Currently, five constructs related to numerals - (<code>HsExpr.NegApp</code>, <code>HsPat.NPat</code>, - <code>HsPat.NPlusKPat</code>, <code>HsLit.HsIntegral</code>, and - <code>HsLit.HsFractional</code>) and - two constructs related to code>do</code> expressions - (<code>HsExpr.BindStmt</code> and - <code>HsExpr.ExprStmt</code>) have rebindable syntax. - </li> - <li> When the parser builds these constructs, it puts in the - built-in Prelude Name (e.g. PrelNum.fromInteger). 
- </li> - <li> When the renamer encounters these constructs, it calls - <tt>RnEnv.lookupSyntaxName</tt>. - This checks for <tt>-fno-implicit-prelude</tt>; if not, it just - returns the same Name; otherwise it takes the occurrence name of the - Name, turns it into an unqualified RdrName, and looks it up in the - environment. The returned name is plugged back into the construct. - </li> - <li> The typechecker uses the Name to generate the appropriate typing - constraints. - </li> - </ul> - - <p><small> -<!-- hhmts start --> -Last modified: Wed May 4 17:16:15 EST 2005 -<!-- hhmts end --> - </small> - </body> -</html> - diff --git a/docs/comm/the-beast/simplifier.html b/docs/comm/the-beast/simplifier.html deleted file mode 100644 index 4dbce7765b..0000000000 --- a/docs/comm/the-beast/simplifier.html +++ /dev/null @@ -1,86 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Mighty Simplifier</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The Mighty Simplifier</h1> - <p> - Most of the optimising program transformations applied by GHC are - performed on an intermediate language called <em>Core,</em> which - essentially is a compiler-friendly formulation of rank-2 polymorphic - lambda terms defined in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/coreSyn/CoreSyn.lhs/"><code>CoreSyn.lhs</code>.</a> - The transformation engine optimising Core programs is called the - <em>Simplifier</em> and composed from a couple of modules located in the - directory <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/simplCore/"><code>fptools/ghc/compiler/simplCore/</code>.</a> - The main engine of the simplifier is contained in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/simplCore/Simplify.lhs"><code>Simplify.lhs</code>.</a> - and its driver is the routine <code>core2core</code> in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/simplCore/SimplCore.lhs"><code>SimplCore.lhs</code>.</a> - <p> - The program that the simplifier has produced after applying its various - optimisations can be obtained by passing the option - <code>-ddump-simpl</code> to GHC. Moreover, the various intermediate - stages of the optimisation process is printed when passing - <code>-dverbose-core2core</code>. - - <h4><a name="loopBreaker">Recursive Definitions</a></h4> - <p> - The simplification process has to take special care when handling - recursive binding groups; otherwise, the compiler might loop. - Therefore, the routine <code>reOrderRec</code> in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/simplCore/OccurAnal.lhs"><code>OccurAnal.lhs</code></a> - computes a set of <em>loop breakers</em> - a set of definitions that - together cut any possible loop in the binding group. It marks the - identifiers bound by these definitions as loop breakers by enriching - their <a href="basicTypes.html#occInfo">occurrence information.</a> Loop - breakers will <em>never</em> be inlined by the simplifier; thus, - guaranteeing termination of the simplification procedure. (This is not - entirely accurate -- see <a href="#rules">rewrite rules</a> below.) - - The processes finding loop breakers works as follows: First, the - strongly connected components (SCC) of the graph representing all - function dependencies is computed. Then, each SCC is inspected in turn. 
- If it contains only a single binding (self-recursive function), this is - the loop breaker. In case of multiple recursive bindings, the function - attempts to select bindings where the decision not to inline them does - cause the least harm - in the sense of inhibiting optimisations in the - code. This is achieved by considering each binding in turn and awarding - a <em>score</em> between 0 and 4, where a lower score means that the - function is less useful for inlining - and thus, a better loop breaker. - The evaluation of bingings is performed by the function - <code>score</code> locally defined in <code>OccurAnal</code>. - - Note that, because core programs represent function definitions as - <em>one</em> binding choosing between the possibly many equations in the - source program with a <code>case</code> construct, a loop breaker cannot - inline any of its possibly many alternatives (not even the non-recursive - alternatives). - - <h4><a name="rules">Rewrite Rules</a></h4> - <p> - The application of rewrite rules is controlled in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/simplCore/Simplify.lhs"><code>Simplify.lhs</code></a> - by the function <code>completeCall</code>. This function first checks - whether it should inline the function applied at the currently inspected - call site, then simplifies the arguments, and finally, checks whether - any rewrite rule can be applied (and also whether there is a matching - specialised version of the applied function). The actual check for rule - application is performed by the function <code><a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/specialise/Rules.lhs">Rules</a>.lookupRule</code>. - <p> - It should be note that the application of rewrite rules is not subject - to the loop breaker check - i.e., rules of loop breakers will be applied - regardless of whether this may cause the simplifier to diverge. - - <p><small> -<!-- hhmts start --> -Last modified: Wed Aug 8 19:25:33 EST 2001 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/stg.html b/docs/comm/the-beast/stg.html deleted file mode 100644 index 6c9851623a..0000000000 --- a/docs/comm/the-beast/stg.html +++ /dev/null @@ -1,164 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - You Got Control: The STG-language</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - You Got Control: The STG-language</h1> - <p> - GHC contains two completely independent backends: the byte code - generator and the machine code generator. The decision over which of - the two is invoked is made in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/HscMain.lhs"><code>HscMain</code></a><code>.hscCodeGen</code>. - The machine code generator proceeds itself in a number of phases: First, - the <a href="desugar.html">Core</a> intermediate language is translated - into <em>STG-language</em>; second, STG-language is transformed into a - GHC-internal variant of <a href="http://www.cminusminus.org/">C--</a>; - and thirdly, this is either emitted as concrete C--, converted to GNU C, - or translated to native code (by the <a href="ncg.html">native code - generator</a> which targets IA32, Sparc, and PowerPC [as of March '5]). 
- </p> - <p> - In the following, we will have a look at the first step of machine code - generation, namely the translation steps involving the STG-language. - Details about the underlying abstract machine, the <em>Spineless Tagless - G-machine</em>, are in <a - href="http://research.microsoft.com/copyright/accept.asp?path=/users/simonpj/papers/spineless-tagless-gmachine.ps.gz&pub=34">Implementing - lazy functional languages on stock hardware: the Spineless Tagless - G-machine</a>, SL Peyton Jones, Journal of Functional Programming 2(2), - Apr 1992, pp127-202. (Some details have changed since the publication of - this article, but it still gives a good introduction to the main - concepts.) - </p> - - <h2>The STG Language</h2> - <p> - The AST of the STG-language and the generation of STG code from Core is - both located in the <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/stgSyn/"><code>stgSyn/</code></a> - directory; in the modules <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/stgSyn/StgSyn.lhs"><code>StgSyn</code></a> - and <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/stgSyn/CoreToStg.lhs"><code>CoreToStg</code></a>, - respectively. - </p> - <p> - Conceptually, the STG-language is a lambda calculus (including data - constructors and case expressions) whose syntax is restricted to make - all control flow explicit. As such, it can be regarded as a variant of - <em>administrative normal form (ANF).</em> (C.f., <a - href="http://doi.acm.org/10.1145/173262.155113">The essence of compiling - with continuations.</a> Cormac Flanagan, Amr Sabry, Bruce F. Duba, and - Matthias Felleisen. <em>ACM SIGPLAN Conference on Programming Language - Design and Implementation,</em> ACM Press, 1993.) Each syntactic from - has a precise operational interpretation, in addition to the - denotational interpretation inherited from the lambda calculus. The - concrete representation of the STG language inside GHC also includes - auxiliary attributes, such as <em>static reference tables (SRTs),</em> - which determine the top-level bindings referenced by each let binding - and case expression. - </p> - <p> - As usual in ANF, arguments to functions etc. are restricted to atoms - (i.e., constants or variables), which implies that all sub-expressions - are explicitly named and evaluation order is explicit. Specific to the - STG language is that all let bindings correspond to closure allocation - (thunks, function closures, and data constructors) and that case - expressions encode both computation and case selection. There are two - flavours of case expressions scrutinising boxed and unboxed values, - respectively. The former perform function calls including demanding the - evaluation of thunks, whereas the latter execute primitive operations - (such as arithmetic on fixed size integers and floating-point numbers). - </p> - <p> - The representation of STG language defined in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/stgSyn/StgSyn.lhs"><code>StgSyn</code></a> - abstracts over both binders and occurrences of variables. The type names - involved in this generic definition all carry the prefix - <code>Gen</code> (such as in <code>GenStgBinding</code>). 
Instances of - these generic definitions, where both binders and occurrences are of type - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/basicTypes/Id.lhs"><code>Id</code></a><code>.Id</code> - are defined as type synonyms and use type names that drop the - <code>Gen</code> prefix (i.e., becoming plain <code>StgBinding</code>). - Complete programs in STG form are represented by values of type - <code>[StgBinding]</code>. - </p> - - <h2>From Core to STG</h2> - <p> - Although the actual translation from Core AST into STG AST is performed - by the function <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/stgSyn/CoreToStg.lhs"><code>CoreToStg</code></a><code>.coreToStg</code> - (or <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/stgSyn/CoreToStg.lhs"><code>CoreToStg</code></a><code>.coreExprToStg</code> - for individual expressions), the translation crucially depends on <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/coreSyn/CorePrep.lhs"><code>CorePrep</code></a><code>.corePrepPgm</code> - (resp. <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/coreSyn/CorePrep.lhs"><code>CorePrep</code></a><code>.corePrepExpr</code>), - which prepares Core code for code generation (for both byte code and - machine code generation). <code>CorePrep</code> saturates primitive and - constructor applications, turns the code into A-normal form, renames all - identifiers into globally unique names, generates bindings for - constructor workers, constructor wrappers, and record selectors, plus - some further cleanup. - </p> - <p> - In other words, after Core code is prepared for code generation it is - structurally already in the form required by the STG language. The main - work performed by the actual transformation from Core to STG, as - carried out by <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/stgSyn/CoreToStg.lhs"><code>CoreToStg</code></a><code>.coreToStg</code>, - is to compute the live and free variables as well as live CAFs (constant - applicative forms) at each let binding and case alternative. In - subsequent phases, the live CAF information is used to compute SRTs. - The live variable information is used to determine which stack slots - need to be zapped (to avoid space leaks) and the free variable - information is needed to construct closures. Moreover, hints for - optimised code generation are computed, such as whether a closure needs - to be updated after it has been evaluated. - </p> - - <h2>STG Passes</h2> - <p> - These days little actual work is performed on programs in STG form; in - particular, the code is not further optimised. All serious optimisation - (except low-level optimisations which are performed during native code - generation) has already been done on Core. The main task of <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/stgSyn/CoreToStg.lhs"><code>CoreToStg</code></a><code>.stg2stg</code> - is to compute SRTs from the live CAF information determined during STG - generation. Other than that, <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/profiling/SCCfinal.lhs"><code>SCCfinal</code></a><code>.stgMassageForProfiling</code> - is executed when compiling for profiling and information may be dumped - for debugging purposes.
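<p>
The shape of the language described above can be pictured with a miniature, STG-flavoured AST. This is only a sketch and leaves out most of what the real <code>StgSyn</code> records (update flags, SRTs, cost centres, and so on); like GHC's <code>Gen*</code> definitions it is parameterised over the binder type and then instantiated at a concrete identifier type.
<pre>
module StgSketch where

-- A miniature, STG-flavoured AST: arguments are atoms only, every let
-- allocates a closure, and case drives evaluation and selection.

data Atom bndr
  = AtomVar bndr
  | AtomLit Int
  deriving Show

data Expr bndr
  = App bndr [Atom bndr]              -- applications take only atoms
  | Let (Bind bndr) (Expr bndr)       -- every let allocates a closure
  | Case (Expr bndr) bndr [Alt bndr]  -- evaluation plus case selection
  | Lit Int
  deriving Show

data Bind bndr
  = NonRec bndr (Rhs bndr)
  | Rec [(bndr, Rhs bndr)]
  deriving Show

data Rhs bndr
  = Closure [bndr] (Expr bndr)  -- a thunk (no arguments) or a function
  | Con String [Atom bndr]      -- a saturated constructor application
  deriving Show

data Alt bndr = Alt String [bndr] (Expr bndr)
  deriving Show

-- Instantiated at a concrete identifier type, in the spirit of
-- "type StgBinding = GenStgBinding Id".
type Id = String
type Program = [Bind Id]
</pre>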
- </p> - - <h2>Towards C--</h2> - <p> - GHC's internal form of C-- is defined in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/cmm/Cmm.hs"><code>Cmm</code></a>. - The definition is generic in that it abstracts over the type of static - data and of the contents of basic blocks (i.e., over the concrete - representation of constant data and instructions). These generic - definitions have names carrying the prefix <code>Gen</code> (such as - <code>GenCmm</code>). The same module also instantiates the generic - form to a concrete form where data is represented by - <code>CmmStatic</code> and instructions are represented by - <code>CmmStmt</code> (giving us, e.g., <code>Cmm</code> from - <code>GenCmm</code>). The concrete form more or less follows the - external <a href="http://www.cminusminus.org/">C--</a> language. - </p> - <p> - Programs in STG form are translated to <code>Cmm</code> by <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/codeGen/CodeGen.lhs"><code>CodeGen</code></a><code>.codeGen</code> - </p> - - <p><hr><small> -<!-- hhmts start --> -Last modified: Sat Mar 5 22:55:25 EST 2005 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/syntax.html b/docs/comm/the-beast/syntax.html deleted file mode 100644 index be5bbefa17..0000000000 --- a/docs/comm/the-beast/syntax.html +++ /dev/null @@ -1,99 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Just Syntax</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Just Syntax</h1> - <p> - The lexical and syntactic analyser for Haskell programs are located in - <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/parser/"><code>fptools/ghc/compiler/parser/</code></a>. - </p> - - <h2>The Lexer</h2> - <p> - The lexer is a rather tedious piece of Haskell code contained in the - module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/parser/Lex.lhs"><code>Lex</code></a>. - Its complexity partially stems from covering, in addition to Haskell 98, - also the whole range of GHC language extensions plus its ability to - analyse interface files in addition to normal Haskell source. The lexer - defines a parser monad <code>P a</code>, where <code>a</code> is the - type of the result expected from a successful parse. More precisely, a - result of type -<blockquote><pre> -data ParseResult a = POk PState a - | PFailed Message</pre> -</blockquote> - <p> - is produced with <code>Message</code> being from <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/ErrUtils.lhs"><code>ErrUtils</code></a> - (and currently is simply a synonym for <code>SDoc</code>). - <p> - The record type <code>PState</code> contains information such as the - current source location, buffer state, contexts for layout processing, - and whether Glasgow extensions are accepted (either due to - <code>-fglasgow-exts</code> or due to reading an interface file). Most - of the fields of <code>PState</code> store unboxed values; in fact, even - the flag indicating whether Glasgow extensions are enabled is - represented by an unboxed integer instead of by a <code>Bool</code>. My - (= chak's) guess is that this is to avoid having to perform a - <code>case</code> on a boxed value in the inner loop of the lexer. 
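<p>
A much-simplified model of such a parser monad, with an explicit state and a <code>POk</code>/<code>PFailed</code> result, might look as follows. The state fields shown here are placeholders; the real <code>PState</code> tracks layout contexts, extension flags, and buffer state, partly in unboxed form as noted above.
<pre>
module LexerMonadSketch where

type Message = String      -- stand-in for ErrUtils.Message

-- A cut-down parser state.
data PState = PState
  { psInput :: String
  , psLine  :: Int
  , psCol   :: Int
  } deriving Show

data ParseResult a
  = POk PState a
  | PFailed Message
  deriving Show

newtype P a = P { runP :: PState -> ParseResult a }

instance Functor P where
  fmap f (P g) = P $ \s -> case g s of
    POk s' a    -> POk s' (f a)
    PFailed msg -> PFailed msg

instance Applicative P where
  pure a = P $ \s -> POk s a
  P pf <*> P pa = P $ \s -> case pf s of
    PFailed msg -> PFailed msg
    POk s' f    -> case pa s' of
      PFailed msg -> PFailed msg
      POk s'' a   -> POk s'' (f a)

instance Monad P where
  P pa >>= k = P $ \s -> case pa s of
    PFailed msg -> PFailed msg
    POk s' a    -> runP (k a) s'

-- Signal a lexical or syntactic error.
failP :: Message -> P a
failP msg = P $ \_ -> PFailed msg
</pre>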
- <p> - The same lexer is used by the Haskell source parser, the Haskell - interface parser, and the package configuration parser. - - <h2>The Haskell Source Parser</h2> - <p> - The parser for Haskell source files is defined in the form of a parser - specification for the parser generator <a - href="http://haskell.org/happy/">Happy</a> in the file <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/parser/Parser.y"><code>Parser.y</code></a>. - The parser exports three entry points for parsing entire modules - (<code>parseModule</code>, individual statements - (<code>parseStmt</code>), and individual identifiers - (<code>parseIdentifier</code>), respectively. The last two are needed - for GHCi. All three require a parser state (of type - <code>PState</code>) and are invoked from <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/HscMain.lhs"><code>HscMain</code></a>. - <p> - Parsing of Haskell is a rather involved process. The most challenging - features are probably the treatment of layout and expressions that - contain infix operators. The latter may be user-defined and so are not - easily captured in a static syntax specification. Infix operators may - also appear in the right hand sides of value definitions, and so, GHC's - parser treats those in the same way as expressions. In other words, as - general expressions are a syntactic superset of expressions - ok, they - <em>nearly</em> are - the parser simply attempts to parse a general - expression in such positions. Afterwards, the generated parse tree is - inspected to ensure that the accepted phrase indeed forms a legal - pattern. This and similar checks are performed by the routines from <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/parser/ParseUtil.lhs"><code>ParseUtil</code></a>. In - some cases, these routines do, in addition to checking for - wellformedness, also transform the parse tree, such that it fits into - the syntactic context in which it has been parsed; in fact, this happens - for patterns, which are transformed from a representation of type - <code>RdrNameHsExpr</code> into a representation of type - <code>RdrNamePat</code>. - - <h2>The Haskell Interface Parser</h2> - <p> - The parser for interface files is also generated by Happy from <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/rename/ParseIface.y"><code>ParseIface.y</code></a>. - It's main routine <code>parseIface</code> is invoked from <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/rename/RnHiFiles.lhs"><code>RnHiFiles</code></a><code>.readIface</code>. - - <h2>The Package Configuration Parser</h2> - <p> - The parser for configuration files is by far the smallest of the three - and defined in <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/ParsePkgConf.y"><code>ParsePkgConf.y</code></a>. - It exports <code>loadPackageConfig</code>, which is used by <a href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/main/DriverState.hs"><code>DriverState</code></a><code>.readPackageConf</code>. 
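<p>
The "parse a general expression, then check that it really forms a pattern" technique described above can be sketched over a toy grammar; <code>Expr</code>, <code>Pat</code>, and <code>exprToPat</code> are illustrative stand-ins for GHC's <code>RdrNameHsExpr</code>/<code>RdrNamePat</code> conversion in <code>ParseUtil</code>.
<pre>
module ExprToPatSketch where

-- What the parser accepts in a pattern position: an ordinary expression.
data Expr
  = EVar String
  | ECon String [Expr]
  | ELit Int
  | EApp Expr Expr
  deriving Show

-- What it must be converted into afterwards, if it is well-formed.
data Pat
  = PVar String
  | PCon String [Pat]
  | PLit Int
  deriving Show

-- Reject phrases that are legal expressions but not legal patterns
-- (for instance an application whose head is not a constructor).
exprToPat :: Expr -> Either String Pat
exprToPat (EVar v)    = Right (PVar v)
exprToPat (ELit n)    = Right (PLit n)
exprToPat (ECon c es) = fmap (PCon c) (mapM exprToPat es)
exprToPat e@(EApp _ _) =
  case collectArgs e [] of
    (ECon c [], args) -> fmap (PCon c) (mapM exprToPat args)
    _                 -> Left ("not a valid pattern: " ++ show e)
  where
    collectArgs (EApp f x) acc = collectArgs f (x : acc)
    collectArgs f          acc = (f, acc)
</pre>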
- - <p><small> -<!-- hhmts start --> -Last modified: Wed Jan 16 00:30:14 EST 2002 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/typecheck.html b/docs/comm/the-beast/typecheck.html deleted file mode 100644 index 482a447628..0000000000 --- a/docs/comm/the-beast/typecheck.html +++ /dev/null @@ -1,316 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Checking Types</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Checking Types</h1> - <p> - Probably the most important phase in the frontend is the type checker, - which is located at <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/"><code>fptools/ghc/compiler/typecheck/</code>.</a> - GHC type checks programs in their original Haskell form before the - desugarer converts them into Core code. This complicates the type - checker as it has to handle the much more verbose Haskell AST, but it - improves error messages, as those messages are based on the same - structure that the user sees. - </p> - <p> - GHC defines the abstract syntax of Haskell programs in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/hsSyn/HsSyn.lhs"><code>HsSyn</code></a> - using a structure that abstracts over the concrete representation of - bound occurrences of identifiers and patterns. The module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcHsSyn.lhs"><code>TcHsSyn</code></a> - defines a number of helper functions required by the type checker. Note - that the type <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcRnTypes.lhs"><code>TcRnTypes</code></a>.<code>TcId</code> - used to represent identifiers in some signatures during type checking - is, in fact, nothing but a synonym for a <a href="vars.html">plain - <code>Id</code>.</a> - </p> - <p> - It is also noteworthy that the representation of types changes during - type checking from <code>HsType</code> to <code>TypeRep.Type</code>. - The latter is a <a href="types.html">hybrid type representation</a> that - is used to type Core, but still contains sufficient information to - recover source types. In particular, the type checker maintains and - compares types in their <code>Type</code> form. - </p> - - <h2>The Overall Flow of Things</h2> - - <h4>Entry Points Into the Type Checker</h4> - <p> - The interface of the type checker (and <a - href="renamer.html">renamer</a>) to the rest of the compiler is provided - by <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcRnDriver.lhs"><code>TcRnDriver</code></a>. - Entire modules are processed by calling <code>tcRnModule</code> and GHCi - uses <code>tcRnStmt</code>, <code>tcRnExpr</code>, and - <code>tcRnType</code> to typecheck statements and expressions, and to - kind check types, respectively. Moreover, <code>tcRnExtCore</code> is - provided to typecheck external Core code. Finally, - <code>tcTopSrcDecls</code> is used by Template Haskell - more - specifically by <code>TcSplice.tc_bracket</code> - - to type check the contents of declaration brackets. - </p> - - <h4>Renaming and Type Checking a Module</h4> - <p> - The function <code>tcRnModule</code> controls the complete static - analysis of a Haskell module.
It sets up the combined renamer and type - checker monad, resolves all import statements, initiates the actual - renaming and type checking process, and finally, wraps up by processing - the export list. - </p> - <p> - The actual type checking and renaming process is initiated via - <code>TcRnDriver.tcRnSrcDecls</code>, which uses a helper called - <code>tc_rn_src_decls</code> to implement the iterative renaming and - type checking process required by <a href="../exts/th.html">Template - Haskell</a>. However, before it invokes <code>tc_rn_src_decls</code>, - it takes care of hi-boot files; afterwards, it simplifies type - constraints and performs zonking (see below regarding the latter). - </p> - <p> - The function <code>tc_rn_src_decls</code> partitions static analysis of - a whole module into multiple rounds, where the initial round is followed - by an additional one for each toplevel splice. It collects all - declarations up to the next splice into an <code>HsDecl.HsGroup</code> - to rename and type check that <em>declaration group</em> by calling - <code>TcRnDriver.tcRnGroup</code>. Afterwards, it executes the - splice (if there are any left) and proceeds to the next group, which - includes the declarations produced by the splice. - </p> - <p> - The function <code>tcRnGroup</code>, finally, gets down to invoking the - actual renaming and type checking via - <code>TcRnDriver.rnTopSrcDecls</code> and - <code>TcRnDriver.tcTopSrcDecls</code>, respectively. The renamer, apart - from renaming, computes the global type checking environment, of type - <code>TcRnTypes.TcGblEnv</code>, which is stored in the type checking - monad before type checking commences. - </p> - - <h2>Type Checking a Declaration Group</h2> - <p> - The type checking of a declaration group, performed by - <code>tcTopSrcDecls</code>, starts by processing the type and class - declarations of the current module, using the function - <code>TcTyClsDecls.tcTyAndClassDecls</code>. This is followed by a - first round over instance declarations using - <code>TcInstDcls.tcInstDecls1</code>, which in particular generates all - additional bindings due to the deriving process. Then come foreign - import declarations (<code>TcForeign.tcForeignImports</code>) and - default declarations (<code>TcDefaults.tcDefaults</code>). - </p> - <p> - Now, finally, toplevel value declarations (including derived ones) are - type checked using <code>TcBinds.tcTopBinds</code>. Afterwards, - <code>TcInstDcls.tcInstDecls2</code> traverses instances for the second - time. Type checking concludes with processing foreign exports - (<code>TcForeign.tcForeignExports</code>) and rewrite rules - (<code>TcRules.tcRules</code>). Finally, the global environment is - extended with the new bindings. - </p> - - <h2>Type Checking Type and Class Declarations</h2> - <p> - Type and class declarations are type checked in a couple of phases that - contain recursive dependencies - aka <em>knots.</em> The first knot - encompasses almost the whole type checking of these declarations and - forms the main piece of <code>TcTyClsDecls.tcTyAndClassDecls</code>. - </p> - <p> - Inside this big knot, the first main operation is kind checking, which - again involves a knot. It is implemented by <code>kcTyClDecls</code>, - which performs kind checking of potentially recursively-dependent type - and class declarations using kind variables for initially unknown kinds.
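<p>
The group-splitting step performed by <code>tc_rn_src_decls</code>, described above, can be pictured with a small sketch over a toy declaration type; <code>Decl</code>, <code>splitAtSplice</code>, and <code>allGroups</code> are invented names, and running a splice is abstracted as a function that returns the declarations it produces.
<pre>
module SpliceGroupsSketch where

-- A toy top-level declaration: either an ordinary declaration or a
-- Template Haskell splice that may generate further declarations.
data Decl = OrdinaryDecl String | Splice String
  deriving Show

-- Everything up to the first top-level splice forms the next
-- declaration group; the splice and the remaining declarations are
-- handled in a later round.
splitAtSplice :: [Decl] -> ([Decl], Maybe (String, [Decl]))
splitAtSplice ds = case break isSplice ds of
  (grp, Splice s : rest) -> (grp, Just (s, rest))
  (grp, _)               -> (grp, Nothing)
  where
    isSplice (Splice _) = True
    isSplice _          = False

-- Process group after group; runSplice stands in for actually running
-- the splice and collecting the declarations it produces.
allGroups :: (String -> [Decl]) -> [Decl] -> [[Decl]]
allGroups runSplice ds = case splitAtSplice ds of
  (grp, Nothing)        -> [grp]
  (grp, Just (s, rest)) -> grp : allGroups runSplice (runSplice s ++ rest)
</pre>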
- During processing the individual declarations some of these variables - will be instantiated depending on the context; the rest gets by default - kind <code>*</code> (during <em>zonking</em> of the kind signatures). - Type synonyms are treated specially in this process, because they can - have an unboxed type, but they cannot be recursive. Hence, their kinds - are inferred in dependency order. Moreover, in contrast to class - declarations and other type declarations, synonyms are not entered into - the global environment as a global <code>TyThing</code>. - (<code>TypeRep.TyThing</code> is a sum type that combines the various - flavours of typish entities, such that they can be stuck into type - environments and similar.) - </p> - - <h2>More Details</h2> - - <h4>Types Variables and Zonking</h4> - <p> - During type checking type variables are represented by mutable variables - - cf. the <a href="vars.html#TyVar">variable story.</a> Consequently, - unification can instantiate type variables by updating those mutable - variables. This process of instantiation is (for reasons that elude me) - called <a - href="http://www.dictionary.com/cgi-bin/dict.pl?term=zonk&db=*">zonking</a> - in GHC's sources. The zonking routines for the various forms of Haskell - constructs are responsible for most of the code in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcHsSyn.lhs"><code>TcHsSyn</code>,</a> - whereas the routines that actually operate on mutable types are defined - in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcMType.lhs"><code>TcMType</code></a>; - this includes the zonking of type variables and type terms, routines to - create mutable structures and update them as well as routines that check - constraints, such as that type variables in function signatures have not - been instantiated during type checking. The actual type unification - routine is <code>uTys</code> in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcUnify.lhs"><code>TcUnify</code></a>. - </p> - <p> - All type variables that may be instantiated (those in signatures - may not), but haven't been instantiated during type checking, are zonked - to <code>()</code>, so that after type checking all mutable variables - have been eliminated. - </p> - - <h4>Type Representation</h4> - <p> - The representation of types is fixed in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcRep.lhs"><code>TcRep</code></a> - and exported as the data type <code>Type</code>. As explained in <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcType.lhs"><code>TcType</code></a>, - GHC supports rank-N types, but, in the type checker, maintains the - restriction that type variables cannot be instantiated to quantified - types (i.e., the type system is predicative). The type checker floats - universal quantifiers outside and maintains types in prenex form. - (However, quantifiers can, of course, not float out of negative - positions.) Overall, we have - </p> - <blockquote> - <pre> -sigma -> forall tyvars. phi -phi -> theta => rho -rho -> sigma -> rho - | tau -tau -> tyvar - | tycon tau_1 .. tau_n - | tau_1 tau_2 - | tau_1 -> tau_2</pre> - </blockquote> - <p> - where <code>sigma</code> is in prenex form; i.e., there is never a - forall to the right of an arrow in a <code>phi</code> type. 
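<p>
A minimal model of the mutable type variables and the zonking pass described above, using plain <code>IORef</code>s, is sketched below; it is an illustration only and not the code in <code>TcMType</code> or <code>TcHsSyn</code>.
<pre>
module ZonkSketch where

import Data.IORef

-- A metavariable is a mutable cell: Nothing while unsolved, Just ty
-- once unification has filled it in.
data Ty
  = TyCon String [Ty]
  | Fun Ty Ty
  | Meta (IORef (Maybe Ty))

-- "Unification" by imperative update, in the style described above.
solve :: IORef (Maybe Ty) -> Ty -> IO ()
solve ref ty = writeIORef ref (Just ty)

-- Zonking: walk a type and chase every solved metavariable, so the
-- result no longer mentions filled-in cells.  A final pass over
-- variables that may be instantiated but never were could default
-- them (the text above mentions zonking such variables to ()).
zonk :: Ty -> IO Ty
zonk (TyCon c ts) = fmap (TyCon c) (mapM zonk ts)
zonk (Fun a b)    = do
  a' <- zonk a
  b' <- zonk b
  return (Fun a' b')
zonk (Meta ref)   = do
  contents <- readIORef ref
  case contents of
    Just ty -> zonk ty
    Nothing -> return (Meta ref)
</pre>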
Moreover, a - type of the form <code>tau</code> never contains a quantifier (which - includes arguments to type constructors). - </p> - <p> - Of particular interest are the variants <code>SourceTy</code> and - <code>NoteTy</code> of <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TypeRep.lhs"><code>TypeRep</code></a>.<code>Type</code>. - The constructor <code>SourceTy :: SourceType -> Type</code> represents a - type constraint; that is, a predicate over types represented by a - dictionary. The type checker treats a <code>SourceTy</code> as opaque, - but during the translation to core it will be expanded into its concrete - representation (i.e., a dictionary type) by the function <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/types/Type.lhs"><code>Type</code></a>.<code>sourceTypeRep</code>. - Note that newtypes are not covered by <code>SourceType</code>s anymore, - even if some comments in GHC still suggest this. Instead, all newtype - applications are initially represented as a <code>NewTcApp</code>, until - they are eliminated by calls to <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/types/Type.lhs"><code>Type</code></a>.<code>newTypeRep</code>. - </p> - <p> - The <code>NoteTy</code> constructor is used to add non-essential - information to a type term. Such information has the type - <code>TypeRep.TyNote</code> and is either the set of free type variables - of the annotated expression or the unexpanded version of a type synonym. - Free variables sets are cached as notes to save the overhead of - repeatedly computing the same set for a given term. Unexpanded type - synonyms are useful for generating comprehensible error messages, but - have no influence on the process of type checking. - </p> - - <h4>Type Checking Environment</h4> - <p> - During type checking, GHC maintains a <em>type environment</em> whose - type definitions are fixed in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcRnTypes.lhs"><code>TcRnTypes</code></a> with the operations defined in -<a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcEnv.lhs"><code>TcEnv</code></a>. - Among other things, the environment contains all imported and local - instances as well as a list of <em>global</em> entities (imported and - local types and classes together with imported identifiers) and - <em>local</em> entities (locally defined identifiers). This environment - is threaded through the type checking monad, whose support functions - including initialisation can be found in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcRnMonad.lhs"><code>TcRnMonad</code>.</a> - - <h4>Expressions</h4> - <p> - Expressions are type checked by <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/TcExpr.lhs"><code>TcExpr</code>.</a> - <p> - Usage occurrences of identifiers are processed by the function - <code>tcId</code> whose main purpose is to <a href="#inst">instantiate - overloaded identifiers.</a> It essentially calls - <code>TcInst.instOverloadedFun</code> once for each universally - quantified set of type constraints. It should be noted that overloaded - identifiers are replaced by new names that are first defined in the LIE - (Local Instance Environment?) and later promoted into top-level - bindings. 
- - <h4><a name="inst">Handling of Dictionaries and Method Instances</a></h4> - <p> - GHC implements overloading using so-called <em>dictionaries.</em> A - dictionary is a tuple of functions -- one function for each method in - the class of which the dictionary implements an instance. During type - checking, GHC replaces each type constraint of a function with one - additional argument. At runtime, the extended function gets passed a - matching class dictionary by way of these additional arguments. - Whenever the function needs to call a method of such a class, it simply - extracts it from the dictionary. - <p> - This sounds simple enough; however, the actual implementation is a bit - more tricky as it wants to keep track of all the instances at which - overloaded functions are used in a module. This information is useful - to optimise the code. The implementation is in the module <a - href="http://cvs.haskell.org/cgi-bin/cvsweb.cgi/fptools/ghc/compiler/typecheck/Inst.lhs"><code>Inst.lhs</code>.</a> - <p> - The function <code>instOverloadedFun</code> is invoked for each - overloaded usage occurrence of an identifier, where overloaded means that - the type of the identifier contains a non-trivial type constraint. It - proceeds in two steps: (1) Allocation of a method instance - (<code>newMethodWithGivenTy</code>) and (2) instantiation of functional - dependencies. The former implies allocating a new unique identifier, - which replaces the original (overloaded) identifier at the currently - type-checked usage occurrence. - <p> - The new identifier (after being threaded through the LIE) eventually - will be bound by a top-level binding whose rhs contains a partial - application of the original overloaded identifier. This partial application applies - the overloaded function to the dictionaries needed for the current - instance. In GHC lingo, this is called a <em>method.</em> Before - becoming a top-level binding, the method is first represented as a value - of type <code>Inst.Inst</code>, which makes it easy to fold multiple - instances of the same identifier at the same types into one global - definition. (And probably other things, too, which I haven't - investigated yet.) - - <p> - <strong>Note:</strong> As of 13 January 2001 (wrt. the code in the - CVS HEAD), the above mechanism interferes badly with RULES pragmas - defined over overloaded functions. During instantiation, a new name is - created for an overloaded function partially applied to the dictionaries - needed in a usage position of that function. As the rewrite rule, - however, mentions the original overloaded name, it won't fire anymore - -- unless later phases remove the intermediate definition again. The - latest CVS version of GHC has an option - <code>-fno-method-sharing</code>, which avoids sharing instantiation - stubs. This is usually/often/sometimes sufficient to make the rules - fire again.
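<p>
Written out by hand for a tiny class, the dictionary translation described above looks roughly like this; <code>EqDict</code> and the surrounding definitions are invented for the example and are not the code GHC generates.
<pre>
module DictionarySketch where

-- Source program (conceptually):   member :: Eq a => a -> [a] -> Bool
-- After the dictionary translation the Eq constraint has become an
-- explicit record-of-methods argument.

data EqDict a = EqDict
  { eq  :: a -> a -> Bool
  , neq :: a -> a -> Bool
  }

-- An instance turns into a dictionary value.
eqDictInt :: EqDict Int
eqDictInt = EqDict { eq = (==), neq = (/=) }

-- The overloaded function receives the dictionary as an extra
-- argument and extracts the method it needs from it.
member :: EqDict a -> a -> [a] -> Bool
member _ _ []       = False
member d x (y : ys) = eq d x y || member d x ys

-- A "method" in the sense used above: the overloaded function
-- partially applied to the dictionary for one particular instance.
memberInt :: Int -> [Int] -> Bool
memberInt = member eqDictInt
</pre>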
- - <p><small> -<!-- hhmts start --> -Last modified: Thu May 12 22:52:46 EST 2005 -<!-- hhmts end --> - </small> - </body> -</html> diff --git a/docs/comm/the-beast/types.html b/docs/comm/the-beast/types.html deleted file mode 100644 index 383b71f054..0000000000 --- a/docs/comm/the-beast/types.html +++ /dev/null @@ -1,179 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - Hybrid Types</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - Hybrid Types</h1> - <p> - GHC essentially supports two type systems: (1) the <em>source type - system</em> (which is a heavily extended version of the type system of - Haskell 98) and (2) the <em>Core type system,</em> which is the type system - used by the intermediate language (see also <a - href="desugar.html">Sugar Free: From Haskell To Core</a>). - </p> - <p> - During parsing and renaming, type information is represented in a form - that is very close to Haskell's concrete syntax; it is defined by - <code>HsTypes.HsType</code>. In addition, type, class, and instance - declarations are maintained in their source form as defined in the - module <code>HsDecl</code>. The situation changes during type checking, - where types are translated into a second representation, which is - defined in the module <code>types/TypeRep.lhs</code>, as type - <code>Type</code>. This second representation is peculiar in that it is - a hybrid between the source representation of types and the Core - representation of types. Using functions, such as - <code>Type.coreView</code> and <code>Type.deepCoreView</code>, a value - of type <code>Type</code> exhibits its Core representation. On the - other hand, pretty printing a <code>Type</code> with - <code>TypeRep.pprType</code> yields the type's source representation. - </p> - <p> - In fact, the <a href="typecheck.html">type checker</a> maintains type - environments based on <code>Type</code>, but needs to perform type - checking on source-level types. As a result, we have functions - <code>Type.tcEqType</code> and <code>Type.tcCmpType</code>, which - compare types based on their source representation, as well as the - function <code>coreEqType</code>, which compares them based on their - core representation. The latter is needed during type checking of Core - (as performed by the functions in the module - <code>coreSyn/CoreLint.lhs</code>). - </p> - - <h2>Type Synonyms</h2> - <p> - Type synonyms in Haskell are essentially a form of macro definitions on - the type level. For example, when the type checker compares two type - terms, synonyms are always compared in their expanded form. However, to - produce good error messages, we like to avoid expanding type synonyms - during pretty printing. Hence, <code>Type</code> has a variant - <code>NoteTy TyNote Type</code>, where - </p> - <blockquote> - <pre> -data TyNote - = FTVNote TyVarSet -- The free type variables of the noted expression - - | SynNote Type -- Used for type synonyms - -- The Type is always a TyConApp, and is the un-expanded form. - -- The type to which the note is attached is the expanded form.</pre> - </blockquote> - <p> - In other words, a <code>NoteTy</code> represents the expanded form of a - type synonym together with a note stating its source form. 
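<p>
The idea of carrying both forms of a synonym application can be sketched with a cut-down type representation; <code>Ty</code>, <code>expandNotes</code>, and the example below are illustrative only.
<pre>
module SynNoteSketch where

-- A cut-down type representation in which a synonym application keeps
-- its un-expanded source form (for error messages) next to its
-- expansion (for comparison), in the spirit of NoteTy/SynNote.
data Ty
  = TyVar String
  | TyConApp String [Ty]
  | SynNote Ty Ty        -- SynNote source expansion
  deriving (Eq, Show)

-- The comparison view expands all synonym notes away ...
expandNotes :: Ty -> Ty
expandNotes (SynNote _ rhs) = expandNotes rhs
expandNotes (TyConApp c ts) = TyConApp c (map expandNotes ts)
expandNotes t               = t

-- ... while the printer prefers the source form.
pretty :: Ty -> String
pretty (SynNote src _) = pretty src
pretty (TyVar v)       = v
pretty (TyConApp c ts) = unwords (c : map pretty ts)

-- Compare types as the type checker would: through the expansion.
eqTy :: Ty -> Ty -> Bool
eqTy a b = expandNotes a == expandNotes b

-- Example: String is kept as a note around its expansion
-- (written "List Char" here).
stringTy :: Ty
stringTy = SynNote (TyConApp "String" [])
                   (TyConApp "List" [TyConApp "Char" []])
</pre>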
- </p> - - <h3>Creating Representation Types of Synonyms</h3> - <p> - During translation from <code>HsType</code> to <code>Type</code> the - function <code>Type.mkSynTy</code> is used to construct representations - of applications of type synonyms. It creates a <code>NoteTy</code> node - if the synonym is applied to a sufficient number of arguments; - otherwise, it builds a simple <code>TyConApp</code> and leaves it to - <code>TcMType.checkValidType</code> to pick up invalid unsaturated - synonym applications. While creating a <code>NoteTy</code>, - <code>mkSynTy</code> also expands the synonym by substituting the type - arguments for the parameters of the synonym definition, using - <code>Type.substTyWith</code>. - </p> - <p> - The function <code>mkSynTy</code> is used indirectly via - <code>mkGenTyConApp</code>, <code>mkAppTy</code>, and - <code>mkAppTy</code>, which construct type representations involving - type applications. The function <code>mkSynTy</code> is also used - directly during type checking interface files; this is for tedious - reasons to do with forall hoisting - see the comment at - <code>TcIface.mkIfTcApp</code>. - </p> - - <h2>Newtypes</h2> - <p> - Data types declared by a <code>newtype</code> declarations constitute new - type constructors---i.e., they are not just type macros, but introduce - new type names. However, provided that a newtype is not recursive, we - still want to implement it by its representation type. GHC realises this - by providing two flavours of type equality: (1) <code>tcEqType</code> is - source-level type equality, which compares newtypes and - <code>PredType</code>s by name, and (2) <code>coreEqType</code> compares - them structurally (by using <code>deepCoreView</code> to expand the - representation before comparing). The function - <code>deepCoreView</code> (via <code>coreView</code>) invokes - <code>expandNewTcApp</code> for every type constructor application - (<code>TyConApp</code>) to determine whether we are looking at a newtype - application that needs to be expanded to its representation type. - </p> - - <h2>Predicates</h2> - <p> - The dictionary translation of type classes, translates each predicate in - a type context of a type signature into an additional argument, which - carries a dictionary with the functions overloaded by the corresponding - class. The <code>Type</code> data type has a special variant - <code>PredTy PredType</code> for predicates, where - </p> - <blockquote> - <pre> -data PredType - = ClassP Class [Type] -- Class predicate - | IParam (IPName Name) Type -- Implicit parameter</pre> - </blockquote> - <p> - These types need to be handled as source type during type checking, but - turn into their representations when inspected through - <code>coreView</code>. The representation is determined by - <code>Type.predTypeRep</code>. - </p> - - <h2>Representation of Type Constructors</h2> - <p> - Type constructor applications are represented in <code>Type</code> by - the variant <code>TyConApp :: TyCon -> [Type] -> Type</code>. The first - argument to <code>TyConApp</code>, namely <code>TyCon.TyCon</code>, - distinguishes between function type constructors (variant - <code>FunTyCon</code>) and algebraic type constructors (variant - <code>AlgTyCon</code>), which arise from data and newtype declarations. - The variant <code>AlgTyCon</code> contains all the information available - from the data/newtype declaration as well as derived information, such - as the <code>Unique</code> and argument variance information. 
This - includes a field <code>algTcRhs :: AlgTyConRhs</code>, where - <code>AlgTyConRhs</code> distinguishes three kinds of algebraic data - type declarations: (1) declarations that have been exported abstractly, - (2) <code>data</code> declarations, and (3) <code>newtype</code> - declarations. The last two both include their original right hand side; - in addition, the third variant also caches the "ultimate" representation - type, which is the right hand side after expanding all type synonyms and - non-recursive newtypes. - </p> - <p> - Both data and newtype declarations refer to their data constructors - represented as <code>DataCon.DataCon</code>, which include all details - of their signature (as derived from the original declaration) as well - information for code generation, such as their tag value. - </p> - - <h2>Representation of Classes and Instances</h2> - <p> - Class declarations turn into values of type <code>Class.Class</code>. - They represent methods as the <code>Id</code>s of the dictionary - selector functions. Similar selector functions are available for - superclass dictionaries. - </p> - <p> - Instance declarations turn into values of type - <code>InstEnv.Instance</code>, which in interface files are represented - as <code>IfaceSyn.IfaceInst</code>. Moreover, the type - <code>InstEnv.InstEnv</code>, which is a synonym for <code>UniqFM - ClsInstEnv</code>, provides a mapping of classes to their - instances---<code>ClsInstEnv</code> is essentially a list of instance - declarations. - </p> - - <p><small> -<!-- hhmts start --> -Last modified: Sun Jun 19 13:07:22 EST 2005 -<!-- hhmts end --> - </small></p> - </body> -</html> diff --git a/docs/comm/the-beast/vars.html b/docs/comm/the-beast/vars.html deleted file mode 100644 index 9bbd310c60..0000000000 --- a/docs/comm/the-beast/vars.html +++ /dev/null @@ -1,235 +0,0 @@ -<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> -<html> - <head> - <META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=ISO-8859-1"> - <title>The GHC Commentary - The Real Story about Variables, Ids, TyVars, and the like</title> - </head> - - <body BGCOLOR="FFFFFF"> - <h1>The GHC Commentary - The Real Story about Variables, Ids, TyVars, and the like</h1> - <p> - - -<h2>Variables</h2> - -The <code>Var</code> type, defined in <code>basicTypes/Var.lhs</code>, -represents variables, both term variables and type variables: -<pre> - data Var - = Var { - varName :: Name, - realUnique :: FastInt, - varType :: Type, - varDetails :: VarDetails, - varInfo :: IdInfo - } -</pre> -<ul> -<li> The <code>varName</code> field contains the identity of the variable: -its unique number, and its print-name. See "<a href="names.html">The truth about names</a>". - -<p><li> The <code>realUnique</code> field caches the unique number in the -<code>varName</code> field, just to make comparison of <code>Var</code>s a little faster. - -<p><li> The <code>varType</code> field gives the type of a term variable, or the kind of a -type variable. (Types and kinds are both represented by a <code>Type</code>.) - -<p><li> The <code>varDetails</code> field distinguishes term variables from type variables, -and makes some further distinctions (see below). - -<p><li> For term variables (only) the <code>varInfo</code> field contains lots of useful -information: strictness, unfolding, etc. However, this information is all optional; -you can always throw away the <code>IdInfo</code>. 
In contrast, you can't safely throw away -the <code>VarDetails</code> of a <code>Var</code>. -</ul> -<p> -It's often fantastically convenient to have term variables and type variables -share a single data type. For example, -<pre> - exprFreeVars :: CoreExpr -> VarSet -</pre> -If there were two types, we'd need to return two sets. Similarly, big lambdas and -little lambdas use the same constructor in Core, which is extremely convenient. -<p> -We define a couple of type synonyms: -<pre> - type Id = Var -- Term variables - type TyVar = Var -- Type variables -</pre> -just to help us document the occasions when we are expecting only term variables, -or only type variables. - - -<h2> The <code>VarDetails</code> field </h2> - -The <code>VarDetails</code> field tells what kind of variable this is: -<pre> -data VarDetails - = LocalId -- Used for locally-defined Ids (see NOTE below) - LocalIdDetails - - | GlobalId -- Used for imported Ids, dict selectors etc - GlobalIdDetails - - | TyVar - | MutTyVar (IORef (Maybe Type)) -- Used during unification; - TyVarDetails -</pre> - -<a name="TyVar"> -<h2>Type variables (<code>TyVar</code>)</h2> -</a> -<p> -The <code>TyVar</code> case is self-explanatory. The <code>MutTyVar</code> -case is used only during type checking. Then a type variable can be unified, -using an imperative update, with a type, and that is what the -<code>IORef</code> is for. The <code>TcType.TyVarDetails</code> field records -the sort of type variable we are dealing with. It is defined as -<pre> -data TyVarDetails = SigTv | ClsTv | InstTv | VanillaTv -</pre> -<code>SigTv</code> marks type variables that were introduced when -instantiating a type signature prior to matching it against the inferred type -of a definition. The variants <code>ClsTv</code> and <code>InstTv</code> mark -scoped type variables introduced by class and instance heads, respectively. -These first three sorts of type variables are skolem variables (tested by the -predicate <code>isSkolemTyVar</code>); i.e., they must <em>not</em> be -instantiated. All other type variables are marked as <code>VanillaTv</code>. -<p> -For a long time I tried to keep mutable Vars statically type-distinct -from immutable Vars, but I've finally given up. It's just too painful. -After type checking there are no MutTyVars left, but there's no static check -of that fact. - -<h2>Term variables (<code>Id</code>)</h2> - -A term variable (of type <code>Id</code>) is represented either by a -<code>LocalId</code> or a <code>GlobalId</code>: -<p> -A <code>GlobalId</code> is -<ul> -<li> Always bound at top-level. -<li> Always has a <code>GlobalName</code>, and hence has - a <code>Unique</code> that is globally unique across the whole - GHC invocation (a single invocation may compile multiple modules). -<li> Has <code>IdInfo</code> that is absolutely fixed, forever. -</ul> - -<p> -A <code>LocalId</code> is: -<ul> -<li> Always bound in the module being compiled: -<ul> -<li> <em>either</em> bound within an expression (lambda, case, local let(rec)) -<li> <em>or</em> defined at top level in the module being compiled. -</ul> -<li> Has IdInfo that changes as the simplifier bashes repeatedly on it. -</ul> -<p> -The key thing about <code>LocalId</code>s is that the free-variable finder -typically treats them as candidate free variables. That is, it ignores -<code>GlobalId</code>s such as imported constants, data constructors, etc.
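<p>
The point about the free-variable finder can be made concrete with a toy expression type in which every variable is tagged as local or global; only the local ones are collected. This is a sketch, not GHC's <code>exprFreeVars</code>.
<pre>
module FreeVarsSketch where

import qualified Data.Set as Set

data Scope = LocalScope | GlobalScope
  deriving (Eq, Ord, Show)

data Var = V { varName :: String, varScope :: Scope }
  deriving (Eq, Ord, Show)

data Expr
  = EVar Var
  | EApp Expr Expr
  | ELam Var Expr
  deriving Show

-- Candidate free variables: locally bound variables not in scope at
-- this point.  Variables marked as global (imported things, data
-- constructors, ...) are ignored, as described above.
freeVars :: Expr -> Set.Set Var
freeVars = go Set.empty
  where
    go bound (EVar v)
      | varScope v == GlobalScope = Set.empty
      | v `Set.member` bound      = Set.empty
      | otherwise                 = Set.singleton v
    go bound (EApp f x) = go bound f `Set.union` go bound x
    go bound (ELam v e) = go (Set.insert v bound) e
</pre>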
- -<p> -An important invariant is this: <em>All the bindings in the module -being compiled (whether top level or not) are <code>LocalId</code>s -until the CoreTidy phase.</em> In the CoreTidy phase, all -externally-visible top-level bindings are made into GlobalIds. This -is the point when a <code>LocalId</code> becomes "frozen" and becomes -a fixed, immutable <code>GlobalId</code>. -<p> -(A binding is <em>"externally-visible"</em> if it is exported, or -mentioned in the unfolding of an externally-visible Id. An -externally-visible Id may not have an unfolding, either because it is -too big, or because it is the loop-breaker of a recursive group.) - -<h3>Global Ids and implicit Ids</h3> - -<code>GlobalId</code>s are further categorised by their <code>GlobalIdDetails</code>. -This type is defined in <code>basicTypes/IdInfo</code>, because it mentions other -structured types like <code>DataCon</code>. Unfortunately it is *used* in <code>Var.lhs</code> -so there's a <code>hi-boot</code> knot to get it there. Anyway, here's the declaration: -<pre> -data GlobalIdDetails - = NotGlobalId -- Used as a convenient extra return value - -- from globalIdDetails - - | VanillaGlobal -- Imported from elsewhere - - | PrimOpId PrimOp -- The Id for a primitive operator - | FCallId ForeignCall -- The Id for a foreign call - - -- These next ones are all "implicit Ids" - | RecordSelId FieldLabel -- The Id for a record selector - | DataConId DataCon -- The Id for a data constructor *worker* - | DataConWrapId DataCon -- The Id for a data constructor *wrapper* - -- [the only reasons we need to know is so that - -- a) we can suppress printing a definition in the interface file - -- b) when typechecking a pattern we can get from the - -- Id back to the data con] -</pre> -The <code>GlobalIdDetails</code> allows us to go from the <code>Id</code> for -a record selector, say, to its field name; or the <code>Id</code> for a primitive -operator to the <code>PrimOp</code> itself. -<p> -Certain <code>GlobalId</code>s are called <em>"implicit"</em> Ids. An implicit -Id is derived by implication from some other declaration. So a record selector is -derived from its data type declaration, for example. An implicit Id is always -a <code>GlobalId</code>. For most of the compilation, the implicit Ids are just -that: implicit. If you do -ddump-simpl you won't see their definition. (That's -why it's true to say that until CoreTidy all Ids in this compilation unit are -LocalIds.) But at CorePrep, a binding is added for each implicit Id defined in -this module, so that the code generator will generate code for the (curried) function. -<p> -Implicit Ids carry their unfolding inside them, of course, so they may well have -been inlined much earlier; but we generate the curried top-level defn just in -case it's ever needed. - -<h3>LocalIds</h3> - -The <code>LocalIdDetails</code> gives more info about a <code>LocalId</code>: -<pre> -data LocalIdDetails - = NotExported -- Not exported - | Exported -- Exported - | SpecPragma -- Not exported, but not to be discarded either - -- It's unclean that this is so deeply built in -</pre> -From this we can tell whether the <code>LocalId</code> is exported, and that -tells us whether we can drop an unused binding as dead code. -<p> -The <code>SpecPragma</code> thing is a HACK. Suppose you write a SPECIALIZE pragma: -<pre> - foo :: Num a => a -> a - {-# SPECIALIZE foo :: Int -> Int #-} - foo = ...
-</pre> -The type checker generates a dummy call to <code>foo</code> at the right types: -<pre> - $dummy = foo Int dNumInt -</pre> -The Id <code>$dummy</code> is marked <code>SpecPragma</code>. Its role is to hang -onto that call to <code>foo</code> so that the specialiser can see it, but there -are no calls to <code>$dummy</code>. -The simplifier is careful not to discard <code>SpecPragma</code> Ids, so that it -reaches the specialiser. The specialiser processes the right hand side of a <code>SpecPragma</code> Id -to find calls to overloaded functions, <em>and then discards the <code>SpecPragma</code> Id</em>. -So <code>SpecPragma</code> behaves a like <code>Exported</code>, at least until the specialiser. - - -<h3> ExternalNames and InternalNames </h3> - -Notice that whether an Id is a <code>LocalId</code> or <code>GlobalId</code> is -not the same as whether the Id has an <code>ExternaName</code> or an <code>InternalName</code> -(see "<a href="names.html#sort">The truth about Names</a>"): -<ul> -<li> Every <code>GlobalId</code> has an <code>ExternalName</code>. -<li> A <code>LocalId</code> might have either kind of <code>Name</code>. -</ul> - -<!-- hhmts start --> -Last modified: Fri Sep 12 15:17:18 BST 2003 -<!-- hhmts end --> - </small> - </body> -</html> - diff --git a/docs/users_guide/7.10.1-notes.xml b/docs/users_guide/7.10.1-notes.xml new file mode 100644 index 0000000000..6d9b9378a1 --- /dev/null +++ b/docs/users_guide/7.10.1-notes.xml @@ -0,0 +1,376 @@ +<?xml version="1.0" encoding="iso-8859-1"?> +<sect1 id="release-7-10-1"> + <title>Release notes for version 7.10.1</title> + + <para> + The significant changes to the various parts of the compiler are listed + in the following sections. There have also been numerous bug fixes and + performance improvements over the 7.8 branch. + </para> + + <sect2> + <title>Highlights</title> + + <para> + The highlights, since the 7.8 branch, are: + </para> + + <itemizedlist> + <listitem> + <para> + TODO FIXME + </para> + </listitem> + </itemizedlist> + </sect2> + + <sect2> + <title>Full details</title> + <sect3> + <title>Language</title> + <itemizedlist> + <listitem> + <para> + Added support for <link linkend="binary-literals">binary integer literals</link> + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>Compiler</title> + <itemizedlist> + <listitem> + <para> + GHC now checks that all the language extensions required for + the inferred type signatures are explicitly enabled. This + means that if any of the type signatures inferred in your + program requires some language extension you will need to + enable it. The motivation is that adding a missing type + signature inferred by GHC should yield a program that + typechecks. Previously this was not the case. + </para> + <para> + This is a breaking change. Code that used to compile in the + past might fail with an error message requiring some + particular language extension (most likely + <option>-XTypeFamilies</option>, <option>-XGADTs</option> or + <option>-XFlexibleContexts</option>). 
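A possible example of the kind of definition affected, assuming the usual <option>-XFlexibleContexts</option> rules (this example is not taken from the release notes):
<programlisting>
-- Accepted without any extensions by earlier releases; the inferred type is
--   k :: (Functor f, Show (f Bool)) => f Bool -> String
-- and its context mentions the non-type-variable argument "f Bool", so the
-- signature needs FlexibleContexts, whether written by hand or inferred.
k x = show (fmap not x)
</programlisting>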
+ </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>GHCi</title> + <itemizedlist> + <listitem> + <para> + TODO FIXME + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>Template Haskell</title> + <itemizedlist> + <listitem> + <para> + TODO FIXME + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>Runtime system</title> + <itemizedlist> + <listitem> + <para> + TODO FIXME + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>Build system</title> + <itemizedlist> + <listitem> + <para> + TODO FIXME + </para> + </listitem> + </itemizedlist> + </sect3> + </sect2> + + <sect2> + <title>Libraries</title> + + <sect3> + <title>array</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 0.5.0.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>base</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 4.7.0.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>bin-package-db</title> + <itemizedlist> + <listitem> + <para> + This is an internal package, and should not be used. + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>binary</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 0.7.1.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>bytestring</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 0.10.4.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>Cabal</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.18.1.3) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>containers</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 0.5.4.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>deepseq</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.3.0.2) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>directory</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.2.0.2) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>filepath</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.3.0.2) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>ghc-prim</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 0.3.1.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>haskell98</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 2.0.0.3) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>haskell2010</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.1.1.1) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>hoopl</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 3.10.0.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>hpc</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 0.6.0.1) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>integer-gmp</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 0.5.1.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>old-locale</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.0.0.6) + </para> + </listitem> + </itemizedlist> + </sect3> + + 
<sect3> + <title>old-time</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.1.0.2) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>process</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.2.0.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>template-haskell</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 2.9.0.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>time</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 1.4.1) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>unix</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 2.7.0.0) + </para> + </listitem> + </itemizedlist> + </sect3> + + <sect3> + <title>Win32</title> + <itemizedlist> + <listitem> + <para> + Version number XXXXX (was 2.3.0.1) + </para> + </listitem> + </itemizedlist> + </sect3> + </sect2> + + <sect2> + <title>Known bugs</title> + <itemizedlist> + <listitem> + <para> + TODO FIXME + </para> + </listitem> + </itemizedlist> + </sect2> +</sect1> diff --git a/docs/users_guide/7.8.1-notes.xml b/docs/users_guide/7.8.1-notes.xml deleted file mode 100644 index 36b0ad52a7..0000000000 --- a/docs/users_guide/7.8.1-notes.xml +++ /dev/null @@ -1,1251 +0,0 @@ -<?xml version="1.0" encoding="iso-8859-1"?> -<sect1 id="release-7-8-1"> - <title>Release notes for version 7.8.1</title> - - <para> - The significant changes to the various parts of the compiler are listed - in the following sections. There have also been numerous bug fixes and - performance improvements over the 7.6 branch. - </para> - - <sect2> - <title>Highlights</title> - - <para> - The highlights, since the 7.6 branch, are: - </para> - - <itemizedlist> - <listitem> - <para> - OS X Mavericks with XCode 5 is now properly supported - by GHC. As a result of this, GHC now uses Clang to - preprocess Haskell code by default for Mavericks - builds. - </para> - - <para> - Note that normally, GHC used <literal>gcc</literal> as - the preprocessor for Haskell code (as it was the - default everywhere,) which implements - <literal>-traditional</literal> behavior. However, - Clang is not 100% compatible with GCC's - <literal>-traditional</literal> as it is rather - implementation specified and does not match any - specification. Clang is also more strict. - </para> - - <para> - As a result of this, when using Clang as the - preprocessor, some programs which previously used - <literal>-XCPP</literal> and the preprocessor will now - fail to compile. Users who wish to retain the previous - behavior are better off using cpphs as an external - preprocessor for the time being. - </para> - - <para> - In the future, we hope to fix this by adopting a - better preprocessor implementation independent of the - C compiler (perhaps cpphs itself,) and ship that - instead. - </para> - </listitem> - - <listitem> - <para> - By default, GHC has a new warning enabled, - <literal>-fwarn-typed-holes</literal>, which causes the - compiler to respond with the types of unbound - variables it encounters in the source code. (It is - reminiscient of the "holes" feature in languages such - as Agda.) - - For more information, see <xref linkend="typed-holes"/>. - </para> - </listitem> - - <listitem> - <para> - GHC can now perform simple evaluation of type-level - natural numbers, when using the - <literal>DataKinds</literal> extension. 
For example, - given a type-level constraint such as <literal>(x + 3) - ~ 5</literal>, GHC is able to infer that - <literal>x</literal> is 2. Similarly, GHC can now - understand type-level identities such as <literal>x + - 0 ~ x</literal>. - </para> - - <para> - Note that the solving of these equations is only used - to resolve unification variables - it does not - generate new facts in the type checker. This is - similar to how functional dependencies work. - </para> - </listitem> - - <listitem> - <para> - It is now possible to declare a 'closed' <literal>type - family</literal> when using the - <literal>TypeFamilies</literal> extension. A closed - <literal>type family</literal> cannot have any - instances created other than the ones in its - definition. - - For more information, see <xref linkend="closed-type-families"/>. - </para> - </listitem> - - <listitem> - <para> - Use of the <literal>GeneralizedNewtypeDeriving</literal> - extension is now subject to <emphasis>role checking</emphasis>, - to ensure type safety of the derived instances. As this change - increases the type safety of GHC, it is possible that some code - that previously compiled will no longer work. - - For more information, see <xref linkend="roles"/>. - </para> - </listitem> - - <listitem> - <para> - GHC now supports overloading list literals using the new - <literal>OverloadedLists</literal> extension. - - For more information, see <xref linkend="overloaded-lists"/>. - </para> - </listitem> - - <listitem> - <para> - GHC now supports pattern synonyms, enabled by the - <literal>-XPatternSynonyms</literal> extension, - allowing you to name and abstract over patterns more - easily. - - For more information, see <xref linkend="pattern-synonyms"/>. - </para> - <para> - Note: For the GHC 7.8.1 version, this language feature - should be regarded as a preview. - </para> - </listitem> - - <listitem> - <para> - There has been significant overhaul of the type - inference engine and constraint solver, meaning it - should be faster and use less memory. - </para> - </listitem> - - <listitem> - <para> - By default, GHC will now unbox all "small" strict - fields in a data type. A "small" data type is one - whose size is equivalent to or smaller than the native - word size of the machine. This means you no longer - have to specify <literal>UNPACK</literal> pragmas for - e.g. strict <literal>Int</literal> fields. This also - applies to floating-point values. - </para> - </listitem> - - <listitem> - <para> - GHC now has a brand-new I/O manager that scales significantly - better for larger workloads compared to the previous one. It - should scale linearly up to approximately 32 cores. - </para> - </listitem> - - <listitem> - <para> - The LLVM backend now supports 128- and 256-bit SIMD - operations. - </para> - <para> - Note carefully: this is <emphasis>only</emphasis> available with - the LLVM backend, and should be considered - experimental. - </para> - </listitem> - - <listitem> - <para> - The new code generator, after significant work by many - individuals over the past several years, is now enabled by - default. This is a complete rewrite of the STG to Cmm - transformation. In general, your programs may get slightly - faster. - </para> - - <para> - The old code generator has been removed completely. - </para> - </listitem> - - <listitem> - <para> - GHC now has substantially better support for cross - compilation. 
In particular, GHC now has all the - necessary patches to support cross compilation to - Apple iOS, using the LLVM backend. - </para> - </listitem> - - <listitem> - <para> - PrimOps for comparing unboxed values now return - <literal>Int#</literal> instead of <literal>Bool</literal>. - This change is backwards incompatible. See - <ulink url="http://ghc.haskell.org/trac/ghc/wiki/NewPrimopsInGHC7.8"> - this GHC wiki page</ulink> for instructions how to update your - existing code. See <ulink url="http://ghc.haskell.org/trac/ghc/wiki/PrimBool"> - here</ulink> for motivation and discussion of implementation details. - </para> - </listitem> - - <listitem> - <para> - New PrimOps for atomic memory operations. - The <literal>casMutVar#</literal> PrimOp was introduced in - GHC 7.2 (debugged in 7.4). This release also includes additional - PrimOps for compare-and-swap (<literal>casArray#</literal> and - <literal>casIntArray#</literal>) and one for fetch-and-add - (<literal>fetchAddIntArray#</literal>). - </para> - </listitem> - - <listitem> - <para> - On Linux, FreeBSD and Mac OS X, GHCi now uses the - system dynamic linker by default, instead of its built - in (static) object linker. This is more robust - cross-platform, and fixes many long-standing bugs (for - example: constructors and destructors, weak symbols, - etc work correctly, and several edge cases in the RTS - are fixed.) - </para> - - <para> - As a result of this, GHCi (and Template Haskell) must - now load <emphasis>dynamic</emphasis> object files, not static - ones. To assist this, there is a new compilation flag, - <literal>-dynamic-too</literal>, which when used - during compilation causes GHC to emit both static and - dynamic object files at the same time. GHC itself - still defaults to static linking. - </para> - - <para> - Note that Cabal will correctly handle - <literal>-dynamic-too</literal> for you automatically, - especially when <literal>-XTemplateHaskell</literal> - is needed - but you <emphasis>must</emphasis> tell Cabal you are - using the <literal>TemplateHaskell</literal> - extension. - </para> - - <para> - Note that you must be using Cabal and Cabal-install - 1.18 for it to correctly build dynamic shared libraries - for you. - </para> - - <para> - Currently, Dynamic GHCi and - <literal>-dynamic-too</literal> are not supported on - Windows (32bit or 64bit.) - </para> - </listitem> - - <listitem> - <para> - <literal>Typeable</literal> is now poly-kinded, making - <literal>Typeable1</literal>, <literal>Typeable2</literal>, - etc., obsolete, deprecated, and relegated to - <literal>Data.OldTypeable</literal>. Furthermore, user-written - instances of <literal>Typeable</literal> are now disallowed: - use <literal>deriving</literal> or the new extension - <literal>-XAutoDeriveTypeable</literal>, which will create - <literal>Typeable</literal> instances for every datatype - declared in the module. - </para> - </listitem> - - <listitem> - <para> - GHC now has a parallel compilation driver. When - compiling with <literal>--make</literal> (which is on - by default,) you may also specify - <literal>-jN</literal> in order to compile - <replaceable>N</replaceable> modules in - parallel. (Note: this will automatically scale on - multicore machines without specifying <literal>+RTS - -N</literal> to the compiler.) - </para> - </listitem> - - <listitem> - <para> - GHC now has support for a new pragma, - <literal>{-# MINIMAL #-}</literal>, allowing you to - explicitly declare the minimal complete definition of - a class. 
Should an instance not provide the minimal - required definitions, a warning will be emitted. - See <xref linkend="minimal-pragma"/> for details. - </para> - </listitem> - - <listitem> - <para> - In GHC 7.10, <literal>Applicative</literal> will - become a superclass of <literal>Monad</literal>, - potentially breaking a lot of user code. To ease this - transition, GHC now generates warnings when - definitions conflict with the Applicative-Monad - Proposal (AMP). - </para> - - <para> - A warning is emitted if a type is an instance of - <literal>Monad</literal> but not of - <literal>Applicative</literal>, - <literal>MonadPlus</literal> but not - <literal>Alternative</literal>, and when a local - function named <literal>join</literal>, - <literal><*></literal> or <literal>pure</literal> is - defined. - </para> - - <para> - The warnings are enabled by default, and can be controlled - using the new flag <literal>-f[no-]warn-amp</literal>. - </para> - </listitem> - - <listitem> - <para> - Using the new <literal>InterruptibleFFI</literal> - extension, it's possible to now declare a foreign - import as <literal>interruptible</literal>, as opposed - to only <literal>safe</literal> or - <literal>unsafe</literal>. An - <literal>interruptible</literal> foreign call is the - same as a <literal>safe</literal> call, but may be - interrupted by asynchronous <emphasis>Haskell - exceptions</emphasis>, such as those generated by - <literal>throwTo</literal> or - <literal>timeout</literal>. - </para> - - <para> - For more information (including the exact details on - how the foreign thread is interrupted,) see <xref - linkend="ffi-interruptible"/>. - </para> - </listitem> - - <listitem> - <para> - GHC's internal compiler pipeline is now exposed - through a <literal>Hooks</literal> module inside the - GHC API. These hooks allow you to control most of the - internal compiler phase machinery, including compiling - expressions, phase control, and linking. - </para> - - <para> - Note: this interface will likely see continuous - refinement and API changes in future releases, so it - should be considered a preview. - </para> - </listitem> - <listitem> - <para> - The LLVM code generator has been fixed to support - dynamic linking. This enables runtime-linking - (e.g. GHCi) support for architectures without support in - GHC's own runtime linker (e.g. ARM). - </para> - <para> - Note: Tables-next-to-code is disabled when building on - ARM with binutil's ld due to a - <ulink url="https://sourceware.org/bugzilla/show_bug.cgi?id=16177"> - bug</ulink> in ld. - </para> - </listitem> - </itemizedlist> - </sect2> - - <sect2> - <title>Full details</title> - <sect3> - <title>Language</title> - <itemizedlist> - <listitem> - <para> - There is a new extension, - <literal>NullaryTypeClasses</literal>, which - allows you to declare a type class without any - parameters. - </para> - </listitem> - </itemizedlist> - - <itemizedlist> - <listitem> - <para> - There is a new extension, - <literal>NumDecimals</literal>, which allows you - to specify an integer using compact "floating - literal" syntax. This lets you say things like - <literal>1.2e6 :: Integer</literal> instead of - <literal>1200000</literal> - </para> - </listitem> - </itemizedlist> - - <itemizedlist> - <listitem> - <para> - There is a new extension, - <literal>NegativeLiterals</literal>, which will - cause GHC to interpret the expression - <literal>-123</literal> as <literal>fromIntegral - (-123)</literal>. 
Haskell 98 and Haskell 2010 both
- specify that it should instead desugar to
- <literal>negate (fromIntegral 123)</literal>.
- </para>
- </listitem>
- </itemizedlist>
-
- <itemizedlist>
- <listitem>
- <para>
- There is a new extension,
- <literal>EmptyCase</literal>, which allows you
- to write a case expression with no alternatives,
- <literal>case ... of {}</literal>.
- </para>
- </listitem>
- </itemizedlist>
-
- <itemizedlist>
- <listitem>
- <para>
- The <literal>IncoherentInstances</literal>
- extension has seen a behavioral change, and is
- now 'liberated' and less conservative during
- instance resolution. This allows more programs to
- compile than before.
- </para>
- <para>
- Now, <literal>IncoherentInstances</literal> will
- always pick an arbitrary matching instance, if
- multiple ones exist.
- </para>
- </listitem>
- </itemizedlist>
-
- <itemizedlist>
- <listitem>
- <para>
- A new built-in function <literal>coerce</literal> is
- provided that allows you to safely coerce values between types
- that have the same run-time representation, such as
- newtypes, but also newtypes inside containers. See the
- Haddock documentation of
- <ulink url="&libraryBaseLocation;/Data-Coerce.html#v%3Acoerce">coerce</ulink>
- and of the class
- <ulink url="&libraryBaseLocation;/Data-Coerce.html#t%3ACoercible">Coercible</ulink>
- for more details.
- </para>
- <para>
- This feature is included in this release as a technology
- preview, and may change its syntax and/or semantics in the
- next release.
- </para>
- </listitem>
- </itemizedlist>
-
- <itemizedlist>
- <listitem>
- <para>
- The new pragma, <literal>{-# MINIMAL #-}</literal>,
- allows you to explicitly declare the minimal complete
- definition of a class. Should an instance not provide
- the minimal required definitions, a warning will be
- emitted.
- </para>
-
- <para>
- See <xref linkend="minimal-pragma"/> for more details.
- </para>
- </listitem>
- </itemizedlist>
- </sect3>
-
- <sect3>
- <title>Compiler</title>
- <itemizedlist>
- <listitem>
- <para>
- GHC can now build both static and dynamic object
- files at the same time in a single compilation
- pass, when given the
- <literal>-dynamic-too</literal> flag. This will
- produce both a statically-linkable
- <literal>.o</literal> object file, and a
- dynamically-linkable <literal>.dyn_o</literal>
- file. The output suffix of the dynamic objects can
- be controlled by the flag
- <literal>-dynosuf</literal>.
- </para>
-
- <para>
- Note that GHC still builds statically by default.
- </para>
- </listitem>
- <listitem>
- <para>
- GHC now supports a
- <literal>--show-options</literal> flag, which will
- dump all of the flags it supports to standard out.
- </para>
- </listitem>
- <listitem>
- <para>
- GHC now supports warning about overflow of integer
- literals, controlled by
- <literal>-fwarn-overflowed-literals</literal>. It
- is enabled by default.
- </para>
- </listitem>
- <listitem>
- <para>
- It's now possible to switch the system linker on Linux
- (between GNU gold and GNU ld) at runtime without problems.
- </para>
- </listitem>
- <listitem>
- <para>
- The <literal>-fwarn-dodgy-imports</literal> flag now warns
- when an <literal>import</literal> statement hides an
- entity which is not exported.
- </para>
- </listitem>
- <listitem>
- <para>
- The LLVM backend was overhauled and rewritten, and
- should hopefully be easier to maintain and work on
- in the future.
- </para>
- </listitem>
- <listitem>
- <para>
- GHC now detects annotation changes during
- recompilation, and correctly persists new
- annotations.
- </para>
- </listitem>
- <listitem>
- <para>
- There is a new set of primops for utilizing
- hardware-based prefetch instructions, to help
- guide the processor's caching decisions.
- </para>
- <para>
- Currently, the primops get translated into
- the associated hardware-supported prefetch
- instructions only with the LLVM backend and
- x86/amd64 backends. On all other backends,
- the prefetch primops are currently erased
- at code generation time.
- </para>
- </listitem>
- </itemizedlist>
- </sect3>
-
- <sect3>
- <title>GHCi</title>
- <itemizedlist>
- <listitem>
- The monomorphism restriction is now turned off
- by default in GHCi.
- </listitem>
-
- <listitem>
- <para>
- GHCi now supports a <literal>prompt2</literal>
- setting, which allows you to customize the
- continuation prompt of multi-line input.
-
- For more information, see <xref linkend="ghci-commands"/>.
- </para>
- </listitem>
- <listitem>
- <para>
- The new <literal>:show paths</literal> command
- shows the current working directory and the
- current search path for Haskell modules.
- </para>
- </listitem>
-
- <listitem>
- <para>
- On Linux, the static GHCi linker now supports weak symbols.
- </para>
- </listitem>
-
- <listitem>
- <para>
- The (static) GHCi linker (except on 64-bit Windows) now runs
- constructors for linked libraries. This means for example
- that C code using
- <literal>__attribute__((constructor))</literal>
- can now properly be loaded into GHCi.
- </para>
-
- <para>
- Note: destructors are not supported.
- </para>
- </listitem>
- </itemizedlist>
- </sect3>
-
- <sect3>
- <title>Template Haskell</title>
- <itemizedlist>
- <listitem>
- <para>
- Template Haskell now supports Roles.
- </para>
- </listitem>
- <listitem>
- <para>
- Template Haskell now supports annotation pragmas.
- </para>
- </listitem>
- <listitem>
- <para>
- Typed Template Haskell expressions are now supported. See
- <xref linkend="template-haskell"/> for more details.
- </para>
- </listitem>
- <listitem>
- <para>
- Template Haskell declarations, types, patterns, and
- <emphasis>untyped</emphasis> expressions are no longer
- typechecked at all. This is a backwards-compatible change
- since it allows strictly more programs to be typed.
- </para>
- </listitem>
- </itemizedlist>
- </sect3>
-
- <sect3>
- <title>Runtime system</title>
- <itemizedlist>
- <listitem>
- <para>
- The RTS linker can now unload object code at
- runtime (when using the GHC API
- <literal>ObjLink</literal> module.) Previously,
- GHC would not unload the old object file, causing
- a gradual memory leak as more objects were loaded
- over time.
- </para>
-
- <para>
- Note that this change in unloading behavior
- <emphasis>only</emphasis> affects statically
- linked binaries, and not dynamic ones.
- </para>
- </listitem>
-
- <listitem>
- <para>
- The performance of <literal>StablePtr</literal>s and
- <literal>StableName</literal>s has been improved.
- </para>
- </listitem>
-
- <listitem>
- <para>
- The default maximum stack size has
- increased. Previously, it defaulted to 8m
- (equivalent to passing <literal>+RTS
- -K8m</literal>). Now, GHC will use up to 80% of the
- <emphasis>physical memory</emphasis> available at
- runtime.
- </para>
- </listitem>
- </itemizedlist>
- </sect3>
-
- <sect3>
- <title>Build system</title>
- <itemizedlist>
- <listitem>
- <para>
- GHC >= 7.4 is now required for bootstrapping.
- </para>
- </listitem>
- <listitem>
- <para>
- GHC can now be built with Clang, and use Clang as
- the preprocessor for Haskell code. Only Clang
- version 3.4 (or Apple LLVM Clang 5.0) or beyond is
- reliably supported.
- </para>
-
- <para>
- Note that normally, GHC uses
- <literal>gcc</literal> as the preprocessor for
- Haskell code, which implements
- <literal>-traditional</literal> behavior. However,
- Clang is not 100% compatible with GCC's
- <literal>-traditional</literal> as it is rather
- implementation-defined, and is more strict.
- </para>
-
- <para>
- As a result of this, when using Clang as the
- preprocessor, some programs which previously used
- <literal>-XCPP</literal> and the preprocessor will
- now fail to compile. Users who wish to retain the
- previous behavior are better off using cpphs.
- </para>
- </listitem>
- </itemizedlist>
- </sect3>
- </sect2>
-
- <sect2>
- <title>Libraries</title>
-
- <sect3>
- <title>array</title>
- <itemizedlist>
- <listitem>
- <para>
- Version number 0.5.0.0 (was 0.4.0.1)
- </para>
- </listitem>
- </itemizedlist>
- </sect3>
-
- <sect3>
- <title>base</title>
- <itemizedlist>
- <listitem>
- <para>
- Version number 4.7.0.0 (was 4.6.0.1)
- </para>
- </listitem>
- <listitem>
- <para>
- The <literal>Control.Category</literal> module now has the
- <literal>PolyKinds</literal> extension enabled, meaning
- that instances of <literal>Category</literal> no longer
- need be of kind <literal>* -> * -> *</literal>.
- </para>
- </listitem>
- <listitem>
- <para>
- There are now <literal>Foldable</literal> and <literal>Traversable</literal>
- instances for <literal>Either a</literal>, <literal>Const r</literal>, and <literal>(,) a</literal>.
- </para>
- </listitem>
- <listitem>
- <para>
- There is now a <literal>Monoid</literal> instance for <literal>Const</literal>.
- </para>
- </listitem>
- <listitem>
- <para>
- There is now a <literal>Data</literal> instance for <literal>Data.Version</literal>.
- </para>
- </listitem>
- <listitem>
- <para>
- There are now <literal>Data</literal>,
- <literal>Typeable</literal>, and
- <literal>Generic</literal> instances for the types
- in <literal>Data.Monoid</literal> and
- <literal>Control.Applicative</literal>.
- </para>
- </listitem>
- <listitem>
- <para>
- There are now <literal>Num</literal> instances for <literal>Data.Monoid.Product</literal> and <literal>Data.Monoid.Sum</literal>.
- </para>
- </listitem>
- <listitem>
- <para>
- There are now <literal>Eq</literal>, <literal>Ord</literal>, <literal>Show</literal> and <literal>Read</literal> instances for <literal>ZipList</literal>.
- </para>
- </listitem>
- <listitem>
- <para>
- There are now <literal>Eq</literal>, <literal>Ord</literal>, <literal>Show</literal> and <literal>Read</literal> instances for <literal>Down</literal>.
- </para>
- </listitem>
- <listitem>
- <para>
- There are now <literal>Eq</literal>, <literal>Ord</literal>, <literal>Show</literal>, <literal>Read</literal> and <literal>Generic</literal> instances for types in GHC.Generics (<literal>U1</literal>, <literal>Par1</literal>, <literal>Rec1</literal>, <literal>K1</literal>, <literal>M1</literal>, <literal>(:+:)</literal>, <literal>(:*:)</literal>, <literal>(:.:)</literal>).
- </para>
- </listitem>
- <listitem>
- <para>
- A zero-width unboxed poly-kinded <literal>Proxy#</literal>
- was added to <literal>GHC.Prim</literal>. It can be used to make it so
- that there is no operational overhead for passing around proxy
- arguments to model type application.
- </para> - </listitem> - <listitem> - <para> - <literal>Control.Concurrent.MVar</literal> has a new - implementation of <literal>readMVar</literal>, which - fixes a long-standing bug where - <literal>readMVar</literal> is only atomic if there - are no other threads running - <literal>putMVar</literal>. - <literal>readMVar</literal> now is atomic, and is - guaranteed to return the value from the first - <literal>putMVar</literal>. There is also a new <literal>tryReadMVar</literal> - which is a non-blocking version. - </para> - </listitem> - <listitem> - <para> - There are now byte endian-swapping primitives - available in <literal>Data.Word</literal>, which - use optimized machine instructions when available. - </para> - </listitem> - <listitem> - <para> - <literal>Data.Bool</literal> now exports - <literal>bool :: a -> a -> Bool -> a</literal>, analogously - to <literal>maybe</literal> and <literal>either</literal> - in their respective modules. - </para> - </listitem> - <listitem> - <para> - Rewrote portions of <literal>Text.Printf</literal>, and - made changes to <literal>Numeric</literal> (added - <literal>Numeric.showFFloatAlt</literal> and - <literal>Numeric.showGFloatAlt</literal>) and - <literal>GHC.Float</literal> (added - <literal>formatRealFloatAlt</literal>) to support it. - The rewritten version is extensible to user types, adds a - "generic" format specifier "<literal>%v</literal>", - extends the <literal>printf</literal> spec - to support much of C's <literal>printf(3)</literal> - functionality, and fixes the spurious warnings about - using <literal>Text.Printf.printf</literal> at - <literal>(IO a)</literal> while ignoring the return value. - These changes were contributed by Bart Massey. - </para> - </listitem> - <listitem> - <para> - The minimal complete definitions for all - type-classes with cyclic default implementations - have been explicitly annotated with the new - <literal>{-# MINIMAL #-}</literal> pragma. - </para> - </listitem> - <listitem> - <para> - <literal>Control.Applicative.WrappedMonad</literal>, - which can be used to convert a <literal>Monad</literal> - to an <literal>Applicative</literal>, has now - a <literal>Monad m => Monad (WrappedMonad m)</literal> - instance. - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>bin-package-db</title> - <itemizedlist> - <listitem> - <para> - This is an internal package, and should not be used. 
- </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>binary</title> - <itemizedlist> - <listitem> - <para> - Version number 0.7.1.0 (was 0.5.1.1) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>bytestring</title> - <itemizedlist> - <listitem> - <para> - Version number 0.10.4.0 (was 0.10.0.0) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>Cabal</title> - <itemizedlist> - <listitem> - <para> - Version number 1.18.1.3 (was 1.16.0) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>containers</title> - <itemizedlist> - <listitem> - <para> - Version number 0.5.4.0 (was 0.5.0.0) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>deepseq</title> - <itemizedlist> - <listitem> - <para> - Version number 1.3.0.2 (was 1.3.0.1) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>directory</title> - <itemizedlist> - <listitem> - <para> - Version number 1.2.0.2 (was 1.2.0.1) - </para> - </listitem> - <listitem> - <para> - The function <literal>findExecutables</literal> - now correctly checks to see if the execute bit is - set on Linux, rather than just looking in - <literal>$PATH</literal>. - </para> - </listitem> - <listitem> - <para> - There are several new functions for finding files, - including <literal>findFiles</literal> and - <literal>findFilesWith</literal>, which allow you - to search for a file given a set of filepaths, and - run a predicate over them. - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>filepath</title> - <itemizedlist> - <listitem> - <para> - Version number 1.3.0.2 (was 1.3.0.1) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>ghc-prim</title> - <itemizedlist> - <listitem> - <para> - Version number 0.3.1.0 (was 0.3.0.0) - </para> - </listitem> - <listitem> - <para> - The type-classes <literal>Eq</literal> and - <literal>Ord</literal> have been annotated with - the new <literal>{-# MINIMAL #-}</literal> - pragma. - </para> - </listitem> - <listitem> - <para> - There is a new type exposed by - <literal>GHC.Types</literal>, called - <literal>SPEC</literal>, which can be used to - inform GHC to perform call-pattern specialisation - extremely aggressively. See <xref - linkend="options-optimise"/> for more details - concerning <literal>-fspec-constr</literal>. 
- </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>haskell98</title> - <itemizedlist> - <listitem> - <para> - Version number 2.0.0.3 (was 2.0.0.2) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>haskell2010</title> - <itemizedlist> - <listitem> - <para> - Version number 1.1.1.1 (was 1.1.1.0) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>hoopl</title> - <itemizedlist> - <listitem> - <para> - Version number 3.10.0.0 (was 3.9.0.0) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>hpc</title> - <itemizedlist> - <listitem> - <para> - Version number 0.6.0.1 (was 0.6.0.0) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>integer-gmp</title> - <itemizedlist> - <listitem> - <para> - Version number 0.5.1.0 (was 0.5.0.0) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>old-locale</title> - <itemizedlist> - <listitem> - <para> - Version number 1.0.0.6 (was 1.0.0.5) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>old-time</title> - <itemizedlist> - <listitem> - <para> - Version number 1.1.0.2 (was 1.1.0.1) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>process</title> - <itemizedlist> - <listitem> - <para> - Version number 1.2.0.0 (was 1.1.0.2) - </para> - </listitem> - <listitem> - <para> - Several bugs have been fixed, including deadlocks - in <literal>readProcess</literal> and - <literal>readProcessWithExitCode</literal>. - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>template-haskell</title> - <itemizedlist> - <listitem> - <para> - Version number 2.9.0.0 (was 2.8.0.0) - </para> - </listitem> - <listitem> - <para> - Typed Template Haskell expressions are now - supported. See <xref linkend="template-haskell"/> - for more details. - </para> - </listitem> - <listitem> - <para> - There is now support for roles. - </para> - </listitem> - <listitem> - <para> - There is now support for annotation pragmas. - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>time</title> - <itemizedlist> - <listitem> - <para> - Version number 1.4.1 (was 1.4.1) - </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>unix</title> - <itemizedlist> - <listitem> - <para> - Version number 2.7.0.0 (was 2.6.0.0) - </para> - </listitem> - <listitem> - <para> - A crash in <literal>getGroupEntryForID</literal> - (and related functions like - <literal>getUserEntryForID</literal> and - <literal>getUserEntryForName</literal>) in - multi-threaded applications has been fixed. - </para> - </listitem> - <listitem> - <para> - The functions <literal>getGroupEntryForID</literal> - and <literal>getUserEntryForID</literal> now fail - with a <literal>isDoesNotExist</literal> error when - the specified ID cannot be found. 
- </para> - </listitem> - </itemizedlist> - </sect3> - - <sect3> - <title>Win32</title> - <itemizedlist> - <listitem> - <para> - Version number 2.3.0.1 (was 2.3.0.0) - </para> - </listitem> - </itemizedlist> - </sect3> - </sect2> - - <sect2> - <title>Known bugs</title> - <itemizedlist> - <listitem> - <para> - On OS X Mavericks, when using Clang as the C - preprocessor, Haddock has a bug that causes it to fail - to generate documentation, with an error similar to - the following: - -<programlisting> -<no location info>: - module 'xhtml-3000.2.1:Main' is defined in multiple files: dist-bindist/build/tmp-72252/Text/XHtml.hs - dist-bindist/build/tmp-72252/Text/XHtml/Frameset.hs - dist-bindist/build/tmp-72252/Text/XHtml/Strict.hs - dist-bindist/build/tmp-72252/Text/XHtml/Transitional.hs -... -</programlisting> - - </para> - <para> - This only affects certain packages. This is due to a - bad interaction with Clang, which we hope to resolve - soon. - </para> - <para> - Note that when using <literal>cabal-install</literal>, - this only effects the package documentation, not - installation or building. - </para> - </listitem> - <listitem> - <para> - On OS X 10.7 and beyond, with default build settings, - the runtime system currently suffers from a fairly - large (approx. 30%) performance regression in the - parallel garbage collector when using - <literal>-threaded</literal>. - </para> - <para> - This is due to the fact that the OS X 10.7+ toolchain - does not (by default) support register variables, or a - fast <literal>__thread</literal> implementation. Note - that this can be worked around by building GHC using - GCC instead on OS X platforms, but the binary - distribution then requires GCC later. - </para> - </listitem> - - <listitem> - <para> - On Windows, <literal>-dynamic-too</literal> is unsupported. - </para> - </listitem> - - <listitem> - <para> - On Windows, we currently don't ship dynamic libraries - or use a dynamic GHCi, unlike Linux, FreeBSD or OS X. - </para> - </listitem> - </itemizedlist> - </sect2> -</sect1> diff --git a/docs/users_guide/codegens.xml b/docs/users_guide/codegens.xml index 2eb9408c6c..d2a805a3ee 100644 --- a/docs/users_guide/codegens.xml +++ b/docs/users_guide/codegens.xml @@ -38,7 +38,7 @@ <para>You must install and have LLVM available on your PATH for the LLVM code generator to work. Specifically GHC needs to be able to call the - <command>opt</command>and <command>llc</command> tools. Secondly, if you + <command>opt</command> and <command>llc</command> tools. Secondly, if you are running Mac OS X with LLVM 3.0 or greater then you also need the <ulink url="http://clang.llvm.org">Clang c compiler</ulink> compiler available on your PATH. diff --git a/docs/users_guide/external_core.xml b/docs/users_guide/external_core.xml deleted file mode 100644 index e4354410ef..0000000000 --- a/docs/users_guide/external_core.xml +++ /dev/null @@ -1,1804 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> - -<!-- -This document is a semi-automatic conversion of docs/ext-core/core.tex to DocBook using - -1. `htlatex` to convert LaTeX to HTML -2. `pandoc` to convert HTML to DocBook -3. extensive manual work by James H. Fisher (jameshfisher@gmail.com) ---> - -<!-- -TODO: - -* Replace "java" programlisting with "ghccore" -("ghccore" is not recognized by highlighters, -causing some generators to fail). - -* Complete bibliography entries with journal titles; -I am unsure of the proper DocBook elements. - -* Integrate this file with the rest of the Users' Guide. 
---> - - -<chapter id="an-external-representation-for-the-ghc-core-language-for-ghc-6.10"> - <title>An External Representation for the GHC Core Language (For GHC 6.10)</title> - - <para>Andrew Tolmach, Tim Chevalier ({apt,tjc}@cs.pdx.edu) and The GHC Team</para> - - <para>This chapter provides a precise definition for the GHC Core - language, so that it can be used to communicate between GHC and new - stand-alone compilation tools such as back-ends or - optimizers.<footnote> - <para>This is a draft document, which attempts - to describe GHC’s current behavior as precisely as possible. Working - notes scattered throughout indicate areas where further work is - needed. Constructive comments are very welcome, both on the - presentation, and on ways in which GHC could be improved in order to - simplify the Core story.</para> - - <para>Support for generating external Core (post-optimization) was - originally introduced in GHC 5.02. The definition of external Core in - this document reflects the version of external Core generated by the - HEAD (unstable) branch of GHC as of May 3, 2008 (version 6.9), using - the compiler flag <code>-fext-core</code>. We expect that GHC 6.10 will be - consistent with this definition.</para> - </footnote> - The definition includes a formal grammar and an informal semantics. - An executable typechecker and interpreter (in Haskell), which - formally embody the static and dynamic semantics, are available - separately.</para> - - <section id="introduction"> - <title>Introduction</title> - - <para>The Glasgow Haskell Compiler (GHC) uses an intermediate language, - called <quote>Core,</quote> as its internal program representation within the - compiler’s simplification phase. Core resembles a subset of - Haskell, but with explicit type annotations in the style of the - polymorphic lambda calculus (F<subscript>ω</subscript>).</para> - - <para>GHC’s front-end translates full Haskell 98 (plus some extensions) - into Core. The GHC optimizer then repeatedly transforms Core - programs while preserving their meaning. A <quote>Core Lint</quote> pass in GHC - typechecks Core in between transformation passes (at least when - the user enables linting by setting a compiler flag), verifying - that transformations preserve type-correctness. Finally, GHC’s - back-end translates Core into STG-machine code <citation>stg-machine</citation> and then into C - or native code.</para> - - <para>Two existing papers discuss the original rationale for the design - and use of Core <citation>ghc-inliner,comp-by-trans-scp</citation>, although the (two different) idealized - versions of Core described therein differ in significant ways from - the actual Core language in current GHC. In particular, with the - advent of GHC support for generalized algebraic datatypes (GADTs) - <citation>gadts</citation> Core was extended beyond its previous - F<subscript>ω</subscript>-style incarnation to support type - equality constraints and safe coercions, and is now based on a - system known as F<subscript>C</subscript> <citation>system-fc</citation>.</para> - - <para>Researchers interested in writing just <emphasis>part</emphasis> of a Haskell compiler, - such as a new back-end or a new optimizer pass, might like to use - GHC to provide the other parts of the compiler. For example, they - might like to use GHC’s front-end to parse, desugar, and - type-check source Haskell, then feeding the resulting code to - their own back-end tool. 
As another example, they might like to - use Core as the target language for a front-end compiler of their - own design, feeding externally synthesized Core into GHC in order - to take advantage of GHC’s optimizer, code generator, and run-time - system. Without external Core, there are two ways for compiler - writers to do this: they can link their code into the GHC - executable, which is an arduous process, or they can use the GHC - API <citation>ghc-api</citation> to do the same task more cleanly. Both ways require new - code to be written in Haskell.</para> - - <para>We present a precisely specified external format for Core files. - The external format is text-based and human-readable, to promote - interoperability and ease of use. We hope this format will make it - easier for external developers to use GHC in a modular way.</para> - - <para>It has long been true that GHC prints an ad-hoc textual - representation of Core if you set certain compiler flags. But this - representation is intended to be read by people who are debugging - the compiler, not by other programs. Making Core into a - machine-readable, bi-directional communication format requires: - - <orderedlist> - <listitem> - precisely specifying the external format of Core; - </listitem> - <listitem> - modifying GHC to generate external Core files - (post-simplification; as always, users can control the exact - transformations GHC does with command-line flags); - </listitem> - <listitem> - modifying GHC to accept external Core files in place of - Haskell source files (users will also be able to control what - GHC does to those files with command-line flags). - </listitem> - </orderedlist> - </para> - - <para>The first two facilities will let developers couple GHC’s - front-end (parser, type-checker, desugarer), and optionally its - optimizer, with new back-end tools. The last facility will let - developers write new Core-to-Core transformations as an external - tool and integrate them into GHC. It will also allow new - front-ends to generate Core that can be fed into GHC’s optimizer - or back-end.</para> - - <para>However, because there are many (undocumented) idiosyncracies in - the way GHC produces Core from source Haskell, it will be hard for - an external tool to produce Core that can be integrated with - GHC-produced Core (e.g., for the Prelude), and we don’t aim to - support this. Indeed, for the time being, we aim to support only - the first two facilities and not the third: we define and - implement Core as an external format that GHC can use to - communicate with external back-end tools, and defer the larger - task of extending GHC to support reading this external format back - in.</para> - - <para>This document addresses the first requirement, a formal Core - definition, by proposing a formal grammar for an - <link linkend="external-grammar-of-core">external representation of Core</link>, - and an <link linkend="informal-semantics">informal semantics</link>.</para> - - <para>GHC supports many type system extensions; the External Core - printer built into GHC only supports some of them. However, - External Core should be capable of representing any Haskell 98 - program, and may be able to represent programs that require - certain type system extensions as well. 
If a program uses - unsupported features, GHC may fail to compile it to Core when the - -fext-core flag is set, or GHC may successfully compile it to - Core, but the external tools will not be able to typecheck or - interpret it.</para> - - <para>Formal static and dynamic semantics in the form of an executable - typechecker and interpreter are available separately in the GHC - source tree - <footnote><ulink url="http://git.haskell.org/ghc.git/tree">http://git.haskell.org/ghc.git</ulink></footnote> - under <code>utils/ext-core</code>.</para> - - </section> - <section id="external-grammar-of-core"> - <title>External Grammar of Core</title> - - <para>In designing the external grammar, we have tried to strike a - balance among a number of competing goals, including easy - parseability by machines, easy readability by humans, and adequate - structural simplicity to allow straightforward presentations of - the semantics. Thus, we had to make some compromises. - Specifically:</para> - - <itemizedlist> - <listitem>In order to avoid explosion of parentheses, we support - standard precedences and short-cuts for expressions, types, - and kinds. Thus we had to introduce multiple non-terminals for - each of these syntactic categories, and as a result, the - concrete grammar is longer and more complex than the - underlying abstract syntax.</listitem> - - <listitem>On the other hand, we have kept the grammar simpler by - avoiding special syntax for tuple types and terms. Tuples - (both boxed and unboxed) are treated as ordinary constructors.</listitem> - - <listitem>All type abstractions and applications are given in full, even - though some of them (e.g., for tuples) could be reconstructed; - this means a parser for Core does not have to reconstruct - types.<footnote> - These choices are certainly debatable. In - particular, keeping type applications on tuples and case arms - considerably increases the size of Core files and makes them less - human-readable, though it allows a Core parser to be simpler. 
- </footnote></listitem> - - <listitem>The syntax of identifiers is heavily restricted (to just - alphanumerics and underscores); this again makes Core easier - to parse but harder to read.</listitem> - </itemizedlist> - - <para>We use the following notational conventions for syntax: - - <informaltable frame="none"> - <tgroup cols='2' align='left' colsep="0" rowsep="0"> - <tbody> - <row> - <entry>[ pat ]</entry> - <entry>optional</entry> - </row> - - <row> - <entry>{ pat }</entry> - <entry>zero or more repetitions</entry> - </row> - - <row> - <entry> - { pat }<superscript>+</superscript> - </entry> - <entry>one or more repetitions</entry> - </row> - - <row> - <entry> - pat<subscript>1</subscript> ∣ pat<subscript>2</subscript> - </entry> - <entry>choice</entry> - </row> - - <row> - <entry> - <code>fibonacci</code> - </entry> - <entry>terminal syntax in typewriter font</entry> - </row> - </tbody> - </tgroup> - </informaltable> - </para> - - <informaltable frame="none" colsep="0" rowsep="0"> - <tgroup cols='5'> - <colspec colname="cat" align="left" colwidth="3*" /> - <colspec colname="lhs" align="right" colwidth="2*" /> - <colspec align="center" colwidth="*" /> - <colspec colname="rhs" align="left" colwidth="10*" /> - <colspec colname="name" align="right" colwidth="6*" /> - <tbody> - <row rowsep="1"> - <entry>Module</entry> - <entry>module</entry> - <entry>→</entry> - <entry> - <code>%module</code> mident { tdef ; }{ vdefg ; } - </entry> - <entry></entry> - </row> - - <row> - <entry morerows="1" valign="top">Type defn.</entry> - <entry morerows="1" valign="top">tdef</entry> - <entry>→</entry> - <entry> - <code>%data</code> qtycon { tbind } <code>= {</code> [ cdef {<code>;</code> cdef } ] <code>}</code> - </entry> - <entry>algebraic type</entry> - </row> - <row rowsep="1"> - <entry>∣</entry> - <entry> - <code>%newtype</code> qtycon qtycon { tbind } <code>=</code> ty - </entry> - <entry>newtype</entry> - </row> - - <row rowsep="1"> - <entry>Constr. 
defn.</entry> - <entry>cdef</entry> - <entry>→</entry> - <entry> - qdcon { <code>@</code> tbind }{ aty }<superscript>+</superscript> - </entry> - </row> - - <row> - <entry morerows="2" valign="top">Value defn.</entry> - <entry morerows="1" valign="top">vdefg</entry> - <entry>→</entry> - <entry><code>%rec {</code> vdef { <code>;</code> vdef } <code>}</code></entry> - <entry>recursive</entry> - </row> - - <row> - <entry>∣</entry> - <entry>vdef</entry> - <entry>non-recursive</entry> - </row> - - <row rowsep="1"> - <entry>vdef</entry> - <entry>→</entry> - <entry>qvar <code>::</code> ty <code>=</code> exp</entry> - <entry></entry> - </row> - - <row> - <entry morerows="3" valign="top">Atomic expr.</entry> - <entry morerows="3" valign="top">aexp</entry> - <entry>→</entry> - <entry>qvar</entry> - <entry>variable</entry> - </row> - - <row> - <entry>∣</entry> - <entry>qdcon</entry> - <entry>data constructor</entry> - </row> - - <row> - <entry>∣</entry> - <entry>lit</entry> - <entry>literal</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry><code>(</code> exp <code>)</code></entry> - <entry>nested expr.</entry> - </row> - - <row> - <entry morerows="9" valign="top">Expression</entry> - <entry morerows="9" valign="top">exp</entry> - <entry>→</entry> - <entry>aexp</entry> - <entry>atomic expresion</entry> - </row> - - <row> - <entry>∣</entry> - <entry>aexp { arg }<superscript>+</superscript></entry> - <entry>application</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>\</code> { binder }<superscript>+</superscript> &arw; exp</entry> - <entry>abstraction</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%let</code> vdefg <code>%in</code> exp</entry> - <entry>local definition</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%case (</code> aty <code>)</code> exp <code>%of</code> vbind <code>{</code> alt { <code>;</code> alt } <code>}</code></entry> - <entry>case expression</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%cast</code> exp aty</entry> - <entry>type coercion</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%note</code> " { char } " exp</entry> - <entry>expression note</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%external ccall "</code> { char } <code>"</code> aty</entry> - <entry>external reference</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%dynexternal ccall</code> aty</entry> - <entry>external reference (dynamic)</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry><code>%label "</code> { char } <code>"</code></entry> - <entry>external label</entry> - </row> - - <row> - <entry morerows="1" valign="top">Argument</entry> - <entry morerows="1" valign="top">arg</entry> - <entry>→</entry> - <entry><code>@</code> aty</entry> - <entry>type argument</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry>aexp</entry> - <entry>value argument</entry> - </row> - - <row> - <entry morerows="2" valign="top">Case alt.</entry> - <entry morerows="2" valign="top">alt</entry> - <entry>→</entry> - <entry>qdcon { <code>@</code> tbind }{ vbind } <code>&arw;</code> exp</entry> - <entry>constructor alternative</entry> - </row> - - <row> - <entry>∣</entry> - <entry>lit <code>&arw;</code> exp</entry> - <entry>literal alternative</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry><code>%_ &arw;</code> exp</entry> - <entry>default alternative</entry> - </row> - - <row> - <entry morerows="1" valign="top">Binder</entry> - <entry morerows="1" valign="top">binder</entry> - 
<entry>→</entry> - <entry><code>@</code> tbind</entry> - <entry>type binder</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry>vbind</entry> - <entry>value binder</entry> - </row> - - <row> - <entry morerows="1" valign="top">Type binder</entry> - <entry morerows="1" valign="top">tbind</entry> - <entry>→</entry> - <entry>tyvar</entry> - <entry>implicitly of kind *</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry><code>(</code> tyvar <code>::</code> kind <code>)</code></entry> - <entry>explicitly kinded</entry> - </row> - - <row rowsep="1"> - <entry>Value binder</entry> - <entry>vbind</entry> - <entry>→</entry> - <entry><code>(</code> var <code>::</code> ty <code>)</code></entry> - <entry></entry> - </row> - - <row> - <entry morerows="3" valign="top">Literal</entry> - <entry morerows="3" valign="top">lit</entry> - <entry>→</entry> - <entry><code>(</code> [<code>-</code>] { digit }<superscript>+</superscript> <code>::</code> ty <code>)</code></entry> - <entry>integer</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>(</code> [<code>-</code>] { digit }<superscript>+</superscript> <code>%</code> { digit }<superscript>+</superscript> <code>::</code> ty <code>)</code></entry> - <entry>rational</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>( '</code> char <code>' ::</code> ty <code>)</code></entry> - <entry>character</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry><code>( "</code> { char } <code>" ::</code> ty <code>)</code></entry> - <entry>string</entry> - </row> - - <row> - <entry morerows="2" valign="top">Character</entry> - <entry morerows="1" valign="top">char</entry> - <entry>→</entry> - <entry namest="rhs" nameend="name"><emphasis>any ASCII character in range 0x20-0x7E except 0x22,0x27,0x5c</emphasis></entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>\x</code> hex hex</entry> - <entry>ASCII code escape sequence</entry> - </row> - - <row rowsep="1"> - <entry>hex</entry> - <entry>→</entry> - <entry>0∣…∣9 ∣a ∣…∣f</entry> - <entry></entry> - </row> - - <row> - <entry morerows="2" valign="top">Atomic type</entry> - <entry morerows="2" valign="top">aty</entry> - <entry>→</entry> - <entry>tyvar</entry> - <entry>type variable</entry> - </row> - - <row> - <entry>∣</entry> - <entry>qtycon</entry> - <entry>type constructor</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry><code>(</code> ty <code>)</code></entry> - <entry>nested type</entry> - </row> - - <row> - <entry morerows="7" valign="top">Basic type</entry> - <entry morerows="7" valign="top">bty</entry> - <entry>→</entry> - <entry>aty</entry> - <entry>atomic type</entry> - </row> - - <row> - <entry>∣</entry> - <entry>bty aty</entry> - <entry>type application</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%trans</code> aty aty</entry> - <entry>transitive coercion</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%sym</code> aty</entry> - <entry>symmetric coercion</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%unsafe</code> aty aty</entry> - <entry>unsafe coercion</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%left</code> aty</entry> - <entry>left coercion</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%right</code> aty</entry> - <entry>right coercion</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry><code>%inst</code> aty aty</entry> - <entry>instantiation coercion</entry> - </row> - - <row> - <entry morerows="2" valign="top">Type</entry> - <entry morerows="2" 
valign="top">ty</entry> - <entry>→</entry> - <entry>bty</entry> - <entry>basic type</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>%forall</code> { tbind }<superscript>+</superscript> <code>.</code> ty</entry> - <entry>type abstraction</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry>bty <code>&arw;</code> ty</entry> - <entry>arrow type construction</entry> - </row> - - <row> - <entry morerows="4" valign="top">Atomic kind</entry> - <entry morerows="4" valign="top">akind</entry> - <entry>→</entry> - <entry><code>*</code></entry> - <entry>lifted kind</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>#</code></entry> - <entry>unlifted kind</entry> - </row> - - <row> - <entry>∣</entry> - <entry><code>?</code></entry> - <entry>open kind</entry> - </row> - - <row> - <entry>∣</entry> - <entry>bty <code>:=:</code> bty</entry> - <entry>equality kind</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry><code>(</code> kind <code>)</code></entry> - <entry>nested kind</entry> - </row> - - <row> - <entry morerows="1" valign="top">Kind</entry> - <entry morerows="1" valign="top">kind</entry> - <entry>→</entry> - <entry>akind</entry> - <entry>atomic kind</entry> - </row> - - <row rowsep="1"> - <entry>∣</entry> - <entry>akind <code>&arw;</code> kind</entry> - <entry>arrow kind</entry> - </row> - - <row> - <entry morerows="7" valign="top">Identifier</entry> - <entry>mident</entry> - <entry>→</entry> - <entry>pname <code>:</code> uname</entry> - <entry>module</entry> - </row> - - <row> - <entry>tycon</entry> - <entry>→</entry> - <entry>uname</entry> - <entry>type constr.</entry> - </row> - - <row> - <entry>qtycon</entry> - <entry>→</entry> - <entry>mident <code>.</code> tycon</entry> - <entry>qualified type constr.</entry> - </row> - - <row> - <entry>tyvar</entry> - <entry>→</entry> - <entry>lname</entry> - <entry>type variable</entry> - </row> - - <row> - <entry>dcon</entry> - <entry>→</entry> - <entry>uname</entry> - <entry>data constr.</entry> - </row> - - <row> - <entry>qdcon</entry> - <entry>→</entry> - <entry>mident <code>.</code> dcon</entry> - <entry>qualified data constr.</entry> - </row> - - <row> - <entry>var</entry> - <entry>→</entry> - <entry>lname</entry> - <entry>variable</entry> - </row> - - <row rowsep="1"> - <entry>qvar</entry> - <entry>→</entry> - <entry>[ mident <code>.</code> ] var</entry> - <entry>optionally qualified variable</entry> - </row> - - <row> - <entry morerows="6" valign="top">Name</entry> - <entry>lname</entry> - <entry>→</entry> - <entry>lower { namechar }</entry> - <entry></entry> - </row> - - <row> - <entry>uname</entry> - <entry>→</entry> - <entry>upper { namechar }</entry> - <entry></entry> - </row> - - <row> - <entry>pname</entry> - <entry>→</entry> - <entry>{ namechar }<superscript>+</superscript></entry> - <entry></entry> - </row> - - <row> - <entry>namechar</entry> - <entry>→</entry> - <entry>lower ∣ upper ∣ digit</entry> - <entry></entry> - </row> - - <row> - <entry>lower</entry> - <entry>→</entry> - <entry><code>a</code> ∣ <code>b</code> ∣ … ∣ <code>z</code> ∣ <code>_</code></entry> - <entry></entry> - </row> - - <row> - <entry>upper</entry> - <entry>→</entry> - <entry><code>A</code> ∣ <code>B</code> ∣ … ∣ <code>Z</code></entry> - <entry></entry> - </row> - - <row> - <entry>digit</entry> - <entry>→</entry> - <entry><code>0</code> ∣ <code>1</code> ∣ … ∣ <code>9</code></entry> - <entry></entry> - </row> - </tbody> - </tgroup> - </informaltable> - </section> - - <section id="informal-semantics"> - <title>Informal 
Semantics</title>
-
- <para>At the term level, Core resembles an explicitly-typed polymorphic
- lambda calculus (F<subscript>ω</subscript>), with the addition of
- local <code>let</code> bindings, algebraic type definitions, constructors, and
- <code>case</code> expressions, and primitive types, literals and operators. Its
- type system is richer than that of System F, supporting explicit
- type equality coercions and type functions.<citation>system-fc</citation></para>
-
- <para>In this section we concentrate on the less obvious points about
- Core.</para>
-
- <section id="program-organization-and-modules">
- <title>Program Organization and Modules</title>
-
- <para>Core programs are organized into <emphasis>modules</emphasis>, corresponding directly
- to source-level Haskell modules. Each module has an identifying
- name <emphasis>mident</emphasis>. A module identifier consists of a <emphasis>package name</emphasis>
- followed by a module name, which may be hierarchical: for
- example, <code>base:GHC.Base</code> is the module identifier for GHC’s Base
- module. Its name is <code>Base</code>, and it lives in the GHC hierarchy
- within the <code>base</code> package. Section 5.8 of the GHC users’ guide
- explains package names <citation>ghc-user-guide</citation>. In particular, note that a Core
- program may contain multiple modules with the same (possibly
- hierarchical) module name that differ in their package names. In
- some of the code examples that follow, we will omit package
- names and possibly full hierarchical module names from
- identifiers for brevity, but be aware that they are always
- required.<footnote>
- A possible improvement to the Core syntax
- would be to add explicit import lists to Core modules, which could be
- used to specify abbreviations for long qualified names. This would make
- the code more human-readable.
- </footnote></para>
-
- <para>Each module may contain the following kinds of top-level
- declarations:
-
- <itemizedlist>
- <listitem>
- Algebraic data type declarations, each defining a type
- constructor and one or more data constructors;
- </listitem>
- <listitem>
- Newtype declarations, corresponding to Haskell <code>newtype</code>
- declarations, each defining a type constructor and a
- coercion name; and
- </listitem>
- <listitem>
- Value declarations, defining the types and values of
- top-level variables.
- </listitem>
- </itemizedlist>
- </para>
-
- <para>No type constructor, data constructor, or top-level value may be
- declared more than once within a given module. All the type
- declarations are (potentially) mutually recursive. Value
- declarations must be in dependency order, with explicit grouping
- of potentially mutually recursive declarations.</para>
-
- <para>Identifiers defined in top-level declarations may be <emphasis>external</emphasis> or
- <emphasis>internal</emphasis>. External identifiers can be referenced from any other
- module in the program, using conventional dot notation (e.g.,
- <code>base:GHC.Base.Bool</code>, <code>base:GHC.Base.True</code>). Internal identifiers
- are visible only within the defining module. All type and data
- constructors are external, and are always defined and referenced
- using fully qualified names (with dots).</para>
-
- <para>A top-level value is external if it is defined and referenced
- using a fully qualified name with a dot (e.g., <code>main:MyModule.foo = ...</code>);
- otherwise, it is internal (e.g., <code>bar = ...</code>).
Note that - Core’s notion of an external identifier does not necessarily - coincide with that of <quote>exported</quote> identifier in a Haskell source - module. An identifier can be an external identifier in Core, but - not be exported by the original Haskell source - module.<footnote> - Two examples of such identifiers are: data - constructors, and values that potentially appear in an unfolding. For an - example of the latter, consider <code>Main.foo = ... Main.bar ...</code>, where - <code>Main.foo</code> is inlineable. Since <code>bar</code> appears in <code>foo</code>’s unfolding, it is - defined and referenced with an external name, even if <code>bar</code> was not - exported by the original source module. - </footnote> - However, if an identifier was exported by the Haskell source - module, it will appear as an external name in Core.</para> - - <para>Core modules have no explicit import or export lists. Modules - may be mutually recursive. Note that because of the latter fact, - GHC currently prints out the top-level bindings for every module - as a single recursive group, in order to avoid keeping track of - dependencies between top-level values within a module. An - external Core tool could reconstruct dependencies later, of - course.</para> - - <para>There is also an implicitly-defined module <code>ghc-prim:GHC.Prim</code>, - which exports the <quote>built-in</quote> types and values that must be - provided by any implementation of Core (including GHC). Details - of this module are in the <link linkend="primitive-module">Primitive Module section</link>.</para> - - <para>A Core <emphasis>program</emphasis> is a collection of distinctly-named modules that - includes a module called main:Main having an exported value - called <code>main:ZCMain.main</code> of type <code>base:GHC.IOBase.IO a</code> (for some - type <code>a</code>). (Note that the strangely named wrapper for <code>main</code> is the - one exception to the rule that qualified names defined within a - module <code>m</code> must have module name <code>m</code>.)</para> - - <para>Many Core programs will contain library modules, such as - <code>base:GHC.Base</code>, which implement parts of the Haskell standard - library. In principle, these modules are ordinary Haskell - modules, with no special status. In practice, the requirement on - the type of <code>main:Main.main</code> implies that every program will - contain a large subset of the standard library modules.</para> - - </section> - <section id="namespaces"> - <title>Namespaces</title> - - <para>There are five distinct namespaces: - <orderedlist> - <listitem>module identifiers (<code>mident</code>),</listitem> - <listitem>type constructors (<code>tycon</code>),</listitem> - <listitem>type variables (<code>tyvar</code>),</listitem> - <listitem>data constructors (<code>dcon</code>),</listitem> - <listitem>term variables (<code>var</code>).</listitem> - </orderedlist> - </para> - - <para>Spaces (1), (2+3), and (4+5) can be distinguished from each - other by context. To distinguish (2) from (3) and (4) from (5), - we require that data and type constructors begin with an - upper-case character, and that term and type variables begin - with a lower-case character.</para> - - <para>Primitive types and operators are not syntactically - distinguished.</para> - - <para>Primitive <emphasis>coercion</emphasis> operators, of which there are six, <emphasis>are</emphasis> - syntactically distinguished in the grammar. 
This is because - these coercions must be fully applied, and because - distinguishing their applications in the syntax makes - typechecking easier.</para> - - <para>A given variable (type or term) may have multiple definitions - within a module. However, definitions of term variables never - <quote>shadow</quote> one another: the scope of the definition of a given - variable never contains a redefinition of the same variable. - Type variables may be shadowed. Thus, if a term variable has - multiple definitions within a module, all those definitions must - be local (let-bound). The only exception to this rule is that - (necessarily closed) types labelling <code>%external</code> expressions may - contain <code>tyvar</code> bindings that shadow outer bindings.</para> - - <para>Core generated by GHC makes heavy use of encoded names, in which - the characters <code>Z</code> and <code>z</code> are used to introduce escape sequences - for non-alphabetic characters such as dollar sign <code>$</code> (<code>zd</code>), hash <code>#</code> - (<code>zh</code>), plus <code>+</code> (<code>zp</code>), etc. This is the same encoding used in <code>.hi</code> - files and in the back-end of GHC itself, except that we - sometimes change an initial <code>z</code> to <code>Z</code>, or vice-versa, in order to - maintain case distinctions.</para> - - <para>Finally, note that hierarchical module names are z-encoded in - Core: for example, <code>base:GHC.Base.foo</code> is rendered as - <code>base:GHCziBase.foo</code>. A parser may reconstruct the module - hierarchy, or regard <code>GHCziBase</code> as a flat name.</para> - - </section> - <section id="types-and-kinds"> - <title>Types and Kinds</title> - - <para>In Core, all type abstractions and applications are explicit. - This makes it easy to typecheck any (closed) fragment of Core - code. A full executable typechecker is available separately.</para> - - <section id="types"> - <title>Types</title> - - <para>Types are described by type expressions, which are built from - named type constructors and type variables using type - application and universal quantification. Each type - constructor has a fixed arity ≥ 0. Because it is so widely - used, there is special infix syntax for the fully-applied - function type constructor (<code>&arw;</code>). (The prefix identifier for - this constructor is <code>ghc-prim:GHC.Prim.ZLzmzgZR</code>; this should - only appear in unapplied or partially applied form.)</para> - - <para>There are also a number of other primitive type constructors - (e.g., <code>Intzh</code>) that are predefined in the <code>GHC.Prim</code> module, but - have no special syntax. <code>%data</code> and <code>%newtype</code> declarations - introduce additional type constructors, as described below. - Type constructors are distinguished solely by name.</para> - - </section> - <section id="coercions"> - <title>Coercions</title> - - <para>A type may also be built using one of the primitive coercion - operators, as described in <link linkend="namespaces">the Namespaces section</link>. For details on the - meanings of these operators, see the System FC paper <citation>system-fc</citation>.
Also - see <link linkend="newtypes">the Newtypes section</link> for - examples of how GHC uses coercions in Core code.</para> - - </section> - <section id="kinds"> - <title>Kinds</title> - <para>As described in the Haskell definition, it is necessary to - distinguish well-formed type-expressions by classifying them - into different <emphasis>kinds</emphasis> <citation>haskell98, p. 41</citation><!-- TODO -->. In particular, Core - explicitly records the kind of every bound type variable.</para> - - <para>In addition, Core’s kind system includes equality kinds, as in - System FC <citation>system-fc</citation>. An application of a built-in coercion, or of a - user-defined coercion as introduced by a <code>newtype</code> declaration, - has an equality kind.</para> - - </section> - <section id="lifted-and-unlifted-types"> - <title>Lifted and Unlifted Types</title> - - <para>Semantically, a type is <emphasis>lifted</emphasis> if and only if it has bottom as - an element. We need to distinguish them because operationally, - terms with lifted types may be represented by closures; terms - with unlifted types must not be represented by closures, which - implies that any unboxed value is necessarily unlifted. We - distinguish between lifted and unlifted types by ascribing - them different kinds.</para> - - <para>Currently, all the primitive types are unlifted (including a - few boxed primitive types such as <code>ByteArrayzh</code>). Peyton-Jones - and Launchbury <citation>pj:unboxed</citation> described the ideas behind unboxed and - unlifted types.</para> - - </section> - <section id="type-constructors-base-kinds-and-higher-kinds"> - <title>Type Constructors; Base Kinds and Higher Kinds</title> - - <para>Every type constructor has a kind, depending on its arity and - whether it or its arguments are lifted.</para> - - <para>Term variables can only be assigned types that have base - kinds: the base kinds are <code>*</code>, <code>#</code>, and <code>?</code>. The three base kinds - distinguish the liftedness of the types they classify: <code>*</code> - represents lifted types; <code>#</code> represents unlifted types; and <code>?</code> is - the <quote>open</quote> kind, representing a type that may be either lifted - or unlifted. Of these, only <code>*</code> ever appears in Core type - declarations generated from user code; the other two are - needed to describe certain types in primitive (or otherwise - specially-generated) code (which, after optimization, could - potentially appear anywhere).</para> - - <para>In particular, no top-level identifier (except in - <code>ghc-prim:GHC.Prim</code>) has a type of kind <code>#</code> or <code>?</code>.</para> - - <para>Nullary type constructors have base kinds: for example, the - type <code>Int</code> has kind <code>*</code>, and <code>Int#</code> has kind <code>#</code>.</para> - - <para>Non-nullary type constructors have higher kinds: kinds that - have the form - k<subscript>1</subscript><code>&arw;</code>k<subscript>2</subscript>, where - k<subscript>1</subscript> and k<subscript>2</subscript> are - kinds. For example, the function type constructor <code>&arw;</code> has - kind <code>* &arw; (* &arw; *)</code>. 
Since Haskell allows abstracting - over type constructors, type variables may have higher kinds; - however, much more commonly they have kind <code>*</code>, so that is the - default if a type binder omits a kind.</para> - - </section> - - <section id="type-synonyms-and-type-equivalence"> - <title>Type Synonyms and Type Equivalence</title> - - <para>There is no mechanism for defining type synonyms - (corresponding to Haskell <code>type</code> declarations).</para> - - <para>Type equivalence is just syntactic equivalence on type - expressions (of base kinds) modulo:</para> - - <itemizedlist> - <listitem>alpha-renaming of variables bound in <code>%forall</code> types;</listitem> - <listitem>the identity a <code>&arw;</code> b ≡ <code>ghc-prim:GHC.Prim.ZLzmzgZR</code> a b</listitem> - </itemizedlist> - - </section> - </section> - <section id="algebraic-data-types"> - <title>Algebraic data types</title> - - <para>Each data declaration introduces a new type constructor and a - set of one or more data constructors, normally corresponding - directly to a source Haskell <code>data</code> declaration. For example, the - source declaration - - <programlisting language="haskell"> -data Bintree a = - Fork (Bintree a) (Bintree a) - | Leaf a - </programlisting> - - might induce the following Core declaration - - <programlisting language="java"> -%data Bintree a = { - Fork (Bintree a) (Bintree a); - Leaf a} - </programlisting> - - which introduces the unary type constructor Bintree of kind - <code>*&arw;*</code> and two data constructors with types - - <programlisting language="java"> -Fork :: %forall a . Bintree a &arw; Bintree a &arw; Bintree a -Leaf :: %forall a . a &arw; Bintree a - </programlisting> - - We define the <emphasis>arity</emphasis> of each data constructor to be the number of - value arguments it takes; e.g. <code>Fork</code> has arity 2 and <code>Leaf</code> has - arity 1.</para> - - <para>For a less conventional example illustrating the possibility of - higher-order kinds, the Haskell source declaration - - <programlisting language="haskell"> -data A f a = MkA (f a) - </programlisting> - - might induce the Core declaration - - <programlisting language="java"> -%data A (f::*&arw;*) a = { MkA (f a) } - </programlisting> - - which introduces the constructor - - <programlisting language="java"> -MkA :: %forall (f::*&arw;*) a . (f a) &arw; (A f) a - </programlisting></para> - - <para>GHC (like some other Haskell implementations) supports an - extension to Haskell98 for existential types such as - - <programlisting language="haskell"> -data T = forall a . MkT a (a &arw; Bool) - </programlisting> - - This is represented by the Core declaration - - <programlisting language="java"> -%data T = {MkT @a a (a &arw; Bool)} - </programlisting> - - which introduces the nullary type constructor T and the data - constructor - - <programlisting language="java"> -MkT :: %forall a . a &arw; (a &arw; Bool) &arw; T - </programlisting> - - In general, existentially quantified variables appear as extra - universally quantified variables in the data constructor types. An - example of how to construct and deconstruct values of type <code>T</code> is - shown in <link linkend="expression-forms">the Expression Forms section</link>.</para> - - </section> - <section id="newtypes"> - <title>Newtypes</title> - - <para>Each Core <code>%newtype</code> declaration introduces a new type constructor - and an associated representation type, corresponding to a source - Haskell <code>newtype</code> declaration.
However, unlike in source Haskell, - a <code>%newtype</code> declaration does not introduce any data constructors.</para> - - <para>Each <code>%newtype</code> declaration also introduces a new coercion - (syntactically, just another type constructor) that implies an - axiom equating the type constructor, applied to any type - variables bound by the <code>%newtype</code>, to the representation type.</para> - - <para>For example, the Haskell fragment - - <programlisting language="haskell"> -newtype U = MkU Bool -u = MkU True -v = case u of - MkU b &arw; not b - </programlisting> - - might induce the Core fragment - - <programlisting language="java"> -%newtype U ZCCoU = Bool; -u :: U = %cast (True) - ((%sym ZCCoU)); -v :: Bool = not (%cast (u) ZCCoU); - </programlisting></para> - - <para>The <code>newtype</code> declaration implies that the types <code>U</code> and <code>Bool</code> have - equivalent representations, and the coercion axiom <code>ZCCoU</code> - provides evidence that <code>U</code> is equivalent to <code>Bool</code>. Notice that in - the body of <code>u</code>, the boolean value <code>True</code> is cast to type <code>U</code> using - the primitive symmetry rule applied to <code>ZCCoU</code>: that is, using a - coercion of kind <code>Bool :=: U</code>. And in the body of <code>v</code>, <code>u</code> is cast - back to type <code>Bool</code> using the axiom <code>ZCCoU</code>.</para> - - <para>Notice that the <code>case</code> in the Haskell source code above translates - to a <code>cast</code> in the corresponding Core code. That is because - operationally, a <code>case</code> on a value whose type is declared by a - <code>newtype</code> declaration is a no-op. Unlike a <code>case</code> on any other - value, such a <code>case</code> does no evaluation: its only function is to - coerce its scrutinee’s type.</para> - - <para>Also notice that unlike in a previous draft version of External - Core, there is no need to handle recursive newtypes specially.</para> - - </section> - - <section id="expression-forms"> - <title>Expression Forms</title> - - <para>Variables and data constructors are straightforward.</para> - - <para>Literal (<emphasis role="variable">lit</emphasis>) expressions consist of a literal value, in one of - four different formats, and a (primitive) type annotation. Only - certain combinations of format and type are permitted; - see <link linkend="primitive-module">the Primitive Module section</link>. - The character and string formats can describe only 8-bit ASCII characters.</para> - - <para>Moreover, because the operational semantics for Core interprets - strings as C-style null-terminated strings, strings should not - contain embedded nulls.</para> - - <para>In Core, value applications, type applications, value - abstractions, and type abstractions are all explicit. To tell - them apart, type arguments in applications and formal type - arguments in abstractions are preceded by an <code>@ symbol</code>. (In - abstractions, the <code>@</code> plays essentially the same role as the more - usual Λ symbol.) For example, the Haskell source declaration - - <programlisting language="haskell"> -f x = Leaf (Leaf x) - </programlisting> - - might induce the Core declaration - - <programlisting language="java"> -f :: %forall a . a &arw; BinTree (BinTree a) = - \ @a (x::a) &arw; Leaf @(Bintree a) (Leaf @a x) - </programlisting></para> - - <para>Value applications may be of user-defined functions, data - constructors, or primitives. 
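For instance, a data constructor applied only to a type argument, such as <code>Leaf @ a</code>, is itself a legal expression; a sketch (with <code>h</code> standing for some suitable polymorphic higher-order function, a name invented purely for illustration) is - <programlisting language="java"> -h @ a @ (Bintree a) (Leaf @ a) xs - </programlisting> - where the partially applied constructor is passed as an ordinary argument.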
None of these sorts of applications - are necessarily saturated.</para> - - <para>Note that the arguments of type applications are not always of - kind <code>*</code>. For example, given our previous definition of type <code>A</code>: - - <programlisting language="haskell"> -data A f a = MkA (f a) - </programlisting> - - the source code - - <programlisting language="haskell"> -MkA (Leaf True) - </programlisting> - - becomes - - <programlisting language="java"> -(MkA @Bintree @Bool) (Leaf @Bool True) - </programlisting></para> - - <para>Local bindings, of a single variable or of a set of mutually - recursive variables, are represented by <code>%let</code> expressions in the - usual way.</para> - - <para>By far the most complicated expression form is <code>%case</code>. <code>%case</code> - expressions are permitted over values of any type, although they - will normally be algebraic or primitive types (with literal - values). Evaluating a <code>%case</code> forces the evaluation of the - expression being tested (the <quote>scrutinee</quote>). The value of the - scrutinee is bound to the variable following the <code>%of</code> keyword, - which is in scope in all alternatives; this is useful when the - scrutinee is a non-atomic expression (see next example). The - scrutinee is preceded by the type of the entire <code>%case</code> - expression: that is, the result type that all of the <code>%case</code> - alternatives have (this is intended to make type reconstruction - easier in the presence of type equality coercions).</para> - - <para>In an algebraic <code>%case</code>, all the case alternatives must be labeled - with distinct data constructors from the algebraic type, - followed by any existential type variable bindings (see below), - and typed term variable bindings corresponding to the data - constructor’s arguments. The number of variables must match the - data constructor’s arity.</para> - - <para>For example, the following Haskell source expression - - <programlisting language="haskell"> -case g x of - Fork l r &arw; Fork r l - t@(Leaf v) &arw; Fork t t - </programlisting> - - might induce the Core expression - - <programlisting language="java"> -%case ((Bintree a)) g x %of (t::Bintree a) - Fork (l::Bintree a) (r::Bintree a) &arw; - Fork @a r l - Leaf (v::a) &arw; - Fork @a t t - </programlisting></para> - - <para>When performing a <code>%case</code> over a value of an - existentially-quantified algebraic type, the alternative must - include extra local type bindings for the - existentially-quantified variables. For example, given - - <programlisting language="haskell"> -data T = forall a . MkT a (a &arw; Bool) - </programlisting> - - the source - - <programlisting language="haskell"> -case x of - MkT w g &arw; g w - </programlisting> - - becomes - - <programlisting language="java"> -%case x %of (x’::T) - MkT @b (w::b) (g::b&arw;Bool) &arw; g w - </programlisting></para> - - <para>In a <code>%case</code> over literal alternatives, all the case alternatives - must be distinct literals of the same primitive type.</para> - - <para>The list of alternatives may begin with a default alternative - labeled with an underscore (<code>%_</code>), whose right-hand side will be - evaluated if none of the other alternatives match. The default - is optional except for in a case over a primitive type, or when - there are no other alternatives. If the case is over neither an - algebraic type nor a primitive type, then the list of - alternatives must contain a default alternative and nothing - else. 
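As an illustrative sketch in the same style as the examples above (with invented binder names, and with the default alternative written first as described here), a <code>%case</code> over an unboxed integer might look like - <programlisting language="java"> -%case (Bool) x %of (wild::Intzh) - %_ &arw; False - (0::Intzh) &arw; True - </programlisting> - where the <code>%_</code> alternative is required because the scrutinee has a primitive type.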
For algebraic cases, the set of alternatives need not be - exhaustive, even if no default is given; if alternatives are - missing, this implies that GHC has deduced that they cannot - occur.</para> - - <para><code>%cast</code> is used to manipulate newtypes, as described in - <link linkend="newtypes">the Newtype section</link>. The <code>%cast</code> expression - takes an expression and a coercion: syntactically, the coercion - is an arbitrary type, but it must have an equality kind. In an - expression <code>(cast e co)</code>, if <code>e :: T</code> and <code>co</code> has kind <code>T :=: U</code>, then - the overall expression has type <code>U</code> <citation>ghc-fc-commentary</citation>. Here, <code>co</code> must be a - coercion whose left-hand side is <code>T</code>.</para> - - <para>Note that unlike the <code>%coerce</code> expression that existed in previous - versions of Core, this means that <code>%cast</code> is (almost) type-safe: - the coercion argument provides evidence that can be verified by - a typechecker. There are still unsafe <code>%cast</code>s, corresponding to - the unsafe <code>%coerce</code> construct that existed in old versions of - Core, because there is a primitive unsafe coercion type that can - be used to cast arbitrary types to each other. GHC uses this for - such purposes as coercing the return type of a function (such as - error) which is guaranteed to never return: - - <programlisting language="haskell"> -case (error "") of - True &arw; 1 - False &arw; 2 - </programlisting> - - becomes: - - <programlisting language="java"> -%cast (error @ Bool (ZMZN @ Char)) -(%unsafe Bool Integer); - </programlisting> - - <code>%cast</code> has no operational meaning and is only used in - typechecking.</para> - - <para>A <code>%note</code> expression carries arbitrary internal information that - GHC finds interesting. The information is encoded as a string. - Expression notes currently generated by GHC include the inlining - pragma (<code>InlineMe</code>) and cost-center labels for profiling.</para> - - <para>A <code>%external</code> expression denotes an external identifier, which has - the indicated type (always expressed in terms of Haskell - primitive types). External Core supports two kinds of external - calls: <code>%external</code> and <code>%dynexternal</code>. Only the former is supported - by the current set of stand-alone Core tools. In addition, there - is a <code>%label</code> construct which GHC may generate but which the Core - tools do not support.</para> - - <para>The present syntax for externals is sufficient for describing C - functions and labels. Interfacing to other languages may require - additional information or a different interpretation of the name - string.</para> - - </section> - - <section id="expression-evaluation"> - <title>Expression Evaluation</title> - <para>The dynamic semantics of Core are defined on the type-erasure of - the program: for example, we ignore all type abstractions and - applications. The denotational semantics of the resulting - type-free program are just the conventional ones for a - call-by-name language, in which expressions are only evaluated - on demand. But Core is intended to be a call-by-<emphasis>need</emphasis> language, - in which expressions are only evaluated once. 
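For example, in a fragment such as - <programlisting language="java"> -%let x :: Int = f y %in g x x - </programlisting> - (with qualifiers omitted and <code>f</code>, <code>g</code>, and <code>y</code> standing for arbitrary bindings in scope) the suspension built for <code>f y</code> is evaluated at most once, no matter how many times <code>x</code> is demanded.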
To express the - sharing behavior of call-by-need, we give an operational model - in the style of Launchbury <citation>launchbury93natural</citation>.</para> - - <para>This section describes the model informally; a more formal - semantics is separately available as an executable interpreter.</para> - - <para>To simplify the semantics, we consider only <quote>well-behaved</quote> Core - programs in which constructor and primitive applications are - fully saturated, and in which non-trivial expressions of - unlifted kind (<code>#</code>) appear only as scrutinees in <code>%case</code> - expressions. Any program can easily be put into this form; a - separately available preprocessor illustrates how. In the - remainder of this section, we use <quote>Core</quote> to mean <quote>well-behaved</quote> - Core.</para> - - <para>Evaluating a Core expression means reducing it to <emphasis>weak-head normal form (WHNF)</emphasis>, - i.e., a primitive value, lambda abstraction, - or fully-applied data constructor. Evaluating a program means - evaluating the expression <code>main:ZCMain.main</code>.</para> - - <para>To make sure that expression evaluation is shared, we make use - of a <emphasis>heap</emphasis>, which contains <emphasis>heap entries</emphasis>. A heap entry can be: - - <itemizedlist> - <listitem> - A <emphasis>thunk</emphasis>, representing an unevaluated expression, also known - as a suspension. - </listitem> - <listitem> - A <emphasis>WHNF</emphasis>, representing an evaluated expression. The result of - evaluating a thunk is a WHNF. A WHNF is always a closure - (corresponding to a lambda abstraction in the source - program) or a data constructor application: computations - over primitive types are never suspended. - </listitem> - </itemizedlist></para> - - <para><emphasis>Heap pointers</emphasis> point to heap entries: at different times, the - same heap pointer can point to either a thunk or a WHNF, because - the run-time system overwrites thunks with WHNFs as computation - proceeds.</para> - - <para>The suspended computation that a thunk represents corresponds to - evaluating one of three different kinds of expression. - The run-time system allocates a different kind of thunk - depending on what kind of expression it is: - - <itemizedlist> - <listitem> - A thunk for a value definition has a group of suspended - defining expressions, along with a list of bindings between - defined names and heap pointers to those suspensions. (A - value definition may be a recursive group of definitions or - a single non-recursive definition, and it may be top-level - (global) or <code>let</code>-bound (local)). - </listitem> - <listitem> - A thunk for a function application (where the function is - user-defined) has a suspended actual argument expression, - and a binding between the formal argument and a heap pointer - to that suspension. - </listitem> - <listitem> - A thunk for a constructor application has a suspended actual - argument expression; the entire constructed value has a heap - pointer to that suspension embedded in it. - </listitem> - </itemizedlist></para> - - <para>As computation proceeds, copies of the heap pointer for a given - thunk propagate through the executing program. When another - computation demands the result of that thunk, the thunk is - <emphasis>forced</emphasis>: the run-time system computes the thunk’s result, - yielding a WHNF, and overwrites the heap entry for the thunk - with the WHNF. Now, all copies of the heap pointer point to the - new heap entry: a WHNF.
Forcing occurs only in the context of - - <itemizedlist> - <listitem>evaluating the operator expression of an application;</listitem> - <listitem>evaluating the scrutinee of a <code>case</code> expression; or</listitem> - <listitem>evaluating an argument to a primitive or external function application.</listitem> - </itemizedlist> - </para> - - <para>When no pointers to a heap entry (whether it is a thunk or a WHNF) - remain, the garbage collector can reclaim the space it uses. We - assume this happens implicitly.</para> - - <para>With the exception of functions, arrays, and mutable variables, - we intend that values of all primitive types should be held - <emphasis>unboxed</emphasis>: they should not be heap-allocated. This does not - violate call-by-need semantics: all primitive types are - <emphasis>unlifted</emphasis>, which means that values of those types must be - evaluated strictly. Unboxed tuple types are not heap-allocated - either.</para> - - <para>Certain primitives and <code>%external</code> functions cause side-effects to - state threads or to the real world. Where the ordering of these - side-effects matters, Core already forces this order with data - dependencies on the pseudo-values representing the threads.</para> - - <para>An implementation must specially support the <code>raisezh</code> and - <code>handlezh</code> primitives: for example, by using a handler stack. - Again, real-world threading guarantees that they will execute in - the correct order.</para> - - </section> - </section> - <section id="primitive-module"> - <title>Primitive Module</title> - - <para>The semantics of External Core rely on the contents and informal - semantics of the primitive module <code>ghc-prim:GHC.Prim</code>. Nearly all - the primitives are required in order to cover GHC’s implementation - of the Haskell98 standard prelude; the only operators that can be - completely omitted are those supporting the byte-code interpreter, - parallelism, and foreign objects. Some of the concurrency - primitives are needed, but can be given degenerate implementations - if it is desired to target a purely sequential back end (see - <link linkend="non-concurrent-back-end">the Non-concurrent Back End section</link>).</para> - - <para>In addition to these primitives, a large number of C library - functions are required to implement the full standard Prelude, - particularly to handle I/O and arithmetic on the less commonly used - types.</para> - - <para>For a full listing of the names and types of the primitive - operators, see the GHC library documentation <citation>ghcprim</citation>.</para> - - <section id="non-concurrent-back-end"> - <title>Non-concurrent Back End</title> - - <para>The Haskell98 standard prelude doesn’t include any concurrency - support, but GHC’s implementation of it relies on the existence - of some concurrency primitives. However, it never actually forks - multiple threads. Hence, the concurrency primitives can be given - degenerate implementations that will work in a non-concurrent - setting, as follows:</para> - - <itemizedlist> - <listitem> - <code>ThreadIdzh</code> can be represented by a singleton type, whose - (unique) value is returned by <code>myThreadIdzh</code>. - </listitem> - <listitem> - <code>forkzh</code> can just die with an <quote>unimplemented</quote> message.
- </listitem> - <listitem> - <code>killThreadzh</code> and <code>yieldzh</code> can also just die <quote>unimplemented</quote> - since in a one-thread world, the only thread a thread can - kill is itself, and if a thread yields the program hangs. - </listitem> - <listitem> - <code>MVarzh a</code> can be represented by <code>MutVarzh (Maybe a)</code>; where a - concurrent implementation would block, the sequential - implementation can just die with a suitable message (since - no other thread exists to unblock it). - </listitem> - <listitem> - <code>waitReadzh</code> and <code>waitWritezh</code> can be implemented using a <code>select</code> - with no timeout. - </listitem> - </itemizedlist> - </section> - - <section id="literals"> - <title>Literals</title> - - <para>Only the following combination of literal forms and types are - permitted:</para> - - <informaltable frame="none" colsep="0" rowsep="0"> - <tgroup cols='3'> - <colspec colname="literal" align="left" colwidth="*" /> - <colspec colname="type" align="left" colwidth="*" /> - <colspec colname="description" align="left" colwidth="4*" /> - <thead> - <row> - <entry>Literal form</entry> - <entry>Type</entry> - <entry>Description</entry> - </row> - </thead> - <tbody> - <row> - <entry morerows="3" valign="top">integer</entry> - <entry><code>Intzh</code></entry> - <entry>Int</entry> - </row> - <row> - <entry><code>Wordzh</code></entry> - <entry>Word</entry> - </row> - <row> - <entry><code>Addrzh</code></entry> - <entry>Address</entry> - </row> - <row> - <entry><code>Charzh</code></entry> - <entry>Unicode character code</entry> - </row> - - <row> - <entry morerows="1" valign="top">rational</entry> - <entry><code>Floatzh</code></entry> - <entry>Float</entry> - </row> - <row> - <entry><code>Doublezh</code></entry> - <entry>Double</entry> - </row> - - <row> - <entry>character</entry> - <entry><code>Charzh</code></entry> - <entry>Unicode character specified by ASCII character</entry> - </row> - - <row> - <entry>string</entry> - <entry><code>Addrzh</code></entry> - <entry>Address of specified C-format string</entry> - </row> - </tbody> - </tgroup> - </informaltable> - </section> - </section> - - - <bibliolist> - <!-- This bibliography was semi-automatically converted by JabRef from core.bib. 
--> - - <title>References</title> - - <biblioentry> - <abbrev>ghc-user-guide</abbrev> - <authorgroup> - <author><surname>The GHC Team</surname></author> - </authorgroup> - <citetitle pubwork="article">The Glorious Glasgow Haskell Compilation System User's Guide, Version 6.8.2</citetitle> - <pubdate>2008</pubdate> - <bibliomisc><ulink url="http://www.haskell.org/ghc/docs/latest/html/users_guide/index.html">http://www.haskell.org/ghc/docs/latest/html/users_guide/index.html</ulink></bibliomisc> - </biblioentry> - - <biblioentry> - <abbrev>ghc-fc-commentary</abbrev> - <authorgroup> - <author><surname>GHC Wiki</surname></author> - </authorgroup> - <citetitle pubwork="article">System FC: equality constraints and coercions</citetitle> - <pubdate>2006</pubdate> - <bibliomisc><ulink url="http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/FC">http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/FC</ulink></bibliomisc> - </biblioentry> - - <biblioentry> - <abbrev>ghc-api</abbrev> - <authorgroup> - <author><surname>Haskell Wiki</surname></author> - </authorgroup> - <citetitle pubwork="article">Using GHC as a library</citetitle> - <pubdate>2007</pubdate> - <bibliomisc><ulink url="http://haskell.org/haskellwiki/GHC/As_a_library">http://haskell.org/haskellwiki/GHC/As_a_library</ulink></bibliomisc> - </biblioentry> - - <biblioentry> - <abbrev>haskell98</abbrev> - <authorgroup> - <editor><firstname>Simon</firstname><surname>Peyton-Jones</surname></editor> - </authorgroup> - <citetitle pubwork="article">Haskell 98 Language and Libraries: The Revised Report</citetitle> - <publisher> - <publishername>Cambridge University Press</publishername> - <address> - <city>Cambridge></city> - <state>UK</state> - </address> - </publisher> - <pubdate>2003</pubdate> - </biblioentry> - - <biblioentry> - <abbrev>system-fc</abbrev> - <authorgroup> - <author><firstname>Martin</firstname><surname>Sulzmann</surname></author> - <author><firstname>Manuel M.T.</firstname><surname>Chakravarty</surname></author> - <author><firstname>Simon</firstname><surname>Peyton-Jones</surname></author> - <author><firstname>Kevin</firstname><surname>Donnelly</surname></author> - </authorgroup> - <citetitle pubwork="article">System F with type equality coercions</citetitle> - <publisher> - <publishername>ACM</publishername> - <address> - <city>New York</city> - <state>NY</state> - <country>USA</country> - </address> - </publisher> - <artpagenums>53-66</artpagenums> - <pubdate>2007</pubdate> - <bibliomisc><ulink url="http://portal.acm.org/citation.cfm?id=1190324">http://portal.acm.org/citation.cfm?id=1190324</ulink></bibliomisc> - <!-- booktitle = {{TLDI '07: Proceedings of the 2007 ACM SIGPLAN International Workshop on Types in Language Design and Implementation}}, --> - </biblioentry> - - <biblioentry> - <abbrev>gadts</abbrev> - <authorgroup> - <author><firstname>Simon</firstname><surname>Peyton-Jones</surname></author> - <author><firstname>Dimitrios</firstname><surname>Vytiniotis</surname></author> - <author><firstname>Stephanie</firstname><surname>Weirich</surname></author> - <author><firstname>Geoffrey</firstname><surname>Washburn</surname></author> - </authorgroup> - <citetitle pubwork="article">Simple unification-based type inference for GADTs</citetitle> - <publisher> - <publishername>ACM</publishername> - <address> - <city>New York</city> - <state>NY</state> - <country>USA</country> - </address> - </publisher> - <artpagenums>50-61</artpagenums> - <pubdate>2006</pubdate> - <bibliomisc><ulink 
url="http://research.microsoft.com/Users/simonpj/papers/gadt/index.htm">http://research.microsoft.com/Users/simonpj/papers/gadt/index.htm</ulink></bibliomisc> - </biblioentry> - - <biblioentry> - <abbrev>Launchbury94</abbrev> - <authorgroup> - <author><firstname>John</firstname><surname>Launchbury</surname></author> - <author><firstname>Simon L.</firstname><surname>Peyton-Jones</surname></author> - </authorgroup> - <citetitle pubwork="article">Lazy Functional State Threads</citetitle> - <artpagenums>24-35</artpagenums> - <pubdate>1994</pubdate> - <bibliomisc><ulink url="http://citeseer.ist.psu.edu/article/launchbury93lazy.html">http://citeseer.ist.psu.edu/article/launchbury93lazy.html</ulink></bibliomisc> - <!-- booktitle = "{SIGPLAN} {Conference} on {Programming Language Design and Implementation}", --> - </biblioentry> - - <biblioentry> - <abbrev>pj:unboxed</abbrev> - <authorgroup> - <author><firstname>Simon L.</firstname><surname>Peyton-Jones</surname></author> - <author><firstname>John</firstname><surname>Launchbury</surname></author> - <editor><firstname>J.</firstname><surname>Hughes</surname></editor> - </authorgroup> - <citetitle pubwork="article">Unboxed Values as First Class Citizens in a Non-strict Functional Language</citetitle> - <publisher> - <publishername>Springer-Verlag LNCS523</publishername> - <address> - <city>Cambridge</city> - <state>Massachussetts</state> - <country>USA</country> - </address> - </publisher> - <artpagenums>636-666</artpagenums> - <pubdate>1991, August 26-28</pubdate> - <bibliomisc><ulink url="http://citeseer.ist.psu.edu/jones91unboxed.html">http://citeseer.ist.psu.edu/jones91unboxed.html</ulink></bibliomisc> - <!-- booktitle = "Proceedings of the Conference on Functional Programming and Computer Architecture", --> - </biblioentry> - - <biblioentry> - <abbrev>ghc-inliner</abbrev> - <authorgroup> - <author><firstname>Simon</firstname><surname>Peyton-Jones</surname></author> - <author><firstname>Simon</firstname><surname>Marlow</surname></author> - </authorgroup> - <citetitle pubwork="article">Secrets of the Glasgow Haskell Compiler inliner</citetitle> - <pubdate>1999</pubdate> - <address> - <city>Paris</city> - <country>France</country> - </address> - <bibliomisc><ulink url="http://research.microsoft.com/Users/simonpj/Papers/inlining/inline.pdf">http://research.microsoft.com/Users/simonpj/Papers/inlining/inline.pdf</ulink></bibliomisc> - <!-- booktitle = "Workshop on Implementing Declarative Languages", --> - </biblioentry> - - <biblioentry> - <abbrev>comp-by-trans-scp</abbrev> - <authorgroup> - <author><firstname>Simon L.</firstname><surname>Peyton-Jones</surname></author> - <author><firstname>A. L. 
M.</firstname><surname>Santos</surname></author> - </authorgroup> - <citetitle pubwork="article">A transformation-based optimiser for Haskell</citetitle> - <citetitle pubwork="journal">Science of Computer Programming</citetitle> - <volumenum>32</volumenum> - <issuenum>1-3</issuenum> - <artpagenums>3-47</artpagenums> - <pubdate>1998</pubdate> - <bibliomisc><ulink url="http://citeseer.ist.psu.edu/peytonjones98transformationbased.html">http://citeseer.ist.psu.edu/peytonjones98transformationbased.html</ulink></bibliomisc> - </biblioentry> - - <biblioentry> - <abbrev>stg-machine</abbrev> - <authorgroup> - <author><firstname>Simon L.</firstname><surname>Peyton-Jones</surname></author> - </authorgroup> - <citetitle pubwork="article">Implementing Lazy Functional Languages on Stock Hardware: The Spineless Tagless G-Machine</citetitle> - <citetitle pubwork="journal">Journal of Functional Programming</citetitle> - <volumenum>2</volumenum> - <issuenum>2</issuenum> - <artpagenums>127-202</artpagenums> - <pubdate>1992</pubdate> - <bibliomisc><ulink url="http://citeseer.ist.psu.edu/peytonjones92implementing.html">http://citeseer.ist.psu.edu/peytonjones92implementing.html</ulink></bibliomisc> - </biblioentry> - - <biblioentry> - <abbrev>launchbury93natural</abbrev> - <authorgroup> - <author><firstname>John</firstname><surname>Launchbury</surname></author> - </authorgroup> - <citetitle pubwork="article">A Natural Semantics for Lazy Evaluation</citetitle> - <artpagenums>144-154</artpagenums> - <address> - <city>Charleston</city> - <state>South Carolina</state> - </address> - <pubdate>1993</pubdate> - <bibliomisc><ulink url="http://citeseer.ist.psu.edu/launchbury93natural.html">http://citeseer.ist.psu.edu/launchbury93natural.html</ulink></bibliomisc> - <!-- booktitle = "Conference Record of the Twentieth Annual {ACM} {SIGPLAN}-{SIGACT} Symposium on Principles of Programming Languages", --> - </biblioentry> - - <biblioentry> - <abbrev>ghcprim</abbrev> - <authorgroup> - <author><surname>The GHC Team</surname></author> - </authorgroup> - <citetitle pubwork="article">Library documentation: GHC.Prim</citetitle> - <pubdate>2008</pubdate> - <bibliomisc><ulink url="http://www.haskell.org/ghc/docs/latest/html/libraries/base/GHC-Prim.html">http://www.haskell.org/ghc/docs/latest/html/libraries/base/GHC-Prim.html</ulink></bibliomisc> - </biblioentry> - </bibliolist> - -</chapter> diff --git a/docs/users_guide/flags.xml b/docs/users_guide/flags.xml index 593bf4b1ef..1dd224a611 100644 --- a/docs/users_guide/flags.xml +++ b/docs/users_guide/flags.xml @@ -705,6 +705,12 @@ </thead> <tbody> <row> + <entry><option>-fcontext-stack=N</option><replaceable>n</replaceable></entry> + <entry>set the <link linkend="undecidable-instances">limit for context reduction</link>. Default is 20.</entry> + <entry>dynamic</entry> + <entry></entry> + </row> + <row> <entry><option>-fglasgow-exts</option></entry> <entry>Deprecated. Enable most language extensions; see <xref linkend="options-language"/> for exactly which ones.</entry> <entry>dynamic</entry> @@ -717,10 +723,10 @@ <entry><option>-fno-irrefutable-tuples</option></entry> </row> <row> - <entry><option>-fcontext-stack=N</option><replaceable>n</replaceable></entry> - <entry>set the <link linkend="undecidable-instances">limit for context reduction</link>. 
Default is 20.</entry> + <entry><option>-fpackage-trust</option></entry> + <entry>Enable <link linkend="safe-haskell">Safe Haskell</link> trusted package requirement for trustworthy modules.</entry> <entry>dynamic</entry> - <entry></entry> + <entry><option>-</option></entry> </row> <row> <entry><option>-ftype-function-depth=N</option><replaceable>n</replaceable></entry> @@ -751,65 +757,168 @@ <entry><option>-XNoAutoDeriveTypeable</option></entry> </row> <row> + <entry><option>-XBangPatterns</option></entry> + <entry>Enable <link linkend="bang-patterns">bang patterns</link>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoBangPatterns</option></entry> + </row> + <row> + <entry><option>-XBinaryLiterals</option></entry> + <entry>Enable support for <link linkend="binary-literals">binary literals</link>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoBinaryLiterals</option></entry> + </row> + <row> + <entry><option>-XCApiFFI</option></entry> + <entry>Enable <link linkend="ffi-capi">the CAPI calling convention</link>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoCAPIFFI</option></entry> + </row> + <row> + <entry><option>-XConstrainedClassMethods</option></entry> + <entry>Enable <link linkend="class-method-types">constrained class methods</link>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoConstrainedClassMethods</option></entry> + </row> + <row> <entry><option>-XConstraintKinds</option></entry> <entry>Enable a <link linkend="constraint-kind">kind of constraints</link>.</entry> <entry>dynamic</entry> <entry><option>-XNoConstraintKinds</option></entry> </row> <row> + <entry><option>-XCPP</option></entry> + <entry>Enable the <link linkend="c-pre-processor">C preprocessor</link>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoCPP</option></entry> + </row> + <row> <entry><option>-XDataKinds</option></entry> <entry>Enable <link linkend="promotion">datatype promotion</link>.</entry> <entry>dynamic</entry> <entry><option>-XNoDataKinds</option></entry> </row> <row> + <entry><option>-XDefaultSignatures</option></entry> + <entry>Enable <link linkend="class-default-signatures">default signatures</link>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoDefaultSignatures</option></entry> + </row> + <row> <entry><option>-XDeriveDataTypeable</option></entry> - <entry>Enable <link linkend="deriving-typeable">deriving for the Data and Typeable classes</link>.</entry> + <entry>Enable <link linkend="deriving-typeable">deriving for the Data and Typeable classes</link>. + Implied by <option>-XAutoDeriveTypeable</option>.</entry> <entry>dynamic</entry> <entry><option>-XNoDeriveDataTypeable</option></entry> </row> <row> + <entry><option>-XDeriveFunctor</option></entry> + <entry>Enable <link linkend="deriving-extra">deriving for the Functor class</link>. + Implied by <option>-XDeriveTraversable</option>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoDeriveFunctor</option></entry> + </row> + <row> + <entry><option>-XDeriveFoldable</option></entry> + <entry>Enable <link linkend="deriving-extra">deriving for the Foldable class</link>. 
+ Implied by <option>-XDeriveTraversable</option>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoDeriveFoldable</option></entry> + </row> + <row> <entry><option>-XDeriveGeneric</option></entry> <entry>Enable <link linkend="deriving-typeable">deriving for the Generic class</link>.</entry> <entry>dynamic</entry> <entry><option>-XNoDeriveGeneric</option></entry> </row> <row> - <entry><option>-XGeneralizedNewtypeDeriving</option></entry> - <entry>Enable <link linkend="newtype-deriving">newtype deriving</link>.</entry> + <entry><option>-XDeriveTraversable</option></entry> + <entry>Enable <link linkend="deriving-extra">deriving for the Traversable class</link>. + Implies <option>-XDeriveFunctor</option> and <option>-XDeriveFoldable</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoGeneralizedNewtypeDeriving</option></entry> + <entry><option>-XNoDeriveTraversable</option></entry> </row> <row> <entry><option>-XDisambiguateRecordFields</option></entry> - <entry>Enable <link linkend="disambiguate-fields">record - field disambiguation</link></entry> + <entry>Enable <link linkend="disambiguate-fields">record field disambiguation</link>. + Implied by <option>-XRecordWildCards</option>.</entry> <entry>dynamic</entry> <entry><option>-XNoDisambiguateRecordFields</option></entry> </row> <row> <entry><option>-XEmptyCase</option></entry> - <entry>Allow <link linkend="empty-case">empty case alternatives</link> - </entry> + <entry>Allow <link linkend="empty-case">empty case alternatives</link>.</entry> <entry>dynamic</entry> <entry><option>-XNoEmptyCase</option></entry> </row> <row> + <entry><option>-XEmptyDataDecls</option></entry> + <entry>Enable empty data declarations.</entry> + <entry>dynamic</entry> + <entry><option>-XNoEmptyDataDecls</option></entry> + </row> + <row> + <entry><option>-XExistentialQuantification</option></entry> + <entry>Enable <link linkend="existential-quantification">existential quantification</link>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoExistentialQuantification</option></entry> + </row> + <row> + <entry><option>-XExplicitForAll</option></entry> + <entry>Enable <link linkend="explicit-foralls">explicit universal quantification</link>. + Implied by <option>-XScopedTypeVariables</option>, + <option>-XLiberalTypeSynonyms</option>, + <option>-XRankNTypes</option> and + <option>-XExistentialQuantification</option>. + </entry> + <entry>dynamic</entry> + <entry><option>-XNoExplicitForAll</option></entry> + </row> + <row> + <entry><option>-XExplicitNamespaces</option></entry> + <entry>Enable using the keyword <literal>type</literal> to specify the namespace of + entries in imports and exports (<xref linkend="explicit-namespaces"/>). + Implied by <option>-XTypeOperators</option> and <option>-XTypeFamilies</option>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoExplicitNamespaces</option></entry> + </row> + <row> <entry><option>-XExtendedDefaultRules</option></entry> - <entry>Use GHCi's <link linkend="extended-default-rules">extended default rules</link> in a normal module</entry> + <entry>Use GHCi's <link linkend="extended-default-rules">extended default rules</link> in a normal module.</entry> <entry>dynamic</entry> <entry><option>-XNoExtendedDefaultRules</option></entry> </row> <row> + <entry><option>-XFlexibleContexts</option></entry> + <entry>Enable <link linkend="flexible-contexts">flexible contexts</link>. 
+ Implied by <option>-XImplicitParams</option>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoFlexibleContexts</option></entry> + </row> + <row> + <entry><option>-XFlexibleInstances</option></entry> + <entry>Enable <link linkend="instance-rules">flexible instances</link>. + Implies <option>-XTypeSynonymInstances</option>. Implied by <option>-XImplicitParams</option>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoFlexibleInstances</option></entry> + </row> + <row> <entry><option>-XForeignFunctionInterface</option></entry> <entry>Enable <link linkend="ffi">foreign function interface</link>.</entry> <entry>dynamic</entry> <entry><option>-XNoForeignFunctionInterface</option></entry> </row> <row> + <entry><option>-XFunctionalDependencies</option></entry> + <entry>Enable <link linkend="functional-dependencies">functional dependencies</link>. + Implies <option>-XMultiParamTypeClasses</option>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoFunctionalDependencies</option></entry> + </row> + <row> <entry><option>-XGADTs</option></entry> <entry>Enable <link linkend="gadt">generalised algebraic data types</link>. - </entry> + Implies <option>-XGADTSyntax</option> and <option>-XMonoLocalBinds</option>.</entry> <entry>dynamic</entry> <entry><option>-XNoGADTs</option></entry> </row> @@ -821,6 +930,12 @@ <entry><option>-XNoGADTSyntax</option></entry> </row> <row> + <entry><option>-XGeneralizedNewtypeDeriving</option></entry> + <entry>Enable <link linkend="newtype-deriving">newtype deriving</link>.</entry> + <entry>dynamic</entry> + <entry><option>-XNoGeneralizedNewtypeDeriving</option></entry> + </row> + <row> <entry><option>-XGenerics</option></entry> <entry>Deprecated, does nothing. No longer enables <link linkend="generic-classes">generic classes</link>. See also GHC's support for @@ -830,103 +945,74 @@ </row> <row> <entry><option>-XImplicitParams</option></entry> - <entry>Enable <link linkend="implicit-parameters">Implicit Parameters</link>.</entry> + <entry>Enable <link linkend="implicit-parameters">Implicit Parameters</link>. + Implies <option>-XFlexibleContexts</option> and <option>-XFlexibleInstances</option>.</entry> <entry>dynamic</entry> <entry><option>-XNoImplicitParams</option></entry> </row> <row> <entry><option>-XNoImplicitPrelude</option></entry> - <entry>Don't implicitly <literal>import Prelude</literal></entry> + <entry>Don't implicitly <literal>import Prelude</literal>. + Implied by <option>-XRebindableSyntax</option>.</entry> <entry>dynamic</entry> <entry><option>-XImplicitPrelude</option></entry> </row> <row> - <entry><option>-XIncoherentInstances</option></entry> - <entry>Enable <link linkend="instance-overlap">incoherent instances</link>. 
- Implies <option>-XOverlappingInstances</option> </entry> - <entry>dynamic</entry> - <entry><option>-XNoIncoherentInstances</option></entry> - </row> - <row> - <entry><option>-XNoMonomorphismRestriction</option></entry> - <entry>Disable the <link linkend="monomorphism">monomorphism restriction</link></entry> - <entry>dynamic</entry> - <entry><option>-XMonomorphismRrestriction</option></entry> - </row> - <row> - <entry><option>-XNegativeLiterals</option></entry> - <entry>Enable support for <link linkend="negative-literals">negative literals</link></entry> - <entry>dynamic</entry> - <entry><option>-XNoNegativeLiterals</option></entry> - </row> - <row> - <entry><option>-XNoNPlusKPatterns</option></entry> - <entry>Disable support for <literal>n+k</literal> patterns</entry> - <entry>dynamic</entry> - <entry><option>-XNPlusKPatterns</option></entry> - </row> - <row> - <entry><option>-XNumDecimals</option></entry> - <entry>Enable support for 'fractional' integer literals</entry> - <entry>dynamic</entry> - <entry><option>-XNoNumDecimals</option></entry> - </row> - <row> - <entry><option>-XOverlappingInstances</option></entry> - <entry>Enable <link linkend="instance-overlap">overlapping instances</link></entry> + <entry><option>-XImpredicativeTypes</option></entry> + <entry>Enable <link linkend="impredicative-polymorphism">impredicative types</link>. + Implies <option>-XRankNTypes</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoOverlappingInstances</option></entry> + <entry><option>-XNoImpredicativeTypes</option></entry> </row> <row> - <entry><option>-XOverloadedLists</option></entry> - <entry>Enable <link linkend="overloaded-lists">overloaded lists</link>. - </entry> + <entry><option>-XIncoherentInstances</option></entry> + <entry>Enable <link linkend="instance-overlap">incoherent instances</link>. + Implies <option>-XOverlappingInstances</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoOverloadedLists</option></entry> + <entry><option>-XNoIncoherentInstances</option></entry> </row> <row> - <entry><option>-XOverloadedStrings</option></entry> - <entry>Enable <link linkend="overloaded-strings">overloaded string literals</link>. - </entry> + <entry><option>-XInstanceSigs</option></entry> + <entry>Enable <link linkend="instance-sigs">instance signatures</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoOverloadedStrings</option></entry> + <entry><option>-XNoInstanceSigs</option></entry> </row> <row> - <entry><option>-XQuasiQuotes</option></entry> - <entry>Enable <link linkend="th-quasiquotation">quasiquotation</link>.</entry> + <entry><option>-XInterruptibleFFI</option></entry> + <entry>Enable interruptible FFI.</entry> <entry>dynamic</entry> - <entry><option>-XNoQuasiQuotes</option></entry> + <entry><option>-XNoInterruptibleFFI</option></entry> </row> <row> - <entry><option>-XRelaxedPolyRec</option></entry> - <entry>Relaxed checking for <link linkend="typing-binds">mutually-recursive polymorphic functions</link></entry> + <entry><option>-XKindSignatures</option></entry> + <entry>Enable <link linkend="kinding">kind signatures</link>. 
+ Implied by <option>-XTypeFamilies</option> and <option>-XPolyKinds</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoRelaxedPolyRec</option></entry> + <entry><option>-XNoKindSignatures</option></entry> </row> <row> - <entry><option>-XNoTraditionalRecordSyntax</option></entry> - <entry>Disable support for traditional record syntax (as supported by Haskell 98) <literal>C {f = x}</literal></entry> + <entry><option>-XLambdaCase</option></entry> + <entry>Enable <link linkend="lambda-case">lambda-case expressions</link>.</entry> <entry>dynamic</entry> - <entry><option>-XTraditionalRecordSyntax</option></entry> + <entry><option>-XNoLambdaCase</option></entry> </row> <row> - <entry><option>-XTypeFamilies</option></entry> - <entry>Enable <link linkend="type-families">type families</link>.</entry> + <entry><option>-XLiberalTypeSynonyms</option></entry> + <entry>Enable <link linkend="type-synonyms">liberalised type synonyms</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoTypeFamilies</option></entry> + <entry><option>-XNoLiberalTypeSynonyms</option></entry> </row> <row> - <entry><option>-XUndecidableInstances</option></entry> - <entry>Enable <link linkend="undecidable-instances">undecidable instances</link></entry> + <entry><option>-XMagicHash</option></entry> + <entry>Allow "#" as a <link linkend="magic-hash">postfix modifier on identifiers</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoUndecidableInstances</option></entry> + <entry><option>-XNoMagicHash</option></entry> </row> <row> - <entry><option>-XPolyKinds</option></entry> - <entry>Enable <link linkend="kind-polymorphism">kind polymorphism</link>. - Implies <option>-XKindSignatures</option>.</entry> + <entry><option>-XMonadComprehensions</option></entry> + <entry>Enable <link linkend="monad-comprehensions">monad comprehensions</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoPolyKinds</option></entry> + <entry><option>-XNoMonadComprehensions</option></entry> </row> <row> <entry><option>-XMonoLocalBinds</option></entry> @@ -935,164 +1021,159 @@ </entry> <entry>dynamic</entry> <entry><option>-XNoMonoLocalBinds</option></entry> - </row> + </row> <row> - <entry><option>-XRebindableSyntax</option></entry> - <entry>Employ <link linkend="rebindable-syntax">rebindable syntax</link></entry> + <entry><option>-XNoMonomorphismRestriction</option></entry> + <entry>Disable the <link linkend="monomorphism">monomorphism restriction</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoRebindableSyntax</option></entry> + <entry><option>-XMonomorphismRrestriction</option></entry> </row> <row> - <entry><option>-XScopedTypeVariables</option></entry> - <entry>Enable <link linkend="scoped-type-variables">lexically-scoped type variables</link>. - </entry> + <entry><option>-XMultiParamTypeClasses</option></entry> + <entry>Enable <link linkend="multi-param-type-classes">multi parameter type classes</link>. 
+ Implied by <option>-XFunctionalDependencies</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoScopedTypeVariables</option></entry> + <entry><option>-XNoMultiParamTypeClasses</option></entry> </row> <row> - <entry><option>-XTemplateHaskell</option></entry> - <entry>Enable <link linkend="template-haskell">Template Haskell</link>.</entry> + <entry><option>-XMultiWayIf</option></entry> + <entry>Enable <link linkend="multi-way-if">multi-way if-expressions</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoTemplateHaskell</option></entry> + <entry><option>-XNoMultiWayIf</option></entry> </row> <row> - <entry><option>-XBangPatterns</option></entry> - <entry>Enable <link linkend="bang-patterns">bang patterns</link>.</entry> + <entry><option>-XNamedFieldPuns</option></entry> + <entry>Enable <link linkend="record-puns">record puns</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoBangPatterns</option></entry> + <entry><option>-XNoNamedFieldPuns</option></entry> </row> <row> - <entry><option>-XCPP</option></entry> - <entry>Enable the <link linkend="c-pre-processor">C preprocessor</link>.</entry> + <entry><option>-XNegativeLiterals</option></entry> + <entry>Enable support for <link linkend="negative-literals">negative literals</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoCPP</option></entry> + <entry><option>-XNoNegativeLiterals</option></entry> </row> <row> - <entry><option>-XPatternGuards</option></entry> - <entry>Enable <link linkend="pattern-guards">pattern guards</link>.</entry> + <entry><option>-XNoNPlusKPatterns</option></entry> + <entry>Disable support for <literal>n+k</literal> patterns.</entry> <entry>dynamic</entry> - <entry><option>-XNoPatternGuards</option></entry> + <entry><option>-XNPlusKPatterns</option></entry> </row> <row> - <entry><option>-XViewPatterns</option></entry> - <entry>Enable <link linkend="view-patterns">view patterns</link>.</entry> + <entry><option>-XNullaryTypeClasses</option></entry> + <entry>Deprecated, does nothing. <link linkend="nullary-type-classes">nullary (no parameter) type classes</link> are now enabled using <option>-XMultiParamTypeClasses</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoViewPatterns</option></entry> + <entry><option>-XNoNullaryTypeClasses</option></entry> </row> <row> - <entry><option>-XUnicodeSyntax</option></entry> - <entry>Enable <link linkend="unicode-syntax">unicode syntax</link>.</entry> + <entry><option>-XNumDecimals</option></entry> + <entry>Enable support for 'fractional' integer literals.</entry> <entry>dynamic</entry> - <entry><option>-XNoUnicodeSyntax</option></entry> + <entry><option>-XNoNumDecimals</option></entry> </row> <row> - <entry><option>-XMagicHash</option></entry> - <entry>Allow "#" as a <link linkend="magic-hash">postfix modifier on identifiers</link>.</entry> + <entry><option>-XOverlappingInstances</option></entry> + <entry>Enable <link linkend="instance-overlap">overlapping instances</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoMagicHash</option></entry> + <entry><option>-XNoOverlappingInstances</option></entry> </row> <row> - <entry><option>-XExplicitForAll</option></entry> - <entry>Enable <link linkend="explicit-foralls">explicit universal quantification</link>. 
- Implied by <option>-XScopedTypeVariables</option>, - <option>-XLiberalTypeSynonyms</option>, - <option>-XRankNTypes</option>, - <option>-XExistentialQuantification</option> + <entry><option>-XOverloadedLists</option></entry> + <entry>Enable <link linkend="overloaded-lists">overloaded lists</link>. </entry> <entry>dynamic</entry> - <entry><option>-XNoExplicitForAll</option></entry> - </row> - <row> - <entry><option>-XPolymorphicComponents</option></entry> - <entry>Enable <link linkend="universal-quantification">polymorphic components for data constructors</link>.</entry> - <entry>dynamic, synonym for <option>-XRankNTypes</option></entry> - <entry><option>-XNoPolymorphicComponents</option></entry> + <entry><option>-XNoOverloadedLists</option></entry> </row> <row> - <entry><option>-XRank2Types</option></entry> - <entry>Enable <link linkend="universal-quantification">rank-2 types</link>.</entry> - <entry>dynamic, synonym for <option>-XRankNTypes</option></entry> - <entry><option>-XNoRank2Types</option></entry> + <entry><option>-XOverloadedStrings</option></entry> + <entry>Enable <link linkend="overloaded-strings">overloaded string literals</link>. + </entry> + <entry>dynamic</entry> + <entry><option>-XNoOverloadedStrings</option></entry> </row> <row> - <entry><option>-XRankNTypes</option></entry> - <entry>Enable <link linkend="universal-quantification">rank-N types</link>.</entry> + <entry><option>-XPackageImports</option></entry> + <entry>Enable <link linkend="package-imports">package-qualified imports</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoRankNTypes</option></entry> + <entry><option>-XNoPackageImports</option></entry> </row> <row> - <entry><option>-XImpredicativeTypes</option></entry> - <entry>Enable <link linkend="impredicative-polymorphism">impredicative types</link>.</entry> + <entry><option>-XParallelArrays</option></entry> + <entry>Enable parallel arrays. + Implies <option>-XParallelListComp</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoImpredicativeTypes</option></entry> + <entry><option>-XNoParallelArrays</option></entry> </row> <row> - <entry><option>-XExistentialQuantification</option></entry> - <entry>Enable <link linkend="existential-quantification">existential quantification</link>.</entry> + <entry><option>-XParallelListComp</option></entry> + <entry>Enable <link linkend="parallel-list-comprehensions">parallel list comprehensions</link>. 
+ Implied by <option>-XParallelArrays</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoExistentialQuantification</option></entry> + <entry><option>-XNoParallelListComp</option></entry> </row> <row> - <entry><option>-XKindSignatures</option></entry> - <entry>Enable <link linkend="kinding">kind signatures</link>.</entry> + <entry><option>-XPatternGuards</option></entry> + <entry>Enable <link linkend="pattern-guards">pattern guards</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoKindSignatures</option></entry> + <entry><option>-XNoPatternGuards</option></entry> </row> <row> - <entry><option>-XEmptyDataDecls</option></entry> - <entry>Enable empty data declarations.</entry> + <entry><option>-XPatternSynonyms</option></entry> + <entry>Enable <link linkend="pattern-synonyms">pattern synonyms</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoEmptyDataDecls</option></entry> + <entry><option>-XNoPatternSynonyms</option></entry> </row> <row> - <entry><option>-XParallelListComp</option></entry> - <entry>Enable <link linkend="parallel-list-comprehensions">parallel list comprehensions</link>.</entry> + <entry><option>-XPolyKinds</option></entry> + <entry>Enable <link linkend="kind-polymorphism">kind polymorphism</link>. + Implies <option>-XKindSignatures</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoParallelListComp</option></entry> + <entry><option>-XNoPolyKinds</option></entry> </row> <row> - <entry><option>-XTransformListComp</option></entry> - <entry>Enable <link linkend="generalised-list-comprehensions">generalised list comprehensions</link>.</entry> - <entry>dynamic</entry> - <entry><option>-XNoTransformListComp</option></entry> + <entry><option>-XPolymorphicComponents</option></entry> + <entry>Enable <link linkend="universal-quantification">polymorphic components for data constructors</link>.</entry> + <entry>dynamic, synonym for <option>-XRankNTypes</option></entry> + <entry><option>-XNoPolymorphicComponents</option></entry> </row> <row> - <entry><option>-XMonadComprehensions</option></entry> - <entry>Enable <link linkend="monad-comprehensions">monad comprehensions</link>.</entry> + <entry><option>-XPostfixOperators</option></entry> + <entry>Enable <link linkend="postfix-operators">postfix operators</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoMonadComprehensions</option></entry> + <entry><option>-XNoPostfixOperators</option></entry> </row> <row> - <entry><option>-XUnliftedFFITypes</option></entry> - <entry>Enable unlifted FFI types.</entry> + <entry><option>-XQuasiQuotes</option></entry> + <entry>Enable <link linkend="th-quasiquotation">quasiquotation</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoUnliftedFFITypes</option></entry> + <entry><option>-XNoQuasiQuotes</option></entry> </row> <row> - <entry><option>-XInterruptibleFFI</option></entry> - <entry>Enable interruptible FFI.</entry> - <entry>dynamic</entry> - <entry><option>-XNoInterruptibleFFI</option></entry> + <entry><option>-XRank2Types</option></entry> + <entry>Enable <link linkend="universal-quantification">rank-2 types</link>.</entry> + <entry>dynamic, synonym for <option>-XRankNTypes</option></entry> + <entry><option>-XNoRank2Types</option></entry> </row> <row> - <entry><option>-XLiberalTypeSynonyms</option></entry> - <entry>Enable <link linkend="type-synonyms">liberalised type synonyms</link>.</entry> + <entry><option>-XRankNTypes</option></entry> + <entry>Enable <link linkend="universal-quantification">rank-N types</link>. 
+ Implied by <option>-XImpredicativeTypes</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoLiberalTypeSynonyms</option></entry> + <entry><option>-XNoRankNTypes</option></entry> </row> <row> - <entry><option>-XTypeOperators</option></entry> - <entry>Enable <link linkend="type-operators">type operators</link>.</entry> + <entry><option>-XRebindableSyntax</option></entry> + <entry>Employ <link linkend="rebindable-syntax">rebindable syntax</link>. + Implies <option>-XNoImplicitPrelude</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoTypeOperators</option></entry> + <entry><option>-XNoRebindableSyntax</option></entry> </row> <row> - <entry><option>-XExplicitNamespaces</option></entry> - <entry>Enable using the keyword <literal>type</literal> to specify the namespace of - entries in imports and exports (<xref linkend="explicit-namespaces"/>). - Implied by <option>-XTypeOperators</option> and <option>-XTypeFamilies</option>.</entry> + <entry><option>-XRecordWildCards</option></entry> + <entry>Enable <link linkend="record-wildcards">record wildcards</link>. + Implies <option>-XDisambiguateRecordFields</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoExplicitNamespaces</option></entry> + <entry><option>-XNoRecordWildCards</option></entry> </row> <row> <entry><option>-XRecursiveDo</option></entry> @@ -1101,34 +1182,30 @@ <entry><option>-XNoRecursiveDo</option></entry> </row> <row> - <entry><option>-XParallelArrays</option></entry> - <entry>Enable parallel arrays.</entry> - <entry>dynamic</entry> - <entry><option>-XNoParallelArrays</option></entry> - </row> - <row> - <entry><option>-XRecordWildCards</option></entry> - <entry>Enable <link linkend="record-wildcards">record wildcards</link>.</entry> + <entry><option>-XRelaxedPolyRec</option></entry> + <entry><emphasis>(deprecated)</emphasis> Relaxed checking for + <link linkend="typing-binds">mutually-recursive polymorphic functions</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoRecordWildCards</option></entry> + <entry><option>-XNoRelaxedPolyRec</option></entry> </row> <row> - <entry><option>-XNamedFieldPuns</option></entry> - <entry>Enable <link linkend="record-puns">record puns</link>.</entry> + <entry><option>-XRoleAnnotations</option></entry> + <entry>Enable <link linkend="role-annotations">role annotations</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoNamedFieldPuns</option></entry> + <entry><option>-XNoRoleAnnotations</option></entry> </row> <row> - <entry><option>-XDisambiguateRecordFields</option></entry> - <entry>Enable <link linkend="disambiguate-fields">record field disambiguation</link>. </entry> + <entry><option>-XSafe</option></entry> + <entry>Enable the <link linkend="safe-haskell">Safe Haskell</link> Safe mode.</entry> <entry>dynamic</entry> - <entry><option>-XNoDisambiguateRecordFields</option></entry> + <entry><option>-</option></entry> </row> <row> - <entry><option>-XUnboxedTuples</option></entry> - <entry>Enable <link linkend="unboxed-tuples">unboxed tuples</link>.</entry> + <entry><option>-XScopedTypeVariables</option></entry> + <entry>Enable <link linkend="scoped-type-variables">lexically-scoped type variables</link>. 
+ </entry> <entry>dynamic</entry> - <entry><option>-XNoUnboxedTuples</option></entry> + <entry><option>-XNoScopedTypeVariables</option></entry> </row> <row> <entry><option>-XStandaloneDeriving</option></entry> @@ -1137,83 +1214,80 @@ <entry><option>-XNoStandaloneDeriving</option></entry> </row> <row> - <entry><option>-XTypeSynonymInstances</option></entry> - <entry>Enable <link linkend="flexible-instance-head">type synonyms in instance heads</link>.</entry> - <entry>dynamic</entry> - <entry><option>-XNoTypeSynonymInstances</option></entry> - </row> - <row> - <entry><option>-XFlexibleContexts</option></entry> - <entry>Enable <link linkend="flexible-contexts">flexible contexts</link>.</entry> + <entry><option>-XTemplateHaskell</option></entry> + <entry>Enable <link linkend="template-haskell">Template Haskell</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoFlexibleContexts</option></entry> + <entry><option>-XNoTemplateHaskell</option></entry> </row> <row> - <entry><option>-XFlexibleInstances</option></entry> - <entry>Enable <link linkend="instance-rules">flexible instances</link>. - Implies <option>-XTypeSynonymInstances</option> </entry> + <entry><option>-XNoTraditionalRecordSyntax</option></entry> + <entry>Disable support for traditional record syntax (as supported by Haskell 98) <literal>C {f = x}</literal></entry> <entry>dynamic</entry> - <entry><option>-XNoFlexibleInstances</option></entry> + <entry><option>-XTraditionalRecordSyntax</option></entry> </row> <row> - <entry><option>-XConstrainedClassMethods</option></entry> - <entry>Enable <link linkend="class-method-types">constrained class methods</link>.</entry> + <entry><option>-XTransformListComp</option></entry> + <entry>Enable <link linkend="generalised-list-comprehensions">generalised list comprehensions</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoConstrainedClassMethods</option></entry> + <entry><option>-XNoTransformListComp</option></entry> </row> <row> - <entry><option>-XDefaultSignatures</option></entry> - <entry>Enable <link linkend="class-default-signatures">default signatures</link>.</entry> + <entry><option>-XTrustworthy</option></entry> + <entry>Enable the <link linkend="safe-haskell">Safe Haskell</link> Trustworthy mode.</entry> <entry>dynamic</entry> - <entry><option>-XNoDefaultSignatures</option></entry> + <entry><option>-</option></entry> </row> <row> - <entry><option>-XMultiParamTypeClasses</option></entry> - <entry>Enable <link linkend="multi-param-type-classes">multi parameter type classes</link>.</entry> + <entry><option>-XTupleSections</option></entry> + <entry>Enable <link linkend="tuple-sections">tuple sections</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoMultiParamTypeClasses</option></entry> + <entry><option>-XNoTupleSections</option></entry> </row> <row> - <entry><option>-XNullaryTypeClasses</option></entry> - <entry>Enable <link linkend="nullary-type-classes">nullary (no parameter) type classes</link>.</entry> + <entry><option>-XTypeFamilies</option></entry> + <entry>Enable <link linkend="type-families">type families</link>. 
+ Implies <option>-XExplicitNamespaces</option>, <option>-XKindSignatures</option> + and <option>-XMonoLocalBinds</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoNullaryTypeClasses</option></entry> + <entry><option>-XNoTypeFamilies</option></entry> </row> <row> - <entry><option>-XFunctionalDependencies</option></entry> - <entry>Enable <link linkend="functional-dependencies">functional dependencies</link>.</entry> + <entry><option>-XTypeOperators</option></entry> + <entry>Enable <link linkend="type-operators">type operators</link>. + Implies <option>-XExplicitNamespaces</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoFunctionalDependencies</option></entry> + <entry><option>-XNoTypeOperators</option></entry> </row> <row> - <entry><option>-XPackageImports</option></entry> - <entry>Enable <link linkend="package-imports">package-qualified imports</link>.</entry> + <entry><option>-XTypeSynonymInstances</option></entry> + <entry>Enable <link linkend="flexible-instance-head">type synonyms in instance heads</link>. + Implied by <option>-XFlexibleInstances</option>.</entry> <entry>dynamic</entry> - <entry><option>-XNoPackageImports</option></entry> + <entry><option>-XNoTypeSynonymInstances</option></entry> </row> <row> - <entry><option>-XLambdaCase</option></entry> - <entry>Enable <link linkend="lambda-case">lambda-case expressions</link>.</entry> + <entry><option>-XUnboxedTuples</option></entry> + <entry>Enable <link linkend="unboxed-tuples">unboxed tuples</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoLambdaCase</option></entry> + <entry><option>-XNoUnboxedTuples</option></entry> </row> <row> - <entry><option>-XMultiWayIf</option></entry> - <entry>Enable <link linkend="multi-way-if">multi-way if-expressions</link>.</entry> + <entry><option>-XUndecidableInstances</option></entry> + <entry>Enable <link linkend="undecidable-instances">undecidable instances</link>.</entry> <entry>dynamic</entry> - <entry><option>-XNoMultiWayIf</option></entry> + <entry><option>-XNoUndecidableInstances</option></entry> </row> <row> - <entry><option>-XSafe</option></entry> - <entry>Enable the <link linkend="safe-haskell">Safe Haskell</link> Safe mode.</entry> + <entry><option>-XUnicodeSyntax</option></entry> + <entry>Enable <link linkend="unicode-syntax">unicode syntax</link>.</entry> <entry>dynamic</entry> - <entry><option>-</option></entry> + <entry><option>-XNoUnicodeSyntax</option></entry> </row> <row> - <entry><option>-XTrustworthy</option></entry> - <entry>Enable the <link linkend="safe-haskell">Safe Haskell</link> Trustworthy mode.</entry> + <entry><option>-XUnliftedFFITypes</option></entry> + <entry>Enable unlifted FFI types.</entry> <entry>dynamic</entry> - <entry><option>-</option></entry> + <entry><option>-XNoUnliftedFFITypes</option></entry> </row> <row> <entry><option>-XUnsafe</option></entry> @@ -1222,10 +1296,10 @@ <entry><option>-</option></entry> </row> <row> - <entry><option>-fpackage-trust</option></entry> - <entry>Enable <link linkend="safe-haskell">Safe Haskell</link> trusted package requirement for trustworthy modules.</entry> + <entry><option>-XViewPatterns</option></entry> + <entry>Enable <link linkend="view-patterns">view patterns</link>.</entry> <entry>dynamic</entry> - <entry><option>-</option></entry> + <entry><option>-XNoViewPatterns</option></entry> </row> </tbody> </tgroup> @@ -2141,6 +2215,12 @@ <entry>-</entry> </row> <row> + <entry><option>-fwrite-interface</option></entry> + <entry>Always write interface files</entry> + <entry>dynamic</entry> + 
<entry>-</entry> + </row> + <row> <entry><option>-fbyte-code</option></entry> <entry>Generate byte-code</entry> <entry>dynamic</entry> @@ -2607,34 +2687,6 @@ <sect2> - <title>External core file options</title> - - <para><xref linkend="ext-core"/></para> - - <informaltable> - <tgroup cols="4" align="left" colsep="1" rowsep="1"> - <thead> - <row> - <entry>Flag</entry> - <entry>Description</entry> - <entry>Static/Dynamic</entry> - <entry>Reverse</entry> - </row> - </thead> - <tbody> - <row> - <entry><option>-fext-core</option></entry> - <entry>Generate <filename>.hcr</filename> external Core files</entry> - <entry>dynamic</entry> - <entry>-</entry> - </row> - </tbody> - </tgroup> - </informaltable> - </sect2> - - - <sect2> <title>Compiler debugging options</title> <para><xref linkend="options-debugging"/></para> diff --git a/docs/users_guide/ghci.xml b/docs/users_guide/ghci.xml index 912ecb25ce..729f96f244 100644 --- a/docs/users_guide/ghci.xml +++ b/docs/users_guide/ghci.xml @@ -2432,7 +2432,9 @@ Prelude> :. cmds.ghci <listitem> <para>Opens an editor to edit the file <replaceable>file</replaceable>, or the most recently loaded - module if <replaceable>file</replaceable> is omitted. The + module if <replaceable>file</replaceable> is omitted. + If there were errors during the last loading, + the cursor will be positioned at the line of the first error. The editor to invoke is taken from the <literal>EDITOR</literal> environment variable, or a default editor on your system if <literal>EDITOR</literal> is not set. You can change the @@ -3294,12 +3296,38 @@ Prelude> :set -fno-warn-incomplete-patterns -XNoMultiParamTypeClasses <title>Setting options for interactive evaluation only</title> <para> - GHCi actually maintains two sets of options: one set that - applies when loading modules, and another set that applies for - expressions and commands typed at the prompt. The - <literal>:set</literal> command modifies both, but there is + GHCi actually maintains <emphasis>two</emphasis> sets of options: +<itemizedlist> +<listitem><para> + The <emphasis>loading options</emphasis> apply when loading modules +</para></listitem> +<listitem><para> + The <emphasis>interactive options</emphasis> apply when evaluating expressions and commands typed at the GHCi prompt. +</para></listitem> +</itemizedlist> +The <literal>:set</literal> command modifies both, but there is also a <literal>:seti</literal> command (for "set - interactive") that affects only the second set. + interactive") that affects only the interactive options set. + </para> + + <para> + It is often useful to change the interactive options, + without having that option apply to loaded modules + too. For example +<screen> +:seti -XMonoLocalBinds +</screen> + It would be undesirable if <option>-XMonoLocalBinds</option> were to + apply to loaded modules too: that might cause a compilation error, but + more commonly it will cause extra recompilation, because GHC will think + that it needs to recompile the module because the flags have changed. + </para> + + <para> + If you are setting language options in your <literal>.ghci</literal> file, it is good practice + to use <literal>:seti</literal> rather than <literal>:set</literal>, + unless you really do want them to apply to all modules you + load in GHCi. </para> <para> @@ -3307,8 +3335,6 @@ Prelude> :set -fno-warn-incomplete-patterns -XNoMultiParamTypeClasses <literal>:set</literal> and <literal>:seti</literal> commands respectively, with no arguments. 
For example, in a clean GHCi session we might see something like this: - </para> - <screen> Prelude> :seti base language is: Haskell2010 @@ -3322,38 +3348,24 @@ other dynamic, non-language, flag settings: -fimplicit-import-qualified warning settings: </screen> - <para> - Note that the option <option>-XExtendedDefaultRules</option> - is on, because we apply special defaulting rules to + </para> +<para> +The two sets of options are initialised as follows. First, both sets of options +are initialised as described in <xref linkend="ghci-dot-files"/>. +Then the interactive options are modified as follows: +<itemizedlist> +<listitem><para> + The option <option>-XExtendedDefaultRules</option> + is enabled, in order to apply special defaulting rules to expressions typed at the prompt (see <xref linkend="extended-default-rules" />). - </para> - - <para> - Furthermore, the Monomorphism Restriction is disabled by default in - GHCi (see <xref linkend="monomorphism" />). - </para> - - <para> - It is often useful to change the language options for expressions typed - at the prompt only, without having that option apply to loaded modules - too. For example -<screen> -:seti -XMonoLocalBinds -</screen> - It would be undesirable if <option>-XMonoLocalBinds</option> were to - apply to loaded modules too: that might cause a compilation error, but - more commonly it will cause extra recompilation, because GHC will think - that it needs to recompile the module because the flags have changed. - </para> + </para></listitem> - <para> - It is therefore good practice if you are setting language - options in your <literal>.ghci</literal> file, to use - <literal>:seti</literal> rather than <literal>:set</literal> - unless you really do want them to apply to all modules you - load in GHCi. - </para> +<listitem> <para> + The Monomorphism Restriction is disabled (see <xref linkend="monomorphism" />). + </para></listitem> +</itemizedlist> +</para> </sect2> </sect1> diff --git a/docs/users_guide/glasgow_exts.xml b/docs/users_guide/glasgow_exts.xml index acc796371a..9acb56fc29 100644 --- a/docs/users_guide/glasgow_exts.xml +++ b/docs/users_guide/glasgow_exts.xml @@ -480,6 +480,26 @@ Indeed, the bindings can even be recursive. </para> </sect2> + <sect2 id="binary-literals"> + <title>Binary integer literals</title> + <para> + Haskell 2010 and Haskell 98 allows for integer literals to + be given in decimal, octal (prefixed by + <literal>0o</literal> or <literal>0O</literal>), or + hexadecimal notation (prefixed by <literal>0x</literal> or + <literal>0X</literal>). + </para> + + <para> + The language extension <option>-XBinaryLiterals</option> + adds support for expressing integer literals in binary + notation with the prefix <literal>0b</literal> or + <literal>0B</literal>. For instance, the binary integer + literal <literal>0b11001001</literal> will be desugared into + <literal>fromInteger 201</literal> when + <option>-XBinaryLiterals</option> is enabled. + </para> + </sect2> <!-- ====================== HIERARCHICAL MODULES ======================= --> @@ -971,25 +991,27 @@ right-hand side. </para> <para> -The semantics of a unidirectional pattern synonym declaration and -usage are as follows: - -<itemizedlist> +The syntax and semantics of pattern synonyms are elaborated in the +following subsections. +See the <ulink +url="http://ghc.haskell.org/trac/ghc/wiki/PatternSynonyms">Wiki +page</ulink> for more details. 
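(Editorial aside, not part of the patch: as a minimal sketch of the two flavours described above, assuming only <option>-XPatternSynonyms</option>, one might write:
<programlisting>
{-# LANGUAGE PatternSynonyms #-}

-- Editorial sketch; 'Head', 'Single', 'headOrDefault' and 'one' are illustrative names only.

-- Unidirectional synonym: can only be used for matching
pattern Head x <- (x:_)

-- Bidirectional synonym: can also be used to construct values
pattern Single x = [x]

headOrDefault :: a -> [a] -> a
headOrDefault _ (Head x) = x     -- matches via the synonym
headOrDefault d _        = d

one :: [Int]
one = Single 1                   -- constructs via the synonym, i.e. [1]
</programlisting>
The details of syntax, scoping, typing and matching follow in the subsections below.)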
+</para> -<listitem> Syntax: +<sect3> <title>Syntax and scoping of pattern synonyms</title> <para> A pattern synonym declaration can be either unidirectional or bidirectional. The syntax for unidirectional pattern synonyms is: -</para> <programlisting> pattern Name args <- pat </programlisting> -<para> and the syntax for bidirectional pattern synonyms is: -</para> <programlisting> pattern Name args = pat </programlisting> + Either prefix or infix syntax can be + used. +</para> <para> Pattern synonym declarations can only occur in the top level of a module. In particular, they are not allowed as local @@ -997,20 +1019,6 @@ bidirectional. The syntax for unidirectional pattern synonyms is: technical restriction that will be lifted in later versions. </para> <para> - The name of the pattern synonym itself is in the same namespace as - proper data constructors. Either prefix or infix syntax can be - used. In export/import specifications, you have to prefix pattern - names with the <literal>pattern</literal> keyword, e.g.: -</para> -<programlisting> - module Example (pattern Single) where - pattern Single x = [x] -</programlisting> -</listitem> - -<listitem> Scoping: - -<para> The variables in the left-hand side of the definition are bound by the pattern on the right-hand side. For bidirectional pattern synonyms, all the variables of the right-hand side must also occur @@ -1022,10 +1030,35 @@ bidirectional. The syntax for unidirectional pattern synonyms is: <para> Pattern synonyms cannot be defined recursively. </para> +</sect3> -</listitem> +<sect3 id="patsyn-impexp"> <title>Import and export of pattern synonyms</title> + +<para> + The name of the pattern synonym itself is in the same namespace as + proper data constructors. In an export or import specification, + you must prefix pattern + names with the <literal>pattern</literal> keyword, e.g.: +<programlisting> + module Example (pattern Single) where + pattern Single x = [x] +</programlisting> +Without the <literal>pattern</literal> prefix, <literal>Single</literal> would +be interpreted as a type constructor in the export list. +</para> +<para> +You may also use the <literal>pattern</literal> keyword in an import/export +specification to import or export an ordinary data constructor. For example: +<programlisting> + import Data.Maybe( pattern Just ) +</programlisting> +would bring into scope the data constructor <literal>Just</literal> from the +<literal>Maybe</literal> type, without also bringing the type constructor +<literal>Maybe</literal> into scope. +</para> +</sect3> -<listitem> Typing: +<sect3> <title>Typing of pattern synonyms</title> <para> Given a pattern synonym definition of the form @@ -1100,10 +1133,9 @@ pattern (Show b) => ExNumPat b :: (Num a, Eq a) => T a <programlisting> ExNumPat :: (Show b, Num a, Eq a) => b -> T t </programlisting> +</sect3> -</listitem> - -<listitem> Matching: +<sect3><title>Matching of pattern synonyms</title> <para> A pattern synonym occurrence in a pattern is evaluated by first @@ -1125,8 +1157,6 @@ f' _ = False <para> Note that the strictness of <literal>f</literal> differs from that of <literal>g</literal> defined below: -</para> - <programlisting> g [True, True] = True g _ = False @@ -1136,9 +1166,8 @@ g _ = False *Main> g (False:undefined) False </programlisting> -</listitem> -</itemizedlist> </para> +</sect3> </sect2> @@ -1883,7 +1912,8 @@ the comprehension being over an arbitrary monad. 
functions <literal>(>>=)</literal>, <literal>(>>)</literal>, and <literal>fail</literal>, are in scope (not the Prelude - versions). List comprehensions, mdo (<xref linkend="recursive-do-notation"/>), and parallel array + versions). List comprehensions, <literal>mdo</literal> + (<xref linkend="recursive-do-notation"/>), and parallel array comprehensions, are unaffected. </para></listitem> <listitem> @@ -2391,6 +2421,35 @@ necessary to enable them. </sect2> <sect2 id="package-imports"> +<title>Import and export extensions</title> + +<sect3> + <title>Hiding things the imported module doesn't export</title> + +<para> +Technically in Haskell 2010 this is illegal: +<programlisting> +module A( f ) where + f = True + +module B where + import A hiding( g ) -- A does not export g + g = f +</programlisting> +The <literal>import A hiding( g )</literal> in module <literal>B</literal> +is technically an error (<ulink url="http://www.haskell.org/onlinereport/haskell2010/haskellch5.html#x11-1020005.3.1">Haskell Report, 5.3.1</ulink>) +because <literal>A</literal> does not export <literal>g</literal>. +However GHC allows it, in the interests of supporting backward compatibility; for example, a newer version of +<literal>A</literal> might export <literal>g</literal>, and you want <literal>B</literal> to work +in either case. +</para> +<para> +The warning <literal>-fwarn-dodgy-imports</literal>, which is off by default but included with <literal>-W</literal>, +warns if you hide something that the imported module does not export. +</para> +</sect3> + +<sect3> <title>Package-qualified imports</title> <para>With the <option>-XPackageImports</option> flag, GHC allows @@ -2415,9 +2474,9 @@ import "network" Network.Socket packages when APIs change. It can lead to fragile dependencies in the common case: modules occasionally move from one package to another, rendering any package-qualified imports broken.</para> -</sect2> +</sect3> -<sect2 id="safe-imports-ext"> +<sect3 id="safe-imports-ext"> <title>Safe imports</title> <para>With the <option>-XSafe</option>, <option>-XTrustworthy</option> @@ -2435,9 +2494,9 @@ import safe qualified Network.Socket as NS safely imported. For a description of when a import is considered safe see <xref linkend="safe-haskell"/></para> -</sect2> +</sect3> -<sect2 id="explicit-namespaces"> +<sect3 id="explicit-namespaces"> <title>Explicit namespaces in import/export</title> <para> In an import or export list, such as @@ -2465,6 +2524,14 @@ disambiguate this case, thus: The extension <option>-XExplicitNamespaces</option> is implied by <option>-XTypeOperators</option> and (for some reason) by <option>-XTypeFamilies</option>. </para> +<para> +In addition, with <option>-XPatternSynonyms</option> you can prefix the name of +a data constructor in an import or export list with the keyword <literal>pattern</literal>, +to allow the import or export of a data constructor without its parent type constructor +(see <xref linkend="patsyn-impexp"/>). +</para> +</sect3> + </sect2> <sect2 id="syntax-stolen"> @@ -3882,7 +3949,11 @@ defined in <literal>Data.Foldable</literal>. <listitem><para> With <option>-XDeriveTraversable</option>, you can derive instances of the class <literal>Traversable</literal>, -defined in <literal>Data.Traversable</literal>. +defined in <literal>Data.Traversable</literal>. 
Since the <literal>Traversable</literal> +instance dictates the instances of <literal>Functor</literal> and +<literal>Foldable</literal>, you'll probably want to derive them too, so +<option>-XDeriveTraversable</option> implies +<option>-XDeriveFunctor</option> and <option>-XDeriveFoldable</option>. </para></listitem> </itemizedlist> You can also use a standalone deriving declaration instead @@ -4317,7 +4388,9 @@ We use default signatures to simplify generic programming in GHC <sect3 id="nullary-type-classes"> <title>Nullary type classes</title> -Nullary (no parameter) type classes are enabled with <option>-XNullaryTypeClasses</option>. +Nullary (no parameter) type classes are enabled with +<option>-XMultiParamTypeClasses</option>; historically, they were enabled with the +(now deprecated) <option>-XNullaryTypeClasses</option>. Since there are no available parameters, there can be at most one instance of a nullary class. A nullary type class might be used to document some assumption in a type signature (such as reliance on the Riemann hypothesis) or add some @@ -4938,7 +5011,8 @@ with <option>-fcontext-stack=</option><emphasis>N</emphasis>. In general, as discussed in <xref linkend="instance-resolution"/>, <emphasis>GHC requires that it be unambiguous which instance declaration -should be used to resolve a type-class constraint</emphasis>. This behaviour +should be used to resolve a type-class constraint</emphasis>. +This behaviour can be modified by two flags: <option>-XOverlappingInstances</option> <indexterm><primary>-XOverlappingInstances </primary></indexterm> @@ -4947,6 +5021,8 @@ and <option>-XIncoherentInstances</option> </primary></indexterm>, as this section discusses. Both these flags are dynamic flags, and can be set on a per-module basis, using an <literal>LANGUAGE</literal> pragma if desired (<xref linkend="language-pragma"/>).</para> + + <para> The <option>-XOverlappingInstances</option> flag instructs GHC to loosen the instance resolution described in <xref linkend="instance-resolution"/>, by @@ -4954,18 +5030,83 @@ allowing more than one instance to match, <emphasis>provided there is a most specific one</emphasis>. The <option>-XIncoherentInstances</option> flag further loosens the resolution, by allowing more than one instance to match, irespective of whether there is a most specific one. +The <option>-XIncoherentInstances</option> flag implies the +<option>-XOverlappingInstances</option> flag, but not vice versa. </para> <para> -For example, consider +A more precise specification is as follows. +The willingness to be overlapped or incoherent is a property of +the <emphasis>instance declaration</emphasis> itself, controlled by the +presence or otherwise of the <option>-XOverlappingInstances</option> +and <option>-XIncoherentInstances</option> flags when that instance declaration is +being compiled. Now suppose that, in some client module, we are searching for an instance of the +<emphasis>target constraint</emphasis> <literal>(C ty1 .. tyn)</literal>. +The search works like this. +<itemizedlist> +<listitem><para> +Find all instances I that <emphasis>match</emphasis> the target constraint; +that is, the target constraint is a substitution instance of I. These +instance declarations are the <emphasis>candidates</emphasis>. +</para></listitem> + +<listitem><para> +Find all <emphasis>non-candidate</emphasis> instances +that <emphasis>unify</emphasis> with the target constraint. +Such non-candidate instances might match when the target constraint is further +instantiated. If all of them were compiled with +<option>-XIncoherentInstances</option>, proceed; if not, the search fails. +</para></listitem> + +<listitem><para> +Eliminate any candidate IX for which both of the following hold: + +<itemizedlist> +<listitem><para>There is another candidate IY that is strictly more specific; +that is, IY is a substitution instance of IX but not vice versa. +</para></listitem> +<listitem><para>Either IX or IY was compiled with +<option>-XOverlappingInstances</option>. +</para></listitem> +</itemizedlist> + +</para></listitem> + +<listitem><para> +If only one candidate remains, pick it. +Otherwise if all remaining candidates were compiled with +<option>-XIncoherentInstances</option>, pick an arbitrary candidate. +</para></listitem> + +</itemizedlist> +These rules make it possible for a library author to design a library that relies on +overlapping instances without the library client having to know. +</para> +<para> +Errors are reported <emphasis>lazily</emphasis> (when attempting to solve a constraint), rather than <emphasis>eagerly</emphasis> +(when the instances themselves are defined). So for example +<programlisting> + instance C Int b where .. + instance C a Bool where .. +</programlisting> +These potentially overlap, but GHC will not complain about the instance declarations +themselves, regardless of flag settings. If we later try to solve the constraint +<literal>(C Int Char)</literal> then only the first instance matches, and all is well. +Similarly with <literal>(C Bool Bool)</literal>. But if we try to solve <literal>(C Int Bool)</literal>, +both instances match and an error is reported. +</para> + +<para> +As a more substantial example of the rules in action, consider <programlisting> instance context1 => C Int b where ... -- (A) instance context2 => C a Bool where ... -- (B) instance context3 => C a [b] where ... -- (C) instance context4 => C Int [Int] where ... -- (D) </programlisting> -compiled with <option>-XOverlappingInstances</option> enabled. The constraint -<literal>C Int [Int]</literal> matches instances (A), (C) and (D), but the last +compiled with <option>-XOverlappingInstances</option> enabled. Now suppose that the type inference +engine needs to solve the constraint +<literal>C Int [Int]</literal>. This constraint matches instances (A), (C) and (D), but the last is more specific, and hence is chosen. </para> <para>If (D) did not exist then (A) and (C) would still be matched, but neither is @@ -4981,7 +5122,7 @@ the head of former is a substitution instance of the latter. For example substituting <literal>a:=Int</literal>. </para> <para> -However, GHC is conservative about committing to an overlapping instance. For example: +GHC is conservative about committing to an overlapping instance. For example: <programlisting> f :: [b] -> [b] f x = ... @@ -5078,56 +5219,6 @@ the program prints would be to reject module <literal>Help</literal> on the grounds that a later instance declaration might overlap the local one.) </para> -<para> -The willingness to be overlapped or incoherent is a property of -the <emphasis>instance declaration</emphasis> itself, controlled by the -presence or otherwise of the <option>-XOverlappingInstances</option> -and <option>-XIncoherentInstances</option> flags when that module is -being defined. Suppose we are searching for an instance of the -<emphasis>target constraint</emphasis> <literal>(C ty1 .. tyn)</literal>. -The search works like this. -<itemizedlist> -<listitem><para> -Find all instances I that <emphasis>match</emphasis> the target constraint; -that is, the target constraint is a substitution instance of I. These -instance declarations are the <emphasis>candidates</emphasis>. -</para></listitem> - -<listitem><para> -Find all <emphasis>non-candidate</emphasis> instances -that <emphasis>unify</emphasis> with the target constraint. -Such non-candidates instances might match when the target constraint is further -instantiated. If all of them were compiled with -<option>-XIncoherentInstances</option>, proceed; if not, the search fails. -</para></listitem> - -<listitem><para> -Eliminate any candidate IX for which both of the following hold: - -<itemizedlist> -<listitem><para>There is another candidate IY that is strictly more specific; -that is, IY is a substitution instance of IX but not vice versa. -</para></listitem> -<listitem><para>Either IX or IY was compiled with -<option>-XOverlappingInstances</option>. -</para></listitem> -</itemizedlist> - -</para></listitem> - -<listitem><para> -If only one candidate remains, pick it. -Otherwise if all remaining candidates were compiled with -<option>-XInccoherentInstances</option>, pick an arbitrary candidate. -</para></listitem> - -</itemizedlist> -These rules make it possible for a library author to design a library that relies on -overlapping instances without the library client having to know. -</para> -<para>The <option>-XIncoherentInstances</option> flag implies the -<option>-XOverlappingInstances</option> flag, but not vice versa. -</para> </sect3> <sect3 id="instance-sigs"> @@ -5201,21 +5292,30 @@ it explicitly (for example, to give an instance declaration for it), you can imp from module <literal>GHC.Exts</literal>. </para> <para> -Haskell's defaulting mechanism is extended to cover string literals, when <option>-XOverloadedStrings</option> is specified. +Haskell's defaulting mechanism (<ulink url="http://www.haskell.org/onlinereport/decls.html#sect4.3.4">Haskell Report, Section 4.3.4</ulink>) +is extended to cover string literals, when <option>-XOverloadedStrings</option> is specified. Specifically: <itemizedlist> <listitem><para> -Each type in a default declaration must be an +Each type in a <literal>default</literal> declaration must be an instance of <literal>Num</literal> <emphasis>or</emphasis> of <literal>IsString</literal>. </para></listitem> <listitem><para> -The standard defaulting rule (<ulink url="http://www.haskell.org/onlinereport/decls.html#sect4.3.4">Haskell Report, Section 4.3.4</ulink>) +If no <literal>default</literal> declaration is given, then it is just as if the module +contained the declaration <literal>default( Integer, Double, String)</literal>. +</para></listitem> + +<listitem><para> +The standard defaulting rule is extended thus: defaulting applies when all the unresolved constraints involve standard classes <emphasis>or</emphasis> <literal>IsString</literal>; and at least one is a numeric class <emphasis>or</emphasis> <literal>IsString</literal>. </para></listitem> </itemizedlist> +So, for example, the expression <literal>length "foo"</literal> will give rise +to an ambiguous use of <literal>IsString a0</literal> which, because of the above +rules, will default to <literal>String</literal>. </para> <para> A small example: @@ -5942,28 +6042,39 @@ instance (GMapKey a, GMapKey b) => GMapKey (Either a b) where data GMap (Either a b) v = GMapEither (GMap a v) (GMap b v) ...
-instance (Eq (Elem [e])) => Collects ([e]) where +instance Eq (Elem [e]) => Collects [e] where type Elem [e] = e ... </programlisting> - The most important point about associated family instances is that the - type indexes corresponding to class parameters must be identical to - the type given in the instance head; here this is the first argument - of <literal>GMap</literal>, namely <literal>Either a b</literal>, - which coincides with the only class parameter. - </para> - <para> - Instances for an associated family can only appear as part of - instance declarations of the class in which the family was declared - - just as with the equations of the methods of a class. Also in - correspondence to how methods are handled, declarations of associated - types can be omitted in class instances. If an associated family - instance is omitted, the corresponding instance type is not inhabited; +Note the following points: +<itemizedlist> +<listitem><para> + The type indexes corresponding to class parameters must have precisely the same shape as + the type given in the instance head. To have the same "shape" means that + the two types are identical modulo renaming of type variables. For example: +<programlisting> +instance Eq (Elem [e]) => Collects [e] where + -- Choose one of the following alternatives: + type Elem [e] = e -- OK + type Elem [x] = x -- OK + type Elem x = x -- BAD; shape of 'x' is different to '[e]' + type Elem [Maybe x] = x -- BAD: shape of '[Maybe x]' is different to '[e]' +</programlisting> +</para></listitem> +<listitem><para> + An instance for an associated family can only appear as part of + an instance declaration of the class in which the family was declared, + just as with the equations of the methods of a class. +</para></listitem> +<listitem><para> + The instance for an associated type can be omitted in class instances. In that case, + unless there is a default instance (see <xref linkend="assoc-decl-defs"/>), + the corresponding instance type is not inhabited; i.e., only diverging expressions, such as <literal>undefined</literal>, can assume the type. - </para> - <para> - Although it is unusual, there can be <emphasis>multiple</emphasis> +</para></listitem> +<listitem><para> + Although it is unusual, there (currently) can be <emphasis>multiple</emphasis> instances for an associated family in a single instance declaration. For example, this is legitimate: <programlisting> @@ -5977,8 +6088,10 @@ instance GMapKey Flob where Since you cannot give any <emphasis>subsequent</emphasis> instances for <literal>(GMap Flob ...)</literal>, this facility is most useful when the free indexed parameter is of a kind with a finite number of alternatives - (unlike <literal>*</literal>). - </para> + (unlike <literal>*</literal>). WARNING: this facility may be withdrawn in the future. +</para></listitem> +</itemizedlist> +</para> </sect3> <sect3 id="assoc-decl-defs"> @@ -5996,22 +6109,50 @@ class IsBoolMap v where instance IsBoolMap [(Int, Bool)] where lookupKey = lookup </programlisting> -The <literal>instance</literal> keyword is optional. - </para> +In an <literal>instance</literal> declaration for the class, if no explicit +<literal>type instance</literal> declaration is given for the associated type, the default declaration +is used instead, just as with default class methods. +</para> <para> -There can also be multiple defaults for a single type, as long as they do not -overlap: +Note the following points: +<itemizedlist> +<listitem><para> + The <literal>instance</literal> keyword is optional. +</para></listitem> +<listitem><para> + There can be at most one default declaration for an associated type synonym. +</para></listitem> +<listitem><para> + A default declaration is not permitted for an associated + <emphasis>data</emphasis> type. +</para></listitem> +<listitem><para> + The default declaration must mention only type <emphasis>variables</emphasis> on the left hand side, + and the right hand side must mention only type variables bound on the left hand side. + However, unlike the associated type family declaration itself, + the type variables of the default instance are independent of those of the parent class. +</para></listitem> +</itemizedlist> +Here are some examples: <programlisting> -class C a where - type F a b - type F a Int = Bool - type F a Bool = Int + class C a where + type F1 a :: * + type instance F1 a = [a] -- OK + type instance F1 a = a->a -- BAD; only one default instance is allowed + + type F2 b a -- OK; note the family has more type + -- variables than the class + type instance F2 c d = c->d -- OK; you don't have to use 'a' in the type instance + + type F3 a + type F3 [b] = b -- BAD; only type variables allowed on the LHS + + type F4 a + type F4 b = a -- BAD; 'a' is not in scope in the RHS </programlisting> +</para> -A default declaration is not permitted for an associated -<emphasis>data</emphasis> type. - </para> - </sect3> +</sect3> <sect3 id="scoping-class-params"> <title>Scoping of class parameters</title> @@ -8039,7 +8180,7 @@ scope over the methods defined in the <literal>where</literal> part. For exampl of the Haskell Report) can be completely switched off by <option>-XNoMonomorphismRestriction</option>. Since GHC 7.8.1, the monomorphism -restriction is switched off by default in GHCi. +restriction is switched off by default in GHCi's interactive options (see <xref linkend="ghci-interactive-options"/>). </para> </sect3> @@ -8112,12 +8253,30 @@ pattern binding must have the same context. For example, this is fine: <para> An ML-style language usually generalises the type of any let-bound or where-bound variable, so that it is as polymorphic as possible. -With the flag <option>-XMonoLocalBinds</option> GHC implements a slightly more conservative policy: -<emphasis>it generalises only "closed" bindings</emphasis>.
-A binding is considered "closed" if either +With the flag <option>-XMonoLocalBinds</option> GHC implements a slightly more conservative policy, +using the following rules: <itemizedlist> -<listitem><para>It is one of the top-level bindings of a module, or </para></listitem> -<listitem><para>Its free variables are all themselves closed</para></listitem> + <listitem><para> + A variable is <emphasis>closed</emphasis> if and only if + <itemizedlist> + <listitem><para> the variable is let-bound</para></listitem> + <listitem><para> one of the following holds: + <itemizedlist> + <listitem><para>the variable has an explicit type signature that has no free type variables, or</para></listitem> + <listitem><para>its binding group is fully generalised (see next bullet) </para></listitem> + </itemizedlist> + </para></listitem> + </itemizedlist> + </para></listitem> + + <listitem><para> + A binding group is <emphasis>fully generalised</emphasis> if and only if + <itemizedlist> + <listitem><para>each of its free variables is either imported or closed, and</para></listitem> + <listitem><para>the binding is not affected by the monomorphism restriction + (<ulink url="http://www.haskell.org/onlinereport/decls.html#sect4.5.5">Haskell Report, Section 4.5.5</ulink>)</para></listitem> + </itemizedlist> + </para></listitem> </itemizedlist> For example, consider <programlisting> @@ -8126,15 +8285,18 @@ g x = let h y = f y * 2 k z = z+x in h x + k x </programlisting> -Here <literal>f</literal> and <literal>g</literal> are closed because they are bound at top level. -Also <literal>h</literal> is closed because its only free variable <literal>f</literal> is closed. -But <literal>k</literal> is not closed because it mentions <literal>x</literal> which is locally bound. -Another way to think of it is this: all closed bindings <literal>could</literal> be defined at top level. -(In the example, we could move <literal>h</literal> to top level.) -</para><para> -All of this applies only to bindings that lack an explicit type signature, so that GHC has to -infer its type. If you supply a type signature, then that fixes type of the binding, end of story. -</para><para> +Here <literal>f</literal> is generalised because it has no free variables; and its binding group +is unaffected by the monomorphism restriction; and hence <literal>f</literal> is closed. +The same reasoning applies to <literal>g</literal>, except that it has one closed free variable, namely <literal>f</literal>. +Similarly <literal>h</literal> is closed, <emphasis>even though it is not bound at top level</emphasis>, +because its only free variable <literal>f</literal> is closed. +But <literal>k</literal> is not closed, because it mentions <literal>x</literal> which is not closed (because it is not let-bound). +</para> +<para> +Notice that a top-level binding that is affected by the monomorphism restriction is not closed, and hence may +in turn prevent generalisation of bindings that mention it. +</para> +<para> The rationale for this more conservative strategy is given in <ulink url="http://research.microsoft.com/~simonpj/papers/constraints/index.htm">the papers</ulink> "Let should not be generalised" and "Modular type inference with local assumptions", and a related <ulink url="http://ghc.haskell.org/trac/ghc/blog/LetGeneralisationInGhc7">blog post</ulink>. @@ -10885,8 +11047,8 @@ not be substituted, and the rule would not fire. 
</sect2> -<sect2 id="conlike"> -<title>How rules interact with INLINE/NOINLINE and CONLIKE pragmas</title> +<sect2 id="rules-inline"> +<title>How rules interact with INLINE/NOINLINE pragmas</title> <para> Ordinary inlining happens at the same time as rule rewriting, which may lead to unexpected @@ -10912,7 +11074,14 @@ would have been a better chance that <literal>f</literal>'s RULE might fire. The way to get predictable behaviour is to use a NOINLINE pragma, or an INLINE[<replaceable>phase</replaceable>] pragma, on <literal>f</literal>, to ensure that it is not inlined until its RULEs have had a chance to fire. +The warning flag <option>-fwarn-inline-rule-shadowing</option> (see <xref linkend="options-sanity"/>) +warns about this situation. </para> +</sect2> + +<sect2 id="conlike"> +<title>How rules interact with CONLIKE pragmas</title> + <para> GHC is very cautious about duplicating work. For example, consider <programlisting> @@ -11257,69 +11426,6 @@ program even if fusion doesn't happen. More rules in <filename>GHC/List.lhs</fi </sect2> -<sect2 id="core-pragma"> - <title>CORE pragma</title> - - <indexterm><primary>CORE pragma</primary></indexterm> - <indexterm><primary>pragma, CORE</primary></indexterm> - <indexterm><primary>core, annotation</primary></indexterm> - -<para> - The external core format supports <quote>Note</quote> annotations; - the <literal>CORE</literal> pragma gives a way to specify what these - should be in your Haskell source code. Syntactically, core - annotations are attached to expressions and take a Haskell string - literal as an argument. The following function definition shows an - example: - -<programlisting> -f x = ({-# CORE "foo" #-} show) ({-# CORE "bar" #-} x) -</programlisting> - - Semantically, this is equivalent to: - -<programlisting> -g x = show x -</programlisting> -</para> - -<para> - However, when external core is generated (via - <option>-fext-core</option>), there will be Notes attached to the - expressions <function>show</function> and <varname>x</varname>. - The core function declaration for <function>f</function> is: -</para> - -<programlisting> - f :: %forall a . GHCziShow.ZCTShow a -> - a -> GHCziBase.ZMZN GHCziBase.Char = - \ @ a (zddShow::GHCziShow.ZCTShow a) (eta::a) -> - (%note "foo" - %case zddShow %of (tpl::GHCziShow.ZCTShow a) - {GHCziShow.ZCDShow - (tpl1::GHCziBase.Int -> - a -> - GHCziBase.ZMZN GHCziBase.Char -> GHCziBase.ZMZN GHCziBase.Cha -r) - (tpl2::a -> GHCziBase.ZMZN GHCziBase.Char) - (tpl3::GHCziBase.ZMZN a -> - GHCziBase.ZMZN GHCziBase.Char -> GHCziBase.ZMZN GHCziBase.Cha -r) -> - tpl2}) - (%note "bar" - eta); -</programlisting> - -<para> - Here, we can see that the function <function>show</function> (which - has been expanded out to a case expression over the Show dictionary) - has a <literal>%note</literal> attached to it, as does the - expression <varname>eta</varname> (which used to be called - <varname>x</varname>). 
-</para> - -</sect2> - </sect1> <sect1 id="special-ids"> @@ -11613,7 +11719,7 @@ described in <ulink url="http://www.seas.upenn.edu/~sweirich/papers/popl163af-weirich.pdf">Generative type abstraction and type-level computation</ulink>, published at POPL 2011.</para> -<sect2> +<sect2 id="nominal-representational-and-phantom"> <title>Nominal, Representational, and Phantom</title> <para>The goal of the roles system is to track when two types have the same @@ -11670,7 +11776,7 @@ are unrelated.</para> </sect2> -<sect2> +<sect2 id="role-inference"> <title>Role inference</title> <para> @@ -11724,7 +11830,7 @@ but role nominal for <literal>b</literal>.</para> </sect2> -<sect2> +<sect2 id="role-annotations"> <title>Role annotations <indexterm><primary>-XRoleAnnotations</primary></indexterm> </title> diff --git a/docs/users_guide/gone_wrong.xml b/docs/users_guide/gone_wrong.xml index 114b06cfd6..bb5fcb0d4e 100644 --- a/docs/users_guide/gone_wrong.xml +++ b/docs/users_guide/gone_wrong.xml @@ -146,7 +146,7 @@ <emphasis>must</emphasis> be re-compiled.</para> <para>A useful option to alert you when interfaces change is - <option>-hi-diffs</option><indexterm><primary>-hi-diffs + <option>-ddump-hi-diffs</option><indexterm><primary>-ddump-hi-diffs option</primary></indexterm>. It will run <command>diff</command> on the changed interface file, before and after, when applicable.</para> @@ -167,7 +167,7 @@ <screen> % rm *.o # scrub your object files -% make my_prog # re-make your program; use -hi-diffs to highlight changes; +% make my_prog # re-make your program; use -ddump-hi-diffs to highlight changes; # as mentioned above, use -dcore-lint to be more paranoid % ./my_prog ... # retry... </screen> diff --git a/docs/users_guide/phases.xml b/docs/users_guide/phases.xml index db32f38870..8a5589acda 100644 --- a/docs/users_guide/phases.xml +++ b/docs/users_guide/phases.xml @@ -576,8 +576,22 @@ $ cat foo.hspp</screen> </term> <listitem> <para>Omit code generation (and all later phases) - altogether. Might be of some use if you just want to see - dumps of the intermediate compilation phases.</para> + altogether. This is useful if you're only interested in + type checking code.</para> + </listitem> + </varlistentry> + + <varlistentry> + <term> + <option>-fwrite-interface</option> + <indexterm><primary><option>-fwrite-interface</option></primary></indexterm> + </term> + <listitem> + <para>Always write interface files. GHC will normally write + interface files automatically, but this flag is useful with + <option>-fno-code</option>, which normally suppresses generation + of interface files. 
This is useful if you want to type check + over multiple runs of GHC without compiling dependencies.</para> </listitem> </varlistentry> diff --git a/docs/users_guide/ug-book.xml.in b/docs/users_guide/ug-book.xml.in index dc5d4f7c35..b87563ac3b 100644 --- a/docs/users_guide/ug-book.xml.in +++ b/docs/users_guide/ug-book.xml.in @@ -17,7 +17,6 @@ &lang-features; &ffi-chap; &extending-ghc; -&external-core; &wrong; &utils; &win32-dll; diff --git a/docs/users_guide/ug-ent.xml.in b/docs/users_guide/ug-ent.xml.in index ce87089f24..6753ff7e5b 100644 --- a/docs/users_guide/ug-ent.xml.in +++ b/docs/users_guide/ug-ent.xml.in @@ -3,7 +3,7 @@ <!ENTITY flags SYSTEM "flags.xml"> <!ENTITY license SYSTEM "license.xml"> <!ENTITY intro SYSTEM "intro.xml" > -<!ENTITY relnotes1 SYSTEM "7.8.1-notes.xml" > +<!ENTITY relnotes1 SYSTEM "7.10.1-notes.xml" > <!ENTITY using SYSTEM "using.xml" > <!ENTITY code-gens SYSTEM "codegens.xml" > <!ENTITY runtime SYSTEM "runtime_control.xml" > @@ -12,7 +12,6 @@ <!ENTITY sooner SYSTEM "sooner.xml" > <!ENTITY lang-features SYSTEM "lang.xml" > <!ENTITY glasgowexts SYSTEM "glasgow_exts.xml" > -<!ENTITY external-core SYSTEM "external_core.xml" > <!ENTITY packages SYSTEM "packages.xml" > <!ENTITY parallel SYSTEM "parallel.xml" > <!ENTITY safehaskell SYSTEM "safe_haskell.xml" > diff --git a/docs/users_guide/using.xml b/docs/users_guide/using.xml index 8d8211eb5a..921d5a3345 100644 --- a/docs/users_guide/using.xml +++ b/docs/users_guide/using.xml @@ -899,20 +899,37 @@ ghci> :set -fprint-explicit-foralls ghci> :t f f :: forall a. a -> a </screen> - Using <option>-fprint-explicit-kinds</option> makes GHC print kind-foralls and kind applications +However, regardless of the flag setting, the quantifiers are printed under these circumstances: +<itemizedlist> +<listitem><para>For nested <literal>foralls</literal>, e.g. +<screen> +ghci> :t GHC.ST.runST +GHC.ST.runST :: (forall s. GHC.ST.ST s a) -> a +</screen> +</para></listitem> +<listitem><para>If any of the quantified type variables has a kind +that mentions a kind variable, e.g. +<screen> +ghci> :i Data.Coerce.coerce +coerce :: + forall (k :: BOX) (a :: k) (b :: k). Coercible a b => a -> b + -- Defined in GHC.Prim +</screen> +</para></listitem> +</itemizedlist> + </para> + <para> + Using <option>-fprint-explicit-kinds</option> makes GHC print kind arguments in types, which are normally suppressed. This can be important when you are using kind polymorphism. For example: <screen> ghci> :set -XPolyKinds ghci> data T a = MkT ghci> :t MkT -MkT :: T b +MkT :: forall (k :: BOX) (a :: k). T a ghci> :set -fprint-explicit-foralls ghci> :t MkT -MkT :: forall (b::k). T b -ghci> :set -fprint-explicit-kinds -ghci> :t MkT -MkT :: forall (k::BOX) (b:k). T b +MkT :: forall (k :: BOX) (a :: k). T k a </screen> </para> </listitem> @@ -1719,15 +1736,50 @@ f "2" = 2 <indexterm><primary>unused binds, warning</primary></indexterm> <indexterm><primary>binds, unused</primary></indexterm> <para>Report any function definitions (and local bindings) - which are unused. For top-level functions, the warning is - only given if the binding is not exported.</para> - <para>A definition is regarded as "used" if (a) it is exported, or (b) it is - mentioned in the right hand side of another definition that is used, or (c) the - function it defines begins with an underscore. The last case provides a - way to suppress unused-binding warnings selectively. 
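As a quick illustration of the nested-forall case described above, a user-defined rank-2 function behaves just like runST: the inner quantifier is printed even when -fprint-explicit-foralls is off. The function below is hypothetical.

    {-# LANGUAGE RankNTypes #-}

    -- :t applyBoth in GHCi shows:  applyBoth :: (forall a. a -> a) -> (Int, Bool)
    applyBoth :: (forall a. a -> a) -> (Int, Bool)
    applyBoth f = (f 1, f True)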
</para> - <para> Notice that a variable - is reported as unused even if it appears in the right-hand side of another - unused binding. </para> + which are unused. More precisely: + + <itemizedlist> + <listitem><para>Warn if a binding brings into scope a variable that is not used, + except if the variable's name starts with an underscore. The "starts-with-underscore" + condition provides a way to selectively disable the warning. + </para> + <para> + A variable is regarded as "used" if + <itemizedlist> + <listitem><para>It is exported, or</para></listitem> + <listitem><para>It appears in the right hand side of a binding that binds at + least one used variable that is used</para></listitem> + </itemizedlist> + For example + <programlisting> +module A (f) where +f = let (p,q) = rhs1 in t p -- Warning about unused q +t = rhs3 -- No warning: f is used, and hence so is t +g = h x -- Warning: g unused +h = rhs2 -- Warning: h is only used in the right-hand side of another unused binding +_w = True -- No warning: _w starts with an underscore + </programlisting> + </para></listitem> + + <listitem><para> + Warn if a pattern binding binds no variables at all, unless it is a lone, possibly-banged, wild-card pattern. + For example: + <programlisting> +Just _ = rhs3 -- Warning: unused pattern binding +(_, _) = rhs4 -- Warning: unused pattern binding +_ = rhs3 -- No warning: lone wild-card pattern +!_ = rhs4 -- No warning: banged wild-card pattern; behaves like seq + </programlisting> + The motivation for allowing lone wild-card patterns is they + are not very different from <literal>_v = rhs3</literal>, + which elicits no warning; and they can be useful to add a type + constraint, e.g. <literal>_ = x::Int</literal>. A lone + banged wild-card pattern is is useful as an alternative + (to <literal>seq</literal>) way to force evaluation. + </para> + </listitem> + </itemizedlist> + </para> </listitem> </varlistentry> @@ -1814,6 +1866,16 @@ f "2" = 2 </listitem> </varlistentry> + <varlistentry> + <term><option>-fwarn-inline-rule-shadowing</option>:</term> + <listitem> + <indexterm><primary><option>-fwarn-inline-rule-shadowing</option></primary></indexterm> + <para>Warn if a rewrite RULE might fail to fire because the function might be + inlined before the rule has a chance to fire. See <xref linkend="rules-inline"/>. + </para> + </listitem> + </varlistentry> + </variablelist> <para>If you're feeling really paranoid, the @@ -2967,44 +3029,6 @@ data D = D !C </sect1> &runtime; - -<sect1 id="ext-core"> - <title>Generating and compiling External Core Files</title> - - <indexterm><primary>intermediate code generation</primary></indexterm> - - <para>GHC can dump its optimized intermediate code (said to be in “Core” format) - to a file as a side-effect of compilation. Non-GHC back-end tools can read and process Core files; these files have the suffix - <filename>.hcr</filename>. The Core format is described in <ulink url="../../core.pdf"> - <citetitle>An External Representation for the GHC Core Language</citetitle></ulink>, - and sample tools - for manipulating Core files (in Haskell) are available in the - <ulink url="http://hackage.haskell.org/package/extcore">extcore package on Hackage</ulink>. 
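For concreteness, the situation that the -fwarn-inline-rule-shadowing warning documented above is aimed at looks like the following sketch (names invented); the fix is the phase-controlled pragma shown in the earlier sketch.

    -- 'g' may be inlined at any phase, so the rule below might never see a
    -- call to 'g'; GHC now warns about this combination.
    {-# INLINE g #-}
    g :: [Int] -> [Int]
    g = map (+ 1)

    {-# RULES "g/g" forall xs.  g (g xs) = map (+ 2) xs #-}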
Note that the format of <literal>.hcr</literal> - files is <emphasis>different</emphasis> from the Core output format that GHC generates - for debugging purposes (<xref linkend="options-debugging"/>), though the two formats appear somewhat similar.</para> - - <para>The Core format natively supports notes which you can add to - your source code using the <literal>CORE</literal> pragma (see <xref - linkend="pragmas"/>).</para> - - <variablelist> - - <varlistentry> - <term> - <option>-fext-core</option> - <indexterm><primary><option>-fext-core</option></primary></indexterm> - </term> - <listitem> - <para>Generate <literal>.hcr</literal> files.</para> - </listitem> - </varlistentry> - - </variablelist> - -<para>Currently (as of version 6.8.2), GHC does not have the ability to read in External Core files as source. If you would like GHC to have this ability, please <ulink url="http://ghc.haskell.org/trac/ghc/wiki/MailingListsAndIRC">make your wishes known to the GHC Team</ulink>.</para> - -</sect1> - &debug; &flags; diff --git a/driver/ghc-usage.txt b/driver/ghc-usage.txt index 9de4090bc4..0b56db7419 100644 --- a/driver/ghc-usage.txt +++ b/driver/ghc-usage.txt @@ -73,7 +73,7 @@ Given the above, here are some TYPICAL invocations of $$: The User's Guide has more information about GHC's *many* options. An online copy can be found here: - http://haskell.org/haskellwiki/GHC + http://www.haskell.org/ghc/docs/latest/html/users_guide/ If you *really* want to see every option, then you can pass '--show-options' to the compiler. diff --git a/driver/ghci-usage.txt b/driver/ghci-usage.txt index d9628b2c41..1a848fc9b5 100644 --- a/driver/ghci-usage.txt +++ b/driver/ghci-usage.txt @@ -21,4 +21,4 @@ GHC does. Some of the options that are commonly used are: Full details can be found in the User's Guide, an online copy of which can be found here: - http://haskell.org/haskellwiki/GHC + http://www.haskell.org/ghc/docs/latest/html/users_guide/ @@ -425,13 +425,6 @@ PACKAGES_STAGE2 += haskell98 PACKAGES_STAGE2 += haskell2010 endif -# We normally install only the packages down to this point -REGULAR_INSTALL_PACKAGES := $(addprefix libraries/,$(PACKAGES_STAGE1)) -ifneq "$(Stage1Only)" "YES" -REGULAR_INSTALL_PACKAGES += compiler -endif -REGULAR_INSTALL_PACKAGES += $(addprefix libraries/,$(PACKAGES_STAGE2)) - PACKAGES_STAGE1 += xhtml ifeq "$(Windows_Target)" "NO" ifneq "$(TargetOS_CPP)" "ios" @@ -440,6 +433,13 @@ endif endif PACKAGES_STAGE1 += haskeline +# We normally install only the packages down to this point +REGULAR_INSTALL_PACKAGES := $(addprefix libraries/,$(PACKAGES_STAGE1)) +ifneq "$(Stage1Only)" "YES" +REGULAR_INSTALL_PACKAGES += compiler +endif +REGULAR_INSTALL_PACKAGES += $(addprefix libraries/,$(PACKAGES_STAGE2)) + # If we have built the programs with dynamic libraries, then # ghc will be dynamically linked against haskeline.so etc, so # we need the dynamic libraries of everything down to here @@ -452,9 +452,17 @@ ifneq "$(CrossCompiling)" "YES" define addExtraPackage ifeq "$2" "-" # Do nothing; this package is already handled above -else ifeq "$2 $$(GhcProfiled)" "dph YES" -# Ignore the package: These packages need TH, which is incompatible -# with a profiled GHC +else ifeq "$2" "dph" +## DPH-specific clause +ifeq "$$(GhcProfiled)" "YES" +# Ignore package: The DPH packages need TH, which is incompatible with +# a profiled GHC +else ifneq "$$(BUILD_DPH)" "YES" +# Ignore package: DPH was disabled +else +PACKAGES_STAGE2 += $1 +endif +## end of DPH-specific clause else PACKAGES_STAGE2 += $1 endif @@ -635,8 
+643,10 @@ ifneq "$(CLEANING)" "YES" BUILD_DIRS += $(patsubst %, libraries/%, $(PACKAGES_STAGE2)) BUILD_DIRS += $(patsubst %, libraries/%, $(PACKAGES_STAGE1)) BUILD_DIRS += $(patsubst %, libraries/%, $(filter-out $(PACKAGES_STAGE1),$(PACKAGES_STAGE0))) +ifeq "$(BUILD_DPH)" "YES" BUILD_DIRS += $(wildcard libraries/dph) endif +endif ifeq "$(INTEGER_LIBRARY)" "integer-gmp" @@ -1131,7 +1141,6 @@ sdist-ghc-prep : $(call sdist_ghc_file,compiler,stage2,cmm,,CmmParse,y) $(call sdist_ghc_file,compiler,stage2,parser,,Lexer,x) $(call sdist_ghc_file,compiler,stage2,parser,,Parser,y.pp) - $(call sdist_ghc_file,compiler,stage2,parser,,ParserCore,y) $(call sdist_ghc_file,utils/hpc,dist-install,,,HpcParser,y) $(call sdist_ghc_file,utils/genprimopcode,dist,,,Lexer,x) $(call sdist_ghc_file,utils/genprimopcode,dist,,,Parser,y) diff --git a/ghc/GhciMonad.hs b/ghc/GhciMonad.hs index a4abe322d2..22109c428d 100644 --- a/ghc/GhciMonad.hs +++ b/ghc/GhciMonad.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP, FlexibleInstances, UnboxedTuples, MagicHash #-} {-# OPTIONS_GHC -fno-cse -fno-warn-orphans #-} -- -fno-cse is needed for GLOBAL_VAR's to behave properly @@ -32,6 +33,7 @@ import Outputable hiding (printForUser, printForUserPartWay) import qualified Outputable import Util import DynFlags +import FastString import HscTypes import SrcLoc import Module @@ -104,7 +106,8 @@ data GHCiState = GHCiState -- help text to display to a user short_help :: String, - long_help :: String + long_help :: String, + lastErrorLocations :: IORef [(FastString, Int)] } type TickArray = Array Int [(BreakIndex,SrcSpan)] diff --git a/ghc/InteractiveUI.hs b/ghc/InteractiveUI.hs index b41c2db45a..ef48c348bd 100644 --- a/ghc/InteractiveUI.hs +++ b/ghc/InteractiveUI.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE CPP, MagicHash, NondecreasingIndentation, TupleSections #-} {-# OPTIONS -fno-cse #-} -- -fno-cse is needed for GLOBAL_VAR's to behave properly @@ -27,6 +28,7 @@ import Debugger -- The GHC interface import DynFlags +import ErrUtils import GhcMonad ( modifySession ) import qualified GHC import GHC ( LoadHowMuch(..), Target(..), TargetId(..), InteractiveImport(..), @@ -71,7 +73,7 @@ import Data.Array import qualified Data.ByteString.Char8 as BS import Data.Char import Data.Function -import Data.IORef ( IORef, readIORef, writeIORef ) +import Data.IORef ( IORef, modifyIORef, newIORef, readIORef, writeIORef ) import Data.List ( find, group, intercalate, intersperse, isPrefixOf, nub, partition, sort, sortBy ) import Data.Maybe @@ -103,7 +105,6 @@ import GHC.IO.Exception ( IOErrorType(InvalidArgument) ) import GHC.IO.Handle ( hFlushAll ) import GHC.TopHandler ( topHandler ) - ----------------------------------------------------------------------------- data GhciSettings = GhciSettings { @@ -379,6 +380,12 @@ interactiveUI config srcs maybe_exprs = do $ dflags GHC.setInteractiveDynFlags dflags' + lastErrLocationsRef <- liftIO $ newIORef [] + progDynFlags <- GHC.getProgramDynFlags + _ <- GHC.setProgramDynFlags $ + progDynFlags { log_action = ghciLogAction lastErrLocationsRef } + + liftIO $ when (isNothing maybe_exprs) $ do -- Only for GHCi (not runghc and ghc -e): @@ -399,31 +406,46 @@ interactiveUI config srcs maybe_exprs = do #endif default_editor <- liftIO $ findEditor - startGHCi (runGHCi srcs maybe_exprs) - GHCiState{ progname = default_progname, - GhciMonad.args = default_args, - prompt = defPrompt config, - prompt2 = defPrompt2 config, - stop = default_stop, - editor = default_editor, - options = [], - line_number = 1, - break_ctr = 0, - breaks = [], - tickarrays 
= emptyModuleEnv, - ghci_commands = availableCommands config, - last_command = Nothing, - cmdqueue = [], - remembered_ctx = [], - transient_ctx = [], - ghc_e = isJust maybe_exprs, - short_help = shortHelpText config, - long_help = fullHelpText config + GHCiState{ progname = default_progname, + GhciMonad.args = default_args, + prompt = defPrompt config, + prompt2 = defPrompt2 config, + stop = default_stop, + editor = default_editor, + options = [], + line_number = 1, + break_ctr = 0, + breaks = [], + tickarrays = emptyModuleEnv, + ghci_commands = availableCommands config, + last_command = Nothing, + cmdqueue = [], + remembered_ctx = [], + transient_ctx = [], + ghc_e = isJust maybe_exprs, + short_help = shortHelpText config, + long_help = fullHelpText config, + lastErrorLocations = lastErrLocationsRef } - + return () +resetLastErrorLocations :: GHCi () +resetLastErrorLocations = do + st <- getGHCiState + liftIO $ writeIORef (lastErrorLocations st) [] + +ghciLogAction :: IORef [(FastString, Int)] -> LogAction +ghciLogAction lastErrLocations dflags severity srcSpan style msg = do + defaultLogAction dflags severity srcSpan style msg + case severity of + SevError -> case srcSpan of + RealSrcSpan rsp -> modifyIORef lastErrLocations + (++ [(srcLocFile (realSrcSpanStart rsp), srcLocLine (realSrcSpanStart rsp))]) + _ -> return () + _ -> return () + withGhcAppData :: (FilePath -> IO a) -> IO a -> IO a withGhcAppData right left = do either_dir <- tryIO (getAppUserDataDirectory "ghc") @@ -1119,9 +1141,10 @@ runMain s = case toArgs s of Left err -> liftIO (hPutStrLn stderr err) Right args -> do dflags <- getDynFlags - case mainFunIs dflags of - Nothing -> doWithArgs args "main" - Just f -> doWithArgs args f + let main = fromMaybe "main" (mainFunIs dflags) + -- Wrap the main function in 'void' to discard its value instead + -- of printing it (#9086). See Haskell 2010 report Chapter 5. + doWithArgs args $ "Control.Monad.void (" ++ main ++ ")" ----------------------------------------------------------------------------- -- :run @@ -1169,10 +1192,18 @@ editFile :: String -> InputT GHCi () editFile str = do file <- if null str then lift chooseEditFile else expandPath str st <- lift getGHCiState + errs <- liftIO $ readIORef $ lastErrorLocations st let cmd = editor st when (null cmd) $ throwGhcException (CmdLineError "editor not set, use :set editor") - code <- liftIO $ system (cmd ++ ' ':file) + lineOpt <- liftIO $ do + curFileErrs <- filterM (\(f, _) -> unpackFS f `sameFile` file) errs + return $ case curFileErrs of + (_, line):_ -> " +" ++ show line + _ -> "" + let cmdArgs = ' ':(file ++ lineOpt) + code <- liftIO $ system (cmd ++ cmdArgs) + when (code == ExitSuccess) $ reloadModule "" @@ -1363,6 +1394,7 @@ doLoad retain_context howmuch = do -- the ModBreaks will have gone away. lift discardActiveBreakPoints + lift resetLastErrorLocations -- Enable buffering stdout and stderr as we're compiling. Keeping these -- handles unbuffered will just slow the compilation down, especially when -- compiling in parallel. 
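The GHCi change above installs a custom log_action that, in addition to printing each message, records the (file, line) of every error in an IORef so that :edit can jump to the line of the first recorded error in the file being edited. Stripped of the GHC-specific types (LogAction, RealSrcSpan, FastString), the pattern is simply a wrapped logger; everything below is an illustrative stand-in, not GHC API.

    import Data.IORef

    type Logger = FilePath -> Int -> String -> IO ()

    -- Keep the original behaviour, then remember where the error happened;
    -- a caller can read the IORef afterwards and open an editor at the
    -- first recorded location, much as editFile does above.
    recordingLogger :: IORef [(FilePath, Int)] -> Logger -> Logger
    recordingLogger errLocs report file line msg = do
      report file line msg
      modifyIORef errLocs (++ [(file, line)])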
@@ -1387,7 +1419,6 @@ afterLoad ok retain_context = do modulesLoadedMsg ok loaded_mods lift $ setContextAfterLoad retain_context loaded_mod_summaries - setContextAfterLoad :: Bool -> [GHC.ModSummary] -> GHCi () setContextAfterLoad keep_ctxt [] = do setContextKeepingPackageModules keep_ctxt [] @@ -1493,7 +1524,7 @@ kindOfType norm str $ do (ty, kind) <- GHC.typeKind norm str printForUser $ vcat [ text str <+> dcolon <+> pprTypeForUser kind - , ppWhen norm $ equals <+> ppr ty ] + , ppWhen norm $ equals <+> pprTypeForUser ty ] ----------------------------------------------------------------------------- @@ -2505,14 +2536,14 @@ unionComplete f1 f2 line = do wrapCompleter :: String -> (String -> GHCi [String]) -> CompletionFunc GHCi wrapCompleter breakChars fun = completeWord Nothing breakChars - $ fmap (map simpleCompletion) . fmap sort . fun + $ fmap (map simpleCompletion . nubSort) . fun wrapIdentCompleter :: (String -> GHCi [String]) -> CompletionFunc GHCi wrapIdentCompleter = wrapCompleter word_break_chars wrapIdentCompleterWithModifier :: String -> (Maybe Char -> String -> GHCi [String]) -> CompletionFunc GHCi wrapIdentCompleterWithModifier modifChars fun = completeWordWithPrev Nothing word_break_chars - $ \rest -> fmap (map simpleCompletion) . fmap sort . fun (getModifier rest) + $ \rest -> fmap (map simpleCompletion . nubSort) . fun (getModifier rest) where getModifier = find (`elem` modifChars) @@ -3117,7 +3148,13 @@ expandPathIO p = tilde <- getHomeDirectory -- will fail if HOME not defined return (tilde ++ '/':d) other -> - return other + return other + +sameFile :: FilePath -> FilePath -> IO Bool +sameFile path1 path2 = do + absPath1 <- canonicalizePath path1 + absPath2 <- canonicalizePath path2 + return $ absPath1 == absPath2 wantInterpretedModule :: GHC.GhcMonad m => String -> m Module wantInterpretedModule str = wantInterpretedModuleName (GHC.mkModuleName str) diff --git a/ghc/Main.hs b/ghc/Main.hs index d056bf97c4..2bb156c5b9 100644 --- a/ghc/Main.hs +++ b/ghc/Main.hs @@ -1,5 +1,5 @@ +{-# LANGUAGE CPP, NondecreasingIndentation #-} {-# OPTIONS -fno-warn-incomplete-patterns -optc-DNON_POSIX_SOURCE #-} -{-# LANGUAGE ForeignFunctionInterface #-} ----------------------------------------------------------------------------- -- @@ -33,7 +33,7 @@ import InteractiveUI ( interactiveUI, ghciWelcomeMsg, defaultGhciSettings ) import Config import Constants import HscTypes -import Packages ( dumpPackages ) +import Packages ( dumpPackages, simpleDumpPackages ) import DriverPhases import BasicTypes ( failed ) import StaticFlags @@ -209,8 +209,10 @@ main' postLoadMode dflags0 args flagWarnings = do hsc_env <- GHC.getSession ---------------- Display configuration ----------- - when (verbosity dflags6 >= 4) $ - liftIO $ dumpPackages dflags6 + case verbosity dflags6 of + v | v == 4 -> liftIO $ simpleDumpPackages dflags6 + | v >= 5 -> liftIO $ dumpPackages dflags6 + | otherwise -> return () when (verbosity dflags6 >= 3) $ do liftIO $ hPutStrLn stderr ("Hsc static flags: " ++ unwords staticFlags) diff --git a/ghc/ghc-bin.cabal.in b/ghc/ghc-bin.cabal.in index 68338f37f7..dcbc695675 100644 --- a/ghc/ghc-bin.cabal.in +++ b/ghc/ghc-bin.cabal.in @@ -16,7 +16,7 @@ Category: XXX Data-Dir: .. Data-Files: settings Build-Type: Simple -Cabal-Version: >= 1.2 +Cabal-Version: >=1.10 Flag ghci Description: Build GHCi support. 
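The completion change above swaps fmap sort for nubSort, so candidate lists are de-duplicated as well as ordered. Assuming nubSort (presumably from GHC's Util, imported above) does what its name suggests, an observably equivalent stand-in would be:

    import qualified Data.Set as Set

    -- Sort and remove duplicates in one pass.
    nubSortLike :: Ord a => [a] -> [a]
    nubSortLike = Set.toAscList . Set.fromList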
@@ -24,6 +24,8 @@ Flag ghci Manual: True Executable ghc + Default-Language: Haskell2010 + Main-Is: Main.hs Build-Depends: base >= 3 && < 5, array >= 0.1 && < 0.6, @@ -43,12 +45,17 @@ Executable ghc if flag(ghci) CPP-Options: -DGHCI GHC-Options: -fno-warn-name-shadowing - Other-Modules: InteractiveUI, GhciMonad, GhciTags + Other-Modules: + InteractiveUI + GhciMonad + GhciTags Build-Depends: transformers, haskeline - Extensions: ForeignFunctionInterface, - UnboxedTuples, - FlexibleInstances, - TupleSections, - MagicHash + Other-Extensions: + FlexibleInstances + MagicHash + TupleSections + UnboxedTuples - Extensions: CPP, PatternGuards, NondecreasingIndentation + Other-Extensions: + CPP + NondecreasingIndentation diff --git a/includes/CodeGen.Platform.hs b/includes/CodeGen.Platform.hs index 3d6dd41ae4..62708cc4cc 100644 --- a/includes/CodeGen.Platform.hs +++ b/includes/CodeGen.Platform.hs @@ -742,6 +742,8 @@ globalRegMaybe CurrentTSO = Just (RealRegSingle REG_CurrentTSO) globalRegMaybe CurrentNursery = Just (RealRegSingle REG_CurrentNursery) # endif globalRegMaybe _ = Nothing +#elif MACHREGS_NO_REGS +globalRegMaybe _ = Nothing #else globalRegMaybe = panic "globalRegMaybe not defined for this platform" #endif diff --git a/includes/rts/Constants.h b/includes/rts/Constants.h index 842c37b369..6fd0dc0dfc 100644 --- a/includes/rts/Constants.h +++ b/includes/rts/Constants.h @@ -202,32 +202,34 @@ */ #define NotBlocked 0 #define BlockedOnMVar 1 -#define BlockedOnMVarRead 2 -#define BlockedOnBlackHole 3 -#define BlockedOnRead 4 -#define BlockedOnWrite 5 -#define BlockedOnDelay 6 -#define BlockedOnSTM 7 +#define BlockedOnMVarRead 14 /* TODO: renumber me, see #9003 */ +#define BlockedOnBlackHole 2 +#define BlockedOnRead 3 +#define BlockedOnWrite 4 +#define BlockedOnDelay 5 +#define BlockedOnSTM 6 /* Win32 only: */ -#define BlockedOnDoProc 8 +#define BlockedOnDoProc 7 /* Only relevant for PAR: */ /* blocked on a remote closure represented by a Global Address: */ -#define BlockedOnGA 9 +#define BlockedOnGA 8 /* same as above but without sending a Fetch message */ -#define BlockedOnGA_NoSend 10 +#define BlockedOnGA_NoSend 9 /* Only relevant for THREADED_RTS: */ -#define BlockedOnCCall 11 -#define BlockedOnCCall_Interruptible 12 +#define BlockedOnCCall 10 +#define BlockedOnCCall_Interruptible 11 /* same as above but permit killing the worker thread */ /* Involved in a message sent to tso->msg_cap */ -#define BlockedOnMsgThrowTo 13 +#define BlockedOnMsgThrowTo 12 /* The thread is not on any run queues, but can be woken up by tryWakeupThread() */ -#define ThreadMigrating 14 +#define ThreadMigrating 13 + +/* WARNING WARNING top number is BlockedOnMVarRead 14, not 13!! */ /* * These constants are returned to the scheduler by a thread that has diff --git a/includes/rts/EventLogFormat.h b/includes/rts/EventLogFormat.h index e08a44996f..a1e038f823 100644 --- a/includes/rts/EventLogFormat.h +++ b/includes/rts/EventLogFormat.h @@ -162,6 +162,8 @@ #define EVENT_TASK_MIGRATE 56 /* (taskID, cap, new_cap) */ #define EVENT_TASK_DELETE 57 /* (taskID) */ #define EVENT_USER_MARKER 58 /* (marker_name) */ +#define EVENT_HACK_BUG_T9003 59 /* Hack: see trac #9003 */ + /* Range 59 - 59 is available for new GHC and common events. */ /* Range 60 - 80 is used by eden for parallel tracing @@ -177,7 +179,7 @@ * ranges higher than this are reserved but not currently emitted by ghc. 
* This must match the size of the EventDesc[] array in EventLog.c */ -#define NUM_GHC_EVENT_TAGS 59 +#define NUM_GHC_EVENT_TAGS 60 #if 0 /* DEPRECATED EVENTS: */ /* we don't actually need to record the thread, it's implicit */ diff --git a/includes/rts/storage/ClosureMacros.h b/includes/rts/storage/ClosureMacros.h index 92b78de6f7..3407b716c8 100644 --- a/includes/rts/storage/ClosureMacros.h +++ b/includes/rts/storage/ClosureMacros.h @@ -338,6 +338,11 @@ EXTERN_INLINE StgWord bco_sizeW ( StgBCO *bco ); EXTERN_INLINE StgWord bco_sizeW ( StgBCO *bco ) { return bco->size; } +/* + * TODO: Consider to switch return type from 'nat' to 'StgWord' #8742 + * + * (Also for 'closure_sizeW' below) + */ EXTERN_INLINE nat closure_sizeW_ (StgClosure *p, StgInfoTable *info); EXTERN_INLINE nat closure_sizeW_ (StgClosure *p, StgInfoTable *info) diff --git a/includes/stg/MiscClosures.h b/includes/stg/MiscClosures.h index 0c4d2f9eaf..ee5a119aa1 100644 --- a/includes/stg/MiscClosures.h +++ b/includes/stg/MiscClosures.h @@ -348,7 +348,6 @@ RTS_FUN_DECL(stg_newByteArrayzh); RTS_FUN_DECL(stg_newPinnedByteArrayzh); RTS_FUN_DECL(stg_newAlignedPinnedByteArrayzh); RTS_FUN_DECL(stg_casIntArrayzh); -RTS_FUN_DECL(stg_fetchAddIntArrayzh); RTS_FUN_DECL(stg_newArrayzh); RTS_FUN_DECL(stg_newArrayArrayzh); RTS_FUN_DECL(stg_copyArrayzh); diff --git a/includes/stg/SMP.h b/includes/stg/SMP.h index 01663dd86e..00608c707c 100644 --- a/includes/stg/SMP.h +++ b/includes/stg/SMP.h @@ -107,7 +107,10 @@ EXTERN_INLINE StgWord xchg(StgPtr p, StgWord w) { StgWord result; -#if i386_HOST_ARCH || x86_64_HOST_ARCH +#if defined(NOSMP) + result = *p; + *p = w; +#elif i386_HOST_ARCH || x86_64_HOST_ARCH result = w; __asm__ __volatile__ ( // NB: the xchg instruction is implicitly locked, so we do not @@ -154,9 +157,6 @@ xchg(StgPtr p, StgWord w) : "r" (w), "r" (p) : "memory" ); -#elif !defined(WITHSMP) - result = *p; - *p = w; #else #error xchg() unimplemented on this architecture #endif @@ -170,7 +170,14 @@ xchg(StgPtr p, StgWord w) EXTERN_INLINE StgWord cas(StgVolatilePtr p, StgWord o, StgWord n) { -#if i386_HOST_ARCH || x86_64_HOST_ARCH +#if defined(NOSMP) + StgWord result; + result = *p; + if (result == o) { + *p = n; + } + return result; +#elif i386_HOST_ARCH || x86_64_HOST_ARCH __asm__ __volatile__ ( "lock\ncmpxchg %3,%1" :"=a"(o), "+m" (*(volatile unsigned int *)p) @@ -225,13 +232,6 @@ cas(StgVolatilePtr p, StgWord o, StgWord n) : "cc","memory"); return result; -#elif !defined(WITHSMP) - StgWord result; - result = *p; - if (result == o) { - *p = n; - } - return result; #else #error cas() unimplemented on this architecture #endif @@ -302,7 +302,9 @@ busy_wait_nop(void) */ EXTERN_INLINE void write_barrier(void) { -#if i386_HOST_ARCH || x86_64_HOST_ARCH +#if defined(NOSMP) + return; +#elif i386_HOST_ARCH || x86_64_HOST_ARCH __asm__ __volatile__ ("" : : : "memory"); #elif powerpc_HOST_ARCH __asm__ __volatile__ ("lwsync" : : : "memory"); @@ -313,8 +315,6 @@ write_barrier(void) { __asm__ __volatile__ ("" : : : "memory"); #elif arm_HOST_ARCH && !defined(arm_HOST_ARCH_PRE_ARMv7) __asm__ __volatile__ ("dmb st" : : : "memory"); -#elif !defined(WITHSMP) - return; #else #error memory barriers unimplemented on this architecture #endif @@ -322,7 +322,9 @@ write_barrier(void) { EXTERN_INLINE void store_load_barrier(void) { -#if i386_HOST_ARCH +#if defined(NOSMP) + return; +#elif i386_HOST_ARCH __asm__ __volatile__ ("lock; addl $0,0(%%esp)" : : : "memory"); #elif x86_64_HOST_ARCH __asm__ __volatile__ ("lock; addq $0,0(%%rsp)" : : : "memory"); @@ -332,8 
+334,6 @@ store_load_barrier(void) { __asm__ __volatile__ ("membar #StoreLoad" : : : "memory"); #elif arm_HOST_ARCH && !defined(arm_HOST_ARCH_PRE_ARMv7) __asm__ __volatile__ ("dmb" : : : "memory"); -#elif !defined(WITHSMP) - return; #else #error memory barriers unimplemented on this architecture #endif @@ -341,7 +341,9 @@ store_load_barrier(void) { EXTERN_INLINE void load_load_barrier(void) { -#if i386_HOST_ARCH +#if defined(NOSMP) + return; +#elif i386_HOST_ARCH __asm__ __volatile__ ("" : : : "memory"); #elif x86_64_HOST_ARCH __asm__ __volatile__ ("" : : : "memory"); @@ -352,8 +354,6 @@ load_load_barrier(void) { __asm__ __volatile__ ("" : : : "memory"); #elif arm_HOST_ARCH && !defined(arm_HOST_ARCH_PRE_ARMv7) __asm__ __volatile__ ("dmb" : : : "memory"); -#elif !defined(WITHSMP) - return; #else #error memory barriers unimplemented on this architecture #endif diff --git a/libffi-tarballs b/libffi-tarballs new file mode 160000 +Subproject a0088d1da0e171849ddb47a46c869856037a01d diff --git a/libraries/Cabal b/libraries/Cabal -Subproject 597ed8f613db327cfab958aa64da6c0f9e1ee29 +Subproject 337f9cd7927b787c6796acddc943393cf5b8e64 diff --git a/libraries/array b/libraries/array new file mode 160000 +Subproject 7784c531e2fc8ae7e544ce50293a6108005cedd diff --git a/libraries/base/.gitignore b/libraries/base/.gitignore index 69a9c5124b..3581b447ac 100644 --- a/libraries/base/.gitignore +++ b/libraries/base/.gitignore @@ -1,14 +1,6 @@ *.o *.aux *.hi -*.comp.stderr -*.comp.stdout -*.interp.stderr -*.interp.stdout -*.run.stderr -*.run.stdout -*.eventlog -*.genscript # Backup files *~ @@ -26,242 +18,3 @@ /include/HsBaseConfig.h /include/HsBaseConfig.h.in -tests/.hpc/ -tests/4006 -tests/CPUTime001 -tests/Concurrent/.hpc/ -tests/Concurrent/4876 -tests/Concurrent/ThreadDelay001 -tests/IO/.hpc/ -tests/IO/2122 -tests/IO/2122-test -tests/IO/3307 -tests/IO/4808 -tests/IO/4808.test -tests/IO/4855 -tests/IO/4895 -tests/IO/IOError001 -tests/IO/IOError002 -tests/IO/T4144 -tests/IO/chinese-file-* -tests/IO/chinese-name -tests/IO/concio002 -tests/IO/countReaders001 -tests/IO/countReaders001.txt -tests/IO/decodingerror001 -tests/IO/decodingerror002 -tests/IO/encoding001 -tests/IO/encoding001.utf16 -tests/IO/encoding001.utf16.utf16be -tests/IO/encoding001.utf16.utf16le -tests/IO/encoding001.utf16.utf32 -tests/IO/encoding001.utf16.utf32be -tests/IO/encoding001.utf16.utf32le -tests/IO/encoding001.utf16.utf8 -tests/IO/encoding001.utf16.utf8_bom -tests/IO/encoding001.utf16be -tests/IO/encoding001.utf16be.utf16 -tests/IO/encoding001.utf16be.utf16le -tests/IO/encoding001.utf16be.utf32 -tests/IO/encoding001.utf16be.utf32be -tests/IO/encoding001.utf16be.utf32le -tests/IO/encoding001.utf16be.utf8 -tests/IO/encoding001.utf16be.utf8_bom -tests/IO/encoding001.utf16le -tests/IO/encoding001.utf16le.utf16 -tests/IO/encoding001.utf16le.utf16be -tests/IO/encoding001.utf16le.utf32 -tests/IO/encoding001.utf16le.utf32be -tests/IO/encoding001.utf16le.utf32le -tests/IO/encoding001.utf16le.utf8 -tests/IO/encoding001.utf16le.utf8_bom -tests/IO/encoding001.utf32 -tests/IO/encoding001.utf32.utf16 -tests/IO/encoding001.utf32.utf16be -tests/IO/encoding001.utf32.utf16le -tests/IO/encoding001.utf32.utf32be -tests/IO/encoding001.utf32.utf32le -tests/IO/encoding001.utf32.utf8 -tests/IO/encoding001.utf32.utf8_bom -tests/IO/encoding001.utf32be -tests/IO/encoding001.utf32be.utf16 -tests/IO/encoding001.utf32be.utf16be -tests/IO/encoding001.utf32be.utf16le -tests/IO/encoding001.utf32be.utf32 -tests/IO/encoding001.utf32be.utf32le 
-tests/IO/encoding001.utf32be.utf8 -tests/IO/encoding001.utf32be.utf8_bom -tests/IO/encoding001.utf32le -tests/IO/encoding001.utf32le.utf16 -tests/IO/encoding001.utf32le.utf16be -tests/IO/encoding001.utf32le.utf16le -tests/IO/encoding001.utf32le.utf32 -tests/IO/encoding001.utf32le.utf32be -tests/IO/encoding001.utf32le.utf8 -tests/IO/encoding001.utf32le.utf8_bom -tests/IO/encoding001.utf8 -tests/IO/encoding001.utf8.utf16 -tests/IO/encoding001.utf8.utf16be -tests/IO/encoding001.utf8.utf16le -tests/IO/encoding001.utf8.utf32 -tests/IO/encoding001.utf8.utf32be -tests/IO/encoding001.utf8.utf32le -tests/IO/encoding001.utf8.utf8_bom -tests/IO/encoding001.utf8_bom -tests/IO/encoding001.utf8_bom.utf16 -tests/IO/encoding001.utf8_bom.utf16be -tests/IO/encoding001.utf8_bom.utf16le -tests/IO/encoding001.utf8_bom.utf32 -tests/IO/encoding001.utf8_bom.utf32be -tests/IO/encoding001.utf8_bom.utf32le -tests/IO/encoding001.utf8_bom.utf8 -tests/IO/encoding002 -tests/IO/encodingerror001 -tests/IO/environment001 -tests/IO/finalization001 -tests/IO/hClose001 -tests/IO/hClose001.tmp -tests/IO/hClose002 -tests/IO/hClose002.tmp -tests/IO/hClose003 -tests/IO/hDuplicateTo001 -tests/IO/hFileSize001 -tests/IO/hFileSize002 -tests/IO/hFileSize002.out -tests/IO/hFlush001 -tests/IO/hFlush001.out -tests/IO/hGetBuf001 -tests/IO/hGetBuffering001 -tests/IO/hGetChar001 -tests/IO/hGetLine001 -tests/IO/hGetLine002 -tests/IO/hGetLine003 -tests/IO/hGetPosn001 -tests/IO/hGetPosn001.out -tests/IO/hIsEOF001 -tests/IO/hIsEOF002 -tests/IO/hIsEOF002.out -tests/IO/hReady001 -tests/IO/hReady002 -tests/IO/hSeek001 -tests/IO/hSeek002 -tests/IO/hSeek003 -tests/IO/hSeek004 -tests/IO/hSeek004.out -tests/IO/hSetBuffering002 -tests/IO/hSetBuffering003 -tests/IO/hSetBuffering004 -tests/IO/hSetEncoding001 -tests/IO/ioeGetErrorString001 -tests/IO/ioeGetFileName001 -tests/IO/ioeGetHandle001 -tests/IO/isEOF001 -tests/IO/misc001 -tests/IO/misc001.out -tests/IO/newline001 -tests/IO/newline001.out -tests/IO/openFile001 -tests/IO/openFile002 -tests/IO/openFile003 -tests/IO/openFile004 -tests/IO/openFile004.out -tests/IO/openFile005 -tests/IO/openFile005.out1 -tests/IO/openFile005.out2 -tests/IO/openFile006 -tests/IO/openFile006.out -tests/IO/openFile007 -tests/IO/openFile007.out -tests/IO/openFile008 -tests/IO/openTempFile001 -tests/IO/putStr001 -tests/IO/readFile001 -tests/IO/readFile001.out -tests/IO/readwrite001 -tests/IO/readwrite001.inout -tests/IO/readwrite002 -tests/IO/readwrite002.inout -tests/IO/readwrite003 -tests/IO/readwrite003.txt -tests/IO/tmp -tests/Numeric/.hpc/ -tests/Numeric/num001 -tests/Numeric/num002 -tests/Numeric/num003 -tests/Numeric/num004 -tests/Numeric/num005 -tests/Numeric/num006 -tests/Numeric/num007 -tests/Numeric/num008 -tests/Numeric/num009 -tests/Numeric/num010 -tests/System/.hpc/ -tests/System/exitWith001 -tests/System/getArgs001 -tests/System/getEnv001 -tests/System/system001 -tests/Text.Printf/.hpc/ -tests/Text.Printf/1548 -tests/addr001 -tests/assert -tests/char001 -tests/char002 -tests/cstring001 -tests/data-fixed-show-read -tests/dynamic001 -tests/dynamic002 -tests/dynamic003 -tests/dynamic004 -tests/dynamic005 -tests/echo001 -tests/enum01 -tests/enum02 -tests/enum03 -tests/enum04 -tests/enumDouble -tests/enumRatio -tests/exceptionsrun001 -tests/exceptionsrun002 -tests/fixed -tests/genericNegative001 -tests/hGetBuf002 -tests/hGetBuf003 -tests/hPutBuf001 -tests/hPutBuf002 -tests/hPutBuf002.out -tests/hTell001 -tests/hTell002 -tests/hash001 -tests/ioref001 -tests/ix001 -tests/length001 -tests/lex001 -tests/list001 
-tests/list002 -tests/list003 -tests/memo001 -tests/memo002 -tests/performGC001 -tests/quotOverflow -tests/rand001 -tests/ratio001 -tests/readDouble001 -tests/readFloat -tests/readInteger001 -tests/readLitChar -tests/reads001 -tests/show001 -tests/showDouble -tests/stableptr001 -tests/stableptr003 -tests/stableptr004 -tests/stableptr005 -tests/take001 -tests/tempfiles -tests/text001 -tests/trace001 -tests/tup001 -tests/unicode001 -tests/unicode002 -tests/weak001 - diff --git a/libraries/base/Control/Concurrent.hs b/libraries/base/Control/Concurrent.hs index c487190f27..e5a0ebfe20 100644 --- a/libraries/base/Control/Concurrent.hs +++ b/libraries/base/Control/Concurrent.hs @@ -361,8 +361,8 @@ is /bound/, an unbound thread is created temporarily using 'forkIO'. Use this function /only/ in the rare case that you have actually observed a performance loss due to the use of bound threads. A program that -doesn't need it's main thread to be bound and makes /heavy/ use of concurrency -(e.g. a web server), might want to wrap it's @main@ action in +doesn't need its main thread to be bound and makes /heavy/ use of concurrency +(e.g. a web server), might want to wrap its @main@ action in @runInUnboundThread@. Note that exceptions which are thrown to the current thread are thrown in turn diff --git a/libraries/base/Control/Concurrent/Chan.hs b/libraries/base/Control/Concurrent/Chan.hs index 32387da5bf..e0b7b54c23 100644 --- a/libraries/base/Control/Concurrent/Chan.hs +++ b/libraries/base/Control/Concurrent/Chan.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy #-} {-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/Control/Concurrent/QSem.hs b/libraries/base/Control/Concurrent/QSem.hs index 2761ef2bb1..223d86539d 100644 --- a/libraries/base/Control/Concurrent/QSem.hs +++ b/libraries/base/Control/Concurrent/QSem.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Trustworthy #-} -{-# LANGUAGE DeriveDataTypeable, BangPatterns #-} +{-# LANGUAGE AutoDeriveTypeable, BangPatterns #-} {-# OPTIONS_GHC -funbox-strict-fields #-} ----------------------------------------------------------------------------- diff --git a/libraries/base/Control/Concurrent/QSemN.hs b/libraries/base/Control/Concurrent/QSemN.hs index 546b8f945e..a377e5e804 100644 --- a/libraries/base/Control/Concurrent/QSemN.hs +++ b/libraries/base/Control/Concurrent/QSemN.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Trustworthy #-} -{-# LANGUAGE DeriveDataTypeable, BangPatterns #-} +{-# LANGUAGE AutoDeriveTypeable, BangPatterns #-} {-# OPTIONS_GHC -funbox-strict-fields #-} ----------------------------------------------------------------------------- diff --git a/libraries/base/Control/Exception/Base.hs b/libraries/base/Control/Exception/Base.hs index d8a0d9635f..8df4958cbb 100644 --- a/libraries/base/Control/Exception/Base.hs +++ b/libraries/base/Control/Exception/Base.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy #-} {-# LANGUAGE NoImplicitPrelude, MagicHash #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/Control/Monad.hs b/libraries/base/Control/Monad.hs index 19c9a87bde..00c1fdda37 100644 --- a/libraries/base/Control/Monad.hs +++ b/libraries/base/Control/Monad.hs @@ -74,6 +74,9 @@ module Control.Monad , ap + -- ** Strict 
monadic functions + + , (<$!>) ) where import Data.Maybe @@ -311,6 +314,18 @@ is equivalent to ap :: (Monad m) => m (a -> b) -> m a -> m b ap = liftM2 id +infixl 4 <$!> + +-- | Strict version of 'Data.Functor.<$>'. +-- +-- /Since: 4.7.1.0/ +(<$!>) :: Monad m => (a -> b) -> m a -> m b +{-# INLINE (<$!>) #-} +f <$!> m = do + x <- m + let z = f x + z `seq` return z + -- ----------------------------------------------------------------------------- -- Other MonadPlus functions diff --git a/libraries/base/Data/Coerce.hs b/libraries/base/Data/Coerce.hs index bf269f5ea8..653a857da8 100644 --- a/libraries/base/Data/Coerce.hs +++ b/libraries/base/Data/Coerce.hs @@ -1,6 +1,5 @@ {-# LANGUAGE Unsafe #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE NoImplicitPrelude #-} ----------------------------------------------------------------------------- -- | @@ -26,3 +25,6 @@ module Data.Coerce ) where import GHC.Prim (coerce) import GHC.Types (Coercible) + +import GHC.Base () -- for build ordering + diff --git a/libraries/base/Data/Complex.hs b/libraries/base/Data/Complex.hs index af593cda2f..0ce148788d 100644 --- a/libraries/base/Data/Complex.hs +++ b/libraries/base/Data/Complex.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Trustworthy #-} -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE AutoDeriveTypeable #-} {-# LANGUAGE StandaloneDeriving #-} ----------------------------------------------------------------------------- diff --git a/libraries/base/Data/Data.hs b/libraries/base/Data/Data.hs index a12a6d7144..49407fae16 100644 --- a/libraries/base/Data/Data.hs +++ b/libraries/base/Data/Data.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy, FlexibleInstances #-} {-# LANGUAGE RankNTypes, ScopedTypeVariables, PolyKinds #-} -{-# LANGUAGE StandaloneDeriving, DeriveDataTypeable, TypeOperators, +{-# LANGUAGE StandaloneDeriving, AutoDeriveTypeable, TypeOperators, GADTs #-} ----------------------------------------------------------------------------- @@ -323,7 +323,7 @@ class Typeable a => Data a where -- | A generic query that processes the immediate subterms and returns a list -- of results. The list is given in the same order as originally specified - -- in the declaratoin of the data constructors. + -- in the declaration of the data constructors. gmapQ :: (forall d. 
Data d => d -> u) -> a -> [u] gmapQ f = gmapQr (:) [] f @@ -777,12 +777,12 @@ mkCharConstr dt c = case datarep dt of ------------------------------------------------------------------------------ -- --- Non-representations for non-presentable types +-- Non-representations for non-representable types -- ------------------------------------------------------------------------------ --- | Constructs a non-representation for a non-presentable type +-- | Constructs a non-representation for a non-representable type mkNoRepType :: String -> DataType mkNoRepType str = DataType { tycon = str diff --git a/libraries/base/Data/Dynamic.hs b/libraries/base/Data/Dynamic.hs index 7d49a06bc3..50bea62e1a 100644 --- a/libraries/base/Data/Dynamic.hs +++ b/libraries/base/Data/Dynamic.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy #-} {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/Data/Either.hs b/libraries/base/Data/Either.hs index cf45e79456..9abb20522c 100644 --- a/libraries/base/Data/Either.hs +++ b/libraries/base/Data/Either.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy #-} {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} {-# LANGUAGE PolyKinds, DataKinds, TypeFamilies, TypeOperators, UndecidableInstances #-} ----------------------------------------------------------------------------- diff --git a/libraries/base/Data/Fixed.hs b/libraries/base/Data/Fixed.hs index f5fb896c38..cadbb61ac1 100644 --- a/libraries/base/Data/Fixed.hs +++ b/libraries/base/Data/Fixed.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy #-} {-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE AutoDeriveTypeable #-} {-# OPTIONS -Wall -fno-warn-unused-binds #-} ----------------------------------------------------------------------------- @@ -158,9 +158,13 @@ instance (HasResolution a) => Read (Fixed a) where convertFixed :: forall a . HasResolution a => Lexeme -> ReadPrec (Fixed a) convertFixed (Number n) - | Just (i, f) <- numberToFixed r n = - return (fromInteger i + (fromInteger f / (10 ^ r))) + | Just (i, f) <- numberToFixed e n = + return (fromInteger i + (fromInteger f / (10 ^ e))) where r = resolution (undefined :: Fixed a) + -- round 'e' up to help make the 'read . show == id' property + -- possible also for cases where 'resolution' is not a + -- power-of-10, such as e.g. when 'resolution = 128' + e = ceiling (logBase 10 (fromInteger r) :: Double) convertFixed _ = pfail data E0 = E0 diff --git a/libraries/base/Data/Type/Equality.hs b/libraries/base/Data/Type/Equality.hs index 464f7d28dd..626e817b30 100644 --- a/libraries/base/Data/Type/Equality.hs +++ b/libraries/base/Data/Type/Equality.hs @@ -127,6 +127,61 @@ instance TestEquality ((:~:) a) where type family (a :: k) == (b :: k) :: Bool infix 4 == +{- +This comment explains more about why a poly-kinded instance for (==) is +not provided. To be concrete, here would be the poly-kinded instance: + +type family EqPoly (a :: k) (b :: k) where + EqPoly a a = True + EqPoly a b = False +type instance (a :: k) == (b :: k) = EqPoly a b + +Note that this overlaps with every other instance -- if this were defined, +it would be the only instance for (==). 
+ +Now, consider +data Nat = Zero | Succ Nat + +Suppose I want +foo :: (Succ n == Succ m) ~ True => ((n == m) :~: True) +foo = Refl + +This would not type-check with the poly-kinded instance. `Succ n == Succ m` +quickly becomes `EqPoly (Succ n) (Succ m)` but then is stuck. We don't know +enough about `n` and `m` to reduce further. + +On the other hand, consider this: + +type family EqNat (a :: Nat) (b :: Nat) where + EqNat Zero Zero = True + EqNat (Succ n) (Succ m) = EqNat n m + EqNat n m = False +type instance (a :: Nat) == (b :: Nat) = EqNat a b + +With this instance, `foo` type-checks fine. `Succ n == Succ m` becomes `EqNat +(Succ n) (Succ m)` which becomes `EqNat n m`. Thus, we can conclude `(n == m) +~ True` as desired. + +So, the Nat-specific instance allows strictly more reductions, and is thus +preferable to the poly-kinded instance. But, if we introduce the poly-kinded +instance, we are barred from writing the Nat-specific instance, due to +overlap. + +Even better than the current instance for * would be one that does this sort +of recursion for all datatypes, something like this: + +type family EqStar (a :: *) (b :: *) where + EqStar Bool Bool = True + EqStar (a,b) (c,d) = a == c && b == d + EqStar (Maybe a) (Maybe b) = a == b + ... + EqStar a b = False + +The problem is the (...) is extensible -- we would want to add new cases for +all datatypes in scope. This is not currently possible for closed type +families. +-} + -- all of the following closed type families are local to this module type family EqStar (a :: *) (b :: *) where EqStar a a = True diff --git a/libraries/base/Data/Typeable/Internal.hs b/libraries/base/Data/Typeable/Internal.hs index fa18bf9c60..93b64ef9e9 100644 --- a/libraries/base/Data/Typeable/Internal.hs +++ b/libraries/base/Data/Typeable/Internal.hs @@ -1,4 +1,5 @@ {-# LANGUAGE Unsafe #-} +{-# LANGUAGE BangPatterns #-} ----------------------------------------------------------------------------- -- | @@ -22,6 +23,8 @@ , PolyKinds , ConstraintKinds , DeriveDataTypeable + , DataKinds + , UndecidableInstances , StandaloneDeriving #-} module Data.Typeable.Internal ( @@ -50,6 +53,7 @@ module Data.Typeable.Internal ( import GHC.Base import GHC.Word import GHC.Show +import GHC.Read ( Read ) import Data.Maybe import Data.Proxy import GHC.Num @@ -57,13 +61,21 @@ import GHC.Real -- import GHC.IORef -- import GHC.IOArray -- import GHC.MVar -import GHC.ST ( ST ) +import GHC.ST ( ST, STret ) import GHC.STRef ( STRef ) import GHC.Ptr ( Ptr, FunPtr ) -- import GHC.Stable -import GHC.Arr ( Array, STArray ) +import GHC.Arr ( Array, STArray, Ix ) +import GHC.TypeLits ( Nat, Symbol, KnownNat, KnownSymbol, natVal', symbolVal' ) import Data.Type.Coercion import Data.Type.Equality +import Text.ParserCombinators.ReadP ( ReadP ) +import Text.Read.Lex ( Lexeme, Number ) +import Text.ParserCombinators.ReadPrec ( ReadPrec ) +import GHC.Float ( FFFormat, RealFloat, Floating ) +import Data.Bits ( Bits, FiniteBits ) +import GHC.Enum ( Bounded, Enum ) +import Control.Monad ( MonadPlus ) -- import Data.Int import GHC.Fingerprint.Type @@ -251,8 +263,20 @@ type Typeable7 (a :: * -> * -> * -> * -> * -> * -> * -> *) = Typeable a {-# DEPRECATED Typeable7 "renamed to 'Typeable'" #-} -- deprecated in 7.8 -- | Kind-polymorphic Typeable instance for type application -instance (Typeable s, Typeable a) => Typeable (s a) where - typeRep# _ = typeRep# (proxy# :: Proxy# s) `mkAppTy` typeRep# (proxy# :: Proxy# a) +instance {-# INCOHERENT #-} (Typeable s, Typeable a) => Typeable (s a) where + typeRep# = \_ 
-> rep -- Note [Memoising typeOf] + where !ty1 = typeRep# (proxy# :: Proxy# s) + !ty2 = typeRep# (proxy# :: Proxy# a) + !rep = ty1 `mkAppTy` ty2 + +{- Note [Memoising typeOf] +~~~~~~~~~~~~~~~~~~~~~~~~~~ +See #3245, #9203 + +IMPORTANT: we don't want to recalculate the TypeRep once per call with +the proxy argument. This is what went wrong in #3245 and #9203. So we +help GHC by manually keeping the 'rep' *outside* the lambda. +-} ----------------- Showing TypeReps -------------------- @@ -316,6 +340,7 @@ deriving instance Typeable IO deriving instance Typeable Array deriving instance Typeable ST +deriving instance Typeable STret deriving instance Typeable STRef deriving instance Typeable STArray @@ -351,8 +376,106 @@ deriving instance Typeable Word64 deriving instance Typeable TyCon deriving instance Typeable TypeRep +deriving instance Typeable Fingerprint deriving instance Typeable RealWorld deriving instance Typeable Proxy +deriving instance Typeable KProxy deriving instance Typeable (:~:) deriving instance Typeable Coercion + +deriving instance Typeable ReadP +deriving instance Typeable Lexeme +deriving instance Typeable Number +deriving instance Typeable ReadPrec + +deriving instance Typeable FFFormat + +------------------------------------------------------- +-- +-- Generate Typeable instances for standard classes +-- +------------------------------------------------------- + +deriving instance Typeable (~) +deriving instance Typeable Coercible +deriving instance Typeable TestEquality +deriving instance Typeable TestCoercion + +deriving instance Typeable Eq +deriving instance Typeable Ord + +deriving instance Typeable Bits +deriving instance Typeable FiniteBits +deriving instance Typeable Num +deriving instance Typeable Real +deriving instance Typeable Integral +deriving instance Typeable Fractional +deriving instance Typeable RealFrac +deriving instance Typeable Floating +deriving instance Typeable RealFloat + +deriving instance Typeable Bounded +deriving instance Typeable Enum +deriving instance Typeable Ix + +deriving instance Typeable Show +deriving instance Typeable Read + +deriving instance Typeable Functor +deriving instance Typeable Monad +deriving instance Typeable MonadPlus + +deriving instance Typeable Typeable + + + +-------------------------------------------------------------------------------- +-- Instances for type literals + +{- Note [Potential Collisions in `Nat` and `Symbol` instances] + +Kinds resulting from lifted types have finitely many type-constructors. +This is not the case for `Nat` and `Symbol`, which both contain *infinitely* +many type constructors (e.g., `Nat` has 0, 1, 2, 3, etc.). One might think +that this would increase the chance of hash-collisions in the type but this +is not the case because the fingerprint stored in a `TypeRep` identifies +the whole *type* and not just the type constructor. This is why the chance +of collisions for `Nat` and `Symbol` is not any worse than it is for other +lifted types with infinitely many inhabitants. Indeed, `Nat` is +isomorphic to (lifted) `[()]` and `Symbol` is isomorphic to `[Char]`. +-} + +instance KnownNat n => Typeable (n :: Nat) where + -- See #9203 for an explanation of why this is written as `\_ -> rep`. 
+ typeRep# = \_ -> rep + where + rep = mkTyConApp tc [] + tc = TyCon + { tyConHash = fingerprintString (mk pack modu nm) + , tyConPackage = pack + , tyConModule = modu + , tyConName = nm + } + pack = "base" + modu = "GHC.TypeLits" + nm = show (natVal' (proxy# :: Proxy# n)) + mk a b c = a ++ " " ++ b ++ " " ++ c + + +instance KnownSymbol s => Typeable (s :: Symbol) where + -- See #9203 for an explanation of why this is written as `\_ -> rep`. + typeRep# = \_ -> rep + where + rep = mkTyConApp tc [] + tc = TyCon + { tyConHash = fingerprintString (mk pack modu nm) + , tyConPackage = pack + , tyConModule = modu + , tyConName = nm + } + pack = "base" + modu = "GHC.TypeLits" + nm = show (symbolVal' (proxy# :: Proxy# s)) + mk a b c = a ++ " " ++ b ++ " " ++ c + diff --git a/libraries/base/Data/Unique.hs b/libraries/base/Data/Unique.hs index 2d30cc18e9..a5c0d6c753 100644 --- a/libraries/base/Data/Unique.hs +++ b/libraries/base/Data/Unique.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Trustworthy #-} -{-# LANGUAGE MagicHash, DeriveDataTypeable #-} +{-# LANGUAGE MagicHash, AutoDeriveTypeable #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/Data/Version.hs b/libraries/base/Data/Version.hs index 23d217634e..8b88486571 100644 --- a/libraries/base/Data/Version.hs +++ b/libraries/base/Data/Version.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Trustworthy #-} -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE AutoDeriveTypeable #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/Foreign/C/Types.hs b/libraries/base/Foreign/C/Types.hs index 4aeae53df6..39ba2a868c 100644 --- a/libraries/base/Foreign/C/Types.hs +++ b/libraries/base/Foreign/C/Types.hs @@ -5,7 +5,7 @@ , GeneralizedNewtypeDeriving #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} -- XXX -fno-warn-unused-binds stops us warning about unused constructors, -- but really we should just remove them if we don't want them diff --git a/libraries/base/Foreign/ForeignPtr.hs b/libraries/base/Foreign/ForeignPtr.hs index 0017c0e8ac..2e9b9ec08a 100644 --- a/libraries/base/Foreign/ForeignPtr.hs +++ b/libraries/base/Foreign/ForeignPtr.hs @@ -1,6 +1,5 @@ {-# LANGUAGE Unsafe #-} {-# LANGUAGE NoImplicitPrelude #-} -{-# OPTIONS_HADDOCK hide #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/Foreign/Ptr.hs b/libraries/base/Foreign/Ptr.hs index 0d74a8269c..f35fdeb70f 100644 --- a/libraries/base/Foreign/Ptr.hs +++ b/libraries/base/Foreign/Ptr.hs @@ -4,7 +4,7 @@ , MagicHash , GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/GHC/Base.lhs b/libraries/base/GHC/Base.lhs index 1c8e144b7f..6a089ee432 100644 --- a/libraries/base/GHC/Base.lhs +++ b/libraries/base/GHC/Base.lhs @@ -114,14 +114,8 @@ import GHC.Prim import GHC.Err import {-# SOURCE #-} GHC.IO (failIO) --- This is not strictly speaking required by this module, but is an --- implicit dependency whenever () or tuples are mentioned, so adding it --- as an import here helps to get the dependencies right in the new --- build system. 
-import GHC.Tuple () --- Likewise we need Integer when deriving things like Eq instances, and --- this is a convenient place to force it to be built -import GHC.Integer () +import GHC.Tuple () -- Note [Depend on GHC.Tuple] +import GHC.Integer () -- Note [Depend on GHC.Integer] infixr 9 . infixr 5 ++ @@ -132,6 +126,36 @@ infixr 0 $ default () -- Double isn't available yet \end{code} +Note [Depend on GHC.Integer] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The Integer type is special because TidyPgm uses +GHC.Integer.Type.mkInteger to construct Integer literal values +Currently it reads the interface file whether or not the current +module *has* any Integer literals, so it's important that +GHC.Integer.Type (in patckage integer-gmp or integer-simple) is +compiled before any other module. (There's a hack in GHC to disable +this for packages ghc-prim, integer-gmp, integer-simple, which aren't +allowed to contain any Integer literals.) + +Likewise we implicitly need Integer when deriving things like Eq +instances. + +The danger is that if the build system doesn't know about the dependency +on Integer, it'll compile some base module before GHC.Integer.Type, +resulting in: + Failed to load interface for ‘GHC.Integer.Type’ + There are files missing in the ‘integer-gmp’ package, + +Bottom line: we make GHC.Base depend on GHC.Integer; and everything +else either depends on GHC.Base, or does not have NoImplicitPrelude +(and hence depends on Prelude). + +Note [Depend on GHC.Tuple] +~~~~~~~~~~~~~~~~~~~~~~~~~~ +Similarly, tuple syntax (or ()) creates an implicit dependency on +GHC.Tuple, so we use the same rule as for Integer --- see Note [Depend on +GHC.Integer] --- to explain this to the build system. We make GHC.Base +depend on GHC.Tuple, and everything else depends on GHC.Base or Prelude. %********************************************************* %* * @@ -406,7 +430,7 @@ mapFB c f = \x ys -> c (f x) ys -- -- http://research.microsoft.com/en-us/um/people/simonpj/papers/ext-f/coercible.pdf -{-# RULES "map/coerce" map coerce = coerce #-} +{-# RULES "map/coerce" [1] map coerce = coerce #-} \end{code} diff --git a/libraries/base/GHC/Conc/Sync.lhs b/libraries/base/GHC/Conc/Sync.lhs index ebb7226d09..713e0b5c3b 100644 --- a/libraries/base/GHC/Conc/Sync.lhs +++ b/libraries/base/GHC/Conc/Sync.lhs @@ -219,10 +219,10 @@ forkIOWithUnmask :: ((forall a . IO a -> IO a) -> IO ()) -> IO ThreadId forkIOWithUnmask io = forkIO (io unsafeUnmask) {- | -Like 'forkIO', but lets you specify on which processor the thread +Like 'forkIO', but lets you specify on which capability the thread should run. Unlike a `forkIO` thread, a thread created by `forkOn` -will stay on the same processor for its entire lifetime (`forkIO` -threads can migrate between processors according to the scheduling +will stay on the same capability for its entire lifetime (`forkIO` +threads can migrate between capabilities according to the scheduling policy). `forkOn` is useful for overriding the scheduling policy when you know in advance how best to distribute the threads. 
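The reworded forkOn documentation above talks about capabilities rather than processors. A small usage sketch, assuming the program is started with +RTS -N so that more than one capability exists:

    import Control.Concurrent (forkOn, getNumCapabilities)
    import Control.Monad (forM_)

    -- Pin one worker per capability; unlike plain forkIO threads, these
    -- threads will not migrate between capabilities.
    pinWorkers :: (Int -> IO ()) -> IO ()
    pinWorkers work = do
      caps <- getNumCapabilities
      forM_ [0 .. caps - 1] $ \c -> forkOn c (work c)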
diff --git a/libraries/base/GHC/Conc/Windows.hs b/libraries/base/GHC/Conc/Windows.hs index ad60a07d40..c01281ac68 100644 --- a/libraries/base/GHC/Conc/Windows.hs +++ b/libraries/base/GHC/Conc/Windows.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Trustworthy #-} -{-# LANGUAGE CPP, NoImplicitPrelude, MagicHash, UnboxedTuples, DeriveDataTypeable #-} +{-# LANGUAGE CPP, NoImplicitPrelude, MagicHash, UnboxedTuples, AutoDeriveTypeable #-} {-# OPTIONS_GHC -fno-warn-missing-signatures #-} {-# OPTIONS_HADDOCK not-home #-} diff --git a/libraries/base/GHC/Enum.lhs b/libraries/base/GHC/Enum.lhs index fe2e6b6a71..d94e2ec54b 100644 --- a/libraries/base/GHC/Enum.lhs +++ b/libraries/base/GHC/Enum.lhs @@ -705,7 +705,7 @@ enumDeltaToIntegerFB c n x delta lim "enumDeltaToInteger1" [0] forall c n x . enumDeltaToIntegerFB c n x 1 = up_fb c n x 1 #-} -- This rule ensures that in the common case (delta = 1), we do not do the check here, --- and also that we have the chance to inline up_fb, which would allow the constuctor to be +-- and also that we have the chance to inline up_fb, which would allow the constructor to be -- inlined and good things to happen. -- We do not do it for Int this way because hand-tuned code already exists, and -- the special case varies more from the general case, due to the issue of overflows. diff --git a/libraries/base/GHC/Event.hs b/libraries/base/GHC/Event.hs index b49645e5fe..9746bc7f2e 100644 --- a/libraries/base/GHC/Event.hs +++ b/libraries/base/GHC/Event.hs @@ -11,6 +11,7 @@ module GHC.Event ( -- * Types EventManager + , TimerManager -- * Creation , getSystemEventManager @@ -39,6 +40,6 @@ module GHC.Event import GHC.Event.Manager import GHC.Event.TimerManager (TimeoutCallback, TimeoutKey, registerTimeout, - updateTimeout, unregisterTimeout) + updateTimeout, unregisterTimeout, TimerManager) import GHC.Event.Thread (getSystemEventManager, getSystemTimerManager) diff --git a/libraries/base/GHC/Event/Poll.hsc b/libraries/base/GHC/Event/Poll.hsc index bb0b6e570b..2ed25bec8b 100644 --- a/libraries/base/GHC/Event/Poll.hsc +++ b/libraries/base/GHC/Event/Poll.hsc @@ -14,6 +14,7 @@ module GHC.Event.Poll #if !defined(HAVE_POLL_H) import GHC.Base +import qualified GHC.Event.Internal as E new :: IO E.Backend new = error "Poll back end not implemented for this platform" diff --git a/libraries/base/GHC/Event/Thread.hs b/libraries/base/GHC/Event/Thread.hs index c599047db6..6e991bfb6c 100644 --- a/libraries/base/GHC/Event/Thread.hs +++ b/libraries/base/GHC/Event/Thread.hs @@ -12,9 +12,10 @@ module GHC.Event.Thread , closeFdWith , threadDelay , registerDelay + , blockedOnBadFD -- used by RTS ) where -import Control.Exception (finally) +import Control.Exception (finally, SomeException, toException) import Control.Monad (forM, forM_, sequence_, zipWithM, when) import Data.IORef (IORef, newIORef, readIORef, writeIORef) import Data.List (zipWith3) @@ -115,6 +116,9 @@ threadWait evt fd = mask_ $ do then ioError $ errnoToIOError "threadWait" eBADF Nothing Nothing else return () +-- used at least by RTS in 'select()' IO manager backend +blockedOnBadFD :: SomeException +blockedOnBadFD = toException $ errnoToIOError "awaitEvent" eBADF Nothing Nothing threadWaitSTM :: Event -> Fd -> IO (STM (), IO ()) threadWaitSTM evt fd = mask_ $ do diff --git a/libraries/base/GHC/Event/TimerManager.hs b/libraries/base/GHC/Event/TimerManager.hs index f94f06148a..f581330e25 100644 --- a/libraries/base/GHC/Event/TimerManager.hs +++ b/libraries/base/GHC/Event/TimerManager.hs @@ -98,7 +98,7 @@ become roots for all subsequent minor GCs. 
When the thunks eventually get evaluated they will each create a new intermediate 'TimeoutQueue' that immediately becomes garbage. Since the thunks serve as roots until the next major GC these intermediate -'TimeoutQueue's will get copied unnecesarily in the next minor GC, +'TimeoutQueue's will get copied unnecessarily in the next minor GC, increasing GC time. This problem is known as "floating garbage". Keeping a list of edits doesn't stop this from happening but makes the diff --git a/libraries/base/GHC/Exts.hs b/libraries/base/GHC/Exts.hs index 05e72811c8..938631001a 100755 --- a/libraries/base/GHC/Exts.hs +++ b/libraries/base/GHC/Exts.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Unsafe #-} -{-# LANGUAGE MagicHash, UnboxedTuples, DeriveDataTypeable, TypeFamilies, MultiParamTypeClasses, FlexibleInstances #-} +{-# LANGUAGE MagicHash, UnboxedTuples, AutoDeriveTypeable, TypeFamilies, MultiParamTypeClasses, FlexibleInstances #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/GHC/ForeignPtr.hs b/libraries/base/GHC/ForeignPtr.hs index 58b4f9a3fa..fe7293e41e 100644 --- a/libraries/base/GHC/ForeignPtr.hs +++ b/libraries/base/GHC/ForeignPtr.hs @@ -5,7 +5,7 @@ , UnboxedTuples #-} {-# OPTIONS_HADDOCK hide #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/GHC/IO/Exception.hs b/libraries/base/GHC/IO/Exception.hs index 7f5bc4ef18..e7e3316ca9 100644 --- a/libraries/base/GHC/IO/Exception.hs +++ b/libraries/base/GHC/IO/Exception.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Trustworthy #-} -{-# LANGUAGE NoImplicitPrelude, DeriveDataTypeable, MagicHash, +{-# LANGUAGE NoImplicitPrelude, AutoDeriveTypeable, MagicHash, ExistentialQuantification #-} {-# OPTIONS_GHC -funbox-strict-fields #-} {-# OPTIONS_HADDOCK hide #-} diff --git a/libraries/base/GHC/IO/FD.hs b/libraries/base/GHC/IO/FD.hs index 2023526e55..7b30504f8e 100644 --- a/libraries/base/GHC/IO/FD.hs +++ b/libraries/base/GHC/IO/FD.hs @@ -2,7 +2,7 @@ {-# LANGUAGE CPP , NoImplicitPrelude , BangPatterns - , DeriveDataTypeable + , AutoDeriveTypeable #-} {-# OPTIONS_GHC -fno-warn-identities #-} -- Whether there are identities depends on the platform diff --git a/libraries/base/GHC/IO/Handle/Types.hs b/libraries/base/GHC/IO/Handle/Types.hs index ec8f453cb6..defa33bbca 100644 --- a/libraries/base/GHC/IO/Handle/Types.hs +++ b/libraries/base/GHC/IO/Handle/Types.hs @@ -2,7 +2,7 @@ {-# LANGUAGE CPP , NoImplicitPrelude , ExistentialQuantification - , DeriveDataTypeable + , AutoDeriveTypeable #-} {-# OPTIONS_GHC -funbox-strict-fields #-} {-# OPTIONS_HADDOCK hide #-} diff --git a/libraries/base/GHC/IOArray.hs b/libraries/base/GHC/IOArray.hs index 8594e2ada5..ff9e545817 100644 --- a/libraries/base/GHC/IOArray.hs +++ b/libraries/base/GHC/IOArray.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Unsafe #-} -{-# LANGUAGE NoImplicitPrelude, DeriveDataTypeable #-} +{-# LANGUAGE NoImplicitPrelude, AutoDeriveTypeable #-} {-# OPTIONS_GHC -funbox-strict-fields #-} {-# OPTIONS_HADDOCK hide #-} diff --git a/libraries/base/GHC/IORef.hs b/libraries/base/GHC/IORef.hs index bb618341b4..154c30cd8d 100644 --- a/libraries/base/GHC/IORef.hs +++ b/libraries/base/GHC/IORef.hs @@ -1,5 +1,5 @@ {-# LANGUAGE Unsafe #-} -{-# LANGUAGE NoImplicitPrelude, MagicHash, DeriveDataTypeable #-} +{-# LANGUAGE NoImplicitPrelude, MagicHash, AutoDeriveTypeable #-} {-# OPTIONS_GHC -funbox-strict-fields #-} {-# 
OPTIONS_HADDOCK hide #-} diff --git a/libraries/base/GHC/IP.hs b/libraries/base/GHC/IP.hs index 4794c05452..95b00c15ff 100644 --- a/libraries/base/GHC/IP.hs +++ b/libraries/base/GHC/IP.hs @@ -2,7 +2,7 @@ {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE DataKinds #-} -{-# OPTIONS_GHC -XNoImplicitPrelude #-} +{-# LANGUAGE NoImplicitPrelude #-} -- | /Since: 4.6.0.0/ module GHC.IP (IP(..)) where diff --git a/libraries/base/GHC/Int.hs b/libraries/base/GHC/Int.hs index 899d9ad712..467b3f4e30 100644 --- a/libraries/base/GHC/Int.hs +++ b/libraries/base/GHC/Int.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy #-} {-# LANGUAGE CPP, NoImplicitPrelude, BangPatterns, MagicHash, UnboxedTuples, - StandaloneDeriving, DeriveDataTypeable, NegativeLiterals #-} + StandaloneDeriving, AutoDeriveTypeable, NegativeLiterals #-} {-# OPTIONS_HADDOCK hide #-} ----------------------------------------------------------------------------- diff --git a/libraries/base/GHC/MVar.hs b/libraries/base/GHC/MVar.hs index 03facad608..ff138a5ef2 100644 --- a/libraries/base/GHC/MVar.hs +++ b/libraries/base/GHC/MVar.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE Unsafe, DeriveDataTypeable #-} +{-# LANGUAGE Unsafe, AutoDeriveTypeable #-} {-# LANGUAGE NoImplicitPrelude, MagicHash, UnboxedTuples #-} {-# OPTIONS_GHC -funbox-strict-fields #-} {-# OPTIONS_HADDOCK hide #-} diff --git a/libraries/base/GHC/Ptr.lhs b/libraries/base/GHC/Ptr.lhs index c959d1e375..a55f01e9b1 100644 --- a/libraries/base/GHC/Ptr.lhs +++ b/libraries/base/GHC/Ptr.lhs @@ -37,7 +37,12 @@ import Numeric ( showHex ) ------------------------------------------------------------------------ -- Data pointers. -type role Ptr representational +-- The role of Ptr's parameter is phantom, as there is no relation between +-- the Haskell representation and whathever the user puts at the end of the +-- pointer. And phantom is useful to implement castPtr (see #9163) + +-- redundant role annotation checks that this doesn't change +type role Ptr phantom data Ptr a = Ptr Addr# deriving (Eq, Ord) -- ^ A value of type @'Ptr' a@ represents a pointer to an object, or an -- array of objects, which may be marshalled to or from Haskell values @@ -56,7 +61,7 @@ nullPtr = Ptr nullAddr# -- |The 'castPtr' function casts a pointer from one type to another. castPtr :: Ptr a -> Ptr b -castPtr (Ptr addr) = Ptr addr +castPtr = coerce -- |Advances the given address by the given offset in bytes. plusPtr :: Ptr a -> Int -> Ptr b @@ -82,7 +87,10 @@ minusPtr (Ptr a1) (Ptr a2) = I# (minusAddr# a1 a2) ------------------------------------------------------------------------ -- Function pointers for the default calling convention. -type role FunPtr representational +-- 'FunPtr' has a phantom role for similar reasons to 'Ptr'. Note +-- that 'FunPtr's role cannot become nominal without changes elsewhere +-- in GHC. See Note [FFI type roles] in TcForeign. +type role FunPtr phantom data FunPtr a = FunPtr Addr# deriving (Eq, Ord) -- ^ A value of type @'FunPtr' a@ is a pointer to a function callable -- from foreign code. The type @a@ will normally be a /foreign type/, @@ -132,7 +140,7 @@ nullFunPtr = FunPtr nullAddr# -- |Casts a 'FunPtr' to a 'FunPtr' of a different type. castFunPtr :: FunPtr a -> FunPtr b -castFunPtr (FunPtr addr) = FunPtr addr +castFunPtr = coerce -- |Casts a 'FunPtr' to a 'Ptr'. 
-- diff --git a/libraries/base/GHC/TypeLits.hs b/libraries/base/GHC/TypeLits.hs index 083ae4d144..7ae6fb0422 100644 --- a/libraries/base/GHC/TypeLits.hs +++ b/libraries/base/GHC/TypeLits.hs @@ -9,7 +9,9 @@ {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE UndecidableInstances #-} -- for compiling instances of (==) -{-# OPTIONS_GHC -XNoImplicitPrelude #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE MagicHash #-} + {-| This module is an internal GHC module. It declares the constants used in the implementation of type-level natural numbers. The programmer interface for working with type-level naturals should be defined in a separate library. @@ -22,8 +24,8 @@ module GHC.TypeLits Nat, Symbol -- * Linking type and value level - , KnownNat, natVal - , KnownSymbol, symbolVal + , KnownNat, natVal, natVal' + , KnownSymbol, symbolVal, symbolVal' , SomeNat(..), SomeSymbol(..) , someNatVal, someSymbolVal , sameNat, sameSymbol @@ -40,9 +42,9 @@ import GHC.Num(Integer) import GHC.Base(String) import GHC.Show(Show(..)) import GHC.Read(Read(..)) -import GHC.Prim(magicDict) +import GHC.Prim(magicDict, Proxy#) import Data.Maybe(Maybe(..)) -import Data.Proxy(Proxy(..)) +import Data.Proxy (Proxy(..)) import Data.Type.Equality(type (==), (:~:)(Refl)) import Unsafe.Coerce(unsafeCoerce) @@ -79,6 +81,16 @@ symbolVal :: forall n proxy. KnownSymbol n => proxy n -> String symbolVal _ = case symbolSing :: SSymbol n of SSymbol x -> x +-- | /Since: 4.7.1.0/ +natVal' :: forall n. KnownNat n => Proxy# n -> Integer +natVal' _ = case natSing :: SNat n of + SNat x -> x + +-- | /Since: 4.7.1.0/ +symbolVal' :: forall n. KnownSymbol n => Proxy# n -> String +symbolVal' _ = case symbolSing :: SSymbol n of + SSymbol x -> x + -- | This type represents unknown type-level natural numbers. 
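
The new `natVal'` and `symbolVal'` above are variants of `natVal` and `symbolVal` that take an unboxed `Proxy#` argument (hence the added `MagicHash` pragma). As a small usage sketch, not part of this patch, the boxed versions behave the same way; `vectorLabel` is a made-up helper for illustration:

{-# LANGUAGE DataKinds #-}
import GHC.TypeLits (KnownNat, KnownSymbol, natVal, symbolVal)
import Data.Proxy (Proxy (..))

-- Reflect a type-level name and length down to the value level.
vectorLabel :: (KnownSymbol name, KnownNat n) => Proxy name -> Proxy n -> String
vectorLabel pname plen =
  symbolVal pname ++ " has " ++ show (natVal plen) ++ " elements"

main :: IO ()
main = putStrLn (vectorLabel (Proxy :: Proxy "position") (Proxy :: Proxy 3))
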
diff --git a/libraries/base/System/Mem/StableName.hs b/libraries/base/System/Mem/StableName.hs index 7d080d9247..4f2cab81f8 100644 --- a/libraries/base/System/Mem/StableName.hs +++ b/libraries/base/System/Mem/StableName.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy #-} {-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} {-# LANGUAGE MagicHash #-} #if !defined(__PARALLEL_HASKELL__) {-# LANGUAGE UnboxedTuples #-} diff --git a/libraries/base/System/Posix/Types.hs b/libraries/base/System/Posix/Types.hs index aa159155c0..8b95699b27 100644 --- a/libraries/base/System/Posix/Types.hs +++ b/libraries/base/System/Posix/Types.hs @@ -4,7 +4,7 @@ , MagicHash , GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} ----------------------------------------------------------------------------- -- | diff --git a/libraries/base/System/Timeout.hs b/libraries/base/System/Timeout.hs index 59e6647b0e..322a842392 100644 --- a/libraries/base/System/Timeout.hs +++ b/libraries/base/System/Timeout.hs @@ -1,6 +1,6 @@ {-# LANGUAGE Trustworthy #-} {-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} +{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-} ------------------------------------------------------------------------------- -- | diff --git a/libraries/base/base.cabal b/libraries/base/base.cabal index a70a661920..e56724ce4f 100644 --- a/libraries/base/base.cabal +++ b/libraries/base/base.cabal @@ -48,6 +48,7 @@ Flag integer-simple Library default-language: Haskell2010 other-extensions: + AutoDeriveTypeable BangPatterns CApiFFI CPP diff --git a/libraries/base/cbits/inputReady.c b/libraries/base/cbits/inputReady.c index 51f278feb1..dac9d9b524 100644 --- a/libraries/base/cbits/inputReady.c +++ b/libraries/base/cbits/inputReady.c @@ -25,7 +25,11 @@ fdReady(int fd, int write, int msecs, int isSock) int maxfd, ready; fd_set rfd, wfd; struct timeval tv; - + if ((fd >= (int)FD_SETSIZE) || (fd < 0)) { + /* avoid memory corruption on too large FDs */ + errno = EINVAL; + return -1; + } FD_ZERO(&rfd); FD_ZERO(&wfd); if (write) { diff --git a/libraries/base/changelog.md b/libraries/base/changelog.md index c561165024..46006b134c 100644 --- a/libraries/base/changelog.md +++ b/libraries/base/changelog.md @@ -12,6 +12,21 @@ * Weaken RealFloat constraints on some `Data.Complex` functions + * Add `Control.Monad.(<$!>)` as a strict version of `(<$>)` + +## 4.7.0.1 *Jul 2014* + + * Bundled with GHC 7.8.3 + + * Unhide `Foreign.ForeignPtr` in Haddock (#8475) + + * Fix recomputation of `TypeRep` in `Typeable` type-application instance + (#9203) + + * Fix regression in Data.Fixed Read instance (#9231) + + * Fix `fdReady` to honor `FD_SETSIZE` (#9168) + ## 4.7.0.0 *Apr 2014* * Bundled with GHC 7.8.1 diff --git a/libraries/base/tests/.gitignore b/libraries/base/tests/.gitignore new file mode 100644 index 0000000000..b7b2dc8e76 --- /dev/null +++ b/libraries/base/tests/.gitignore @@ -0,0 +1,271 @@ +*.eventlog +*.genscript + +*.stderr.normalised +*.stdout.normalised +*.comp.stderr +*.comp.stdout +*.interp.stderr +*.interp.stdout +*.run.stderr +*.run.stdout + +.hpc.*/ +.hpc/ + +# specific files +/CPUTime001 +/Concurrent/4876 +/Concurrent/Chan002 +/Concurrent/Chan003 +/Concurrent/ThreadDelay001 +/IO/IOError001 +/IO/IOError002 +/IO/T2122 +/IO/T2122-test +/IO/T3307 +/IO/T4144 +/IO/T4808 +/IO/T4808.test +/IO/T4855 +/IO/T4895 +/IO/T7853 
+/IO/chinese-file-* +/IO/chinese-name +/IO/concio002 +/IO/countReaders001 +/IO/countReaders001.txt +/IO/decodingerror001 +/IO/decodingerror002 +/IO/encoding001 +/IO/encoding001.utf16 +/IO/encoding001.utf16.utf16be +/IO/encoding001.utf16.utf16le +/IO/encoding001.utf16.utf32 +/IO/encoding001.utf16.utf32be +/IO/encoding001.utf16.utf32le +/IO/encoding001.utf16.utf8 +/IO/encoding001.utf16.utf8_bom +/IO/encoding001.utf16be +/IO/encoding001.utf16be.utf16 +/IO/encoding001.utf16be.utf16le +/IO/encoding001.utf16be.utf32 +/IO/encoding001.utf16be.utf32be +/IO/encoding001.utf16be.utf32le +/IO/encoding001.utf16be.utf8 +/IO/encoding001.utf16be.utf8_bom +/IO/encoding001.utf16le +/IO/encoding001.utf16le.utf16 +/IO/encoding001.utf16le.utf16be +/IO/encoding001.utf16le.utf32 +/IO/encoding001.utf16le.utf32be +/IO/encoding001.utf16le.utf32le +/IO/encoding001.utf16le.utf8 +/IO/encoding001.utf16le.utf8_bom +/IO/encoding001.utf32 +/IO/encoding001.utf32.utf16 +/IO/encoding001.utf32.utf16be +/IO/encoding001.utf32.utf16le +/IO/encoding001.utf32.utf32be +/IO/encoding001.utf32.utf32le +/IO/encoding001.utf32.utf8 +/IO/encoding001.utf32.utf8_bom +/IO/encoding001.utf32be +/IO/encoding001.utf32be.utf16 +/IO/encoding001.utf32be.utf16be +/IO/encoding001.utf32be.utf16le +/IO/encoding001.utf32be.utf32 +/IO/encoding001.utf32be.utf32le +/IO/encoding001.utf32be.utf8 +/IO/encoding001.utf32be.utf8_bom +/IO/encoding001.utf32le +/IO/encoding001.utf32le.utf16 +/IO/encoding001.utf32le.utf16be +/IO/encoding001.utf32le.utf16le +/IO/encoding001.utf32le.utf32 +/IO/encoding001.utf32le.utf32be +/IO/encoding001.utf32le.utf8 +/IO/encoding001.utf32le.utf8_bom +/IO/encoding001.utf8 +/IO/encoding001.utf8.utf16 +/IO/encoding001.utf8.utf16be +/IO/encoding001.utf8.utf16le +/IO/encoding001.utf8.utf32 +/IO/encoding001.utf8.utf32be +/IO/encoding001.utf8.utf32le +/IO/encoding001.utf8.utf8_bom +/IO/encoding001.utf8_bom +/IO/encoding001.utf8_bom.utf16 +/IO/encoding001.utf8_bom.utf16be +/IO/encoding001.utf8_bom.utf16le +/IO/encoding001.utf8_bom.utf32 +/IO/encoding001.utf8_bom.utf32be +/IO/encoding001.utf8_bom.utf32le +/IO/encoding001.utf8_bom.utf8 +/IO/encoding002 +/IO/encoding003 +/IO/encoding004 +/IO/encodingerror001 +/IO/environment001 +/IO/finalization001 +/IO/hClose001 +/IO/hClose001.tmp +/IO/hClose002 +/IO/hClose002.tmp +/IO/hClose003 +/IO/hDuplicateTo001 +/IO/hFileSize001 +/IO/hFileSize002 +/IO/hFileSize002.out +/IO/hFlush001 +/IO/hFlush001.out +/IO/hGetBuf001 +/IO/hGetBuffering001 +/IO/hGetChar001 +/IO/hGetLine001 +/IO/hGetLine002 +/IO/hGetLine003 +/IO/hGetPosn001 +/IO/hGetPosn001.out +/IO/hIsEOF001 +/IO/hIsEOF002 +/IO/hIsEOF002.out +/IO/hReady001 +/IO/hReady002 +/IO/hSeek001 +/IO/hSeek002 +/IO/hSeek003 +/IO/hSeek004 +/IO/hSeek004.out +/IO/hSetBuffering002 +/IO/hSetBuffering003 +/IO/hSetBuffering004 +/IO/hSetEncoding001 +/IO/ioeGetErrorString001 +/IO/ioeGetFileName001 +/IO/ioeGetHandle001 +/IO/isEOF001 +/IO/misc001 +/IO/misc001.out +/IO/newline001 +/IO/newline001.out +/IO/openFile001 +/IO/openFile002 +/IO/openFile003 +/IO/openFile003Dir +/IO/openFile004 +/IO/openFile004.out +/IO/openFile005 +/IO/openFile005.out1 +/IO/openFile005.out2 +/IO/openFile006 +/IO/openFile006.out +/IO/openFile007 +/IO/openFile007.out +/IO/openFile008 +/IO/openTempFile001 +/IO/putStr001 +/IO/readFile001 +/IO/readFile001.out +/IO/readwrite001 +/IO/readwrite001.inout +/IO/readwrite002 +/IO/readwrite002.inout +/IO/readwrite003 +/IO/readwrite003.txt +/IO/tmp +/Numeric/num001 +/Numeric/num002 +/Numeric/num003 +/Numeric/num004 +/Numeric/num005 +/Numeric/num006 +/Numeric/num007 
+/Numeric/num008 +/Numeric/num009 +/Numeric/num010 +/System/T5930 +/System/Timeout001 +/System/exitWith001 +/System/getArgs001 +/System/getEnv001 +/System/system001 +/T4006 +/T5943 +/T5962 +/T7034 +/T7457 +/T7653 +/T7773 +/T7787 +/T8766 +/T8766.stats +/Text.Printf/T1548 +/addr001 +/assert +/char001 +/char002 +/cstring001 +/data-fixed-show-read +/dynamic001 +/dynamic002 +/dynamic003 +/dynamic004 +/dynamic005 +/echo001 +/enum01 +/enum02 +/enum03 +/enum04 +/enumDouble +/enumRatio +/exceptionsrun001 +/exceptionsrun002 +/fixed +/genericNegative001 +/hGetBuf002 +/hGetBuf003 +/hPutBuf001 +/hPutBuf002 +/hPutBuf002.out +/hTell001 +/hTell002 +/hash001 +/ioref001 +/ix001 +/length001 +/lex001 +/list001 +/list002 +/list003 +/memo001 +/memo002 +/performGC001 +/qsem001 +/qsemn001 +/quotOverflow +/rand001 +/ratio001 +/readDouble001 +/readFixed001 +/readFloat +/readInteger001 +/readLitChar +/reads001 +/show001 +/showDouble +/stableptr001 +/stableptr003 +/stableptr004 +/stableptr005 +/take001 +/tempfiles +/text001 +/topHandler01 +/topHandler02 +/topHandler03 +/trace001 +/tup001 +/unicode001 +/unicode002 +/weak001 diff --git a/libraries/base/tests/IO/all.T b/libraries/base/tests/IO/all.T index 94d75f0989..e15c84d9f8 100644 --- a/libraries/base/tests/IO/all.T +++ b/libraries/base/tests/IO/all.T @@ -121,8 +121,7 @@ test('concio002', reqlib('process'), compile_and_run, ['']) test('T2122', extra_clean(['T2122-test']), compile_and_run, ['']) test('T3307', - [when(msys(), expect_fail), # Doesn't work on MSYS; see #5599 - extra_clean(['chinese-file-小说', 'chinese-name'])], + [extra_clean(['chinese-file-小说', 'chinese-name'])], run_command, ['$MAKE -s --no-print-directory T3307-test']) test('T4855', normal, compile_and_run, ['']) @@ -148,8 +147,7 @@ test('encoding003', normal, compile_and_run, ['']) test('encoding004', normal, compile_and_run, ['']) test('environment001', - [when(msys(), expect_fail), # Doesn't work on MSYS; see #5599 - extra_clean(['environment001'])], + [extra_clean(['environment001'])], run_command, ['$MAKE -s --no-print-directory environment001-test']) diff --git a/libraries/base/tests/T9111.hs b/libraries/base/tests/T9111.hs new file mode 100644 index 0000000000..b2d1716ccd --- /dev/null +++ b/libraries/base/tests/T9111.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE DataKinds #-} + +module T9111 where + +import Data.Typeable + +a = typeRep (Proxy :: Proxy 'True) +b = typeRep (Proxy :: Proxy Typeable) +c = typeRep (Proxy :: Proxy (~)) +d = typeRep (Proxy :: Proxy 'Left) diff --git a/libraries/base/tests/all.T b/libraries/base/tests/all.T index d4a6c0511b..12a241085a 100644 --- a/libraries/base/tests/all.T +++ b/libraries/base/tests/all.T @@ -114,7 +114,8 @@ test('weak001', normal, compile_and_run, ['']) # cat: write error: Permission denied # Seems to be a known problem, e.g. 
# http://mingw-users.1079350.n2.nabble.com/Bug-re-Unicode-on-the-console-td3121717.html -test('T4006', when(msys(), expect_fail), compile_and_run, ['']) +# May 2014: seems to work on msys2 +test('T4006', normal, compile_and_run, ['']) test('T5943', normal, compile_and_run, ['']) test('T5962', normal, compile_and_run, ['']) @@ -156,10 +157,14 @@ test('topHandler03', test('T8766', - [ stats_num_field('bytes allocated', (16828144, 5)), + [ stats_num_field('bytes allocated', + [ (wordsize(64), 16828144, 5) # with GHC-7.6.3: 83937384 (but faster execution than the next line) # before: 58771216 (without call-arity-analysis) # expected value: 16828144 (2014-01-14) - only_ways(['normal'])], + , (wordsize(32), 8433644, 5) ]) + , only_ways(['normal'])], compile_and_run, ['-O']) + +test('T9111', normal, compile, ['']) diff --git a/libraries/bin-package-db/Distribution/InstalledPackageInfo/Binary.hs b/libraries/bin-package-db/Distribution/InstalledPackageInfo/Binary.hs index ab7fcf5652..6ad169787f 100644 --- a/libraries/bin-package-db/Distribution/InstalledPackageInfo/Binary.hs +++ b/libraries/bin-package-db/Distribution/InstalledPackageInfo/Binary.hs @@ -1,11 +1,10 @@ -{-# LANGUAGE CPP, RecordWildCards, TypeSynonymInstances, StandaloneDeriving, GeneralizedNewtypeDeriving #-} +{-# LANGUAGE RecordWildCards, Trustworthy, TypeSynonymInstances, StandaloneDeriving, + GeneralizedNewtypeDeriving #-} {-# OPTIONS_GHC -fno-warn-name-shadowing #-} -- This module deliberately defines orphan instances for now. Should -- become unnecessary once we move to using the binary package properly: {-# OPTIONS_GHC -fno-warn-orphans #-} -#if __GLASGOW_HASKELL__ >= 701 -{-# LANGUAGE Trustworthy #-} -#endif + ----------------------------------------------------------------------------- -- | -- Module : Distribution.InstalledPackageInfo.Binary diff --git a/libraries/bin-package-db/bin-package-db.cabal b/libraries/bin-package-db/bin-package-db.cabal index 44408a77b6..e8b4fd45ee 100644 --- a/libraries/bin-package-db/bin-package-db.cabal +++ b/libraries/bin-package-db/bin-package-db.cabal @@ -4,22 +4,27 @@ license: BSD3 maintainer: ghc-devs@haskell.org bug-reports: glasgow-haskell-bugs@haskell.org synopsis: A binary format for the package database -cabal-version: >=1.6 -build-type: Simple +cabal-version: >=1.10 +build-type: Simple source-repository head type: git location: http://git.haskell.org/ghc.git subdir: libraries/bin-package-db -Library { +Library + default-language: Haskell2010 + other-extensions: + GeneralizedNewtypeDeriving + RecordWildCards + StandaloneDeriving + Trustworthy + TypeSynonymInstances + exposed-modules: Distribution.InstalledPackageInfo.Binary - build-depends: base >= 4 && < 5 - - build-depends: binary >= 0.5 && < 0.8, - Cabal >= 1.20 && < 1.21 + build-depends: base >= 4 && < 5, + binary >= 0.5 && < 0.8, + Cabal >= 1.20 && < 1.22 - extensions: CPP -} diff --git a/libraries/containers b/libraries/containers -Subproject e787f05e7ef7b07363bd04962af8b1ec6569388 +Subproject e84c5d2145415cb0beacce0909a551ae5e28d39 diff --git a/libraries/deepseq b/libraries/deepseq new file mode 160000 +Subproject 3a9c431e4c89ca506aae8e80867cfcde8c09972 diff --git a/libraries/directory b/libraries/directory new file mode 160000 +Subproject 54c677d227b278de694b10398404981d64ece62 diff --git a/libraries/dph b/libraries/dph new file mode 160000 +Subproject 3ebad521cd1e3b5573d97b483305ca465a9cba6 diff --git a/libraries/filepath b/libraries/filepath new file mode 160000 +Subproject 57d9b11e4a551588ae5df4013e192ff6ec7812f diff --git 
a/libraries/ghc-prim/GHC/Types.hs b/libraries/ghc-prim/GHC/Types.hs index 44351d8777..f6f4233b5b 100644 --- a/libraries/ghc-prim/GHC/Types.hs +++ b/libraries/ghc-prim/GHC/Types.hs @@ -1,4 +1,5 @@ -{-# LANGUAGE MagicHash, NoImplicitPrelude, TypeFamilies, UnboxedTuples #-} +{-# LANGUAGE MagicHash, NoImplicitPrelude, TypeFamilies, UnboxedTuples, + RoleAnnotations #-} ----------------------------------------------------------------------------- -- | -- Module : GHC.Types @@ -80,7 +81,13 @@ at some point, directly or indirectly, from @Main.main@. or the '>>' and '>>=' operations from the 'Monad' class. -} newtype IO a = IO (State# RealWorld -> (# State# RealWorld, a #)) - +type role IO representational +{- +The above role annotation is redundant but is included because this role +is significant in the normalisation of FFI types. Specifically, if this +role were to become nominal (which would be very strange, indeed!), changes +elsewhere in GHC would be necessary. See [FFI type roles] in TcForeign. +-} {- Note [Kind-changing of (~) and Coercible] diff --git a/libraries/ghc-prim/cbits/atomic.c b/libraries/ghc-prim/cbits/atomic.c new file mode 100644 index 0000000000..e3d6cc1e95 --- /dev/null +++ b/libraries/ghc-prim/cbits/atomic.c @@ -0,0 +1,306 @@ +#include "Rts.h" + +// Fallbacks for atomic primops on byte arrays. The builtins used +// below are supported on both GCC and LLVM. +// +// Ideally these function would take StgWord8, StgWord16, etc but +// older GCC versions incorrectly assume that the register that the +// argument is passed in has been zero extended, which is incorrect +// according to the ABI and is not what GHC does when it generates +// calls to these functions. + +// FetchAddByteArrayOp_Int + +extern StgWord hs_atomic_add8(volatile StgWord8 *x, StgWord val); +StgWord +hs_atomic_add8(volatile StgWord8 *x, StgWord val) +{ + return __sync_fetch_and_add(x, (StgWord8) val); +} + +extern StgWord hs_atomic_add16(volatile StgWord16 *x, StgWord val); +StgWord +hs_atomic_add16(volatile StgWord16 *x, StgWord val) +{ + return __sync_fetch_and_add(x, (StgWord16) val); +} + +extern StgWord hs_atomic_add32(volatile StgWord32 *x, StgWord val); +StgWord +hs_atomic_add32(volatile StgWord32 *x, StgWord val) +{ + return __sync_fetch_and_add(x, (StgWord32) val); +} + +extern StgWord64 hs_atomic_add64(volatile StgWord64 *x, StgWord64 val); +StgWord64 +hs_atomic_add64(volatile StgWord64 *x, StgWord64 val) +{ + return __sync_fetch_and_add(x, val); +} + +// FetchSubByteArrayOp_Int + +extern StgWord hs_atomic_sub8(volatile StgWord8 *x, StgWord val); +StgWord +hs_atomic_sub8(volatile StgWord8 *x, StgWord val) +{ + return __sync_fetch_and_sub(x, (StgWord8) val); +} + +extern StgWord hs_atomic_sub16(volatile StgWord16 *x, StgWord val); +StgWord +hs_atomic_sub16(volatile StgWord16 *x, StgWord val) +{ + return __sync_fetch_and_sub(x, (StgWord16) val); +} + +extern StgWord hs_atomic_sub32(volatile StgWord32 *x, StgWord val); +StgWord +hs_atomic_sub32(volatile StgWord32 *x, StgWord val) +{ + return __sync_fetch_and_sub(x, (StgWord32) val); +} + +extern StgWord64 hs_atomic_sub64(volatile StgWord64 *x, StgWord64 val); +StgWord64 +hs_atomic_sub64(volatile StgWord64 *x, StgWord64 val) +{ + return __sync_fetch_and_sub(x, val); +} + +// FetchAndByteArrayOp_Int + +extern StgWord hs_atomic_and8(volatile StgWord8 *x, StgWord val); +StgWord +hs_atomic_and8(volatile StgWord8 *x, StgWord val) +{ + return __sync_fetch_and_and(x, (StgWord8) val); +} + +extern StgWord hs_atomic_and16(volatile StgWord16 *x, StgWord val); 
+StgWord +hs_atomic_and16(volatile StgWord16 *x, StgWord val) +{ + return __sync_fetch_and_and(x, (StgWord16) val); +} + +extern StgWord hs_atomic_and32(volatile StgWord32 *x, StgWord val); +StgWord +hs_atomic_and32(volatile StgWord32 *x, StgWord val) +{ + return __sync_fetch_and_and(x, (StgWord32) val); +} + +extern StgWord64 hs_atomic_and64(volatile StgWord64 *x, StgWord64 val); +StgWord64 +hs_atomic_and64(volatile StgWord64 *x, StgWord64 val) +{ + return __sync_fetch_and_and(x, val); +} + +// FetchNandByteArrayOp_Int + +// Workaround for http://llvm.org/bugs/show_bug.cgi?id=8842 +#define CAS_NAND(x, val) \ + { \ + __typeof__ (*(x)) tmp = *(x); \ + while (!__sync_bool_compare_and_swap(x, tmp, ~(tmp & (val)))) { \ + tmp = *(x); \ + } \ + return tmp; \ + } + +extern StgWord hs_atomic_nand8(volatile StgWord8 *x, StgWord val); +StgWord +hs_atomic_nand8(volatile StgWord8 *x, StgWord val) +{ +#ifdef __clang__ + CAS_NAND(x, (StgWord8) val) +#else + return __sync_fetch_and_nand(x, (StgWord8) val); +#endif +} + +extern StgWord hs_atomic_nand16(volatile StgWord16 *x, StgWord val); +StgWord +hs_atomic_nand16(volatile StgWord16 *x, StgWord val) +{ +#ifdef __clang__ + CAS_NAND(x, (StgWord16) val); +#else + return __sync_fetch_and_nand(x, (StgWord16) val); +#endif +} + +extern StgWord hs_atomic_nand32(volatile StgWord32 *x, StgWord val); +StgWord +hs_atomic_nand32(volatile StgWord32 *x, StgWord val) +{ +#ifdef __clang__ + CAS_NAND(x, (StgWord32) val); +#else + return __sync_fetch_and_nand(x, (StgWord32) val); +#endif +} + +extern StgWord64 hs_atomic_nand64(volatile StgWord64 *x, StgWord64 val); +StgWord64 +hs_atomic_nand64(volatile StgWord64 *x, StgWord64 val) +{ +#ifdef __clang__ + CAS_NAND(x, val); +#else + return __sync_fetch_and_nand(x, val); +#endif +} + +// FetchOrByteArrayOp_Int + +extern StgWord hs_atomic_or8(volatile StgWord8 *x, StgWord val); +StgWord +hs_atomic_or8(volatile StgWord8 *x, StgWord val) +{ + return __sync_fetch_and_or(x, (StgWord8) val); +} + +extern StgWord hs_atomic_or16(volatile StgWord16 *x, StgWord val); +StgWord +hs_atomic_or16(volatile StgWord16 *x, StgWord val) +{ + return __sync_fetch_and_or(x, (StgWord16) val); +} + +extern StgWord hs_atomic_or32(volatile StgWord32 *x, StgWord val); +StgWord +hs_atomic_or32(volatile StgWord32 *x, StgWord val) +{ + return __sync_fetch_and_or(x, (StgWord32) val); +} + +extern StgWord64 hs_atomic_or64(volatile StgWord64 *x, StgWord64 val); +StgWord64 +hs_atomic_or64(volatile StgWord64 *x, StgWord64 val) +{ + return __sync_fetch_and_or(x, val); +} + +// FetchXorByteArrayOp_Int + +extern StgWord hs_atomic_xor8(volatile StgWord8 *x, StgWord val); +StgWord +hs_atomic_xor8(volatile StgWord8 *x, StgWord val) +{ + return __sync_fetch_and_xor(x, (StgWord8) val); +} + +extern StgWord hs_atomic_xor16(volatile StgWord16 *x, StgWord val); +StgWord +hs_atomic_xor16(volatile StgWord16 *x, StgWord val) +{ + return __sync_fetch_and_xor(x, (StgWord16) val); +} + +extern StgWord hs_atomic_xor32(volatile StgWord32 *x, StgWord val); +StgWord +hs_atomic_xor32(volatile StgWord32 *x, StgWord val) +{ + return __sync_fetch_and_xor(x, (StgWord32) val); +} + +extern StgWord64 hs_atomic_xor64(volatile StgWord64 *x, StgWord64 val); +StgWord64 +hs_atomic_xor64(volatile StgWord64 *x, StgWord64 val) +{ + return __sync_fetch_and_xor(x, val); +} + +// CasByteArrayOp_Int + +extern StgWord hs_cmpxchg8(volatile StgWord8 *x, StgWord old, StgWord new); +StgWord +hs_cmpxchg8(volatile StgWord8 *x, StgWord old, StgWord new) +{ + return __sync_val_compare_and_swap(x, (StgWord8) 
old, (StgWord8) new); +} + +extern StgWord hs_cmpxchg16(volatile StgWord16 *x, StgWord old, StgWord new); +StgWord +hs_cmpxchg16(volatile StgWord16 *x, StgWord old, StgWord new) +{ + return __sync_val_compare_and_swap(x, (StgWord16) old, (StgWord16) new); +} + +extern StgWord hs_cmpxchg32(volatile StgWord32 *x, StgWord old, StgWord new); +StgWord +hs_cmpxchg32(volatile StgWord32 *x, StgWord old, StgWord new) +{ + return __sync_val_compare_and_swap(x, (StgWord32) old, (StgWord32) new); +} + +extern StgWord hs_cmpxchg64(volatile StgWord64 *x, StgWord64 old, StgWord64 new); +StgWord +hs_cmpxchg64(volatile StgWord64 *x, StgWord64 old, StgWord64 new) +{ + return __sync_val_compare_and_swap(x, old, new); +} + +// AtomicReadByteArrayOp_Int + +extern StgWord hs_atomicread8(volatile StgWord8 *x); +StgWord +hs_atomicread8(volatile StgWord8 *x) +{ + return *x; +} + +extern StgWord hs_atomicread16(volatile StgWord16 *x); +StgWord +hs_atomicread16(volatile StgWord16 *x) +{ + return *x; +} + +extern StgWord hs_atomicread32(volatile StgWord32 *x); +StgWord +hs_atomicread32(volatile StgWord32 *x) +{ + return *x; +} + +extern StgWord64 hs_atomicread64(volatile StgWord64 *x); +StgWord64 +hs_atomicread64(volatile StgWord64 *x) +{ + return *x; +} + +// AtomicWriteByteArrayOp_Int + +extern void hs_atomicwrite8(volatile StgWord8 *x, StgWord val); +void +hs_atomicwrite8(volatile StgWord8 *x, StgWord val) +{ + *x = (StgWord8) val; +} + +extern void hs_atomicwrite16(volatile StgWord16 *x, StgWord val); +void +hs_atomicwrite16(volatile StgWord16 *x, StgWord val) +{ + *x = (StgWord16) val; +} + +extern void hs_atomicwrite32(volatile StgWord32 *x, StgWord val); +void +hs_atomicwrite32(volatile StgWord32 *x, StgWord val) +{ + *x = (StgWord32) val; +} + +extern void hs_atomicwrite64(volatile StgWord64 *x, StgWord64 val); +void +hs_atomicwrite64(volatile StgWord64 *x, StgWord64 val) +{ + *x = (StgWord64) val; +} diff --git a/libraries/ghc-prim/ghc-prim.cabal b/libraries/ghc-prim/ghc-prim.cabal index c861342b56..bc9f57126a 100644 --- a/libraries/ghc-prim/ghc-prim.cabal +++ b/libraries/ghc-prim/ghc-prim.cabal @@ -52,6 +52,7 @@ Library exposed-modules: GHC.Prim c-sources: + cbits/atomic.c cbits/bswap.c cbits/debug.c cbits/longlong.c diff --git a/libraries/haskeline b/libraries/haskeline -Subproject 9a1d72aa30b093e27abbeed8cd0c863f0f109fe +Subproject 5579fc2a2949a143ec8946b9bc9dd2ba957bf09 diff --git a/libraries/haskell2010 b/libraries/haskell2010 new file mode 160000 +Subproject c0c87ad53e377aa00f4897bc729c261459b6048 diff --git a/libraries/haskell98 b/libraries/haskell98 new file mode 160000 +Subproject cc6bbbf2bf4eaea57062043cbb6e7c5d6c2f42a diff --git a/libraries/hoopl b/libraries/hoopl new file mode 160000 +Subproject a2e34db038b737365c4126f69b1a32eae84dae6 diff --git a/libraries/hpc b/libraries/hpc new file mode 160000 +Subproject 5a1ee4e8a2056beff16f0a3cac2c4da61b96f31 diff --git a/libraries/integer-gmp/.gitignore b/libraries/integer-gmp/.gitignore index 295f5b267a..4e7da368da 100644 --- a/libraries/integer-gmp/.gitignore +++ b/libraries/integer-gmp/.gitignore @@ -11,3 +11,6 @@ /include/HsIntegerGmp.h /integer-gmp.buildinfo /mkGmpDerivedConstants/dist/ + +/gmp/gmp.h +/gmp/gmpbuild diff --git a/libraries/integer-gmp/gmp/ghc.mk b/libraries/integer-gmp/gmp/ghc.mk index ab899176ab..139ae93515 100644 --- a/libraries/integer-gmp/gmp/ghc.mk +++ b/libraries/integer-gmp/gmp/ghc.mk @@ -146,7 +146,7 @@ libraries/integer-gmp/gmp/libgmp.a libraries/integer-gmp/gmp/gmp.h: PATH=`pwd`:$$PATH; \ export PATH; \ cd gmpbuild && \ - 
CC=$(CCX) NM=$(NM) AR=$(AR_STAGE1) $(SHELL) ./configure \ + CC=$(CCX) NM=$(NM) AR=$(AR_STAGE1) ./configure \ --enable-shared=no \ --host=$(HOSTPLATFORM) --build=$(BUILDPLATFORM) $(MAKE) -C libraries/integer-gmp/gmp/gmpbuild MAKEFLAGS= diff --git a/libraries/integer-simple/GHC/Integer/Type.hs b/libraries/integer-simple/GHC/Integer/Type.hs index 5deecd29bb..cd39b7d6bd 100644 --- a/libraries/integer-simple/GHC/Integer/Type.hs +++ b/libraries/integer-simple/GHC/Integer/Type.hs @@ -1,6 +1,5 @@ -{-# LANGUAGE CPP, MagicHash, ForeignFunctionInterface, - NoImplicitPrelude, BangPatterns, UnboxedTuples, +{-# LANGUAGE CPP, MagicHash, NoImplicitPrelude, BangPatterns, UnboxedTuples, UnliftedFFITypes #-} -- Commentary of Integer library is located on the wiki: diff --git a/libraries/integer-simple/integer-simple.cabal b/libraries/integer-simple/integer-simple.cabal index fa41c240c4..51d3cc7b5b 100644 --- a/libraries/integer-simple/integer-simple.cabal +++ b/libraries/integer-simple/integer-simple.cabal @@ -7,7 +7,7 @@ maintainer: igloo@earth.li synopsis: Simple Integer library description: This package contains an simple Integer library. -cabal-version: >=1.6 +cabal-version: >=1.10 build-type: Simple source-repository head @@ -15,17 +15,17 @@ source-repository head location: http://git.haskell.org/ghc.git subdir: libraries/integer-simple -Library { +Library + default-language: Haskell2010 + build-depends: ghc-prim exposed-modules: GHC.Integer GHC.Integer.Simple.Internals GHC.Integer.Logarithms GHC.Integer.Logarithms.Internals other-modules: GHC.Integer.Type - extensions: CPP, MagicHash, BangPatterns, UnboxedTuples, - ForeignFunctionInterface, UnliftedFFITypes, - NoImplicitPrelude + default-extensions: CPP, MagicHash, BangPatterns, UnboxedTuples, + UnliftedFFITypes, NoImplicitPrelude -- We need to set the package name to integer-simple -- (without a version number) as it's magic. 
ghc-options: -package-name integer-simple -Wall -} diff --git a/libraries/old-locale b/libraries/old-locale new file mode 160000 +Subproject 7e7f6722895af36ca4e2f60f2fdfdc056b70db0 diff --git a/libraries/old-time b/libraries/old-time new file mode 160000 +Subproject e816d30ae8c64ccde2dde3063a7420abc922a0d diff --git a/libraries/parallel b/libraries/parallel new file mode 160000 +Subproject 8df9de914ea3ab7f47874e53b7e9d7c6af4d7f8 diff --git a/libraries/process b/libraries/process new file mode 160000 +Subproject 35bf51cb72baaaeaad22fb340aa9d8c957d2186 diff --git a/libraries/stm b/libraries/stm new file mode 160000 +Subproject e8a901fddc88c6560af34e18a5201deeb8d5155 diff --git a/libraries/template-haskell/Language/Haskell/TH.hs b/libraries/template-haskell/Language/Haskell/TH.hs index e9765a9747..29e3787bd0 100644 --- a/libraries/template-haskell/Language/Haskell/TH.hs +++ b/libraries/template-haskell/Language/Haskell/TH.hs @@ -5,52 +5,52 @@ For other documentation, refer to: -} module Language.Haskell.TH( - -- * The monad and its operations - Q, - runQ, + -- * The monad and its operations + Q, + runQ, -- ** Administration: errors, locations and IO - reportError, -- :: String -> Q () - reportWarning, -- :: String -> Q () - report, -- :: Bool -> String -> Q () - recover, -- :: Q a -> Q a -> Q a - location, -- :: Q Loc - Loc(..), - runIO, -- :: IO a -> Q a - -- ** Querying the compiler - -- *** Reify - reify, -- :: Name -> Q Info - reifyModule, - thisModule, - Info(..), ModuleInfo(..), - InstanceDec, - ParentName, - Arity, - Unlifted, - -- *** Name lookup - lookupTypeName, -- :: String -> Q (Maybe Name) - lookupValueName, -- :: String -> Q (Maybe Name) - -- *** Instance lookup - reifyInstances, - isInstance, + reportError, -- :: String -> Q () + reportWarning, -- :: String -> Q () + report, -- :: Bool -> String -> Q () + recover, -- :: Q a -> Q a -> Q a + location, -- :: Q Loc + Loc(..), + runIO, -- :: IO a -> Q a + -- ** Querying the compiler + -- *** Reify + reify, -- :: Name -> Q Info + reifyModule, + thisModule, + Info(..), ModuleInfo(..), + InstanceDec, + ParentName, + Arity, + Unlifted, + -- *** Name lookup + lookupTypeName, -- :: String -> Q (Maybe Name) + lookupValueName, -- :: String -> Q (Maybe Name) + -- *** Instance lookup + reifyInstances, + isInstance, -- *** Roles lookup reifyRoles, -- *** Annotation lookup reifyAnnotations, AnnLookup(..), - -- * Typed expressions - TExp, unType, - - -- * Names - Name, NameSpace, -- Abstract - -- ** Constructing names - mkName, -- :: String -> Name - newName, -- :: String -> Q Name - -- ** Deconstructing names - nameBase, -- :: Name -> String - nameModule, -- :: Name -> Maybe String - -- ** Built-in names - tupleTypeName, tupleDataName, -- Int -> Name - unboxedTupleTypeName, unboxedTupleDataName, -- :: Int -> Name + -- * Typed expressions + TExp, unType, + + -- * Names + Name, NameSpace, -- Abstract + -- ** Constructing names + mkName, -- :: String -> Name + newName, -- :: String -> Q Name + -- ** Deconstructing names + nameBase, -- :: Name -> String + nameModule, -- :: Name -> Maybe String + -- ** Built-in names + tupleTypeName, tupleDataName, -- Int -> Name + unboxedTupleTypeName, unboxedTupleDataName, -- :: Int -> Name -- * The algebraic data types -- | The lowercase versions (/syntax operators/) of these constructors are @@ -58,11 +58,11 @@ module Language.Haskell.TH( -- quotations (@[| |]@) and splices (@$( ... 
)@) -- ** Declarations - Dec(..), Con(..), Clause(..), - Strict(..), Foreign(..), Callconv(..), Safety(..), Pragma(..), - Inline(..), RuleMatch(..), Phases(..), RuleBndr(..), AnnTarget(..), - FunDep(..), FamFlavour(..), TySynEqn(..), - Fixity(..), FixityDirection(..), defaultFixity, maxPrecedence, + Dec(..), Con(..), Clause(..), + Strict(..), Foreign(..), Callconv(..), Safety(..), Pragma(..), + Inline(..), RuleMatch(..), Phases(..), RuleBndr(..), AnnTarget(..), + FunDep(..), FamFlavour(..), TySynEqn(..), + Fixity(..), FixityDirection(..), defaultFixity, maxPrecedence, -- ** Expressions Exp(..), Match(..), Body(..), Guard(..), Stmt(..), Range(..), Lit(..), -- ** Patterns @@ -78,22 +78,22 @@ module Language.Haskell.TH( -- ** Constructors lifted to 'Q' -- *** Literals - intPrimL, wordPrimL, floatPrimL, doublePrimL, integerL, rationalL, - charL, stringL, stringPrimL, + intPrimL, wordPrimL, floatPrimL, doublePrimL, integerL, rationalL, + charL, stringL, stringPrimL, -- *** Patterns - litP, varP, tupP, conP, uInfixP, parensP, infixP, - tildeP, bangP, asP, wildP, recP, - listP, sigP, viewP, - fieldPat, + litP, varP, tupP, conP, uInfixP, parensP, infixP, + tildeP, bangP, asP, wildP, recP, + listP, sigP, viewP, + fieldPat, -- *** Pattern Guards - normalB, guardedB, normalG, normalGE, patG, patGE, match, clause, + normalB, guardedB, normalG, normalGE, patG, patGE, match, clause, -- *** Expressions - dyn, global, varE, conE, litE, appE, uInfixE, parensE, - infixE, infixApp, sectionL, sectionR, - lamE, lam1E, lamCaseE, tupE, condE, multiIfE, letE, caseE, appsE, - listE, sigE, recConE, recUpdE, stringE, fieldExp, + dyn, global, varE, conE, litE, appE, uInfixE, parensE, + infixE, infixApp, sectionL, sectionR, + lamE, lam1E, lamCaseE, tupE, condE, multiIfE, letE, caseE, appsE, + listE, sigE, recConE, recUpdE, stringE, fieldExp, -- **** Ranges fromE, fromThenE, fromToE, fromThenToE, @@ -105,24 +105,24 @@ module Language.Haskell.TH( bindS, letS, noBindS, parS, -- *** Types - forallT, varT, conT, appT, arrowT, equalityT, listT, tupleT, sigT, litT, + forallT, varT, conT, appT, arrowT, equalityT, listT, tupleT, sigT, litT, promotedT, promotedTupleT, promotedNilT, promotedConsT, -- **** Type literals numTyLit, strTyLit, -- **** Strictness - isStrict, notStrict, strictType, varStrictType, + isStrict, notStrict, strictType, varStrictType, -- **** Class Contexts - cxt, normalC, recC, infixC, forallC, + cxt, classP, equalP, normalC, recC, infixC, forallC, -- *** Kinds - varK, conK, tupleK, arrowK, listK, appK, starK, constraintK, + varK, conK, tupleK, arrowK, listK, appK, starK, constraintK, -- *** Roles nominalR, representationalR, phantomR, inferR, -- *** Top Level Declarations -- **** Data - valD, funD, tySynD, dataD, newtypeD, + valD, funD, tySynD, dataD, newtypeD, -- **** Class classD, instanceD, sigD, -- **** Role annotations @@ -138,7 +138,7 @@ module Language.Haskell.TH( ruleVar, typedRuleVar, pragInlD, pragSpecD, pragSpecInlD, pragSpecInstD, pragRuleD, pragAnnD, - -- * Pretty-printer + -- * Pretty-printer Ppr(..), pprint, pprExp, pprLit, pprPat, pprParendType ) where diff --git a/libraries/template-haskell/Language/Haskell/TH/Lib.hs b/libraries/template-haskell/Language/Haskell/TH/Lib.hs index 49baa96cde..3ac16d1dba 100644 --- a/libraries/template-haskell/Language/Haskell/TH/Lib.hs +++ b/libraries/template-haskell/Language/Haskell/TH/Lib.hs @@ -526,6 +526,22 @@ sigT t k equalityT :: TypeQ equalityT = return EqualityT +{-# DEPRECATED classP "As of template-haskell-2.10, constraint predicates (Pred) are 
just types (Type), in keeping with ConstraintKinds. Please use 'conT' and 'appT'." #-} +classP :: Name -> [Q Type] -> Q Pred +classP cla tys + = do + tysl <- sequence tys + return (foldl AppT (ConT cla) tysl) + +{-# DEPRECATED equalP "As of template-haskell-2.10, constraint predicates (Pred) are just types (Type), in keeping with ConstraintKinds. Please see 'equalityT'." #-} +equalP :: TypeQ -> TypeQ -> PredQ +equalP tleft tright + = do + tleft1 <- tleft + tright1 <- tright + eqT <- equalityT + return (foldl AppT eqT [tleft1, tright1]) + promotedT :: Name -> TypeQ promotedT = return . PromotedT diff --git a/libraries/template-haskell/Language/Haskell/TH/Syntax.hs b/libraries/template-haskell/Language/Haskell/TH/Syntax.hs index 589c66a530..3172cbbced 100644 --- a/libraries/template-haskell/Language/Haskell/TH/Syntax.hs +++ b/libraries/template-haskell/Language/Haskell/TH/Syntax.hs @@ -369,7 +369,7 @@ location = Q qLocation -- a single 'Q' computation, but not about the order in which splices are run. -- -- Note: for various murky reasons, stdout and stderr handles are not --- necesarily flushed when the compiler finishes running, so you should +-- necessarily flushed when the compiler finishes running, so you should -- flush them yourself. runIO :: IO a -> Q a runIO m = Q (qRunIO m) diff --git a/libraries/template-haskell/tests/.gitignore b/libraries/template-haskell/tests/.gitignore new file mode 100644 index 0000000000..f847e98ada --- /dev/null +++ b/libraries/template-haskell/tests/.gitignore @@ -0,0 +1,16 @@ +*.eventlog +*.genscript + +*.stderr.normalised +*.stdout.normalised +*.comp.stderr +*.comp.stdout +*.interp.stderr +*.interp.stdout +*.run.stderr +*.run.stdout + +.hpc.*/ +.hpc/ + +# specific files diff --git a/libraries/transformers b/libraries/transformers -Subproject a59fb93860f84ccd44178dcbbb82cfea7e02cd0 +Subproject 87d9892a604b56d687ce70f1d1abc7848f78c6e diff --git a/libraries/unix b/libraries/unix new file mode 160000 +Subproject 54fbbdecb673705a67d5b9594503cf86d53265c diff --git a/mk/build.mk.sample b/mk/build.mk.sample index 3d47bbe82a..a323884334 100644 --- a/mk/build.mk.sample +++ b/mk/build.mk.sample @@ -66,6 +66,9 @@ V = 1 # working on stage 2 and want to freeze stage 1 and the libraries for # a while. +# Uncomment the following line to disable building DPH +#BUILD_DPH=NO + GhcLibWays = $(if $(filter $(DYNAMIC_GHC_PROGRAMS),YES),v dyn,v) # ----------- A Performance/Distribution build -------------------------------- diff --git a/mk/config.mk.in b/mk/config.mk.in index 7cc7aecf2c..7a73d4632a 100644 --- a/mk/config.mk.in +++ b/mk/config.mk.in @@ -790,24 +790,8 @@ else HSCOLOUR_SRCS = YES endif -################################################################################ -# -# 31-bit-Int Core files -# -################################################################################ - -# -# It is possible to configure the compiler and prelude to support 31-bit -# integers, suitable for a back-end and RTS using a tag bit on a 32-bit -# architecture. Currently the only useful output from this option is external Core -# files. The following additions to your build.mk will produce the -# 31-bit core output. Note that this is *not* just a library "way"; the -# compiler must be built a special way too. - -# GhcCppOpts +=-DWORD_SIZE_IN_BITS=31 -# GhcLibHcOpts +=-fext-core -fno-code -DWORD_SIZE_IN_BITS=31 -# GhcLibCppOpts += -DWORD_SIZE_IN_BITS=31 -# SplitObjs=NO +# Build DPH? 
+BUILD_DPH = YES ################################################################################ # diff --git a/mk/fptools.css b/mk/fptools.css index 97f276c8d5..7a2b39b10c 100644 --- a/mk/fptools.css +++ b/mk/fptools.css @@ -23,6 +23,8 @@ pre { pre.screen { color: #006400 } pre.programlisting { color: maroon } +code.option { white-space: nowrap } + div.example { margin: 1ex 0em; border: solid #412e25 1px; diff --git a/nofib b/nofib new file mode 160000 +Subproject d98f7038d1111e515db9cc27d5d3bbe237e6e14 @@ -26,7 +26,10 @@ # * 'remotepath' is where the repository is in the central repository. # It is - for submodules. # * 'upstreamurl' is the upstream Git repo location for packages -# maintained outside of GHC HQ. +# maintained outside of GHC HQ. Repositories which are hosted on +# GitHub and GHC developers are granted push-rights for are denoted by +# being specified with the `ssh://` scheme. Thus, `https://` +# repo urls denote read-only access. # # * The 'tag' determines when "sync-all get" will get the # repo. If the tag is "-" then it will always get it, but if there @@ -45,37 +48,37 @@ # localpath tag remotepath upstreamurl # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ghc-tarballs windows ghc-tarballs.git - -libffi-tarballs - libffi-tarballs.git - -utils/hsc2hs - hsc2hs.git - -utils/haddock - - - -libraries/array - packages/array.git - +libffi-tarballs - - - +utils/hsc2hs - - - +utils/haddock - - ssh://git@github.com/haskell/haddock.git +libraries/array - - - libraries/binary - - https://github.com/kolmodin/binary.git libraries/bytestring - - https://github.com/haskell/bytestring.git libraries/Cabal - - https://github.com/haskell/cabal.git libraries/containers - - https://github.com/haskell/containers.git -libraries/deepseq - packages/deepseq.git - -libraries/directory - packages/directory.git - -libraries/filepath - packages/filepath.git - +libraries/deepseq - - - +libraries/directory - - - +libraries/filepath - - - libraries/haskeline - - https://github.com/judah/haskeline.git -libraries/haskell98 - packages/haskell98.git - -libraries/haskell2010 - packages/haskell2010.git - -libraries/hoopl - packages/hoopl.git - -libraries/hpc - packages/hpc.git - -libraries/old-locale - packages/old-locale.git - -libraries/old-time - packages/old-time.git - +libraries/haskell98 - - - +libraries/haskell2010 - - - +libraries/hoopl - - - +libraries/hpc - - - +libraries/old-locale - - - +libraries/old-time - - - libraries/pretty - - https://github.com/haskell/pretty.git -libraries/process - packages/process.git - +libraries/process - - - libraries/terminfo - - https://github.com/judah/terminfo.git libraries/time - - http://git.haskell.org/darcs-mirrors/time.git libraries/transformers - - http://git.haskell.org/darcs-mirrors/transformers.git -libraries/unix - packages/unix.git - +libraries/unix - - ssh://git@github.com/haskell/unix.git libraries/Win32 - - https://github.com/haskell/win32.git libraries/xhtml - - https://github.com/haskell/xhtml.git -nofib nofib nofib.git - -libraries/parallel extra packages/parallel.git - -libraries/stm extra packages/stm.git - +nofib nofib - - +libraries/parallel extra - - +libraries/stm extra - - libraries/random dph - https://github.com/haskell/random.git libraries/primitive dph - https://github.com/haskell/primitive.git libraries/vector dph - https://github.com/haskell/vector.git -libraries/dph dph packages/dph.git - +libraries/dph dph - - . 
- ghc.git - diff --git a/rts/Capability.c b/rts/Capability.c index 16b71b7045..805a35be9f 100644 --- a/rts/Capability.c +++ b/rts/Capability.c @@ -273,6 +273,8 @@ initCapability( Capability *cap, nat i ) cap->mut_lists[g] = NULL; } + cap->weak_ptr_list_hd = NULL; + cap->weak_ptr_list_tl = NULL; cap->free_tvar_watch_queues = END_STM_WATCH_QUEUE; cap->free_invariant_check_queues = END_INVARIANT_CHECK_QUEUE; cap->free_trec_chunks = END_STM_CHUNK_LIST; diff --git a/rts/Capability.h b/rts/Capability.h index f342d92244..d36d50293a 100644 --- a/rts/Capability.h +++ b/rts/Capability.h @@ -79,6 +79,11 @@ struct Capability_ { // full pinned object blocks allocated since the last GC bdescr *pinned_object_blocks; + // per-capability weak pointer list associated with nursery (older + // lists stored in generation object) + StgWeak *weak_ptr_list_hd; + StgWeak *weak_ptr_list_tl; + // Context switch flag. When non-zero, this means: stop running // Haskell code, and switch threads. int context_switch; diff --git a/rts/HeapStackCheck.cmm b/rts/HeapStackCheck.cmm index d826529aef..12bcfb26df 100644 --- a/rts/HeapStackCheck.cmm +++ b/rts/HeapStackCheck.cmm @@ -196,7 +196,8 @@ stg_gc_prim_n (W_ arg, W_ fun) jump fun(arg); } -stg_gc_prim_p_ll_ret +INFO_TABLE_RET(stg_gc_prim_p_ll, RET_SMALL, W_ info, P_ arg, W_ fun) + /* explicit stack */ { W_ fun; P_ arg; @@ -216,7 +217,7 @@ stg_gc_prim_p_ll Sp_adj(-3); Sp(2) = fun; Sp(1) = arg; - Sp(0) = stg_gc_prim_p_ll_ret; + Sp(0) = stg_gc_prim_p_ll_info; jump stg_gc_noregs []; } diff --git a/rts/Linker.c b/rts/Linker.c index 1b0d48facf..ad96d74b6f 100644 --- a/rts/Linker.c +++ b/rts/Linker.c @@ -1186,7 +1186,6 @@ typedef struct _RtsSymbolVal { SymI_HasProto(stg_newBCOzh) \ SymI_HasProto(stg_newByteArrayzh) \ SymI_HasProto(stg_casIntArrayzh) \ - SymI_HasProto(stg_fetchAddIntArrayzh) \ SymI_HasProto(stg_newMVarzh) \ SymI_HasProto(stg_newMutVarzh) \ SymI_HasProto(stg_newTVarzh) \ @@ -1900,6 +1899,7 @@ addDLL( pathchar *dll_name ) // success -- try to dlopen the first named file IF_DEBUG(linker, debugBelch("match%s\n","")); line[match[2].rm_eo] = '\0'; + stgFree((void*)errmsg); // Free old message before creating new one errmsg = internal_dlopen(line+match[2].rm_so); break; } @@ -2718,6 +2718,7 @@ loadArchive( pathchar *path ) if (0 == loadOc(oc)) { stgFree(fileName); + fclose(f); return 0; } } @@ -4143,6 +4144,7 @@ ocGetNames_PEi386 ( ObjectCode* oc ) if (0==strcmp(".text",(char*)secname) || 0==strcmp(".text.startup",(char*)secname) || + 0==strcmp(".text.unlikely", (char*)secname) || 0==strcmp(".rdata",(char*)secname)|| 0==strcmp(".eh_frame", (char*)secname)|| 0==strcmp(".rodata",(char*)secname)) diff --git a/rts/Prelude.h b/rts/Prelude.h index 89e80a0a3d..0c54148ba2 100644 --- a/rts/Prelude.h +++ b/rts/Prelude.h @@ -42,6 +42,7 @@ PRELUDE_CLOSURE(base_GHCziIOziException_blockedIndefinitelyOnMVar_closure); PRELUDE_CLOSURE(base_GHCziIOziException_blockedIndefinitelyOnSTM_closure); PRELUDE_CLOSURE(base_ControlziExceptionziBase_nonTermination_closure); PRELUDE_CLOSURE(base_ControlziExceptionziBase_nestedAtomically_closure); +PRELUDE_CLOSURE(base_GHCziEventziThread_blockedOnBadFD_closure); PRELUDE_CLOSURE(base_GHCziConcziSync_runSparks_closure); PRELUDE_CLOSURE(base_GHCziConcziIO_ensureIOManagerIsRunning_closure); @@ -104,6 +105,7 @@ PRELUDE_INFO(base_GHCziStable_StablePtr_con_info); #define blockedIndefinitelyOnSTM_closure DLL_IMPORT_DATA_REF(base_GHCziIOziException_blockedIndefinitelyOnSTM_closure) #define nonTermination_closure 
DLL_IMPORT_DATA_REF(base_ControlziExceptionziBase_nonTermination_closure) #define nestedAtomically_closure DLL_IMPORT_DATA_REF(base_ControlziExceptionziBase_nestedAtomically_closure) +#define blockedOnBadFD_closure DLL_IMPORT_DATA_REF(base_GHCziEventziThread_blockedOnBadFD_closure) #define Czh_static_info DLL_IMPORT_DATA_REF(ghczmprim_GHCziTypes_Czh_static_info) #define Fzh_static_info DLL_IMPORT_DATA_REF(ghczmprim_GHCziTypes_Fzh_static_info) diff --git a/rts/PrimOps.cmm b/rts/PrimOps.cmm index 1dc232d9a7..5f04a6d732 100644 --- a/rts/PrimOps.cmm +++ b/rts/PrimOps.cmm @@ -151,18 +151,6 @@ stg_casIntArrayzh( gcptr arr, W_ ind, W_ old, W_ new ) } -stg_fetchAddIntArrayzh( gcptr arr, W_ ind, W_ incr ) -/* MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s, Int# #) */ -{ - W_ p, h; - - p = arr + SIZEOF_StgArrWords + WDS(ind); - (h) = ccall atomic_inc(p, incr); - - return(h); -} - - stg_newArrayzh ( W_ n /* words */, gcptr init ) { W_ words, size, p; @@ -577,10 +565,11 @@ stg_mkWeakzh ( gcptr key, StgWeak_finalizer(w) = finalizer; StgWeak_cfinalizers(w) = stg_NO_FINALIZER_closure; - ACQUIRE_LOCK(sm_mutex); - StgWeak_link(w) = generation_weak_ptr_list(W_[g0]); - generation_weak_ptr_list(W_[g0]) = w; - RELEASE_LOCK(sm_mutex); + StgWeak_link(w) = Capability_weak_ptr_list_hd(MyCapability()); + Capability_weak_ptr_list_hd(MyCapability()) = w; + if (Capability_weak_ptr_list_tl(MyCapability()) == NULL) { + Capability_weak_ptr_list_tl(MyCapability()) = w; + } IF_DEBUG(weak, ccall debugBelch(stg_weak_msg,w)); @@ -1785,6 +1774,7 @@ stg_tryReadMVarzh ( P_ mvar, /* :: MVar a */ ) LOCK_CLOSURE(mvar, info); if (StgMVar_value(mvar) == stg_END_TSO_QUEUE_closure) { + unlockClosure(mvar, info); return (0, stg_NO_FINALIZER_closure); } diff --git a/rts/Profiling.c b/rts/Profiling.c index 50c9c391e7..53f64a7280 100644 --- a/rts/Profiling.c +++ b/rts/Profiling.c @@ -619,10 +619,8 @@ actualPush_ (CostCentreStack *ccs, CostCentre *cc, CostCentreStack *new_ccs) ccsSetSelected(new_ccs); /* update the memoization table for the parent stack */ - if (ccs != EMPTY_STACK) { - ccs->indexTable = addToIndexTable(ccs->indexTable, new_ccs, cc, - 0/*not a back edge*/); - } + ccs->indexTable = addToIndexTable(ccs->indexTable, new_ccs, cc, + 0/*not a back edge*/); /* return a pointer to the new stack */ return new_ccs; diff --git a/rts/RetainerProfile.c b/rts/RetainerProfile.c index bdfc831b94..bfc96247aa 100644 --- a/rts/RetainerProfile.c +++ b/rts/RetainerProfile.c @@ -1781,6 +1781,12 @@ computeRetainerSet( void ) // // The following code assumes that WEAK objects are considered to be roots // for retainer profilng. + for (n = 0; n < n_capabilities; n++) { + // NB: after a GC, all nursery weak_ptr_lists have been migrated + // to the global lists living in the generations + ASSERT(capabilities[n]->weak_ptr_list_hd == NULL); + ASSERT(capabilities[n]->weak_ptr_list_tl == NULL); + } for (g = 0; g < RtsFlags.GcFlags.generations; g++) { for (weak = generations[g].weak_ptr_list; weak != NULL; weak = weak->link) { // retainRoot((StgClosure *)weak); diff --git a/rts/RtsFlags.c b/rts/RtsFlags.c index af1b2049f6..44c05cec3b 100644 --- a/rts/RtsFlags.c +++ b/rts/RtsFlags.c @@ -241,7 +241,8 @@ usage_text[] = { " -? 
Prints this message and exits; the program is not executed", " --info Print information about the RTS used by this program", "", -" -K<size> Sets the maximum stack size (default 8M) Egs: -K32k -K512k", +" -K<size> Sets the maximum stack size (default: 80% of the heap)", +" Egs: -K32k -K512k -K8M", " -ki<size> Sets the initial thread stack size (default 1k) Egs: -ki4k -ki2m", " -kc<size> Sets the stack chunk size (default 32k)", " -kb<size> Sets the stack chunk buffer size (default 1k)", diff --git a/rts/RtsStartup.c b/rts/RtsStartup.c index aa7306f88a..8e7e11dd26 100644 --- a/rts/RtsStartup.c +++ b/rts/RtsStartup.c @@ -214,6 +214,7 @@ hs_init_ghc(int *argc, char **argv[], RtsConfig rts_config) getStablePtr((StgPtr)ensureIOManagerIsRunning_closure); getStablePtr((StgPtr)ioManagerCapabilitiesChanged_closure); #ifndef mingw32_HOST_OS + getStablePtr((StgPtr)blockedOnBadFD_closure); getStablePtr((StgPtr)runHandlers_closure); #endif @@ -304,7 +305,7 @@ hs_add_root(void (*init_root)(void) STG_UNUSED) static void hs_exit_(rtsBool wait_foreign) { - nat g; + nat g, i; if (hs_init_count <= 0) { errorBelch("warning: too many hs_exit()s"); @@ -336,6 +337,9 @@ hs_exit_(rtsBool wait_foreign) exitScheduler(wait_foreign); /* run C finalizers for all active weak pointers */ + for (i = 0; i < n_capabilities; i++) { + runAllCFinalizers(capabilities[i]->weak_ptr_list_hd); + } for (g = 0; g < RtsFlags.GcFlags.generations; g++) { runAllCFinalizers(generations[g].weak_ptr_list); } @@ -355,8 +359,12 @@ hs_exit_(rtsBool wait_foreign) resetTerminalSettings(); #endif - // uninstall signal handlers - resetDefaultHandlers(); +#if defined(RTS_USER_SIGNALS) + if (RtsFlags.MiscFlags.install_signal_handlers) { + // uninstall signal handlers + resetDefaultHandlers(); + } +#endif /* stop timing the shutdown, we're about to print stats */ stat_endExit(); diff --git a/rts/Schedule.c b/rts/Schedule.c index adf2b5cb39..7f8ced6f3e 100644 --- a/rts/Schedule.c +++ b/rts/Schedule.c @@ -1802,6 +1802,10 @@ forkProcess(HsStablePtr *entry ACQUIRE_LOCK(&capabilities[i]->lock); } +#ifdef THREADED_RTS + ACQUIRE_LOCK(&all_tasks_mutex); +#endif + stopTimer(); // See #4074 #if defined(TRACING) @@ -1823,13 +1827,18 @@ forkProcess(HsStablePtr *entry releaseCapability_(capabilities[i],rtsFalse); RELEASE_LOCK(&capabilities[i]->lock); } + +#ifdef THREADED_RTS + RELEASE_LOCK(&all_tasks_mutex); +#endif + boundTaskExiting(task); // just return the pid return pid; } else { // child - + #if defined(THREADED_RTS) initMutex(&sched_mutex); initMutex(&sm_mutex); @@ -1839,6 +1848,8 @@ forkProcess(HsStablePtr *entry for (i=0; i < n_capabilities; i++) { initMutex(&capabilities[i]->lock); } + + initMutex(&all_tasks_mutex); #endif #ifdef TRACING diff --git a/rts/Stable.c b/rts/Stable.c index ec74b0da13..431b7c66c1 100644 --- a/rts/Stable.c +++ b/rts/Stable.c @@ -246,6 +246,7 @@ STATIC_INLINE void freeSnEntry(snEntry *sn) { ASSERT(sn->sn_obj == NULL); + removeHashTable(addrToStableHash, (W_)sn->old, NULL); sn->addr = (P_)stable_name_free; stable_name_free = sn; } diff --git a/rts/Stats.c b/rts/Stats.c index 48c320c8f7..c3d963c845 100644 --- a/rts/Stats.c +++ b/rts/Stats.c @@ -173,8 +173,8 @@ initStats1 (void) nat i; if (RtsFlags.GcFlags.giveStats >= VERBOSE_GC_STATS) { - statsPrintf(" Alloc Copied Live GC GC TOT TOT Page Flts\n"); - statsPrintf(" bytes bytes bytes user elap user elap\n"); + statsPrintf(" Alloc Copied Live GC GC TOT TOT Page Flts\n"); + statsPrintf(" bytes bytes bytes user elap user elap\n"); } GC_coll_cpu = (Time *)stgMallocBytes( @@ -287,53 
+287,12 @@ stat_startGC (Capability *cap, gc_thread *gct) traceEventGcStartAtT(cap, TimeToNS(gct->gc_start_elapsed - start_init_elapsed)); - gct->gc_start_thread_cpu = getThreadCPUTime(); - if (RtsFlags.GcFlags.giveStats != NO_GC_STATS) { gct->gc_start_faults = getPageFaults(); } } -void -stat_gcWorkerThreadStart (gc_thread *gct STG_UNUSED) -{ -#if 0 - /* - * We dont' collect per-thread GC stats any more, but this code - * could be used to do that if we want to in the future: - */ - if (RtsFlags.GcFlags.giveStats != NO_GC_STATS) - { - getProcessTimes(&gct->gc_start_cpu, &gct->gc_start_elapsed); - gct->gc_start_thread_cpu = getThreadCPUTime(); - } -#endif -} - -void -stat_gcWorkerThreadDone (gc_thread *gct STG_UNUSED) -{ -#if 0 - /* - * We dont' collect per-thread GC stats any more, but this code - * could be used to do that if we want to in the future: - */ - Time thread_cpu, elapsed, gc_cpu, gc_elapsed; - - if (RtsFlags.GcFlags.giveStats != NO_GC_STATS) - { - elapsed = getProcessElapsedTime(); - thread_cpu = getThreadCPUTime(); - - gc_cpu = thread_cpu - gct->gc_start_thread_cpu; - gc_elapsed = elapsed - gct->gc_start_elapsed; - - taskDoneGC(gct->cap->running_task, gc_cpu, gc_elapsed); - } -#endif -} - /* ----------------------------------------------------------------------------- * Calculate the total allocated memory since the start of the * program. Also emits events reporting the per-cap allocation diff --git a/rts/Stats.h b/rts/Stats.h index 9839e5cf2a..925920f108 100644 --- a/rts/Stats.h +++ b/rts/Stats.h @@ -32,9 +32,6 @@ void stat_endGC (Capability *cap, struct gc_thread_ *_gct, W_ live, W_ copied, W_ slop, nat gen, nat n_gc_threads, W_ par_max_copied, W_ par_tot_copied); -void stat_gcWorkerThreadStart (struct gc_thread_ *_gct); -void stat_gcWorkerThreadDone (struct gc_thread_ *_gct); - #ifdef PROFILING void stat_startRP(void); void stat_endRP(nat, diff --git a/rts/Task.c b/rts/Task.c index 12c22c4b02..842ad84a89 100644 --- a/rts/Task.c +++ b/rts/Task.c @@ -39,7 +39,7 @@ static Task * allocTask (void); static Task * newTask (rtsBool); #if defined(THREADED_RTS) -static Mutex all_tasks_mutex; +Mutex all_tasks_mutex; #endif /* ----------------------------------------------------------------------------- @@ -350,6 +350,20 @@ discardTasksExcept (Task *keep) next = task->all_next; if (task != keep) { debugTrace(DEBUG_sched, "discarding task %" FMT_SizeT "", (size_t)TASK_ID(task)); +#if defined(THREADED_RTS) + // It is possible that some of these tasks are currently blocked + // (in the parent process) either on their condition variable + // `cond` or on their mutex `lock`. If they are we may deadlock + // when `freeTask` attempts to call `closeCondition` or + // `closeMutex` (the behaviour of these functions is documented to + // be undefined in the case that there are threads blocked on + // them). To avoid this, we re-initialize both the condition + // variable and the mutex before calling `freeTask` (we do + // precisely the same for all global locks in `forkProcess`). + initCondition(&task->cond); + initMutex(&task->lock); +#endif + // Note that we do not traceTaskDelete here because // we are not really deleting a task. 
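The long comment added to discardTasksExcept above spells out the key point: after fork() the child may inherit task->cond and task->lock in a state where their only waiters live in the parent process, so calling closeCondition/closeMutex on them directly would be undefined; re-initialising both first (as forkProcess already does for the global locks, now including all_tasks_mutex) sidesteps that. A minimal standalone sketch of the same pattern, using plain pthreads and hypothetical names rather than the RTS wrappers:

/* Sketch only -- not part of the patch.  In the child after fork() only
 * the forking thread exists, so any waiters on t->cond / t->lock are
 * parent-only; destroying a possibly-waited-on object is undefined, so
 * re-initialise first, then destroy and free. */
#include <pthread.h>
#include <stdlib.h>

typedef struct {                 /* hypothetical stand-in for an RTS Task */
    pthread_cond_t  cond;
    pthread_mutex_t lock;
} task_t;

static void discard_task_in_child(task_t *t)
{
    pthread_cond_init(&t->cond, NULL);
    pthread_mutex_init(&t->lock, NULL);

    pthread_cond_destroy(&t->cond);
    pthread_mutex_destroy(&t->lock);
    free(t);
}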
// The OS threads for all these tasks do not exist in diff --git a/rts/Task.h b/rts/Task.h index cf70256326..8dab0a2fcf 100644 --- a/rts/Task.h +++ b/rts/Task.h @@ -171,6 +171,11 @@ isBoundTask (Task *task) // extern Task *all_tasks; +// The all_tasks list is protected by the all_tasks_mutex +#if defined(THREADED_RTS) +extern Mutex all_tasks_mutex; +#endif + // Start and stop the task manager. // Requires: sched_mutex. // diff --git a/rts/eventlog/EventLog.c b/rts/eventlog/EventLog.c index 2e0e9bbddc..4fd4b44d80 100644 --- a/rts/eventlog/EventLog.c +++ b/rts/eventlog/EventLog.c @@ -106,6 +106,7 @@ char *EventDesc[] = { [EVENT_TASK_CREATE] = "Task create", [EVENT_TASK_MIGRATE] = "Task migrate", [EVENT_TASK_DELETE] = "Task delete", + [EVENT_HACK_BUG_T9003] = "Empty event for bug #9003", }; // Event type. @@ -420,6 +421,10 @@ initEventLogging(void) sizeof(EventCapNo); break; + case EVENT_HACK_BUG_T9003: + eventTypes[t].size = 0; + break; + default: continue; /* ignore deprecated events */ } diff --git a/rts/package.conf.in b/rts/package.conf.in index 4c8686f262..8250bc2bb6 100644 --- a/rts/package.conf.in +++ b/rts/package.conf.in @@ -99,6 +99,7 @@ ld-options: , "-Wl,-u,_base_GHCziIOziException_blockedIndefinitelyOnMVar_closure" , "-Wl,-u,_base_GHCziIOziException_blockedIndefinitelyOnSTM_closure" , "-Wl,-u,_base_ControlziExceptionziBase_nestedAtomically_closure" + , "-Wl,-u,_base_GHCziEventziThread_blockedOnBadFD_closure" , "-Wl,-u,_base_GHCziWeak_runFinalizzerBatch_closure" , "-Wl,-u,_base_GHCziTopHandler_flushStdHandles_closure" , "-Wl,-u,_base_GHCziTopHandler_runIO_closure" @@ -139,6 +140,7 @@ ld-options: , "-Wl,-u,base_GHCziIOziException_blockedIndefinitelyOnMVar_closure" , "-Wl,-u,base_GHCziIOziException_blockedIndefinitelyOnSTM_closure" , "-Wl,-u,base_ControlziExceptionziBase_nestedAtomically_closure" + , "-Wl,-u,base_GHCziEventziThread_blockedOnBadFD_closure" , "-Wl,-u,base_GHCziWeak_runFinalizzerBatch_closure" , "-Wl,-u,base_GHCziTopHandler_flushStdHandles_closure" , "-Wl,-u,base_GHCziTopHandler_runIO_closure" diff --git a/rts/posix/Select.c b/rts/posix/Select.c index 3d92a4666a..a101f03dd5 100644 --- a/rts/posix/Select.c +++ b/rts/posix/Select.c @@ -14,6 +14,8 @@ #include "Signals.h" #include "Schedule.h" +#include "Prelude.h" +#include "RaiseAsync.h" #include "RtsUtils.h" #include "Itimer.h" #include "Capability.h" @@ -120,6 +122,85 @@ fdOutOfRange (int fd) stg_exit(EXIT_FAILURE); } +/* + * State of individual file descriptor after a 'select()' poll. 
+ */ +enum FdState { + RTS_FD_IS_READY = 0, + RTS_FD_IS_BLOCKING, + RTS_FD_IS_INVALID, +}; + +static enum FdState fdPollReadState (int fd) +{ + int r; + fd_set rfd; + struct timeval now; + + FD_ZERO(&rfd); + FD_SET(fd, &rfd); + + /* only poll */ + now.tv_sec = 0; + now.tv_usec = 0; + for (;;) + { + r = select(fd+1, &rfd, NULL, NULL, &now); + /* the descriptor is sane */ + if (r != -1) + break; + + switch (errno) + { + case EBADF: return RTS_FD_IS_INVALID; + case EINTR: continue; + default: + sysErrorBelch("select"); + stg_exit(EXIT_FAILURE); + } + } + + if (r == 0) + return RTS_FD_IS_BLOCKING; + else + return RTS_FD_IS_READY; +} + +static enum FdState fdPollWriteState (int fd) +{ + int r; + fd_set wfd; + struct timeval now; + + FD_ZERO(&wfd); + FD_SET(fd, &wfd); + + /* only poll */ + now.tv_sec = 0; + now.tv_usec = 0; + for (;;) + { + r = select(fd+1, NULL, &wfd, NULL, &now); + /* the descriptor is sane */ + if (r != -1) + break; + + switch (errno) + { + case EBADF: return RTS_FD_IS_INVALID; + case EINTR: continue; + default: + sysErrorBelch("select"); + stg_exit(EXIT_FAILURE); + } + } + + if (r == 0) + return RTS_FD_IS_BLOCKING; + else + return RTS_FD_IS_READY; +} + /* Argument 'wait' says whether to wait for I/O to become available, * or whether to just check and return immediately. If there are * other threads ready to run, we normally do the non-waiting variety, @@ -137,12 +218,10 @@ void awaitEvent(rtsBool wait) { StgTSO *tso, *prev, *next; - rtsBool ready; fd_set rfd,wfd; int numFound; int maxfd = -1; - rtsBool select_succeeded = rtsTrue; - rtsBool unblock_all = rtsFalse; + rtsBool seen_bad_fd = rtsFalse; struct timeval tv, *ptv; LowResTime now; @@ -225,25 +304,8 @@ awaitEvent(rtsBool wait) while ((numFound = select(maxfd+1, &rfd, &wfd, NULL, ptv)) < 0) { if (errno != EINTR) { - /* Handle bad file descriptors by unblocking all the - waiting threads. Why? Because a thread might have been - a bit naughty and closed a file descriptor while another - was blocked waiting. This is less-than-good programming - practice, but having the RTS as a result fall over isn't - acceptable, so we simply unblock all the waiting threads - should we see a bad file descriptor & give the threads - a chance to clean up their act. - - Note: assume here that threads becoming unblocked - will try to read/write the file descriptor before trying - to issue a threadWaitRead/threadWaitWrite again (==> an - IOError will result for the thread that's got the bad - file descriptor.) Hence, there's no danger of a bad - file descriptor being repeatedly select()'ed on, so - the RTS won't loop. 
- */ if ( errno == EBADF ) { - unblock_all = rtsTrue; + seen_bad_fd = rtsTrue; break; } else { sysErrorBelch("select"); @@ -286,33 +348,58 @@ awaitEvent(rtsBool wait) */ prev = NULL; - if (select_succeeded || unblock_all) { - for(tso = blocked_queue_hd; tso != END_TSO_QUEUE; tso = next) { - next = tso->_link; + { + for(tso = blocked_queue_hd; tso != END_TSO_QUEUE; tso = next) { + next = tso->_link; + int fd; + enum FdState fd_state = RTS_FD_IS_BLOCKING; switch (tso->why_blocked) { - case BlockedOnRead: - ready = unblock_all || FD_ISSET(tso->block_info.fd, &rfd); - break; - case BlockedOnWrite: - ready = unblock_all || FD_ISSET(tso->block_info.fd, &wfd); - break; - default: - barf("awaitEvent"); - } - - if (ready) { - IF_DEBUG(scheduler,debugBelch("Waking up blocked thread %lu\n", (unsigned long)tso->id)); - tso->why_blocked = NotBlocked; - tso->_link = END_TSO_QUEUE; - pushOnRunQueue(&MainCapability,tso); - } else { - if (prev == NULL) - blocked_queue_hd = tso; - else - setTSOLink(&MainCapability, prev, tso); - prev = tso; - } + case BlockedOnRead: + fd = tso->block_info.fd; + + if (seen_bad_fd) { + fd_state = fdPollReadState (fd); + } else if (FD_ISSET(fd, &rfd)) { + fd_state = RTS_FD_IS_READY; + } + break; + case BlockedOnWrite: + fd = tso->block_info.fd; + + if (seen_bad_fd) { + fd_state = fdPollWriteState (fd); + } else if (FD_ISSET(fd, &wfd)) { + fd_state = RTS_FD_IS_READY; + } + break; + default: + barf("awaitEvent"); + } + + switch (fd_state) { + case RTS_FD_IS_INVALID: + /* + * Don't let RTS loop on such descriptors, + * pass an IOError to blocked threads (Trac #4934) + */ + IF_DEBUG(scheduler,debugBelch("Killing blocked thread %lu on bad fd=%i\n", (unsigned long)tso->id, fd)); + throwToSingleThreaded(&MainCapability, tso, (StgClosure *)blockedOnBadFD_closure); + break; + case RTS_FD_IS_READY: + IF_DEBUG(scheduler,debugBelch("Waking up blocked thread %lu\n", (unsigned long)tso->id)); + tso->why_blocked = NotBlocked; + tso->_link = END_TSO_QUEUE; + pushOnRunQueue(&MainCapability,tso); + break; + case RTS_FD_IS_BLOCKING: + if (prev == NULL) + blocked_queue_hd = tso; + else + setTSOLink(&MainCapability, prev, tso); + prev = tso; + break; + } } if (prev == NULL) diff --git a/rts/sm/Compact.c b/rts/sm/Compact.c index 8ae72a96e0..b07a886eab 100644 --- a/rts/sm/Compact.c +++ b/rts/sm/Compact.c @@ -183,7 +183,7 @@ loop: // A word-aligned memmove will be faster for small objects than libc's or gcc's. // Remember, the two regions *might* overlap, but: to <= from. 
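The comment above move() states the invariant that makes a simple forward, word-at-a-time copy safe even though the two regions may overlap: the destination never lies above the source (to <= from), so every word is read before anything can overwrite it. A minimal sketch of that copy, with a hypothetical name and plain size_t words rather than the RTS types:

/* Sketch: a forward word copy is fine for overlapping regions as long
 * as the destination is not above the source (to <= from). */
#include <stddef.h>
#include <assert.h>

static void move_words(size_t *to, size_t *from, size_t n)
{
    assert(to <= from);          /* the invariant the comment relies on */
    for (; n > 0; --n)
        *to++ = *from++;
}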
STATIC_INLINE void -move(StgPtr to, StgPtr from, W_ size) +move(StgPtr to, StgPtr from, StgWord size) { for(; size > 0; --size) { *to++ = *from++; @@ -225,7 +225,7 @@ thread_static( StgClosure* p ) } STATIC_INLINE void -thread_large_bitmap( StgPtr p, StgLargeBitmap *large_bitmap, W_ size ) +thread_large_bitmap( StgPtr p, StgLargeBitmap *large_bitmap, StgWord size ) { W_ i, b; StgWord bitmap; @@ -248,11 +248,25 @@ thread_large_bitmap( StgPtr p, StgLargeBitmap *large_bitmap, W_ size ) } STATIC_INLINE StgPtr +thread_small_bitmap (StgPtr p, StgWord size, StgWord bitmap) +{ + while (size > 0) { + if ((bitmap & 1) == 0) { + thread((StgClosure **)p); + } + p++; + bitmap = bitmap >> 1; + size--; + } + return p; +} + +STATIC_INLINE StgPtr thread_arg_block (StgFunInfoTable *fun_info, StgClosure **args) { StgPtr p; StgWord bitmap; - W_ size; + StgWord size; p = (StgPtr)args; switch (fun_info->f.fun_type) { @@ -269,14 +283,7 @@ thread_arg_block (StgFunInfoTable *fun_info, StgClosure **args) bitmap = BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]); size = BITMAP_SIZE(stg_arg_bitmaps[fun_info->f.fun_type]); small_bitmap: - while (size > 0) { - if ((bitmap & 1) == 0) { - thread((StgClosure **)p); - } - p++; - bitmap = bitmap >> 1; - size--; - } + p = thread_small_bitmap(p, size, bitmap); break; } return p; @@ -287,7 +294,7 @@ thread_stack(StgPtr p, StgPtr stack_end) { const StgRetInfoTable* info; StgWord bitmap; - W_ size; + StgWord size; // highly similar to scavenge_stack, but we do pointer threading here. @@ -315,19 +322,11 @@ thread_stack(StgPtr p, StgPtr stack_end) p++; // NOTE: the payload starts immediately after the info-ptr, we // don't have an StgHeader in the same sense as a heap closure. - while (size > 0) { - if ((bitmap & 1) == 0) { - thread((StgClosure **)p); - } - p++; - bitmap = bitmap >> 1; - size--; - } + p = thread_small_bitmap(p, size, bitmap); continue; case RET_BCO: { StgBCO *bco; - nat size; p++; bco = (StgBCO *)*p; @@ -395,14 +394,7 @@ thread_PAP_payload (StgClosure *fun, StgClosure **payload, StgWord size) default: bitmap = BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]); small_bitmap: - while (size > 0) { - if ((bitmap & 1) == 0) { - thread((StgClosure **)p); - } - p++; - bitmap = bitmap >> 1; - size--; - } + p = thread_small_bitmap(p, size, bitmap); break; } @@ -773,7 +765,7 @@ update_fwd_compact( bdescr *blocks ) #endif bdescr *bd, *free_bd; StgInfoTable *info; - nat size; + StgWord size; StgWord iptr; bd = blocks; @@ -858,7 +850,8 @@ update_bkwd_compact( generation *gen ) #endif bdescr *bd, *free_bd; StgInfoTable *info; - W_ size, free_blocks; + StgWord size; + W_ free_blocks; StgWord iptr; bd = free_bd = gen->old_blocks; diff --git a/rts/sm/GC.c b/rts/sm/GC.c index d22a31eccb..dabcd722d7 100644 --- a/rts/sm/GC.c +++ b/rts/sm/GC.c @@ -286,6 +286,9 @@ GarbageCollect (nat collect_gen, memInventory(DEBUG_gc); #endif + // do this *before* we start scavenging + collectFreshWeakPtrs(); + // check sanity *before* GC IF_DEBUG(sanity, checkSanity(rtsFalse /* before GC */, major_gc)); @@ -1038,8 +1041,6 @@ gcWorkerThread (Capability *cap) SET_GCT(gc_threads[cap->no]); gct->id = osThreadId(); - stat_gcWorkerThreadStart(gct); - // Wait until we're told to wake up RELEASE_SPIN_LOCK(&gct->mut_spin); // yieldThread(); @@ -1097,9 +1098,6 @@ gcWorkerThread (Capability *cap) ACQUIRE_SPIN_LOCK(&gct->mut_spin); debugTrace(DEBUG_gc, "GC thread %d on my way...", gct->thread_index); - // record the time spent doing GC in the Task structure - stat_gcWorkerThreadDone(gct); - 
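thread_small_bitmap above (and its twin scavenge_small_bitmap in rts/sm/Scav.c further down) factor out a loop that several call sites previously repeated inline: walk a payload chunk under a small bitmap, where a 0 bit marks a word that is a pointer to be processed and a 1 bit a non-pointer word. A standalone sketch of that traversal shape, with a hypothetical callback standing in for thread()/evacuate():

/* Sketch of the small-bitmap walk: bit i describes payload word i;
 * a 0 bit means "this word is a pointer, visit it". */
#include <stddef.h>
#include <stdint.h>

typedef void (*visit_fn)(void **slot);     /* hypothetical callback */

static void **walk_small_bitmap(void **p, size_t size, uintptr_t bitmap,
                                visit_fn visit)
{
    while (size > 0) {
        if ((bitmap & 1) == 0)
            visit(p);                      /* pointer field */
        p++;
        bitmap >>= 1;
        size--;
    }
    return p;                              /* first word past the chunk */
}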
SET_GCT(saved_gct); } diff --git a/rts/sm/GCThread.h b/rts/sm/GCThread.h index 12ef999a9b..84ce3f0239 100644 --- a/rts/sm/GCThread.h +++ b/rts/sm/GCThread.h @@ -77,7 +77,7 @@ ------------------------------------------------------------------------- */ typedef struct gen_workspace_ { - generation * gen; // the gen for this workspace + generation * gen; // the gen for this workspace struct gc_thread_ * my_gct; // the gc_thread that contains this workspace // where objects to be scavenged go @@ -184,7 +184,6 @@ typedef struct gc_thread_ { Time gc_start_cpu; // process CPU time Time gc_start_elapsed; // process elapsed time - Time gc_start_thread_cpu; // thread CPU time W_ gc_start_faults; // ------------------- diff --git a/rts/sm/MarkWeak.c b/rts/sm/MarkWeak.c index 4e0c1369a1..0324f3b4b9 100644 --- a/rts/sm/MarkWeak.c +++ b/rts/sm/MarkWeak.c @@ -25,6 +25,8 @@ #include "Storage.h" #include "Threads.h" +#include "sm/Sanity.h" + /* ----------------------------------------------------------------------------- Weak Pointers @@ -39,10 +41,8 @@ new live weak pointers, then all the currently unreachable ones are dead. - For generational GC: we just don't try to finalize weak pointers in - older generations than the one we're collecting. This could - probably be optimised by keeping per-generation lists of weak - pointers, but for a few weak pointers this scheme will work. + For generational GC: we don't try to finalize weak pointers in + older generations than the one we're collecting. There are three distinct stages to processing weak pointers: @@ -343,6 +343,39 @@ static void tidyThreadList (generation *gen) } } +#ifdef DEBUG +static void checkWeakPtrSanity(StgWeak *hd, StgWeak *tl) +{ + StgWeak *w, *prev; + for (w = hd; w != NULL; prev = w, w = w->link) { + ASSERT(INFO_PTR_TO_STRUCT(UNTAG_CLOSURE((StgClosure*)w)->header.info)->type == WEAK); + checkClosure((StgClosure*)w); + } + if (tl != NULL) { + ASSERT(prev == tl); + } +} +#endif + +void collectFreshWeakPtrs() +{ + nat i; + generation *gen = &generations[0]; + // move recently allocated weak_ptr_list to the old list as well + for (i = 0; i < n_capabilities; i++) { + Capability *cap = capabilities[i]; + if (cap->weak_ptr_list_tl != NULL) { + IF_DEBUG(sanity, checkWeakPtrSanity(cap->weak_ptr_list_hd, cap->weak_ptr_list_tl)); + cap->weak_ptr_list_tl->link = gen->weak_ptr_list; + gen->weak_ptr_list = cap->weak_ptr_list_hd; + cap->weak_ptr_list_tl = NULL; + cap->weak_ptr_list_hd = NULL; + } else { + ASSERT(cap->weak_ptr_list_hd == NULL); + } + } +} + /* ----------------------------------------------------------------------------- Evacuate every weak pointer object on the weak_ptr_list, and update the link fields. 
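collectFreshWeakPtrs above is the GC-side half of the stg_mkWeakzh change earlier in this patch: weak pointers allocated since the last collection sit on a per-capability list (weak_ptr_list_hd/_tl), and before scavenging starts each such list is spliced, in O(1), onto generation 0's weak_ptr_list. A small generic sketch of that head/tail splice, with hypothetical types standing in for Capability and StgWeak:

/* Sketch of the list splice done by collectFreshWeakPtrs. */
#include <stddef.h>

typedef struct weak_ {              /* stand-in for StgWeak */
    struct weak_ *link;
} weak_t;

typedef struct {                    /* stand-in for Capability */
    weak_t *weak_ptr_list_hd;
    weak_t *weak_ptr_list_tl;
} cap_t;

static void splice_fresh_weaks(cap_t *cap, weak_t **gen_weak_ptr_list)
{
    if (cap->weak_ptr_list_tl != NULL) {
        /* hook the existing generation list onto the capability list's tail */
        cap->weak_ptr_list_tl->link = *gen_weak_ptr_list;
        /* the capability's head becomes the new head of the generation list */
        *gen_weak_ptr_list = cap->weak_ptr_list_hd;
        cap->weak_ptr_list_hd = NULL;
        cap->weak_ptr_list_tl = NULL;
    }
    /* else: the capability list is empty, so hd should be NULL as well */
}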
diff --git a/rts/sm/MarkWeak.h b/rts/sm/MarkWeak.h index f9bacfa0da..bd0231d74c 100644 --- a/rts/sm/MarkWeak.h +++ b/rts/sm/MarkWeak.h @@ -20,6 +20,7 @@ extern StgWeak *old_weak_ptr_list; extern StgTSO *resurrected_threads; extern StgTSO *exception_threads; +void collectFreshWeakPtrs ( void ); void initWeakForGC ( void ); rtsBool traverseWeakPtrList ( void ); void markWeakPtrList ( void ); diff --git a/rts/sm/Scav.c b/rts/sm/Scav.c index c35444bbaa..b9f8f1259b 100644 --- a/rts/sm/Scav.c +++ b/rts/sm/Scav.c @@ -32,7 +32,7 @@ static void scavenge_stack (StgPtr p, StgPtr stack_end); static void scavenge_large_bitmap (StgPtr p, StgLargeBitmap *large_bitmap, - nat size ); + StgWord size ); #if defined(THREADED_RTS) && !defined(PARALLEL_GC) # define evacuate(a) evacuate1(a) @@ -168,6 +168,20 @@ static StgPtr scavenge_mut_arr_ptrs_marked (StgMutArrPtrs *a) return (StgPtr)a + mut_arr_ptrs_sizeW(a); } +STATIC_INLINE StgPtr +scavenge_small_bitmap (StgPtr p, StgWord size, StgWord bitmap) +{ + while (size > 0) { + if ((bitmap & 1) == 0) { + evacuate((StgClosure **)p); + } + p++; + bitmap = bitmap >> 1; + size--; + } + return p; +} + /* ----------------------------------------------------------------------------- Blocks of function args occur on the stack (at the top) and in PAPs. @@ -178,7 +192,7 @@ scavenge_arg_block (StgFunInfoTable *fun_info, StgClosure **args) { StgPtr p; StgWord bitmap; - nat size; + StgWord size; p = (StgPtr)args; switch (fun_info->f.fun_type) { @@ -195,14 +209,7 @@ scavenge_arg_block (StgFunInfoTable *fun_info, StgClosure **args) bitmap = BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]); size = BITMAP_SIZE(stg_arg_bitmaps[fun_info->f.fun_type]); small_bitmap: - while (size > 0) { - if ((bitmap & 1) == 0) { - evacuate((StgClosure **)p); - } - p++; - bitmap = bitmap >> 1; - size--; - } + p = scavenge_small_bitmap(p, size, bitmap); break; } return p; @@ -234,14 +241,7 @@ scavenge_PAP_payload (StgClosure *fun, StgClosure **payload, StgWord size) default: bitmap = BITMAP_BITS(stg_arg_bitmaps[fun_info->f.fun_type]); small_bitmap: - while (size > 0) { - if ((bitmap & 1) == 0) { - evacuate((StgClosure **)p); - } - p++; - bitmap = bitmap >> 1; - size--; - } + p = scavenge_small_bitmap(p, size, bitmap); break; } return p; @@ -1498,7 +1498,7 @@ scavenge_one(StgPtr p) { StgPtr start = gen->scan; bdescr *start_bd = gen->scan_bd; - nat size = 0; + StgWord size = 0; scavenge(&gen); if (start_bd != gen->scan_bd) { size += (P_)BLOCK_ROUND_UP(start) - start; @@ -1745,7 +1745,7 @@ scavenge_static(void) -------------------------------------------------------------------------- */ static void -scavenge_large_bitmap( StgPtr p, StgLargeBitmap *large_bitmap, nat size ) +scavenge_large_bitmap( StgPtr p, StgLargeBitmap *large_bitmap, StgWord size ) { nat i, j, b; StgWord bitmap; @@ -1765,19 +1765,6 @@ scavenge_large_bitmap( StgPtr p, StgLargeBitmap *large_bitmap, nat size ) } } -STATIC_INLINE StgPtr -scavenge_small_bitmap (StgPtr p, nat size, StgWord bitmap) -{ - while (size > 0) { - if ((bitmap & 1) == 0) { - evacuate((StgClosure **)p); - } - p++; - bitmap = bitmap >> 1; - size--; - } - return p; -} /* ----------------------------------------------------------------------------- scavenge_stack walks over a section of stack and evacuates all the @@ -1790,7 +1777,7 @@ scavenge_stack(StgPtr p, StgPtr stack_end) { const StgRetInfoTable* info; StgWord bitmap; - nat size; + StgWord size; /* * Each time around this loop, we are looking at a chunk of stack @@ -1874,7 +1861,7 @@ scavenge_stack(StgPtr p, 
StgPtr stack_end) case RET_BCO: { StgBCO *bco; - nat size; + StgWord size; p++; evacuate((StgClosure **)p); @@ -1889,7 +1876,7 @@ scavenge_stack(StgPtr p, StgPtr stack_end) // large bitmap (> 32 entries, or > 64 on a 64-bit machine) case RET_BIG: { - nat size; + StgWord size; size = GET_LARGE_BITMAP(&info->i)->size; p++; diff --git a/rts/sm/Storage.c b/rts/sm/Storage.c index 86bd1c2bb3..379d9da769 100644 --- a/rts/sm/Storage.c +++ b/rts/sm/Storage.c @@ -686,7 +686,15 @@ StgPtr allocate (Capability *cap, W_ n) CCS_ALLOC(cap->r.rCCCS,n); if (n >= LARGE_OBJECT_THRESHOLD/sizeof(W_)) { - W_ req_blocks = (W_)BLOCK_ROUND_UP(n*sizeof(W_)) / BLOCK_SIZE; + // The largest number of words such that + // the computation of req_blocks will not overflow. + W_ max_words = (HS_WORD_MAX & ~(BLOCK_SIZE-1)) / sizeof(W_); + W_ req_blocks; + + if (n > max_words) + req_blocks = HS_WORD_MAX; // signal overflow below + else + req_blocks = (W_)BLOCK_ROUND_UP(n*sizeof(W_)) / BLOCK_SIZE; // Attempting to allocate an object larger than maxHeapSize // should definitely be disallowed. (bug #1791) diff --git a/rts/win32/libHSbase.def b/rts/win32/libHSbase.def index 119237b652..8140528c70 100644 --- a/rts/win32/libHSbase.def +++ b/rts/win32/libHSbase.def @@ -40,5 +40,4 @@ EXPORTS base_ControlziExceptionziBase_nonTermination_closure base_ControlziExceptionziBase_nestedAtomically_closure - - + base_GHCziEventziThread_blockedOnBadFD_closure diff --git a/settings.in b/settings.in index 9f9654c689..1bcb4aebc9 100644 --- a/settings.in +++ b/settings.in @@ -2,6 +2,8 @@ ("C compiler command", "@SettingsCCompilerCommand@"), ("C compiler flags", "@SettingsCCompilerFlags@"), ("C compiler link flags", "@SettingsCCompilerLinkFlags@"), + ("Haskell CPP command","@SettingsHaskellCPPCommand@"), + ("Haskell CPP flags","@SettingsHaskellCPPFlags@"), ("ld command", "@SettingsLdCommand@"), ("ld flags", "@SettingsLdFlags@"), ("ld supports compact unwind", "@LdHasNoCompactUnwind@"), @@ -1,5 +1,6 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl +use warnings; use strict; use Cwd; use English; @@ -15,7 +16,6 @@ my $verbose = 2; my $try_to_resume = 0; my $ignore_failure = 0; my $checked_out_flag = 0; # NOT the opposite of bare_flag (describes remote repo state) -my $get_mode; my $bare_flag = ""; # NOT the opposite of checked_out_flag (describes local repo state) my %tags; @@ -69,16 +69,6 @@ sub parsePackages { } } -sub tryReadFile { - my $filename = shift; - my @lines; - - open (FH, $filename) or return ""; - @lines = <FH>; - close FH; - return join('', @lines); -} - sub message { if ($verbose >= 2) { print "@_\n"; @@ -158,11 +148,11 @@ sub readgit { sub configure_repository { my $localpath = shift; - &git($localpath, "config", "--local", "core.ignorecase", "true"); + &git($localpath, "config", "core.ignorecase", "true"); my $autocrlf = &readgitline($localpath, 'config', '--get', 'core.autocrlf'); if ($autocrlf eq "true") { - &git($localpath, "config", "--local", "core.autocrlf", "false"); + &git($localpath, "config", "core.autocrlf", "false"); &git($localpath, "reset", "--hard"); } } @@ -181,14 +171,14 @@ sub getrepo { my $branch = &readgitline($git_dir, "rev-parse", "--abbrev-ref", "HEAD"); die "Bad branch: $branch" unless $branch =~ m!^[a-zA-Z][a-zA-Z0-9./-]*$!; - my $remote = &readgitline($git_dir, "config", "branch.$branch.remote"); + my $remote = &readgitline($git_dir, "config", "--get", "branch.$branch.remote"); if ($remote eq "") { # remotes are not mandatory for branches (e.g. 
not recorded by default for bare repos) $remote = "origin"; } die "Bad remote: $remote" unless $remote =~ m!^[a-zA-Z][a-zA-Z0-9./-]*$!; - $repo = &readgitline($git_dir, "config", "remote.$remote.url"); + $repo = &readgitline($git_dir, "config", "--get", "remote.$remote.url"); } my $repo_base; @@ -203,11 +193,7 @@ sub getrepo { # --checked-out is needed if you want to use a checked-out repo # over SSH or HTTP - if ($checked_out_flag) { - $checked_out_tree = 1; - } else { - $checked_out_tree = 0; - } + $checked_out_tree = $checked_out_flag; # Don't drop the last part of the path if specified with -r, as # it expects repos of the form: @@ -252,6 +238,7 @@ sub gitall { my $tag; my $remotepath; my $line; + my $repo_is_submodule; my $branch_name; my $subcommand; @@ -321,11 +308,13 @@ sub gitall { for $line (@packages) { $tag = $$line{"tag"}; + if ($tags{$tag} == 0) { + next; + } + # Use the "remote" structure for bare git repositories $localpath = ($bare_flag) ? $$line{"remotepath"} : $$line{"localpath"}; - $remotepath = ($checked_out_tree) ? - $$line{"localpath"} : $$line{"remotepath"}; if (!$started) { if ($start_repo eq $localpath) { @@ -342,6 +331,19 @@ sub gitall { close RESUME; rename "resume.tmp", "resume"; + $repo_is_submodule = $$line{"remotepath"} eq "-"; + + if ($checked_out_tree) { + $remotepath = $$line{"localpath"}; + } + elsif ($repo_is_submodule) { + $remotepath = &readgitline(".", 'config', '-f', '.gitmodules', '--get', "submodule.$localpath.url"); + $remotepath =~ s/\.\.\///; + } + else { + $remotepath = $$line{"remotepath"}; + } + # We can't create directories on GitHub, so we translate # "packages/foo" into "package-foo". if ($is_github_repo) { @@ -352,15 +354,7 @@ sub gitall { $path = "$repo_base/$remotepath"; if ($command eq "get") { - next if $remotepath eq "-"; # "git submodule init/update" will get this later - - # Skip any repositories we have not included the tag for - if (not defined($tags{$tag})) { - $tags{$tag} = 0; - } - if ($tags{$tag} == 0) { - next; - } + next if $repo_is_submodule; # "git submodule init/update" will get this later if (-d $localpath) { warning("$localpath already present; omitting") @@ -380,8 +374,8 @@ sub gitall { my $git_repo_present = 1 if -e "$localpath/.git" || ($bare_flag && -d "$localpath"); if (not $git_repo_present) { - if ($tag eq "") { - die "Required repo $localpath is missing"; + if ($tag eq "-") { + die "Required repo $localpath is missing. Please first run './sync-all get'.\n"; } else { message "== $localpath repo not present; skipping"; @@ -400,7 +394,7 @@ sub gitall { } elsif ($command eq "check_submodules") { # If we have a submodule then check whether it is up-to-date - if ($remotepath eq "-") { + if ($repo_is_submodule) { my %remote_heads; message "== Checking sub-module $localpath"; @@ -433,14 +427,14 @@ sub gitall { # to push to them then you need to use a special command, as # described on # http://ghc.haskell.org/trac/ghc/wiki/Repositories/Upstream - if ($remotepath ne "-") { + if (!$repo_is_submodule) { &git($localpath, "push", @args); } } elsif ($command eq "pull") { my $realcmd; my @realargs; - if ($remotepath eq "-") { + if ($repo_is_submodule) { # Only fetch for the submodules. "git submodule update" # will take care of making us point to the right commit. 
$realcmd = "fetch"; @@ -472,30 +466,15 @@ sub gitall { } elsif ($command eq "remote") { my @scm_args; - my $rpath; $ignore_failure = 1; - if ($remotepath eq '-') { - $rpath = "$localpath.git"; # N.B.: $localpath lacks the .git suffix - if ($localpath =~ m!^(?:libraries|utils)/!) { - # FIXME: This is just a simple heuristic to - # infer the remotepath for Git submodules. A - # proper solution would require to parse the - # .gitmodules file to obtain the actual - # localpath<->remotepath mapping. - $rpath =~ s!^(?:libraries|utils)/!packages/!; - } - $rpath = "$repo_base/$rpath"; - } else { - $rpath = $path; - } if ($subcommand eq 'add') { - @scm_args = ("remote", "add", $branch_name, $rpath); + @scm_args = ("remote", "add", $branch_name, $path); } elsif ($subcommand eq 'rm') { @scm_args = ("remote", "rm", $branch_name); } elsif ($subcommand eq 'set-branches') { @scm_args = ("remote", "set-branches", $branch_name); } elsif ($subcommand eq 'set-url') { - @scm_args = ("remote", "set-url", $branch_name, $rpath); + @scm_args = ("remote", "set-url", $branch_name, $path); } &git($localpath, @scm_args, @args); } @@ -539,7 +518,7 @@ sub gitall { elsif ($command eq "compare") { # Don't compare the subrepos; it doesn't work properly as # they aren't on a branch. - next if $remotepath eq "-"; + next if $repo_is_submodule; my $compareto; if ($#args eq -1) { @@ -587,14 +566,14 @@ sub gitInitSubmodules { my $submodulespaths = &readgit(".", "config", "--get-regexp", "^submodule[.].*[.]url"); # if we came from github, change the urls appropriately - while ($submodulespaths =~ m!^(submodule.(?:libraries|utils)/[a-zA-Z0-9]+.url) ($GITHUB)/ghc/packages/([a-zA-Z0-9]+).git$!gm) { + while ($submodulespaths =~ m!^(submodule.(?:libraries/|utils/)?[a-zA-Z0-9-]+.url) ($GITHUB)/ghc/packages/([a-zA-Z0-9-]+).git$!gm) { &git(".", "config", $1, "$2/ghc/packages-$3"); } # if we came from a local repository, grab our submodules from their # checkouts over there, if they exist. if ($repo_local) { - while ($submodulespaths =~ m!^(submodule.((?:libraries|utils)/[a-zA-Z0-9]+).url) .*$!gm) { + while ($submodulespaths =~ m!^(submodule.((?:libraries/|utils/)?[a-zA-Z0-9-]+).url) .*$!gm) { if (-e "$repo_base/$2/.git") { &git(".", "config", $1, "$repo_base/$2"); } @@ -603,9 +582,7 @@ sub gitInitSubmodules { } sub checkCurrentBranchIsMaster { - my $branch = `git symbolic-ref HEAD`; - $branch =~ s/refs\/heads\///; - $branch =~ s/\n//; + my $branch = &readgitline(".", "rev-parse", "--abbrev-ref", "HEAD"); if ($branch !~ /master/) { print "\nWarning: You are trying to 'pull' while on branch '$branch'.\n" @@ -620,8 +597,7 @@ sub help my $tags = join ' ', sort (grep !/^-$/, keys %tags); - # Get the built in help - my $help = <<END; + print <<END; Usage: ./sync-all [-q] [-s] [--ignore-failure] [-r repo] [--checked-out] [--bare] @@ -789,30 +765,9 @@ sync-all *ignores* the defaultrepo of all repos other than the root one. So the remote repos must be laid out in one of the two formats given by <local-path> and <remote-path> in the file 'packages'. -Available package-tags are: -END +Available package-tags are: $tags - # Collect all the tags in the packages file - my %available_tags; - open IN, "< packages.conf" - or open IN, "< packages" # clashes with packages directory when using --bare - or die "Can't open packages file (or packages.conf)"; - while (<IN>) { - chomp; - if (/^([^# ]+) +(?:([^ ]+) +)?([^ ]+) +([^ ]+)/) { - if (defined($2) && $2 ne "-") { - $available_tags{$2} = 1; - } - } - elsif (! 
/^(#.*)?$/) { - die "Bad line: $_"; - } - } - close IN; - - # Show those tags and the help text - my @available_tags = keys %available_tags; - print "$help@available_tags\n\n"; +END exit $exit; } @@ -845,9 +800,6 @@ sub main { elsif ($arg eq "--ignore-failure") { $ignore_failure = 1; } - elsif ($arg eq "--complete" || $arg eq "--partial") { - $get_mode = $arg; - } # Use --checked-out if the _remote_ repos are a checked-out tree, # rather than the master trees. elsif ($arg eq "--checked-out") { @@ -934,17 +886,7 @@ sub main { &gitInitSubmodules(@submodule_args); } - if ($command eq "pull") { - my $gitConfig = &tryReadFile(".git/config"); - if ($gitConfig !~ /submodule/) { - &gitInitSubmodules(@submodule_args); - } - } if ($command eq "get" or $command eq "pull") { - my $gitConfig = &tryReadFile(".git/config"); - if ($gitConfig !~ /submodule/) { - &gitInitSubmodules(@submodule_args); - } &git(".", "submodule", "update", @submodule_args); } } @@ -1001,8 +943,8 @@ END { ["libraries/Cabal", "c8ebd66a32865f72ae03ee0663c62df3d77f08fe"], ); for (@obsolete_dirs) { - my ($dir, $hash) = $_; - my $name = $dir =~ m!/([^/]+)$!; + my ($dir, $hash) = @$_; + my ($name) = $dir =~ m!/([^/]+)$!; message "== Checking for old $name repo"; if (-d "$dir/.git") { chdir($dir); @@ -1061,4 +1003,3 @@ EOF } main(@ARGV); - diff --git a/testsuite/.gitignore b/testsuite/.gitignore index 519d432273..efb9c1c204 100644 --- a/testsuite/.gitignore +++ b/testsuite/.gitignore @@ -22,13 +22,16 @@ Thumbs.db *.o *.o-boot *.pyc -*.normalised *.eventlog *.comp.std* -.hpc +.hpc/ +.hpc.*/ *.genscript +*.stderr.normalised +*.stderr-ghc.normalised +*.stdout.normalised *.interp.stdout *.interp.stderr *.run.stdout @@ -42,11 +45,9 @@ tests/**/*.ps *.dyn_hi *.dyn_hi-boot *o -*.hi *.dll *.dylib *.so -*.hpc.* *bindisttest_install___dir_bin_ghc.mk *bindisttest_install___dir_bin_ghc.exe.mk mk/ghcconfig_*_inplace_bin_ghc-stage2.mk @@ -56,1351 +57,1439 @@ mk/ghcconfig_*_inplace_bin_ghc-stage2.exe.mk # ----------------------------------------------------------------------------- # specific generated files +/mk/ghc-config +/tests/annotations/should_compile/th/build_make +/tests/annotations/should_run/Config.hs +/tests/annotations/should_run/annrun01 +/tests/array/should_run/arr001 +/tests/array/should_run/arr002 +/tests/array/should_run/arr003 +/tests/array/should_run/arr004 +/tests/array/should_run/arr005 +/tests/array/should_run/arr006 +/tests/array/should_run/arr007 +/tests/array/should_run/arr008 +/tests/array/should_run/arr009 +/tests/array/should_run/arr010 +/tests/array/should_run/arr011 +/tests/array/should_run/arr012 +/tests/array/should_run/arr013 +/tests/array/should_run/arr014 +/tests/array/should_run/arr015 +/tests/array/should_run/arr016 +/tests/array/should_run/arr017 +/tests/array/should_run/arr018 +/tests/array/should_run/arr019 +/tests/array/should_run/arr020 +/tests/arrows/should_run/T3822 +/tests/arrows/should_run/arrowrun001 +/tests/arrows/should_run/arrowrun002 +/tests/arrows/should_run/arrowrun003 +/tests/arrows/should_run/arrowrun004 +/tests/boxy/T2193 +/tests/cabal/1750.hs +/tests/cabal/1750.out +/tests/cabal/T1750.hs +/tests/cabal/T1750.out +/tests/cabal/cabal01/dist/ +/tests/cabal/cabal01/install/ +/tests/cabal/cabal01/local.db/ +/tests/cabal/cabal01/setup +/tests/cabal/cabal03/Setup +/tests/cabal/cabal03/p/dist/ +/tests/cabal/cabal03/q/dist/ +/tests/cabal/cabal03/tmp.d/ +/tests/cabal/cabal04/Setup +/tests/cabal/cabal04/dist/ +/tests/cabal/cabal04/err +/tests/cabal/local01.package.conf/ +/tests/cabal/local03.package.conf/ 
+/tests/cabal/local04.package.conf/ +/tests/cabal/local05a.package.conf/ +/tests/cabal/local05b.package.conf/ +/tests/cabal/local06.package.conf/ +/tests/cabal/local1750.package.conf/ +/tests/cabal/localT1750.package.conf/ +/tests/cabal/localshadow1.package.conf/ +/tests/cabal/localshadow2.package.conf/ +/tests/cabal/package.conf.ghcpkg02/ +/tests/cabal/shadow.hs +/tests/cabal/shadow1.out +/tests/cabal/shadow2.out +/tests/cabal/shadow3.out +/tests/callarity/perf/T3924 +/tests/callarity/should_run/StrictLet +/tests/callarity/unittest/CallArity1 +/tests/codeGen/should_compile/2578 +/tests/codeGen/should_compile/T2578 +/tests/codeGen/should_gen_asm/memcpy-unroll-conprop.s +/tests/codeGen/should_gen_asm/memcpy-unroll.s +/tests/codeGen/should_gen_asm/memcpy.s +/tests/codeGen/should_gen_asm/memset-unroll.s +/tests/codeGen/should_run/1852 +/tests/codeGen/should_run/1861 +/tests/codeGen/should_run/2080 +/tests/codeGen/should_run/2838 +/tests/codeGen/should_run/3207 +/tests/codeGen/should_run/3561 +/tests/codeGen/should_run/3677 +/tests/codeGen/should_run/4441 +/tests/codeGen/should_run/5129 +/tests/codeGen/should_run/5149 +/tests/codeGen/should_run/5626 +/tests/codeGen/should_run/5747 +/tests/codeGen/should_run/5785 +/tests/codeGen/should_run/6146 +/tests/codeGen/should_run/CopySmallArray +/tests/codeGen/should_run/CopySmallArrayStressTest +/tests/codeGen/should_run/SizeOfSmallArray +/tests/codeGen/should_run/StaticArraySize +/tests/codeGen/should_run/StaticByteArraySize +/tests/codeGen/should_run/T1852 +/tests/codeGen/should_run/T1861 +/tests/codeGen/should_run/T2080 +/tests/codeGen/should_run/T2838 +/tests/codeGen/should_run/T3207 +/tests/codeGen/should_run/T3561 +/tests/codeGen/should_run/T3677 +/tests/codeGen/should_run/T4441 +/tests/codeGen/should_run/T5129 +/tests/codeGen/should_run/T5149 +/tests/codeGen/should_run/T5626 +/tests/codeGen/should_run/T5747 +/tests/codeGen/should_run/T5785 +/tests/codeGen/should_run/T5900 +/tests/codeGen/should_run/T6084 +/tests/codeGen/should_run/T6146 +/tests/codeGen/should_run/T7163 +/tests/codeGen/should_run/T7319 +/tests/codeGen/should_run/T7361 +/tests/codeGen/should_run/T7600 +/tests/codeGen/should_run/T7953 +/tests/codeGen/should_run/T8103 +/tests/codeGen/should_run/T8256 +/tests/codeGen/should_run/T9001 +/tests/codeGen/should_run/Word2Float64 +/tests/codeGen/should_run/cgrun001 +/tests/codeGen/should_run/cgrun002 +/tests/codeGen/should_run/cgrun003 +/tests/codeGen/should_run/cgrun004 +/tests/codeGen/should_run/cgrun005 +/tests/codeGen/should_run/cgrun006 +/tests/codeGen/should_run/cgrun007 +/tests/codeGen/should_run/cgrun008 +/tests/codeGen/should_run/cgrun009 +/tests/codeGen/should_run/cgrun010 +/tests/codeGen/should_run/cgrun011 +/tests/codeGen/should_run/cgrun012 +/tests/codeGen/should_run/cgrun013 +/tests/codeGen/should_run/cgrun014 +/tests/codeGen/should_run/cgrun015 +/tests/codeGen/should_run/cgrun016 +/tests/codeGen/should_run/cgrun017 +/tests/codeGen/should_run/cgrun018 +/tests/codeGen/should_run/cgrun019 +/tests/codeGen/should_run/cgrun020 +/tests/codeGen/should_run/cgrun021 +/tests/codeGen/should_run/cgrun022 +/tests/codeGen/should_run/cgrun024 +/tests/codeGen/should_run/cgrun026 +/tests/codeGen/should_run/cgrun027 +/tests/codeGen/should_run/cgrun028 +/tests/codeGen/should_run/cgrun031 +/tests/codeGen/should_run/cgrun032 +/tests/codeGen/should_run/cgrun033 +/tests/codeGen/should_run/cgrun034 +/tests/codeGen/should_run/cgrun035 +/tests/codeGen/should_run/cgrun036 +/tests/codeGen/should_run/cgrun037 +/tests/codeGen/should_run/cgrun038 
+/tests/codeGen/should_run/cgrun039 +/tests/codeGen/should_run/cgrun040 +/tests/codeGen/should_run/cgrun043 +/tests/codeGen/should_run/cgrun044 +/tests/codeGen/should_run/cgrun045 +/tests/codeGen/should_run/cgrun046 +/tests/codeGen/should_run/cgrun047 +/tests/codeGen/should_run/cgrun048 +/tests/codeGen/should_run/cgrun049 +/tests/codeGen/should_run/cgrun050 +/tests/codeGen/should_run/cgrun051 +/tests/codeGen/should_run/cgrun052 +/tests/codeGen/should_run/cgrun053 +/tests/codeGen/should_run/cgrun054 +/tests/codeGen/should_run/cgrun055 +/tests/codeGen/should_run/cgrun056 +/tests/codeGen/should_run/cgrun057 +/tests/codeGen/should_run/cgrun058 +/tests/codeGen/should_run/cgrun059 +/tests/codeGen/should_run/cgrun060 +/tests/codeGen/should_run/cgrun061 +/tests/codeGen/should_run/cgrun062 +/tests/codeGen/should_run/cgrun063 +/tests/codeGen/should_run/cgrun064 +/tests/codeGen/should_run/cgrun065 +/tests/codeGen/should_run/cgrun066 +/tests/codeGen/should_run/cgrun067 +/tests/codeGen/should_run/cgrun068 +/tests/codeGen/should_run/cgrun069 +/tests/codeGen/should_run/cgrun070 +/tests/codeGen/should_run/cgrun071 +/tests/codeGen/should_run/cgrun072 +/tests/codeGen/should_run/setByteArray +/tests/concurrent/T2317/T2317 +/tests/concurrent/prog001/concprog001 +/tests/concurrent/prog002/concprog002 +/tests/concurrent/prog002/concprog002.aux +/tests/concurrent/prog002/concprog002.hp +/tests/concurrent/prog002/concprog002.ps +/tests/concurrent/prog003/concprog003 +/tests/concurrent/should_run/1980 +/tests/concurrent/should_run/2910 +/tests/concurrent/should_run/2910a +/tests/concurrent/should_run/3279 +/tests/concurrent/should_run/3429 +/tests/concurrent/should_run/367 +/tests/concurrent/should_run/367_letnoescape +/tests/concurrent/should_run/4030 +/tests/concurrent/should_run/4811 +/tests/concurrent/should_run/4813 +/tests/concurrent/should_run/5238 +/tests/concurrent/should_run/5421 +/tests/concurrent/should_run/5558 +/tests/concurrent/should_run/5611 +/tests/concurrent/should_run/5866 +/tests/concurrent/should_run/T1980 +/tests/concurrent/should_run/T2910 +/tests/concurrent/should_run/T2910a +/tests/concurrent/should_run/T3279 +/tests/concurrent/should_run/T3429 +/tests/concurrent/should_run/T367 +/tests/concurrent/should_run/T367_letnoescape +/tests/concurrent/should_run/T4030 +/tests/concurrent/should_run/T4811 +/tests/concurrent/should_run/T4813 +/tests/concurrent/should_run/T5238 +/tests/concurrent/should_run/T5421 +/tests/concurrent/should_run/T5558 +/tests/concurrent/should_run/T5611 +/tests/concurrent/should_run/T5866 +/tests/concurrent/should_run/T7970 +/tests/concurrent/should_run/AtomicPrimops +/tests/concurrent/should_run/allowinterrupt001 +/tests/concurrent/should_run/async001 +/tests/concurrent/should_run/compareAndSwap +/tests/concurrent/should_run/conc001 +/tests/concurrent/should_run/conc002 +/tests/concurrent/should_run/conc003 +/tests/concurrent/should_run/conc004 +/tests/concurrent/should_run/conc006 +/tests/concurrent/should_run/conc007 +/tests/concurrent/should_run/conc008 +/tests/concurrent/should_run/conc009 +/tests/concurrent/should_run/conc010 +/tests/concurrent/should_run/conc012 +/tests/concurrent/should_run/conc013 +/tests/concurrent/should_run/conc014 +/tests/concurrent/should_run/conc015 +/tests/concurrent/should_run/conc015a +/tests/concurrent/should_run/conc016 +/tests/concurrent/should_run/conc017 +/tests/concurrent/should_run/conc017a +/tests/concurrent/should_run/conc018 +/tests/concurrent/should_run/conc019 +/tests/concurrent/should_run/conc020 
+/tests/concurrent/should_run/conc021 +/tests/concurrent/should_run/conc022 +/tests/concurrent/should_run/conc023 +/tests/concurrent/should_run/conc024 +/tests/concurrent/should_run/conc025 +/tests/concurrent/should_run/conc026 +/tests/concurrent/should_run/conc027 +/tests/concurrent/should_run/conc028 +/tests/concurrent/should_run/conc029 +/tests/concurrent/should_run/conc030 +/tests/concurrent/should_run/conc031 +/tests/concurrent/should_run/conc032 +/tests/concurrent/should_run/conc033 +/tests/concurrent/should_run/conc034 +/tests/concurrent/should_run/conc035 +/tests/concurrent/should_run/conc036 +/tests/concurrent/should_run/conc037 +/tests/concurrent/should_run/conc038 +/tests/concurrent/should_run/conc039 +/tests/concurrent/should_run/conc040 +/tests/concurrent/should_run/conc041 +/tests/concurrent/should_run/conc042 +/tests/concurrent/should_run/conc043 +/tests/concurrent/should_run/conc044 +/tests/concurrent/should_run/conc045 +/tests/concurrent/should_run/conc051 +/tests/concurrent/should_run/conc058 +/tests/concurrent/should_run/conc059 +/tests/concurrent/should_run/conc064 +/tests/concurrent/should_run/conc065 +/tests/concurrent/should_run/conc066 +/tests/concurrent/should_run/conc067 +/tests/concurrent/should_run/conc068 +/tests/concurrent/should_run/conc069 +/tests/concurrent/should_run/conc069a +/tests/concurrent/should_run/conc070 +/tests/concurrent/should_run/conc071 +/tests/concurrent/should_run/conc072 +/tests/concurrent/should_run/conc073 +/tests/concurrent/should_run/foreignInterruptible +/tests/concurrent/should_run/mask001 +/tests/concurrent/should_run/mask002 +/tests/concurrent/should_run/numsparks001 +/tests/concurrent/should_run/readMVar1 +/tests/concurrent/should_run/readMVar2 +/tests/concurrent/should_run/readMVar3 +/tests/concurrent/should_run/setnumcapabilities001 +/tests/concurrent/should_run/throwto001 +/tests/concurrent/should_run/throwto002 +/tests/concurrent/should_run/throwto003 +/tests/concurrent/should_run/tryReadMVar1 +/tests/concurrent/should_run/tryReadMVar2 +/tests/cpranal/should_run/CPRRepeat +/tests/deSugar/should_run/DsLambdaCase +/tests/deSugar/should_run/DsMultiWayIf +/tests/deSugar/should_run/T246 +/tests/deSugar/should_run/T3126 +/tests/deSugar/should_run/T3382 +/tests/deSugar/should_run/T5742 +/tests/deSugar/should_run/T8952 +/tests/deSugar/should_run/dsrun001 +/tests/deSugar/should_run/dsrun002 +/tests/deSugar/should_run/dsrun003 +/tests/deSugar/should_run/dsrun004 +/tests/deSugar/should_run/dsrun005 +/tests/deSugar/should_run/dsrun006 +/tests/deSugar/should_run/dsrun007 +/tests/deSugar/should_run/dsrun008 +/tests/deSugar/should_run/dsrun009 +/tests/deSugar/should_run/dsrun010 +/tests/deSugar/should_run/dsrun011 +/tests/deSugar/should_run/dsrun012 +/tests/deSugar/should_run/dsrun013 +/tests/deSugar/should_run/dsrun014 +/tests/deSugar/should_run/dsrun015 +/tests/deSugar/should_run/dsrun016 +/tests/deSugar/should_run/dsrun017 +/tests/deSugar/should_run/dsrun018 +/tests/deSugar/should_run/dsrun019 +/tests/deSugar/should_run/dsrun020 +/tests/deSugar/should_run/dsrun021 +/tests/deSugar/should_run/dsrun022 +/tests/deSugar/should_run/dsrun023 +/tests/deSugar/should_run/mc01 +/tests/deSugar/should_run/mc02 +/tests/deSugar/should_run/mc03 +/tests/deSugar/should_run/mc04 +/tests/deSugar/should_run/mc05 +/tests/deSugar/should_run/mc06 +/tests/deSugar/should_run/mc07 +/tests/deSugar/should_run/mc08 +/tests/deriving/should_run/T2529 +/tests/deriving/should_run/T4136 +/tests/deriving/should_run/T4528a +/tests/deriving/should_run/T5041 
+/tests/deriving/should_run/T5628 +/tests/deriving/should_run/T5712 +/tests/deriving/should_run/T7931 +/tests/deriving/should_run/T8280 +/tests/deriving/should_run/drvrun-foldable1 +/tests/deriving/should_run/drvrun-functor1 +/tests/deriving/should_run/drvrun001 +/tests/deriving/should_run/drvrun002 +/tests/deriving/should_run/drvrun003 +/tests/deriving/should_run/drvrun004 +/tests/deriving/should_run/drvrun005 +/tests/deriving/should_run/drvrun006 +/tests/deriving/should_run/drvrun007 +/tests/deriving/should_run/drvrun008 +/tests/deriving/should_run/drvrun009 +/tests/deriving/should_run/drvrun010 +/tests/deriving/should_run/drvrun011 +/tests/deriving/should_run/drvrun012 +/tests/deriving/should_run/drvrun013 +/tests/deriving/should_run/drvrun014 +/tests/deriving/should_run/drvrun015 +/tests/deriving/should_run/drvrun016 +/tests/deriving/should_run/drvrun017 +/tests/deriving/should_run/drvrun018 +/tests/deriving/should_run/drvrun019 +/tests/deriving/should_run/drvrun020 +/tests/deriving/should_run/drvrun021 +/tests/dph/classes/dph-classes-copy-fast +/tests/dph/classes/dph-classes-fast +/tests/dph/classes/dph-classes-vseg-fast +/tests/dph/diophantine/dph-diophantine-copy-fast +/tests/dph/diophantine/dph-diophantine-copy-opt /tests/dph/diophantine/dph-diophantine-fast /tests/dph/diophantine/dph-diophantine-opt +/tests/dph/dotp/dph-dotp-copy-fast +/tests/dph/dotp/dph-dotp-copy-opt /tests/dph/dotp/dph-dotp-fast +/tests/dph/dotp/dph-dotp-opt +/tests/dph/dotp/dph-dotp-vseg-fast +/tests/dph/dotp/dph-dotp-vseg-opt +/tests/dph/nbody/dph-nbody-copy-fast +/tests/dph/nbody/dph-nbody-copy-opt +/tests/dph/nbody/dph-nbody-vseg-fast +/tests/dph/nbody/dph-nbody-vseg-opt +/tests/dph/primespj/dph-primespj-copy-fast +/tests/dph/primespj/dph-primespj-copy-opt /tests/dph/primespj/dph-primespj-fast +/tests/dph/primespj/dph-primespj-opt +/tests/dph/quickhull/dph-quickhull-copy-fast +/tests/dph/quickhull/dph-quickhull-copy-opt /tests/dph/quickhull/dph-quickhull-fast +/tests/dph/quickhull/dph-quickhull-opt +/tests/dph/quickhull/dph-quickhull-vseg-fast +/tests/dph/quickhull/dph-quickhull-vseg-opt /tests/dph/smvm/dph-smvm +/tests/dph/smvm/dph-smvm-copy +/tests/dph/smvm/dph-smvm-vseg /tests/dph/sumnats/dph-sumnats +/tests/dph/sumnats/dph-sumnats-copy +/tests/dph/sumnats/dph-sumnats-vseg +/tests/dph/words/dph-words-copy-fast +/tests/dph/words/dph-words-copy-opt /tests/dph/words/dph-words-fast +/tests/dph/words/dph-words-opt +/tests/dph/words/dph-words-vseg-fast +/tests/dph/words/dph-words-vseg-opt +/tests/driver/1959/E.hs +/tests/driver/1959/prog +/tests/driver/3674_pre +/tests/driver/437/Test +/tests/driver/437/Test2 +/tests/driver/5313 +/tests/driver/A012.ooo +/tests/driver/A013.xhi +/tests/driver/A061a.s +/tests/driver/A061b.s +/tests/driver/A064.hspp +/tests/driver/A065.hspp +/tests/driver/A066.tmp +/tests/driver/A067.tmp +/tests/driver/A070.s +/tests/driver/A071.tmp +/tests/driver/B022/C.ooo +/tests/driver/B023/C.xhi +/tests/driver/B024a/ +/tests/driver/B062d/ +/tests/driver/B062e/ +/tests/driver/F018a.obj.018 +/tests/driver/F018a_stub.obj.018 +/tests/driver/Hello062a.hs +/tests/driver/Hello062b.hs +/tests/driver/Hello062c.hs +/tests/driver/T1959/E.hs +/tests/driver/T1959/prog +/tests/driver/T3007/A/Setup +/tests/driver/T3007/A/dist/ +/tests/driver/T3007/B/Setup +/tests/driver/T3007/B/dist/ +/tests/driver/T3007/package.conf +/tests/driver/T3389 +/tests/driver/T3674_pre +/tests/driver/T437/Test +/tests/driver/T437/Test2 +/tests/driver/T4437 +/tests/driver/T5147/B.hs +/tests/driver/T5198dump/ +/tests/driver/T5313 
+/tests/driver/T5584/A.hi-boot +/tests/driver/T5584_out/ +/tests/driver/T703 +/tests/driver/T706.hs +/tests/driver/T7060dump/ +/tests/driver/T7373/package.conf +/tests/driver/T7373/pkg/Setup +/tests/driver/T7373/pkg/dist/ +/tests/driver/T7835/Test +/tests/driver/T8526/A.inc +/tests/driver/T8602/t8602.sh +/tests/driver/Test.081b +/tests/driver/Test.081b.hs +/tests/driver/Test_081a +/tests/driver/Test_081a.hs +/tests/driver/depend200 +/tests/driver/dynHelloWorld +/tests/driver/dynamicToo/A001.dyn_hi +/tests/driver/dynamicToo/A001.dyn_o +/tests/driver/dynamicToo/A002.dyn_hi +/tests/driver/dynamicToo/A002.dyn_o +/tests/driver/dynamicToo/A003.dyn_hi +/tests/driver/dynamicToo/A003.dyn_o +/tests/driver/dynamicToo/B001.dyn_hi +/tests/driver/dynamicToo/B001.dyn_o +/tests/driver/dynamicToo/B002.dyn_hi +/tests/driver/dynamicToo/B002.dyn_o +/tests/driver/dynamicToo/C001.dyn_hi +/tests/driver/dynamicToo/C001.dyn_o +/tests/driver/dynamicToo/C002.dyn_hi +/tests/driver/dynamicToo/C002.dyn_o +/tests/driver/dynamicToo/d001 +/tests/driver/dynamicToo/dynamicToo004/Setup +/tests/driver/dynamicToo/dynamicToo004/local.package.conf/ +/tests/driver/dynamicToo/dynamicToo004/pkg1/dist/ +/tests/driver/dynamicToo/dynamicToo004/pkg1dyn/dist/ +/tests/driver/dynamicToo/dynamicToo004/pkg2/dist/ +/tests/driver/dynamicToo/dynamicToo004/progstatic +/tests/driver/dynamicToo/s001 +/tests/driver/dynamic_flags_001/C +/tests/driver/hello062a +/tests/driver/hello062b +/tests/driver/hello062c +/tests/driver/hello062d +/tests/driver/hello062e +/tests/driver/objc/objc-hi +/tests/driver/objc/objcpp-hi +/tests/driver/out019/ +/tests/driver/recomp001/B.hs +/tests/driver/recomp001/C +/tests/driver/recomp003/Data/ +/tests/driver/recomp003/err +/tests/driver/recomp004/MainX +/tests/driver/recomp004/MainX.hs +/tests/driver/recomp004/c.c +/tests/driver/recomp005/C.hs +/tests/driver/recomp006/B.hs +/tests/driver/recomp006/err +/tests/driver/recomp006/out +/tests/driver/recomp007/Setup +/tests/driver/recomp007/a1/dist/ +/tests/driver/recomp007/a2/dist/ +/tests/driver/recomp007/b/dist/ +/tests/driver/recomp007/local.package.conf/ +/tests/driver/recomp008/A.hs +/tests/driver/recomp008/prog /tests/driver/recomp009/Main /tests/driver/recomp009/Sub.hs +/tests/driver/recomp010/Main +/tests/driver/recomp010/X.hs +/tests/driver/recomp011/A.hsinc +/tests/driver/recomp011/B.hsinc +/tests/driver/recomp011/Main +/tests/driver/recomp012/Foo.hs +/tests/driver/recomp012/Main +/tests/driver/recomp012/Main.hs +/tests/driver/recomp012/MyBool.hs +/tests/driver/rtsOpts +/tests/driver/rtsopts002 +/tests/driver/spacesInArgs +/tests/driver/stub017/ +/tests/driver/stub028/ +/tests/driver/stub035/ +/tests/driver/stub045/ +/tests/driver/withRtsOpts +/tests/driver/withRtsOpts.out +/tests/dynlibs/T3807-load +/tests/dynlibs/T3807test.so +/tests/dynlibs/T5373A +/tests/dynlibs/T5373B +/tests/dynlibs/T5373C +/tests/dynlibs/T5373D +/tests/ext-core/T7239.hcr +/tests/ffi/should_run/1288 +/tests/ffi/should_run/1679 +/tests/ffi/should_run/2276 +/tests/ffi/should_run/2469 +/tests/ffi/should_run/2594 +/tests/ffi/should_run/2917a +/tests/ffi/should_run/4038 +/tests/ffi/should_run/4221 +/tests/ffi/should_run/5402 +/tests/ffi/should_run/5594 +/tests/ffi/should_run/7170 +/tests/ffi/should_run/Capi_Ctype_001 +/tests/ffi/should_run/Capi_Ctype_001.hs +/tests/ffi/should_run/Capi_Ctype_002 +/tests/ffi/should_run/Capi_Ctype_A_001.hs +/tests/ffi/should_run/Capi_Ctype_A_002.hs +/tests/ffi/should_run/T1288 +/tests/ffi/should_run/T1679 +/tests/ffi/should_run/T2276 +/tests/ffi/should_run/T2469 
+/tests/ffi/should_run/T2594 +/tests/ffi/should_run/T2917a +/tests/ffi/should_run/T4012 +/tests/ffi/should_run/T4038 +/tests/ffi/should_run/T4221 +/tests/ffi/should_run/T5402 +/tests/ffi/should_run/T5594 +/tests/ffi/should_run/T7170 +/tests/ffi/should_run/T8083 +/tests/ffi/should_run/capi_value +/tests/ffi/should_run/fed001 +/tests/ffi/should_run/ffi001 +/tests/ffi/should_run/ffi002 +/tests/ffi/should_run/ffi003 +/tests/ffi/should_run/ffi005 +/tests/ffi/should_run/ffi006 +/tests/ffi/should_run/ffi007 +/tests/ffi/should_run/ffi008 +/tests/ffi/should_run/ffi009 +/tests/ffi/should_run/ffi010 +/tests/ffi/should_run/ffi011 +/tests/ffi/should_run/ffi013 +/tests/ffi/should_run/ffi014 +/tests/ffi/should_run/ffi015 +/tests/ffi/should_run/ffi016 +/tests/ffi/should_run/ffi017 +/tests/ffi/should_run/ffi018 +/tests/ffi/should_run/ffi019 +/tests/ffi/should_run/ffi020 +/tests/ffi/should_run/ffi021 +/tests/ffi/should_run/ffi022 +/tests/ffi/should_run/ffi_parsing_001 +/tests/ffi/should_run/fptr01 +/tests/ffi/should_run/fptr02 +/tests/ffi/should_run/fptrfail01 +/tests/gadt/CasePrune +/tests/gadt/Session +/tests/gadt/gadt2 +/tests/gadt/gadt23 +/tests/gadt/gadt4 +/tests/gadt/gadt5 +/tests/gadt/records +/tests/gadt/tc +/tests/gadt/type-rep +/tests/gadt/ubx-records +/tests/gadt/while +/tests/generics/GEq/GEq1 +/tests/generics/GEq/GEq2 +/tests/generics/GFunctor/GFunctor1 +/tests/generics/GMap/GMap1 +/tests/generics/GShow/GShow1 +/tests/generics/GenNewtype +/tests/generics/Uniplate/GUniplate1 +/tests/ghc-api/T4891/T4891 +/tests/ghc-api/T6145 +/tests/ghc-api/T7478/A +/tests/ghc-api/T7478/T7478 +/tests/ghc-api/T8628 +/tests/ghc-api/T8639_api +/tests/ghc-api/apirecomp001/myghc +/tests/ghc-api/dynCompileExpr/dynCompileExpr +/tests/ghc-api/ghcApi +/tests/ghci.debugger/scripts/break022/A.hs +/tests/ghci.debugger/scripts/break023/A.hs +/tests/ghci/linking/dir001/ +/tests/ghci/linking/dir002/ +/tests/ghci/linking/dir004/ +/tests/ghci/linking/dir005/ +/tests/ghci/linking/dir006/ +/tests/ghci/prog001/C.hs +/tests/ghci/prog001/D.hs +/tests/ghci/prog002/A.hs +/tests/ghci/prog003/D.hs +/tests/ghci/prog004/ctest.c +/tests/ghci/prog005/A.hs +/tests/ghci/prog006/Boot.hs +/tests/ghci/prog009/A.hs +/tests/ghci/prog012/Bar.hs +/tests/ghci/scripts/Ghci058.hs +/tests/ghci/scripts/T1914A.hs +/tests/ghci/scripts/T1914B.hs +/tests/ghci/scripts/T6106.hs +/tests/ghci/scripts/T6106_preproc +/tests/ghci/scripts/föøbàr1.hs +/tests/ghci/scripts/föøbàr2.hs +/tests/ghci/scripts/ghci027.hs +/tests/ghci/should_run/3171.err +/tests/hsc2hs/3837.hs +/tests/hsc2hs/T3837.hs +/tests/hsc2hs/hsc2hs001.hs +/tests/hsc2hs/hsc2hs002.hs +/tests/hsc2hs/hsc2hs003 +/tests/hsc2hs/hsc2hs003.hs +/tests/hsc2hs/hsc2hs004 +/tests/hsc2hs/hsc2hs004.hs +/tests/indexed-types/should_fail/T8129.trace +/tests/indexed-types/should_run/GMapAssoc +/tests/indexed-types/should_run/GMapTop +/tests/indexed-types/should_run/T2985 +/tests/indexed-types/should_run/T4235 +/tests/indexed-types/should_run/T5719 +/tests/lib/Concurrent/4876 +/tests/lib/Concurrent/ThreadDelay001 +/tests/lib/Data.ByteString/bytestring002 +/tests/lib/Data.ByteString/bytestring003 +/tests/lib/Data.ByteString/bytestring006 +/tests/lib/IO/2122 +/tests/lib/IO/2122-test +/tests/lib/IO/3307 +/tests/lib/IO/4808 +/tests/lib/IO/4808.test +/tests/lib/IO/4855 +/tests/lib/IO/4895 +/tests/lib/IO/IOError001 +/tests/lib/IO/IOError002 +/tests/lib/IO/T4113 +/tests/lib/IO/T4144 +/tests/lib/IO/chinese-file-* +/tests/lib/IO/chinese-name +/tests/lib/IO/concio002 +/tests/lib/IO/countReaders001 +/tests/lib/IO/countReaders001.txt 
+/tests/lib/IO/decodingerror001 +/tests/lib/IO/decodingerror002 +/tests/lib/IO/encoding001 +/tests/lib/IO/encoding001.utf16 +/tests/lib/IO/encoding001.utf16.utf16be +/tests/lib/IO/encoding001.utf16.utf16le +/tests/lib/IO/encoding001.utf16.utf32 +/tests/lib/IO/encoding001.utf16.utf32be +/tests/lib/IO/encoding001.utf16.utf32le +/tests/lib/IO/encoding001.utf16.utf8 +/tests/lib/IO/encoding001.utf16.utf8_bom +/tests/lib/IO/encoding001.utf16be +/tests/lib/IO/encoding001.utf16be.utf16 +/tests/lib/IO/encoding001.utf16be.utf16le +/tests/lib/IO/encoding001.utf16be.utf32 +/tests/lib/IO/encoding001.utf16be.utf32be +/tests/lib/IO/encoding001.utf16be.utf32le +/tests/lib/IO/encoding001.utf16be.utf8 +/tests/lib/IO/encoding001.utf16be.utf8_bom +/tests/lib/IO/encoding001.utf16le +/tests/lib/IO/encoding001.utf16le.utf16 +/tests/lib/IO/encoding001.utf16le.utf16be +/tests/lib/IO/encoding001.utf16le.utf32 +/tests/lib/IO/encoding001.utf16le.utf32be +/tests/lib/IO/encoding001.utf16le.utf32le +/tests/lib/IO/encoding001.utf16le.utf8 +/tests/lib/IO/encoding001.utf16le.utf8_bom +/tests/lib/IO/encoding001.utf32 +/tests/lib/IO/encoding001.utf32.utf16 +/tests/lib/IO/encoding001.utf32.utf16be +/tests/lib/IO/encoding001.utf32.utf16le +/tests/lib/IO/encoding001.utf32.utf32be +/tests/lib/IO/encoding001.utf32.utf32le +/tests/lib/IO/encoding001.utf32.utf8 +/tests/lib/IO/encoding001.utf32.utf8_bom +/tests/lib/IO/encoding001.utf32be +/tests/lib/IO/encoding001.utf32be.utf16 +/tests/lib/IO/encoding001.utf32be.utf16be +/tests/lib/IO/encoding001.utf32be.utf16le +/tests/lib/IO/encoding001.utf32be.utf32 +/tests/lib/IO/encoding001.utf32be.utf32le +/tests/lib/IO/encoding001.utf32be.utf8 +/tests/lib/IO/encoding001.utf32be.utf8_bom +/tests/lib/IO/encoding001.utf32le +/tests/lib/IO/encoding001.utf32le.utf16 +/tests/lib/IO/encoding001.utf32le.utf16be +/tests/lib/IO/encoding001.utf32le.utf16le +/tests/lib/IO/encoding001.utf32le.utf32 +/tests/lib/IO/encoding001.utf32le.utf32be +/tests/lib/IO/encoding001.utf32le.utf8 +/tests/lib/IO/encoding001.utf32le.utf8_bom +/tests/lib/IO/encoding001.utf8 +/tests/lib/IO/encoding001.utf8.utf16 +/tests/lib/IO/encoding001.utf8.utf16be +/tests/lib/IO/encoding001.utf8.utf16le +/tests/lib/IO/encoding001.utf8.utf32 +/tests/lib/IO/encoding001.utf8.utf32be +/tests/lib/IO/encoding001.utf8.utf32le +/tests/lib/IO/encoding001.utf8.utf8_bom +/tests/lib/IO/encoding001.utf8_bom +/tests/lib/IO/encoding001.utf8_bom.utf16 +/tests/lib/IO/encoding001.utf8_bom.utf16be +/tests/lib/IO/encoding001.utf8_bom.utf16le +/tests/lib/IO/encoding001.utf8_bom.utf32 +/tests/lib/IO/encoding001.utf8_bom.utf32be +/tests/lib/IO/encoding001.utf8_bom.utf32le +/tests/lib/IO/encoding001.utf8_bom.utf8 +/tests/lib/IO/encoding002 +/tests/lib/IO/encodingerror001 +/tests/lib/IO/environment001 +/tests/lib/IO/finalization001 +/tests/lib/IO/hClose001 +/tests/lib/IO/hClose001.tmp +/tests/lib/IO/hClose002 +/tests/lib/IO/hClose002.tmp +/tests/lib/IO/hClose003 +/tests/lib/IO/hDuplicateTo001 +/tests/lib/IO/hFileSize001 +/tests/lib/IO/hFileSize002 +/tests/lib/IO/hFileSize002.out +/tests/lib/IO/hFlush001 +/tests/lib/IO/hFlush001.out +/tests/lib/IO/hGetBuf001 +/tests/lib/IO/hGetBuffering001 +/tests/lib/IO/hGetChar001 +/tests/lib/IO/hGetLine001 +/tests/lib/IO/hGetLine002 +/tests/lib/IO/hGetLine003 +/tests/lib/IO/hGetPosn001 +/tests/lib/IO/hGetPosn001.out +/tests/lib/IO/hIsEOF001 +/tests/lib/IO/hIsEOF002 +/tests/lib/IO/hIsEOF002.out +/tests/lib/IO/hReady001 +/tests/lib/IO/hReady002 +/tests/lib/IO/hSeek001 +/tests/lib/IO/hSeek002 +/tests/lib/IO/hSeek003 
+/tests/lib/IO/hSeek004 +/tests/lib/IO/hSeek004.out +/tests/lib/IO/hSetBuffering002 +/tests/lib/IO/hSetBuffering003 +/tests/lib/IO/hSetBuffering004 +/tests/lib/IO/hSetEncoding001 +/tests/lib/IO/ioeGetErrorString001 +/tests/lib/IO/ioeGetFileName001 +/tests/lib/IO/ioeGetHandle001 +/tests/lib/IO/isEOF001 +/tests/lib/IO/misc001 +/tests/lib/IO/misc001.out +/tests/lib/IO/newline001 +/tests/lib/IO/newline001.out +/tests/lib/IO/openFile001 +/tests/lib/IO/openFile002 +/tests/lib/IO/openFile003 +/tests/lib/IO/openFile004 +/tests/lib/IO/openFile004.out +/tests/lib/IO/openFile005 +/tests/lib/IO/openFile005.out1 +/tests/lib/IO/openFile005.out2 +/tests/lib/IO/openFile006 +/tests/lib/IO/openFile006.out +/tests/lib/IO/openFile007 +/tests/lib/IO/openFile007.out +/tests/lib/IO/openFile008 +/tests/lib/IO/openTempFile001 +/tests/lib/IO/putStr001 +/tests/lib/IO/readFile001 +/tests/lib/IO/readFile001.out +/tests/lib/IO/readwrite001 +/tests/lib/IO/readwrite001.inout +/tests/lib/IO/readwrite002 +/tests/lib/IO/readwrite002.inout +/tests/lib/IO/readwrite003 +/tests/lib/IO/readwrite003.txt +/tests/lib/IO/tmp +/tests/lib/IOExts/echo001 +/tests/lib/IOExts/hGetBuf002 +/tests/lib/IOExts/hGetBuf003 +/tests/lib/IOExts/hPutBuf001 +/tests/lib/IOExts/hPutBuf002 +/tests/lib/IOExts/hPutBuf002.out +/tests/lib/IOExts/hTell001 +/tests/lib/IOExts/hTell002 +/tests/lib/IOExts/performGC001 +/tests/lib/IOExts/trace001 +/tests/lib/IORef/ +/tests/lib/Numeric/ +/tests/lib/OldException/OldException001 +/tests/lib/PrettyPrint/T3911 +/tests/lib/PrettyPrint/pp1 +/tests/lib/Text.Printf/1548 +/tests/lib/Time/T5430 +/tests/lib/Time/time002 +/tests/lib/Time/time003 +/tests/lib/Time/time004 +/tests/lib/exceptions/exceptions001 +/tests/lib/integer/IntegerConversionRules.simpl +/tests/lib/integer/fromToInteger.simpl +/tests/lib/integer/gcdInteger +/tests/lib/integer/integerBits +/tests/lib/integer/integerConstantFolding +/tests/lib/integer/integerConstantFolding.simpl +/tests/lib/integer/integerConversions +/tests/lib/integer/integerGmpInternals +/tests/lib/libposix/po003.out +/tests/lib/libposix/posix002 +/tests/lib/libposix/posix003 +/tests/lib/libposix/posix004 +/tests/lib/libposix/posix006 +/tests/lib/libposix/posix009 +/tests/lib/libposix/posix010 +/tests/lib/libposix/posix014 +/tests/lib/should_run/4006 +/tests/lib/should_run/addr001 +/tests/lib/should_run/array001 +/tests/lib/should_run/array001.data +/tests/lib/should_run/char001 +/tests/lib/should_run/char002 +/tests/lib/should_run/cstring001 +/tests/lib/should_run/dynamic001 +/tests/lib/should_run/dynamic002 +/tests/lib/should_run/dynamic003 +/tests/lib/should_run/dynamic004 +/tests/lib/should_run/dynamic005 +/tests/lib/should_run/enum01 +/tests/lib/should_run/enum02 +/tests/lib/should_run/enum03 +/tests/lib/should_run/enum04 +/tests/lib/should_run/exceptionsrun001 +/tests/lib/should_run/exceptionsrun002 +/tests/lib/should_run/length001 +/tests/lib/should_run/list001 +/tests/lib/should_run/list002 +/tests/lib/should_run/list003 +/tests/lib/should_run/memo001 +/tests/lib/should_run/memo002 +/tests/lib/should_run/rand001 +/tests/lib/should_run/ratio001 +/tests/lib/should_run/reads001 +/tests/lib/should_run/show001 +/tests/lib/should_run/stableptr001 +/tests/lib/should_run/stableptr003 +/tests/lib/should_run/stableptr004 +/tests/lib/should_run/stableptr005 +/tests/lib/should_run/text001 +/tests/lib/should_run/tup001 +/tests/lib/should_run/weak001 +/tests/mdo/should_compile/mdo001 +/tests/mdo/should_compile/mdo002 +/tests/mdo/should_compile/mdo003 +/tests/mdo/should_compile/mdo004 
+/tests/mdo/should_compile/mdo005 +/tests/mdo/should_fail/mdofail006 +/tests/mdo/should_run/mdorun001 +/tests/mdo/should_run/mdorun002 +/tests/mdo/should_run/mdorun003 +/tests/mdo/should_run/mdorun004 +/tests/mdo/should_run/mdorun005 +/tests/module/Mod145_A.mod146_hi +/tests/module/Mod145_A.mod146_o +/tests/module/Mod157_A.mod158_hi +/tests/module/Mod157_A.mod158_o +/tests/module/Mod157_B.mod158_hi +/tests/module/Mod157_B.mod158_o +/tests/module/Mod157_C.mod158_hi +/tests/module/Mod157_C.mod158_o +/tests/module/Mod157_D.mod158_hi +/tests/module/Mod157_D.mod158_o +/tests/module/Mod159_A.mod160_hi +/tests/module/Mod159_A.mod160_o +/tests/module/Mod159_B.mod160_hi +/tests/module/Mod159_B.mod160_o +/tests/module/Mod159_C.mod160_hi +/tests/module/Mod159_C.mod160_o +/tests/module/Mod159_D.mod160_hi +/tests/module/Mod159_D.mod160_o +/tests/module/Mod164_A.mod165_hi +/tests/module/Mod164_A.mod165_o +/tests/module/Mod164_A.mod166_hi +/tests/module/Mod164_A.mod166_o +/tests/module/Mod164_A.mod167_hi +/tests/module/Mod164_A.mod167_o +/tests/module/Mod164_B.mod165_hi +/tests/module/Mod164_B.mod165_o +/tests/module/Mod164_B.mod166_hi +/tests/module/Mod164_B.mod166_o +/tests/module/Mod164_B.mod167_hi +/tests/module/Mod164_B.mod167_o +/tests/module/mod166.mod166_hi +/tests/module/mod166.mod166_o +/tests/module/mod167.mod167_hi +/tests/module/mod167.mod167_o +/tests/module/mod175/test +/tests/module/mod175/test2 +/tests/module/mod179 +/tests/numeric/should_run/3676 +/tests/numeric/should_run/4381 +/tests/numeric/should_run/4383 +/tests/numeric/should_run/NumDecimals +/tests/numeric/should_run/T3676 +/tests/numeric/should_run/T4381 +/tests/numeric/should_run/T4383 +/tests/numeric/should_run/T5863 +/tests/numeric/should_run/T7014 +/tests/numeric/should_run/T7014.simpl +/tests/numeric/should_run/T7233 +/tests/numeric/should_run/T7689 +/tests/numeric/should_run/T8726 +/tests/numeric/should_run/add2 +/tests/numeric/should_run/arith001 +/tests/numeric/should_run/arith002 +/tests/numeric/should_run/arith003 +/tests/numeric/should_run/arith004 +/tests/numeric/should_run/arith005 +/tests/numeric/should_run/arith006 +/tests/numeric/should_run/arith007 +/tests/numeric/should_run/arith008 +/tests/numeric/should_run/arith009 +/tests/numeric/should_run/arith010 +/tests/numeric/should_run/arith011 +/tests/numeric/should_run/arith012 +/tests/numeric/should_run/arith013 +/tests/numeric/should_run/arith014 +/tests/numeric/should_run/arith015 +/tests/numeric/should_run/arith016 +/tests/numeric/should_run/arith017 +/tests/numeric/should_run/arith018 +/tests/numeric/should_run/arith019 +/tests/numeric/should_run/expfloat +/tests/numeric/should_run/mul2 +/tests/numeric/should_run/numrun009 +/tests/numeric/should_run/numrun010 +/tests/numeric/should_run/numrun011 +/tests/numeric/should_run/numrun012 +/tests/numeric/should_run/numrun013 +/tests/numeric/should_run/numrun014 +/tests/numeric/should_run/quotRem2 +/tests/optasm-log +/tests/optllvm-32-log +/tests/optllvm-log +/tests/overloadedlists/should_run/overloadedlistsrun01 +/tests/overloadedlists/should_run/overloadedlistsrun02 +/tests/overloadedlists/should_run/overloadedlistsrun03 +/tests/overloadedlists/should_run/overloadedlistsrun04 +/tests/overloadedlists/should_run/overloadedlistsrun05 +/tests/parser/should_compile/T5243 +/tests/parser/should_compile/T7476/Main.imports +/tests/parser/should_compile/T7476/T7476 +/tests/parser/should_run/BinaryLiterals0 +/tests/parser/should_run/BinaryLiterals1 +/tests/parser/should_run/BinaryLiterals2 
+/tests/parser/should_run/ParserMultiWayIf +/tests/parser/should_run/T1344 +/tests/parser/should_run/operator +/tests/parser/should_run/operator2 +/tests/parser/should_run/readRun001 +/tests/parser/should_run/readRun002 +/tests/parser/should_run/readRun003 +/tests/parser/should_run/readRun004 +/tests/parser/unicode/1744 +/tests/parser/unicode/T1744 +/tests/parser/unicode/utf8_024 +/tests/patsyn/should_run/eval +/tests/patsyn/should_run/ex-prov +/tests/patsyn/should_run/ex-prov-run +/tests/patsyn/should_run/match +/tests/perf/compiler/T1969.comp.stats +/tests/perf/compiler/T3064.comp.stats +/tests/perf/compiler/T3294.comp.stats +/tests/perf/compiler/T4801.comp.stats +/tests/perf/compiler/T5030.comp.stats +/tests/perf/compiler/T5321FD.comp.stats +/tests/perf/compiler/T5321Fun.comp.stats +/tests/perf/compiler/T5631.comp.stats +/tests/perf/compiler/T5642.comp.stats +/tests/perf/compiler/T5837.comp.stats +/tests/perf/compiler/T6048.comp.stats +/tests/perf/compiler/T783.comp.stats +/tests/perf/compiler/parsing001.comp.stats +/tests/perf/should_run/3586 +/tests/perf/should_run/3586.stats +/tests/perf/should_run/Conversions +/tests/perf/should_run/Conversions.stats +/tests/perf/should_run/InlineArrayAlloc +/tests/perf/should_run/InlineByteArrayAlloc +/tests/perf/should_run/InlineCloneArrayAlloc +/tests/perf/should_run/MethSharing +/tests/perf/should_run/MethSharing.stats +/tests/perf/should_run/T149_A +/tests/perf/should_run/T149_B +/tests/perf/should_run/T2902_A +/tests/perf/should_run/T2902_B +/tests/perf/should_run/T3245 +/tests/perf/should_run/T3586 +/tests/perf/should_run/T3736 +/tests/perf/should_run/T3736.speed.f32 +/tests/perf/should_run/T3738 +/tests/perf/should_run/T3738.stats +/tests/perf/should_run/T4267 +/tests/perf/should_run/T4321 +/tests/perf/should_run/T4474a +/tests/perf/should_run/T4474a.stats +/tests/perf/should_run/T4474b +/tests/perf/should_run/T4474b.stats +/tests/perf/should_run/T4474c +/tests/perf/should_run/T4474c.stats +/tests/perf/should_run/T4830 +/tests/perf/should_run/T4830.stats +/tests/perf/should_run/T4978 +/tests/perf/should_run/T4978.stats +/tests/perf/should_run/T5113 +/tests/perf/should_run/T5113.stats +/tests/perf/should_run/T5205 +/tests/perf/should_run/T5205.stats +/tests/perf/should_run/T5237 +/tests/perf/should_run/T5237.stats +/tests/perf/should_run/T5536 +/tests/perf/should_run/T5536.data +/tests/perf/should_run/T5536.stats +/tests/perf/should_run/T5549 +/tests/perf/should_run/T5549.stats +/tests/perf/should_run/T5949 +/tests/perf/should_run/T7257 +/tests/perf/should_run/T7257.stats +/tests/perf/should_run/T7436 +/tests/perf/should_run/T7436.stats +/tests/perf/should_run/T7507 +/tests/perf/should_run/T7619 +/tests/perf/should_run/T7797 +/tests/perf/should_run/T7850 +/tests/perf/should_run/T7954 +/tests/perf/should_run/T876 +/tests/perf/should_run/T9203 +/tests/perf/should_run/lazy-bs-alloc +/tests/perf/should_run/lazy-bs-alloc.stats +/tests/perf/should_run/speed.f32 +/tests/perf/space_leaks/T2762 +/tests/perf/space_leaks/T2762.stats +/tests/perf/space_leaks/T4018 +/tests/perf/space_leaks/T4334 +/tests/perf/space_leaks/T4334.stats +/tests/perf/space_leaks/space_leak_001 +/tests/perf/space_leaks/space_leak_001.stats +/tests/plugins/plugins01 +/tests/plugins/plugins05 +/tests/plugins/plugins06 /tests/plugins/simple-plugin/dist/ /tests/plugins/simple-plugin/install/ /tests/plugins/simple-plugin/local.package.conf +/tests/plugins/simple-plugin/pkg.plugins01/ +/tests/plugins/simple-plugin/pkg.plugins02/ +/tests/plugins/simple-plugin/pkg.plugins03/ 
/tests/plugins/simple-plugin/setup +/tests/polykinds/Freeman +/tests/polykinds/MonoidsFD +/tests/polykinds/MonoidsTF +/tests/polykinds/PolyKinds09 +/tests/polykinds/PolyKinds10 +/tests/primops/should_run/T6135 +/tests/primops/should_run/T7689 +/tests/profiling/should_compile/prof001 +/tests/profiling/should_compile/prof002 +/tests/profiling/should_run/2592 +/tests/profiling/should_run/2592.aux +/tests/profiling/should_run/2592.hp +/tests/profiling/should_run/2592.ps +/tests/profiling/should_run/5314 +/tests/profiling/should_run/5314.hp +/tests/profiling/should_run/5314.ps +/tests/profiling/should_run/T2552 +/tests/profiling/should_run/T2592 +/tests/profiling/should_run/T3001 +/tests/profiling/should_run/T3001-2 +/tests/profiling/should_run/T3001-2.hp +/tests/profiling/should_run/T3001-2.ps +/tests/profiling/should_run/T3001.hp +/tests/profiling/should_run/T3001.ps +/tests/profiling/should_run/T5314 +/tests/profiling/should_run/T5363 +/tests/profiling/should_run/T5559 +/tests/profiling/should_run/T680 +/tests/profiling/should_run/T949 +/tests/profiling/should_run/T949.hp +/tests/profiling/should_run/T949.ps +/tests/profiling/should_run/callstack001 +/tests/profiling/should_run/callstack002 +/tests/profiling/should_run/heapprof001 +/tests/profiling/should_run/heapprof001.hp +/tests/profiling/should_run/heapprof001.ps +/tests/profiling/should_run/ioprof +/tests/profiling/should_run/prof-doc-fib +/tests/profiling/should_run/prof-doc-last +/tests/profiling/should_run/profinline001 +/tests/profiling/should_run/scc001 +/tests/profiling/should_run/scc002 +/tests/profiling/should_run/scc003 +/tests/profiling/should_run/scc004 +/tests/profiling/should_run/test.bin +/tests/programs/10queens/10queens +/tests/programs/Queens/queens +/tests/programs/andre_monad/andre_monad +/tests/programs/andy_cherry/andy_cherry +/tests/programs/barton-mangler-bug/barton-mangler-bug +/tests/programs/cholewo-eval/cholewo-eval +/tests/programs/cvh_unboxing/cvh_unboxing +/tests/programs/fast2haskell/fast2haskell +/tests/programs/fun_insts/fun_insts /tests/programs/hs-boot/Main +/tests/programs/jl_defaults/jl_defaults +/tests/programs/joao-circular/joao-circular +/tests/programs/jq_readsPrec/jq_readsPrec +/tests/programs/jtod_circint/jtod_circint +/tests/programs/jules_xref/jules_xref +/tests/programs/jules_xref2/jules_xref2 +/tests/programs/launchbury/launchbury +/tests/programs/lennart_range/lennart_range +/tests/programs/lex/lex +/tests/programs/life_space_leak/life_space_leak +/tests/programs/north_array/north_array +/tests/programs/record_upd/record_upd +/tests/programs/rittri/rittri +/tests/programs/sanders_array/sanders_array +/tests/programs/seward-space-leak/seward-space-leak +/tests/programs/strict_anns/strict_anns +/tests/programs/thurston-modular-arith/thurston-modular-arith +/tests/quasiquotation/T4491/T4491 +/tests/quasiquotation/T7918 +/tests/rebindable/T5038 +/tests/rebindable/rebindable10 +/tests/rebindable/rebindable2 +/tests/rebindable/rebindable3 +/tests/rebindable/rebindable4 +/tests/rebindable/rebindable5 +/tests/rebindable/rebindable7 +/tests/rename/prog006/local.package.conf +/tests/rename/prog006/pkg.conf +/tests/rename/prog006/pwd +/tests/rename/should_compile/T1792_imports.imports +/tests/rename/should_compile/T4239.imports +/tests/rename/should_compile/T4240.imports +/tests/rename/should_compile/T5592 +/tests/rts/2047 +/tests/rts/2783 +/tests/rts/3236 +/tests/rts/3424 +/tests/rts/4059 +/tests/rts/4850 +/tests/rts/5250 +/tests/rts/5644/5644 +/tests/rts/5993 +/tests/rts/7087 +/tests/rts/T2047 
+/tests/rts/T2615 +/tests/rts/T2783 +/tests/rts/T3236 +/tests/rts/T3424 +/tests/rts/T4059 +/tests/rts/T4850 +/tests/rts/T5250 +/tests/rts/T5423 +/tests/rts/T5435*.so +/tests/rts/T5435*_o +/tests/rts/T5435_dyn_asm +/tests/rts/T5435_dyn_gcc +/tests/rts/T5435_v_asm +/tests/rts/T5435_v_gcc +/tests/rts/T5644/T5644 +/tests/rts/T5993 +/tests/rts/T6006 +/tests/rts/T7037 +/tests/rts/T7037_main +/tests/rts/T7040 +/tests/rts/T7087 +/tests/rts/T7160 +/tests/rts/T7227 +/tests/rts/T7227.stat +/tests/rts/T7636 +/tests/rts/T7815 +/tests/rts/T7919 +/tests/rts/T8035 +/tests/rts/T8124 +/tests/rts/T8209 +/tests/rts/T8242 +/tests/rts/T9045 +/tests/rts/T9078 +/tests/rts/atomicinc +/tests/rts/bug1010 +/tests/rts/derefnull +/tests/rts/divbyzero +/tests/rts/exec_signals +/tests/rts/exec_signals_child +/tests/rts/exec_signals_prepare +/tests/rts/ffishutdown +/tests/rts/libfoo_T2615.so +/tests/rts/linker_unload +/tests/rts/outofmem +/tests/rts/outofmem2 +/tests/rts/overflow1 +/tests/rts/overflow2 +/tests/rts/overflow3 +/tests/rts/prep.out +/tests/rts/return_mem_to_os +/tests/rts/rtsflags001 +/tests/rts/rtsflags002 +/tests/rts/stablename001 +/tests/rts/stack001 +/tests/rts/stack002 +/tests/rts/stack003 +/tests/rts/testblockalloc +/tests/rts/testwsdeque +/tests/rts/traceEvent +/tests/safeHaskell/check/Check04 /tests/safeHaskell/check/pkg01/dist/ /tests/safeHaskell/check/pkg01/install/ /tests/safeHaskell/check/pkg01/local.db/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly01/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly02/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly03/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly04/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly05/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly06/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly07/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly08/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly09/ +/tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly10/ +/tests/safeHaskell/check/pkg01/pdb.safePkg01/ /tests/safeHaskell/check/pkg01/setup -mk/ghc-config -tests/annotations/should_run/Config.hs -tests/annotations/should_run/annrun01 -tests/array/should_run/arr001 -tests/array/should_run/arr002 -tests/array/should_run/arr003 -tests/array/should_run/arr004 -tests/array/should_run/arr005 -tests/array/should_run/arr006 -tests/array/should_run/arr007 -tests/array/should_run/arr008 -tests/array/should_run/arr009 -tests/array/should_run/arr010 -tests/array/should_run/arr011 -tests/array/should_run/arr012 -tests/array/should_run/arr013 -tests/array/should_run/arr014 -tests/array/should_run/arr015 -tests/array/should_run/arr016 -tests/array/should_run/arr017 -tests/array/should_run/arr018 -tests/array/should_run/arr019 -tests/array/should_run/arr020 -tests/arrows/should_run/T3822 -tests/arrows/should_run/arrowrun001 -tests/arrows/should_run/arrowrun002 -tests/arrows/should_run/arrowrun003 -tests/arrows/should_run/arrowrun004 -tests/boxy/T2193 -tests/cabal/1750.hs -tests/cabal/1750.out -tests/cabal/T1750.hs -tests/cabal/T1750.out -tests/cabal/cabal01/dist/ -tests/cabal/cabal01/install/ -tests/cabal/cabal01/local.db/ -tests/cabal/cabal01/setup -tests/cabal/cabal03/Setup -tests/cabal/cabal03/p/dist/ -tests/cabal/cabal03/q/dist/ -tests/cabal/cabal03/tmp.d/ -tests/cabal/cabal04/Setup -tests/cabal/cabal04/dist/ -tests/cabal/cabal04/err -tests/cabal/local01.package.conf/ -tests/cabal/local03.package.conf/ -tests/cabal/local04.package.conf/ -tests/cabal/local05a.package.conf/ -tests/cabal/local05b.package.conf/ -tests/cabal/local06.package.conf/ 
-tests/cabal/local1750.package.conf/ -tests/cabal/localT1750.package.conf/ -tests/cabal/localshadow1.package.conf/ -tests/cabal/localshadow2.package.conf/ -tests/cabal/package.conf.ghcpkg02/ -tests/cabal/shadow.hs -tests/cabal/shadow1.out -tests/cabal/shadow2.out -tests/cabal/shadow3.out -tests/codeGen/should_compile/2578 -tests/codeGen/should_compile/T2578 -tests/codeGen/should_gen_asm/memcpy-unroll-conprop.s -tests/codeGen/should_gen_asm/memcpy-unroll.s -tests/codeGen/should_gen_asm/memcpy.s -tests/codeGen/should_gen_asm/memset-unroll.s -tests/codeGen/should_run/1852 -tests/codeGen/should_run/1861 -tests/codeGen/should_run/2080 -tests/codeGen/should_run/2838 -tests/codeGen/should_run/3207 -tests/codeGen/should_run/3561 -tests/codeGen/should_run/3677 -tests/codeGen/should_run/4441 -tests/codeGen/should_run/5129 -tests/codeGen/should_run/5149 -tests/codeGen/should_run/5626 -tests/codeGen/should_run/5747 -tests/codeGen/should_run/5785 -tests/codeGen/should_run/6146 -tests/codeGen/should_run/T1852 -tests/codeGen/should_run/T1861 -tests/codeGen/should_run/T2080 -tests/codeGen/should_run/T2838 -tests/codeGen/should_run/T3207 -tests/codeGen/should_run/T3561 -tests/codeGen/should_run/T3677 -tests/codeGen/should_run/T4441 -tests/codeGen/should_run/T5129 -tests/codeGen/should_run/T5149 -tests/codeGen/should_run/T5626 -tests/codeGen/should_run/T5747 -tests/codeGen/should_run/T5785 -tests/codeGen/should_run/T5900 -tests/codeGen/should_run/T6146 -tests/codeGen/should_run/T7163 -tests/codeGen/should_run/T7319 -tests/codeGen/should_run/T7361 -tests/codeGen/should_run/T7600 -tests/codeGen/should_run/Word2Float64 -tests/codeGen/should_run/cgrun001 -tests/codeGen/should_run/cgrun002 -tests/codeGen/should_run/cgrun003 -tests/codeGen/should_run/cgrun004 -tests/codeGen/should_run/cgrun005 -tests/codeGen/should_run/cgrun006 -tests/codeGen/should_run/cgrun007 -tests/codeGen/should_run/cgrun008 -tests/codeGen/should_run/cgrun009 -tests/codeGen/should_run/cgrun010 -tests/codeGen/should_run/cgrun011 -tests/codeGen/should_run/cgrun012 -tests/codeGen/should_run/cgrun013 -tests/codeGen/should_run/cgrun014 -tests/codeGen/should_run/cgrun015 -tests/codeGen/should_run/cgrun016 -tests/codeGen/should_run/cgrun017 -tests/codeGen/should_run/cgrun018 -tests/codeGen/should_run/cgrun019 -tests/codeGen/should_run/cgrun020 -tests/codeGen/should_run/cgrun021 -tests/codeGen/should_run/cgrun022 -tests/codeGen/should_run/cgrun024 -tests/codeGen/should_run/cgrun026 -tests/codeGen/should_run/cgrun027 -tests/codeGen/should_run/cgrun028 -tests/codeGen/should_run/cgrun031 -tests/codeGen/should_run/cgrun032 -tests/codeGen/should_run/cgrun033 -tests/codeGen/should_run/cgrun034 -tests/codeGen/should_run/cgrun035 -tests/codeGen/should_run/cgrun036 -tests/codeGen/should_run/cgrun037 -tests/codeGen/should_run/cgrun038 -tests/codeGen/should_run/cgrun039 -tests/codeGen/should_run/cgrun040 -tests/codeGen/should_run/cgrun043 -tests/codeGen/should_run/cgrun044 -tests/codeGen/should_run/cgrun045 -tests/codeGen/should_run/cgrun046 -tests/codeGen/should_run/cgrun047 -tests/codeGen/should_run/cgrun048 -tests/codeGen/should_run/cgrun049 -tests/codeGen/should_run/cgrun050 -tests/codeGen/should_run/cgrun051 -tests/codeGen/should_run/cgrun052 -tests/codeGen/should_run/cgrun053 -tests/codeGen/should_run/cgrun054 -tests/codeGen/should_run/cgrun055 -tests/codeGen/should_run/cgrun056 -tests/codeGen/should_run/cgrun057 -tests/codeGen/should_run/cgrun058 -tests/codeGen/should_run/cgrun059 -tests/codeGen/should_run/cgrun060 -tests/codeGen/should_run/cgrun061 
-tests/codeGen/should_run/cgrun062 -tests/codeGen/should_run/cgrun063 -tests/codeGen/should_run/cgrun064 -tests/codeGen/should_run/cgrun065 -tests/codeGen/should_run/cgrun066 -tests/codeGen/should_run/cgrun067 -tests/codeGen/should_run/cgrun068 -tests/codeGen/should_run/cgrun069 -tests/codeGen/should_run/cgrun070 -tests/codeGen/should_run/cgrun071 -tests/codeGen/should_run/setByteArray -tests/concurrent/prog001/concprog001 -tests/concurrent/prog002/concprog002 -tests/concurrent/prog002/concprog002.aux -tests/concurrent/prog002/concprog002.hp -tests/concurrent/prog002/concprog002.ps -tests/concurrent/prog003/concprog003 -tests/concurrent/should_run/1980 -tests/concurrent/should_run/2910 -tests/concurrent/should_run/2910a -tests/concurrent/should_run/3279 -tests/concurrent/should_run/3429 -tests/concurrent/should_run/367 -tests/concurrent/should_run/367_letnoescape -tests/concurrent/should_run/4030 -tests/concurrent/should_run/4811 -tests/concurrent/should_run/4813 -tests/concurrent/should_run/5238 -tests/concurrent/should_run/5421 -tests/concurrent/should_run/5558 -tests/concurrent/should_run/5611 -tests/concurrent/should_run/5866 -tests/concurrent/should_run/T1980 -tests/concurrent/should_run/T2910 -tests/concurrent/should_run/T2910a -tests/concurrent/should_run/T3279 -tests/concurrent/should_run/T3429 -tests/concurrent/should_run/T367 -tests/concurrent/should_run/T367_letnoescape -tests/concurrent/should_run/T4030 -tests/concurrent/should_run/T4811 -tests/concurrent/should_run/T4813 -tests/concurrent/should_run/T5238 -tests/concurrent/should_run/T5421 -tests/concurrent/should_run/T5558 -tests/concurrent/should_run/T5611 -tests/concurrent/should_run/T5866 -tests/concurrent/should_run/allowinterrupt001 -tests/concurrent/should_run/async001 -tests/concurrent/should_run/conc001 -tests/concurrent/should_run/conc002 -tests/concurrent/should_run/conc003 -tests/concurrent/should_run/conc004 -tests/concurrent/should_run/conc006 -tests/concurrent/should_run/conc007 -tests/concurrent/should_run/conc008 -tests/concurrent/should_run/conc009 -tests/concurrent/should_run/conc010 -tests/concurrent/should_run/conc012 -tests/concurrent/should_run/conc013 -tests/concurrent/should_run/conc014 -tests/concurrent/should_run/conc015 -tests/concurrent/should_run/conc015a -tests/concurrent/should_run/conc016 -tests/concurrent/should_run/conc017 -tests/concurrent/should_run/conc017a -tests/concurrent/should_run/conc018 -tests/concurrent/should_run/conc019 -tests/concurrent/should_run/conc020 -tests/concurrent/should_run/conc021 -tests/concurrent/should_run/conc022 -tests/concurrent/should_run/conc023 -tests/concurrent/should_run/conc024 -tests/concurrent/should_run/conc025 -tests/concurrent/should_run/conc026 -tests/concurrent/should_run/conc027 -tests/concurrent/should_run/conc028 -tests/concurrent/should_run/conc029 -tests/concurrent/should_run/conc030 -tests/concurrent/should_run/conc031 -tests/concurrent/should_run/conc032 -tests/concurrent/should_run/conc033 -tests/concurrent/should_run/conc034 -tests/concurrent/should_run/conc035 -tests/concurrent/should_run/conc036 -tests/concurrent/should_run/conc037 -tests/concurrent/should_run/conc038 -tests/concurrent/should_run/conc039 -tests/concurrent/should_run/conc040 -tests/concurrent/should_run/conc041 -tests/concurrent/should_run/conc042 -tests/concurrent/should_run/conc043 -tests/concurrent/should_run/conc044 -tests/concurrent/should_run/conc045 -tests/concurrent/should_run/conc051 -tests/concurrent/should_run/conc058 -tests/concurrent/should_run/conc059 
-tests/concurrent/should_run/conc064 -tests/concurrent/should_run/conc065 -tests/concurrent/should_run/conc066 -tests/concurrent/should_run/conc067 -tests/concurrent/should_run/conc068 -tests/concurrent/should_run/conc069 -tests/concurrent/should_run/conc069a -tests/concurrent/should_run/conc070 -tests/concurrent/should_run/conc071 -tests/concurrent/should_run/conc072 -tests/concurrent/should_run/conc073 -tests/concurrent/should_run/foreignInterruptible -tests/concurrent/should_run/mask001 -tests/concurrent/should_run/mask002 -tests/concurrent/should_run/numsparks001 -tests/concurrent/should_run/throwto001 -tests/concurrent/should_run/throwto002 -tests/concurrent/should_run/throwto003 -tests/deSugar/should_run/DsLambdaCase -tests/deSugar/should_run/DsMultiWayIf -tests/deSugar/should_run/T246 -tests/deSugar/should_run/T3126 -tests/deSugar/should_run/T3382 -tests/deSugar/should_run/T5742 -tests/deSugar/should_run/dsrun001 -tests/deSugar/should_run/dsrun002 -tests/deSugar/should_run/dsrun003 -tests/deSugar/should_run/dsrun004 -tests/deSugar/should_run/dsrun005 -tests/deSugar/should_run/dsrun006 -tests/deSugar/should_run/dsrun007 -tests/deSugar/should_run/dsrun008 -tests/deSugar/should_run/dsrun009 -tests/deSugar/should_run/dsrun010 -tests/deSugar/should_run/dsrun011 -tests/deSugar/should_run/dsrun012 -tests/deSugar/should_run/dsrun013 -tests/deSugar/should_run/dsrun014 -tests/deSugar/should_run/dsrun015 -tests/deSugar/should_run/dsrun016 -tests/deSugar/should_run/dsrun017 -tests/deSugar/should_run/dsrun018 -tests/deSugar/should_run/dsrun019 -tests/deSugar/should_run/dsrun020 -tests/deSugar/should_run/dsrun021 -tests/deSugar/should_run/dsrun022 -tests/deSugar/should_run/dsrun023 -tests/deSugar/should_run/mc01 -tests/deSugar/should_run/mc02 -tests/deSugar/should_run/mc03 -tests/deSugar/should_run/mc04 -tests/deSugar/should_run/mc05 -tests/deSugar/should_run/mc06 -tests/deSugar/should_run/mc07 -tests/deSugar/should_run/mc08 -tests/deriving/should_run/T2529 -tests/deriving/should_run/T4136 -tests/deriving/should_run/T4528a -tests/deriving/should_run/T5041 -tests/deriving/should_run/T5628 -tests/deriving/should_run/T5712 -tests/deriving/should_run/T7931 -tests/deriving/should_run/drvrun-foldable1 -tests/deriving/should_run/drvrun-functor1 -tests/deriving/should_run/drvrun001 -tests/deriving/should_run/drvrun002 -tests/deriving/should_run/drvrun003 -tests/deriving/should_run/drvrun004 -tests/deriving/should_run/drvrun005 -tests/deriving/should_run/drvrun006 -tests/deriving/should_run/drvrun007 -tests/deriving/should_run/drvrun008 -tests/deriving/should_run/drvrun009 -tests/deriving/should_run/drvrun010 -tests/deriving/should_run/drvrun011 -tests/deriving/should_run/drvrun012 -tests/deriving/should_run/drvrun013 -tests/deriving/should_run/drvrun014 -tests/deriving/should_run/drvrun015 -tests/deriving/should_run/drvrun016 -tests/deriving/should_run/drvrun017 -tests/deriving/should_run/drvrun018 -tests/deriving/should_run/drvrun019 -tests/deriving/should_run/drvrun020 -tests/deriving/should_run/drvrun021 -tests/dph/classes/dph-classes-copy-fast -tests/dph/classes/dph-classes-fast -tests/dph/classes/dph-classes-vseg-fast -tests/dph/diophantine/dph-diophantine-copy-fast -tests/dph/diophantine/dph-diophantine-copy-opt -tests/dph/dotp/dph-dotp-copy-fast -tests/dph/dotp/dph-dotp-copy-opt -tests/dph/dotp/dph-dotp-opt -tests/dph/dotp/dph-dotp-vseg-fast -tests/dph/dotp/dph-dotp-vseg-opt -tests/dph/nbody/dph-nbody-copy-fast -tests/dph/nbody/dph-nbody-copy-opt -tests/dph/nbody/dph-nbody-vseg-fast 
-tests/dph/nbody/dph-nbody-vseg-opt -tests/dph/primespj/dph-primespj-copy-fast -tests/dph/primespj/dph-primespj-copy-opt -tests/dph/primespj/dph-primespj-opt -tests/dph/quickhull/dph-quickhull-copy-fast -tests/dph/quickhull/dph-quickhull-copy-opt -tests/dph/quickhull/dph-quickhull-opt -tests/dph/quickhull/dph-quickhull-vseg-fast -tests/dph/quickhull/dph-quickhull-vseg-opt -tests/dph/smvm/dph-smvm-copy -tests/dph/smvm/dph-smvm-vseg -tests/dph/sumnats/dph-sumnats-copy -tests/dph/sumnats/dph-sumnats-vseg -tests/dph/words/dph-words-copy-fast -tests/dph/words/dph-words-copy-opt -tests/dph/words/dph-words-opt -tests/dph/words/dph-words-vseg-fast -tests/dph/words/dph-words-vseg-opt -tests/driver/1959/E.hs -tests/driver/1959/prog -tests/driver/3674_pre -tests/driver/437/Test -tests/driver/437/Test2 -tests/driver/5313 -tests/driver/A012.ooo -tests/driver/A013.xhi -tests/driver/A061a.s -tests/driver/A061b.s -tests/driver/A064.hspp -tests/driver/A065.hspp -tests/driver/A066.tmp -tests/driver/A067.tmp -tests/driver/A070.s -tests/driver/A071.tmp -tests/driver/B022/C.ooo -tests/driver/B023/C.xhi -tests/driver/B024a/ -tests/driver/B062d/ -tests/driver/B062e/ -tests/driver/F018a.obj.018 -tests/driver/F018a_stub.obj.018 -tests/driver/Hello062a.hs -tests/driver/Hello062b.hs -tests/driver/Hello062c.hs -tests/driver/T1959/E.hs -tests/driver/T1959/prog -tests/driver/T3007/A/Setup -tests/driver/T3007/A/dist/ -tests/driver/T3007/B/Setup -tests/driver/T3007/B/dist/ -tests/driver/T3007/package.conf -tests/driver/T3389 -tests/driver/T3674_pre -tests/driver/T437/Test -tests/driver/T437/Test2 -tests/driver/T4437 -tests/driver/T5147/B.hs -tests/driver/T5198dump/ -tests/driver/T5313 -tests/driver/T5584/A.hi-boot -tests/driver/T5584_out/ -tests/driver/T706.hs -tests/driver/T7060dump/ -tests/driver/T7373/package.conf -tests/driver/T7373/pkg/Setup -tests/driver/T7373/pkg/dist/ -tests/driver/Test.081b -tests/driver/Test.081b.hs -tests/driver/Test_081a -tests/driver/Test_081a.hs -tests/driver/depend200 -tests/driver/dynHelloWorld -tests/driver/dynamicToo/A001.dyn_hi -tests/driver/dynamicToo/A001.dyn_o -tests/driver/dynamicToo/A002.dyn_hi -tests/driver/dynamicToo/A002.dyn_o -tests/driver/dynamicToo/A003.dyn_hi -tests/driver/dynamicToo/A003.dyn_o -tests/driver/dynamicToo/B001.dyn_hi -tests/driver/dynamicToo/B001.dyn_o -tests/driver/dynamicToo/B002.dyn_hi -tests/driver/dynamicToo/B002.dyn_o -tests/driver/dynamicToo/C001.dyn_hi -tests/driver/dynamicToo/C001.dyn_o -tests/driver/dynamicToo/C002.dyn_hi -tests/driver/dynamicToo/C002.dyn_o -tests/driver/dynamicToo/d001 -tests/driver/dynamicToo/s001 -tests/driver/dynamicToo/dynamicToo004/Setup -tests/driver/dynamicToo/dynamicToo004/local.package.conf/ -tests/driver/dynamicToo/dynamicToo004/pkg1/dist/ -tests/driver/dynamicToo/dynamicToo004/pkg1dyn/dist/ -tests/driver/dynamicToo/dynamicToo004/pkg2/dist/ -tests/driver/dynamicToo/dynamicToo004/progstatic -tests/indexed-types/should_fail/T8129.trace -tests/rts/T5435*_o -tests/rts/T5435*.so -tests/driver/dynamic_flags_001/C -tests/driver/hello062a -tests/driver/hello062b -tests/driver/hello062c -tests/driver/hello062d -tests/driver/hello062e -tests/driver/objc/objc-hi -tests/driver/objc/objcpp-hi -tests/driver/out019/ -tests/driver/recomp001/B.hs -tests/driver/recomp001/C -tests/driver/recomp003/Data/ -tests/driver/recomp003/err -tests/driver/recomp004/MainX -tests/driver/recomp004/MainX.hs -tests/driver/recomp004/c.c -tests/driver/recomp005/C.hs -tests/driver/recomp006/B.hs -tests/driver/recomp006/err -tests/driver/recomp006/out 
-tests/driver/recomp007/Setup -tests/driver/recomp007/a1/dist/ -tests/driver/recomp007/a2/dist/ -tests/driver/recomp007/b/dist/ -tests/driver/recomp007/local.package.conf/ -tests/driver/recomp008/A.hs -tests/driver/recomp008/prog -tests/driver/recomp010/Main -tests/driver/recomp010/X.hs -tests/driver/recomp011/A.hsinc -tests/driver/recomp011/B.hsinc -tests/driver/recomp011/Main -tests/driver/recomp012/Foo.hs -tests/driver/recomp012/Main -tests/driver/recomp012/Main.hs -tests/driver/recomp012/MyBool.hs -tests/driver/rtsOpts -tests/driver/rtsopts002 -tests/driver/spacesInArgs -tests/driver/stub017/ -tests/driver/stub028/ -tests/driver/stub035/ -tests/driver/stub045/ -tests/driver/withRtsOpts -tests/driver/withRtsOpts.out -tests/dynlibs/T3807-load -tests/dynlibs/T3807test.so -tests/dynlibs/T5373A -tests/dynlibs/T5373B -tests/dynlibs/T5373C -tests/dynlibs/T5373D -tests/ext-core/T7239.hcr -tests/ffi/should_run/.hpc/ -tests/ffi/should_run/1288 -tests/ffi/should_run/1679 -tests/ffi/should_run/2276 -tests/ffi/should_run/2469 -tests/ffi/should_run/2594 -tests/ffi/should_run/2917a -tests/ffi/should_run/4038 -tests/ffi/should_run/4221 -tests/ffi/should_run/5402 -tests/ffi/should_run/5594 -tests/ffi/should_run/7170 -tests/ffi/should_run/Capi_Ctype_001 -tests/ffi/should_run/Capi_Ctype_001.hs -tests/ffi/should_run/Capi_Ctype_002 -tests/ffi/should_run/Capi_Ctype_A_001.hs -tests/ffi/should_run/Capi_Ctype_A_002.hs -tests/ffi/should_run/T1288 -tests/ffi/should_run/T1679 -tests/ffi/should_run/T2276 -tests/ffi/should_run/T2469 -tests/ffi/should_run/T2594 -tests/ffi/should_run/T2917a -tests/ffi/should_run/T4012 -tests/ffi/should_run/T4038 -tests/ffi/should_run/T4221 -tests/ffi/should_run/T5402 -tests/ffi/should_run/T5594 -tests/ffi/should_run/T7170 -tests/ffi/should_run/capi_value -tests/ffi/should_run/fed001 -tests/ffi/should_run/ffi001 -tests/ffi/should_run/ffi002 -tests/ffi/should_run/ffi003 -tests/ffi/should_run/ffi005 -tests/ffi/should_run/ffi006 -tests/ffi/should_run/ffi007 -tests/ffi/should_run/ffi008 -tests/ffi/should_run/ffi009 -tests/ffi/should_run/ffi010 -tests/ffi/should_run/ffi011 -tests/ffi/should_run/ffi013 -tests/ffi/should_run/ffi014 -tests/ffi/should_run/ffi015 -tests/ffi/should_run/ffi016 -tests/ffi/should_run/ffi017 -tests/ffi/should_run/ffi018 -tests/ffi/should_run/ffi019 -tests/ffi/should_run/ffi020 -tests/ffi/should_run/ffi021 -tests/ffi/should_run/ffi022 -tests/ffi/should_run/ffi_parsing_001 -tests/ffi/should_run/fptr01 -tests/ffi/should_run/fptr02 -tests/ffi/should_run/fptrfail01 -tests/gadt/CasePrune -tests/gadt/Session -tests/gadt/gadt2 -tests/gadt/gadt23 -tests/gadt/gadt4 -tests/gadt/gadt5 -tests/gadt/records -tests/gadt/tc -tests/gadt/type-rep -tests/gadt/ubx-records -tests/gadt/while -tests/generics/GEq/GEq1 -tests/generics/GEq/GEq2 -tests/generics/GFunctor/GFunctor1 -tests/generics/GMap/GMap1 -tests/generics/GShow/GShow1 -tests/generics/GenNewtype -tests/generics/Uniplate/GUniplate1 -tests/ghc-api/T4891/T4891 -tests/ghc-api/T7478/A -tests/ghc-api/T7478/T7478 -tests/ghc-api/apirecomp001/myghc -tests/ghc-api/dynCompileExpr/dynCompileExpr -tests/ghc-api/ghcApi -tests/ghci.debugger/scripts/break022/A.hs -tests/ghci.debugger/scripts/break023/A.hs -tests/ghci/linking/dir001/ -tests/ghci/linking/dir002/ -tests/ghci/linking/dir004/ -tests/ghci/linking/dir005/ -tests/ghci/linking/dir006/ -tests/ghci/prog001/C.hs -tests/ghci/prog001/D.hs -tests/ghci/prog002/A.hs -tests/ghci/prog003/D.hs -tests/ghci/prog004/ctest.c -tests/ghci/prog005/A.hs -tests/ghci/prog006/Boot.hs 
-tests/ghci/prog009/A.hs -tests/ghci/prog012/Bar.hs -tests/ghci/scripts/Ghci058.hs -tests/ghci/scripts/T1914A.hs -tests/ghci/scripts/T1914B.hs -tests/ghci/scripts/T6106.hs -tests/ghci/scripts/T6106_preproc -tests/ghci/scripts/ghci027.hs -tests/ghci/should_run/3171.err -tests/hsc2hs/3837.hs -tests/hsc2hs/T3837.hs -tests/hsc2hs/hsc2hs001.hs -tests/hsc2hs/hsc2hs002.hs -tests/hsc2hs/hsc2hs003 -tests/hsc2hs/hsc2hs003.hs -tests/hsc2hs/hsc2hs004 -tests/hsc2hs/hsc2hs004.hs -tests/indexed-types/should_run/GMapAssoc -tests/indexed-types/should_run/GMapTop -tests/indexed-types/should_run/T2985 -tests/indexed-types/should_run/T4235 -tests/indexed-types/should_run/T5719 -tests/lib/Concurrent/4876 -tests/lib/Concurrent/ThreadDelay001 -tests/lib/Data.ByteString/bytestring002 -tests/lib/Data.ByteString/bytestring003 -tests/lib/Data.ByteString/bytestring006 -tests/lib/IO/2122 -tests/lib/IO/2122-test -tests/lib/IO/3307 -tests/lib/IO/4808 -tests/lib/IO/4808.test -tests/lib/IO/4855 -tests/lib/IO/4895 -tests/lib/IO/IOError001 -tests/lib/IO/IOError002 -tests/lib/IO/T4113 -tests/lib/IO/T4144 -tests/lib/IO/chinese-file-* -tests/lib/IO/chinese-name -tests/lib/IO/concio002 -tests/lib/IO/countReaders001 -tests/lib/IO/countReaders001.txt -tests/lib/IO/decodingerror001 -tests/lib/IO/decodingerror002 -tests/lib/IO/encoding001 -tests/lib/IO/encoding001.utf16 -tests/lib/IO/encoding001.utf16.utf16be -tests/lib/IO/encoding001.utf16.utf16le -tests/lib/IO/encoding001.utf16.utf32 -tests/lib/IO/encoding001.utf16.utf32be -tests/lib/IO/encoding001.utf16.utf32le -tests/lib/IO/encoding001.utf16.utf8 -tests/lib/IO/encoding001.utf16.utf8_bom -tests/lib/IO/encoding001.utf16be -tests/lib/IO/encoding001.utf16be.utf16 -tests/lib/IO/encoding001.utf16be.utf16le -tests/lib/IO/encoding001.utf16be.utf32 -tests/lib/IO/encoding001.utf16be.utf32be -tests/lib/IO/encoding001.utf16be.utf32le -tests/lib/IO/encoding001.utf16be.utf8 -tests/lib/IO/encoding001.utf16be.utf8_bom -tests/lib/IO/encoding001.utf16le -tests/lib/IO/encoding001.utf16le.utf16 -tests/lib/IO/encoding001.utf16le.utf16be -tests/lib/IO/encoding001.utf16le.utf32 -tests/lib/IO/encoding001.utf16le.utf32be -tests/lib/IO/encoding001.utf16le.utf32le -tests/lib/IO/encoding001.utf16le.utf8 -tests/lib/IO/encoding001.utf16le.utf8_bom -tests/lib/IO/encoding001.utf32 -tests/lib/IO/encoding001.utf32.utf16 -tests/lib/IO/encoding001.utf32.utf16be -tests/lib/IO/encoding001.utf32.utf16le -tests/lib/IO/encoding001.utf32.utf32be -tests/lib/IO/encoding001.utf32.utf32le -tests/lib/IO/encoding001.utf32.utf8 -tests/lib/IO/encoding001.utf32.utf8_bom -tests/lib/IO/encoding001.utf32be -tests/lib/IO/encoding001.utf32be.utf16 -tests/lib/IO/encoding001.utf32be.utf16be -tests/lib/IO/encoding001.utf32be.utf16le -tests/lib/IO/encoding001.utf32be.utf32 -tests/lib/IO/encoding001.utf32be.utf32le -tests/lib/IO/encoding001.utf32be.utf8 -tests/lib/IO/encoding001.utf32be.utf8_bom -tests/lib/IO/encoding001.utf32le -tests/lib/IO/encoding001.utf32le.utf16 -tests/lib/IO/encoding001.utf32le.utf16be -tests/lib/IO/encoding001.utf32le.utf16le -tests/lib/IO/encoding001.utf32le.utf32 -tests/lib/IO/encoding001.utf32le.utf32be -tests/lib/IO/encoding001.utf32le.utf8 -tests/lib/IO/encoding001.utf32le.utf8_bom -tests/lib/IO/encoding001.utf8 -tests/lib/IO/encoding001.utf8.utf16 -tests/lib/IO/encoding001.utf8.utf16be -tests/lib/IO/encoding001.utf8.utf16le -tests/lib/IO/encoding001.utf8.utf32 -tests/lib/IO/encoding001.utf8.utf32be -tests/lib/IO/encoding001.utf8.utf32le -tests/lib/IO/encoding001.utf8.utf8_bom 
-tests/lib/IO/encoding001.utf8_bom -tests/lib/IO/encoding001.utf8_bom.utf16 -tests/lib/IO/encoding001.utf8_bom.utf16be -tests/lib/IO/encoding001.utf8_bom.utf16le -tests/lib/IO/encoding001.utf8_bom.utf32 -tests/lib/IO/encoding001.utf8_bom.utf32be -tests/lib/IO/encoding001.utf8_bom.utf32le -tests/lib/IO/encoding001.utf8_bom.utf8 -tests/lib/IO/encoding002 -tests/lib/IO/encodingerror001 -tests/lib/IO/environment001 -tests/lib/IO/finalization001 -tests/lib/IO/hClose001 -tests/lib/IO/hClose001.tmp -tests/lib/IO/hClose002 -tests/lib/IO/hClose002.tmp -tests/lib/IO/hClose003 -tests/lib/IO/hDuplicateTo001 -tests/lib/IO/hFileSize001 -tests/lib/IO/hFileSize002 -tests/lib/IO/hFileSize002.out -tests/lib/IO/hFlush001 -tests/lib/IO/hFlush001.out -tests/lib/IO/hGetBuf001 -tests/lib/IO/hGetBuffering001 -tests/lib/IO/hGetChar001 -tests/lib/IO/hGetLine001 -tests/lib/IO/hGetLine002 -tests/lib/IO/hGetLine003 -tests/lib/IO/hGetPosn001 -tests/lib/IO/hGetPosn001.out -tests/lib/IO/hIsEOF001 -tests/lib/IO/hIsEOF002 -tests/lib/IO/hIsEOF002.out -tests/lib/IO/hReady001 -tests/lib/IO/hReady002 -tests/lib/IO/hSeek001 -tests/lib/IO/hSeek002 -tests/lib/IO/hSeek003 -tests/lib/IO/hSeek004 -tests/lib/IO/hSeek004.out -tests/lib/IO/hSetBuffering002 -tests/lib/IO/hSetBuffering003 -tests/lib/IO/hSetBuffering004 -tests/lib/IO/hSetEncoding001 -tests/lib/IO/ioeGetErrorString001 -tests/lib/IO/ioeGetFileName001 -tests/lib/IO/ioeGetHandle001 -tests/lib/IO/isEOF001 -tests/lib/IO/misc001 -tests/lib/IO/misc001.out -tests/lib/IO/newline001 -tests/lib/IO/newline001.out -tests/lib/IO/openFile001 -tests/lib/IO/openFile002 -tests/lib/IO/openFile003 -tests/lib/IO/openFile004 -tests/lib/IO/openFile004.out -tests/lib/IO/openFile005 -tests/lib/IO/openFile005.out1 -tests/lib/IO/openFile005.out2 -tests/lib/IO/openFile006 -tests/lib/IO/openFile006.out -tests/lib/IO/openFile007 -tests/lib/IO/openFile007.out -tests/lib/IO/openFile008 -tests/lib/IO/openTempFile001 -tests/lib/IO/putStr001 -tests/lib/IO/readFile001 -tests/lib/IO/readFile001.out -tests/lib/IO/readwrite001 -tests/lib/IO/readwrite001.inout -tests/lib/IO/readwrite002 -tests/lib/IO/readwrite002.inout -tests/lib/IO/readwrite003 -tests/lib/IO/readwrite003.txt -tests/lib/IO/tmp -tests/lib/IOExts/echo001 -tests/lib/IOExts/hGetBuf002 -tests/lib/IOExts/hGetBuf003 -tests/lib/IOExts/hPutBuf001 -tests/lib/IOExts/hPutBuf002 -tests/lib/IOExts/hPutBuf002.out -tests/lib/IOExts/hTell001 -tests/lib/IOExts/hTell002 -tests/lib/IOExts/performGC001 -tests/lib/IOExts/trace001 -tests/lib/IORef/ -tests/lib/Numeric/ -tests/lib/OldException/OldException001 -tests/lib/PrettyPrint/T3911 -tests/lib/PrettyPrint/pp1 -tests/lib/Text.Printf/1548 -tests/lib/Time/T5430 -tests/lib/Time/time002 -tests/lib/Time/time003 -tests/lib/Time/time004 -tests/lib/exceptions/exceptions001 -tests/lib/integer/IntegerConversionRules.simpl -tests/lib/integer/fromToInteger.simpl -tests/lib/integer/gcdInteger -tests/lib/integer/integerBits -tests/lib/integer/integerConstantFolding -tests/lib/integer/integerConstantFolding.simpl -tests/lib/integer/integerConversions -tests/lib/integer/integerGmpInternals -tests/lib/libposix/po003.out -tests/lib/libposix/posix002 -tests/lib/libposix/posix003 -tests/lib/libposix/posix004 -tests/lib/libposix/posix006 -tests/lib/libposix/posix009 -tests/lib/libposix/posix010 -tests/lib/libposix/posix014 -tests/lib/should_run/4006 -tests/lib/should_run/addr001 -tests/lib/should_run/array001 -tests/lib/should_run/array001.data -tests/lib/should_run/char001 -tests/lib/should_run/char002 -tests/lib/should_run/cstring001 
-tests/lib/should_run/dynamic001 -tests/lib/should_run/dynamic002 -tests/lib/should_run/dynamic003 -tests/lib/should_run/dynamic004 -tests/lib/should_run/dynamic005 -tests/lib/should_run/enum01 -tests/lib/should_run/enum02 -tests/lib/should_run/enum03 -tests/lib/should_run/enum04 -tests/lib/should_run/exceptionsrun001 -tests/lib/should_run/exceptionsrun002 -tests/lib/should_run/length001 -tests/lib/should_run/list001 -tests/lib/should_run/list002 -tests/lib/should_run/list003 -tests/lib/should_run/memo001 -tests/lib/should_run/memo002 -tests/lib/should_run/rand001 -tests/lib/should_run/ratio001 -tests/lib/should_run/reads001 -tests/lib/should_run/show001 -tests/lib/should_run/stableptr001 -tests/lib/should_run/stableptr003 -tests/lib/should_run/stableptr004 -tests/lib/should_run/stableptr005 -tests/lib/should_run/text001 -tests/lib/should_run/tup001 -tests/lib/should_run/weak001 -tests/mdo/should_compile/mdo001 -tests/mdo/should_compile/mdo002 -tests/mdo/should_compile/mdo003 -tests/mdo/should_compile/mdo004 -tests/mdo/should_compile/mdo005 -tests/mdo/should_fail/mdofail006 -tests/mdo/should_run/mdorun001 -tests/mdo/should_run/mdorun002 -tests/mdo/should_run/mdorun003 -tests/mdo/should_run/mdorun004 -tests/mdo/should_run/mdorun005 -tests/module/Mod145_A.mod146_hi -tests/module/Mod145_A.mod146_o -tests/module/Mod157_A.mod158_hi -tests/module/Mod157_A.mod158_o -tests/module/Mod157_B.mod158_hi -tests/module/Mod157_B.mod158_o -tests/module/Mod157_C.mod158_hi -tests/module/Mod157_C.mod158_o -tests/module/Mod157_D.mod158_hi -tests/module/Mod157_D.mod158_o -tests/module/Mod159_A.mod160_hi -tests/module/Mod159_A.mod160_o -tests/module/Mod159_B.mod160_hi -tests/module/Mod159_B.mod160_o -tests/module/Mod159_C.mod160_hi -tests/module/Mod159_C.mod160_o -tests/module/Mod159_D.mod160_hi -tests/module/Mod159_D.mod160_o -tests/module/Mod164_A.mod165_hi -tests/module/Mod164_A.mod165_o -tests/module/Mod164_A.mod166_hi -tests/module/Mod164_A.mod166_o -tests/module/Mod164_A.mod167_hi -tests/module/Mod164_A.mod167_o -tests/module/Mod164_B.mod165_hi -tests/module/Mod164_B.mod165_o -tests/module/Mod164_B.mod166_hi -tests/module/Mod164_B.mod166_o -tests/module/Mod164_B.mod167_hi -tests/module/Mod164_B.mod167_o -tests/module/mod166.mod166_hi -tests/module/mod166.mod166_o -tests/module/mod167.mod167_hi -tests/module/mod167.mod167_o -tests/module/mod175/test -tests/module/mod175/test2 -tests/module/mod179 -tests/numeric/should_run/3676 -tests/numeric/should_run/4381 -tests/numeric/should_run/4383 -tests/numeric/should_run/T3676 -tests/numeric/should_run/T4381 -tests/numeric/should_run/T4383 -tests/numeric/should_run/T5863 -tests/numeric/should_run/T7014 -tests/numeric/should_run/T7014.simpl -tests/numeric/should_run/T7233 -tests/numeric/should_run/T7689 -tests/numeric/should_run/add2 -tests/numeric/should_run/arith001 -tests/numeric/should_run/arith002 -tests/numeric/should_run/arith003 -tests/numeric/should_run/arith004 -tests/numeric/should_run/arith005 -tests/numeric/should_run/arith006 -tests/numeric/should_run/arith007 -tests/numeric/should_run/arith008 -tests/numeric/should_run/arith009 -tests/numeric/should_run/arith010 -tests/numeric/should_run/arith011 -tests/numeric/should_run/arith012 -tests/numeric/should_run/arith013 -tests/numeric/should_run/arith014 -tests/numeric/should_run/arith015 -tests/numeric/should_run/arith016 -tests/numeric/should_run/arith017 -tests/numeric/should_run/arith018 -tests/numeric/should_run/arith019 -tests/numeric/should_run/expfloat -tests/numeric/should_run/mul2 
-tests/numeric/should_run/numrun009 -tests/numeric/should_run/numrun010 -tests/numeric/should_run/numrun011 -tests/numeric/should_run/numrun012 -tests/numeric/should_run/numrun013 -tests/numeric/should_run/numrun014 -tests/numeric/should_run/quotRem2 -tests/optasm-log -tests/optllvm-32-log -tests/optllvm-log -tests/overloadedlists/should_run/overloadedlistsrun01 -tests/overloadedlists/should_run/overloadedlistsrun02 -tests/overloadedlists/should_run/overloadedlistsrun03 -tests/overloadedlists/should_run/overloadedlistsrun04 -tests/overloadedlists/should_run/overloadedlistsrun05 -tests/parser/should_compile/T5243 -tests/parser/should_compile/T7476/Main.imports -tests/parser/should_compile/T7476/T7476 -tests/parser/should_run/ParserMultiWayIf -tests/parser/should_run/T1344 -tests/parser/should_run/operator -tests/parser/should_run/operator2 -tests/parser/should_run/readRun001 -tests/parser/should_run/readRun002 -tests/parser/should_run/readRun003 -tests/parser/should_run/readRun004 -tests/parser/unicode/1744 -tests/parser/unicode/T1744 -tests/parser/unicode/utf8_024 -tests/perf/compiler/T1969.comp.stats -tests/perf/compiler/T3064.comp.stats -tests/perf/compiler/T3294.comp.stats -tests/perf/compiler/T4801.comp.stats -tests/perf/compiler/T5030.comp.stats -tests/perf/compiler/T5321FD.comp.stats -tests/perf/compiler/T5321Fun.comp.stats -tests/perf/compiler/T5631.comp.stats -tests/perf/compiler/T5642.comp.stats -tests/perf/compiler/T5837.comp.stats -tests/perf/compiler/T6048.comp.stats -tests/perf/compiler/T783.comp.stats -tests/perf/compiler/parsing001.comp.stats -tests/perf/should_run/3586 -tests/perf/should_run/3586.stats -tests/perf/should_run/Conversions -tests/perf/should_run/Conversions.stats -tests/perf/should_run/MethSharing -tests/perf/should_run/MethSharing.stats -tests/perf/should_run/T149_A -tests/perf/should_run/T149_B -tests/perf/should_run/T2902_A -tests/perf/should_run/T2902_B -tests/perf/should_run/T3245 -tests/perf/should_run/T3586 -tests/perf/should_run/T3736 -tests/perf/should_run/T3736.speed.f32 -tests/perf/should_run/T3738 -tests/perf/should_run/T3738.stats -tests/perf/should_run/T4321 -tests/perf/should_run/T4474a -tests/perf/should_run/T4474a.stats -tests/perf/should_run/T4474b -tests/perf/should_run/T4474b.stats -tests/perf/should_run/T4474c -tests/perf/should_run/T4474c.stats -tests/perf/should_run/T4830 -tests/perf/should_run/T4830.stats -tests/perf/should_run/T4978 -tests/perf/should_run/T4978.stats -tests/perf/should_run/T5113 -tests/perf/should_run/T5113.stats -tests/perf/should_run/T5205 -tests/perf/should_run/T5205.stats -tests/perf/should_run/T5237 -tests/perf/should_run/T5237.stats -tests/perf/should_run/T5536 -tests/perf/should_run/T5536.data -tests/perf/should_run/T5536.stats -tests/perf/should_run/T5549 -tests/perf/should_run/T5549.stats -tests/perf/should_run/T7257 -tests/perf/should_run/T7257.stats -tests/perf/should_run/T7436 -tests/perf/should_run/T7436.stats -tests/perf/should_run/T7507 -tests/perf/should_run/T7797 -tests/perf/should_run/T876 -tests/perf/should_run/lazy-bs-alloc -tests/perf/should_run/lazy-bs-alloc.stats -tests/perf/should_run/speed.f32 -tests/perf/space_leaks/T2762 -tests/perf/space_leaks/T2762.stats -tests/perf/space_leaks/T4018 -tests/perf/space_leaks/T4334 -tests/perf/space_leaks/T4334.stats -tests/perf/space_leaks/space_leak_001 -tests/perf/space_leaks/space_leak_001.stats -tests/plugins/plugins01 -tests/plugins/plugins05 -tests/plugins/plugins06 -tests/plugins/simple-plugin/pkg.plugins01/ -tests/plugins/simple-plugin/pkg.plugins02/ 
-tests/plugins/simple-plugin/pkg.plugins03/ -tests/polykinds/Freeman -tests/polykinds/MonoidsFD -tests/polykinds/MonoidsTF -tests/polykinds/PolyKinds09 -tests/polykinds/PolyKinds10 -tests/profiling/should_compile/prof001 -tests/profiling/should_compile/prof002 -tests/profiling/should_run/2592 -tests/profiling/should_run/2592.aux -tests/profiling/should_run/2592.hp -tests/profiling/should_run/2592.ps -tests/profiling/should_run/5314 -tests/profiling/should_run/5314.hp -tests/profiling/should_run/5314.ps -tests/profiling/should_run/T2552 -tests/profiling/should_run/T2592 -tests/profiling/should_run/T3001 -tests/profiling/should_run/T3001-2 -tests/profiling/should_run/T3001-2.hp -tests/profiling/should_run/T3001-2.ps -tests/profiling/should_run/T3001.hp -tests/profiling/should_run/T3001.ps -tests/profiling/should_run/T5314 -tests/profiling/should_run/T5363 -tests/profiling/should_run/T5559 -tests/profiling/should_run/T680 -tests/profiling/should_run/T949 -tests/profiling/should_run/T949.hp -tests/profiling/should_run/T949.ps -tests/profiling/should_run/callstack001 -tests/profiling/should_run/callstack002 -tests/profiling/should_run/heapprof001 -tests/profiling/should_run/heapprof001.hp -tests/profiling/should_run/heapprof001.ps -tests/profiling/should_run/ioprof -tests/profiling/should_run/prof-doc-fib -tests/profiling/should_run/prof-doc-last -tests/profiling/should_run/profinline001 -tests/profiling/should_run/scc001 -tests/profiling/should_run/scc002 -tests/profiling/should_run/scc003 -tests/profiling/should_run/scc004 -tests/profiling/should_run/test.bin -tests/programs/10queens/10queens -tests/programs/Queens/queens -tests/programs/andre_monad/andre_monad -tests/programs/andy_cherry/andy_cherry -tests/programs/barton-mangler-bug/barton-mangler-bug -tests/programs/cholewo-eval/cholewo-eval -tests/programs/cvh_unboxing/cvh_unboxing -tests/programs/fast2haskell/fast2haskell -tests/programs/fun_insts/fun_insts -tests/programs/jl_defaults/jl_defaults -tests/programs/joao-circular/joao-circular -tests/programs/jq_readsPrec/jq_readsPrec -tests/programs/jtod_circint/jtod_circint -tests/programs/jules_xref/jules_xref -tests/programs/jules_xref2/jules_xref2 -tests/programs/launchbury/launchbury -tests/programs/lennart_range/lennart_range -tests/programs/lex/lex -tests/programs/life_space_leak/life_space_leak -tests/programs/north_array/north_array -tests/programs/record_upd/record_upd -tests/programs/rittri/rittri -tests/programs/sanders_array/sanders_array -tests/programs/seward-space-leak/seward-space-leak -tests/programs/strict_anns/strict_anns -tests/programs/thurston-modular-arith/thurston-modular-arith -tests/quasiquotation/T4491/T4491 -tests/rebindable/T5038 -tests/rebindable/rebindable10 -tests/rebindable/rebindable2 -tests/rebindable/rebindable3 -tests/rebindable/rebindable4 -tests/rebindable/rebindable5 -tests/rebindable/rebindable7 -tests/rename/prog006/local.package.conf -tests/rename/prog006/pkg.conf -tests/rename/prog006/pwd -tests/rename/should_compile/T1792_imports.imports -tests/rename/should_compile/T4239.imports -tests/rename/should_compile/T4240.imports -tests/rename/should_compile/T5592 -tests/rts/2047 -tests/rts/2783 -tests/rts/3236 -tests/rts/3424 -tests/rts/4059 -tests/rts/4850 -tests/rts/5250 -tests/rts/5644/5644 -tests/rts/5993 -tests/rts/7087 -tests/rts/T2047 -tests/rts/T2615 -tests/rts/T2783 -tests/rts/T3236 -tests/rts/T4059 -tests/rts/T4850 -tests/rts/T5250 -tests/rts/T5423 -tests/rts/T5644/T5644 -tests/rts/T5993 -tests/rts/T6006 -tests/rts/T7037 -tests/rts/T7037_main 
-tests/rts/T7040 -tests/rts/T7087 -tests/rts/T7160 -tests/rts/T7227 -tests/rts/T7227.stat -tests/rts/T7636 -tests/rts/T7815 -tests/rts/atomicinc -tests/rts/bug1010 -tests/rts/derefnull -tests/rts/divbyzero -tests/rts/exec_signals -tests/rts/exec_signals_child -tests/rts/exec_signals_prepare -tests/rts/ffishutdown -tests/rts/libfoo_T2615.so -tests/rts/outofmem -tests/rts/outofmem2 -tests/rts/prep.out -tests/rts/return_mem_to_os -tests/rts/rtsflags001 -tests/rts/rtsflags002 -tests/rts/stablename001 -tests/rts/stack001 -tests/rts/stack002 -tests/rts/stack003 -tests/rts/testblockalloc -tests/rts/testwsdeque -tests/rts/traceEvent -tests/safeHaskell/check/Check04 -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly01/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly02/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly03/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly04/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly05/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly06/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly07/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly08/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly09/ -tests/safeHaskell/check/pkg01/pdb.ImpSafeOnly10/ -tests/safeHaskell/check/pkg01/pdb.safePkg01/ -tests/safeHaskell/safeLanguage/SafeLang04 -tests/safeHaskell/safeLanguage/SafeLang05 -tests/safeHaskell/safeLanguage/SafeLang06 -tests/safeHaskell/safeLanguage/SafeLang09 -tests/safeHaskell/safeLanguage/SafeLang11 -tests/safeHaskell/safeLanguage/SafeLang13 -tests/safeHaskell/safeLanguage/SafeLang15 -tests/safeHaskell/unsafeLibs/BadImport02 -tests/simplCore/should_compile/T3055.simpl -tests/simplCore/should_compile/T4138.simpl -tests/simplCore/should_compile/T7702plugin/pkg.T7702/ -tests/simplCore/should_compile/T7796.prep -tests/simplCore/should_run/SeqRule -tests/simplCore/should_run/T2756 -tests/simplCore/should_run/T3403 -tests/simplCore/should_run/T3437 -tests/simplCore/should_run/T3591 -tests/simplCore/should_run/T3959 -tests/simplCore/should_run/T3972 -tests/simplCore/should_run/T3983 -tests/simplCore/should_run/T4814 -tests/simplCore/should_run/T5315 -tests/simplCore/should_run/T5441 -tests/simplCore/should_run/T5453 -tests/simplCore/should_run/T5587 -tests/simplCore/should_run/T5603 -tests/simplCore/should_run/T5625 -tests/simplCore/should_run/T5915 -tests/simplCore/should_run/T5920 -tests/simplCore/should_run/T5997 -tests/simplCore/should_run/T7101 -tests/simplCore/should_run/T7924 -tests/simplCore/should_run/simplrun001 -tests/simplCore/should_run/simplrun002 -tests/simplCore/should_run/simplrun003 -tests/simplCore/should_run/simplrun004 -tests/simplCore/should_run/simplrun005 -tests/simplCore/should_run/simplrun007 -tests/simplCore/should_run/simplrun008 -tests/simplCore/should_run/simplrun009 -tests/simplCore/should_run/simplrun010 -tests/stranal/should_run/T2756b -tests/stranal/should_run/T7649 -tests/stranal/should_run/strun001 -tests/stranal/should_run/strun002 -tests/stranal/should_run/strun003 -tests/stranal/should_run/strun004 -tests/th/T1835 -tests/th/T3572 -tests/th/T3920 -tests/th/T5379 -tests/th/T5410 -tests/th/T5555 -tests/th/T7064 -tests/th/T7910 -tests/th/TH_Depends -tests/th/TH_Depends_external.txt -tests/th/TH_StringPrimL -tests/th/TH_import_loop/ModuleA.hi-boot -tests/th/TH_import_loop/ModuleA.o-boot -tests/th/TH_lookupName -tests/th/TH_ppr1 -tests/th/TH_recover -tests/th/TH_repE2 -tests/th/TH_repGuardOutput -tests/th/TH_repPrimOutput -tests/th/TH_repPrimOutput2 -tests/th/TH_spliceE1 -tests/th/TH_spliceE4 -tests/th/TH_spliceE5 -tests/th/TH_spliceE5_prof 
-tests/th/TH_spliceViewPat/TH_spliceViewPat -tests/th/TH_unresolvedInfix -tests/th/TH_viewPatPrint -tests/th/TH_where -tests/typecheck/should_compile/tc159 -tests/typecheck/should_fail/T3468.o-boot -tests/typecheck/should_fail/tcfail149 -tests/typecheck/should_run/Defer01 -tests/typecheck/should_run/IPRun -tests/typecheck/should_run/T1624 -tests/typecheck/should_run/T1735 -tests/typecheck/should_run/T2722 -tests/typecheck/should_run/T3500a -tests/typecheck/should_run/T3500b -tests/typecheck/should_run/T3731 -tests/typecheck/should_run/T3731-short -tests/typecheck/should_run/T4809 -tests/typecheck/should_run/T5573a -tests/typecheck/should_run/T5573b -tests/typecheck/should_run/T5751 -tests/typecheck/should_run/T5759 -tests/typecheck/should_run/T5913 -tests/typecheck/should_run/T6117 -tests/typecheck/should_run/T7023 -tests/typecheck/should_run/T7126 -tests/typecheck/should_run/T7748 -tests/typecheck/should_run/T7861 -tests/typecheck/should_run/TcNullaryTC -tests/typecheck/should_run/church -tests/typecheck/should_run/mc17 -tests/typecheck/should_run/tcrun001 -tests/typecheck/should_run/tcrun002 -tests/typecheck/should_run/tcrun003 -tests/typecheck/should_run/tcrun004 -tests/typecheck/should_run/tcrun005 -tests/typecheck/should_run/tcrun006 -tests/typecheck/should_run/tcrun008 -tests/typecheck/should_run/tcrun009 -tests/typecheck/should_run/tcrun010 -tests/typecheck/should_run/tcrun011 -tests/typecheck/should_run/tcrun012 -tests/typecheck/should_run/tcrun013 -tests/typecheck/should_run/tcrun014 -tests/typecheck/should_run/tcrun015 -tests/typecheck/should_run/tcrun016 -tests/typecheck/should_run/tcrun017 -tests/typecheck/should_run/tcrun018 -tests/typecheck/should_run/tcrun019 -tests/typecheck/should_run/tcrun020 -tests/typecheck/should_run/tcrun021 -tests/typecheck/should_run/tcrun022 -tests/typecheck/should_run/tcrun023 -tests/typecheck/should_run/tcrun024 -tests/typecheck/should_run/tcrun025 -tests/typecheck/should_run/tcrun026 -tests/typecheck/should_run/tcrun027 -tests/typecheck/should_run/tcrun028 -tests/typecheck/should_run/tcrun029 -tests/typecheck/should_run/tcrun030 -tests/typecheck/should_run/tcrun031 -tests/typecheck/should_run/tcrun032 -tests/typecheck/should_run/tcrun033 -tests/typecheck/should_run/tcrun034 -tests/typecheck/should_run/tcrun036 -tests/typecheck/should_run/tcrun037 -tests/typecheck/should_run/tcrun038 -tests/typecheck/should_run/tcrun039 -tests/typecheck/should_run/tcrun040 -tests/typecheck/should_run/tcrun041 -tests/typecheck/should_run/tcrun042 -tests/typecheck/should_run/tcrun043 -tests/typecheck/should_run/tcrun044 -tests/typecheck/should_run/tcrun045 -tests/typecheck/should_run/tcrun046 -tests/typecheck/should_run/tcrun047 -tests/typecheck/should_run/tcrun048 -tests/typecheck/should_run/tcrun049 -tests/typecheck/should_run/tcrun050 -tests/typecheck/should_run/tcrun051 -tests/typecheck/should_run/testeq2 -tests/typecheck/testeq1/typecheck.testeq1 -timeout/calibrate.out -timeout/dist/ -timeout/install-inplace/ +/tests/safeHaskell/safeLanguage/SafeLang04 +/tests/safeHaskell/safeLanguage/SafeLang05 +/tests/safeHaskell/safeLanguage/SafeLang06 +/tests/safeHaskell/safeLanguage/SafeLang09 +/tests/safeHaskell/safeLanguage/SafeLang11 +/tests/safeHaskell/safeLanguage/SafeLang13 +/tests/safeHaskell/safeLanguage/SafeLang15 +/tests/safeHaskell/unsafeLibs/BadImport02 +/tests/simplCore/should_compile/T3055.simpl +/tests/simplCore/should_compile/T4138.simpl +/tests/simplCore/should_compile/T7702plugin/pkg.T7702/ +/tests/simplCore/should_compile/T7796.prep 
+/tests/simplCore/should_run/SeqRule +/tests/simplCore/should_run/T2110 +/tests/simplCore/should_run/T2756 +/tests/simplCore/should_run/T3403 +/tests/simplCore/should_run/T3437 +/tests/simplCore/should_run/T3591 +/tests/simplCore/should_run/T3959 +/tests/simplCore/should_run/T3972 +/tests/simplCore/should_run/T3983 +/tests/simplCore/should_run/T457 +/tests/simplCore/should_run/T4814 +/tests/simplCore/should_run/T5315 +/tests/simplCore/should_run/T5441 +/tests/simplCore/should_run/T5453 +/tests/simplCore/should_run/T5587 +/tests/simplCore/should_run/T5603 +/tests/simplCore/should_run/T5625 +/tests/simplCore/should_run/T5915 +/tests/simplCore/should_run/T5920 +/tests/simplCore/should_run/T5997 +/tests/simplCore/should_run/T7101 +/tests/simplCore/should_run/T7924 +/tests/simplCore/should_run/T9128 +/tests/simplCore/should_run/runST +/tests/simplCore/should_run/simplrun001 +/tests/simplCore/should_run/simplrun002 +/tests/simplCore/should_run/simplrun003 +/tests/simplCore/should_run/simplrun004 +/tests/simplCore/should_run/simplrun005 +/tests/simplCore/should_run/simplrun007 +/tests/simplCore/should_run/simplrun008 +/tests/simplCore/should_run/simplrun009 +/tests/simplCore/should_run/simplrun010 +/tests/simplCore/should_run/simplrun011 +/tests/stranal/should_run/T2756b +/tests/stranal/should_run/T7649 +/tests/stranal/should_run/T8425/T8425 +/tests/stranal/should_run/T9254 +/tests/stranal/should_run/strun001 +/tests/stranal/should_run/strun002 +/tests/stranal/should_run/strun003 +/tests/stranal/should_run/strun004 +/tests/th/T1835 +/tests/th/T3572 +/tests/th/T3920 +/tests/th/T5379 +/tests/th/T5410 +/tests/th/T5555 +/tests/th/T7064 +/tests/th/T7910 +/tests/th/T8186 +/tests/th/T8633 +/tests/th/TH_Depends +/tests/th/TH_Depends_external.txt +/tests/th/TH_StringPrimL +/tests/th/TH_import_loop/ModuleA.hi-boot +/tests/th/TH_import_loop/ModuleA.o-boot +/tests/th/TH_lookupName +/tests/th/TH_ppr1 +/tests/th/TH_recover +/tests/th/TH_repE2 +/tests/th/TH_repGuardOutput +/tests/th/TH_repPrimOutput +/tests/th/TH_repPrimOutput2 +/tests/th/TH_spliceE1 +/tests/th/TH_spliceE4 +/tests/th/TH_spliceE5 +/tests/th/TH_spliceE5_prof +/tests/th/TH_spliceViewPat/TH_spliceViewPat +/tests/th/TH_unresolvedInfix +/tests/th/TH_viewPatPrint +/tests/th/TH_where +/tests/typecheck/should_compile/tc159 +/tests/typecheck/should_compile/tc263 +/tests/typecheck/should_fail/T3468.o-boot +/tests/typecheck/should_fail/tcfail149 +/tests/typecheck/should_run/Defer01 +/tests/typecheck/should_run/IPRun +/tests/typecheck/should_run/T1624 +/tests/typecheck/should_run/T1735 +/tests/typecheck/should_run/T2722 +/tests/typecheck/should_run/T3500a +/tests/typecheck/should_run/T3500b +/tests/typecheck/should_run/T3731 +/tests/typecheck/should_run/T3731-short +/tests/typecheck/should_run/T4809 +/tests/typecheck/should_run/T5573a +/tests/typecheck/should_run/T5573b +/tests/typecheck/should_run/T5751 +/tests/typecheck/should_run/T5759 +/tests/typecheck/should_run/T5913 +/tests/typecheck/should_run/T6117 +/tests/typecheck/should_run/T7023 +/tests/typecheck/should_run/T7126 +/tests/typecheck/should_run/T7748 +/tests/typecheck/should_run/T7861 +/tests/typecheck/should_run/T8492 +/tests/typecheck/should_run/T8739 +/tests/typecheck/should_run/TcCoercible +/tests/typecheck/should_run/TcNullaryTC +/tests/typecheck/should_run/TcTypeNatSimpleRun +/tests/typecheck/should_run/church +/tests/typecheck/should_run/mc17 +/tests/typecheck/should_run/tcrun001 +/tests/typecheck/should_run/tcrun002 +/tests/typecheck/should_run/tcrun003 
+/tests/typecheck/should_run/tcrun004 +/tests/typecheck/should_run/tcrun005 +/tests/typecheck/should_run/tcrun006 +/tests/typecheck/should_run/tcrun008 +/tests/typecheck/should_run/tcrun009 +/tests/typecheck/should_run/tcrun010 +/tests/typecheck/should_run/tcrun011 +/tests/typecheck/should_run/tcrun012 +/tests/typecheck/should_run/tcrun013 +/tests/typecheck/should_run/tcrun014 +/tests/typecheck/should_run/tcrun015 +/tests/typecheck/should_run/tcrun016 +/tests/typecheck/should_run/tcrun017 +/tests/typecheck/should_run/tcrun018 +/tests/typecheck/should_run/tcrun019 +/tests/typecheck/should_run/tcrun020 +/tests/typecheck/should_run/tcrun021 +/tests/typecheck/should_run/tcrun022 +/tests/typecheck/should_run/tcrun023 +/tests/typecheck/should_run/tcrun024 +/tests/typecheck/should_run/tcrun025 +/tests/typecheck/should_run/tcrun026 +/tests/typecheck/should_run/tcrun027 +/tests/typecheck/should_run/tcrun028 +/tests/typecheck/should_run/tcrun029 +/tests/typecheck/should_run/tcrun030 +/tests/typecheck/should_run/tcrun031 +/tests/typecheck/should_run/tcrun032 +/tests/typecheck/should_run/tcrun033 +/tests/typecheck/should_run/tcrun034 +/tests/typecheck/should_run/tcrun036 +/tests/typecheck/should_run/tcrun037 +/tests/typecheck/should_run/tcrun038 +/tests/typecheck/should_run/tcrun039 +/tests/typecheck/should_run/tcrun040 +/tests/typecheck/should_run/tcrun041 +/tests/typecheck/should_run/tcrun042 +/tests/typecheck/should_run/tcrun043 +/tests/typecheck/should_run/tcrun044 +/tests/typecheck/should_run/tcrun045 +/tests/typecheck/should_run/tcrun046 +/tests/typecheck/should_run/tcrun047 +/tests/typecheck/should_run/tcrun048 +/tests/typecheck/should_run/tcrun049 +/tests/typecheck/should_run/tcrun050 +/tests/typecheck/should_run/tcrun051 +/tests/typecheck/should_run/testeq2 +/tests/typecheck/testeq1/typecheck.testeq1 +/timeout/calibrate.out +/timeout/dist/ +/timeout/install-inplace/ diff --git a/testsuite/config/ghc b/testsuite/config/ghc index 947f558c08..f763e72ed3 100644 --- a/testsuite/config/ghc +++ b/testsuite/config/ghc @@ -21,8 +21,7 @@ config.compile_ways = ['normal', 'hpc'] config.run_ways = ['normal', 'hpc'] # ways that are not enabled by default, but can always be invoked explicitly -config.other_ways = ['extcore','optextcore', - 'prof', +config.other_ways = ['prof', 'prof_hc_hb','prof_hb', 'prof_hd','prof_hy','prof_hr', 'threaded1_ls', 'threaded2_hT', @@ -93,8 +92,6 @@ config.way_flags = lambda name : { 'profasm' : ['-O', '-prof', '-static', '-auto-all'], 'profthreaded' : ['-O', '-prof', '-static', '-auto-all', '-threaded'], 'ghci' : ['--interactive', '-v0', '-ignore-dot-ghci', '+RTS', '-I0.1', '-RTS'], - 'extcore' : ['-fext-core'], - 'optextcore' : ['-O', '-fext-core'], 'threaded1' : ['-threaded', '-debug'], 'threaded1_ls' : ['-threaded', '-debug'], 'threaded2' : ['-O', '-threaded', '-eventlog'], @@ -127,8 +124,6 @@ config.way_rts_flags = { 'profasm' : ['-hc', '-p'], # test heap profiling too 'profthreaded' : ['-p'], 'ghci' : [], - 'extcore' : [], - 'optextcore' : [], 'threaded1' : [], 'threaded1_ls' : ['-ls'], 'threaded2' : ['-N2 -ls'], diff --git a/testsuite/driver/runtests.py b/testsuite/driver/runtests.py index c92eaefacc..103c7ace7c 100644 --- a/testsuite/driver/runtests.py +++ b/testsuite/driver/runtests.py @@ -98,8 +98,8 @@ for opt,arg in opts: config.skip_perf_tests = True if opt == '--verbose': - if arg not in ["0","1","2","3"]: - sys.stderr.write("ERROR: requested verbosity %s not supported, use 0,1,2 or 3" % arg) + if arg not in ["0","1","2","3","4"]: + sys.stderr.write("ERROR: 
requested verbosity %s not supported, use 0,1,2,3 or 4" % arg) sys.exit(1) config.verbose = int(arg) diff --git a/testsuite/driver/testlib.py b/testsuite/driver/testlib.py index 0657db83d3..126c8e4102 100644 --- a/testsuite/driver/testlib.py +++ b/testsuite/driver/testlib.py @@ -996,8 +996,6 @@ def compile_and_run__( name, way, top_mod, extra_mods, extra_hc_opts ): if way == 'ghci': # interpreted... return interpreter_run( name, way, extra_hc_opts, 0, top_mod ) - elif way == 'extcore' or way == 'optextcore' : - return extcore_run( name, way, extra_hc_opts, 0, top_mod ) else: # compiled... force = 0 if extra_mods: @@ -1023,12 +1021,14 @@ def multi_compile_and_run( name, way, top_mod, extra_mods, extra_hc_opts ): def stats( name, way, stats_file ): opts = getTestOpts() - return checkStats(stats_file, opts.stats_range_fields) + return checkStats(name, way, stats_file, opts.stats_range_fields) # ----------------------------------------------------------------------------- # Check -t stats info -def checkStats(stats_file, range_fields): +def checkStats(name, way, stats_file, range_fields): + full_name = name + '(' + way + ')' + result = passed() if len(range_fields) > 0: f = open(in_testdir(stats_file)) @@ -1042,8 +1042,10 @@ def checkStats(stats_file, range_fields): result = failBecause('no such stats field') val = int(m.group(1)) - lowerBound = trunc( expected * ((100 - float(dev))/100)); - upperBound = trunc(0.5 + ceil(expected * ((100 + float(dev))/100))); + lowerBound = trunc( expected * ((100 - float(dev))/100)) + upperBound = trunc(0.5 + ceil(expected * ((100 + float(dev))/100))) + + deviation = round(((float(val) * 100)/ expected) - 100, 1) if val < lowerBound: print field, 'value is too low:' @@ -1054,7 +1056,7 @@ def checkStats(stats_file, range_fields): print field, 'value is too high:' result = failBecause('stat not good enough') - if val < lowerBound or val > upperBound: + if val < lowerBound or val > upperBound or config.verbose >= 4: valStr = str(val) valLen = len(valStr) expectedStr = str(expected) @@ -1062,10 +1064,12 @@ def checkStats(stats_file, range_fields): length = max(map (lambda x : len(str(x)), [expected, lowerBound, upperBound, val])) def display(descr, val, extra): print descr, string.rjust(str(val), length), extra - display(' Expected ' + field + ':', expected, '+/-' + str(dev) + '%') - display(' Lower bound ' + field + ':', lowerBound, '') - display(' Upper bound ' + field + ':', upperBound, '') - display(' Actual ' + field + ':', val, '') + display(' Expected ' + full_name + ' ' + field + ':', expected, '+/-' + str(dev) + '%') + display(' Lower bound ' + full_name + ' ' + field + ':', lowerBound, '') + display(' Upper bound ' + full_name + ' ' + field + ':', upperBound, '') + display(' Actual ' + full_name + ' ' + field + ':', val, '') + if val != expected: + display(' Deviation ' + full_name + ' ' + field + ':', deviation, '%') return result @@ -1156,7 +1160,7 @@ def simple_build( name, way, extra_hc_opts, should_fail, top_mod, link, addsuf, # ToDo: if the sub-shell was killed by ^C, then exit - statsResult = checkStats(stats_file, opts.compiler_stats_range_fields) + statsResult = checkStats(name, way, stats_file, opts.compiler_stats_range_fields) if badResult(statsResult): return statsResult @@ -1256,7 +1260,7 @@ def simple_run( name, way, prog, args ): if check_prof and not check_prof_ok(name): return failBecause('bad profile') - return checkStats(stats_file, opts.stats_range_fields) + return checkStats(name, way, stats_file, opts.stats_range_fields) def 
rts_flags(way): if (way == ''): @@ -1385,99 +1389,6 @@ def split_file(in_fn, delimiter, out1_fn, out2_fn): out2.close() # ----------------------------------------------------------------------------- -# Generate External Core for the given program, then compile the resulting Core -# and compare its output to the expected output - -def extcore_run( name, way, extra_hc_opts, compile_only, top_mod ): - - depsfilename = qualify(name, 'deps') - errname = add_suffix(name, 'comp.stderr') - qerrname = qualify(errname,'') - - hcname = qualify(name, 'hc') - oname = qualify(name, 'o') - - rm_no_fail( qerrname ) - rm_no_fail( qualify(name, '') ) - - if (top_mod == ''): - srcname = add_hs_lhs_suffix(name) - else: - srcname = top_mod - - qcorefilename = qualify(name, 'hcr') - corefilename = add_suffix(name, 'hcr') - rm_no_fail(qcorefilename) - - # Generate External Core - - if (top_mod == ''): - to_do = ' ' + srcname + ' ' - else: - to_do = ' --make ' + top_mod + ' ' - - flags = copy.copy(getTestOpts().compiler_always_flags) - if getTestOpts().outputdir != None: - flags.extend(["-outputdir", getTestOpts().outputdir]) - cmd = 'cd ' + getTestOpts().testdir + " && '" \ - + config.compiler + "' " \ - + join(flags,' ') + ' ' \ - + join(config.way_flags(name)[way],' ') + ' ' \ - + extra_hc_opts + ' ' \ - + getTestOpts().extra_hc_opts \ - + to_do \ - + '>' + errname + ' 2>&1' - result = runCmdFor(name, cmd) - - exit_code = result >> 8 - - if exit_code != 0: - if_verbose(1,'Compiling to External Core failed (status ' + `result` + ') errors were:') - if_verbose_dump(1,qerrname) - return failBecause('ext core exit code non-0') - - # Compile the resulting files -- if there's more than one module, we need to read the output - # of the previous compilation in order to find the dependencies - if (top_mod == ''): - to_compile = corefilename - else: - result = runCmdFor(name, 'grep Compiling ' + qerrname + ' | awk \'{print $4}\' > ' + depsfilename) - deps = open(depsfilename).read() - deplist = string.replace(deps, '\n',' '); - deplist2 = string.replace(deplist,'.lhs,', '.hcr'); - to_compile = string.replace(deplist2,'.hs,', '.hcr'); - - flags = join(filter(lambda f: f != '-fext-core',config.way_flags(name)[way]),' ') - if getTestOpts().outputdir != None: - flags.extend(["-outputdir", getTestOpts().outputdir]) - - cmd = 'cd ' + getTestOpts().testdir + " && '" \ - + config.compiler + "' " \ - + join(getTestOpts().compiler_always_flags,' ') + ' ' \ - + to_compile + ' ' \ - + extra_hc_opts + ' ' \ - + getTestOpts().extra_hc_opts + ' ' \ - + flags \ - + ' -fglasgow-exts -o ' + name \ - + '>' + errname + ' 2>&1' - - result = runCmdFor(name, cmd) - exit_code = result >> 8 - - if exit_code != 0: - if_verbose(1,'Compiling External Core file(s) failed (status ' + `result` + ') errors were:') - if_verbose_dump(1,qerrname) - return failBecause('ext core exit code non-0') - - # Clean up - rm_no_fail ( oname ) - rm_no_fail ( hcname ) - rm_no_fail ( qcorefilename ) - rm_no_fail ( depsfilename ) - - return simple_run ( name, way, './'+name, getTestOpts().extra_run_opts ) - -# ----------------------------------------------------------------------------- # Utils def check_stdout_ok( name ): diff --git a/testsuite/mk/test.mk b/testsuite/mk/test.mk index 0cc3f21c8a..d6e550fb9d 100644 --- a/testsuite/mk/test.mk +++ b/testsuite/mk/test.mk @@ -55,7 +55,7 @@ else RUNTEST_OPTS += -e ghc_with_native_codegen=0 endif -GHC_PRIM_LIBDIR := $(shell "$(GHC_PKG)" field ghc-prim library-dirs --simple-output) +GHC_PRIM_LIBDIR := $(subst library-dirs: 
,,$(shell "$(GHC_PKG)" field ghc-prim library-dirs --simple-output)) HAVE_VANILLA := $(shell if [ -f $(subst \,/,$(GHC_PRIM_LIBDIR))/GHC/PrimopWrappers.hi ]; then echo YES; else echo NO; fi) HAVE_DYNAMIC := $(shell if [ -f $(subst \,/,$(GHC_PRIM_LIBDIR))/GHC/PrimopWrappers.dyn_hi ]; then echo YES; else echo NO; fi) HAVE_PROFILING := $(shell if [ -f $(subst \,/,$(GHC_PRIM_LIBDIR))/GHC/PrimopWrappers.p_hi ]; then echo YES; else echo NO; fi) diff --git a/testsuite/tests/annotations/should_compile/th/AnnHelper.hs b/testsuite/tests/annotations/should_compile/th/AnnHelper.hs new file mode 100644 index 0000000000..ac0f040ba0 --- /dev/null +++ b/testsuite/tests/annotations/should_compile/th/AnnHelper.hs @@ -0,0 +1,16 @@ +module AnnHelper where + +import Language.Haskell.TH +import Language.Haskell.TH.Syntax + +traverseModuleAnnotations :: Q [String] +traverseModuleAnnotations = do + ModuleInfo children <- reifyModule =<< thisModule + go children [] [] + where + go [] _visited acc = return acc + go (x:xs) visited acc | x `elem` visited = go xs visited acc + | otherwise = do + ModuleInfo newMods <- reifyModule x + newAnns <- reifyAnnotations $ AnnLookupModule x + go (newMods ++ xs) (x:visited) (newAnns ++ acc) diff --git a/testsuite/tests/annotations/should_compile/th/Makefile b/testsuite/tests/annotations/should_compile/th/Makefile new file mode 100644 index 0000000000..4159eeeda1 --- /dev/null +++ b/testsuite/tests/annotations/should_compile/th/Makefile @@ -0,0 +1,33 @@ +TOP=../../../.. +include $(TOP)/mk/boilerplate.mk +include $(TOP)/mk/test.mk + +annth_make: + $(MAKE) clean_annth_make + mkdir build_make + '$(TEST_HC)' $(TEST_HC_OPTS) -v0 --make \ + -odir build_make -hidir build_make -o build_make/annth annth.hs + +clean_annth_make: + rm -rf build_make + +annth_compunits: + $(MAKE) clean_annth_compunits + mkdir build_compunits + '$(TEST_HC)' $(TEST_HC_OPTS) -v0 -dynamic-too \ + -odir build_compunits -hidir build_compunits \ + -c AnnHelper.hs + '$(TEST_HC)' $(TEST_HC_OPTS) -v0 -dynamic-too \ + -odir build_compunits -hidir build_compunits \ + -c TestModule.hs + '$(TEST_HC)' $(TEST_HC_OPTS) -v0 -dynamic-too \ + -odir build_compunits -hidir build_compunits \ + -c TestModuleTH.hs + '$(TEST_HC)' $(TEST_HC_OPTS) -v0 -ibuild_compunits \ + -odir build_compunits -hidir build_compunits \ + -c annth.hs + +clean_annth_compunits: + rm -rf build_compunits + +.PHONY: annth_make clean_annth_make annth_compunits clean_annth_compunits diff --git a/testsuite/tests/annotations/should_compile/th/TestModule.hs b/testsuite/tests/annotations/should_compile/th/TestModule.hs new file mode 100644 index 0000000000..d9519eb8b2 --- /dev/null +++ b/testsuite/tests/annotations/should_compile/th/TestModule.hs @@ -0,0 +1,11 @@ +module TestModule where + +{-# ANN module "Module annotation" #-} + +{-# ANN type TestType "Type annotation" #-} +{-# ANN TestType "Constructor annotation" #-} +data TestType = TestType + +{-# ANN testValue "Value annotation" #-} +testValue :: Int +testValue = 42 diff --git a/testsuite/tests/annotations/should_compile/th/TestModuleTH.hs b/testsuite/tests/annotations/should_compile/th/TestModuleTH.hs new file mode 100644 index 0000000000..f21b13764b --- /dev/null +++ b/testsuite/tests/annotations/should_compile/th/TestModuleTH.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE TemplateHaskell #-} + +module TestModuleTH where + +import Language.Haskell.TH + +$(do + modAnn <- pragAnnD ModuleAnnotation + (stringE "TH module annotation") + [typ] <- [d| data TestTypeTH = TestTypeTH |] + conAnn <- pragAnnD (ValueAnnotation $ 
mkName "TestTypeTH") + (stringE "TH Constructor annotation") + typAnn <- pragAnnD (TypeAnnotation $ mkName "TestTypeTH") + (stringE "TH Type annotation") + valAnn <- pragAnnD (ValueAnnotation $ mkName "testValueTH") + (stringE "TH Value annotation") + [val] <- [d| testValueTH = (42 :: Int) |] + return [modAnn, conAnn, typAnn, typ, valAnn, val] ) diff --git a/testsuite/tests/annotations/should_compile/th/all.T b/testsuite/tests/annotations/should_compile/th/all.T new file mode 100644 index 0000000000..b44a0d594f --- /dev/null +++ b/testsuite/tests/annotations/should_compile/th/all.T @@ -0,0 +1,22 @@ +setTestOpts(when(compiler_profiled(), skip)) + +# Annotations and Template Haskell, require runtime evaluation. In +# order for this to work with profiling, we would have to build the +# program twice and use -osuf p_o (see the TH_splitE5_prof test). For +# now, just disable the profiling ways. + +test('annth_make', + [req_interp, + omit_ways(['profasm','profthreaded']), + unless(have_dynamic(),skip), + clean_cmd('$MAKE -s clean_annth_make')], + run_command, + ['$MAKE -s --no-print-directory annth_make']) + +test('annth_compunits', + [req_interp, + omit_ways(['profasm','profthreaded']), + unless(have_dynamic(),skip), + clean_cmd('$MAKE -s clean_annth_compunits')], + run_command, + ['$MAKE -s --no-print-directory annth_compunits']) diff --git a/testsuite/tests/annotations/should_compile/th/annth.hs b/testsuite/tests/annotations/should_compile/th/annth.hs new file mode 100644 index 0000000000..de5d4d32a8 --- /dev/null +++ b/testsuite/tests/annotations/should_compile/th/annth.hs @@ -0,0 +1,26 @@ +{-# LANGUAGE TemplateHaskell #-} + +import Language.Haskell.TH +import Language.Haskell.TH.Syntax + +import AnnHelper +import TestModule +import TestModuleTH + +main = do + $(do + anns <- traverseModuleAnnotations + runIO $ print (anns :: [String]) + anns <- reifyAnnotations (AnnLookupName 'testValue) + runIO $ print (anns :: [String]) + anns <- reifyAnnotations (AnnLookupName 'testValueTH) + runIO $ print (anns :: [String]) + anns <- reifyAnnotations (AnnLookupName ''TestType) + runIO $ print (anns :: [String]) + anns <- reifyAnnotations (AnnLookupName ''TestTypeTH) + runIO $ print (anns :: [String]) + anns <- reifyAnnotations (AnnLookupName 'TestType) + runIO $ print (anns :: [String]) + anns <- reifyAnnotations (AnnLookupName 'TestTypeTH) + runIO $ print (anns :: [String]) + [| return () |] ) diff --git a/testsuite/tests/annotations/should_compile/th/annth_compunits.stdout b/testsuite/tests/annotations/should_compile/th/annth_compunits.stdout new file mode 100644 index 0000000000..96e4642c7e --- /dev/null +++ b/testsuite/tests/annotations/should_compile/th/annth_compunits.stdout @@ -0,0 +1,7 @@ +["TH module annotation","Module annotation"] +["Value annotation"] +["TH Value annotation"] +["Type annotation"] +["TH Type annotation"] +["Constructor annotation"] +["TH Constructor annotation"] diff --git a/testsuite/tests/annotations/should_compile/th/annth_make.stdout b/testsuite/tests/annotations/should_compile/th/annth_make.stdout new file mode 100644 index 0000000000..96e4642c7e --- /dev/null +++ b/testsuite/tests/annotations/should_compile/th/annth_make.stdout @@ -0,0 +1,7 @@ +["TH module annotation","Module annotation"] +["Value annotation"] +["TH Value annotation"] +["Type annotation"] +["TH Type annotation"] +["Constructor annotation"] +["TH Constructor annotation"] diff --git a/testsuite/tests/callarity/perf/all.T b/testsuite/tests/callarity/perf/all.T index 765a2e94a7..1c7969474c 100644 --- 
a/testsuite/tests/callarity/perf/all.T +++ b/testsuite/tests/callarity/perf/all.T @@ -1,8 +1,9 @@ test('T3924', [stats_num_field('bytes allocated', - [ (wordsize(64), 51480, 5), + [ (wordsize(64), 50760, 5), # previously, without call-arity: 22326544 # 2014-01-18: 51480 (amd64/Linux) + # 2014-07-17: 50760 (amd64/Linux) (Roundabout adjustment) (wordsize(32), 44988, 5) ]), # 2014-04-04: 44988 (Windows, 64-bit machine) only_ways(['normal']) diff --git a/testsuite/tests/codeGen/should_compile/T9155.hs b/testsuite/tests/codeGen/should_compile/T9155.hs new file mode 100644 index 0000000000..6fac0bcee6 --- /dev/null +++ b/testsuite/tests/codeGen/should_compile/T9155.hs @@ -0,0 +1,30 @@ +{-# LANGUAGE ScopedTypeVariables #-} +module M () where + +import Data.Bits ((.&.)) + +bitsSet :: Int -> Int -> Bool +bitsSet mask i + = (i .&. mask == mask) + +class Eq b => BitMask b where + assocBitMask :: [(b,Int)] + + fromBitMask :: Int -> b + fromBitMask i + = walk assocBitMask + where + walk [] = error "Graphics.UI.WX.Types.fromBitMask: empty list" + walk [(x,0)] = x + walk ((x,m):xs) | bitsSet m i = x + | otherwise = walk xs + +data Align = AlignLeft + | AlignCentre + deriving Eq + +instance BitMask Align where + assocBitMask + = [(AlignCentre,512) + ,(AlignLeft, 256) + ] diff --git a/testsuite/tests/codeGen/should_compile/all.T b/testsuite/tests/codeGen/should_compile/all.T index 487b6b653c..ae8d0dd24a 100644 --- a/testsuite/tests/codeGen/should_compile/all.T +++ b/testsuite/tests/codeGen/should_compile/all.T @@ -22,3 +22,4 @@ test('massive_array', test('T7237', normal, compile, ['']) test('T7574', [cmm_src, omit_ways(['llvm', 'optllvm'])], compile, ['']) test('T8205', normal, compile, ['-O0']) +test('T9155', normal, compile, ['-O2']) diff --git a/testsuite/tests/codeGen/should_run/T9001.hs b/testsuite/tests/codeGen/should_run/T9001.hs new file mode 100644 index 0000000000..3fae93efa0 --- /dev/null +++ b/testsuite/tests/codeGen/should_run/T9001.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE RankNTypes #-} + +newtype FMList = FM {unFM :: forall m. 
m -> m} + +main = print (delete 2000 (FM id) :: Int) + +delete 0 _ = 0 +delete n (FM a) = a $ delete (n-1) $ FM $ \g -> a (const g) undefined diff --git a/testsuite/tests/codeGen/should_run/T9001.stdout b/testsuite/tests/codeGen/should_run/T9001.stdout new file mode 100644 index 0000000000..573541ac97 --- /dev/null +++ b/testsuite/tests/codeGen/should_run/T9001.stdout @@ -0,0 +1 @@ +0 diff --git a/testsuite/tests/codeGen/should_run/all.T b/testsuite/tests/codeGen/should_run/all.T index 9077af2e0c..2d66c42aa3 100644 --- a/testsuite/tests/codeGen/should_run/all.T +++ b/testsuite/tests/codeGen/should_run/all.T @@ -48,9 +48,7 @@ test('cgrun047', normal, compile_and_run, ['']) test('cgrun048', normal, compile_and_run, ['']) test('cgrun049', normal, compile_and_run, ['-funbox-strict-fields']) test('cgrun050', normal, compile_and_run, ['']) -# Doesn't work with External Core due to datatype declaration with no constructors -test('cgrun051', [expect_fail_for(['extcore','optextcore']), exit_code(1)], - compile_and_run, ['']) +test('cgrun051', [exit_code(1)], compile_and_run, ['']) test('cgrun052', only_ways(['optasm']), compile_and_run, ['-funbox-strict-fields']) test('cgrun053', normal, compile_and_run, ['']) test('cgrun054', normal, compile_and_run, ['']) @@ -121,3 +119,4 @@ test('StaticByteArraySize', normal, compile_and_run, ['-O2']) test('CopySmallArray', normal, compile_and_run, ['']) test('CopySmallArrayStressTest', reqlib('random'), compile_and_run, ['']) test('SizeOfSmallArray', normal, compile_and_run, ['']) +test('T9001', normal, compile_and_run, ['']) diff --git a/testsuite/tests/concurrent/should_run/AtomicPrimops.hs b/testsuite/tests/concurrent/should_run/AtomicPrimops.hs new file mode 100644 index 0000000000..0c55aba93e --- /dev/null +++ b/testsuite/tests/concurrent/should_run/AtomicPrimops.hs @@ -0,0 +1,245 @@ +{-# LANGUAGE MagicHash #-} +{-# LANGUAGE UnboxedTuples #-} + +module Main ( main ) where + +import Control.Concurrent +import Control.Concurrent.MVar +import Control.Monad (when) +import Foreign.Storable +import GHC.Exts +import GHC.IO + +-- | Iterations per worker. +iters :: Int +iters = 1000000 + +main :: IO () +main = do + fetchAddSubTest + fetchAndTest + fetchNandTest + fetchOrTest + fetchXorTest + casTest + readWriteTest + +-- | Test fetchAddIntArray# by having two threads concurrenctly +-- increment a counter and then checking the sum at the end. +fetchAddSubTest :: IO () +fetchAddSubTest = do + tot <- race 0 + (\ mba -> work fetchAddIntArray mba iters 2) + (\ mba -> work fetchSubIntArray mba iters 1) + assertEq 1000000 tot "fetchAddSubTest" + where + work :: (MByteArray -> Int -> Int -> IO ()) -> MByteArray -> Int -> Int + -> IO () + work op mba 0 val = return () + work op mba n val = op mba 0 val >> work op mba (n-1) val + +-- | Test fetchXorIntArray# by having two threads concurrenctly XORing +-- and then checking the result at the end. Works since XOR is +-- commutative. +-- +-- Covers the code paths for AND, NAND, and OR as well. +fetchXorTest :: IO () +fetchXorTest = do + res <- race n0 + (\ mba -> work mba iters t1pat) + (\ mba -> work mba iters t2pat) + assertEq expected res "fetchXorTest" + where + work :: MByteArray -> Int -> Int -> IO () + work mba 0 val = return () + work mba n val = fetchXorIntArray mba 0 val >> work mba (n-1) val + + -- Initial value is a large prime and the two patterns are 1010... + -- and 0101... 
+ (n0, t1pat, t2pat) + | sizeOf (undefined :: Int) == 8 = + (0x00000000ffffffff, 0x5555555555555555, 0x9999999999999999) + | otherwise = (0x0000ffff, 0x55555555, 0x99999999) + expected + | sizeOf (undefined :: Int) == 8 = 4294967295 + | otherwise = 65535 + +-- The tests for AND, NAND, and OR are trivial for two reasons: +-- +-- * The code path is already well exercised by 'fetchXorTest'. +-- +-- * It's harder to test these operations, as a long sequence of them +-- convert to a single value but we'd like to write a test in the +-- style of 'fetchXorTest' that applies the operation repeatedly, +-- to make it likely that any race conditions are detected. +-- +-- Right now we only test that they return the correct value for a +-- single op on each thread. + +fetchOpTest :: (MByteArray -> Int -> Int -> IO ()) + -> Int -> String -> IO () +fetchOpTest op expected name = do + res <- race n0 + (\ mba -> work mba t1pat) + (\ mba -> work mba t2pat) + assertEq expected res name + where + work :: MByteArray -> Int -> IO () + work mba val = op mba 0 val + + -- Initial value is a large prime and the two patterns are 1010... + -- and 0101... + (n0, t1pat, t2pat) + | sizeOf (undefined :: Int) == 8 = + (0x00000000ffffffff, 0x5555555555555555, 0x9999999999999999) + | otherwise = (0x0000ffff, 0x55555555, 0x99999999) + +fetchAndTest :: IO () +fetchAndTest = fetchOpTest fetchAndIntArray expected "fetchAndTest" + where expected + | sizeOf (undefined :: Int) == 8 = 286331153 + | otherwise = 4369 + +fetchNandTest :: IO () +fetchNandTest = fetchOpTest fetchNandIntArray expected "fetchNandTest" + where expected + | sizeOf (undefined :: Int) == 8 = 7378697629770151799 + | otherwise = -2576976009 + +fetchOrTest :: IO () +fetchOrTest = fetchOpTest fetchOrIntArray expected "fetchOrTest" + where expected + | sizeOf (undefined :: Int) == 8 = 15987178197787607039 + | otherwise = 3722313727 + +-- | Test casIntArray# by using it to emulate fetchAddIntArray# and +-- then having two threads concurrenctly increment a counter, +-- checking the sum at the end. +casTest :: IO () +casTest = do + tot <- race 0 + (\ mba -> work mba iters 1) + (\ mba -> work mba iters 2) + assertEq 3000000 tot "casTest" + where + work :: MByteArray -> Int -> Int -> IO () + work mba 0 val = return () + work mba n val = add mba 0 val >> work mba (n-1) val + + -- Fetch-and-add implemented using CAS. + add :: MByteArray -> Int -> Int -> IO () + add mba ix n = do + old <- readIntArray mba ix + old' <- casIntArray mba ix old (old + n) + when (old /= old') $ add mba ix n + +-- | Tests atomic reads and writes by making sure that one thread sees +-- updates that are done on another. This test isn't very good at the +-- moment, as this might work even without atomic ops, but at least it +-- exercises the code. +readWriteTest :: IO () +readWriteTest = do + mba <- newByteArray (sizeOf (undefined :: Int)) + writeIntArray mba 0 0 + latch <- newEmptyMVar + done <- newEmptyMVar + forkIO $ do + takeMVar latch + n <- atomicReadIntArray mba 0 + assertEq 1 n "readWriteTest" + putMVar done () + atomicWriteIntArray mba 0 1 + putMVar latch () + takeMVar done + +-- | Create two threads that mutate the byte array passed to them +-- concurrently. The array is one word large. 
+race :: Int -- ^ Initial value of array element + -> (MByteArray -> IO ()) -- ^ Thread 1 action + -> (MByteArray -> IO ()) -- ^ Thread 2 action + -> IO Int -- ^ Final value of array element +race n0 thread1 thread2 = do + done1 <- newEmptyMVar + done2 <- newEmptyMVar + mba <- newByteArray (sizeOf (undefined :: Int)) + writeIntArray mba 0 n0 + forkIO $ thread1 mba >> putMVar done1 () + forkIO $ thread2 mba >> putMVar done2 () + mapM_ takeMVar [done1, done2] + readIntArray mba 0 + +------------------------------------------------------------------------ +-- Test helper + +assertEq :: (Eq a, Show a) => a -> a -> String -> IO () +assertEq expected actual name + | expected == actual = putStrLn $ name ++ ": OK" + | otherwise = do + putStrLn $ name ++ ": FAIL" + putStrLn $ "Expected: " ++ show expected + putStrLn $ " Actual: " ++ show actual + +------------------------------------------------------------------------ +-- Wrappers around MutableByteArray# + +data MByteArray = MBA (MutableByteArray# RealWorld) + +fetchAddIntArray :: MByteArray -> Int -> Int -> IO () +fetchAddIntArray (MBA mba#) (I# ix#) (I# n#) = IO $ \ s# -> + case fetchAddIntArray# mba# ix# n# s# of + (# s2#, _ #) -> (# s2#, () #) + +fetchSubIntArray :: MByteArray -> Int -> Int -> IO () +fetchSubIntArray (MBA mba#) (I# ix#) (I# n#) = IO $ \ s# -> + case fetchSubIntArray# mba# ix# n# s# of + (# s2#, _ #) -> (# s2#, () #) + +fetchAndIntArray :: MByteArray -> Int -> Int -> IO () +fetchAndIntArray (MBA mba#) (I# ix#) (I# n#) = IO $ \ s# -> + case fetchAndIntArray# mba# ix# n# s# of + (# s2#, _ #) -> (# s2#, () #) + +fetchNandIntArray :: MByteArray -> Int -> Int -> IO () +fetchNandIntArray (MBA mba#) (I# ix#) (I# n#) = IO $ \ s# -> + case fetchNandIntArray# mba# ix# n# s# of + (# s2#, _ #) -> (# s2#, () #) + +fetchOrIntArray :: MByteArray -> Int -> Int -> IO () +fetchOrIntArray (MBA mba#) (I# ix#) (I# n#) = IO $ \ s# -> + case fetchOrIntArray# mba# ix# n# s# of + (# s2#, _ #) -> (# s2#, () #) + +fetchXorIntArray :: MByteArray -> Int -> Int -> IO () +fetchXorIntArray (MBA mba#) (I# ix#) (I# n#) = IO $ \ s# -> + case fetchXorIntArray# mba# ix# n# s# of + (# s2#, _ #) -> (# s2#, () #) + +newByteArray :: Int -> IO MByteArray +newByteArray (I# n#) = IO $ \ s# -> + case newByteArray# n# s# of + (# s2#, mba# #) -> (# s2#, MBA mba# #) + +writeIntArray :: MByteArray -> Int -> Int -> IO () +writeIntArray (MBA mba#) (I# ix#) (I# n#) = IO $ \ s# -> + case writeIntArray# mba# ix# n# s# of + s2# -> (# s2#, () #) + +readIntArray :: MByteArray -> Int -> IO Int +readIntArray (MBA mba#) (I# ix#) = IO $ \ s# -> + case readIntArray# mba# ix# s# of + (# s2#, n# #) -> (# s2#, I# n# #) + +atomicWriteIntArray :: MByteArray -> Int -> Int -> IO () +atomicWriteIntArray (MBA mba#) (I# ix#) (I# n#) = IO $ \ s# -> + case atomicWriteIntArray# mba# ix# n# s# of + s2# -> (# s2#, () #) + +atomicReadIntArray :: MByteArray -> Int -> IO Int +atomicReadIntArray (MBA mba#) (I# ix#) = IO $ \ s# -> + case atomicReadIntArray# mba# ix# s# of + (# s2#, n# #) -> (# s2#, I# n# #) + +casIntArray :: MByteArray -> Int -> Int -> Int -> IO Int +casIntArray (MBA mba#) (I# ix#) (I# old#) (I# new#) = IO $ \ s# -> + case casIntArray# mba# ix# old# new# s# of + (# s2#, old2# #) -> (# s2#, I# old2# #) diff --git a/testsuite/tests/concurrent/should_run/AtomicPrimops.stdout b/testsuite/tests/concurrent/should_run/AtomicPrimops.stdout new file mode 100644 index 0000000000..c37041a040 --- /dev/null +++ b/testsuite/tests/concurrent/should_run/AtomicPrimops.stdout @@ -0,0 +1,7 @@ 
+fetchAddSubTest: OK +fetchAndTest: OK +fetchNandTest: OK +fetchOrTest: OK +fetchXorTest: OK +casTest: OK +readWriteTest: OK diff --git a/testsuite/tests/concurrent/should_run/all.T b/testsuite/tests/concurrent/should_run/all.T index d4e76c6b1e..0a66892d82 100644 --- a/testsuite/tests/concurrent/should_run/all.T +++ b/testsuite/tests/concurrent/should_run/all.T @@ -78,8 +78,10 @@ test('readMVar1', normal, compile_and_run, ['']) test('readMVar2', normal, compile_and_run, ['']) test('readMVar3', normal, compile_and_run, ['']) test('tryReadMVar1', normal, compile_and_run, ['']) +test('tryReadMVar2', normal, compile_and_run, ['']) test('T7970', normal, compile_and_run, ['']) +test('AtomicPrimops', normal, compile_and_run, ['']) # ----------------------------------------------------------------------------- # These tests we only do for a full run diff --git a/testsuite/tests/concurrent/should_run/tryReadMVar2.hs b/testsuite/tests/concurrent/should_run/tryReadMVar2.hs new file mode 100644 index 0000000000..13b8a45c32 --- /dev/null +++ b/testsuite/tests/concurrent/should_run/tryReadMVar2.hs @@ -0,0 +1,15 @@ +module Main where + +import Control.Concurrent +import Control.Monad + +main = do + m <- newEmptyMVar + done <- newEmptyMVar + let q = 200000 + forkIO (do mapM (\n -> putMVar m n) [1..q]; putMVar done ()) + forkIO (do replicateM_ q $ readMVar m; putMVar done ()) + forkIO (do replicateM_ q $ tryReadMVar m; putMVar done ()) + forkIO (do replicateM_ q $ takeMVar m; putMVar done ()) + replicateM_ 4 $ takeMVar done + diff --git a/testsuite/tests/deSugar/should_compile/T2431.stderr b/testsuite/tests/deSugar/should_compile/T2431.stderr index dbafaedf82..c40b603d3f 100644 --- a/testsuite/tests/deSugar/should_compile/T2431.stderr +++ b/testsuite/tests/deSugar/should_compile/T2431.stderr @@ -13,7 +13,7 @@ T2431.$WRefl [InlPrag=INLINE] :: forall a. a T2431.:~: a T2431.$WRefl = \ (@ a) -> T2431.Refl @ a @ a @~ <a>_N T2431.absurd - :: forall a. (GHC.Types.Int T2431.:~: GHC.Types.Bool) -> a + :: forall a. GHC.Types.Int T2431.:~: GHC.Types.Bool -> a [GblId, Arity=1, Caf=NoCafRefs, Str=DmdType <L,U>b] T2431.absurd = \ (@ a) (x :: GHC.Types.Int T2431.:~: GHC.Types.Bool) -> diff --git a/testsuite/tests/deriving/should_compile/T7269.hs b/testsuite/tests/deriving/should_compile/T7269.hs new file mode 100644 index 0000000000..2d7331bebb --- /dev/null +++ b/testsuite/tests/deriving/should_compile/T7269.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE MultiParamTypeClasses, StandaloneDeriving, GeneralizedNewtypeDeriving #-} + +module T7269 where + +class C (a :: k) + +instance C Int + +newtype MyInt = MyInt Int deriving C + +newtype YourInt = YourInt Int +deriving instance C YourInt diff --git a/testsuite/tests/deriving/should_compile/T9069.hs b/testsuite/tests/deriving/should_compile/T9069.hs new file mode 100644 index 0000000000..7ab3af3489 --- /dev/null +++ b/testsuite/tests/deriving/should_compile/T9069.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DeriveTraversable #-} + +module T9069 where + +import Data.Foldable +import Data.Traversable + +data Trivial a = Trivial a + deriving (Functor,Foldable,Traversable)
\ No newline at end of file diff --git a/testsuite/tests/deriving/should_compile/all.T b/testsuite/tests/deriving/should_compile/all.T index 224b99ef00..f440e8043e 100644 --- a/testsuite/tests/deriving/should_compile/all.T +++ b/testsuite/tests/deriving/should_compile/all.T @@ -49,3 +49,5 @@ test('T8865', normal, compile, ['']) test('T8893', normal, compile, ['']) test('T8950', expect_broken(8950), compile, ['']) test('T8963', normal, compile, ['']) +test('T7269', normal, compile, ['']) +test('T9069', normal, compile, ['']) diff --git a/testsuite/tests/deriving/should_fail/T7959.hs b/testsuite/tests/deriving/should_fail/T7959.hs index a798bb0666..000e759be5 100644 --- a/testsuite/tests/deriving/should_fail/T7959.hs +++ b/testsuite/tests/deriving/should_fail/T7959.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE NullaryTypeClasses, StandaloneDeriving #-} +{-# LANGUAGE MultiParamTypeClasses, StandaloneDeriving #-} module T7959 where class A diff --git a/testsuite/tests/deriving/should_fail/T7959.stderr b/testsuite/tests/deriving/should_fail/T7959.stderr index dde9ee0034..5ca93a7fe3 100644 --- a/testsuite/tests/deriving/should_fail/T7959.stderr +++ b/testsuite/tests/deriving/should_fail/T7959.stderr @@ -4,5 +4,5 @@ T7959.hs:5:1: In the stand-alone deriving instance for ‘A’ T7959.hs:6:17: - Cannot derive instances for nullary classes + Expected kind ‘k0 -> Constraint’, but ‘A’ has kind ‘Constraint’ In the data declaration for ‘B’ diff --git a/testsuite/tests/deriving/should_fail/T9071-2.hs b/testsuite/tests/deriving/should_fail/T9071-2.hs new file mode 100644 index 0000000000..7a2f4749ce --- /dev/null +++ b/testsuite/tests/deriving/should_fail/T9071-2.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE DeriveFunctor #-} +module T9071_2 where + +newtype Mu f = Mu (f (Mu f)) + +newtype K1 a b = K1 a +newtype F1 a = F1 (Mu (K1 a)) deriving Functor diff --git a/testsuite/tests/deriving/should_fail/T9071.hs b/testsuite/tests/deriving/should_fail/T9071.hs new file mode 100644 index 0000000000..dc64f42db8 --- /dev/null +++ b/testsuite/tests/deriving/should_fail/T9071.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DeriveFunctor #-} +module T9071 where + +import T9071a + +newtype K a b = K a +newtype F a = F (Mu (K a)) deriving Functor + diff --git a/testsuite/tests/deriving/should_fail/T9071.stderr b/testsuite/tests/deriving/should_fail/T9071.stderr new file mode 100644 index 0000000000..259adbaef0 --- /dev/null +++ b/testsuite/tests/deriving/should_fail/T9071.stderr @@ -0,0 +1,10 @@ +[1 of 2] Compiling T9071a ( T9071a.hs, T9071a.o ) +[2 of 2] Compiling T9071 ( T9071.hs, T9071.o ) + +T9071.hs:7:37: + No instance for (Functor K) + arising from the first field of ‘F’ (type ‘Mu (K a)’) + Possible fix: + use a standalone 'deriving instance' declaration, + so you can specify the instance context yourself + When deriving the instance for (Functor F) diff --git a/testsuite/tests/deriving/should_fail/T9071_2.hs b/testsuite/tests/deriving/should_fail/T9071_2.hs new file mode 100644 index 0000000000..7a2f4749ce --- /dev/null +++ b/testsuite/tests/deriving/should_fail/T9071_2.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE DeriveFunctor #-} +module T9071_2 where + +newtype Mu f = Mu (f (Mu f)) + +newtype K1 a b = K1 a +newtype F1 a = F1 (Mu (K1 a)) deriving Functor diff --git a/testsuite/tests/deriving/should_fail/T9071_2.stderr b/testsuite/tests/deriving/should_fail/T9071_2.stderr new file mode 100644 index 0000000000..ae0fcdb928 --- /dev/null +++ b/testsuite/tests/deriving/should_fail/T9071_2.stderr @@ -0,0 +1,8 @@ + +T9071_2.hs:7:40: + No instance for (Functor Mu) + 
arising from the first field of ‘F1’ (type ‘Mu (K1 a)’) + Possible fix: + use a standalone 'deriving instance' declaration, + so you can specify the instance context yourself + When deriving the instance for (Functor F1) diff --git a/testsuite/tests/deriving/should_fail/T9071a.hs b/testsuite/tests/deriving/should_fail/T9071a.hs new file mode 100644 index 0000000000..bf3a126a19 --- /dev/null +++ b/testsuite/tests/deriving/should_fail/T9071a.hs @@ -0,0 +1,4 @@ +module T9071a where + +newtype Mu f = Mu (f (Mu f)) + diff --git a/testsuite/tests/deriving/should_fail/all.T b/testsuite/tests/deriving/should_fail/all.T index d503b6e266..99da88a55b 100644 --- a/testsuite/tests/deriving/should_fail/all.T +++ b/testsuite/tests/deriving/should_fail/all.T @@ -50,3 +50,6 @@ test('T7800', normal, multimod_compile_fail, ['T7800','']) test('T5498', normal, compile_fail, ['']) test('T6147', normal, compile_fail, ['']) test('T8851', normal, compile_fail, ['']) +test('T9071', normal, multimod_compile_fail, ['T9071','']) +test('T9071_2', normal, compile_fail, ['']) + diff --git a/testsuite/tests/deriving/should_fail/drvfail005.stderr b/testsuite/tests/deriving/should_fail/drvfail005.stderr index b5a2de8d01..1546a37d07 100644 --- a/testsuite/tests/deriving/should_fail/drvfail005.stderr +++ b/testsuite/tests/deriving/should_fail/drvfail005.stderr @@ -1,5 +1,5 @@ drvfail005.hs:4:13: - Can't make a derived instance of ‘Show a (Test a)’: - ‘Show a’ is not a class + Expected kind ‘k0 -> Constraint’, + but ‘Show a’ has kind ‘Constraint’ In the data declaration for ‘Test’ diff --git a/testsuite/tests/deriving/should_fail/drvfail009.stderr b/testsuite/tests/deriving/should_fail/drvfail009.stderr index fcc5b4c305..b9dd90c758 100644 --- a/testsuite/tests/deriving/should_fail/drvfail009.stderr +++ b/testsuite/tests/deriving/should_fail/drvfail009.stderr @@ -1,8 +1,8 @@ drvfail009.hs:10:31: - Can't make a derived instance of ‘C T1’ - (even with cunning newtype deriving): - ‘C’ does not have arity 1 + Expecting one more argument to ‘C’ + Expected kind ‘* -> Constraint’, + but ‘C’ has kind ‘* -> * -> Constraint’ In the newtype declaration for ‘T1’ drvfail009.hs:13:31: diff --git a/testsuite/tests/dph/modules/dph-ExportList-vseg-fast.stderr b/testsuite/tests/dph/modules/dph-ExportList-vseg-fast.stderr index 749c3cdfeb..bf6f453f71 100644 --- a/testsuite/tests/dph/modules/dph-ExportList-vseg-fast.stderr +++ b/testsuite/tests/dph/modules/dph-ExportList-vseg-fast.stderr @@ -1,6 +1,9 @@ [1 of 1] Compiling ExportList ( ExportList.hs, ExportList.o ) Warning: vectorisation failure: identityConvTyCon: type constructor contains parallel arrays [::] - Could NOT call vectorised from original version ExportList.solveV + Could NOT call vectorised from original version + ExportList.solveV :: GHC.Types.Double -> [:GHC.Types.Double:] Warning: vectorisation failure: identityConvTyCon: type constructor contains parallel arrays NodeV Could NOT call vectorised from original version - ExportList.solvePA + ExportList.solvePA :: ExportList.NodeV + -> GHC.Types.Double + -> Data.Array.Parallel.PArray.PData.Base.PArray GHC.Types.Double diff --git a/testsuite/tests/driver/Makefile b/testsuite/tests/driver/Makefile index 3603bb6bcd..62aa2f92c8 100644 --- a/testsuite/tests/driver/Makefile +++ b/testsuite/tests/driver/Makefile @@ -556,9 +556,25 @@ T6037: T2507: -LC_ALL=C "$(TEST_HC)" $(TEST_HC_OPTS_NO_RECOMP) -c T2507.hs +.PHONY: T8959a +T8959a: + -LC_ALL=C "$(TEST_HC)" $(TEST_HC_OPTS_NO_RECOMP) -c T8959a.hs -XUnicodeSyntax + .PHONY: T703 T703: $(RM) -rf 
T703 [ ! -d T703 ] "$(TEST_HC)" $(TEST_HC_OPTS) --make T703.hs -v0 ! readelf -W -l T703 2>/dev/null | grep 'GNU_STACK' | grep -q 'RWE' + +.PHONY: write_interface_oneshot +write_interface_oneshot: + $(RM) -rf write_interface_oneshot/A011.hi + "$(TEST_HC)" $(TEST_HC_OPTS) -hidir write_interface_oneshot -fno-code -fwrite-interface -c A011.hs + test -f write_interface_oneshot/A011.hi + +.PHONY: write_interface_make +write_interface_make: + $(RM) -rf write_interface_make/A011.hi + "$(TEST_HC)" $(TEST_HC_OPTS) -hidir write_interface_make -fno-code -fwrite-interface --make A011.hs + test -f write_interface_make/A011.hi diff --git a/testsuite/tests/driver/T8959a.hs b/testsuite/tests/driver/T8959a.hs new file mode 100644 index 0000000000..6f8fd77d15 --- /dev/null +++ b/testsuite/tests/driver/T8959a.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE UnicodeSyntax #-} +module T8959a where + +foo :: Int -> Int +foo = () diff --git a/testsuite/tests/driver/T8959a.stderr b/testsuite/tests/driver/T8959a.stderr new file mode 100644 index 0000000000..f270bb6d6e --- /dev/null +++ b/testsuite/tests/driver/T8959a.stderr @@ -0,0 +1,5 @@ + +T8959a.hs:5:7: + Couldn't match expected type `Int -> Int' with actual type `()' + In the expression: () + In an equation for `foo': foo = () diff --git a/testsuite/tests/driver/T9050.cmm b/testsuite/tests/driver/T9050.cmm new file mode 100644 index 0000000000..8b1a393741 --- /dev/null +++ b/testsuite/tests/driver/T9050.cmm @@ -0,0 +1 @@ +// empty diff --git a/testsuite/tests/driver/all.T b/testsuite/tests/driver/all.T index ed0ce0f8cb..7236ec1a3a 100644 --- a/testsuite/tests/driver/all.T +++ b/testsuite/tests/driver/all.T @@ -391,7 +391,18 @@ test('T2507', [when(opsys('mingw32'), expect_fail), when(opsys('darwin'), expect_fail)], run_command, ['$MAKE -s --no-print-directory T2507']) +test('T8959a', + # The testsuite doesn't know how to set a non-Unicode locale on Windows or Mac OS X + [when(opsys('mingw32'), expect_fail), when(opsys('darwin'), expect_fail)], + run_command, + ['$MAKE -s --no-print-directory T8959a']) test('T703', normal, run_command, ['$MAKE -s --no-print-directory T703']) test('T8101', normal, compile, ['-Wall -fno-code']) +def build_T9050(name, way): + return simple_build(name + '.cmm', way, '-outputdir=. 
', 0, '', 0, 0, 0) +test('T9050', normal, build_T9050, []) + +test('write_interface_oneshot', normal, run_command, ['$MAKE -s --no-print-directory write_interface_oneshot']) +test('write_interface_make', normal, run_command, ['$MAKE -s --no-print-directory write_interface_make']) diff --git a/testsuite/tests/driver/recomp006/recomp006.stderr b/testsuite/tests/driver/recomp006/recomp006.stderr index 7119ff540b..25b48f375f 100644 --- a/testsuite/tests/driver/recomp006/recomp006.stderr +++ b/testsuite/tests/driver/recomp006/recomp006.stderr @@ -1,6 +1,7 @@ A.hs:8:8: - Couldn't match expected type ‘Int’ with actual type ‘(t0, t1)’ + Couldn't match expected type ‘Int’ + with actual type ‘(Integer, Integer)’ In the expression: (2, 3) In the expression: (1, (2, 3)) In an equation for ‘f’: f = (1, (2, 3)) diff --git a/testsuite/tests/driver/write_interface_make.stdout b/testsuite/tests/driver/write_interface_make.stdout new file mode 100644 index 0000000000..1594f5ee2f --- /dev/null +++ b/testsuite/tests/driver/write_interface_make.stdout @@ -0,0 +1 @@ +[1 of 1] Compiling A011 ( A011.hs, nothing ) diff --git a/testsuite/tests/ext-core/Makefile b/testsuite/tests/ext-core/Makefile deleted file mode 100644 index d52dd9c428..0000000000 --- a/testsuite/tests/ext-core/Makefile +++ /dev/null @@ -1,29 +0,0 @@ -TOP=../.. -include $(TOP)/mk/boilerplate.mk -include $(TOP)/mk/test.mk - -# T5881 needs a script because it goes wrong only when -# the modules are compiled separately, not with --make -T5881: - $(RM) -f T5881.hi T5881.o T5881a.hi T5881a.o - '$(TEST_HC)' $(TEST_HC_OPTS) -c T5881a.hs - '$(TEST_HC)' $(TEST_HC_OPTS) -c T5881.hs - -# T6025 is like T5881; needs separate compile -T6025: - $(RM) -f T6025.hi T6025.o T6025a.hi T6025a.o - '$(TEST_HC)' $(TEST_HC_OPTS) -c T6025a.hs - '$(TEST_HC)' $(TEST_HC_OPTS) -c T6025.hs - -# T6054 is like T5881; needs separate compile -# The second compile fails, and should do so, hence leading "-" -T6054: - $(RM) -f T6054.hi T6054.o T6054a.hi T6054a.o - '$(TEST_HC)' $(TEST_HC_OPTS) -c T6054a.hs - -'$(TEST_HC)' $(TEST_HC_OPTS) -c T6054.hs - -T7022: - $(RM) -f T7022.hi T7022.o T7022a.hi T7022a.o T7022b.hi T7022b.o - '$(TEST_HC)' $(TEST_HC_OPTS) -c T7022a.hs - '$(TEST_HC)' $(TEST_HC_OPTS) -c T7022b.hs -v0 - -'$(TEST_HC)' $(TEST_HC_OPTS) -c -v0 T7022.hs diff --git a/testsuite/tests/ext-core/T7239.hs b/testsuite/tests/ext-core/T7239.hs deleted file mode 100644 index 4331b9e493..0000000000 --- a/testsuite/tests/ext-core/T7239.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Main where - -data T a = T a - -type C = T Int -type CL = [C] - -main = print 1 diff --git a/testsuite/tests/ext-core/all.T b/testsuite/tests/ext-core/all.T deleted file mode 100644 index a1fbb8b7e7..0000000000 --- a/testsuite/tests/ext-core/all.T +++ /dev/null @@ -1,3 +0,0 @@ -setTestOpts(only_compiler_types(['ghc'])) - -test('T7239', normal, compile, ['-fext-core']) diff --git a/testsuite/tests/ffi/should_compile/all.T b/testsuite/tests/ffi/should_compile/all.T index a192a7b0cc..84c7e8602e 100644 --- a/testsuite/tests/ffi/should_compile/all.T +++ b/testsuite/tests/ffi/should_compile/all.T @@ -9,30 +9,21 @@ test('cc001', normal, compile, ['']) # Non-static C call # cc004 test also uses stdcall, so it only works on i386. 
if config.platform.startswith('i386-'): - ways = expect_fail_for(['extcore','optextcore']) + ways = normal else: - ways = expect_fail + ways = expect_fail test('cc004', ways, compile, ['']) -# foreign label -test('cc005', expect_fail_for(['extcore','optextcore']), compile, ['']) - -# Missing: -# test('cc006', normal, compile, ['']) - +test('cc005', normal, compile, ['']) test('cc007', normal, compile, ['']) -# foreign label -test('cc008', expect_fail_for(['extcore','optextcore']), compile, ['']) -# foreign label -test('cc009', expect_fail_for(['extcore','optextcore']), compile, ['']) -# Non-static C call -test('cc010', expect_fail_for(['extcore','optextcore']), compile, ['']) +test('cc008', normal, compile, ['']) +test('cc009', normal, compile, ['']) +test('cc010', normal , compile, ['']) test('cc011', normal, compile, ['']) test('cc012', normal, compile, ['']) test('cc013', normal, compile, ['']) test('cc014', normal, compile, ['']) test('ffi-deriv1', normal, compile, ['']) - test('T1357', normal, compile, ['']) test('T3624', normal, compile, ['']) test('T3742', normal, compile, ['']) diff --git a/testsuite/tests/ffi/should_run/all.T b/testsuite/tests/ffi/should_run/all.T index 567c3e67ce..7efc6eb3d8 100644 --- a/testsuite/tests/ffi/should_run/all.T +++ b/testsuite/tests/ffi/should_run/all.T @@ -4,10 +4,7 @@ # extra run flags # expected process return value, if not zero -# Doesn't work with External Core due to __labels -test('fed001', [only_compiler_types(['ghc']), - expect_fail_for(['extcore','optextcore'])], - compile_and_run, ['']) +test('fed001', normal, compile_and_run, ['']) # Omit GHCi for these two, as they use foreign export test('ffi001', omit_ways(['ghci']), compile_and_run, ['']) @@ -37,9 +34,7 @@ test('ffi005', [ omit_ways(prof_ways), exit_code(3) ], compile_and_run, ['']) -# ffi[006-009] don't work with External Core due to non-static-C foreign calls - -test('ffi006', expect_fail_for(['extcore','optextcore']), compile_and_run, ['']) +test('ffi006', normal, compile_and_run, ['']) # Skip ffi00{7,8} for GHCi. These tests both try to exit or raise an # error from a foreign export, which shuts down the runtime. When @@ -48,15 +43,8 @@ test('ffi006', expect_fail_for(['extcore','optextcore']), compile_and_run, ['']) # Sometimes we end up with the wrong exit code, or get an extra # 'interrupted' message from the GHCi thread shutting down. 
-test('ffi007', - [omit_ways(['ghci']), expect_fail_for(['extcore','optextcore'])], - compile_and_run, ['']) - -test('ffi008', - [expect_fail_for(['extcore','optextcore']), - exit_code(1), - omit_ways(['ghci'])], - compile_and_run, ['']) +test('ffi007', omit_ways(['ghci']), compile_and_run, ['']) +test('ffi008', [exit_code(1), omit_ways(['ghci'])], compile_and_run, ['']) # On i386, we need -msse2 to get reliable floating point results maybe_skip = normal @@ -68,13 +56,11 @@ if config.platform.startswith('i386-'): else: maybe_skip = only_ways(['ghci']) -test('ffi009', [when(fast(), skip), expect_fail_for(['extcore','optextcore']), +test('ffi009', [when(fast(), skip), reqlib('random'), maybe_skip] ,compile_and_run, [opts]) -# Doesn't work with External Core due to __labels -test('ffi010', expect_fail_for(['extcore','optextcore']), compile_and_run, ['']) - +test('ffi010', normal, compile_and_run, ['']) test('ffi011', normal, compile_and_run, ['']) # The stdcall calling convention works on Windows, and sometimes on @@ -88,9 +74,7 @@ else: skip_if_not_windows = skip test('ffi012', skip_if_not_windows, compile_and_run, ['']) - -# Doesn't work with External Core due to __labels -test('ffi013', expect_fail_for(['extcore','optextcore']), compile_and_run, ['']) +test('ffi013', normal, compile_and_run, ['']) # threaded2 sometimes gives ffi014: Main_dDu: interrupted test('ffi014', diff --git a/testsuite/tests/gadt/T9096.hs b/testsuite/tests/gadt/T9096.hs new file mode 100644 index 0000000000..d778798d36 --- /dev/null +++ b/testsuite/tests/gadt/T9096.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE GADTs #-} + +module T9096 where + +data Foo a where + MkFoo :: (->) a (Foo a) diff --git a/testsuite/tests/gadt/all.T b/testsuite/tests/gadt/all.T index 9192891d63..52a8812377 100644 --- a/testsuite/tests/gadt/all.T +++ b/testsuite/tests/gadt/all.T @@ -122,3 +122,4 @@ test('T7321', ['$MAKE -s --no-print-directory T7321']) test('T7974', normal, compile, ['']) test('T7558', normal, compile_fail, ['']) +test('T9096', normal, compile, ['']) diff --git a/testsuite/tests/ghc-api/T4891/T4891.hs b/testsuite/tests/ghc-api/T4891/T4891.hs index ca4aff91c9..854bf62998 100644 --- a/testsuite/tests/ghc-api/T4891/T4891.hs +++ b/testsuite/tests/ghc-api/T4891/T4891.hs @@ -20,7 +20,6 @@ import Unsafe.Coerce import Control.Monad import Data.Maybe import Bag -import PrelNames (iNTERACTIVE) import Outputable import GhcMonad import X diff --git a/testsuite/tests/ghc-api/T4891/T4891.stdout b/testsuite/tests/ghc-api/T4891/T4891.stdout index 47eb152467..8ad0b4eabe 100644 --- a/testsuite/tests/ghc-api/T4891/T4891.stdout +++ b/testsuite/tests/ghc-api/T4891/T4891.stdout @@ -1,20 +1,20 @@ ===== -Name: GHC.Types.False +Name: False OccString: 'False' -DataCon: GHC.Types.False +DataCon: False ===== Name: : OccString: ':' DataCon: : ===== -Name: X.:-> +Name: :-> OccString: ':->' -DataCon: X.:-> +DataCon: :-> ===== -Name: X.:->. +Name: :->. OccString: ':->.' -DataCon: X.:->. +DataCon: :->. ===== -Name: X.:->.+ +Name: :->.+ OccString: ':->.+' -DataCon: X.:->.+ +DataCon: :->.+ diff --git a/testsuite/tests/ghc-api/T6145.hs b/testsuite/tests/ghc-api/T6145.hs index 98e8bd0219..13b80eef87 100644 --- a/testsuite/tests/ghc-api/T6145.hs +++ b/testsuite/tests/ghc-api/T6145.hs @@ -27,15 +27,15 @@ main = do l <- loadModule d let ts=typecheckedSource l -- liftIO (putStr (showSDocDebug (ppr ts))) - let fs=filterBag (isDataCon . 
snd) ts + let fs=filterBag isDataCon ts return $ not $ isEmptyBag fs removeFile "Test.hs" print ok where isDataCon (L _ (AbsBinds { abs_binds = bs })) - = not (isEmptyBag (filterBag (isDataCon . snd) bs)) + = not (isEmptyBag (filterBag isDataCon bs)) isDataCon (L l (f@FunBind {})) - | (MG (m:_) _ _) <- fun_matches f, + | (MG (m:_) _ _ _) <- fun_matches f, (L _ (c@ConPatOut{}):_)<-hsLMatchPats m, (L l _)<-pat_con c = isGoodSrcSpan l -- Check that the source location is a good one diff --git a/testsuite/tests/ghc-api/T8639_api.stdout b/testsuite/tests/ghc-api/T8639_api.stdout index 659a1ddccd..7218302dc1 100644 --- a/testsuite/tests/ghc-api/T8639_api.stdout +++ b/testsuite/tests/ghc-api/T8639_api.stdout @@ -1,2 +1,2 @@ 3 -GHC.Types.Bool +Bool diff --git a/testsuite/tests/ghc-e/should_run/Makefile b/testsuite/tests/ghc-e/should_run/Makefile index 1971004d4c..5ed1ec2e6c 100644 --- a/testsuite/tests/ghc-e/should_run/Makefile +++ b/testsuite/tests/ghc-e/should_run/Makefile @@ -30,3 +30,5 @@ T3890: T7299: '$(TEST_HC)' $(TEST_HC_OPTS) -ignore-dot-ghci -e "Control.Concurrent.threadDelay (1000 * 1000)" +T9086: + '$(TEST_HC)' $(TEST_HC_OPTS) -ignore-dot-ghci -e ":main" T9086.hs diff --git a/testsuite/tests/ghc-e/should_run/T9086.hs b/testsuite/tests/ghc-e/should_run/T9086.hs new file mode 100644 index 0000000000..a2b4ace33a --- /dev/null +++ b/testsuite/tests/ghc-e/should_run/T9086.hs @@ -0,0 +1 @@ +main = return "this should not be printed" diff --git a/testsuite/tests/ghc-e/should_run/all.T b/testsuite/tests/ghc-e/should_run/all.T index 4ab7567358..9f6491819d 100644 --- a/testsuite/tests/ghc-e/should_run/all.T +++ b/testsuite/tests/ghc-e/should_run/all.T @@ -14,3 +14,4 @@ test('T2228', test('T2636', req_interp, run_command, ['$MAKE --no-print-directory -s T2636']) test('T3890', req_interp, run_command, ['$MAKE --no-print-directory -s T3890']) test('T7299', req_interp, run_command, ['$MAKE --no-print-directory -s T7299']) +test('T9086', req_interp, run_command, ['$MAKE --no-print-directory -s T9086']) diff --git a/testsuite/tests/ghci/prog013/Bad.hs b/testsuite/tests/ghci/prog013/Bad.hs new file mode 100644 index 0000000000..2c26204e77 --- /dev/null +++ b/testsuite/tests/ghci/prog013/Bad.hs @@ -0,0 +1,3 @@ +a = 1 +b = 2 +bad = ' diff --git a/testsuite/tests/ghci/prog013/Good.hs b/testsuite/tests/ghci/prog013/Good.hs new file mode 100644 index 0000000000..a9aeef048b --- /dev/null +++ b/testsuite/tests/ghci/prog013/Good.hs @@ -0,0 +1,3 @@ +a = 1 +b = 2 +c = 3 diff --git a/testsuite/tests/ghci/prog013/prog013.T b/testsuite/tests/ghci/prog013/prog013.T new file mode 100644 index 0000000000..020bdf81c8 --- /dev/null +++ b/testsuite/tests/ghci/prog013/prog013.T @@ -0,0 +1,2 @@ +test('prog013', normal, ghci_script, ['prog013.script']) + diff --git a/testsuite/tests/ghci/prog013/prog013.script b/testsuite/tests/ghci/prog013/prog013.script new file mode 100644 index 0000000000..b9df968933 --- /dev/null +++ b/testsuite/tests/ghci/prog013/prog013.script @@ -0,0 +1,8 @@ +:set editor /bin/echo +:l Good.hs +:e +:l Bad.hs +:e +:e ./Bad.hs +:l Good.hs +:e diff --git a/testsuite/tests/ghci/prog013/prog013.stderr b/testsuite/tests/ghci/prog013/prog013.stderr new file mode 100644 index 0000000000..d8970d4d2e --- /dev/null +++ b/testsuite/tests/ghci/prog013/prog013.stderr @@ -0,0 +1,9 @@ + +Bad.hs:3:8: + lexical error in string/character literal at character '\n' + +Bad.hs:3:8: + lexical error in string/character literal at character '\n' + +Bad.hs:3:8: + lexical error in string/character literal at character '\n' diff 
--git a/testsuite/tests/ghci/prog013/prog013.stdout b/testsuite/tests/ghci/prog013/prog013.stdout new file mode 100644 index 0000000000..0d621dad77 --- /dev/null +++ b/testsuite/tests/ghci/prog013/prog013.stdout @@ -0,0 +1,4 @@ +Good.hs +Bad.hs +3 +./Bad.hs +3 +Good.hs diff --git a/testsuite/tests/ghci/scripts/Defer02.stderr b/testsuite/tests/ghci/scripts/Defer02.stderr index ed36a3eb3c..7635c8f804 100644 --- a/testsuite/tests/ghci/scripts/Defer02.stderr +++ b/testsuite/tests/ghci/scripts/Defer02.stderr @@ -84,13 +84,13 @@ the type signature for k :: Int ~ Bool => Int -> Bool In the ambiguity check for: Int ~ Bool => Int -> Bool To defer the ambiguity check to use sites, enable AllowAmbiguousTypes - In the type signature for ‘k’: k :: Int ~ Bool => Int -> Bool + In the type signature for ‘k’: k :: (Int ~ Bool) => Int -> Bool ../../typecheck/should_run/Defer01.hs:45:6: Warning: Couldn't match expected type ‘Bool’ with actual type ‘Int’ In the ambiguity check for: Int ~ Bool => Int -> Bool To defer the ambiguity check to use sites, enable AllowAmbiguousTypes - In the type signature for ‘k’: k :: Int ~ Bool => Int -> Bool + In the type signature for ‘k’: k :: (Int ~ Bool) => Int -> Bool ../../typecheck/should_run/Defer01.hs:45:6: Warning: Couldn't match type ‘Int’ with ‘Bool’ diff --git a/testsuite/tests/ghci/scripts/T2766.stdout b/testsuite/tests/ghci/scripts/T2766.stdout index f8ee42ff6a..5bcbd9e75e 100644 --- a/testsuite/tests/ghci/scripts/T2766.stdout +++ b/testsuite/tests/ghci/scripts/T2766.stdout @@ -1,3 +1,3 @@ first :: Arrow to => b `to` c -> (b, d) `to` (c, d) :: Arrow to => to b c -> to (b, d) (c, d) -first :: b~>c -> (b, d)~>(c, d) :: (b ~> c) -> (b, d) ~> (c, d) +first :: b~>c -> (b, d)~>(c, d) :: b ~> c -> (b, d) ~> (c, d) diff --git a/testsuite/tests/ghci/scripts/T4087.stdout b/testsuite/tests/ghci/scripts/T4087.stdout index 3f600bd78d..2ca08aa449 100644 --- a/testsuite/tests/ghci/scripts/T4087.stdout +++ b/testsuite/tests/ghci/scripts/T4087.stdout @@ -1,4 +1,4 @@ -type role Equal nominal nominal -data Equal a b where - Equal :: Equal a a - -- Defined at T4087.hs:5:1 +type role Equal nominal nominal
+data Equal a b where
+ Equal :: Equal b b
+ -- Defined at T4087.hs:5:1
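The updated T4087 expectation just above shows how GHCi's :info now renders a GADT: the inferred role annotation is printed first, and the constructor is shown with a freshened type variable (Equal :: Equal b b). A minimal Haskell sketch with the same shape; the module name is illustrative, and the definition is reconstructed from the printed output rather than taken from the actual T4087.hs source:

    {-# LANGUAGE GADTs #-}
    module EqualSketch where

    -- The equality hidden in the constructor's result type forces both
    -- parameters to be nominal, which is why the expected stdout above
    -- reports "type role Equal nominal nominal".
    data Equal a b where
      Equal :: Equal a a

With a definition of this shape, ":info Equal" in GHCi is expected to print the role annotation followed by the constructor, as in the new stdout file.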
diff --git a/testsuite/tests/ghci/scripts/T4175.stdout b/testsuite/tests/ghci/scripts/T4175.stdout index 1f44bd1051..29bca027ce 100644 --- a/testsuite/tests/ghci/scripts/T4175.stdout +++ b/testsuite/tests/ghci/scripts/T4175.stdout @@ -1,18 +1,18 @@ type family A a b :: * -- Defined at T4175.hs:7:1 -type instance A (B a) b -- Defined at T4175.hs:10:1 -type instance A (Maybe a) a -- Defined at T4175.hs:9:1 -type instance A Int Int -- Defined at T4175.hs:8:1 +type instance A (B a) b = () -- Defined at T4175.hs:10:1 +type instance A (Maybe a) a = a -- Defined at T4175.hs:9:1 +type instance A Int Int = () -- Defined at T4175.hs:8:1 type role B nominal data family B a -- Defined at T4175.hs:12:1 instance G B -- Defined at T4175.hs:34:10 -data instance B () -- Defined at T4175.hs:13:15 -type instance A (B a) b -- Defined at T4175.hs:10:1 +data instance B () = MkB -- Defined at T4175.hs:13:15 +type instance A (B a) b = () -- Defined at T4175.hs:10:1 class C a where type family D a b :: * -- Defined at T4175.hs:16:5 -type D () () -- Defined at T4175.hs:22:5 -type D Int () -- Defined at T4175.hs:19:5 +type instance D () () = Bool -- Defined at T4175.hs:22:5 +type instance D Int () = String -- Defined at T4175.hs:19:5 type family E a :: * where E () = Bool E Int = String @@ -25,9 +25,9 @@ instance Eq () -- Defined in ‘GHC.Classes’ instance Ord () -- Defined in ‘GHC.Classes’ instance Read () -- Defined in ‘GHC.Read’ instance Show () -- Defined in ‘GHC.Show’ -type D () () -- Defined at T4175.hs:22:5 -type D Int () -- Defined at T4175.hs:19:5 -data instance B () -- Defined at T4175.hs:13:15 +type instance D () () = Bool -- Defined at T4175.hs:22:5 +type instance D Int () = String -- Defined at T4175.hs:19:5 +data instance B () = MkB -- Defined at T4175.hs:13:15 data Maybe a = Nothing | Just a -- Defined in ‘Data.Maybe’ instance Eq a => Eq (Maybe a) -- Defined in ‘Data.Maybe’ instance Monad Maybe -- Defined in ‘Data.Maybe’ @@ -35,7 +35,7 @@ instance Functor Maybe -- Defined in ‘Data.Maybe’ instance Ord a => Ord (Maybe a) -- Defined in ‘Data.Maybe’ instance Read a => Read (Maybe a) -- Defined in ‘GHC.Read’ instance Show a => Show (Maybe a) -- Defined in ‘GHC.Show’ -type instance A (Maybe a) a -- Defined at T4175.hs:9:1 +type instance A (Maybe a) a = a -- Defined at T4175.hs:9:1 data Int = I# Int# -- Defined in ‘GHC.Types’ instance C Int -- Defined at T4175.hs:18:10 instance Bounded Int -- Defined in ‘GHC.Enum’ @@ -47,7 +47,7 @@ instance Ord Int -- Defined in ‘GHC.Classes’ instance Read Int -- Defined in ‘GHC.Read’ instance Real Int -- Defined in ‘GHC.Real’ instance Show Int -- Defined in ‘GHC.Show’ -type D Int () -- Defined at T4175.hs:19:5 -type instance A Int Int -- Defined at T4175.hs:8:1 +type instance D Int () = String -- Defined at T4175.hs:19:5 +type instance A Int Int = () -- Defined at T4175.hs:8:1 class Z a -- Defined at T4175.hs:28:1 instance F (Z a) -- Defined at T4175.hs:31:10 diff --git a/testsuite/tests/ghci/scripts/T5417.stdout b/testsuite/tests/ghci/scripts/T5417.stdout index 73d1de932d..1085a1750f 100644 --- a/testsuite/tests/ghci/scripts/T5417.stdout +++ b/testsuite/tests/ghci/scripts/T5417.stdout @@ -6,4 +6,4 @@ class C.C1 a where type role C.F nominal data family C.F a -- Defined at T5417a.hs:5:5 -data C.F (B1 a) -- Defined at T5417.hs:8:10 +data instance C.F (B1 a) = B2 a -- Defined at T5417.hs:8:10 diff --git a/testsuite/tests/ghci/scripts/T7730.script b/testsuite/tests/ghci/scripts/T7730.script new file mode 100644 index 0000000000..f1e01ee1ef --- /dev/null +++ 
b/testsuite/tests/ghci/scripts/T7730.script @@ -0,0 +1,7 @@ +:set -XPolyKinds +data A x y +:i A +:kind A +:set -XExistentialQuantification +data T a = forall a . MkT a +:info T diff --git a/testsuite/tests/ghci/scripts/T7730.stdout b/testsuite/tests/ghci/scripts/T7730.stdout new file mode 100644 index 0000000000..e3a08c19f4 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T7730.stdout @@ -0,0 +1,8 @@ +type role A phantom phantom +data A (x :: k) (y :: k1) + -- Defined at <interactive>:3:1 +A :: k -> k1 -> * +type role T phantom +data T (a :: k) where + MkT :: forall (k :: BOX) (a :: k) a1. a1 -> T a + -- Defined at <interactive>:7:1 diff --git a/testsuite/tests/ghci/scripts/T7873.stdout b/testsuite/tests/ghci/scripts/T7873.stdout index 0167fb2eba..215757bb69 100644 --- a/testsuite/tests/ghci/scripts/T7873.stdout +++ b/testsuite/tests/ghci/scripts/T7873.stdout @@ -1,5 +1,6 @@ data D1 where - MkD1 :: (forall (p :: k -> *) (a :: k). p a -> Int) -> D1 + MkD1 :: (forall (k1 :: BOX) (p :: k1 -> *) (a :: k1). p a -> Int) + -> D1 -- Defined at <interactive>:3:1 data D2 where MkD2 :: (forall (p :: k -> *) (a :: k). p a -> Int) -> D2 diff --git a/testsuite/tests/ghci/scripts/T7939.stdout b/testsuite/tests/ghci/scripts/T7939.stdout index 9a88b5c294..feb890c578 100644 --- a/testsuite/tests/ghci/scripts/T7939.stdout +++ b/testsuite/tests/ghci/scripts/T7939.stdout @@ -3,21 +3,23 @@ class Foo (a :: k) where -- Defined at T7939.hs:6:4 Bar :: k -> * -> * type family F a :: * -- Defined at T7939.hs:8:1 -type instance F Int -- Defined at T7939.hs:9:1 +type instance F Int = Bool -- Defined at T7939.hs:9:1 F :: * -> * -type family G a :: * where G Int = Bool +type family G a :: * where + G Int = Bool -- Defined at T7939.hs:11:1 G :: * -> * -type family H (a :: Bool) :: Bool where H 'False = 'True +type family H (a :: Bool) :: Bool where + H 'False = 'True -- Defined at T7939.hs:14:1 H :: Bool -> Bool type family J (a :: [k]) :: Bool where - J '[] = 'False - J (h : t) = 'True + J k '[] = 'False + forall (k :: BOX) (h :: k) (t :: [k]). J k (h : t) = 'True -- Defined at T7939.hs:17:1 J :: [k] -> Bool type family K (a :: [k]) :: Maybe k where - K '[] = 'Nothing - K (h : t) = 'Just h + K k '[] = 'Nothing + forall (k :: BOX) (h :: k) (t :: [k]). K k (h : t) = 'Just h -- Defined at T7939.hs:21:1 K :: [k] -> Maybe k diff --git a/testsuite/tests/ghci/scripts/T8674.stdout b/testsuite/tests/ghci/scripts/T8674.stdout index a4f5bbff6e..6c13176e66 100644 --- a/testsuite/tests/ghci/scripts/T8674.stdout +++ b/testsuite/tests/ghci/scripts/T8674.stdout @@ -1,5 +1,5 @@ type role Sing nominal data family Sing (a :: k) -- Defined at T8674.hs:4:1 -data instance Sing Bool -- Defined at T8674.hs:6:15 -data instance Sing a -- Defined at T8674.hs:5:15 +data instance Sing Bool = SBool -- Defined at T8674.hs:6:15 +data instance Sing a = SNil -- Defined at T8674.hs:5:15 diff --git a/testsuite/tests/ghci/scripts/T8959.script b/testsuite/tests/ghci/scripts/T8959.script new file mode 100644 index 0000000000..124b2ab2f5 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T8959.script @@ -0,0 +1,20 @@ +:set -XPatternGuards -XArrows -XRankNTypes + +:t lookup +:t undefined :: (forall a. a -> a) -> a +:t () >- () -< () >>- () -<< () +let fun foo | True <- () = () + +:set -XUnicodeSyntax + +:t lookup +:t undefined :: (forall a. a -> a) -> a +:t () >- () -< () >>- () -<< () +let fun foo | True <- () = () + +:set -XNoUnicodeSyntax + +:t lookup +:t undefined :: (forall a. 
a -> a) -> a +:t () >- () -< () >>- () -<< () +let fun foo | True <- () = () diff --git a/testsuite/tests/ghci/scripts/T8959.stderr b/testsuite/tests/ghci/scripts/T8959.stderr new file mode 100644 index 0000000000..b3995c3365 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T8959.stderr @@ -0,0 +1,36 @@ + +<interactive>:1:1: + Arrow command found where an expression was expected: + () >- () -< () >>- () -<< () + +<interactive>:7:15: + Couldn't match expected type ‘()’ with actual type ‘Bool’ + In the pattern: True + In a stmt of a pattern guard for + an equation for ‘fun’: + True <- () + In an equation for ‘fun’: fun foo | True <- () = () + +<interactive>:1:1: + Arrow command found where an expression was expected: + () ↣ () ↢ () ⤜ () ⤛ () + +<interactive>:14:15: + Couldn't match expected type ‘()’ with actual type ‘Bool’ + In the pattern: True + In a stmt of a pattern guard for + an equation for ‘fun’: + True ← () + In an equation for ‘fun’: fun foo | True ← () = () + +<interactive>:1:1: + Arrow command found where an expression was expected: + () >- () -< () >>- () -<< () + +<interactive>:21:15: + Couldn't match expected type ‘()’ with actual type ‘Bool’ + In the pattern: True + In a stmt of a pattern guard for + an equation for ‘fun’: + True <- () + In an equation for ‘fun’: fun foo | True <- () = () diff --git a/testsuite/tests/ghci/scripts/T8959.stdout b/testsuite/tests/ghci/scripts/T8959.stdout new file mode 100644 index 0000000000..4631732c55 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T8959.stdout @@ -0,0 +1,6 @@ +lookup :: Eq a => a -> [(a, b)] -> Maybe b +undefined :: (forall a. a -> a) -> a :: (forall a1. a1 -> a1) -> a +lookup ∷ Eq a ⇒ a → [(a, b)] → Maybe b +undefined :: (forall a. a -> a) -> a ∷ (∀ a1. a1 → a1) → a +lookup :: Eq a => a -> [(a, b)] -> Maybe b +undefined :: (forall a. a -> a) -> a :: (forall a1. a1 -> a1) -> a diff --git a/testsuite/tests/ghci/scripts/T8959b.hs b/testsuite/tests/ghci/scripts/T8959b.hs new file mode 100644 index 0000000000..064b2670a8 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T8959b.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE UnicodeSyntax, Arrows, RankNTypes #-} +module T8959b where + +foo :: Int -> Int +foo = () + +bar :: () +bar = proc x -> do return -< x + +baz = () :: (forall a. a -> a) -> a + diff --git a/testsuite/tests/ghci/scripts/T8959b.script b/testsuite/tests/ghci/scripts/T8959b.script new file mode 100644 index 0000000000..f3c23c97a3 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T8959b.script @@ -0,0 +1 @@ +:l T8959b.hs diff --git a/testsuite/tests/ghci/scripts/T8959b.stderr b/testsuite/tests/ghci/scripts/T8959b.stderr new file mode 100644 index 0000000000..4f1ac7a97b --- /dev/null +++ b/testsuite/tests/ghci/scripts/T8959b.stderr @@ -0,0 +1,16 @@ + +T8959b.hs:5:7: + Couldn't match expected type ‘Int → Int’ with actual type ‘()’ + In the expression: () + In an equation for ‘foo’: foo = () + +T8959b.hs:8:7: + Couldn't match expected type ‘()’ with actual type ‘t0 → m0 t0’ + In the expression: proc x -> do { return ↢ x } + In an equation for ‘bar’: bar = proc x -> do { return ↢ x } + +T8959b.hs:10:7: + Couldn't match expected type ‘(∀ a2. a2 → a2) → a1’ + with actual type ‘()’ + In the expression: () ∷ (∀ a. a -> a) -> a + In an equation for ‘baz’: baz = () ∷ (∀ a. 
a -> a) -> a diff --git a/testsuite/tests/ghci/scripts/T9086b.script b/testsuite/tests/ghci/scripts/T9086b.script new file mode 100644 index 0000000000..d60156ad02 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T9086b.script @@ -0,0 +1,2 @@ +let main = do { putStrLn "hello"; return "discarded" } +:main diff --git a/testsuite/tests/ghci/scripts/T9086b.stdout b/testsuite/tests/ghci/scripts/T9086b.stdout new file mode 100644 index 0000000000..ce01362503 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T9086b.stdout @@ -0,0 +1 @@ +hello diff --git a/testsuite/tests/ghci/scripts/T9181.script b/testsuite/tests/ghci/scripts/T9181.script new file mode 100644 index 0000000000..b2239b9556 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T9181.script @@ -0,0 +1 @@ +:browse GHC.TypeLits diff --git a/testsuite/tests/ghci/scripts/T9181.stdout b/testsuite/tests/ghci/scripts/T9181.stdout new file mode 100644 index 0000000000..e1ac00cc83 --- /dev/null +++ b/testsuite/tests/ghci/scripts/T9181.stdout @@ -0,0 +1,54 @@ +type family (GHC.TypeLits.*) (a :: GHC.TypeLits.Nat) + (b :: GHC.TypeLits.Nat) :: + GHC.TypeLits.Nat +type family (GHC.TypeLits.+) (a :: GHC.TypeLits.Nat) + (b :: GHC.TypeLits.Nat) :: + GHC.TypeLits.Nat +type family (GHC.TypeLits.-) (a :: GHC.TypeLits.Nat) + (b :: GHC.TypeLits.Nat) :: + GHC.TypeLits.Nat +type (GHC.TypeLits.<=) (x :: GHC.TypeLits.Nat) + (y :: GHC.TypeLits.Nat) = + (x GHC.TypeLits.<=? y) ~ 'True +type family (GHC.TypeLits.<=?) (a :: GHC.TypeLits.Nat) + (b :: GHC.TypeLits.Nat) :: + Bool +type family GHC.TypeLits.CmpNat (a :: GHC.TypeLits.Nat) + (b :: GHC.TypeLits.Nat) :: + Ordering +type family GHC.TypeLits.CmpSymbol (a :: GHC.TypeLits.Symbol) + (b :: GHC.TypeLits.Symbol) :: + Ordering +class GHC.TypeLits.KnownNat (n :: GHC.TypeLits.Nat) where + GHC.TypeLits.natSing :: GHC.TypeLits.SNat n +class GHC.TypeLits.KnownSymbol (n :: GHC.TypeLits.Symbol) where + GHC.TypeLits.symbolSing :: GHC.TypeLits.SSymbol n +data GHC.TypeLits.Nat +data GHC.TypeLits.SomeNat where + GHC.TypeLits.SomeNat :: GHC.TypeLits.KnownNat n => + (Data.Proxy.Proxy n) -> GHC.TypeLits.SomeNat +data GHC.TypeLits.SomeSymbol where + GHC.TypeLits.SomeSymbol :: GHC.TypeLits.KnownSymbol n => + (Data.Proxy.Proxy n) -> GHC.TypeLits.SomeSymbol +data GHC.TypeLits.Symbol +type family (GHC.TypeLits.^) (a :: GHC.TypeLits.Nat) + (b :: GHC.TypeLits.Nat) :: + GHC.TypeLits.Nat +GHC.TypeLits.natVal :: + GHC.TypeLits.KnownNat n => proxy n -> Integer +GHC.TypeLits.natVal' :: + GHC.TypeLits.KnownNat n => GHC.Prim.Proxy# n -> Integer +GHC.TypeLits.sameNat :: + (GHC.TypeLits.KnownNat a, GHC.TypeLits.KnownNat b) => + Data.Proxy.Proxy a + -> Data.Proxy.Proxy b -> Maybe (a Data.Type.Equality.:~: b) +GHC.TypeLits.sameSymbol :: + (GHC.TypeLits.KnownSymbol a, GHC.TypeLits.KnownSymbol b) => + Data.Proxy.Proxy a + -> Data.Proxy.Proxy b -> Maybe (a Data.Type.Equality.:~: b) +GHC.TypeLits.someNatVal :: Integer -> Maybe GHC.TypeLits.SomeNat +GHC.TypeLits.someSymbolVal :: String -> GHC.TypeLits.SomeSymbol +GHC.TypeLits.symbolVal :: + GHC.TypeLits.KnownSymbol n => proxy n -> String +GHC.TypeLits.symbolVal' :: + GHC.TypeLits.KnownSymbol n => GHC.Prim.Proxy# n -> String diff --git a/testsuite/tests/ghci/scripts/all.T b/testsuite/tests/ghci/scripts/all.T index aacdd262b1..d1e67ebeca 100755 --- a/testsuite/tests/ghci/scripts/all.T +++ b/testsuite/tests/ghci/scripts/all.T @@ -113,7 +113,7 @@ test('T5564', normal, ghci_script, ['T5564.script']) test('Defer02', normal, ghci_script, ['Defer02.script']) test('T5820', normal, ghci_script, ['T5820.script']) 
test('T5836', normal, ghci_script, ['T5836.script']) -test('T5979', normalise_slashes, ghci_script, ['T5979.script']) +test('T5979', [reqlib('transformers'), normalise_slashes], ghci_script, ['T5979.script']) test('T5975a', [pre_cmd('touch föøbàr1.hs'), clean_cmd('rm föøbàr1.hs')], @@ -147,6 +147,7 @@ test('T7627', normal, ghci_script, ['T7627.script']) test('T7627b', normal, ghci_script, ['T7627b.script']) test('T7586', normal, ghci_script, ['T7586.script']) test('T4175', normal, ghci_script, ['T4175.script']) +test('T7730', combined_output, ghci_script, ['T7730.script']) test('T7872', normal, ghci_script, ['T7872.script']) test('T7873', normal, ghci_script, ['T7873.script']) test('T7939', normal, ghci_script, ['T7939.script']) @@ -171,3 +172,7 @@ test('ghci059', normal, ghci_script, ['ghci059.script']) test('T8831', normal, ghci_script, ['T8831.script']) test('T8917', normal, ghci_script, ['T8917.script']) test('T8931', normal, ghci_script, ['T8931.script']) +test('T8959', normal, ghci_script, ['T8959.script']) +test('T8959b', expect_broken(8959), ghci_script, ['T8959b.script']) +test('T9181', normal, ghci_script, ['T9181.script']) +test('T9086b', normal, ghci_script, ['T9086b.script']) diff --git a/testsuite/tests/ghci/scripts/ghci025.stdout b/testsuite/tests/ghci/scripts/ghci025.stdout index 9308dd3f39..9cc88b8a07 100644 --- a/testsuite/tests/ghci/scripts/ghci025.stdout +++ b/testsuite/tests/ghci/scripts/ghci025.stdout @@ -11,7 +11,7 @@ class C a b where c4 :: a1 -> b c1 :: (C a b, N b) => a -> b c2 :: (C a b, N b, S b) => a -> b -c3 :: C a b => forall a1. a1 -> b +c3 :: C a b => forall a. a -> b c4 :: C a b => forall a1. a1 -> b -- imported via Control.Monad class Monad m => MonadPlus (m :: * -> *) where @@ -69,7 +69,7 @@ class C a b where c4 :: a1 -> b c1 :: (C a b, N b) => a -> b c2 :: (C a b, N b, S b) => a -> b -c3 :: C a b => forall a1. a1 -> b +c3 :: C a b => forall a. a -> b c4 :: C a b => forall a1. a1 -> b :browse! T -- with -fprint-explicit-foralls -- defined locally @@ -83,7 +83,7 @@ class C a b where c4 :: forall a1. a1 -> b c1 :: forall a b. (C a b, N b) => a -> b c2 :: forall a b. (C a b, N b, S b) => a -> b -c3 :: forall a b. C a b => forall a1. a1 -> b +c3 :: forall a b. C a b => forall a. a -> b c4 :: forall a b. C a b => forall a1. a1 -> b -- test :browse! <target> relative to different contexts :browse! Ghci025C -- from *Ghci025C> diff --git a/testsuite/tests/ghci/scripts/ghci059.stdout b/testsuite/tests/ghci/scripts/ghci059.stdout index 6b2c8f886e..ffc893f363 100644 --- a/testsuite/tests/ghci/scripts/ghci059.stdout +++ b/testsuite/tests/ghci/scripts/ghci059.stdout @@ -1,4 +1,6 @@ type role Coercible representational representational class Coercible (a :: k) (b :: k) -- Defined in ‘GHC.Types’ -coerce :: Coercible a b => a -> b -- Defined in ‘GHC.Prim’ +coerce :: + forall (k :: BOX) (a :: k) (b :: k). 
Coercible a b => a -> b + -- Defined in ‘GHC.Prim’ diff --git a/testsuite/tests/haddock/haddock_examples/haddock.Test.stderr b/testsuite/tests/haddock/haddock_examples/haddock.Test.stderr index fa92d3dd92..7ce82d0067 100644 --- a/testsuite/tests/haddock/haddock_examples/haddock.Test.stderr +++ b/testsuite/tests/haddock/haddock_examples/haddock.Test.stderr @@ -85,7 +85,7 @@ newtype N5 a b newtype N6 a b = docs on the constructor only N6 {n6 :: a b} <document comment> newtype N7 a b = The 'N7' constructor N7 {n7 :: a b} -class D a => C a where +class (D a) => C a where a :: IO a b :: [a] c :: a diff --git a/testsuite/tests/haddock/should_compile_flag_haddock/haddockA023.stderr b/testsuite/tests/haddock/should_compile_flag_haddock/haddockA023.stderr index 6d803bb440..2bb1a178e0 100644 --- a/testsuite/tests/haddock/should_compile_flag_haddock/haddockA023.stderr +++ b/testsuite/tests/haddock/should_compile_flag_haddock/haddockA023.stderr @@ -1,7 +1,7 @@ ==================== Parser ==================== module ShouldCompile where -test :: Eq a => [a] doc1 -> [a] doc2 -> [a] doc3 +test :: (Eq a) => [a] doc1 -> [a] doc2 -> [a] doc3 test xs ys = xs diff --git a/testsuite/tests/haddock/should_compile_flag_haddock/haddockA026.stderr b/testsuite/tests/haddock/should_compile_flag_haddock/haddockA026.stderr index 3e3cb12d10..4a57879c5c 100644 --- a/testsuite/tests/haddock/should_compile_flag_haddock/haddockA026.stderr +++ b/testsuite/tests/haddock/should_compile_flag_haddock/haddockA026.stderr @@ -1,7 +1,7 @@ ==================== Parser ==================== module ShouldCompile where -test :: Eq a => [a] doc1 -> forall b. [b] doc2 -> [a] doc3 +test :: (Eq a) => [a] doc1 -> forall b. [b] doc2 -> [a] doc3 test xs ys = xs diff --git a/testsuite/tests/haddock/should_compile_flag_haddock/haddockA027.stderr b/testsuite/tests/haddock/should_compile_flag_haddock/haddockA027.stderr index 10e88d2bfc..d1cb709c55 100644 --- a/testsuite/tests/haddock/should_compile_flag_haddock/haddockA027.stderr +++ b/testsuite/tests/haddock/should_compile_flag_haddock/haddockA027.stderr @@ -3,8 +3,8 @@ module ShouldCompile where test :: [a] doc1 - -> forall b. Ord b => - [b] doc2 -> forall c. Num c => [c] doc3 -> [a] + -> forall b. (Ord b) => + [b] doc2 -> forall c. (Num c) => [c] doc3 -> [a] test xs ys zs = xs diff --git a/testsuite/tests/indexed-types/should_compile/T3017.stderr b/testsuite/tests/indexed-types/should_compile/T3017.stderr index 20190471ae..a6c744a177 100644 --- a/testsuite/tests/indexed-types/should_compile/T3017.stderr +++ b/testsuite/tests/indexed-types/should_compile/T3017.stderr @@ -3,24 +3,17 @@ TYPE SIGNATURES test2 :: forall c t t1. 
(Coll c, Num t1, Num t, Elem c ~ (t, t1)) => c -> c TYPE CONSTRUCTORS - Coll :: * -> Constraint - class Coll c - Roles: [nominal] - RecFlag NonRecursive - type family Elem c :: * (open) - empty :: c insert :: Elem c -> c -> c - ListColl :: * -> * - data ListColl a - No C type associated - Roles: [representational] - RecFlag NonRecursive, Promotable - = L :: [a] -> ListColl a Stricts: _ - FamilyInstance: none + class Coll c where + type family Elem c :: * open + empty :: c + insert :: Elem c -> c -> c + data ListColl a = L [a] + Promotable COERCION AXIOMS axiom Foo.TFCo:R:ElemListColl :: Elem (ListColl a) = a INSTANCES instance Coll (ListColl a) -- Defined at T3017.hs:12:11 FAMILY INSTANCES - type Elem (ListColl a) -- Defined at T3017.hs:13:4 + type Elem (ListColl a) Dependent modules: [] Dependent packages: [base, ghc-prim, integer-gmp] diff --git a/testsuite/tests/indexed-types/should_compile/T9085.hs b/testsuite/tests/indexed-types/should_compile/T9085.hs new file mode 100644 index 0000000000..13c9321262 --- /dev/null +++ b/testsuite/tests/indexed-types/should_compile/T9085.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TypeFamilies #-} + +module T9085 where + +type family F a where + F a = Int + F Bool = Bool diff --git a/testsuite/tests/indexed-types/should_compile/T9085.stderr b/testsuite/tests/indexed-types/should_compile/T9085.stderr new file mode 100644 index 0000000000..ee968e0d79 --- /dev/null +++ b/testsuite/tests/indexed-types/should_compile/T9085.stderr @@ -0,0 +1,4 @@ + +T9085.hs:7:3: Warning: + Overlapped type family instance equation: + F Bool = Bool diff --git a/testsuite/tests/indexed-types/should_compile/T9316.hs b/testsuite/tests/indexed-types/should_compile/T9316.hs new file mode 100644 index 0000000000..b5dfca6a94 --- /dev/null +++ b/testsuite/tests/indexed-types/should_compile/T9316.hs @@ -0,0 +1,87 @@ +{-# LANGUAGE ConstraintKinds #-} +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE FlexibleInstances #-} + +module SingletonsBug where + +import Control.Applicative +import Data.Traversable (for) +import GHC.Exts( Constraint ) + +----------------------------------- +-- From 'constraints' library +-- import Data.Constraint (Dict(..)) +data Dict :: Constraint -> * where + Dict :: a => Dict a + +----------------------------------- +-- From 'singletons' library +-- import Data.Singletons hiding( withSomeSing ) + +class SingI (a :: k) where + -- | Produce the singleton explicitly. You will likely need the @ScopedTypeVariables@ + -- extension to use this method the way you want. + sing :: Sing a + +data family Sing (a :: k) + +data KProxy (a :: *) = KProxy + +data SomeSing (kproxy :: KProxy k) where + SomeSing :: Sing (a :: k) -> SomeSing ('KProxy :: KProxy k) + +-- SingKind :: forall k. KProxy k -> Constraint +class (kparam ~ 'KProxy) => SingKind (kparam :: KProxy k) where + -- | Get a base type from a proxy for the promoted kind. For example, + -- @DemoteRep ('KProxy :: KProxy Bool)@ will be the type @Bool@. + type DemoteRep kparam :: * + + -- | Convert a singleton to its unrefined version. + fromSing :: Sing (a :: k) -> DemoteRep kparam + + -- | Convert an unrefined type to an existentially-quantified singleton type. + toSing :: DemoteRep kparam -> SomeSing kparam + +withSomeSing :: SingKind ('KProxy :: KProxy k) + => DemoteRep ('KProxy :: KProxy k) + -> (forall (a :: k). 
Sing a -> r) + -> r +withSomeSing = error "urk" + +----------------------------------- + +data SubscriptionChannel = BookingsChannel +type BookingsChannelSym0 = BookingsChannel +data instance Sing (z_a5I7 :: SubscriptionChannel) where + SBookingsChannel :: Sing BookingsChannel + +instance SingKind ('KProxy :: KProxy SubscriptionChannel) where + type DemoteRep ('KProxy :: KProxy SubscriptionChannel) = SubscriptionChannel + fromSing SBookingsChannel = BookingsChannel + toSing BookingsChannel = SomeSing SBookingsChannel + +instance SingI BookingsChannel where + sing = SBookingsChannel + +type family T (c :: SubscriptionChannel) :: * +type instance T 'BookingsChannel = Bool + +witnessC :: Sing channel -> Dict (Show (T channel), SingI channel) +witnessC SBookingsChannel = Dict + +forAllSubscriptionChannels + :: forall m r. (Applicative m) + => (forall channel. (SingI channel, Show (T channel)) => Sing channel -> m r) + -> m r +forAllSubscriptionChannels f = + withSomeSing BookingsChannel $ \(sChannel) -> + case witnessC sChannel of + Dict -> f sChannel + diff --git a/testsuite/tests/indexed-types/should_compile/all.T b/testsuite/tests/indexed-types/should_compile/all.T index 5f304463c6..016444a138 100644 --- a/testsuite/tests/indexed-types/should_compile/all.T +++ b/testsuite/tests/indexed-types/should_compile/all.T @@ -243,3 +243,5 @@ test('T8889', normal, compile, ['']) test('T8913', normal, compile, ['']) test('T8978', normal, compile, ['']) test('T8979', normal, compile, ['']) +test('T9085', normal, compile, ['']) +test('T9316', normal, compile, ['']) diff --git a/testsuite/tests/indexed-types/should_fail/ClosedFam3.stderr b/testsuite/tests/indexed-types/should_fail/ClosedFam3.stderr index 107f5ffec3..04435ba962 100644 --- a/testsuite/tests/indexed-types/should_fail/ClosedFam3.stderr +++ b/testsuite/tests/indexed-types/should_fail/ClosedFam3.stderr @@ -5,7 +5,8 @@ ClosedFam3.hs-boot:5:1: Main module: type family Foo a :: * where Foo Int = Bool Foo Double = Char - Boot file: type family Foo a :: * where Foo Int = Bool + Boot file: type family Foo a :: * where + Foo Int = Bool ClosedFam3.hs-boot:8:1: Type constructor ‘Bar’ has conflicting definitions in the module @@ -20,5 +21,7 @@ ClosedFam3.hs-boot:8:1: ClosedFam3.hs-boot:12:1: Type constructor ‘Baz’ has conflicting definitions in the module and its hs-boot file - Main module: type family Baz a :: * where Baz Int = Bool - Boot file: type family Baz (a :: k) :: * where Baz Int = Bool + Main module: type family Baz a :: * where + Baz Int = Bool + Boot file: type family Baz (a :: k) :: * where + Baz * Int = Bool diff --git a/testsuite/tests/indexed-types/should_fail/NoMatchErr.stderr b/testsuite/tests/indexed-types/should_fail/NoMatchErr.stderr index 1ff540979b..d3193d5f30 100644 --- a/testsuite/tests/indexed-types/should_fail/NoMatchErr.stderr +++ b/testsuite/tests/indexed-types/should_fail/NoMatchErr.stderr @@ -11,4 +11,4 @@ NoMatchErr.hs:19:7: In the ambiguity check for: forall d a. 
Fun d => Memo d a -> Memo d a To defer the ambiguity check to use sites, enable AllowAmbiguousTypes - In the type signature for ‘f’: f :: Fun d => Memo d a -> Memo d a + In the type signature for ‘f’: f :: (Fun d) => Memo d a -> Memo d a diff --git a/testsuite/tests/indexed-types/should_fail/Overlap4.stderr b/testsuite/tests/indexed-types/should_fail/Overlap4.stderr index d64036c4bc..d1622335d8 100644 --- a/testsuite/tests/indexed-types/should_fail/Overlap4.stderr +++ b/testsuite/tests/indexed-types/should_fail/Overlap4.stderr @@ -1,5 +1,4 @@ Overlap4.hs:7:3: Number of parameters must match family declaration; expected 2 - In the equations for closed type family ‘F’ In the type family declaration for ‘F’ diff --git a/testsuite/tests/indexed-types/should_fail/Overlap5.stderr b/testsuite/tests/indexed-types/should_fail/Overlap5.stderr index 3adf2f3c3e..a889145036 100644 --- a/testsuite/tests/indexed-types/should_fail/Overlap5.stderr +++ b/testsuite/tests/indexed-types/should_fail/Overlap5.stderr @@ -1,5 +1,6 @@ Overlap5.hs:8:3: - Mismatched type names in closed type family declaration. - First name was F; this one is G - In the family declaration for ‘F’ + Mismatched type name in type family instance. + Expected: F + Actual: G + In the type family declaration for ‘F’ diff --git a/testsuite/tests/indexed-types/should_fail/SimpleFail1a.stderr b/testsuite/tests/indexed-types/should_fail/SimpleFail1a.stderr index 8318927522..f57af3908b 100644 --- a/testsuite/tests/indexed-types/should_fail/SimpleFail1a.stderr +++ b/testsuite/tests/indexed-types/should_fail/SimpleFail1a.stderr @@ -1,4 +1,4 @@ SimpleFail1a.hs:4:1: - Couldn't match kind ‘* -> *’ against ‘*’ + Number of parameters must match family declaration; expected 2 In the data instance declaration for ‘T1’ diff --git a/testsuite/tests/indexed-types/should_fail/SimpleFail1b.stderr b/testsuite/tests/indexed-types/should_fail/SimpleFail1b.stderr index e1059a430b..3ecd31a003 100644 --- a/testsuite/tests/indexed-types/should_fail/SimpleFail1b.stderr +++ b/testsuite/tests/indexed-types/should_fail/SimpleFail1b.stderr @@ -1,4 +1,4 @@ SimpleFail1b.hs:4:1: - Number of parameters must match family declaration; expected no more than 2 + Number of parameters must match family declaration; expected 2 In the data instance declaration for ‘T1’ diff --git a/testsuite/tests/indexed-types/should_fail/SimpleFail4.stderr b/testsuite/tests/indexed-types/should_fail/SimpleFail4.stderr index 91a3eb282a..8c4c743a56 100644 --- a/testsuite/tests/indexed-types/should_fail/SimpleFail4.stderr +++ b/testsuite/tests/indexed-types/should_fail/SimpleFail4.stderr @@ -1,6 +1,6 @@ -SimpleFail4.hs:8:8: - Type indexes must match class instance head - Found ‘Int’ but expected ‘a’ - In the type synonym instance default declaration for ‘S2’ - In the class declaration for ‘C2’ +SimpleFail4.hs:8:11: + Unexpected type ‘Int’ + In the default declaration for ‘S2’ + A default declaration should have form + default S2 a = ... 
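The reworded SimpleFail4 message just above says that an associated type default must be headed by the class's own type variables ("default S2 a = ..."), so instantiating a variable (here to Int) is rejected with "Unexpected type ‘Int’". A hedged sketch of the accepted form, assuming only TypeFamilies; the module name is illustrative and the class body is not taken from the test:

    {-# LANGUAGE TypeFamilies #-}
    module AssocDefaultSketch where

    class C2 a where
      type S2 a
      -- Accepted: the default's left-hand side repeats the bare class
      -- variable, which is the form the new error message asks for.
      type S2 a = [a]

    -- Rejected (the SimpleFail4 situation, kept as a comment so this
    -- sketch compiles): a default that instantiates the class variable.
    --   type S2 Int = Bool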
diff --git a/testsuite/tests/indexed-types/should_fail/T1897b.stderr b/testsuite/tests/indexed-types/should_fail/T1897b.stderr index 06d81a146b..6372bd9fba 100644 --- a/testsuite/tests/indexed-types/should_fail/T1897b.stderr +++ b/testsuite/tests/indexed-types/should_fail/T1897b.stderr @@ -1,14 +1,14 @@ - -T1897b.hs:16:1: - Could not deduce (Depend a0 ~ Depend a) - from the context (Bug a) - bound by the inferred type for ‘isValid’: - Bug a => [Depend a] -> Bool - at T1897b.hs:16:1-41 - NB: ‘Depend’ is a type function, and may not be injective - The type variable ‘a0’ is ambiguous - Expected type: [Depend a] -> Bool - Actual type: [Depend a0] -> Bool - When checking that ‘isValid’ - has the inferred type ‘forall a. Bug a => [Depend a] -> Bool’ - Probable cause: the inferred type is ambiguous +
+T1897b.hs:16:1:
+ Could not deduce (Depend a0 ~ Depend a)
+ from the context (Bug a)
+ bound by the inferred type for ‘isValid’:
+ Bug a => [Depend a] -> Bool
+ at T1897b.hs:16:1-41
+ NB: ‘Depend’ is a type function, and may not be injective
+ The type variable ‘a0’ is ambiguous
+ Expected type: [Depend a] -> Bool
+ Actual type: [Depend a0] -> Bool
+ When checking that ‘isValid’ has the inferred type
+ isValid :: forall a. Bug a => [Depend a] -> Bool
+ Probable cause: the inferred type is ambiguous
diff --git a/testsuite/tests/indexed-types/should_fail/T1900.stderr b/testsuite/tests/indexed-types/should_fail/T1900.stderr index 435d5e8312..d44b4ed210 100644 --- a/testsuite/tests/indexed-types/should_fail/T1900.stderr +++ b/testsuite/tests/indexed-types/should_fail/T1900.stderr @@ -11,4 +11,4 @@ T1900.hs:13:10: In the ambiguity check for: forall s. Bug s => Depend s -> Bool To defer the ambiguity check to use sites, enable AllowAmbiguousTypes In the type signature for ‘check’: - check :: Bug s => Depend s -> Bool + check :: (Bug s) => Depend s -> Bool diff --git a/testsuite/tests/indexed-types/should_fail/T2693.stderr b/testsuite/tests/indexed-types/should_fail/T2693.stderr index 58b27696ea..b613ab7ab5 100644 --- a/testsuite/tests/indexed-types/should_fail/T2693.stderr +++ b/testsuite/tests/indexed-types/should_fail/T2693.stderr @@ -1,37 +1,38 @@ - -T2693.hs:11:7: - Couldn't match expected type ‘TFn a’ with actual type ‘TFn a0’ - NB: ‘TFn’ is a type function, and may not be injective - The type variable ‘a0’ is ambiguous - When checking that ‘x’ has the inferred type ‘forall a. TFn a’ - Probable cause: the inferred type is ambiguous - In the expression: - do { let Just x = ...; - let n = fst x + fst x; - return () } - In an equation for ‘f’: - f = do { let Just x = ...; - let n = ...; - return () } - -T2693.hs:19:15: - Couldn't match expected type ‘(a2, b0)’ with actual type ‘TFn a3’ - The type variables ‘a2’, ‘b0’, ‘a3’ are ambiguous - Relevant bindings include n :: a2 (bound at T2693.hs:19:7) - In the first argument of ‘fst’, namely ‘x’ - In the first argument of ‘(+)’, namely ‘fst x’ - -T2693.hs:19:23: - Couldn't match expected type ‘(a4, a2)’ with actual type ‘TFn a5’ - The type variables ‘a2’, ‘a4’, ‘a5’ are ambiguous - Relevant bindings include n :: a2 (bound at T2693.hs:19:7) - In the first argument of ‘snd’, namely ‘x’ - In the second argument of ‘(+)’, namely ‘snd x’ - -T2693.hs:29:20: - Couldn't match type ‘TFn a0’ with ‘PVR a1’ - The type variables ‘a0’, ‘a1’ are ambiguous - Expected type: () -> Maybe (PVR a1) - Actual type: () -> Maybe (TFn a0) - In the first argument of ‘mapM’, namely ‘g’ - In a stmt of a 'do' block: pvs <- mapM g undefined +
+T2693.hs:11:7:
+ Couldn't match expected type ‘TFn a’ with actual type ‘TFn a0’
+ NB: ‘TFn’ is a type function, and may not be injective
+ The type variable ‘a0’ is ambiguous
+ When checking that ‘x’ has the inferred type
+ x :: forall a. TFn a
+ Probable cause: the inferred type is ambiguous
+ In the expression:
+ do { let Just x = ...;
+ let n = fst x + fst x;
+ return () }
+ In an equation for ‘f’:
+ f = do { let Just x = ...;
+ let n = ...;
+ return () }
+
+T2693.hs:19:15:
+ Couldn't match expected type ‘(a2, b0)’ with actual type ‘TFn a3’
+ The type variables ‘a2’, ‘b0’, ‘a3’ are ambiguous
+ Relevant bindings include n :: a2 (bound at T2693.hs:19:7)
+ In the first argument of ‘fst’, namely ‘x’
+ In the first argument of ‘(+)’, namely ‘fst x’
+
+T2693.hs:19:23:
+ Couldn't match expected type ‘(a4, a2)’ with actual type ‘TFn a5’
+ The type variables ‘a2’, ‘a4’, ‘a5’ are ambiguous
+ Relevant bindings include n :: a2 (bound at T2693.hs:19:7)
+ In the first argument of ‘snd’, namely ‘x’
+ In the second argument of ‘(+)’, namely ‘snd x’
+
+T2693.hs:29:20:
+ Couldn't match type ‘TFn a0’ with ‘PVR a1’
+ The type variables ‘a0’, ‘a1’ are ambiguous
+ Expected type: () -> Maybe (PVR a1)
+ Actual type: () -> Maybe (TFn a0)
+ In the first argument of ‘mapM’, namely ‘g’
+ In a stmt of a 'do' block: pvs <- mapM g undefined
diff --git a/testsuite/tests/indexed-types/should_fail/T2888.stderr b/testsuite/tests/indexed-types/should_fail/T2888.stderr new file mode 100644 index 0000000000..3d2c221703 --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T2888.stderr @@ -0,0 +1,5 @@ + +T2888.hs:6:1: + The associated type ‘D’ + mentions none of the type or kind variables of the class ‘C w’ + In the class declaration for ‘C’ diff --git a/testsuite/tests/indexed-types/should_fail/T5934.stderr b/testsuite/tests/indexed-types/should_fail/T5934.stderr index 85ab1a1804..67a468057c 100644 --- a/testsuite/tests/indexed-types/should_fail/T5934.stderr +++ b/testsuite/tests/indexed-types/should_fail/T5934.stderr @@ -1,8 +1,8 @@ T5934.hs:12:7:
- Cannot instantiate unification variable ‘a0’
- with a type involving foralls:
- (forall s. Gen (PrimState (ST s))) -> Int
- Perhaps you want ImpredicativeTypes
+ Couldn't match type ‘Integer’
+ with ‘(forall s. Gen (PrimState (ST s))) -> Int’
+ Expected type: Integer -> (forall s. GenST s) -> Int
+ Actual type: Integer -> Integer
In the expression: 0
In an equation for ‘run’: run = 0
diff --git a/testsuite/tests/indexed-types/should_fail/T7786.stderr b/testsuite/tests/indexed-types/should_fail/T7786.stderr index 9652643802..b081ed69b4 100644 --- a/testsuite/tests/indexed-types/should_fail/T7786.stderr +++ b/testsuite/tests/indexed-types/should_fail/T7786.stderr @@ -3,7 +3,7 @@ T7786.hs:86:22: Couldn't match type ‘xxx’ with ‘'Empty’ Inaccessible code in a pattern with constructor - Nil :: Sing 'Empty, + Nil :: forall (k :: BOX). Sing 'Empty, in a pattern binding in 'do' block In the pattern: Nil diff --git a/testsuite/tests/indexed-types/should_fail/T9036.hs b/testsuite/tests/indexed-types/should_fail/T9036.hs new file mode 100644 index 0000000000..550adb4b0a --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9036.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE TypeFamilies #-} + + +module T9036 where + +class UncurryM t where + type GetMonad t :: * -> * + +class Curry a b where + type Curried a b :: * + +gSimple :: String -> String -> [String] +gSimple = simpleLogger (return ()) + +simpleLogger :: Maybe (GetMonad t after) -> t `Curried` [t] +simpleLogger _ _ = undefined diff --git a/testsuite/tests/indexed-types/should_fail/T9036.stderr b/testsuite/tests/indexed-types/should_fail/T9036.stderr new file mode 100644 index 0000000000..2df53c712c --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9036.stderr @@ -0,0 +1,12 @@ + +T9036.hs:17:17: + Couldn't match type ‘GetMonad t0’ with ‘GetMonad t’ + NB: ‘GetMonad’ is a type function, and may not be injective + The type variable ‘t0’ is ambiguous + Expected type: Maybe (GetMonad t after) -> Curried t [t] + Actual type: Maybe (GetMonad t0 after) -> Curried t0 [t0] + In the ambiguity check for: + forall t after. 
Maybe (GetMonad t after) -> Curried t [t] + To defer the ambiguity check to use sites, enable AllowAmbiguousTypes + In the type signature for ‘simpleLogger’: + simpleLogger :: Maybe (GetMonad t after) -> t `Curried` [t] diff --git a/testsuite/tests/indexed-types/should_fail/T9097.hs b/testsuite/tests/indexed-types/should_fail/T9097.hs new file mode 100644 index 0000000000..b18b90b5f3 --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9097.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE TypeFamilies, DataKinds #-} + +module T9097 where + +import GHC.Exts + +type family Foo x where + Foo True = False + Foo False = False + Foo Any = True diff --git a/testsuite/tests/indexed-types/should_fail/T9097.stderr b/testsuite/tests/indexed-types/should_fail/T9097.stderr new file mode 100644 index 0000000000..02dfc33068 --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9097.stderr @@ -0,0 +1,5 @@ + +T9097.hs:10:3: + Illegal type synonym family application in instance: Any + In the equations for closed type family ‘Foo’ + In the type family declaration for ‘Foo’ diff --git a/testsuite/tests/indexed-types/should_fail/T9160.hs b/testsuite/tests/indexed-types/should_fail/T9160.hs new file mode 100644 index 0000000000..64ae3b9f9c --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9160.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE FlexibleInstances, TemplateHaskell, PolyKinds, TypeFamilies #-} + +module T9160 where +import Language.Haskell.TH + +$( do { cls_nm <- newName "C" + ; a_nm <- newName "a" + ; k_nm <- newName "k" + ; f_nm <- newName "F" + ; return [ClassD [] cls_nm [KindedTV a_nm (VarT k_nm)] [] + [FamilyD TypeFam f_nm [] (Just (VarT k_nm))]] } ) + +-- Splices in: +-- class C (a :: k) where +-- type F :: k + +instance C (a :: *) where + type F = Maybe -- Should be illegal + diff --git a/testsuite/tests/indexed-types/should_fail/T9160.stderr b/testsuite/tests/indexed-types/should_fail/T9160.stderr new file mode 100644 index 0000000000..7a476d4f42 --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9160.stderr @@ -0,0 +1,11 @@ +Loading package ghc-prim ... linking ... done. +Loading package integer-gmp ... linking ... done. +Loading package base ... linking ... done. +Loading package pretty-1.1.1.1 ... linking ... done. +Loading package template-haskell ... linking ... done. 
+ +T9160.hs:18:8: + Type indexes must match class instance head + Found ‘* -> *’ but expected ‘*’ + In the type instance declaration for ‘F’ + In the instance declaration for ‘C (a :: *)’ diff --git a/testsuite/tests/indexed-types/should_fail/T9167.hs b/testsuite/tests/indexed-types/should_fail/T9167.hs new file mode 100644 index 0000000000..2d2f555011 --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9167.hs @@ -0,0 +1,6 @@ + {-# LANGUAGE TypeFamilies #-} + +module T9167 where + +class C a where + type F b diff --git a/testsuite/tests/indexed-types/should_fail/T9167.stderr b/testsuite/tests/indexed-types/should_fail/T9167.stderr new file mode 100644 index 0000000000..1bd21aed5e --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9167.stderr @@ -0,0 +1,5 @@ + +T9167.hs:5:1: + The associated type ‘F’ + mentions none of the type or kind variables of the class ‘C a’ + In the class declaration for ‘C’ diff --git a/testsuite/tests/indexed-types/should_fail/T9171.hs b/testsuite/tests/indexed-types/should_fail/T9171.hs new file mode 100644 index 0000000000..72a2d707b0 --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9171.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE PolyKinds, TypeFamilies #-} + +module T9171 where +data Base + +type family GetParam (p::k1) (t::k2) :: k3 + +type instance GetParam Base t = t + +foo = undefined :: GetParam Base (GetParam Base Int) diff --git a/testsuite/tests/indexed-types/should_fail/T9171.stderr b/testsuite/tests/indexed-types/should_fail/T9171.stderr new file mode 100644 index 0000000000..fe49925118 --- /dev/null +++ b/testsuite/tests/indexed-types/should_fail/T9171.stderr @@ -0,0 +1,22 @@ +
+T9171.hs:10:1:
+ Couldn't match expected type ‘GetParam Base (GetParam Base Int)’
+ with actual type ‘GetParam Base (GetParam Base Int)’
+ NB: ‘GetParam’ is a type function, and may not be injective
+ The kind variable ‘k0’ is ambiguous
+ Use -fprint-explicit-kinds to see the kind arguments
+ When checking that ‘foo’ has the inferred type
+ foo :: forall (k :: BOX). GetParam Base (GetParam Base Int)
+ Probable cause: the inferred type is ambiguous
+
+T9171.hs:10:20:
+ Couldn't match expected type ‘GetParam Base (GetParam Base Int)’
+ with actual type ‘GetParam Base (GetParam Base Int)’
+ NB: ‘GetParam’ is a type function, and may not be injective
+ The kind variable ‘k0’ is ambiguous
+ Use -fprint-explicit-kinds to see the kind arguments
+ In the ambiguity check for:
+ forall (k :: BOX). GetParam Base (GetParam Base Int)
+ To defer the ambiguity check to use sites, enable AllowAmbiguousTypes
+ In an expression type signature: GetParam Base (GetParam Base Int)
+ In the expression: undefined :: GetParam Base (GetParam Base Int)
diff --git a/testsuite/tests/indexed-types/should_fail/all.T b/testsuite/tests/indexed-types/should_fail/all.T index 54a33cd83d..2c5ae68859 100644 --- a/testsuite/tests/indexed-types/should_fail/all.T +++ b/testsuite/tests/indexed-types/should_fail/all.T @@ -47,7 +47,7 @@ test('T2157', normal, compile_fail, ['']) test('T2203a', normal, compile_fail, ['']) test('T2627b', normal, compile_fail, ['']) test('T2693', normal, compile_fail, ['']) -test('T2888', normal, compile, ['']) +test('T2888', normal, compile_fail, ['']) test('T3092', normal, compile_fail, ['']) test('NoMatchErr', normal, compile_fail, ['']) test('T2677', normal, compile_fail, ['']) @@ -119,4 +119,8 @@ test('T8129', test('T8368', normal, compile_fail, ['']) test('T8368a', normal, compile_fail, ['']) test('T8518', normal, compile_fail, ['']) - +test('T9036', normal, compile_fail, ['']) +test('T9167', normal, compile_fail, ['']) +test('T9171', normal, compile_fail, ['']) +test('T9097', normal, compile_fail, ['']) +test('T9160', normal, compile_fail, ['']) diff --git a/testsuite/tests/module/T9061.hs b/testsuite/tests/module/T9061.hs new file mode 100644 index 0000000000..1417dcad75 --- /dev/null +++ b/testsuite/tests/module/T9061.hs @@ -0,0 +1,6 @@ +{-# OPTIONS_GHC -fwarn-unused-imports #-} +module T9061 where + +import Prelude hiding (log) + +f = log where log = () diff --git a/testsuite/tests/module/all.T b/testsuite/tests/module/all.T index 8eaa1d5217..926cbb5448 100644 --- a/testsuite/tests/module/all.T +++ b/testsuite/tests/module/all.T @@ -334,3 +334,4 @@ test('T414', normal, compile_fail, ['']) test('T414a', normal, compile, ['']) test('T414b', normal, compile, ['']) test('T3776', normal, compile, ['']) +test('T9061', normal, compile, ['']) diff --git a/testsuite/tests/module/mod132.stderr b/testsuite/tests/module/mod132.stderr index 2735a73dad..0a9d25cda8 100644 --- a/testsuite/tests/module/mod132.stderr +++ b/testsuite/tests/module/mod132.stderr @@ -1,2 +1,4 @@ -mod132.hs:6:7: Not in scope: data constructor ‘Foo’ +mod132.hs:6:7: + Not in scope: data constructor ‘Foo’ + Perhaps you meant variable ‘foo’ (line 6) diff --git a/testsuite/tests/module/mod134.stderr b/testsuite/tests/module/mod134.stderr index e2171a8c6d..d6e6f0e30b 100644 --- a/testsuite/tests/module/mod134.stderr +++ b/testsuite/tests/module/mod134.stderr @@ -4,4 +4,4 @@ mod134.hs:6:19: Perhaps you meant one of these: ‘Prelude.read’ (imported from Prelude), ‘Prelude.reads’ (imported from Prelude), - ‘Prelude.snd’ (imported from Prelude) + data constructor ‘Prelude.Left’ (imported from Prelude) diff --git a/testsuite/tests/module/mod73.stderr b/testsuite/tests/module/mod73.stderr index 432f61b549..576b0e3a86 100644 --- a/testsuite/tests/module/mod73.stderr +++ b/testsuite/tests/module/mod73.stderr @@ -2,6 +2,6 @@ mod73.hs:3:7: Not in scope: ‘Prelude.g’ Perhaps you meant one of these: - ‘Prelude.id’ (imported from Prelude), - ‘Prelude.log’ (imported from Prelude), - ‘Prelude.pi’ (imported from Prelude) + data constructor ‘Prelude.LT’ (imported from Prelude), + data constructor ‘Prelude.EQ’ (imported from Prelude), + data constructor ‘Prelude.GT’ (imported from Prelude) diff --git a/testsuite/tests/overloadedlists/should_run/overloadedlistsrun02.hs b/testsuite/tests/overloadedlists/should_run/overloadedlistsrun02.hs index 8567db3566..a6b9bb8ede 100644 --- a/testsuite/tests/overloadedlists/should_run/overloadedlistsrun02.hs +++ b/testsuite/tests/overloadedlists/should_run/overloadedlistsrun02.hs @@ -6,8 +6,3 @@ import GHC.Exts main = do print ([] :: (S.Set 
Int)) print (['a','b','c'] :: (S.Set Char)) print (['a','c'..'g'] :: (S.Set Char)) - -instance Ord a => IsList (S.Set a) where - type (Item (S.Set a)) = a - fromList = S.fromList - toList = S.toList diff --git a/testsuite/tests/overloadedlists/should_run/overloadedlistsrun04.hs b/testsuite/tests/overloadedlists/should_run/overloadedlistsrun04.hs index 478d8d2c22..1111f93427 100644 --- a/testsuite/tests/overloadedlists/should_run/overloadedlistsrun04.hs +++ b/testsuite/tests/overloadedlists/should_run/overloadedlistsrun04.hs @@ -3,10 +3,10 @@ import qualified Data.Set as S import GHC.Exts -main = do putStrLn (f []) - putStrLn (f [1,2]) - putStrLn (f [2,0]) - putStrLn (f [3,2]) +main = do putStrLn (f []) + putStrLn (f [1,2]) + putStrLn (f [2,0]) + putStrLn (f [3,2]) putStrLn (f [2,7]) putStrLn (f [2,2]) putStrLn (f [1..7]) @@ -18,11 +18,3 @@ f [_] = "one element" f [2,_] = "two elements, the smaller one is 2" f [_,2] = "two elements, the bigger one is 2" f _ = "else" - - -instance Ord a => IsList (S.Set a) where - type (Item (S.Set a)) = a - fromList = S.fromList - toList = S.toList - - diff --git a/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals1.hs b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals1.hs new file mode 100644 index 0000000000..6b7de0f712 --- /dev/null +++ b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals1.hs @@ -0,0 +1,5 @@ +module ParserNoBinaryLiterals1 where + +f :: Int -> () +f 0b0 = () +f _ = () diff --git a/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals1.stderr b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals1.stderr new file mode 100644 index 0000000000..3b57330e59 --- /dev/null +++ b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals1.stderr @@ -0,0 +1,5 @@ + +ParserNoBinaryLiterals1.hs:4:1: + Equations for ‘f’ have different numbers of arguments + ParserNoBinaryLiterals1.hs:4:1-10 + ParserNoBinaryLiterals1.hs:5:1-10 diff --git a/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals2.hs b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals2.hs new file mode 100644 index 0000000000..e760bd888e --- /dev/null +++ b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals2.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE MagicHash #-} + +module ParserNoBinaryLiterals2 where + +import GHC.Types + +f :: Word -> () +f (W# 0b0##) = () +f _ = () diff --git a/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals2.stderr b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals2.stderr new file mode 100644 index 0000000000..4a756d6e27 --- /dev/null +++ b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals2.stderr @@ -0,0 +1,5 @@ + +ParserNoBinaryLiterals2.hs:8:4: + Constructor ‘W#’ should have 1 argument, but has been given 2 + In the pattern: W# 0 b0## + In an equation for ‘f’: f (W# 0 b0##) = () diff --git a/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals3.hs b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals3.hs new file mode 100644 index 0000000000..b6bc81b68a --- /dev/null +++ b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals3.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE MagicHash #-} + +module ParserNoBinaryLiterals3 where + +import GHC.Types + +f :: Int -> () +f (I# 0b0#) = () +f _ = () diff --git a/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals3.stderr b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals3.stderr new file mode 100644 index 0000000000..32c27e6b8a --- /dev/null +++ b/testsuite/tests/parser/should_fail/ParserNoBinaryLiterals3.stderr @@ -0,0 +1,5 @@ + 
+ParserNoBinaryLiterals3.hs:8:4: + Constructor ‘I#’ should have 1 argument, but has been given 2 + In the pattern: I# 0 b0# + In an equation for ‘f’: f (I# 0 b0#) = () diff --git a/testsuite/tests/parser/should_fail/T8506.stderr b/testsuite/tests/parser/should_fail/T8506.stderr index b0e9fde84b..d7de4fe4e3 100644 --- a/testsuite/tests/parser/should_fail/T8506.stderr +++ b/testsuite/tests/parser/should_fail/T8506.stderr @@ -3,4 +3,4 @@ T8506.hs:3:16: Unexpected type ‘Int’ In the class declaration for ‘Shapable’ A class declaration should have form - class Shapable a b c where ... + class Shapable a where ... diff --git a/testsuite/tests/parser/should_fail/all.T b/testsuite/tests/parser/should_fail/all.T index 45c471e2c6..7e286cf3f2 100644 --- a/testsuite/tests/parser/should_fail/all.T +++ b/testsuite/tests/parser/should_fail/all.T @@ -75,6 +75,9 @@ test('readFailTraditionalRecords3', normal, compile_fail, ['']) test('ParserNoForallUnicode', normal, compile_fail, ['']) test('ParserNoLambdaCase', when(compiler_lt('ghc', '7.5'), skip), compile_fail, ['']) test('ParserNoMultiWayIf', when(compiler_lt('ghc', '7.5'), skip), compile_fail, ['']) +test('ParserNoBinaryLiterals1', normal, compile_fail, ['']) +test('ParserNoBinaryLiterals2', normal, compile_fail, ['']) +test('ParserNoBinaryLiterals3', normal, compile_fail, ['']) test('T5425', normal, compile_fail, ['']) test('T984', normal, compile_fail, ['']) diff --git a/testsuite/tests/parser/should_fail/readFail025.stderr b/testsuite/tests/parser/should_fail/readFail025.stderr index da220cd0c3..5641642c99 100644 --- a/testsuite/tests/parser/should_fail/readFail025.stderr +++ b/testsuite/tests/parser/should_fail/readFail025.stderr @@ -3,4 +3,4 @@ readFail025.hs:5:8: Unexpected type ‘String’ In the data declaration for ‘T’ A data declaration should have form - data T a b c = ... + data T a = ... 
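The ParserNoBinaryLiterals failures above all stem from the same lexing fact: without the BinaryLiterals extension, "0b0" is read as the integer 0 followed by the identifier b0, so "f 0b0 = ()" is an equation with two argument patterns and "W# 0b0##" is a constructor applied to two patterns. A short sketch of the extension side of the same coin, assuming a GHC that accepts BinaryLiterals (the module name is illustrative, not one of the test files):

    {-# LANGUAGE BinaryLiterals #-}
    module BinLitSketch where

    -- With the extension enabled, 0b101 is a single numeric literal.
    five :: Int
    five = 0b101   -- evaluates to 5

Without the pragma the same characters split into two tokens, which is exactly what the BinaryLiterals0 anti-test further below relies on by binding b0, b1, b10 and b11 as ordinary variables.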
diff --git a/testsuite/tests/parser/should_run/BinaryLiterals0.hs b/testsuite/tests/parser/should_run/BinaryLiterals0.hs new file mode 100644 index 0000000000..7257445fba --- /dev/null +++ b/testsuite/tests/parser/should_run/BinaryLiterals0.hs @@ -0,0 +1,19 @@ +-- | Anti-Test for GHC 7.10+'s @BinaryLiterals@ extensions (see GHC #9224) +-- +-- NB: This code won't compile with -XBinaryLiterals enabled + +{-# LANGUAGE NegativeLiterals #-} + +module Main where + +main :: IO () +main = print lst + where + -- "0b0" is to be parsed as "0 b0" + lst = [ (,) 0b0, (,) 0b1, (,) 0b10, (,) 0b11 + , (,) -0b0, (,) -0b1, (,) -0b10, (,) -0b11 + ] :: [(Int,Int)] + b0 = 60 + b1 = 61 + b11 = 611 + b10 = 610 diff --git a/testsuite/tests/parser/should_run/BinaryLiterals0.stdout b/testsuite/tests/parser/should_run/BinaryLiterals0.stdout new file mode 100644 index 0000000000..dacce8854e --- /dev/null +++ b/testsuite/tests/parser/should_run/BinaryLiterals0.stdout @@ -0,0 +1 @@ +[(0,60),(0,61),(0,610),(0,611),(0,60),(0,61),(0,610),(0,611)] diff --git a/testsuite/tests/parser/should_run/BinaryLiterals1.hs b/testsuite/tests/parser/should_run/BinaryLiterals1.hs new file mode 100644 index 0000000000..f9918fb068 --- /dev/null +++ b/testsuite/tests/parser/should_run/BinaryLiterals1.hs @@ -0,0 +1,25 @@ +-- | Test for GHC 7.10+'s @BinaryLiterals@ extensions (see GHC #9224) + +{-# LANGUAGE BinaryLiterals #-} +{-# LANGUAGE MagicHash #-} + +module Main where + +import GHC.Types + +main = do + print [ I# 0b0#, I# -0b0#, I# 0b1#, I# -0b1# + , I# 0b00000000000000000000000000000000000000000000000000000000000000000000000000001# + , I# -0b00000000000000000000000000000000000000000000000000000000000000000000000000001# + , I# -0b11001001#, I# -0b11001001# + , I# -0b11111111#, I# -0b11111111# + ] + print [ W# 0b0##, W# 0b1##, W# 0b11001001##, W# 0b11##, W# 0b11111111## + , W# 0b00000000000000000000000000000000000000000000000000000000000000000000000000001## + ] + + print [ 0b0, 0b1, 0b10, 0b11, 0b100, 0b101, 0b110, 0b111 :: Integer + , -0b0, -0b1, -0b10, -0b11, -0b100, -0b101, -0b110, -0b111 + , 0b11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111 + , -0b11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111 + ] diff --git a/testsuite/tests/parser/should_run/BinaryLiterals1.stdout b/testsuite/tests/parser/should_run/BinaryLiterals1.stdout new file mode 100644 index 0000000000..e1065be034 --- /dev/null +++ b/testsuite/tests/parser/should_run/BinaryLiterals1.stdout @@ -0,0 +1,3 @@ +[0,0,1,-1,1,-1,-201,-201,-255,-255] +[0,1,201,3,255,1] +[0,1,2,3,4,5,6,7,0,-1,-2,-3,-4,-5,-6,-7,340282366920938463463374607431768211455,-340282366920938463463374607431768211455] diff --git a/testsuite/tests/parser/should_run/BinaryLiterals2.hs b/testsuite/tests/parser/should_run/BinaryLiterals2.hs new file mode 100644 index 0000000000..3779d52341 --- /dev/null +++ b/testsuite/tests/parser/should_run/BinaryLiterals2.hs @@ -0,0 +1,29 @@ +-- | Test for GHC 7.10+'s @BinaryLiterals@ extensions (see GHC #9224) + +{-# LANGUAGE BinaryLiterals #-} +{-# LANGUAGE MagicHash #-} +{-# LANGUAGE NegativeLiterals #-} + +module Main where + +import GHC.Types +import GHC.Int + +main = do + print [ I# 0B0#, I# -0B0#, I# 0B1#, I# -0B1# + , I# 0B00000000000000000000000000000000000000000000000000000000000000000000000000001# + , I# -0B00000000000000000000000000000000000000000000000000000000000000000000000000001# + , I# -0B11001001#, I# 
-0B11001001# + , I# -0B11111111#, I# -0B11111111# + ] + print [ W# 0B0##, W# 0B1##, W# 0B11001001##, W# 0B11##, W# 0B11111111## + , W# 0B00000000000000000000000000000000000000000000000000000000000000000000000000001## + ] + + print [ 0B0, 0B1, 0B10, 0B11, 0B100, 0B101, 0B110, 0B111 :: Integer + , -0B0, -0B1, -0B10, -0B11, -0B100, -0B101, -0B110, -0B111 + , 0B11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111 + , -0B11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111 + ] + + print [ I8# -0B10000000#, I8# 0B1111111# ] diff --git a/testsuite/tests/parser/should_run/BinaryLiterals2.stdout b/testsuite/tests/parser/should_run/BinaryLiterals2.stdout new file mode 100644 index 0000000000..76506e9670 --- /dev/null +++ b/testsuite/tests/parser/should_run/BinaryLiterals2.stdout @@ -0,0 +1,4 @@ +[0,0,1,-1,1,-1,-201,-201,-255,-255] +[0,1,201,3,255,1] +[0,1,2,3,4,5,6,7,0,-1,-2,-3,-4,-5,-6,-7,340282366920938463463374607431768211455,-340282366920938463463374607431768211455] +[-128,127] diff --git a/testsuite/tests/parser/should_run/all.T b/testsuite/tests/parser/should_run/all.T index eee0330e5e..cf7ee6fdd3 100644 --- a/testsuite/tests/parser/should_run/all.T +++ b/testsuite/tests/parser/should_run/all.T @@ -6,3 +6,6 @@ test('T1344', normal, compile_and_run, ['']) test('operator', normal, compile_and_run, ['']) test('operator2', normal, compile_and_run, ['']) test('ParserMultiWayIf', when(compiler_lt('ghc', '7.5'), skip), compile_and_run, ['']) +test('BinaryLiterals0', normal, compile_and_run, ['']) +test('BinaryLiterals1', when(compiler_lt('ghc', '7.9'), skip), compile_and_run, ['']) +test('BinaryLiterals2', when(compiler_lt('ghc', '7.9'), skip), compile_and_run, [''])
\ No newline at end of file diff --git a/testsuite/tests/patsyn/should_compile/.gitignore b/testsuite/tests/patsyn/should_compile/.gitignore deleted file mode 100644 index 492f1e78dd..0000000000 --- a/testsuite/tests/patsyn/should_compile/.gitignore +++ /dev/null @@ -1,9 +0,0 @@ -.hpc.bidir -.hpc.ex -.hpc.ex-num -.hpc.ex-prov -.hpc.ex-view -.hpc.incomplete -.hpc.num -.hpc.overlap -.hpc.univ diff --git a/testsuite/tests/patsyn/should_compile/T9023.hs b/testsuite/tests/patsyn/should_compile/T9023.hs new file mode 100644 index 0000000000..3a8614009f --- /dev/null +++ b/testsuite/tests/patsyn/should_compile/T9023.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE PatternSynonyms #-} + +module T9023 where + +pattern P a b = Just (a, b) +foo P{} = True diff --git a/testsuite/tests/patsyn/should_compile/all.T b/testsuite/tests/patsyn/should_compile/all.T index ecc4701661..d851bc3ac8 100644 --- a/testsuite/tests/patsyn/should_compile/all.T +++ b/testsuite/tests/patsyn/should_compile/all.T @@ -9,3 +9,4 @@ test('num', normal, compile, ['']) test('incomplete', normal, compile, ['']) test('export', normal, compile, ['']) test('T8966', normal, compile, ['']) +test('T9023', normal, compile, ['']) diff --git a/testsuite/tests/patsyn/should_fail/T9161-1.hs b/testsuite/tests/patsyn/should_fail/T9161-1.hs new file mode 100644 index 0000000000..c14eb542cc --- /dev/null +++ b/testsuite/tests/patsyn/should_fail/T9161-1.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE DataKinds #-} + +pattern PATTERN = () + +wrongLift :: PATTERN +wrongLift = undefined diff --git a/testsuite/tests/patsyn/should_fail/T9161-1.stderr b/testsuite/tests/patsyn/should_fail/T9161-1.stderr new file mode 100644 index 0000000000..1f05196ebb --- /dev/null +++ b/testsuite/tests/patsyn/should_fail/T9161-1.stderr @@ -0,0 +1,4 @@ + +T9161-1.hs:6:14: + Pattern synonym ‘PATTERN’ used as a type + In the type signature for ‘wrongLift’: wrongLift :: PATTERN diff --git a/testsuite/tests/patsyn/should_fail/T9161-2.hs b/testsuite/tests/patsyn/should_fail/T9161-2.hs new file mode 100644 index 0000000000..941d23e35f --- /dev/null +++ b/testsuite/tests/patsyn/should_fail/T9161-2.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE DataKinds, KindSignatures, PolyKinds #-} + +pattern PATTERN = () + +data Proxy (tag :: k) (a :: *) + +wrongLift :: Proxy PATTERN () +wrongLift = undefined diff --git a/testsuite/tests/patsyn/should_fail/T9161-2.stderr b/testsuite/tests/patsyn/should_fail/T9161-2.stderr new file mode 100644 index 0000000000..8d21be5906 --- /dev/null +++ b/testsuite/tests/patsyn/should_fail/T9161-2.stderr @@ -0,0 +1,5 @@ + +T9161-2.hs:8:20: + Pattern synonym ‘PATTERN’ used as a type + In the type signature for ‘wrongLift’: + wrongLift :: Proxy PATTERN () diff --git a/testsuite/tests/patsyn/should_fail/all.T b/testsuite/tests/patsyn/should_fail/all.T index 897808ef1d..bff6bdf8c2 100644 --- a/testsuite/tests/patsyn/should_fail/all.T +++ b/testsuite/tests/patsyn/should_fail/all.T @@ -4,3 +4,5 @@ test('unidir', normal, compile_fail, ['']) test('local', normal, compile_fail, ['']) test('T8961', normal, multimod_compile_fail, ['T8961','']) test('as-pattern', normal, compile_fail, ['']) +test('T9161-1', normal, compile_fail, ['']) +test('T9161-2', normal, compile_fail, ['']) diff --git a/testsuite/tests/patsyn/should_run/.gitignore b/testsuite/tests/patsyn/should_run/.gitignore deleted file mode 100644 index 7380291005..0000000000 --- a/testsuite/tests/patsyn/should_run/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -eval -ex-prov -match - 
-.hpc.eval -.hpc.ex-prov -.hpc.match diff --git a/testsuite/tests/perf/compiler/T5837.stderr b/testsuite/tests/perf/compiler/T5837.stderr index 2d2907d3ae..5cee13dd1d 100644 --- a/testsuite/tests/perf/compiler/T5837.stderr +++ b/testsuite/tests/perf/compiler/T5837.stderr @@ -158,4 +158,4 @@ T5837.hs:8:6: (TF a))))))))))))))))))))))))))))))))))))))))))))))))) In the ambiguity check for: forall a. a ~ TF (a, Int) => Int - In the type signature for ‘t’: t :: a ~ TF (a, Int) => Int + In the type signature for ‘t’: t :: (a ~ TF (a, Int)) => Int diff --git a/testsuite/tests/perf/compiler/all.T b/testsuite/tests/perf/compiler/all.T index 2bff1c72d5..9a67aa5431 100644 --- a/testsuite/tests/perf/compiler/all.T +++ b/testsuite/tests/perf/compiler/all.T @@ -42,15 +42,17 @@ test('T1969', # 2013-02-13 27, very unstable! # 2013-09-11 30 (amd64/Linux) compiler_stats_num_field('max_bytes_used', - [(platform('i386-unknown-mingw32'), 7295012, 20), + [(platform('i386-unknown-mingw32'), 5719436, 20), # 2010-05-17 5717704 (x86/Windows) # 2013-02-10 5159748 (x86/Windows) # 2013-02-10 5030080 (x86/Windows) # 2013-11-13 7295012 (x86/Windows, 64bit machine) - (wordsize(32), 6429864, 1), + # 2014-04-24 5719436 (x86/Windows, 64bit machine) + (wordsize(32), 5949188, 1), # 6707308 (x86/OS X) # 2009-12-31 6149572 (x86/Linux) # 2014-01-22 6429864 (x86/Linux) + # 2014-06-29 5949188 (x86/Linux) (wordsize(64), 11000000, 20)]), # looks like the peak is around ~10M, but we're # unlikely to GC exactly on the peak. @@ -64,13 +66,14 @@ test('T1969', # 2013-02-10 310633884 (x86/Windows) # 2013-11-13 317975916 (x86/Windows, 64bit machine) # 2014-04-04 301784492 (x86/Windows, 64bit machine) - (wordsize(32), 316103268, 1), + (wordsize(32), 303300692, 1), # 221667908 (x86/OS X) # 274932264 (x86/Linux) # 2012-10-08 303930948 (x86/Linux, new codegen) # 2013-02-10 322937684 (x86/OSX) # 2014-01-22 316103268 (x86/Linux) - (wordsize(64), 660922376, 5)]), + # 2014-06-29 303300692 (x86/Linux) + (wordsize(64), 651626680, 5)]), # 17/11/2009 434845560 (amd64/Linux) # 08/12/2009 459776680 (amd64/Linux) # 17/05/2010 519377728 (amd64/Linux) @@ -86,6 +89,7 @@ test('T1969', # 17/1/13: 667160192 (x86_64/Linux) new demand analyser # 18/10/2013 698612512 (x86_64/Linux) fix for #8456 # 10/02/2014 660922376 (x86_64/Linux) call artiy analysis + # 17/07/2014 651626680 (x86_64/Linux) roundabout update only_ways(['normal']), extra_hc_opts('-dcore-lint -static') @@ -110,13 +114,14 @@ else: test('T3294', [ compiler_stats_num_field('max_bytes_used', # Note [residency] - [(wordsize(32), 24009436, 15), + [(wordsize(32), 19882188, 15), # 17725476 (x86/OS X) # 14593500 (Windows) # 2013-02-10 20651576 (x86/Windows) # 2013-02-10 20772984 (x86/OSX) # 2013-11-13 24009436 (x86/Windows, 64bit machine) - (wordsize(64), 43224080, 15)]), + # 2014-04-24 19882188 (x86/Windows, 64bit machine) + (wordsize(64), 40000000, 15)]), # prev: 25753192 (amd64/Linux) # 29/08/2012: 37724352 (amd64/Linux) # (increase due to new codegen, see #7198) @@ -126,6 +131,8 @@ test('T3294', # (reason for decrease unknown) # 29/5/2013: 43224080 (amd64/Linux) # (reason for increase back to earlier value unknown) + # 2014-07-14: 36670800 (amd64/Linux) + # (reason unknown, setting expected value somewhere in between) compiler_stats_num_field('bytes allocated', [(wordsize(32), 1377050640, 5), @@ -135,7 +142,7 @@ test('T3294', # 2013-11-13: 1478325844 (x86/Windows, 64bit machine) # 2014-01-12: 1565185140 (x86/Linux) # 2013-04-04: 1377050640 (x86/Windows, 64bit machine) - (wordsize(64), 2705289664, 5)]), 
+ (wordsize(64), 2671595512, 5)]), # old: 1357587088 (amd64/Linux) # 29/08/2012: 2961778696 (amd64/Linux) # (^ increase due to new codegen, see #7198) @@ -144,6 +151,7 @@ test('T3294', # 12/12/2013: 3083825616 (amd64/Linux) (reason unknown) # 18/02/2014: 2897630040 (amd64/Linux) (call arity improvements) # 12/03/2014: 2705289664 (amd64/Linux) (more call arity improvements) + # 2014-17-07: 2671595512 (amd64/Linux) (round-about update) conf_3294 ], compile, @@ -225,14 +233,16 @@ test('T3064', # 2012-10-30: 111189536 (x86/Windows) # 2013-11-13: 146626504 (x86/Windows, 64bit machine) # 2014-01-22: 162457940 (x86/Linux) - (wordsize(64), 308422280, 5)]), + (wordsize(64), 332702112, 5)]), # (amd64/Linux) (28/06/2011): 73259544 # (amd64/Linux) (07/02/2013): 224798696 # (amd64/Linux) (02/08/2013): 236404384, increase from roles # (amd64/Linux) (11/09/2013): 290165632, increase from AMP warnings # (amd64/Linux) (22/11/2013): 308300448, GND via Coercible and counters for constraints solving - # (amd64/Linux) (02/12/2013): 329795912, Coercible refactor + # (amd64/Linux) (02/12/2013): 329795912, Coercible refactor # (amd64/Linux) (11/02/2014): 308422280, optimize Coercions in simpleOptExpr + # (amd64/Linux) (23/05/2014): 324022680, unknown cause + # (amd64/Linux) (2014-07-17): 332702112, general round of updates compiler_stats_num_field('max_bytes_used', [(wordsize(32), 11202304, 20), @@ -267,7 +277,7 @@ test('T5030', # previous: 196457520 # 2012-10-08: 259547660 (x86/Linux, new codegen) # 2013-11-21: 198573456 (x86 Windows, 64 bit machine) - (wordsize(64), 397672152, 10)]), + (wordsize(64), 409314320, 10)]), # Previously 530000000 (+/- 10%) # 17/1/13: 602993184 (x86_64/Linux) # (new demand analyser) @@ -277,6 +287,8 @@ test('T5030', # decrease from more aggressive coercion optimisations from roles # 2013-11-12 397672152 (amd64/Linux) # big decrease following better CSE and arity + # 2014-07-17 409314320 (amd64/Linux) + # general round of updates only_ways(['normal']) ], @@ -316,7 +328,7 @@ test('T783', # 2013-02-10: 329202116 (x86/Windows) # 2013-02-10: 338465200 (x86/OSX) # 2014-04-04: 319179104 (x86 Windows, 64 bit machine) - (wordsize(64), 654804144, 10)]), + (wordsize(64), 640031840, 10)]), # prev: 349263216 (amd64/Linux) # 07/08/2012: 384479856 (amd64/Linux) # 29/08/2012: 436927840 (amd64/Linux) @@ -327,6 +339,8 @@ test('T783', # (fix for #8456) # 24/10/2013: 654804144 (amd64/Linux) # (fix previous fix for #8456) + # 2014-07-17: 640031840 (amd64/Linux) + # (general round of updates) extra_hc_opts('-static') ], compile,['']) @@ -356,7 +370,7 @@ test('T5321FD', # prev: 213380256 # 2012-10-08: 240302920 (x86/Linux) # (increase due to new codegen) - (wordsize(64), 476497048, 10)]) + (wordsize(64), 426960992, 10)]) # prev: 418306336 # 29/08/2012: 492905640 # (increase due to new codegen) @@ -364,6 +378,10 @@ test('T5321FD', # (reason for decrease unknown) # 08/06/2013: 476497048 # (reason for increase unknown) + # before 2014-07-17: 441997096 + # (with -8%, still in range, hence cause not known) + # 2014-07-17: 426960992 (-11% of previous value) + # (due to better optCoercion, 5e7406d9, #9233) ], compile,['']) @@ -372,7 +390,9 @@ test('T5642', compiler_stats_num_field('bytes allocated', [(wordsize(32), 650000000, 10), # sample from x86/Linux - (wordsize(64), 1300000000, 10)]) + (wordsize(64), 1358833928, 10)]) + # prev: 1300000000 + # 2014-07-17: 1358833928 (general round of updates) ], compile,['-O']) @@ -387,8 +407,8 @@ test('T5837', # 2012-10-02 81879216 # 2012-09-20 87254264 amd64/Linux # 
2013-09-18 90587232 amd64/Linux - # 2013-11-21 86795752 amd64/Linux, GND via Coercible and counters - # for constraints solving + # 2013-11-21 86795752 amd64/Linux, GND via Coercible and counters + # for constraints solving ], compile_fail,['-ftype-function-depth=50']) @@ -399,19 +419,22 @@ test('T6048', # prev: 38000000 (x86/Linux) # 2012-10-08: 48887164 (x86/Linux) # 2014-04-04: 62618072 (x86 Windows, 64 bit machine) - (wordsize(64), 110646312, 10)]) - # 18/09/2012 97247032 amd64/Linux + (wordsize(64), 125431448, 12)]) + # 18/09/2012 97247032 amd64/Linux # 16/01/2014 108578664 amd64/Linux (unknown, likely foldl-via-foldr) - # 18/01/2014 95960720 amd64/Linux Call Arity improvements + # 18/01/2014 95960720 amd64/Linux Call Arity improvements # 28/02/2014 105556793 amd64/Linux (unknown, tweak in base/4d9e7c9e3 resulted in change) # 05/03/2014 110646312 amd64/Linux Call Arity became more elaborate + # 14/07/2014 125431448 amd64/Linux unknown reason. Even worse in GHC-7.8.3. *shurg* ], compile,['']) test('T9020', [ only_ways(['optasm']), compiler_stats_num_field('bytes allocated', - [(wordsize(32), 40000000, 10), - (wordsize(64), 795469104, 10)]) + [(wordsize(32), 381360728, 10), + (wordsize(64), 728263536, 10)]) + # prev: 795469104 + # 2014-07-17: 728263536 (general round of updates) ], compile,['']) diff --git a/testsuite/tests/perf/haddock/all.T b/testsuite/tests/perf/haddock/all.T index e1d7e9f432..b17d472928 100644 --- a/testsuite/tests/perf/haddock/all.T +++ b/testsuite/tests/perf/haddock/all.T @@ -10,11 +10,12 @@ test('haddock.base', ,(platform('i386-unknown-mingw32'), 163, 10) # 2013-02-10: 133 (x86/Windows) # 2013-11-13: 163 (x86/Windows, 64bit machine) - ,(wordsize(32), 168, 1)]) + ,(wordsize(32), 156, 1)]) # 2012-08-14: 144 (x86/OSX) # 2012-10-30: 113 (x86/Windows) # 2013-02-10: 139 (x86/OSX) # 2014-01-22: 168 (x86/Linux - new haddock) + # 2014-06-29: 156 (x86/Linux) ,stats_num_field('max_bytes_used', [(wordsize(64), 115113864, 10) # 2012-08-14: 87374568 (amd64/Linux) @@ -26,11 +27,12 @@ test('haddock.base', ,(platform('i386-unknown-mingw32'), 58557136, 10) # 2013-02-10: 47988488 (x86/Windows) # 2013-11-13: 58557136 (x86/Windows, 64bit machine) - ,(wordsize(32), 62189068, 1)]) + ,(wordsize(32), 58243640, 1)]) # 2013-02-10: 52237984 (x86/OSX) # 2014-01-22: 62189068 (x86/Linux) + # 2014-06-29: 58243640 (x86/Linux) ,stats_num_field('bytes allocated', - [(wordsize(64), 7128342344, 5) + [(wordsize(64), 7498123680, 5) # 2012-08-14: 5920822352 (amd64/Linux) # 2012-09-20: 5829972376 (amd64/Linux) # 2012-10-08: 5902601224 (amd64/Linux) @@ -40,15 +42,17 @@ test('haddock.base', # 2013-09-18: 6294339840 (x86_64/Linux) # 2013-11-21: 6756213256 (x86_64/Linux) # 2014-01-12: 7128342344 (x86_64/Linux) + # 2014-06-12: 7498123680 (x86_64/Linux) ,(platform('i386-unknown-mingw32'), 3548581572, 5) # 2013-02-10: 3358693084 (x86/Windows) # 2013-11-13: 3097751052 (x86/Windows, 64bit machine) # 2014-04-04: 3548581572 (x86/Windows, 64bit machine) - ,(wordsize(32), 3554624600, 1)]) + ,(wordsize(32), 3799130400, 1)]) # 2012-08-14: 3046487920 (x86/OSX) # 2012-10-30: 2955470952 (x86/Windows) # 2013-02-10: 3146596848 (x86/OSX) # 2014-02-22: 3554624600 (x86/Linux - new haddock) + # 2014-06-29: 3799130400 (x86/Linux) ], stats, ['../../../../libraries/base/dist-install/doc/html/base/base.haddock.t']) @@ -56,7 +60,7 @@ test('haddock.base', test('haddock.Cabal', [unless(in_tree_compiler(), skip) ,stats_num_field('peak_megabytes_allocated', - [(wordsize(64), 278, 10) + [(wordsize(64), 309, 10) # 2012-08-14: 202 
(amd64/Linux) # 2012-08-29: 211 (amd64/Linux, new codegen) # 2012-09-20: 227 (amd64/Linux) @@ -64,33 +68,37 @@ test('haddock.Cabal', # 2013-06-07: 246 (amd64/Linux) (reason unknown) # 2013-11-21: 269 # 2013-11-22: 278 (amd64/Linux) (TH refactoring; weird) + # 2014-07-14: 309 (amd64/Linux) ,(platform('i386-unknown-mingw32'), 144, 10) # 2012-10-30: 83 (x86/Windows) # 2013-02-10: 116 (x86/Windows) # 2013-11-13: 129 (x86/Windows, 64bit machine) # 2014-01-28: 136 # 2014-04-04: 144 - ,(wordsize(32), 139, 1)]) + ,(wordsize(32), 147, 1)]) # 2012-08-14: 116 (x86/OSX) # 2013-02-10: 89 (x86/Windows) # 2014-01-22: 139 (x86/Linux - new haddock, but out of date before) + # 2014-06-29: 147 (x86/Linux) ,stats_num_field('max_bytes_used', - [(wordsize(64), 95356616, 15) - # 2012-08-14: 74119424 (amd64/Linux) - # 2012-08-29: 77992512 (amd64/Linux, new codegen) - # 2012-10-02: 91341568 (amd64/Linux) - # 2012-10-08: 80590280 (amd64/Linux) - # 2013-03-13: 95356616 (amd64/Linux) Cabal updated + [(wordsize(64), 113232208, 15) + # 2012-08-14: 74119424 (amd64/Linux) + # 2012-08-29: 77992512 (amd64/Linux, new codegen) + # 2012-10-02: 91341568 (amd64/Linux) + # 2012-10-08: 80590280 (amd64/Linux) + # 2013-03-13: 95356616 (amd64/Linux) Cabal updated + # 2014-07-14: 113232208 (amd64/Linux) ,(platform('i386-unknown-mingw32'), 63493200, 15) # 2012-10-30: 44224896 (x86/Windows) # 2013-11-13: 49391436 (x86/Windows, 64bit machine) # 2014-04-04: 63493200 (x86/Windows, 64bit machine) - ,(wordsize(32), 52718512, 1)]) + ,(wordsize(32), 66411508, 1)]) # 2012-08-14: 47461532 (x86/OSX) # 2013-02-10: 46563344 (x86/OSX) # 2014-01-22: 52718512 (x86/Linux) + # 2014-06-29: 66411508 (x86/Linux) ,stats_num_field('bytes allocated', - [(wordsize(64), 3979151552, 5) + [(wordsize(64), 4200993768, 5) # 2012-08-14: 3255435248 (amd64/Linux) # 2012-08-29: 3324606664 (amd64/Linux, new codegen) # 2012-10-08: 3373401360 (amd64/Linux) @@ -101,13 +109,16 @@ test('haddock.Cabal', # 2013-11-21: 3908586784 (amd64/Linux) Cabal updated # 2013-12-12: 3828567272 (amd64/Linux) # 2014-01-12: 3979151552 (amd64/Linux) new parser - ,(platform('i386-unknown-mingw32'), 1966911336, 1) + # 2014-06-29: 4200993768 (amd64/Linux) + ,(platform('i386-unknown-mingw32'), 2052220292, 5) # 2012-10-30: 1733638168 (x86/Windows) # 2013-02-10: 1906532680 (x86/Windows) # 2014-01-28: 1966911336 (x86/Windows) - ,(wordsize(32), 1986290624, 1)]) + # 2014-04-24: 2052220292 (x86/Windows) + ,(wordsize(32), 2127198484, 1)]) # 2012-08-14: 1648610180 (x86/OSX) # 2014-01-22: 1986290624 (x86/Linux) + # 2014-06-29: 2127198484 (x86/Linux) ], stats, ['../../../../libraries/Cabal/Cabal/dist-install/doc/html/Cabal/Cabal.haddock.t']) @@ -127,10 +138,11 @@ test('haddock.compiler', # 2012-10-30: 606 (x86/Windows) # 2013-02-10: 653 (x86/Windows) # 2013-11-13: 735 (x86/Windows, 64bit machine) - ,(wordsize(32), 727, 1)]) + ,(wordsize(32), 771, 1)]) # 2012-08-14: 631 (x86/OSX) # 2013-02-10: 663 (x86/OSX) # 2014-01-22: 727 (x86/Linux - new haddock, but out of date before) + # 2014-06-29: 771 (x86/Linux) ,stats_num_field('max_bytes_used', [(wordsize(64), 541926264, 10) # 2012-08-14: 428775544 (amd64/Linux) @@ -146,24 +158,27 @@ test('haddock.compiler', # 2013-11-13: 269147084 (x86/Windows, 64bit machine) # 2014-01-28: 283814088 (x86/Windows) # 2014-04-04: 278706344 (x86/Windows) - ,(wordsize(32), 278124612, 1)]) + ,(wordsize(32), 284082916, 1)]) # 2012-08-14: 231064920 (x86/OSX) # 2013-02-10: 241785276 (x86/Windows) # 2014-01-22: 278124612 (x86/Linux - new haddock) + # 2014-06-29: 284082916 (x86/Linux) 
,stats_num_field('bytes allocated', - [(wordsize(64), 28708374824, 10) + [(wordsize(64), 29809571376, 10) # 2012-08-14: 26070600504 (amd64/Linux) # 2012-08-29: 26353100288 (amd64/Linux, new CG) # 2012-09-18: 26882813032 (amd64/Linux) # 2012-11-12: 25990254632 (amd64/Linux) + # 2014-07-17: 29809571376 (amd64/Linux) general round of updates # 2012-11-27: 28708374824 (amd64/Linux) ,(platform('i386-unknown-mingw32'), 14328363592, 10) # 2012-10-30: 13773051312 (x86/Windows) # 2013-02-10: 14925262356 (x86/Windows) # 2013-11-13: 14328363592 (x86/Windows, 64bit machine) - ,(wordsize(32), 14581475024, 1)]) + ,(wordsize(32), 15110426000, 1)]) # 2012-08-14: 13471797488 (x86/OSX) # 2014-01-22: 14581475024 (x86/Linux - new haddock) + # 2014-06-29: 15110426000 (x86/Linux) ], stats, ['../../../../compiler/stage2/doc/html/ghc/ghc.haddock.t']) diff --git a/testsuite/tests/perf/should_run/T9203.hs b/testsuite/tests/perf/should_run/T9203.hs new file mode 100644 index 0000000000..500fd8c98e --- /dev/null +++ b/testsuite/tests/perf/should_run/T9203.hs @@ -0,0 +1,9 @@ +module Main where + +import Data.Typeable + +f :: Typeable a => Int -> a -> TypeRep +f 0 a = typeOf a +f n a = f (n-1) [a] + +main = print (f 50000 () == f 50001 ()) diff --git a/testsuite/tests/perf/should_run/T9203.stdout b/testsuite/tests/perf/should_run/T9203.stdout new file mode 100644 index 0000000000..bc59c12aa1 --- /dev/null +++ b/testsuite/tests/perf/should_run/T9203.stdout @@ -0,0 +1 @@ +False diff --git a/testsuite/tests/perf/should_run/all.T b/testsuite/tests/perf/should_run/all.T index 606448b011..a9d7c0325d 100644 --- a/testsuite/tests/perf/should_run/all.T +++ b/testsuite/tests/perf/should_run/all.T @@ -6,8 +6,9 @@ test('T3586', [stats_num_field('peak_megabytes_allocated', (17, 1)), # expected value: 17 (amd64/Linux) - stats_num_field('bytes allocated', (16835544, 5)), - # expected value: 16835544 (amd64/Linux) + stats_num_field('bytes allocated', (16102024, 5)), + # prev: 16835544 (amd64/Linux) + # 2014-07-17: 16102024 (amd64/Linux), general round of updates only_ways(['normal']) ], compile_and_run, @@ -60,9 +61,10 @@ test('T876', [(wordsize(64), 63216 , 5), # 2013-02-14: 1263712 (x86_64/Linux) # 2014-02-10: 63216 (x86_64/Linux), call arity analysis - (wordsize(32), 56820, 5) ]), + (wordsize(32), 53024, 5) ]), # some date: 663712 (Windows, 64-bit machine) # 2014-04-04: 56820 (Windows, 64-bit machine) + # 2014-06-29: 53024 (x86_64/Linux) only_ways(['normal']), extra_run_opts('10000') ], @@ -89,9 +91,10 @@ test('T3738', # expected value: 1 (amd64/Linux) stats_num_field('bytes allocated', [(wordsize(32), 45648, 5), - # expected value: 45648 (x86/Linux) + # expected value: 50520 (x86/Linux) (wordsize(64), 49400, 5)]), - # expected value: 49400 (amd64/Linux) + # prev: 49400 (amd64/Linux) + # 2014-07-17: 50520 (amd64/Linux) general round of updates only_ways(['normal']) ], compile_and_run, @@ -153,8 +156,9 @@ test('T5205', [stats_num_field('bytes allocated', [(wordsize(32), 47088, 5), # expected value: 47088 (x86/Darwin) - (wordsize(64), 51320, 5)]), + (wordsize(64), 52600, 5)]), # expected value: 51320 (amd64/Linux) + # 2014-07-17: 52600 (amd64/Linux) general round of updates only_ways(['normal', 'optasm']) ], compile_and_run, @@ -252,8 +256,9 @@ test('Conversions', # 2013-02-10: 77472 (x86/OSX) # 2013-02-10: 79276 (x86/Windows) # 2014-01-13: 76768 (x86/Linux) due to #8647 - (wordsize(64), 110632, 5)]), + (wordsize(64), 107544, 5)]), # 2012-12-18: 109608 (amd64/OS X) + # 2014-07-17: 107544 (amd64/Linux) only_ways(['normal']) ], @@ 
-311,7 +316,7 @@ test('T7850', test('T5949', [stats_num_field('bytes allocated', - [ (wordsize(32), 101000, 10), + [ (wordsize(32), 116020, 10), (wordsize(64), 201008, 10)]), # previously, it was >400000 bytes only_ways(['normal'])], @@ -320,7 +325,8 @@ test('T5949', test('T4267', [stats_num_field('bytes allocated', - [ (wordsize(32), 20992, 10) + [ (wordsize(32), 36012, 10) + # 32-bit value close to 64 bit; c.f. T7619 , (wordsize(64), 40992, 10) ]), # previously, it was >170000 bytes # 2014-01-17: 130000 @@ -331,7 +337,9 @@ test('T4267', test('T7619', [stats_num_field('bytes allocated', - [ (wordsize(32), 20992, 10) + [ (wordsize(32), 36012, 10) + # 32-bit close to 64-bit value; most of this very + # small number is standard start-up boilerplate I think , (wordsize(64), 40992, 10) ]), # previously, it was >400000 bytes only_ways(['normal'])], @@ -348,8 +356,10 @@ test('InlineArrayAlloc', test('InlineByteArrayAlloc', [stats_num_field('bytes allocated', - [ (wordsize(32), 720040960, 5) + [ (wordsize(32), 1360036012, 5) , (wordsize(64), 1440040960, 5) ]), + # 32 and 64 bit not so different, because + # we are allocating *byte* arrays only_ways(['normal'])], compile_and_run, ['-O2']) @@ -361,3 +371,11 @@ test('InlineCloneArrayAlloc', only_ways(['normal'])], compile_and_run, ['-O2']) + +test('T9203', + [stats_num_field('bytes allocated', + [ (wordsize(32), 50000000, 5) + , (wordsize(64), 95747304, 5) ]), + only_ways(['normal'])], + compile_and_run, + ['-O2']) diff --git a/testsuite/tests/polykinds/Makefile b/testsuite/tests/polykinds/Makefile index aa8b482b73..8636bb959f 100644 --- a/testsuite/tests/polykinds/Makefile +++ b/testsuite/tests/polykinds/Makefile @@ -38,3 +38,9 @@ T8449: $(RM) -f T8449.hi T8449.o T8449a.hi T8449a.o '$(TEST_HC)' $(TEST_HC_OPTS) -c T8449a.hs '$(TEST_HC)' $(TEST_HC_OPTS) -c T8449.hs + +T9263: + $(RM) -f T9263.hi T9263.o T9263a.hi T9263a.o T9263b.hi T9263b.o + '$(TEST_HC)' $(TEST_HC_OPTS) -c T9263b.hs + '$(TEST_HC)' $(TEST_HC_OPTS) -c T9263a.hs + '$(TEST_HC)' $(TEST_HC_OPTS) -c T9263.hs diff --git a/testsuite/tests/polykinds/T7230.stderr b/testsuite/tests/polykinds/T7230.stderr index 7e1a7ab88f..0c34249223 100644 --- a/testsuite/tests/polykinds/T7230.stderr +++ b/testsuite/tests/polykinds/T7230.stderr @@ -7,13 +7,13 @@ T7230.hs:48:32: at T7230.hs:47:10-68 or from (xs ~ (x : xs1)) bound by a pattern with constructor - SCons :: forall (x :: k) (xs :: [k]). + SCons :: forall (k :: BOX) (x :: k) (xs :: [k]). Sing x -> Sing xs -> Sing (x : xs), in an equation for ‘crash’ at T7230.hs:48:8-27 or from (xs1 ~ (x1 : xs2)) bound by a pattern with constructor - SCons :: forall (x :: k) (xs :: [k]). + SCons :: forall (k :: BOX) (x :: k) (xs :: [k]). 
Sing x -> Sing xs -> Sing (x : xs), in an equation for ‘crash’ at T7230.hs:48:17-26 diff --git a/testsuite/tests/polykinds/T7278.stderr b/testsuite/tests/polykinds/T7278.stderr index f24f9b2aad..3d615c12f7 100644 --- a/testsuite/tests/polykinds/T7278.stderr +++ b/testsuite/tests/polykinds/T7278.stderr @@ -2,4 +2,4 @@ T7278.hs:8:43: ‘t’ is applied to too many type arguments In the type signature for ‘f’: - f :: C (t :: k) (TF t) => TF t p1 p0 -> t p1 p0 + f :: (C (t :: k) (TF t)) => TF t p1 p0 -> t p1 p0 diff --git a/testsuite/tests/polykinds/T7438.stderr b/testsuite/tests/polykinds/T7438.stderr index b126621ce1..b84465545f 100644 --- a/testsuite/tests/polykinds/T7438.stderr +++ b/testsuite/tests/polykinds/T7438.stderr @@ -1,19 +1,19 @@ - -T7438.hs:6:14: - Couldn't match expected type ‘t1’ with actual type ‘t’ - ‘t’ is untouchable - inside the constraints (t2 ~ t3) - bound by a pattern with constructor - Nil :: forall (a :: k). Thrist a a, - in an equation for ‘go’ - at T7438.hs:6:4-6 - ‘t’ is a rigid type variable bound by - the inferred type of go :: Thrist t2 t3 -> t -> t1 at T7438.hs:6:1 - ‘t1’ is a rigid type variable bound by - the inferred type of go :: Thrist t2 t3 -> t -> t1 at T7438.hs:6:1 - Possible fix: add a type signature for ‘go’ - Relevant bindings include - acc :: t (bound at T7438.hs:6:8) - go :: Thrist t2 t3 -> t -> t1 (bound at T7438.hs:6:1) - In the expression: acc - In an equation for ‘go’: go Nil acc = acc +
+T7438.hs:6:14:
+ Couldn't match expected type ‘t1’ with actual type ‘t’
+ ‘t’ is untouchable
+ inside the constraints (t2 ~ t3)
+ bound by a pattern with constructor
+ Nil :: forall (k :: BOX) (b :: k). Thrist b b,
+ in an equation for ‘go’
+ at T7438.hs:6:4-6
+ ‘t’ is a rigid type variable bound by
+ the inferred type of go :: Thrist t2 t3 -> t -> t1 at T7438.hs:6:1
+ ‘t1’ is a rigid type variable bound by
+ the inferred type of go :: Thrist t2 t3 -> t -> t1 at T7438.hs:6:1
+ Possible fix: add a type signature for ‘go’
+ Relevant bindings include
+ acc :: t (bound at T7438.hs:6:8)
+ go :: Thrist t2 t3 -> t -> t1 (bound at T7438.hs:6:1)
+ In the expression: acc
+ In an equation for ‘go’: go Nil acc = acc
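
The updated expected output above mostly reflects that GADT constructors now display their kind variable explicitly (Nil :: forall (k :: BOX) (b :: k). Thrist b b); the error itself is the usual complaint about untouchable type variables when a result type must be inferred under a GADT match. A rough reconstruction of the shape of the program and of the signature the message suggests (assumed code, not the actual T7438.hs source):

{-# LANGUAGE GADTs, PolyKinds #-}
module ThristSketch where

data Thrist (a :: k) (b :: k) where
  Nil :: Thrist a a

-- Inferring go's result type under the equality that matching Nil brings
-- into scope is rejected ("untouchable"); an explicit signature, as the
-- error message suggests, makes this version typecheck.
go :: Thrist a b -> c -> c
go Nil acc = acc
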
diff --git a/testsuite/tests/polykinds/T7939a.stderr b/testsuite/tests/polykinds/T7939a.stderr index 09b818a5b5..22388ddca0 100644 --- a/testsuite/tests/polykinds/T7939a.stderr +++ b/testsuite/tests/polykinds/T7939a.stderr @@ -4,4 +4,4 @@ T7939a.hs:7:5: The first argument of ‘F’ should have kind ‘*’, but ‘Maybe’ has kind ‘* -> *’ In the type ‘Maybe’ - In the family declaration for ‘F’ + In the type family declaration for ‘F’ diff --git a/testsuite/tests/polykinds/T8566.stderr b/testsuite/tests/polykinds/T8566.stderr index 4638fd8c4d..ad0d15e69c 100644 --- a/testsuite/tests/polykinds/T8566.stderr +++ b/testsuite/tests/polykinds/T8566.stderr @@ -6,7 +6,8 @@ T8566.hs:31:9: bound by the instance declaration at T8566.hs:29:10-67 or from ('AA t (a : as) ~ 'AA t1 as1) bound by a pattern with constructor - A :: forall (r :: [*]) (t :: k) (as :: [U *]). I ('AA t as) r, + A :: forall (r :: [*]) (k :: BOX) (t :: k) (as :: [U *]). + I ('AA t as) r, in an equation for ‘c’ at T8566.hs:31:5 The type variable ‘fs0’ is ambiguous diff --git a/testsuite/tests/polykinds/T9063.hs b/testsuite/tests/polykinds/T9063.hs new file mode 100644 index 0000000000..007f475c06 --- /dev/null +++ b/testsuite/tests/polykinds/T9063.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE DataKinds, PolyKinds, TypeFamilies, TypeOperators, + UndecidableInstances #-} + +module T9063 where + +import Data.Type.Equality +import Data.Proxy + +class kproxy ~ 'KProxy => PEq (kproxy :: KProxy a) where + type (:==) (x :: a) (y :: a) :: Bool + type x :== y = x == y + +instance PEq ('KProxy :: KProxy Bool) + +foo :: Proxy (True :== True) -> Proxy (True == True) +foo = id diff --git a/testsuite/tests/polykinds/T9106.hs b/testsuite/tests/polykinds/T9106.hs new file mode 100644 index 0000000000..eaf0364235 --- /dev/null +++ b/testsuite/tests/polykinds/T9106.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE MultiParamTypeClasses, DataKinds, FunctionalDependencies, + KindSignatures, PolyKinds, FlexibleInstances, FlexibleContexts, + UndecidableInstances #-} + +module T9106 where + +import GHC.TypeLits + +class FunctorN (n :: Nat) f (a :: *) (fa :: *) | n f a -> fa where + +instance FunctorN 0 f a a where + +instance FunctorN n f a (f fa) + diff --git a/testsuite/tests/polykinds/T9106.stderr b/testsuite/tests/polykinds/T9106.stderr new file mode 100644 index 0000000000..0b239f2ea4 --- /dev/null +++ b/testsuite/tests/polykinds/T9106.stderr @@ -0,0 +1,8 @@ + +T9106.hs:13:10: + Illegal instance declaration for ‘FunctorN n f a (f fa)’ + The liberal coverage condition fails in class ‘FunctorN’ + for functional dependency: ‘n f a -> fa’ + Reason: lhs types ‘n’, ‘f’, ‘a’ + do not jointly determine rhs type ‘f fa’ + In the instance declaration for ‘FunctorN n f a (f fa)’ diff --git a/testsuite/tests/polykinds/T9144.hs b/testsuite/tests/polykinds/T9144.hs new file mode 100644 index 0000000000..0a9ef08afa --- /dev/null +++ b/testsuite/tests/polykinds/T9144.hs @@ -0,0 +1,34 @@ +{-# LANGUAGE PolyKinds, DataKinds, TypeFamilies, GADTs, RankNTypes #-} + +module T9144 where + +import Data.Proxy +import GHC.TypeLits + +data family Sing (a :: k) + +data SomeSing :: KProxy k -> * where + SomeSing :: forall (a :: k). Sing a -> SomeSing ('KProxy :: KProxy k) + +class kproxy ~ 'KProxy => SingKind (kproxy :: KProxy k) where + fromSing :: forall (a :: k). 
Sing a -> DemoteRep ('KProxy :: KProxy k) + toSing :: DemoteRep ('KProxy :: KProxy k) -> SomeSing ('KProxy :: KProxy k) + +type family DemoteRep (kproxy :: KProxy k) :: * + +data Foo = Bar Nat +data FooTerm = BarTerm Integer + +data instance Sing (x :: Foo) where + SBar :: Sing n -> Sing (Bar n) + +type instance DemoteRep ('KProxy :: KProxy Nat) = Integer +type instance DemoteRep ('KProxy :: KProxy Foo) = FooTerm + +instance SingKind ('KProxy :: KProxy Nat) where + fromSing = undefined + toSing = undefined + +instance SingKind ('KProxy :: KProxy Foo) where + fromSing (SBar n) = BarTerm (fromSing n) + toSing n = case toSing n of SomeSing n' -> SomeSing (SBar n') diff --git a/testsuite/tests/polykinds/T9144.stderr b/testsuite/tests/polykinds/T9144.stderr new file mode 100644 index 0000000000..f2c65530ee --- /dev/null +++ b/testsuite/tests/polykinds/T9144.stderr @@ -0,0 +1,7 @@ +
+T9144.hs:34:26:
+ Couldn't match type ‘Integer’ with ‘FooTerm’
+ Expected type: DemoteRep 'KProxy
+ Actual type: DemoteRep 'KProxy
+ In the first argument of ‘toSing’, namely ‘n’
+ In the expression: toSing n
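
A note on the expected error above: the two seemingly identical types ‘DemoteRep 'KProxy’ differ only in their kind argument, which is not printed by default; one proxy is at kind Nat (whose DemoteRep instance is Integer) and the other at kind Foo (whose DemoteRep instance is FooTerm). The T9063/T9144/T9263 tests all build on the same kind-proxy idiom; a simplified sketch with made-up names:

{-# LANGUAGE DataKinds, PolyKinds, TypeFamilies #-}
module KProxySketch where

import Data.Proxy (KProxy (..), Proxy (..))

-- The class is indexed, in effect, by the kind k: the kproxy parameter only
-- carries k, and the superclass pins it to the single value 'KProxy.
class kproxy ~ 'KProxy => HasDefault (kproxy :: KProxy k) where
  type Def kproxy :: k

instance HasDefault ('KProxy :: KProxy Bool) where
  type Def ('KProxy :: KProxy Bool) = 'False

-- Def reduces at kind Bool:
defaultBool :: Proxy (Def ('KProxy :: KProxy Bool))
defaultBool = Proxy :: Proxy 'False
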
diff --git a/testsuite/tests/polykinds/T9222.hs b/testsuite/tests/polykinds/T9222.hs new file mode 100644 index 0000000000..df112519ac --- /dev/null +++ b/testsuite/tests/polykinds/T9222.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE RankNTypes, GADTs, DataKinds, PolyKinds, TypeOperators, TypeFamilies #-} +module T9222 where + +import Data.Proxy + +data Want :: (i,j) -> * where + Want :: (a ~ '(b,c) => Proxy b) -> Want a diff --git a/testsuite/tests/polykinds/T9263.hs b/testsuite/tests/polykinds/T9263.hs new file mode 100644 index 0000000000..e913e1f653 --- /dev/null +++ b/testsuite/tests/polykinds/T9263.hs @@ -0,0 +1,2 @@ +module T9263 where + import T9263a diff --git a/testsuite/tests/polykinds/T9263a.hs b/testsuite/tests/polykinds/T9263a.hs new file mode 100644 index 0000000000..1cecabad38 --- /dev/null +++ b/testsuite/tests/polykinds/T9263a.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DataKinds, KindSignatures, TypeFamilies #-} +module T9263a where + +import T9263b +import Data.Proxy + +data Void + +instance PEq ('KProxy :: KProxy Void) diff --git a/testsuite/tests/polykinds/T9263b.hs b/testsuite/tests/polykinds/T9263b.hs new file mode 100644 index 0000000000..d267eaca79 --- /dev/null +++ b/testsuite/tests/polykinds/T9263b.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DataKinds, PolyKinds, TypeFamilies #-} +module T9263b where + +import Data.Proxy + +class kproxy ~ 'KProxy => PEq (kproxy :: KProxy a) where + type F (x :: a) :: Bool + type F (x :: a) = False diff --git a/testsuite/tests/polykinds/T9264.hs b/testsuite/tests/polykinds/T9264.hs new file mode 100644 index 0000000000..df75599e56 --- /dev/null +++ b/testsuite/tests/polykinds/T9264.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE PolyKinds, TypeFamilies, ScopedTypeVariables #-} +module T9264 where + +class C (a :: k) where + type F (a :: k) + type F (a :: k) = Int diff --git a/testsuite/tests/polykinds/all.T b/testsuite/tests/polykinds/all.T index 3634d83537..22a159d50e 100644 --- a/testsuite/tests/polykinds/all.T +++ b/testsuite/tests/polykinds/all.T @@ -100,3 +100,9 @@ test('T8566a', expect_broken(8566), compile,['']) test('T7481', normal, compile_fail,['']) test('T8705', normal, compile, ['']) test('T8985', normal, compile, ['']) +test('T9106', normal, compile_fail, ['']) +test('T9144', normal, compile_fail, ['']) +test('T9222', normal, compile, ['']) +test('T9264', normal, compile, ['']) +test('T9263', normal, run_command, ['$MAKE -s --no-print-directory T9263']) +test('T9063', normal, compile, ['']) diff --git a/testsuite/tests/profiling/should_run/all.T b/testsuite/tests/profiling/should_run/all.T index 493c846bc7..ac70b9f643 100644 --- a/testsuite/tests/profiling/should_run/all.T +++ b/testsuite/tests/profiling/should_run/all.T @@ -25,7 +25,7 @@ test('T3001-2', test('scc001', [req_profiling, extra_ways(['prof']), only_ways(prof_ways)], compile_and_run, - ['-fno-state-hack']) # Note [consistent stacks] + ['-fno-state-hack -fno-full-laziness']) # Note [consistent stacks] test('scc002', [req_profiling, extra_ways(['prof']), only_ways(prof_ways)], diff --git a/testsuite/tests/profiling/should_run/ioprof.prof.sample b/testsuite/tests/profiling/should_run/ioprof.prof.sample index 0cdfa82f48..07257e2dfe 100644 --- a/testsuite/tests/profiling/should_run/ioprof.prof.sample +++ b/testsuite/tests/profiling/should_run/ioprof.prof.sample @@ -1,39 +1,37 @@ - Mon Nov 14 13:28 2011 Time and Allocation Profiling Report (Final) + Mon Apr 28 15:29 2014 Time and Allocation Profiling Report (Final) ioprof +RTS -hc -p -RTS - total time = 0.00 secs (0 ticks @ 20 ms) - total alloc = 53,288 bytes 
(excludes profiling overheads) + total time = 0.00 secs (0 ticks @ 1000 us, 1 processor) + total alloc = 52,208 bytes (excludes profiling overheads) COST CENTRE MODULE %time %alloc -main Main 0.0 16.4 -errorM.\ Main 0.0 8.3 -CAF GHC.IO.Handle.FD 0.0 65.5 +MAIN MAIN 0.0 1.4 +CAF GHC.IO.Encoding 0.0 6.3 CAF GHC.Conc.Signal 0.0 1.3 -CAF GHC.IO.Encoding 0.0 5.9 +CAF GHC.IO.Handle.FD 0.0 66.2 +main Main 0.0 16.7 +errorM.\ Main 0.0 7.0 - individual inherited -COST CENTRE MODULE no. entries %time %alloc %time %alloc + individual inherited +COST CENTRE MODULE no. entries %time %alloc %time %alloc -MAIN MAIN 45 0 0.0 0.7 0.0 100.0 - CAF GHC.IO.Encoding.Iconv 76 0 0.0 0.5 0.0 0.5 - CAF GHC.Conc.Sync 74 0 0.0 0.5 0.0 0.5 - CAF GHC.IO.Encoding 65 0 0.0 5.9 0.0 5.9 - CAF GHC.Conc.Signal 62 0 0.0 1.3 0.0 1.3 - CAF GHC.IO.Handle.FD 56 0 0.0 65.5 0.0 65.5 - CAF GHC.Exception 55 0 0.0 0.2 0.0 0.2 - CAF Main 51 0 0.0 0.6 0.0 25.6 - main Main 90 1 0.0 16.4 0.0 24.9 - runM Main 93 1 0.0 0.0 0.0 8.3 - bar Main 94 0 0.0 0.0 0.0 8.3 - foo Main 99 0 0.0 0.0 0.0 8.3 - errorM Main 100 0 0.0 0.0 0.0 8.3 - errorM.\ Main 101 1 0.0 8.3 0.0 8.3 - >>= Main 95 0 0.0 0.0 0.0 0.0 - >>=.\ Main 96 1 0.0 0.0 0.0 0.0 - bar Main 91 1 0.0 0.2 0.0 0.2 - foo Main 97 1 0.0 0.0 0.0 0.0 - errorM Main 98 1 0.0 0.0 0.0 0.0 - >>= Main 92 1 0.0 0.0 0.0 0.0 +MAIN MAIN 44 0 0.0 1.4 0.0 100.0 + main Main 89 0 0.0 16.5 0.0 16.5 + CAF Main 87 0 0.0 0.0 0.0 7.4 + main Main 88 1 0.0 0.2 0.0 7.4 + runM Main 90 1 0.0 0.2 0.0 7.2 + bar Main 91 1 0.0 0.0 0.0 7.1 + errorM Main 93 1 0.0 0.0 0.0 0.0 + >>= Main 92 1 0.0 0.0 0.0 7.0 + >>=.\ Main 94 1 0.0 0.0 0.0 7.0 + foo Main 95 1 0.0 0.0 0.0 7.0 + errorM Main 96 0 0.0 0.0 0.0 7.0 + errorM.\ Main 97 1 0.0 7.0 0.0 7.0 + CAF GHC.IO.Handle.FD 84 0 0.0 66.2 0.0 66.2 + CAF GHC.Conc.Signal 82 0 0.0 1.3 0.0 1.3 + CAF GHC.Conc.Sync 81 0 0.0 0.4 0.0 0.4 + CAF GHC.IO.Encoding 74 0 0.0 6.3 0.0 6.3 + CAF GHC.IO.Encoding.Iconv 56 0 0.0 0.4 0.0 0.4 diff --git a/testsuite/tests/rename/should_compile/T9127.hs b/testsuite/tests/rename/should_compile/T9127.hs new file mode 100644 index 0000000000..c8e827f888 --- /dev/null +++ b/testsuite/tests/rename/should_compile/T9127.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE BangPatterns #-} +module T9127 where + +f = let !_ = 2 * 2 + in 2*2 diff --git a/testsuite/tests/rename/should_compile/T9127.stderr b/testsuite/tests/rename/should_compile/T9127.stderr new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/testsuite/tests/rename/should_compile/T9127.stderr diff --git a/testsuite/tests/rename/should_compile/all.T b/testsuite/tests/rename/should_compile/all.T index 0ce4ca125d..4ed92bd328 100644 --- a/testsuite/tests/rename/should_compile/all.T +++ b/testsuite/tests/rename/should_compile/all.T @@ -214,3 +214,4 @@ test('T7969', 'T7969.imports'])], run_command, ['$MAKE -s --no-print-directory T7969']) +test('T9127', normal, compile, ['']) diff --git a/testsuite/tests/rename/should_fail/T9177.hs b/testsuite/tests/rename/should_fail/T9177.hs new file mode 100644 index 0000000000..9fbb9407be --- /dev/null +++ b/testsuite/tests/rename/should_fail/T9177.hs @@ -0,0 +1,17 @@ +module T9177 where + +-- the main use case +type Foo = (int) + +-- other interesting cases +type Foo2 = (integerr) + +foo3 = bar +foo4 = Fun + +-- this warning is suboptimal (fun would be illegal here) +foo5 Fun = () + +-- No errors here: +data Bar = Bar +fun x = x diff --git a/testsuite/tests/rename/should_fail/T9177.stderr b/testsuite/tests/rename/should_fail/T9177.stderr new file mode 100644 index 
0000000000..624034053f --- /dev/null +++ b/testsuite/tests/rename/should_fail/T9177.stderr @@ -0,0 +1,20 @@ + +T9177.hs:4:13: + Not in scope: type variable ‘int’ + Perhaps you meant type constructor or class ‘Int’ (imported from Prelude) + +T9177.hs:7:14: + Not in scope: type variable ‘integerr’ + Perhaps you meant type constructor or class ‘Integer’ (imported from Prelude) + +T9177.hs:9:8: + Not in scope: ‘bar’ + Perhaps you meant data constructor ‘Bar’ (line 16) + +T9177.hs:10:8: + Not in scope: data constructor ‘Fun’ + Perhaps you meant variable ‘fun’ (line 17) + +T9177.hs:13:6: + Not in scope: data constructor ‘Fun’ + Perhaps you meant variable ‘fun’ (line 17) diff --git a/testsuite/tests/rename/should_fail/all.T b/testsuite/tests/rename/should_fail/all.T index f4c3570d3d..0f60ff6175 100644 --- a/testsuite/tests/rename/should_fail/all.T +++ b/testsuite/tests/rename/should_fail/all.T @@ -114,3 +114,4 @@ test('T8448', normal, compile_fail, ['']) test('T9006', extra_clean(['T9006a.hi', 'T9006a.o']), multimod_compile_fail, ['T9006', '-v0']) +test('T9177', normal, compile_fail, ['']) diff --git a/testsuite/tests/rename/should_fail/rnfail055.stderr b/testsuite/tests/rename/should_fail/rnfail055.stderr index 99ed2d6f12..c7b51a1d1f 100644 --- a/testsuite/tests/rename/should_fail/rnfail055.stderr +++ b/testsuite/tests/rename/should_fail/rnfail055.stderr @@ -20,8 +20,8 @@ RnFail055.hs-boot:6:1: RnFail055.hs-boot:8:1: Type constructor ‘S2’ has conflicting definitions in the module and its hs-boot file - Main module: type S2 a b = forall a. (a, b) - Boot file: type S2 a b = forall b. (a, b) + Main module: type S2 a b = forall a1. (a1, b) + Boot file: type S2 a b = forall b1. (a, b1) RnFail055.hs-boot:12:1: Type constructor ‘T1’ has conflicting definitions in the module @@ -33,9 +33,11 @@ RnFail055.hs-boot:14:1: Type constructor ‘T2’ has conflicting definitions in the module and its hs-boot file Main module: type role T2 representational nominal - data Eq b => T2 a b = T2 a + data Eq b => T2 a b + = T2 a Boot file: type role T2 nominal representational - data Eq a => T2 a b = T2 a + data Eq a => T2 a b + = T2 a RnFail055.hs-boot:16:11: T3 is exported by the hs-boot file, but not exported by the module @@ -60,7 +62,7 @@ RnFail055.hs-boot:25:1: and its hs-boot file Main module: type role T7 phantom data T7 a where - T7 :: a -> T7 a + T7 :: a1 -> T7 a Boot file: data T7 a = T7 a RnFail055.hs-boot:27:22: diff --git a/testsuite/tests/roles/should_compile/Roles1.stderr b/testsuite/tests/roles/should_compile/Roles1.stderr index cd027f13f2..96d5603bbf 100644 --- a/testsuite/tests/roles/should_compile/Roles1.stderr +++ b/testsuite/tests/roles/should_compile/Roles1.stderr @@ -1,54 +1,20 @@ TYPE SIGNATURES TYPE CONSTRUCTORS - T1 :: * -> * - data T1 a - No C type associated - Roles: [nominal] - RecFlag NonRecursive, Promotable - = K1 :: forall a. a -> T1 a Stricts: _ - FamilyInstance: none - T2 :: * -> * - data T2 a - No C type associated - Roles: [representational] - RecFlag NonRecursive, Promotable - = K2 :: forall a. a -> T2 a Stricts: _ - FamilyInstance: none - T3 :: k -> * - data T3 (k::BOX) (a::k) - No C type associated - Roles: [nominal, phantom] - RecFlag NonRecursive, Not promotable - = K3 :: forall (k::BOX) (a::k). T3 k a - FamilyInstance: none - T4 :: (* -> *) -> * -> * - data T4 (a::* -> *) b - No C type associated - Roles: [nominal, nominal] - RecFlag NonRecursive, Not promotable - = K4 :: forall (a::* -> *) b. 
(a b) -> T4 a b Stricts: _ - FamilyInstance: none - T5 :: * -> * - data T5 a - No C type associated - Roles: [representational] - RecFlag NonRecursive, Promotable - = K5 :: forall a. a -> T5 a Stricts: _ - FamilyInstance: none - T6 :: k -> * - data T6 (k::BOX) (a::k) - No C type associated - Roles: [nominal, phantom] - RecFlag NonRecursive, Not promotable - = K6 :: forall (k::BOX) (a::k). T6 k a - FamilyInstance: none - T7 :: k -> * -> * - data T7 (k::BOX) (a::k) b - No C type associated - Roles: [nominal, phantom, representational] - RecFlag NonRecursive, Not promotable - = K7 :: forall (k::BOX) (a::k) b. b -> T7 k a b Stricts: _ - FamilyInstance: none + type role T1 nominal + data T1 a = K1 a + Promotable + data T2 a = K2 a + Promotable + type role T3 phantom + data T3 (a :: k) = K3 + type role T4 nominal nominal + data T4 (a :: * -> *) b = K4 (a b) + data T5 a = K5 a + Promotable + type role T6 phantom + data T6 (a :: k) = K6 + type role T7 phantom representational + data T7 (a :: k) b = K7 b COERCION AXIOMS Dependent modules: [] Dependent packages: [base, ghc-prim, integer-gmp] diff --git a/testsuite/tests/roles/should_compile/Roles13.stderr b/testsuite/tests/roles/should_compile/Roles13.stderr index 647e59ba51..b0dda24f2c 100644 --- a/testsuite/tests/roles/should_compile/Roles13.stderr +++ b/testsuite/tests/roles/should_compile/Roles13.stderr @@ -13,8 +13,7 @@ Roles13.convert = `cast` (<Roles13.Wrap Roles13.Age>_R -> Roles13.NTCo:Wrap[0] Roles13.NTCo:Age[0] :: (Roles13.Wrap Roles13.Age -> Roles13.Wrap Roles13.Age) - ~# - (Roles13.Wrap Roles13.Age -> GHC.Types.Int)) + ~R# (Roles13.Wrap Roles13.Age -> GHC.Types.Int)) diff --git a/testsuite/tests/roles/should_compile/Roles14.stderr b/testsuite/tests/roles/should_compile/Roles14.stderr index 13231931e3..e0f26a14d3 100644 --- a/testsuite/tests/roles/should_compile/Roles14.stderr +++ b/testsuite/tests/roles/should_compile/Roles14.stderr @@ -1,9 +1,7 @@ TYPE SIGNATURES TYPE CONSTRUCTORS - C2 :: * -> Constraint - class C2 a - Roles: [representational] - RecFlag NonRecursive + type role C2 representational + class C2 a where meth2 :: a -> a COERCION AXIOMS axiom Roles12.NTCo:C2 :: C2 a = a -> a diff --git a/testsuite/tests/roles/should_compile/Roles2.stderr b/testsuite/tests/roles/should_compile/Roles2.stderr index f5bcbe6829..2c7ab6c66f 100644 --- a/testsuite/tests/roles/should_compile/Roles2.stderr +++ b/testsuite/tests/roles/should_compile/Roles2.stderr @@ -1,19 +1,8 @@ TYPE SIGNATURES TYPE CONSTRUCTORS - T1 :: * -> * - data T1 a - No C type associated - Roles: [representational] - RecFlag NonRecursive, Not promotable - = K1 :: forall a. (IO a) -> T1 a Stricts: _ - FamilyInstance: none - T2 :: * -> * - data T2 a - No C type associated - Roles: [representational] - RecFlag NonRecursive, Not promotable - = K2 :: forall a. 
(FunPtr a) -> T2 a Stricts: _ - FamilyInstance: none + data T1 a = K1 (IO a) + type role T2 phantom + data T2 a = K2 (FunPtr a) COERCION AXIOMS Dependent modules: [] Dependent packages: [base, ghc-prim, integer-gmp] diff --git a/testsuite/tests/roles/should_compile/Roles3.stderr b/testsuite/tests/roles/should_compile/Roles3.stderr index 62eb2a9474..270afca9cd 100644 --- a/testsuite/tests/roles/should_compile/Roles3.stderr +++ b/testsuite/tests/roles/should_compile/Roles3.stderr @@ -1,31 +1,16 @@ TYPE SIGNATURES TYPE CONSTRUCTORS - C1 :: * -> Constraint - class C1 a - Roles: [nominal] - RecFlag NonRecursive + class C1 a where meth1 :: a -> a - C2 :: * -> * -> Constraint - class C2 a b - Roles: [nominal, nominal] - RecFlag NonRecursive - meth2 :: (~) * a b -> a -> b - C3 :: * -> * -> Constraint - class C3 a b - Roles: [nominal, nominal] - RecFlag NonRecursive - type family F3 b :: * (open) + class C2 a b where + meth2 :: a ~ b => a -> b + class C3 a b where + type family F3 b :: * open meth3 :: a -> F3 b -> F3 b - C4 :: * -> * -> Constraint - class C4 a b - Roles: [nominal, nominal] - RecFlag NonRecursive + class C4 a b where meth4 :: a -> F4 b -> F4 b - F4 :: * -> * - type family F4 a :: * (open) - Syn1 :: * -> * + type family F4 a :: * open type Syn1 a = F4 a - Syn2 :: * -> * type Syn2 a = [a] COERCION AXIOMS axiom Roles3.NTCo:C1 :: C1 a = a -> a diff --git a/testsuite/tests/roles/should_compile/Roles4.stderr b/testsuite/tests/roles/should_compile/Roles4.stderr index 32862ea073..f2b590fadd 100644 --- a/testsuite/tests/roles/should_compile/Roles4.stderr +++ b/testsuite/tests/roles/should_compile/Roles4.stderr @@ -1,16 +1,9 @@ TYPE SIGNATURES TYPE CONSTRUCTORS - C1 :: * -> Constraint - class C1 a - Roles: [nominal] - RecFlag NonRecursive + class C1 a where meth1 :: a -> a - C3 :: * -> Constraint - class C3 a - Roles: [nominal] - RecFlag NonRecursive + class C3 a where meth3 :: a -> Syn1 a - Syn1 :: * -> * type Syn1 a = [a] COERCION AXIOMS axiom Roles4.NTCo:C1 :: C1 a = a -> a diff --git a/testsuite/tests/roles/should_compile/T8958.stderr b/testsuite/tests/roles/should_compile/T8958.stderr index 919530bb03..d400b9190c 100644 --- a/testsuite/tests/roles/should_compile/T8958.stderr +++ b/testsuite/tests/roles/should_compile/T8958.stderr @@ -1,49 +1,40 @@ -
-T8958.hs:1:31: Warning:
- -XDatatypeContexts is deprecated: It was widely considered a misfeature, and has been removed from the Haskell language.
-TYPE SIGNATURES
-TYPE CONSTRUCTORS
- Map :: * -> * -> *
- newtype (Nominal k, Representational v) => Map k v
- No C type associated
- Roles: [nominal, representational]
- RecFlag NonRecursive, Promotable
- = MkMap :: [(k, v)] -> Map k v Stricts: _
- FamilyInstance: none
- Nominal :: * -> Constraint
- class Nominal a
- Roles: [nominal]
- RecFlag NonRecursive
- Representational :: * -> Constraint
- class Representational a
- Roles: [representational]
- RecFlag NonRecursive
-COERCION AXIOMS
- axiom T8958.NTCo:Map :: Map k v = [(k, v)]
-INSTANCES
- instance [incoherent] Representational a
- -- Defined at T8958.hs:10:10
- instance [incoherent] Nominal a -- Defined at T8958.hs:7:10
-Dependent modules: []
-Dependent packages: [base, ghc-prim, integer-gmp]
-
-==================== Typechecker ====================
-AbsBinds [a] []
- {Exports: [T8958.$fRepresentationala <= $dRepresentational
- <>]
- Exported types: T8958.$fRepresentationala [InlPrag=[ALWAYS] CONLIKE]
- :: forall a. Representational a
- [LclIdX[DFunId],
- Str=DmdType,
- Unf=DFun: \ (@ a) -> T8958.D:Representational TYPE a]
- Binds: $dRepresentational = T8958.D:Representational}
-AbsBinds [a] []
- {Exports: [T8958.$fNominala <= $dNominal
- <>]
- Exported types: T8958.$fNominala [InlPrag=[ALWAYS] CONLIKE]
- :: forall a. Nominal a
- [LclIdX[DFunId],
- Str=DmdType,
- Unf=DFun: \ (@ a) -> T8958.D:Nominal TYPE a]
- Binds: $dNominal = T8958.D:Nominal}
-
+ +T8958.hs:1:31: Warning: + -XDatatypeContexts is deprecated: It was widely considered a misfeature, and has been removed from the Haskell language. +TYPE SIGNATURES +TYPE CONSTRUCTORS + type role Map nominal representational + newtype (Nominal k, Representational v) => Map k v = MkMap [(k, v)] + Promotable + class Nominal a + type role Representational representational + class Representational a +COERCION AXIOMS + axiom T8958.NTCo:Map :: Map k v = [(k, v)] +INSTANCES + instance [incoherent] Representational a + -- Defined at T8958.hs:10:10 + instance [incoherent] Nominal a -- Defined at T8958.hs:7:10 +Dependent modules: [] +Dependent packages: [base, ghc-prim, integer-gmp] + +==================== Typechecker ==================== +AbsBinds [a] [] + {Exports: [T8958.$fRepresentationala <= $dRepresentational + <>] + Exported types: T8958.$fRepresentationala [InlPrag=[ALWAYS] CONLIKE] + :: forall a. Representational a + [LclIdX[DFunId], + Str=DmdType, + Unf=DFun: \ (@ a) -> T8958.D:Representational TYPE a] + Binds: $dRepresentational = T8958.D:Representational} +AbsBinds [a] [] + {Exports: [T8958.$fNominala <= $dNominal + <>] + Exported types: T8958.$fNominala [InlPrag=[ALWAYS] CONLIKE] + :: forall a. Nominal a + [LclIdX[DFunId], + Str=DmdType, + Unf=DFun: \ (@ a) -> T8958.D:Nominal TYPE a] + Binds: $dNominal = T8958.D:Nominal} + diff --git a/testsuite/tests/roles/should_fail/Roles12.stderr b/testsuite/tests/roles/should_fail/Roles12.stderr index bb830beae3..9b0f2cfdb5 100644 --- a/testsuite/tests/roles/should_fail/Roles12.stderr +++ b/testsuite/tests/roles/should_fail/Roles12.stderr @@ -4,4 +4,4 @@ Roles12.hs:5:1: and its hs-boot file Main module: type role T phantom data T a - Boot file: data T a + Boot file: abstract T a diff --git a/testsuite/tests/rts/T9045.hs b/testsuite/tests/rts/T9045.hs new file mode 100644 index 0000000000..1e581efa35 --- /dev/null +++ b/testsuite/tests/rts/T9045.hs @@ -0,0 +1,22 @@ +-- This is nofib/smp/threads006. It fails in GHC 7.8.2 with a GC crash. + +{-# OPTIONS_GHC -O2 #-} +import System.IO +import System.Environment +import System.CPUTime +import Text.Printf +import Control.Monad +import Control.Concurrent +import Control.Concurrent.MVar +import Control.Exception + +main :: IO () +main = do + hSetBuffering stdout NoBuffering + [nthreads] <- fmap (map read) getArgs + tids <- replicateM nthreads . mask $ \_ -> forkIO $ return () + m <- newEmptyMVar + -- do it in a subthread to avoid bound-thread overhead + forkIO $ do mapM_ killThread tids; putMVar m () + takeMVar m + return () diff --git a/testsuite/tests/rts/T9078.hs b/testsuite/tests/rts/T9078.hs new file mode 100644 index 0000000000..d0389f1330 --- /dev/null +++ b/testsuite/tests/rts/T9078.hs @@ -0,0 +1,10 @@ +module Main where + +import Control.Monad +import System.Mem.StableName + +main :: IO () +main = replicateM_ 500000 (makeStableName foo) + +foo :: Int +foo = 1 diff --git a/testsuite/tests/rts/T9078.stderr b/testsuite/tests/rts/T9078.stderr new file mode 100644 index 0000000000..901a1ca49c --- /dev/null +++ b/testsuite/tests/rts/T9078.stderr @@ -0,0 +1,2 @@ +cap 0: initialised +cap 0: shutting down diff --git a/testsuite/tests/rts/all.T b/testsuite/tests/rts/all.T index 9239f44a21..d7c74c5847 100644 --- a/testsuite/tests/rts/all.T +++ b/testsuite/tests/rts/all.T @@ -222,3 +222,16 @@ test('T8124', [ only_ways(threaded_ways), omit_ways(['ghci']), # T8124_stub.h before compiling T8124_c.c, which # needs it. 
compile_and_run, ['T8124_c.c -no-hs-main']) + +# +RTS -A8k makes it fail faster +# The ghci way gets confused by the RTS options +test('T9045', [ omit_ways(['ghci']), extra_run_opts('10000 +RTS -A8k -RTS') ], compile_and_run, ['']) + +# I couldn't reproduce 9078 with the -threaded runtime, but could easily +# with the non-threaded one. +test('T9078', [ omit_ways(threaded_ways) ], compile_and_run, ['-with-rtsopts="-DS" -debug']) + +# 251 = RTS exit code for "out of memory" +test('overflow1', [ exit_code(251) ], compile_and_run, ['']) +test('overflow2', [ exit_code(251) ], compile_and_run, ['']) +test('overflow3', [ exit_code(251) ], compile_and_run, ['']) diff --git a/testsuite/tests/rts/exec_signals_prepare.c b/testsuite/tests/rts/exec_signals_prepare.c index 26f30acc57..2b01dd5d1c 100644 --- a/testsuite/tests/rts/exec_signals_prepare.c +++ b/testsuite/tests/rts/exec_signals_prepare.c @@ -2,6 +2,7 @@ #include <stdio.h> #include <errno.h> #include <string.h> +#include <unistd.h> // Invokes a process, making sure that the state of the signal // handlers has all been set back to the unix default. diff --git a/testsuite/tests/rts/overflow1.hs b/testsuite/tests/rts/overflow1.hs new file mode 100644 index 0000000000..63ed5a4e02 --- /dev/null +++ b/testsuite/tests/rts/overflow1.hs @@ -0,0 +1,11 @@ +module Main where + +import Data.Array.IO +import Data.Word + +-- Try to overflow BLOCK_ROUND_UP in the computation of req_blocks in allocate() +-- Here we invoke allocate() via newByteArray# and the array package. +-- Request a number of bytes close to HS_WORD_MAX, +-- subtracting a few words for overhead in newByteArray#. +-- Allocate Word32s (rather than Word8s) to get around bounds-checking in array. +main = newArray (0,maxBound `div` 4 - 10) 0 :: IO (IOUArray Word Word32) diff --git a/testsuite/tests/rts/overflow1.stderr b/testsuite/tests/rts/overflow1.stderr new file mode 100644 index 0000000000..734ca954ca --- /dev/null +++ b/testsuite/tests/rts/overflow1.stderr @@ -0,0 +1 @@ +overflow1: out of memory diff --git a/testsuite/tests/rts/overflow2.hs b/testsuite/tests/rts/overflow2.hs new file mode 100644 index 0000000000..ac72158f45 --- /dev/null +++ b/testsuite/tests/rts/overflow2.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE ForeignFunctionInterface #-} +module Main where + +import Foreign + +-- Test allocate(), the easy way. +data Cap = Cap +foreign import ccall "rts_unsafeGetMyCapability" myCapability :: IO (Ptr Cap) +foreign import ccall "allocate" allocate :: Ptr Cap -> Word -> IO (Ptr ()) + +-- Number of words n such that n * sizeof(W_) exactly overflows a word +-- (2^30 on a 32-bit system, 2^61 on a 64-bit system) +overflowWordCount :: Word +overflowWordCount = fromInteger $ + (fromIntegral (maxBound :: Word) + 1) `div` + fromIntegral (sizeOf (undefined :: Word)) + +main = do + cap <- myCapability + allocate cap (overflowWordCount - 1) diff --git a/testsuite/tests/rts/overflow2.stderr b/testsuite/tests/rts/overflow2.stderr new file mode 100644 index 0000000000..be65509ea9 --- /dev/null +++ b/testsuite/tests/rts/overflow2.stderr @@ -0,0 +1 @@ +overflow2: out of memory diff --git a/testsuite/tests/rts/overflow3.hs b/testsuite/tests/rts/overflow3.hs new file mode 100644 index 0000000000..31dfd5db53 --- /dev/null +++ b/testsuite/tests/rts/overflow3.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE ForeignFunctionInterface #-} +module Main where + +import Foreign + +-- Test allocate(), the easy way. 
+data Cap = Cap +foreign import ccall "rts_unsafeGetMyCapability" myCapability :: IO (Ptr Cap) +foreign import ccall "allocate" allocate :: Ptr Cap -> Word -> IO (Ptr ()) + +-- Number of words n such that n * sizeof(W_) exactly overflows a word +-- (2^30 on a 32-bit system, 2^61 on a 64-bit system) +overflowWordCount :: Word +overflowWordCount = fromInteger $ + (fromIntegral (maxBound :: Word) + 1) `div` + fromIntegral (sizeOf (undefined :: Word)) + +main = do + cap <- myCapability + allocate cap (overflowWordCount + 1) diff --git a/testsuite/tests/rts/overflow3.stderr b/testsuite/tests/rts/overflow3.stderr new file mode 100644 index 0000000000..6c804e5048 --- /dev/null +++ b/testsuite/tests/rts/overflow3.stderr @@ -0,0 +1 @@ +overflow3: out of memory diff --git a/testsuite/tests/simplCore/should_compile/T4201.stdout b/testsuite/tests/simplCore/should_compile/T4201.stdout index ed519ed02f..6ff4692854 100644 --- a/testsuite/tests/simplCore/should_compile/T4201.stdout +++ b/testsuite/tests/simplCore/should_compile/T4201.stdout @@ -1,3 +1,3 @@ - {- Arity: 1, HasNoCafRefs, Strictness: <S,1*U()>m, - Unfolding: InlineRule (0, True, True) - Eta.bof `cast` (Sym (Eta.NTCo:Foo[0]) ->_R <Eta.T>_R) -} + {- Arity: 1, HasNoCafRefs, Strictness: <S,1*U()>m, + Unfolding: InlineRule (0, True, True) + Eta.bof `cast` (Sym (Eta.NTCo:Foo[0]) ->_R <Eta.T>_R) -} diff --git a/testsuite/tests/simplCore/should_compile/T4918.stdout b/testsuite/tests/simplCore/should_compile/T4918.stdout index c79b116f03..708be353c4 100644 --- a/testsuite/tests/simplCore/should_compile/T4918.stdout +++ b/testsuite/tests/simplCore/should_compile/T4918.stdout @@ -1,2 +1,2 @@ - {- HasNoCafRefs, Strictness: m, Unfolding: (GHC.Types.C# 'p') -} - {- HasNoCafRefs, Strictness: m, Unfolding: (GHC.Types.C# 'q') -} + {- HasNoCafRefs, Strictness: m, Unfolding: (GHC.Types.C# 'p') -} + {- HasNoCafRefs, Strictness: m, Unfolding: (GHC.Types.C# 'q') -} diff --git a/testsuite/tests/simplCore/should_compile/all.T b/testsuite/tests/simplCore/should_compile/all.T index 616b6cc359..1ebc742f0f 100644 --- a/testsuite/tests/simplCore/should_compile/all.T +++ b/testsuite/tests/simplCore/should_compile/all.T @@ -198,7 +198,7 @@ test('T5996', ['$MAKE -s --no-print-directory T5996']) test('T8537', normal, compile, ['']) test('T8832', - extra_clean(['T8832.hi', 'T8832a.o']), + [when(wordsize(32), expect_fail), extra_clean(['T8832.hi', 'T8832a.o'])], run_command, ['$MAKE -s --no-print-directory T8832']) test('T8848', only_ways(['optasm']), compile, ['-ddump-rule-firings']) diff --git a/testsuite/tests/simplCore/should_compile/spec001.hs b/testsuite/tests/simplCore/should_compile/spec001.hs index f4b4dd0365..5a6fb039f4 100644 --- a/testsuite/tests/simplCore/should_compile/spec001.hs +++ b/testsuite/tests/simplCore/should_compile/spec001.hs @@ -1,6 +1,5 @@ {-# LANGUAGE CPP, UnboxedTuples, MagicHash, StandaloneDeriving, DeriveDataTypeable #-} {-# OPTIONS_GHC -O #-} -{-# OPTIONS_GHC -fno-warn-amp #-} -- In GHC 6.4, compiling this module gave a Core Lint failure following the -- specialier, because a function was floated out that had a RULE that diff --git a/testsuite/tests/simplCore/should_run/T9128.hs b/testsuite/tests/simplCore/should_run/T9128.hs new file mode 100644 index 0000000000..73aa39b31b --- /dev/null +++ b/testsuite/tests/simplCore/should_run/T9128.hs @@ -0,0 +1,12 @@ +module Main where + +newtype T a = MkT a + +-- Trac #9128: we treated x as absent!!!! 
+ +f x = let {-# NOINLINE h #-} + h = case x of MkT g -> g + in + h (h (h (h (h (h True))))) + +main = print (f (MkT id)) diff --git a/testsuite/tests/simplCore/should_run/T9128.stdout b/testsuite/tests/simplCore/should_run/T9128.stdout new file mode 100644 index 0000000000..0ca95142bb --- /dev/null +++ b/testsuite/tests/simplCore/should_run/T9128.stdout @@ -0,0 +1 @@ +True diff --git a/testsuite/tests/simplCore/should_run/all.T b/testsuite/tests/simplCore/should_run/all.T index 530e4e58f2..e36fb00f0f 100644 --- a/testsuite/tests/simplCore/should_run/all.T +++ b/testsuite/tests/simplCore/should_run/all.T @@ -65,3 +65,5 @@ test('T7924', exit_code(1), compile_and_run, ['']) # Run this test *without* optimisation too test('T457', [ only_ways(['normal','optasm']), exit_code(1) ], compile_and_run, ['']) + +test('T9128', normal, compile_and_run, ['']) diff --git a/testsuite/tests/stranal/should_compile/T9208.hs b/testsuite/tests/stranal/should_compile/T9208.hs new file mode 100644 index 0000000000..bf7fb47729 --- /dev/null +++ b/testsuite/tests/stranal/should_compile/T9208.hs @@ -0,0 +1,98 @@ +{-# LANGUAGE CPP, LambdaCase, BangPatterns, MagicHash, TupleSections, ScopedTypeVariables #-} +{-# OPTIONS_GHC -w #-} -- Suppress warnings for unimplemented methods + +{- | Evaluate Template Haskell splices on node.js, + using pipes to communicate with GHCJS + -} + +-- module GHCJS.Prim.TH.Eval +module Eval ( + runTHServer + ) where + +import Control.Applicative +import Control.Monad + +import Data.Binary +import Data.Binary.Get +import Data.ByteString (ByteString) +import qualified Data.ByteString as B +import qualified Data.ByteString.Lazy as BL + +import GHC.Prim + +import qualified Language.Haskell.TH as TH +import qualified Language.Haskell.TH.Syntax as TH + +import Unsafe.Coerce + +data THResultType = THExp | THPat | THType | THDec + +data Message + -- | GHCJS compiler to node.js requests + = RunTH THResultType ByteString TH.Loc + -- | node.js to GHCJS compiler responses + | RunTH' THResultType ByteString [TH.Dec] -- ^ serialized AST and additional toplevel declarations + +instance Binary THResultType where + put _ = return () + get = return undefined + +instance Binary Message where + put _ = return () + get = return undefined + +data QState = QState + +data GHCJSQ a = GHCJSQ { runGHCJSQ :: QState -> IO (a, QState) } + +instance Functor GHCJSQ where + fmap f (GHCJSQ s) = GHCJSQ $ fmap (\(x,s') -> (f x,s')) . 
s + +instance Applicative GHCJSQ where + f <*> a = GHCJSQ $ \s -> + do (f',s') <- runGHCJSQ f s + (a', s'') <- runGHCJSQ a s' + return (f' a', s'') + pure x = GHCJSQ (\s -> return (x,s)) + +instance Monad GHCJSQ where + (>>=) m f = GHCJSQ $ \s -> + do (m', s') <- runGHCJSQ m s + (a, s'') <- runGHCJSQ (f m') s' + return (a, s'') + return = pure + +instance TH.Quasi GHCJSQ where qRunIO m = GHCJSQ $ \s -> fmap (,s) m + +-- | the Template Haskell server +runTHServer :: IO () +runTHServer = void $ runGHCJSQ server QState + where + server = TH.qRunIO awaitMessage >>= \case + RunTH t code loc -> do + a <- TH.qRunIO $ loadTHData code + runTH t a loc + _ -> TH.qRunIO (putStrLn "warning: ignoring unexpected message type") + +runTH :: THResultType -> Any -> TH.Loc -> GHCJSQ () +runTH rt obj loc = do + res <- case rt of + THExp -> runTHCode (unsafeCoerce obj :: TH.Q TH.Exp) + THPat -> runTHCode (unsafeCoerce obj :: TH.Q TH.Pat) + THType -> runTHCode (unsafeCoerce obj :: TH.Q TH.Type) + THDec -> runTHCode (unsafeCoerce obj :: TH.Q [TH.Dec]) + TH.qRunIO (sendResult $ RunTH' rt res []) + +runTHCode :: {- Binary a => -} TH.Q a -> GHCJSQ ByteString +runTHCode c = TH.runQ c >> return B.empty + +loadTHData :: ByteString -> IO Any +loadTHData bs = return (unsafeCoerce ()) + +awaitMessage :: IO Message +awaitMessage = fmap (runGet get) (return BL.empty) + +-- | send result back +sendResult :: Message -> IO () +sendResult msg = return ()
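The GHCJSQ wrapper that T9208.hs above hand-rolls (a function QState -> IO (a, QState) with Functor, Applicative and Monad instances) has the same shape as StateT QState IO from the transformers package. A minimal sketch of that correspondence, for orientation only and not part of the patch; the module and names below are hypothetical:

module GHCJSQSketch where

import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.State (StateT, evalStateT)

-- Stand-in for the (empty) QState of T9208.hs.
data QState = QState

-- GHCJSQ a  is isomorphic to  QState -> IO (a, QState),
-- which is exactly  StateT QState IO a.
type GHCJSQ a = StateT QState IO a

-- Analogue of runTHServer: run an action from an initial QState and
-- discard the final state.
runTHServer :: GHCJSQ () -> IO ()
runTHServer server = evalStateT server QState

-- qRunIO from the Quasi instance corresponds to lifting an IO action.
qRunIO :: IO a -> GHCJSQ a
qRunIO = liftIO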
\ No newline at end of file
diff --git a/testsuite/tests/stranal/should_compile/all.T b/testsuite/tests/stranal/should_compile/all.T
index 0d10a99fe6..184ff1ec88 100644
--- a/testsuite/tests/stranal/should_compile/all.T
+++ b/testsuite/tests/stranal/should_compile/all.T
@@ -20,3 +20,6 @@ test('T8467', normal, compile, [''])
test('T8037', normal, compile, [''])
test('T8743', [ extra_clean(['T8743.o-boot', 'T8743.hi-boot']) ],
             multimod_compile, ['T8743', '-v0'])
+test('T9208', when(compiler_debugged(), expect_broken(9208)), compile, [''])
+# T9208 fails (and should do so) if you have assertion checking on in the compiler
+# Hence the above expect_broken. See comments in the Trac ticket
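Stepping back to the overflow1/overflow2/overflow3 tests added earlier in this patch: the quantity they revolve around is the word count whose product with sizeOf (undefined :: Word) is exactly 2^wordSize. A small worked version of that arithmetic, for illustration only; it assumes a 64-bit Word and is not part of the testsuite:

module Main where

import Foreign.Storable (sizeOf)

-- Same definition as in overflow2.hs/overflow3.hs: the number of words n
-- such that n * sizeOf (undefined :: Word) is exactly 2^wordSize.
overflowWordCount :: Word
overflowWordCount = fromInteger $
  (fromIntegral (maxBound :: Word) + 1)
    `div` fromIntegral (sizeOf (undefined :: Word))

main :: IO ()
main = do
  -- On a 64-bit platform: 2^64 / 8 = 2^61 = 2305843009213693952 words.
  print overflowWordCount
  -- overflow2 asks allocate() for one word fewer (a huge but non-wrapping
  -- request), overflow3 for one word more (whose byte count would wrap if
  -- computed naively); both are expected to fail with "out of memory",
  -- matching the overflow2.stderr and overflow3.stderr files above.
  print (overflowWordCount - 1, overflowWordCount + 1)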
\ No newline at end of file diff --git a/testsuite/tests/stranal/should_run/T9254.hs b/testsuite/tests/stranal/should_run/T9254.hs new file mode 100644 index 0000000000..279eb5c1ec --- /dev/null +++ b/testsuite/tests/stranal/should_run/T9254.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE MagicHash, UnboxedTuples #-} +module Main where +import GHC.Exts + +f :: (() -> (# Int#, () #)) -> () +{-# NOINLINE f #-} +-- Strictness signature was (7.8.2) +-- <C(S(LS)), 1*C1(U(A,1*U()))> +-- I.e. calls k, but discards first component of result +f k = case k () of (# _, r #) -> r + +g :: Int -> () +g y = f (\n -> (# case y of I# y2 -> h (h (h (h (h (h (h y2)))))), n #)) + -- RHS is big enough to force worker/wrapper + +{-# NOINLINE h #-} +h :: Int# -> Int# +h n = n +# 1# + +main = print (g 1) diff --git a/testsuite/tests/stranal/should_run/T9254.stdout b/testsuite/tests/stranal/should_run/T9254.stdout new file mode 100644 index 0000000000..6a452c185a --- /dev/null +++ b/testsuite/tests/stranal/should_run/T9254.stdout @@ -0,0 +1 @@ +() diff --git a/testsuite/tests/stranal/should_run/all.T b/testsuite/tests/stranal/should_run/all.T index 0c43aac8c4..2ca65b5110 100644 --- a/testsuite/tests/stranal/should_run/all.T +++ b/testsuite/tests/stranal/should_run/all.T @@ -7,3 +7,4 @@ test('strun003', normal, compile_and_run, ['']) test('strun004', normal, compile_and_run, ['']) test('T2756b', normal, compile_and_run, ['']) test('T7649', normal, compile_and_run, ['']) +test('T9254', normal, compile_and_run, ['']) diff --git a/testsuite/tests/th/T7241.hs b/testsuite/tests/th/T7241.hs new file mode 100644 index 0000000000..971a2678f8 --- /dev/null +++ b/testsuite/tests/th/T7241.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TemplateHaskell #-} + +module T7241 where + +import Language.Haskell.TH + +$(newName "Foo" >>= \o -> return [DataD [] o [] [RecC o []] []]) diff --git a/testsuite/tests/th/T7241.stderr b/testsuite/tests/th/T7241.stderr new file mode 100644 index 0000000000..343cdc827d --- /dev/null +++ b/testsuite/tests/th/T7241.stderr @@ -0,0 +1,6 @@ + +T7241.hs:7:3: + Duplicate exact Name ‘Foo’ + Probable cause: you used a unique Template Haskell name (NameU), + perhaps via newName, but bound it multiple times + If that's it, then -ddump-splices might be useful diff --git a/testsuite/tests/th/T8932.stderr b/testsuite/tests/th/T8932.stderr index 0e0f9774d5..c861235091 100644 --- a/testsuite/tests/th/T8932.stderr +++ b/testsuite/tests/th/T8932.stderr @@ -1,5 +1,11 @@ -
-T8932.hs:11:1:
- Multiple declarations of ‘foo’
- Declared at: T8932.hs:5:3
- T8932.hs:11:1
+ +T8932.hs:5:3: + Duplicate exact Name ‘foo’ + Probable cause: you used a unique Template Haskell name (NameU), + perhaps via newName, but bound it multiple times + If that's it, then -ddump-splices might be useful + +T8932.hs:11:1: + Multiple declarations of ‘foo’ + Declared at: T8932.hs:5:3 + T8932.hs:11:1 diff --git a/testsuite/tests/th/T9199.hs b/testsuite/tests/th/T9199.hs new file mode 100644 index 0000000000..aa41198b57 --- /dev/null +++ b/testsuite/tests/th/T9199.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE TemplateHaskell, PolyKinds, TypeFamilies #-} + +module T9160 where + +$( [d| class C (a :: k) where + type F (a :: k) :: * + |] + ) + diff --git a/testsuite/tests/th/TH_Roles2.stderr b/testsuite/tests/th/TH_Roles2.stderr index bd44d12c6b..ab61060000 100644 --- a/testsuite/tests/th/TH_Roles2.stderr +++ b/testsuite/tests/th/TH_Roles2.stderr @@ -1,12 +1,7 @@ TYPE SIGNATURES TYPE CONSTRUCTORS - T :: k -> * - data T (k::BOX) (a::k) - No C type associated - Roles: [nominal, representational] - RecFlag NonRecursive, Not promotable - = - FamilyInstance: none + type role T representational + data T (a :: k) COERCION AXIOMS Dependent modules: [] Dependent packages: [base, ghc-prim, integer-gmp, pretty-1.1.1.1, diff --git a/testsuite/tests/th/all.T b/testsuite/tests/th/all.T index 22bb7cc637..6e86d303e5 100644 --- a/testsuite/tests/th/all.T +++ b/testsuite/tests/th/all.T @@ -326,4 +326,6 @@ test('T8884', normal, compile, ['-v0']) test('T8954', normal, compile, ['-v0']) test('T8932', normal, compile_fail, ['-v0']) test('T8987', normal, compile_fail, ['-v0']) +test('T7241', normal, compile_fail, ['-v0']) +test('T9199', normal, compile, ['-v0']) diff --git a/testsuite/tests/typecheck/should_compile/T4912.stderr b/testsuite/tests/typecheck/should_compile/T4912.stderr index 50d2deb3cd..0e0920f034 100644 --- a/testsuite/tests/typecheck/should_compile/T4912.stderr +++ b/testsuite/tests/typecheck/should_compile/T4912.stderr @@ -1,4 +1,12 @@ -T4912.hs:10:10: Warning: Orphan instance: instance Foo TheirData +T4912.hs:10:10: Warning: + Orphan instance: instance Foo TheirData + To avoid this + move the instance declaration to the module of the class or of the type, or + wrap the type with a newtype and declare the instance on the new type. -T4912.hs:13:10: Warning: Orphan instance: instance Bar OurData +T4912.hs:13:10: Warning: + Orphan instance: instance Bar OurData + To avoid this + move the instance declaration to the module of the class or of the type, or + wrap the type with a newtype and declare the instance on the new type. 
diff --git a/testsuite/tests/typecheck/should_compile/T5481.stderr b/testsuite/tests/typecheck/should_compile/T5481.stderr index df5d23b360..719c4ce5c7 100644 --- a/testsuite/tests/typecheck/should_compile/T5481.stderr +++ b/testsuite/tests/typecheck/should_compile/T5481.stderr @@ -1,8 +1,4 @@ -T5481.hs:6:5: - The RHS of an associated type declaration mentions type variable ‘b’ - All such variables must be bound on the LHS +T5481.hs:6:16: Not in scope: type variable ‘b’ -T5481.hs:8:5: - The RHS of an associated type declaration mentions type variable ‘a’ - All such variables must be bound on the LHS +T5481.hs:8:16: Not in scope: type variable ‘a’ diff --git a/testsuite/tests/typecheck/should_compile/T9117.hs b/testsuite/tests/typecheck/should_compile/T9117.hs new file mode 100644 index 0000000000..cb05bf2c23 --- /dev/null +++ b/testsuite/tests/typecheck/should_compile/T9117.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE RoleAnnotations #-} + +-- Also see Note [Order of Coercible Instances] + +module T9117 where + +import Data.Coerce + +newtype Phant a = MkPhant Char +type role Phant representational + +ex1 :: Phant Bool +ex1 = coerce (MkPhant 'x' :: Phant Int) diff --git a/testsuite/tests/typecheck/should_compile/T9117_2.hs b/testsuite/tests/typecheck/should_compile/T9117_2.hs new file mode 100644 index 0000000000..e7b08d8b6c --- /dev/null +++ b/testsuite/tests/typecheck/should_compile/T9117_2.hs @@ -0,0 +1,10 @@ +module T9117_2 where + + +import Data.Coerce + +newtype Foo a = Foo (Foo a) +newtype Age = MkAge Int + +ex1 :: (Foo Age) -> (Foo Int) +ex1 = coerce diff --git a/testsuite/tests/typecheck/should_compile/all.T b/testsuite/tests/typecheck/should_compile/all.T index 373e739a3f..07d05b8a0e 100644 --- a/testsuite/tests/typecheck/should_compile/all.T +++ b/testsuite/tests/typecheck/should_compile/all.T @@ -418,3 +418,5 @@ test('T8644', normal, compile, ['']) test('T8762', normal, compile, ['']) test('MutRec', normal, compile, ['']) test('T8856', normal, compile, ['']) +test('T9117', normal, compile, ['']) +test('T9117_2', expect_broken('9117'), compile, ['']) diff --git a/testsuite/tests/typecheck/should_compile/tc168.stderr b/testsuite/tests/typecheck/should_compile/tc168.stderr index de1467b2b4..b46cdd04b3 100644 --- a/testsuite/tests/typecheck/should_compile/tc168.stderr +++ b/testsuite/tests/typecheck/should_compile/tc168.stderr @@ -1,11 +1,11 @@ - -tc168.hs:17:1: - Could not deduce (C a1 (a, b0)) - arising from the ambiguity check for ‘g’ - from the context (C a1 (a, b)) - bound by the inferred type for ‘g’: C a1 (a, b) => a1 -> a - at tc168.hs:17:1-16 - The type variable ‘b0’ is ambiguous - When checking that ‘g’ - has the inferred type ‘forall a b a1. C a1 (a, b) => a1 -> a’ - Probable cause: the inferred type is ambiguous +
+tc168.hs:17:1:
+ Could not deduce (C a1 (a, b0))
+ arising from the ambiguity check for ‘g’
+ from the context (C a1 (a, b))
+ bound by the inferred type for ‘g’: C a1 (a, b) => a1 -> a
+ at tc168.hs:17:1-16
+ The type variable ‘b0’ is ambiguous
+ When checking that ‘g’ has the inferred type
+ g :: forall a b a1. C a1 (a, b) => a1 -> a
+ Probable cause: the inferred type is ambiguous
diff --git a/testsuite/tests/typecheck/should_compile/tc211.stderr b/testsuite/tests/typecheck/should_compile/tc211.stderr index bdc5bd1879..533155a657 100644 --- a/testsuite/tests/typecheck/should_compile/tc211.stderr +++ b/testsuite/tests/typecheck/should_compile/tc211.stderr @@ -1,82 +1,25 @@ - -tc211.hs:15:22: - Couldn't match type ‘forall a6. a6 -> a6’ with ‘a -> a’ - Expected type: [a -> a] - Actual type: [forall a. a -> a] - In the first argument of ‘head’, namely ‘foo’ - In the first argument of ‘(:) :: - (forall a. a -> a) - -> [forall a. a -> a] -> [forall a. a -> a]’, namely - ‘(head foo)’ - -tc211.hs:48:19: - Could not deduce (Num a2) arising from the literal ‘3’ - from the context (Num a) - bound by the inferred type of - h1 :: Num a => (forall a1. a1 -> a1) -> a - at tc211.hs:(47,1)-(49,9) - The type variable ‘a2’ is ambiguous - Relevant bindings include - y :: Pair a2 (Pair a3 b1) (bound at tc211.hs:48:10) - Note: there are several potential instances: - instance Num Double -- Defined in ‘GHC.Float’ - instance Num Float -- Defined in ‘GHC.Float’ - instance Integral a => Num (GHC.Real.Ratio a) - -- Defined in ‘GHC.Real’ - ...plus three others - In the first argument of ‘g’, namely ‘3’ - In the first argument of ‘P’, namely ‘(g 3)’ - In the expression: P (g 3) (g (P 3 4)) - -tc211.hs:48:28: - Could not deduce (Num a3) arising from the literal ‘3’ - from the context (Num a) - bound by the inferred type of - h1 :: Num a => (forall a1. a1 -> a1) -> a - at tc211.hs:(47,1)-(49,9) - The type variable ‘a3’ is ambiguous - Relevant bindings include - y :: Pair a2 (Pair a3 b1) (bound at tc211.hs:48:10) - Note: there are several potential instances: - instance Num Double -- Defined in ‘GHC.Float’ - instance Num Float -- Defined in ‘GHC.Float’ - instance Integral a => Num (GHC.Real.Ratio a) - -- Defined in ‘GHC.Real’ - ...plus three others - In the first argument of ‘P’, namely ‘3’ - In the first argument of ‘g’, namely ‘(P 3 4)’ - In the second argument of ‘P’, namely ‘(g (P 3 4))’ - -tc211.hs:48:30: - Could not deduce (Num b1) arising from the literal ‘4’ - from the context (Num a) - bound by the inferred type of - h1 :: Num a => (forall a1. a1 -> a1) -> a - at tc211.hs:(47,1)-(49,9) - The type variable ‘b1’ is ambiguous - Relevant bindings include - y :: Pair a2 (Pair a3 b1) (bound at tc211.hs:48:10) - Note: there are several potential instances: - instance Num Double -- Defined in ‘GHC.Float’ - instance Num Float -- Defined in ‘GHC.Float’ - instance Integral a => Num (GHC.Real.Ratio a) - -- Defined in ‘GHC.Real’ - ...plus three others - In the second argument of ‘P’, namely ‘4’ - In the first argument of ‘g’, namely ‘(P 3 4)’ - In the second argument of ‘P’, namely ‘(g (P 3 4))’ - -tc211.hs:70:9: - Couldn't match type ‘forall a7. a7 -> a7’ with ‘a6 -> a6’ - Expected type: List (forall a. a -> a) - -> (forall a. a -> a) -> a6 -> a6 - Actual type: List (forall a. a -> a) - -> (forall a. a -> a) -> forall a. a -> a - In the expression: - foo2 :: - List (forall a. a -> a) -> (forall a. a -> a) -> (forall a. a -> a) - In the expression: - (foo2 :: - List (forall a. a -> a) - -> (forall a. a -> a) -> (forall a. a -> a)) - xs1 (\ x -> x) +
+tc211.hs:15:22:
+ Couldn't match type ‘forall a1. a1 -> a1’ with ‘a -> a’
+ Expected type: [a -> a]
+ Actual type: [forall a. a -> a]
+ In the first argument of ‘head’, namely ‘foo’
+ In the first argument of ‘(:) ::
+ (forall a. a -> a)
+ -> [forall a. a -> a] -> [forall a. a -> a]’, namely
+ ‘(head foo)’
+
+tc211.hs:70:9:
+ Couldn't match type ‘forall a2. a2 -> a2’ with ‘a1 -> a1’
+ Expected type: List (forall a. a -> a)
+ -> (forall a. a -> a) -> a1 -> a1
+ Actual type: List (forall a. a -> a)
+ -> (forall a. a -> a) -> forall a. a -> a
+ In the expression:
+ foo2 ::
+ List (forall a. a -> a) -> (forall a. a -> a) -> (forall a. a -> a)
+ In the expression:
+ (foo2 ::
+ List (forall a. a -> a)
+ -> (forall a. a -> a) -> (forall a. a -> a))
+ xs1 (\ x -> x)
diff --git a/testsuite/tests/typecheck/should_compile/tc231.stderr b/testsuite/tests/typecheck/should_compile/tc231.stderr index 16ddddac09..4421e8aba3 100644 --- a/testsuite/tests/typecheck/should_compile/tc231.stderr +++ b/testsuite/tests/typecheck/should_compile/tc231.stderr @@ -5,24 +5,11 @@ TYPE SIGNATURES Q s (Z [Char]) chain -> ST s () s :: forall t t1. Q t (Z [Char]) t1 -> Q t (Z [Char]) t1 TYPE CONSTRUCTORS - Q :: * -> * -> * -> * - data Q s a chain - No C type associated - Roles: [representational, representational, representational] - RecFlag NonRecursive, Promotable - = Node :: s -> a -> chain -> Q s a chain Stricts: _ _ _ - FamilyInstance: none - Z :: * -> * - data Z a - No C type associated - Roles: [representational] - RecFlag NonRecursive, Promotable - = Z :: a -> Z a Stricts: _ - FamilyInstance: none - Zork :: * -> * -> * -> Constraint - class Zork s a b | a -> b - Roles: [nominal, nominal, nominal] - RecFlag NonRecursive + data Q s a chain = Node s a chain + Promotable + data Z a = Z a + Promotable + class Zork s a b | a -> b where huh :: Q s a chain -> ST s () COERCION AXIOMS axiom ShouldCompile.NTCo:Zork :: diff --git a/testsuite/tests/typecheck/should_compile/tc253.hs b/testsuite/tests/typecheck/should_compile/tc253.hs index 4771b82435..3ce439e4f2 100644 --- a/testsuite/tests/typecheck/should_compile/tc253.hs +++ b/testsuite/tests/typecheck/should_compile/tc253.hs @@ -4,8 +4,11 @@ module ShouldCompile where class Cls a where type Fam a b :: * -- Multiple defaults! - type Fam a Bool = Maybe a - type Fam a Int = (String, a) + type Fam a x = FamHelper a x + +type family FamHelper a x +type instance FamHelper a Bool = Maybe a +type instance FamHelper a Int = (String, a) instance Cls Int where -- Gets type family from default diff --git a/testsuite/tests/typecheck/should_fail/AssocTyDef02.stderr b/testsuite/tests/typecheck/should_fail/AssocTyDef02.stderr index 9b3ac0e364..b310a79a6f 100644 --- a/testsuite/tests/typecheck/should_fail/AssocTyDef02.stderr +++ b/testsuite/tests/typecheck/should_fail/AssocTyDef02.stderr @@ -1,6 +1,6 @@ -AssocTyDef02.hs:6:10: - Type indexes must match class instance head - Found ‘[b]’ but expected ‘a’ - In the type synonym instance default declaration for ‘Typ’ - In the class declaration for ‘Cls’ +AssocTyDef02.hs:6:14: + Unexpected type ‘[b]’ + In the default declaration for ‘Typ’ + A default declaration should have form + default Typ a = ... diff --git a/testsuite/tests/typecheck/should_fail/AssocTyDef03.stderr b/testsuite/tests/typecheck/should_fail/AssocTyDef03.stderr index e62a2afcc5..c0950bcc74 100644 --- a/testsuite/tests/typecheck/should_fail/AssocTyDef03.stderr +++ b/testsuite/tests/typecheck/should_fail/AssocTyDef03.stderr @@ -1,5 +1,5 @@ - -AssocTyDef03.hs:6:5: - Wrong category of family instance; declaration was for a data type - In the type instance declaration for ‘Typ’ - In the class declaration for ‘Cls’ +
+AssocTyDef03.hs:6:5:
+ Wrong category of family instance; declaration was for a data type
+ In the default type instance declaration for ‘Typ’
+ In the class declaration for ‘Cls’
diff --git a/testsuite/tests/typecheck/should_fail/AssocTyDef04.stderr b/testsuite/tests/typecheck/should_fail/AssocTyDef04.stderr index 550d09895f..4fbaaef199 100644 --- a/testsuite/tests/typecheck/should_fail/AssocTyDef04.stderr +++ b/testsuite/tests/typecheck/should_fail/AssocTyDef04.stderr @@ -1,7 +1,7 @@ - -AssocTyDef04.hs:6:18: - Expecting one more argument to ‘Maybe’ - Expected kind ‘*’, but ‘Maybe’ has kind ‘* -> *’ - In the type ‘Maybe’ - In the type instance declaration for ‘Typ’ - In the class declaration for ‘Cls’ +
+AssocTyDef04.hs:6:18:
+ Expecting one more argument to ‘Maybe’
+ Expected kind ‘*’, but ‘Maybe’ has kind ‘* -> *’
+ In the type ‘Maybe’
+ In the default type instance declaration for ‘Typ’
+ In the class declaration for ‘Cls’
diff --git a/testsuite/tests/typecheck/should_fail/AssocTyDef05.stderr b/testsuite/tests/typecheck/should_fail/AssocTyDef05.stderr index 8f5b5a5316..660d081ca3 100644 --- a/testsuite/tests/typecheck/should_fail/AssocTyDef05.stderr +++ b/testsuite/tests/typecheck/should_fail/AssocTyDef05.stderr @@ -1,5 +1,5 @@ - -AssocTyDef05.hs:6:10: - Number of parameters must match family declaration; expected 1 - In the type synonym instance default declaration for ‘Typ’ - In the class declaration for ‘Cls’ +
+AssocTyDef05.hs:6:5:
+ Number of parameters must match family declaration; expected 1
+ In the default type instance declaration for ‘Typ’
+ In the class declaration for ‘Cls’
diff --git a/testsuite/tests/typecheck/should_fail/AssocTyDef06.stderr b/testsuite/tests/typecheck/should_fail/AssocTyDef06.stderr index 29db541832..665ad223d2 100644 --- a/testsuite/tests/typecheck/should_fail/AssocTyDef06.stderr +++ b/testsuite/tests/typecheck/should_fail/AssocTyDef06.stderr @@ -1,5 +1,6 @@ - -AssocTyDef06.hs:6:10: - Number of parameters must match family declaration; expected no more than 1 - In the type instance declaration for ‘Typ’ - In the class declaration for ‘Cls’ +
+AssocTyDef06.hs:6:16:
+ Unexpected type ‘Int’
+ In the default declaration for ‘Typ’
+ A default declaration should have form
+ default Typ a b = ...
diff --git a/testsuite/tests/typecheck/should_fail/ContextStack2.stderr b/testsuite/tests/typecheck/should_fail/ContextStack2.stderr index a9c5cbc13a..e99e4c4264 100644 --- a/testsuite/tests/typecheck/should_fail/ContextStack2.stderr +++ b/testsuite/tests/typecheck/should_fail/ContextStack2.stderr @@ -6,4 +6,4 @@ ContextStack2.hs:8:6: TF (TF (TF (TF (TF (TF (TF (TF (TF (TF (TF Int))))))))))) ~ TF (TF (TF (TF (TF (TF (TF (TF (TF (TF a))))))))) In the ambiguity check for: forall a. a ~ TF (a, Int) => Int - In the type signature for ‘t’: t :: a ~ TF (a, Int) => Int + In the type signature for ‘t’: t :: (a ~ TF (a, Int)) => Int diff --git a/testsuite/tests/typecheck/should_fail/T1897a.stderr b/testsuite/tests/typecheck/should_fail/T1897a.stderr index 8a9e23bb9d..58f1a2d6ff 100644 --- a/testsuite/tests/typecheck/should_fail/T1897a.stderr +++ b/testsuite/tests/typecheck/should_fail/T1897a.stderr @@ -1,11 +1,11 @@ - -T1897a.hs:9:1: - Could not deduce (Wob a0 b) - arising from the ambiguity check for ‘foo’ - from the context (Wob a b) - bound by the inferred type for ‘foo’: Wob a b => b -> [b] - at T1897a.hs:9:1-24 - The type variable ‘a0’ is ambiguous - When checking that ‘foo’ - has the inferred type ‘forall a b. Wob a b => b -> [b]’ - Probable cause: the inferred type is ambiguous +
+T1897a.hs:9:1:
+ Could not deduce (Wob a0 b)
+ arising from the ambiguity check for ‘foo’
+ from the context (Wob a b)
+ bound by the inferred type for ‘foo’: Wob a b => b -> [b]
+ at T1897a.hs:9:1-24
+ The type variable ‘a0’ is ambiguous
+ When checking that ‘foo’ has the inferred type
+ foo :: forall a b. Wob a b => b -> [b]
+ Probable cause: the inferred type is ambiguous
diff --git a/testsuite/tests/typecheck/should_fail/T3468.stderr b/testsuite/tests/typecheck/should_fail/T3468.stderr index 17bc7fba01..26ec1920a6 100644 --- a/testsuite/tests/typecheck/should_fail/T3468.stderr +++ b/testsuite/tests/typecheck/should_fail/T3468.stderr @@ -5,4 +5,4 @@ T3468.hs-boot:3:1: Main module: type role Tool phantom data Tool d where F :: a -> Tool d - Boot file: data Tool + Boot file: abstract Tool diff --git a/testsuite/tests/typecheck/should_fail/T7019.stderr b/testsuite/tests/typecheck/should_fail/T7019.stderr index dd967c8785..6e47926037 100644 --- a/testsuite/tests/typecheck/should_fail/T7019.stderr +++ b/testsuite/tests/typecheck/should_fail/T7019.stderr @@ -1,6 +1,5 @@ -T7019.hs:14:10: - Illegal polymorphic or qualified type: C c - In the context: (C c) - While checking an instance declaration - In the instance declaration for ‘Monad (Free c)’ +T7019.hs:11:12: + Illegal constraint: forall a. c (Free c a) + In the type ‘forall a. c (Free c a)’ + In the type declaration for ‘C’ diff --git a/testsuite/tests/typecheck/should_fail/T7019a.stderr b/testsuite/tests/typecheck/should_fail/T7019a.stderr index 301a6cd11c..f88893153f 100644 --- a/testsuite/tests/typecheck/should_fail/T7019a.stderr +++ b/testsuite/tests/typecheck/should_fail/T7019a.stderr @@ -1,7 +1,4 @@ -T7019a.hs:11:1: - Illegal polymorphic or qualified type: - forall b. Context (Associated a b) - In the context: (forall b. Context (Associated a b)) - While checking the super-classes of class ‘Class’ +T7019a.hs:11:8: + Illegal constraint: forall b. Context (Associated a b) In the class declaration for ‘Class’ diff --git a/testsuite/tests/typecheck/should_fail/T7609.stderr b/testsuite/tests/typecheck/should_fail/T7609.stderr index 1b904bbec7..b02dbe20f8 100644 --- a/testsuite/tests/typecheck/should_fail/T7609.stderr +++ b/testsuite/tests/typecheck/should_fail/T7609.stderr @@ -1,10 +1,10 @@ -
-T7609.hs:7:16:
- Expecting one more argument to ‘Maybe’
- The second argument of a tuple should have kind ‘*’,
- but ‘Maybe’ has kind ‘* -> *’
- In the type signature for ‘f’: f :: (a `X` a, Maybe)
-
-T7609.hs:10:7:
- Expected a constraint, but ‘a `X` a’ has kind ‘*’
- In the type signature for ‘g’: g :: a `X` a => Maybe
+ +T7609.hs:7:16: + Expecting one more argument to ‘Maybe’ + The second argument of a tuple should have kind ‘*’, + but ‘Maybe’ has kind ‘* -> *’ + In the type signature for ‘f’: f :: (a `X` a, Maybe) + +T7609.hs:10:7: + Expected a constraint, but ‘a `X` a’ has kind ‘*’ + In the type signature for ‘g’: g :: (a `X` a) => Maybe diff --git a/testsuite/tests/typecheck/should_fail/T7778.stderr b/testsuite/tests/typecheck/should_fail/T7778.stderr index 714e2a6e27..136625af75 100644 --- a/testsuite/tests/typecheck/should_fail/T7778.stderr +++ b/testsuite/tests/typecheck/should_fail/T7778.stderr @@ -2,4 +2,4 @@ T7778.hs:3:19: Expecting one more argument to ‘Num’ Expected a type, but ‘Num’ has kind ‘* -> Constraint’ - In the type signature for ‘v’: v :: (Num Int => Num) () => () + In the type signature for ‘v’: v :: ((Num Int => Num) ()) => () diff --git a/testsuite/tests/typecheck/should_fail/T8142.stderr b/testsuite/tests/typecheck/should_fail/T8142.stderr index 5940df4384..d585abdcd2 100644 --- a/testsuite/tests/typecheck/should_fail/T8142.stderr +++ b/testsuite/tests/typecheck/should_fail/T8142.stderr @@ -1,28 +1,28 @@ - -T8142.hs:6:18: - Couldn't match type ‘Nu ((,) t0)’ with ‘Nu ((,) t)’ - NB: ‘Nu’ is a type function, and may not be injective - The type variable ‘t0’ is ambiguous - Expected type: Nu ((,) t) -> Nu f - Actual type: Nu ((,) t0) -> Nu f0 - When checking that ‘h’ - has the inferred type ‘forall t (f :: * -> *). Nu ((,) t) -> Nu f’ - Probable cause: the inferred type is ambiguous - In an equation for ‘tracer’: - tracer - = h - where - h = (\ (_, b) -> ((outI . fmap h) b)) . out - -T8142.hs:6:57: - Could not deduce (Nu ((,) t) ~ f1 (Nu ((,) t))) - from the context (Functor f, Coinductive f) - bound by the type signature for - tracer :: (Functor f, Coinductive f) => (c -> f c) -> c -> f c - at T8142.hs:5:11-64 - Expected type: Nu ((,) t) -> (t, f1 (Nu ((,) t))) - Actual type: Nu ((,) t) -> (t, Nu ((,) t)) - Relevant bindings include - h :: Nu ((,) t) -> Nu f1 (bound at T8142.hs:6:18) - In the second argument of ‘(.)’, namely ‘out’ - In the expression: (\ (_, b) -> ((outI . fmap h) b)) . out +
+T8142.hs:6:18:
+ Couldn't match type ‘Nu ((,) t0)’ with ‘Nu ((,) t)’
+ NB: ‘Nu’ is a type function, and may not be injective
+ The type variable ‘t0’ is ambiguous
+ Expected type: Nu ((,) t) -> Nu f
+ Actual type: Nu ((,) t0) -> Nu f0
+ When checking that ‘h’ has the inferred type
+ h :: forall t (f :: * -> *). Nu ((,) t) -> Nu f
+ Probable cause: the inferred type is ambiguous
+ In an equation for ‘tracer’:
+ tracer
+ = h
+ where
+ h = (\ (_, b) -> ((outI . fmap h) b)) . out
+
+T8142.hs:6:57:
+ Could not deduce (Nu ((,) t) ~ f1 (Nu ((,) t)))
+ from the context (Functor f, Coinductive f)
+ bound by the type signature for
+ tracer :: (Functor f, Coinductive f) => (c -> f c) -> c -> f c
+ at T8142.hs:5:11-64
+ Expected type: Nu ((,) t) -> (t, f1 (Nu ((,) t)))
+ Actual type: Nu ((,) t) -> (t, Nu ((,) t))
+ Relevant bindings include
+ h :: Nu ((,) t) -> Nu f1 (bound at T8142.hs:6:18)
+ In the second argument of ‘(.)’, namely ‘out’
+ In the expression: (\ (_, b) -> ((outI . fmap h) b)) . out
diff --git a/testsuite/tests/typecheck/should_fail/T8392a.stderr b/testsuite/tests/typecheck/should_fail/T8392a.stderr index ae7fc2ca6a..ed33600a1e 100644 --- a/testsuite/tests/typecheck/should_fail/T8392a.stderr +++ b/testsuite/tests/typecheck/should_fail/T8392a.stderr @@ -4,4 +4,4 @@ T8392a.hs:6:8: Inaccessible code in the type signature for foo :: Int ~ Bool => a -> a In the ambiguity check for: forall a. Int ~ Bool => a -> a - In the type signature for ‘foo’: foo :: Int ~ Bool => a -> a + In the type signature for ‘foo’: foo :: (Int ~ Bool) => a -> a diff --git a/testsuite/tests/typecheck/should_fail/T8603.stderr b/testsuite/tests/typecheck/should_fail/T8603.stderr index cf12725281..8ee8cccb4a 100644 --- a/testsuite/tests/typecheck/should_fail/T8603.stderr +++ b/testsuite/tests/typecheck/should_fail/T8603.stderr @@ -1,11 +1,11 @@ T8603.hs:29:17:
- Couldn't match type ‘(->) [a0]’ with ‘[t1]’
- Expected type: [t1] -> StateT s RV t0
- Actual type: t2 ((->) [a0]) (StateT s RV t0)
+ Couldn't match type ‘(->) [a0]’ with ‘[Integer]’
+ Expected type: [Integer] -> StateT s RV t0
+ Actual type: t1 ((->) [a0]) (StateT s RV t0)
The function ‘lift’ is applied to two arguments,
but its type ‘([a0] -> StateT s RV t0)
- -> t2 ((->) [a0]) (StateT s RV t0)’
+ -> t1 ((->) [a0]) (StateT s RV t0)’
has only one
In a stmt of a 'do' block: prize <- lift uniform [1, 2, 3]
In the expression:
diff --git a/testsuite/tests/typecheck/should_fail/T8806.stderr b/testsuite/tests/typecheck/should_fail/T8806.stderr index 0a5a3d731f..ab88b7f2eb 100644 --- a/testsuite/tests/typecheck/should_fail/T8806.stderr +++ b/testsuite/tests/typecheck/should_fail/T8806.stderr @@ -4,5 +4,5 @@ T8806.hs:5:6: In the type signature for ‘f’: f :: Int => Int T8806.hs:8:7: - Expected a constraint, but ‘Int’ has kind ‘*’ - In the type signature for ‘g’: g :: Int => Show a => Int + Illegal constraint: Int => Show a + In the type signature for ‘g’: g :: (Int => Show a) => Int diff --git a/testsuite/tests/typecheck/should_fail/T8883.stderr b/testsuite/tests/typecheck/should_fail/T8883.stderr index 0ea136869b..d02f02338e 100644 --- a/testsuite/tests/typecheck/should_fail/T8883.stderr +++ b/testsuite/tests/typecheck/should_fail/T8883.stderr @@ -1,7 +1,8 @@ - - -T8883.hs:17:1: - Non type-variable argument in the constraint: Functor (PF a) - (Use FlexibleContexts to permit this) - In the context: (Regular a, Functor (PF a)) - While checking the inferred type for ‘fold’ +
+T8883.hs:20:1:
+ Non type-variable argument in the constraint: Functor (PF a)
+ (Use FlexibleContexts to permit this)
+ When checking that ‘fold’ has the inferred type
+ fold :: forall a b.
+ (Regular a, Functor (PF a)) =>
+ (PF a b -> b) -> a -> b
diff --git a/testsuite/tests/typecheck/should_fail/T8912.stderr b/testsuite/tests/typecheck/should_fail/T8912.stderr index 24607c29be..ad343f33c5 100644 --- a/testsuite/tests/typecheck/should_fail/T8912.stderr +++ b/testsuite/tests/typecheck/should_fail/T8912.stderr @@ -1,6 +1,6 @@ T8912.hs:7:10: - Illegal implict parameter ‘?imp::Int’ + Illegal implicit parameter ‘?imp::Int’ In the context: (?imp::Int) While checking an instance declaration In the instance declaration for ‘C [a]’ diff --git a/testsuite/tests/typecheck/should_fail/T9033.hs b/testsuite/tests/typecheck/should_fail/T9033.hs new file mode 100644 index 0000000000..cc9277fc17 --- /dev/null +++ b/testsuite/tests/typecheck/should_fail/T9033.hs @@ -0,0 +1,7 @@ +module T9030 where + +bad :: Bool +bad = () + +square :: Integral i => i -> i +square x = x^2 diff --git a/testsuite/tests/typecheck/should_fail/T9033.stderr b/testsuite/tests/typecheck/should_fail/T9033.stderr new file mode 100644 index 0000000000..c2fd563124 --- /dev/null +++ b/testsuite/tests/typecheck/should_fail/T9033.stderr @@ -0,0 +1,5 @@ + +T9033.hs:4:7: + Couldn't match expected type ‘Bool’ with actual type ‘()’ + In the expression: () + In an equation for ‘bad’: bad = () diff --git a/testsuite/tests/typecheck/should_fail/T9196.hs b/testsuite/tests/typecheck/should_fail/T9196.hs new file mode 100644 index 0000000000..11d713b5e9 --- /dev/null +++ b/testsuite/tests/typecheck/should_fail/T9196.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE RankNTypes #-} +module T9196 where + +f :: (forall a. Eq a) => a -> a +f x = x + +g :: (Eq a => Ord a) => a -> a +g x = x diff --git a/testsuite/tests/typecheck/should_fail/T9196.stderr b/testsuite/tests/typecheck/should_fail/T9196.stderr new file mode 100644 index 0000000000..6f5a204edd --- /dev/null +++ b/testsuite/tests/typecheck/should_fail/T9196.stderr @@ -0,0 +1,8 @@ + +T9196.hs:4:7: + Illegal constraint: forall a. Eq a + In the type signature for ‘f’: f :: (forall a. 
Eq a) => a -> a + +T9196.hs:7:7: + Illegal constraint: Eq a => Ord a + In the type signature for ‘g’: g :: (Eq a => Ord a) => a -> a diff --git a/testsuite/tests/typecheck/should_fail/T9305.hs b/testsuite/tests/typecheck/should_fail/T9305.hs new file mode 100644 index 0000000000..b6ad3b780e --- /dev/null +++ b/testsuite/tests/typecheck/should_fail/T9305.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DeriveFunctor#-} +module Main where + +data Event a b = Event a deriving (Functor) + +newtype F f = F (f (F f)) + +data EventF a = EventF (F (Event a)) deriving (Functor) diff --git a/testsuite/tests/typecheck/should_fail/T9305.stderr b/testsuite/tests/typecheck/should_fail/T9305.stderr new file mode 100644 index 0000000000..16104237b9 --- /dev/null +++ b/testsuite/tests/typecheck/should_fail/T9305.stderr @@ -0,0 +1,8 @@ + +T9305.hs:8:48: + No instance for (Functor Event) + arising from the first field of ‘EventF’ (type ‘F (Event a)’) + Possible fix: + use a standalone 'deriving instance' declaration, + so you can specify the instance context yourself + When deriving the instance for (Functor EventF) diff --git a/testsuite/tests/typecheck/should_fail/T9323.hs b/testsuite/tests/typecheck/should_fail/T9323.hs new file mode 100644 index 0000000000..1aea288bbe --- /dev/null +++ b/testsuite/tests/typecheck/should_fail/T9323.hs @@ -0,0 +1,7 @@ +module T9323 where + +broken :: [Int] +broken = () + +ambiguous :: a -> String +ambiguous _ = show 0 diff --git a/testsuite/tests/typecheck/should_fail/T9323.stderr b/testsuite/tests/typecheck/should_fail/T9323.stderr new file mode 100644 index 0000000000..f98ce7bafe --- /dev/null +++ b/testsuite/tests/typecheck/should_fail/T9323.stderr @@ -0,0 +1,5 @@ + +T9323.hs:4:10: + Couldn't match expected type ‘[Int]’ with actual type ‘()’ + In the expression: () + In an equation for ‘broken’: broken = () diff --git a/testsuite/tests/typecheck/should_fail/TcNoNullaryTC.stderr b/testsuite/tests/typecheck/should_fail/TcNoNullaryTC.stderr index 9e8175d99f..80f6ec4ec0 100644 --- a/testsuite/tests/typecheck/should_fail/TcNoNullaryTC.stderr +++ b/testsuite/tests/typecheck/should_fail/TcNoNullaryTC.stderr @@ -1,5 +1,5 @@ TcNoNullaryTC.hs:3:1: No parameters for class ‘A’ - (Use NullaryTypeClasses to allow no-parameter classes) + (Use MultiParamTypeClasses to allow no-parameter classes) In the class declaration for ‘A’ diff --git a/testsuite/tests/typecheck/should_fail/TcNullaryTCFail.hs b/testsuite/tests/typecheck/should_fail/TcNullaryTCFail.hs index b127300b75..b00200db2a 100644 --- a/testsuite/tests/typecheck/should_fail/TcNullaryTCFail.hs +++ b/testsuite/tests/typecheck/should_fail/TcNullaryTCFail.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE NullaryTypeClasses #-} +{-# LANGUAGE MultiParamTypeClasses #-} module TcNullaryTCFail where class A diff --git a/testsuite/tests/typecheck/should_fail/all.T b/testsuite/tests/typecheck/should_fail/all.T index 2407af51be..cf2af3090d 100644 --- a/testsuite/tests/typecheck/should_fail/all.T +++ b/testsuite/tests/typecheck/should_fail/all.T @@ -331,3 +331,8 @@ test('T8570', extra_clean(['T85570a.o', 'T8570a.hi','T85570b.o', 'T8570b.hi']), test('T8603', normal, compile_fail, ['']) test('T8806', normal, compile_fail, ['']) test('T8912', normal, compile_fail, ['']) +test('T9033', normal, compile_fail, ['']) +test('T8883', normal, compile_fail, ['']) +test('T9196', normal, compile_fail, ['']) +test('T9305', normal, compile_fail, ['']) +test('T9323', normal, compile_fail, ['']) diff --git a/testsuite/tests/typecheck/should_fail/mc24.stderr 
b/testsuite/tests/typecheck/should_fail/mc24.stderr index 0ddc66d97a..495693c9f8 100644 --- a/testsuite/tests/typecheck/should_fail/mc24.stderr +++ b/testsuite/tests/typecheck/should_fail/mc24.stderr @@ -1,8 +1,8 @@ - -mc24.hs:10:31: - Couldn't match type ‘[a0]’ with ‘a -> a1’ - Expected type: (a -> a1) -> [a] -> t [a] - Actual type: [a0] -> [a0] - Possible cause: ‘take’ is applied to too many arguments - In the expression: take 2 - In a stmt of a monad comprehension: then group by x using take 2 +
+mc24.hs:10:31:
+ Couldn't match type ‘[a0]’ with ‘a -> Integer’
+ Expected type: (a -> Integer) -> [a] -> t [a]
+ Actual type: [a0] -> [a0]
+ Possible cause: ‘take’ is applied to too many arguments
+ In the expression: take 2
+ In a stmt of a monad comprehension: then group by x using take 2
diff --git a/testsuite/tests/typecheck/should_fail/tcfail004.stderr b/testsuite/tests/typecheck/should_fail/tcfail004.stderr index df54f950c6..48840e7298 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail004.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail004.stderr @@ -1,9 +1,9 @@ - -tcfail004.hs:3:9: - Couldn't match expected type ‘(t, t3)’ - with actual type ‘(t0, t1, t2)’ - Relevant bindings include - f :: t (bound at tcfail004.hs:3:2) - g :: t3 (bound at tcfail004.hs:3:4) - In the expression: (1, 2, 3) - In a pattern binding: (f, g) = (1, 2, 3) +
+tcfail004.hs:3:9:
+ Couldn't match expected type ‘(t, t1)’
+ with actual type ‘(Integer, Integer, Integer)’
+ Relevant bindings include
+ f :: t (bound at tcfail004.hs:3:2)
+ g :: t1 (bound at tcfail004.hs:3:4)
+ In the expression: (1, 2, 3)
+ In a pattern binding: (f, g) = (1, 2, 3)
diff --git a/testsuite/tests/typecheck/should_fail/tcfail005.stderr b/testsuite/tests/typecheck/should_fail/tcfail005.stderr index bae8697fe8..36f0e738e4 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail005.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail005.stderr @@ -1,8 +1,9 @@ - -tcfail005.hs:3:9: - Couldn't match expected type ‘[t]’ with actual type ‘(t0, Char)’ - Relevant bindings include - h :: t (bound at tcfail005.hs:3:2) - i :: [t] (bound at tcfail005.hs:3:4) - In the expression: (1, 'a') - In a pattern binding: (h : i) = (1, 'a') +
+tcfail005.hs:3:9:
+ Couldn't match expected type ‘[t]’
+ with actual type ‘(Integer, Char)’
+ Relevant bindings include
+ h :: t (bound at tcfail005.hs:3:2)
+ i :: [t] (bound at tcfail005.hs:3:4)
+ In the expression: (1, 'a')
+ In a pattern binding: (h : i) = (1, 'a')
diff --git a/testsuite/tests/typecheck/should_fail/tcfail032.stderr b/testsuite/tests/typecheck/should_fail/tcfail032.stderr index 954a6fd5ec..4d41c103da 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail032.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail032.stderr @@ -8,5 +8,5 @@ tcfail032.hs:14:8: Relevant bindings include x :: t (bound at tcfail032.hs:14:3) f :: t -> a -> Int (bound at tcfail032.hs:14:1) - In the expression: (x :: Eq a => a -> Int) - In an equation for ‘f’: f x = (x :: Eq a => a -> Int) + In the expression: (x :: (Eq a) => a -> Int) + In an equation for ‘f’: f x = (x :: (Eq a) => a -> Int) diff --git a/testsuite/tests/typecheck/should_fail/tcfail041.stderr b/testsuite/tests/typecheck/should_fail/tcfail041.stderr index ba5d4a15d7..c81d30979a 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail041.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail041.stderr @@ -1,6 +1,6 @@ tcfail041.hs:5:1: - Illegal implict parameter ‘?imp::Int’ + Illegal implicit parameter ‘?imp::Int’ In the context: (?imp::Int) While checking the super-classes of class ‘D’ In the class declaration for ‘D’ diff --git a/testsuite/tests/typecheck/should_fail/tcfail058.stderr b/testsuite/tests/typecheck/should_fail/tcfail058.stderr index 101a6a07d2..74db76afd8 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail058.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail058.stderr @@ -2,4 +2,4 @@ tcfail058.hs:6:7: Expecting one more argument to ‘Array a’ Expected a constraint, but ‘Array a’ has kind ‘* -> *’ - In the type signature for ‘f’: f :: Array a => a -> b + In the type signature for ‘f’: f :: (Array a) => a -> b diff --git a/testsuite/tests/typecheck/should_fail/tcfail062.stderr b/testsuite/tests/typecheck/should_fail/tcfail062.stderr index 1396b536ee..ff4915dfd2 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail062.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail062.stderr @@ -1,6 +1,8 @@ tcfail062.hs:34:6: Not in scope: type variable ‘behaviouralExpression’ + Perhaps you meant type constructor or class ‘BehaviouralExpression’ (line 25) tcfail062.hs:34:29: Not in scope: type variable ‘behaviouralExpression’ + Perhaps you meant type constructor or class ‘BehaviouralExpression’ (line 25) diff --git a/testsuite/tests/typecheck/should_fail/tcfail080.stderr b/testsuite/tests/typecheck/should_fail/tcfail080.stderr index 589d6cf342..4e02b3e012 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail080.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail080.stderr @@ -1,13 +1,11 @@ - -tcfail080.hs:27:1: - Could not deduce (Collection c0 a) - arising from the ambiguity check for ‘q’ - from the context (Collection c a) - bound by the inferred type for ‘q’: Collection c a => a -> Bool - at tcfail080.hs:27:1-27 - The type variable ‘c0’ is ambiguous - When checking that ‘q’ - has the inferred type ‘forall (c :: * -> *) a. - Collection c a => - a -> Bool’ - Probable cause: the inferred type is ambiguous +
+tcfail080.hs:27:1:
+ Could not deduce (Collection c0 a)
+ arising from the ambiguity check for ‘q’
+ from the context (Collection c a)
+ bound by the inferred type for ‘q’: Collection c a => a -> Bool
+ at tcfail080.hs:27:1-27
+ The type variable ‘c0’ is ambiguous
+ When checking that ‘q’ has the inferred type
+ q :: forall (c :: * -> *) a. Collection c a => a -> Bool
+ Probable cause: the inferred type is ambiguous
diff --git a/testsuite/tests/typecheck/should_fail/tcfail116.stderr b/testsuite/tests/typecheck/should_fail/tcfail116.stderr index 0fdafcfaba..0136173201 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail116.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail116.stderr @@ -1,6 +1,6 @@ tcfail116.hs:5:1: The class method ‘bug’ - mentions none of the type variables of the class Foo a + mentions none of the type or kind variables of the class ‘Foo a’ When checking the class method: bug :: () In the class declaration for ‘Foo’ diff --git a/testsuite/tests/typecheck/should_fail/tcfail140.stderr b/testsuite/tests/typecheck/should_fail/tcfail140.stderr index bb45df3dee..7593497fe2 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail140.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail140.stderr @@ -1,38 +1,38 @@ - -tcfail140.hs:10:7: - Couldn't match expected type ‘a0 -> t’ with actual type ‘Int’ - Relevant bindings include bar :: t (bound at tcfail140.hs:10:1) - The function ‘f’ is applied to two arguments, - but its type ‘Int -> Int’ has only one - In the expression: f 3 9 - In an equation for ‘bar’: bar = f 3 9 - -tcfail140.hs:12:10: - Couldn't match expected type ‘a1 -> t1’ with actual type ‘Int’ - Relevant bindings include - rot :: t -> t1 (bound at tcfail140.hs:12:1) - The operator ‘f’ takes two arguments, - but its type ‘Int -> Int’ has only one - In the expression: 3 `f` 4 - In an equation for ‘rot’: rot xs = 3 `f` 4 - -tcfail140.hs:14:15: - Couldn't match expected type ‘a -> b’ with actual type ‘Int’ - Relevant bindings include - xs :: [a] (bound at tcfail140.hs:14:5) - bot :: [a] -> [b] (bound at tcfail140.hs:14:1) - The operator ‘f’ takes two arguments, - but its type ‘Int -> Int’ has only one - In the first argument of ‘map’, namely ‘(3 `f`)’ - In the expression: map (3 `f`) xs - -tcfail140.hs:16:8: - Constructor ‘Just’ should have 1 argument, but has been given none - In the pattern: Just - In the expression: (\ Just x -> x) :: Maybe a -> a - In the expression: ((\ Just x -> x) :: Maybe a -> a) (Just 1) - -tcfail140.hs:19:1: - Couldn't match expected type ‘t0 -> Bool’ with actual type ‘Int’ - The equation(s) for ‘g’ have two arguments, - but its type ‘Int -> Int’ has only one +
+tcfail140.hs:10:7:
+ Couldn't match expected type ‘Integer -> t’ with actual type ‘Int’
+ Relevant bindings include bar :: t (bound at tcfail140.hs:10:1)
+ The function ‘f’ is applied to two arguments,
+ but its type ‘Int -> Int’ has only one
+ In the expression: f 3 9
+ In an equation for ‘bar’: bar = f 3 9
+
+tcfail140.hs:12:10:
+ Couldn't match expected type ‘Integer -> t1’ with actual type ‘Int’
+ Relevant bindings include
+ rot :: t -> t1 (bound at tcfail140.hs:12:1)
+ The operator ‘f’ takes two arguments,
+ but its type ‘Int -> Int’ has only one
+ In the expression: 3 `f` 4
+ In an equation for ‘rot’: rot xs = 3 `f` 4
+
+tcfail140.hs:14:15:
+ Couldn't match expected type ‘a -> b’ with actual type ‘Int’
+ Relevant bindings include
+ xs :: [a] (bound at tcfail140.hs:14:5)
+ bot :: [a] -> [b] (bound at tcfail140.hs:14:1)
+ The operator ‘f’ takes two arguments,
+ but its type ‘Int -> Int’ has only one
+ In the first argument of ‘map’, namely ‘(3 `f`)’
+ In the expression: map (3 `f`) xs
+
+tcfail140.hs:16:8:
+ Constructor ‘Just’ should have 1 argument, but has been given none
+ In the pattern: Just
+ In the expression: (\ Just x -> x) :: Maybe a -> a
+ In the expression: ((\ Just x -> x) :: Maybe a -> a) (Just 1)
+
+tcfail140.hs:19:1:
+ Couldn't match expected type ‘t0 -> Bool’ with actual type ‘Int’
+ The equation(s) for ‘g’ have two arguments,
+ but its type ‘Int -> Int’ has only one
diff --git a/testsuite/tests/typecheck/should_fail/tcfail189.stderr b/testsuite/tests/typecheck/should_fail/tcfail189.stderr index 69e8b3dbba..6bd08a266c 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail189.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail189.stderr @@ -1,8 +1,8 @@ - -tcfail189.hs:10:31: - Couldn't match type ‘[a0]’ with ‘a -> a1’ - Expected type: (a -> a1) -> [a] -> [[a]] - Actual type: [a0] -> [a0] - Possible cause: ‘take’ is applied to too many arguments - In the expression: take 2 - In a stmt of a list comprehension: then group by x using take 2 +
+tcfail189.hs:10:31:
+ Couldn't match type ‘[a0]’ with ‘a -> Integer’
+ Expected type: (a -> Integer) -> [a] -> [[a]]
+ Actual type: [a0] -> [a0]
+ Possible cause: ‘take’ is applied to too many arguments
+ In the expression: take 2
+ In a stmt of a list comprehension: then group by x using take 2
diff --git a/testsuite/tests/typecheck/should_fail/tcfail206.stderr b/testsuite/tests/typecheck/should_fail/tcfail206.stderr index 4fe402982a..3eec7088cd 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail206.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail206.stderr @@ -7,9 +7,9 @@ tcfail206.hs:5:5: In an equation for ‘a’: a = (, True)
tcfail206.hs:8:5:
- Couldn't match type ‘(t0, Int)’ with ‘Bool -> (Int, Bool)’
+ Couldn't match type ‘(Integer, Int)’ with ‘Bool -> (Int, Bool)’
Expected type: Int -> Bool -> (Int, Bool)
- Actual type: Int -> (t0, Int)
+ Actual type: Int -> (Integer, Int)
In the expression: (1,)
In an equation for ‘b’: b = (1,)
@@ -32,9 +32,10 @@ tcfail206.hs:14:5: In an equation for ‘d’: d = (# , True #)
tcfail206.hs:17:5:
- Couldn't match type ‘(# a0, Int #)’ with ‘Bool -> (# Int, Bool #)’
+ Couldn't match type ‘(# Integer, Int #)’
+ with ‘Bool -> (# Int, Bool #)’
Expected type: Int -> Bool -> (# Int, Bool #)
- Actual type: Int -> (# a0, Int #)
+ Actual type: Int -> (# Integer, Int #)
In the expression: (# 1, #)
In an equation for ‘e’: e = (# 1, #)
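The tcfail206 expectations above are all about tuple sections such as (1,) and (# 1, #). For orientation, a minimal well-typed use of the extension (illustration only, not part of the patch):

{-# LANGUAGE TupleSections #-}
module TupleSectionSketch where

-- (1,) abbreviates \y -> (1, y); once the literal is defaulted to Integer
-- the section has type  b -> (Integer, b), which is presumably why the
-- updated expected errors above mention Integer where they used to show a
-- fresh type variable such as t0.
pair :: Bool -> (Integer, Bool)
pair = (1,)

pairs :: [(Integer, Char)]
pairs = map (1,) "ab"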
diff --git a/testsuite/tests/typecheck/should_fail/tcfail211.stderr b/testsuite/tests/typecheck/should_fail/tcfail211.stderr index 3adb97cd75..0d9d23d9b1 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail211.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail211.stderr @@ -1,6 +1,6 @@ tcfail211.hs:5:1: - Illegal implict parameter ‘?imp::Int’ + Illegal implicit parameter ‘?imp::Int’ In the context: (?imp::Int) While checking the super-classes of class ‘D’ In the class declaration for ‘D’ diff --git a/testsuite/tests/typecheck/should_fail/tcfail215.stderr b/testsuite/tests/typecheck/should_fail/tcfail215.stderr index d7fa2d84f7..2157561827 100644 --- a/testsuite/tests/typecheck/should_fail/tcfail215.stderr +++ b/testsuite/tests/typecheck/should_fail/tcfail215.stderr @@ -1,4 +1,4 @@ tcfail215.hs:8:15: Expecting a lifted type, but ‘Int#’ is unlifted - In the type signature for ‘foo’: foo :: ?x :: Int# => Int + In the type signature for ‘foo’: foo :: (?x :: Int#) => Int diff --git a/testsuite/tests/typecheck/should_run/TcCoercible.hs b/testsuite/tests/typecheck/should_run/TcCoercible.hs index 7bb8e48b51..284984029f 100644 --- a/testsuite/tests/typecheck/should_run/TcCoercible.hs +++ b/testsuite/tests/typecheck/should_run/TcCoercible.hs @@ -23,7 +23,8 @@ newtype NonEtad a b = NonEtad (Either b a) deriving Show newtype Fix f = Fix (f (Fix f)) deriving instance Show (f (Fix f)) => Show (Fix f) -newtype FixEither a = FixEither (Either a (FixEither a)) deriving Show +-- Later, however, this stopped working (#9117) +-- newtype FixEither a = FixEither (Either a (FixEither a)) deriving Show -- This ensures that explicitly given constraints are consulted, even -- at higher depths @@ -59,8 +60,8 @@ main = do print (coerce $ (Fix (Left ()) :: Fix (Either ())) :: Either () (Fix (Either ()))) print (coerce $ (Left () :: Either () (Fix (Either ()))) :: Fix (Either ())) - print (coerce $ (FixEither (Left age) :: FixEither Age) :: Either Int (FixEither Int)) - print (coerce $ (Left one :: Either Int (FixEither Age)) :: FixEither Age) + -- print (coerce $ (FixEither (Left age) :: FixEither Age) :: Either Int (FixEither Int)) + -- print (coerce $ (Left one :: Either Int (FixEither Age)) :: FixEither Age) print (coerce $ True :: Fam Int) print (coerce $ FamInt True :: Bool) diff --git a/testsuite/tests/typecheck/should_run/TcCoercible.stdout b/testsuite/tests/typecheck/should_run/TcCoercible.stdout index 7b8071fe12..8ac2181440 100644 --- a/testsuite/tests/typecheck/should_run/TcCoercible.stdout +++ b/testsuite/tests/typecheck/should_run/TcCoercible.stdout @@ -14,7 +14,5 @@ List [1] NonEtad (Right 1) Left () Fix (Left ()) -Left 1 -FixEither (Left (Age 1)) FamInt True True diff --git a/testsuite/tests/typecheck/should_run/TcNullaryTC.hs b/testsuite/tests/typecheck/should_run/TcNullaryTC.hs index a94d3058b0..17e3f4c425 100644 --- a/testsuite/tests/typecheck/should_run/TcNullaryTC.hs +++ b/testsuite/tests/typecheck/should_run/TcNullaryTC.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE NullaryTypeClasses #-} +{-# LANGUAGE MultiParamTypeClasses #-} module Main where diff --git a/testsuite/tests/typecheck/should_run/all.T b/testsuite/tests/typecheck/should_run/all.T index 735fa54fd5..760d5e1452 100755 --- a/testsuite/tests/typecheck/should_run/all.T +++ b/testsuite/tests/typecheck/should_run/all.T @@ -12,6 +12,8 @@ test('tcrun003', normal, compile_and_run, ['']) test('tcrun004', normal, compile_and_run, ['']) test('tcrun005', normal, compile_and_run, ['']) test('Defer01', normal, compile_and_run, ['']) 
+test('TcNullaryTC', when(compiler_lt('ghc', '7.7'), skip), compile_and_run, ['']) +test('TcCoercible', when(compiler_lt('ghc', '7.7'), skip), compile_and_run, ['']) # ----------------------------------------------------------------------------- # Skip everything else if fast is on @@ -35,9 +37,7 @@ test('tcrun017', normal, compile_and_run, ['']) test('tcrun018', normal, compile_and_run, ['']) test('tcrun019', normal, compile_and_run, ['']) test('tcrun020', normal, compile_and_run, ['']) -# Doesn't work with External Core due to datatype with no constructors -test('tcrun021', expect_fail_for(['extcore','optextcore']), - compile_and_run, ['-package containers']) +test('tcrun021', normal, compile_and_run, ['-package containers']) test('tcrun022', [omit_ways(['ghci']),only_compiler_types(['ghc'])], compile_and_run, ['-O']) test('tcrun023', normal, compile_and_run, ['-O']) @@ -46,8 +46,7 @@ test('tcrun025', extra_clean(['TcRun025_B.hi', 'TcRun025_B.o']), multimod_compile_and_run, ['tcrun025','']) test('tcrun026', normal, compile_and_run, ['']) test('tcrun027', normal, compile_and_run, ['']) -# Doesn't work with External Core due to datatype with no constructors -test('tcrun028', expect_fail_for(['extcore','optextcore']), compile_and_run, ['']) +test('tcrun028', normal, compile_and_run, ['']) test('tcrun029', normal, compile_and_run, ['']) test('tcrun030', normal, compile_and_run, ['']) test('tcrun031', only_compiler_types(['ghc']), compile_and_run, ['']) @@ -69,7 +68,7 @@ test('tcrun041', omit_ways(['ghci']), compile_and_run, ['']) test('tcrun042', normal, compile_and_run, ['']) test('tcrun043', normal, compile_and_run, ['']) test('tcrun044', normal, compile_and_run, ['']) -test('tcrun045', normal, compile_and_run, ['']) +test('tcrun045', normal, compile_fail, ['']) test('tcrun046', normal, compile_and_run, ['']) test('tcrun047', [omit_ways(['ghci']), only_compiler_types(['ghc'])], compile_and_run, ['']) @@ -108,10 +107,8 @@ test('T6117', normal, compile_and_run, ['']) test('T5751', normal, compile_and_run, ['']) test('T5913', normal, compile_and_run, ['']) test('T7748', normal, compile_and_run, ['']) -test('TcNullaryTC', when(compiler_lt('ghc', '7.7'), skip), compile_and_run, ['']) test('T7861', exit_code(1), compile_and_run, ['']) test('TcTypeNatSimpleRun', normal, compile_and_run, ['']) -test('TcCoercible', when(compiler_lt('ghc', '7.7'), skip), compile_and_run, ['']) test('T8119', normal, ghci_script, ['T8119.script']) test('T8492', normal, compile_and_run, ['']) test('T8739', normal, compile_and_run, ['']) diff --git a/testsuite/tests/typecheck/should_run/tcrun.stderr b/testsuite/tests/typecheck/should_run/tcrun.stderr new file mode 100644 index 0000000000..0519ecba6e --- /dev/null +++ b/testsuite/tests/typecheck/should_run/tcrun.stderr @@ -0,0 +1 @@ +
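Among the all.T changes above, tcrun045 switches from compile_and_run to compile_fail, and the new tcrun045.stderr just below reports an illegal implicit parameter in the superclass context of a class D. The real tcrun045.hs is not shown in this diff, so the following is only a hypothetical sketch of the distinction involved: an implicit parameter is fine in an ordinary function context but not as a superclass.

{-# LANGUAGE ImplicitParams #-}
module ImplicitParamSketch where

-- The shape that the stderr below says is now rejected is, roughly:
--
--   class (?imp :: Int) => D a where ...
--
-- Ordinary uses of implicit parameters still work:
withImp :: (?imp :: Int) => Int -> Int
withImp x = x + ?imp

useIt :: Int
useIt = let ?imp = 10 in withImp 32   -- evaluates to 42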
\ No newline at end of file
diff --git a/testsuite/tests/typecheck/should_run/tcrun045.stderr b/testsuite/tests/typecheck/should_run/tcrun045.stderr
new file mode 100644
index 0000000000..4017279ecc
--- /dev/null
+++ b/testsuite/tests/typecheck/should_run/tcrun045.stderr
@@ -0,0 +1,6 @@
+
+tcrun045.hs:24:1:
+ Illegal implicit parameter ‘?imp::Int’
+ In the context: (?imp::Int)
+ While checking the super-classes of class ‘D’
+ In the class declaration for ‘D’
diff --git a/testsuite/tests/warnings/should_compile/Makefile b/testsuite/tests/warnings/should_compile/Makefile
new file mode 100644
index 0000000000..9101fbd40a
--- /dev/null
+++ b/testsuite/tests/warnings/should_compile/Makefile
@@ -0,0 +1,3 @@
+TOP=../../..
+include $(TOP)/mk/boilerplate.mk
+include $(TOP)/mk/test.mk
diff --git a/testsuite/tests/warnings/should_compile/T9178.hs b/testsuite/tests/warnings/should_compile/T9178.hs
new file mode 100644
index 0000000000..9171381e35
--- /dev/null
+++ b/testsuite/tests/warnings/should_compile/T9178.hs
@@ -0,0 +1,9 @@
+
+
+module T9178 where
+
+import T9178DataType
+
+
+instance Show T9178_Type where
+ show _ = undefined
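T9178.hs above deliberately defines an orphan instance Show T9178_Type, and the new expected output in T9178.stderr (just below) spells out the two ways the warning suggests to avoid it. A sketch of the newtype route; the wrapper module and names here are hypothetical and not part of the patch:

module T9178Wrapped where

import T9178DataType (T9178_Type)

-- Give the instance to a locally defined wrapper instead of to the
-- imported type itself; this instance is not an orphan.
newtype WrappedT9178 = WrapT9178 T9178_Type

instance Show WrappedT9178 where
  show _ = "WrappedT9178"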
\ No newline at end of file
diff --git a/testsuite/tests/warnings/should_compile/T9178.stderr b/testsuite/tests/warnings/should_compile/T9178.stderr
new file mode 100644
index 0000000000..6f4b6c0295
--- /dev/null
+++ b/testsuite/tests/warnings/should_compile/T9178.stderr
@@ -0,0 +1,8 @@
+[1 of 2] Compiling T9178DataType ( T9178DataType.hs, T9178DataType.o )
+[2 of 2] Compiling T9178 ( T9178.hs, T9178.o )
+
+T9178.hs:8:10: Warning:
+ Orphan instance: instance Show T9178_Type
+ To avoid this
+ move the instance declaration to the module of the class or of the type, or
+ wrap the type with a newtype and declare the instance on the new type.
diff --git a/testsuite/tests/warnings/should_compile/T9178DataType.hs b/testsuite/tests/warnings/should_compile/T9178DataType.hs
new file mode 100644
index 0000000000..e274117fe3
--- /dev/null
+++ b/testsuite/tests/warnings/should_compile/T9178DataType.hs
@@ -0,0 +1,5 @@
+
+
+module T9178DataType where
+
+data T9178_Type
diff --git a/testsuite/tests/warnings/should_compile/all.T b/testsuite/tests/warnings/should_compile/all.T
new file mode 100644
index 0000000000..f6747bf849
--- /dev/null
+++ b/testsuite/tests/warnings/should_compile/all.T
@@ -0,0 +1,3 @@
+test('T9178', extra_clean(['T9178.o', 'T9178DataType.o',
+ 'T9178.hi', 'T9178DataType.hi']),
+ multimod_compile, ['T9178', '-Wall'])
\ No newline at end of file diff --git a/testsuite/timeout/timeout.hs b/testsuite/timeout/timeout.hs index 08066879b0..f78baa10ea 100644 --- a/testsuite/timeout/timeout.hs +++ b/testsuite/timeout/timeout.hs @@ -33,10 +33,6 @@ main = do _ -> die ("Can't parse " ++ show secs ++ " as a number of seconds") _ -> die ("Bad arguments " ++ show args) -die :: String -> IO () -die msg = do hPutStrLn stderr ("timeout: " ++ msg) - exitWith (ExitFailure 1) - timeoutMsg :: String timeoutMsg = "Timeout happened...killing process..." diff --git a/utils/checkUniques/Makefile b/utils/checkUniques/Makefile index a7b2df17e2..b017473da3 100644 --- a/utils/checkUniques/Makefile +++ b/utils/checkUniques/Makefile @@ -13,4 +13,4 @@ check: checkUniques ./checkUniques mkPreludeMiscIdUnique $(PREL_NAMES) $(DS_META) checkUniques: checkUniques.hs - $(GHC) --make $@ + $(GHC) -O -XHaskell2010 --make $@ diff --git a/utils/checkUniques/checkUniques.hs b/utils/checkUniques/checkUniques.hs index d8858dee26..2eda188e3c 100644 --- a/utils/checkUniques/checkUniques.hs +++ b/utils/checkUniques/checkUniques.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE PatternGuards #-} - -- Some things could be improved, e.g.: -- * Check that each file given contains at least one instance of the -- function diff --git a/utils/compare_sizes/Main.hs b/utils/compare_sizes/Main.hs index bb1685ff6a..c64a55485b 100644 --- a/utils/compare_sizes/Main.hs +++ b/utils/compare_sizes/Main.hs @@ -1,4 +1,4 @@ --- This program compares the sizes of corresponding files in two tress +-- This program compares the sizes of corresponding files in two trees -- $ ./compareSizes --hi ~/ghc/darcs/ghc ~/ghc/6.12-branch/ghc -- Size | Change | Filename diff --git a/utils/compare_sizes/compareSizes.cabal b/utils/compare_sizes/compareSizes.cabal index 32acb1d6e7..f8f42636a7 100644 --- a/utils/compare_sizes/compareSizes.cabal +++ b/utils/compare_sizes/compareSizes.cabal @@ -1,6 +1,6 @@ name: compareSizes version: 0.1.0.0 -cabal-version: >= 1.6 +cabal-version: >=1.10 license: BSD3 build-type: Simple license-file: LICENSE @@ -10,6 +10,8 @@ description: Size comparison util category: Development executable compareSizes + default-language: Haskell2010 + build-depends: base >= 4 && < 5, directory, diff --git a/utils/coverity/model.c b/utils/coverity/model.c new file mode 100644 index 0000000000..d0a3708b65 --- /dev/null +++ b/utils/coverity/model.c @@ -0,0 +1,112 @@ +/* Coverity Scan model + * This is a modeling file for Coverity Scan. Modeling helps to avoid false + * positives. + * + * - A model file can't import any header files. Some built-in primitives are + * available but not wchar_t, NULL etc. + * - Modeling doesn't need full structs and typedefs. Rudimentary structs + * and similar types are sufficient. + * - An uninitialized local variable signifies that the variable could be + * any value. 
+ * + * The model file must be uploaded by an admin in the analysis settings of + * http://scan.coverity.com/projects/1919 + */ + +#define NULL ((void*)0) +#define assert(x) if (!(x)) __coverity_panic__(); + +/* type decls */ +typedef struct {} va_list; + +/* glibc functions */ +void *malloc (size_t); +void *calloc (size_t, size_t); +void *realloc (void *, size_t); +void free (void *); + +/* rts allocation functions */ + +void* stgMallocBytes(int n, char *msg) +{ + void *mem; + __coverity_negative_sink__((size_t)n); + mem = malloc((size_t)n); + assert(mem != NULL); + return mem; +} + +void* stgReallocBytes(void *p, int n, char *msg) +{ + void *mem; + __coverity_negative_sink__((size_t)n); + + /* man 3 realloc: if p == NULL, then realloc is equivalent to malloc() */ + if (p == NULL) { + mem = malloc((size_t)n); + assert(mem != NULL); + return mem; + } + + /* man 3 realloc: if n == 0, then realloc is equivalent to free() */ + if (n == 0) { + free(p); + return NULL; + } else { + mem = realloc(p, (size_t)n); + assert(mem != NULL); + return mem; + } +} + +void* stgCallocBytes(int n, int m, char *msg) +{ + void *mem; + __coverity_negative_sink__((size_t)n); + __coverity_negative_sink__((size_t)m); + mem = calloc(n, m); + assert(mem != NULL); + return mem; +} + +void stgFree(void* p) +{ + free(p); +} + +/* Kill paths */ + +void stg_exit(int n) +{ + __coverity_panic__(); +} + +void shutdownThread(void) +{ + __coverity_panic__(); +} + +void shutdownHaskellAndExit(int exitCode, int fastExit) +{ + __coverity_panic__(); +} + +void shutdownHaskellAndSignal(int sig, int fastExit) +{ + __coverity_panic__(); +} + +void _assertFail(const char *filename, unsigned int linenum) +{ + __coverity_panic__(); +} + +void barf(const char *s, ...) +{ + __coverity_panic__(); +} + +void vbarf(const char *s, va_list ap) +{ + __coverity_panic__(); +} diff --git a/utils/deriveConstants/DeriveConstants.hs b/utils/deriveConstants/DeriveConstants.hs index 8c943f0584..9bf21609f1 100644 --- a/utils/deriveConstants/DeriveConstants.hs +++ b/utils/deriveConstants/DeriveConstants.hs @@ -10,20 +10,20 @@ into non-C source containing this information. 
------------------------------------------------------------------------ -} -import Control.Monad -import Data.Bits -import Data.Char -import Data.List +import Control.Monad (when, unless) +import Data.Bits (shiftL) +import Data.Char (toLower) +import Data.List (stripPrefix) import Data.Map (Map) import qualified Data.Map as Map -import Data.Maybe -import Numeric -import System.Environment -import System.Exit -import System.FilePath -import System.IO -import System.Info -import System.Process +import Data.Maybe (catMaybes) +import Numeric (readHex) +import System.Environment (getArgs) +import System.Exit (ExitCode(ExitSuccess), exitFailure) +import System.FilePath ((</>)) +import System.IO (stderr, hPutStrLn) +import System.Info (os) +import System.Process (showCommandForUser, readProcess, rawSystem) main :: IO () main = do opts <- parseArgs @@ -349,6 +349,8 @@ wanteds = concat ,structField C "Capability" "context_switch" ,structField C "Capability" "interrupt" ,structField C "Capability" "sparks" + ,structField C "Capability" "weak_ptr_list_hd" + ,structField C "Capability" "weak_ptr_list_tl" ,structField Both "bdescr" "start" ,structField Both "bdescr" "free" @@ -641,7 +643,7 @@ getWanted verbose tmpdir gccProgram gccFlags nmProgram oFile = tmpdir </> "tmp.o" writeFile cFile cStuff execute verbose gccProgram (gccFlags ++ ["-c", cFile, "-o", oFile]) - xs <- readProcess nmProgram [oFile] "" + xs <- readProcess nmProgram ["-P", oFile] "" let ls = lines xs ms = map parseNmLine ls m = Map.fromList $ catMaybes ms @@ -710,28 +712,21 @@ getWanted verbose tmpdir gccProgram gccFlags nmProgram doWanted (ClosurePayloadMacro {}) = [] doWanted (FieldTypeGcptrMacro {}) = [] - -- parseNmLine parses nm output that looks like - -- "0000000b C derivedConstantMAX_Vanilla_REG" + -- parseNmLine parses "nm -P" output that looks like + -- "derivedConstantMAX_Vanilla_REG C 0000000b 0000000b" (GNU nm) + -- "_derivedConstantMAX_Vanilla_REG C b 0" (Mac OS X) + -- "_derivedConstantMAX_Vanilla_REG C 000000b" (MinGW) + -- "derivedConstantMAX_Vanilla_REG D 1 b" (Solaris) -- and returns ("MAX_Vanilla_REG", 11) - parseNmLine xs0 = case break (' ' ==) xs0 of - (x1, ' ' : xs1) -> - case break (' ' ==) xs1 of - (x2, ' ' : x3) -> - case readHex x1 of - [(size, "")] -> - case x2 of - "C" -> - let x3' = case x3 of - '_' : rest -> rest - _ -> x3 - in case stripPrefix prefix x3' of - Just name -> - Just (name, size) - _ -> Nothing - _ -> Nothing - _ -> Nothing - _ -> Nothing - _ -> Nothing + parseNmLine line + = case words line of + ('_' : n) : "C" : s : _ -> mkP n s + n : "C" : s : _ -> mkP n s + [n, "D", _, s] -> mkP n s + _ -> Nothing + where mkP r s = case (stripPrefix prefix r, readHex s) of + (Just name, [(size, "")]) -> Just (name, size) + _ -> Nothing -- If an Int value is larger than 2^28 or smaller -- than -2^28, then fail. 
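
Since the nm output format differs per platform, the rewritten parseNmLine above pattern-matches on words rather than splitting on single spaces. A standalone sketch, assuming the symbol prefix is "derivedConstant" as in the quoted sample lines, fed exactly those samples:

import Data.List (stripPrefix)
import Numeric   (readHex)

-- Mirrors the parser in the hunk above, outside of DeriveConstants.
parseNmLine :: String -> Maybe (String, Integer)
parseNmLine line = case words line of
    ('_':n) : "C" : s : _ -> mkP n s   -- Mac OS X / MinGW: leading underscore
    n       : "C" : s : _ -> mkP n s   -- GNU nm
    [n, "D", _, s]        -> mkP n s   -- Solaris: size in the last column
    _                     -> Nothing
  where
    prefix = "derivedConstant"
    mkP r s = case (stripPrefix prefix r, readHex s) of
                (Just name, [(size, "")]) -> Just (name, size)
                _                         -> Nothing

main :: IO ()
main = mapM_ (print . parseNmLine)
    [ "derivedConstantMAX_Vanilla_REG C 0000000b 0000000b"   -- GNU nm
    , "_derivedConstantMAX_Vanilla_REG C b 0"                -- Mac OS X
    , "_derivedConstantMAX_Vanilla_REG C 000000b"            -- MinGW
    , "derivedConstantMAX_Vanilla_REG D 1 b"                 -- Solaris
    ]
-- each sample prints Just ("MAX_Vanilla_REG",11)
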
diff --git a/utils/dll-split/Main.hs b/utils/dll-split/Main.hs index c0e370641c..c3f5a15a4a 100644 --- a/utils/dll-split/Main.hs +++ b/utils/dll-split/Main.hs @@ -1,6 +1,3 @@ - -{-# LANGUAGE PatternGuards #-} - module Main (main) where import Control.Monad diff --git a/utils/dll-split/dll-split.cabal b/utils/dll-split/dll-split.cabal index bece0a4770..290af06472 100644 --- a/utils/dll-split/dll-split.cabal +++ b/utils/dll-split/dll-split.cabal @@ -10,9 +10,10 @@ Description: XXX Category: Development build-type: Simple -cabal-version: >=1.2 +cabal-version: >=1.10 Executable dll-split + Default-Language: Haskell2010 Main-Is: Main.hs Build-Depends: base >= 4 && < 5, diff --git a/utils/genapply/GenApply.hs b/utils/genapply/GenApply.hs index dab6e91fde..7b84a27d64 100644 --- a/utils/genapply/GenApply.hs +++ b/utils/genapply/GenApply.hs @@ -21,6 +21,7 @@ import Data.List ( intersperse, nub, sort ) import System.Exit import System.Environment import System.IO +import Control.Arrow ((***)) -- ----------------------------------------------------------------------------- -- Argument kinds (rougly equivalent to PrimRep) @@ -199,6 +200,45 @@ mb_tag_node arity | Just tag <- tagForArity arity = mkTagStmt tag <> semi mkTagStmt tag = text ("R1 = R1 + "++ show tag) +type StackUsage = (Int, Int) -- PROFILING, normal + +maxStack :: [StackUsage] -> StackUsage +maxStack = (maximum *** maximum) . unzip + +stackCheck + :: RegStatus -- Registerised status + -> [ArgRep] + -> Bool -- args in regs? + -> Doc -- fun_info_label + -> StackUsage + -> Doc +stackCheck regstatus args args_in_regs fun_info_label (prof_sp, norm_sp) = + let + (reg_locs, leftovers, sp_offset) = assignRegs regstatus 1 args + + cmp_sp n + | n > 0 = + text "if (Sp - WDS(" <> int n <> text ") < SpLim) {" $$ + nest 4 (vcat [ + if args_in_regs + then + text "Sp_adj" <> parens (int (-sp_offset)) <> semi $$ + saveRegOffs reg_locs + else + empty, + text "Sp(0) = " <> fun_info_label <> char ';', + mkJump regstatus (text "__stg_gc_enter_1") ["R1"] [] <> semi + ]) $$ + char '}' + | otherwise = empty + in + vcat [ text "#ifdef PROFILING", + cmp_sp prof_sp, + text "#else", + cmp_sp norm_sp, + text "#endif" + ] + genMkPAP :: RegStatus -- Register status -> String -- Macro -> String -- Jump target @@ -212,17 +252,19 @@ genMkPAP :: RegStatus -- Register status -> Int -- Size of all arguments -> Doc -- info label -> Bool -- Is a function - -> Doc + -> (Doc, StackUsage) genMkPAP regstatus macro jump live ticker disamb no_load_regs -- don't load argument regs before jumping args_in_regs -- arguments are already in regs is_pap args all_args_size fun_info_label is_fun_case - = smaller_arity_cases - $$ exact_arity_case - $$ larger_arity_case - + = (doc, stack_usage) + where + doc = vcat smaller_arity_doc $$ exact_arity_case $$ larger_arity_doc + + stack_usage = maxStack (larger_arity_stack : smaller_arity_stack) + n_args = length args -- offset of arguments on the stack at slow apply calls. 
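
The new StackUsage bookkeeping threads a (PROFILING, normal) pair of stack sizes out of genMkPAP so that genApply can emit a single up-front stack check. maxStack above is just a component-wise maximum over those pairs; a tiny sketch with made-up numbers (assuming a non-empty list, as in the callers):

import Control.Arrow ((***))

type StackUsage = (Int, Int)   -- PROFILING, normal

-- unzip splits the pairs into two lists; (***) applies maximum to each.
maxStack :: [StackUsage] -> StackUsage
maxStack = (maximum *** maximum) . unzip

main :: IO ()
main = print (maxStack [(3, 1), (0, 4), (2, 2)])   -- prints (3,4)
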
@@ -237,10 +279,17 @@ genMkPAP regstatus macro jump live ticker disamb -- Sp[0] = Sp[1]; -- Sp[1] = (W_)&stg_ap_1_info; -- JMP_(GET_ENTRY(R1.cl)); - smaller_arity_cases = vcat [ smaller_arity i | i <- [1..n_args-1] ] + (smaller_arity_doc, smaller_arity_stack) + = unzip [ smaller_arity i | i <- [1..n_args-1] ] + + smaller_arity arity = (doc, stack_usage) + where + (save_regs, stack_usage) + | overflow_regs = save_extra_regs + | otherwise = shuffle_extra_args - smaller_arity arity - = text "if (arity == " <> int arity <> text ") {" $$ + doc = + text "if (arity == " <> int arity <> text ") {" $$ nest 4 (vcat [ -- text "TICK_SLOW_CALL_" <> text ticker <> text "_TOO_MANY();", @@ -253,9 +302,7 @@ genMkPAP regstatus macro jump live ticker disamb -- If the extra arguments are on the stack, then we must -- instead shuffle them down to make room for the info -- table for the follow-on call. - if overflow_regs - then save_extra_regs - else shuffle_extra_args, + save_regs, -- for a PAP, we have to arrange that the stack contains a -- return address in the event that stg_PAP_entry fails its @@ -271,81 +318,88 @@ genMkPAP regstatus macro jump live ticker disamb ]) $$ text "}" - where - -- offsets in case we need to save regs: - (reg_locs, _, _) - = assignRegs regstatus stk_args_offset args - - -- register assignment for *this function call* - (reg_locs', reg_call_leftovers, reg_call_sp_stk_args) - = assignRegs regstatus stk_args_offset (take arity args) - - load_regs - | no_load_regs || args_in_regs = empty - | otherwise = loadRegOffs reg_locs' - - (this_call_args, rest_args) = splitAt arity args - - -- the offset of the stack args from initial Sp - sp_stk_args - | args_in_regs = stk_args_offset - | no_load_regs = stk_args_offset - | otherwise = reg_call_sp_stk_args - - -- the stack args themselves - this_call_stack_args - | args_in_regs = reg_call_leftovers -- sp offsets are wrong - | no_load_regs = this_call_args - | otherwise = reg_call_leftovers - - stack_args_size = sum (map argSize this_call_stack_args) - - overflow_regs = args_in_regs && length reg_locs > length reg_locs' - - save_extra_regs - = -- we have extra arguments in registers to save - let - extra_reg_locs = drop (length reg_locs') (reverse reg_locs) - adj_reg_locs = [ (reg, off - adj + 1) | - (reg,off) <- extra_reg_locs ] - adj = case extra_reg_locs of - (reg, fst_off):_ -> fst_off - size = snd (last adj_reg_locs) - in - text "Sp_adj(" <> int (-size - 1) <> text ");" $$ - saveRegOffs adj_reg_locs $$ - loadSpWordOff "W_" 0 <> text " = " <> - mkApplyInfoName rest_args <> semi - - shuffle_extra_args - = vcat [text "#ifdef PROFILING", - shuffle True, + -- offsets in case we need to save regs: + (reg_locs, _, _) + = assignRegs regstatus stk_args_offset args + + -- register assignment for *this function call* + (reg_locs', reg_call_leftovers, reg_call_sp_stk_args) + = assignRegs regstatus stk_args_offset (take arity args) + + load_regs + | no_load_regs || args_in_regs = empty + | otherwise = loadRegOffs reg_locs' + + (this_call_args, rest_args) = splitAt arity args + + -- the offset of the stack args from initial Sp + sp_stk_args + | args_in_regs = stk_args_offset + | no_load_regs = stk_args_offset + | otherwise = reg_call_sp_stk_args + + -- the stack args themselves + this_call_stack_args + | args_in_regs = reg_call_leftovers -- sp offsets are wrong + | no_load_regs = this_call_args + | otherwise = reg_call_leftovers + + stack_args_size = sum (map argSize this_call_stack_args) + + overflow_regs = args_in_regs && length reg_locs > length 
reg_locs' + + save_extra_regs = (doc, (size,size)) + where + -- we have extra arguments in registers to save + extra_reg_locs = drop (length reg_locs') (reverse reg_locs) + adj_reg_locs = [ (reg, off - adj + 1) | + (reg,off) <- extra_reg_locs ] + adj = case extra_reg_locs of + (reg, fst_off):_ -> fst_off + size = snd (last adj_reg_locs) + 1 + + doc = + text "Sp_adj(" <> int (-size) <> text ");" $$ + saveRegOffs adj_reg_locs $$ + loadSpWordOff "W_" 0 <> text " = " <> + mkApplyInfoName rest_args <> semi + + shuffle_extra_args = (doc, (shuffle_prof_stack, shuffle_norm_stack)) + where + doc = vcat [ text "#ifdef PROFILING", + shuffle_prof_doc, text "#else", - shuffle False, + shuffle_norm_doc, text "#endif"] - where - -- Sadly here we have to insert an stg_restore_cccs frame - -- just underneath the stg_ap_*_info frame if we're - -- profiling; see Note [jump_SAVE_CCCS] - shuffle prof = - let offset = if prof then 2 else 0 in - vcat (map (shuffle_down (offset+1)) - [sp_stk_args .. sp_stk_args+stack_args_size-1]) $$ - (if prof - then - loadSpWordOff "W_" (sp_stk_args+stack_args_size-3) - <> text " = stg_restore_cccs_info;" $$ - loadSpWordOff "W_" (sp_stk_args+stack_args_size-2) - <> text " = CCCS;" - else empty) $$ - loadSpWordOff "W_" (sp_stk_args+stack_args_size-1) - <> text " = " - <> mkApplyInfoName rest_args <> semi $$ - text "Sp_adj(" <> int (sp_stk_args - 1 - offset) <> text ");" - - shuffle_down j i = - loadSpWordOff "W_" (i-j) <> text " = " <> - loadSpWordOff "W_" i <> semi + + (shuffle_prof_doc, shuffle_prof_stack) = shuffle True + (shuffle_norm_doc, shuffle_norm_stack) = shuffle False + + -- Sadly here we have to insert an stg_restore_cccs frame + -- just underneath the stg_ap_*_info frame if we're + -- profiling; see Note [jump_SAVE_CCCS] + shuffle prof = (doc, -sp_adj) + where + sp_adj = sp_stk_args - 1 - offset + offset = if prof then 2 else 0 + doc = + vcat (map (shuffle_down (offset+1)) + [sp_stk_args .. 
sp_stk_args+stack_args_size-1]) $$ + (if prof + then + loadSpWordOff "W_" (sp_stk_args+stack_args_size-3) + <> text " = stg_restore_cccs_info;" $$ + loadSpWordOff "W_" (sp_stk_args+stack_args_size-2) + <> text " = CCCS;" + else empty) $$ + loadSpWordOff "W_" (sp_stk_args+stack_args_size-1) + <> text " = " + <> mkApplyInfoName rest_args <> semi $$ + text "Sp_adj(" <> int sp_adj <> text ");" + + shuffle_down j i = + loadSpWordOff "W_" (i-j) <> text " = " <> + loadSpWordOff "W_" i <> semi -- The EXACT ARITY case @@ -378,7 +432,17 @@ genMkPAP regstatus macro jump live ticker disamb -- BUILD_PAP(1,0,(W_)&stg_ap_v_info); -- } - larger_arity_case = + (larger_arity_doc, larger_arity_stack) = (doc, stack) + where + -- offsets in case we need to save regs: + (reg_locs, leftovers, sp_offset) + = assignRegs regstatus stk_args_slow_offset args + -- BUILD_PAP assumes args start at offset 1 + + stack | args_in_regs = (sp_offset, sp_offset) + | otherwise = (0,0) + + doc = text "} else {" $$ let save_regs @@ -407,11 +471,7 @@ genMkPAP regstatus macro jump live ticker disamb text ");" ]) $$ char '}' - where - -- offsets in case we need to save regs: - (reg_locs, leftovers, sp_offset) - = assignRegs regstatus stk_args_slow_offset args - -- BUILD_PAP assumes args start at offset 1 + -- Note [jump_SAVE_CCCS] @@ -453,13 +513,14 @@ enterFastPathHelper :: Int -> [ArgRep] -> Doc enterFastPathHelper tag regstatus no_load_regs args_in_regs args = - vcat [text "if (GETTAG(R1)==" <> int tag <> text ") {", - reg_doc, - text " Sp_adj(" <> int sp' <> text ");", - -- enter, but adjust offset with tag - text " " <> mkJump regstatus (text "%GET_ENTRY(R1-" <> int tag <> text ")") ["R1"] args <> semi, - text "}" - ] + text "if (GETTAG(R1)==" <> int tag <> text ") {" $$ + nest 4 (vcat [ + reg_doc, + text "Sp_adj(" <> int sp' <> text ");", + -- enter, but adjust offset with tag + mkJump regstatus (text "%GET_ENTRY(R1-" <> int tag <> text ")") ["R1"] args <> semi + ]) $$ + text "}" -- I don't totally understand this code, I copied it from -- exact_arity_case -- TODO: refactor @@ -519,6 +580,23 @@ genApply regstatus args = fun_ret_label = mkApplyRetName args fun_info_label = mkApplyInfoName args all_args_size = sum (map argSize args) + + (bco_doc, bco_stack) = + genMkPAP regstatus "BUILD_PAP" "ENTRY_LBL(stg_BCO)" ["R1"] "FUN" "BCO" + True{-stack apply-} False{-args on stack-} False{-not a PAP-} + args all_args_size fun_info_label {- tag stmt -}False + + (fun_doc, fun_stack) = + genMkPAP regstatus "BUILD_PAP" "%GET_ENTRY(UNTAG(R1))" ["R1"] "FUN" "FUN" + False{-reg apply-} False{-args on stack-} False{-not a PAP-} + args all_args_size fun_info_label {- tag stmt -}True + + (pap_doc, pap_stack) = + genMkPAP regstatus "NEW_PAP" "stg_PAP_apply" ["R1", "R2"] "PAP" "PAP" + True{-stack apply-} False{-args on stack-} True{-is a PAP-} + args all_args_size fun_info_label {- tag stmt -}False + + stack_usage = maxStack [bco_stack, fun_stack, pap_stack] in vcat [ text "INFO_TABLE_RET(" <> mkApplyName args <> text ", " <> @@ -579,6 +657,9 @@ genApply regstatus args = -- if pointer is tagged enter it fast! enterFastPath regstatus False False args, + stackCheck regstatus args False{-args on stack-} + fun_info_label stack_usage, + -- Functions can be tagged, so we untag them! 
text "R1 = UNTAG(R1);", text "info = %INFO_PTR(R1);", @@ -596,9 +677,7 @@ genApply regstatus args = nest 4 (vcat [ text "arity = TO_W_(StgBCO_arity(R1));", text "ASSERT(arity > 0);", - genMkPAP regstatus "BUILD_PAP" "ENTRY_LBL(stg_BCO)" ["R1"] "FUN" "BCO" - True{-stack apply-} False{-args on stack-} False{-not a PAP-} - args all_args_size fun_info_label {- tag stmt -}False + bco_doc ]), text "}", @@ -615,9 +694,7 @@ genApply regstatus args = nest 4 (vcat [ text "arity = TO_W_(StgFunInfoExtra_arity(%FUN_INFO(info)));", text "ASSERT(arity > 0);", - genMkPAP regstatus "BUILD_PAP" "%GET_ENTRY(UNTAG(R1))" ["R1"] "FUN" "FUN" - False{-reg apply-} False{-args on stack-} False{-not a PAP-} - args all_args_size fun_info_label {- tag stmt -}True + fun_doc ]), text "}", @@ -629,9 +706,7 @@ genApply regstatus args = nest 4 (vcat [ text "arity = TO_W_(StgPAP_arity(R1));", text "ASSERT(arity > 0);", - genMkPAP regstatus "NEW_PAP" "stg_PAP_apply" ["R1", "R2"] "PAP" "PAP" - True{-stack apply-} False{-args on stack-} True{-is a PAP-} - args all_args_size fun_info_label {- tag stmt -}False + pap_doc ]), text "}", @@ -690,6 +765,7 @@ genApply regstatus args = ]), text "}" ]), + text "}" ] @@ -702,6 +778,15 @@ genApplyFast regstatus args = fun_ret_label = text "RET_LBL" <> parens (mkApplyName args) fun_info_label = mkApplyInfoName args all_args_size = sum (map argSize args) + + (fun_doc, fun_stack) = + genMkPAP regstatus "BUILD_PAP" "%GET_ENTRY(UNTAG(R1))" ["R1"] "FUN" "FUN" + False{-reg apply-} True{-args in regs-} False{-not a PAP-} + args all_args_size fun_info_label {- tag stmt -}True + + (reg_locs, leftovers, sp_offset) = assignRegs regstatus 1 args + + stack_usage = maxStack [fun_stack, (sp_offset,sp_offset)] in vcat [ fun_fast_label, @@ -715,6 +800,9 @@ genApplyFast regstatus args = -- if pointer is tagged enter it fast! enterFastPath regstatus False True args, + stackCheck regstatus args True{-args in regs-} + fun_info_label stack_usage, + -- Functions can be tagged, so we untag them! 
text "R1 = UNTAG(R1);", text "info = %GET_STD_INFO(R1);", @@ -730,18 +818,11 @@ genApplyFast regstatus args = nest 4 (vcat [ text "arity = TO_W_(StgFunInfoExtra_arity(%GET_FUN_INFO(R1)));", text "ASSERT(arity > 0);", - genMkPAP regstatus "BUILD_PAP" "%GET_ENTRY(UNTAG(R1))" ["R1"] "FUN" "FUN" - False{-reg apply-} True{-args in regs-} False{-not a PAP-} - args all_args_size fun_info_label {- tag stmt -}True + fun_doc ]), char '}', text "default: {", - let - (reg_locs, leftovers, sp_offset) = assignRegs regstatus 1 args - -- leave a one-word space on the top of the stack when - -- calling the slow version - in nest 4 (vcat [ text "Sp_adj" <> parens (int (-sp_offset)) <> semi, saveRegOffs reg_locs, @@ -749,8 +830,9 @@ genApplyFast regstatus args = ]), char '}' ]), - char '}' - ]), + + char '}' + ]), char '}' ] diff --git a/utils/genargs/genargs.pl b/utils/genargs/genargs.pl index 2ef2dfa3e6..33dd2a0c8c 100644 --- a/utils/genargs/genargs.pl +++ b/utils/genargs/genargs.pl @@ -1,4 +1,7 @@ -#!/usr/bin/perl +#!/usr/bin/env perl + +use warnings; + my $quote_open = 0; my $quote_char = ''; my $accum = ""; diff --git a/utils/genprimopcode/Main.hs b/utils/genprimopcode/Main.hs index aa64094add..7fe375a7d2 100644 --- a/utils/genprimopcode/Main.hs +++ b/utils/genprimopcode/Main.hs @@ -118,7 +118,7 @@ main = getArgs >>= \args -> do s <- getContents case parse s of Left err -> error ("parse error at " ++ (show err)) - Right p_o_specs@(Info _ entries) + Right p_o_specs@(Info _ _) -> seq (sanityTop p_o_specs) ( case head args of @@ -187,9 +187,6 @@ main = getArgs >>= \args -> "--make-haskell-source" -> putStr (gen_hs_source p_o_specs) - "--make-ext-core-source" - -> putStr (gen_ext_core_source entries) - "--make-latex-doc" -> putStr (gen_latex_doc p_o_specs) @@ -215,7 +212,6 @@ known_args "--primop-vector-tycons", "--make-haskell-wrappers", "--make-haskell-source", - "--make-ext-core-source", "--make-latex-doc" ] diff --git a/utils/ghc-cabal/ghc-cabal.cabal b/utils/ghc-cabal/ghc-cabal.cabal index 74399ce390..5437d63bb2 100644 --- a/utils/ghc-cabal/ghc-cabal.cabal +++ b/utils/ghc-cabal/ghc-cabal.cabal @@ -10,14 +10,15 @@ Description: XXX Category: Development build-type: Simple -cabal-version: >=1.2 +cabal-version: >=1.10 Executable ghc-cabal + Default-Language: Haskell2010 Main-Is: ghc-cabal.hs Build-Depends: base >= 3 && < 5, bytestring >= 0.10 && < 0.11, - Cabal >= 1.20 && < 1.21, + Cabal >= 1.20 && < 1.22, directory >= 1.1 && < 1.3, filepath >= 1.2 && < 1.4 diff --git a/utils/ghc-pkg/Main.hs b/utils/ghc-pkg/Main.hs index 30acbe2eb8..e51755ce2c 100644 --- a/utils/ghc-pkg/Main.hs +++ b/utils/ghc-pkg/Main.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE PatternGuards, CPP, ForeignFunctionInterface #-} +{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow 2004-2009. 
@@ -114,6 +114,7 @@ data Flag | FlagForce | FlagForceFiles | FlagAutoGHCiLibs + | FlagMultiInstance | FlagExpandEnvVars | FlagExpandPkgroot | FlagNoExpandPkgroot @@ -146,6 +147,8 @@ flags = [ "ignore missing directories and libraries only", Option ['g'] ["auto-ghci-libs"] (NoArg FlagAutoGHCiLibs) "automatically build libs for GHCi (with register)", + Option [] ["enable-multi-instance"] (NoArg FlagMultiInstance) + "allow registering multiple instances of the same package version", Option [] ["expand-env-vars"] (NoArg FlagExpandEnvVars) "expand environment variables (${name}-style) in input package descriptions", Option [] ["expand-pkgroot"] (NoArg FlagExpandPkgroot) @@ -309,6 +312,7 @@ runit verbosity cli nonopts = do | FlagForceFiles `elem` cli = ForceFiles | otherwise = NoForce auto_ghci_libs = FlagAutoGHCiLibs `elem` cli + multi_instance = FlagMultiInstance `elem` cli expand_env_vars= FlagExpandEnvVars `elem` cli mexpand_pkgroot= foldl' accumExpandPkgroot Nothing cli where accumExpandPkgroot _ FlagExpandPkgroot = Just True @@ -355,10 +359,12 @@ runit verbosity cli nonopts = do initPackageDB filename verbosity cli ["register", filename] -> registerPackage filename verbosity cli - auto_ghci_libs expand_env_vars False force + auto_ghci_libs multi_instance + expand_env_vars False force ["update", filename] -> registerPackage filename verbosity cli - auto_ghci_libs expand_env_vars True force + auto_ghci_libs multi_instance + expand_env_vars True force ["unregister", pkgid_str] -> do pkgid <- readGlobPkgId pkgid_str unregisterPackage pkgid verbosity cli force @@ -593,9 +599,9 @@ lookForPackageDBIn dir = do let path_dir = dir </> "package.conf.d" exists_dir <- doesDirectoryExist path_dir if exists_dir then return (Just path_dir) else do - let path_file = dir </> "package.conf" - exists_file <- doesFileExist path_file - if exists_file then return (Just path_file) else return Nothing + let path_file = dir </> "package.conf" + exists_file <- doesFileExist path_file + if exists_file then return (Just path_file) else return Nothing readParseDatabase :: Verbosity -> Maybe (FilePath,Bool) @@ -782,11 +788,13 @@ registerPackage :: FilePath -> Verbosity -> [Flag] -> Bool -- auto_ghci_libs + -> Bool -- multi_instance -> Bool -- expand_env_vars -> Bool -- update -> Force -> IO () -registerPackage input verbosity my_flags auto_ghci_libs expand_env_vars update force = do +registerPackage input verbosity my_flags auto_ghci_libs multi_instance + expand_env_vars update force = do (db_stack, Just to_modify, _flag_dbs) <- getPkgDatabases verbosity True True False{-expand vars-} my_flags @@ -829,10 +837,16 @@ registerPackage input verbosity my_flags auto_ghci_libs expand_env_vars update f let truncated_stack = dropWhile ((/= to_modify).location) db_stack -- truncate the stack for validation, because we don't allow -- packages lower in the stack to refer to those higher up. - validatePackageConfig pkg_expanded verbosity truncated_stack auto_ghci_libs update force + validatePackageConfig pkg_expanded verbosity truncated_stack + auto_ghci_libs multi_instance update force let + -- In the normal mode, we only allow one version of each package, so we + -- remove all instances with the same source package id as the one we're + -- adding. In the multi instance mode we don't do that, thus allowing + -- multiple instances with the same source package id. 
removes = [ RemovePackage p - | p <- packages db_to_operate_on, + | not multi_instance, + p <- packages db_to_operate_on, sourcePackageId p == sourcePackageId pkg ] -- changeDB verbosity (removes ++ [AddPackage pkg]) db_to_operate_on @@ -1035,34 +1049,34 @@ listPackages verbosity my_flags mPackageName mModuleName = do if simple_output then show_simple stack else do #if defined(mingw32_HOST_OS) || defined(BOOTSTRAPPING) - mapM_ show_normal stack + mapM_ show_normal stack #else - let - show_colour withF db = - mconcat $ map (<#> termText "\n") $ - (termText (location db) : - map (termText " " <#>) (map pp_pkg (packages db))) - where - pp_pkg p - | sourcePackageId p `elem` broken = withF Red doc - | exposed p = doc - | otherwise = withF Blue doc - where doc | verbosity >= Verbose - = termText (printf "%s (%s)" pkg ipid) - | otherwise - = termText pkg - where - InstalledPackageId ipid = installedPackageId p - pkg = display (sourcePackageId p) - - is_tty <- hIsTerminalDevice stdout - if not is_tty - then mapM_ show_normal stack - else do tty <- Terminfo.setupTermFromEnv - case Terminfo.getCapability tty withForegroundColor of - Nothing -> mapM_ show_normal stack - Just w -> runTermOutput tty $ mconcat $ - map (show_colour w) stack + let + show_colour withF db = + mconcat $ map (<#> termText "\n") $ + (termText (location db) : + map (termText " " <#>) (map pp_pkg (packages db))) + where + pp_pkg p + | sourcePackageId p `elem` broken = withF Red doc + | exposed p = doc + | otherwise = withF Blue doc + where doc | verbosity >= Verbose + = termText (printf "%s (%s)" pkg ipid) + | otherwise + = termText pkg + where + InstalledPackageId ipid = installedPackageId p + pkg = display (sourcePackageId p) + + is_tty <- hIsTerminalDevice stdout + if not is_tty + then mapM_ show_normal stack + else do tty <- Terminfo.setupTermFromEnv + case Terminfo.getCapability tty withForegroundColor of + Nothing -> mapM_ show_normal stack + Just w -> runTermOutput tty $ mconcat $ + map (show_colour w) stack #endif simplePackageList :: [Flag] -> [InstalledPackageInfo] -> IO () @@ -1204,7 +1218,8 @@ checkConsistency verbosity my_flags = do let pkgs = allPackagesInStack db_stack checkPackage p = do - (_,es,ws) <- runValidate $ checkPackageConfig p verbosity db_stack False True + (_,es,ws) <- runValidate $ checkPackageConfig p verbosity db_stack + False True True if null es then do when (not simple_output) $ do _ <- reportValidateErrors [] ws "" Nothing @@ -1354,11 +1369,15 @@ validatePackageConfig :: InstalledPackageInfo -> Verbosity -> PackageDBStack -> Bool -- auto-ghc-libs + -> Bool -- multi_instance -> Bool -- update, or check -> Force -> IO () -validatePackageConfig pkg verbosity db_stack auto_ghci_libs update force = do - (_,es,ws) <- runValidate $ checkPackageConfig pkg verbosity db_stack auto_ghci_libs update +validatePackageConfig pkg verbosity db_stack auto_ghci_libs + multi_instance update force = do + (_,es,ws) <- runValidate $ + checkPackageConfig pkg verbosity db_stack + auto_ghci_libs multi_instance update ok <- reportValidateErrors es ws (display (sourcePackageId pkg) ++ ": ") (Just force) when (not ok) $ exitWith (ExitFailure 1) @@ -1366,12 +1385,14 @@ checkPackageConfig :: InstalledPackageInfo -> Verbosity -> PackageDBStack -> Bool -- auto-ghc-libs + -> Bool -- multi_instance -> Bool -- update, or check -> Validate () -checkPackageConfig pkg verbosity db_stack auto_ghci_libs update = do +checkPackageConfig pkg verbosity db_stack auto_ghci_libs + multi_instance update = do checkInstalledPackageId pkg 
db_stack update checkPackageId pkg - checkDuplicates db_stack pkg update + checkDuplicates db_stack pkg multi_instance update mapM_ (checkDep db_stack) (depends pkg) checkDuplicateDepends (depends pkg) mapM_ (checkDir False "import-dirs") (importDirs pkg) @@ -1410,15 +1431,17 @@ checkPackageId ipi = [] -> verror CannotForce ("invalid package identifier: " ++ str) _ -> verror CannotForce ("ambiguous package identifier: " ++ str) -checkDuplicates :: PackageDBStack -> InstalledPackageInfo -> Bool -> Validate () -checkDuplicates db_stack pkg update = do +checkDuplicates :: PackageDBStack -> InstalledPackageInfo + -> Bool -> Bool-> Validate () +checkDuplicates db_stack pkg multi_instance update = do let pkgid = sourcePackageId pkg pkgs = packages (head db_stack) -- -- Check whether this package id already exists in this DB -- - when (not update && (pkgid `elem` map sourcePackageId pkgs)) $ + when (not update && not multi_instance + && (pkgid `elem` map sourcePackageId pkgs)) $ verror CannotForce $ "package " ++ display pkgid ++ " is already installed" diff --git a/utils/ghc-pkg/ghc-pkg.cabal b/utils/ghc-pkg/ghc-pkg.cabal index 2f42e31f15..574301086e 100644 --- a/utils/ghc-pkg/ghc-pkg.cabal +++ b/utils/ghc-pkg/ghc-pkg.cabal @@ -11,12 +11,13 @@ Description: XXX Category: Development build-type: Simple -cabal-version: >=1.4 +cabal-version: >=1.10 Executable ghc-pkg + Default-Language: Haskell2010 Main-Is: Main.hs Other-Modules: Version - Extensions: CPP, ForeignFunctionInterface, NondecreasingIndentation + Other-Extensions: CPP Build-Depends: base >= 4 && < 5, directory >= 1 && < 1.3, diff --git a/utils/ghc-pwd/ghc-pwd.cabal b/utils/ghc-pwd/ghc-pwd.cabal index ba2eb63b82..4d155b0317 100644 --- a/utils/ghc-pwd/ghc-pwd.cabal +++ b/utils/ghc-pwd/ghc-pwd.cabal @@ -9,9 +9,10 @@ Synopsis: XXX Description: XXX build-type: Simple -cabal-version: >=1.2 +cabal-version: >=1.10 Executable ghc-pwd + Default-Language: Haskell2010 Main-Is: ghc-pwd.hs Build-Depends: base >= 3 && < 5, directory >= 1 && < 1.3 diff --git a/utils/ghctags/Main.hs b/utils/ghctags/Main.hs index a67891e16a..815cc7ca18 100644 --- a/utils/ghctags/Main.hs +++ b/utils/ghctags/Main.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE PatternGuards, ScopedTypeVariables #-} +{-# LANGUAGE ScopedTypeVariables #-} module Main where import Prelude hiding ( mod, id, mapM ) diff --git a/utils/ghctags/ghctags.cabal b/utils/ghctags/ghctags.cabal index 0e97ccade6..e9c784877b 100644 --- a/utils/ghctags/ghctags.cabal +++ b/utils/ghctags/ghctags.cabal @@ -10,13 +10,15 @@ Description: XXX Category: Development build-type: Simple -cabal-version: >=1.2 +cabal-version: >=1.10 Executable ghctags + Default-Language: Haskell2010 + Main-Is: Main.hs Build-Depends: base >= 4 && < 5, containers, - Cabal >= 1.20 && <1.21, + Cabal >= 1.20 && <1.22, ghc diff --git a/utils/haddock b/utils/haddock -Subproject 08aa509ebac58bfb202ea79c7c41291ec280a1c +Subproject cb96b4f1ed0462b4a394b9fda6612c3bea9886b diff --git a/utils/hp2ps/HpFile.c b/utils/hp2ps/HpFile.c index 5ee9cc259e..9459247a03 100644 --- a/utils/hp2ps/HpFile.c +++ b/utils/hp2ps/HpFile.c @@ -227,7 +227,7 @@ GetHpLine(FILE *infp) Error("%s, line %d: integer must follow identifier", hpfile, linenum); } - StoreSample(GetEntry(theident), nsamples, (floatish) theinteger); + StoreSample(GetEntry(theident), nsamples, thefloatish); GetHpTok(infp); break; @@ -358,8 +358,13 @@ GetNumber(FILE *infp) thefloatish = (floatish) atof(numberstring); return FLOAT_TOK; } else { - theinteger = atoi(numberstring); - return INTEGER_TOK; + theinteger = 
atoi(numberstring); + /* Set thefloatish too. + If this is an identifier line, the value might exceed + the size of 'int', and we are going to convert it to + a floatish anyways. */ + thefloatish = (floatish) atof(numberstring); + return INTEGER_TOK; } } diff --git a/utils/hpc/hpc-bin.cabal b/utils/hpc/hpc-bin.cabal index 4f94ab0fa0..8ec6e5b790 100644 --- a/utils/hpc/hpc-bin.cabal +++ b/utils/hpc/hpc-bin.cabal @@ -11,7 +11,7 @@ Description: XXX Category: Development build-type: Simple -cabal-version: >=1.2 +cabal-version: >=1.10 Flag base4 Description: Choose the even newer, even smaller, split-up base package. @@ -20,6 +20,7 @@ Flag base3 Description: Choose the new smaller, split-up base package. Executable hpc + Default-Language: Haskell2010 Main-Is: Hpc.hs Other-Modules: HpcParser HpcCombine @@ -45,5 +46,4 @@ Executable hpc containers >= 0.1 && < 0.6, array >= 0.1 && < 0.6 Build-Depends: hpc - Extensions: CPP diff --git a/utils/hsc2hs b/utils/hsc2hs new file mode 160000 +Subproject 4a0f67704d89712f8493a0c7eccffa9243d6ef0 diff --git a/utils/mkUserGuidePart/mkUserGuidePart.cabal b/utils/mkUserGuidePart/mkUserGuidePart.cabal index 3cadaacd47..112bbf6a81 100644 --- a/utils/mkUserGuidePart/mkUserGuidePart.cabal +++ b/utils/mkUserGuidePart/mkUserGuidePart.cabal @@ -9,9 +9,10 @@ Synopsis: XXX Description: XXX build-type: Simple -cabal-version: >=1.2 +cabal-version: >=1.10 Executable mkUserGuidePart + Default-Language: Haskell2010 Main-Is: Main.hs Build-Depends: base >= 3 && < 5, ghc diff --git a/utils/runghc/runghc.cabal.in b/utils/runghc/runghc.cabal.in index f9cbacca54..fde6b9a4d6 100644 --- a/utils/runghc/runghc.cabal.in +++ b/utils/runghc/runghc.cabal.in @@ -10,12 +10,13 @@ Description: XXX Category: Development build-type: Simple -cabal-version: >=1.2 +cabal-version: >=1.10 Flag base3 Description: Choose the new smaller, split-up base package. Executable runghc + Default-Language: Haskell2010 Main-Is: runghc.hs if flag(base3) diff --git a/utils/runghc/runghc.hs b/utils/runghc/runghc.hs index 5280cb3344..47a6bc57d5 100644 --- a/utils/runghc/runghc.hs +++ b/utils/runghc/runghc.hs @@ -1,4 +1,4 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface #-} +{-# LANGUAGE CPP #-} #include "ghcconfig.h" ----------------------------------------------------------------------------- -- @@ -22,9 +22,10 @@ Flags: --fast Omit dyn way, omit binary distribution --slow Build stage2 with -DDEBUG. 2008-07-01: 14% slower than the default. - --no-dph: Skip requiring libraries/dph. In --slow mode, these tests - can take a substantial amount of time, and on some platforms - with broken linkers, we don't want to try compiling it. + --no-dph: Skip building libraries/dph and running associated tests. + In --slow mode, these tests can take a substantial amount + of time, and on some platforms with broken linkers, we + don't want to try compiling it. --help shows this usage help. Set environment variable 'CPUS' to number of cores, to exploit @@ -135,6 +136,12 @@ echo "Validating=YES" > mk/are-validating.mk echo "ValidateSpeed=$speed" >> mk/are-validating.mk echo "ValidateHpc=$hpc" >> mk/are-validating.mk +if [ $skip_dph -eq 1 ]; then + echo "BUILD_DPH=NO" >> mk/are-validating.mk +else + echo "BUILD_DPH=YES" >> mk/are-validating.mk +fi + $make -j$threads # For a "debug make", add "--debug=b --debug=m" |
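
Looking back at the ghc-pkg hunks above: the new --enable-multi-instance flag mostly threads a Boolean down to the duplicate checks, and the interesting bit is the extra guard added to the removes comprehension in registerPackage. A self-contained sketch of that guard's effect, using a stand-in record instead of InstalledPackageInfo:

data Package = Package { sourcePackageId :: String } deriving Show

-- Mirrors the list comprehension in registerPackage: in normal mode every
-- existing package with the same source package id is scheduled for removal;
-- with multi_instance set, the 'not multi_instance' guard fails and nothing
-- is removed, so several instances of the same version can coexist.
removesFor :: Bool -> [Package] -> Package -> [Package]
removesFor multi_instance db pkg =
  [ p | not multi_instance
      , p <- db
      , sourcePackageId p == sourcePackageId pkg ]

main :: IO ()
main = do
  let db  = [Package "foo-1.0", Package "bar-2.1"]
      new = Package "foo-1.0"
  print (removesFor False db new)   -- the existing foo-1.0 is removed first
  print (removesFor True  db new)   -- nothing removed; both instances kept
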