/driver/split/dist/ghc-split
/driver/split/dist/ghc-split.prl
/driver/stamp-pkg-conf-rts
-/extra-gcc-opts
+/settings
/ghc.spec
/ghc/ghc-bin.cabal
/ghc/stage1/
# --------------------
# XXX
#
-# $1 = the command to look for
-# $2 = the variable to set
+# $1 = the variable to set
+# $2 = the command to look for
#
AC_DEFUN([FP_ARG_WITH_PATH_GNU_PROG],
[
])# FP_PROG_AR_NEEDS_RANLIB
-# FP_PROG_AR_SUPPORTS_INPUT
-# -------------------------
-# Sets the output variable ArSupportsInput to "-input" or "", depending on
-# whether ar supports -input flag is supported or not.
-AC_DEFUN([FP_PROG_AR_SUPPORTS_INPUT],
-[AC_REQUIRE([FP_PROG_AR_IS_GNU])
-AC_REQUIRE([FP_PROG_AR_ARGS])
-AC_CACHE_CHECK([whether $fp_prog_ar_raw supports -input], [fp_cv_prog_ar_supports_input],
-[fp_cv_prog_ar_supports_input=no
-if test $fp_prog_ar_is_gnu = no; then
- rm -f conftest*
- touch conftest.lst
- if FP_EVAL_STDERR(["$fp_prog_ar_raw" $fp_prog_ar_args conftest.a -input conftest.lst]) >/dev/null; then
- test -s conftest.err || fp_cv_prog_ar_supports_input=yes
- fi
- rm -f conftest*
-fi])
-if test $fp_cv_prog_ar_supports_input = yes; then
- ArSupportsInput="-input"
-else
- ArSupportsInput=""
-fi
-AC_SUBST([ArSupportsInput])
-])# FP_PROG_AR_SUPPORTS_INPUT
-
-
dnl
dnl AC_SHEBANG_PERL - can we she-bang perl?
dnl
])])
-# FP_HAVE_GCC
+# FP_GCC_VERSION
# -----------
# Extra testing of the result AC_PROG_CC, testing the gcc version no. Sets the
-# output variables HaveGcc and GccVersion.
-AC_DEFUN([FP_HAVE_GCC],
+# output variable GccVersion.
+AC_DEFUN([FP_GCC_VERSION],
[AC_REQUIRE([AC_PROG_CC])
-if test -z "$GCC"; then
- fp_have_gcc=NO
-else
- fp_have_gcc=YES
-fi
-if test "$fp_have_gcc" = "NO" -a -d $srcdir/ghc; then
+if test -z "$GCC"
+then
AC_MSG_ERROR([gcc is required])
fi
GccLT34=
AC_CACHE_CHECK([version of gcc], [fp_cv_gcc_version],
-[if test "$fp_have_gcc" = "YES"; then
- fp_cv_gcc_version="`$CC -v 2>&1 | grep 'version ' | sed -e 's/.*version [[^0-9]]*\([[0-9.]]*\).*/\1/g'`"
- FP_COMPARE_VERSIONS([$fp_cv_gcc_version], [-lt], [3.0],
- [AC_MSG_ERROR([Need at least gcc version 3.0 (3.4+ recommended)])])
- # See #2770: gcc 2.95 doesn't work any more, apparently. There probably
- # isn't a very good reason for that, but for now just make configure
- # fail.
- FP_COMPARE_VERSIONS([$fp_cv_gcc_version], [-lt], [3.4], GccLT34=YES)
- else
- fp_cv_gcc_version="not-installed"
- fi
+[
+ fp_cv_gcc_version="`$CC -v 2>&1 | grep 'version ' | sed -e 's/.*version [[^0-9]]*\([[0-9.]]*\).*/\1/g'`"
+ FP_COMPARE_VERSIONS([$fp_cv_gcc_version], [-lt], [3.0],
+ [AC_MSG_ERROR([Need at least gcc version 3.0 (3.4+ recommended)])])
+ # See #2770: gcc 2.95 doesn't work any more, apparently. There probably
+ # isn't a very good reason for that, but for now just make configure
+ # fail.
+ FP_COMPARE_VERSIONS([$fp_cv_gcc_version], [-lt], [3.4], GccLT34=YES)
])
-AC_SUBST([HaveGcc], [$fp_have_gcc])
AC_SUBST([GccVersion], [$fp_cv_gcc_version])
AC_SUBST(GccLT34)
-])# FP_HAVE_GCC
+])# FP_GCC_VERSION
dnl Small feature test for perl version. Assumes PerlCmd
dnl contains path to perl binary.
# integer wrap around. (Trac #952)
#
AC_DEFUN([FP_GCC_EXTRA_FLAGS],
-[AC_REQUIRE([FP_HAVE_GCC])
+[AC_REQUIRE([FP_GCC_VERSION])
AC_CACHE_CHECK([for extra options to pass gcc when compiling via C], [fp_cv_gcc_extra_opts],
[fp_cv_gcc_extra_opts=
FP_COMPARE_VERSIONS([$fp_cv_gcc_version], [-ge], [3.4],
esac
])
+# BOOTSTRAPPING_GHC_INFO_FIELD
+# --------------------------------
+# If the bootstrapping compiler is >= 7.1, then set the variable
+# $1 to the value of the ghc --info field $2. Otherwise, set it to
+# $3.
+AC_DEFUN([BOOTSTRAPPING_GHC_INFO_FIELD],[
+if test $GhcCanonVersion -ge 701
+then
+ $1=`"$WithGhc" --info | grep "^ ,(\"$2\"," | sed -e 's/.*","//' -e 's/")$//'`
+else
+ $1=$3
+fi
+AC_SUBST($1)
+])
+
# LIBRARY_VERSION(lib)
# --------------------------------
# Gets the version number of a library.
use strict;
use Cwd;
+use File::Path 'rmtree';
+use File::Basename;
my %required_tag;
my $validate;
+my $curdir;
$required_tag{"-"} = 1;
$validate = 0;
+$curdir = &cwd()
+ or die "Can't find current directory: $!";
+
while ($#ARGV ne -1) {
my $arg = shift @ARGV;
}
}
-{
+sub sanity_check_line_endings {
local $/ = undef;
open FILE, "packages" or die "Couldn't open file: $!";
binmode FILE;
}
}
-# Create libraries/*/{ghc.mk,GNUmakefile}
-system("/usr/bin/perl", "-w", "boot-pkgs") == 0
- or die "Running boot-pkgs failed: $?";
+sub sanity_check_tree {
+ my $tag;
+ my $dir;
-my $tag;
-my $dir;
-my $curdir;
+ # Check that we have all boot packages.
+ open PACKAGES, "< packages";
+ while (<PACKAGES>) {
+ if (/^#/) {
+ # Comment; do nothing
+ }
+ elsif (/^([a-zA-Z0-9\/.-]+) +([^ ]+) +[^ ]+ +[^ ]+ +[^ ]+$/) {
+ $dir = $1;
+ $tag = $2;
+
+ # If $tag is not "-" then it is an optional repository, so its
+ # absence isn't an error.
+ if (defined($required_tag{$tag})) {
+ # We would like to just check for a .git directory here,
+ # but in an lndir tree we avoid making .git directories,
+ # so it doesn't exist. We therefore require that every repo
+ # has a LICENSE file instead.
+ if (! -f "$dir/LICENSE") {
+ print STDERR "Error: $dir/LICENSE doesn't exist.\n";
+ die "Maybe you haven't done './sync-all get'?";
+ }
+ }
+ }
+ else {
+ die "Bad line in packages file: $_";
+ }
+ }
+ close PACKAGES;
+}
-$curdir = &cwd()
- or die "Can't find current directory: $!";
+# Create libraries/*/{ghc.mk,GNUmakefile}
+sub boot_pkgs {
+ my @library_dirs = ();
+ my @tarballs = glob("libraries/tarballs/*");
+
+ my $tarball;
+ my $package;
+ my $stamp;
+
+ for $tarball (@tarballs) {
+ $package = $tarball;
+ $package =~ s#^libraries/tarballs/##;
+ $package =~ s/-[0-9.]*(-snapshot)?\.tar\.gz$//;
+
+ # Sanity check, so we don't rmtree the wrong thing below
+ if (($package eq "") || ($package =~ m#[/.\\]#)) {
+ die "Bad package name: $package";
+ }
-# Check that we have all boot packages.
-open PACKAGES, "< packages";
-while (<PACKAGES>) {
- if (/^#/) {
- # Comment; do nothing
+ if (-d "libraries/$package/_darcs") {
+ print "Ignoring libraries/$package as it looks like a darcs checkout\n"
+ }
+ elsif (-d "libraries/$package/.git") {
+ print "Ignoring libraries/$package as it looks like a git checkout\n"
+ }
+ else {
+ if (! -d "libraries/stamp") {
+ mkdir "libraries/stamp";
+ }
+ $stamp = "libraries/stamp/$package";
+ if ((! -d "libraries/$package") || (! -f "$stamp")
+ || ((-M "libraries/stamp/$package") > (-M $tarball))) {
+ print "Unpacking $package\n";
+ if (-d "libraries/$package") {
+ &rmtree("libraries/$package")
+ or die "Can't remove libraries/$package: $!";
+ }
+ mkdir "libraries/$package"
+ or die "Can't create libraries/$package: $!";
+ system ("sh", "-c", "cd 'libraries/$package' && { cat ../../$tarball | gzip -d | tar xf - ; } && mv */* .") == 0
+ or die "Failed to unpack $package";
+ open STAMP, "> $stamp"
+ or die "Failed to open stamp file: $!";
+ close STAMP
+ or die "Failed to close stamp file: $!";
+ }
+ }
}
- elsif (/^([a-zA-Z0-9\/.-]+) +([^ ]+) +[^ ]+ +[^ ]+ +[^ ]+$/) {
- $dir = $1;
- $tag = $2;
-
- # If $tag is not "-" then it is an optional repository, so its
- # absence isn't an error.
- if (defined($required_tag{$tag})) {
- # We would like to just check for a .git directory here,
- # but in an lndir tree we avoid making .git directories,
- # so it doesn't exist. We therefore require that every repo
- # has a LICENSE file instead.
- if (! -f "$dir/LICENSE") {
- print STDERR "Error: $dir/LICENSE doesn't exist.\n";
- die "Maybe you haven't done './sync-all get'?";
+
+ for $package (glob "libraries/*/") {
+ $package =~ s/\/$//;
+ my $pkgs = "$package/ghc-packages";
+ if (-f $pkgs) {
+ open PKGS, "< $pkgs"
+ or die "Failed to open $pkgs: $!";
+ while (<PKGS>) {
+ chomp;
+ s/\r//g;
+ if (/.+/) {
+ push @library_dirs, "$package/$_";
+ }
}
}
+ else {
+ push @library_dirs, $package;
+ }
}
- else {
- die "Bad line in packages file: $_";
+
+ for $package (@library_dirs) {
+ my $dir = &basename($package);
+ my @cabals = glob("$package/*.cabal");
+ if ($#cabals > 0) {
+ die "Too many .cabal file in $package\n";
+ }
+ if ($#cabals eq 0) {
+ my $cabal = $cabals[0];
+ my $pkg;
+ my $top;
+ if (-f $cabal) {
+ $pkg = $cabal;
+ $pkg =~ s#.*/##;
+ $pkg =~ s/\.cabal$//;
+ $top = $package;
+ $top =~ s#[^/]+#..#g;
+ $dir = $package;
+ $dir =~ s#^libraries/##g;
+
+ print "Creating $package/ghc.mk\n";
+ open GHCMK, "> $package/ghc.mk"
+ or die "Opening $package/ghc.mk failed: $!";
+ print GHCMK "${package}_PACKAGE = ${pkg}\n";
+ print GHCMK "${package}_dist-install_GROUP = libraries\n";
+ print GHCMK "\$(eval \$(call build-package,${package},dist-install,\$(if \$(filter ${dir},\$(STAGE2_PACKAGES)),2,1)))\n";
+ close GHCMK
+ or die "Closing $package/ghc.mk failed: $!";
+
+ print "Creating $package/GNUmakefile\n";
+ open GNUMAKEFILE, "> $package/GNUmakefile"
+ or die "Opening $package/GNUmakefile failed: $!";
+ print GNUMAKEFILE "dir = ${package}\n";
+ print GNUMAKEFILE "TOP = ${top}\n";
+ print GNUMAKEFILE "include \$(TOP)/mk/sub-makefile.mk\n";
+ print GNUMAKEFILE "FAST_MAKE_OPTS += stage=0\n";
+ close GNUMAKEFILE
+ or die "Closing $package/GNUmakefile failed: $!";
+ }
+ }
}
}
-close PACKAGES;
# autoreconf everything that needs it.
-foreach $dir (".", glob("libraries/*/")) {
- if (-f "$dir/configure.ac") {
- print "Booting $dir\n";
- chdir $dir or die "can't change to $dir: $!";
- system("autoreconf") == 0
- or die "Running autoreconf failed with exitcode $?";
- chdir $curdir or die "can't change to $curdir: $!";
+sub autoreconf {
+ my $dir;
+
+ foreach $dir (".", glob("libraries/*/")) {
+ if (-f "$dir/configure.ac") {
+ print "Booting $dir\n";
+ chdir $dir or die "can't change to $dir: $!";
+ system("autoreconf") == 0
+ or die "Running autoreconf failed with exitcode $?";
+ chdir $curdir or die "can't change to $curdir: $!";
+ }
}
}
-if ($validate eq 0 && ! -f "mk/build.mk") {
- print <<EOF;
+sub checkBuildMk {
+ if ($validate eq 0 && ! -f "mk/build.mk") {
+ print <<EOF;
WARNING: You don't have a mk/build.mk file.
http://hackage.haskell.org/trac/ghc/wiki/Building/Using#Buildconfiguration
EOF
+ }
}
+&sanity_check_line_endings();
+&sanity_check_tree();
+&boot_pkgs();
+&autoreconf();
+&checkBuildMk();
+
+++ /dev/null
-#!/usr/bin/perl -w
-
-use strict;
-
-use File::Path 'rmtree';
-use File::Basename;
-
-my @library_dirs = ();
-my @tarballs = glob("libraries/tarballs/*");
-
-my $tarball;
-my $package;
-my $stamp;
-
-for $tarball (@tarballs) {
- $package = $tarball;
- $package =~ s#^libraries/tarballs/##;
- $package =~ s/-[0-9.]*(-snapshot)?\.tar\.gz$//;
-
- # Sanity check, so we don't rmtree the wrong thing below
- if (($package eq "") || ($package =~ m#[/.\\]#)) {
- die "Bad package name: $package";
- }
-
- if (-d "libraries/$package/_darcs") {
- print "Ignoring libraries/$package as it looks like a darcs checkout\n"
- }
- elsif (-d "libraries/$package/.git") {
- print "Ignoring libraries/$package as it looks like a git checkout\n"
- }
- else {
- if (! -d "libraries/stamp") {
- mkdir "libraries/stamp";
- }
- $stamp = "libraries/stamp/$package";
- if ((! -d "libraries/$package") || (! -f "$stamp")
- || ((-M "libraries/stamp/$package") > (-M $tarball))) {
- print "Unpacking $package\n";
- if (-d "libraries/$package") {
- &rmtree("libraries/$package")
- or die "Can't remove libraries/$package: $!";
- }
- mkdir "libraries/$package"
- or die "Can't create libraries/$package: $!";
- system ("sh", "-c", "cd 'libraries/$package' && { cat ../../$tarball | gzip -d | tar xf - ; } && mv */* .") == 0
- or die "Failed to unpack $package";
- open STAMP, "> $stamp"
- or die "Failed to open stamp file: $!";
- close STAMP
- or die "Failed to close stamp file: $!";
- }
- }
-}
-
-for $package (glob "libraries/*/") {
- $package =~ s/\/$//;
- my $pkgs = "$package/ghc-packages";
- if (-f $pkgs) {
- open PKGS, "< $pkgs"
- or die "Failed to open $pkgs: $!";
- while (<PKGS>) {
- chomp;
- s/\r//g;
- if (/.+/) {
- push @library_dirs, "$package/$_";
- }
- }
- }
- else {
- push @library_dirs, $package;
- }
-}
-
-for $package (@library_dirs) {
- my $dir = &basename($package);
- my @cabals = glob("$package/*.cabal");
- if ($#cabals > 0) {
- die "Too many .cabal file in $package\n";
- }
- if ($#cabals eq 0) {
- my $cabal = $cabals[0];
- my $pkg;
- my $top;
- if (-f $cabal) {
- $pkg = $cabal;
- $pkg =~ s#.*/##;
- $pkg =~ s/\.cabal$//;
- $top = $package;
- $top =~ s#[^/]+#..#g;
- $dir = $package;
- $dir =~ s#^libraries/##g;
-
- print "Creating $package/ghc.mk\n";
- open GHCMK, "> $package/ghc.mk"
- or die "Opening $package/ghc.mk failed: $!";
- print GHCMK "${package}_PACKAGE = ${pkg}\n";
- print GHCMK "${package}_dist-install_GROUP = libraries\n";
- print GHCMK "\$(eval \$(call build-package,${package},dist-install,\$(if \$(filter ${dir},\$(STAGE2_PACKAGES)),2,1)))\n";
- close GHCMK
- or die "Closing $package/ghc.mk failed: $!";
-
- print "Creating $package/GNUmakefile\n";
- open GNUMAKEFILE, "> $package/GNUmakefile"
- or die "Opening $package/GNUmakefile failed: $!";
- print GNUMAKEFILE "dir = ${package}\n";
- print GNUMAKEFILE "TOP = ${top}\n";
- print GNUMAKEFILE "include \$(TOP)/mk/sub-makefile.mk\n";
- print GNUMAKEFILE "FAST_MAKE_OPTS += stage=0\n";
- close GNUMAKEFILE
- or die "Closing $package/GNUmakefile failed: $!";
- }
- }
-}
-
+++ /dev/null
-# Local GHC-build-tree customization for Cabal makefiles. We want to build
-# libraries using flags that the user has put in build.mk/validate.mk and
-# appropriate flags for Mac OS X deployment targets.
-
-# Careful here: including boilerplate.mk breaks things, because paths.mk and
-# opts.mk overrides some of the variable settings in the Cabal Makefile, so
-# we just include config.mk and custom-settings.mk.
-TOP=..
-SAVE_GHC := $(GHC)
-SAVE_AR := $(AR)
-SAVE_LD := $(LD)
-include $(TOP)/mk/config.mk
-include $(TOP)/mk/custom-settings.mk
-GHC := $(SAVE_GHC)
-AR := $(SAVE_AR)
-LD := $(SAVE_LD)
-
-# Now add flags from the GHC build system to the Cabal build:
-GHC_CC_OPTS += $(addprefix -optc, $(MACOSX_DEPLOYMENT_CC_OPTS))
-GHC_OPTS += $(SRC_HC_OPTS)
-GHC_OPTS += $(GhcHcOpts)
-GHC_OPTS += $(GhcStage$(stage)HcOpts)
-GHC_OPTS += $(addprefix -optc, $(MACOSX_DEPLOYMENT_CC_OPTS))
-LIB_LD_OPTS += $(addprefix -optl, $(MACOSX_DEPLOYMENT_LD_OPTS))
-
-# XXX These didn't work in the old build system, according to the
-# comment at least. We should actually handle them properly at some
-# point:
-
-# Some .hs files #include other source files, but since ghc -M doesn't spit out
-# these dependencies we have to include them manually.
-
-# We don't add dependencies on HsVersions.h, ghcautoconf.h, or ghc_boot_platform.h,
-# because then modifying one of these files would force recompilation of everything,
-# which is probably not what you want. However, it does mean you have to be
-# careful to recompile stuff you need if you reconfigure or change HsVersions.h.
-
-# Aargh, these don't work properly anyway, because GHC's recompilation checker
-# just reports "compilation NOT required". Do we have to add -fforce-recomp for each
-# of these .hs files? I haven't done anything about this yet.
-
-# $(odir)/codeGen/Bitmap.$(way_)o : ../includes/MachDeps.h
-# $(odir)/codeGen/CgCallConv.$(way_)o : ../includes/StgFun.h
-# $(odir)/codeGen/CgProf.$(way_)o : ../includes/MachDeps.h
-# $(odir)/codeGen/CgProf.$(way_)o : ../includes/Constants.h
-# $(odir)/codeGen/CgProf.$(way_)o : ../includes/DerivedConstants.h
-# $(odir)/codeGen/CgTicky.$(way_)o : ../includes/DerivedConstants.h
-# $(odir)/codeGen/ClosureInfo.$(way_)o : ../includes/MachDeps.h
-# $(odir)/codeGen/SMRep.$(way_)o : ../includes/MachDeps.h
-# $(odir)/codeGen/SMRep.$(way_)o : ../includes/ClosureTypes.h
-# $(odir)/ghci/ByteCodeAsm.$(way_)o : ../includes/Bytecodes.h
-# $(odir)/ghci/ByteCodeFFI.$(way_)o : nativeGen/NCG.h
-# $(odir)/ghci/ByteCodeInstr.$(way_)o : ../includes/MachDeps.h
-# $(odir)/ghci/ByteCodeItbls.$(way_)o : ../includes/ClosureTypes.h
-# $(odir)/ghci/ByteCodeItbls.$(way_)o : nativeGen/NCG.h
-# $(odir)/main/Constants.$(way_)o : ../includes/MachRegs.h
-# $(odir)/main/Constants.$(way_)o : ../includes/Constants.h
-# $(odir)/main/Constants.$(way_)o : ../includes/MachDeps.h
-# $(odir)/main/Constants.$(way_)o : ../includes/DerivedConstants.h
-# $(odir)/main/Constants.$(way_)o : ../includes/GHCConstants.h
-# $(odir)/nativeGen/AsmCodeGen.$(way_)o : nativeGen/NCG.h
-# $(odir)/nativeGen/MachCodeGen.$(way_)o : nativeGen/NCG.h
-# $(odir)/nativeGen/MachCodeGen.$(way_)o : ../includes/MachDeps.h
-# $(odir)/nativeGen/MachInstrs.$(way_)o : nativeGen/NCG.h
-# $(odir)/nativeGen/MachRegs.$(way_)o : nativeGen/NCG.h
-# $(odir)/nativeGen/MachRegs.$(way_)o : ../includes/MachRegs.h
-# $(odir)/nativeGen/PositionIndependentCode.$(way_)o : nativeGen/NCG.h
-# $(odir)/nativeGen/PprMach.$(way_)o : nativeGen/NCG.h
-# $(odir)/nativeGen/RegAllocInfo.$(way_)o : nativeGen/NCG.h
-# $(odir)/typecheck/TcForeign.$(way_)o : nativeGen/NCG.h
-# $(odir)/utils/Binary.$(way_)o : ../includes/MachDeps.h
-# $(odir)/utils/FastMutInt.$(way_)o : ../includes/MachDeps.h
-# $(PRIMOP_BITS) is defined in Makefile
-# $(odir)/prelude/PrimOp.o: $(PRIMOP_BITS)
-
\begin{code}
-- | A ModuleName is essentially a simple string, e.g. @Data.List@.
newtype ModuleName = ModuleName FastString
+ deriving Typeable
instance Uniquable ModuleName where
getUnique (ModuleName nm) = getUnique nm
put_ bh (ModuleName fs) = put_ bh fs
get bh = do fs <- get bh; return (ModuleName fs)
-INSTANCE_TYPEABLE0(ModuleName,moduleNameTc,"ModuleName")
-
instance Data ModuleName where
-- don't traverse?
toConstr _ = abstractConstr "ModuleName"
modulePackageId :: !PackageId, -- pkg-1.0
moduleName :: !ModuleName -- A.B.C
}
- deriving (Eq, Ord)
+ deriving (Eq, Ord, Typeable)
instance Uniquable Module where
getUnique (Module p n) = getUnique (packageIdFS p `appendFS` moduleNameFS n)
put_ bh (Module p n) = put_ bh p >> put_ bh n
get bh = do p <- get bh; n <- get bh; return (Module p n)
-INSTANCE_TYPEABLE0(Module,moduleTc,"Module")
-
instance Data Module where
-- don't traverse?
toConstr _ = abstractConstr "Module"
\begin{code}
-- | Essentially just a string identifying a package, including the version: e.g. parsec-1.0
-newtype PackageId = PId FastString deriving( Eq )
+newtype PackageId = PId FastString deriving( Eq, Typeable )
-- here to avoid module loops with PackageConfig
instance Uniquable PackageId where
instance Ord PackageId where
nm1 `compare` nm2 = getUnique nm1 `compare` getUnique nm2
-INSTANCE_TYPEABLE0(PackageId,packageIdTc,"PackageId")
-
instance Data PackageId where
-- don't traverse?
toConstr _ = abstractConstr "PackageId"
--(note later when changing Int# -> FastInt: is that still true about UNPACK?)
n_loc :: !SrcSpan -- Definition site
}
+ deriving Typeable
-- NOTE: we make the n_loc field strict to eliminate some potential
-- (and real!) space leaks, due to the fact that we don't look at
instance NamedThing Name where
getName n = n
-INSTANCE_TYPEABLE0(Name,nameTc,"Name")
-
instance Data Name where
-- don't traverse?
toConstr _ = abstractConstr "Name"
\begin{code}
type NameSet = UniqSet Name
-INSTANCE_TYPEABLE0(NameSet,nameSetTc,"NameSet")
+-- TODO: These Data/Typeable instances look very dubious. Surely either
+-- UniqFM should have the instances, or this should be a newtype?
+
+nameSetTc :: TyCon
+nameSetTc = mkTyCon "NameSet"
+instance Typeable NameSet where { typeOf _ = mkTyConApp nameSetTc [] }
instance Data NameSet where
gfoldl k z s = z mkNameSet `k` nameSetToList s -- traverse abstractly
get (Just d1, _u1) d2 = d1 `unionNameSets` d2
allUses :: DefUses -> Uses
--- ^ Just like 'allUses', but 'Defs' are not eliminated from the 'Uses' returned
+-- ^ Just like 'duUses', but 'Defs' are not eliminated from the 'Uses' returned
allUses dus = foldr get emptyNameSet dus
where
get (_d1, u1) u2 = u1 `unionNameSets` u2
duUses :: DefUses -> Uses
-- ^ Collect all 'Uses', regardless of whether the group is itself used,
-- but remove 'Defs' on the way
-duUses dus
- = foldr get emptyNameSet dus
+duUses dus = foldr get emptyNameSet dus
where
get (Nothing, rhs_uses) uses = rhs_uses `unionNameSets` uses
get (Just defs, rhs_uses) uses = (rhs_uses `unionNameSets` uses)
{ occNameSpace :: !NameSpace
, occNameFS :: !FastString
}
+ deriving Typeable
\end{code}
compare (OccName sp1 s1) (OccName sp2 s2)
= (s1 `compare` s2) `thenCmp` (sp1 `compare` sp2)
-INSTANCE_TYPEABLE0(OccName,occNameTc,"OccName")
-
instance Data OccName where
-- don't traverse?
toConstr _ = abstractConstr "OccName"
ppr (UnhelpfulLoc s) = ftext s
-INSTANCE_TYPEABLE0(SrcSpan,srcSpanTc,"SrcSpan")
-
instance Data SrcSpan where
-- don't traverse?
toConstr _ = abstractConstr "SrcSpan"
-- also used to indicate an empty span
#ifdef DEBUG
- deriving (Eq, Show) -- Show is used by Lexer.x, becuase we
- -- derive Show for Token
+ deriving (Eq, Typeable, Show) -- Show is used by Lexer.x, because we
+ -- derive Show for Token
#else
- deriving Eq
+ deriving (Eq, Typeable)
#endif
-- | Built-in "bad" 'SrcSpan's for common sources of location uncertainty
idScope :: IdScope,
id_details :: IdDetails, -- Stable, doesn't change
id_info :: IdInfo } -- Unstable, updated by simplifier
+ deriving Typeable
data IdScope -- See Note [GlobalId/LocalId]
= GlobalId
a > b = realUnique a ># realUnique b
a `compare` b = varUnique a `compare` varUnique b
-INSTANCE_TYPEABLE0(Var,varTc,"Var")
-
instance Data Var where
-- don't traverse?
toConstr _ = abstractConstr "Var"
--------------- Stack layout ----------------
slotEnv <- run $ liveSlotAnal g
+ let spEntryMap = getSpEntryMap entry_off g
mbpprTrace "live slot analysis results: " (ppr slotEnv) $ return ()
- let areaMap = layout procPoints slotEnv entry_off g
+ let areaMap = layout procPoints spEntryMap slotEnv entry_off g
mbpprTrace "areaMap" (ppr areaMap) $ return ()
------------ Manifest the stack pointer --------
- g <- run $ manifestSP areaMap entry_off g
+ g <- run $ manifestSP spEntryMap areaMap entry_off g
dump Opt_D_dump_cmmz "after manifestSP" g
-- UGH... manifestSP can require updates to the procPointMap.
-- We can probably do something quicker here for the update...
checkCond :: CmmExpr -> CmmLint ()
checkCond (CmmMachOp mop _) | isComparisonMachOp mop = return ()
+checkCond (CmmLit (CmmInt x t)) | x == 0 || x == 1, t == wordWidth = return () -- constant values
checkCond expr = cmmLintErr (hang (text "expression is not a conditional:") 2
(ppr expr))
| NAME '(' exprs0 ')' ';'
{% stmtMacro $1 $3 }
| 'switch' maybe_range expr '{' arms default '}'
- { doSwitch $2 $3 $5 $6 }
+ { do as <- sequence $5; doSwitch $2 $3 as $6 }
| 'goto' NAME ';'
{ do l <- lookupLabel $2; stmtEC (CmmBranch l) }
| 'jump' expr maybe_actuals ';'
{ do e1 <- $2; e2 <- sequence $3; stmtEC (CmmJump e1 e2) }
| 'return' maybe_actuals ';'
{ do e <- sequence $2; stmtEC (CmmReturn e) }
+ | 'if' bool_expr 'goto' NAME
+ { do l <- lookupLabel $4; cmmRawIf $2 l }
| 'if' bool_expr '{' body '}' else
{ cmmIfThenElse $2 $4 $6 }
: '[' INT '..' INT ']' { Just (fromIntegral $2, fromIntegral $4) }
| {- empty -} { Nothing }
-arms :: { [([Int],ExtCode)] }
+arms :: { [ExtFCode ([Int],Either BlockId ExtCode)] }
: {- empty -} { [] }
| arm arms { $1 : $2 }
-arm :: { ([Int],ExtCode) }
- : 'case' ints ':' '{' body '}' { ($2, $5) }
+arm :: { ExtFCode ([Int],Either BlockId ExtCode) }
+ : 'case' ints ':' arm_body { do b <- $4; return ($2, b) }
+
+arm_body :: { ExtFCode (Either BlockId ExtCode) }
+ : '{' body '}' { return (Right $2) }
+ | 'goto' NAME ';' { do l <- lookupLabel $2; return (Left l) }
ints :: { [Int] }
: INT { [ fromIntegral $1 ] }
-- 'default' branches
| {- empty -} { Nothing }
+-- Note: OldCmm doesn't support a first-class 'else' statement, though
+-- CmmNode does.
else :: { ExtCode }
: {- empty -} { nopEC }
| 'else' '{' body '}' { $3 }
-- fall through to join
code (labelC join_id)
+cmmRawIf cond then_id = do
+ c <- cond
+ emitCond c then_id
+
-- 'emitCond cond true_id' emits code to test whether the cond is true,
-- branching to true_id if so, and falling through otherwise.
emitCond (BoolTest e) then_id = do
-- optional range on the switch (eg. switch [0..7] {...}), or by
-- the minimum/maximum values from the branches.
-doSwitch :: Maybe (Int,Int) -> ExtFCode CmmExpr -> [([Int],ExtCode)]
+doSwitch :: Maybe (Int,Int) -> ExtFCode CmmExpr -> [([Int],Either BlockId ExtCode)]
-> Maybe ExtCode -> ExtCode
doSwitch mb_range scrut arms deflt
= do
-- ToDo: check for out of range and jump to default if necessary
stmtEC (CmmSwitch expr entries)
where
- emitArm :: ([Int],ExtCode) -> ExtFCode [(Int,BlockId)]
- emitArm (ints,code) = do
+ emitArm :: ([Int],Either BlockId ExtCode) -> ExtFCode [(Int,BlockId)]
+ emitArm (ints,Left blockid) = return [ (i,blockid) | i <- ints ]
+ emitArm (ints,Right code) = do
blockid <- forkLabelledCodeEC code
return [ (i,blockid) | i <- ints ]
-
-- -----------------------------------------------------------------------------
-- Putting it all together
-- 4. build info tables for the procedures -- and update the info table for
-- the SRTs in the entry procedure as well.
-- Input invariant: A block should only be reachable from a single ProcPoint.
+-- ToDo: use the _ret naming convention that the old code generator
+-- used. -- EZY
splitAtProcPoints :: CLabel -> ProcPointSet-> ProcPointSet -> BlockEnv Status ->
CmmTop -> FuelUniqSM [CmmTop]
splitAtProcPoints entry_label callPPs procPoints procMap
module CmmStackLayout
( SlotEnv, liveSlotAnal, liveSlotTransfers, removeLiveSlotDefs
- , layout, manifestSP, igraph, areaBuilder
+ , getSpEntryMap, layout, manifestSP, igraph, areaBuilder
, stubSlotsOnDeath ) -- to help crash early during debugging
where
type Set x = Map x ()
data IGraphBuilder n =
Builder { foldNodes :: forall z. SubArea -> (n -> z -> z) -> z -> z
- , _wordsOccupied :: AreaMap -> AreaMap -> n -> [Int]
+ , _wordsOccupied :: AreaSizeMap -> AreaMap -> n -> [Int]
}
areaBuilder :: IGraphBuilder Area
-- what's the highest offset (in bytes) used in each Area?
-- We'll need to allocate that much space for each Area.
+-- Mapping of areas to area sizes (not offsets!)
+type AreaSizeMap = AreaMap
+
-- JD: WHY CAN'T THIS COME FROM THE slot-liveness info?
-getAreaSize :: ByteOff -> CmmGraph -> AreaMap
+getAreaSize :: ByteOff -> CmmGraph -> AreaSizeMap
-- The domain of the returned mapping consists only of Areas
- -- used for (a) variable spill slots, and (b) parameter passing ares for calls
+ -- used for (a) variable spill slots, and (b) parameter passing areas for calls
getAreaSize entry_off g =
foldGraphBlocks (foldBlockNodesF3 (first, add_regslots, last))
(Map.singleton (CallArea Old) entry_off) g
-- The 'max' is important. Two calls, to f and g, might share a common
-- continuation (and hence a common CallArea), but their number of overflow
-- parameters might differ.
+ -- EZY: Ought to use insert with combining function...
-- Find the Stack slots occupied by the subarea's conflicts
-conflictSlots :: Ord x => IGPair x -> AreaMap -> AreaMap -> SubArea -> Set Int
+conflictSlots :: Ord x => IGPair x -> AreaSizeMap -> AreaMap -> SubArea -> Set Int
conflictSlots (ig, Builder foldNodes wordsOccupied) areaSize areaMap subarea =
foldNodes subarea foldNode Map.empty
where foldNode n set = Map.foldRightWithKey conflict set $ Map.findWithDefault Map.empty n ig
liveInSlots areaMap n set = foldr setAdd set (wordsOccupied areaSize areaMap n)
setAdd w s = Map.insert w () s
--- Find any open space on the stack, starting from the offset.
--- If the area is a CallArea or a spill slot for a pointer, then it must
--- be word-aligned.
-freeSlotFrom :: Ord x => IGPair x -> AreaMap -> Int -> AreaMap -> Area -> Int
+-- Find any open space for 'area' on the stack, starting from the
+-- 'offset'. If the area is a CallArea or a spill slot for a pointer,
+-- then it must be word-aligned.
+freeSlotFrom :: Ord x => IGPair x -> AreaSizeMap -> Int -> AreaMap -> Area -> Int
freeSlotFrom ig areaSize offset areaMap area =
let size = Map.lookup area areaSize `orElse` 0
conflicts = conflictSlots ig areaSize areaMap (area, size, size)
in findSpace (align (offset + size)) size
-- Find an open space on the stack, and assign it to the area.
-allocSlotFrom :: Ord x => IGPair x -> AreaMap -> Int -> AreaMap -> Area -> AreaMap
+allocSlotFrom :: Ord x => IGPair x -> AreaSizeMap -> Int -> AreaMap -> Area -> AreaMap
allocSlotFrom ig areaSize from areaMap area =
if Map.member area areaMap then areaMap
else Map.insert area (freeSlotFrom ig areaSize from areaMap area) areaMap
+-- Figure out all of the offsets from the slot location; this will be
+-- non-zero for procpoints.
+type SpEntryMap = BlockEnv Int
+getSpEntryMap :: Int -> CmmGraph -> SpEntryMap
+getSpEntryMap entry_off g@(CmmGraph {g_entry = entry})
+ = foldGraphBlocks add_sp_off (mapInsert entry entry_off emptyBlockMap) g
+ where add_sp_off :: CmmBlock -> BlockEnv Int -> BlockEnv Int
+ add_sp_off b env =
+ case lastNode b of
+ CmmCall {cml_cont=Just succ, cml_ret_args=off} -> mapInsert succ off env
+ CmmForeignCall {succ=succ} -> mapInsert succ wORD_SIZE env
+ _ -> env
+
-- | Greedy stack layout.
-- Compute liveness, build the interference graph, and allocate slots for the areas.
-- We visit each basic block in a (generally) forward order.
-- Note: The stack pointer only has to be younger than the youngest live stack slot
-- at proc points. Otherwise, the stack pointer can point anywhere.
-layout :: ProcPointSet -> SlotEnv -> ByteOff -> CmmGraph -> AreaMap
+layout :: ProcPointSet -> SpEntryMap -> SlotEnv -> ByteOff -> CmmGraph -> AreaMap
-- The domain of the returned map includes an Area for EVERY block
-- including each block that is not the successor of a call (ie is not a proc-point)
--- That's how we return the info of what the SP should be at the entry of every block
+-- That's how we return the info of what the SP should be at the entry of every non-
+-- procpoint block. However, note that procpoint blocks have their
+-- /slot/ stored, which is not necessarily the value of the SP on entry
+-- to the block (in fact, it probably isn't, due to argument passing).
+-- See [Procpoint Sp offset]
-layout procPoints env entry_off g =
+layout procPoints spEntryMap env entry_off g =
let ig = (igraph areaBuilder env g, areaBuilder)
env' bid = mapLookup bid env `orElse` panic "unknown blockId in igraph"
areaSize = getAreaSize entry_off g
allocMid m areaMap = foldSlotsDefd alloc' (foldSlotsUsed alloc' areaMap m) m
allocLast bid l areaMap =
foldr (setSuccSPs inSp) areaMap' (successors l)
- where inSp = expectJust "sp in" $ Map.lookup (CallArea (Young bid)) areaMap
+ where inSp = slot + spOffset -- [Procpoint Sp offset]
+ -- If it's not in the map, we should use our previous
+ -- calculation unchanged.
+ spOffset = mapLookup bid spEntryMap `orElse` 0
+ slot = expectJust "slot in" $ Map.lookup (CallArea (Young bid)) areaMap
areaMap' = foldSlotsDefd alloc' (foldSlotsUsed alloc' areaMap l) l
alloc' areaMap (a@(RegSlot _), _, _) = allocVarSlot areaMap a
alloc' areaMap _ = areaMap
- initMap = Map.insert (CallArea (Young (g_entry g))) 0 $
- Map.insert (CallArea Old) 0 Map.empty
-
+ initMap = Map.insert (CallArea (Young (g_entry g))) 0
+ . Map.insert (CallArea Old) 0
+ $ Map.empty
+
areaMap = foldl layoutAreas initMap (postorderDfs g)
in -- pprTrace "ProcPoints" (ppr procPoints) $
- -- pprTrace "Area SizeMap" (ppr areaSize) $
- -- pprTrace "Entry SP" (ppr entrySp) $
- -- pprTrace "Area Map" (ppr areaMap) $
+ -- pprTrace "Area SizeMap" (ppr areaSize) $
+ -- pprTrace "Entry offset" (ppr entry_off) $
+ -- pprTrace "Area Map" (ppr areaMap) $
areaMap
+{- Note [Procpoint Sp offset]
+
+The calculation of inSp is a little tricky. (Un)fortunately, if you get
+it wrong, you will get inefficient but correct code. You know you've
+got it wrong if the generated stack pointer bounces up and down for no
+good reason.
+
+Why can't we just set inSp to the location of the slot? (This is what
+the code used to do.) The trouble is when we actually hit the proc
+point the start of the slot will not be the same as the actual Sp due
+to argument passing:
+
+ a:
+ I32[(young<b> + 4)] = cde;
+ // Stack pointer is moved to young end (bottom) of young<b> for call
+ // +-------+
+ // | arg 1 |
+ // +-------+ <- Sp
+ call (I32[foobar::I32])(...) returns to Just b (4) (4) with update frame 4;
+ b:
+ // After call, stack pointer is above the old end (top) of
+ // young<b> (the difference is spOffset)
+ // +-------+ <- Sp
+ // | arg 1 |
+ // +-------+
+
+If we blithely set the Sp to be the same as the slot (the young end of
+young<b>), an adjustment will be necessary when we go to the next block.
+This is wasteful. So, instead, for the next block after a procpoint,
+the actual Sp should be set to the same as the true Sp when we just
+entered the procpoint. Then manifestSP will automatically do the right
+thing.
+
+Questions you may ask:
+
+1. Why don't we need to change the mapping for the procpoint itself?
+ Because manifestSP does its own calculation of the true stack value,
+ manifestSP will notice the discrepancy between the actual stack
+ pointer and the slot start, and adjust all of its memory accesses
+ accordingly. So the only problem is when we adjust the Sp in
+ preparation for the successor block; that's why this code is here and
+ not in setSuccSPs.
+
+2. Why don't we make the procpoint call area and the true offset match
+ up? If we did that, we would never use memory above the true value
+ of the stack pointer, thus wasting all of the stack we used to store
+ arguments. You might think that some clever changes to the slot
+ offsets, using negative offsets, might fix it, but this does not make
+ semantic sense.
+
+3. If manifestSP is already calculating the true stack value, why can't we
+ do this trick inside manifestSP itself? The reason is that if two
+ branches join with inconsistent SPs, one of them has to be fixed: we
+ can't know what the fix should be without already knowing what the
+ chosen location of SP is on the next successor. (This is
+   the "succ already knows incoming SP" case). This calculation cannot
+ be easily done in manifestSP, since it processes the nodes
+ /backwards/. So we need to have figured this out before we hit
+ manifestSP.
+-}
+
-- After determining the stack layout, we can:
-- 1. Replace references to stack Areas with addresses relative to the stack
-- pointer.
-- stack pointer to be younger than the live values on the stack at proc points.
-- 3. Compute the maximum stack offset used in the procedure and replace
-- the stack high-water mark with that offset.
-manifestSP :: AreaMap -> ByteOff -> CmmGraph -> FuelUniqSM CmmGraph
-manifestSP areaMap entry_off g@(CmmGraph {g_entry=entry}) =
+manifestSP :: SpEntryMap -> AreaMap -> ByteOff -> CmmGraph -> FuelUniqSM CmmGraph
+manifestSP spEntryMap areaMap entry_off g@(CmmGraph {g_entry=entry}) =
ofBlockMap entry `liftM` foldl replB (return mapEmpty) (postorderDfs g)
where slot a = -- pprTrace "slot" (ppr a) $
Map.lookup a areaMap `orElse` panic "unallocated Area"
sp_high = maxSlot slot g
proc_entry_sp = slot (CallArea Old) + entry_off
- add_sp_off :: CmmBlock -> BlockEnv Int -> BlockEnv Int
- add_sp_off b env =
- case lastNode b of
- CmmCall {cml_cont=Just succ, cml_ret_args=off} -> mapInsert succ off env
- CmmForeignCall {succ=succ} -> mapInsert succ wORD_SIZE env
- _ -> env
- spEntryMap = foldGraphBlocks add_sp_off (mapInsert entry entry_off emptyBlockMap) g
spOffset id = mapLookup id spEntryMap `orElse` 0
sp_on_entry id | id == entry = proc_entry_sp
where spIn = sp_on_entry (entryLabel block)
middle spOff m = mapExpDeep (replSlot spOff) m
+ -- XXX there shouldn't be any global registers in the
+ -- CmmCall, so there shouldn't be any slots in
+ -- CmmCall... check that...
last spOff l = mapExpDeep (replSlot spOff) l
replSlot spOff (CmmStackSlot a i) = CmmRegOff (CmmGlobal Sp) (spOff - (slot a + i))
replSlot _ (CmmLit CmmHighStackMark) = -- replacing the high water mark
CmmLit (CmmInt (toInteger (max 0 (sp_high - proc_entry_sp))) (typeWidth bWord))
+ -- Invariant: Sp is always greater than SpLim. Thus, if
+ -- the high water mark is zero, we can optimize away the
+ -- conditional branch. Relies on dead code elimination
+ -- to get rid of the dead GC blocks.
+ -- EZY: Maybe turn this into a guard that checks if a
+ -- statement is stack-check ish? Maybe we should make
+ -- an actual mach-op for it, so there's no chance of
+ -- mixing this up with something else...
+ replSlot _ (CmmMachOp (MO_U_Lt _)
+ [CmmMachOp (MO_Sub _)
+ [ CmmReg (CmmGlobal Sp)
+ , CmmLit (CmmInt 0 _)],
+ CmmReg (CmmGlobal SpLim)]) = CmmLit (CmmInt 0 wordWidth)
replSlot _ e = e
replLast :: MaybeC C (CmmNode C O) -> [CmmNode O O] -> CmmNode O C -> FuelUniqSM [CmmBlock]
This will fix the spill before stack check problem but only really as a side\r
effect. A 'real fix' probably requires making the spiller know about sp checks.\r
\r
- - There is some silly stuff happening with the Sp. We end up with code like:\r
- Sp = Sp + 8; R1 = _vwf::I64; Sp = Sp -8\r
- Seems to be perhaps caused by the issue above but also maybe a optimisation\r
- pass needed?\r
+ EZY: I don't understand this comment. David Terei, can you clarify?\r
\r
- - Proc pass all arguments on the stack, adding more code and slowing down things\r
- a lot. We either need to fix this or even better would be to get rid of\r
- proc points.\r
+ - Proc points pass all arguments on the stack, adding more code and\r
+ slowing down things a lot. We either need to fix this or even better\r
+ would be to get rid of proc points.\r
\r
- CmmInfo.cmmToRawCmm uses Old.Cmm, so it is called after converting Cmm.Cmm to\r
Old.Cmm. We should abstract it to work on both representations, it needs only to\r
we could convert codeGen/StgCmm* clients to the Hoopl's semantics?\r
It's all deeply unsatisfactory.\r
\r
- - Improve preformance of Hoopl.\r
+ - Improve performance of Hoopl.\r
\r
A nofib comparison of -fasm vs -fnewcodegen nofib compilation parameters\r
(using the same ghc-cmm branch +libraries compiled by the old codegenerator)\r
\r
So we generate a bit better code, but it takes us longer!\r
\r
+ EZY: Also importantly, Hoopl uses dramatically more memory than the\r
+ old code generator.\r
+\r
- Are all blockToNodeList and blockOfNodeList really needed? Maybe we could\r
splice blocks instead?\r
\r
a block catenation function would be probably nicer than blockToNodeList\r
/ blockOfNodeList combo.\r
\r
- - loweSafeForeignCall seems too lowlevel. Just use Dataflow. After that\r
+ - lowerSafeForeignCall seems too lowlevel. Just use Dataflow. After that\r
delete splitEntrySeq from HooplUtils.\r
\r
- manifestSP seems to touch a lot of the graph representation. It is\r
calling convention, and the code for calling foreign calls is generated\r
\r
- AsmCodeGen has a generic Cmm optimiser; move this into new pipeline\r
+ EZY (2011-04-16): The mini-inliner has been generalized and ported,\r
+ but the constant folding and other optimizations need to still be\r
+ ported.\r
\r
- AsmCodeGen has post-native-cg branch eliminator (shortCutBranches);\r
we ultimately want to share this with the Cmm branch eliminator.\r
- See "CAFs" below; we want to totally refactor the way SRTs are calculated\r
\r
- Pull out Areas into its own module\r
- Parameterise AreaMap\r
+ Parameterise AreaMap (note there are type synonyms in CmmStackLayout!)\r
Add ByteWidth = Int\r
type SubArea = (Area, ByteOff, ByteWidth) \r
ByteOff should not be defined in SMRep -- that is too high up the hierarchy\r
insert spills/reloads across \r
LastCalls, and \r
Branches to proc-points\r
- Now sink those reloads:\r
- - CmmSpillReload.insertLateReloads\r
+ Now sink those reloads (and other instructions):\r
+ - CmmSpillReload.rewriteAssignments\r
- CmmSpillReload.removeDeadAssignmentsAndReloads\r
\r
* CmmStackLayout.stubSlotsOnDeath\r
never pass variables to join points via arguments.)\r
\r
Furthermore, there is *no way* to pass q to J in a register (other\r
-than a paramter register).\r
+than a parameter register).\r
\r
What we want is to do register allocation across the whole caboodle.\r
Then we could drop all the code that deals with the above awkward\r
@echo 'cLeadingUnderscore = "$(LeadingUnderscore)"' >> $@
@echo 'cRAWCPP_FLAGS :: String' >> $@
@echo 'cRAWCPP_FLAGS = "$(RAWCPP_FLAGS)"' >> $@
- @echo 'cGCC :: String' >> $@
- @echo 'cGCC = "$(WhatGccIsCalled)"' >> $@
@echo 'cMKDLL :: String' >> $@
@echo 'cMKDLL = "$(BLD_DLL)"' >> $@
@echo 'cLdIsGNULd :: String' >> $@
@echo 'cGHC_SYSMAN_PGM = "$(GHC_SYSMAN)"' >> $@
@echo 'cGHC_SYSMAN_DIR :: String' >> $@
@echo 'cGHC_SYSMAN_DIR = "$(GHC_SYSMAN_DIR)"' >> $@
- @echo 'cGHC_PERL :: String' >> $@
- @echo 'cGHC_PERL = "$(GHC_PERL)"' >> $@
@echo 'cDEFAULT_TMPDIR :: String' >> $@
@echo 'cDEFAULT_TMPDIR = "$(DEFAULT_TMPDIR)"' >> $@
@echo 'cRelocatableBuild :: Bool' >> $@
% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
%
-
-
\begin{code}
-{-# OPTIONS -fno-warn-incomplete-patterns #-}
--- The above warning supression flag is a temporary kludge.
--- While working on this module you are encouraged to remove it and fix
--- any warnings in the module. See
--- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#Warnings
--- for details
{-# LANGUAGE DeriveDataTypeable #-}
-- | Abstract syntax of global declarations.
(ppr new_or_data <+>
(if isJust typats then ptext (sLit "instance") else empty) <+>
pp_decl_head (unLoc context) ltycon tyvars typats <+>
- ppr_sig mb_sig)
+ ppr_sigx mb_sig)
(pp_condecls condecls)
derivings
where
- ppr_sig Nothing = empty
- ppr_sig (Just kind) = dcolon <+> pprKind kind
+ ppr_sigx Nothing = empty
+ ppr_sigx (Just kind) = dcolon <+> pprKind kind
ppr (ClassDecl {tcdCtxt = context, tcdLName = lclas, tcdTyVars = tyvars,
- tcdFDs = fds,
+ tcdFDs = fds,
tcdSigs = sigs, tcdMeths = methods, tcdATs = ats})
| null sigs && null ats -- No "where" part
= top_matter
ppr = pprConDecl
pprConDecl :: OutputableBndr name => ConDecl name -> SDoc
-pprConDecl (ConDecl { con_name =con, con_explicit = expl, con_qvars = tvs
+pprConDecl (ConDecl { con_name = con, con_explicit = expl, con_qvars = tvs
, con_cxt = cxt, con_details = details
, con_res = ResTyH98, con_doc = doc })
- = sep [ppr_mbDoc doc, pprHsForAll expl tvs cxt, ppr_details con details]
+ = sep [ppr_mbDoc doc, pprHsForAll expl tvs cxt, ppr_details details]
where
- ppr_details con (InfixCon t1 t2) = hsep [ppr t1, pprHsInfix con, ppr t2]
- ppr_details con (PrefixCon tys) = hsep (pprHsVar con : map ppr tys)
- ppr_details con (RecCon fields) = ppr con <+> pprConDeclFields fields
+ ppr_details (InfixCon t1 t2) = hsep [ppr t1, pprHsInfix con, ppr t2]
+ ppr_details (PrefixCon tys) = hsep (pprHsVar con : map ppr tys)
+ ppr_details (RecCon fields) = ppr con <+> pprConDeclFields fields
pprConDecl (ConDecl { con_name = con, con_explicit = expl, con_qvars = tvs
, con_cxt = cxt, con_details = PrefixCon arg_tys
%************************************************************************
%* *
-\subsection[InstDecl]{An instance declaration
+\subsection[InstDecl]{An instance declaration}
%* *
%************************************************************************
%************************************************************************
%* *
-\subsection[DerivDecl]{A stand-alone instance deriving declaration
+\subsection[DerivDecl]{A stand-alone instance deriving declaration}
%* *
%************************************************************************
HsImpExp: Abstract syntax: imports, exports, interfaces
\begin{code}
-{-# OPTIONS -fno-warn-incomplete-patterns #-}
--- The above warning supression flag is a temporary kludge.
--- While working on this module you are encouraged to remove it and fix
--- any warnings in the module. See
--- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#Warnings
--- for details
{-# LANGUAGE DeriveDataTypeable #-}
module HsImpExp where
ieName (IEThingAbs n) = n
ieName (IEThingWith n _) = n
ieName (IEThingAll n) = n
+ieName _ = panic "ieName failed pattern match!"
ieNames :: IE a -> [a]
ieNames (IEVar n ) = [n]
ppr (IEThingAll thing) = hcat [ppr thing, text "(..)"]
ppr (IEThingWith thing withs)
= ppr thing <> parens (fsep (punctuate comma (map pprHsVar withs)))
- ppr (IEModuleContents mod)
- = ptext (sLit "module") <+> ppr mod
+ ppr (IEModuleContents mod')
+ = ptext (sLit "module") <+> ppr mod'
ppr (IEGroup n _) = text ("<IEGroup: " ++ (show n) ++ ">")
ppr (IEDoc doc) = ppr doc
ppr (IEDocNamed string) = text ("<IEDocNamed: " ++ string ++ ">")
finsts_mod = mi_finsts iface
hash_env = mi_hash_fn iface
mod_hash = mi_mod_hash iface
- export_hash | depend_on_exports mod = Just (mi_exp_hash iface)
- | otherwise = Nothing
+ export_hash | depend_on_exports = Just (mi_exp_hash iface)
+ | otherwise = Nothing
used_occs = lookupModuleEnv ent_map mod `orElse` []
Nothing -> pprPanic "mkUsage" (ppr mod <+> ppr occ <+> ppr used_names)
Just r -> r
- depend_on_exports mod =
- case lookupModuleEnv direct_imports mod of
- Just _ -> True
- -- Even if we used 'import M ()', we have to register a
- -- usage on the export list because we are sensitive to
- -- changes in orphan instances/rules.
- Nothing -> False
- -- In GHC 6.8.x the above line read "True", and in
- -- fact it recorded a dependency on *all* the
- -- modules underneath in the dependency tree. This
- -- happens to make orphans work right, but is too
- -- expensive: it'll read too many interface files.
- -- The 'isNothing maybe_iface' check above saved us
- -- from generating many of these usages (at least in
- -- one-shot mode), but that's even more bogus!
+ depend_on_exports = is_direct_import
+ {- True
+ Even if we used 'import M ()', we have to register a
+ usage on the export list because we are sensitive to
+ changes in orphan instances/rules.
+ False
+ In GHC 6.8.x we always returned true, and in
+ fact it recorded a dependency on *all* the
+ modules underneath in the dependency tree. This
+ happens to make orphans work right, but is too
+ expensive: it'll read too many interface files.
+ The 'isNothing maybe_iface' check above saved us
+ from generating many of these usages (at least in
+ one-shot mode), but that's even more bogus!
+ -}
\end{code}
\begin{code}
#include "HsVersions.h"
import qualified GHC
--- import GHC ( ModSummary(..), GhcMonad )
import GhcMonad
import HsSyn ( ImportDecl(..) )
import DynFlags
import Exception
import ErrUtils
--- import MonadUtils ( liftIO )
import System.Directory
import System.FilePath
src_opts <- io $ getOptionsFromFile dflags0 output_fn
(dflags2, unhandled_flags, warns)
<- io $ parseDynamicNoPackageFlags dflags0 src_opts
+ io $ checkProcessArgsResult unhandled_flags
unless (dopt Opt_Pp dflags2) $ io $ handleFlagWarnings dflags2 warns
-- the HsPp pass below will emit warnings
- io $ checkProcessArgsResult unhandled_flags
setDynFlags dflags2
(dflags1, unhandled_flags, warns)
<- io $ parseDynamicNoPackageFlags dflags src_opts
setDynFlags dflags1
- io $ handleFlagWarnings dflags1 warns
io $ checkProcessArgsResult unhandled_flags
+ io $ handleFlagWarnings dflags1 warns
return (Hsc sf, output_fn)
(cmdline_include_paths ++ pkg_include_dirs)
let md_c_flags = machdepCCOpts dflags
- gcc_extra_viac_flags <- io $ getExtraViaCOpts dflags
+ let gcc_extra_viac_flags = extraGccViaCFlags dflags
let pic_c_flags = picCCOpts dflags
let verbFlags = getVerbFlags dflags
-{-# OPTIONS_GHC -w #-}
--- Temporary, until rtsIsProfiled is fixed
-
-- |
-- Dynamic flags
--
DPHBackend(..), dphPackageMaybe,
wayNames,
+ Settings(..),
+ ghcUsagePath, ghciUsagePath, topDir, tmpDir, rawSettings,
+ extraGccViaCFlags, systemPackageConfig,
+ pgm_L, pgm_P, pgm_F, pgm_c, pgm_s, pgm_a, pgm_l, pgm_dll, pgm_T,
+ pgm_sysman, pgm_windres, pgm_lo, pgm_lc,
+ opt_L, opt_P, opt_F, opt_c, opt_m, opt_a, opt_l,
+ opt_windres, opt_lo, opt_lc,
+
+
-- ** Manipulating DynFlags
- defaultDynFlags, -- DynFlags
+ defaultDynFlags, -- Settings -> DynFlags
initDynFlags, -- DynFlags -> IO DynFlags
getOpts, -- DynFlags -> (DynFlags -> [a]) -> [a]
getStgToDo,
-- * Compiler configuration suitable for display to the user
- Printable(..),
compilerInfo
#ifdef GHCI
-- Only in stage 2 can we be sure that the RTS
import SrcLoc
import FastString
import Outputable
+#ifdef GHCI
import Foreign.C ( CInt )
+#endif
import {-# SOURCE #-} ErrUtils ( Severity(..), Message, mkLocMessage )
+#ifdef GHCI
import System.IO.Unsafe ( unsafePerformIO )
+#endif
import Data.IORef
import Control.Monad ( when )
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
-import Data.Maybe
+-- import Data.Maybe
import System.FilePath
import System.IO ( stderr, hPutChar )
libraryPaths :: [String],
frameworkPaths :: [String], -- used on darwin only
cmdlineFrameworks :: [String], -- ditto
- tmpDir :: String, -- no trailing '/'
- ghcUsagePath :: FilePath, -- Filled in by SysTools
- ghciUsagePath :: FilePath, -- ditto
rtsOpts :: Maybe String,
rtsOptsEnabled :: RtsOptsEnabled,
hpcDir :: String, -- ^ Path to store the .mix files
- -- options for particular phases
- opt_L :: [String],
- opt_P :: [String],
- opt_F :: [String],
- opt_c :: [String],
- opt_m :: [String],
- opt_a :: [String],
- opt_l :: [String],
- opt_windres :: [String],
- opt_lo :: [String], -- LLVM: llvm optimiser
- opt_lc :: [String], -- LLVM: llc static compiler
-
- -- commands for particular phases
- pgm_L :: String,
- pgm_P :: (String,[Option]),
- pgm_F :: String,
- pgm_c :: (String,[Option]),
- pgm_s :: (String,[Option]),
- pgm_a :: (String,[Option]),
- pgm_l :: (String,[Option]),
- pgm_dll :: (String,[Option]),
- pgm_T :: String,
- pgm_sysman :: String,
- pgm_windres :: String,
- pgm_lo :: (String,[Option]), -- LLVM: opt llvm optimiser
- pgm_lc :: (String,[Option]), -- LLVM: llc static compiler
+ settings :: Settings,
-- For ghc -M
depMakefile :: FilePath,
-- Package flags
extraPkgConfs :: [FilePath],
- topDir :: FilePath, -- filled in by SysTools
- systemPackageConfig :: FilePath, -- ditto
-- ^ The @-package-conf@ flags given on the command line, in the order
-- they appeared.
haddockOptions :: Maybe String
}
+data Settings = Settings {
+ sGhcUsagePath :: FilePath, -- Filled in by SysTools
+ sGhciUsagePath :: FilePath, -- ditto
+ sTopDir :: FilePath,
+ sTmpDir :: String, -- no trailing '/'
+ -- You shouldn't need to look things up in rawSettings directly.
+ -- They should have their own fields instead.
+ sRawSettings :: [(String, String)],
+ sExtraGccViaCFlags :: [String],
+ sSystemPackageConfig :: FilePath,
+ -- commands for particular phases
+ sPgm_L :: String,
+ sPgm_P :: (String,[Option]),
+ sPgm_F :: String,
+ sPgm_c :: (String,[Option]),
+ sPgm_s :: (String,[Option]),
+ sPgm_a :: (String,[Option]),
+ sPgm_l :: (String,[Option]),
+ sPgm_dll :: (String,[Option]),
+ sPgm_T :: String,
+ sPgm_sysman :: String,
+ sPgm_windres :: String,
+ sPgm_lo :: (String,[Option]), -- LLVM: opt llvm optimiser
+ sPgm_lc :: (String,[Option]), -- LLVM: llc static compiler
+ -- options for particular phases
+ sOpt_L :: [String],
+ sOpt_P :: [String],
+ sOpt_F :: [String],
+ sOpt_c :: [String],
+ sOpt_m :: [String],
+ sOpt_a :: [String],
+ sOpt_l :: [String],
+ sOpt_windres :: [String],
+ sOpt_lo :: [String], -- LLVM: llvm optimiser
+ sOpt_lc :: [String] -- LLVM: llc static compiler
+
+ }
+
+ghcUsagePath :: DynFlags -> FilePath
+ghcUsagePath dflags = sGhcUsagePath (settings dflags)
+ghciUsagePath :: DynFlags -> FilePath
+ghciUsagePath dflags = sGhciUsagePath (settings dflags)
+topDir :: DynFlags -> FilePath
+topDir dflags = sTopDir (settings dflags)
+tmpDir :: DynFlags -> String
+tmpDir dflags = sTmpDir (settings dflags)
+rawSettings :: DynFlags -> [(String, String)]
+rawSettings dflags = sRawSettings (settings dflags)
+extraGccViaCFlags :: DynFlags -> [String]
+extraGccViaCFlags dflags = sExtraGccViaCFlags (settings dflags)
+systemPackageConfig :: DynFlags -> FilePath
+systemPackageConfig dflags = sSystemPackageConfig (settings dflags)
+pgm_L :: DynFlags -> String
+pgm_L dflags = sPgm_L (settings dflags)
+pgm_P :: DynFlags -> (String,[Option])
+pgm_P dflags = sPgm_P (settings dflags)
+pgm_F :: DynFlags -> String
+pgm_F dflags = sPgm_F (settings dflags)
+pgm_c :: DynFlags -> (String,[Option])
+pgm_c dflags = sPgm_c (settings dflags)
+pgm_s :: DynFlags -> (String,[Option])
+pgm_s dflags = sPgm_s (settings dflags)
+pgm_a :: DynFlags -> (String,[Option])
+pgm_a dflags = sPgm_a (settings dflags)
+pgm_l :: DynFlags -> (String,[Option])
+pgm_l dflags = sPgm_l (settings dflags)
+pgm_dll :: DynFlags -> (String,[Option])
+pgm_dll dflags = sPgm_dll (settings dflags)
+pgm_T :: DynFlags -> String
+pgm_T dflags = sPgm_T (settings dflags)
+pgm_sysman :: DynFlags -> String
+pgm_sysman dflags = sPgm_sysman (settings dflags)
+pgm_windres :: DynFlags -> String
+pgm_windres dflags = sPgm_windres (settings dflags)
+pgm_lo :: DynFlags -> (String,[Option])
+pgm_lo dflags = sPgm_lo (settings dflags)
+pgm_lc :: DynFlags -> (String,[Option])
+pgm_lc dflags = sPgm_lc (settings dflags)
+opt_L :: DynFlags -> [String]
+opt_L dflags = sOpt_L (settings dflags)
+opt_P :: DynFlags -> [String]
+opt_P dflags = sOpt_P (settings dflags)
+opt_F :: DynFlags -> [String]
+opt_F dflags = sOpt_F (settings dflags)
+opt_c :: DynFlags -> [String]
+opt_c dflags = sOpt_c (settings dflags)
+opt_m :: DynFlags -> [String]
+opt_m dflags = sOpt_m (settings dflags)
+opt_a :: DynFlags -> [String]
+opt_a dflags = sOpt_a (settings dflags)
+opt_l :: DynFlags -> [String]
+opt_l dflags = sOpt_l (settings dflags)
+opt_windres :: DynFlags -> [String]
+opt_windres dflags = sOpt_windres (settings dflags)
+opt_lo :: DynFlags -> [String]
+opt_lo dflags = sOpt_lo (settings dflags)
+opt_lc :: DynFlags -> [String]
+opt_lc dflags = sOpt_lc (settings dflags)
+
wayNames :: DynFlags -> [WayName]
wayNames = map wayName . ways
-- | The normal 'DynFlags'. Note that they is not suitable for use in this form
-- and must be fully initialized by 'GHC.newSession' first.
-defaultDynFlags :: DynFlags
-defaultDynFlags =
+defaultDynFlags :: Settings -> DynFlags
+defaultDynFlags mySettings =
DynFlags {
ghcMode = CompManager,
ghcLink = LinkBinary,
libraryPaths = [],
frameworkPaths = [],
cmdlineFrameworks = [],
- tmpDir = cDEFAULT_TMPDIR,
rtsOpts = Nothing,
rtsOptsEnabled = RtsOptsSafeOnly,
hpcDir = ".hpc",
- opt_L = [],
- opt_P = (if opt_PIC
- then ["-D__PIC__", "-U __PIC__"] -- this list is reversed
- else []),
- opt_F = [],
- opt_c = [],
- opt_a = [],
- opt_m = [],
- opt_l = [],
- opt_windres = [],
- opt_lo = [],
- opt_lc = [],
-
extraPkgConfs = [],
packageFlags = [],
pkgDatabase = Nothing,
buildTag = panic "defaultDynFlags: No buildTag",
rtsBuildTag = panic "defaultDynFlags: No rtsBuildTag",
splitInfo = Nothing,
- -- initSysTools fills all these in
- ghcUsagePath = panic "defaultDynFlags: No ghciUsagePath",
- ghciUsagePath = panic "defaultDynFlags: No ghciUsagePath",
- topDir = panic "defaultDynFlags: No topDir",
- systemPackageConfig = panic "no systemPackageConfig: call GHC.setSessionDynFlags",
- pgm_L = panic "defaultDynFlags: No pgm_L",
- pgm_P = panic "defaultDynFlags: No pgm_P",
- pgm_F = panic "defaultDynFlags: No pgm_F",
- pgm_c = panic "defaultDynFlags: No pgm_c",
- pgm_s = panic "defaultDynFlags: No pgm_s",
- pgm_a = panic "defaultDynFlags: No pgm_a",
- pgm_l = panic "defaultDynFlags: No pgm_l",
- pgm_dll = panic "defaultDynFlags: No pgm_dll",
- pgm_T = panic "defaultDynFlags: No pgm_T",
- pgm_sysman = panic "defaultDynFlags: No pgm_sysman",
- pgm_windres = panic "defaultDynFlags: No pgm_windres",
- pgm_lo = panic "defaultDynFlags: No pgm_lo",
- pgm_lc = panic "defaultDynFlags: No pgm_lc",
- -- end of initSysTools values
+ settings = mySettings,
-- ghc -M values
depMakefile = "Makefile",
depIncludePkgDeps = False,
-- XXX HACK: Prelude> words "'does not' work" ===> ["'does","not'","work"]
-- Config.hs should really use Option.
-setPgmP f d = let (pgm:args) = words f in d{ pgm_P = (pgm, map Option args)}
-addOptl f d = d{ opt_l = f : opt_l d}
-addOptP f d = d{ opt_P = f : opt_P d}
+setPgmP f = let (pgm:args) = words f in alterSettings (\s -> s { sPgm_P = (pgm, map Option args)})
+addOptl f = alterSettings (\s -> s { sOpt_l = f : sOpt_l s})
+addOptP f = alterSettings (\s -> s { sOpt_P = f : sOpt_P s})
setDepMakefile :: FilePath -> DynFlags -> DynFlags
------- Specific phases --------------------------------------------
-- need to appear before -pgmL to be parsed as LLVM flags.
- , Flag "pgmlo" (hasArg (\f d -> d{ pgm_lo = (f,[])}))
- , Flag "pgmlc" (hasArg (\f d -> d{ pgm_lc = (f,[])}))
- , Flag "pgmL" (hasArg (\f d -> d{ pgm_L = f}))
+ , Flag "pgmlo" (hasArg (\f -> alterSettings (\s -> s { sPgm_lo = (f,[])})))
+ , Flag "pgmlc" (hasArg (\f -> alterSettings (\s -> s { sPgm_lc = (f,[])})))
+ , Flag "pgmL" (hasArg (\f -> alterSettings (\s -> s { sPgm_L = f})))
, Flag "pgmP" (hasArg setPgmP)
- , Flag "pgmF" (hasArg (\f d -> d{ pgm_F = f}))
- , Flag "pgmc" (hasArg (\f d -> d{ pgm_c = (f,[])}))
+ , Flag "pgmF" (hasArg (\f -> alterSettings (\s -> s { sPgm_F = f})))
+ , Flag "pgmc" (hasArg (\f -> alterSettings (\s -> s { sPgm_c = (f,[])})))
, Flag "pgmm" (HasArg (\_ -> addWarn "The -keep-raw-s-files flag does nothing; it will be removed in a future GHC release"))
- , Flag "pgms" (hasArg (\f d -> d{ pgm_s = (f,[])}))
- , Flag "pgma" (hasArg (\f d -> d{ pgm_a = (f,[])}))
- , Flag "pgml" (hasArg (\f d -> d{ pgm_l = (f,[])}))
- , Flag "pgmdll" (hasArg (\f d -> d{ pgm_dll = (f,[])}))
- , Flag "pgmwindres" (hasArg (\f d -> d{ pgm_windres = f}))
+ , Flag "pgms" (hasArg (\f -> alterSettings (\s -> s { sPgm_s = (f,[])})))
+ , Flag "pgma" (hasArg (\f -> alterSettings (\s -> s { sPgm_a = (f,[])})))
+ , Flag "pgml" (hasArg (\f -> alterSettings (\s -> s { sPgm_l = (f,[])})))
+ , Flag "pgmdll" (hasArg (\f -> alterSettings (\s -> s { sPgm_dll = (f,[])})))
+ , Flag "pgmwindres" (hasArg (\f -> alterSettings (\s -> s { sPgm_windres = f})))
-- need to appear before -optl/-opta to be parsed as LLVM flags.
- , Flag "optlo" (hasArg (\f d -> d{ opt_lo = f : opt_lo d}))
- , Flag "optlc" (hasArg (\f d -> d{ opt_lc = f : opt_lc d}))
- , Flag "optL" (hasArg (\f d -> d{ opt_L = f : opt_L d}))
+ , Flag "optlo" (hasArg (\f -> alterSettings (\s -> s { sOpt_lo = f : sOpt_lo s})))
+ , Flag "optlc" (hasArg (\f -> alterSettings (\s -> s { sOpt_lc = f : sOpt_lc s})))
+ , Flag "optL" (hasArg (\f -> alterSettings (\s -> s { sOpt_L = f : sOpt_L s})))
, Flag "optP" (hasArg addOptP)
- , Flag "optF" (hasArg (\f d -> d{ opt_F = f : opt_F d}))
- , Flag "optc" (hasArg (\f d -> d{ opt_c = f : opt_c d}))
- , Flag "optm" (hasArg (\f d -> d{ opt_m = f : opt_m d}))
- , Flag "opta" (hasArg (\f d -> d{ opt_a = f : opt_a d}))
+ , Flag "optF" (hasArg (\f -> alterSettings (\s -> s { sOpt_F = f : sOpt_F s})))
+ , Flag "optc" (hasArg (\f -> alterSettings (\s -> s { sOpt_c = f : sOpt_c s})))
+ , Flag "optm" (hasArg (\f -> alterSettings (\s -> s { sOpt_m = f : sOpt_m s})))
+ , Flag "opta" (hasArg (\f -> alterSettings (\s -> s { sOpt_a = f : sOpt_a s})))
, Flag "optl" (hasArg addOptl)
- , Flag "optwindres" (hasArg (\f d -> d{ opt_windres = f : opt_windres d}))
+ , Flag "optwindres" (hasArg (\f -> alterSettings (\s -> s { sOpt_windres = f : sOpt_windres s})))
, Flag "split-objs"
(NoArg (if can_split
, Flag "fcontext-stack" (intSuffix (\n d -> d{ ctxtStkDepth = n }))
, Flag "fstrictness-before" (intSuffix (\n d -> d{ strictnessBefore = n : strictnessBefore d }))
, Flag "ffloat-lam-args" (intSuffix (\n d -> d{ floatLamArgs = Just n }))
- , Flag "ffloat-all-lams" (intSuffix (\n d -> d{ floatLamArgs = Nothing }))
+ , Flag "ffloat-all-lams" (noArg (\d -> d{ floatLamArgs = Nothing }))
------ Profiling ----------------------------------------------------
rtsIsProfiled :: Bool
rtsIsProfiled = unsafePerformIO rtsIsProfiledIO /= 0
+#endif
checkTemplateHaskellOk :: Bool -> DynP ()
-checkTemplateHaskellOk turn_on
+#ifdef GHCI
+checkTemplateHaskellOk turn_on
| turn_on && rtsIsProfiled
= addErr "You can't use Template Haskell with a profiled compiler"
| otherwise
= return ()
#else
--- In stage 1 we don't know that the RTS has rts_isProfiled,
+-- In stage 1 we don't know that the RTS has rts_isProfiled,
-- so we simply say "ok". It doesn't matter because TH isn't
-- available in stage 1 anyway.
-checkTemplateHaskellOk turn_on = return ()
+checkTemplateHaskellOk _ = return ()
#endif
{- **********************************************************************
-- (except for -fno-glasgow-exts, which is treated specially)
--------------------------
+alterSettings :: (Settings -> Settings) -> DynFlags -> DynFlags
+alterSettings f dflags = dflags { settings = f (settings dflags) }
+
+--------------------------
setDumpFlag' :: DynFlag -> DynP ()
setDumpFlag' dump_flag
= do { setDynFlag dump_flag
-- tmpDir, where we store temporary files.
setTmpDir :: FilePath -> DynFlags -> DynFlags
-setTmpDir dir dflags = dflags{ tmpDir = normalise dir }
+setTmpDir dir = alterSettings (\s -> s { sTmpDir = normalise dir })
-- we used to fix /cygdrive/c/.. on Windows, but this doesn't
-- seem necessary now --SDM 7/2/2008
-- There are some options that we need to pass to gcc when compiling
-- Haskell code via C, but are only supported by recent versions of
-- gcc. The configure script decides which of these options we need,
--- and puts them in the file "extra-gcc-opts" in $topdir, which is
--- read before each via-C compilation. The advantage of having these
--- in a separate file is that the file can be created at install-time
--- depending on the available gcc version, and even re-generated later
--- if gcc is upgraded.
+-- and puts them in the "settings" file in $topdir. The advantage of
+-- having these in a separate file is that the file can be created at
+-- install-time depending on the available gcc version, and even
+-- re-generated later if gcc is upgraded.
--
-- The options below are not dependent on the version of gcc, only the
-- platform.
machdepCCOpts :: DynFlags -> [String] -- flags for all C compilations
-machdepCCOpts dflags = cCcOpts ++ machdepCCOpts'
+machdepCCOpts _ = cCcOpts ++ machdepCCOpts'
machdepCCOpts' :: [String] -- flags for all C compilations
machdepCCOpts'
-- -----------------------------------------------------------------------------
-- Compiler Info
-data Printable = String String
- | FromDynFlags (DynFlags -> String)
-
-compilerInfo :: [(String, Printable)]
-compilerInfo = [("Project name", String cProjectName),
- ("Project version", String cProjectVersion),
- ("Booter version", String cBooterVersion),
- ("Stage", String cStage),
- ("Build platform", String cBuildPlatformString),
- ("Host platform", String cHostPlatformString),
- ("Target platform", String cTargetPlatformString),
- ("Have interpreter", String cGhcWithInterpreter),
- ("Object splitting supported", String cSupportsSplitObjs),
- ("Have native code generator", String cGhcWithNativeCodeGen),
- ("Support SMP", String cGhcWithSMP),
- ("Unregisterised", String cGhcUnregisterised),
- ("Tables next to code", String cGhcEnableTablesNextToCode),
- ("RTS ways", String cGhcRTSWays),
- ("Leading underscore", String cLeadingUnderscore),
- ("Debug on", String (show debugIsOn)),
- ("LibDir", FromDynFlags topDir),
- ("Global Package DB", FromDynFlags systemPackageConfig),
- ("C compiler flags", String (show cCcOpts)),
- ("Gcc Linker flags", String (show cGccLinkerOpts)),
- ("Ld Linker flags", String (show cLdLinkerOpts))
- ]
+compilerInfo :: DynFlags -> [(String, String)]
+compilerInfo dflags
+ = -- We always make "Project name" be first to keep parsing in
+ -- other languages simple, i.e. when looking for other fields,
+ -- you don't have to worry whether there is a leading '[' or not
+ ("Project name", cProjectName)
+ -- Next come the settings, so anything else can be overridden
+ -- in the settings file (as "lookup" uses the first match for the
+ -- key)
+ : rawSettings dflags
+ ++ [("Project version", cProjectVersion),
+ ("Booter version", cBooterVersion),
+ ("Stage", cStage),
+ ("Build platform", cBuildPlatformString),
+ ("Host platform", cHostPlatformString),
+ ("Target platform", cTargetPlatformString),
+ ("Have interpreter", cGhcWithInterpreter),
+ ("Object splitting supported", cSupportsSplitObjs),
+ ("Have native code generator", cGhcWithNativeCodeGen),
+ ("Support SMP", cGhcWithSMP),
+ ("Unregisterised", cGhcUnregisterised),
+ ("Tables next to code", cGhcEnableTablesNextToCode),
+ ("RTS ways", cGhcRTSWays),
+ ("Leading underscore", cLeadingUnderscore),
+ ("Debug on", show debugIsOn),
+ ("LibDir", topDir dflags),
+ ("Global Package DB", systemPackageConfig dflags),
+ ("C compiler flags", show cCcOpts),
+ ("Gcc Linker flags", show cGccLinkerOpts),
+ ("Ld Linker flags", show cLdLinkerOpts)
+ ]
liftIO $ StaticFlags.initStaticOpts
- dflags0 <- liftIO $ initDynFlags defaultDynFlags
- dflags <- liftIO $ initSysTools mb_top_dir dflags0
+ mySettings <- liftIO $ initSysTools mb_top_dir
+ dflags <- liftIO $ initDynFlags (defaultDynFlags mySettings)
env <- liftIO $ newHscEnv dflags
setSession env
preprocessFile hsc_env src_fn mb_phase (Just (buf, _time))
= do
let dflags = hsc_dflags hsc_env
- -- case we bypass the preprocessing stage?
- let
- local_opts = getOptions dflags buf src_fn
- --
+ let local_opts = getOptions dflags buf src_fn
+
(dflags', leftovers, warns)
<- parseDynamicNoPackageFlags dflags local_opts
checkProcessArgsResult leftovers
handleFlagWarnings dflags' warns
- let
- needs_preprocessing
+ let needs_preprocessing
| Just (Unlit _) <- mb_phase = True
| Nothing <- mb_phase, Unlit _ <- startPhase src_fn = True
-- note: local_opts is only required if there's no Unlit phase
hscTcExpr hsc_env expr = runHsc hsc_env $ do
maybe_stmt <- hscParseStmt expr
case maybe_stmt of
- Just (L _ (ExprStmt expr _ _ _)) ->
- ioMsgMaybe $ tcRnExpr hsc_env (hsc_IC hsc_env) expr
- _ ->
- liftIO $ throwIO $ mkSrcErr $ unitBag $
- mkPlainErrMsg noSrcSpan
- (text "not an expression:" <+> quotes (text expr))
+ Just (L _ (ExprStmt expr _ _ _)) ->
+ ioMsgMaybe $ tcRnExpr hsc_env (hsc_IC hsc_env) expr
+ _ ->
+ liftIO $ throwIO $ mkSrcErr $ unitBag $ mkPlainErrMsg noSrcSpan
+ (text "not an expression:" <+> quotes (text expr))
-- | Find the kind of a type
hscKcType
-- | A ModGuts is carried through the compiler, accumulating stuff as it goes
-- There is only one ModGuts at any time, the one for the module
-- being compiled right now. Once it is compiled, a 'ModIface' and
--- 'ModDetails' are extracted and the ModGuts is dicarded.
+-- 'ModDetails' are extracted and the ModGuts is discarded.
data ModGuts
= ModGuts {
mg_module :: !Module, -- ^ Module being compiled
#include "HsVersions.h"
import PackageConfig
-import DynFlags ( dopt, DynFlag(..), DynFlags(..), PackageFlag(..), DPHBackend(..) )
+import DynFlags
import StaticFlags
import Config ( cProjectVersion )
import Name ( Name, nameModule_maybe )
= case reads str of
((x,_):_) -> x -- Be forgiving: ignore trailing goop, and alternative parses
[] -> ghcError (UsageError ("Malformed argument " ++ str ++ " for flag " ++ sw))
- -- ToDo: hack alert. We should really parse the arugments
+ -- ToDo: hack alert. We should really parse the arguments
-- and announce errors in a more civilised way.
-- debugging options
-- | Suppress all that is suppressable in core dumps.
+-- Except for uniques, as some simplifier phases introduce new variables that
+-- have otherwise identical names.
opt_SuppressAll :: Bool
opt_SuppressAll
= lookUp (fsLit "-dsuppress-all")
--- | Suppress unique ids on variables.
-opt_SuppressUniques :: Bool
-opt_SuppressUniques
- = lookUp (fsLit "-dsuppress-all")
- || lookUp (fsLit "-dsuppress-uniques")
-
-- | Suppress all coercions, them replacing with '...'
opt_SuppressCoercions :: Bool
opt_SuppressCoercions
= lookUp (fsLit "-dsuppress-all")
|| lookUp (fsLit "-dsuppress-type-signatures")
+-- | Suppress unique ids on variables.
+-- Note: these are deliberately not implied by -dsuppress-all, as some
+-- simplifier phases introduce new variables that have otherwise identical
+-- names.
+opt_SuppressUniques :: Bool
+opt_SuppressUniques
+ = lookUp (fsLit "-dsuppress-uniques")
-- | Display case expressions with a single alternative as strict let bindings
opt_PprCaseAsLet :: Bool
-opt_PprCaseAsLet = lookUp (fsLit "-dppr-case-as-let")
+opt_PprCaseAsLet = lookUp (fsLit "-dppr-case-as-let")
-- | Set the maximum width of the dumps
-- If GHC's command line options are bad then the options parser uses the
touch, -- String -> String -> IO ()
copy,
copyWithHeader,
- getExtraViaCOpts,
-- Temporary-file management
setTmpDir,
import Panic
import Util
import DynFlags
+import StaticFlags
import Exception
import Data.IORef
\begin{code}
initSysTools :: Maybe String -- Maybe TopDir path (without the '-B' prefix)
-
- -> DynFlags
- -> IO DynFlags -- Set all the mutable variables above, holding
+ -> IO Settings -- Set all the mutable variables above, holding
-- (a) the system programs
-- (b) the package-config file
-- (c) the GHC usage message
-
-
-initSysTools mbMinusB dflags0
+initSysTools mbMinusB
= do { top_dir <- findTopDir mbMinusB
-- see [Note topdir]
-- NB: top_dir is assumed to be in standard Unix
-- format, '/' separated
- ; let installed :: FilePath -> FilePath
+ ; let settingsFile = top_dir </> "settings"
+ installed :: FilePath -> FilePath
installed file = top_dir </> file
installed_mingw_bin file = top_dir </> ".." </> "mingw" </> "bin" </> file
installed_perl_bin file = top_dir </> ".." </> "perl" </> file
+ ; settingsStr <- readFile settingsFile
+ ; mySettings <- case maybeReadFuzzy settingsStr of
+ Just s ->
+ return s
+ Nothing ->
+ pgmError ("Can't parse " ++ show settingsFile)
+ ; let getSetting key = case lookup key mySettings of
+ Just xs ->
+ return xs
+ Nothing -> pgmError ("No entry for " ++ show key ++ " in " ++ show settingsFile)
+ ; myExtraGccViaCFlags <- getSetting "GCC extra via C opts"
+ -- On Windows, mingw is distributed with GHC,
+ -- so we look in TopDir/../mingw/bin
+ -- It would perhaps be nice to be able to override this
+ -- with the settings file, but it would be a little fiddly
+ -- to make that possible, so for now you can't.
+ ; gcc_prog <- if isWindowsHost then return $ installed_mingw_bin "gcc"
+ else getSetting "C compiler command"
+ ; perl_path <- if isWindowsHost
+ then return $ installed_perl_bin "perl"
+ else getSetting "perl command"
+
; let pkgconfig_path = installed "package.conf.d"
ghc_usage_msg_path = installed "ghc-usage.txt"
ghci_usage_msg_path = installed "ghci-usage.txt"
windres_path = installed_mingw_bin "windres"
; tmpdir <- getTemporaryDirectory
- ; let dflags1 = setTmpDir tmpdir dflags0
- -- On Windows, mingw is distributed with GHC,
- -- so we look in TopDir/../mingw/bin
; let
- gcc_prog
- | isWindowsHost = installed_mingw_bin "gcc"
- | otherwise = cGCC
- perl_path
- | isWindowsHost = installed_perl_bin cGHC_PERL
- | otherwise = cGHC_PERL
-- 'touch' is a GHC util for Windows
touch_path
| isWindowsHost = installed cGHC_TOUCHY_PGM
; let lc_prog = "llc"
lo_prog = "opt"
- ; return dflags1{
- ghcUsagePath = ghc_usage_msg_path,
- ghciUsagePath = ghci_usage_msg_path,
- topDir = top_dir,
- systemPackageConfig = pkgconfig_path,
- pgm_L = unlit_path,
- pgm_P = cpp_path,
- pgm_F = "",
- pgm_c = (gcc_prog,[]),
- pgm_s = (split_prog,split_args),
- pgm_a = (as_prog,[]),
- pgm_l = (ld_prog,[]),
- pgm_dll = (mkdll_prog,mkdll_args),
- pgm_T = touch_path,
- pgm_sysman = top_dir ++ "/ghc/rts/parallel/SysMan",
- pgm_windres = windres_path,
- pgm_lo = (lo_prog,[]),
- pgm_lc = (lc_prog,[])
+ ; return $ Settings {
+ sTmpDir = normalise tmpdir,
+ sGhcUsagePath = ghc_usage_msg_path,
+ sGhciUsagePath = ghci_usage_msg_path,
+ sTopDir = top_dir,
+ sRawSettings = mySettings,
+ sExtraGccViaCFlags = words myExtraGccViaCFlags,
+ sSystemPackageConfig = pkgconfig_path,
+ sPgm_L = unlit_path,
+ sPgm_P = cpp_path,
+ sPgm_F = "",
+ sPgm_c = (gcc_prog,[]),
+ sPgm_s = (split_prog,split_args),
+ sPgm_a = (as_prog,[]),
+ sPgm_l = (ld_prog,[]),
+ sPgm_dll = (mkdll_prog,mkdll_args),
+ sPgm_T = touch_path,
+ sPgm_sysman = top_dir ++ "/ghc/rts/parallel/SysMan",
+ sPgm_windres = windres_path,
+ sPgm_lo = (lo_prog,[]),
+ sPgm_lc = (lc_prog,[]),
-- Hans: this isn't right in general, but you can
-- elaborate it in the same way as the others
+ sOpt_L = [],
+ sOpt_P = (if opt_PIC
+ then -- this list gets reversed
+ ["-D__PIC__", "-U __PIC__"]
+ else []),
+ sOpt_F = [],
+ sOpt_c = [],
+ sOpt_a = [],
+ sOpt_m = [],
+ sOpt_l = [],
+ sOpt_windres = [],
+ sOpt_lo = [],
+ sOpt_lc = []
}
}
\end{code}
hClose hout
hClose hin
-getExtraViaCOpts :: DynFlags -> IO [String]
-getExtraViaCOpts dflags = do
- f <- readFile (topDir dflags </> "extra-gcc-opts")
- return (words f)
-
-- | read the contents of the named section in an ELF object as a
-- String.
readElfSection :: DynFlags -> String -> FilePath -> IO (Maybe String)
-- return our temporary directory within tmp_dir, creating one if we
-- don't have one yet
getTempDir :: DynFlags -> IO FilePath
-getTempDir dflags@(DynFlags{tmpDir=tmp_dir})
+getTempDir dflags
= do let ref = dirsToClean dflags
+ tmp_dir = tmpDir dflags
mapping <- readIORef ref
case Map.lookup tmp_dir mapping of
Nothing ->
, Nothing
, mPprStats)
+ ---- generate jump tables
+ let tabled =
+ {-# SCC "generateJumpTables" #-}
+ alloced ++ generateJumpTables alloced
+
---- shortcut branches
let shorted =
{-# SCC "shortcutBranches" #-}
- shortcutBranches dflags alloced
+ shortcutBranches dflags tabled
---- sequence blocks
let sequenced =
#endif
-- -----------------------------------------------------------------------------
+-- Generate jump tables
+
+-- Analyzes all native code and generates data sections for all jump
+-- table instructions.
+generateJumpTables
+ :: [NatCmmTop Instr] -> [NatCmmTop Instr]
+generateJumpTables xs = concatMap f xs
+ where f (CmmProc _ _ (ListGraph xs)) = concatMap g xs
+ f _ = []
+ g (BasicBlock _ xs) = catMaybes (map generateJumpTableForInstr xs)
+
+-- -----------------------------------------------------------------------------
-- Shortcut branches
shortcutBranches
module PPC.CodeGen (
cmmTopCodeGen,
+ generateJumpTableForInstr,
InstrBlock
)
genJump tree
= do
(target,code) <- getSomeReg tree
- return (code `snocOL` MTCTR target `snocOL` BCTR [])
+ return (code `snocOL` MTCTR target `snocOL` BCTR [] Nothing)
-- -----------------------------------------------------------------------------
dflags <- getDynFlagsNat
dynRef <- cmmMakeDynamicReference dflags addImportNat DataReference lbl
(tableReg,t_code) <- getSomeReg $ dynRef
- let
- jumpTable = map jumpTableEntryRel ids
-
- jumpTableEntryRel Nothing
- = CmmStaticLit (CmmInt 0 wordWidth)
- jumpTableEntryRel (Just blockid)
- = CmmStaticLit (CmmLabelDiffOff blockLabel lbl 0)
- where blockLabel = mkAsmTempLabel (getUnique blockid)
-
- code = e_code `appOL` t_code `appOL` toOL [
- LDATA ReadOnlyData (CmmDataLabel lbl : jumpTable),
+ let code = e_code `appOL` t_code `appOL` toOL [
SLW tmp reg (RIImm (ImmInt 2)),
LD II32 tmp (AddrRegReg tableReg tmp),
ADD tmp tmp (RIReg tableReg),
MTCTR tmp,
- BCTR [ id | Just id <- ids ]
+ BCTR ids (Just lbl)
]
return code
| otherwise
(reg,e_code) <- getSomeReg expr
tmp <- getNewRegNat II32
lbl <- getNewLabelNat
- let
- jumpTable = map jumpTableEntry ids
-
- code = e_code `appOL` toOL [
- LDATA ReadOnlyData (CmmDataLabel lbl : jumpTable),
+ let code = e_code `appOL` toOL [
SLW tmp reg (RIImm (ImmInt 2)),
ADDIS tmp tmp (HA (ImmCLbl lbl)),
LD II32 tmp (AddrRegImm tmp (LO (ImmCLbl lbl))),
MTCTR tmp,
- BCTR [ id | Just id <- ids ]
+ BCTR ids (Just lbl)
]
return code
+generateJumpTableForInstr :: Instr -> Maybe (NatCmmTop Instr)
+generateJumpTableForInstr (BCTR ids (Just lbl)) =
+ let jumpTable
+ | opt_PIC = map jumpTableEntryRel ids
+ | otherwise = map jumpTableEntry ids
+ where jumpTableEntryRel Nothing
+ = CmmStaticLit (CmmInt 0 wordWidth)
+ jumpTableEntryRel (Just blockid)
+ = CmmStaticLit (CmmLabelDiffOff blockLabel lbl 0)
+ where blockLabel = mkAsmTempLabel (getUnique blockid)
+ in Just (CmmData ReadOnlyData (CmmDataLabel lbl : jumpTable))
+generateJumpTableForInstr _ = Nothing
-- -----------------------------------------------------------------------------
-- 'condIntReg' and 'condFltReg': condition codes into registers
| JMP CLabel -- same as branch,
-- but with CLabel instead of block ID
| MTCTR Reg
- | BCTR [BlockId] -- with list of local destinations
+ | BCTR [Maybe BlockId] (Maybe CLabel) -- with list of local destinations, and jump table location if necessary
| BL CLabel [Reg] -- with list of argument regs
| BCTRL [Reg]
BCC _ _ -> noUsage
BCCFAR _ _ -> noUsage
MTCTR reg -> usage ([reg],[])
- BCTR _ -> noUsage
+ BCTR _ _ -> noUsage
BL _ params -> usage (params, callClobberedRegs)
BCTRL params -> usage (params, callClobberedRegs)
ADD reg1 reg2 ri -> usage (reg2 : regRI ri, [reg1])
BCC cond lbl -> BCC cond lbl
BCCFAR cond lbl -> BCCFAR cond lbl
MTCTR reg -> MTCTR (env reg)
- BCTR targets -> BCTR targets
+ BCTR targets lbl -> BCTR targets lbl
BL imm argRegs -> BL imm argRegs -- argument regs
BCTRL argRegs -> BCTRL argRegs -- cannot be remapped
ADD reg1 reg2 ri -> ADD (env reg1) (env reg2) (fixRI ri)
= case insn of
BCC _ id -> [id]
BCCFAR _ id -> [id]
- BCTR targets -> targets
+ BCTR targets _ -> [id | Just id <- targets]
_ -> []
= case insn of
BCC cc id -> BCC cc (patchF id)
BCCFAR cc id -> BCCFAR cc (patchF id)
- BCTR _ -> error "Cannot patch BCTR"
+ BCTR ids lbl -> BCTR (map (fmap patchF) ids) lbl
_ -> insn
char '\t',
pprReg reg
]
-pprInstr (BCTR _) = hcat [
+pprInstr (BCTR _ _) = hcat [
char '\t',
ptext (sLit "bctr")
]
_ -> let instr' = patchJumpInstr instr
(\bid -> if bid == dest
then mkBlockId fixup_block_id
- else dest)
+ else bid) -- no change!
in joinToTargets' block_live (block : new_blocks) block_id instr' dests
module SPARC.CodeGen (
cmmTopCodeGen,
+ generateJumpTableForInstr,
InstrBlock
)
dst <- getNewRegNat II32
label <- getNewLabelNat
- let jumpTable = map jumpTableEntry ids
return $ e_code `appOL`
toOL
- -- the jump table
- [ LDATA ReadOnlyData (CmmDataLabel label : jumpTable)
-
- -- load base of jump table
- , SETHI (HI (ImmCLbl label)) base_reg
+ [ -- load base of jump table
+ SETHI (HI (ImmCLbl label)) base_reg
, OR False base_reg (RIImm $ LO $ ImmCLbl label) base_reg
-- the addrs in the table are 32 bits wide..
-- load and jump to the destination
, LD II32 (AddrRegReg base_reg offset_reg) dst
- , JMP_TBL (AddrRegImm dst (ImmInt 0)) [i | Just i <- ids]
+ , JMP_TBL (AddrRegImm dst (ImmInt 0)) ids label
, NOP ]
+generateJumpTableForInstr :: Instr -> Maybe (NatCmmTop Instr)
+generateJumpTableForInstr (JMP_TBL _ ids label) =
+ let jumpTable = map jumpTableEntry ids
+ in Just (CmmData ReadOnlyData (CmmDataLabel label : jumpTable))
+generateJumpTableForInstr _ = Nothing
import Reg
import Size
+import CLabel
import BlockId
import OldCmm
import FastString
-- With a tabled jump we know all the possible destinations.
-- We also need this info so we can work out what regs are live across the jump.
--
- | JMP_TBL AddrMode [BlockId]
+ | JMP_TBL AddrMode [Maybe BlockId] CLabel
| CALL (Either Imm Reg) Int Bool -- target, args, terminal
FxTOy _ _ r1 r2 -> usage ([r1], [r2])
JMP addr -> usage (regAddr addr, [])
- JMP_TBL addr _ -> usage (regAddr addr, [])
+ JMP_TBL addr _ _ -> usage (regAddr addr, [])
CALL (Left _ ) _ True -> noUsage
CALL (Left _ ) n False -> usage (argRegs n, callClobberedRegs)
FxTOy s1 s2 r1 r2 -> FxTOy s1 s2 (env r1) (env r2)
JMP addr -> JMP (fixAddr addr)
- JMP_TBL addr ids -> JMP_TBL (fixAddr addr) ids
+ JMP_TBL addr ids l -> JMP_TBL (fixAddr addr) ids l
CALL (Left i) n t -> CALL (Left i) n t
CALL (Right r) n t -> CALL (Right (env r)) n t
= case insn of
BI _ _ id -> [id]
BF _ _ id -> [id]
- JMP_TBL _ ids -> ids
+ JMP_TBL _ ids _ -> [id | Just id <- ids]
_ -> []
= case insn of
BI cc annul id -> BI cc annul (patchF id)
BF cc annul id -> BF cc annul (patchF id)
+ JMP_TBL n ids l -> JMP_TBL n (map (fmap patchF) ids) l
_ -> insn
]
pprInstr (JMP addr) = (<>) (ptext (sLit "\tjmp\t")) (pprAddr addr)
-pprInstr (JMP_TBL op _) = pprInstr (JMP op)
+pprInstr (JMP_TBL op _ _) = pprInstr (JMP op)
pprInstr (CALL (Left imm) n _)
= hcat [ ptext (sLit "\tcall\t"), pprImm imm, comma, int n ]
module X86.CodeGen (
cmmTopCodeGen,
+ generateJumpTableForInstr,
InstrBlock
)
dflags <- getDynFlagsNat
dynRef <- cmmMakeDynamicReference dflags addImportNat DataReference lbl
(tableReg,t_code) <- getSomeReg $ dynRef
- let
- jumpTable = map jumpTableEntryRel ids
-
- jumpTableEntryRel Nothing
- = CmmStaticLit (CmmInt 0 wordWidth)
- jumpTableEntryRel (Just blockid)
- = CmmStaticLit (CmmLabelDiffOff blockLabel lbl 0)
- where blockLabel = mkAsmTempLabel (getUnique blockid)
-
- op = OpAddr (AddrBaseIndex (EABaseReg tableReg)
+ let op = OpAddr (AddrBaseIndex (EABaseReg tableReg)
(EAIndex reg wORD_SIZE) (ImmInt 0))
#if x86_64_TARGET_ARCH
code = e_code `appOL` t_code `appOL` toOL [
ADD (intSize wordWidth) op (OpReg tableReg),
- JMP_TBL (OpReg tableReg) [ id | Just id <- ids ],
- LDATA Text (CmmDataLabel lbl : jumpTable)
+ JMP_TBL (OpReg tableReg) ids Text lbl
]
#else
-- HACK: On x86_64 binutils<2.17 is only able to generate PC32
-- conjunction with the hack in PprMach.hs/pprDataItem once
-- binutils 2.17 is standard.
code = e_code `appOL` t_code `appOL` toOL [
- LDATA ReadOnlyData (CmmDataLabel lbl : jumpTable),
MOVSxL II32
(OpAddr (AddrBaseIndex (EABaseReg tableReg)
(EAIndex reg wORD_SIZE) (ImmInt 0)))
(OpReg reg),
ADD (intSize wordWidth) (OpReg reg) (OpReg tableReg),
- JMP_TBL (OpReg tableReg) [ id | Just id <- ids ]
+ JMP_TBL (OpReg tableReg) ids ReadOnlyData lbl
]
#endif
#else
code = e_code `appOL` t_code `appOL` toOL [
- LDATA ReadOnlyData (CmmDataLabel lbl : jumpTable),
ADD (intSize wordWidth) op (OpReg tableReg),
- JMP_TBL (OpReg tableReg) [ id | Just id <- ids ]
+ JMP_TBL (OpReg tableReg) ids ReadOnlyData lbl
]
#endif
return code
(reg,e_code) <- getSomeReg expr
lbl <- getNewLabelNat
let
- jumpTable = map jumpTableEntry ids
op = OpAddr (AddrBaseIndex EABaseNone (EAIndex reg wORD_SIZE) (ImmCLbl lbl))
code = e_code `appOL` toOL [
- LDATA ReadOnlyData (CmmDataLabel lbl : jumpTable),
- JMP_TBL op [ id | Just id <- ids ]
+ JMP_TBL op ids ReadOnlyData lbl
]
-- in
return code
+generateJumpTableForInstr :: Instr -> Maybe (NatCmmTop Instr)
+generateJumpTableForInstr (JMP_TBL _ ids section lbl) = Just (createJumpTable ids section lbl)
+generateJumpTableForInstr _ = Nothing
+
+createJumpTable ids section lbl
+ = let jumpTable
+ | opt_PIC =
+ let jumpTableEntryRel Nothing
+ = CmmStaticLit (CmmInt 0 wordWidth)
+ jumpTableEntryRel (Just blockid)
+ = CmmStaticLit (CmmLabelDiffOff blockLabel lbl 0)
+ where blockLabel = mkAsmTempLabel (getUnique blockid)
+ in map jumpTableEntryRel ids
+ | otherwise = map jumpTableEntry ids
+ in CmmData section (CmmDataLabel lbl : jumpTable)
-- -----------------------------------------------------------------------------
-- 'condIntReg' and 'condFltReg': condition codes into registers
| JMP Operand
| JXX Cond BlockId -- includes unconditional branches
| JXX_GBL Cond Imm -- non-local version of JXX
- | JMP_TBL Operand [BlockId] -- table jump
+ -- Table jump
+ | JMP_TBL Operand -- Address to jump to
+ [Maybe BlockId] -- Blocks in the jump table
+ Section -- Data section jump table should be put in
+ CLabel -- Label of jump table
| CALL (Either Imm Reg) [Reg]
-- Other things.
JXX _ _ -> mkRU [] []
JXX_GBL _ _ -> mkRU [] []
JMP op -> mkRUR (use_R op)
- JMP_TBL op _ -> mkRUR (use_R op)
+ JMP_TBL op _ _ _ -> mkRUR (use_R op)
CALL (Left _) params -> mkRU params callClobberedRegs
CALL (Right reg) params -> mkRU (reg:params) callClobberedRegs
CLTD _ -> mkRU [eax] [edx]
POP sz op -> patch1 (POP sz) op
SETCC cond op -> patch1 (SETCC cond) op
JMP op -> patch1 JMP op
- JMP_TBL op ids -> patch1 JMP_TBL op $ ids
+ JMP_TBL op ids s lbl-> JMP_TBL (patchOp op) ids s lbl
GMOV src dst -> GMOV (env src) (env dst)
GLD sz src dst -> GLD sz (lookupAddr src) (env dst)
x86_jumpDestsOfInstr insn
= case insn of
JXX _ id -> [id]
- JMP_TBL _ ids -> ids
+ JMP_TBL _ ids _ _ -> [id | Just id <- ids]
_ -> []
x86_patchJumpInstr insn patchF
= case insn of
JXX cc id -> JXX cc (patchF id)
- JMP_TBL _ _ -> error "Cannot patch JMP_TBL"
+ JMP_TBL op ids section lbl
+ -> JMP_TBL op (map (fmap patchF) ids) section lbl
_ -> insn
<+> pprCLabel_asm (mkDeadStripPreventer $ entryLblToInfoLbl lbl)
else empty
#endif
+ $$ pprSizeDecl (if null info then lbl else entryLblToInfoLbl lbl)
+-- | Output the ELF .size directive.
+pprSizeDecl :: CLabel -> Doc
+#if elf_OBJ_FORMAT
+pprSizeDecl lbl =
+ ptext (sLit "\t.size") <+> pprCLabel_asm lbl
+ <> ptext (sLit ", .-") <> pprCLabel_asm lbl
+#else
+pprSizeDecl _ = empty
+#endif
pprBasicBlock :: NatBasicBlock Instr -> Doc
pprBasicBlock (BasicBlock blockid instrs) =
pprInstr (JMP (OpImm imm)) = (<>) (ptext (sLit "\tjmp ")) (pprImm imm)
pprInstr (JMP op) = (<>) (ptext (sLit "\tjmp *")) (pprOperand archWordSize op)
-pprInstr (JMP_TBL op _) = pprInstr (JMP op)
+pprInstr (JMP_TBL op _ _ _) = pprInstr (JMP op)
pprInstr (CALL (Left imm) _) = (<>) (ptext (sLit "\tcall ")) (pprImm imm)
pprInstr (CALL (Right reg) _) = (<>) (ptext (sLit "\tcall *")) (pprReg archWordSize reg)
mkPState :: DynFlags -> StringBuffer -> SrcLoc -> PState
mkPState flags buf loc =
PState {
- buffer = buf,
+ buffer = buf,
dflags = flags,
messages = emptyMessages,
last_loc = mkSrcSpan loc loc,
alr_justClosedExplicitLetBlock = False
}
where
- bitmap = genericsBit `setBitIf` xopt Opt_Generics flags
- .|. ffiBit `setBitIf` xopt Opt_ForeignFunctionInterface flags
- .|. parrBit `setBitIf` xopt Opt_ParallelArrays flags
- .|. arrowsBit `setBitIf` xopt Opt_Arrows flags
- .|. thBit `setBitIf` xopt Opt_TemplateHaskell flags
- .|. qqBit `setBitIf` xopt Opt_QuasiQuotes flags
- .|. ipBit `setBitIf` xopt Opt_ImplicitParams flags
- .|. explicitForallBit `setBitIf` xopt Opt_ExplicitForAll flags
- .|. bangPatBit `setBitIf` xopt Opt_BangPatterns flags
- .|. tyFamBit `setBitIf` xopt Opt_TypeFamilies flags
- .|. haddockBit `setBitIf` dopt Opt_Haddock flags
- .|. magicHashBit `setBitIf` xopt Opt_MagicHash flags
- .|. kindSigsBit `setBitIf` xopt Opt_KindSignatures flags
- .|. recursiveDoBit `setBitIf` xopt Opt_RecursiveDo flags
- .|. recBit `setBitIf` xopt Opt_DoRec flags
- .|. recBit `setBitIf` xopt Opt_Arrows flags
- .|. unicodeSyntaxBit `setBitIf` xopt Opt_UnicodeSyntax flags
- .|. unboxedTuplesBit `setBitIf` xopt Opt_UnboxedTuples flags
+ bitmap = genericsBit `setBitIf` xopt Opt_Generics flags
+ .|. ffiBit `setBitIf` xopt Opt_ForeignFunctionInterface flags
+ .|. parrBit `setBitIf` xopt Opt_ParallelArrays flags
+ .|. arrowsBit `setBitIf` xopt Opt_Arrows flags
+ .|. thBit `setBitIf` xopt Opt_TemplateHaskell flags
+ .|. qqBit `setBitIf` xopt Opt_QuasiQuotes flags
+ .|. ipBit `setBitIf` xopt Opt_ImplicitParams flags
+ .|. explicitForallBit `setBitIf` xopt Opt_ExplicitForAll flags
+ .|. bangPatBit `setBitIf` xopt Opt_BangPatterns flags
+ .|. tyFamBit `setBitIf` xopt Opt_TypeFamilies flags
+ .|. haddockBit `setBitIf` dopt Opt_Haddock flags
+ .|. magicHashBit `setBitIf` xopt Opt_MagicHash flags
+ .|. kindSigsBit `setBitIf` xopt Opt_KindSignatures flags
+ .|. recursiveDoBit `setBitIf` xopt Opt_RecursiveDo flags
+ .|. recBit `setBitIf` xopt Opt_DoRec flags
+ .|. recBit `setBitIf` xopt Opt_Arrows flags
+ .|. unicodeSyntaxBit `setBitIf` xopt Opt_UnicodeSyntax flags
+ .|. unboxedTuplesBit `setBitIf` xopt Opt_UnboxedTuples flags
.|. datatypeContextsBit `setBitIf` xopt Opt_DatatypeContexts flags
.|. transformComprehensionsBit `setBitIf` xopt Opt_TransformListComp flags
.|. transformComprehensionsBit `setBitIf` xopt Opt_MonadComprehensions flags
.|. rawTokenStreamBit `setBitIf` dopt Opt_KeepRawTokenStream flags
.|. alternativeLayoutRuleBit `setBitIf` xopt Opt_AlternativeLayoutRule flags
- .|. relaxedLayoutBit `setBitIf` xopt Opt_RelaxedLayout flags
+ .|. relaxedLayoutBit `setBitIf` xopt Opt_RelaxedLayout flags
.|. nondecreasingIndentationBit `setBitIf` xopt Opt_NondecreasingIndentation flags
--
setBitIf :: Int -> Bool -> Int
b `setBitIf` cond | cond = bit b
- | otherwise = 0
+ | otherwise = 0
addWarning :: DynFlag -> SrcSpan -> SDoc -> P ()
addWarning option srcspan warning
like that, so we use a BuiltinRule instead, so that we
can match in any two literal values. So the rule is really
more like
- (Lit 4) +# (Lit y) = Lit (x+#y)
+ (Lit x) +# (Lit y) = Lit (x+#y)
where the (+#) on the rhs is done at compile time
That is why these rules are built in here. Other rules
add_sig :: LSig a -> HsValBinds a -> HsValBinds a
add_sig s (ValBindsIn bs sigs) = ValBindsIn bs (s:sigs)
add_sig _ (ValBindsOut {}) = panic "RdrHsSyn:add_sig"
-\end{code}
\ No newline at end of file
+\end{code}
simpl_phase phase names iter
= CoreDoPasses
- [ maybe_strictness_before phase
+ $ [ maybe_strictness_before phase
, CoreDoSimplify iter
(base_mode { sm_phase = Phase phase
, sm_names = names })
- , maybe_rule_check (Phase phase)
- ]
+ , maybe_rule_check (Phase phase) ]
+
+ -- Vectorisation can introduce a fair few common sub expressions involving
+ -- DPH primitives. For example, see the Reverse test from dph-examples.
+ -- We need to eliminate these common sub expressions before their definitions
+ -- are inlined in phase 2. The CSE introduces lots of v1 = v2 bindings,
+ -- so we also run simpl_gently to inline them.
+ ++ (if dopt Opt_Vectorise dflags && phase == 3
+ then [CoreCSE, simpl_gently]
+ else [])
vectorisation
= runWhen (dopt Opt_Vectorise dflags) $
module checks to see if a foreign declaration has got a legal type.
\begin{code}
-module TcForeign
- (
- tcForeignImports
+module TcForeign
+ (
+ tcForeignImports
, tcForeignExports
- ) where
+ ) where
#include "HsVersions.h"
-- Defines a binding
isForeignImport :: LForeignDecl name -> Bool
isForeignImport (L _ (ForeignImport _ _ _)) = True
-isForeignImport _ = False
+isForeignImport _ = False
-- Exports a binding
isForeignExport :: LForeignDecl name -> Bool
isForeignExport (L _ (ForeignExport _ _ _)) = True
-isForeignExport _ = False
+isForeignExport _ = False
\end{code}
%************************************************************************
-%* *
+%* *
\subsection{Imports}
-%* *
+%* *
%************************************************************************
\begin{code}
tcFImport :: ForeignDecl Name -> TcM (Id, ForeignDecl Id)
tcFImport fo@(ForeignImport (L loc nm) hs_ty imp_decl)
- = addErrCtxt (foreignDeclCtxt fo) $
- do { sig_ty <- tcHsSigType (ForSigCtxt nm) hs_ty
- ; let
- -- Drop the foralls before inspecting the
- -- structure of the foreign type.
- (_, t_ty) = tcSplitForAllTys sig_ty
- (arg_tys, res_ty) = tcSplitFunTys t_ty
- id = mkLocalId nm sig_ty
- -- Use a LocalId to obey the invariant that locally-defined
- -- things are LocalIds. However, it does not need zonking,
- -- (so TcHsSyn.zonkForeignExports ignores it).
-
- ; imp_decl' <- tcCheckFIType sig_ty arg_tys res_ty imp_decl
- -- Can't use sig_ty here because sig_ty :: Type and
- -- we need HsType Id hence the undefined
- ; return (id, ForeignImport (L loc id) undefined imp_decl') }
+ = addErrCtxt (foreignDeclCtxt fo) $
+ do { sig_ty <- tcHsSigType (ForSigCtxt nm) hs_ty
+ ; let
+ -- Drop the foralls before inspecting the
+ -- structure of the foreign type.
+ (_, t_ty) = tcSplitForAllTys sig_ty
+ (arg_tys, res_ty) = tcSplitFunTys t_ty
+ id = mkLocalId nm sig_ty
+ -- Use a LocalId to obey the invariant that locally-defined
+ -- things are LocalIds. However, it does not need zonking,
+ -- (so TcHsSyn.zonkForeignExports ignores it).
+
+ ; imp_decl' <- tcCheckFIType sig_ty arg_tys res_ty imp_decl
+ -- Can't use sig_ty here because sig_ty :: Type and
+ -- we need HsType Id hence the undefined
+ ; return (id, ForeignImport (L loc id) undefined imp_decl') }
tcFImport d = pprPanic "tcFImport" (ppr d)
\end{code}
do { checkCg checkCOrAsmOrLlvmOrInterp
; checkSafety safety
; check (isFFILabelTy res_ty) (illegalForeignTyErr empty sig_ty)
- ; return idecl } -- NB check res_ty not sig_ty!
- -- In case sig_ty is (forall a. ForeignPtr a)
+ ; return idecl } -- NB check res_ty not sig_ty!
+ -- In case sig_ty is (forall a. ForeignPtr a)
tcCheckFIType sig_ty arg_tys res_ty idecl@(CImport cconv safety _ CWrapper) = do
- -- Foreign wrapper (former f.e.d.)
- -- The type must be of the form ft -> IO (FunPtr ft), where ft is a
- -- valid foreign type. For legacy reasons ft -> IO (Ptr ft) as well
- -- as ft -> IO Addr is accepted, too. The use of the latter two forms
- -- is DEPRECATED, though.
+ -- Foreign wrapper (former f.e.d.)
+ -- The type must be of the form ft -> IO (FunPtr ft), where ft is a
+ -- valid foreign type. For legacy reasons ft -> IO (Ptr ft) as well
+ -- as ft -> IO Addr is accepted, too. The use of the latter two forms
+ -- is DEPRECATED, though.
checkCg checkCOrAsmOrLlvmOrInterp
checkCConv cconv
checkSafety safety
\end{code}
%************************************************************************
-%* *
+%* *
\subsection{Exports}
-%* *
+%* *
%************************************************************************
\begin{code}
-tcForeignExports :: [LForeignDecl Name]
- -> TcM (LHsBinds TcId, [LForeignDecl TcId])
+tcForeignExports :: [LForeignDecl Name]
+ -> TcM (LHsBinds TcId, [LForeignDecl TcId])
tcForeignExports decls
= foldlM combine (emptyLHsBinds, []) (filter isForeignExport decls)
where
return (b `consBag` binds, f:fs)
tcFExport :: ForeignDecl Name -> TcM (LHsBind Id, ForeignDecl Id)
-tcFExport fo@(ForeignExport (L loc nm) hs_ty spec) =
- addErrCtxt (foreignDeclCtxt fo) $ do
+tcFExport fo@(ForeignExport (L loc nm) hs_ty spec)
+ = addErrCtxt (foreignDeclCtxt fo) $ do
- sig_ty <- tcHsSigType (ForSigCtxt nm) hs_ty
- rhs <- tcPolyExpr (nlHsVar nm) sig_ty
+ sig_ty <- tcHsSigType (ForSigCtxt nm) hs_ty
+ rhs <- tcPolyExpr (nlHsVar nm) sig_ty
- tcCheckFEType sig_ty spec
+ tcCheckFEType sig_ty spec
- -- we're exporting a function, but at a type possibly more
- -- constrained than its declared/inferred type. Hence the need
- -- to create a local binding which will call the exported function
- -- at a particular type (and, maybe, overloading).
+ -- we're exporting a function, but at a type possibly more
+ -- constrained than its declared/inferred type. Hence the need
+ -- to create a local binding which will call the exported function
+ -- at a particular type (and, maybe, overloading).
- -- We need to give a name to the new top-level binding that
- -- is *stable* (i.e. the compiler won't change it later),
- -- because this name will be referred to by the C code stub.
- id <- mkStableIdFromName nm sig_ty loc mkForeignExportOcc
- return (mkVarBind id rhs, ForeignExport (L loc id) undefined spec)
+ -- We need to give a name to the new top-level binding that
+ -- is *stable* (i.e. the compiler won't change it later),
+ -- because this name will be referred to by the C code stub.
+ id <- mkStableIdFromName nm sig_ty loc mkForeignExportOcc
+ return (mkVarBind id rhs, ForeignExport (L loc id) undefined spec)
tcFExport d = pprPanic "tcFExport" (ppr d)
\end{code}
%************************************************************************
-%* *
+%* *
\subsection{Miscellaneous}
-%* *
+%* *
%************************************************************************
\begin{code}
go ty = check (pred ty) (illegalForeignTyErr argument ty)
------------ Checking result types for foreign calls ----------------------
--- Check that the type has the form
+-- Check that the type has the form
-- (IO t) or (t) , and that t satisfies the given predicate.
--
checkForeignRes :: Bool -> (Type -> Bool) -> Type -> TcM ()
mustBeIO = False
checkForeignRes non_io_result_ok pred_res_ty ty
- -- (IO t) is ok, and so is any newtype wrapping thereof
+ -- (IO t) is ok, and so is any newtype wrapping thereof
| Just (_, res_ty, _) <- tcSplitIOType_maybe ty,
pred_res_ty res_ty
= return ()
-
+
| otherwise
- = check (non_io_result_ok && pred_res_ty ty)
- (illegalForeignTyErr result ty)
+ = check (non_io_result_ok && pred_res_ty ty)
+ (illegalForeignTyErr result ty)
\end{code}
\begin{code}
checkCOrAsmOrLlvm HscAsm = Nothing
checkCOrAsmOrLlvm HscLlvm = Nothing
checkCOrAsmOrLlvm _
- = Just (text "requires via-C, llvm (-fllvm) or native code generation (-fvia-C)")
+ = Just (text "requires via-C, llvm (-fllvm) or native code generation (-fvia-C)")
checkCOrAsmOrLlvmOrInterp :: HscTarget -> Maybe SDoc
checkCOrAsmOrLlvmOrInterp HscC = Nothing
checkCOrAsmOrLlvmOrInterp HscLlvm = Nothing
checkCOrAsmOrLlvmOrInterp HscInterpreted = Nothing
checkCOrAsmOrLlvmOrInterp _
- = Just (text "requires interpreted, C, Llvm or native code generation")
+ = Just (text "requires interpreted, C, Llvm or native code generation")
checkCOrAsmOrLlvmOrDotNetOrInterp :: HscTarget -> Maybe SDoc
checkCOrAsmOrLlvmOrDotNetOrInterp HscC = Nothing
checkCOrAsmOrLlvmOrDotNetOrInterp HscLlvm = Nothing
checkCOrAsmOrLlvmOrDotNetOrInterp HscInterpreted = Nothing
checkCOrAsmOrLlvmOrDotNetOrInterp _
- = Just (text "requires interpreted, C, Llvm or native code generation")
+ = Just (text "requires interpreted, C, Llvm or native code generation")
checkCg :: (HscTarget -> Maybe SDoc) -> TcM ()
checkCg check = do
- dflags <- getDOpts
- let target = hscTarget dflags
- case target of
- HscNothing -> return ()
- _ ->
- case check target of
- Nothing -> return ()
- Just err -> addErrTc (text "Illegal foreign declaration:" <+> err)
+ dflags <- getDOpts
+ let target = hscTarget dflags
+ case target of
+ HscNothing -> return ()
+ _ ->
+ case check target of
+ Nothing -> return ()
+ Just err -> addErrTc (text "Illegal foreign declaration:" <+> err)
\end{code}
-
+
Calling conventions
\begin{code}
checkCConv :: CCallConv -> TcM ()
-checkCConv CCallConv = return ()
+checkCConv CCallConv = return ()
#if i386_TARGET_ARCH
-checkCConv StdCallConv = return ()
+checkCConv StdCallConv = return ()
#else
-- This is a warning, not an error. see #3336
-checkCConv StdCallConv = addWarnTc (text "the 'stdcall' calling convention is unsupported on this platform,"$$ text "treating as ccall")
+checkCConv StdCallConv = addWarnTc (text "the 'stdcall' calling convention is unsupported on this platform," $$ text "treating as ccall")
#endif
checkCConv PrimCallConv = addErrTc (text "The `prim' calling convention can only be used with `foreign import'")
-checkCConv CmmCallConv = panic "checkCConv CmmCallConv"
+checkCConv CmmCallConv = panic "checkCConv CmmCallConv"
\end{code}
Deprecated "threadsafe" calls
\begin{code}
check :: Bool -> Message -> TcM ()
-check True _ = return ()
+check True _ = return ()
check _ the_err = addErrTc the_err
illegalForeignTyErr :: SDoc -> Type -> SDoc
illegalForeignTyErr arg_or_res ty
- = hang (hsep [ptext (sLit "Unacceptable"), arg_or_res,
+ = hang (hsep [ptext (sLit "Unacceptable"), arg_or_res,
ptext (sLit "type in foreign declaration:")])
2 (hsep [ppr ty])
result = text "result"
badCName :: CLabelString -> Message
-badCName target
- = sep [quotes (ppr target) <+> ptext (sLit "is not a valid C identifier")]
+badCName target
+ = sep [quotes (ppr target) <+> ptext (sLit "is not a valid C identifier")]
foreignDeclCtxt :: ForeignDecl Name -> SDoc
foreignDeclCtxt fo
= hang (ptext (sLit "When checking declaration:"))
2 (ppr fo)
\end{code}
-
| nm1 == nm2
= -- See Note [When improvement happens]
do { co_var <- newCoVar ty2 ty1 -- See Note [Efficient Orientation]
- ; let flav = Wanted (combineCtLoc ifl wfl)
- ; cans <- mkCanonical flav co_var
- ; mkIRContinue "IP/IP fundep" workItem KeepInert cans }
+ ; let flav = Wanted (combineCtLoc ifl wfl)
+ ; cans <- mkCanonical flav co_var
+ ; case wfl of
+ Given {} -> pprPanic "Unexpected given IP" (ppr workItem)
+ Derived {} -> pprPanic "Unexpected derived IP" (ppr workItem)
+ Wanted {} ->
+ do { setIPBind (cc_id workItem) $
+ EvCast id1 (mkSymCoercion (mkCoVarCoercion co_var))
+ ; mkIRStopK "IP/IP interaction (solved)" cans }
+ }
-- Never rewrite a given with a wanted equality, and a type function
-- equality can never rewrite an equality. We rewrite LHS *and* RHS
(ImportAvails { imp_mods = mods2,
imp_dep_mods = dmods2, imp_dep_pkgs = dpkgs2,
imp_orphs = orphs2, imp_finsts = finsts2 })
- = ImportAvails { imp_mods = plusModuleEnv_C (++) mods1 mods2,
+ = ImportAvails { imp_mods = plusModuleEnv_C (++) mods1 mods2,
imp_dep_mods = plusUFM_C plus_mod_dep dmods1 dmods2,
imp_dep_pkgs = dpkgs1 `unionLists` dpkgs2,
imp_orphs = orphs1 `unionLists` orphs2,
| UnitBag a
| TwoBags (Bag a) (Bag a) -- INVARIANT: neither branch is empty
| ListBag [a] -- INVARIANT: the list is non-empty
+ deriving Typeable
emptyBag :: Bag a
emptyBag = EmptyBag
instance (Outputable a) => Outputable (Bag a) where
ppr bag = braces (pprWithCommas ppr (bagToList bag))
-INSTANCE_TYPEABLE1(Bag,bagTc,"Bag")
-
instance Data a => Data (Bag a) where
gfoldl k z b = z listToBag `k` bagToList b -- traverse abstract type abstractly
toConstr _ = abstractConstr $ "Bag("++show (typeOf (undefined::a))++")"
-- * Floating point
readRational,
+ -- * read helpers
+ maybeReadFuzzy,
+
-- * IO-ish utilities
createDirectoryHierarchy,
doesDirNameExist,
-----------------------------------------------------------------------------
+-- read helpers
+
+maybeReadFuzzy :: Read a => String -> Maybe a
+maybeReadFuzzy str = case reads str of
+ [(x, s)]
+ | all isSpace s ->
+ Just x
+ _ ->
+ Nothing
+
+-----------------------------------------------------------------------------
-- Create a hierarchy of directories
createDirectoryHierarchy :: FilePath -> IO ()
GhcCanonVersion="$GhcMajVersion$GhcMinVersion2"
if test $GhcCanonVersion -ge 613; then ghc_ge_613=YES; else ghc_ge_613=NO; fi
AC_SUBST(ghc_ge_613)dnl
+
+ BOOTSTRAPPING_GHC_INFO_FIELD([CC_STAGE0],[C compiler command],['$(CC)'])
fi
dnl ** Must have GHC to build GHC, unless --enable-hc-boot is on
-if test "$BootingFromHc" = "NO" -a -d "$srcdir/compiler"; then
+if test "$BootingFromHc" = "NO"; then
if test "$WithGhc" = ""; then
AC_MSG_ERROR([GHC is required unless bootstrapping from .hc files.])
fi
# Verify that the installed (bootstrap) GHC is capable of generating
# code for the requested build platform.
-if test "$BuildPlatform" != "$bootstrap_target"
+if test "$BootingFromHc" = "NO"
then
- echo "This GHC (${WithGhc}) does not generate code for the build platform"
- echo " GHC target platform : $bootstrap_target"
- echo " Desired build platform : $BuildPlatform"
- exit 1
+ if test "$BuildPlatform" != "$bootstrap_target"
+ then
+ echo "This GHC (${WithGhc}) does not generate code for the build platform"
+ echo " GHC target platform : $bootstrap_target"
+ echo " Desired build platform : $BuildPlatform"
+ exit 1
+ fi
fi
echo "GHC build : $BuildPlatform"
dnl Figure out which C compiler to use. Gcc is preferred.
dnl If gcc, make sure it's at least 2.1
dnl
-FP_HAVE_GCC
+FP_GCC_VERSION
FPTOOLS_SET_C_LD_FLAGS([target],[CFLAGS],[LDFLAGS],[IGNORE_LINKER_LD_FLAGS],[CPPFLAGS])
FPTOOLS_SET_C_LD_FLAGS([build],[CONF_CC_OPTS_STAGE0],[CONF_GCC_LINKER_OPTS_STAGE0],[CONF_LD_LINKER_OPTS_STAGE0],[CONF_CPP_OPTS_STAGE0])
dnl ** how to invoke `ar' and `ranlib'
FP_PROG_AR_SUPPORTS_ATFILE
FP_PROG_AR_NEEDS_RANLIB
-FP_PROG_AR_SUPPORTS_INPUT
dnl ** Check to see whether ln -s works
AC_PROG_LN_S
AC_MSG_ERROR([compiler/ghc.cabal.in contains tab characters; please remove them])
fi
-AC_CONFIG_FILES([mk/config.mk mk/install.mk mk/project.mk compiler/ghc.cabal ghc/ghc-bin.cabal utils/runghc/runghc.cabal ghc.spec extra-gcc-opts docs/users_guide/ug-book.xml docs/users_guide/ug-ent.xml docs/index.html libraries/prologue.txt distrib/ghc.iss distrib/configure.ac])
+AC_CONFIG_FILES([mk/config.mk mk/install.mk mk/project.mk compiler/ghc.cabal ghc/ghc-bin.cabal utils/runghc/runghc.cabal ghc.spec settings docs/users_guide/ug-book.xml docs/users_guide/ug-ent.xml docs/index.html libraries/prologue.txt distrib/ghc.iss distrib/configure.ac])
AC_CONFIG_COMMANDS([mk/stamp-h],[echo timestamp > mk/stamp-h])
AC_OUTPUT
$(MAKE) -C gmp install DOING_BIN_DIST=YES
$(MAKE) -C docs install-docs DOING_BIN_DIST=YES
$(MAKE) -C libraries/Cabal/doc install-docs DOING_BIN_DIST=YES
- $(INSTALL_DATA) $(INSTALL_OPTS) extra-gcc-opts $(libdir)
+ $(INSTALL_DATA) $(INSTALL_OPTS) settings $(libdir)
install :: postinstall denounce
WhatGccIsCalled="$CC"
AC_SUBST(WhatGccIsCalled)
-FP_HAVE_GCC
+FP_GCC_VERSION
AC_PROG_CPP
#
FP_PROG_AR_NEEDS_RANLIB
#
-AC_CONFIG_FILES(extra-gcc-opts mk/config.mk mk/install.mk)
+AC_CONFIG_FILES(settings mk/config.mk mk/install.mk)
AC_OUTPUT
# We get caught by
style.</para>
</listitem>
</varlistentry>
+ </variablelist>
+ </sect2>
+
+ <sect2 id="formatting-dumps">
+ <title>Formatting dumps</title>
+
+ <indexterm><primary>formatting dumps</primary></indexterm>
+
+ <variablelist>
+ <varlistentry>
+ <term>
+ <option>-dppr-user-length</option>
+ <indexterm><primary><option>-dppr-user-length</option></primary></indexterm>
+ </term>
+ <listitem>
+ <para>In error messages, expressions are printed to a
+ certain “depth”, with subexpressions beyond the
+ depth replaced by ellipses. This flag sets the
+ depth. Its default value is 5.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>
+ <option>-dppr-colsNNN</option>
+ <indexterm><primary><option>-dppr-colsNNN</option></primary></indexterm>
+ </term>
+ <listitem>
+ <para>Set the width of debugging output. Use this if your code is wrapping too much.
+ For example: <option>-dppr-cols200</option>.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>
+ <option>-dppr-case-as-let</option>
+ <indexterm><primary><option>-dppr-case-as-let</option></primary></indexterm>
+ </term>
+ <listitem>
+ <para>Print single alternative case expressions as though they were strict
+ let expressions. This is helpful when your code does a lot of unboxing.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>
+ <option>-dno-debug-output</option>
+ <indexterm><primary><option>-dno-debug-output</option></primary></indexterm>
+ </term>
+ <listitem>
+ <para>Suppress any unsolicited debugging output. When GHC
+ has been built with the <literal>DEBUG</literal> option it
+ occasionally emits debug output of interest to developers.
+ The extra output can confuse the testing framework and
+ cause bogus test failures, so this flag is provided to
+ turn it off.</para>
+ </listitem>
+ </varlistentry>
+ </variablelist>
+
+ </sect2>
+
+ <sect2 id="suppression">
+ <title>Suppressing unwanted information</title>
+
+ <indexterm><primary>suppression</primary></indexterm>
+
+ <para>Core dumps contain a large amount of information. Depending on what you are doing, not all of it will be useful.
+ Use these flags to suppress the parts that you are not interested in.</para>
+
+ <variablelist>
+ <varlistentry>
+ <term>
+ <option>-dsuppress-all</option>
+ <indexterm><primary><option>-dsuppress-all</option></primary></indexterm>
+ </term>
+ <listitem>
+ <para>Suppress everything that can be suppressed, except for unique ids as this often
+ makes the printout ambiguous. If you just want to see the overall structure of
+ the code, then start here.</para>
+ </listitem>
+ </varlistentry>
<varlistentry>
<term>
<indexterm><primary><option>-dsuppress-uniques</option></primary></indexterm>
</term>
<listitem>
- <para>Suppress the printing of uniques in debugging output. This may make
+ <para>Suppress the printing of uniques. This may make
the printout ambiguous (e.g. unclear where an occurrence of 'x' is bound), but
it makes the output of two compiler runs have many fewer gratuitous differences,
so you can realistically apply <command>diff</command>. Once <command>diff</command>
<varlistentry>
<term>
- <option>-dsuppress-coercions</option>
- <indexterm><primary><option>-dsuppress-coercions</option></primary></indexterm>
+ <option>-dsuppress-idinfo</option>
+ <indexterm><primary><option>-dsuppress-idinfo</option></primary></indexterm>
</term>
<listitem>
- <para>Suppress the printing of coercions in Core dumps to make them
-shorter.</para>
+ <para>Suppress extended information about identifiers where they are bound. This includes
+ strictness information and inliner templates. Using this flag can cut the size
+ of the core dump in half, due to the lack of inliner templates.</para>
</listitem>
</varlistentry>
<indexterm><primary><option>-dsuppress-module-prefixes</option></primary></indexterm>
</term>
<listitem>
- <para>Suppress the printing of module qualification prefixes in Core dumps to make them easier to read.</para>
+ <para>Suppress the printing of module qualification prefixes.
+ This is the <constant>Data.List</constant> in <constant>Data.List.length</constant>.</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
- <option>-dppr-user-length</option>
- <indexterm><primary><option>-dppr-user-length</option></primary></indexterm>
+ <option>-dsuppress-type-signatures</option>
+ <indexterm><primary><option>-dsuppress-type-signatures</option></primary></indexterm>
</term>
<listitem>
- <para>In error messages, expressions are printed to a
- certain “depth”, with subexpressions beyond the
- depth replaced by ellipses. This flag sets the
- depth. Its default value is 5.</para>
+ <para>Suppress the printing of type signatures.</para>
</listitem>
</varlistentry>
<varlistentry>
- <term>
- <option>-dno-debug-output</option>
- <indexterm><primary><option>-dno-debug-output</option></primary></indexterm>
+ <term>
+ <option>-dsuppress-type-applications</option>
+ <indexterm><primary><option>-dsuppress-type-applications</option></primary></indexterm>
</term>
- <listitem>
- <para>Suppress any unsolicited debugging output. When GHC
- has been built with the <literal>DEBUG</literal> option it
- occasionally emits debug output of interest to developers.
- The extra output can confuse the testing framework and
- cause bogus test failures, so this flag is provided to
- turn it off.</para>
- </listitem>
+ <listitem>
+ <para>Suppress the printing of type applications.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>
+ <option>-dsuppress-coercions</option>
+ <indexterm><primary><option>-dsuppress-coercions</option></primary></indexterm>
+ </term>
+ <listitem>
+ <para>Suppress the printing of type coercions.</para>
+ </listitem>
</varlistentry>
</variablelist>
</sect2>
<entry>-</entry>
</row>
<row>
+ <entry><option>-dppr-noprags</option></entry>
+ <entry>Don't output pragma info in dumps</entry>
+ <entry>static</entry>
+ <entry>-</entry>
+ </row>
+ <row>
+ <entry><option>-dppr-user-length</option></entry>
+ <entry>Set the depth for printing expressions in error msgs</entry>
+ <entry>static</entry>
+ <entry>-</entry>
+ </row>
+ <row>
+ <entry><option>-dppr-colsNNN</option></entry>
+ <entry>Set the width of debugging output. For example <option>-dppr-cols200</option></entry>
+ <entry>static</entry>
+ <entry>-</entry>
+ </row>
+ <row>
+ <entry><option>-dppr-case-as-let</option></entry>
+ <entry>Print single alternative case expressions as strict lets.</entry>
+ <entry>static</entry>
+ <entry>-</entry>
+ </row>
+ <row>
+ <entry><option>-dsuppress-all</option></entry>
+ <entry>In core dumps, suppress everything that is suppressable.</entry>
+ <entry>static</entry>
+ <entry>-</entry>
+ </row>
+ <row>
<entry><option>-dsuppress-uniques</option></entry>
- <entry>Suppress the printing of uniques in debug output (easier to use <command>diff</command>.</entry>
+ <entry>Suppress the printing of uniques in debug output (easier to use <command>diff</command>)</entry>
<entry>static</entry>
<entry>-</entry>
</row>
<row>
- <entry><option>-dsuppress-coercions</option></entry>
- <entry>Suppress the printing of coercions in Core dumps to make them shorter.</entry>
+ <entry><option>-dsuppress-idinfo</option></entry>
+ <entry>Suppress extended information about identifiers where they are bound</entry>
<entry>static</entry>
<entry>-</entry>
</row>
<row>
<entry><option>-dsuppress-module-prefixes</option></entry>
- <entry>Suppress the printing of module qualification prefixes in Core dumps to make them easier to read.</entry>
+ <entry>Suppress the printing of module qualification prefixes</entry>
<entry>static</entry>
<entry>-</entry>
</row>
<row>
- <entry><option>-dppr-noprags</option></entry>
- <entry>Don't output pragma info in dumps</entry>
+ <entry><option>-dsuppress-type-signatures</option></entry>
+ <entry>Suppress type signatures</entry>
<entry>static</entry>
<entry>-</entry>
</row>
<row>
- <entry><option>-dppr-user-length</option></entry>
- <entry>Set the depth for printing expressions in error msgs</entry>
+ <entry><option>-dsuppress-type-applications</option></entry>
+ <entry>Suppress type applications</entry>
+ <entry>static</entry>
+ <entry>-</entry>
+ </row>
+ <row>
+ <entry><option>-dsuppress-coercions</option></entry>
+ <entry>Suppress the printing of coercions in Core dumps to make them shorter</entry>
<entry>static</entry>
<entry>-</entry>
</row>
shared between several programs. In contrast, with static linking the
code is copied into each program. Using shared libraries can thus save
disk space. They also allow a single copy of code to be shared in memory
- between several programs that use it. Shared libraires are often used as
+ between several programs that use it. Shared libraries are often used as
a way of structuring large projects, especially where different parts are
written in different programming languages. Shared libraries are also
commonly used as a plugin mechanism by various applications. This is
+++ /dev/null
-@GccExtraViaCOpts@
# -----------------------------------------------------------------------------
# Installation
-install: install_packages install_libs install_libexecs install_headers \
+install: install_libs install_packages install_libexecs install_headers \
install_libexec_scripts install_bins install_topdirs
ifeq "$(HADDOCK_DOCS)" "YES"
install: install_docs
README \
INSTALL \
configure config.sub config.guess install-sh \
- extra-gcc-opts.in \
+ settings.in \
packages \
Makefile \
mk/config.mk.in \
compiler/stage2/doc \
$(wildcard libraries/*/dist-install/doc/) \
$(wildcard libraries/*/*/dist-install/doc/) \
- $(filter-out extra-gcc-opts,$(INSTALL_LIBS)) \
+ $(filter-out settings,$(INSTALL_LIBS)) \
$(filter-out %/project.mk mk/config.mk %/mk/install.mk,$(MAKEFILE_LIST)) \
mk/project.mk \
mk/install.mk.in \
unix-binary-dist-prep:
"$(RM)" $(RM_OPTS_REC) bindistprep/
"$(MKDIRHIER)" $(BIN_DIST_PREP_DIR)
- set -e; for i in packages LICENSE compiler ghc rts libraries utils docs libffi includes driver mk rules Makefile aclocal.m4 config.sub config.guess install-sh extra-gcc-opts.in ghc.mk inplace distrib/configure.ac distrib/README distrib/INSTALL; do ln -s ../../$$i $(BIN_DIST_PREP_DIR)/; done
+ set -e; for i in packages LICENSE compiler ghc rts libraries utils docs libffi includes driver mk rules Makefile aclocal.m4 config.sub config.guess install-sh settings.in ghc.mk inplace distrib/configure.ac distrib/README distrib/INSTALL; do ln -s ../../$$i $(BIN_DIST_PREP_DIR)/; done
echo "HADDOCK_DOCS = $(HADDOCK_DOCS)" >> $(BIN_DIST_MK)
echo "LATEX_DOCS = $(LATEX_DOCS)" >> $(BIN_DIST_MK)
echo "BUILD_DOCBOOK_HTML = $(BUILD_DOCBOOK_HTML)" >> $(BIN_DIST_MK)
SRC_DIST_FILES += \
configure.ac config.guess config.sub configure \
aclocal.m4 README ANNOUNCE HACKING LICENSE Makefile install-sh \
- ghc.spec.in ghc.spec extra-gcc-opts.in VERSION \
+ ghc.spec.in ghc.spec settings.in VERSION \
boot boot-pkgs packages ghc.mk
SRC_DIST_TARBALL = $(SRC_DIST_NAME)-src.tar.bz2
"$(RM)" $(RM_OPTS) config.cache config.status config.log mk/config.h mk/stamp-h
"$(RM)" $(RM_OPTS) mk/config.mk mk/are-validating.mk mk/project.mk
"$(RM)" $(RM_OPTS) mk/config.mk.old mk/project.mk.old
- "$(RM)" $(RM_OPTS) extra-gcc-opts docs/users_guide/ug-book.xml
+ "$(RM)" $(RM_OPTS) settings docs/users_guide/ug-book.xml
"$(RM)" $(RM_OPTS) compiler/ghc.cabal compiler/ghc.cabal.old
"$(RM)" $(RM_OPTS) ghc/ghc-bin.cabal
"$(RM)" $(RM_OPTS) libraries/base/include/HsBaseConfig.h
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
- GHC.defaultErrorHandler defaultDynFlags $ do
+ let defaultErrorHandlerDynFlags = defaultDynFlags (panic "No settings")
+ GHC.defaultErrorHandler defaultErrorHandlerDynFlags $ do
-- 1. extract the -B flag from the args
argv0 <- getArgs
showNumVersionMode = mkPreStartupMode ShowNumVersion
showSupportedExtensionsMode = mkPreStartupMode ShowSupportedExtensions
-printMode :: String -> Mode
-printMode str = mkPreStartupMode (Print str)
-
mkPreStartupMode :: PreStartupMode -> Mode
mkPreStartupMode = Left
showGhciUsageMode = mkPreLoadMode ShowGhciUsage
showInfoMode = mkPreLoadMode ShowInfo
-printWithDynFlagsMode :: (DynFlags -> String) -> Mode
-printWithDynFlagsMode f = mkPreLoadMode (PrintWithDynFlags f)
+printSetting :: String -> Mode
+printSetting k = mkPreLoadMode (PrintWithDynFlags f)
+ where f dflags = fromMaybe (panic ("Setting not found: " ++ show k))
+ $ lookup k (compilerInfo dflags)
mkPreLoadMode :: PreLoadMode -> Mode
mkPreLoadMode = Right . Left
, Flag "-supported-languages" (PassFlag (setMode showSupportedExtensionsMode))
, Flag "-supported-extensions" (PassFlag (setMode showSupportedExtensionsMode))
] ++
- [ Flag k' (PassFlag (setMode mode))
- | (k, v) <- compilerInfo,
+ [ Flag k' (PassFlag (setMode (printSetting k)))
+ | k <- ["Project version",
+ "Booter version",
+ "Stage",
+ "Build platform",
+ "Host platform",
+ "Target platform",
+ "Have interpreter",
+ "Object splitting supported",
+ "Have native code generator",
+ "Support SMP",
+ "Unregisterised",
+ "Tables next to code",
+ "RTS ways",
+ "Leading underscore",
+ "Debug on",
+ "LibDir",
+ "Global Package DB",
+ "C compiler flags",
+ "Gcc Linker flags",
+ "Ld Linker flags"],
let k' = "-print-" ++ map (replaceSpace . toLower) k
replaceSpace ' ' = '-'
replaceSpace c = c
- mode = case v of
- String str -> printMode str
- FromDynFlags f -> printWithDynFlagsMode f
] ++
------- interfaces ----------------------------------------------------
[ Flag "-show-iface" (HasArg (\f -> setMode (showInterfaceMode f)
showInfo :: DynFlags -> IO ()
showInfo dflags = do
let sq x = " [" ++ x ++ "\n ]"
- putStrLn $ sq $ concat $ intersperse "\n ," $ map (show . flatten) compilerInfo
- where flatten (k, String v) = (k, v)
- flatten (k, FromDynFlags f) = (k, f dflags)
+ putStrLn $ sq $ intercalate "\n ," $ map show $ compilerInfo dflags
showSupportedExtensions :: IO ()
showSupportedExtensions = mapM_ putStrLn supportedLanguagesAndExtensions
XXX
Category: XXX
Data-Dir: ..
-Data-Files: extra-gcc-opts
+Data-Files: settings
Build-Type: Simple
Cabal-Version: >= 1.2
all_ghc_stage2 : $(GHC_STAGE2)
all_ghc_stage3 : $(GHC_STAGE3)
-$(INPLACE_LIB)/extra-gcc-opts : extra-gcc-opts
+$(INPLACE_LIB)/settings : settings
"$(CP)" $< $@
# The GHC programs need to depend on all the helper programs they might call,
# and the settings files they use
-$(GHC_STAGE1) : | $(UNLIT) $(INPLACE_LIB)/extra-gcc-opts
-$(GHC_STAGE2) : | $(UNLIT) $(INPLACE_LIB)/extra-gcc-opts
-$(GHC_STAGE3) : | $(UNLIT) $(INPLACE_LIB)/extra-gcc-opts
+$(GHC_STAGE1) : | $(UNLIT) $(INPLACE_LIB)/settings
+$(GHC_STAGE2) : | $(UNLIT) $(INPLACE_LIB)/settings
+$(GHC_STAGE3) : | $(UNLIT) $(INPLACE_LIB)/settings
ifeq "$(GhcUnregisterised)" "NO"
$(GHC_STAGE1) : | $(SPLIT)
endif
-INSTALL_LIBS += extra-gcc-opts
+INSTALL_LIBS += settings
ifeq "$(Windows)" "NO"
install: install_ghc_link
#
# We use libffi's own configuration stuff.
-PLATFORM := $(shell echo $(HOSTPLATFORM) | sed 's/i[567]86/i486/g')
-
# 2007-07-05
# Passing
# as_ln_s='cp -p'
PATH=`pwd`:$$PATH; \
export PATH; \
cd build && \
- CC=$(WhatGccIsCalled) \
+ CC=$(CC_STAGE1) \
LD=$(LD) \
- AR=$(AR) \
+ AR=$(AR_STAGE1) \
NM=$(NM) \
CFLAGS="$(SRC_CC_OPTS) $(CONF_CC_OPTS_STAGE1) -w" \
LDFLAGS="$(SRC_LD_OPTS) $(CONF_GCC_LINKER_OPTS_STAGE1) -w" \
"$(SHELL)" configure \
--enable-static=yes \
--enable-shared=$(libffi_EnableShared) \
- --host=$(PLATFORM) --build=$(PLATFORM)
+ --host=$(HOSTPLATFORM) --build=$(BUILDPLATFORM)
# libffi.so needs to be built with the correct soname.
# NOTE: this builds libffi_convience.so with the incorrect
libffi/dist-install/build/HSffi.o: libffi/dist-install/build/libHSffi.a
cd libffi/dist-install/build && \
touch empty.c && \
- "$(CC)" $(SRC_CC_OPTS) $(CONF_CC_OPTS_STAGE1) -c empty.c -o HSffi.o
+ "$(CC_STAGE1)" $(SRC_CC_OPTS) $(CONF_CC_OPTS_STAGE1) -c empty.c -o HSffi.o
$(eval $(call all-target,libffi,libffi/dist-install/build/HSffi.o))
# binary-dist
BINDIST_EXTRAS += libffi/package.conf.in
-
+++ /dev/null
-# This Makefile.common is used only in an nhc98 build of the libraries.
-# It is included from each package's individual Makefile.nhc98.
-# We assume the following definitions have already been made in
-# the importing Makefile.
-#
-# THISPKG = e.g. mypkg
-# SEARCH = e.g. -P../IO -P../PreludeIO -package base
-# SRCS = all .hs .gc and .c files
-#
-# EXTRA_H_FLAGS = e.g. -prelude
-# EXTRA_C_FLAGS = e.g. -I../Binary
-include ../Makefile.inc
-
-# nasty hack - replace flags for ghc, nhc98, with hbc specific ones
-ifeq "hbc" "${BUILDCOMP}"
-EXTRA_H_FLAGS := ${EXTRA_HBC_FLAGS}
-endif
-
-DIRS = $(shell ${LOCAL}pkgdirlist ${THISPKG})
-
-OBJDIR = ${BUILDDIR}/${OBJ}/libraries/${THISPKG}
-OBJDIRS = $(patsubst %, ${OBJDIR}/%, ${DIRS})
-FINALLIB = ${DST}/libHS${THISPKG}.$A
-INCDIRS = ${INCDIR}/packages/${THISPKG} \
- $(patsubst %, ${INCDIR}/packages/${THISPKG}/%, ${DIRS})
-.SUFFIXES: .hi .hs .lhs .o .gc .c .hc .p.o .p.c .z.o .z.c .hsc
-
-SRCS_HS = $(filter %.hs, ${SRCS})
-SRCS_LHS = $(filter %.lhs,${SRCS})
-SRCS_GC = $(filter %.gc, ${SRCS})
-SRCS_HSC = $(filter %.hsc,${SRCS})
-SRCS_C = $(filter %.c, ${SRCS})
-SRCS_HASK= $(SRCS_HS) $(SRCS_LHS) $(SRCS_GC) $(SRCS_HSC)
-
-OBJS_HS = $(patsubst %.hs, ${OBJDIR}/%.$O, ${SRCS_HS})
-OBJS_LHS = $(patsubst %.lhs,${OBJDIR}/%.$O, ${SRCS_LHS})
-OBJS_GC = $(patsubst %.gc, ${OBJDIR}/%.$O, ${SRCS_GC})
-OBJS_HSC = $(patsubst %.hsc,${OBJDIR}/%.$O, ${SRCS_HSC})
-OBJS_C = $(patsubst %.c, ${OBJDIR}/%.$O, ${SRCS_C})
-OBJS_HASK= ${OBJS_HS} ${OBJS_LHS} ${OBJS_GC} ${OBJS_HSC}
-OBJS = $(OBJS_HASK) $(OBJS_C)
-
-CFILES_HS = $(patsubst %.hs, %.$C, ${SRCS_HS})
-CFILES_LHS = $(patsubst %.lhs,%.$C, ${SRCS_LHS})
-CFILES_GC = $(patsubst %.gc, %.$C, ${SRCS_GC})
-CFILES_XS = $(patsubst %.gc, %_.$C, ${SRCS_GC}) \
- $(patsubst %.gc, %_.hs, ${SRCS_GC})
-CFILES_HSC = $(patsubst %.hsc,%.$C, ${SRCS_HSC})
-CFILES_GEN = ${CFILES_HS} ${CFILES_LHS} ${CFILES_GC} ${CFILES_HSC}
-
-ifeq "p" "${PROFILING}"
-HC += -p
-endif
-
-ifeq "z" "${TPROF}"
-HC += -z
-endif
-
-all: ${OBJDIR} ${OBJDIRS} ${INCDIRS} extra ${OBJS} ${FINALLIB}
-extra:
-cfiles: extracfiles ${CFILES_GEN}
-extracfiles:
-fromC: ${OBJDIR} ${OBJS_C} ${OBJDIRS}
- $(HC) -c -d $(OBJDIR) $(EXTRA_C_FLAGS) ${SEARCH} ${CFILES_GEN}
- echo $(OBJS) | xargs ar cr ${FINALLIB}
-objdir: ${OBJDIR} ${OBJDIRS} ${INCDIRS}
-${OBJDIR} ${OBJDIRS} ${INCDIRS}:
- mkdir -p $@
-${FINALLIB}: ${OBJS}
- echo $(OBJS) | xargs ar cr $@
-cleanhi:
- -rm -f $(patsubst %, %/*.hi, ${DIRS})
-cleanC: cleanExtraC
- -rm -f ${CFILES_GEN} ${CFILES_XS}
-clean: cleanhi
- -rm -f $(patsubst %, ${OBJDIR}/%/*.$O, ${DIRS})
- -rm -f $(patsubst %.gc, %_.hs, $(filter %.gc, $(SRCS)))
- -rm -f $(patsubst %.gc, %_.$C, $(filter %.gc, $(SRCS)))
-cleanExtraC:
-
-# general build rules for making objects from Haskell files
-$(OBJS_HASK): #$(OBJDIR) $(OBJDIRS) $(SRCS_HASK)
- $(LOCAL)hmake -hc=$(HC) -hidir $(INCDIR)/packages/$(THISPKG) \
- $(SEARCH) $(EXTRA_H_FLAGS) -d$(OBJDIR) \
- $(SRCS_HASK)
-${OBJS_HS}: ${OBJDIR}/%.$O : %.hs
-${OBJS_LHS}: ${OBJDIR}/%.$O : %.lhs
-${OBJS_GC}: ${OBJDIR}/%.$O : %.gc
-${OBJS_HSC}: ${OBJDIR}/%.$O : %.hsc
-
-# general build rule for making objects from C files
-${OBJS_C}: ${OBJDIR}/%.$O : cbits/%.c
- $(CC) -c -I$(INCDIR) $(ENDIAN) $(filter -I%, ${SEARCH}) \
- $(EXTRA_C_FLAGS) -o $@ $<
-
-# general build rules for making bootstrap C files from Haskell files
-$(CFILES_GEN):
- $(LOCAL)hmake -hc=$(HC) -C -hidir $(INCDIR)/packages/$(THISPKG) \
- $(SEARCH) $(EXTRA_H_FLAGS) \
- $(SRCS_HASK)
-${CFILES_HS}: %.$C : %.hs
-${CFILES_LHS}: %.$C : %.lhs
-${CFILES_GC}: %.$C : %.gc
-${CFILES_HSC}: %.$C : %.hsc
-
-# hack to get round mutual recursion between libraries
-HIFILES = $(patsubst %.hs,../${THISLIB}/%.${HISUFFIX},$(filter %.hs, ${SRCS}))
-${HIFILES}: ../${THISLIB}/%.${HISUFFIX} : %.hs
- $(HC) -c $(PART_FLAGS) -o /dev/null $<
-
-# The importing Makefile may now define extra individual dependencies
-# e.g.
-# ${OBJDIR}/Function.$O: Function.hs ${OBJDIR}/Other.$O
-#
-# and C-files dependencies likewise
-# e.g.
-# AlignBin.c: BinHandle.c
-
+++ /dev/null
-ifeq "" "${MKDIR}"
-MKDIR:=$(shell pwd)
-#MKDIR:=$(PWD)
-else
-MKDIR:=$(patsubst %/$(notdir ${MKDIR}),%, ${MKDIR})
-endif
-include ${MKDIR}/Makefile.inc
-
+++ /dev/null
-# Local GHC-build-tree customization for Cabal makefiles. We want to build
-# libraries using flags that the user has put in build.mk/validate.mk and
-# appropriate flags for Mac OS X deployment targets.
-
-# Careful here: including boilerplate.mk breaks things, because paths.mk and
-# opts.mk overrides some of the variable settings in the Cabal Makefile, so
-# we just include config.mk and custom-settings.mk.
-include ../defineTOP.mk
-SAVE_GHC := $(GHC)
-SAVE_AR := $(AR)
-SAVE_LD := $(LD)
-include $(TOP)/mk/config.mk
-include $(TOP)/mk/custom-settings.mk
-GHC := $(SAVE_GHC)
-AR := $(SAVE_AR)
-LD := $(SAVE_LD)
-
-# We want all warnings on
-GhcLibHcOpts += -Wall
-
-# Cabal has problems with deprecated flag warnings, as it needs to pass
-# deprecated flags in pragmas in order to support older GHCs. Thus for
-# now at least we just disable them completely.
-GhcLibHcOpts += -fno-warn-deprecated-flags
-
-ifeq "$(filter-out Win32-% dph%,$(package))" ""
-# XXX We are one of the above list, i.e. we are a package that is not
-# yet warning-clean. Thus turn warnings off for now so that validate
-# goes through.
-GhcLibHcOpts += -w
-endif
-
-# Now add flags from the GHC build system to the Cabal build:
-GHC_OPTS += $(SRC_HC_OPTS)
-GHC_OPTS += $(GhcLibHcOpts)
-
-include $(TOP)/mk/bindist.mk
-
# the flag --with-gcc=<blah> instead. The reason is that the configure script
# needs to know which gcc you're using in order to perform its tests.
-HaveGcc = @HaveGcc@
-UseGcc = YES
WhatGccIsCalled = @WhatGccIsCalled@
GccVersion = @GccVersion@
-GccLT34 = @GccLT34@
-ifeq "$(strip $(HaveGcc))" "YES"
-ifneq "$(strip $(UseGcc))" "YES"
- CC = cc
-else
- CC = $(WhatGccIsCalled)
-endif
-endif
+GccLT34 = @GccLT34@
+CC = $(WhatGccIsCalled)
+CC_STAGE0 = @CC_STAGE0@
+CC_STAGE1 = $(CC)
+CC_STAGE2 = $(CC)
+CC_STAGE3 = $(CC)
# C compiler and linker flags from configure (e.g. -m<blah> to select
# correct C compiler backend). The stage number is the stage of GHC
AR = @ArCmd@
AR_OPTS = @ArArgs@
-ArSupportsInput = @ArSupportsInput@
ArSupportsAtFile = @ArSupportsAtFile@
-# Yuckage: for ghc/utils/parallel -- todo: nuke this dependency!!
-BASH = /usr/local/bin/bash
+
+AR_STAGE0 = $(AR)
+AR_STAGE1 = $(AR)
+AR_STAGE2 = $(AR)
+AR_STAGE3 = $(AR)
+AR_OPTS_STAGE0 = $(AR_OPTS)
+AR_OPTS_STAGE1 = $(AR_OPTS)
+AR_OPTS_STAGE2 = $(AR_OPTS)
+AR_OPTS_STAGE3 = $(AR_OPTS)
+EXTRA_AR_ARGS_STAGE0 = $(EXTRA_AR_ARGS)
+EXTRA_AR_ARGS_STAGE1 = $(EXTRA_AR_ARGS)
+EXTRA_AR_ARGS_STAGE2 = $(EXTRA_AR_ARGS)
+EXTRA_AR_ARGS_STAGE3 = $(EXTRA_AR_ARGS)
+ArSupportsAtFile_STAGE0 = $(ArSupportsAtFile)
+ArSupportsAtFile_STAGE1 = $(ArSupportsAtFile)
+ArSupportsAtFile_STAGE2 = $(ArSupportsAtFile)
+ArSupportsAtFile_STAGE3 = $(ArSupportsAtFile)
CONTEXT_DIFF = @ContextDiffCmd@
CP = cp
PERL = @PerlCmd@
PYTHON = @PythonCmd@
PIC = pic
-PREPROCESSCMD = $(CC) -E
RANLIB = @RANLIB@
SED = @SedCmd@
TR = tr
else
$$(rts_$1_LIB) : $$(rts_$1_OBJS) $$(rts_$1_DTRACE_OBJS)
"$$(RM)" $$(RM_OPTS) $$@
- echo $$(rts_$1_OBJS) $$(rts_$1_DTRACE_OBJS) | "$$(XARGS)" $$(XARGS_OPTS) "$$(AR)" \
- $$(AR_OPTS) $$(EXTRA_AR_ARGS) $$@
+ echo $$(rts_$1_OBJS) $$(rts_$1_DTRACE_OBJS) | "$$(XARGS)" $$(XARGS_OPTS) "$$(AR_STAGE1)" \
+ $$(AR_OPTS_STAGE1) $$(EXTRA_AR_ARGS_STAGE1) $$@
endif
endif
ifneq "$(BINDIST)" "YES"
rts/dist/build/libHSrtsmain.a : rts/dist/build/Main.o
"$(RM)" $(RM_OPTS) $@
- "$(AR)" $(AR_OPTS) $(EXTRA_AR_ARGS) $@ $<
+ "$(AR_STAGE1)" $(AR_OPTS_STAGE1) $(EXTRA_AR_ARGS_STAGE1) $@ $<
endif
# -----------------------------------------------------------------------------
else
echo $$($1_$2_$3_ALL_OBJS) >> $$@.contents
endif
-ifeq "$$(ArSupportsAtFile)" "YES"
- "$$(AR)" $$(AR_OPTS) $$(EXTRA_AR_ARGS) $$@ @$$@.contents
+ifeq "$$($1_$2_ArSupportsAtFile)" "YES"
+ "$$($1_$2_AR)" $$($1_$2_AR_OPTS) $$($1_$2_EXTRA_AR_ARGS) $$@ @$$@.contents
else
- "$$(XARGS)" $$(XARGS_OPTS) "$$(AR)" $$(AR_OPTS) $$(EXTRA_AR_ARGS) $$@ < $$@.contents
+ "$$(XARGS)" $$(XARGS_OPTS) "$$($1_$2_AR)" $$($1_$2_AR_OPTS) $$($1_$2_EXTRA_AR_ARGS) $$@ < $$@.contents
endif
"$$(RM)" $$(RM_OPTS) $$@.contents
endif
"$$($1_$2_HC)" -o $$@ $$($1_$2_v_ALL_HC_OPTS) $$(LD_OPTS) $$($1_$2_GHC_LD_OPTS) $$($1_$2_v_HS_OBJS) $$($1_$2_v_C_OBJS) $$($1_$2_v_S_OBJS) $$($1_$2_OTHER_OBJS) $$(addprefix -l,$$($1_$2_EXTRA_LIBRARIES))
else
$1/$2/build/tmp/$$($1_$2_PROG) : $$($1_$2_v_HS_OBJS) $$($1_$2_v_C_OBJS) $$($1_$2_v_S_OBJS) $$($1_$2_OTHER_OBJS) | $$$$(dir $$$$@)/.
- "$$(CC)" -o $$@ $$($1_$2_v_ALL_CC_OPTS) $$(LD_OPTS) $$($1_$2_v_HS_OBJS) $$($1_$2_v_C_OBJS) $$($1_$2_v_S_OBJS) $$($1_$2_OTHER_OBJS) $$($1_$2_v_EXTRA_CC_OPTS) $$(addprefix -l,$$($1_$2_EXTRA_LIBRARIES))
+ "$$($1_$2_CC)" -o $$@ $$($1_$2_v_ALL_CC_OPTS) $$(LD_OPTS) $$($1_$2_v_HS_OBJS) $$($1_$2_v_C_OBJS) $$($1_$2_v_S_OBJS) $$($1_$2_OTHER_OBJS) $$($1_$2_v_EXTRA_CC_OPTS) $$(addprefix -l,$$($1_$2_EXTRA_LIBRARIES))
endif
# Note [lib-depends] if this program is built with stage1 or greater, we
else
$1/$2/build/%.$$($3_osuf) : $1/%.c | $$$$(dir $$$$@)/.
- "$$(CC)" $$($1_$2_$3_ALL_CC_OPTS) -c $$< -o $$@
+ "$$($1_$2_CC)" $$($1_$2_$3_ALL_CC_OPTS) -c $$< -o $$@
$1/$2/build/%.$$($3_osuf) : $1/$2/build/%.c
- "$$(CC)" $$($1_$2_$3_ALL_CC_OPTS) -c $$< -o $$@
+ "$$($1_$2_CC)" $$($1_$2_$3_ALL_CC_OPTS) -c $$< -o $$@
$1/$2/build/%.$$($3_osuf) : $1/$2/build/%.$$($3_way_)s
"$$(AS)" $$($1_$2_$3_ALL_AS_OPTS) -o $$@ $$<
$1/$2/build/%.$$($3_osuf) : $1/%.S | $$$$(dir $$$$@)/.
- "$$(CC)" $$($1_$2_$3_ALL_CC_OPTS) -c $$< -o $$@
+ "$$($1_$2_CC)" $$($1_$2_$3_ALL_CC_OPTS) -c $$< -o $$@
$1/$2/build/%.$$($3_way_)s : $1/$2/build/%.c
- "$$(CC)" $$($1_$2_$3_ALL_CC_OPTS) -S $$< -o $$@
+ "$$($1_$2_CC)" $$($1_$2_$3_ALL_CC_OPTS) -S $$< -o $$@
endif
# Options for a Haskell compilation:
# - CONF_HC_OPTS source-tree-wide options, selected at
-# configure-time
+# configure-time
# - SRC_HC_OPTS source-tree-wide options from build.mk
-# (optimisation, heap settings)
+# (optimisation, heap settings)
# - libraries/base_HC_OPTS options from Cabal for libraries/base
# for all ways
# - libraries/base_MORE_HC_OPTS options from elsewhere in the build
# - libraries/base_v_HC_OPTS options from libraries/base for way v
# - WAY_v_HC_OPTS options for this way
# - EXTRA_HC_OPTS options from the command-line
-# - -Idir1 -Idir2 ... include-dirs from this package
+# - -Idir1 -Idir2 ... include-dirs from this package
# - -odir/-hidir/-stubdir put the output files under $3/build
# - -osuf/-hisuf/-hcsuf suffixes for the output files in this way
--cflag=-D__GLASGOW_HASKELL__=$$(ProjectVersionInt) \
$$($1_$2_$3_HSC2HS_CC_OPTS) \
$$($1_$2_$3_HSC2HS_LD_OPTS) \
+ --cflag=-I$1/$2/build/autogen \
+ $$(if $$($1_PACKAGE),--cflag=-include --cflag=$1/$2/build/autogen/cabal_macros.h) \
$$($$(basename $$<)_HSC2HS_OPTS) \
$$(EXTRA_HSC2HS_OPTS)
# .hs->.o rule, I don't know why --SDM
$1/$2/build/%.$$($3_osuf) : $1/$4/%.hc includes/ghcautoconf.h includes/ghcplatform.h | $$$$(dir $$$$@)/.
- "$$(CC)" $$($1_$2_$3_ALL_CC_OPTS) -Iincludes -x c -c $$< -o $$@
+ "$$($1_$2_CC)" $$($1_$2_$3_ALL_CC_OPTS) -Iincludes -x c -c $$< -o $$@
$1/$2/build/%.$$($3_osuf) : $1/$2/build/%.hc includes/ghcautoconf.h includes/ghcplatform.h
- "$$(CC)" $$($1_$2_$3_ALL_CC_OPTS) -Iincludes -x c -c $$< -o $$@
+ "$$($1_$2_CC)" $$($1_$2_$3_ALL_CC_OPTS) -Iincludes -x c -c $$< -o $$@
# $1/$2/build/%.$$($3_osuf) : $1/$2/build/%.$$($3_way_)hc
# "$$($1_$2_HC)" $$($1_$2_$3_ALL_HC_OPTS) -c $$< -o $$@
$(call profStart, package-config($1,$2,$3))
$1_$2_HC = $$(GHC_STAGE$3)
+$1_$2_CC = $$(CC_STAGE$3)
+$1_$2_AR = $$(AR_STAGE$3)
+$1_$2_AR_OPTS = $$(AR_OPTS_STAGE$3)
+$1_$2_EXTRA_AR_ARGS = $$(EXTRA_AR_ARGS_STAGE$3)
+$1_$2_ArSupportsAtFile = $$(ArSupportsAtFile_STAGE$3)
# configuration stuff that depends on which GHC we're building with
ifeq "$3" "0"
--- /dev/null
+[("GCC extra via C opts", "@GccExtraViaCOpts@"),
+ ("C compiler command", "@WhatGccIsCalled@"),
+ ("perl command", "@PerlCmd@")]
+
if (-d "$localpath/.git") {
die "Found both _darcs and .git in $localpath";
}
- else {
- $scm = "darcs";
- }
- }
- else {
- if (-d "$localpath/.git") {
- $scm = "git";
- }
- elsif ($tag eq "") {
- die "Required repo $localpath is missing";
- }
- else {
- message "== $localpath repo not present; skipping";
- }
+ $scm = "darcs";
+ } elsif (-d "$localpath/.git") {
+ $scm = "git";
+ } elsif ($tag eq "") {
+ die "Required repo $localpath is missing";
+ } else {
+ message "== $localpath repo not present; skipping";
+ next;
}
# Work out the arguments we should give to the SCM
}
scm ($localpath, $scm, @scm_args, @args);
}
+ elsif ($command =~ /^checkout$/) {
+ # Not all repos are necessarily branched, so ignore failure
+ $ignore_failure = 1;
+ scm ($localpath, $scm, "checkout", @args)
+ unless $scm eq "darcs";
+ }
elsif ($command =~ /^grep$/) {
# Hack around 'git grep' failing if there are no matches
$ignore_failure = 1;
* remote add <branch-name>
* remote rm <branch-name>
* remote set-url [--push] <branch-name>
+ * checkout
* grep
* clean
* reset
import HscTypes ( msHsFilePath )
import Name ( getOccString )
--import ErrUtils ( printBagOfErrors )
+import Panic ( panic )
import DynFlags ( defaultDynFlags )
import Bag
import Exception
then Just `liftM` openFile "TAGS" openFileMode
else return Nothing
- GHC.defaultErrorHandler defaultDynFlags $
+ GHC.defaultErrorHandler (defaultDynFlags (panic "No settings")) $
runGhc (Just ghc_topdir) $ do
--liftIO $ print "starting up session"
dflags <- getSessionDynFlags