-rw-r--r--  .gitignore            |   2
-rw-r--r--  HACKING               |  14
-rw-r--r--  Makefile.am           | 187
-rw-r--r--  doc/automake-ng.texi  |   6
-rwxr-xr-x  lib/config.sub        |   8
-rwxr-xr-x  lib/gendocs.sh        | 434
-rw-r--r--  lib/gendocs_template  |  87
7 files changed, 677 insertions, 61 deletions
diff --git a/.gitignore b/.gitignore index 0c80ef7c9..e3870367d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ /lib/am-ng +/announcement /ChangeLog /aclocal.m4 /configure @@ -35,6 +36,7 @@ /doc/amhello/depcomp /doc/amhello/install-sh /doc/amhello/missing +/doc/web-manuals /lib/Automake/Config.pm /test-suite.log /t/wrap/aclocal-1.* @@ -254,13 +254,9 @@ use "make GNUPLOADFLAGS='--user KEY' git-upload-release". * For stable releases, update the manuals at www.gnu.org: - - Generate manuals: - cd doc - wget "http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/texinfo/texinfo/util/gendocs.sh" - wget "http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/texinfo/texinfo/util/gendocs_template" - sh ./gendocs.sh --email bug-automake@gnu.org automake "GNU Automake" - - copy manuals recursively to web cvs, - - commit. + - Generate manuals, running "make web-manuals". + - Copy manuals recursively to web CVS. + - Commit in CVS. - Check for link errors, fix them, recheck until convergence: <http://validator.w3.org/checklink> @@ -270,6 +266,10 @@ * Don't forget to "git push" your changes so they appear in the public git tree. +* Create an announcement message with "make announcement". Edit the + generated 'announcement' file appropriately, in particularly filling + in by hand any "TODO" left in there. + * Send the announcement at least to <autotools-announce@gnu.org> and <automake@gnu.org>. If the release is a stable one, the announcement must also go to <info-gnu@gnu.org>; if it is an alpha or beta release, diff --git a/Makefile.am b/Makefile.am index 6b2df5d66..4edf9f377 100644 --- a/Makefile.am +++ b/Makefile.am @@ -735,9 +735,10 @@ GIT = git EXTRA_DIST += lib/gnupload -version_rx = ^[1-9][0-9]*\.[0-9][0-9]*(\.[0-9][0-9]*)? -stable_version_rx = $(version_rx)$$ -beta_version_rx = $(version_rx)[bdfhjlnprtvxz]$$ +base_version_rx = ^[1-9][0-9]*\.[0-9][0-9]* +stable_major_version_rx = $(base_version_rx)$$ +stable_minor_version_rx = $(base_version_rx)\.[0-9][0-9]*$$ +beta_version_rx = $(base_version_rx)(\.[0-9][0-9]*)?[bdfhjlnprtvxz]$$ match_version = echo "$(VERSION)" | $(EGREP) >/dev/null ## Check that we don't have uncommitted or unstaged changes. @@ -752,14 +753,30 @@ git_must_have_clean_workdir = \ || fatal "you have uncommitted or unstaged changes" determine_release_type = \ - if $(match_version) '$(stable_version_rx)'; then \ - release_type='Release' dest=ftp; \ + if $(match_version) '$(stable_major_version_rx)'; then \ + release_type='Major release'; \ + announcement_type='major release'; \ + dest=ftp; \ + elif $(match_version) '$(stable_minor_version_rx)'; then \ + release_type='Minor release'; \ + announcement_type='maintenance release'; \ + dest=ftp; \ elif $(match_version) '$(beta_version_rx)'; then \ - release_type='Beta release' dest=alpha; \ + release_type='Beta release'; \ + announcement_type='test release'; \ + dest=alpha; \ else \ fatal "invalid version '$(VERSION)' for a release"; \ fi +# Help the debugging of $(determine_release_type) and related code. 
+print-release-type: + @set -e -u \ + && fatal () { echo "$@: $$*"; exit 0; } \ + && $(determine_release_type) \ + && echo "$$release_type $(VERSION);" \ + "it will be announced as a $$announcement_type" + git-tag-release: maintainer-check @set -e; set -u; \ fatal () { echo "$@: $$*; not tagging" >&2; exit 1; }; \ @@ -787,7 +804,7 @@ git-upload-release: echo Will upload to $$dest: $(DIST_ARCHIVES); \ $(srcdir)/lib/gnupload $(GNUPLOADFLAGS) --to $$dest $(DIST_ARCHIVES) -.PHONY: git-upload-release git-tag-release +.PHONY: print-release-type git-upload-release git-tag-release ## ------------------------------------------------------------------ ## @@ -864,58 +881,130 @@ compare-autodiffs: autodiffs exit $$st .PHONY: autodiffs compare-autodiffs +## ---------------------------------------------- ## +## Help writing the announcement for a release. ## +## ---------------------------------------------- ## + +PACKAGE_MAILINGLIST = automake@gnu.org + +announcement: NEWS + $(AM_V_GEN): \ + && rm -f $@ $@-t \ + && fatal () { echo "$@: $$*" >&2; exit 1; } \ + && $(determine_release_type) \ + && ftp_base="ftp://$$dest.gnu.org/gnu/$(PACKAGE)" \ + && X () { printf '%s\n' "$$*" >> $@-t; } \ + && X "We are pleased to announce the $(PACKAGE_NAME) $(VERSION)" \ + "$$announcement_type." \ + && X \ + && X "**TODO** Brief description of the release here." \ + && X \ + && X "**TODO** This description can span multiple paragraphs." \ + && X \ + && X "See below for the detailed list of changes since the" \ + && X "previous version, as summarized by the NEWS file." \ + && X \ + && X "Download here:" \ + && X \ + && X " $$ftp_base/$(PACKAGE)-$(VERSION).tar.gz" \ + && X " $$ftp_base/$(PACKAGE)-$(VERSION).tar.xz" \ + && X \ + && X "Please report bugs and problems to" \ + "<$(PACKAGE_BUGREPORT)>," \ + && X "and send general comments and feedback to" \ + "<$(PACKAGE_MAILINGLIST)>." \ + && X \ + && X "Thanks to everyone who has reported problems, contributed" \ + && X "patches, and helped testing Automake!" \ + && X \ + && X "-*-*-*-" \ + && X \ + && sed -n -e '/^~~~/q' -e p $(srcdir)/NEWS >> $@-t \ + && mv -f $@-t $@ +.PHONY: announcement +CLEANFILES += announcement ## --------------------------------------------------------------------- ## ## Synchronize third-party files that are committed in our repository. ## ## --------------------------------------------------------------------- ## -## Program to use to fetch files. +# Program to use to fetch files. WGET = wget -WGET_SV_CVS = $(WGET) http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/ -WGET_SV_GIT_CF = $(WGET) 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;hb=HEAD;f=' -WGET_SV_GIT_AC = $(WGET) 'http://git.savannah.gnu.org/gitweb/?p=autoconf.git;a=blob_plain;hb=HEAD;f=' -WGET_SV_GIT_GL = $(WGET) 'http://git.savannah.gnu.org/gitweb/?p=gnulib.git;a=blob_plain;hb=HEAD;f=' -## Files that we fetch and which we compare against. -## The 'lib/COPYING' file must still be synced by hand. +# Some repositories we sync files from. +SV_CVS = 'http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/' +SV_GIT_CF = 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;hb=HEAD;f=' +SV_GIT_AC = 'http://git.savannah.gnu.org/gitweb/?p=autoconf.git;a=blob_plain;hb=HEAD;f=' +SV_GIT_GL = 'http://git.savannah.gnu.org/gitweb/?p=gnulib.git;a=blob_plain;hb=HEAD;f=' + +# Files that we fetch and which we compare against. +# Note that the 'lib/COPYING' file must still be synced by hand. 
FETCHFILES = \ -INSTALL \ -config.guess \ -config.sub \ -gnupload \ -gitlog-to-changelog \ -texinfo.tex \ -update-copyright - -## Fetch the latest versions of files we care about. + $(SV_GIT_CF)config.guess \ + $(SV_GIT_CF)config.sub \ + $(SV_CVS)texinfo/texinfo/doc/texinfo.tex \ + $(SV_CVS)texinfo/texinfo/util/gendocs.sh \ + $(SV_CVS)texinfo/texinfo/util/gendocs_template \ + $(SV_GIT_GL)build-aux/gitlog-to-changelog \ + $(SV_GIT_GL)build-aux/gnupload \ + $(SV_GIT_GL)build-aux/update-copyright \ + $(SV_GIT_GL)doc/INSTALL + +# Fetch the latest versions of few scripts and files we care about. fetch: - rm -rf Fetchdir > /dev/null 2>&1 - mkdir Fetchdir -## If a get fails then that is a problem. - (cd Fetchdir && \ - $(WGET_SV_GIT_CF)config.guess -O config.guess && \ - $(WGET_SV_GIT_CF)config.sub -O config.sub && \ - $(WGET_SV_CVS)texinfo/texinfo/doc/texinfo.tex -O texinfo.tex && \ - $(WGET_SV_GIT_GL)doc/INSTALL -O INSTALL && \ - $(WGET_SV_GIT_GL)build-aux/gnupload -O gnupload && \ - $(WGET_SV_GIT_GL)build-aux/update-copyright -O update-copyright && \ - $(WGET_SV_GIT_GL)build-aux/gitlog-to-changelog -O gitlog-to-changelog) -## Don't exit after test because we want to give as many errors as -## possible. - @stat=0; for file in $(FETCHFILES); do \ - if diff -u $(srcdir)/lib/$$file Fetchdir/$$file \ - >>Fetchdir/update.patch 2>/dev/null; then :; \ - else \ - stat=1; \ - echo "Updating $(srcdir)/lib/$$file ..."; \ - cp Fetchdir/$$file $(srcdir)/lib/$$file; \ - fi; \ - done; \ - test $$stat = 0 || \ - echo "See Fetchdir/update.patch for a log of the changes."; \ - exit $$stat + $(AM_V_at)rm -rf Fetchdir + $(AM_V_at)mkdir Fetchdir + $(AM_V_GEN)set -e; \ + if $(AM_V_P); then wget_opts=; else wget_opts=-nv; fi; \ + for url in $(FETCHFILES); do \ + file=`printf '%s\n' "$$url" | sed 's|^.*/||; s|^.*=||'`; \ +## A retrieval failure usually means a serious problem. Just bail out. + $(WGET) $$wget_opts "$$url" -O Fetchdir/$$file || exit 1; \ + if cmp Fetchdir/$$file $(srcdir)/lib/$$file >/dev/null; then \ + : Nothing to do; \ + else \ + echo "$@: updating file $$file"; \ +## Ditto for a copying failure. + cp Fetchdir/$$file $(srcdir)/lib/$$file || exit 1; \ + fi; \ + done + $(AM_V_at)rm -rf Fetchdir .PHONY: fetch +## --------------------------------------------------------------------- ## +## Generate manuals in several formats, for upload on the GNU website. ## +## --------------------------------------------------------------------- ## + +# The gendocs.sh script sadly leaves TeX and Texinfo auxiliary files +# in the directory where it's invoked. +clean_texinfo_clutter_cmd = \ + cd doc && rm -f *.ac *.aux *.cm *.cp *.cps *.fn *.fns *.ky \ + *.log *.op *.pg *.toc *.tp *.tr *.vr *.vrs + +clean-web-manuals: + $(AM_V_at)rm -rf doc/web-manuals +clean-texinfo-clutter: + $(AM_V_at)$(clean_texinfo_clutter_cmd) +clean-local: clean-web-manuals clean-texinfo-clutter +.PHONY: clean-web-manuals clean-texinfo-clutter + +web-manuals: + $(AM_V_at)rm -rf doc/web-manuals + $(AM_V_GEN): \ +## The gendocs.sh script only works from the srcdir, sadly. + && cd $(srcdir)/doc \ + && GENDOCS_TEMPLATE_DIR=../lib \ + && export GENDOCS_TEMPLATE_DIR \ +## Try to respect silent rules. + && if $(AM_V_P); then :; else exec >/dev/null 2>&1; fi \ +## Finally generate the manual in several formats. 
+ && $(SHELL) ../lib/gendocs.sh -o web-manuals \ + --email $(PACKAGE_BUGREPORT) $(PACKAGE) '$(PACKAGE_NAME)' + $(AM_V_at)$(clean_texinfo_clutter_cmd) + $(AM_V_at)if $(AM_V_P); then ls -l doc/web-manuals; else :; fi +.PHONY: web-manuals + +EXTRA_DIST += lib/gendocs.sh lib/gendocs_template ## ------------------------------------------------ ## ## Update copyright years of all committed files. ## @@ -935,7 +1024,7 @@ update-copyright: sed -i "/^RELEASE_YEAR=/s/=.*$$/=$$current_year/" \ bootstrap.sh configure.ac; \ excluded_re=`echo $(FETCHFILES) \ - | sed -e 's|^|lib/|' -e 's| | lib/|g' -e 's, ,|,g'`; \ + | sed -e 's|^.*/|lib/|' -e 's| | lib/|g' -e 's, ,|,g'`; \ $(GIT) ls-files \ | grep -Ev '^(lib/)?(COPYING|INSTALL)$$' \ | grep -Ev "^($$excluded_re)$$" \ diff --git a/doc/automake-ng.texi b/doc/automake-ng.texi index c850e3dc7..8bdfec171 100644 --- a/doc/automake-ng.texi +++ b/doc/automake-ng.texi @@ -6047,9 +6047,9 @@ per-library) @code{_CPPFLAGS} variable if it is defined. @item AM_CFLAGS This is the variable the @file{Makefile.am} author can use to pass -in additional C compiler flags. It is more fully documented elsewhere. -In some situations, this is not used, in preference to the -per-executable (or per-library) @code{_CFLAGS}. +in additional C compiler flags. In some situations, this is +not used, in preference to the per-executable (or per-library) +@code{_CFLAGS}. @item COMPILE This is the command used to actually compile a C source file. The diff --git a/lib/config.sub b/lib/config.sub index 89b128630..8df551109 100755 --- a/lib/config.sub +++ b/lib/config.sub @@ -4,7 +4,7 @@ # 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, # 2011, 2012 Free Software Foundation, Inc. -timestamp='2012-10-10' +timestamp='2012-12-06' # This file is (in principle) common to ALL GNU software. # The presence of a machine in this file suggests that SOME GNU software @@ -1026,7 +1026,11 @@ case $basic_machine in basic_machine=i586-unknown os=-pw32 ;; - rdos) + rdos | rdos64) + basic_machine=x86_64-pc + os=-rdos + ;; + rdos32) basic_machine=i386-pc os=-rdos ;; diff --git a/lib/gendocs.sh b/lib/gendocs.sh new file mode 100755 index 000000000..0c0bc4b0f --- /dev/null +++ b/lib/gendocs.sh @@ -0,0 +1,434 @@ +#!/bin/sh -e +# gendocs.sh -- generate a GNU manual in many formats. This script is +# mentioned in maintain.texi. See the help message below for usage details. + +scriptversion=2012-10-27.11 + +# Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 +# Free Software Foundation, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# +# Original author: Mohit Agarwal. +# Send bug reports and any other correspondence to bug-texinfo@gnu.org. 
+# +# The latest version of this script, and the companion template, is +# available from Texinfo CVS: +# http://savannah.gnu.org/cgi-bin/viewcvs/texinfo/texinfo/util/gendocs.sh +# http://savannah.gnu.org/cgi-bin/viewcvs/texinfo/texinfo/util/gendocs_template +# +# An up-to-date copy is also maintained in Gnulib (gnu.org/software/gnulib). + +# TODO: +# - image importation was only implemented for HTML generated by +# makeinfo. But it should be simple enough to adjust. +# - images are not imported in the source tarball. All the needed +# formats (PDF, PNG, etc.) should be included. + +prog=`basename "$0"` +srcdir=`pwd` + +scripturl="http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/texinfo/texinfo/util/gendocs.sh" +templateurl="http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/texinfo/texinfo/util/gendocs_template" + +: ${SETLANG="env LANG= LC_MESSAGES= LC_ALL= LANGUAGE="} +: ${MAKEINFO="makeinfo"} +: ${TEXI2DVI="texi2dvi -t @finalout"} +: ${DOCBOOK2HTML="docbook2html"} +: ${DOCBOOK2PDF="docbook2pdf"} +: ${DOCBOOK2TXT="docbook2txt"} +: ${GENDOCS_TEMPLATE_DIR="."} +: ${PERL='perl'} +: ${TEXI2HTML="texi2html"} +unset CDPATH +unset use_texi2html + +version="gendocs.sh $scriptversion + +Copyright 2012 Free Software Foundation, Inc. +There is NO warranty. You may redistribute this software +under the terms of the GNU General Public License. +For more information about these matters, see the files named COPYING." + +usage="Usage: $prog [OPTION]... PACKAGE MANUAL-TITLE + +Generate output in various formats from PACKAGE.texinfo (or .texi or +.txi) source. See the GNU Maintainers document for a more extensive +discussion: + http://www.gnu.org/prep/maintain_toc.html + +Options: + -s SRCFILE read Texinfo from SRCFILE, instead of PACKAGE.{texinfo|texi|txi} + -o OUTDIR write files into OUTDIR, instead of manual/. + -I DIR append DIR to the Texinfo search path. + --email ADR use ADR as contact in generated web pages. + --docbook convert through DocBook too (xml, txt, html, pdf). + --html ARG pass indicated ARG to makeinfo or texi2html for HTML targets. + --info ARG pass indicated ARG to makeinfo for Info, instead of --no-split. + --texi2html use texi2html to generate HTML targets. + --help display this help and exit successfully. + --version display version information and exit successfully. + +Simple example: $prog --email bug-gnu-emacs@gnu.org emacs \"GNU Emacs Manual\" + +Typical sequence: + cd PACKAGESOURCE/doc + wget \"$scripturl\" + wget \"$templateurl\" + $prog --email BUGLIST MANUAL \"GNU MANUAL - One-line description\" + +Output will be in a new subdirectory \"manual\" (by default; +use -o OUTDIR to override). Move all the new files into your web CVS +tree, as explained in the Web Pages node of maintain.texi. + +Please do use the --email ADDRESS option to specify your bug-reporting +address in the generated HTML pages. + +MANUAL-TITLE is included as part of the HTML <title> of the overall +manual/index.html file. It should include the name of the package being +documented. manual/index.html is created by substitution from the file +$GENDOCS_TEMPLATE_DIR/gendocs_template. (Feel free to modify the +generic template for your own purposes.) + +If you have several manuals, you'll need to run this script several +times with different MANUAL values, specifying a different output +directory with -o each time. Then write (by hand) an overall index.html +with links to them all. 
+ +If a manual's Texinfo sources are spread across several directories, +first copy or symlink all Texinfo sources into a single directory. +(Part of the script's work is to make a tar.gz of the sources.) + +As implied above, by default monolithic Info files are generated. +If you want split Info, or other Info options, use --info to override. + +You can set the environment variables MAKEINFO, TEXI2DVI, TEXI2HTML, +and PERL to control the programs that get executed, and +GENDOCS_TEMPLATE_DIR to control where the gendocs_template file is +looked for. With --docbook, the environment variables DOCBOOK2HTML, +DOCBOOK2PDF, and DOCBOOK2TXT are also respected. + +By default, makeinfo and texi2dvi are run in the default (English) +locale, since that's the language of most Texinfo manuals. If you +happen to have a non-English manual and non-English web site, see the +SETLANG setting in the source. + +Email bug reports or enhancement requests to bug-texinfo@gnu.org. +" + +MANUAL_TITLE= +PACKAGE= +EMAIL=webmasters@gnu.org # please override with --email +commonarg= # Options passed to all the tools (-I dir). +dirs= # -I's directories. +htmlarg= +infoarg=--no-split +outdir=manual +srcfile= + +while test $# -gt 0; do + case $1 in + --email) shift; EMAIL=$1;; + --help) echo "$usage"; exit 0;; + --version) echo "$version"; exit 0;; + -s) shift; srcfile=$1;; + -o) shift; outdir=$1;; + -I) shift; commonarg="$commonarg -I '$1'"; dirs="$dirs $1";; + --docbook) docbook=yes;; + --html) shift; htmlarg=$1;; + --info) shift; infoarg=$1;; + --texi2html) use_texi2html=1;; + -*) + echo "$0: Unknown option \`$1'." >&2 + echo "$0: Try \`--help' for more information." >&2 + exit 1;; + *) + if test -z "$PACKAGE"; then + PACKAGE=$1 + elif test -z "$MANUAL_TITLE"; then + MANUAL_TITLE=$1 + else + echo "$0: extra non-option argument \`$1'." >&2 + exit 1 + fi;; + esac + shift +done + +# For most of the following, the base name is just $PACKAGE +base=$PACKAGE + +if test -n "$srcfile"; then + # but here, we use the basename of $srcfile + base=`basename "$srcfile"` + case $base in + *.txi|*.texi|*.texinfo) base=`echo "$base"|sed 's/\.[texinfo]*$//'`;; + esac + PACKAGE=$base +elif test -s "$srcdir/$PACKAGE.texinfo"; then + srcfile=$srcdir/$PACKAGE.texinfo +elif test -s "$srcdir/$PACKAGE.texi"; then + srcfile=$srcdir/$PACKAGE.texi +elif test -s "$srcdir/$PACKAGE.txi"; then + srcfile=$srcdir/$PACKAGE.txi +else + echo "$0: cannot find .texinfo or .texi or .txi for $PACKAGE in $srcdir." >&2 + exit 1 +fi + +if test ! -r $GENDOCS_TEMPLATE_DIR/gendocs_template; then + echo "$0: cannot read $GENDOCS_TEMPLATE_DIR/gendocs_template." >&2 + echo "$0: it is available from $templateurl." >&2 + exit 1 +fi + +# Function to return size of $1 in something resembling kilobytes. +calcsize() +{ + size=`ls -ksl $1 | awk '{print $1}'` + echo $size +} + +# copy_images OUTDIR HTML-FILE... +# ------------------------------- +# Copy all the images needed by the HTML-FILEs into OUTDIR. Look +# for them in the -I directories. +copy_images() +{ + local odir + odir=$1 + shift + $PERL -n -e " +BEGIN { + \$me = '$prog'; + \$odir = '$odir'; + @dirs = qw($dirs); +} +" -e ' +/<img src="(.*?)"/g && ++$need{$1}; + +END { + #print "$me: @{[keys %need]}\n"; # for debugging, show images found. 
+ FILE: for my $f (keys %need) { + for my $d (@dirs) { + if (-f "$d/$f") { + use File::Basename; + my $dest = dirname ("$odir/$f"); + # + use File::Path; + -d $dest || mkpath ($dest) + || die "$me: cannot mkdir $dest: $!\n"; + # + use File::Copy; + copy ("$d/$f", $dest) + || die "$me: cannot copy $d/$f to $dest: $!\n"; + next FILE; + } + } + die "$me: $ARGV: cannot find image $f\n"; + } +} +' -- "$@" || exit 1 +} + +case $outdir in + /*) abs_outdir=$outdir;; + *) abs_outdir=$srcdir/$outdir;; +esac + +echo "Generating output formats for $srcfile" + +cmd="$SETLANG $MAKEINFO -o $PACKAGE.info $commonarg $infoarg \"$srcfile\"" +echo "Generating info file(s)... ($cmd)" +eval "$cmd" +mkdir -p "$outdir/" +tar czf "$outdir/$PACKAGE.info.tar.gz" $PACKAGE.info* +info_tgz_size=`calcsize "$outdir/$PACKAGE.info.tar.gz"` +# do not mv the info files, there's no point in having them available +# separately on the web. + +cmd="$SETLANG $TEXI2DVI $commonarg \"$srcfile\"" +echo "Generating dvi ... ($cmd)" +eval "$cmd" + +# compress/finish dvi: +gzip -f -9 $PACKAGE.dvi +dvi_gz_size=`calcsize $PACKAGE.dvi.gz` +mv $PACKAGE.dvi.gz "$outdir/" + +cmd="$SETLANG $TEXI2DVI --pdf $commonarg \"$srcfile\"" +echo "Generating pdf ... ($cmd)" +eval "$cmd" +pdf_size=`calcsize $PACKAGE.pdf` +mv $PACKAGE.pdf "$outdir/" + +opt="-o $PACKAGE.txt --no-split --no-headers $commonarg" +cmd="$SETLANG $MAKEINFO $opt \"$srcfile\"" +echo "Generating ASCII... ($cmd)" +eval "$cmd" +ascii_size=`calcsize $PACKAGE.txt` +gzip -f -9 -c $PACKAGE.txt >"$outdir/$PACKAGE.txt.gz" +ascii_gz_size=`calcsize "$outdir/$PACKAGE.txt.gz"` +mv $PACKAGE.txt "$outdir/" + +html_split() +{ + opt="--split=$1 $commonarg $htmlarg --node-files" + cmd="$SETLANG $TEXI2HTML --output $PACKAGE.html $opt \"$srcfile\"" + echo "Generating html by $1... ($cmd)" + eval "$cmd" + split_html_dir=$PACKAGE.html + ( + cd ${split_html_dir} || exit 1 + ln -sf ${PACKAGE}.html index.html + tar -czf "$abs_outdir/${PACKAGE}.html_$1.tar.gz" -- *.html + ) + eval html_$1_tgz_size=`calcsize "$outdir/${PACKAGE}.html_$1.tar.gz"` + rm -f "$outdir"/html_$1/*.html + mkdir -p "$outdir/html_$1/" + mv ${split_html_dir}/*.html "$outdir/html_$1/" + rmdir ${split_html_dir} +} + +if test -z "$use_texi2html"; then + opt="--no-split --html -o $PACKAGE.html $commonarg $htmlarg" + cmd="$SETLANG $MAKEINFO $opt \"$srcfile\"" + echo "Generating monolithic html... ($cmd)" + rm -rf $PACKAGE.html # in case a directory is left over + eval "$cmd" + html_mono_size=`calcsize $PACKAGE.html` + gzip -f -9 -c $PACKAGE.html >"$outdir/$PACKAGE.html.gz" + html_mono_gz_size=`calcsize "$outdir/$PACKAGE.html.gz"` + copy_images "$outdir/" $PACKAGE.html + mv $PACKAGE.html "$outdir/" + + opt="--html -o $PACKAGE.html $commonarg $htmlarg" + cmd="$SETLANG $MAKEINFO $opt \"$srcfile\"" + echo "Generating html by node... ($cmd)" + eval "$cmd" + split_html_dir=$PACKAGE.html + copy_images $split_html_dir/ $split_html_dir/*.html + ( + cd $split_html_dir || exit 1 + tar -czf "$abs_outdir/$PACKAGE.html_node.tar.gz" -- * + ) + html_node_tgz_size=`calcsize "$outdir/$PACKAGE.html_node.tar.gz"` + rm -rf "$outdir/html_node/" + mv $split_html_dir "$outdir/html_node/" +else + opt="--output $PACKAGE.html $commonarg $htmlarg" + cmd="$SETLANG $TEXI2HTML $opt \"$srcfile\"" + echo "Generating monolithic html... 
($cmd)" + rm -rf $PACKAGE.html # in case a directory is left over + eval "$cmd" + html_mono_size=`calcsize $PACKAGE.html` + gzip -f -9 -c $PACKAGE.html >"$outdir/$PACKAGE.html.gz" + html_mono_gz_size=`calcsize "$outdir/$PACKAGE.html.gz"` + mv $PACKAGE.html "$outdir/" + + html_split node + html_split chapter + html_split section +fi + +echo Making .tar.gz for sources... +d=`dirname $srcfile` +( + cd "$d" + srcfiles=`ls *.texinfo *.texi *.txi *.eps 2>/dev/null` || true + tar cvzfh "$abs_outdir/$PACKAGE.texi.tar.gz" $srcfiles +) +texi_tgz_size=`calcsize "$outdir/$PACKAGE.texi.tar.gz"` + +if test -n "$docbook"; then + opt="-o - --docbook $commonarg" + cmd="$SETLANG $MAKEINFO $opt \"$srcfile\" >${srcdir}/$PACKAGE-db.xml" + echo "Generating docbook XML... ($cmd)" + eval "$cmd" + docbook_xml_size=`calcsize $PACKAGE-db.xml` + gzip -f -9 -c $PACKAGE-db.xml >"$outdir/$PACKAGE-db.xml.gz" + docbook_xml_gz_size=`calcsize "$outdir/$PACKAGE-db.xml.gz"` + mv $PACKAGE-db.xml "$outdir/" + + split_html_db_dir=html_node_db + opt="$commonarg -o $split_html_db_dir" + cmd="$DOCBOOK2HTML $opt \"${outdir}/$PACKAGE-db.xml\"" + echo "Generating docbook HTML... ($cmd)" + eval "$cmd" + ( + cd ${split_html_db_dir} || exit 1 + tar -czf "$abs_outdir/${PACKAGE}.html_node_db.tar.gz" -- *.html + ) + html_node_db_tgz_size=`calcsize "$outdir/${PACKAGE}.html_node_db.tar.gz"` + rm -f "$outdir"/html_node_db/*.html + mkdir -p "$outdir/html_node_db" + mv ${split_html_db_dir}/*.html "$outdir/html_node_db/" + rmdir ${split_html_db_dir} + + cmd="$DOCBOOK2TXT \"${outdir}/$PACKAGE-db.xml\"" + echo "Generating docbook ASCII... ($cmd)" + eval "$cmd" + docbook_ascii_size=`calcsize $PACKAGE-db.txt` + mv $PACKAGE-db.txt "$outdir/" + + cmd="$DOCBOOK2PDF \"${outdir}/$PACKAGE-db.xml\"" + echo "Generating docbook PDF... ($cmd)" + eval "$cmd" + docbook_pdf_size=`calcsize $PACKAGE-db.pdf` + mv $PACKAGE-db.pdf "$outdir/" +fi + +echo "Writing index file..." +if test -z "$use_texi2html"; then + CONDS="/%%IF *HTML_SECTION%%/,/%%ENDIF *HTML_SECTION%%/d;\ + /%%IF *HTML_CHAPTER%%/,/%%ENDIF *HTML_CHAPTER%%/d" +else + CONDS="/%%ENDIF.*%%/d;/%%IF *HTML_SECTION%%/d;/%%IF *HTML_CHAPTER%%/d" +fi + +curdate=`$SETLANG date '+%B %d, %Y'` +sed \ + -e "s!%%TITLE%%!$MANUAL_TITLE!g" \ + -e "s!%%EMAIL%%!$EMAIL!g" \ + -e "s!%%PACKAGE%%!$PACKAGE!g" \ + -e "s!%%DATE%%!$curdate!g" \ + -e "s!%%HTML_MONO_SIZE%%!$html_mono_size!g" \ + -e "s!%%HTML_MONO_GZ_SIZE%%!$html_mono_gz_size!g" \ + -e "s!%%HTML_NODE_TGZ_SIZE%%!$html_node_tgz_size!g" \ + -e "s!%%HTML_SECTION_TGZ_SIZE%%!$html_section_tgz_size!g" \ + -e "s!%%HTML_CHAPTER_TGZ_SIZE%%!$html_chapter_tgz_size!g" \ + -e "s!%%INFO_TGZ_SIZE%%!$info_tgz_size!g" \ + -e "s!%%DVI_GZ_SIZE%%!$dvi_gz_size!g" \ + -e "s!%%PDF_SIZE%%!$pdf_size!g" \ + -e "s!%%ASCII_SIZE%%!$ascii_size!g" \ + -e "s!%%ASCII_GZ_SIZE%%!$ascii_gz_size!g" \ + -e "s!%%TEXI_TGZ_SIZE%%!$texi_tgz_size!g" \ + -e "s!%%DOCBOOK_HTML_NODE_TGZ_SIZE%%!$html_node_db_tgz_size!g" \ + -e "s!%%DOCBOOK_ASCII_SIZE%%!$docbook_ascii_size!g" \ + -e "s!%%DOCBOOK_PDF_SIZE%%!$docbook_pdf_size!g" \ + -e "s!%%DOCBOOK_XML_SIZE%%!$docbook_xml_size!g" \ + -e "s!%%DOCBOOK_XML_GZ_SIZE%%!$docbook_xml_gz_size!g" \ + -e "s,%%SCRIPTURL%%,$scripturl,g" \ + -e "s!%%SCRIPTNAME%%!$prog!g" \ + -e "$CONDS" \ +$GENDOCS_TEMPLATE_DIR/gendocs_template >"$outdir/index.html" + +echo "Done, see $outdir/ subdirectory for new files." 
+ +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "scriptversion=" +# time-stamp-format: "%:y-%02m-%02d.%02H" +# time-stamp-end: "$" +# End: diff --git a/lib/gendocs_template b/lib/gendocs_template new file mode 100644 index 000000000..a62ad6167 --- /dev/null +++ b/lib/gendocs_template @@ -0,0 +1,87 @@ +<!--#include virtual="/server/header.html" --> +<title>%%TITLE%% - GNU Project - Free Software Foundation (FSF)</title> +<!--#include virtual="/server/banner.html" --> +<h2>%%TITLE%%</h2> + +<address>Free Software Foundation</address> +<address>last updated %%DATE%%</address> + +<p>This manual (%%PACKAGE%%) is available in the following formats:</p> + +<ul> +<li><a href="%%PACKAGE%%.html">HTML + (%%HTML_MONO_SIZE%%K bytes)</a> - entirely on one web page.</li> +<li><a href="html_node/index.html">HTML</a> - with one web page per + node.</li> +%%IF HTML_SECTION%% +<li><a href="html_section/index.html">HTML</a> - with one web page per + section.</li> +%%ENDIF HTML_SECTION%% +%%IF HTML_CHAPTER%% +<li><a href="html_chapter/index.html">HTML</a> - with one web page per + chapter.</li> +%%ENDIF HTML_CHAPTER%% +<li><a href="%%PACKAGE%%.html.gz">HTML compressed + (%%HTML_MONO_GZ_SIZE%%K gzipped characters)</a> - entirely on + one web page.</li> +<li><a href="%%PACKAGE%%.html_node.tar.gz">HTML compressed + (%%HTML_NODE_TGZ_SIZE%%K gzipped tar file)</a> - + with one web page per node.</li> +%%IF HTML_SECTION%% +<li><a href="%%PACKAGE%%.html_section.tar.gz">HTML compressed + (%%HTML_SECTION_TGZ_SIZE%%K gzipped tar file)</a> - + with one web page per section.</li> +%%ENDIF HTML_SECTION%% +%%IF HTML_CHAPTER%% +<li><a href="%%PACKAGE%%.html_chapter.tar.gz">HTML compressed + (%%HTML_CHAPTER_TGZ_SIZE%%K gzipped tar file)</a> - + with one web page per chapter.</li> +%%ENDIF HTML_CHAPTER%% +<li><a href="%%PACKAGE%%.info.tar.gz">Info document + (%%INFO_TGZ_SIZE%%K bytes gzipped tar file)</a>.</li> +<li><a href="%%PACKAGE%%.txt">ASCII text + (%%ASCII_SIZE%%K bytes)</a>.</li> +<li><a href="%%PACKAGE%%.txt.gz">ASCII text compressed + (%%ASCII_GZ_SIZE%%K bytes gzipped)</a>.</li> +<li><a href="%%PACKAGE%%.dvi.gz">TeX dvi file + (%%DVI_GZ_SIZE%%K bytes gzipped)</a>.</li> +<li><a href="%%PACKAGE%%.pdf">PDF file + (%%PDF_SIZE%%K bytes)</a>.</li> +<li><a href="%%PACKAGE%%.texi.tar.gz">Texinfo source + (%%TEXI_TGZ_SIZE%%K bytes gzipped tar file).</a></li> +</ul> + +<p>You can <a href="http://shop.fsf.org/">buy printed copies of +some manuals</a> (among other items) from the Free Software Foundation; +this helps support FSF activities.</p> + +<p>(This page generated by the <a href="%%SCRIPTURL%%">%%SCRIPTNAME%% +script</a>.)</p> + +<!-- If needed, change the copyright block at the bottom. In general, + all pages on the GNU web server should have the section about + verbatim copying. Please do NOT remove this without talking + with the webmasters first. + Please make sure the copyright date is consistent with the document + and that it is like this: "2001, 2002", not this: "2001-2002". --> +</div><!-- for id="content", starts in the include above --> +<!--#include virtual="/server/footer.html" --> +<div id="footer"> + +<p>Please send general FSF & GNU inquiries to +<a href="mailto:gnu@gnu.org"><gnu@gnu.org></a>. 
+There are also <a href="/contact/">other ways to contact</a>
+the FSF.<br />
+Please send broken links and other corrections or suggestions to
+<a href="mailto:%%EMAIL%%"><%%EMAIL%%></a>.</p>
+
+<p>Copyright © 2012 Free Software Foundation, Inc.</p>
+
+<p>Verbatim copying and distribution of this entire article are
+permitted worldwide, without royalty, in any medium, provided this
+notice, and the copyright notice, are preserved.</p>
+
+</div>
+</div>
+</body>
+</html>
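
A note on the new release-type logic in Makefile.am: $(determine_release_type)
now distinguishes major stable, minor stable, and beta versions with three
regular expressions, and both the upload destination (ftp vs. alpha) and the
wording of the generated announcement depend on that classification.  The
stand-alone sketch below lets the classification be exercised outside of make;
the regexes are copied verbatim from the Makefile.am hunk above, while the
classify() helper, the use of "grep -E" in place of $(EGREP), and the sample
version strings are illustrative additions, not part of this commit.

    #!/bin/sh
    # Sketch only: reproduces the version classification added to
    # Makefile.am ($(determine_release_type)); the regexes are taken from
    # that hunk, everything else here is illustrative.
    base_version_rx='^[1-9][0-9]*\.[0-9][0-9]*'

    classify ()
    {
      if echo "$1" | grep -E "${base_version_rx}\$" >/dev/null; then
        echo "$1: Major release (announced as a major release, uploaded to ftp)"
      elif echo "$1" | grep -E "${base_version_rx}\.[0-9][0-9]*\$" >/dev/null; then
        echo "$1: Minor release (announced as a maintenance release, uploaded to ftp)"
      elif echo "$1" | grep -E "${base_version_rx}(\.[0-9][0-9]*)?[bdfhjlnprtvxz]\$" >/dev/null; then
        echo "$1: Beta release (announced as a test release, uploaded to alpha)"
      else
        echo "$1: invalid version for a release"
      fi
    }

    # Sample version strings (illustrative, not taken from the commit):
    classify 1.13      # stable major release
    classify 1.13.2    # stable minor (maintenance) release
    classify 1.13b     # beta (test) release
    classify 1.13.0b   # beta of a minor release
    classify 1.13rc1   # rejected: not a valid release version under these rules

The "make print-release-type" rule added above performs the same classification
for the current $(VERSION), and "make announcement" uses the resulting
announcement type when generating the draft announcement text.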