Diffstat (limited to 'contrib')
-rw-r--r--  contrib/blameview/README                                     |   9
-rwxr-xr-x  contrib/blameview/blameview.perl                             | 155
-rw-r--r--  contrib/completion/git-completion.bash                       | 190
-rw-r--r--  contrib/completion/git-completion.zsh                        |  27
-rw-r--r--  contrib/completion/git-prompt.sh                             | 345
-rw-r--r--  contrib/continuous/cidaemon                                  | 503
-rw-r--r--  contrib/continuous/post-receive-cinotify                     | 104
-rw-r--r--  contrib/credential/osxkeychain/git-credential-osxkeychain.c  |  12
-rw-r--r--  contrib/mw-to-git/.perlcriticrc                              |  28
-rw-r--r--  contrib/mw-to-git/Git/Mediawiki.pm                           | 100
-rw-r--r--  contrib/mw-to-git/Makefile                                   |  35
-rwxr-xr-x  contrib/mw-to-git/bin-wrapper/git                            |  14
-rwxr-xr-x  contrib/mw-to-git/git-mw.perl                                | 368
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki.perl                  | 667
-rwxr-xr-x  contrib/mw-to-git/t/test-gitmw-lib.sh                        |  27
-rw-r--r--  contrib/mw-to-git/t/test.config                              |   4
-rw-r--r--  contrib/patches/docbook-xsl-manpages-charmap.patch           |  21
-rwxr-xr-x  contrib/remote-helpers/git-remote-bzr                        | 117
-rwxr-xr-x  contrib/remote-helpers/git-remote-hg                         | 510
-rwxr-xr-x  contrib/remote-helpers/test-bzr.sh                           | 492
-rwxr-xr-x  contrib/remote-helpers/test-hg-bidi.sh                       |  35
-rwxr-xr-x  contrib/remote-helpers/test-hg-hg-git.sh                     |  55
-rwxr-xr-x  contrib/remote-helpers/test-hg.sh                            | 725
-rwxr-xr-x  contrib/subtree/git-subtree.sh                               |   5
24 files changed, 2493 insertions, 2055 deletions
diff --git a/contrib/blameview/README b/contrib/blameview/README
deleted file mode 100644
index fada5ce909..0000000000
--- a/contrib/blameview/README
+++ /dev/null
@@ -1,9 +0,0 @@
-This is a sample program to use 'git-blame --incremental', based
-on this message.
-
-From: Jeff King <peff@peff.net>
-Subject: Re: More precise tag following
-To: Linus Torvalds <torvalds@linux-foundation.org>
-Cc: git@vger.kernel.org
-Date: Sat, 27 Jan 2007 18:52:38 -0500
-Message-ID: <20070127235238.GA28706@coredump.intra.peff.net>
diff --git a/contrib/blameview/blameview.perl b/contrib/blameview/blameview.perl
deleted file mode 100755
index 1dec00137b..0000000000
--- a/contrib/blameview/blameview.perl
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/perl
-
-use Gtk2 -init;
-use Gtk2::SimpleList;
-
-my $hash;
-my $fn;
-if ( @ARGV == 1 ) {
- $hash = "HEAD";
- $fn = shift;
-} elsif ( @ARGV == 2 ) {
- $hash = shift;
- $fn = shift;
-} else {
- die "Usage blameview [<rev>] <filename>";
-}
-
-Gtk2::Rc->parse_string(<<'EOS');
-style "treeview_style"
-{
- GtkTreeView::vertical-separator = 0
-}
-class "GtkTreeView" style "treeview_style"
-EOS
-
-my $window = Gtk2::Window->new('toplevel');
-$window->signal_connect(destroy => sub { Gtk2->main_quit });
-my $vpan = Gtk2::VPaned->new();
-$window->add($vpan);
-my $scrolled_window = Gtk2::ScrolledWindow->new;
-$vpan->pack1($scrolled_window, 1, 1);
-my $fileview = Gtk2::SimpleList->new(
- 'Commit' => 'text',
- 'FileLine' => 'text',
- 'Data' => 'text'
-);
-$scrolled_window->add($fileview);
-$fileview->get_column(0)->set_spacing(0);
-$fileview->set_size_request(1024, 768);
-$fileview->set_rules_hint(1);
-$fileview->signal_connect (row_activated => sub {
- my ($sl, $path, $column) = @_;
- my $row_ref = $sl->get_row_data_from_path ($path);
- system("blameview @$row_ref[0]~1 $fn &");
- });
-
-my $commitwindow = Gtk2::ScrolledWindow->new();
-$commitwindow->set_policy ('GTK_POLICY_AUTOMATIC','GTK_POLICY_AUTOMATIC');
-$vpan->pack2($commitwindow, 1, 1);
-my $commit_text = Gtk2::TextView->new();
-my $commit_buffer = Gtk2::TextBuffer->new();
-$commit_text->set_buffer($commit_buffer);
-$commitwindow->add($commit_text);
-
-$fileview->signal_connect (cursor_changed => sub {
- my ($sl) = @_;
- my ($path, $focus_column) = $sl->get_cursor();
- my $row_ref = $sl->get_row_data_from_path ($path);
- my $c_fh;
- open($c_fh, '-|', "git cat-file commit @$row_ref[0]")
- or die "unable to find commit @$row_ref[0]";
- my @buffer = <$c_fh>;
- $commit_buffer->set_text("@buffer");
- close($c_fh);
- });
-
-my $fh;
-open($fh, '-|', "git cat-file blob $hash:$fn")
- or die "unable to open $fn: $!";
-
-while(<$fh>) {
- chomp;
- $fileview->{data}->[$.] = ['HEAD', "$fn:$.", $_];
-}
-
-my $blame;
-open($blame, '-|', qw(git blame --incremental --), $fn, $hash)
- or die "cannot start git-blame $fn";
-
-Glib::IO->add_watch(fileno($blame), 'in', \&read_blame_line);
-
-$window->show_all;
-Gtk2->main;
-exit 0;
-
-my %commitinfo = ();
-
-sub flush_blame_line {
- my ($attr) = @_;
-
- return unless defined $attr;
-
- my ($commit, $s_lno, $lno, $cnt) =
- @{$attr}{qw(COMMIT S_LNO LNO CNT)};
-
- my ($filename, $author, $author_time, $author_tz) =
- @{$commitinfo{$commit}}{qw(FILENAME AUTHOR AUTHOR-TIME AUTHOR-TZ)};
- my $info = $author . ' ' . format_time($author_time, $author_tz);
-
- for(my $i = 0; $i < $cnt; $i++) {
- @{$fileview->{data}->[$lno+$i-1]}[0,1,2] =
- (substr($commit, 0, 8), $filename . ':' . ($s_lno+$i));
- }
-}
-
-my $buf;
-my $current;
-sub read_blame_line {
-
- my $r = sysread($blame, $buf, 1024, length($buf));
- die "I/O error" unless defined $r;
-
- if ($r == 0) {
- flush_blame_line($current);
- $current = undef;
- return 0;
- }
-
- while ($buf =~ s/([^\n]*)\n//) {
- my $line = $1;
-
- if (($commit, $s_lno, $lno, $cnt) =
- ($line =~ /^([0-9a-f]{40}) (\d+) (\d+) (\d+)$/)) {
- flush_blame_line($current);
- $current = +{
- COMMIT => $1,
- S_LNO => $2,
- LNO => $3,
- CNT => $4,
- };
- next;
- }
-
- # extended attribute values
- if ($line =~ /^(author|author-mail|author-time|author-tz|committer|committer-mail|committer-time|committer-tz|summary|filename) (.*)$/) {
- my $commit = $current->{COMMIT};
- $commitinfo{$commit}{uc($1)} = $2;
- next;
- }
- }
- return 1;
-}
-
-sub format_time {
- my $time = shift;
- my $tz = shift;
-
- my $minutes = $tz < 0 ? 0-$tz : $tz;
- $minutes = ($minutes / 100)*60 + ($minutes % 100);
- $minutes = $tz < 0 ? 0-$minutes : $minutes;
- $time += $minutes * 60;
- my @t = gmtime($time);
- return sprintf('%04d-%02d-%02d %02d:%02d:%02d %s',
- $t[5] + 1900, @t[4,3,2,1,0], $tz);
-}
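For reference, the incremental stream that blameview parsed has exactly the two shapes matched by the regexes above: a group header line "<sha1> <source-line> <result-line> <num-lines>", followed by key/value attribute lines such as author and filename. A minimal sketch of consuming the same stream from the shell (revision and path are placeholders):

    git blame --incremental HEAD -- Makefile |
    while read -r key rest; do
        case "$key" in
        author|author-mail|author-time|author-tz|summary|filename)
            echo "  $key = $rest" ;;           # attribute line
        committer*|previous|boundary)
            : ;;                               # other attributes, ignored here
        *)
            echo "group: $key ($rest)" ;;      # "<sha1> <s_lno> <lno> <cnt>"
        esac
    done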
diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash
index 1c35eef56a..cd509a5d63 100644
--- a/contrib/completion/git-completion.bash
+++ b/contrib/completion/git-completion.bash
@@ -33,8 +33,6 @@ esac
# returns location of .git repo
__gitdir ()
{
- # Note: this function is duplicated in git-prompt.sh
- # When updating it, make sure you update the other one to match.
if [ -z "${1-}" ]; then
if [ -n "${__git_dir-}" ]; then
echo "$__git_dir"
@@ -252,106 +250,50 @@ __gitcomp_file ()
# since tilde expansion is not applied.
# This means that COMPREPLY will be empty and Bash default
# completion will be used.
- COMPREPLY=($(compgen -P "${2-}" -W "$1" -- "${3-$cur}"))
+ __gitcompadd "$1" "${2-}" "${3-$cur}" ""
- # Tell Bash that compspec generates filenames.
- compopt -o filenames 2>/dev/null
+ # use a hack to enable file mode in bash < 4
+ compopt -o filenames +o nospace 2>/dev/null ||
+ compgen -f /non-existing-dir/ > /dev/null
}
-__git_index_file_list_filter_compat ()
-{
- local path
-
- while read -r path; do
- case "$path" in
- ?*/*) echo "${path%%/*}/" ;;
- *) echo "$path" ;;
- esac
- done
-}
-
-__git_index_file_list_filter_bash ()
-{
- local path
-
- while read -r path; do
- case "$path" in
- ?*/*)
- # XXX if we append a slash to directory names when using
- # `compopt -o filenames`, Bash will append another slash.
- # This is pretty stupid, and this the reason why we have to
- # define a compatible version for this function.
- echo "${path%%/*}" ;;
- *)
- echo "$path" ;;
- esac
- done
-}
-
-# Process path list returned by "ls-files" and "diff-index --name-only"
-# commands, in order to list only file names relative to a specified
-# directory, and append a slash to directory names.
-__git_index_file_list_filter ()
-{
- # Default to Bash >= 4.x
- __git_index_file_list_filter_bash
-}
-
-# Execute git ls-files, returning paths relative to the directory
-# specified in the first argument, and using the options specified in
-# the second argument.
+# Execute 'git ls-files', unless the --committable option is specified, in
+# which case it runs 'git diff-index' to find out the files that can be
+# committed. It returns paths relative to the directory specified in the first
+# argument, using the options specified in the second argument.
__git_ls_files_helper ()
{
(
test -n "${CDPATH+set}" && unset CDPATH
- # NOTE: $2 is not quoted in order to support multiple options
- cd "$1" && git ls-files --exclude-standard $2
+ cd "$1"
+ if [ "$2" == "--committable" ]; then
+ git diff-index --name-only --relative HEAD
+ else
+ # NOTE: $2 is not quoted in order to support multiple options
+ git ls-files --exclude-standard $2
+ fi
) 2>/dev/null
}
-# Execute git diff-index, returning paths relative to the directory
-# specified in the first argument, and using the tree object id
-# specified in the second argument.
-__git_diff_index_helper ()
-{
- (
- test -n "${CDPATH+set}" && unset CDPATH
- cd "$1" && git diff-index --name-only --relative "$2"
- ) 2>/dev/null
-}
-
# __git_index_files accepts 1 or 2 arguments:
# 1: Options to pass to ls-files (required).
-# Supported options are --cached, --modified, --deleted, --others,
-# and --directory.
# 2: A directory path (optional).
# If provided, only files within the specified directory are listed.
# Sub directories are never recursed. Path must have a trailing
# slash.
__git_index_files ()
{
- local dir="$(__gitdir)" root="${2-.}"
+ local dir="$(__gitdir)" root="${2-.}" file
if [ -d "$dir" ]; then
- __git_ls_files_helper "$root" "$1" | __git_index_file_list_filter |
- sort | uniq
- fi
-}
-
-# __git_diff_index_files accepts 1 or 2 arguments:
-# 1) The id of a tree object.
-# 2) A directory path (optional).
-# If provided, only files within the specified directory are listed.
-# Sub directories are never recursed. Path must have a trailing
-# slash.
-__git_diff_index_files ()
-{
- local dir="$(__gitdir)" root="${2-.}"
-
- if [ -d "$dir" ]; then
- __git_diff_index_helper "$root" "$1" | __git_index_file_list_filter |
- sort | uniq
+ __git_ls_files_helper "$root" "$1" |
+ while read -r file; do
+ case "$file" in
+ ?*/*) echo "${file%%/*}" ;;
+ *) echo "$file" ;;
+ esac
+ done | sort | uniq
fi
}
@@ -427,14 +369,8 @@ __git_refs ()
done
;;
*)
- git ls-remote "$dir" HEAD ORIG_HEAD 'refs/tags/*' 'refs/heads/*' 'refs/remotes/*' 2>/dev/null | \
- while read -r hash i; do
- case "$i" in
- *^{}) ;;
- refs/*) echo "${i#refs/*/}" ;;
- *) echo "$i" ;;
- esac
- done
+ echo "HEAD"
+ git for-each-ref --format="%(refname:short)" -- "refs/remotes/$dir/" | sed -e "s#^$dir/##"
;;
esac
}
@@ -552,44 +488,23 @@ __git_complete_revlist_file ()
}
-# __git_complete_index_file requires 1 argument: the options to pass to
-# ls-file
+# __git_complete_index_file requires 1 argument:
+# 1: the options to pass to ls-file
+#
+# The exception is --committable, which finds the files appropriate for committing.
__git_complete_index_file ()
{
- local pfx cur_="$cur"
+ local pfx="" cur_="$cur"
case "$cur_" in
?*/*)
pfx="${cur_%/*}"
cur_="${cur_##*/}"
pfx="${pfx}/"
-
- __gitcomp_file "$(__git_index_files "$1" "$pfx")" "$pfx" "$cur_"
- ;;
- *)
- __gitcomp_file "$(__git_index_files "$1")" "" "$cur_"
;;
esac
-}
-
-# __git_complete_diff_index_file requires 1 argument: the id of a tree
-# object
-__git_complete_diff_index_file ()
-{
- local pfx cur_="$cur"
-
- case "$cur_" in
- ?*/*)
- pfx="${cur_%/*}"
- cur_="${cur_##*/}"
- pfx="${pfx}/"
- __gitcomp_file "$(__git_diff_index_files "$1" "$pfx")" "$pfx" "$cur_"
- ;;
- *)
- __gitcomp_file "$(__git_diff_index_files "$1")" "" "$cur_"
- ;;
- esac
+ __gitcomp_file "$(__git_index_files "$1" "$pfx")" "$pfx" "$cur_"
}
__git_complete_file ()
@@ -733,6 +648,7 @@ __git_list_porcelain_commands ()
cat-file) : plumbing;;
check-attr) : plumbing;;
check-ignore) : plumbing;;
+ check-mailmap) : plumbing;;
check-ref-format) : plumbing;;
checkout-index) : plumbing;;
commit-tree) : plumbing;;
@@ -1213,7 +1129,7 @@ _git_commit ()
esac
if git rev-parse --verify --quiet HEAD >/dev/null; then
- __git_complete_diff_index_file "HEAD"
+ __git_complete_index_file "--committable"
else
# This is the first commit
__git_complete_index_file "--cached"
@@ -1246,7 +1162,7 @@ __git_diff_common_options="--stat --numstat --shortstat --summary
--no-prefix --src-prefix= --dst-prefix=
--inter-hunk-context=
--patience --histogram --minimal
- --raw
+ --raw --word-diff
--dirstat --dirstat= --dirstat-by-file
--dirstat-by-file= --cumulative
--diff-algorithm=
@@ -1294,7 +1210,7 @@ _git_difftool ()
return
;;
esac
- __git_complete_file
+ __git_complete_revlist_file
}
__git_fetch_options="
@@ -2360,7 +2276,7 @@ _git_show ()
return
;;
esac
- __git_complete_file
+ __git_complete_revlist_file
}
_git_show_branch ()
@@ -2575,9 +2491,10 @@ __git_main ()
i="${words[c]}"
case "$i" in
--git-dir=*) __git_dir="${i#--git-dir=}" ;;
+ --git-dir) ((c++)) ; __git_dir="${words[c]}" ;;
--bare) __git_dir="." ;;
--help) command="help"; break ;;
- -c) c=$((++c)) ;;
+ -c|--work-tree|--namespace) ((c++)) ;;
-*) ;;
*) command="$i"; break ;;
esac
@@ -2595,6 +2512,7 @@ __git_main ()
--exec-path
--exec-path=
--html-path
+ --man-path
--info-path
--work-tree=
--namespace=
@@ -2663,7 +2581,7 @@ if [[ -n ${ZSH_VERSION-} ]]; then
--*=*|*.) ;;
*) c="$c " ;;
esac
- array[$#array+1]="$c"
+ array+=("$c")
done
compset -P '*[=:]'
compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
@@ -2689,35 +2607,19 @@ if [[ -n ${ZSH_VERSION-} ]]; then
compadd -Q -p "${2-}" -f -- ${=1} && _ret=0
}
- __git_zsh_helper ()
- {
- emulate -L ksh
- local cur cword prev
- cur=${words[CURRENT-1]}
- prev=${words[CURRENT-2]}
- let cword=CURRENT-1
- __${service}_main
- }
-
_git ()
{
- emulate -L zsh
- local _ret=1
- __git_zsh_helper
- let _ret && _default -S '' && _ret=0
+ local _ret=1 cur cword prev
+ cur=${words[CURRENT]}
+ prev=${words[CURRENT-1]}
+ let cword=CURRENT-1
+ emulate ksh -c __${service}_main
+ let _ret && _default && _ret=0
return _ret
}
compdef _git git gitk
return
-elif [[ -n ${BASH_VERSION-} ]]; then
- if ((${BASH_VERSINFO[0]} < 4)); then
- # compopt is not supported
- __git_index_file_list_filter ()
- {
- __git_index_file_list_filter_compat
- }
- fi
fi
__git_func_wrap ()
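Worth noting: the inline while/case loop that now lives in __git_index_files replaces three helper functions with a single pass that trims each path to its first component, so completion descends one directory level at a time. A standalone sketch of the same filter (sample paths invented):

    printf '%s\n' Makefile contrib/subtree/git-subtree.sh contrib/README |
    while read -r file; do
        case "$file" in
        ?*/*)  echo "${file%%/*}" ;;   # "contrib/..." -> "contrib"
        *)     echo "$file" ;;
        esac
    done | sort | uniq                 # -> "Makefile" and "contrib", once each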
diff --git a/contrib/completion/git-completion.zsh b/contrib/completion/git-completion.zsh
index 2565d2eef4..fac5e711eb 100644
--- a/contrib/completion/git-completion.zsh
+++ b/contrib/completion/git-completion.zsh
@@ -4,18 +4,17 @@
#
# Copyright (c) 2012-2013 Felipe Contreras <felipe.contreras@gmail.com>
#
-# You need git's bash completion script installed somewhere, by default on the
-# same directory as this script.
+# You need git's bash completion script installed somewhere; by default, it
+# is looked for in the locations bash-completion uses.
#
-# If your script is on ~/.git-completion.sh instead, you can configure it on
-# your ~/.zshrc:
+# If your script is somewhere else, you can configure it on your ~/.zshrc:
#
# zstyle ':completion:*:*:git:*' script ~/.git-completion.sh
#
-# The recommended way to install this script is to copy to
-# '~/.zsh/completion/_git', and then add the following to your ~/.zshrc file:
+# The recommended way to install this script is to copy to '~/.zsh/_git', and
+# then add the following to your ~/.zshrc file:
#
-# fpath=(~/.zsh/completion $fpath)
+# fpath=(~/.zsh $fpath)
complete ()
{
@@ -27,7 +26,19 @@ zstyle -T ':completion:*:*:git:*' tag-order && \
zstyle ':completion:*:*:git:*' tag-order 'common-commands'
zstyle -s ":completion:*:*:git:*" script script
-test -z "$script" && script="$(dirname ${funcsourcetrace[1]%:*})"/git-completion.bash
+if [ -z "$script" ]; then
+ local -a locations
+ local e
+ locations=(
+ '/etc/bash_completion.d/git' # fedora, old debian
+ '/usr/share/bash-completion/completions/git' # arch, ubuntu, new debian
+ '/usr/share/bash-completion/git' # gentoo
+ $(dirname ${funcsourcetrace[1]%:*})/git-completion.bash
+ )
+ for e in $locations; do
+ test -f $e && script="$e" && break
+ done
+fi
ZSH_VERSION='' . "$script"
__gitcomp ()
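Per the updated header comment, installation now amounts to dropping the script onto zsh's function search path; something like the following, using the paths suggested above:

    mkdir -p ~/.zsh
    cp contrib/completion/git-completion.zsh ~/.zsh/_git
    # make sure ~/.zshrc extends fpath before compinit runs
    echo 'fpath=(~/.zsh $fpath)' >> ~/.zshrc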
diff --git a/contrib/completion/git-prompt.sh b/contrib/completion/git-prompt.sh
index eaf5c369aa..a81ef5a482 100644
--- a/contrib/completion/git-prompt.sh
+++ b/contrib/completion/git-prompt.sh
@@ -3,7 +3,7 @@
# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
# Distributed under the GNU General Public License, version 2.0.
#
-# This script allows you to see the current branch in your prompt.
+# This script allows you to see repository status in your prompt.
#
# To enable:
#
@@ -13,23 +13,27 @@
# 3a) Change your PS1 to call __git_ps1 as
# command-substitution:
# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
-# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
+# ZSH: setopt PROMPT_SUBST ; PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
# the optional argument will be used as format string.
-# 3b) Alternatively, if you are using bash, __git_ps1 can be
-# used for PROMPT_COMMAND with two parameters, <pre> and
-# <post>, which are strings you would put in $PS1 before
-# and after the status string generated by the git-prompt
-# machinery. e.g.
-# PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
-# will show username, at-sign, host, colon, cwd, then
-# various status string, followed by dollar and SP, as
-# your prompt.
+# 3b) Alternatively, for a slightly faster prompt, __git_ps1 can
+# be used for PROMPT_COMMAND in Bash or for precmd() in Zsh
+# with two parameters, <pre> and <post>, which are strings
+# you would put in $PS1 before and after the status string
+# generated by the git-prompt machinery. e.g.
+# Bash: PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
+# will show username, at-sign, host, colon, cwd, then
+# various status string, followed by dollar and SP, as
+# your prompt.
+# ZSH: precmd () { __git_ps1 "%n" ":%~$ " "|%s" }
+# will show username, pipe, then various status string,
+# followed by colon, cwd, dollar and SP, as your prompt.
# Optionally, you can supply a third argument with a printf
# format string to finetune the output of the branch status
#
-# The argument to __git_ps1 will be displayed only if you are currently
-# in a git repository. The %s token will be the name of the current
-# branch.
+# The repository status will be displayed only if you are currently in a
+# git repository. The %s token is the placeholder for the shown status.
+#
+# The prompt status always includes the current branch name.
#
# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value,
# unstaged (*) and staged (+) changes will be shown next to the branch
@@ -77,31 +81,8 @@
#
# If you would like a colored hint about the current dirty state, set
# GIT_PS1_SHOWCOLORHINTS to a nonempty value. The colors are based on
-# the colored output of "git status -sb".
-
-# __gitdir accepts 0 or 1 arguments (i.e., location)
-# returns location of .git repo
-__gitdir ()
-{
- # Note: this function is duplicated in git-completion.bash
- # When updating it, make sure you update the other one to match.
- if [ -z "${1-}" ]; then
- if [ -n "${__git_dir-}" ]; then
- echo "$__git_dir"
- elif [ -n "${GIT_DIR-}" ]; then
- test -d "${GIT_DIR-}" || return 1
- echo "$GIT_DIR"
- elif [ -d .git ]; then
- echo .git
- else
- git rev-parse --git-dir 2>/dev/null
- fi
- elif [ -d "$1/.git" ]; then
- echo "$1/.git"
- else
- echo "$1"
- fi
-}
+# the colored output of "git status -sb" and are available only when
+# using __git_ps1 for PROMPT_COMMAND or precmd.
# stores the divergence from upstream in $p
# used by GIT_PS1_SHOWUPSTREAM
@@ -124,7 +105,7 @@ __git_ps1_show_upstream ()
fi
;;
svn-remote.*.url)
- svn_remote[ $((${#svn_remote[@]} + 1)) ]="$value"
+ svn_remote[$((${#svn_remote[@]} + 1))]="$value"
svn_url_pattern+="\\|$value"
upstream=svn+git # default upstream is SVN if available, else git
;;
@@ -146,10 +127,11 @@ __git_ps1_show_upstream ()
svn*)
# get the upstream from the "git-svn-id: ..." in a commit message
# (git-svn uses essentially the same procedure internally)
- local svn_upstream=($(git log --first-parent -1 \
+ local -a svn_upstream
+ svn_upstream=($(git log --first-parent -1 \
--grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null))
if [[ 0 -ne ${#svn_upstream[@]} ]]; then
- svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]}
+ svn_upstream=${svn_upstream[${#svn_upstream[@]} - 2]}
svn_upstream=${svn_upstream%@*}
local n_stop="${#svn_remote[@]}"
for ((n=1; n <= n_stop; n++)); do
@@ -222,6 +204,51 @@ __git_ps1_show_upstream ()
}
+# Helper function that is meant to be called from __git_ps1. It
+# injects color codes into the appropriate gitstring variables used
+# to build a gitstring.
+__git_ps1_colorize_gitstring ()
+{
+ if [[ -n ${ZSH_VERSION-} ]]; then
+ local c_red='%F{red}'
+ local c_green='%F{green}'
+ local c_lblue='%F{blue}'
+ local c_clear='%f'
+ else
+ # Using \[ and \] around colors is necessary to prevent
+ # issues with command line editing/browsing/completion!
+ local c_red='\[\e[31m\]'
+ local c_green='\[\e[32m\]'
+ local c_lblue='\[\e[1;34m\]'
+ local c_clear='\[\e[0m\]'
+ fi
+ local bad_color=$c_red
+ local ok_color=$c_green
+ local flags_color="$c_lblue"
+
+ local branch_color=""
+ if [ $detached = no ]; then
+ branch_color="$ok_color"
+ else
+ branch_color="$bad_color"
+ fi
+ c="$branch_color$c"
+
+ z="$c_clear$z"
+ if [ "$w" = "*" ]; then
+ w="$bad_color$w"
+ fi
+ if [ -n "$i" ]; then
+ i="$ok_color$i"
+ fi
+ if [ -n "$s" ]; then
+ s="$flags_color$s"
+ fi
+ if [ -n "$u" ]; then
+ u="$bad_color$u"
+ fi
+ r="$c_clear$r"
+}
# __git_ps1 accepts 0 or 1 arguments (i.e., format string)
# when called from PS1 using command substitution
@@ -254,48 +281,83 @@ __git_ps1 ()
;;
esac
- local g="$(__gitdir)"
- if [ -z "$g" ]; then
+ local repo_info rev_parse_exit_code
+ repo_info="$(git rev-parse --git-dir --is-inside-git-dir \
+ --is-bare-repository --is-inside-work-tree \
+ --short HEAD 2>/dev/null)"
+ rev_parse_exit_code="$?"
+
+ if [ -z "$repo_info" ]; then
if [ $pcmode = yes ]; then
#In PC mode PS1 always needs to be set
PS1="$ps1pc_start$ps1pc_end"
fi
+ return
+ fi
+
+ local short_sha
+ if [ "$rev_parse_exit_code" = "0" ]; then
+ short_sha="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ fi
+ local inside_worktree="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ local bare_repo="${repo_info##*$'\n'}"
+ repo_info="${repo_info%$'\n'*}"
+ local inside_gitdir="${repo_info##*$'\n'}"
+ local g="${repo_info%$'\n'*}"
+
+ local r=""
+ local b=""
+ local step=""
+ local total=""
+ if [ -d "$g/rebase-merge" ]; then
+ read b 2>/dev/null <"$g/rebase-merge/head-name"
+ read step 2>/dev/null <"$g/rebase-merge/msgnum"
+ read total 2>/dev/null <"$g/rebase-merge/end"
+ if [ -f "$g/rebase-merge/interactive" ]; then
+ r="|REBASE-i"
+ else
+ r="|REBASE-m"
+ fi
else
- local r=""
- local b=""
- local step=""
- local total=""
- if [ -d "$g/rebase-merge" ]; then
- b="$(cat "$g/rebase-merge/head-name")"
- step=$(cat "$g/rebase-merge/msgnum")
- total=$(cat "$g/rebase-merge/end")
- if [ -f "$g/rebase-merge/interactive" ]; then
- r="|REBASE-i"
+ if [ -d "$g/rebase-apply" ]; then
+ read step 2>/dev/null <"$g/rebase-apply/next"
+ read total 2>/dev/null <"$g/rebase-apply/last"
+ if [ -f "$g/rebase-apply/rebasing" ]; then
+ read b 2>/dev/null <"$g/rebase-apply/head-name"
+ r="|REBASE"
+ elif [ -f "$g/rebase-apply/applying" ]; then
+ r="|AM"
else
- r="|REBASE-m"
+ r="|AM/REBASE"
fi
+ elif [ -f "$g/MERGE_HEAD" ]; then
+ r="|MERGING"
+ elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
+ r="|CHERRY-PICKING"
+ elif [ -f "$g/REVERT_HEAD" ]; then
+ r="|REVERTING"
+ elif [ -f "$g/BISECT_LOG" ]; then
+ r="|BISECTING"
+ fi
+
+ if [ -n "$b" ]; then
+ :
+ elif [ -h "$g/HEAD" ]; then
+ # symlink symbolic ref
+ b="$(git symbolic-ref HEAD 2>/dev/null)"
else
- if [ -d "$g/rebase-apply" ]; then
- step=$(cat "$g/rebase-apply/next")
- total=$(cat "$g/rebase-apply/last")
- if [ -f "$g/rebase-apply/rebasing" ]; then
- r="|REBASE"
- elif [ -f "$g/rebase-apply/applying" ]; then
- r="|AM"
- else
- r="|AM/REBASE"
+ local head=""
+ if ! read head 2>/dev/null <"$g/HEAD"; then
+ if [ $pcmode = yes ]; then
+ PS1="$ps1pc_start$ps1pc_end"
fi
- elif [ -f "$g/MERGE_HEAD" ]; then
- r="|MERGING"
- elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
- r="|CHERRY-PICKING"
- elif [ -f "$g/REVERT_HEAD" ]; then
- r="|REVERTING"
- elif [ -f "$g/BISECT_LOG" ]; then
- r="|BISECTING"
+ return
fi
-
- b="$(git symbolic-ref HEAD 2>/dev/null)" || {
+ # is it a symbolic ref?
+ b="${head#ref: }"
+ if [ "$head" = "$b" ]; then
detached=yes
b="$(
case "${GIT_PS1_DESCRIBE_STYLE-}" in
@@ -309,104 +371,75 @@ __git_ps1 ()
git describe --tags --exact-match HEAD ;;
esac 2>/dev/null)" ||
- b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." ||
- b="unknown"
+ b="$short_sha..."
b="($b)"
- }
+ fi
fi
+ fi
- if [ -n "$step" ] && [ -n "$total" ]; then
- r="$r $step/$total"
- fi
+ if [ -n "$step" ] && [ -n "$total" ]; then
+ r="$r $step/$total"
+ fi
- local w=""
- local i=""
- local s=""
- local u=""
- local c=""
- local p=""
+ local w=""
+ local i=""
+ local s=""
+ local u=""
+ local c=""
+ local p=""
- if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then
- if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then
- c="BARE:"
+ if [ "true" = "$inside_gitdir" ]; then
+ if [ "true" = "$bare_repo" ]; then
+ c="BARE:"
+ else
+ b="GIT_DIR!"
+ fi
+ elif [ "true" = "$inside_worktree" ]; then
+ if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] &&
+ [ "$(git config --bool bash.showDirtyState)" != "false" ]
+ then
+ git diff --no-ext-diff --quiet --exit-code || w="*"
+ if [ -n "$short_sha" ]; then
+ git diff-index --cached --quiet HEAD -- || i="+"
else
- b="GIT_DIR!"
- fi
- elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then
- if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] &&
- [ "$(git config --bool bash.showDirtyState)" != "false" ]
- then
- git diff --no-ext-diff --quiet --exit-code || w="*"
- if git rev-parse --quiet --verify HEAD >/dev/null; then
- git diff-index --cached --quiet HEAD -- || i="+"
- else
- i="#"
- fi
- fi
- if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then
- git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$"
+ i="#"
fi
+ fi
+ if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ] &&
+ [ -r "$g/refs/stash" ]; then
+ s="$"
+ fi
- if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] &&
- [ "$(git config --bool bash.showUntrackedFiles)" != "false" ] &&
- [ -n "$(git ls-files --others --exclude-standard)" ]
- then
- u="%${ZSH_VERSION+%}"
- fi
+ if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] &&
+ [ "$(git config --bool bash.showUntrackedFiles)" != "false" ] &&
+ git ls-files --others --exclude-standard --error-unmatch -- '*' >/dev/null 2>/dev/null
+ then
+ u="%${ZSH_VERSION+%}"
+ fi
- if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
- __git_ps1_show_upstream
- fi
+ if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
+ __git_ps1_show_upstream
fi
+ fi
- local f="$w$i$s$u"
- if [ $pcmode = yes ]; then
- local gitstring=
- if [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then
- local c_red='\e[31m'
- local c_green='\e[32m'
- local c_lblue='\e[1;34m'
- local c_clear='\e[0m'
- local bad_color=$c_red
- local ok_color=$c_green
- local branch_color="$c_clear"
- local flags_color="$c_lblue"
- local branchstring="$c${b##refs/heads/}"
+ local z="${GIT_PS1_STATESEPARATOR-" "}"
- if [ $detached = no ]; then
- branch_color="$ok_color"
- else
- branch_color="$bad_color"
- fi
+ # NO color option unless in PROMPT_COMMAND mode
+ if [ $pcmode = yes ] && [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then
+ __git_ps1_colorize_gitstring
+ fi
- # Setting gitstring directly with \[ and \] around colors
- # is necessary to prevent wrapping issues!
- gitstring="\[$branch_color\]$branchstring\[$c_clear\]"
+ local f="$w$i$s$u"
+ local gitstring="$c${b##refs/heads/}${f:+$z$f}$r$p"
- if [ -n "$w$i$s$u$r$p" ]; then
- gitstring="$gitstring "
- fi
- if [ "$w" = "*" ]; then
- gitstring="$gitstring\[$bad_color\]$w"
- fi
- if [ -n "$i" ]; then
- gitstring="$gitstring\[$ok_color\]$i"
- fi
- if [ -n "$s" ]; then
- gitstring="$gitstring\[$flags_color\]$s"
- fi
- if [ -n "$u" ]; then
- gitstring="$gitstring\[$bad_color\]$u"
- fi
- gitstring="$gitstring\[$c_clear\]$r$p"
- else
- gitstring="$c${b##refs/heads/}${f:+ $f}$r$p"
- fi
+ if [ $pcmode = yes ]; then
+ if [[ -n ${ZSH_VERSION-} ]]; then
gitstring=$(printf -- "$printf_format" "$gitstring")
- PS1="$ps1pc_start$gitstring$ps1pc_end"
else
- # NO color option unless in PROMPT_COMMAND mode
- printf -- "$printf_format" "$c${b##refs/heads/}${f:+ $f}$r$p"
+ printf -v gitstring -- "$printf_format" "$gitstring"
fi
+ PS1="$ps1pc_start$gitstring$ps1pc_end"
+ else
+ printf -- "$printf_format" "$gitstring"
fi
}
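The main speed-up in this rewrite is batching several git queries into a single rev-parse invocation and then peeling its output apart line by line with ${var##*$'\n'} and ${var%$'\n'*}. A minimal sketch of that parsing pattern on its own:

    repo_info="$(git rev-parse --git-dir --is-inside-git-dir \
        --is-bare-repository --is-inside-work-tree \
        --short HEAD 2>/dev/null)"
    short_sha="${repo_info##*$'\n'}"        # last line of the output
    repo_info="${repo_info%$'\n'*}"         # strip it off
    inside_worktree="${repo_info##*$'\n'}"  # next line from the end
    echo "sha=$short_sha worktree=$inside_worktree"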
diff --git a/contrib/continuous/cidaemon b/contrib/continuous/cidaemon
deleted file mode 100644
index 4009a151de..0000000000
--- a/contrib/continuous/cidaemon
+++ /dev/null
@@ -1,503 +0,0 @@
-#!/usr/bin/perl
-#
-# A daemon that waits for update events sent by its companion
-# post-receive-cinotify hook, checks out a new copy of source,
-# compiles it, and emails the guilty parties if the compile
-# (and optionally test suite) fails.
-#
-# To use this daemon, configure it and run it. It will disconnect
-# from your terminal and fork into the background. The daemon must
-# have local filesystem access to the source repositories, as it
-# uses objects/info/alternates to avoid copying objects.
-#
-# Add its companion post-receive-cinotify hook as the post-receive
-# hook to each repository that the daemon should monitor. Yes, a
-# single daemon can monitor more than one repository.
-#
-# To use multiple daemons on the same system, give them each a
-# unique queue file and tmpdir.
-#
-# Global Config
-# -------------
-# Reads from a Git style configuration file. This will be
-# ~/.gitconfig by default but can be overridden by setting
-# the GIT_CONFIG_FILE environment variable before starting.
-#
-# cidaemon.smtpHost
-# Hostname of the SMTP server the daemon will send email
-# through. Defaults to 'localhost'.
-#
-# cidaemon.smtpUser
-# Username to authenticate to the SMTP server as. This
-# variable is optional; if it is not supplied then no
-# authentication will be performed.
-#
-# cidaemon.smtpPassword
-# Password to authenticate to the SMTP server as. This
-# variable is optional. If not supplied but smtpUser was,
-# the daemon prompts for the password before forking into
-# the background.
-#
-# cidaemon.smtpAuth
-# Type of authentication to perform with the SMTP server.
-# If set to 'login' and smtpUser was defined, this will
-# use the AUTH LOGIN command, which is suitable for use
-# with at least one version of Microsoft Exchange Server.
-# If not set the daemon will use whatever auth methods
-# are supported by your version of Net::SMTP.
-#
-# cidaemon.email
-# Email address that daemon generated emails will be sent
-# from. This should be a useful email address within your
-# organization. Required.
-#
-# cidaemon.name
-# Human friendly name that the daemon will send emails as.
-# Defaults to 'cidaemon'.
-#
-# cidaemon.scanDelay
-# Number of seconds to sleep between polls of the queue file.
-# Defaults to 60.
-#
-# cidaemon.recentCache
-# Number of recent commit SHA-1s per repository to cache and
-# skip building if they appear again. This is useful to avoid
-# rebuilding the same commit multiple times just because it was
-# pushed into more than one branch. Defaults to 100.
-#
-# cidaemon.tmpdir
-# Scratch directory to create the builds within. The daemon
-# makes a new subdirectory for each build, then deletes it when
-# the build has finished. The pid file is also placed here.
-# Defaults to '/tmp'.
-#
-# cidaemon.queue
-# Path to the queue file that the post-receive-cinotify hook
-# appends events to. This file is polled by the daemon. It
-# must not be on an NFS mount (uses flock). Required.
-#
-# cidaemon.nocc
-# Perl regex patterns to match against author and committer
-# lines. If a pattern matches, that author or committer will
-# not be notified of a build failure.
-#
-# Per Repository Config
-# ----------------------
-# Read from the source repository's config file.
-#
-# builder.command
-# Shell command to execute the build. This command must
-# return 0 on "success" and non-zero on failure. If you
-# also want to run a test suite, make sure your command
-# does that too. Required.
-#
-# builder.queue
-# Queue file to notify the cidaemon through. Should match
-# cidaemon.queue. If not set the hook will not notify the
-# cidaemon.
-#
-# builder.skip
-# Perl regex patterns of refs that should not be sent to
-# cidaemon. Updates of these refs will be ignored.
-#
-# builder.newBranchBase
-# Glob patterns of refs that should be used to form the
-# 'old' revisions of a newly created ref. This should be set
-# to globs that match your 'mainline' branches. This
-# way a build failure of a brand new topic branch does not
-# attempt to email everyone since the beginning of time;
-# instead it only emails those authors of commits not in
-# these 'mainline' branches.
-
-local $ENV{PATH} = join ':', qw(
- /opt/git/bin
- /usr/bin
- /bin
- );
-
-use strict;
-use warnings;
-use FindBin qw($RealBin);
-use File::Spec;
-use lib File::Spec->catfile($RealBin, '..', 'perl5');
-use Storable qw(retrieve nstore);
-use Fcntl ':flock';
-use POSIX qw(strftime);
-use Getopt::Long qw(:config no_auto_abbrev auto_help);
-
-sub git_config ($;$)
-{
- my $var = shift;
- my $required = shift || 0;
- local *GIT;
- open GIT, '-|','git','config','--get',$var;
- my $r = <GIT>;
- chop $r if $r;
- close GIT;
- die "error: $var not set.\n" if ($required && !$r);
- return $r;
-}
-
-package EXCHANGE_NET_SMTP;
-
-# Microsoft Exchange Server requires an 'AUTH LOGIN'
-# style of authentication. This is different from
-# the default supported by Net::SMTP so we subclass
-# and override the auth method to support that.
-
-use Net::SMTP;
-use Net::Cmd;
-use MIME::Base64 qw(encode_base64);
-our @ISA = qw(Net::SMTP);
-our $auth_type = ::git_config 'cidaemon.smtpAuth';
-
-sub new
-{
- my $self = shift;
- my $type = ref($self) || $self;
- $type->SUPER::new(@_);
-}
-
-sub auth
-{
- my $self = shift;
- return $self->SUPER::auth(@_) unless $auth_type eq 'login';
-
- my $user = encode_base64 shift, '';
- my $pass = encode_base64 shift, '';
- return 0 unless CMD_MORE == $self->command("AUTH LOGIN")->response;
- return 0 unless CMD_MORE == $self->command($user)->response;
- CMD_OK == $self->command($pass)->response;
-}
-
-package main;
-
-my ($debug_flag, %recent);
-
-my $ex_host = git_config('cidaemon.smtpHost') || 'localhost';
-my $ex_user = git_config('cidaemon.smtpUser');
-my $ex_pass = git_config('cidaemon.smtpPassword');
-
-my $ex_from_addr = git_config('cidaemon.email', 1);
-my $ex_from_name = git_config('cidaemon.name') || 'cidaemon';
-
-my $scan_delay = git_config('cidaemon.scanDelay') || 60;
-my $recent_size = git_config('cidaemon.recentCache') || 100;
-my $tmpdir = git_config('cidaemon.tmpdir') || '/tmp';
-my $queue_name = git_config('cidaemon.queue', 1);
-my $queue_lock = "$queue_name.lock";
-
-my @nocc_list;
-open GIT,'git config --get-all cidaemon.nocc|';
-while (<GIT>) {
- chop;
- push @nocc_list, $_;
-}
-close GIT;
-
-sub nocc_author ($)
-{
- local $_ = shift;
- foreach my $pat (@nocc_list) {
- return 1 if /$pat/;
- }
- 0;
-}
-
-sub input_echo ($)
-{
- my $prompt = shift;
-
- local $| = 1;
- print $prompt;
- my $input = <STDIN>;
- chop $input;
- return $input;
-}
-
-sub input_noecho ($)
-{
- my $prompt = shift;
-
- my $end = sub {system('stty','echo');print "\n";exit};
- local $SIG{TERM} = $end;
- local $SIG{INT} = $end;
- system('stty','-echo');
-
- local $| = 1;
- print $prompt;
- my $input = <STDIN>;
- system('stty','echo');
- print "\n";
- chop $input;
- return $input;
-}
-
-sub rfc2822_date ()
-{
- strftime("%a, %d %b %Y %H:%M:%S %Z", localtime);
-}
-
-sub send_email ($$$)
-{
- my ($subj, $body, $to) = @_;
- my $now = rfc2822_date;
- my $to_str = '';
- my @rcpt_to;
- foreach (@$to) {
- my $s = $_;
- $s =~ s/^/"/;
- $s =~ s/(\s+<)/"$1/;
- $to_str .= ', ' if $to_str;
- $to_str .= $s;
- push @rcpt_to, $1 if $s =~ /<(.*)>/;
- }
- die "Nobody to send to.\n" unless @rcpt_to;
- my $msg = <<EOF;
-From: "$ex_from_name" <$ex_from_addr>
-To: $to_str
-Date: $now
-Subject: $subj
-
-$body
-EOF
-
- my $smtp = EXCHANGE_NET_SMTP->new(Host => $ex_host)
- or die "Cannot connect to $ex_host: $!\n";
- if ($ex_user && $ex_pass) {
- $smtp->auth($ex_user,$ex_pass)
- or die "$ex_host rejected $ex_user\n";
- }
- $smtp->mail($ex_from_addr)
- or die "$ex_host rejected $ex_from_addr\n";
- scalar($smtp->recipient(@rcpt_to, { SkipBad => 1 }))
- or die "$ex_host did not accept any addresses.\n";
- $smtp->data($msg)
- or die "$ex_host rejected message data\n";
- $smtp->quit;
-}
-
-sub pop_queue ()
-{
- open LOCK, ">$queue_lock" or die "Can't open $queue_lock: $!";
- flock LOCK, LOCK_EX;
-
- my $queue = -f $queue_name ? retrieve $queue_name : [];
- my $ent = shift @$queue;
- nstore $queue, $queue_name;
-
- flock LOCK, LOCK_UN;
- close LOCK;
- $ent;
-}
-
-sub git_exec (@)
-{
- system('git',@_) == 0 or die "Cannot git " . join(' ', @_) . "\n";
-}
-
-sub git_val (@)
-{
- open(C, '-|','git',@_);
- my $r = <C>;
- chop $r if $r;
- close C;
- $r;
-}
-
-sub do_build ($$)
-{
- my ($git_dir, $new) = @_;
-
- my $tmp = File::Spec->catfile($tmpdir, "builder$$");
- system('rm','-rf',$tmp) == 0 or die "Cannot clear $tmp\n";
- die "Cannot clear $tmp.\n" if -e $tmp;
-
- my $result = 1;
- eval {
- my $command;
- {
- local $ENV{GIT_DIR} = $git_dir;
- $command = git_val 'config','builder.command';
- }
- die "No builder.command for $git_dir.\n" unless $command;
-
- git_exec 'clone','-n','-l','-s',$git_dir,$tmp;
- chmod 0700, $tmp or die "Cannot lock $tmp\n";
- chdir $tmp or die "Cannot enter $tmp\n";
-
- git_exec 'update-ref','HEAD',$new;
- git_exec 'read-tree','-m','-u','HEAD','HEAD';
- system $command;
- if ($? == -1) {
- print STDERR "failed to execute '$command': $!\n";
- $result = 1;
- } elsif ($? & 127) {
- my $sig = $? & 127;
- print STDERR "'$command' died from signal $sig\n";
- $result = 1;
- } else {
- my $r = $? >> 8;
- print STDERR "'$command' exited with $r\n" if $r;
- $result = $r;
- }
- };
- if ($@) {
- $result = 2;
- print STDERR "$@\n";
- }
-
- chdir '/';
- system('rm','-rf',$tmp);
- rmdir $tmp;
- $result;
-}
-
-sub build_failed ($$$$$)
-{
- my ($git_dir, $ref, $old, $new, $msg) = @_;
-
- $git_dir =~ m,/([^/]+)$,;
- my $repo_name = $1;
- $ref =~ s,^refs/(heads|tags)/,,;
-
- my %authors;
- my $shortlog;
- my $revstr;
- {
- local $ENV{GIT_DIR} = $git_dir;
- my @revs = ($new);
- push @revs, '--not', @$old if @$old;
- open LOG,'-|','git','rev-list','--pretty=raw',@revs;
- while (<LOG>) {
- if (s/^(author|committer) //) {
- chomp;
- s/>.*$/>/;
- $authors{$_} = 1 unless nocc_author $_;
- }
- }
- close LOG;
- open LOG,'-|','git','shortlog',@revs;
- $shortlog .= $_ while <LOG>;
- close LOG;
- $revstr = join(' ', @revs);
- }
-
- my @to = sort keys %authors;
- unless (@to) {
- print STDERR "error: No authors in $revstr\n";
- return;
- }
-
- my $subject = "[$repo_name] $ref : Build Failed";
- my $body = <<EOF;
-Project: $git_dir
-Branch: $ref
-Commits: $revstr
-
-$shortlog
-Build Output:
---------------------------------------------------------------
-$msg
-EOF
- send_email($subject, $body, \@to);
-}
-
-sub run_build ($$$$)
-{
- my ($git_dir, $ref, $old, $new) = @_;
-
- if ($debug_flag) {
- my @revs = ($new);
- push @revs, '--not', @$old if @$old;
- print "BUILDING $git_dir\n";
- print " BRANCH: $ref\n";
- print " COMMITS: ", join(' ', @revs), "\n";
- }
-
- local(*R, *W);
- pipe R, W or die "cannot pipe builder: $!";
-
- my $builder = fork();
- if (!defined $builder) {
- die "cannot fork builder: $!";
- } elsif (0 == $builder) {
- close R;
- close STDIN;open(STDIN, '/dev/null');
- open(STDOUT, '>&W');
- open(STDERR, '>&W');
- exit do_build $git_dir, $new;
- } else {
- close W;
- my $out = '';
- $out .= $_ while <R>;
- close R;
- waitpid $builder, 0;
- build_failed $git_dir, $ref, $old, $new, $out if $?;
- }
-
- print "DONE\n\n" if $debug_flag;
-}
-
-sub daemon_loop ()
-{
- my $run = 1;
- my $stop_sub = sub {$run = 0};
- $SIG{HUP} = $stop_sub;
- $SIG{INT} = $stop_sub;
- $SIG{TERM} = $stop_sub;
-
- mkdir $tmpdir, 0755;
- my $pidfile = File::Spec->catfile($tmpdir, "cidaemon.pid");
- open(O, ">$pidfile"); print O "$$\n"; close O;
-
- while ($run) {
- my $ent = pop_queue;
- if ($ent) {
- my ($git_dir, $ref, $old, $new) = @$ent;
-
- $ent = $recent{$git_dir};
- $recent{$git_dir} = $ent = [[], {}] unless $ent;
- my ($rec_arr, $rec_hash) = @$ent;
- next if $rec_hash->{$new}++;
- while (@$rec_arr >= $recent_size) {
- my $to_kill = shift @$rec_arr;
- delete $rec_hash->{$to_kill};
- }
- push @$rec_arr, $new;
-
- run_build $git_dir, $ref, $old, $new;
- } else {
- sleep $scan_delay;
- }
- }
-
- unlink $pidfile;
-}
-
-$debug_flag = 0;
-GetOptions(
- 'debug|d' => \$debug_flag,
- 'smtp-user=s' => \$ex_user,
-) or die "usage: $0 [--debug] [--smtp-user=user]\n";
-
-$ex_pass = input_noecho("$ex_user SMTP password: ")
- if ($ex_user && !$ex_pass);
-
-if ($debug_flag) {
- daemon_loop;
- exit 0;
-}
-
-my $daemon = fork();
-if (!defined $daemon) {
- die "cannot fork daemon: $!";
-} elsif (0 == $daemon) {
- close STDIN;open(STDIN, '/dev/null');
- close STDOUT;open(STDOUT, '>/dev/null');
- close STDERR;open(STDERR, '>/dev/null');
- daemon_loop;
- exit 0;
-} else {
- print "Daemon $daemon running in the background.\n";
-}
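Tying the documented settings together, a deployment of the (now removed) daemon would have looked roughly like this; addresses and paths are invented:

    # global settings, read from the daemon user's ~/.gitconfig
    git config --global cidaemon.email builds@example.com
    git config --global cidaemon.queue /var/spool/cidaemon/queue
    git config --global cidaemon.tmpdir /var/tmp/cidaemon
    git config --global cidaemon.smtpHost smtp.example.com

    # per-repository settings, inside each monitored repository
    git config builder.command 'make clean all test'
    git config builder.queue /var/spool/cidaemon/queue
    git config --add builder.skip refs/tags/
    git config --add builder.newBranchBase refs/heads/master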
diff --git a/contrib/continuous/post-receive-cinotify b/contrib/continuous/post-receive-cinotify
deleted file mode 100644
index b8f5a609af..0000000000
--- a/contrib/continuous/post-receive-cinotify
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/perl
-#
-# A hook that notifies its companion cidaemon through a simple
-# queue file that a ref has been updated via a push (actually
-# by a receive-pack running on the server).
-#
-# See cidaemon for per-repository configuration details.
-#
-# To use this hook, add it as the post-receive hook, make it
-# executable, and set its configuration options.
-#
-
-local $ENV{PATH} = '/opt/git/bin';
-
-use strict;
-use warnings;
-use File::Spec;
-use Storable qw(retrieve nstore);
-use Fcntl ':flock';
-
-my $git_dir = File::Spec->rel2abs($ENV{GIT_DIR});
-my $queue_name = `git config --get builder.queue`;chop $queue_name;
-$queue_name =~ m,^([^\s]+)$,; $queue_name = $1; # untaint
-unless ($queue_name) {
- 1 while <STDIN>;
- print STDERR "\nerror: builder.queue not set. Not enqueing.\n\n";
- exit;
-}
-my $queue_lock = "$queue_name.lock";
-
-my @skip;
-open S, "git config --get-all builder.skip|";
-while (<S>) {
- chop;
- push @skip, $_;
-}
-close S;
-
-my @new_branch_base;
-open S, "git config --get-all builder.newBranchBase|";
-while (<S>) {
- chop;
- push @new_branch_base, $_;
-}
-close S;
-
-sub skip ($)
-{
- local $_ = shift;
- foreach my $p (@skip) {
- return 1 if /^$p/;
- }
- 0;
-}
-
-open LOCK, ">$queue_lock" or die "Can't open $queue_lock: $!";
-flock LOCK, LOCK_EX;
-
-my $queue = -f $queue_name ? retrieve $queue_name : [];
-my %existing;
-foreach my $r (@$queue) {
- my ($gd, $ref) = @$r;
- $existing{$gd}{$ref} = $r;
-}
-
-my @new_branch_commits;
-my $loaded_new_branch_commits = 0;
-
-while (<STDIN>) {
- chop;
- my ($old, $new, $ref) = split / /, $_, 3;
-
- next if $old eq $new;
- next if $new =~ /^0{40}$/;
- next if skip $ref;
-
- my $r = $existing{$git_dir}{$ref};
- if ($r) {
- $r->[3] = $new;
- } else {
- if ($old =~ /^0{40}$/) {
- if (!$loaded_new_branch_commits && @new_branch_base) {
- open M,'-|','git','show-ref',@new_branch_base;
- while (<M>) {
- ($_) = split / /, $_;
- push @new_branch_commits, $_;
- }
- close M;
- $loaded_new_branch_commits = 1;
- }
- $old = [@new_branch_commits];
- } else {
- $old = [$old];
- }
-
- $r = [$git_dir, $ref, $old, $new];
- $existing{$git_dir}{$ref} = $r;
- push @$queue, $r;
- }
-}
-nstore $queue, $queue_name;
-
-flock LOCK, LOCK_UN;
-close LOCK;
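As its header said, hooking a repository up to the daemon was only a copy and a chmod; the repository path below is invented:

    cp contrib/continuous/post-receive-cinotify \
        /srv/git/project.git/hooks/post-receive
    chmod +x /srv/git/project.git/hooks/post-receive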
diff --git a/contrib/credential/osxkeychain/git-credential-osxkeychain.c b/contrib/credential/osxkeychain/git-credential-osxkeychain.c
index 3940202b36..bcd3f575a3 100644
--- a/contrib/credential/osxkeychain/git-credential-osxkeychain.c
+++ b/contrib/credential/osxkeychain/git-credential-osxkeychain.c
@@ -127,10 +127,20 @@ static void read_credential(void)
*v++ = '\0';
if (!strcmp(buf, "protocol")) {
- if (!strcmp(v, "https"))
+ if (!strcmp(v, "imap"))
+ protocol = kSecProtocolTypeIMAP;
+ else if (!strcmp(v, "imaps"))
+ protocol = kSecProtocolTypeIMAPS;
+ else if (!strcmp(v, "ftp"))
+ protocol = kSecProtocolTypeFTP;
+ else if (!strcmp(v, "ftps"))
+ protocol = kSecProtocolTypeFTPS;
+ else if (!strcmp(v, "https"))
protocol = kSecProtocolTypeHTTPS;
else if (!strcmp(v, "http"))
protocol = kSecProtocolTypeHTTP;
+ else if (!strcmp(v, "smtp"))
+ protocol = kSecProtocolTypeSMTP;
else /* we don't yet handle other protocols */
exit(0);
}
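With the added protocol mappings, the helper can now be queried for IMAP, FTP and SMTP credentials as well. Like all credential helpers it reads key=value lines terminated by a blank line, so a manual smoke test could look like this (the host name is a placeholder):

    printf 'protocol=imaps\nhost=mail.example.com\n\n' |
        git credential-osxkeychain get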
diff --git a/contrib/mw-to-git/.perlcriticrc b/contrib/mw-to-git/.perlcriticrc
new file mode 100644
index 0000000000..5a9955d757
--- /dev/null
+++ b/contrib/mw-to-git/.perlcriticrc
@@ -0,0 +1,28 @@
+# These 3 rules demand that the s, m and x flags be added to *every* regexp.
+# This is overkill and would be harmful to readability.
+[-RegularExpressions::RequireExtendedFormatting]
+[-RegularExpressions::RequireDotMatchAnything]
+[-RegularExpressions::RequireLineBoundaryMatching]
+
+# This rule says that builtin functions should not be called with parentheses
+# e.g.: (taken from CPAN's documentation)
+# open($handle, '>', $filename); #not ok
+# open $handle, '>', $filename; #ok
+# Applying such a rule would mean modifying a huge number of lines for a
+# question of style.
+[-CodeLayout::ProhibitParensWithBuiltins]
+
+# This rule states that each system call should have its return value checked
+# The problem is that it includes the print call. Checking every print call's
+# return value would be harmful to the code's readability.
+# This configuration keeps all default functions but print.
+[InputOutput::RequireCheckedSyscalls]
+functions = open say close
+
+# This rule demands adding a dependency on the Readonly module. This is not
+# desired.
+[-ValuesAndExpressions::ProhibitConstantPragma]
+
+# This rule is not really useful (rather a question of style) and produces many
+# warnings among the code.
+[-ValuesAndExpressions::ProhibitNoisyQuotes]
diff --git a/contrib/mw-to-git/Git/Mediawiki.pm b/contrib/mw-to-git/Git/Mediawiki.pm
new file mode 100644
index 0000000000..d13c4dfa7d
--- /dev/null
+++ b/contrib/mw-to-git/Git/Mediawiki.pm
@@ -0,0 +1,100 @@
+package Git::Mediawiki;
+
+use 5.008;
+use strict;
+use Git;
+
+BEGIN {
+
+our ($VERSION, @ISA, @EXPORT, @EXPORT_OK);
+
+# Totally unstable API.
+$VERSION = '0.01';
+
+require Exporter;
+
+@ISA = qw(Exporter);
+
+@EXPORT = ();
+
+# Methods which can be called as standalone functions as well:
+@EXPORT_OK = qw(clean_filename smudge_filename connect_maybe
+ EMPTY HTTP_CODE_OK HTTP_CODE_PAGE_NOT_FOUND);
+}
+
+# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced.
+use constant SLASH_REPLACEMENT => '%2F';
+
+# Used to test for empty strings
+use constant EMPTY => q{};
+
+# HTTP codes
+use constant HTTP_CODE_OK => 200;
+use constant HTTP_CODE_PAGE_NOT_FOUND => 404;
+
+sub clean_filename {
+ my $filename = shift;
+ $filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
+ # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
+ # Do a variant of URL-encoding, i.e. looks like URL-encoding,
+ # but with _ added to prevent MediaWiki from thinking this is
+ # an actual special character.
+ $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
+ # If we use the uri escape before
+ # we should unescape here, before anything
+
+ return $filename;
+}
+
+sub smudge_filename {
+ my $filename = shift;
+ $filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
+ $filename =~ s/ /_/g;
+ # Decode forbidden characters encoded in clean_filename
+ $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
+ return $filename;
+}
+
+sub connect_maybe {
+ my $wiki = shift;
+ if ($wiki) {
+ return $wiki;
+ }
+
+ my $remote_name = shift;
+ my $remote_url = shift;
+ my ($wiki_login, $wiki_password, $wiki_domain);
+
+ $wiki_login = Git::config("remote.${remote_name}.mwLogin");
+ $wiki_password = Git::config("remote.${remote_name}.mwPassword");
+ $wiki_domain = Git::config("remote.${remote_name}.mwDomain");
+
+ $wiki = MediaWiki::API->new;
+ $wiki->{config}->{api_url} = "${remote_url}/api.php";
+ if ($wiki_login) {
+ my %credential = (
+ 'url' => $remote_url,
+ 'username' => $wiki_login,
+ 'password' => $wiki_password
+ );
+ Git::credential(\%credential);
+ my $request = {lgname => $credential{username},
+ lgpassword => $credential{password},
+ lgdomain => $wiki_domain};
+ if ($wiki->login($request)) {
+ Git::credential(\%credential, 'approve');
+ print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
+ } else {
+ print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${remote_url}\n);
+ print {*STDERR} ' (error ' .
+ $wiki->{error}->{code} . ': ' .
+ $wiki->{error}->{details} . ")\n";
+ Git::credential(\%credential, 'reject');
+ exit 1;
+ }
+ }
+
+ return $wiki;
+}
+
+1; # Famous last words
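The clean/smudge pair above is designed to round-trip slashes through git: smudge_filename turns '/' into %2F (and spaces into underscores) on the way in, and clean_filename maps %2F back on the way out. A quick shell check, assuming you run it from contrib/mw-to-git with git's Perl modules on PERL5LIB:

    perl -I. -MGit::Mediawiki=smudge_filename -e \
        'print smudge_filename("Foo/Bar Baz"), "\n"'    # Foo%2FBar_Baz
    perl -I. -MGit::Mediawiki=clean_filename -e \
        'print clean_filename("Foo%2FBar_Baz"), "\n"'   # Foo/Bar_Baz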
diff --git a/contrib/mw-to-git/Makefile b/contrib/mw-to-git/Makefile
index f14971987c..76fcd4defc 100644
--- a/contrib/mw-to-git/Makefile
+++ b/contrib/mw-to-git/Makefile
@@ -2,16 +2,43 @@
# Copyright (C) 2013
# Matthieu Moy <Matthieu.Moy@imag.fr>
#
-## Build git-remote-mediawiki
+# To build and test:
+#
+# make
+# bin-wrapper/git mw preview Some_page.mw
+# bin-wrapper/git clone mediawiki::http://example.com/wiki/
+#
+# To install, run Git's toplevel 'make install' then run:
+#
+# make install
+GIT_MEDIAWIKI_PM=Git/Mediawiki.pm
SCRIPT_PERL=git-remote-mediawiki.perl
+SCRIPT_PERL+=git-mw.perl
GIT_ROOT_DIR=../..
HERE=contrib/mw-to-git/
SCRIPT_PERL_FULL=$(patsubst %,$(HERE)/%,$(SCRIPT_PERL))
+INSTLIBDIR=$(shell $(MAKE) -C $(GIT_ROOT_DIR)/perl \
+ -s --no-print-directory instlibdir)
all: build
-build install clean:
- $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL=$(SCRIPT_PERL_FULL) \
- $@-perl-script
+install_pm:
+ install $(GIT_MEDIAWIKI_PM) $(INSTLIBDIR)/$(GIT_MEDIAWIKI_PM)
+
+build:
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ build-perl-script
+
+install: install_pm
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ install-perl-script
+
+clean:
+ $(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+ clean-perl-script
+ rm $(INSTLIBDIR)/$(GIT_MEDIAWIKI_PM)
+
+perlcritic:
+ perlcritic -2 *.perl
diff --git a/contrib/mw-to-git/bin-wrapper/git b/contrib/mw-to-git/bin-wrapper/git
new file mode 100755
index 0000000000..6663ae57e8
--- /dev/null
+++ b/contrib/mw-to-git/bin-wrapper/git
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# git executable wrapper script for Git-Mediawiki to run tests without
+# installing all the scripts and perl packages.
+
+GIT_ROOT_DIR=../../..
+GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd ${GIT_ROOT_DIR} && pwd)
+
+GITPERLLIB="$GIT_EXEC_PATH"'/contrib/mw-to-git'"${GITPERLLIB:+:$GITPERLLIB}"
+PATH="$GIT_EXEC_PATH"'/contrib/mw-to-git:'"$PATH"
+
+export GITPERLLIB PATH
+
+exec "${GIT_EXEC_PATH}/bin-wrappers/git" "$@"
diff --git a/contrib/mw-to-git/git-mw.perl b/contrib/mw-to-git/git-mw.perl
new file mode 100755
index 0000000000..28df3ee321
--- /dev/null
+++ b/contrib/mw-to-git/git-mw.perl
@@ -0,0 +1,368 @@
+#!/usr/bin/perl
+
+# Copyright (C) 2013
+# Benoit Person <benoit.person@ensimag.imag.fr>
+# Celestin Matte <celestin.matte@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Set of tools for git repo with a mediawiki remote.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+
+use strict;
+use warnings;
+
+use Getopt::Long;
+use URI::URL qw(url);
+use LWP::UserAgent;
+use HTML::TreeBuilder;
+
+use Git;
+use MediaWiki::API;
+use Git::Mediawiki qw(clean_filename connect_maybe
+ EMPTY HTTP_CODE_PAGE_NOT_FOUND);
+
+# By default, use UTF-8 to communicate with Git and the user
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
+
+# Global parameters
+my $verbose = 0;
+sub v_print {
+ if ($verbose) {
+ return print {*STDERR} @_;
+ }
+ return;
+}
+
+# Preview parameters
+my $file_name = EMPTY;
+my $remote_name = EMPTY;
+my $preview_file_name = EMPTY;
+my $autoload = 0;
+sub file {
+ $file_name = shift;
+ return $file_name;
+}
+
+my %commands = (
+ 'help' =>
+ [\&help, {}, \&help],
+ 'preview' =>
+ [\&preview, {
+ '<>' => \&file,
+ 'output|o=s' => \$preview_file_name,
+ 'remote|r=s' => \$remote_name,
+ 'autoload|a' => \$autoload
+ }, \&preview_help]
+);
+
+# Search for sub-command
+my $cmd = $commands{'help'};
+for (0..@ARGV-1) {
+ if (defined $commands{$ARGV[$_]}) {
+ $cmd = $commands{$ARGV[$_]};
+ splice @ARGV, $_, 1;
+ last;
+ }
+};
+GetOptions( %{$cmd->[1]},
+ 'help|h' => \&{$cmd->[2]},
+ 'verbose|v' => \$verbose);
+
+# Launch command
+&{$cmd->[0]};
+
+############################# Preview Functions ################################
+
+sub preview_help {
+ print {*STDOUT} <<'END';
+USAGE: git mw preview [--remote|-r <remote name>] [--autoload|-a]
+ [--output|-o <output filename>] [--verbose|-v]
+ <blob> | <filename>
+
+DESCRIPTION:
+Preview is a utility to preview local content of a mediawiki repo as if it
+were pushed to the remote.
+
+For that, preview searches for the remote name of the current branch's
+upstream if --remote is not set. If that remote is not found or if it
+is not a mediawiki, it lists all mediawiki remotes configured and asks
+you to replay your command with the --remote option set properly.
+
+Then, it searches for a file named 'filename'. If it's not found in
+the current dir, it will assume it's a blob.
+
+The content retrieved in the file (or in the blob) will then be parsed
+by the remote mediawiki and combined with a template retrieved from
+the mediawiki.
+
+Finally, preview will save the HTML result in a file, and autoload it
+in your default web browser if the option --autoload is present.
+
+OPTIONS:
+ -r <remote name>, --remote <remote name>
+ If the remote is a mediawiki, the template and the parse engine
+ used for the preview will be those of that remote.
+ If not, a list of valid remotes will be shown.
+
+ -a, --autoload
+ Try to load the HTML output in a new tab (or new window) of your
+ default web browser.
+
+ -o <output filename>, --output <output filename>
+ Change the HTML output filename. Default filename is based on the
+ input filename with its extension replaced by '.html'.
+
+ -v, --verbose
+ Show more information on what's going on under the hood.
+END
+ exit;
+}
+
+sub preview {
+ my $wiki;
+ my ($remote_url, $wiki_page_name);
+ my ($new_content, $template);
+ my $file_content;
+
+ if ($file_name eq EMPTY) {
+ die "Missing file argument, see `git mw help`\n";
+ }
+
+ v_print("### Selecting remote\n");
+ if ($remote_name eq EMPTY) {
+ $remote_name = find_upstream_remote_name();
+ if ($remote_name) {
+ $remote_url = mediawiki_remote_url_maybe($remote_name);
+ }
+
+ if (! $remote_url) {
+ my @valid_remotes = find_mediawiki_remotes();
+
+ if ($#valid_remotes == 0) {
+ print {*STDERR} "No mediawiki remote in this repo. \n";
+ exit 1;
+ } else {
+ my $remotes_list = join("\n\t", @valid_remotes);
+ print {*STDERR} <<"MESSAGE";
+There are multiple mediawiki remotes, which of:
+ ${remotes_list}
do you want? Use the -r option to specify the remote.
+MESSAGE
+ }
+
+ exit 1;
+ }
+ } else {
+ if (!is_valid_remote($remote_name)) {
+ die "${remote_name} is not a remote\n";
+ }
+
+ $remote_url = mediawiki_remote_url_maybe($remote_name);
+ if (! $remote_url) {
+ die "${remote_name} is not a mediawiki remote\n";
+ }
+ }
+ v_print("selected remote:\n\tname: ${remote_name}\n\turl: ${remote_url}\n");
+
+ $wiki = connect_maybe($wiki, $remote_name, $remote_url);
+
+ # Read file content
+ if (! -e $file_name) {
+ $file_content = git_cmd_try {
+ Git::command('cat-file', 'blob', $file_name); }
+ "%s failed w/ code %d";
+
+ if ($file_name =~ /(.+):(.+)/) {
+ $file_name = $2;
+ }
+ } else {
+ open my $read_fh, "<", $file_name
+ or die "could not open ${file_name}: $!\n";
+ $file_content = do { local $/ = undef; <$read_fh> };
+ close $read_fh
+ or die "unable to close: $!\n";
+ }
+
+ v_print("### Retrieving template\n");
+ ($wiki_page_name = clean_filename($file_name)) =~ s/\.[^.]+$//;
+ $template = get_template($remote_url, $wiki_page_name);
+
+ v_print("### Parsing local content\n");
+ $new_content = $wiki->api({
+ action => 'parse',
+ text => $file_content,
+ title => $wiki_page_name
+ }, {
+ skip_encoding => 1
+ }) or die "No response from remote mediawiki\n";
+ $new_content = $new_content->{'parse'}->{'text'}->{'*'};
+
+ v_print("### Merging contents\n");
+ if ($preview_file_name eq EMPTY) {
+ ($preview_file_name = $file_name) =~ s/\.[^.]+$/.html/;
+ }
+ open(my $save_fh, '>:encoding(UTF-8)', $preview_file_name)
+ or die "Could not open: $!\n";
+ print {$save_fh} merge_contents($template, $new_content, $remote_url);
+ close($save_fh)
+ or die "Could not close: $!\n";
+
+ v_print("### Results\n");
+ if ($autoload) {
+ v_print("Launching browser w/ file: ${preview_file_name}");
+ system('git', 'web--browse', $preview_file_name);
+ } else {
+ print {*STDERR} "Preview file saved as: ${preview_file_name}\n";
+ }
+
+ exit;
+}
+
+# uses global scope variable: $remote_name
+sub merge_contents {
+ my $template = shift;
+ my $content = shift;
+ my $remote_url = shift;
+ my ($content_tree, $html_tree, $mw_content_text);
+ my $template_content_id = 'bodyContent';
+
+ $html_tree = HTML::TreeBuilder->new;
+ $html_tree->parse($template);
+
+ $content_tree = HTML::TreeBuilder->new;
+ $content_tree->parse($content);
+
+ $template_content_id = Git::config("remote.${remote_name}.mwIDcontent")
+ || $template_content_id;
+ v_print("Using '${template_content_id}' as the content ID\n");
+
+ $mw_content_text = $html_tree->look_down('id', $template_content_id);
+ if (!defined $mw_content_text) {
+ print {*STDERR} <<"CONFIG";
+Could not combine the new content with the template. You might want to
+configure `remote.${remote_name}.mwIDcontent` in your config:
+ git config --add remote.${remote_name}.mwIDcontent <id>
+and re-run the command afterward.
+CONFIG
+ exit 1;
+ }
+ $mw_content_text->delete_content();
+ $mw_content_text->push_content($content_tree);
+
+ make_links_absolute($html_tree, $remote_url);
+
+ return $html_tree->as_HTML;
+}
+
+sub make_links_absolute {
+ my $html_tree = shift;
+ my $remote_url = shift;
+ for (@{ $html_tree->extract_links() }) {
+ my ($link, $element, $attr) = @{ $_ };
+ my $url = url($link)->canonical;
+ if ($url !~ /#/) {
+ $element->attr($attr, URI->new_abs($url, $remote_url));
+ }
+ }
+ return $html_tree;
+}
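+
+# For example, a relative href like "/index.php?title=Foo" is rewritten
+# against the remote url into "<remote_url>/index.php?title=Foo", while
+# any link containing a '#' fragment is left untouched.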
+
+sub is_valid_remote {
+ my $remote = shift;
+ my @remotes = git_cmd_try {
+ Git::command('remote') }
+ "%s failed w/ code %d";
+ my $found_remote = 0;
+ foreach my $candidate (@remotes) {
+ if ($candidate eq $remote) {
+ $found_remote = 1;
+ last;
+ }
+ }
+ return $found_remote;
+}
+
+sub find_mediawiki_remotes {
+ my @remotes = git_cmd_try {
+ Git::command('remote'); }
+ "%s failed w/ code %d";
+ my $remote_url;
+ my @valid_remotes = ();
+ foreach my $remote (@remotes) {
+ $remote_url = mediawiki_remote_url_maybe($remote);
+ if ($remote_url) {
+ push(@valid_remotes, $remote);
+ }
+ }
+ return @valid_remotes;
+}
+
+sub find_upstream_remote_name {
+ my $current_branch = git_cmd_try {
+ Git::command_oneline('symbolic-ref', '--short', 'HEAD') }
+ "%s failed w/ code %d";
+ return Git::config("branch.${current_branch}.remote");
+}
+
+sub mediawiki_remote_url_maybe {
+ my $remote = shift;
+
+ # Find remote url
+ my $remote_url = Git::config("remote.${remote}.url");
+ if ($remote_url =~ s/mediawiki::(.*)/$1/) {
+ return url($remote_url)->canonical;
+ }
+
+ return;
+}
+
+sub get_template {
+ my $url = shift;
+ my $page_name = shift;
+ my ($req, $res, $code, $url_after);
+
+ $req = LWP::UserAgent->new;
+ if ($verbose) {
+ $req->show_progress(1);
+ }
+
+ $res = $req->get("${url}/index.php?title=${page_name}");
+ if (!$res->is_success) {
+ $code = $res->code;
+ $url_after = $res->request()->uri(); # resolve all redirections
+ if ($code == HTTP_CODE_PAGE_NOT_FOUND) {
+ if ($verbose) {
+ print {*STDERR} <<"WARNING";
+Warning: Failed to retrieve '$page_name'. Create it on the mediawiki if you want
+all the links to work properly.
+Trying to use the mediawiki homepage as a fallback template ...
+WARNING
+ }
+
+ # LWP automatically redirects GET request
+ $res = $req->get("${url}/index.php");
+ if (!$res->is_success) {
+ $code = $res->code;
+ $url_after = $res->request()->uri(); # resolve all redirections
+ die "Failed to get homepage @ ${url_after} w/ code ${code}\n";
+ }
+ } else {
+ die "Failed to get '${page_name}' @ ${url_after} w/ code ${code}\n";
+ }
+ }
+
+ return $res->decoded_content;
+}
+
+############################## Help Functions ##################################
+
+sub help {
+ print {*STDOUT} <<'END';
+usage: git mw <command> <args>
+
+git mw commands are:
+ help Display help information about git mw
+ preview Parse and render local file into HTML
+END
+ exit;
+}
diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index 9c14c1f88d..f8d7d2ca6c 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -13,19 +13,17 @@
use strict;
use MediaWiki::API;
+use Git;
+use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
+ EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
+use warnings;
# By default, use UTF-8 to communicate with Git and the user
-binmode STDERR, ":utf8";
-binmode STDOUT, ":utf8";
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
use URI::Escape;
-use IPC::Open2;
-
-use warnings;
-
-# Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
-use constant SLASH_REPLACEMENT => "%2F";
# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
@@ -36,45 +34,57 @@ use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
use constant EMPTY_CONTENT => "<!-- empty page -->\n";
# used to reflect file creation or deletion in diff.
-use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+# Number of pages taken into account at once in the subroutine get_mw_page_list
+use constant SLICE_SIZE => 50;
+
+# Number of linked mediafiles to get at once in get_linked_mediafiles
+# The query is split in small batches because of the MW API limit of
+# the number of links to be returned (500 links max).
+use constant BATCH_SIZE => 10;
+
+if (@ARGV != 2) {
+ exit_error_usage();
+}
+
my $remotename = $ARGV[0];
my $url = $ARGV[1];
# Accept both space-separated values and multiple config entries.
# Page names must use _ instead of spaces anyway, since the value is
# split on spaces and newlines.
-my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".pages"));
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);
# Just like @tracked_pages, but for MediaWiki categories.
-my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);
# Import media files on pull
-my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
+my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
-$import_media = ($import_media eq "true");
+$import_media = ($import_media eq 'true');
# Export media files on push
-my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
+my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
-$export_media = !($export_media eq "false");
+$export_media = !($export_media eq 'false');
-my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
+my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
-my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
-my $wiki_domain = run_git("config --get remote.". $remotename .".mwDomain");
+my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
+my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
-my $shallow_import = run_git("config --get --bool remote.". $remotename .".shallow");
+my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
-$shallow_import = ($shallow_import eq "true");
+$shallow_import = ($shallow_import eq 'true');
# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
@@ -82,15 +92,18 @@ $shallow_import = ($shallow_import eq "true");
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revision
-my $fetch_strategy = run_git("config --get remote.$remotename.fetchStrategy");
-unless ($fetch_strategy) {
- $fetch_strategy = run_git("config --get mediawiki.fetchStrategy");
+my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
+if (!$fetch_strategy) {
+ $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
-unless ($fetch_strategy) {
- $fetch_strategy = "by_page";
+if (!$fetch_strategy) {
+ $fetch_strategy = 'by_page';
}
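+# For example, per-revision fetching can be chosen for a single remote
+# ('origin' is a placeholder name) with:
+#   git config remote.origin.fetchStrategy by_rev
+# or for all remotes with:
+#   git config mediawiki.fetchStrategy by_rev
+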
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
@@ -105,48 +118,25 @@ unless ($fetch_strategy) {
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
-my $dumb_push = run_git("config --get --bool remote.$remotename.dumbPush");
-unless ($dumb_push) {
- $dumb_push = run_git("config --get --bool mediawiki.dumbPush");
+my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
+if (!$dumb_push) {
+ $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
-$dumb_push = ($dumb_push eq "true");
+$dumb_push = ($dumb_push eq 'true');
my $wiki_name = $url;
-$wiki_name =~ s/[^\/]*:\/\///;
+$wiki_name =~ s{[^/]*://}{};
# If URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove user
# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
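# For example, "https://user:secret@wiki.example.com/" yields a $wiki_name
# of "wiki.example.com/", so commit authors read as
# "User <User@wiki.example.com/>" rather than embedding the credentials.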
# Commands parser
-my $entry;
-my @cmd;
while (<STDIN>) {
chomp;
- @cmd = split(/ /);
- if (defined($cmd[0])) {
- # Line not blank
- if ($cmd[0] eq "capabilities") {
- die("Too many arguments for capabilities") unless (!defined($cmd[1]));
- mw_capabilities();
- } elsif ($cmd[0] eq "list") {
- die("Too many arguments for list") unless (!defined($cmd[2]));
- mw_list($cmd[1]);
- } elsif ($cmd[0] eq "import") {
- die("Invalid arguments for import") unless ($cmd[1] ne "" && !defined($cmd[2]));
- mw_import($cmd[1]);
- } elsif ($cmd[0] eq "option") {
- die("Too many arguments for option") unless ($cmd[1] ne "" && $cmd[2] ne "" && !defined($cmd[3]));
- mw_option($cmd[1],$cmd[2]);
- } elsif ($cmd[0] eq "push") {
- mw_push($cmd[1]);
- } else {
- print STDERR "Unknown command. Aborting...\n";
- last;
- }
- } else {
- # blank line: we should terminate
+
+ if (!parse_command($_)) {
last;
}
@@ -156,107 +146,91 @@ while (<STDIN>) {
########################## Functions ##############################
-## credential API management (generic functions)
-
-sub credential_read {
- my %credential;
- my $reader = shift;
- my $op = shift;
- while (<$reader>) {
- my ($key, $value) = /([^=]*)=(.*)/;
- if (not defined $key) {
- die "ERROR receiving response from git credential $op:\n$_\n";
- }
- $credential{$key} = $value;
- }
- return %credential;
+## error handling
+sub exit_error_usage {
+ die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
+ "parameters\n" .
+ "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
+ "module directly.\n" .
+ "This module can be used the following way:\n" .
+ "\tgit clone mediawiki://<address of a mediawiki>\n" .
+ "Then, use git commit, push and pull as with every normal git repository.\n";
}
-sub credential_write {
- my $credential = shift;
- my $writer = shift;
- # url overwrites other fields, so it must come first
- print $writer "url=$credential->{url}\n" if exists $credential->{url};
- while (my ($key, $value) = each(%$credential) ) {
- if (length $value && $key ne 'url') {
- print $writer "$key=$value\n";
- }
+sub parse_command {
+ my ($line) = @_;
+ my @cmd = split(/ /, $line);
+ if (!defined $cmd[0]) {
+ return 0;
}
-}
-
-sub credential_run {
- my $op = shift;
- my $credential = shift;
- my $pid = open2(my $reader, my $writer, "git credential $op");
- credential_write($credential, $writer);
- print $writer "\n";
- close($writer);
-
- if ($op eq "fill") {
- %$credential = credential_read($reader, $op);
+ if ($cmd[0] eq 'capabilities') {
+ die("Too many arguments for capabilities\n")
+ if (defined($cmd[1]));
+ mw_capabilities();
+ } elsif ($cmd[0] eq 'list') {
+ die("Too many arguments for list\n") if (defined($cmd[2]));
+ mw_list($cmd[1]);
+ } elsif ($cmd[0] eq 'import') {
+ die("Invalid argument for import\n")
+ if ($cmd[1] eq EMPTY);
+ die("Too many arguments for import\n")
+ if (defined($cmd[2]));
+ mw_import($cmd[1]);
+ } elsif ($cmd[0] eq 'option') {
+ die("Invalid arguments for option\n")
+ if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
+ die("Too many arguments for option\n")
+ if (defined($cmd[3]));
+ mw_option($cmd[1],$cmd[2]);
+ } elsif ($cmd[0] eq 'push') {
+ mw_push($cmd[1]);
} else {
- if (<$reader>) {
- die "ERROR while running git credential $op:\n$_";
- }
- }
- close($reader);
- waitpid($pid, 0);
- my $child_exit_status = $? >> 8;
- if ($child_exit_status != 0) {
- die "'git credential $op' failed with code $child_exit_status.";
+ print {*STDERR} "Unknown command. Aborting...\n";
+ return 0;
}
+ return 1;
}
# MediaWiki API instance, created lazily.
my $mediawiki;
-sub mw_connect_maybe {
- if ($mediawiki) {
- return;
- }
- $mediawiki = MediaWiki::API->new;
- $mediawiki->{config}->{api_url} = "$url/api.php";
- if ($wiki_login) {
- my %credential = (url => $url);
- $credential{username} = $wiki_login;
- $credential{password} = $wiki_passwd;
- credential_run("fill", \%credential);
- my $request = {lgname => $credential{username},
- lgpassword => $credential{password},
- lgdomain => $wiki_domain};
- if ($mediawiki->login($request)) {
- credential_run("approve", \%credential);
- print STDERR "Logged in mediawiki user \"$credential{username}\".\n";
- } else {
- print STDERR "Failed to log in mediawiki user \"$credential{username}\" on $url\n";
- print STDERR " (error " .
- $mediawiki->{error}->{code} . ': ' .
- $mediawiki->{error}->{details} . ")\n";
- credential_run("reject", \%credential);
- exit 1;
- }
+sub fatal_mw_error {
+ my $action = shift;
+ print STDERR "fatal: could not $action.\n";
+ print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+ if ($url =~ /^https/) {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
+ print STDERR "fatal: and the SSL certificate is correct.\n";
+ } else {
+ print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
}
+ print STDERR "fatal: (error " .
+ $mediawiki->{error}->{code} . ': ' .
+ $mediawiki->{error}->{details} . ")\n";
+ exit 1;
}
## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
my $pages = shift;
get_mw_page_list(\@tracked_pages, $pages);
+ return;
}
sub get_mw_page_list {
my $page_list = shift;
my $pages = shift;
- my @some_pages = @$page_list;
+ my @some_pages = @{$page_list};
while (@some_pages) {
- my $last = 50;
- if ($#some_pages < $last) {
- $last = $#some_pages;
+ my $last_page = SLICE_SIZE;
+ if ($#some_pages < $last_page) {
+ $last_page = $#some_pages;
}
- my @slice = @some_pages[0..$last];
+ my @slice = @some_pages[0..$last_page];
get_mw_first_pages(\@slice, $pages);
- @some_pages = @some_pages[51..$#some_pages];
+ @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
}
+ return;
}
sub get_mw_tracked_categories {
@@ -266,7 +240,7 @@ sub get_mw_tracked_categories {
# Mediawiki requires the Category
# prefix, but let's not force the user
# to specify it.
- $category = "Category:" . $category;
+ $category = "Category:${category}";
}
my $mw_pages = $mediawiki->list( {
action => 'query',
@@ -274,11 +248,12 @@ sub get_mw_tracked_categories {
cmtitle => $category,
cmlimit => 'max' } )
|| die $mediawiki->{error}->{code} . ': '
- . $mediawiki->{error}->{details};
+ . $mediawiki->{error}->{details} . "\n";
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
}
+ return;
}
sub get_mw_all_pages {
@@ -290,14 +265,12 @@ sub get_mw_all_pages {
aplimit => 'max'
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of wiki pages.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
+ fatal_mw_error("get the list of wiki pages");
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
+ return;
}
# queries the wiki for a set of pages. Meant to be used within a loop
@@ -316,25 +289,23 @@ sub get_mw_first_pages {
titles => $titles,
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not query the list of wiki pages.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
- exit 1;
+ fatal_mw_error("query the list of wiki pages");
}
while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
if ($id < 0) {
- print STDERR "Warning: page $page->{title} not found on wiki\n";
+ print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
} else {
$pages->{$page->{title}} = $page;
}
}
+ return;
}
# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- print STDERR "Listing pages on remote wiki...\n";
+ print {*STDERR} "Listing pages on remote wiki...\n";
my %pages; # hash on page titles to avoid duplicates
my $user_defined;
@@ -352,14 +323,14 @@ sub get_mw_pages {
get_mw_all_pages(\%pages);
}
if ($import_media) {
- print STDERR "Getting media files for selected pages...\n";
+ print {*STDERR} "Getting media files for selected pages...\n";
if ($user_defined) {
get_linked_mediafiles(\%pages);
} else {
get_all_mediafiles(\%pages);
}
}
- print STDERR (scalar keys %pages) . " pages found.\n";
+ print {*STDERR} (scalar keys %pages) . " pages found.\n";
return %pages;
}
@@ -367,9 +338,13 @@ sub get_mw_pages {
# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
my $args = shift;
- my $encoding = (shift || "encoding(UTF-8)");
- open(my $git, "-|:$encoding", "git " . $args);
- my $res = do { local $/; <$git> };
+ my $encoding = (shift || 'encoding(UTF-8)');
+ open(my $git, "-|:${encoding}", "git ${args}")
+ or die "Unable to fork: $!\n";
+ my $res = do {
+ local $/ = undef;
+ <$git>
+ };
close($git);
return $res;
@@ -384,27 +359,26 @@ sub get_all_mediafiles {
my $mw_pages = $mediawiki->list({
action => 'query',
list => 'allpages',
- apnamespace => get_mw_namespace_id("File"),
+ apnamespace => get_mw_namespace_id('File'),
aplimit => 'max'
});
if (!defined($mw_pages)) {
- print STDERR "fatal: could not get the list of pages for media files.\n";
- print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
- print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+ print {*STDERR} "fatal: could not get the list of pages for media files.\n";
+ print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
+ print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
exit 1;
}
foreach my $page (@{$mw_pages}) {
$pages->{$page->{title}} = $page;
}
+ return;
}
sub get_linked_mediafiles {
my $pages = shift;
- my @titles = map $_->{title}, values(%{$pages});
+ my @titles = map { $_->{title} } values(%{$pages});
- # The query is split in small batches because of the MW API limit of
- # the number of links to be returned (500 links max).
- my $batch = 10;
+ my $batch = BATCH_SIZE;
while (@titles) {
if ($#titles < $batch) {
$batch = $#titles;
@@ -420,7 +394,7 @@ sub get_linked_mediafiles {
action => 'query',
prop => 'links|images',
titles => $mw_titles,
- plnamespace => get_mw_namespace_id("File"),
+ plnamespace => get_mw_namespace_id('File'),
pllimit => 'max'
};
my $result = $mediawiki->api($query);
@@ -428,11 +402,13 @@ sub get_linked_mediafiles {
while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
my @media_titles;
if (defined($page->{links})) {
- my @link_titles = map $_->{title}, @{$page->{links}};
+ my @link_titles
+ = map { $_->{title} } @{$page->{links}};
push(@media_titles, @link_titles);
}
if (defined($page->{images})) {
- my @image_titles = map $_->{title}, @{$page->{images}};
+ my @image_titles
+ = map { $_->{title} } @{$page->{images}};
push(@media_titles, @image_titles);
}
if (@media_titles) {
@@ -442,6 +418,7 @@ sub get_linked_mediafiles {
@titles = @titles[($batch+1)..$#titles];
}
+ return;
}
sub get_mw_mediafile_for_page_revision {
@@ -455,7 +432,7 @@ sub get_mw_mediafile_for_page_revision {
my $query = {
action => 'query',
prop => 'imageinfo',
- titles => "File:" . $filename,
+ titles => "File:${filename}",
iistart => $timestamp,
iiend => $timestamp,
iiprop => 'timestamp|archivename|url',
@@ -473,53 +450,50 @@ sub get_mw_mediafile_for_page_revision {
$mediafile{timestamp} = $fileinfo->{timestamp};
# Mediawiki::API's download function doesn't support https URLs
# and can't download old versions of files.
- print STDERR "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+ print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
$mediafile{content} = download_mw_mediafile($fileinfo->{url});
}
return %mediafile;
}
sub download_mw_mediafile {
- my $url = shift;
+ my $download_url = shift;
- my $response = $mediawiki->{ua}->get($url);
- if ($response->code == 200) {
+ my $response = $mediawiki->{ua}->get($download_url);
+ if ($response->code == HTTP_CODE_OK) {
return $response->decoded_content;
} else {
- print STDERR "Error downloading mediafile from :\n";
- print STDERR "URL: $url\n";
- print STDERR "Server response: " . $response->code . " " . $response->message . "\n";
+ print {*STDERR} "Error downloading mediafile from :\n";
+ print {*STDERR} "URL: ${download_url}\n";
+ print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
exit 1;
}
}
sub get_last_local_revision {
# Get note regarding last mediawiki revision
- my $note = run_git("notes --ref=$remotename/mediawiki show refs/mediawiki/$remotename/master 2>/dev/null");
+ my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
my @note_info = split(/ /, $note);
my $lastrevision_number;
- if (!(defined($note_info[0]) && $note_info[0] eq "mediawiki_revision:")) {
- print STDERR "No previous mediawiki revision found";
+ if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
+ print {*STDERR} 'No previous mediawiki revision found';
$lastrevision_number = 0;
} else {
# Notes are formatted: mediawiki_revision: #number
$lastrevision_number = $note_info[1];
chomp($lastrevision_number);
- print STDERR "Last local mediawiki revision found is $lastrevision_number";
+ print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
}
return $lastrevision_number;
}
-# Remember the timestamp corresponding to a revision id.
-my %basetimestamps;
-
# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the fetch-by-rev
# option.
sub get_last_global_remote_rev {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'query',
@@ -535,14 +509,14 @@ sub get_last_global_remote_rev {
# Get the last remote revision concerning the tracked pages and the tracked
# categories.
sub get_last_remote_revision {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my %pages_hash = get_mw_pages();
my @pages = values(%pages_hash);
my $max_rev_num = 0;
- print STDERR "Getting last revision id on tracked pages...\n";
+ print {*STDERR} "Getting last revision id on tracked pages...\n";
foreach my $page (@pages) {
my $id = $page->{pageid};
@@ -563,7 +537,7 @@ sub get_last_remote_revision {
$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
}
- print STDERR "Last remote revision found is $max_rev_num.\n";
+ print {*STDERR} "Last remote revision found is $max_rev_num.\n";
return $max_rev_num;
}
@@ -574,7 +548,7 @@ sub mediawiki_clean {
# Mediawiki does not allow trailing whitespace on a page, and every page ends with a single \n.
# This function right-trims a string and appends a \n to follow this rule.
$string =~ s/\s+$//;
- if ($string eq "" && $page_created) {
+ if ($string eq EMPTY && $page_created) {
# Creating empty pages is forbidden.
$string = EMPTY_CONTENT;
}
@@ -585,38 +559,16 @@ sub mediawiki_clean {
sub mediawiki_smudge {
my $string = shift;
if ($string eq EMPTY_CONTENT) {
- $string = "";
+ $string = EMPTY;
}
# This \n is important. This is due to the way mediawiki handles the end of files.
- return $string."\n";
-}
-
-sub mediawiki_clean_filename {
- my $filename = shift;
- $filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
- # [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
- # Do a variant of URL-encoding, i.e. looks like URL-encoding,
- # but with _ added to prevent MediaWiki from thinking this is
- # an actual special character.
- $filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
- # If we use the uri escape before
- # we should unescape here, before anything
-
- return $filename;
-}
-
-sub mediawiki_smudge_filename {
- my $filename = shift;
- $filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
- $filename =~ s/ /_/g;
- # Decode forbidden characters encoded in mediawiki_clean_filename
- $filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
- return $filename;
+ return "${string}\n";
}
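
# The filename helpers now live in Git::Mediawiki. Assuming they keep
# the old semantics, the round trip is:
#   clean_filename('Foo%2FBar')  => 'Foo/Bar'    (git path -> wiki title)
#   smudge_filename('Foo/Bar')   => 'Foo%2FBar'  (wiki title -> git path)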
sub literal_data {
my ($content) = @_;
- print STDOUT "data ", bytes::length($content), "\n", $content;
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ return;
}
sub literal_data_raw {
@@ -624,33 +576,37 @@ sub literal_data_raw {
my ($content) = @_;
# Avoid confusion between size in bytes and in characters
utf8::downgrade($content);
- binmode STDOUT, ":raw";
- print STDOUT "data ", bytes::length($content), "\n", $content;
- binmode STDOUT, ":utf8";
+ binmode STDOUT, ':raw';
+ print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+ binmode STDOUT, ':encoding(UTF-8)';
+ return;
}
sub mw_capabilities {
# Revisions are imported to the private namespace
# refs/mediawiki/$remotename/ by the helper and fetched into
# refs/remotes/$remotename later by fetch.
- print STDOUT "refspec refs/heads/*:refs/mediawiki/$remotename/*\n";
- print STDOUT "import\n";
- print STDOUT "list\n";
- print STDOUT "push\n";
- print STDOUT "\n";
+ print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
+ print {*STDOUT} "import\n";
+ print {*STDOUT} "list\n";
+ print {*STDOUT} "push\n";
+ print {*STDOUT} "\n";
+ return;
}
sub mw_list {
# MediaWiki does not have branches; we arbitrarily consider one branch
# called master, with HEAD pointing to it.
- print STDOUT "? refs/heads/master\n";
- print STDOUT "\@refs/heads/master HEAD\n";
- print STDOUT "\n";
+ print {*STDOUT} "? refs/heads/master\n";
+ print {*STDOUT} "\@refs/heads/master HEAD\n";
+ print {*STDOUT} "\n";
+ return;
}
sub mw_option {
- print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
- print STDOUT "unsupported\n";
+ print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
+ print {*STDOUT} "unsupported\n";
+ return;
}
sub fetch_mw_revisions_for_page {
@@ -681,15 +637,15 @@ sub fetch_mw_revisions_for_page {
push(@page_revs, $page_rev_ids);
$revnum++;
}
- last unless $result->{'query-continue'};
+ last if (!$result->{'query-continue'});
$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
}
if ($shallow_import && @page_revs) {
- print STDERR " Found 1 revision (shallow import).\n";
+ print {*STDERR} " Found 1 revision (shallow import).\n";
@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
return $page_revs[0];
}
- print STDERR " Found ", $revnum, " revision(s).\n";
+ print {*STDERR} " Found ${revnum} revision(s).\n";
return @page_revs;
}
@@ -701,8 +657,7 @@ sub fetch_mw_revisions {
my $n = 1;
foreach my $page (@pages) {
my $id = $page->{pageid};
-
- print STDERR "page $n/", scalar(@pages), ": ". $page->{title} ."\n";
+ print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
$n++;
my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
@revisions = (@page_revs, @revisions);
@@ -716,7 +671,7 @@ sub fe_escape_path {
$path =~ s/\\/\\\\/g;
$path =~ s/"/\\"/g;
$path =~ s/\n/\\n/g;
- return '"' . $path . '"';
+ return qq("${path}");
}
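
# For example, a path such as:   my "page".mw
# is emitted to fast-import as:  "my \"page\".mw"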
sub import_file_revision {
@@ -736,42 +691,43 @@ sub import_file_revision {
my $author = $commit{author};
my $date = $commit{date};
- print STDOUT "commit refs/mediawiki/$remotename/master\n";
- print STDOUT "mark :$n\n";
- print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
+ print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
+ print {*STDOUT} "mark :${n}\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
literal_data($comment);
# If it's not a clone, we need to know where to start from
if (!$full_import && $n == 1) {
- print STDOUT "from refs/mediawiki/$remotename/master^0\n";
+ print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
}
if ($content ne DELETED_CONTENT) {
- print STDOUT "M 644 inline " .
- fe_escape_path($title . ".mw") . "\n";
+ print {*STDOUT} 'M 644 inline ' .
+ fe_escape_path("${title}.mw") . "\n";
literal_data($content);
if (%mediafile) {
- print STDOUT "M 644 inline "
+ print {*STDOUT} 'M 644 inline '
. fe_escape_path($mediafile{title}) . "\n";
literal_data_raw($mediafile{content});
}
- print STDOUT "\n\n";
+ print {*STDOUT} "\n\n";
} else {
- print STDOUT "D " . fe_escape_path($title . ".mw") . "\n";
+ print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
}
# mediawiki revision number in the git note
if ($full_import && $n == 1) {
- print STDOUT "reset refs/notes/$remotename/mediawiki\n";
+ print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
}
- print STDOUT "commit refs/notes/$remotename/mediawiki\n";
- print STDOUT "committer $author <$author\@$wiki_name> ", $date->epoch, " +0000\n";
- literal_data("Note added by git-mediawiki during import");
+ print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
+ print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+ literal_data('Note added by git-mediawiki during import');
if (!$full_import && $n == 1) {
- print STDOUT "from refs/notes/$remotename/mediawiki^0\n";
+ print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
}
- print STDOUT "N inline :$n\n";
- literal_data("mediawiki_revision: " . $commit{mw_revision});
- print STDOUT "\n\n";
+ print {*STDOUT} "N inline :${n}\n";
+ literal_data("mediawiki_revision: $commit{mw_revision}");
+ print {*STDOUT} "\n\n";
+ return;
}
# parse a sequence of
@@ -784,23 +740,25 @@ sub get_more_refs {
my @refs;
while (1) {
my $line = <STDIN>;
- if ($line =~ m/^$cmd (.*)$/) {
+ if ($line =~ /^$cmd (.*)$/) {
push(@refs, $1);
} elsif ($line eq "\n") {
return @refs;
} else {
- die("Invalid command in a '$cmd' batch: ". $_);
+ die("Invalid command in a '$cmd' batch: $_\n");
}
}
+ return;
}
sub mw_import {
# multiple import commands can follow each other.
- my @refs = (shift, get_more_refs("import"));
+ my @refs = (shift, get_more_refs('import'));
foreach my $ref (@refs) {
mw_import_ref($ref);
}
- print STDOUT "done\n";
+ print {*STDOUT} "done\n";
+ return;
}
sub mw_import_ref {
@@ -810,40 +768,41 @@ sub mw_import_ref {
# Since HEAD is a symbolic ref to master (by convention,
# followed by the output of the command "list" that we gave),
# we don't need to do anything in this case.
- if ($ref eq "HEAD") {
+ if ($ref eq 'HEAD') {
return;
}
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
- print STDERR "Searching revisions...\n";
+ print {*STDERR} "Searching revisions...\n";
my $last_local = get_last_local_revision();
my $fetch_from = $last_local + 1;
if ($fetch_from == 1) {
- print STDERR ", fetching from beginning.\n";
+ print {*STDERR} ", fetching from beginning.\n";
} else {
- print STDERR ", fetching from here.\n";
+ print {*STDERR} ", fetching from here.\n";
}
my $n = 0;
- if ($fetch_strategy eq "by_rev") {
- print STDERR "Fetching & writing export data by revs...\n";
+ if ($fetch_strategy eq 'by_rev') {
+ print {*STDERR} "Fetching & writing export data by revs...\n";
$n = mw_import_ref_by_revs($fetch_from);
- } elsif ($fetch_strategy eq "by_page") {
- print STDERR "Fetching & writing export data by pages...\n";
+ } elsif ($fetch_strategy eq 'by_page') {
+ print {*STDERR} "Fetching & writing export data by pages...\n";
$n = mw_import_ref_by_pages($fetch_from);
} else {
- print STDERR "fatal: invalid fetch strategy \"$fetch_strategy\".\n";
- print STDERR "Check your configuration variables remote.$remotename.fetchStrategy and mediawiki.fetchStrategy\n";
+ print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
+ print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
exit 1;
}
if ($fetch_from == 1 && $n == 0) {
- print STDERR "You appear to have cloned an empty MediaWiki.\n";
+ print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
# Something has to be done remote-helper side. If nothing is done, an error is
# thrown saying that HEAD is referring to unknown object 0000000000000000000
# and the clone fails.
}
+ return;
}
sub mw_import_ref_by_pages {
@@ -855,7 +814,7 @@ sub mw_import_ref_by_pages {
my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
- my @revision_ids = map $_->{revid}, @revisions;
+ my @revision_ids = map { $_->{revid} } @revisions;
return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}
@@ -882,7 +841,7 @@ sub mw_import_revids {
my $n_actual = 0;
my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
- foreach my $pagerevid (@$revision_ids) {
+ foreach my $pagerevid (@{$revision_ids}) {
# Count page even if we skip it, since we display
# $n/$total and $total includes skipped pages.
$n++;
@@ -898,7 +857,7 @@ sub mw_import_revids {
my $result = $mediawiki->api($query);
if (!$result) {
- die "Failed to retrieve modified page for revision $pagerevid";
+ die "Failed to retrieve modified page for revision $pagerevid\n";
}
if (defined($result->{query}->{badrevids}->{$pagerevid})) {
@@ -907,7 +866,7 @@ sub mw_import_revids {
}
if (!defined($result->{query}->{pages})) {
- die "Invalid revision $pagerevid.";
+ die "Invalid revision ${pagerevid}.\n";
}
my @result_pages = values(%{$result->{query}->{pages}});
@@ -917,8 +876,8 @@ sub mw_import_revids {
my $page_title = $result_page->{title};
if (!exists($pages->{$page_title})) {
- print STDERR "$n/", scalar(@$revision_ids),
- ": Skipping revision #$rev->{revid} of $page_title\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}),
+ ": Skipping revision #$rev->{revid} of ${page_title}\n";
next;
}
@@ -927,7 +886,7 @@ sub mw_import_revids {
my %commit;
$commit{author} = $rev->{user} || 'Anonymous';
$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
- $commit{title} = mediawiki_smudge_filename($page_title);
+ $commit{title} = smudge_filename($page_title);
$commit{mw_revision} = $rev->{revid};
$commit{content} = mediawiki_smudge($rev->{'*'});
@@ -943,14 +902,14 @@ sub mw_import_revids {
my %mediafile;
if ($namespace) {
my $id = get_mw_namespace_id($namespace);
- if ($id && $id == get_mw_namespace_id("File")) {
+ if ($id && $id == get_mw_namespace_id('File')) {
%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
}
}
# If this is a revision of the media page for new version
# of a file do one common commit for both file and media page.
# Else do commit only for that page.
- print STDERR "$n/", scalar(@$revision_ids), ": Revision #$rev->{revid} of $commit{title}\n";
+ print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
}
@@ -958,17 +917,17 @@ sub mw_import_revids {
}
sub error_non_fast_forward {
- my $advice = run_git("config --bool advice.pushNonFastForward");
+ my $advice = run_git('config --bool advice.pushNonFastForward');
chomp($advice);
- if ($advice ne "false") {
+ if ($advice ne 'false') {
# Native git-push would show this after the summary.
# We can't ask it to display it cleanly, so print it
# ourselves before.
- print STDERR "To prevent you from losing history, non-fast-forward updates were rejected\n";
- print STDERR "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
- print STDERR "'Note about fast-forwards' section of 'git push --help' for details.\n";
+ print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
+ print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+ print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
}
- print STDOUT "error $_[0] \"non-fast-forward\"\n";
+ print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
return 0;
}
@@ -979,34 +938,34 @@ sub mw_upload_file {
my $file_deleted = shift;
my $summary = shift;
my $newrevid;
- my $path = "File:" . $complete_file_name;
+ my $path = "File:${complete_file_name}";
my %hashFiles = get_allowed_file_extensions();
if (!exists($hashFiles{$extension})) {
- print STDERR "$complete_file_name is not a permitted file on this wiki.\n";
- print STDERR "Check the configuration of file uploads in your mediawiki.\n";
+ print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
+ print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
return $newrevid;
}
# Deleting and uploading a file requires a privileged user
if ($file_deleted) {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'delete',
title => $path,
reason => $summary
};
if (!$mediawiki->edit($query)) {
- print STDERR "Failed to delete file on remote wiki\n";
- print STDERR "Check your permissions on the remote site. Error code:\n";
- print STDERR $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+ print {*STDERR} "Failed to delete file on remote wiki\n";
+ print {*STDERR} "Check your permissions on the remote site. Error code:\n";
+ print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
exit 1;
}
} else {
# Don't let perl try to interpret file content as UTF-8 => use "raw"
- my $content = run_git("cat-file blob $new_sha1", "raw");
- if ($content ne "") {
- mw_connect_maybe();
+ my $content = run_git("cat-file blob ${new_sha1}", 'raw');
+ if ($content ne EMPTY) {
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
$mediawiki->{config}->{upload_url} =
- "$url/index.php/Special:Upload";
+ "${url}/index.php/Special:Upload";
$mediawiki->edit({
action => 'upload',
filename => $complete_file_name,
@@ -1018,12 +977,12 @@ sub mw_upload_file {
}, {
skip_encoding => 1
} ) || die $mediawiki->{error}->{code} . ':'
- . $mediawiki->{error}->{details};
+ . $mediawiki->{error}->{details} . "\n";
my $last_file_page = $mediawiki->get_page({title => $path});
$newrevid = $last_file_page->{revid};
- print STDERR "Pushed file: $new_sha1 - $complete_file_name.\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
} else {
- print STDERR "Empty file $complete_file_name not pushed.\n";
+ print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
}
}
return $newrevid;
@@ -1045,24 +1004,24 @@ sub mw_push_file {
my $newrevid;
if ($summary eq EMPTY_MESSAGE) {
- $summary = '';
+ $summary = EMPTY;
}
my $new_sha1 = $diff_info_split[3];
my $old_sha1 = $diff_info_split[2];
my $page_created = ($old_sha1 eq NULL_SHA1);
my $page_deleted = ($new_sha1 eq NULL_SHA1);
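# (For reference, each NUL-separated record from `diff-tree -r --raw -z`
# starts with ":<old_mode> <new_mode> <old_sha1> <new_sha1> <status>",
# which is why indices 2 and 3 above hold the old and new blob sha1s.)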
- $complete_file_name = mediawiki_clean_filename($complete_file_name);
+ $complete_file_name = clean_filename($complete_file_name);
my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
if (!defined($extension)) {
- $extension = "";
+ $extension = EMPTY;
}
- if ($extension eq "mw") {
+ if ($extension eq 'mw') {
my $ns = get_mw_namespace_id_for_page($complete_file_name);
- if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
- print STDERR "Ignoring media file related page: $complete_file_name\n";
- return ($oldrevid, "ok");
+ if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
+ print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
+ return ($oldrevid, 'ok');
}
my $file_content;
if ($page_deleted) {
@@ -1072,10 +1031,10 @@ sub mw_push_file {
# with this content instead:
$file_content = DELETED_CONTENT;
} else {
- $file_content = run_git("cat-file blob $new_sha1");
+ $file_content = run_git("cat-file blob ${new_sha1}");
}
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $result = $mediawiki->edit( {
action => 'edit',
@@ -1089,49 +1048,49 @@ sub mw_push_file {
if (!$result) {
if ($mediawiki->{error}->{code} == 3) {
# edit conflicts, considered as non-fast-forward
- print STDERR 'Warning: Error ' .
+ print {*STDERR} 'Warning: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details} .
+ ' from mediawiki: ' . $mediawiki->{error}->{details} .
".\n";
- return ($oldrevid, "non-fast-forward");
+ return ($oldrevid, 'non-fast-forward');
} else {
# Other errors. Shouldn't happen => just die()
die 'Fatal: Error ' .
$mediawiki->{error}->{code} .
- ' from mediwiki: ' . $mediawiki->{error}->{details};
+ ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
}
}
$newrevid = $result->{edit}->{newrevid};
- print STDERR "Pushed file: $new_sha1 - $title\n";
+ print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
} elsif ($export_media) {
$newrevid = mw_upload_file($complete_file_name, $new_sha1,
$extension, $page_deleted,
$summary);
} else {
- print STDERR "Ignoring media file $title\n";
+ print {*STDERR} "Ignoring media file ${title}\n";
}
$newrevid = ($newrevid or $oldrevid);
- return ($newrevid, "ok");
+ return ($newrevid, 'ok');
}
sub mw_push {
# multiple push statements can follow each other
- my @refsspecs = (shift, get_more_refs("push"));
+ my @refsspecs = (shift, get_more_refs('push'));
my $pushed;
for my $refspec (@refsspecs) {
my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
- or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>");
+ or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
if ($force) {
- print STDERR "Warning: forced push not allowed on a MediaWiki.\n";
+ print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
}
- if ($local eq "") {
- print STDERR "Cannot delete remote branch on a MediaWiki\n";
- print STDOUT "error $remote cannot delete\n";
+ if ($local eq EMPTY) {
+ print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} cannot delete\n";
next;
}
- if ($remote ne "refs/heads/master") {
- print STDERR "Only push to the branch 'master' is supported on a MediaWiki\n";
- print STDOUT "error $remote only master allowed\n";
+ if ($remote ne 'refs/heads/master') {
+ print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
+ print {*STDOUT} "error ${remote} only master allowed\n";
next;
}
if (mw_push_revision($local, $remote)) {
@@ -1140,30 +1099,32 @@ sub mw_push {
}
# Notify Git that the push is done
- print STDOUT "\n";
+ print {*STDOUT} "\n";
if ($pushed && $dumb_push) {
- print STDERR "Just pushed some revisions to MediaWiki.\n";
- print STDERR "The pushed revisions now have to be re-imported, and your current branch\n";
- print STDERR "needs to be updated with these re-imported commits. You can do this with\n";
- print STDERR "\n";
- print STDERR " git pull --rebase\n";
- print STDERR "\n";
+ print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
+ print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
+ print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
+ print {*STDERR} "\n";
+ print {*STDERR} " git pull --rebase\n";
+ print {*STDERR} "\n";
}
+ return;
}
sub mw_push_revision {
my $local = shift;
my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
my $last_local_revid = get_last_local_revision();
- print STDERR ".\n"; # Finish sentence started by get_last_local_revision()
+ print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
my $last_remote_revid = get_last_remote_revision();
my $mw_revision = $last_remote_revid;
# Get sha1 of commit pointed by local HEAD
- my $HEAD_sha1 = run_git("rev-parse $local 2>/dev/null"); chomp($HEAD_sha1);
+ my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
+ chomp($HEAD_sha1);
# Get sha1 of commit pointed by remotes/$remotename/master
- my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/$remotename/master 2>/dev/null");
+ my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
chomp($remoteorigin_sha1);
if ($last_local_revid > 0 &&
@@ -1182,22 +1143,22 @@ sub mw_push_revision {
if ($last_local_revid > 0) {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
- print STDERR "Computing path from local to remote ...\n";
- my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
+ print {*STDERR} "Computing path from local to remote ...\n";
+ my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
my %local_ancestry;
foreach my $line (@local_ancestry) {
- if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
- foreach my $parent (split(' ', $parents)) {
+ if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+ foreach my $parent (split(/ /, $parents)) {
$local_ancestry{$parent} = $child;
}
- } elsif (!$line =~ m/^([a-f0-9]+)/) {
- die "Unexpected output from git rev-list: $line";
+ } elsif ($line !~ /^([a-f0-9]+)/) {
+ die "Unexpected output from git rev-list: ${line}\n";
}
}
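# For reference, `rev-list --boundary --parents` prints lines of the form
# "<child_sha1> <parent_sha1> ...", with boundary commits prefixed by '-';
# the loop above inverts them into a parent-to-child map.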
while ($parsed_sha1 ne $HEAD_sha1) {
my $child = $local_ancestry{$parsed_sha1};
if (!$child) {
- printf STDERR "Cannot find a path in history from remote commit to last commit\n";
+ print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
return error_non_fast_forward($remote);
}
push(@commit_pairs, [$parsed_sha1, $child]);
@@ -1206,12 +1167,12 @@ sub mw_push_revision {
} else {
# No remote mediawiki revision. Export the whole
# history (linearized with --first-parent)
- print STDERR "Warning: no common ancestor, pushing complete history\n";
- my $history = run_git("rev-list --first-parent --children $local");
- my @history = split('\n', $history);
+ print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
+ my $history = run_git("rev-list --first-parent --children ${local}");
+ my @history = split(/\n/, $history);
@history = @history[1..$#history];
foreach my $line (reverse @history) {
- my @commit_info_split = split(/ |\n/, $line);
+ my @commit_info_split = split(/[ \n]/, $line);
push(@commit_pairs, \@commit_info_split);
}
}
@@ -1219,12 +1180,12 @@ sub mw_push_revision {
foreach my $commit_info_split (@commit_pairs) {
my $sha1_child = @{$commit_info_split}[0];
my $sha1_commit = @{$commit_info_split}[1];
- my $diff_infos = run_git("diff-tree -r --raw -z $sha1_child $sha1_commit");
+ my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
# TODO: we could detect rename, and encode them with a #redirect on the wiki.
# TODO: for now, it's just a delete+add
my @diff_info_list = split(/\0/, $diff_infos);
# Keep the subject line of the commit message as mediawiki comment for the revision
- my $commit_msg = run_git("log --no-walk --format=\"%s\" $sha1_commit");
+ my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
chomp($commit_msg);
# Push every blob
while (@diff_info_list) {
@@ -1236,7 +1197,7 @@ sub mw_push_revision {
my $info = shift(@diff_info_list);
my $file = shift(@diff_info_list);
($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
- if ($status eq "non-fast-forward") {
+ if ($status eq 'non-fast-forward') {
# we may already have sent part of the
# commit to MediaWiki, but it's too
# late to cancel it. Stop the push in
@@ -1244,22 +1205,22 @@ sub mw_push_revision {
# accurate error message.
return error_non_fast_forward($remote);
}
- if ($status ne "ok") {
- die("Unknown error from mw_push_file()");
+ if ($status ne 'ok') {
+ die("Unknown error from mw_push_file()\n");
}
}
- unless ($dumb_push) {
- run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
- run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
+ if (!$dumb_push) {
+ run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
+ run_git(qq(update-ref -m "Git-MediaWiki push" refs/mediawiki/${remotename}/master ${sha1_commit} ${sha1_child}));
}
}
- print STDOUT "ok $remote\n";
+ print {*STDOUT} "ok ${remote}\n";
return 1;
}
sub get_allowed_file_extensions {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $query = {
action => 'query',
@@ -1267,8 +1228,8 @@ sub get_allowed_file_extensions {
siprop => 'fileextensions'
};
my $result = $mediawiki->api($query);
- my @file_extensions= map $_->{ext},@{$result->{query}->{fileextensions}};
- my %hashFile = map {$_ => 1}@file_extensions;
+ my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
+ my %hashFile = map { $_ => 1 } @file_extensions;
return %hashFile;
}
@@ -1283,15 +1244,15 @@ my %cached_mw_namespace_id;
# Return MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
- mw_connect_maybe();
+ $mediawiki = connect_maybe($mediawiki, $remotename, $url);
my $name = shift;
if (!exists $namespace_id{$name}) {
# Look at configuration file, if the record for that namespace is
# already cached. Namespaces are stored in form:
# "Name_of_namespace:Id_namespace", ex.: "File:6".
- my @temp = split(/[\n]/, run_git("config --get-all remote."
- . $remotename .".namespaceCache"));
+ my @temp = split(/\n/,
+ run_git("config --get-all remote.${remotename}.namespaceCache"));
chomp(@temp);
foreach my $ns (@temp) {
my ($n, $id) = split(/:/, $ns);
@@ -1305,7 +1266,7 @@ sub get_mw_namespace_id {
}
if (!exists $namespace_id{$name}) {
- print STDERR "Namespace $name not found in cache, querying the wiki ...\n";
+ print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
# NS not found => get namespace id from MW and store it in
# configuration file.
my $query = {
@@ -1329,8 +1290,8 @@ sub get_mw_namespace_id {
my $ns = $namespace_id{$name};
my $id;
- unless (defined $ns) {
- print STDERR "No such namespace $name on MediaWiki.\n";
+ if (!defined $ns) {
+ print {*STDERR} "No such namespace ${name} on MediaWiki.\n";
$ns = {is_namespace => 0};
$namespace_id{$name} = $ns;
}
@@ -1344,15 +1305,15 @@ sub get_mw_namespace_id {
# Store explicitely requested namespaces on disk
if (!exists $cached_mw_namespace_id{$name}) {
- run_git("config --add remote.". $remotename
- .".namespaceCache \"". $name .":". $store_id ."\"");
+ run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
$cached_mw_namespace_id{$name} = 1;
}
return $id;
}
sub get_mw_namespace_id_for_page {
- if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+ my $namespace = shift;
+ if ($namespace =~ /^([^:]*):/) {
return get_mw_namespace_id($namespace);
} else {
return;
diff --git a/contrib/mw-to-git/t/test-gitmw-lib.sh b/contrib/mw-to-git/t/test-gitmw-lib.sh
index 3b2cfacf51..ca6860ff30 100755
--- a/contrib/mw-to-git/t/test-gitmw-lib.sh
+++ b/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -62,12 +62,8 @@ test_check_precond () {
test_done
fi
- if [ ! -f "$GIT_BUILD_DIR"/git-remote-mediawiki ];
- then
- echo "No remote mediawiki for git found. Copying it in git"
- echo "cp $GIT_BUILD_DIR/contrib/mw-to-git/git-remote-mediawiki $GIT_BUILD_DIR/"
- ln -s "$GIT_BUILD_DIR"/contrib/mw-to-git/git-remote-mediawiki "$GIT_BUILD_DIR"
- fi
+ GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd "../.." && pwd)
+ PATH="$GIT_EXEC_PATH"'/bin-wrapper:'"$PATH"
if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
then
@@ -336,20 +332,21 @@ wiki_install () {
fi
# Fetch MediaWiki's archive if not already present in the TMP directory
+ MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
cd "$TMP"
- if [ ! -f "$MW_VERSION.tar.gz" ] ; then
- echo "Downloading $MW_VERSION sources ..."
- wget "http://download.wikimedia.org/mediawiki/1.19/mediawiki-1.19.0.tar.gz" ||
+ if [ ! -f "$MW_FILENAME" ] ; then
+ echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..."
+ wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" ||
error "Unable to download "\
- "http://download.wikimedia.org/mediawiki/1.19/"\
- "mediawiki-1.19.0.tar.gz. "\
+ "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
+ "$MW_FILENAME. "\
"Please fix your connection and launch the script again."
- echo "$MW_VERSION.tar.gz downloaded in `pwd`. "\
+ echo "$MW_FILENAME downloaded in `pwd`. "\
"You can delete it later if you want."
else
- echo "Reusing existing $MW_VERSION.tar.gz downloaded in `pwd`."
+ echo "Reusing existing $MW_FILENAME downloaded in `pwd`."
fi
- archive_abs_path=$(pwd)/"$MW_VERSION.tar.gz"
+ archive_abs_path=$(pwd)/$MW_FILENAME
cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
tar xzf "$archive_abs_path" --strip-components=1 ||
@@ -431,5 +428,5 @@ wiki_delete () {
# Delete the wiki's SQLite database
rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
rm -f "$FILES_FOLDER/$DB_FILE"
- rm -rf "$TMP/$MW_VERSION"
+ rm -rf "$TMP/mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
}
diff --git a/contrib/mw-to-git/t/test.config b/contrib/mw-to-git/t/test.config
index 958b37b4a7..4cfebe9c69 100644
--- a/contrib/mw-to-git/t/test.config
+++ b/contrib/mw-to-git/t/test.config
@@ -30,6 +30,8 @@ WEB_WWW=$WEB/www
# The variables below are used by the script to install a wiki.
# You should not modify these unless you are modifying the script itself.
-MW_VERSION=mediawiki-1.19.0
+# tested versions: 1.19.X -> 1.21.1
+MW_VERSION_MAJOR=1.21
+MW_VERSION_MINOR=1
FILES_FOLDER=install-wiki
DB_INSTALL_SCRIPT=db_install.php
diff --git a/contrib/patches/docbook-xsl-manpages-charmap.patch b/contrib/patches/docbook-xsl-manpages-charmap.patch
deleted file mode 100644
index f2b08b4f4a..0000000000
--- a/contrib/patches/docbook-xsl-manpages-charmap.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-From: Ismail Dönmez <ismail@pardus.org.tr>
-
-Trying to build the documentation with docbook-xsl 1.73 may result in
-the following error. This patch fixes it.
-
-$ xmlto -m callouts.xsl man git-add.xml
-runtime error: file
-file:///usr/share/sgml/docbook/xsl-stylesheets-1.73.0/manpages/other.xsl line
-129 element call-template
-The called template 'read-character-map' was not found.
-
---- docbook-xsl-1.73.0/manpages/docbook.xsl.manpages-charmap 2007-07-23 16:24:23.000000000 +0100
-+++ docbook-xsl-1.73.0/manpages/docbook.xsl 2007-07-23 16:25:16.000000000 +0100
-@@ -37,6 +37,7 @@
- <xsl:include href="lists.xsl"/>
- <xsl:include href="endnotes.xsl"/>
- <xsl:include href="table.xsl"/>
-+ <xsl:include href="../common/charmap.xsl"/>
-
- <!-- * we rename the following just to avoid using params with "man" -->
- <!-- * prefixes in the table.xsl stylesheet (because that stylesheet -->
diff --git a/contrib/remote-helpers/git-remote-bzr b/contrib/remote-helpers/git-remote-bzr
index 10300c63d1..c3a3cac77b 100755
--- a/contrib/remote-helpers/git-remote-bzr
+++ b/contrib/remote-helpers/git-remote-bzr
@@ -116,7 +116,10 @@ class Marks:
self.last_mark = mark
def get_tip(self, branch):
- return self.tips.get(branch, None)
+ try:
+ return str(self.tips[branch])
+ except KeyError:
+ return None
def set_tip(self, branch, tip):
self.tips[branch] = tip
@@ -278,7 +281,7 @@ def export_branch(repo, name):
ref = '%s/heads/%s' % (prefix, name)
tip = marks.get_tip(name)
- branch = bzrlib.branch.Branch.open(branches[name])
+ branch = get_remote_branch(name)
repo = branch.repository
branch.lock_read()
@@ -590,7 +593,7 @@ def parse_commit(parser):
if ref.startswith('refs/heads/'):
name = ref[len('refs/heads/'):]
- branch = bzrlib.branch.Branch.open(branches[name])
+ branch = get_remote_branch(name)
else:
die('unknown ref')
@@ -621,7 +624,7 @@ def parse_commit(parser):
mark = int(mark_ref[1:])
f = { 'mode' : m, 'mark' : mark }
elif parser.check('D'):
- t, path = line.split(' ')
+ t, path = line.split(' ', 1)
f = { 'deleted' : True }
else:
die('Unknown file command: %s' % line)
@@ -692,7 +695,7 @@ def do_export(parser):
for ref, revid in parsed_refs.iteritems():
if ref.startswith('refs/heads/'):
name = ref[len('refs/heads/'):]
- branch = bzrlib.branch.Branch.open(branches[name])
+ branch = get_remote_branch(name)
branch.generate_revision_history(revid, marks.get_tip(name))
if name in peers:
@@ -749,7 +752,7 @@ def do_list(parser):
master_branch = name
print "? refs/heads/%s" % name
- branch = bzrlib.branch.Branch.open(branches[master_branch])
+ branch = get_remote_branch(master_branch)
branch.lock_read()
for tag, revid in branch.tags.get_tag_dict().items():
try:
@@ -765,30 +768,40 @@ def do_list(parser):
print "@refs/heads/%s HEAD" % master_branch
print
-def get_remote_branch(origin, remote_branch, name):
- global dirname, peers
+def clone(path, remote_branch):
+ try:
+ bdir = bzrlib.bzrdir.BzrDir.create(path)
+ except bzrlib.errors.AlreadyControlDirError:
+ bdir = bzrlib.bzrdir.BzrDir.open(path)
+ repo = bdir.find_repository()
+ repo.fetch(remote_branch.repository)
+ return remote_branch.sprout(bdir, repository=repo)
+
+def get_remote_branch(name):
+ global dirname, branches
+
+ remote_branch = bzrlib.branch.Branch.open(branches[name])
+ if isinstance(remote_branch.user_transport, bzrlib.transport.local.LocalTransport):
+ return remote_branch
branch_path = os.path.join(dirname, 'clone', name)
- if os.path.exists(branch_path):
+
+ try:
+ branch = bzrlib.branch.Branch.open(branch_path)
+ except bzrlib.errors.NotBranchError:
+ # clone
+ branch = clone(branch_path, remote_branch)
+ else:
# pull
- d = bzrlib.bzrdir.BzrDir.open(branch_path)
- branch = d.open_branch()
try:
- branch.pull(remote_branch, [], None, False)
+ branch.pull(remote_branch, overwrite=True)
except bzrlib.errors.DivergedBranches:
# use remote branch for now
return remote_branch
- else:
- # clone
- d = origin.sprout(branch_path, None,
- hardlink=True, create_tree_if_local=False,
- force_new_repo=False,
- source_branch=remote_branch)
- branch = d.open_branch()
return branch
-def find_branches(repo, wanted):
+def find_branches(repo):
transport = repo.bzrdir.root_transport
for fn in transport.iter_files_recursive():
@@ -799,16 +812,13 @@ def find_branches(repo, wanted):
name = name if name != '' else 'master'
name = name.replace('/', '+')
- if wanted and not name in wanted:
- continue
-
try:
cur = transport.clone(subdir)
branch = bzrlib.branch.Branch.open_from_transport(cur)
except bzrlib.errors.NotBranchError:
continue
else:
- yield name, branch
+ yield name, branch.base
def get_repo(url, alias):
global dirname, peer, branches
@@ -841,44 +851,35 @@ def get_repo(url, alias):
except bzrlib.errors.NoRepositoryPresent:
pass
- try:
- repo = origin.open_repository()
- if not repo.user_transport.listable():
- # this repository is not usable for us
- raise bzrlib.errors.NoRepositoryPresent(repo.bzrdir)
- except bzrlib.errors.NoRepositoryPresent:
- # branch
-
- name = 'master'
- remote_branch = origin.open_branch()
-
- if not is_local:
- peers[name] = remote_branch.base
- branch = get_remote_branch(origin, remote_branch, name)
- else:
- branch = remote_branch
-
- branches[name] = branch.base
+ wanted = get_config('remote-bzr.branches').rstrip().split(', ')
+ # stupid python
+ wanted = [e for e in wanted if e]
- return branch.repository
+ if not wanted:
+ try:
+ repo = origin.open_repository()
+ if not repo.user_transport.listable():
+ # this repository is not usable for us
+ raise bzrlib.errors.NoRepositoryPresent(repo.bzrdir)
+ except bzrlib.errors.NoRepositoryPresent:
+ wanted = ['master']
+
+ if wanted:
+ def list_wanted(url, wanted):
+ for name in wanted:
+ subdir = name if name != 'master' else ''
+ yield name, bzrlib.urlutils.join(url, subdir)
+
+ branch_list = list_wanted(url, wanted)
else:
- # repository
-
- wanted = get_config('remote-bzr.branches').rstrip().split(', ')
- # stupid python
- wanted = [e for e in wanted if e]
-
- for name, remote_branch in find_branches(repo, wanted):
+ branch_list = find_branches(repo)
- if not is_local:
- peers[name] = remote_branch.base
- branch = get_remote_branch(origin, remote_branch, name)
- else:
- branch = remote_branch
-
- branches[name] = branch.base
+ for name, url in branch_list:
+ if not is_local:
+ peers[name] = url
+ branches[name] = url
- return repo
+ return origin
def fix_path(alias, orig_url):
url = urlparse.urlparse(orig_url, 'file')
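
With the rewrite above, users can restrict which bzr branches the
helper handles through the remote-bzr.branches configuration that
get_config() reads; the value is split on ", ". A usage sketch:

    # fetch only these two branches (comma-space separated, per the
    # split(', ') call above)
    git config remote-bzr.branches 'trunk, devel'
    git fetch origin

When the variable is unset and the URL points at a plain branch rather
than a shared repository, get_repo() now falls back to a single
'master' branch, as the new code path shows.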
diff --git a/contrib/remote-helpers/git-remote-hg b/contrib/remote-helpers/git-remote-hg
index 1dd3d7030e..0194c67fb1 100755
--- a/contrib/remote-helpers/git-remote-hg
+++ b/contrib/remote-helpers/git-remote-hg
@@ -12,7 +12,7 @@
# For remote repositories a local clone is stored in
# "$GIT_DIR/hg/origin/clone/.hg/".
-from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions
+from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions, discovery, util
import re
import sys
@@ -29,9 +29,6 @@ import urlparse, hashlib
# named branches:
# git config --global remote-hg.track-branches false
#
-# If you don't want to force pushes (and thus risk creating new remote heads):
-# git config --global remote-hg.force-push false
-#
# If you want the equivalent of hg's clone/pull--insecure option:
# git config --global remote-hg.insecure true
#
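
The removed knob is superseded later in this patch: do_option() gains
'force' (and 'dry-run') handling and main() initializes force_push to
False, so forcing is now requested per invocation rather than via
configuration. A sketch:

    # forcing is passed through the helper's new 'option force'
    # handling (see do_option() below) instead of remote-hg.force-push
    git push --force origin master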
@@ -55,6 +52,8 @@ EMAIL_RE = re.compile('^([^<>]+[^ \\\t<>])?\\b(?:[ \\t<>]*?)\\b([^ \\t<>]+@[^ \\
AUTHOR_HG_RE = re.compile('^(.*?) ?<(.*?)(?:>(.+)?)?$')
RAW_AUTHOR_RE = re.compile('^(\w+) (?:(.+)? )?<(.*)> (\d+) ([+-]\d+)')
+VERSION = 2
+
def die(msg, *args):
sys.stderr.write('ERROR: %s\n' % (msg % args))
sys.exit(1)
@@ -72,8 +71,11 @@ def hgmode(mode):
m = { '100755': 'x', '120000': 'l' }
return m.get(mode, '')
-def hghex(node):
- return hg.node.hex(node)
+def hghex(n):
+ return node.hex(n)
+
+def hgbin(n):
+ return node.bin(n)
def hgref(ref):
return ref.replace('___', ' ')
@@ -81,6 +83,11 @@ def hgref(ref):
def gitref(ref):
return ref.replace(' ', '___')
+def check_version(*check):
+ if not hg_version:
+ return True
+ return hg_version >= check
+
def get_config(config):
cmd = ['git', 'config', '--get', config]
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
@@ -98,14 +105,27 @@ def get_config_bool(config, default=False):
class Marks:
- def __init__(self, path):
+ def __init__(self, path, repo):
self.path = path
+ self.repo = repo
+ self.clear()
+ self.load()
+
+ if self.version < VERSION:
+ if self.version == 1:
+ self.upgrade_one()
+
+ # upgraded?
+ if self.version < VERSION:
+ self.clear()
+ self.version = VERSION
+
+ def clear(self):
self.tips = {}
self.marks = {}
self.rev_marks = {}
self.last_mark = 0
-
- self.load()
+ self.version = 0
def load(self):
if not os.path.exists(self.path):
@@ -116,12 +136,21 @@ class Marks:
self.tips = tmp['tips']
self.marks = tmp['marks']
self.last_mark = tmp['last-mark']
+ self.version = tmp.get('version', 1)
for rev, mark in self.marks.iteritems():
- self.rev_marks[mark] = int(rev)
+ self.rev_marks[mark] = rev
+
+ def upgrade_one(self):
+ def get_id(rev):
+ return hghex(self.repo.changelog.node(int(rev)))
+ self.tips = dict((name, get_id(rev)) for name, rev in self.tips.iteritems())
+ self.marks = dict((get_id(rev), mark) for rev, mark in self.marks.iteritems())
+ self.rev_marks = dict((mark, get_id(rev)) for mark, rev in self.rev_marks.iteritems())
+ self.version = 2
def dict(self):
- return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark }
+ return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark, 'version' : self.version }
def store(self):
json.dump(self.dict(), open(self.path, 'w'))
@@ -130,10 +159,10 @@ class Marks:
return str(self.dict())
def from_rev(self, rev):
- return self.marks[str(rev)]
+ return self.marks[rev]
def to_rev(self, mark):
- return self.rev_marks[mark]
+ return str(self.rev_marks[mark])
def next_mark(self):
self.last_mark += 1
@@ -141,19 +170,19 @@ class Marks:
def get_mark(self, rev):
self.last_mark += 1
- self.marks[str(rev)] = self.last_mark
+ self.marks[rev] = self.last_mark
return self.last_mark
def new_mark(self, rev, mark):
- self.marks[str(rev)] = mark
+ self.marks[rev] = mark
self.rev_marks[mark] = rev
self.last_mark = mark
def is_marked(self, rev):
- return str(rev) in self.marks
+ return rev in self.marks
def get_tip(self, branch):
- return self.tips.get(branch, 0)
+ return str(self.tips[branch])
def set_tip(self, branch, tip):
self.tips[branch] = tip
@@ -261,7 +290,7 @@ def get_filechanges(repo, ctx, parent):
removed = set()
# load earliest manifest first for caching reasons
- prev = repo[parent].manifest().copy()
+ prev = parent.manifest().copy()
cur = ctx.manifest()
for fn in cur:
@@ -329,6 +358,21 @@ def fixup_user(user):
return '%s <%s>' % (name, mail)
+def updatebookmarks(repo, peer):
+ remotemarks = peer.listkeys('bookmarks')
+ localmarks = repo._bookmarks
+
+ if not remotemarks:
+ return
+
+ for k, v in remotemarks.iteritems():
+ localmarks[k] = hgbin(v)
+
+ if hasattr(localmarks, 'write'):
+ localmarks.write()
+ else:
+ bookmarks.write(repo)
+
def get_repo(url, alias):
global dirname, peer
@@ -339,35 +383,41 @@ def get_repo(url, alias):
if get_config_bool('remote-hg.insecure'):
myui.setconfig('web', 'cacerts', '')
- try:
- mod = extensions.load(myui, 'hgext.schemes', None)
- mod.extsetup(myui)
- except ImportError:
- pass
+ extensions.loadall(myui)
- if hg.islocal(url):
+ if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
repo = hg.repository(myui, url)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
else:
- local_path = os.path.join(dirname, 'clone')
- if not os.path.exists(local_path):
- try:
- peer, dstpeer = hg.clone(myui, {}, url, local_path, update=True, pull=True)
- except:
- die('Repository error')
- repo = dstpeer.local()
- else:
- repo = hg.repository(myui, local_path)
+ shared_path = os.path.join(gitdir, 'hg')
+ if not os.path.exists(shared_path):
try:
- peer = hg.peer(myui, {}, url)
+ hg.clone(myui, {}, url, shared_path, update=False, pull=True)
except:
die('Repository error')
- repo.pull(peer, heads=None, force=True)
+
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+ local_path = os.path.join(dirname, 'clone')
+ if not os.path.exists(local_path):
+ hg.share(myui, shared_path, local_path, update=False)
+
+ repo = hg.repository(myui, local_path)
+ try:
+ peer = hg.peer(myui, {}, url)
+ except:
+ die('Repository error')
+ repo.pull(peer, heads=None, force=True)
+
+ updatebookmarks(repo, peer)
return repo
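
get_repo() now keeps one shared Mercurial clone per URL under
"$GIT_DIR/hg" and gives each remote alias a lightweight hg.share
checkout, instead of a full clone per remote. Per the header comment
("$GIT_DIR/hg/origin/clone/.hg/"), the resulting layout for a remote
named 'origin' can be inspected roughly like this sketch:

    # sketch; paths follow the os.path.join() calls above
    ls "$(git rev-parse --git-dir)/hg"              # shared base clone
    ls "$(git rev-parse --git-dir)/hg/origin/clone" # per-remote share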
def rev_to_mark(rev):
global marks
- return marks.from_rev(rev)
+ return marks.from_rev(rev.hex())
def mark_to_rev(mark):
global marks
@@ -377,17 +427,24 @@ def export_ref(repo, name, kind, head):
global prefix, marks, mode
ename = '%s/%s' % (kind, name)
- tip = marks.get_tip(ename)
+ try:
+ tip = marks.get_tip(ename)
+ tip = repo[tip].rev()
+ except:
+ tip = 0
revs = xrange(tip, head.rev() + 1)
- count = 0
-
- revs = [rev for rev in revs if not marks.is_marked(rev)]
+ total = len(revs)
for rev in revs:
c = repo[rev]
- (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(c.node())
+ node = c.node()
+
+ if marks.is_marked(c.hex()):
+ continue
+
+ (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(node)
rev_branch = extra['branch']
author = "%s %d %s" % (fixup_user(user), time, gittz(tz))
@@ -397,7 +454,7 @@ def export_ref(repo, name, kind, head):
else:
committer = author
- parents = [p for p in repo.changelog.parentrevs(rev) if p >= 0]
+ parents = [repo[p] for p in repo.changelog.parentrevs(rev) if p >= 0]
if len(parents) == 0:
modified = c.manifest().keys()
@@ -439,7 +496,7 @@ def export_ref(repo, name, kind, head):
modified_final = export_files(c.filectx(f) for f in modified)
print "commit %s/%s" % (prefix, ename)
- print "mark :%d" % (marks.get_mark(rev))
+ print "mark :%d" % (marks.get_mark(c.hex()))
print "author %s" % (author)
print "committer %s" % (committer)
print "data %d" % (len(desc))
@@ -450,22 +507,22 @@ def export_ref(repo, name, kind, head):
if len(parents) > 1:
print "merge :%s" % (rev_to_mark(parents[1]))
- for f in modified_final:
- print "M %s :%u %s" % f
for f in removed:
print "D %s" % (fix_file_path(f))
+ for f in modified_final:
+ print "M %s :%u %s" % f
print
- count += 1
- if (count % 100 == 0):
- print "progress revision %d '%s' (%d/%d)" % (rev, name, count, len(revs))
+ progress = (rev - tip)
+ if (progress % 100 == 0):
+ print "progress revision %d '%s' (%d/%d)" % (rev, name, progress, total)
# make sure the ref is updated
print "reset %s/%s" % (prefix, ename)
- print "from :%u" % rev_to_mark(rev)
+ print "from :%u" % rev_to_mark(head)
print
- marks.set_tip(ename, rev)
+ marks.set_tip(ename, head.hex())
def export_tag(repo, tag):
export_ref(repo, tag, 'tags', repo[hgref(tag)])
@@ -497,15 +554,12 @@ def do_capabilities(parser):
if os.path.exists(path):
print "*import-marks %s" % path
print "*export-marks %s" % path
+ print "option"
print
-def branch_tip(repo, branch):
- # older versions of mercurial don't have this
- if hasattr(repo, 'branchtip'):
- return repo.branchtip(branch)
- else:
- return repo.branchtags()[branch]
+def branch_tip(branch):
+ return branches[branch][-1]
def get_branch_tip(repo, branch):
global branches
@@ -517,27 +571,21 @@ def get_branch_tip(repo, branch):
# verify there's only one head
if (len(heads) > 1):
warn("Branch '%s' has more than one head, consider merging" % branch)
- return branch_tip(repo, hgref(branch))
+ return branch_tip(hgref(branch))
return heads[0]
def list_head(repo, cur):
- global g_head, bmarks
+ global g_head, bmarks, fake_bmark
- head = bookmarks.readcurrent(repo)
- if head:
- node = repo[head]
- else:
- # fake bookmark from current branch
- head = cur
- node = repo['.']
- if not node:
- node = repo['tip']
- if not node:
- return
- if head == 'default':
- head = 'master'
- bmarks[head] = node
+ if 'default' not in branches:
+ # empty repo
+ return
+
+ node = repo[branch_tip('default')]
+ head = 'master' if not 'master' in bmarks else 'default'
+ fake_bmark = head
+ bmarks[head] = node
head = gitref(head)
print "@refs/heads/%s HEAD" % head
@@ -551,15 +599,17 @@ def do_list(parser):
bmarks[bmark] = repo[node]
cur = repo.dirstate.branch()
+ orig = peer if peer else repo
+
+ for branch, heads in orig.branchmap().iteritems():
+ # only open heads
+ heads = [h for h in heads if 'close' not in repo.changelog.read(h)[5]]
+ if heads:
+ branches[branch] = heads
list_head(repo, cur)
if track_branches:
- for branch in repo.branchmap():
- heads = repo.branchheads(branch)
- if len(heads):
- branches[branch] = heads
-
for branch in branches:
print "? refs/heads/branches/%s" % gitref(branch)
@@ -582,6 +632,7 @@ def do_import(parser):
if os.path.exists(path):
print "feature import-marks=%s" % path
print "feature export-marks=%s" % path
+ print "feature force"
sys.stdout.flush()
tmp = encoding.encoding
@@ -671,6 +722,11 @@ def parse_commit(parser):
die('Unknown file command: %s' % line)
files[path] = f
+ # only export the commits if we are on an internal proxy repo
+ if dry_run and not peer:
+ parsed_refs[ref] = None
+ return
+
def getfilectx(repo, memctx, f):
of = files[f]
if 'deleted' in of:
@@ -692,14 +748,14 @@ def parse_commit(parser):
extra['committer'] = "%s %u %u" % committer
if from_mark:
- p1 = repo.changelog.node(mark_to_rev(from_mark))
+ p1 = mark_to_rev(from_mark)
else:
- p1 = '\0' * 20
+ p1 = '0' * 40
if merge_mark:
- p2 = repo.changelog.node(mark_to_rev(merge_mark))
+ p2 = mark_to_rev(merge_mark)
else:
- p2 = '\0' * 20
+ p2 = '0' * 40
#
# If files changed from any of the parents, hg wants to know, but in git if
@@ -735,14 +791,12 @@ def parse_commit(parser):
tmp = encoding.encoding
encoding.encoding = 'utf-8'
- node = repo.commitctx(ctx)
+ node = hghex(repo.commitctx(ctx))
encoding.encoding = tmp
- rev = repo[node].rev()
-
parsed_refs[ref] = node
- marks.new_mark(rev, commit_mark)
+ marks.new_mark(node, commit_mark)
def parse_reset(parser):
global parsed_refs
@@ -758,8 +812,11 @@ def parse_reset(parser):
from_mark = parser.get_mark()
parser.next()
- node = parser.repo.changelog.node(mark_to_rev(from_mark))
- parsed_refs[ref] = node
+ try:
+ rev = mark_to_rev(from_mark)
+ except KeyError:
+ rev = None
+ parsed_refs[ref] = rev
def parse_tag(parser):
name = parser[1]
@@ -775,7 +832,7 @@ def parse_tag(parser):
def write_tag(repo, tag, node, msg, author):
branch = repo[node].branch()
- tip = branch_tip(repo, branch)
+ tip = branch_tip(branch)
tip = repo[tip]
def getfilectx(repo, memctx, f):
@@ -784,18 +841,28 @@ def write_tag(repo, tag, node, msg, author):
data = fctx.data()
except error.ManifestLookupError:
data = ""
- content = data + "%s %s\n" % (hghex(node), tag)
+ content = data + "%s %s\n" % (node, tag)
return context.memfilectx(f, content, False, False, None)
p1 = tip.hex()
- p2 = '\0' * 20
- if not author:
- author = (None, 0, 0)
- user, date, tz = author
+ p2 = '0' * 40
+ if author:
+ user, date, tz = author
+ date_tz = (date, tz)
+ else:
+ cmd = ['git', 'var', 'GIT_COMMITTER_IDENT']
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ output, _ = process.communicate()
+ m = re.match('^.* <.*>', output)
+ if m:
+ user = m.group(0)
+ else:
+ user = repo.ui.username()
+ date_tz = None
ctx = context.memctx(repo, (p1, p2), msg,
['.hgtags'], getfilectx,
- user, (date, tz), {'branch' : branch})
+ user, date_tz, {'branch' : branch})
tmp = encoding.encoding
encoding.encoding = 'utf-8'
@@ -804,12 +871,132 @@ def write_tag(repo, tag, node, msg, author):
encoding.encoding = tmp
- return tagnode
+ return (tagnode, branch)
+
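
When no tagger is given, the new fallback asks git for the committer
identity and keeps only the 'name <email>' part via the '^.* <.*>'
match. The raw value it parses looks like this sketch (the timestamp
and zone shown are illustrative):

    # the value consumed by write_tag()'s fallback branch
    git var GIT_COMMITTER_IDENT
    # => A U Thor <author@example.com> 1167609600 +0230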
+def checkheads_bmark(repo, ref, ctx):
+ bmark = ref[len('refs/heads/'):]
+ if not bmark in bmarks:
+ # new bmark
+ return True
+
+ ctx_old = bmarks[bmark]
+ ctx_new = ctx
+ if not repo.changelog.descendant(ctx_old.rev(), ctx_new.rev()):
+ if force_push:
+ print "ok %s forced update" % ref
+ else:
+ print "error %s non-fast forward" % ref
+ return False
+
+ return True
+
+def checkheads(repo, remote, p_revs):
+
+ remotemap = remote.branchmap()
+ if not remotemap:
+ # empty repo
+ return True
+
+ new = {}
+ ret = True
+
+ for node, ref in p_revs.iteritems():
+ ctx = repo[node]
+ branch = ctx.branch()
+ if not branch in remotemap:
+ # new branch
+ continue
+ if not ref.startswith('refs/heads/branches'):
+ if ref.startswith('refs/heads/'):
+ if not checkheads_bmark(repo, ref, ctx):
+ ret = False
+
+ # only check branches
+ continue
+ new.setdefault(branch, []).append(ctx.rev())
+
+ for branch, heads in new.iteritems():
+ old = [repo.changelog.rev(x) for x in remotemap[branch]]
+ for rev in heads:
+ if check_version(2, 3):
+ ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
+ else:
+ ancestors = repo.changelog.ancestors(rev)
+ found = False
+
+ for x in old:
+ if x in ancestors:
+ found = True
+ break
+
+ if found:
+ continue
+
+ node = repo.changelog.node(rev)
+ ref = p_revs[node]
+ if force_push:
+ print "ok %s forced update" % ref
+ else:
+ print "error %s non-fast forward" % ref
+ ret = False
+
+ return ret
+
+def push_unsafe(repo, remote, parsed_refs, p_revs):
+
+ force = force_push
+
+ fci = discovery.findcommonincoming
+ commoninc = fci(repo, remote, force=force)
+ common, _, remoteheads = commoninc
+
+ if not checkheads(repo, remote, p_revs):
+ return None
+
+ cg = repo.getbundle('push', heads=list(p_revs), common=common)
+
+ unbundle = remote.capable('unbundle')
+ if unbundle:
+ if force:
+ remoteheads = ['force']
+ return remote.unbundle(cg, remoteheads, 'push')
+ else:
+ return remote.addchangegroup(cg, 'push', repo.url())
+
+def push(repo, remote, parsed_refs, p_revs):
+ if hasattr(remote, 'canpush') and not remote.canpush():
+ print "error cannot push"
+
+ if not p_revs:
+ # nothing to push
+ return
+
+ lock = None
+ unbundle = remote.capable('unbundle')
+ if not unbundle:
+ lock = remote.lock()
+ try:
+ ret = push_unsafe(repo, remote, parsed_refs, p_revs)
+ finally:
+ if lock is not None:
+ lock.release()
+
+ return ret
+
+def check_tip(ref, kind, name, heads):
+ try:
+ ename = '%s/%s' % (kind, name)
+ tip = marks.get_tip(ename)
+ except KeyError:
+ return True
+ else:
+ return tip in heads
def do_export(parser):
global parsed_refs, bmarks, peer
p_bmarks = []
+ p_revs = {}
parser.next()
@@ -827,72 +1014,114 @@ def do_export(parser):
else:
die('unhandled export command: %s' % line)
+ need_fetch = False
+
for ref, node in parsed_refs.iteritems():
+ bnode = hgbin(node) if node else None
if ref.startswith('refs/heads/branches'):
branch = ref[len('refs/heads/branches/'):]
- if branch in branches and node in branches[branch]:
+ if branch in branches and bnode in branches[branch]:
# up to date
continue
+
+ if peer:
+ remotemap = peer.branchmap()
+ if remotemap and branch in remotemap:
+ heads = [hghex(e) for e in remotemap[branch]]
+ if not check_tip(ref, 'branches', branch, heads):
+ print "error %s fetch first" % ref
+ need_fetch = True
+ continue
+
+ p_revs[bnode] = ref
print "ok %s" % ref
elif ref.startswith('refs/heads/'):
bmark = ref[len('refs/heads/'):]
- p_bmarks.append((bmark, node))
- continue
+ new = node
+ old = bmarks[bmark].hex() if bmark in bmarks else ''
+
+ if old == new:
+ continue
+
+ print "ok %s" % ref
+ if bmark != fake_bmark and \
+ not (bmark == 'master' and bmark not in parser.repo._bookmarks):
+ p_bmarks.append((ref, bmark, old, new))
+
+ if peer:
+ remote_old = peer.listkeys('bookmarks').get(bmark)
+ if remote_old:
+ if not check_tip(ref, 'bookmarks', bmark, remote_old):
+ print "error %s fetch first" % ref
+ need_fetch = True
+ continue
+
+ p_revs[bnode] = ref
elif ref.startswith('refs/tags/'):
+ if dry_run:
+ print "ok %s" % ref
+ continue
tag = ref[len('refs/tags/'):]
tag = hgref(tag)
author, msg = parsed_tags.get(tag, (None, None))
if mode == 'git':
if not msg:
- msg = 'Added tag %s for changeset %s' % (tag, hghex(node[:6]));
- write_tag(parser.repo, tag, node, msg, author)
+ msg = 'Added tag %s for changeset %s' % (tag, node[:12]);
+ tagnode, branch = write_tag(parser.repo, tag, node, msg, author)
+ p_revs[tagnode] = 'refs/heads/branches/' + gitref(branch)
else:
fp = parser.repo.opener('localtags', 'a')
- fp.write('%s %s\n' % (hghex(node), tag))
+ fp.write('%s %s\n' % (node, tag))
fp.close()
+ p_revs[bnode] = ref
print "ok %s" % ref
else:
# transport-helper/fast-export bugs
continue
- if peer:
- parser.repo.push(peer, force=force_push, newbranch=True)
-
- # handle bookmarks
- for bmark, node in p_bmarks:
- ref = 'refs/heads/' + bmark
- new = hghex(node)
-
- if bmark in bmarks:
- old = bmarks[bmark].hex()
- else:
- old = ''
+ if need_fetch:
+ print
+ return
- if old == new:
- continue
+ if dry_run:
+ if peer and not force_push:
+ checkheads(parser.repo, peer, p_revs)
+ print
+ return
- if bmark == 'master' and 'master' not in parser.repo._bookmarks:
- # fake bookmark
- print "ok %s" % ref
- continue
- elif bookmarks.pushbookmark(parser.repo, bmark, old, new):
- # updated locally
- pass
- else:
- print "error %s" % ref
- continue
+ if peer:
+ if not push(parser.repo, peer, parsed_refs, p_revs):
+ # do not update bookmarks
+ print
+ return
- if peer:
- rb = peer.listkeys('bookmarks')
- old = rb.get(bmark, '')
+ # update remote bookmarks
+ remote_bmarks = peer.listkeys('bookmarks')
+ for ref, bmark, old, new in p_bmarks:
+ if force_push:
+ old = remote_bmarks.get(bmark, '')
if not peer.pushkey('bookmarks', bmark, old, new):
print "error %s" % ref
- continue
-
- print "ok %s" % ref
+ else:
+ # update local bookmarks
+ for ref, bmark, old, new in p_bmarks:
+ if not bookmarks.pushbookmark(parser.repo, bmark, old, new):
+ print "error %s" % ref
print
+def do_option(parser):
+ global dry_run, force_push
+ _, key, value = parser.line.split(' ')
+ if key == 'dry-run':
+ dry_run = (value == 'true')
+ print 'ok'
+ elif key == 'force':
+ force_push = (value == 'true')
+ print 'ok'
+ else:
+ print 'unsupported'
+
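
do_option() implements the 'option' capability advertised in
do_capabilities() above. With dry-run, do_export() now stops after
validating refs instead of transferring changesets, so the round trip
can be sketched as:

    # nothing is pushed; the helper only reports what would happen
    git push --dry-run origin master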
def fix_path(alias, repo, orig_url):
url = urlparse.urlparse(orig_url, 'file')
if url.scheme != 'file' or os.path.isabs(url.path):
@@ -902,12 +1131,14 @@ def fix_path(alias, repo, orig_url):
subprocess.call(cmd)
def main(args):
- global prefix, dirname, branches, bmarks
+ global prefix, gitdir, dirname, branches, bmarks
global marks, blob_marks, parsed_refs
global peer, mode, bad_mail, bad_name
global track_branches, force_push, is_tmp
global parsed_tags
global filenodes
+ global fake_bmark, hg_version
+ global dry_run
alias = args[1]
url = args[2]
@@ -915,7 +1146,7 @@ def main(args):
hg_git_compat = get_config_bool('remote-hg.hg-git-compat')
track_branches = get_config_bool('remote-hg.track-branches', True)
- force_push = get_config_bool('remote-hg.force-push')
+ force_push = False
if hg_git_compat:
mode = 'hg'
@@ -941,6 +1172,12 @@ def main(args):
marks = None
parsed_tags = {}
filenodes = {}
+ fake_bmark = None
+ try:
+ hg_version = tuple(int(e) for e in util.version().split('.'))
+ except:
+ hg_version = None
+ dry_run = False
repo = get_repo(url, alias)
prefix = 'refs/hg/%s' % alias
@@ -948,11 +1185,8 @@ def main(args):
if not is_tmp:
fix_path(alias, peer or repo, url)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
-
marks_path = os.path.join(dirname, 'marks-hg')
- marks = Marks(marks_path)
+ marks = Marks(marks_path, repo)
if sys.platform == 'win32':
import msvcrt
@@ -968,6 +1202,8 @@ def main(args):
do_import(parser)
elif parser.check('export'):
do_export(parser)
+ elif parser.check('option'):
+ do_option(parser)
else:
die('unhandled command: %s' % line)
sys.stdout.flush()
diff --git a/contrib/remote-helpers/test-bzr.sh b/contrib/remote-helpers/test-bzr.sh
index 5dfa070b64..dce281f911 100755
--- a/contrib/remote-helpers/test-bzr.sh
+++ b/contrib/remote-helpers/test-bzr.sh
@@ -12,86 +12,90 @@ if ! test_have_prereq PYTHON; then
test_done
fi
-if ! "$PYTHON_PATH" -c 'import bzrlib'; then
+if ! python -c 'import bzrlib'; then
skip_all='skipping remote-bzr tests; bzr not available'
test_done
fi
check () {
- (cd $1 &&
- git log --format='%s' -1 &&
- git symbolic-ref HEAD) > actual &&
- (echo $2 &&
- echo "refs/heads/$3") > expected &&
+ echo $3 > expected &&
+	git --git-dir=$1/.git log --format='%s' -1 $2 > actual &&
test_cmp expected actual
}
bzr whoami "A U Thor <author@example.com>"
test_expect_success 'cloning' '
- (bzr init bzrrepo &&
- cd bzrrepo &&
- echo one > content &&
- bzr add content &&
- bzr commit -m one
- ) &&
-
- git clone "bzr::$PWD/bzrrepo" gitrepo &&
- check gitrepo one master
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+ echo one > content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ git clone "bzr::bzrrepo" gitrepo &&
+ check gitrepo HEAD one
'
test_expect_success 'pulling' '
- (cd bzrrepo &&
- echo two > content &&
- bzr commit -m two
- ) &&
+ (
+ cd bzrrepo &&
+ echo two > content &&
+ bzr commit -m two
+ ) &&
- (cd gitrepo && git pull) &&
+ (cd gitrepo && git pull) &&
- check gitrepo two master
+ check gitrepo HEAD two
'
test_expect_success 'pushing' '
- (cd gitrepo &&
- echo three > content &&
- git commit -a -m three &&
- git push
- ) &&
-
- echo three > expected &&
- cat bzrrepo/content > actual &&
- test_cmp expected actual
+ (
+ cd gitrepo &&
+ echo three > content &&
+ git commit -a -m three &&
+ git push
+ ) &&
+
+ echo three > expected &&
+ cat bzrrepo/content > actual &&
+ test_cmp expected actual
'
test_expect_success 'roundtrip' '
- (cd gitrepo &&
- git pull &&
- git log --format="%s" -1 origin/master > actual) &&
- echo three > expected &&
- test_cmp expected actual &&
+ (
+ cd gitrepo &&
+ git pull &&
+ git log --format="%s" -1 origin/master > actual
+ ) &&
+ echo three > expected &&
+ test_cmp expected actual &&
- (cd gitrepo && git push && git pull) &&
+ (cd gitrepo && git push && git pull) &&
- (cd bzrrepo &&
- echo four > content &&
- bzr commit -m four
- ) &&
+ (
+ cd bzrrepo &&
+ echo four > content &&
+ bzr commit -m four
+ ) &&
- (cd gitrepo && git pull && git push) &&
+ (cd gitrepo && git pull && git push) &&
- check gitrepo four master &&
+ check gitrepo HEAD four &&
- (cd gitrepo &&
- echo five > content &&
- git commit -a -m five &&
- git push && git pull
- ) &&
+ (
+ cd gitrepo &&
+ echo five > content &&
+ git commit -a -m five &&
+ git push && git pull
+ ) &&
- (cd bzrrepo && bzr revert) &&
+ (cd bzrrepo && bzr revert) &&
- echo five > expected &&
- cat bzrrepo/content > actual &&
- test_cmp expected actual
+ echo five > expected &&
+ cat bzrrepo/content > actual &&
+ test_cmp expected actual
'
cat > expected <<EOF
@@ -101,29 +105,35 @@ cat > expected <<EOF
EOF
test_expect_success 'special modes' '
- (cd bzrrepo &&
- echo exec > executable
- chmod +x executable &&
- bzr add executable
- bzr commit -m exec &&
- ln -s content link
- bzr add link
- bzr commit -m link &&
- mkdir dir &&
- bzr add dir &&
- bzr commit -m dir) &&
-
- (cd gitrepo &&
- git pull
- git ls-tree HEAD > ../actual) &&
-
- test_cmp expected actual &&
-
- (cd gitrepo &&
- git cat-file -p HEAD:link > ../actual) &&
-
- printf content > expected &&
- test_cmp expected actual
+ (
+ cd bzrrepo &&
+ echo exec > executable
+ chmod +x executable &&
+ bzr add executable
+ bzr commit -m exec &&
+ ln -s content link
+ bzr add link
+ bzr commit -m link &&
+ mkdir dir &&
+ bzr add dir &&
+ bzr commit -m dir
+ ) &&
+
+ (
+ cd gitrepo &&
+ git pull
+ git ls-tree HEAD > ../actual
+ ) &&
+
+ test_cmp expected actual &&
+
+ (
+ cd gitrepo &&
+ git cat-file -p HEAD:link > ../actual
+ ) &&
+
+ printf content > expected &&
+ test_cmp expected actual
'
cat > expected <<EOF
@@ -134,134 +144,145 @@ cat > expected <<EOF
EOF
test_expect_success 'moving directory' '
- (cd bzrrepo &&
- mkdir movedir &&
- echo one > movedir/one &&
- echo two > movedir/two &&
- bzr add movedir &&
- bzr commit -m movedir &&
- bzr mv movedir movedir-new &&
- bzr commit -m movedir-new) &&
-
- (cd gitrepo &&
- git pull &&
- git ls-tree HEAD > ../actual) &&
-
- test_cmp expected actual
+ (
+ cd bzrrepo &&
+ mkdir movedir &&
+ echo one > movedir/one &&
+ echo two > movedir/two &&
+ bzr add movedir &&
+ bzr commit -m movedir &&
+ bzr mv movedir movedir-new &&
+ bzr commit -m movedir-new
+ ) &&
+
+ (
+ cd gitrepo &&
+ git pull &&
+ git ls-tree HEAD > ../actual
+ ) &&
+
+ test_cmp expected actual
'
test_expect_success 'different authors' '
- (cd bzrrepo &&
- echo john >> content &&
- bzr commit -m john \
- --author "Jane Rey <jrey@example.com>" \
- --author "John Doe <jdoe@example.com>") &&
-
- (cd gitrepo &&
- git pull &&
- git show --format="%an <%ae>, %cn <%ce>" --quiet > ../actual) &&
-
- echo "Jane Rey <jrey@example.com>, A U Thor <author@example.com>" > expected &&
- test_cmp expected actual
+ (
+ cd bzrrepo &&
+ echo john >> content &&
+ bzr commit -m john \
+ --author "Jane Rey <jrey@example.com>" \
+ --author "John Doe <jdoe@example.com>"
+ ) &&
+
+ (
+ cd gitrepo &&
+ git pull &&
+ git show --format="%an <%ae>, %cn <%ce>" --quiet > ../actual
+ ) &&
+
+ echo "Jane Rey <jrey@example.com>, A U Thor <author@example.com>" > expected &&
+ test_cmp expected actual
'
+# cleanup previous stuff
+rm -rf bzrrepo gitrepo
+
test_expect_success 'fetch utf-8 filenames' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp && LC_ALL=C" &&
-
- LC_ALL=en_US.UTF-8
- export LC_ALL
- (
- bzr init bzrrepo &&
- cd bzrrepo &&
-
- echo test >> "ærø" &&
- bzr add "ærø" &&
- echo test >> "ø~?" &&
- bzr add "ø~?" &&
- bzr commit -m add-utf-8 &&
- echo test >> "ærø" &&
- bzr commit -m test-utf-8 &&
- bzr rm "ø~?" &&
- bzr mv "ærø" "ø~?" &&
- bzr commit -m bzr-mv-utf-8
- ) &&
-
- (
- git clone "bzr::$PWD/bzrrepo" gitrepo &&
- cd gitrepo &&
- git -c core.quotepath=false ls-files > ../actual
- ) &&
- echo "ø~?" > expected &&
- test_cmp expected actual
+ test_when_finished "rm -rf bzrrepo gitrepo && LC_ALL=C" &&
+
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+
+ echo test >> "ærø" &&
+ bzr add "ærø" &&
+ echo test >> "ø~?" &&
+ bzr add "ø~?" &&
+ bzr commit -m add-utf-8 &&
+ echo test >> "ærø" &&
+ bzr commit -m test-utf-8 &&
+ bzr rm "ø~?" &&
+ bzr mv "ærø" "ø~?" &&
+ bzr commit -m bzr-mv-utf-8
+ ) &&
+
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
+ git -c core.quotepath=false ls-files > ../actual
+ ) &&
+ echo "ø~?" > expected &&
+ test_cmp expected actual
'
test_expect_success 'push utf-8 filenames' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp && LC_ALL=C" &&
+ test_when_finished "rm -rf bzrrepo gitrepo && LC_ALL=C" &&
+
+ mkdir -p tmp && cd tmp &&
- LC_ALL=en_US.UTF-8
- export LC_ALL
+ LC_ALL=en_US.UTF-8
+ export LC_ALL
- (
- bzr init bzrrepo &&
- cd bzrrepo &&
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
- echo one >> content &&
- bzr add content &&
- bzr commit -m one
- ) &&
+ echo one >> content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
- (
- git clone "bzr::$PWD/bzrrepo" gitrepo &&
- cd gitrepo &&
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
- echo test >> "ærø" &&
- git add "ærø" &&
- git commit -m utf-8 &&
+ echo test >> "ærø" &&
+ git add "ærø" &&
+ git commit -m utf-8 &&
- git push
- ) &&
+ git push
+ ) &&
- (cd bzrrepo && bzr ls > ../actual) &&
- printf "content\nærø\n" > expected &&
- test_cmp expected actual
+ (cd bzrrepo && bzr ls > ../actual) &&
+ printf "content\nærø\n" > expected &&
+ test_cmp expected actual
'
test_expect_success 'pushing a merge' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
-
- (
- bzr init bzrrepo &&
- cd bzrrepo &&
- echo one > content &&
- bzr add content &&
- bzr commit -m one
- ) &&
-
- git clone "bzr::$PWD/bzrrepo" gitrepo &&
-
- (
- cd bzrrepo &&
- echo two > content &&
- bzr commit -m two
- ) &&
-
- (
- cd gitrepo &&
- echo three > content &&
- git commit -a -m three &&
- git fetch &&
- git merge origin/master || true &&
- echo three > content &&
- git commit -a --no-edit &&
- git push
- ) &&
-
- echo three > expected &&
- cat bzrrepo/content > actual &&
- test_cmp expected actual
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
+
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
+ echo one > content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ git clone "bzr::bzrrepo" gitrepo &&
+
+ (
+ cd bzrrepo &&
+ echo two > content &&
+ bzr commit -m two
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo three > content &&
+ git commit -a -m three &&
+ git fetch &&
+ git merge origin/master || true &&
+ echo three > content &&
+ git commit -a --no-edit &&
+ git push
+ ) &&
+
+ echo three > expected &&
+ cat bzrrepo/content > actual &&
+ test_cmp expected actual
'
cat > expected <<EOF
@@ -271,71 +292,70 @@ origin/trunk
EOF
test_expect_success 'proper bzr repo' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
-
- bzr init-repo bzrrepo &&
-
- bzr init bzrrepo/trunk &&
- (
- cd bzrrepo/trunk &&
- echo one >> content &&
- bzr add content &&
- bzr commit -m one
- ) &&
-
- bzr branch bzrrepo/trunk bzrrepo/branch &&
- (
- cd bzrrepo/branch &&
- echo two >> content &&
- bzr commit -m one
- ) &&
-
- git clone "bzr::$PWD/bzrrepo" gitrepo &&
- (
- cd gitrepo &&
- git for-each-ref --format "%(refname:short)" refs/remotes/origin > ../actual
- ) &&
-
- test_cmp ../expected actual
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
+
+ bzr init-repo bzrrepo &&
+
+ (
+ bzr init bzrrepo/trunk &&
+ cd bzrrepo/trunk &&
+ echo one >> content &&
+ bzr add content &&
+ bzr commit -m one
+ ) &&
+
+ (
+ bzr branch bzrrepo/trunk bzrrepo/branch &&
+ cd bzrrepo/branch &&
+ echo two >> content &&
+ bzr commit -m one
+ ) &&
+
+ (
+ git clone "bzr::bzrrepo" gitrepo &&
+ cd gitrepo &&
+ git for-each-ref --format "%(refname:short)" refs/remotes/origin > ../actual
+ ) &&
+
+ test_cmp expected actual
'
test_expect_success 'strip' '
- # Do not imitate this style; always chdir inside a subshell instead
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf bzrrepo gitrepo" &&
- (
- bzr init bzrrepo &&
- cd bzrrepo &&
+ (
+ bzr init bzrrepo &&
+ cd bzrrepo &&
- echo one >> content &&
- bzr add content &&
- bzr commit -m one &&
+ echo one >> content &&
+ bzr add content &&
+ bzr commit -m one &&
- echo two >> content &&
- bzr commit -m two
- ) &&
+ echo two >> content &&
+ bzr commit -m two
+ ) &&
- git clone "bzr::$PWD/bzrrepo" gitrepo &&
+ git clone "bzr::bzrrepo" gitrepo &&
- (
- cd bzrrepo &&
- bzr uncommit --force &&
+ (
+ cd bzrrepo &&
+ bzr uncommit --force &&
- echo three >> content &&
- bzr commit -m three &&
+ echo three >> content &&
+ bzr commit -m three &&
- echo four >> content &&
- bzr commit -m four &&
- bzr log --line | sed -e "s/^[0-9][0-9]*: //" > ../expected
- ) &&
+ echo four >> content &&
+ bzr commit -m four &&
+ bzr log --line | sed -e "s/^[0-9][0-9]*: //" > ../expected
+ ) &&
- (cd gitrepo &&
- git fetch &&
- git log --format="%an %ad %s" --date=short origin/master > ../actual) &&
+ (
+ cd gitrepo &&
+ git fetch &&
+ git log --format="%an %ad %s" --date=short origin/master > ../actual
+ ) &&
- test_cmp expected actual
+ test_cmp expected actual
'
test_done
diff --git a/contrib/remote-helpers/test-hg-bidi.sh b/contrib/remote-helpers/test-hg-bidi.sh
index f569697734..f83d67d74f 100755
--- a/contrib/remote-helpers/test-hg-bidi.sh
+++ b/contrib/remote-helpers/test-hg-bidi.sh
@@ -15,23 +15,22 @@ if ! test_have_prereq PYTHON; then
test_done
fi
-if ! "$PYTHON_PATH" -c 'import mercurial'; then
+if ! python -c 'import mercurial'; then
skip_all='skipping remote-hg tests; mercurial not available'
test_done
fi
# clone to a git repo
git_clone () {
- git clone -q "hg::$PWD/$1" $2
+ git clone -q "hg::$1" $2
}
# clone to an hg repo
hg_clone () {
(
hg init $2 &&
- hg -R $2 bookmark -i master &&
cd $1 &&
- git push -q "hg::$PWD/../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
+ git push -q "hg::../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
) &&
(cd $2 && hg -q update)
@@ -41,17 +40,15 @@ hg_clone () {
hg_push () {
(
cd $2
- old=$(git symbolic-ref --short HEAD)
git checkout -q -b tmp &&
- git fetch -q "hg::$PWD/../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
- git checkout -q $old &&
+ git fetch -q "hg::../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
+ git checkout -q @{-1} &&
git branch -q -D tmp 2> /dev/null || true
)
}
hg_log () {
- hg -R $1 log --graph --debug >log &&
- grep -v 'tag: *default/' log
+ hg -R $1 log --graph --debug
}
setup () {
@@ -67,6 +64,7 @@ setup () {
echo "graphlog ="
) >> "$HOME"/.hgrc &&
git config --global remote-hg.hg-git-compat true
+ git config --global remote-hg.track-branches true
HGEDITOR=/usr/bin/true
GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230"
@@ -77,8 +75,7 @@ setup () {
setup
test_expect_success 'encoding' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -115,8 +112,7 @@ test_expect_success 'encoding' '
'
test_expect_success 'file removal' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -148,8 +144,7 @@ test_expect_success 'file removal' '
'
test_expect_success 'git tags' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -177,8 +172,7 @@ test_expect_success 'git tags' '
'
test_expect_success 'hg branch' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -194,7 +188,7 @@ test_expect_success 'hg branch' '
hg_clone gitrepo hgrepo &&
cd hgrepo &&
- hg -q co master &&
+ hg -q co default &&
hg mv alpha beta &&
hg -q commit -m "rename alpha to beta" &&
hg branch gamma | grep -v "permanent and global" &&
@@ -214,8 +208,7 @@ test_expect_success 'hg branch' '
'
test_expect_success 'hg tags' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -231,7 +224,7 @@ test_expect_success 'hg tags' '
hg_clone gitrepo hgrepo &&
cd hgrepo &&
- hg co master &&
+ hg co default &&
hg tag alpha
) &&
diff --git a/contrib/remote-helpers/test-hg-hg-git.sh b/contrib/remote-helpers/test-hg-hg-git.sh
index 7f579c8436..2219284382 100755
--- a/contrib/remote-helpers/test-hg-hg-git.sh
+++ b/contrib/remote-helpers/test-hg-hg-git.sh
@@ -15,19 +15,20 @@ if ! test_have_prereq PYTHON; then
test_done
fi
-if ! "$PYTHON_PATH" -c 'import mercurial'; then
+if ! python -c 'import mercurial'; then
skip_all='skipping remote-hg tests; mercurial not available'
test_done
fi
-if ! "$PYTHON_PATH" -c 'import hggit'; then
+if ! python -c 'import hggit'; then
skip_all='skipping remote-hg tests; hg-git not available'
test_done
fi
# clone to a git repo with git
git_clone_git () {
- git clone -q "hg::$PWD/$1" $2
+ git clone -q "hg::$1" $2 &&
+ (cd $2 && git checkout master && git branch -D default)
}
# clone to an hg repo with git
@@ -36,7 +37,7 @@ hg_clone_git () {
hg init $2 &&
hg -R $2 bookmark -i master &&
cd $1 &&
- git push -q "hg::$PWD/../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
+ git push -q "hg::../$2" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*'
) &&
(cd $2 && hg -q update)
@@ -61,10 +62,10 @@ hg_clone_hg () {
hg_push_git () {
(
cd $2
- old=$(git symbolic-ref --short HEAD)
git checkout -q -b tmp &&
- git fetch -q "hg::$PWD/../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
- git checkout -q $old &&
+ git fetch -q "hg::../$1" 'refs/tags/*:refs/tags/*' 'refs/heads/*:refs/heads/*' &&
+ git branch -D default &&
+ git checkout -q @{-1} &&
git branch -q -D tmp 2> /dev/null || true
)
}
@@ -104,18 +105,18 @@ setup () {
git config --global remote-hg.hg-git-compat true
git config --global remote-hg.track-branches false
- HGEDITOR=/usr/bin/true
+ HGEDITOR=true
+ HGMERGE=true
GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230"
GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"
- export HGEDITOR GIT_AUTHOR_DATE GIT_COMMITTER_DATE
+ export HGEDITOR HGMERGE GIT_AUTHOR_DATE GIT_COMMITTER_DATE
}
setup
test_expect_success 'executable bit' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -150,8 +151,7 @@ test_expect_success 'executable bit' '
'
test_expect_success 'symlink' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -181,8 +181,7 @@ test_expect_success 'symlink' '
'
test_expect_success 'merge conflict 1' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
hg init hgrepo1 &&
@@ -198,7 +197,7 @@ test_expect_success 'merge conflict 1' '
echo C > afile &&
hg ci -m "A->C" &&
- hg merge -r1 || true &&
+ hg merge -r1 &&
echo C > afile &&
hg resolve -m afile &&
hg ci -m "merge to C"
@@ -216,8 +215,7 @@ test_expect_success 'merge conflict 1' '
'
test_expect_success 'merge conflict 2' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
hg init hgrepo1 &&
@@ -251,8 +249,7 @@ test_expect_success 'merge conflict 2' '
'
test_expect_success 'converged merge' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
hg init hgrepo1 &&
@@ -287,8 +284,7 @@ test_expect_success 'converged merge' '
'
test_expect_success 'encoding' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -327,8 +323,7 @@ test_expect_success 'encoding' '
'
test_expect_success 'file removal' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -367,8 +362,7 @@ test_expect_success 'file removal' '
'
test_expect_success 'git tags' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
(
git init -q gitrepo &&
@@ -394,8 +388,7 @@ test_expect_success 'git tags' '
'
test_expect_success 'hg author' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
for x in hg git; do
(
@@ -461,8 +454,7 @@ test_expect_success 'hg author' '
'
test_expect_success 'hg branch' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
for x in hg git; do
(
@@ -498,8 +490,7 @@ test_expect_success 'hg branch' '
'
test_expect_success 'hg tags' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
+ test_when_finished "rm -rf gitrepo* hgrepo*" &&
for x in hg git; do
(
diff --git a/contrib/remote-helpers/test-hg.sh b/contrib/remote-helpers/test-hg.sh
index 8de2aa7fec..f7ce8aa853 100755
--- a/contrib/remote-helpers/test-hg.sh
+++ b/contrib/remote-helpers/test-hg.sh
@@ -15,143 +15,678 @@ if ! test_have_prereq PYTHON; then
test_done
fi
-if ! "$PYTHON_PATH" -c 'import mercurial'; then
+if ! python -c 'import mercurial'; then
skip_all='skipping remote-hg tests; mercurial not available'
test_done
fi
check () {
- (cd $1 &&
- git log --format='%s' -1 &&
- git symbolic-ref HEAD) > actual &&
- (echo $2 &&
- echo "refs/heads/$3") > expected &&
+ echo $3 > expected &&
+	git --git-dir=$1/.git log --format='%s' -1 $2 > actual &&
test_cmp expected actual
}
+check_branch () {
+ if [ -n "$3" ]; then
+ echo $3 > expected &&
+ hg -R $1 log -r $2 --template '{desc}\n' > actual &&
+ test_cmp expected actual
+ else
+ hg -R $1 branches > out &&
+ ! grep $2 out
+ fi
+}
+
+check_bookmark () {
+ if [ -n "$3" ]; then
+ echo $3 > expected &&
+ hg -R $1 log -r "bookmark('$2')" --template '{desc}\n' > actual &&
+ test_cmp expected actual
+ else
+ hg -R $1 bookmarks > out &&
+ ! grep $2 out
+ fi
+}
+
+check_push () {
+ local expected_ret=$1 ret=0 ref_ret=0 IFS=':'
+
+ shift
+ git push origin "$@" 2> error
+ ret=$?
+ cat error
+
+ while read branch kind
+ do
+ case "$kind" in
+ 'new')
+ grep "^ \* \[new branch\] *${branch} -> ${branch}$" error || ref_ret=1
+ ;;
+ 'non-fast-forward')
+ grep "^ ! \[rejected\] *${branch} -> ${branch} (non-fast-forward)$" error || ref_ret=1
+ ;;
+ 'fetch-first')
+ grep "^ ! \[rejected\] *${branch} -> ${branch} (fetch first)$" error || ref_ret=1
+ ;;
+ 'forced-update')
+ grep "^ + [a-f0-9]*\.\.\.[a-f0-9]* *${branch} -> ${branch} (forced update)$" error || ref_ret=1
+ ;;
+ '')
+ grep "^ [a-f0-9]*\.\.[a-f0-9]* *${branch} -> ${branch}$" error || ref_ret=1
+ ;;
+ esac
+ let 'ref_ret' && echo "match for '$branch' failed" && break
+ done
+
+ if let 'expected_ret != ret || ref_ret'
+ then
+ return 1
+ fi
+
+ return 0
+}
+
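
check_push reads expected "branch:kind" pairs from stdin, runs the
push, and greps its output for the matching status line ('new',
'non-fast-forward', 'fetch-first', 'forced-update', or a plain update
when the kind is empty), also comparing the push's exit status against
the first argument. A minimal usage sketch, mirroring the tests below:

    check_push 1 --all <<-EOF
    master:non-fast-forward
    EOF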
setup () {
(
echo "[ui]"
echo "username = H G Wells <wells@example.com>"
- ) >> "$HOME"/.hgrc
+ echo "[extensions]"
+ echo "mq ="
+ ) >> "$HOME"/.hgrc &&
+
+ GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0230" &&
+ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE" &&
+ export GIT_COMMITTER_DATE GIT_AUTHOR_DATE
}
setup
test_expect_success 'cloning' '
- test_when_finished "rm -rf gitrepo*" &&
-
- (
- hg init hgrepo &&
- cd hgrepo &&
- echo zero > content &&
- hg add content &&
- hg commit -m zero
- ) &&
+ test_when_finished "rm -rf gitrepo*" &&
- git clone "hg::$PWD/hgrepo" gitrepo &&
- check gitrepo zero master
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo HEAD zero
'
test_expect_success 'cloning with branches' '
- test_when_finished "rm -rf gitrepo*" &&
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg branch next &&
+ echo next > content &&
+ hg commit -m next
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo origin/branches/next next
+'
+
+test_expect_success 'cloning with bookmarks' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg checkout default &&
+ hg bookmark feature-a &&
+ echo feature-a > content &&
+ hg commit -m feature-a
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo origin/feature-a feature-a
+'
+
+test_expect_success 'update bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
- (
- cd hgrepo &&
- hg branch next &&
- echo next > content &&
- hg commit -m next
- ) &&
+ (
+ cd hgrepo &&
+ hg bookmark devel
+ ) &&
- git clone "hg::$PWD/hgrepo" gitrepo &&
- check gitrepo next next &&
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet devel &&
+ echo devel > content &&
+ git commit -a -m devel &&
+ git push --quiet
+ ) &&
+
+ check_bookmark hgrepo devel devel
+'
- (cd hgrepo && hg checkout default) &&
+test_expect_success 'new bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
- git clone "hg::$PWD/hgrepo" gitrepo2 &&
- check gitrepo2 zero master
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-b &&
+ echo feature-b > content &&
+ git commit -a -m feature-b &&
+ git push --quiet origin feature-b
+ ) &&
+
+ check_bookmark hgrepo feature-b feature-b
'
-test_expect_success 'cloning with bookmarks' '
- test_when_finished "rm -rf gitrepo*" &&
+# cleanup previous stuff
+rm -rf hgrepo
+
+author_test () {
+ echo $1 >> content &&
+ hg commit -u "$2" -m "add $1" &&
+ echo "$3" >> ../expected
+}
- (
- cd hgrepo &&
- hg bookmark feature-a &&
- echo feature-a > content &&
- hg commit -m feature-a
- ) &&
+test_expect_success 'authors' '
+ test_when_finished "rm -rf hgrepo gitrepo" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+
+ touch content &&
+ hg add content &&
+
+ > ../expected &&
+ author_test alpha "" "H G Wells <wells@example.com>" &&
+ author_test beta "test" "test <unknown>" &&
+ author_test beta "test <test@example.com> (comment)" "test <test@example.com>" &&
+ author_test gamma "<test@example.com>" "Unknown <test@example.com>" &&
+ author_test delta "name<test@example.com>" "name <test@example.com>" &&
+ author_test epsilon "name <test@example.com" "name <test@example.com>" &&
+ author_test zeta " test " "test <unknown>" &&
+ author_test eta "test < test@example.com >" "test <test@example.com>" &&
+ author_test theta "test >test@example.com>" "test <test@example.com>" &&
+ author_test iota "test < test <at> example <dot> com>" "test <unknown>" &&
+ author_test kappa "test@example.com" "Unknown <test@example.com>"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ git --git-dir=gitrepo/.git log --reverse --format="%an <%ae>" > actual &&
- git clone "hg::$PWD/hgrepo" gitrepo &&
- check gitrepo feature-a feature-a
+ test_cmp expected actual
'
-test_expect_success 'cloning with detached head' '
- test_when_finished "rm -rf gitrepo*" &&
+test_expect_success 'strip' '
+ test_when_finished "rm -rf hgrepo gitrepo" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+
+ echo one >> content &&
+ hg add content &&
+ hg commit -m one &&
+
+ echo two >> content &&
+ hg commit -m two
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg strip 1 &&
- (
- cd hgrepo &&
- hg update -r 0
- ) &&
+ echo three >> content &&
+ hg commit -m three &&
- git clone "hg::$PWD/hgrepo" gitrepo &&
- check gitrepo zero master
+ echo four >> content &&
+ hg commit -m four
+ ) &&
+
+ (
+ cd gitrepo &&
+ git fetch &&
+ git log --format="%s" origin/master > ../actual
+ ) &&
+
+ hg -R hgrepo log --template "{desc}\n" > expected &&
+ test_cmp actual expected
'
-test_expect_success 'update bookmark' '
- test_when_finished "rm -rf gitrepo*" &&
+test_expect_success 'remote push with master bookmark' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
- (
- cd hgrepo &&
- hg bookmark devel
- ) &&
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark master &&
+ echo one > content &&
+ hg commit -m one
+ ) &&
- (
- git clone "hg::$PWD/hgrepo" gitrepo &&
- cd gitrepo &&
- git checkout devel &&
- echo devel > content &&
- git commit -a -m devel &&
- git push
- ) &&
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ echo two > content &&
+ git commit -a -m two &&
+ git push
+ ) &&
+
+ check_branch hgrepo default two
+'
- hg -R hgrepo bookmarks | egrep "devel[ ]+3:"
+cat > expected <<EOF
+changeset: 0:6e2126489d3d
+tag: tip
+user: A U Thor <author@example.com>
+date: Mon Jan 01 00:00:00 2007 +0230
+summary: one
+
+EOF
+
+test_expect_success 'remote push from master branch' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ hg init hgrepo &&
+
+ (
+ git init gitrepo &&
+ cd gitrepo &&
+ git remote add origin "hg::../hgrepo" &&
+ echo one > content &&
+ git add content &&
+ git commit -a -m one &&
+ git push origin master
+ ) &&
+
+ hg -R hgrepo log > actual &&
+ cat actual &&
+ test_cmp expected actual &&
+
+ check_branch hgrepo default one
'
-author_test () {
- echo $1 >> content &&
- hg commit -u "$2" -m "add $1" &&
- echo "$3" >> ../expected
+GIT_REMOTE_HG_TEST_REMOTE=1
+export GIT_REMOTE_HG_TEST_REMOTE
+
+test_expect_success 'remote cloning' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+ check gitrepo HEAD zero
+'
+
+test_expect_success 'remote update bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg bookmark devel
+ ) &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet devel &&
+ echo devel > content &&
+ git commit -a -m devel &&
+ git push --quiet
+ ) &&
+
+ check_bookmark hgrepo devel devel
+'
+
+test_expect_success 'remote new bookmark' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-b &&
+ echo feature-b > content &&
+ git commit -a -m feature-b &&
+ git push --quiet origin feature-b
+ ) &&
+
+ check_bookmark hgrepo feature-b feature-b
+'
+
+test_expect_success 'remote push diverged' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg checkout default &&
+ echo bump > content &&
+ hg commit -m bump
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo diverge > content &&
+ git commit -a -m diverged &&
+ check_push 1 <<-EOF
+ master:non-fast-forward
+ EOF
+ ) &&
+
+ check_branch hgrepo default bump
+'
+
+test_expect_success 'remote update bookmark diverge' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ cd hgrepo &&
+ hg checkout tip^ &&
+ hg bookmark diverge
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ echo "bump bookmark" > content &&
+ hg commit -m "bump bookmark"
+ ) &&
+
+ (
+ cd gitrepo &&
+ git checkout --quiet diverge &&
+ echo diverge > content &&
+ git commit -a -m diverge &&
+ check_push 1 <<-EOF
+ diverge:fetch-first
+ EOF
+ ) &&
+
+ check_bookmark hgrepo diverge "bump bookmark"
+'
+
+test_expect_success 'remote new bookmark multiple branch head' '
+ test_when_finished "rm -rf gitrepo*" &&
+
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git checkout --quiet -b feature-c HEAD^ &&
+ echo feature-c > content &&
+ git commit -a -m feature-c &&
+ git push --quiet origin feature-c
+ ) &&
+
+ check_bookmark hgrepo feature-c feature-c
+'
+
+# cleanup previous stuff
+rm -rf hgrepo
+
+setup_big_push () {
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark bad_bmark1 &&
+ echo one > content &&
+ hg commit -m one &&
+ hg bookmark bad_bmark2 &&
+ hg bookmark good_bmark &&
+ hg bookmark -i good_bmark &&
+ hg -q branch good_branch &&
+ echo "good branch" > content &&
+ hg commit -m "good branch" &&
+ hg -q branch bad_branch &&
+ echo "bad branch" > content &&
+ hg commit -m "bad branch"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd gitrepo &&
+ echo two > content &&
+ git commit -q -a -m two &&
+
+ git checkout -q good_bmark &&
+ echo three > content &&
+ git commit -q -a -m three &&
+
+ git checkout -q bad_bmark1 &&
+ git reset --hard HEAD^ &&
+ echo four > content &&
+ git commit -q -a -m four &&
+
+ git checkout -q bad_bmark2 &&
+ git reset --hard HEAD^ &&
+ echo five > content &&
+ git commit -q -a -m five &&
+
+ git checkout -q -b new_bmark master &&
+ echo six > content &&
+ git commit -q -a -m six &&
+
+ git checkout -q branches/good_branch &&
+ echo seven > content &&
+ git commit -q -a -m seven &&
+ echo eight > content &&
+ git commit -q -a -m eight &&
+
+ git checkout -q branches/bad_branch &&
+ git reset --hard HEAD^ &&
+ echo nine > content &&
+ git commit -q -a -m nine &&
+
+ git checkout -q -b branches/new_branch master &&
+ echo ten > content &&
+ git commit -q -a -m ten
+ )
}
-test_expect_success 'authors' '
- mkdir -p tmp && cd tmp &&
- test_when_finished "cd .. && rm -rf tmp" &&
-
- (
- hg init hgrepo &&
- cd hgrepo &&
-
- touch content &&
- hg add content &&
-
- author_test alpha "" "H G Wells <wells@example.com>" &&
- author_test beta "test" "test <unknown>" &&
- author_test beta "test <test@example.com> (comment)" "test <test@example.com>" &&
- author_test gamma "<test@example.com>" "Unknown <test@example.com>" &&
- author_test delta "name<test@example.com>" "name <test@example.com>" &&
- author_test epsilon "name <test@example.com" "name <test@example.com>" &&
- author_test zeta " test " "test <unknown>" &&
- author_test eta "test < test@example.com >" "test <test@example.com>" &&
- author_test theta "test >test@example.com>" "test <test@example.com>" &&
- author_test iota "test < test <at> example <dot> com>" "test <unknown>" &&
- author_test kappa "test@example.com" "Unknown <test@example.com>"
- ) &&
-
- git clone "hg::$PWD/hgrepo" gitrepo &&
- git --git-dir=gitrepo/.git log --reverse --format="%an <%ae>" > actual &&
-
- test_cmp expected actual
+test_expect_success 'remote big push' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+	setup_big_push &&
+
+ (
+ cd gitrepo &&
+
+ check_push 1 --all <<-EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:non-fast-forward
+ bad_bmark2:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+ ) &&
+
+ check_branch hgrepo default one &&
+ check_branch hgrepo good_branch "good branch" &&
+ check_branch hgrepo bad_branch "bad branch" &&
+ check_branch hgrepo new_branch '' &&
+ check_bookmark hgrepo good_bmark one &&
+ check_bookmark hgrepo bad_bmark1 one &&
+ check_bookmark hgrepo bad_bmark2 one &&
+ check_bookmark hgrepo new_bmark ''
+'
+
+test_expect_success 'remote big push fetch first' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero &&
+ hg bookmark bad_bmark &&
+ hg bookmark good_bmark &&
+ hg bookmark -i good_bmark &&
+ hg -q branch good_branch &&
+ echo "good branch" > content &&
+ hg commit -m "good branch" &&
+ hg -q branch bad_branch &&
+ echo "bad branch" > content &&
+ hg commit -m "bad branch"
+ ) &&
+
+ git clone "hg::hgrepo" gitrepo &&
+
+ (
+ cd hgrepo &&
+ hg bookmark -f bad_bmark &&
+ echo update_bmark > content &&
+ hg commit -m "update bmark"
+ ) &&
+
+ (
+ cd gitrepo &&
+ echo two > content &&
+ git commit -q -a -m two &&
+
+ git checkout -q good_bmark &&
+ echo three > content &&
+ git commit -q -a -m three &&
+
+ git checkout -q bad_bmark &&
+ echo four > content &&
+ git commit -q -a -m four &&
+
+ git checkout -q branches/bad_branch &&
+ echo five > content &&
+ git commit -q -a -m five &&
+
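+	# bad_bmark and bad_branch advanced on the hg side after the clone:
+	# before fetching they are rejected with fetch-first, afterwards
+	# with plain non-fast-forward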
+ check_push 1 --all <<-EOF
+ master
+ good_bmark
+ new_bmark:new
+ new_branch:new
+ bad_bmark:fetch-first
+	branches/bad_branch:fetch-first
+ EOF
+
+ git fetch &&
+
+ check_push 1 --all <<-EOF
+ master
+ good_bmark
+ bad_bmark:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+ )
+'
+
+test_expect_failure 'remote big push force' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+	setup_big_push &&
+
+ (
+ cd gitrepo &&
+
+ check_push 0 --force --all <<-EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:forced-update
+ bad_bmark2:forced-update
+ branches/bad_branch:forced-update
+ EOF
+ ) &&
+
+ check_branch hgrepo default six &&
+ check_branch hgrepo good_branch eight &&
+ check_branch hgrepo bad_branch nine &&
+ check_branch hgrepo new_branch ten &&
+ check_bookmark hgrepo good_bmark three &&
+ check_bookmark hgrepo bad_bmark1 four &&
+ check_bookmark hgrepo bad_bmark2 five &&
+ check_bookmark hgrepo new_bmark six
+'
+
+test_expect_failure 'remote big push dry-run' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+	setup_big_push &&
+
+ (
+ cd gitrepo &&
+
+ check_push 0 --dry-run --all <<-EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ bad_bmark1:non-fast-forward
+ bad_bmark2:non-fast-forward
+ branches/bad_branch:non-fast-forward
+ EOF
+
+ check_push 0 --dry-run master good_bmark new_bmark branches/good_branch branches/new_branch <<-EOF
+ master
+ good_bmark
+ branches/good_branch
+ new_bmark:new
+ branches/new_branch:new
+ EOF
+ ) &&
+
+ check_branch hgrepo default one &&
+ check_branch hgrepo good_branch "good branch" &&
+ check_branch hgrepo bad_branch "bad branch" &&
+ check_branch hgrepo new_branch '' &&
+ check_bookmark hgrepo good_bmark one &&
+ check_bookmark hgrepo bad_bmark1 one &&
+ check_bookmark hgrepo bad_bmark2 one &&
+ check_bookmark hgrepo new_bmark ''
+'
+
+test_expect_success 'remote double failed push' '
+ test_when_finished "rm -rf hgrepo gitrepo*" &&
+
+ (
+ hg init hgrepo &&
+ cd hgrepo &&
+ echo zero > content &&
+ hg add content &&
+ hg commit -m zero &&
+ echo one > content &&
+ hg commit -m one
+ ) &&
+
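+	# the second push must fail exactly like the first; a failed push
+	# must not leave stale state behind that changes the outcome of a retry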
+ (
+ git clone "hg::hgrepo" gitrepo &&
+ cd gitrepo &&
+ git reset --hard HEAD^ &&
+ echo two > content &&
+ git commit -a -m two &&
+ test_expect_code 1 git push &&
+ test_expect_code 1 git push
+ )
'
test_done
diff --git a/contrib/subtree/git-subtree.sh b/contrib/subtree/git-subtree.sh
index 8a23f58ba0..51ae932e5e 100755
--- a/contrib/subtree/git-subtree.sh
+++ b/contrib/subtree/git-subtree.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
#
# git-subtree.sh: split/join git repositories in subdirectories of this one
#
@@ -715,7 +715,10 @@ cmd_push()
repository=$1
refspec=$2
echo "git push using: " $repository $refspec
- git push $repository $(git subtree split --prefix=$prefix):refs/heads/$refspec
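+		# capture the split result first: a failed split must die here
+		# rather than expand to an empty rev, which git push would take
+		# as a request to delete the remote branch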
+ localrev=$(git subtree split --prefix="$prefix") || die
+ git push $repository $localrev:refs/heads/$refspec
else
die "'$dir' must already exist. Try 'git subtree add'."
fi