149 files changed, 7129 insertions, 3571 deletions
diff --git a/.gitignore b/.gitignore index d99372afc4..eb8a1f8606 100644 --- a/.gitignore +++ b/.gitignore @@ -33,7 +33,6 @@ git-daemon git-diff git-diff-files git-diff-index -git-diff-stages git-diff-tree git-describe git-fast-import @@ -101,7 +100,6 @@ git-repo-config git-request-pull git-rerere git-reset -git-resolve git-rev-list git-rev-parse git-revert @@ -141,6 +139,7 @@ git-whatchanged git-write-tree git-core-*/?* gitweb/gitweb.cgi +test-chmtime test-date test-delta test-dump-cache-tree @@ -27,6 +27,7 @@ Nguyễn Thái Ngọc Duy <pclouds@gmail.com> Ramsay Allan Jones <ramsay@ramsay1.demon.co.uk> René Scharfe <rene.scharfe@lsrfire.ath.cx> Robert Fitzsimons <robfitz@273k.net> +Sam Vilain <sam@vilain.net> Santi Béjar <sbejar@gmail.com> Sean Estabrooks <seanlkml@sympatico.ca> Shawn O. Pearce <spearce@spearce.org> diff --git a/Documentation/RelNotes-1.5.1.txt b/Documentation/RelNotes-1.5.1.txt new file mode 100644 index 0000000000..4d371866c3 --- /dev/null +++ b/Documentation/RelNotes-1.5.1.txt @@ -0,0 +1,44 @@ +GIT v1.5.1 Release Notes +======================== + +Updates since v1.5.0 +-------------------- + +* Deprecated commands and options. + + - git-diff-stages and git-resolve have been removed. + +* New commands and options. + + - "git log" and friends take --reverse. This makes output + that typically goes reverse order in chronological order. + "git shortlog" usually lists commits in chronological order, + but with "--reverse", they are shown in reverse + chronological order. + + - "git diff" learned --ignore-space-at-eol. This is a weaker + form of --ignore-space-change. + + - "git name-rev" learned --refs=<pattern>, to limit the tags + used for naming the given revisions only to the ones + matching the given pattern. + +* Updated behaviour of existing commands. + + - "git diff" outputs a trailing HT when pathnames have embedded + SP on +++/--- header lines, in order to help "GNU patch" to + parse its output. "git apply" was already updated to accept + this modified output format since ce74618d (Sep 22, 2006). + +* Hooks + + - The sample update hook to show how to send out notification + e-mail was updated to show only new commits that appeared in + the repository. Earlier, it showed new commits that appeared + on the branch. + +-- +exec >/var/tmp/1 +O=v1.5.0-49-g69bc0e2 +echo O=`git describe master` +git shortlog --no-merges $O..master ^maint diff --git a/Documentation/cmd-list.perl b/Documentation/cmd-list.perl index 75f4791055..a2d6268e2b 100755 --- a/Documentation/cmd-list.perl +++ b/Documentation/cmd-list.perl @@ -90,7 +90,6 @@ git-describe mainporcelain git-diff-files plumbinginterrogators git-diff-index plumbinginterrogators git-diff mainporcelain -git-diff-stages plumbinginterrogators git-diff-tree plumbinginterrogators git-fast-import ancillarymanipulators git-fetch mainporcelain @@ -150,7 +149,6 @@ git-remote ancillarymanipulators git-request-pull foreignscminterface git-rerere ancillaryinterrogators git-reset mainporcelain -git-resolve mainporcelain git-revert mainporcelain git-rev-list plumbinginterrogators git-rev-parse ancillaryinterrogators diff --git a/Documentation/config.txt b/Documentation/config.txt index 9fec76935e..d2b4a05ca5 100644 --- a/Documentation/config.txt +++ b/Documentation/config.txt @@ -5,7 +5,8 @@ The git configuration file contains a number of variables that affect the git command's behavior. 
`.git/config` file for each repository is used to store the information for that repository, and `$HOME/.gitconfig` is used to store per user information to give -fallback values for `.git/config` file. +fallback values for `.git/config` file. The file `/etc/gitconfig` +can be used to store system-wide defaults. They can be used by both the git plumbing and the porcelains. The variables are divided into sections, where @@ -470,6 +471,10 @@ remote.<name>.push:: The default set of "refspec" for gitlink:git-push[1]. See gitlink:git-push[1]. +remote.<name>.skipDefaultUpdate:: + If true, this remote will be skipped by default when updating + using the remote subcommand of gitlink:git-remote[1]. + remote.<name>.receivepack:: The default program to execute on the remote side when pushing. See option \--exec of gitlink:git-push[1]. @@ -478,6 +483,14 @@ remote.<name>.uploadpack:: The default program to execute on the remote side when fetching. See option \--exec of gitlink:git-fetch-pack[1]. +remote.<name>.tagopt:: + Setting this value to --no-tags disables automatic tag following when fetching + from remote <name> + +remotes.<group>:: + The list of remotes which are fetched by "git remote update + <group>". See gitlink:git-remote[1]. + repack.usedeltabaseoffset:: Allow gitlink:git-repack[1] to create packs that uses delta-base offset. Defaults to false. diff --git a/Documentation/core-intro.txt b/Documentation/core-intro.txt index 6bee448e7d..eea44d9d56 100644 --- a/Documentation/core-intro.txt +++ b/Documentation/core-intro.txt @@ -588,4 +588,5 @@ stages to temporary files and calls a "merge" script on it: git-merge-index git-merge-one-file hello.c -and that is what higher level `git resolve` is implemented with. +and that is what higher level `git merge -s resolve` is implemented +with. diff --git a/Documentation/core-tutorial.txt b/Documentation/core-tutorial.txt index 9c28bea62e..97cdb90cb4 100644 --- a/Documentation/core-tutorial.txt +++ b/Documentation/core-tutorial.txt @@ -977,7 +977,7 @@ see more complex cases. Now, let's pretend you are the one who did all the work in `mybranch`, and the fruit of your hard work has finally been merged to the `master` branch. Let's go back to `mybranch`, and run -resolve to get the "upstream changes" back to your branch. +`git merge` to get the "upstream changes" back to your branch. ------------ $ git checkout mybranch @@ -996,7 +996,7 @@ Fast forward ---------------- Because your branch did not contain anything more than what are -already merged into the `master` branch, the resolve operation did +already merged into the `master` branch, the merge operation did not actually do a merge. Instead, it just updated the top of the tree of your branch to that of the `master` branch. This is often called 'fast forward' merge. @@ -1099,11 +1099,11 @@ programs, which are 'commit walkers'; they outlived their usefulness when git Native and SSH transports were introduced, and not used by `git pull` or `git push` scripts. -Once you fetch from the remote repository, you `resolve` that +Once you fetch from the remote repository, you `merge` that with your current branch. 
However -- it's such a common thing to `fetch` and then -immediately `resolve`, that it's called `git pull`, and you can +immediately `merge`, that it's called `git pull`, and you can simply do ---------------- diff --git a/Documentation/diff-options.txt b/Documentation/diff-options.txt index 019a39f2bf..d8696b7b36 100644 --- a/Documentation/diff-options.txt +++ b/Documentation/diff-options.txt @@ -140,6 +140,9 @@ -a:: Shorthand for "--text". +--ignore-space-at-eol:: + Ignore changes in white spaces at EOL. + --ignore-space-change:: Ignore changes in amount of white space. This ignores white space at line end, and consider all other sequences of one or diff --git a/Documentation/diffcore.txt b/Documentation/diffcore.txt index cb4e562004..34cd306bb1 100644 --- a/Documentation/diffcore.txt +++ b/Documentation/diffcore.txt @@ -6,8 +6,8 @@ June 2005 Introduction ------------ -The diff commands git-diff-index, git-diff-files, git-diff-tree, and -git-diff-stages can be told to manipulate differences they find in +The diff commands git-diff-index, git-diff-files, and git-diff-tree +can be told to manipulate differences they find in unconventional ways before showing diff(1) output. The manipulation is collectively called "diffcore transformation". This short note describes what they are and how to use them to produce diff outputs @@ -30,9 +30,6 @@ files: - git-diff-tree compares contents of two "tree" objects; - - git-diff-stages compares contents of blobs at two stages in an - unmerged index file. - In all of these cases, the commands themselves compare corresponding paths in the two sets of files. The result of comparison is passed from these commands to what is internally diff --git a/Documentation/git-cvsexportcommit.txt b/Documentation/git-cvsexportcommit.txt index 27d531b888..555b8234f0 100644 --- a/Documentation/git-cvsexportcommit.txt +++ b/Documentation/git-cvsexportcommit.txt @@ -8,7 +8,7 @@ git-cvsexportcommit - Export a single commit to a CVS checkout SYNOPSIS -------- -'git-cvsexportcommit' [-h] [-v] [-c] [-P] [-p] [-a] [-f] [-m msgprefix] [PARENTCOMMIT] COMMITID +'git-cvsexportcommit' [-h] [-v] [-c] [-P] [-p] [-a] [-d cvsroot] [-f] [-m msgprefix] [PARENTCOMMIT] COMMITID DESCRIPTION @@ -43,6 +43,11 @@ OPTIONS Add authorship information. Adds Author line, and Committer (if different from Author) to the message. +-d:: + Set an alternative CVSROOT to use. This corresponds to the CVS + -d parameter. Usually users will not want to set this, except + if using CVS in an asymmetric fashion. + -f:: Force the merge even if the files are not up to date. diff --git a/Documentation/git-diff-files.txt b/Documentation/git-diff-files.txt index 7248b35d95..b78c4c64f1 100644 --- a/Documentation/git-diff-files.txt +++ b/Documentation/git-diff-files.txt @@ -8,7 +8,7 @@ git-diff-files - Compares files in the working tree and the index SYNOPSIS -------- -'git-diff-files' [-q] [-0|-1|-2|-3|-c|--cc] [<common diff options>] [<path>...] +'git-diff-files' [-q] [-0|-1|-2|-3|-c|--cc|-n|--no-index] [<common diff options>] [<path>...] DESCRIPTION ----------- @@ -36,6 +36,9 @@ omit diff output for unmerged entries and just show "Unmerged". diff, similar to the way 'diff-tree' shows a merge commit with these flags. +\-n,\--no-index:: + Compare the two given files / directories. 
+ -q:: Remain silent even on nonexistent files diff --git a/Documentation/git-diff-stages.txt b/Documentation/git-diff-stages.txt deleted file mode 100644 index b8f45b8cdc..0000000000 --- a/Documentation/git-diff-stages.txt +++ /dev/null @@ -1,42 +0,0 @@ -git-diff-stages(1) -================== - -NAME ----- -git-diff-stages - Compares two merge stages in the index - - -SYNOPSIS --------- -'git-diff-stages' [<common diff options>] <stage1> <stage2> [<path>...] - -DESCRIPTION ------------ -DEPRECATED and will be removed in 1.5.1. - -Compares the content and mode of the blobs in two stages in an -unmerged index file. - -OPTIONS -------- -include::diff-options.txt[] - -<stage1>,<stage2>:: - The stage number to be compared. - -Output format -------------- -include::diff-format.txt[] - - -Author ------- -Written by Junio C Hamano <junkio@cox.net> - -Documentation --------------- -Documentation by Junio C Hamano. - -GIT ---- -Part of the gitlink:git[7] suite diff --git a/Documentation/git-diff.txt b/Documentation/git-diff.txt index 6a098df26b..12a531d1e9 100644 --- a/Documentation/git-diff.txt +++ b/Documentation/git-diff.txt @@ -23,6 +23,10 @@ tree and the index file, or the index file and the working tree. further add to the index but you still haven't. You can stage these changes by using gitlink:git-add[1]. + If exactly two paths are given, and at least one is untracked, + compare the two files / directories. This behavior can be + forced by --no-index. + 'git-diff' [--options] --cached [<commit>] [--] [<path>...]:: This form is to view the changes you staged for the next diff --git a/Documentation/git-name-rev.txt b/Documentation/git-name-rev.txt index 37fbf66efb..5b5c4c865f 100644 --- a/Documentation/git-name-rev.txt +++ b/Documentation/git-name-rev.txt @@ -8,7 +8,8 @@ git-name-rev - Find symbolic names for given revs SYNOPSIS -------- -'git-name-rev' [--tags] ( --all | --stdin | <committish>... ) +'git-name-rev' [--tags] [--refs=<pattern>] + ( --all | --stdin | <committish>... ) DESCRIPTION ----------- @@ -22,6 +23,9 @@ OPTIONS --tags:: Do not use branch names, but only tags to name the commits +--refs=<pattern>:: + Only use refs whose names match a given shell pattern. + --all:: List all commits reachable from all refs diff --git a/Documentation/git-remote.txt b/Documentation/git-remote.txt index f96b30429c..a9fb6a9a5e 100644 --- a/Documentation/git-remote.txt +++ b/Documentation/git-remote.txt @@ -13,6 +13,7 @@ SYNOPSIS 'git-remote' add [-t <branch>] [-m <branch>] [-f] <name> <url> 'git-remote' show <name> 'git-remote' prune <name> +'git-remote' update [group] DESCRIPTION ----------- @@ -53,7 +54,17 @@ Gives some information about the remote <name>. Deletes all stale tracking branches under <name>. These stale branches have already been removed from the remote repository -referenced by <name>, but are still locally available in "remotes/<name>". +referenced by <name>, but are still locally available in +"remotes/<name>". + +'update':: + +Fetch updates for a named set of remotes in the repository as defined by +remotes.<group>. If a named group is not specified on the command line, +the configuration parameter remotes.default will get used; if +remotes.default is not defined, all remotes which do not the +configuration parameter remote.<name>.skipDefaultUpdate set to true will +be updated. (See gitlink:git-config[1]). 
DISCUSSION diff --git a/Documentation/git-resolve.txt b/Documentation/git-resolve.txt deleted file mode 100644 index 7fde665fb5..0000000000 --- a/Documentation/git-resolve.txt +++ /dev/null @@ -1,38 +0,0 @@ -git-resolve(1) -============== - -NAME ----- -git-resolve - Merge two commits - - -SYNOPSIS --------- -'git-resolve' <current> <merged> <message> - -DESCRIPTION ------------ -DEPRECATED and will be removed in 1.5.1. Use `git-merge` instead. - -Given two commits and a merge message, merge the <merged> commit -into <current> commit, with the commit log message <message>. - -When <current> is a descendant of <merged>, or <current> is an -ancestor of <merged>, no new commit is created and the <message> -is ignored. The former is informally called "already up to -date", and the latter is often called "fast forward". - - -Author ------- -Written by Linus Torvalds <torvalds@osdl.org> and -Dan Holmsand <holmsand@gmail.com>. - -Documentation --------------- -Documentation by David Greaves, Junio C Hamano and the git-list <git@vger.kernel.org>. - -GIT ---- -Part of the gitlink:git[7] suite - diff --git a/Documentation/git-rev-list.txt b/Documentation/git-rev-list.txt index c742117595..4f145eaba4 100644 --- a/Documentation/git-rev-list.txt +++ b/Documentation/git-rev-list.txt @@ -27,6 +27,7 @@ SYNOPSIS [ \--pretty | \--header ] [ \--bisect ] [ \--merge ] + [ \--reverse ] [ \--walk-reflogs ] <commit>... [ \-- <paths>... ] @@ -266,6 +267,10 @@ By default, the commits are shown in reverse chronological order. parent comes before all of its children, but otherwise things are still ordered in the commit timestamp order. +--reverse:: + + Output the commits in reverse order. + Object Traversal ~~~~~~~~~~~~~~~~ diff --git a/Documentation/git-svn.txt b/Documentation/git-svn.txt index 6ce6a3944d..cf094ca357 100644 --- a/Documentation/git-svn.txt +++ b/Documentation/git-svn.txt @@ -13,14 +13,13 @@ DESCRIPTION ----------- git-svn is a simple conduit for changesets between Subversion and git. It is not to be confused with gitlink:git-svnimport[1], which is -read-only and geared towards tracking multiple branches. +read-only. git-svn was originally designed for an individual developer who wants a bidirectional flow of changesets between a single branch in Subversion and an arbitrary number of branches in git. Since its inception, git-svn has gained the ability to track multiple branches in a manner -similar to git-svnimport; but it cannot (yet) automatically detect new -branches and tags like git-svnimport does. +similar to git-svnimport. git-svn is especially useful when it comes to tracking repositories not organized in the way Subversion developers recommend (trunk, @@ -31,26 +30,80 @@ COMMANDS -- 'init':: - Creates an empty git repository with additional metadata - directories for git-svn. The Subversion URL must be specified - as a command-line argument. Optionally, the target directory - to operate on can be specified as a second argument. Normally - this command initializes the current directory. + Initializes an empty git repository with additional + metadata directories for git-svn. The Subversion URL + may be specified as a command-line argument, or as full + URL arguments to -T/-t/-b. Optionally, the target + directory to operate on can be specified as a second + argument. Normally this command initializes the current + directory. 
-'fetch':: +-T<trunk_subdir>:: +--trunk=<trunk_subdir>:: +-t<tags_subdir>:: +--tags=<tags_subdir>:: +-b<branches_subdir>:: +--branches=<branches_subdir>:: + These are optional command-line options for init. Each of + these flags can point to a relative repository path + (--tags=project/tags') or a full url + (--tags=https://foo.org/project/tags) -Fetch unfetched revisions from the Subversion URL we are -tracking. refs/remotes/git-svn will be updated to the -latest revision. +--no-metadata:: + Set the 'noMetadata' option in the [svn-remote] config. +--use-svm-props:: + Set the 'useSvmProps' option in the [svn-remote] config. +--use-svnsync-props:: + Set the 'useSvnsyncProps' option in the [svn-remote] config. +--rewrite-root=<URL>:: + Set the 'rewriteRoot' option in the [svn-remote] config. +--username=<USER>:: + For transports that SVN handles authentication for (http, + https, and plain svn), specify the username. For other + transports (eg svn+ssh://), you must include the username in + the URL, eg svn+ssh://foo@svn.bar.com/project -Note: You should never attempt to modify the remotes/git-svn -branch outside of git-svn. Instead, create a branch from -remotes/git-svn and work on that branch. Use the 'dcommit' -command (see below) to write git commits back to -remotes/git-svn. +--prefix=<prefix> + This allows one to specify a prefix which is prepended + to the names of remotes if trunk/branches/tags are + specified. The prefix does not automatically include a + trailing slash, so be sure you include one in the + argument if that is what you want. This is useful if + you wish to track multiple projects that share a common + repository. + +'fetch':: -See '<<fetch-args,Additional Fetch Arguments>>' if you are interested in -manually joining branches on commit. + Fetch unfetched revisions from the Subversion remote we are + tracking. The name of the [svn-remote "..."] section in the + .git/config file may be specified as an optional command-line + argument. + +'clone':: + Runs 'init' and 'fetch'. It will automatically create a + directory based on the basename of the URL passed to it; + or if a second argument is passed; it will create a directory + and work within that. It accepts all arguments that the + 'init' and 'fetch' commands accept; with the exception of + '--fetch-all'. After a repository is cloned, the 'fetch' + command will be able to update revisions without affecting + the working tree; and the 'rebase' command will be able + to update the working tree with the latest changes. + +'rebase':: + This fetches revisions from the SVN parent of the current HEAD + and rebases the current (uncommitted to SVN) work against it. + + This works similarly to 'svn update' or 'git-pull' except that + it preserves linear history with 'git-rebase' instead of + 'git-merge' for ease of dcommit-ing with git-svn. + + This accepts all options that 'git-svn fetch' and 'git-rebase' + accepts. However '--fetch-all' only fetches from the current + [svn-remote], and not all [svn-remote] definitions. + + Like 'git-rebase'; this requires that the working tree be clean + and have no uncommitted changes. 'dcommit':: Commit each diff from a specified head directly to the SVN @@ -96,16 +149,6 @@ manually joining branches on commit. commit. All merging is assumed to have taken place independently of git-svn functions. -'rebuild':: - Not a part of daily usage, but this is a useful command if - you've just cloned a repository (using gitlink:git-clone[1]) that was - tracked with git-svn. 
Unfortunately, git-clone does not clone - git-svn metadata and the svn working tree that git-svn uses for - its operations. This rebuilds the metadata so git-svn can - resume fetch operations. A Subversion URL may be optionally - specified at the command-line if the directory/repository you're - tracking has moved or changed protocols. - 'show-ignore':: Recursively finds and lists the svn:ignore property on directories. The output is suitable for appending to @@ -122,53 +165,13 @@ manually joining branches on commit. repository (that has been init-ed with git-svn). The -r<revision> option is required for this. -'graft-branches':: - This command attempts to detect merges/branches from already - imported history. Techniques used currently include regexes, - file copies, and tree-matches). This command generates (or - modifies) the $GIT_DIR/info/grafts file. This command is - considered experimental, and inherently flawed because - merge-tracking in SVN is inherently flawed and inconsistent - across different repositories. - -'multi-init':: - This command supports git-svnimport-like command-line syntax for - importing repositories that are laid out as recommended by the - SVN folks. This is a bit more tolerant than the git-svnimport - command-line syntax and doesn't require the user to figure out - where the repository URL ends and where the repository path - begins. - --T<trunk_subdir>:: ---trunk=<trunk_subdir>:: --t<tags_subdir>:: ---tags=<tags_subdir>:: --b<branches_subdir>:: ---branches=<branches_subdir>:: - These are the command-line options for multi-init. Each of - these flags can point to a relative repository path - (--tags=project/tags') or a full url - (--tags=https://foo.org/project/tags) - ---prefix=<prefix> - This allows one to specify a prefix which is prepended to the - names of remotes. The prefix does not automatically include a - trailing slash, so be sure you include one in the argument if - that is what you want. This is useful if you wish to track - multiple projects that share a common repository. - -'multi-fetch':: - This runs fetch on all known SVN branches we're tracking. This - will NOT discover new branches (unlike git-svnimport), so - multi-init will need to be re-run (it's idempotent). - -- OPTIONS ------- -- ---shared:: +--shared[={false|true|umask|group|all|world|everybody}]:: --template=<template_directory>:: Only used with the 'init' command. These are passed directly to gitlink:git-init[1]. @@ -176,14 +179,15 @@ OPTIONS -r <ARG>:: --revision <ARG>:: -Only used with the 'fetch' command. +Used with the 'fetch' command. -Takes any valid -r<argument> svn would accept and passes it -directly to svn. -r<ARG1>:<ARG2> ranges and "{" DATE "}" syntax -is also supported. This is passed directly to svn, see svn -documentation for more details. +This allows revision ranges for partial/cauterized history +to be supported. $NUMBER, $NUMBER1:$NUMBER2 (numeric ranges), +$NUMBER:HEAD, and BASE:$NUMBER are all supported. -This can allow you to make partial mirrors when running fetch. +This can allow you to make partial mirrors when running fetch; +but is generally not recommended because history will be skipped +and lost. -:: --stdin:: @@ -270,7 +274,7 @@ config key: svn.repackflags -s<strategy>:: --strategy=<strategy>:: -These are only used with the 'dcommit' command. +These are only used with the 'dcommit' and 'rebase' commands. Passed directly to git-rebase when using 'dcommit' if a 'git-reset' cannot be used (see dcommit). 
@@ -289,75 +293,79 @@ ADVANCED OPTIONS ---------------- -- --b<refname>:: ---branch <refname>:: -Used with 'fetch', 'dcommit' or 'set-tree'. - -This can be used to join arbitrary git branches to remotes/git-svn -on new commits where the tree object is equivalent. - -When used with different GIT_SVN_ID values, tags and branches in -SVN can be tracked this way, as can some merges where the heads -end up having completely equivalent content. This can even be -used to track branches across multiple SVN _repositories_. - -This option may be specified multiple times, once for each -branch. - -config key: svn.branch - -i<GIT_SVN_ID>:: --id <GIT_SVN_ID>:: -This sets GIT_SVN_ID (instead of using the environment). See the -section on -'<<tracking-multiple-repos,Tracking Multiple Repositories or Branches>>' -for more information on using GIT_SVN_ID. +This sets GIT_SVN_ID (instead of using the environment). This +allows the user to override the default refname to fetch from +when tracking a single URL. The 'log' and 'dcommit' commands +no longer require this switch as an argument. + +-R<remote name>:: +--svn-remote <remote name>:: + Specify the [svn-remote "<remote name>"] section to use, + this allows SVN multiple repositories to be tracked. + Default: "svn" --follow-parent:: This is especially helpful when we're tracking a directory that has been moved around within the repository, or if we started tracking a branch and never tracked the trunk it was - descended from. + descended from. This feature is enabled by default, use + --no-follow-parent to disable it. config key: svn.followparent ---no-metadata:: - This gets rid of the git-svn-id: lines at the end of every commit. - - With this, you lose the ability to use the rebuild command. If - you ever lose your .git/svn/git-svn/.rev_db file, you won't be - able to fetch again, either. This is fine for one-shot imports. - - The 'git-svn log' command will not work on repositories using this, - either. - -config key: svn.nometadata - -- - -COMPATIBILITY OPTIONS ---------------------- +CONFIG FILE-ONLY OPTIONS +------------------------ -- ---upgrade:: -Only used with the 'rebuild' command. - -Run this if you used an old version of git-svn that used -"git-svn-HEAD" instead of "remotes/git-svn" as the branch -for tracking the remote. - ---ignore-nodate:: -Only used with the 'fetch' command. - -By default git-svn will crash if it tries to import a revision -from SVN which has '(no date)' listed as the date of the revision. -This is repository corruption on SVN's part, plain and simple. -But sometimes you really need those revisions anyway. +svn.noMetadata:: +svn-remote.<name>.noMetadata:: + This gets rid of the git-svn-id: lines at the end of every commit. -If supplied git-svn will convert '(no date)' entries to the UNIX -epoch (midnight on Jan. 1, 1970). Yes, that's probably very wrong. -SVN was very wrong. + If you lose your .git/svn/git-svn/.rev_db file, git-svn will not + be able to rebuild it and you won't be able to fetch again, + either. This is fine for one-shot imports. + + The 'git-svn log' command will not work on repositories using + this, either. Using this conflicts with the 'useSvmProps' + option for (hopefully) obvious reasons. + +svn.useSvmProps:: +svn-remote.<name>.useSvmProps:: + This allows git-svn to re-map repository URLs and UUIDs from + mirrors created using SVN::Mirror (or svk) for metadata. + + If an SVN revision has a property, "svm:headrev", it is likely + that the revision was created by SVN::Mirror (also used by SVK). 
+ The property contains a repository UUID and a revision. We want + to make it look like we are mirroring the original URL, so + introduce a helper function that returns the original identity + URL and UUID, and use it when generating metadata in commit + messages. + +svn.useSvnsyncProps:: +svn-remote.<name>.useSvnsyncprops:: + Similar to the useSvmProps option; this is for users + of the svnsync(1) command distributed with SVN 1.4.x and + later. + +svn-remote.<name>.rewriteRoot:: + This allows users to create repositories from alternate + URLs. For example, an administrator could run git-svn on the + server locally (accessing via file://) but wish to distribute + the repository with a public http:// or svn:// URL in the + metadata so users of it will see the public URL. + +Since the noMetadata, rewriteRoot, useSvnsyncProps and useSvmProps +options all affect the metadata generated and used by git-svn; they +*must* be set in the configuration file before any history is imported +and these settings should never be changed once they are set. + +Additionally, only one of these four options can be used per-svn-remote +section because they affect the 'git-svn-id:' metadata line. -- @@ -367,43 +375,37 @@ Basic Examples Tracking and contributing to a the trunk of a Subversion-managed project: ------------------------------------------------------------------------ -# Initialize a repo (like git init): - git-svn init http://svn.foo.org/project/trunk -# Fetch remote revisions: - git-svn fetch -# Create your own branch to hack on: - git checkout -b my-branch remotes/git-svn -# Do some work, and then commit your new changes to SVN, as well as -# automatically updating your working HEAD: +# Clone a repo (like git clone): + git-svn clone http://svn.foo.org/project/trunk +# Enter the newly cloned directory: + cd trunk +# You should be on master branch, double-check with git-branch + git branch +# Do some work and commit locally to git: + git commit ... 
+# Something is committed to SVN, rebase your local changes against the +# latest changes in SVN: + git-svn rebase +# Now commit your changes (that were committed previously using git) to SVN, +# as well as automatically updating your working HEAD: git-svn dcommit -# Something is committed to SVN, rebase the latest into your branch: - git-svn fetch && git rebase remotes/git-svn # Append svn:ignore settings to the default git exclude file: git-svn show-ignore >> .git/info/exclude ------------------------------------------------------------------------ Tracking and contributing to an entire Subversion-managed project (complete with a trunk, tags and branches): -See also: -'<<tracking-multiple-repos,Tracking Multiple Repositories or Branches>>' ------------------------------------------------------------------------ -# Initialize a repo (like git init): - git-svn multi-init http://svn.foo.org/project \ - -T trunk -b branches -t tags -# Fetch remote revisions: - git-svn multi-fetch -# Create your own branch of trunk to hack on: - git checkout -b my-trunk remotes/trunk -# Do some work, and then commit your new changes to SVN, as well as -# automatically updating your working HEAD: - git-svn dcommit -i trunk -# Something has been committed to trunk, rebase the latest into your branch: - git-svn multi-fetch && git rebase remotes/trunk -# Append svn:ignore settings of trunk to the default git exclude file: - git-svn show-ignore -i trunk >> .git/info/exclude -# Check for new branches and tags (no arguments are needed): - git-svn multi-init +# Clone a repo (like git clone): + git-svn clone http://svn.foo.org/project -T trunk -b branches -t tags +# View all branches and tags you have cloned: + git branch -r +# Reset your master to trunk (or any other branch, replacing 'trunk' +# with the appropriate name): + git reset --hard remotes/trunk +# You may only dcommit to one branch/tag/trunk at a time. The usage +# of dcommit/rebase/show-ignore should be teh same as above. ------------------------------------------------------------------------ REBASE VS. PULL/MERGE @@ -416,7 +418,7 @@ pulled or merged from. This is because the author favored If you use 'git-svn set-tree A..B' to commit several diffs and you do not have the latest remotes/git-svn merged into my-branch, you should -use 'git rebase' to update your work branch instead of 'git pull' or +use 'git-svn rebase' to update your work branch instead of 'git pull' or 'git merge'. 'pull/merge' can cause non-linear history to be flattened when committing into SVN, which can lead to merge commits reversing previous commits in SVN. @@ -426,67 +428,49 @@ DESIGN PHILOSOPHY Merge tracking in Subversion is lacking and doing branched development with Subversion is cumbersome as a result. git-svn does not do automated merge/branch tracking by default and leaves it entirely up to -the user on the git side. - -[[tracking-multiple-repos]] -TRACKING MULTIPLE REPOSITORIES OR BRANCHES ------------------------------------------- -Because git-svn does not care about relationships between different -branches or directories in a Subversion repository, git-svn has a simple -hack to allow it to track an arbitrary number of related _or_ unrelated -SVN repositories via one git repository. Simply use the --id/-i flag or -set the GIT_SVN_ID environment variable to a name other other than -"git-svn" (the default) and git-svn will ignore the contents of the -$GIT_DIR/svn/git-svn directory and instead do all of its work in -$GIT_DIR/svn/$GIT_SVN_ID for that invocation. 
The interface branch will -be remotes/$GIT_SVN_ID, instead of remotes/git-svn. Any -remotes/$GIT_SVN_ID branch should never be modified by the user outside -of git-svn commands. - -[[fetch-args]] -ADDITIONAL FETCH ARGUMENTS --------------------------- -This is for advanced users, most users should ignore this section. - -Unfetched SVN revisions may be imported as children of existing commits -by specifying additional arguments to 'fetch'. Additional parents may -optionally be specified in the form of sha1 hex sums at the -command-line. Unfetched SVN revisions may also be tied to particular -git commits with the following syntax: - ------------------------------------------------- - svn_revision_number=git_commit_sha1 ------------------------------------------------- - -This allows you to tie unfetched SVN revision 375 to your current HEAD: - ------------------------------------------------- - git-svn fetch 375=$(git-rev-parse HEAD) ------------------------------------------------- - -If you're tracking a directory that has moved, or otherwise been -branched or tagged off of another directory in the repository and you -care about the full history of the project, then you can use -the --follow-parent option. - ------------------------------------------------- - git-svn fetch --follow-parent ------------------------------------------------- +the user on the git side. git-svn does however follow copy +history of the directory that it is tracking, however (much like +how 'svn log' works). BUGS ---- -We ignore all SVN properties except svn:executable. Too difficult to -map them since we rely heavily on git write-tree being _exactly_ the -same on both the SVN and git working trees and I prefer not to clutter -working trees with metadata files. +We ignore all SVN properties except svn:executable. Any unhandled +properties are logged to $GIT_DIR/svn/<refname>/unhandled.log Renamed and copied directories are not detected by git and hence not tracked when committing to SVN. I do not plan on adding support for this as it's quite difficult and time-consuming to get working for all -the possible corner cases (git doesn't do it, either). Renamed and -copied files are fully supported if they're similar enough for git to -detect them. +the possible corner cases (git doesn't do it, either). Committing +renamed and copied files are fully supported if they're similar enough +for git to detect them. + +CONFIGURATION +------------- + +git-svn stores [svn-remote] configuration information in the +repository .git/config file. It is similar the core git +[remote] sections except 'fetch' keys do not accept glob +arguments; but they are instead handled by the 'branches' +and 'tags' keys. Since some SVN repositories are oddly +configured with multiple projects glob expansions such those +listed below are allowed: + +------------------------------------------------------------------------ +[svn-remote "project-a"] + url = http://server.org/svn + branches = branches/*/project-a:refs/remotes/project-a/branches/* + tags = tags/*/project-a:refs/remotes/project-a/tags/* + trunk = trunk/project-a:refs/remotes/project-a/trunk +------------------------------------------------------------------------ + +Keep in mind that the '*' (asterisk) wildcard of the local ref +(left of the ':') *must* be the farthest right path component; +however the remote wildcard may be anywhere as long as it's own +independent path componet (surrounded by '/' or EOL). 
This +type of configuration is not automatically created by 'init' and +should be manually entered with a text-editor or using +gitlink:git-config[1] SEE ALSO -------- diff --git a/Documentation/git.txt b/Documentation/git.txt index c0fa0d4b17..9a74747989 100644 --- a/Documentation/git.txt +++ b/Documentation/git.txt @@ -35,6 +35,16 @@ ifdef::stalenotes[] You are reading the documentation for the latest version of git. Documentation for older releases are available here: +* link:v1.5.0.2/git.html[documentation for release 1.5.0.2] + +* link:v1.5.0.2/RelNotes-1.5.0.2.txt[release notes for 1.5.0.2] + +* link:v1.5.0.1/RelNotes-1.5.0.1.txt[release notes for 1.5.0.1] + +* link:v1.5.0/git.html[documentation for release 1.5.0] + +* link:v1.5.0/RelNotes-1.5.0.txt[release notes for 1.5.0] + * link:v1.4.4.4/git.html[documentation for release 1.4.4.4] * link:v1.3.3/git.html[documentation for release 1.3.3] diff --git a/Documentation/howto/revert-branch-rebase.txt b/Documentation/howto/revert-branch-rebase.txt index d10476b56e..d88ec23a97 100644 --- a/Documentation/howto/revert-branch-rebase.txt +++ b/Documentation/howto/revert-branch-rebase.txt @@ -85,7 +85,7 @@ Fortunately I did not have to; what I have in the current branch ------------------------------------------------ $ git checkout master -$ git resolve master revert-c99 fast ;# this should be a fast forward +$ git merge revert-c99 ;# this should be a fast forward Updating from 10d781b9caa4f71495c7b34963bef137216f86a8 to e3a693c... cache.h | 8 ++++---- commit.c | 2 +- @@ -95,13 +95,6 @@ Updating from 10d781b9caa4f71495c7b34963bef137216f86a8 to e3a693c... 5 files changed, 8 insertions(+), 8 deletions(-) ------------------------------------------------ -The 'fast' in the above 'git resolve' is not a magic. I knew this -'resolve' would result in a fast forward merge, and if not, there is -something very wrong (so I would do 'git reset' on the 'master' branch -and examine the situation). When a fast forward merge is done, the -message parameter to 'git resolve' is discarded, because no new commit -is created. You could have said 'junk' or 'nothing' there as well. - There is no need to redo the test at this point. We fast forwarded and we know 'master' matches 'revert-c99' exactly. In fact: diff --git a/Documentation/user-manual.txt b/Documentation/user-manual.txt index 7b6dc22e7b..34e965104b 100644 --- a/Documentation/user-manual.txt +++ b/Documentation/user-manual.txt @@ -2752,7 +2752,7 @@ stages to temporary files and calls a "merge" script on it: $ git-merge-index git-merge-one-file hello.c ------------------------------------------------- -and that is what higher level `git resolve` is implemented with. +and that is what higher level `git merge -s resolve` is implemented with. 
How git stores objects efficiently: pack files ---------------------------------------------- diff --git a/GIT-VERSION-GEN b/GIT-VERSION-GEN index 32a9422336..6abde8d7b3 100755 --- a/GIT-VERSION-GEN +++ b/GIT-VERSION-GEN @@ -1,7 +1,7 @@ #!/bin/sh GVF=GIT-VERSION-FILE -DEF_VER=v1.5.0.2.GIT +DEF_VER=v1.5.0.GIT LF=' ' @@ -128,6 +128,7 @@ prefix = $(HOME) bindir = $(prefix)/bin gitexecdir = $(bindir) template_dir = $(prefix)/share/git-core/templates/ +ETC_GITCONFIG = $(prefix)/etc/gitconfig # DESTDIR= # default configuration for gitweb @@ -176,7 +177,7 @@ SCRIPT_SH = \ git-merge-one-file.sh git-parse-remote.sh \ git-pull.sh git-rebase.sh \ git-repack.sh git-request-pull.sh git-reset.sh \ - git-resolve.sh git-revert.sh git-sh-setup.sh \ + git-revert.sh git-sh-setup.sh \ git-tag.sh git-verify-tag.sh \ git-applymbox.sh git-applypatch.sh git-am.sh \ git-merge.sh git-merge-stupid.sh git-merge-octopus.sh \ @@ -266,7 +267,8 @@ LIB_OBJS = \ revision.o pager.o tree-walk.o xdiff-interface.o \ write_or_die.o trace.o list-objects.o grep.o \ alloc.o merge-file.o path-list.o help.o unpack-trees.o $(DIFF_OBJS) \ - color.o wt-status.o archive-zip.o archive-tar.o shallow.o utf8.o + color.o wt-status.o archive-zip.o archive-tar.o shallow.o utf8.o \ + convert.o BUILTIN_OBJS = \ builtin-add.o \ @@ -284,7 +286,6 @@ BUILTIN_OBJS = \ builtin-diff.o \ builtin-diff-files.o \ builtin-diff-index.o \ - builtin-diff-stages.o \ builtin-diff-tree.o \ builtin-fmt-merge-msg.o \ builtin-for-each-ref.o \ @@ -296,6 +297,7 @@ BUILTIN_OBJS = \ builtin-ls-tree.o \ builtin-mailinfo.o \ builtin-mailsplit.o \ + builtin-merge-base.o \ builtin-merge-file.o \ builtin-mv.o \ builtin-name-rev.o \ @@ -597,6 +599,7 @@ endif # Shell quote (do not use $(call) to accommodate ancient setups); SHA1_HEADER_SQ = $(subst ','\'',$(SHA1_HEADER)) +ETC_GITCONFIG_SQ = $(subst ','\'',$(ETC_GITCONFIG)) DESTDIR_SQ = $(subst ','\'',$(DESTDIR)) bindir_SQ = $(subst ','\'',$(bindir)) @@ -609,7 +612,8 @@ PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH)) LIBS = $(GITLIBS) $(EXTLIBS) -BASIC_CFLAGS += -DSHA1_HEADER='$(SHA1_HEADER_SQ)' $(COMPAT_CFLAGS) +BASIC_CFLAGS += -DSHA1_HEADER='$(SHA1_HEADER_SQ)' \ + -DETC_GITCONFIG='"$(ETC_GITCONFIG_SQ)"' $(COMPAT_CFLAGS) LIB_OBJS += $(COMPAT_OBJS) ALL_CFLAGS += $(BASIC_CFLAGS) @@ -825,7 +829,7 @@ GIT-CFLAGS: .FORCE-GIT-CFLAGS export NO_SVN_TESTS -test: all +test: all test-chmtime$X $(MAKE) -C t/ all test-date$X: test-date.c date.o ctype.o @@ -840,6 +844,9 @@ test-dump-cache-tree$X: dump-cache-tree.o $(GITLIBS) test-sha1$X: test-sha1.o $(GITLIBS) $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $(filter %.o,$^) $(LIBS) +test-chmtime$X: test-chmtime.c + $(CC) $(ALL_CFLAGS) -o $@ $(ALL_LDFLAGS) $< + check-sha1:: test-sha1$X ./test-sha1.sh @@ -1 +1 @@ -Documentation/RelNotes-1.5.0.3.txt
\ No newline at end of file
+Documentation/RelNotes-1.5.1.txt
\ No newline at end of file diff --git a/archive-tar.c b/archive-tar.c index 7d52a061f4..d9c30d33dc 100644 --- a/archive-tar.c +++ b/archive-tar.c @@ -262,7 +262,7 @@ static int write_tar_entry(const unsigned char *sha1, static struct strbuf path; int filenamelen = strlen(filename); void *buffer; - char type[20]; + enum object_type type; unsigned long size; if (!path.alloc) { @@ -283,7 +283,7 @@ static int write_tar_entry(const unsigned char *sha1, buffer = NULL; size = 0; } else { - buffer = read_sha1_file(sha1, type, &size); + buffer = read_sha1_file(sha1, &type, &size); if (!buffer) die("cannot read %s", sha1_to_hex(sha1)); } diff --git a/archive-zip.c b/archive-zip.c index f31b8ed823..7c4984886f 100644 --- a/archive-zip.c +++ b/archive-zip.c @@ -167,7 +167,7 @@ static int write_zip_entry(const unsigned char *sha1, int pathlen; unsigned char *out; char *path; - char type[20]; + enum object_type type; void *buffer = NULL; void *deflated = NULL; @@ -195,7 +195,7 @@ static int write_zip_entry(const unsigned char *sha1, if (S_ISREG(mode) && zlib_compression_level != 0) method = 8; result = 0; - buffer = read_sha1_file(sha1, type, &size); + buffer = read_sha1_file(sha1, &type, &size); if (!buffer) die("cannot read %s", sha1_to_hex(sha1)); crc = crc32(crc, buffer, size); @@ -30,18 +30,18 @@ int parse_blob_buffer(struct blob *item, void *buffer, unsigned long size) int parse_blob(struct blob *item) { - char type[20]; + enum object_type type; void *buffer; unsigned long size; int ret; if (item->object.parsed) return 0; - buffer = read_sha1_file(item->object.sha1, type, &size); + buffer = read_sha1_file(item->object.sha1, &type, &size); if (!buffer) return error("Could not read %s", sha1_to_hex(item->object.sha1)); - if (strcmp(type, blob_type)) + if (type != OBJ_BLOB) return error("Object %s not a blob", sha1_to_hex(item->object.sha1)); ret = parse_blob_buffer(item, buffer, size); diff --git a/builtin-apply.c b/builtin-apply.c index bec95d6c8a..38f647510a 100644 --- a/builtin-apply.c +++ b/builtin-apply.c @@ -28,6 +28,7 @@ static int newfd = -1; static int unidiff_zero; static int p_value = 1; +static int p_value_known; static int check_index; static int write_index; static int cached; @@ -144,6 +145,7 @@ struct patch { unsigned long deflate_origlen; int lines_added, lines_deleted; int score; + unsigned int is_toplevel_relative:1; unsigned int inaccurate_eof:1; unsigned int is_binary:1; unsigned int is_copy:1; @@ -238,7 +240,7 @@ static int name_terminate(const char *name, int namelen, int c, int terminate) return 1; } -static char * find_name(const char *line, char *def, int p_value, int terminate) +static char *find_name(const char *line, char *def, int p_value, int terminate) { int len; const char *start = line; @@ -311,11 +313,54 @@ static char * find_name(const char *line, char *def, int p_value, int terminate) return name; } +static int count_slashes(const char *cp) +{ + int cnt = 0; + char ch; + + while ((ch = *cp++)) + if (ch == '/') + cnt++; + return cnt; +} + +/* + * Given the string after "--- " or "+++ ", guess the appropriate + * p_value for the given patch. + */ +static int guess_p_value(const char *nameline) +{ + char *name, *cp; + int val = -1; + + if (is_dev_null(nameline)) + return -1; + name = find_name(nameline, NULL, 0, TERM_SPACE | TERM_TAB); + if (!name) + return -1; + cp = strchr(name, '/'); + if (!cp) + val = 0; + else if (prefix) { + /* + * Does it begin with "a/$our-prefix" and such? Then this is + * very likely to apply to our directory. 
+ */ + if (!strncmp(name, prefix, prefix_length)) + val = count_slashes(prefix); + else { + cp++; + if (!strncmp(cp, prefix, prefix_length)) + val = count_slashes(prefix) + 1; + } + } + free(name); + return val; +} + /* * Get the name etc info from the --/+++ lines of a traditional patch header * - * NOTE! This hardcodes "-p1" behaviour in filename detection. - * * FIXME! The end-of-filename heuristics are kind of screwy. For existing * files, we can happily check the index for a match, but for creating a * new file we should try to match whatever "patch" does. I have no idea. @@ -326,6 +371,16 @@ static void parse_traditional_patch(const char *first, const char *second, struc first += 4; /* skip "--- " */ second += 4; /* skip "+++ " */ + if (!p_value_known) { + int p, q; + p = guess_p_value(first); + q = guess_p_value(second); + if (p < 0) p = q; + if (0 <= p && p == q) { + p_value = p; + p_value_known = 1; + } + } if (is_dev_null(first)) { patch->is_new = 1; patch->is_delete = 0; @@ -787,6 +842,7 @@ static int find_header(char *line, unsigned long size, int *hdrsize, struct patc { unsigned long offset, len; + patch->is_toplevel_relative = 0; patch->is_rename = patch->is_copy = 0; patch->is_new = patch->is_delete = -1; patch->old_mode = patch->new_mode = 0; @@ -831,6 +887,7 @@ static int find_header(char *line, unsigned long size, int *hdrsize, struct patc die("git diff header lacks filename information (line %d)", linenr); patch->old_name = patch->new_name = patch->def_name; } + patch->is_toplevel_relative = 1; *hdrsize = git_hdr_len; return offset; } @@ -1129,11 +1186,11 @@ static struct fragment *parse_binary_hunk(char **buf_p, *status_p = 0; - if (!strncmp(buffer, "delta ", 6)) { + if (!prefixcmp(buffer, "delta ")) { patch_method = BINARY_DELTA_DEFLATED; origlen = strtoul(buffer + 6, NULL, 10); } - else if (!strncmp(buffer, "literal ", 8)) { + else if (!prefixcmp(buffer, "literal ")) { patch_method = BINARY_LITERAL_DEFLATED; origlen = strtoul(buffer + 8, NULL, 10); } @@ -1393,28 +1450,39 @@ static void show_stats(struct patch *patch) free(qname); } -static int read_old_data(struct stat *st, const char *path, void *buf, unsigned long size) +static int read_old_data(struct stat *st, const char *path, char **buf_p, unsigned long *alloc_p, unsigned long *size_p) { int fd; unsigned long got; + unsigned long nsize; + char *nbuf; + unsigned long size = *size_p; + char *buf = *buf_p; switch (st->st_mode & S_IFMT) { case S_IFLNK: - return readlink(path, buf, size); + return readlink(path, buf, size) != size; case S_IFREG: fd = open(path, O_RDONLY); if (fd < 0) return error("unable to open %s", path); got = 0; for (;;) { - int ret = xread(fd, (char *) buf + got, size - got); + int ret = xread(fd, buf + got, size - got); if (ret <= 0) break; got += ret; } close(fd); - return got; - + nsize = got; + nbuf = buf; + if (convert_to_git(path, &nbuf, &nsize)) { + free(buf); + *buf_p = nbuf; + *alloc_p = nsize; + *size_p = nsize; + } + return got != size; default: return -1; } @@ -1656,6 +1724,8 @@ static int apply_one_fragment(struct buffer_desc *desc, struct fragment *frag, i /* Ignore it, we already handled it */ break; default: + if (apply_verbosely) + error("invalid start of line: '%c'", first); return -1; } patch += len; @@ -1753,6 +1823,9 @@ static int apply_one_fragment(struct buffer_desc *desc, struct fragment *frag, i } } + if (offset && apply_verbosely) + error("while searching for:\n%.*s", oldsize, oldlines); + free(old); free(new); return offset; @@ -1839,11 +1912,11 @@ static int 
apply_binary(struct buffer_desc *desc, struct patch *patch) if (has_sha1_file(sha1)) { /* We already have the postimage */ - char type[10]; + enum object_type type; unsigned long size; free(desc->buffer); - desc->buffer = read_sha1_file(sha1, type, &size); + desc->buffer = read_sha1_file(sha1, &type, &size); if (!desc->buffer) return error("the necessary postimage %s for " "'%s' cannot be read", @@ -1899,8 +1972,8 @@ static int apply_data(struct patch *patch, struct stat *st, struct cache_entry * buf = NULL; if (cached) { if (ce) { - char type[20]; - buf = read_sha1_file(ce->sha1, type, &size); + enum object_type type; + buf = read_sha1_file(ce->sha1, &type, &size); if (!buf) return error("read of %s failed", patch->old_name); @@ -1911,7 +1984,7 @@ static int apply_data(struct patch *patch, struct stat *st, struct cache_entry * size = st->st_size; alloc = size + 8192; buf = xmalloc(alloc); - if (read_old_data(st, patch->old_name, buf, alloc) != size) + if (read_old_data(st, patch->old_name, &buf, &alloc, &size)) return error("read of %s failed", patch->old_name); } @@ -2233,7 +2306,7 @@ static void patch_stats(struct patch *patch) } } -static void remove_file(struct patch *patch) +static void remove_file(struct patch *patch, int rmdir_empty) { if (write_index) { if (remove_file_from_cache(patch->old_name) < 0) @@ -2241,7 +2314,7 @@ static void remove_file(struct patch *patch) cache_tree_invalidate_path(active_cache_tree, patch->old_name); } if (!cached) { - if (!unlink(patch->old_name)) { + if (!unlink(patch->old_name) && rmdir_empty) { char *name = xstrdup(patch->old_name); char *end = strrchr(name, '/'); while (end) { @@ -2283,12 +2356,22 @@ static void add_index_file(const char *path, unsigned mode, void *buf, unsigned static int try_create_file(const char *path, unsigned int mode, const char *buf, unsigned long size) { int fd; + char *nbuf; + unsigned long nsize; if (S_ISLNK(mode)) /* Although buf:size is counted string, it also is NUL * terminated. */ return symlink(buf, path); + nsize = size; + nbuf = (char *) buf; + if (convert_to_working_tree(path, &nbuf, &nsize)) { + free((char *) buf); + buf = nbuf; + size = nsize; + } + fd = open(path, O_CREAT | O_EXCL | O_WRONLY, (mode & 0100) ? 
0777 : 0666); if (fd < 0) return -1; @@ -2374,7 +2457,7 @@ static void write_out_one_result(struct patch *patch, int phase) { if (patch->is_delete > 0) { if (phase == 0) - remove_file(patch); + remove_file(patch, 1); return; } if (patch->is_new > 0 || patch->is_copy) { @@ -2387,7 +2470,7 @@ static void write_out_one_result(struct patch *patch, int phase) * thing: remove the old, write the new */ if (phase == 0) - remove_file(patch); + remove_file(patch, 0); if (phase == 1) create_file(patch); } @@ -2509,6 +2592,32 @@ static int use_patch(struct patch *p) return 1; } +static void prefix_one(char **name) +{ + char *old_name = *name; + if (!old_name) + return; + *name = xstrdup(prefix_filename(prefix, prefix_length, *name)); + free(old_name); +} + +static void prefix_patches(struct patch *p) +{ + if (!prefix || p->is_toplevel_relative) + return; + for ( ; p; p = p->next) { + if (p->new_name == p->old_name) { + char *prefixed = p->new_name; + prefix_one(&prefixed); + p->new_name = p->old_name = prefixed; + } + else { + prefix_one(&p->new_name); + prefix_one(&p->old_name); + } + } +} + static int apply_patch(int fd, const char *filename, int inaccurate_eof) { unsigned long offset, size; @@ -2531,11 +2640,14 @@ static int apply_patch(int fd, const char *filename, int inaccurate_eof) break; if (apply_in_reverse) reverse_patches(patch); + if (prefix) + prefix_patches(patch); if (use_patch(patch)) { patch_stats(patch); *listp = patch; listp = &patch->next; - } else { + } + else { /* perhaps free it a bit better? */ free(patch); skipped_patch++; @@ -2596,9 +2708,16 @@ int cmd_apply(int argc, const char **argv, const char *unused_prefix) int read_stdin = 1; int inaccurate_eof = 0; int errs = 0; + int is_not_gitdir = 0; const char *whitespace_option = NULL; + prefix = setup_git_directory_gently(&is_not_gitdir); + prefix_length = prefix ? 
strlen(prefix) : 0; + git_config(git_apply_config); + if (apply_default_whitespace) + parse_whitespace_option(apply_default_whitespace); + for (i = 1; i < argc; i++) { const char *arg = argv[i]; char *end; @@ -2609,15 +2728,16 @@ int cmd_apply(int argc, const char **argv, const char *unused_prefix) read_stdin = 0; continue; } - if (!strncmp(arg, "--exclude=", 10)) { + if (!prefixcmp(arg, "--exclude=")) { struct excludes *x = xmalloc(sizeof(*x)); x->path = arg + 10; x->next = excludes; excludes = x; continue; } - if (!strncmp(arg, "-p", 2)) { + if (!prefixcmp(arg, "-p")) { p_value = atoi(arg + 2); + p_value_known = 1; continue; } if (!strcmp(arg, "--no-add")) { @@ -2649,10 +2769,14 @@ int cmd_apply(int argc, const char **argv, const char *unused_prefix) continue; } if (!strcmp(arg, "--index")) { + if (is_not_gitdir) + die("--index outside a repository"); check_index = 1; continue; } if (!strcmp(arg, "--cached")) { + if (is_not_gitdir) + die("--cached outside a repository"); check_index = 1; cached = 1; continue; @@ -2670,13 +2794,13 @@ int cmd_apply(int argc, const char **argv, const char *unused_prefix) line_termination = 0; continue; } - if (!strncmp(arg, "-C", 2)) { + if (!prefixcmp(arg, "-C")) { p_context = strtoul(arg + 2, &end, 0); if (*end != '\0') die("unrecognized context count '%s'", arg + 2); continue; } - if (!strncmp(arg, "--whitespace=", 13)) { + if (!prefixcmp(arg, "--whitespace=")) { whitespace_option = arg + 13; parse_whitespace_option(arg + 13); continue; @@ -2693,7 +2817,7 @@ int cmd_apply(int argc, const char **argv, const char *unused_prefix) apply = apply_with_reject = apply_verbosely = 1; continue; } - if (!strcmp(arg, "--verbose")) { + if (!strcmp(arg, "-v") || !strcmp(arg, "--verbose")) { apply_verbosely = 1; continue; } @@ -2701,14 +2825,6 @@ int cmd_apply(int argc, const char **argv, const char *unused_prefix) inaccurate_eof = 1; continue; } - - if (check_index && prefix_length < 0) { - prefix = setup_git_directory(); - prefix_length = prefix ? 
strlen(prefix) : 0; - git_config(git_apply_config); - if (!whitespace_option && apply_default_whitespace) - parse_whitespace_option(apply_default_whitespace); - } if (0 < prefix_length) arg = prefix_filename(prefix, prefix_length, arg); diff --git a/builtin-archive.c b/builtin-archive.c index f613ac2516..8ea6cb1efc 100644 --- a/builtin-archive.c +++ b/builtin-archive.c @@ -35,7 +35,7 @@ static int run_remote_archiver(const char *remote, int argc, for (i = 1; i < argc; i++) { const char *arg = argv[i]; - if (!strncmp("--exec=", arg, 7)) { + if (!prefixcmp(arg, "--exec=")) { if (exec_at) die("multiple --exec specified"); exec = arg + 7; @@ -62,7 +62,7 @@ static int run_remote_archiver(const char *remote, int argc, if (buf[len-1] == '\n') buf[--len] = 0; if (strcmp(buf, "ACK")) { - if (len > 5 && !strncmp(buf, "NACK ", 5)) + if (len > 5 && !prefixcmp(buf, "NACK ")) die("git-archive: NACK %s", buf + 5); die("git-archive: protocol error"); } @@ -166,11 +166,11 @@ int parse_archive_args(int argc, const char **argv, struct archiver *ar) verbose = 1; continue; } - if (!strncmp(arg, "--format=", 9)) { + if (!prefixcmp(arg, "--format=")) { format = arg + 9; continue; } - if (!strncmp(arg, "--prefix=", 9)) { + if (!prefixcmp(arg, "--prefix=")) { base = arg + 9; continue; } @@ -218,7 +218,7 @@ static const char *extract_remote_arg(int *ac, const char **av) if (!strcmp(arg, "--")) no_more_options = 1; if (!no_more_options) { - if (!strncmp(arg, "--remote=", 9)) { + if (!prefixcmp(arg, "--remote=")) { if (remote) die("Multiple --remote specified"); remote = arg + 9; diff --git a/builtin-blame.c b/builtin-blame.c index 1a752b95bb..9f7dd4e19f 100644 --- a/builtin-blame.c +++ b/builtin-blame.c @@ -87,9 +87,9 @@ struct origin { static char *fill_origin_blob(struct origin *o, mmfile_t *file) { if (!o->file.ptr) { - char type[10]; + enum object_type type; num_read_blob++; - file->ptr = read_sha1_file(o->blob_sha1, type, + file->ptr = read_sha1_file(o->blob_sha1, &type, (unsigned long *)(&(file->size))); o->file = *file; } @@ -263,7 +263,6 @@ static struct origin *get_origin(struct scoreboard *sb, static int fill_blob_sha1(struct origin *origin) { unsigned mode; - char type[10]; if (!is_null_sha1(origin->blob_sha1)) return 0; @@ -271,8 +270,7 @@ static int fill_blob_sha1(struct origin *origin) origin->path, origin->blob_sha1, &mode)) goto error_out; - if (sha1_object_info(origin->blob_sha1, type, NULL) || - strcmp(type, blob_type)) + if (sha1_object_info(origin->blob_sha1, NULL) != OBJ_BLOB) goto error_out; return 0; error_out: @@ -1322,10 +1320,10 @@ static void get_commit_info(struct commit *commit, * we now need to populate them for output. 
*/ if (!commit->buffer) { - char type[20]; + enum object_type type; unsigned long size; commit->buffer = - read_sha1_file(commit->object.sha1, type, &size); + read_sha1_file(commit->object.sha1, &type, &size); } ret->author = author_buf; get_ac_line(commit->buffer, "\nauthor ", @@ -2006,7 +2004,7 @@ static struct commit *fake_working_tree_commit(const char *path, const char *con buf[fin_size] = 0; origin->file.ptr = buf; origin->file.size = fin_size; - pretend_sha1_file(buf, fin_size, blob_type, origin->blob_sha1); + pretend_sha1_file(buf, fin_size, OBJ_BLOB, origin->blob_sha1); commit->util = origin; /* @@ -2065,9 +2063,10 @@ int cmd_blame(int argc, const char **argv, const char *prefix) int i, seen_dashdash, unk, opt; long bottom, top, lno; int output_option = 0; + int show_stats = 0; const char *revs_file = NULL; const char *final_commit_name = NULL; - char type[10]; + enum object_type type; const char *bottomtop = NULL; const char *contents_from = NULL; @@ -2086,6 +2085,8 @@ int cmd_blame(int argc, const char **argv, const char *prefix) blank_boundary = 1; else if (!strcmp("--root", arg)) show_root = 1; + else if (!strcmp(arg, "--show-stats")) + show_stats = 1; else if (!strcmp("-c", arg)) output_option |= OUTPUT_ANNOTATE_COMPAT; else if (!strcmp("-t", arg)) @@ -2094,17 +2095,17 @@ int cmd_blame(int argc, const char **argv, const char *prefix) output_option |= OUTPUT_LONG_OBJECT_NAME; else if (!strcmp("-S", arg) && ++i < argc) revs_file = argv[i]; - else if (!strncmp("-M", arg, 2)) { + else if (!prefixcmp(arg, "-M")) { opt |= PICKAXE_BLAME_MOVE; blame_move_score = parse_score(arg+2); } - else if (!strncmp("-C", arg, 2)) { + else if (!prefixcmp(arg, "-C")) { if (opt & PICKAXE_BLAME_COPY) opt |= PICKAXE_BLAME_COPY_HARDER; opt |= PICKAXE_BLAME_COPY | PICKAXE_BLAME_MOVE; blame_copy_score = parse_score(arg+2); } - else if (!strncmp("-L", arg, 2)) { + else if (!prefixcmp(arg, "-L")) { if (!arg[2]) { if (++i >= argc) usage(blame_usage); @@ -2299,7 +2300,7 @@ int cmd_blame(int argc, const char **argv, const char *prefix) if (fill_blob_sha1(o)) die("no such path %s in %s", path, final_commit_name); - sb.final_buf = read_sha1_file(o->blob_sha1, type, + sb.final_buf = read_sha1_file(o->blob_sha1, &type, &sb.final_buf_size); } num_read_blob++; @@ -2351,7 +2352,7 @@ int cmd_blame(int argc, const char **argv, const char *prefix) ent = e; } - if (DEBUG) { + if (show_stats) { printf("num read blob: %d\n", num_read_blob); printf("num get patch: %d\n", num_get_patch); printf("num commits: %d\n", num_commits); diff --git a/builtin-branch.c b/builtin-branch.c index 2d8d61b453..d0179b00a2 100644 --- a/builtin-branch.c +++ b/builtin-branch.c @@ -59,7 +59,7 @@ int git_branch_config(const char *var, const char *value) branch_use_color = git_config_colorbool(var, value); return 0; } - if (!strncmp(var, "color.branch.", 13)) { + if (!prefixcmp(var, "color.branch.")) { int slot = parse_branch_color_slot(var, 13); color_parse(value, var, branch_colors[slot]); return 0; @@ -134,7 +134,7 @@ static int delete_branches(int argc, const char **argv, int force, int kinds) */ if (!force && - !in_merge_bases(rev, head_rev)) { + !in_merge_bases(rev, &head_rev, 1)) { error("The branch '%s' is not a strict subset of " "your current HEAD.\n" "If you are sure you want to delete it, " @@ -178,13 +178,13 @@ static int append_ref(const char *refname, const unsigned char *sha1, int flags, int len; /* Detect kind */ - if (!strncmp(refname, "refs/heads/", 11)) { + if (!prefixcmp(refname, "refs/heads/")) { kind = REF_LOCAL_BRANCH; 
refname += 11; - } else if (!strncmp(refname, "refs/remotes/", 13)) { + } else if (!prefixcmp(refname, "refs/remotes/")) { kind = REF_REMOTE_BRANCH; refname += 13; - } else if (!strncmp(refname, "refs/tags/", 10)) { + } else if (!prefixcmp(refname, "refs/tags/")) { kind = REF_TAG; refname += 10; } @@ -446,7 +446,7 @@ int cmd_branch(int argc, const char **argv, const char *prefix) reflog = 1; continue; } - if (!strncmp(arg, "--abbrev=", 9)) { + if (!prefixcmp(arg, "--abbrev=")) { abbrev = atoi(arg+9); continue; } @@ -476,7 +476,7 @@ int cmd_branch(int argc, const char **argv, const char *prefix) detached = 1; } else { - if (strncmp(head, "refs/heads/", 11)) + if (prefixcmp(head, "refs/heads/")) die("HEAD not found below refs/heads!"); head += 11; } diff --git a/builtin-cat-file.c b/builtin-cat-file.c index 6c16bfa1ae..d61d3d5b74 100644 --- a/builtin-cat-file.c +++ b/builtin-cat-file.c @@ -79,7 +79,7 @@ static void pprint_tag(const unsigned char *sha1, const char *buf, unsigned long int cmd_cat_file(int argc, const char **argv, const char *prefix) { unsigned char sha1[20]; - char type[20]; + enum object_type type; void *buf; unsigned long size; int opt; @@ -100,14 +100,16 @@ int cmd_cat_file(int argc, const char **argv, const char *prefix) buf = NULL; switch (opt) { case 't': - if (!sha1_object_info(sha1, type, NULL)) { - printf("%s\n", type); + type = sha1_object_info(sha1, NULL); + if (type > 0) { + printf("%s\n", typename(type)); return 0; } break; case 's': - if (!sha1_object_info(sha1, type, &size)) { + type = sha1_object_info(sha1, &size); + if (type > 0) { printf("%lu\n", size); return 0; } @@ -117,17 +119,18 @@ int cmd_cat_file(int argc, const char **argv, const char *prefix) return !has_sha1_file(sha1); case 'p': - if (sha1_object_info(sha1, type, NULL)) + type = sha1_object_info(sha1, NULL); + if (type < 0) die("Not a valid object name %s", argv[2]); /* custom pretty-print here */ - if (!strcmp(type, tree_type)) + if (type == OBJ_TREE) return cmd_ls_tree(2, argv + 1, NULL); - buf = read_sha1_file(sha1, type, &size); + buf = read_sha1_file(sha1, &type, &size); if (!buf) die("Cannot read object %s", argv[2]); - if (!strcmp(type, tag_type)) { + if (type == OBJ_TAG) { pprint_tag(sha1, buf, size); return 0; } diff --git a/builtin-checkout-index.c b/builtin-checkout-index.c index b097c888a0..afe4b0e452 100644 --- a/builtin-checkout-index.c +++ b/builtin-checkout-index.c @@ -223,12 +223,12 @@ int cmd_checkout_index(int argc, const char **argv, const char *prefix) to_tempfile = 1; continue; } - if (!strncmp(arg, "--prefix=", 9)) { + if (!prefixcmp(arg, "--prefix=")) { state.base_dir = arg+9; state.base_dir_len = strlen(state.base_dir); continue; } - if (!strncmp(arg, "--stage=", 8)) { + if (!prefixcmp(arg, "--stage=")) { if (!strcmp(arg + 8, "all")) { to_tempfile = 1; checkout_stage = CHECKOUT_ALL; diff --git a/builtin-commit-tree.c b/builtin-commit-tree.c index 2a818a0a2c..4a8d8d8b67 100644 --- a/builtin-commit-tree.c +++ b/builtin-commit-tree.c @@ -45,15 +45,14 @@ static void add_buffer(char **bufp, unsigned int *sizep, const char *fmt, ...) 
memcpy(buf + size, one_line, len); } -static void check_valid(unsigned char *sha1, const char *expect) +static void check_valid(unsigned char *sha1, enum object_type expect) { - char type[20]; - - if (sha1_object_info(sha1, type, NULL)) + enum object_type type = sha1_object_info(sha1, NULL); + if (type < 0) die("%s is not a valid object", sha1_to_hex(sha1)); - if (expect && strcmp(type, expect)) + if (type != expect) die("%s is not a valid '%s' object", sha1_to_hex(sha1), - expect); + typename(expect)); } /* @@ -101,7 +100,7 @@ int cmd_commit_tree(int argc, const char **argv, const char *prefix) if (get_sha1(argv[1], tree_sha1)) die("Not a valid object name %s", argv[1]); - check_valid(tree_sha1, tree_type); + check_valid(tree_sha1, OBJ_TREE); for (i = 2; i < argc; i += 2) { const char *a, *b; a = argv[i]; b = argv[i+1]; @@ -112,7 +111,7 @@ int cmd_commit_tree(int argc, const char **argv, const char *prefix) die("Too many parents (%d max)", MAXPARENT); if (get_sha1(b, parent_sha1[parents])) die("Not a valid object name %s", b); - check_valid(parent_sha1[parents], commit_type); + check_valid(parent_sha1[parents], OBJ_COMMIT); if (new_parent(parents)) parents++; } diff --git a/builtin-config.c b/builtin-config.c index 0f9051da17..f1433a4ab6 100644 --- a/builtin-config.c +++ b/builtin-config.c @@ -64,7 +64,7 @@ static int get_value(const char* key_, const char* regex_) int ret = -1; char *tl; char *global = NULL, *repo_config = NULL; - const char *local; + const char *system_wide = NULL, *local; local = getenv(CONFIG_ENVIRONMENT); if (!local) { @@ -74,6 +74,7 @@ static int get_value(const char* key_, const char* regex_) local = repo_config = xstrdup(git_path("config")); if (home) global = xstrdup(mkpath("%s/.gitconfig", home)); + system_wide = ETC_GITCONFIG; } key = xstrdup(key_); @@ -103,11 +104,15 @@ static int get_value(const char* key_, const char* regex_) } } + if (do_all && system_wide) + git_config_from_file(show_config, system_wide); if (do_all && global) git_config_from_file(show_config, global); git_config_from_file(show_config, local); if (!do_all && !seen && global) git_config_from_file(show_config, global); + if (!do_all && !seen && system_wide) + git_config_from_file(show_config, system_wide); free(key); if (regexp) { @@ -147,7 +152,10 @@ int cmd_config(int argc, const char **argv, const char *prefix) } else { die("$HOME not set"); } - } else if (!strcmp(argv[1], "--rename-section")) { + } + else if (!strcmp(argv[1], "--system")) + setenv("GIT_CONFIG", ETC_GITCONFIG, 1); + else if (!strcmp(argv[1], "--rename-section")) { int ret; if (argc != 4) usage(git_config_set_usage); @@ -159,7 +167,8 @@ int cmd_config(int argc, const char **argv, const char *prefix) return 1; } return 0; - } else + } + else break; argc--; argv++; diff --git a/builtin-describe.c b/builtin-describe.c index bcc645622a..165917e40d 100644 --- a/builtin-describe.c +++ b/builtin-describe.c @@ -52,7 +52,7 @@ static int get_name(const char *path, const unsigned char *sha1, int flag, void * If --tags, then any tags are used. * Otherwise only annotated tags are used. 
*/ - if (!strncmp(path, "refs/tags/", 10)) { + if (!prefixcmp(path, "refs/tags/")) { if (object->type == OBJ_TAG) prio = 2; else @@ -254,12 +254,12 @@ int cmd_describe(int argc, const char **argv, const char *prefix) all = 1; else if (!strcmp(arg, "--tags")) tags = 1; - else if (!strncmp(arg, "--abbrev=", 9)) { + else if (!prefixcmp(arg, "--abbrev=")) { abbrev = strtoul(arg + 9, NULL, 10); if (abbrev != 0 && (abbrev < MINIMUM_ABBREV || 40 < abbrev)) abbrev = DEFAULT_ABBREV; } - else if (!strncmp(arg, "--candidates=", 13)) { + else if (!prefixcmp(arg, "--candidates=")) { max_candidates = strtoul(arg + 13, NULL, 10); if (max_candidates < 1) max_candidates = 1; diff --git a/builtin-diff-files.c b/builtin-diff-files.c index 5d4a5c5828..e1199f80ae 100644 --- a/builtin-diff-files.c +++ b/builtin-diff-files.c @@ -10,42 +10,21 @@ #include "builtin.h" static const char diff_files_usage[] = -"git-diff-files [-q] [-0/-1/2/3 |-c|--cc] [<common diff options>] [<path>...]" +"git-diff-files [-q] [-0/-1/2/3 |-c|--cc|-n|--no-index] [<common diff options>] [<path>...]" COMMON_DIFF_OPTIONS_HELP; int cmd_diff_files(int argc, const char **argv, const char *prefix) { struct rev_info rev; - int silent = 0; + int nongit = 0; + prefix = setup_git_directory_gently(&nongit); init_revisions(&rev, prefix); git_config(git_default_config); /* no "diff" UI options */ rev.abbrev = 0; argc = setup_revisions(argc, argv, &rev, NULL); - while (1 < argc && argv[1][0] == '-') { - if (!strcmp(argv[1], "--base")) - rev.max_count = 1; - else if (!strcmp(argv[1], "--ours")) - rev.max_count = 2; - else if (!strcmp(argv[1], "--theirs")) - rev.max_count = 3; - else if (!strcmp(argv[1], "-q")) - silent = 1; - else - usage(diff_files_usage); - argv++; argc--; - } if (!rev.diffopt.output_format) rev.diffopt.output_format = DIFF_FORMAT_RAW; - - /* - * Make sure there are NO revision (i.e. pending object) parameter, - * rev.max_count is reasonable (0 <= n <= 3), - * there is no other revision filtering parameters. 
- */ - if (rev.pending.nr || - rev.min_age != -1 || rev.max_age != -1) - usage(diff_files_usage); - return run_diff_files(&rev, silent); + return run_diff_files_cmd(&rev, argc, argv); } diff --git a/builtin-diff-index.c b/builtin-diff-index.c index 95a3db156b..083599d5c4 100644 --- a/builtin-diff-index.c +++ b/builtin-diff-index.c @@ -38,5 +38,9 @@ int cmd_diff_index(int argc, const char **argv, const char *prefix) if (rev.pending.nr != 1 || rev.max_count != -1 || rev.min_age != -1 || rev.max_age != -1) usage(diff_cache_usage); + if (read_cache() < 0) { + perror("read_cache"); + return -1; + } return run_diff_index(&rev, cached); } diff --git a/builtin-diff-stages.c b/builtin-diff-stages.c deleted file mode 100644 index 70bb89808d..0000000000 --- a/builtin-diff-stages.c +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2005 Junio C Hamano - */ - -#include "cache.h" -#include "diff.h" -#include "builtin.h" - -static struct diff_options diff_options; - -static const char diff_stages_usage[] = -"git-diff-stages [<common diff options>] <stage1> <stage2> [<path>...]" -COMMON_DIFF_OPTIONS_HELP; - -static void diff_stages(int stage1, int stage2, const char **pathspec) -{ - int i = 0; - while (i < active_nr) { - struct cache_entry *ce, *stages[4] = { NULL, }; - struct cache_entry *one, *two; - const char *name; - int len, skip; - - ce = active_cache[i]; - skip = !ce_path_match(ce, pathspec); - len = ce_namelen(ce); - name = ce->name; - for (;;) { - int stage = ce_stage(ce); - stages[stage] = ce; - if (active_nr <= ++i) - break; - ce = active_cache[i]; - if (ce_namelen(ce) != len || - memcmp(name, ce->name, len)) - break; - } - one = stages[stage1]; - two = stages[stage2]; - - if (skip || (!one && !two)) - continue; - if (!one) - diff_addremove(&diff_options, '+', ntohl(two->ce_mode), - two->sha1, name, NULL); - else if (!two) - diff_addremove(&diff_options, '-', ntohl(one->ce_mode), - one->sha1, name, NULL); - else if (hashcmp(one->sha1, two->sha1) || - (one->ce_mode != two->ce_mode) || - diff_options.find_copies_harder) - diff_change(&diff_options, - ntohl(one->ce_mode), ntohl(two->ce_mode), - one->sha1, two->sha1, name, NULL); - } -} - -int cmd_diff_stages(int ac, const char **av, const char *prefix) -{ - int stage1, stage2; - const char **pathspec = NULL; - - git_config(git_default_config); /* no "diff" UI options */ - read_cache(); - diff_setup(&diff_options); - while (1 < ac && av[1][0] == '-') { - const char *arg = av[1]; - if (!strcmp(arg, "-r")) - ; /* as usual */ - else { - int diff_opt_cnt; - diff_opt_cnt = diff_opt_parse(&diff_options, - av+1, ac-1); - if (diff_opt_cnt < 0) - usage(diff_stages_usage); - else if (diff_opt_cnt) { - av += diff_opt_cnt; - ac -= diff_opt_cnt; - continue; - } - else - usage(diff_stages_usage); - } - ac--; av++; - } - - if (!diff_options.output_format) - diff_options.output_format = DIFF_FORMAT_RAW; - - if (ac < 3 || - sscanf(av[1], "%d", &stage1) != 1 || - ! (0 <= stage1 && stage1 <= 3) || - sscanf(av[2], "%d", &stage2) != 1 || - ! (0 <= stage2 && stage2 <= 3)) - usage(diff_stages_usage); - - av += 3; /* The rest from av[0] are for paths restriction. 
*/ - pathspec = get_pathspec(prefix, av); - - if (diff_setup_done(&diff_options) < 0) - usage(diff_stages_usage); - - diff_stages(stage1, stage2, pathspec); - diffcore_std(&diff_options); - diff_flush(&diff_options); - return 0; -} diff --git a/builtin-diff.c b/builtin-diff.c index c387ebb16c..28b660a780 100644 --- a/builtin-diff.c +++ b/builtin-diff.c @@ -25,40 +25,6 @@ struct blobinfo { static const char builtin_diff_usage[] = "git-diff <options> <rev>{0,2} -- <path>*"; -static int builtin_diff_files(struct rev_info *revs, - int argc, const char **argv) -{ - int silent = 0; - while (1 < argc) { - const char *arg = argv[1]; - if (!strcmp(arg, "--base")) - revs->max_count = 1; - else if (!strcmp(arg, "--ours")) - revs->max_count = 2; - else if (!strcmp(arg, "--theirs")) - revs->max_count = 3; - else if (!strcmp(arg, "-q")) - silent = 1; - else - usage(builtin_diff_usage); - argv++; argc--; - } - /* - * Make sure there are NO revision (i.e. pending object) parameter, - * specified rev.max_count is reasonable (0 <= n <= 3), and - * there is no other revision filtering parameter. - */ - if (revs->pending.nr || - revs->min_age != -1 || - revs->max_age != -1 || - 3 < revs->max_count) - usage(builtin_diff_usage); - if (revs->max_count < 0 && - (revs->diffopt.output_format & DIFF_FORMAT_PATCH)) - revs->combine_merges = revs->dense_combined_merges = 1; - return run_diff_files(revs, silent); -} - static void stuff_change(struct diff_options *opt, unsigned old_mode, unsigned new_mode, const unsigned char *old_sha1, @@ -151,6 +117,10 @@ static int builtin_diff_index(struct rev_info *revs, revs->max_count != -1 || revs->min_age != -1 || revs->max_age != -1) usage(builtin_diff_usage); + if (read_cache() < 0) { + perror("read_cache"); + return -1; + } return run_diff_index(revs, cached); } @@ -219,6 +189,7 @@ int cmd_diff(int argc, const char **argv, const char *prefix) int ents = 0, blobs = 0, paths = 0; const char *path = NULL; struct blobinfo blob[2]; + int nongit = 0; /* * We could get N tree-ish in the rev.pending_objects list. @@ -240,6 +211,7 @@ int cmd_diff(int argc, const char **argv, const char *prefix) * Other cases are errors. 
*/ + prefix = setup_git_directory_gently(&nongit); git_config(git_diff_ui_config); init_revisions(&rev, prefix); @@ -261,6 +233,8 @@ int cmd_diff(int argc, const char **argv, const char *prefix) break; else if (!strcmp(arg, "--cached")) { add_head(&rev); + if (!rev.pending.nr) + die("No HEAD commit to compare with (yet)"); break; } } @@ -315,7 +289,7 @@ int cmd_diff(int argc, const char **argv, const char *prefix) if (!ents) { switch (blobs) { case 0: - return builtin_diff_files(&rev, argc, argv); + return run_diff_files_cmd(&rev, argc, argv); break; case 1: if (paths != 1) diff --git a/builtin-fmt-merge-msg.c b/builtin-fmt-merge-msg.c index 5be6fb4388..5c145d2165 100644 --- a/builtin-fmt-merge-msg.c +++ b/builtin-fmt-merge-msg.c @@ -81,7 +81,7 @@ static int handle_line(char *line) if (len < 43 || line[40] != '\t') return 1; - if (!strncmp(line + 41, "not-for-merge", 13)) + if (!prefixcmp(line + 41, "not-for-merge")) return 0; if (line[41] != '\t') @@ -119,15 +119,15 @@ static int handle_line(char *line) if (pulling_head) { origin = xstrdup(src); src_data->head_status |= 1; - } else if (!strncmp(line, "branch ", 7)) { + } else if (!prefixcmp(line, "branch ")) { origin = xstrdup(line + 7); append_to_list(&src_data->branch, origin, NULL); src_data->head_status |= 2; - } else if (!strncmp(line, "tag ", 4)) { + } else if (!prefixcmp(line, "tag ")) { origin = line; append_to_list(&src_data->tag, xstrdup(origin + 4), NULL); src_data->head_status |= 2; - } else if (!strncmp(line, "remote branch ", 14)) { + } else if (!prefixcmp(line, "remote branch ")) { origin = xstrdup(line + 14); append_to_list(&src_data->r_branch, origin, NULL); src_data->head_status |= 2; @@ -282,7 +282,7 @@ int cmd_fmt_merge_msg(int argc, const char **argv, const char *prefix) current_branch = resolve_ref("HEAD", head_sha1, 1, NULL); if (!current_branch) die("No current branch"); - if (!strncmp(current_branch, "refs/heads/", 11)) + if (!prefixcmp(current_branch, "refs/heads/")) current_branch += 11; while (fgets(line, sizeof(line), in)) { diff --git a/builtin-for-each-ref.c b/builtin-for-each-ref.c index 16c785f047..b11ca928d6 100644 --- a/builtin-for-each-ref.c +++ b/builtin-for-each-ref.c @@ -173,8 +173,8 @@ static void verify_format(const char *format) */ static void *get_obj(const unsigned char *sha1, struct object **obj, unsigned long *sz, int *eaten) { - char type[20]; - void *buf = read_sha1_file(sha1, type, sz); + enum object_type type; + void *buf = read_sha1_file(sha1, &type, sz); if (buf) *obj = parse_object_buffer(sha1, type, *sz, buf, eaten); @@ -196,7 +196,7 @@ static void grab_common_values(struct atom_value *val, int deref, struct object if (deref) name++; if (!strcmp(name, "objecttype")) - v->s = type_names[obj->type]; + v->s = typename(obj->type); else if (!strcmp(name, "objectsize")) { char *s = xmalloc(40); sprintf(s, "%lu", sz); @@ -814,7 +814,7 @@ int cmd_for_each_ref(int ac, const char **av, char *prefix) i++; break; } - if (!strncmp(arg, "--format=", 9)) { + if (!prefixcmp(arg, "--format=")) { if (format) die("more than one --format?"); format = arg + 9; @@ -844,7 +844,7 @@ int cmd_for_each_ref(int ac, const char **av, char *prefix) quote_style = QUOTE_TCL; continue; } - if (!strncmp(arg, "--count=", 8)) { + if (!prefixcmp(arg, "--count=")) { if (maxcount) die("more than one --count?"); maxcount = atoi(arg + 8); @@ -852,7 +852,7 @@ int cmd_for_each_ref(int ac, const char **av, char *prefix) die("The number %s did not parse", arg); continue; } - if (!strncmp(arg, "--sort=", 7)) { + if (!prefixcmp(arg, 
"--sort=")) { struct ref_sort *s = xcalloc(1, sizeof(*s)); int len; diff --git a/builtin-fsck.c b/builtin-fsck.c index 6da3814d59..6abf498d2b 100644 --- a/builtin-fsck.c +++ b/builtin-fsck.c @@ -546,7 +546,7 @@ static int fsck_head_link(void) if (!head_points_at || !(flag & REF_ISSYMREF)) return error("HEAD is not a symbolic ref"); - if (strncmp(head_points_at, "refs/heads/", 11)) + if (prefixcmp(head_points_at, "refs/heads/")) return error("HEAD points to something strange (%s)", head_points_at); if (is_null_sha1(sha1)) diff --git a/builtin-grep.c b/builtin-grep.c index 2bfbdb7140..96b70227cf 100644 --- a/builtin-grep.c +++ b/builtin-grep.c @@ -84,11 +84,11 @@ static int grep_sha1(struct grep_opt *opt, const unsigned char *sha1, const char { unsigned long size; char *data; - char type[20]; + enum object_type type; char *to_free = NULL; int hit; - data = read_sha1_file(sha1, type, &size); + data = read_sha1_file(sha1, &type, &size); if (!data) { error("'%s': unable to read %s", name, sha1_to_hex(sha1)); return 0; @@ -380,10 +380,10 @@ static int grep_tree(struct grep_opt *opt, const char **paths, else if (S_ISREG(entry.mode)) hit |= grep_sha1(opt, entry.sha1, path_buf, tn_len); else if (S_ISDIR(entry.mode)) { - char type[20]; + enum object_type type; struct tree_desc sub; void *data; - data = read_sha1_file(entry.sha1, type, &sub.size); + data = read_sha1_file(entry.sha1, &type, &sub.size); if (!data) die("unable to read tree (%s)", sha1_to_hex(entry.sha1)); @@ -527,9 +527,9 @@ int cmd_grep(int argc, const char **argv, const char *prefix) opt.word_regexp = 1; continue; } - if (!strncmp("-A", arg, 2) || - !strncmp("-B", arg, 2) || - !strncmp("-C", arg, 2) || + if (!prefixcmp(arg, "-A") || + !prefixcmp(arg, "-B") || + !prefixcmp(arg, "-C") || (arg[0] == '-' && '1' <= arg[1] && arg[1] <= '9')) { unsigned num; const char *scan; diff --git a/builtin-init-db.c b/builtin-init-db.c index 12e43d0db4..4df9fd0fad 100644 --- a/builtin-init-db.c +++ b/builtin-init-db.c @@ -283,11 +283,11 @@ int cmd_init_db(int argc, const char **argv, const char *prefix) for (i = 1; i < argc; i++, argv++) { const char *arg = argv[1]; - if (!strncmp(arg, "--template=", 11)) + if (!prefixcmp(arg, "--template=")) template_dir = arg+11; else if (!strcmp(arg, "--shared")) shared_repository = PERM_GROUP; - else if (!strncmp(arg, "--shared=", 9)) + else if (!prefixcmp(arg, "--shared=")) shared_repository = git_config_perm("arg", arg+9); else usage(init_db_usage); diff --git a/builtin-log.c b/builtin-log.c index a5e4b625f8..1c9f7d02a8 100644 --- a/builtin-log.c +++ b/builtin-log.c @@ -32,7 +32,7 @@ static void cmd_log_init(int argc, const char **argv, const char *prefix, rev->always_show_header = 0; for (i = 1; i < argc; i++) { const char *arg = argv[i]; - if (!strncmp(arg, "--encoding=", 11)) { + if (!prefixcmp(arg, "--encoding=")) { arg += 11; if (strcmp(arg, "none")) git_log_output_encoding = strdup(arg); @@ -89,8 +89,8 @@ int cmd_whatchanged(int argc, const char **argv, const char *prefix) static int show_object(const unsigned char *sha1, int suppress_header) { unsigned long size; - char type[20]; - char *buf = read_sha1_file(sha1, type, &size); + enum object_type type; + char *buf = read_sha1_file(sha1, &type, &size); int offset = 0; if (!buf) @@ -293,7 +293,7 @@ static int reopen_stdout(struct commit *commit, int nr, int keep_subject) sol += 2; /* strip [PATCH] or [PATCH blabla] */ - if (!keep_subject && !strncmp(sol, "[PATCH", 6)) { + if (!keep_subject && !prefixcmp(sol, "[PATCH")) { char *eos = strchr(sol + 6, ']'); if 
(eos) { while (isspace(*eos)) @@ -448,7 +448,7 @@ int cmd_format_patch(int argc, const char **argv, const char *prefix) else if (!strcmp(argv[i], "-n") || !strcmp(argv[i], "--numbered")) numbered = 1; - else if (!strncmp(argv[i], "--start-number=", 15)) + else if (!prefixcmp(argv[i], "--start-number=")) start_number = strtol(argv[i] + 15, NULL, 10); else if (!strcmp(argv[i], "--start-number")) { i++; @@ -484,13 +484,13 @@ int cmd_format_patch(int argc, const char **argv, const char *prefix) } else if (!strcmp(argv[i], "--attach")) rev.mime_boundary = git_version_string; - else if (!strncmp(argv[i], "--attach=", 9)) + else if (!prefixcmp(argv[i], "--attach=")) rev.mime_boundary = argv[i] + 9; else if (!strcmp(argv[i], "--ignore-if-in-upstream")) ignore_if_in_upstream = 1; else if (!strcmp(argv[i], "--thread")) thread = 1; - else if (!strncmp(argv[i], "--in-reply-to=", 14)) + else if (!prefixcmp(argv[i], "--in-reply-to=")) in_reply_to = argv[i] + 14; else if (!strcmp(argv[i], "--in-reply-to")) { i++; @@ -498,7 +498,7 @@ int cmd_format_patch(int argc, const char **argv, const char *prefix) die("Need a Message-Id for --in-reply-to"); in_reply_to = argv[i]; } - else if (!strncmp(argv[i], "--suffix=", 9)) + else if (!prefixcmp(argv[i], "--suffix=")) fmt_patch_suffix = argv[i] + 9; else argv[j++] = argv[i]; diff --git a/builtin-ls-files.c b/builtin-ls-files.c index ac89eb2f77..4e1d5af634 100644 --- a/builtin-ls-files.c +++ b/builtin-ls-files.c @@ -406,7 +406,7 @@ int cmd_ls_files(int argc, const char **argv, const char *prefix) add_exclude(argv[++i], "", 0, &dir.exclude_list[EXC_CMDL]); continue; } - if (!strncmp(arg, "--exclude=", 10)) { + if (!prefixcmp(arg, "--exclude=")) { exc_given = 1; add_exclude(arg+10, "", 0, &dir.exclude_list[EXC_CMDL]); continue; @@ -416,12 +416,12 @@ int cmd_ls_files(int argc, const char **argv, const char *prefix) add_excludes_from_file(&dir, argv[++i]); continue; } - if (!strncmp(arg, "--exclude-from=", 15)) { + if (!prefixcmp(arg, "--exclude-from=")) { exc_given = 1; add_excludes_from_file(&dir, arg+15); continue; } - if (!strncmp(arg, "--exclude-per-directory=", 24)) { + if (!prefixcmp(arg, "--exclude-per-directory=")) { exc_given = 1; dir.exclude_per_dir = arg + 24; continue; @@ -434,7 +434,7 @@ int cmd_ls_files(int argc, const char **argv, const char *prefix) error_unmatch = 1; continue; } - if (!strncmp(arg, "--abbrev=", 9)) { + if (!prefixcmp(arg, "--abbrev=")) { abbrev = strtoul(arg+9, NULL, 10); if (abbrev && abbrev < MINIMUM_ABBREV) abbrev = MINIMUM_ABBREV; diff --git a/builtin-ls-tree.c b/builtin-ls-tree.c index 201defd934..6472610ac2 100644 --- a/builtin-ls-tree.c +++ b/builtin-ls-tree.c @@ -118,7 +118,7 @@ int cmd_ls_tree(int argc, const char **argv, const char *prefix) chomp_prefix = 0; break; } - if (!strncmp(argv[1]+2, "abbrev=",7)) { + if (!prefixcmp(argv[1]+2, "abbrev=")) { abbrev = strtoul(argv[1]+9, NULL, 10); if (abbrev && abbrev < MINIMUM_ABBREV) abbrev = MINIMUM_ABBREV; diff --git a/builtin-mailinfo.c b/builtin-mailinfo.c index cf5ef29c0f..766a37ebe2 100644 --- a/builtin-mailinfo.c +++ b/builtin-mailinfo.c @@ -847,7 +847,7 @@ int cmd_mailinfo(int argc, const char **argv, const char *prefix) metainfo_charset = def_charset; else if (!strcmp(argv[1], "-n")) metainfo_charset = NULL; - else if (!strncmp(argv[1], "--encoding=", 11)) + else if (!prefixcmp(argv[1], "--encoding=")) metainfo_charset = argv[1] + 11; else usage(mailinfo_usage); diff --git a/merge-base.c b/builtin-merge-base.c index 385f4ba386..e35d362f26 100644 --- a/merge-base.c +++ 
b/builtin-merge-base.c @@ -1,9 +1,7 @@ #include "cache.h" #include "commit.h" -static int show_all; - -static int merge_base(struct commit *rev1, struct commit *rev2) +static int show_merge_base(struct commit *rev1, struct commit *rev2, int show_all) { struct commit_list *result = get_merge_bases(rev1, rev2, 0); @@ -23,16 +21,16 @@ static int merge_base(struct commit *rev1, struct commit *rev2) static const char merge_base_usage[] = "git-merge-base [--all] <commit-id> <commit-id>"; -int main(int argc, char **argv) +int cmd_merge_base(int argc, const char **argv, const char *prefix) { struct commit *rev1, *rev2; unsigned char rev1key[20], rev2key[20]; + int show_all = 0; - setup_git_directory(); git_config(git_default_config); while (1 < argc && argv[1][0] == '-') { - char *arg = argv[1]; + const char *arg = argv[1]; if (!strcmp(arg, "-a") || !strcmp(arg, "--all")) show_all = 1; else @@ -49,5 +47,5 @@ int main(int argc, char **argv) rev2 = lookup_commit_reference(rev2key); if (!rev1 || !rev2) return 1; - return merge_base(rev1, rev2); + return show_merge_base(rev1, rev2, show_all); } diff --git a/builtin-name-rev.c b/builtin-name-rev.c index b4f15cc38a..c022224361 100644 --- a/builtin-name-rev.c +++ b/builtin-name-rev.c @@ -5,7 +5,7 @@ #include "refs.h" static const char name_rev_usage[] = - "git-name-rev [--tags] ( --all | --stdin | committish [committish...] )\n"; + "git-name-rev [--tags | --refs=<pattern>] ( --all | --stdin | committish [committish...] )\n"; typedef struct rev_name { const char *tip_name; @@ -57,13 +57,17 @@ copy_data: parents; parents = parents->next, parent_number++) { if (parent_number > 1) { - char *new_name = xmalloc(strlen(tip_name)+8); + int len = strlen(tip_name); + char *new_name = xmalloc(len + 8); + if (len > 2 && !strcmp(tip_name + len - 2, "^0")) + len -= 2; if (generation > 0) - sprintf(new_name, "%s~%d^%d", tip_name, + sprintf(new_name, "%.*s~%d^%d", len, tip_name, generation, parent_number); else - sprintf(new_name, "%s^%d", tip_name, parent_number); + sprintf(new_name, "%.*s^%d", len, tip_name, + parent_number); name_rev(parents->item, new_name, merge_traversals + 1 , 0, 0); @@ -74,13 +78,21 @@ copy_data: } } +struct name_ref_data { + int tags_only; + const char *ref_filter; +}; + static int name_ref(const char *path, const unsigned char *sha1, int flags, void *cb_data) { struct object *o = parse_object(sha1); - int tags_only = *(int*)cb_data; + struct name_ref_data *data = cb_data; int deref = 0; - if (tags_only && strncmp(path, "refs/tags/", 10)) + if (data->tags_only && prefixcmp(path, "refs/tags/")) + return 0; + + if (data->ref_filter && fnmatch(data->ref_filter, path, 0)) return 0; while (o && o->type == OBJ_TAG) { @@ -93,9 +105,9 @@ static int name_ref(const char *path, const unsigned char *sha1, int flags, void if (o && o->type == OBJ_COMMIT) { struct commit *commit = (struct commit *)o; - if (!strncmp(path, "refs/heads/", 11)) + if (!prefixcmp(path, "refs/heads/")) path = path + 11; - else if (!strncmp(path, "refs/", 5)) + else if (!prefixcmp(path, "refs/")) path = path + 5; name_rev(commit, xstrdup(path), 0, 0, deref); @@ -119,17 +131,22 @@ static const char* get_rev_name(struct object *o) if (!n->generation) return n->tip_name; - - snprintf(buffer, sizeof(buffer), "%s~%d", n->tip_name, n->generation); - - return buffer; + else { + int len = strlen(n->tip_name); + if (len > 2 && !strcmp(n->tip_name + len - 2, "^0")) + len -= 2; + snprintf(buffer, sizeof(buffer), "%.*s~%d", len, n->tip_name, + n->generation); + + return buffer; + } } int 
cmd_name_rev(int argc, const char **argv, const char *prefix) { struct object_array revs = { 0, 0, NULL }; int as_is = 0, all = 0, transform_stdin = 0; - int tags_only = 0; + struct name_ref_data data = { 0, NULL }; git_config(git_default_config); @@ -146,7 +163,10 @@ int cmd_name_rev(int argc, const char **argv, const char *prefix) as_is = 1; continue; } else if (!strcmp(*argv, "--tags")) { - tags_only = 1; + data.tags_only = 1; + continue; + } else if (!prefixcmp(*argv, "--refs=")) { + data.ref_filter = *argv + 7; continue; } else if (!strcmp(*argv, "--all")) { if (argc > 1) @@ -185,7 +205,7 @@ int cmd_name_rev(int argc, const char **argv, const char *prefix) add_object_array((struct object *)commit, *argv, &revs); } - for_each_ref(name_ref, &tags_only); + for_each_ref(name_ref, &data); if (transform_stdin) { char buffer[2048]; diff --git a/builtin-pack-objects.c b/builtin-pack-objects.c index 971388276a..8cf24f4079 100644 --- a/builtin-pack-objects.c +++ b/builtin-pack-objects.c @@ -230,8 +230,8 @@ static unsigned char *find_packed_object_name(struct packed_git *p, static void *delta_against(void *buf, unsigned long size, struct object_entry *entry) { unsigned long othersize, delta_size; - char type[10]; - void *otherbuf = read_sha1_file(entry->delta->sha1, type, &othersize); + enum object_type type; + void *otherbuf = read_sha1_file(entry->delta->sha1, &type, &othersize); void *delta_buf; if (!otherbuf) @@ -375,7 +375,7 @@ static unsigned long write_object(struct sha1file *f, struct object_entry *entry) { unsigned long size; - char type[10]; + enum object_type type; void *buf; unsigned char header[10]; unsigned hdrlen, datalen; @@ -416,7 +416,7 @@ static unsigned long write_object(struct sha1file *f, } if (!to_reuse) { - buf = read_sha1_file(entry->sha1, type, &size); + buf = read_sha1_file(entry->sha1, &type, &size); if (!buf) die("unable to read %s", sha1_to_hex(entry->sha1)); if (size != entry->size) @@ -765,7 +765,7 @@ static struct pbase_tree_cache *pbase_tree_get(const unsigned char *sha1) struct pbase_tree_cache *ent, *nent; void *data; unsigned long size; - char type[20]; + enum object_type type; int neigh; int my_ix = pbase_tree_cache_ix(sha1); int available_ix = -1; @@ -792,10 +792,10 @@ static struct pbase_tree_cache *pbase_tree_get(const unsigned char *sha1) /* Did not find one. Either we got a bogus request or * we need to read and perhaps cache. 
*/ - data = read_sha1_file(sha1, type, &size); + data = read_sha1_file(sha1, &type, &size); if (!data) return NULL; - if (strcmp(type, tree_type)) { + if (type != OBJ_TREE) { free(data); return NULL; } @@ -854,19 +854,19 @@ static void add_pbase_object(struct tree_desc *tree, while (tree_entry(tree,&entry)) { unsigned long size; - char type[20]; + enum object_type type; if (entry.pathlen != cmplen || memcmp(entry.path, name, cmplen) || !has_sha1_file(entry.sha1) || - sha1_object_info(entry.sha1, type, &size)) + (type = sha1_object_info(entry.sha1, &size)) < 0) continue; if (name[cmplen] != '/') { unsigned hash = name_hash(fullname); add_object_entry(entry.sha1, hash, 1); return; } - if (!strcmp(type, tree_type)) { + if (type == OBJ_TREE) { struct tree_desc sub; struct pbase_tree_cache *tree; const char *down = name+cmplen+1; @@ -978,8 +978,6 @@ static void add_preferred_base(unsigned char *sha1) static void check_object(struct object_entry *entry) { - char type[20]; - if (entry->in_pack && !entry->preferred_base) { struct packed_git *p = entry->in_pack; struct pack_window *w_curs = NULL; @@ -1062,21 +1060,10 @@ static void check_object(struct object_entry *entry) /* Otherwise we would do the usual */ } - if (sha1_object_info(entry->sha1, type, &entry->size)) + entry->type = sha1_object_info(entry->sha1, &entry->size); + if (entry->type < 0) die("unable to get type of object %s", sha1_to_hex(entry->sha1)); - - if (!strcmp(type, commit_type)) { - entry->type = OBJ_COMMIT; - } else if (!strcmp(type, tree_type)) { - entry->type = OBJ_TREE; - } else if (!strcmp(type, blob_type)) { - entry->type = OBJ_BLOB; - } else if (!strcmp(type, tag_type)) { - entry->type = OBJ_TAG; - } else - die("unable to pack object %s of type %s", - sha1_to_hex(entry->sha1), type); } static unsigned int check_delta_limit(struct object_entry *me, unsigned int n) @@ -1206,7 +1193,7 @@ static int try_delta(struct unpacked *trg, struct unpacked *src, struct object_entry *trg_entry = trg->entry; struct object_entry *src_entry = src->entry; unsigned long trg_size, src_size, delta_size, sizediff, max_size, sz; - char type[10]; + enum object_type type; void *delta_buf; /* Don't bother doing diffs between different types */ @@ -1257,13 +1244,13 @@ static int try_delta(struct unpacked *trg, struct unpacked *src, /* Load data if not already done */ if (!trg->data) { - trg->data = read_sha1_file(trg_entry->sha1, type, &sz); + trg->data = read_sha1_file(trg_entry->sha1, &type, &sz); if (sz != trg_size) die("object %s inconsistent object length (%lu vs %lu)", sha1_to_hex(trg_entry->sha1), sz, trg_size); } if (!src->data) { - src->data = read_sha1_file(src_entry->sha1, type, &sz); + src->data = read_sha1_file(src_entry->sha1, &type, &sz); if (sz != src_size) die("object %s inconsistent object length (%lu vs %lu)", sha1_to_hex(src_entry->sha1), sz, src_size); @@ -1582,14 +1569,14 @@ int cmd_pack_objects(int argc, const char **argv, const char *prefix) incremental = 1; continue; } - if (!strncmp("--window=", arg, 9)) { + if (!prefixcmp(arg, "--window=")) { char *end; window = strtoul(arg+9, &end, 0); if (!arg[9] || *end) usage(pack_usage); continue; } - if (!strncmp("--depth=", arg, 8)) { + if (!prefixcmp(arg, "--depth=")) { char *end; depth = strtoul(arg+8, &end, 0); if (!arg[8] || *end) @@ -1625,7 +1612,7 @@ int cmd_pack_objects(int argc, const char **argv, const char *prefix) continue; } if (!strcmp("--unpacked", arg) || - !strncmp("--unpacked=", arg, 11) || + !prefixcmp(arg, "--unpacked=") || !strcmp("--reflog", arg) || 
!strcmp("--all", arg)) { use_internal_rev_list = 1; diff --git a/builtin-pack-refs.c b/builtin-pack-refs.c index 3de9b3eefd..d080e30d67 100644 --- a/builtin-pack-refs.c +++ b/builtin-pack-refs.c @@ -36,7 +36,7 @@ static int handle_one_ref(const char *path, const unsigned char *sha1, /* Do not pack the symbolic refs */ if ((flags & REF_ISSYMREF)) return 0; - is_tag_ref = !strncmp(path, "refs/tags/", 10); + is_tag_ref = !prefixcmp(path, "refs/tags/"); /* ALWAYS pack refs that were already packed or are tags */ if (!cb->all && !is_tag_ref && !(flags & REF_ISPACKED)) diff --git a/builtin-prune.c b/builtin-prune.c index 6f0ba0d04d..09864b7a6d 100644 --- a/builtin-prune.c +++ b/builtin-prune.c @@ -10,15 +10,10 @@ static int show_only; static int prune_object(char *path, const char *filename, const unsigned char *sha1) { - char buf[20]; - const char *type; - if (show_only) { - if (sha1_object_info(sha1, buf, NULL)) - type = "unknown"; - else - type = buf; - printf("%s %s\n", sha1_to_hex(sha1), type); + enum object_type type = sha1_object_info(sha1, NULL); + printf("%s %s\n", sha1_to_hex(sha1), + (type > 0) ? typename(type) : "unknown"); return 0; } unlink(mkpath("%s/%s", path, filename)); diff --git a/builtin-push.c b/builtin-push.c index c45649e26c..979efcc45f 100644 --- a/builtin-push.c +++ b/builtin-push.c @@ -32,7 +32,7 @@ static int expand_one_ref(const char *ref, const unsigned char *sha1, int flag, /* Ignore the "refs/" at the beginning of the refname */ ref += 5; - if (!strncmp(ref, "tags/", 5)) + if (!prefixcmp(ref, "tags/")) add_refspec(xstrdup(ref)); return 0; } @@ -149,10 +149,10 @@ static int get_remotes_uri(const char *repo, const char *uri[MAX_URI]) int is_refspec; char *s, *p; - if (!strncmp("URL:", buffer, 4)) { + if (!prefixcmp(buffer, "URL:")) { is_refspec = 0; s = buffer + 4; - } else if (!strncmp("Push:", buffer, 5)) { + } else if (!prefixcmp(buffer, "Push:")) { is_refspec = 1; s = buffer + 5; } else @@ -195,7 +195,7 @@ static int config_get_receivepack; static int get_remote_config(const char* key, const char* value) { - if (!strncmp(key, "remote.", 7) && + if (!prefixcmp(key, "remote.") && !strncmp(key + 7, config_repo, config_repo_len)) { if (!strcmp(key + 7 + config_repo_len, ".url")) { if (config_current_uri < MAX_URI) @@ -324,8 +324,8 @@ static int do_push(const char *repo) const char **dest_refspec = refspec; const char *dest = uri[i]; const char *sender = "git-send-pack"; - if (!strncmp(dest, "http://", 7) || - !strncmp(dest, "https://", 8)) + if (!prefixcmp(dest, "http://") || + !prefixcmp(dest, "https://")) sender = "git-http-push"; else if (thin) argv[dest_argc++] = "--thin"; @@ -373,7 +373,7 @@ int cmd_push(int argc, const char **argv, const char *prefix) verbose=1; continue; } - if (!strncmp(arg, "--repo=", 7)) { + if (!prefixcmp(arg, "--repo=")) { repo = arg+7; continue; } @@ -397,11 +397,11 @@ int cmd_push(int argc, const char **argv, const char *prefix) thin = 0; continue; } - if (!strncmp(arg, "--receive-pack=", 15)) { + if (!prefixcmp(arg, "--receive-pack=")) { receivepack = arg; continue; } - if (!strncmp(arg, "--exec=", 7)) { + if (!prefixcmp(arg, "--exec=")) { receivepack = arg; continue; } diff --git a/builtin-read-tree.c b/builtin-read-tree.c index 8ba436dbac..e47715538b 100644 --- a/builtin-read-tree.c +++ b/builtin-read-tree.c @@ -133,7 +133,7 @@ int cmd_read_tree(int argc, const char **argv, const char *unused_prefix) * entries and put the entries from the tree under the * given subdirectory. 
*/ - if (!strncmp(arg, "--prefix=", 9)) { + if (!prefixcmp(arg, "--prefix=")) { if (stage || opts.merge || opts.prefix) usage(read_tree_usage); opts.prefix = arg + 9; @@ -179,7 +179,7 @@ int cmd_read_tree(int argc, const char **argv, const char *unused_prefix) continue; } - if (!strncmp(arg, "--exclude-per-directory=", 24)) { + if (!prefixcmp(arg, "--exclude-per-directory=")) { struct dir_struct *dir; if (opts.dir) diff --git a/builtin-reflog.c b/builtin-reflog.c index 65b845b447..186aabce04 100644 --- a/builtin-reflog.c +++ b/builtin-reflog.c @@ -55,8 +55,8 @@ static int tree_is_complete(const unsigned char *sha1) desc.buf = tree->buffer; desc.size = tree->size; if (!desc.buf) { - char type[20]; - void *data = read_sha1_file(sha1, type, &desc.size); + enum object_type type; + void *data = read_sha1_file(sha1, &type, &desc.size); if (!data) { tree->object.flags |= INCOMPLETE; return 0; @@ -215,8 +215,8 @@ static int expire_reflog_ent(unsigned char *osha1, unsigned char *nsha1, old = lookup_commit_reference_gently(osha1, 1); if (!new && !is_null_sha1(nsha1)) new = lookup_commit_reference_gently(nsha1, 1); - if ((old && !in_merge_bases(old, cb->ref_commit)) || - (new && !in_merge_bases(new, cb->ref_commit))) + if ((old && !in_merge_bases(old, &cb->ref_commit, 1)) || + (new && !in_merge_bases(new, &cb->ref_commit, 1))) goto prune; } @@ -321,9 +321,9 @@ static int cmd_reflog_expire(int argc, const char **argv, const char *prefix) const char *arg = argv[i]; if (!strcmp(arg, "--dry-run") || !strcmp(arg, "-n")) cb.dry_run = 1; - else if (!strncmp(arg, "--expire=", 9)) + else if (!prefixcmp(arg, "--expire=")) cb.expire_total = approxidate(arg + 9); - else if (!strncmp(arg, "--expire-unreachable=", 21)) + else if (!prefixcmp(arg, "--expire-unreachable=")) cb.expire_unreachable = approxidate(arg + 21); else if (!strcmp(arg, "--stale-fix")) cb.stalefix = 1; diff --git a/builtin-rerere.c b/builtin-rerere.c index 58c5fed91d..b8867ab4ad 100644 --- a/builtin-rerere.c +++ b/builtin-rerere.c @@ -105,11 +105,11 @@ static int handle_file(const char *path, SHA1_Init(&ctx); while (fgets(buf, sizeof(buf), f)) { - if (!strncmp("<<<<<<< ", buf, 8)) + if (!prefixcmp(buf, "<<<<<<< ")) hunk = 1; - else if (!strncmp("=======", buf, 7)) + else if (!prefixcmp(buf, "=======")) hunk = 2; - else if (!strncmp(">>>>>>> ", buf, 8)) { + else if (!prefixcmp(buf, ">>>>>>> ")) { hunk_no++; hunk = 0; if (memcmp(one->ptr, two->ptr, one->nr < two->nr ? 
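Both delete_branches() in the builtin-branch.c hunk and expire_reflog_ent() in the builtin-reflog.c hunk above now call in_merge_bases() as in_merge_bases(commit, &reference, 1): the function takes an array of reference commits plus a count instead of a single second commit (the commit.h hunk later in this patch carries the new prototype, and the commit.c change still dies with "not yet" when more than one reference is passed). A minimal sketch of a caller in the new style, assuming it is built inside the git tree where commit.h is available:

#include "cache.h"
#include "commit.h"	/* int in_merge_bases(struct commit *, struct commit **, int); */

/*
 * Illustrative helper only, not part of the patch: report whether
 * `commit` is already contained in (is an ancestor of) `reference`,
 * keeping the old two-commit call shape on top of the new
 * array-plus-count interface.
 */
static int is_merged_into(struct commit *commit, struct commit *reference)
{
	return in_merge_bases(commit, &reference, 1);
}

The array form leaves room for checking a commit against several references at once, which presumably is why the signature changed ahead of a full implementation.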
diff --git a/builtin-rev-parse.c b/builtin-rev-parse.c index d53deaa369..37addb25fa 100644 --- a/builtin-rev-parse.c +++ b/builtin-rev-parse.c @@ -233,7 +233,7 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix) } continue; } - if (!strncmp(arg,"-n",2)) { + if (!prefixcmp(arg, "-n")) { if ((filter & DO_FLAGS) && (filter & DO_REVS)) show(arg); continue; @@ -274,7 +274,7 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix) continue; } if (!strcmp(arg, "--short") || - !strncmp(arg, "--short=", 8)) { + !prefixcmp(arg, "--short=")) { filter &= ~(DO_FLAGS|DO_NOREV); verify = 1; abbrev = DEFAULT_ABBREV; @@ -352,19 +352,19 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix) : "false"); continue; } - if (!strncmp(arg, "--since=", 8)) { + if (!prefixcmp(arg, "--since=")) { show_datestring("--max-age=", arg+8); continue; } - if (!strncmp(arg, "--after=", 8)) { + if (!prefixcmp(arg, "--after=")) { show_datestring("--max-age=", arg+8); continue; } - if (!strncmp(arg, "--before=", 9)) { + if (!prefixcmp(arg, "--before=")) { show_datestring("--min-age=", arg+9); continue; } - if (!strncmp(arg, "--until=", 8)) { + if (!prefixcmp(arg, "--until=")) { show_datestring("--min-age=", arg+8); continue; } diff --git a/builtin-shortlog.c b/builtin-shortlog.c index edb40429ec..2f71a2a6e2 100644 --- a/builtin-shortlog.c +++ b/builtin-shortlog.c @@ -124,7 +124,7 @@ static void insert_author_oneline(struct path_list *list, else free(buffer); - if (!strncmp(oneline, "[PATCH", 6)) { + if (!prefixcmp(oneline, "[PATCH")) { char *eob = strchr(oneline, ']'); if (eob) { @@ -179,7 +179,7 @@ static void read_from_stdin(struct path_list *list) while (fgets(buffer, sizeof(buffer), stdin) != NULL) { char *bob; if ((buffer[0] == 'A' || buffer[0] == 'a') && - !strncmp(buffer + 1, "uthor: ", 7) && + !prefixcmp(buffer + 1, "uthor: ") && (bob = strchr(buffer + 7, '<')) != NULL) { char buffer2[1024], offset = 0; @@ -230,7 +230,7 @@ static void get_from_rev(struct rev_info *rev, struct path_list *list) else eol++; - if (!strncmp(buffer, "author ", 7)) { + if (!prefixcmp(buffer, "author ")) { char *bracket = strchr(buffer, '<'); if (bracket == NULL || bracket > eol) diff --git a/builtin-show-branch.c b/builtin-show-branch.c index 0d94e40df8..67ae6bacda 100644 --- a/builtin-show-branch.c +++ b/builtin-show-branch.c @@ -266,7 +266,7 @@ static void show_one_commit(struct commit *commit, int no_name) pretty, sizeof(pretty), 0, NULL, NULL, 0); else strcpy(pretty, "(unavailable)"); - if (!strncmp(pretty, "[PATCH] ", 8)) + if (!prefixcmp(pretty, "[PATCH] ")) cp = pretty + 8; else cp = pretty; @@ -378,7 +378,7 @@ static int append_head_ref(const char *refname, const unsigned char *sha1, int f { unsigned char tmp[20]; int ofs = 11; - if (strncmp(refname, "refs/heads/", ofs)) + if (prefixcmp(refname, "refs/heads/")) return 0; /* If both heads/foo and tags/foo exists, get_sha1 would * get confused. @@ -392,7 +392,7 @@ static int append_remote_ref(const char *refname, const unsigned char *sha1, int { unsigned char tmp[20]; int ofs = 13; - if (strncmp(refname, "refs/remotes/", ofs)) + if (prefixcmp(refname, "refs/remotes/")) return 0; /* If both heads/foo and tags/foo exists, get_sha1 would * get confused. 
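A large share of the hunks in this patch, including the builtin-rev-parse.c, builtin-shortlog.c and builtin-show-branch.c ones above, replace !strncmp(arg, "--opt=", N) with !prefixcmp(arg, "--opt="), so an option literal and its hand-counted length can no longer drift apart. The definition of prefixcmp() is not part of the hunks shown here; the sketch below assumes the obvious semantics (compare only the first strlen(prefix) bytes) and mirrors the --depth= parsing from the builtin-pack-objects.c hunk earlier, except that it indexes past the prefix with strlen() where the converted call sites keep the numeric offset (arg + 8):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Assumed definition; the real helper lives outside the hunks shown. */
static int prefixcmp(const char *str, const char *prefix)
{
	return strncmp(str, prefix, strlen(prefix));
}

int main(int argc, char **argv)
{
	unsigned long depth = 10;
	int i;

	for (i = 1; i < argc; i++) {
		const char *arg = argv[i];

		/* old style: if (!strncmp(arg, "--depth=", 8)) ... arg + 8 */
		if (!prefixcmp(arg, "--depth=")) {
			const char *val = arg + strlen("--depth=");
			char *end;
			depth = strtoul(val, &end, 0);
			if (!*val || *end)
				fprintf(stderr, "bad --depth '%s'\n", val);
			continue;
		}
	}
	printf("depth = %lu\n", depth);
	return 0;
}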
@@ -404,7 +404,7 @@ static int append_remote_ref(const char *refname, const unsigned char *sha1, int static int append_tag_ref(const char *refname, const unsigned char *sha1, int flag, void *cb_data) { - if (strncmp(refname, "refs/tags/", 10)) + if (prefixcmp(refname, "refs/tags/")) return 0; return append_ref(refname + 5, sha1, 0); } @@ -435,9 +435,9 @@ static int append_matching_ref(const char *refname, const unsigned char *sha1, i return 0; if (fnmatch(match_ref_pattern, tail, 0)) return 0; - if (!strncmp("refs/heads/", refname, 11)) + if (!prefixcmp(refname, "refs/heads/")) return append_head_ref(refname, sha1, flag, cb_data); - if (!strncmp("refs/tags/", refname, 10)) + if (!prefixcmp(refname, "refs/tags/")) return append_tag_ref(refname, sha1, flag, cb_data); return append_ref(refname, sha1, 0); } @@ -462,11 +462,11 @@ static int rev_is_head(char *head, int headlen, char *name, if ((!head[0]) || (head_sha1 && sha1 && hashcmp(head_sha1, sha1))) return 0; - if (!strncmp(head, "refs/heads/", 11)) + if (!prefixcmp(head, "refs/heads/")) head += 11; - if (!strncmp(name, "refs/heads/", 11)) + if (!prefixcmp(name, "refs/heads/")) name += 11; - else if (!strncmp(name, "heads/", 6)) + else if (!prefixcmp(name, "heads/")) name += 6; return !strcmp(head, name); } @@ -635,7 +635,7 @@ int cmd_show_branch(int ac, const char **av, const char *prefix) with_current_branch = 1; else if (!strcmp(arg, "--sha1-name")) sha1_name = 1; - else if (!strncmp(arg, "--more=", 7)) + else if (!prefixcmp(arg, "--more=")) extra = atoi(arg + 7); else if (!strcmp(arg, "--merge-base")) merge_base = 1; @@ -652,9 +652,9 @@ int cmd_show_branch(int ac, const char **av, const char *prefix) else if (!strcmp(arg, "--reflog") || !strcmp(arg, "-g")) { reflog = DEFAULT_REFLOG; } - else if (!strncmp(arg, "--reflog=", 9)) + else if (!prefixcmp(arg, "--reflog=")) parse_reflog_param(arg + 9, &reflog, &reflog_base); - else if (!strncmp(arg, "-g=", 3)) + else if (!prefixcmp(arg, "-g=")) parse_reflog_param(arg + 3, &reflog, &reflog_base); else usage(show_branch_usage); diff --git a/builtin-show-ref.c b/builtin-show-ref.c index 75211e64f9..9463ff0e69 100644 --- a/builtin-show-ref.c +++ b/builtin-show-ref.c @@ -28,8 +28,8 @@ static int show_ref(const char *refname, const unsigned char *sha1, int flag, vo if (tags_only || heads_only) { int match; - match = heads_only && !strncmp(refname, "refs/heads/", 11); - match |= tags_only && !strncmp(refname, "refs/tags/", 10); + match = heads_only && !prefixcmp(refname, "refs/heads/"); + match |= tags_only && !prefixcmp(refname, "refs/tags/"); if (!match) return 0; } @@ -178,8 +178,8 @@ int cmd_show_ref(int argc, const char **argv, const char *prefix) hash_only = 1; continue; } - if (!strncmp(arg, "--hash=", 7) || - (!strncmp(arg, "--abbrev", 8) && + if (!prefixcmp(arg, "--hash=") || + (!prefixcmp(arg, "--abbrev") && (arg[8] == '=' || arg[8] == '\0'))) { if (arg[2] != 'h' && !arg[8]) /* --abbrev only */ @@ -215,7 +215,7 @@ int cmd_show_ref(int argc, const char **argv, const char *prefix) } if (!strcmp(arg, "--exclude-existing")) return exclude_existing(NULL); - if (!strncmp(arg, "--exclude-existing=", 19)) + if (!prefixcmp(arg, "--exclude-existing=")) return exclude_existing(arg + 19); usage(show_ref_usage); } @@ -226,7 +226,7 @@ int cmd_show_ref(int argc, const char **argv, const char *prefix) while (*pattern) { unsigned char sha1[20]; - if (!strncmp(*pattern, "refs/", 5) && + if (!prefixcmp(*pattern, "refs/") && resolve_ref(*pattern, sha1, 1, NULL)) { if (!quiet) show_one(*pattern, sha1); diff 
--git a/builtin-tar-tree.c b/builtin-tar-tree.c index 8055ddab9b..b04719ef20 100644 --- a/builtin-tar-tree.c +++ b/builtin-tar-tree.c @@ -31,7 +31,7 @@ int cmd_tar_tree(int argc, const char **argv, const char *prefix) nargv[nargc++] = "git-archive"; nargv[nargc++] = "--format=tar"; - if (2 <= argc && !strncmp("--remote=", argv[1], 9)) { + if (2 <= argc && !prefixcmp(argv[1], "--remote=")) { nargv[nargc++] = argv[1]; argv++; argc--; diff --git a/builtin-unpack-objects.c b/builtin-unpack-objects.c index d351e02649..3956c56334 100644 --- a/builtin-unpack-objects.c +++ b/builtin-unpack-objects.c @@ -119,18 +119,18 @@ struct obj_info { static struct obj_info *obj_list; -static void added_object(unsigned nr, const char *type, void *data, - unsigned long size); +static void added_object(unsigned nr, enum object_type type, + void *data, unsigned long size); -static void write_object(unsigned nr, void *buf, unsigned long size, - const char *type) +static void write_object(unsigned nr, enum object_type type, + void *buf, unsigned long size) { - if (write_sha1_file(buf, size, type, obj_list[nr].sha1) < 0) + if (write_sha1_file(buf, size, typename(type), obj_list[nr].sha1) < 0) die("failed to write object"); added_object(nr, type, buf, size); } -static void resolve_delta(unsigned nr, const char *type, +static void resolve_delta(unsigned nr, enum object_type type, void *base, unsigned long base_size, void *delta, unsigned long delta_size) { @@ -143,12 +143,12 @@ static void resolve_delta(unsigned nr, const char *type, if (!result) die("failed to apply delta"); free(delta); - write_object(nr, result, result_size, type); + write_object(nr, type, result, result_size); free(result); } -static void added_object(unsigned nr, const char *type, void *data, - unsigned long size) +static void added_object(unsigned nr, enum object_type type, + void *data, unsigned long size) { struct delta_info **p = &delta_list; struct delta_info *info; @@ -167,33 +167,24 @@ static void added_object(unsigned nr, const char *type, void *data, } } -static void unpack_non_delta_entry(enum object_type kind, unsigned long size, +static void unpack_non_delta_entry(enum object_type type, unsigned long size, unsigned nr) { void *buf = get_data(size); - const char *type; - - switch (kind) { - case OBJ_COMMIT: type = commit_type; break; - case OBJ_TREE: type = tree_type; break; - case OBJ_BLOB: type = blob_type; break; - case OBJ_TAG: type = tag_type; break; - default: die("bad type %d", kind); - } + if (!dry_run && buf) - write_object(nr, buf, size, type); + write_object(nr, type, buf, size); free(buf); } -static void unpack_delta_entry(enum object_type kind, unsigned long delta_size, +static void unpack_delta_entry(enum object_type type, unsigned long delta_size, unsigned nr) { void *delta_data, *base; unsigned long base_size; - char type[20]; unsigned char base_sha1[20]; - if (kind == OBJ_REF_DELTA) { + if (type == OBJ_REF_DELTA) { hashcpy(base_sha1, fill(20)); use(20); delta_data = get_data(delta_size); @@ -255,7 +246,7 @@ static void unpack_delta_entry(enum object_type kind, unsigned long delta_size, } } - base = read_sha1_file(base_sha1, type, &base_size); + base = read_sha1_file(base_sha1, &type, &base_size); if (!base) { error("failed to read delta-pack base object %s", sha1_to_hex(base_sha1)); @@ -369,7 +360,7 @@ int cmd_unpack_objects(int argc, const char **argv, const char *prefix) recover = 1; continue; } - if (!strncmp(arg, "--pack_header=", 14)) { + if (!prefixcmp(arg, "--pack_header=")) { struct pack_header *hdr; char *c; 
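builtin-unpack-objects.c above, like cat-file, blame and pack-objects earlier, moves from "char type[20]" plus string comparisons to the enum object_type API: sha1_object_info() now returns the type (negative on error) and read_sha1_file() fills in an enum object_type. The declarations below are copied from the cache.h hunk that follows; the enum-to-name helper typename() comes from object.h and its exact signature is assumed here. The sketch only illustrates the calling convention and would need to be linked inside the git tree:

#include <stdio.h>
#include <stdlib.h>

/* From the cache.h hunk below; a real caller would #include "cache.h". */
enum object_type {
	OBJ_BAD = -1,
	OBJ_NONE = 0,
	OBJ_COMMIT = 1,
	OBJ_TREE = 2,
	OBJ_BLOB = 3,
	OBJ_TAG = 4,
	/* 5 for future expansion */
	OBJ_OFS_DELTA = 6,
	OBJ_REF_DELTA = 7,
	OBJ_MAX,
};
extern int sha1_object_info(const unsigned char *, unsigned long *);
extern void *read_sha1_file(const unsigned char *sha1,
                            enum object_type *type, unsigned long *size);
extern const char *typename(unsigned int type);	/* from object.h; signature assumed */

/* Print an object's type and size, and dump its contents if it is a blob. */
static int show_object(const unsigned char *sha1)
{
	enum object_type type;
	unsigned long size;
	void *buf;

	type = sha1_object_info(sha1, &size);
	if (type < 0)
		return -1;	/* missing or unreadable object */
	printf("%s %lu\n", typename(type), size);

	if (type != OBJ_BLOB)
		return 0;
	buf = read_sha1_file(sha1, &type, &size);
	if (!buf)
		return -1;
	fwrite(buf, 1, size, stdout);
	free(buf);
	return 0;
}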
diff --git a/builtin-update-index.c b/builtin-update-index.c index 772aaba7bb..65246dad8d 100644 --- a/builtin-update-index.c +++ b/builtin-update-index.c @@ -487,6 +487,7 @@ int cmd_update_index(int argc, const char **argv, const char *prefix) int prefix_length = prefix ? strlen(prefix) : 0; char set_executable_bit = 0; unsigned int refresh_flags = 0; + int lock_error = 0; struct lock_file *lock_file; git_config(git_default_config); @@ -494,7 +495,9 @@ int cmd_update_index(int argc, const char **argv, const char *prefix) /* We can't free this memory, it becomes part of a linked list parsed atexit() */ lock_file = xcalloc(1, sizeof(struct lock_file)); - newfd = hold_lock_file_for_update(lock_file, get_index_file(), 1); + newfd = hold_lock_file_for_update(lock_file, get_index_file(), 0); + if (newfd < 0) + lock_error = errno; entries = read_cache(); if (entries < 0) @@ -651,6 +654,12 @@ int cmd_update_index(int argc, const char **argv, const char *prefix) finish: if (active_cache_changed) { + if (newfd < 0) { + if (refresh_flags & REFRESH_QUIET) + exit(128); + die("unable to create '%s.lock': %s", + get_index_file(), strerror(lock_error)); + } if (write_cache(newfd, active_cache, active_nr) || close(newfd) || commit_lock_file(lock_file)) die("Unable to write new index file"); diff --git a/builtin-write-tree.c b/builtin-write-tree.c index 50670dc7bf..90fc1cfcf4 100644 --- a/builtin-write-tree.c +++ b/builtin-write-tree.c @@ -70,7 +70,7 @@ int cmd_write_tree(int argc, const char **argv, const char *unused_prefix) const char *arg = argv[1]; if (!strcmp(arg, "--missing-ok")) missing_ok = 1; - else if (!strncmp(arg, "--prefix=", 9)) + else if (!prefixcmp(arg, "--prefix=")) prefix = arg + 9; else usage(write_tree_usage); @@ -29,7 +29,6 @@ extern int cmd_describe(int argc, const char **argv, const char *prefix); extern int cmd_diff_files(int argc, const char **argv, const char *prefix); extern int cmd_diff_index(int argc, const char **argv, const char *prefix); extern int cmd_diff(int argc, const char **argv, const char *prefix); -extern int cmd_diff_stages(int argc, const char **argv, const char *prefix); extern int cmd_diff_tree(int argc, const char **argv, const char *prefix); extern int cmd_fmt_merge_msg(int argc, const char **argv, const char *prefix); extern int cmd_for_each_ref(int argc, const char **argv, const char *prefix); @@ -45,6 +44,7 @@ extern int cmd_ls_files(int argc, const char **argv, const char *prefix); extern int cmd_ls_tree(int argc, const char **argv, const char *prefix); extern int cmd_mailinfo(int argc, const char **argv, const char *prefix); extern int cmd_mailsplit(int argc, const char **argv, const char *prefix); +extern int cmd_merge_base(int argc, const char **argv, const char *prefix); extern int cmd_merge_file(int argc, const char **argv, const char *prefix); extern int cmd_mv(int argc, const char **argv, const char *prefix); extern int cmd_name_rev(int argc, const char **argv, const char *prefix); @@ -127,6 +127,19 @@ extern unsigned int active_nr, active_alloc, active_cache_changed; extern struct cache_tree *active_cache_tree; extern int cache_errno; +enum object_type { + OBJ_BAD = -1, + OBJ_NONE = 0, + OBJ_COMMIT = 1, + OBJ_TREE = 2, + OBJ_BLOB = 3, + OBJ_TAG = 4, + /* 5 for future expansion */ + OBJ_OFS_DELTA = 6, + OBJ_REF_DELTA = 7, + OBJ_MAX, +}; + #define GIT_DIR_ENVIRONMENT "GIT_DIR" #define DEFAULT_GIT_DIR_ENVIRONMENT ".git" #define DB_ENVIRONMENT "GIT_OBJECT_DIRECTORY" @@ -177,7 +190,7 @@ extern int ce_same_name(struct cache_entry *a, struct cache_entry 
*b); extern int ce_match_stat(struct cache_entry *ce, struct stat *st, int); extern int ce_modified(struct cache_entry *ce, struct stat *st, int); extern int ce_path_match(const struct cache_entry *ce, const char **pathspec); -extern int index_fd(unsigned char *sha1, int fd, struct stat *st, int write_object, const char *type); +extern int index_fd(unsigned char *sha1, int fd, struct stat *st, int write_object, enum object_type type, const char *path); extern int read_pipe(int fd, char** return_buf, unsigned long* return_size); extern int index_pipe(unsigned char *sha1, int fd, const char *type, int write_object); extern int index_path(unsigned char *sha1, const char *path, struct stat *st, int write_object); @@ -211,6 +224,7 @@ extern const char *apply_default_whitespace; extern int zlib_compression_level; extern size_t packed_git_window_size; extern size_t packed_git_limit; +extern int auto_crlf; #define GIT_REPO_VERSION 0 extern int repository_format_version; @@ -262,12 +276,12 @@ int safe_create_leading_directories(char *path); char *enter_repo(char *path, int strict); /* Read and unpack a sha1 file into memory, write memory to a sha1 file */ -extern int sha1_object_info(const unsigned char *, char *, unsigned long *); -extern void * unpack_sha1_file(void *map, unsigned long mapsize, char *type, unsigned long *size); -extern void * read_sha1_file(const unsigned char *sha1, char *type, unsigned long *size); +extern int sha1_object_info(const unsigned char *, unsigned long *); +extern void * unpack_sha1_file(void *map, unsigned long mapsize, enum object_type *type, unsigned long *size); +extern void * read_sha1_file(const unsigned char *sha1, enum object_type *type, unsigned long *size); extern int hash_sha1_file(void *buf, unsigned long len, const char *type, unsigned char *sha1); extern int write_sha1_file(void *buf, unsigned long len, const char *type, unsigned char *return_sha1); -extern int pretend_sha1_file(void *, unsigned long, const char *, unsigned char *); +extern int pretend_sha1_file(void *, unsigned long, enum object_type, unsigned char *); extern int check_sha1_signature(const unsigned char *sha1, void *buf, unsigned long size, const char *type); @@ -284,18 +298,6 @@ extern int legacy_loose_object(unsigned char *); extern int has_pack_file(const unsigned char *sha1); extern int has_pack_index(const unsigned char *sha1); -enum object_type { - OBJ_NONE = 0, - OBJ_COMMIT = 1, - OBJ_TREE = 2, - OBJ_BLOB = 3, - OBJ_TAG = 4, - /* 5 for future expansion */ - OBJ_OFS_DELTA = 6, - OBJ_REF_DELTA = 7, - OBJ_BAD, -}; - extern signed char hexval_table[256]; static inline unsigned int hexval(unsigned int c) { @@ -421,9 +423,9 @@ extern struct packed_git *add_packed_git(char *, int, int); extern int num_packed_objects(const struct packed_git *p); extern int nth_packed_object_sha1(const struct packed_git *, int, unsigned char*); extern unsigned long find_pack_entry_one(const unsigned char *, struct packed_git *); -extern void *unpack_entry(struct packed_git *, unsigned long, char *, unsigned long *); +extern void *unpack_entry(struct packed_git *, unsigned long, enum object_type *, unsigned long *); extern unsigned long unpack_object_header_gently(const unsigned char *buf, unsigned long len, enum object_type *type, unsigned long *sizep); -extern void packed_object_info_detail(struct packed_git *, unsigned long, char *, unsigned long *, unsigned long *, unsigned int *, unsigned char *); +extern const char *packed_object_info_detail(struct packed_git *, unsigned long, unsigned long *, 
unsigned long *, unsigned int *, unsigned char *); /* Dumb servers support */ extern int update_server_info(int); @@ -478,4 +480,8 @@ extern int nfvasprintf(char **str, const char *fmt, va_list va); extern void trace_printf(const char *format, ...); extern void trace_argv_printf(const char **argv, int count, const char *format, ...); +/* convert.c */ +extern int convert_to_git(const char *path, char **bufp, unsigned long *sizep); +extern int convert_to_working_tree(const char *path, char **bufp, unsigned long *sizep); + #endif /* CACHE_H */ diff --git a/combine-diff.c b/combine-diff.c index 044633d164..9daa0cb9a9 100644 --- a/combine-diff.c +++ b/combine-diff.c @@ -92,14 +92,14 @@ struct sline { static char *grab_blob(const unsigned char *sha1, unsigned long *size) { char *blob; - char type[20]; + enum object_type type; if (is_null_sha1(sha1)) { /* deleted blob */ *size = 0; return xcalloc(1, 1); } - blob = read_sha1_file(sha1, type, size); - if (strcmp(type, blob_type)) + blob = read_sha1_file(sha1, &type, size); + if (type != OBJ_BLOB) die("object '%s' is not a blob!", sha1_to_hex(sha1)); return blob; } @@ -342,18 +342,18 @@ int parse_commit_buffer(struct commit *item, void *buffer, unsigned long size) int parse_commit(struct commit *item) { - char type[20]; + enum object_type type; void *buffer; unsigned long size; int ret; if (item->object.parsed) return 0; - buffer = read_sha1_file(item->object.sha1, type, &size); + buffer = read_sha1_file(item->object.sha1, &type, &size); if (!buffer) return error("Could not read %s", sha1_to_hex(item->object.sha1)); - if (strcmp(type, commit_type)) { + if (type != OBJ_COMMIT) { free(buffer); return error("Object %s not a commit", sha1_to_hex(item->object.sha1)); @@ -1187,14 +1187,17 @@ struct commit_list *get_merge_bases(struct commit *one, return result; } -int in_merge_bases(struct commit *rev1, struct commit *rev2) +int in_merge_bases(struct commit *commit, struct commit **reference, int num) { struct commit_list *bases, *b; int ret = 0; - bases = get_merge_bases(rev1, rev2, 1); + if (num == 1) + bases = get_merge_bases(commit, *reference, 1); + else + die("not yet"); for (b = bases; b; b = b->next) { - if (!hashcmp(rev1->object.sha1, b->item->object.sha1)) { + if (!hashcmp(commit->object.sha1, b->item->object.sha1)) { ret = 1; break; } @@ -114,5 +114,5 @@ extern int is_repository_shallow(void); extern struct commit_list *get_shallow_commits(struct object_array *heads, int depth, int shallow_flag, int not_shallow_flag); -int in_merge_bases(struct commit *rev1, struct commit *rev2); +int in_merge_bases(struct commit *, struct commit **, int); #endif /* COMMIT_H */ @@ -326,6 +326,15 @@ int git_default_config(const char *var, const char *value) return 0; } + if (!strcmp(var, "core.autocrlf")) { + if (value && !strcasecmp(value, "input")) { + auto_crlf = -1; + return 0; + } + auto_crlf = git_config_bool(var, value); + return 0; + } + if (!strcmp(var, "user.name")) { strlcpy(git_default_name, value, sizeof(git_default_name)); return 0; @@ -385,6 +394,8 @@ int git_config(config_fn_t fn) * config file otherwise. 
*/ filename = getenv(CONFIG_ENVIRONMENT); if (!filename) { + if (!access(ETC_GITCONFIG, R_OK)) + ret += git_config_from_file(fn, ETC_GITCONFIG); home = getenv("HOME"); filename = getenv(CONFIG_LOCAL_ENVIRONMENT); if (!filename) diff --git a/configure.ac b/configure.ac index 7cfb3a0666..3a8e778def 100644 --- a/configure.ac +++ b/configure.ac @@ -114,13 +114,32 @@ AC_CHECK_LIB([expat], [XML_ParserCreate], [NO_EXPAT=YesPlease]) AC_SUBST(NO_EXPAT) # -# Define NEEDS_LIBICONV if linking with libc is not enough (Darwin). +# Define NEEDS_LIBICONV if linking with libc is not enough (Darwin and +# some Solaris installations). # Define NO_ICONV if neither libc nor libiconv support iconv. -AC_CHECK_LIB([c], [iconv], - [NEEDS_LIBICONV=], - AC_CHECK_LIB([iconv], [iconv], - [NEEDS_LIBICONV=YesPlease], - [NO_ICONV=YesPlease])) +AC_DEFUN([ICONVTEST_SRC], [ +#include <iconv.h> + +int main(void) +{ + iconv_open("", ""); + return 0; +} +]) +AC_MSG_CHECKING([for iconv in -lc]) +AC_LINK_IFELSE(ICONVTEST_SRC, + [AC_MSG_RESULT([yes]) + NEEDS_LIBICONV=], + [AC_MSG_RESULT([no]) + old_LIBS="$LIBS" + LIBS="$LIBS -liconv" + AC_MSG_CHECKING([for iconv in -liconv]) + AC_LINK_IFELSE(ICONVTEST_SRC, + [AC_MSG_RESULT([yes]) + NEEDS_LIBICONV=YesPlease], + [AC_MSG_RESULT([no]) + NO_ICONV=YesPlease]) + LIBS="$old_LIBS"]) AC_SUBST(NEEDS_LIBICONV) AC_SUBST(NO_ICONV) test -n "$NEEDS_LIBICONV" && LIBS="$LIBS -liconv" @@ -96,7 +96,7 @@ int get_ack(int fd, unsigned char *result_sha1) line[--len] = 0; if (!strcmp(line, "NAK")) return 0; - if (!strncmp(line, "ACK ", 4)) { + if (!prefixcmp(line, "ACK ")) { if (!get_sha1_hex(line+4, result_sha1)) { if (strstr(line+45, "continue")) return 2; @@ -196,8 +196,8 @@ static int count_refspec_match(const char *pattern, */ if (namelen != patlen && patlen != namelen - 5 && - strncmp(name, "refs/heads/", 11) && - strncmp(name, "refs/tags/", 10)) { + prefixcmp(name, "refs/heads/") && + prefixcmp(name, "refs/tags/")) { /* We want to catch the case where only weak * matches are found and there are multiple * matches, and where more than one strong diff --git a/contrib/completion/git-completion.bash b/contrib/completion/git-completion.bash index 5d3d402051..7c03403484 100755 --- a/contrib/completion/git-completion.bash +++ b/contrib/completion/git-completion.bash @@ -269,7 +269,6 @@ __git_commands () cvsimport) : import;; cvsserver) : daemon;; daemon) : daemon;; - diff-stages) : nobody uses it;; fast-import) : import;; fsck-objects) : plumbing;; fetch-pack) : plumbing;; @@ -298,7 +297,6 @@ __git_commands () reflog) : plumbing;; repo-config) : plumbing;; rerere) : plumbing;; - resolve) : dead dont use;; rev-list) : plumbing;; rev-parse) : plumbing;; runstatus) : plumbing;; diff --git a/git-resolve.sh b/contrib/examples/git-resolve.sh index 36b90e3849..36b90e3849 100755 --- a/git-resolve.sh +++ b/contrib/examples/git-resolve.sh diff --git a/convert-objects.c b/convert-objects.c index a630132985..b5f41ae2e3 100644 --- a/convert-objects.c +++ b/convert-objects.c @@ -284,27 +284,27 @@ static void convert_commit(void *buffer, unsigned long size, unsigned char *resu static struct entry * convert_entry(unsigned char *sha1) { struct entry *entry = lookup_entry(sha1); - char type[20]; + enum object_type type; void *buffer, *data; unsigned long size; if (entry->converted) return entry; - data = read_sha1_file(sha1, type, &size); + data = read_sha1_file(sha1, &type, &size); if (!data) die("unable to read object %s", sha1_to_hex(sha1)); buffer = xmalloc(size); memcpy(buffer, data, size); - if (!strcmp(type, 
blob_type)) { + if (type == OBJ_BLOB) { write_sha1_file(buffer, size, blob_type, entry->new_sha1); - } else if (!strcmp(type, tree_type)) + } else if (type == OBJ_TREE) convert_tree(buffer, size, entry->new_sha1); - else if (!strcmp(type, commit_type)) + else if (type == OBJ_COMMIT) convert_commit(buffer, size, entry->new_sha1); else - die("unknown object type '%s' in %s", type, sha1_to_hex(sha1)); + die("unknown object type %d in %s", type, sha1_to_hex(sha1)); entry->converted = 1; free(buffer); free(data); diff --git a/convert.c b/convert.c new file mode 100644 index 0000000000..898bfe3eb2 --- /dev/null +++ b/convert.c @@ -0,0 +1,186 @@ +#include "cache.h" +/* + * convert.c - convert a file when checking it out and checking it in. + * + * This should use the pathname to decide on whether it wants to do some + * more interesting conversions (automatic gzip/unzip, general format + * conversions etc etc), but by default it just does automatic CRLF<->LF + * translation when the "auto_crlf" option is set. + */ + +struct text_stat { + /* CR, LF and CRLF counts */ + unsigned cr, lf, crlf; + + /* These are just approximations! */ + unsigned printable, nonprintable; +}; + +static void gather_stats(const char *buf, unsigned long size, struct text_stat *stats) +{ + unsigned long i; + + memset(stats, 0, sizeof(*stats)); + + for (i = 0; i < size; i++) { + unsigned char c = buf[i]; + if (c == '\r') { + stats->cr++; + if (i+1 < size && buf[i+1] == '\n') + stats->crlf++; + continue; + } + if (c == '\n') { + stats->lf++; + continue; + } + if (c == 127) + /* DEL */ + stats->nonprintable++; + else if (c < 32) { + switch (c) { + /* BS, HT, ESC and FF */ + case '\b': case '\t': case '\033': case '\014': + stats->printable++; + break; + default: + stats->nonprintable++; + } + } + else + stats->printable++; + } +} + +/* + * The same heuristics as diff.c::mmfile_is_binary() + */ +static int is_binary(unsigned long size, struct text_stat *stats) +{ + + if ((stats->printable >> 7) < stats->nonprintable) + return 1; + /* + * Other heuristics? Average line length might be relevant, + * as might LF vs CR vs CRLF counts.. + * + * NOTE! It might be normal to have a low ratio of CRLF to LF + * (somebody starts with a LF-only file and edits it with an editor + * that adds CRLF only to lines that are added..). But do we + * want to support CR-only? Probably not. + */ + return 0; +} + +int convert_to_git(const char *path, char **bufp, unsigned long *sizep) +{ + char *buffer, *nbuf; + unsigned long size, nsize; + struct text_stat stats; + + /* + * FIXME! Other pluggable conversions should go here, + * based on filename patterns. Right now we just do the + * stupid auto-CRLF one. + */ + if (!auto_crlf) + return 0; + + size = *sizep; + if (!size) + return 0; + buffer = *bufp; + + gather_stats(buffer, size, &stats); + + /* No CR? Nothing to convert, regardless. */ + if (!stats.cr) + return 0; + + /* + * We're currently not going to even try to convert stuff + * that has bare CR characters. Does anybody do that crazy + * stuff? + */ + if (stats.cr != stats.crlf) + return 0; + + /* + * And add some heuristics for binary vs text, of course... + */ + if (is_binary(size, &stats)) + return 0; + + /* + * Ok, allocate a new buffer, fill it in, and return true + * to let the caller know that we switched buffers on it. 
+ */ + nsize = size - stats.crlf; + nbuf = xmalloc(nsize); + *bufp = nbuf; + *sizep = nsize; + do { + unsigned char c = *buffer++; + if (c != '\r') + *nbuf++ = c; + } while (--size); + + return 1; +} + +int convert_to_working_tree(const char *path, char **bufp, unsigned long *sizep) +{ + char *buffer, *nbuf; + unsigned long size, nsize; + struct text_stat stats; + unsigned char last; + + /* + * FIXME! Other pluggable conversions should go here, + * based on filename patterns. Right now we just do the + * stupid auto-CRLF one. + */ + if (auto_crlf <= 0) + return 0; + + size = *sizep; + if (!size) + return 0; + buffer = *bufp; + + gather_stats(buffer, size, &stats); + + /* No LF? Nothing to convert, regardless. */ + if (!stats.lf) + return 0; + + /* Was it already in CRLF format? */ + if (stats.lf == stats.crlf) + return 0; + + /* If we have any bare CR characters, we're not going to touch it */ + if (stats.cr != stats.crlf) + return 0; + + if (is_binary(size, &stats)) + return 0; + + /* + * Ok, allocate a new buffer, fill it in, and return true + * to let the caller know that we switched buffers on it. + */ + nsize = size + stats.lf - stats.crlf; + nbuf = xmalloc(nsize); + *bufp = nbuf; + *sizep = nsize; + last = 0; + do { + unsigned char c = *buffer++; + if (c == '\n' && last != '\r') + *nbuf++ = '\r'; + *nbuf++ = c; + last = c; + } while (--size); + + return 1; +} @@ -286,7 +286,7 @@ static int service_enabled; static int git_daemon_config(const char *var, const char *value) { - if (!strncmp(var, "daemon.", 7) && + if (!prefixcmp(var, "daemon.") && !strcmp(var + 7, service_looking_at->config_name)) { service_enabled = git_config_bool(var, value); return 0; @@ -562,7 +562,7 @@ static int execute(struct sockaddr *addr) for (i = 0; i < ARRAY_SIZE(daemon_service); i++) { struct daemon_service *s = &(daemon_service[i]); int namelen = strlen(s->name); - if (!strncmp("git-", line, 4) && + if (!prefixcmp(line, "git-") && !strncmp(s->name, line + 4, namelen) && line[namelen + 4] == ' ') { /* @@ -1011,7 +1011,7 @@ int main(int argc, char **argv) for (i = 1; i < argc; i++) { char *arg = argv[i]; - if (!strncmp(arg, "--listen=", 9)) { + if (!prefixcmp(arg, "--listen=")) { char *p = arg + 9; char *ph = listen_addr = xmalloc(strlen(arg + 9) + 1); while (*p) @@ -1019,7 +1019,7 @@ int main(int argc, char **argv) *ph = 0; continue; } - if (!strncmp(arg, "--port=", 7)) { + if (!prefixcmp(arg, "--port=")) { char *end; unsigned long n; n = strtoul(arg+7, &end, 0); @@ -1045,11 +1045,11 @@ int main(int argc, char **argv) export_all_trees = 1; continue; } - if (!strncmp(arg, "--timeout=", 10)) { + if (!prefixcmp(arg, "--timeout=")) { timeout = atoi(arg+10); continue; } - if (!strncmp(arg, "--init-timeout=", 15)) { + if (!prefixcmp(arg, "--init-timeout=")) { init_timeout = atoi(arg+15); continue; } @@ -1057,11 +1057,11 @@ int main(int argc, char **argv) strict_paths = 1; continue; } - if (!strncmp(arg, "--base-path=", 12)) { + if (!prefixcmp(arg, "--base-path=")) { base_path = arg+12; continue; } - if (!strncmp(arg, "--interpolated-path=", 20)) { + if (!prefixcmp(arg, "--interpolated-path=")) { interpolated_path = arg+20; continue; } @@ -1073,11 +1073,11 @@ int main(int argc, char **argv) user_path = ""; continue; } - if (!strncmp(arg, "--user-path=", 12)) { + if (!prefixcmp(arg, "--user-path=")) { user_path = arg + 12; continue; } - if (!strncmp(arg, "--pid-file=", 11)) { + if (!prefixcmp(arg, "--pid-file=")) { pid_file = arg + 11; continue; } @@ -1086,27 +1086,27 @@ int main(int argc, char **argv) log_syslog = 
1; continue; } - if (!strncmp(arg, "--user=", 7)) { + if (!prefixcmp(arg, "--user=")) { user_name = arg + 7; continue; } - if (!strncmp(arg, "--group=", 8)) { + if (!prefixcmp(arg, "--group=")) { group_name = arg + 8; continue; } - if (!strncmp(arg, "--enable=", 9)) { + if (!prefixcmp(arg, "--enable=")) { enable_service(arg + 9, 1); continue; } - if (!strncmp(arg, "--disable=", 10)) { + if (!prefixcmp(arg, "--disable=")) { enable_service(arg + 10, 0); continue; } - if (!strncmp(arg, "--allow-override=", 17)) { + if (!prefixcmp(arg, "--allow-override=")) { make_service_overridable(arg + 17, 1); continue; } - if (!strncmp(arg, "--forbid-override=", 18)) { + if (!prefixcmp(arg, "--forbid-override=")) { make_service_overridable(arg + 18, 0); continue; } diff --git a/diff-lib.c b/diff-lib.c index 60c0fa6488..2e91619906 100644 --- a/diff-lib.c +++ b/diff-lib.c @@ -8,11 +8,227 @@ #include "diffcore.h" #include "revision.h" #include "cache-tree.h" +#include "path-list.h" /* * diff-files */ +static int read_directory(const char *path, struct path_list *list) +{ + DIR *dir; + struct dirent *e; + + if (!(dir = opendir(path))) + return error("Could not open directory %s", path); + + while ((e = readdir(dir))) + if (strcmp(".", e->d_name) && strcmp("..", e->d_name)) + path_list_insert(xstrdup(e->d_name), list); + + closedir(dir); + return 0; +} + +static int queue_diff(struct diff_options *o, + const char *name1, const char *name2) +{ + struct stat st; + int mode1 = 0, mode2 = 0; + + if (name1) { + if (stat(name1, &st)) + return error("Could not access '%s'", name1); + mode1 = st.st_mode; + } + if (name2) { + if (stat(name2, &st)) + return error("Could not access '%s'", name2); + mode2 = st.st_mode; + } + + if (mode1 && mode2 && S_ISDIR(mode1) != S_ISDIR(mode2)) + return error("file/directory conflict: %s, %s", name1, name2); + + if (S_ISDIR(mode1) || S_ISDIR(mode2)) { + char buffer1[PATH_MAX], buffer2[PATH_MAX]; + struct path_list p1 = {NULL, 0, 0, 1}, p2 = {NULL, 0, 0, 1}; + int len1 = 0, len2 = 0, i1, i2, ret = 0; + + if (name1 && read_directory(name1, &p1)) + return -1; + if (name2 && read_directory(name2, &p2)) { + path_list_clear(&p1, 0); + return -1; + } + + if (name1) { + len1 = strlen(name1); + if (len1 > 0 && name1[len1 - 1] == '/') + len1--; + memcpy(buffer1, name1, len1); + buffer1[len1++] = '/'; + } + + if (name2) { + len2 = strlen(name2); + if (len2 > 0 && name2[len2 - 1] == '/') + len2--; + memcpy(buffer2, name2, len2); + buffer2[len2++] = '/'; + } + + for (i1 = i2 = 0; !ret && (i1 < p1.nr || i2 < p2.nr); ) { + const char *n1, *n2; + int comp; + + if (i1 == p1.nr) + comp = 1; + else if (i2 == p2.nr) + comp = -1; + else + comp = strcmp(p1.items[i1].path, + p2.items[i2].path); + + if (comp > 0) + n1 = NULL; + else { + n1 = buffer1; + strncpy(buffer1 + len1, p1.items[i1++].path, + PATH_MAX - len1); + } + + if (comp < 0) + n2 = NULL; + else { + n2 = buffer2; + strncpy(buffer2 + len2, p2.items[i2++].path, + PATH_MAX - len2); + } + + ret = queue_diff(o, n1, n2); + } + path_list_clear(&p1, 0); + path_list_clear(&p2, 0); + + return ret; + } else { + struct diff_filespec *d1, *d2; + + if (o->reverse_diff) { + unsigned tmp; + const char *tmp_c; + tmp = mode1; mode1 = mode2; mode2 = tmp; + tmp_c = name1; name1 = name2; name2 = tmp_c; + } + + if (!name1) + name1 = "/dev/null"; + if (!name2) + name2 = "/dev/null"; + d1 = alloc_filespec(name1); + d2 = alloc_filespec(name2); + fill_filespec(d1, null_sha1, mode1); + fill_filespec(d2, null_sha1, mode2); + + diff_queue(&diff_queued_diff, d1, d2); + return 
0; + } +} + +static int is_in_index(const char *path) +{ + int len = strlen(path); + int pos = cache_name_pos(path, len); + char c; + + if (pos < 0) + return 0; + if (strncmp(active_cache[pos]->name, path, len)) + return 0; + c = active_cache[pos]->name[len]; + return c == '\0' || c == '/'; +} + +static int handle_diff_files_args(struct rev_info *revs, + int argc, const char **argv, int *silent) +{ + *silent = 0; + + /* revs->max_count == -2 means --no-index */ + while (1 < argc && argv[1][0] == '-') { + if (!strcmp(argv[1], "--base")) + revs->max_count = 1; + else if (!strcmp(argv[1], "--ours")) + revs->max_count = 2; + else if (!strcmp(argv[1], "--theirs")) + revs->max_count = 3; + else if (!strcmp(argv[1], "-n") || + !strcmp(argv[1], "--no-index")) + revs->max_count = -2; + else if (!strcmp(argv[1], "-q")) + *silent = 1; + else + return error("invalid option: %s", argv[1]); + argv++; argc--; + } + + if (revs->max_count == -1 && revs->diffopt.nr_paths == 2) { + /* + * If two files are specified, and at least one is untracked, + * default to no-index. + */ + read_cache(); + if (!is_in_index(revs->diffopt.paths[0]) || + !is_in_index(revs->diffopt.paths[1])) + revs->max_count = -2; + } + + /* + * Make sure there are NO revision (i.e. pending object) parameter, + * rev.max_count is reasonable (0 <= n <= 3), + * there is no other revision filtering parameters. + */ + if (revs->pending.nr || revs->max_count > 3 || + revs->min_age != -1 || revs->max_age != -1) + return error("no revision allowed with diff-files"); + + if (revs->max_count == -1 && + (revs->diffopt.output_format & DIFF_FORMAT_PATCH)) + revs->combine_merges = revs->dense_combined_merges = 1; + + return 0; +} + +int run_diff_files_cmd(struct rev_info *revs, int argc, const char **argv) +{ + int silent_on_removed; + + if (handle_diff_files_args(revs, argc, argv, &silent_on_removed)) + return -1; + + if (revs->max_count == -2) { + if (revs->diffopt.nr_paths != 2) + return error("need two files/directories with --no-index"); + if (queue_diff(&revs->diffopt, revs->diffopt.paths[0], + revs->diffopt.paths[1])) + return -1; + diffcore_std(&revs->diffopt); + diff_flush(&revs->diffopt); + /* + * The return code for --no-index imitates diff(1): + * 0 = no changes, 1 = changes, else error + */ + return revs->diffopt.found_changes; + } + + if (read_cache() < 0) { + perror("read_cache"); + return -1; + } + return run_diff_files(revs, silent_on_removed); +} + int run_diff_files(struct rev_info *revs, int silent_on_removed) { int entries, i; @@ -20,11 +236,7 @@ int run_diff_files(struct rev_info *revs, int silent_on_removed) if (diff_unmerged_stage < 0) diff_unmerged_stage = 2; - entries = read_cache(); - if (entries < 0) { - perror("read_cache"); - return -1; - } + entries = active_nr; for (i = 0; i < entries; i++) { struct stat st; unsigned int oldmode, newmode; @@ -362,10 +574,6 @@ int run_diff_index(struct rev_info *revs, int cached) if (!revs->ignore_merges) match_missing = 1; - if (read_cache() < 0) { - perror("read_cache"); - return -1; - } mark_merge_entries(); ent = revs->pending.objects[0].item; @@ -77,7 +77,7 @@ int git_diff_ui_config(const char *var, const char *value) diff_detect_rename_default = DIFF_DETECT_RENAME; return 0; } - if (!strncmp(var, "diff.color.", 11) || !strncmp(var, "color.diff.", 11)) { + if (!prefixcmp(var, "diff.color.") || !prefixcmp(var, "color.diff.")) { int slot = parse_diff_color_slot(var, 11); color_parse(value, var, diff_colors[slot]); return 0; @@ -184,44 +184,61 @@ static void print_line_count(int count) 
} } -static void copy_file(int prefix, const char *data, int size) +static void copy_file(int prefix, const char *data, int size, + const char *set, const char *reset) { int ch, nl_just_seen = 1; while (0 < size--) { ch = *data++; - if (nl_just_seen) + if (nl_just_seen) { + fputs(set, stdout); putchar(prefix); - putchar(ch); - if (ch == '\n') + } + if (ch == '\n') { nl_just_seen = 1; - else + fputs(reset, stdout); + } else nl_just_seen = 0; + putchar(ch); } if (!nl_just_seen) - printf("\n\\ No newline at end of file\n"); + printf("%s\n\\ No newline at end of file\n", reset); } static void emit_rewrite_diff(const char *name_a, const char *name_b, struct diff_filespec *one, - struct diff_filespec *two) + struct diff_filespec *two, + int color_diff) { int lc_a, lc_b; + const char *name_a_tab, *name_b_tab; + const char *metainfo = diff_get_color(color_diff, DIFF_METAINFO); + const char *fraginfo = diff_get_color(color_diff, DIFF_FRAGINFO); + const char *old = diff_get_color(color_diff, DIFF_FILE_OLD); + const char *new = diff_get_color(color_diff, DIFF_FILE_NEW); + const char *reset = diff_get_color(color_diff, DIFF_RESET); + + name_a += (*name_a == '/'); + name_b += (*name_b == '/'); + name_a_tab = strchr(name_a, ' ') ? "\t" : ""; + name_b_tab = strchr(name_b, ' ') ? "\t" : ""; + diff_populate_filespec(one, 0); diff_populate_filespec(two, 0); lc_a = count_lines(one->data, one->size); lc_b = count_lines(two->data, two->size); - name_a += (*name_a == '/'); - name_b += (*name_b == '/'); - printf("--- a/%s\n+++ b/%s\n@@ -", name_a, name_b); + printf("%s--- a/%s%s%s\n%s+++ b/%s%s%s\n%s@@ -", + metainfo, name_a, name_a_tab, reset, + metainfo, name_b, name_b_tab, reset, fraginfo); print_line_count(lc_a); printf(" +"); print_line_count(lc_b); - printf(" @@\n"); + printf(" @@%s\n", reset); if (lc_a) - copy_file('-', one->data, one->size); + copy_file('-', one->data, one->size, old, reset); if (lc_b) - copy_file('+', two->data, two->size); + copy_file('+', two->data, two->size, new, reset); } static int fill_mmfile(mmfile_t *mf, struct diff_filespec *one) @@ -365,6 +382,7 @@ struct emit_callback { int nparents, color_diff; const char **label_path; struct diff_words_data *diff_words; + int *found_changesp; }; static void free_diff_words_data(struct emit_callback *ecbdata) @@ -400,22 +418,16 @@ static void emit_line(const char *set, const char *reset, const char *line, int puts(reset); } -static void emit_add_line(const char *reset, struct emit_callback *ecbdata, const char *line, int len) +static void emit_line_with_ws(int nparents, + const char *set, const char *reset, const char *ws, + const char *line, int len) { - int col0 = ecbdata->nparents; + int col0 = nparents; int last_tab_in_indent = -1; int last_space_in_indent = -1; int i; int tail = len; int need_highlight_leading_space = 0; - const char *ws = diff_get_color(ecbdata->color_diff, DIFF_WHITESPACE); - const char *set = diff_get_color(ecbdata->color_diff, DIFF_FILE_NEW); - - if (!*ws) { - emit_line(set, reset, line, len); - return; - } - /* The line is a newly added line. Does it have funny leading * whitespaces? In indent, SP should never precede a TAB. 
*/ @@ -470,6 +482,18 @@ static void emit_add_line(const char *reset, struct emit_callback *ecbdata, cons emit_line(set, reset, line + i, len - i); } +static void emit_add_line(const char *reset, struct emit_callback *ecbdata, const char *line, int len) +{ + const char *ws = diff_get_color(ecbdata->color_diff, DIFF_WHITESPACE); + const char *set = diff_get_color(ecbdata->color_diff, DIFF_FILE_NEW); + + if (!*ws) + emit_line(set, reset, line, len); + else + emit_line_with_ws(ecbdata->nparents, set, reset, ws, + line, len); +} + static void fn_out_consume(void *priv, char *line, unsigned long len) { int i; @@ -478,9 +502,18 @@ static void fn_out_consume(void *priv, char *line, unsigned long len) const char *set = diff_get_color(ecbdata->color_diff, DIFF_METAINFO); const char *reset = diff_get_color(ecbdata->color_diff, DIFF_RESET); + *(ecbdata->found_changesp) = 1; + if (ecbdata->label_path[0]) { - printf("%s--- %s%s\n", set, ecbdata->label_path[0], reset); - printf("%s+++ %s%s\n", set, ecbdata->label_path[1], reset); + const char *name_a_tab, *name_b_tab; + + name_a_tab = strchr(ecbdata->label_path[0], ' ') ? "\t" : ""; + name_b_tab = strchr(ecbdata->label_path[1], ' ') ? "\t" : ""; + + printf("%s--- %s%s%s\n", + set, ecbdata->label_path[0], reset, name_a_tab); + printf("%s+++ %s%s%s\n", + set, ecbdata->label_path[1], reset, name_b_tab); ecbdata->label_path[0] = ecbdata->label_path[1] = NULL; } @@ -872,30 +905,44 @@ static void show_numstat(struct diffstat_t* data, struct diff_options *options) struct checkdiff_t { struct xdiff_emit_state xm; const char *filename; - int lineno; + int lineno, color_diff; }; static void checkdiff_consume(void *priv, char *line, unsigned long len) { struct checkdiff_t *data = priv; + const char *ws = diff_get_color(data->color_diff, DIFF_WHITESPACE); + const char *reset = diff_get_color(data->color_diff, DIFF_RESET); + const char *set = diff_get_color(data->color_diff, DIFF_FILE_NEW); if (line[0] == '+') { - int i, spaces = 0; + int i, spaces = 0, space_before_tab = 0, white_space_at_end = 0; /* check space before tab */ for (i = 1; i < len && (line[i] == ' ' || line[i] == '\t'); i++) if (line[i] == ' ') spaces++; if (line[i - 1] == '\t' && spaces) - printf("%s:%d: space before tab:%.*s\n", - data->filename, data->lineno, (int)len, line); + space_before_tab = 1; /* check white space at line end */ if (line[len - 1] == '\n') len--; if (isspace(line[len - 1])) - printf("%s:%d: white space at end: %.*s\n", - data->filename, data->lineno, (int)len, line); + white_space_at_end = 1; + + if (space_before_tab || white_space_at_end) { + printf("%s:%d: %s", data->filename, data->lineno, ws); + if (space_before_tab) { + printf("space before tab"); + if (white_space_at_end) + putchar(','); + } + if (white_space_at_end) + printf("white space at end"); + printf(":%s ", reset); + emit_line_with_ws(1, set, reset, ws, line, len); + } data->lineno++; } else if (line[0] == ' ') @@ -1052,7 +1099,9 @@ static void builtin_diff(const char *name_a, if ((one->mode ^ two->mode) & S_IFMT) goto free_ab_and_return; if (complete_rewrite) { - emit_rewrite_diff(name_a, name_b, one, two); + emit_rewrite_diff(name_a, name_b, one, two, + o->color_diff); + o->found_changes = 1; goto free_ab_and_return; } } @@ -1070,6 +1119,7 @@ static void builtin_diff(const char *name_a, else printf("Binary files %s and %s differ\n", lbl[0], lbl[1]); + o->found_changes = 1; } else { /* Crazy xdl interfaces.. 
*/ @@ -1082,14 +1132,15 @@ static void builtin_diff(const char *name_a, memset(&ecbdata, 0, sizeof(ecbdata)); ecbdata.label_path = lbl; ecbdata.color_diff = o->color_diff; + ecbdata.found_changesp = &o->found_changes; xpp.flags = XDF_NEED_MINIMAL | o->xdl_opts; xecfg.ctxlen = o->context; xecfg.flags = XDL_EMIT_FUNCNAMES; if (!diffopts) ; - else if (!strncmp(diffopts, "--unified=", 10)) + else if (!prefixcmp(diffopts, "--unified=")) xecfg.ctxlen = strtoul(diffopts + 10, NULL, 10); - else if (!strncmp(diffopts, "-u", 2)) + else if (!prefixcmp(diffopts, "-u")) xecfg.ctxlen = strtoul(diffopts + 2, NULL, 10); ecb.outf = xdiff_outf; ecb.priv = &ecbdata; @@ -1153,7 +1204,7 @@ static void builtin_diffstat(const char *name_a, const char *name_b, static void builtin_checkdiff(const char *name_a, const char *name_b, struct diff_filespec *one, - struct diff_filespec *two) + struct diff_filespec *two, struct diff_options *o) { mmfile_t mf1, mf2; struct checkdiff_t data; @@ -1165,6 +1216,7 @@ static void builtin_checkdiff(const char *name_a, const char *name_b, data.xm.consume = checkdiff_consume; data.filename = name_b ? name_b : name_a; data.lineno = 0; + data.color_diff = o->color_diff; if (fill_mmfile(&mf1, one) < 0 || fill_mmfile(&mf2, two) < 0) die("unable to read files to diff"); @@ -1334,6 +1386,9 @@ int diff_populate_filespec(struct diff_filespec *s, int size_only) reuse_worktree_file(s->path, s->sha1, 0)) { struct stat st; int fd; + char *buf; + unsigned long size; + if (lstat(s->path, &st) < 0) { if (errno == ENOENT) { err_empty: @@ -1366,10 +1421,22 @@ int diff_populate_filespec(struct diff_filespec *s, int size_only) s->data = xmmap(NULL, s->size, PROT_READ, MAP_PRIVATE, fd, 0); close(fd); s->should_munmap = 1; - /* FIXME! CRLF -> LF conversion goes here, based on "s->path" */ + + /* + * Convert from working tree format to canonical git format + */ + buf = s->data; + size = s->size; + if (convert_to_git(s->path, &buf, &size)) { + munmap(s->data, s->size); + s->should_munmap = 0; + s->data = buf; + s->size = size; + s->should_free = 1; + } } else { - char type[20]; + enum object_type type; struct sha1_size_cache *e; if (size_only) { @@ -1378,11 +1445,12 @@ int diff_populate_filespec(struct diff_filespec *s, int size_only) s->size = e->size; return 0; } - if (!sha1_object_info(s->sha1, type, &s->size)) + type = sha1_object_info(s->sha1, &s->size); + if (type < 0) locate_size_cache(s->sha1, 0, s->size); } else { - s->data = read_sha1_file(s->sha1, type, &s->size); + s->data = read_sha1_file(s->sha1, &type, &s->size); s->should_free = 1; } } @@ -1775,7 +1843,7 @@ static void run_checkdiff(struct diff_filepair *p, struct diff_options *o) diff_fill_sha1_info(p->one); diff_fill_sha1_info(p->two); - builtin_checkdiff(name, other, p->one, p->two); + builtin_checkdiff(name, other, p->one, p->two, o); } void diff_setup(struct diff_options *options) @@ -1924,7 +1992,7 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac) else if (!strcmp(arg, "--shortstat")) { options->output_format |= DIFF_FORMAT_SHORTSTAT; } - else if (!strncmp(arg, "--stat", 6)) { + else if (!prefixcmp(arg, "--stat")) { char *end; int width = options->stat_width; int name_width = options->stat_name_width; @@ -1933,9 +2001,9 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac) switch (*arg) { case '-': - if (!strncmp(arg, "-width=", 7)) + if (!prefixcmp(arg, "-width=")) width = strtoul(arg + 7, &end, 10); - else if (!strncmp(arg, "-name-width=", 12)) + else if (!prefixcmp(arg, 
"-name-width=")) name_width = strtoul(arg + 12, &end, 10); break; case '=': @@ -1960,7 +2028,7 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac) } else if (!strcmp(arg, "-z")) options->line_termination = 0; - else if (!strncmp(arg, "-l", 2)) + else if (!prefixcmp(arg, "-l")) options->rename_limit = strtoul(arg+2, NULL, 10); else if (!strcmp(arg, "--full-index")) options->full_index = 1; @@ -1977,31 +2045,31 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac) options->output_format |= DIFF_FORMAT_NAME_STATUS; else if (!strcmp(arg, "-R")) options->reverse_diff = 1; - else if (!strncmp(arg, "-S", 2)) + else if (!prefixcmp(arg, "-S")) options->pickaxe = arg + 2; else if (!strcmp(arg, "-s")) { options->output_format |= DIFF_FORMAT_NO_OUTPUT; } - else if (!strncmp(arg, "-O", 2)) + else if (!prefixcmp(arg, "-O")) options->orderfile = arg + 2; - else if (!strncmp(arg, "--diff-filter=", 14)) + else if (!prefixcmp(arg, "--diff-filter=")) options->filter = arg + 14; else if (!strcmp(arg, "--pickaxe-all")) options->pickaxe_opts = DIFF_PICKAXE_ALL; else if (!strcmp(arg, "--pickaxe-regex")) options->pickaxe_opts = DIFF_PICKAXE_REGEX; - else if (!strncmp(arg, "-B", 2)) { + else if (!prefixcmp(arg, "-B")) { if ((options->break_opt = diff_scoreopt_parse(arg)) == -1) return -1; } - else if (!strncmp(arg, "-M", 2)) { + else if (!prefixcmp(arg, "-M")) { if ((options->rename_score = diff_scoreopt_parse(arg)) == -1) return -1; options->detect_rename = DIFF_DETECT_RENAME; } - else if (!strncmp(arg, "-C", 2)) { + else if (!prefixcmp(arg, "-C")) { if ((options->rename_score = diff_scoreopt_parse(arg)) == -1) return -1; @@ -2011,7 +2079,7 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac) options->find_copies_harder = 1; else if (!strcmp(arg, "--abbrev")) options->abbrev = DEFAULT_ABBREV; - else if (!strncmp(arg, "--abbrev=", 9)) { + else if (!prefixcmp(arg, "--abbrev=")) { options->abbrev = strtoul(arg + 9, NULL, 10); if (options->abbrev < MINIMUM_ABBREV) options->abbrev = MINIMUM_ABBREV; @@ -2026,6 +2094,8 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac) options->xdl_opts |= XDF_IGNORE_WHITESPACE; else if (!strcmp(arg, "-b") || !strcmp(arg, "--ignore-space-change")) options->xdl_opts |= XDF_IGNORE_WHITESPACE_CHANGE; + else if (!strcmp(arg, "--ignore-space-at-eol")) + options->xdl_opts |= XDF_IGNORE_WHITESPACE_AT_EOL; else if (!strcmp(arg, "--color-words")) options->color_diff = options->color_diff_words = 1; else if (!strcmp(arg, "--no-renames")) @@ -2392,7 +2462,8 @@ static void diff_resolve_rename_copy(void) p->status = DIFF_STATUS_RENAMED; } else if (hashcmp(p->one->sha1, p->two->sha1) || - p->one->mode != p->two->mode) + p->one->mode != p->two->mode || + is_null_sha1(p->one->sha1)) p->status = DIFF_STATUS_MODIFIED; else { /* This is a "no-change" entry and should not @@ -2518,7 +2589,7 @@ static void patch_id_consume(void *priv, char *line, unsigned long len) int new_len; /* Ignore line numbers when computing the SHA1 of the patch */ - if (!strncmp(line, "@@ -", 4)) + if (!prefixcmp(line, "@@ -")) return; new_len = remove_space(line, len); @@ -75,6 +75,9 @@ struct diff_options { int stat_width; int stat_name_width; + /* this is set by diffcore for DIFF_FORMAT_PATCH */ + int found_changes; + int nr_paths; const char **paths; int *pathlens; @@ -219,6 +222,7 @@ extern void diff_flush(struct diff_options*); extern const char *diff_unique_abbrev(const unsigned char *, int); extern int run_diff_files(struct 
rev_info *revs, int silent_on_removed); +extern int run_diff_files_cmd(struct rev_info *revs, int argc, const char **argv); extern int run_diff_index(struct rev_info *revs, int cached); @@ -68,16 +68,19 @@ static int write_entry(struct cache_entry *ce, char *path, struct checkout *stat void *new; unsigned long size; long wrote; - char type[20]; + enum object_type type; - new = read_sha1_file(ce->sha1, type, &size); - if (!new || strcmp(type, blob_type)) { + new = read_sha1_file(ce->sha1, &type, &size); + if (!new || type != OBJ_BLOB) { if (new) free(new); return error("git-checkout-index: unable to read sha1 file of %s (%s)", path, sha1_to_hex(ce->sha1)); } switch (ntohl(ce->ce_mode) & S_IFMT) { + char *buf; + unsigned long nsize; + case S_IFREG: if (to_tempfile) { strcpy(path, ".merge_file_XXXXXX"); @@ -89,7 +92,18 @@ static int write_entry(struct cache_entry *ce, char *path, struct checkout *stat return error("git-checkout-index: unable to create file %s (%s)", path, strerror(errno)); } - /* FIXME: LF -> CRLF conversion goes here, based on "ce->name" */ + + /* + * Convert from git internal format to working tree format + */ + buf = new; + nsize = size; + if (convert_to_working_tree(ce->name, &buf, &nsize)) { + free(new); + new = buf; + size = nsize; + } + wrote = write_in_full(fd, new, size); close(fd); free(new); diff --git a/environment.c b/environment.c index 54c22f8248..570e32ac3c 100644 --- a/environment.c +++ b/environment.c @@ -28,6 +28,7 @@ size_t packed_git_window_size = DEFAULT_PACKED_GIT_WINDOW_SIZE; size_t packed_git_limit = DEFAULT_PACKED_GIT_LIMIT; int pager_in_use; int pager_use_color = 1; +int auto_crlf = 0; /* 1: both ways, -1: only when adding git objects */ static const char *git_dir; static char *git_object_dir, *git_index_file, *git_refs_dir, *git_graft_file; diff --git a/exec_cmd.c b/exec_cmd.c index 3996bce33f..9b74ed2f42 100644 --- a/exec_cmd.c +++ b/exec_cmd.c @@ -56,7 +56,7 @@ int execv_git_cmd(const char **argv) len = strlen(git_command); /* Trivial cleanup */ - while (!strncmp(exec_dir, "./", 2)) { + while (!prefixcmp(exec_dir, "./")) { exec_dir += 2; while (*exec_dir == '/') exec_dir++; diff --git a/fast-import.c b/fast-import.c index 1ae125a040..65e99c2e8b 100644 --- a/fast-import.c +++ b/fast-import.c @@ -891,7 +891,7 @@ static int store_object( SHA_CTX c; z_stream s; - hdrlen = sprintf((char*)hdr,"%s %lu", type_names[type], + hdrlen = sprintf((char*)hdr,"%s %lu", typename(type), (unsigned long)datlen) + 1; SHA1_Init(&c); SHA1_Update(&c, hdr, hdrlen); @@ -1008,11 +1008,11 @@ static void *gfi_unpack_entry( struct object_entry *oe, unsigned long *sizep) { - static char type[20]; + enum object_type type; struct packed_git *p = all_packs[oe->pack_id]; if (p == pack_data) p->pack_size = pack_size + 20; - return unpack_entry(p, oe->offset, type, sizep); + return unpack_entry(p, oe->offset, &type, sizep); } static const char *get_mode(const char *str, uint16_t *modep) @@ -1049,9 +1049,9 @@ static void load_tree(struct tree_entry *root) t->delta_depth = 0; buf = gfi_unpack_entry(myoe, &size); } else { - char type[20]; - buf = read_sha1_file(sha1, type, &size); - if (!buf || strcmp(type, tree_type)) + enum object_type type; + buf = read_sha1_file(sha1, &type, &size); + if (!buf || type != OBJ_TREE) die("Can't load tree %s", sha1_to_hex(sha1)); } @@ -1312,7 +1312,7 @@ static int update_branch(struct branch *b) return error("Branch %s is missing commits.", b->name); } - if (!in_merge_bases(old_cmit, new_cmit)) { + if (!in_merge_bases(old_cmit, &new_cmit, 1)) { 
unlock_ref(lock); warn("Not updating %s" " (new tip %s does not contain %s)", @@ -1392,7 +1392,7 @@ static void read_next_command(void) static void cmd_mark(void) { - if (!strncmp("mark :", command_buf.buf, 6)) { + if (!prefixcmp(command_buf.buf, "mark :")) { next_mark = strtoumax(command_buf.buf + 6, NULL, 10); read_next_command(); } @@ -1405,10 +1405,10 @@ static void *cmd_data (size_t *size) size_t length; char *buffer; - if (strncmp("data ", command_buf.buf, 5)) + if (prefixcmp(command_buf.buf, "data ")) die("Expected 'data n' command, found: %s", command_buf.buf); - if (!strncmp("<<", command_buf.buf + 5, 2)) { + if (!prefixcmp(command_buf.buf + 5, "<<")) { char *term = xstrdup(command_buf.buf + 5 + 2); size_t sz = 8192, term_len = command_buf.len - 5 - 2; length = 0; @@ -1573,7 +1573,6 @@ static void file_change_m(struct branch *b) struct object_entry *oe = oe; unsigned char sha1[20]; uint16_t mode, inline_data = 0; - char type[20]; p = get_mode(p, &mode); if (!p) @@ -1595,7 +1594,7 @@ static void file_change_m(struct branch *b) oe = find_mark(strtoumax(p + 1, &x, 10)); hashcpy(sha1, oe->sha1); p = x; - } else if (!strncmp("inline", p, 6)) { + } else if (!prefixcmp(p, "inline")) { inline_data = 1; p += 6; } else { @@ -1626,13 +1625,14 @@ static void file_change_m(struct branch *b) } else if (oe) { if (oe->type != OBJ_BLOB) die("Not a blob (actually a %s): %s", - command_buf.buf, type_names[oe->type]); + command_buf.buf, typename(oe->type)); } else { - if (sha1_object_info(sha1, type, NULL)) + enum object_type type = sha1_object_info(sha1, NULL); + if (type < 0) die("Blob not found: %s", command_buf.buf); - if (strcmp(blob_type, type)) + if (type != OBJ_BLOB) die("Not a blob (actually a %s): %s", - command_buf.buf, type); + typename(type), command_buf.buf); } tree_content_set(&b->branch_tree, p, sha1, S_IFREG | mode); @@ -1668,7 +1668,7 @@ static void cmd_from(struct branch *b) const char *from; struct branch *s; - if (strncmp("from ", command_buf.buf, 5)) + if (prefixcmp(command_buf.buf, "from ")) return; if (b->branch_tree.tree) { @@ -1711,7 +1711,7 @@ static void cmd_from(struct branch *b) char *buf; buf = read_object_with_reference(b->sha1, - type_names[OBJ_COMMIT], &size, b->sha1); + commit_type, &size, b->sha1); if (!buf || size < 46) die("Not a valid commit: %s", from); if (memcmp("tree ", buf, 5) @@ -1734,7 +1734,7 @@ static struct hash_list *cmd_merge(unsigned int *count) struct branch *s; *count = 0; - while (!strncmp("merge ", command_buf.buf, 6)) { + while (!prefixcmp(command_buf.buf, "merge ")) { from = strchr(command_buf.buf, ' ') + 1; n = xmalloc(sizeof(*n)); s = lookup_branch(from); @@ -1780,11 +1780,11 @@ static void cmd_new_commit(void) read_next_command(); cmd_mark(); - if (!strncmp("author ", command_buf.buf, 7)) { + if (!prefixcmp(command_buf.buf, "author ")) { author = parse_ident(command_buf.buf + 7); read_next_command(); } - if (!strncmp("committer ", command_buf.buf, 10)) { + if (!prefixcmp(command_buf.buf, "committer ")) { committer = parse_ident(command_buf.buf + 10); read_next_command(); } @@ -1805,9 +1805,9 @@ static void cmd_new_commit(void) for (;;) { if (1 == command_buf.len) break; - else if (!strncmp("M ", command_buf.buf, 2)) + else if (!prefixcmp(command_buf.buf, "M ")) file_change_m(b); - else if (!strncmp("D ", command_buf.buf, 2)) + else if (!prefixcmp(command_buf.buf, "D ")) file_change_d(b); else if (!strcmp("deleteall", command_buf.buf)) file_change_deleteall(b); @@ -1877,7 +1877,7 @@ static void cmd_new_tag(void) read_next_command(); /* from 
... */ - if (strncmp("from ", command_buf.buf, 5)) + if (prefixcmp(command_buf.buf, "from ")) die("Expected from command, got %s", command_buf.buf); from = strchr(command_buf.buf, ' ') + 1; s = lookup_branch(from); @@ -1895,7 +1895,7 @@ static void cmd_new_tag(void) char *buf; buf = read_object_with_reference(sha1, - type_names[OBJ_COMMIT], &size, sha1); + commit_type, &size, sha1); if (!buf || size < 46) die("Not a valid commit: %s", from); free(buf); @@ -1904,7 +1904,7 @@ static void cmd_new_tag(void) read_next_command(); /* tagger ... */ - if (strncmp("tagger ", command_buf.buf, 7)) + if (prefixcmp(command_buf.buf, "tagger ")) die("Expected tagger command, got %s", command_buf.buf); tagger = parse_ident(command_buf.buf + 7); @@ -1916,7 +1916,7 @@ static void cmd_new_tag(void) size_dbuf(&new_data, 67+strlen(t->name)+strlen(tagger)+msglen); sp = new_data.buffer; sp += sprintf(sp, "object %s\n", sha1_to_hex(sha1)); - sp += sprintf(sp, "type %s\n", type_names[OBJ_COMMIT]); + sp += sprintf(sp, "type %s\n", commit_type); sp += sprintf(sp, "tag %s\n", t->name); sp += sprintf(sp, "tagger %s\n", tagger); *sp++ = '\n'; @@ -1981,7 +1981,7 @@ int main(int argc, const char **argv) if (*a != '-' || !strcmp(a, "--")) break; - else if (!strncmp(a, "--date-format=", 14)) { + else if (!prefixcmp(a, "--date-format=")) { const char *fmt = a + 14; if (!strcmp(fmt, "raw")) whenspec = WHENSPEC_RAW; @@ -1992,15 +1992,15 @@ int main(int argc, const char **argv) else die("unknown --date-format argument %s", fmt); } - else if (!strncmp(a, "--max-pack-size=", 16)) + else if (!prefixcmp(a, "--max-pack-size=")) max_packsize = strtoumax(a + 16, NULL, 0) * 1024 * 1024; - else if (!strncmp(a, "--depth=", 8)) + else if (!prefixcmp(a, "--depth=")) max_depth = strtoul(a + 8, NULL, 0); - else if (!strncmp(a, "--active-branches=", 18)) + else if (!prefixcmp(a, "--active-branches=")) max_active_branches = strtoul(a + 18, NULL, 0); - else if (!strncmp(a, "--export-marks=", 15)) + else if (!prefixcmp(a, "--export-marks=")) mark_file = a + 15; - else if (!strncmp(a, "--export-pack-edges=", 20)) { + else if (!prefixcmp(a, "--export-pack-edges=")) { if (pack_edges) fclose(pack_edges); pack_edges = fopen(a + 20, "a"); @@ -2033,11 +2033,11 @@ int main(int argc, const char **argv) break; else if (!strcmp("blob", command_buf.buf)) cmd_new_blob(); - else if (!strncmp("commit ", command_buf.buf, 7)) + else if (!prefixcmp(command_buf.buf, "commit ")) cmd_new_commit(); - else if (!strncmp("tag ", command_buf.buf, 4)) + else if (!prefixcmp(command_buf.buf, "tag ")) cmd_new_tag(); - else if (!strncmp("reset ", command_buf.buf, 6)) + else if (!prefixcmp(command_buf.buf, "reset ")) cmd_reset_branch(); else if (!strcmp("checkpoint", command_buf.buf)) cmd_checkpoint(); diff --git a/fetch-pack.c b/fetch-pack.c index c787106764..41bdd27b8f 100644 --- a/fetch-pack.c +++ b/fetch-pack.c @@ -198,13 +198,13 @@ static int find_common(int fd[2], unsigned char *result_sha1, int len; while ((len = packet_read_line(fd[0], line, sizeof(line)))) { - if (!strncmp("shallow ", line, 8)) { + if (!prefixcmp(line, "shallow ")) { if (get_sha1_hex(line + 8, sha1)) die("invalid shallow line: %s", line); register_shallow(sha1); continue; } - if (!strncmp("unshallow ", line, 10)) { + if (!prefixcmp(line, "unshallow ")) { if (get_sha1_hex(line + 10, sha1)) die("invalid unshallow line: %s", line); if (!lookup_object(sha1)) @@ -346,7 +346,7 @@ static void filter_refs(struct ref **refs, int nr_match, char **match) check_ref_format(ref->name + 5)) ; /* trash */ else if 
(fetch_all && - (!depth || strncmp(ref->name, "refs/tags/", 10) )) { + (!depth || prefixcmp(ref->name, "refs/tags/") )) { *newtail = ref; ref->next = NULL; newtail = &ref->next; @@ -683,11 +683,11 @@ int main(int argc, char **argv) char *arg = argv[i]; if (*arg == '-') { - if (!strncmp("--upload-pack=", arg, 14)) { + if (!prefixcmp(arg, "--upload-pack=")) { uploadpack = arg + 14; continue; } - if (!strncmp("--exec=", arg, 7)) { + if (!prefixcmp(arg, "--exec=")) { uploadpack = arg + 7; continue; } @@ -712,7 +712,7 @@ int main(int argc, char **argv) verbose = 1; continue; } - if (!strncmp("--depth=", arg, 8)) { + if (!prefixcmp(arg, "--depth=")) { depth = strtol(arg + 8, NULL, 0); if (stat(git_path("shallow"), &st)) st.st_mtime = 0; diff --git a/git-commit.sh b/git-commit.sh index 476f4f18db..be3677c204 100755 --- a/git-commit.sh +++ b/git-commit.sh @@ -13,10 +13,10 @@ git-rev-parse --verify HEAD >/dev/null 2>&1 || initial_commit=t case "$0" in *status) status_only=t - unmerged_ok_if_status=--unmerged ;; + ;; *commit) status_only= - unmerged_ok_if_status= ;; + ;; esac refuse_partial () { @@ -393,16 +393,17 @@ else USE_INDEX="$THIS_INDEX" fi -GIT_INDEX_FILE="$USE_INDEX" \ - git-update-index -q $unmerged_ok_if_status --refresh || exit - -################################################################ -# If the request is status, just show it and exit. - -case "$0" in -*status) +case "$status_only" in +t) + # This will silently fail in a read-only repository, which is + # what we want. + GIT_INDEX_FILE="$USE_INDEX" git-update-index -q --unmerged --refresh run_status exit $? + ;; +'') + GIT_INDEX_FILE="$USE_INDEX" git-update-index -q --refresh || exit + ;; esac ################################################################ diff --git a/git-compat-util.h b/git-compat-util.h index 9863cf671f..5d154faef6 100644 --- a/git-compat-util.h +++ b/git-compat-util.h @@ -1,6 +1,8 @@ #ifndef GIT_COMPAT_UTIL_H #define GIT_COMPAT_UTIL_H +#define _FILE_OFFSET_BITS 64 + #ifndef FLEX_ARRAY #if defined(__GNUC__) && (__GNUC__ < 3) #define FLEX_ARRAY 0 @@ -279,4 +281,9 @@ static inline int sane_case(int x, int high) return x; } +static inline int prefixcmp(const char *str, const char *prefix) +{ + return strncmp(str, prefix, strlen(prefix)); +} + #endif diff --git a/git-cvsexportcommit.perl b/git-cvsexportcommit.perl index 32a4883321..67224b4449 100755 --- a/git-cvsexportcommit.perl +++ b/git-cvsexportcommit.perl @@ -15,14 +15,21 @@ unless ($ENV{GIT_DIR} && -r $ENV{GIT_DIR}){ die "GIT_DIR is not defined or is unreadable"; } -our ($opt_h, $opt_P, $opt_p, $opt_v, $opt_c, $opt_f, $opt_a, $opt_m ); +our ($opt_h, $opt_P, $opt_p, $opt_v, $opt_c, $opt_f, $opt_a, $opt_m, $opt_d); -getopts('hPpvcfam:'); +getopts('hPpvcfam:d:'); $opt_h && usage(); die "Need at least one commit identifier!" unless @ARGV; +my @cvs; +if ($opt_d) { + @cvs = ('cvs', '-d', $opt_d); +} else { + @cvs = ('cvs'); +} + # setup a tempdir our ($tmpdir, $tmpdirname) = tempdir('git-cvsapplycommit-XXXXXX', TMPDIR => 1, @@ -160,7 +167,7 @@ foreach my $f (@afiles) { my $p = $1; next if (grep { $_ eq $p } @dirs); } - my @status = grep(m/^File/, safe_pipe_capture('cvs', '-q', 'status' ,$f)); + my @status = grep(m/^File/, safe_pipe_capture(@cvs, '-q', 'status' ,$f)); if (@status > 1) { warn 'Strange! 
cvs status returned more than one line?'}; if (-d dirname $f and $status[0] !~ m/Status: Unknown$/ and $status[0] !~ m/^File: no file /) { @@ -173,7 +180,7 @@ foreach my $f (@afiles) { foreach my $f (@files) { next if grep { $_ eq $f } @afiles; # TODO:we need to handle removed in cvs - my @status = grep(m/^File/, safe_pipe_capture('cvs', '-q', 'status' ,$f)); + my @status = grep(m/^File/, safe_pipe_capture(@cvs, '-q', 'status' ,$f)); if (@status > 1) { warn 'Strange! cvs status returned more than one line?'}; unless ($status[0] =~ m/Status: Up-to-date$/) { $dirty = 1; @@ -194,7 +201,7 @@ print "Applying\n"; print "Patch applied successfully. Adding new files and directories to CVS\n"; my $dirtypatch = 0; foreach my $d (@dirs) { - if (system('cvs','add',$d)) { + if (system(@cvs,'add',$d)) { $dirtypatch = 1; warn "Failed to cvs add directory $d -- you may need to do it manually"; } @@ -202,9 +209,9 @@ foreach my $d (@dirs) { foreach my $f (@afiles) { if (grep { $_ eq $f } @bfiles) { - system('cvs', 'add','-kb',$f); + system(@cvs, 'add','-kb',$f); } else { - system('cvs', 'add', $f); + system(@cvs, 'add', $f); } if ($?) { $dirtypatch = 1; @@ -213,7 +220,7 @@ foreach my $f (@afiles) { } foreach my $f (@dfiles) { - system('cvs', 'rm', '-f', $f); + system(@cvs, 'rm', '-f', $f); if ($?) { $dirtypatch = 1; warn "Failed to cvs rm -f $f -- you may need to do it manually"; @@ -223,7 +230,7 @@ foreach my $f (@dfiles) { print "Commit to CVS\n"; print "Patch title (first comment line): $title\n"; my @commitfiles = map { unless (m/\s/) { '\''.$_.'\''; } else { $_; }; } (@files); -my $cmd = "cvs commit -F .msg @commitfiles"; +my $cmd = join(' ', @cvs)." commit -F .msg @commitfiles"; if ($dirtypatch) { print "NOTE: One or more hunks failed to apply cleanly.\n"; @@ -236,7 +243,7 @@ if ($dirtypatch) { if ($opt_c) { print "Autocommit\n $cmd\n"; - print safe_pipe_capture('cvs', 'commit', '-F', '.msg', @files); + print safe_pipe_capture(@cvs, 'commit', '-F', '.msg', @files); if ($?) { die "Exiting: The commit did not succeed"; } diff --git a/git-cvsserver.perl b/git-cvsserver.perl index 9371788fab..1bf892e4c1 100755 --- a/git-cvsserver.perl +++ b/git-cvsserver.perl @@ -374,7 +374,8 @@ sub req_add print "Checked-in $dirpart\n"; print "$filename\n"; - print "/$filepart/0///\n"; + my $kopts = kopts_from_path($filepart); + print "/$filepart/0//$kopts/\n"; $addcount++; } @@ -455,7 +456,8 @@ sub req_remove print "Checked-in $dirpart\n"; print "$filename\n"; - print "/$filepart/-1.$wrev///\n"; + my $kopts = kopts_from_path($filepart); + print "/$filepart/-1.$wrev//$kopts/\n"; $rmcount++; } @@ -726,7 +728,8 @@ sub req_co print $state->{CVSROOT} . "/$module/" . ( defined ( $git->{dir} ) and $git->{dir} ne "./" ? $git->{dir} . "/" : "" ) . "$git->{name}\n"; # this is an "entries" line - print "/$git->{name}/1.$git->{revision}///\n"; + my $kopts = kopts_from_path($git->{name}); + print "/$git->{name}/1.$git->{revision}//$kopts/\n"; # permissions print "u=$git->{mode},g=$git->{mode},o=$git->{mode}\n"; @@ -917,8 +920,9 @@ sub req_update print $state->{CVSROOT} . 
"/$state->{module}/$filename\n"; # this is an "entries" line - $log->debug("/$filepart/1.$meta->{revision}///"); - print "/$filepart/1.$meta->{revision}///\n"; + my $kopts = kopts_from_path($filepart); + $log->debug("/$filepart/1.$meta->{revision}//$kopts/"); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; # permissions $log->debug("SEND : u=$meta->{mode},g=$meta->{mode},o=$meta->{mode}"); @@ -961,8 +965,9 @@ sub req_update print "Update-existing $dirpart\n"; $log->debug($state->{CVSROOT} . "/$state->{module}/$filename"); print $state->{CVSROOT} . "/$state->{module}/$filename\n"; - $log->debug("/$filepart/1.$meta->{revision}///"); - print "/$filepart/1.$meta->{revision}///\n"; + my $kopts = kopts_from_path($filepart); + $log->debug("/$filepart/1.$meta->{revision}//$kopts/"); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; } } elsif ( $return == 1 ) @@ -975,7 +980,8 @@ sub req_update { print "Update-existing $dirpart\n"; print $state->{CVSROOT} . "/$state->{module}/$filename\n"; - print "/$filepart/1.$meta->{revision}/+//\n"; + my $kopts = kopts_from_path($filepart); + print "/$filepart/1.$meta->{revision}/+/$kopts/\n"; } } else @@ -1031,36 +1037,35 @@ sub req_ci exit; } - my $lockfile = "$state->{CVSROOT}/refs/heads/$state->{module}.lock"; - unless ( sysopen(LOCKFILE,$lockfile,O_EXCL|O_CREAT|O_WRONLY) ) - { - $log->warn("lockfile '$lockfile' already exists, please try again"); - print "error 1 Lock file '$lockfile' already exists, please try again\n"; - exit; - } - # Grab a handle to the SQLite db and do any necessary updates my $updater = GITCVS::updater->new($state->{CVSROOT}, $state->{module}, $log); $updater->update(); my $tmpdir = tempdir ( DIR => $TEMP_DIR ); my ( undef, $file_index ) = tempfile ( DIR => $TEMP_DIR, OPEN => 0 ); - $log->info("Lock successful, basing commit on '$tmpdir', index file is '$file_index'"); + $log->info("Lockless commit start, basing commit on '$tmpdir', index file is '$file_index'"); $ENV{GIT_DIR} = $state->{CVSROOT} . "/"; $ENV{GIT_INDEX_FILE} = $file_index; + # Remember where the head was at the beginning. + my $parenthash = `git show-ref -s refs/heads/$state->{module}`; + chomp $parenthash; + if ($parenthash !~ /^[0-9a-f]{40}$/) { + print "error 1 pserver cannot find the current HEAD of module"; + exit; + } + chdir $tmpdir; # populate the temporary index based - system("git-read-tree", $state->{module}); + system("git-read-tree", $parenthash); unless ($? == 0) { die "Error running git-read-tree $state->{module} $file_index $!"; } $log->info("Created index '$file_index' with for head $state->{module} - exit status $?"); - my @committedfiles = (); # foreach file specified on the command line ... 
@@ -1095,8 +1100,6 @@ sub req_ci { # fail everything if an up to date check fails print "error 1 Up to date check failed for $filename\n"; - close LOCKFILE; - unlink($lockfile); chdir "/"; exit; } @@ -1139,16 +1142,12 @@ sub req_ci { print "E No files to commit\n"; print "ok\n"; - close LOCKFILE; - unlink($lockfile); chdir "/"; return; } my $treehash = `git-write-tree`; - my $parenthash = `cat $ENV{GIT_DIR}refs/heads/$state->{module}`; chomp $treehash; - chomp $parenthash; $log->debug("Treehash : $treehash, Parenthash : $parenthash"); @@ -1159,19 +1158,36 @@ sub req_ci close $msg_fh; my $commithash = `git-commit-tree $treehash -p $parenthash < $msg_filename`; + chomp($commithash); $log->info("Commit hash : $commithash"); unless ( $commithash =~ /[a-zA-Z0-9]{40}/ ) { $log->warn("Commit failed (Invalid commit hash)"); print "error 1 Commit failed (unknown reason)\n"; - close LOCKFILE; - unlink($lockfile); chdir "/"; exit; } - print LOCKFILE $commithash; + # Check that this is allowed, just as we would with a receive-pack + my @cmd = ( $ENV{GIT_DIR}.'hooks/update', "refs/heads/$state->{module}", + $parenthash, $commithash ); + if( -x $cmd[0] ) { + unless( system( @cmd ) == 0 ) + { + $log->warn("Commit failed (update hook declined to update ref)"); + print "error 1 Commit failed (update hook declined)\n"; + chdir "/"; + exit; + } + } + + if (system(qw(git update-ref -m), "cvsserver ci", + "refs/heads/$state->{module}", $commithash, $parenthash)) { + $log->warn("update-ref for $state->{module} failed."); + print "error 1 Cannot commit -- update first\n"; + exit; + } $updater->update(); @@ -1196,16 +1212,12 @@ sub req_ci } else { print "Checked-in $dirpart\n"; print "$filename\n"; - print "/$filepart/1.$meta->{revision}///\n"; + my $kopts = kopts_from_path($filepart); + print "/$filepart/1.$meta->{revision}//$kopts/\n"; } } - close LOCKFILE; - my $reffile = "$ENV{GIT_DIR}refs/heads/$state->{module}"; - unlink($reffile); - rename($lockfile, $reffile); chdir "/"; - print "ok\n"; } @@ -1882,6 +1894,28 @@ sub filecleanup return $filename; } +# Given a path, this function returns a string containing the kopts +# that should go into that path's Entries line. For example, a binary +# file should get -kb. +sub kopts_from_path +{ + my ($path) = @_; + + # Once it exists, the git attributes system should be used to look up + # what attributes apply to this path. + + # Until then, take the setting from the config file + unless ( defined ( $cfg->{gitcvs}{allbinary} ) and $cfg->{gitcvs}{allbinary} =~ /^\s*(1|true|yes)\s*$/i ) + { + # Return "" to give no special treatment to any path + return ""; + } else { + # Alternatively, to have all files treated as if they are binary (which + # is more like git itself), always return the "-kb" option + return "-kb"; + } +} + package GITCVS::log; #### diff --git a/git-fetch.sh b/git-fetch.sh index ca984e739a..d230995f6e 100755 --- a/git-fetch.sh +++ b/git-fetch.sh @@ -243,6 +243,15 @@ then orig_head=$(git-rev-parse --verify HEAD 2>/dev/null) fi +# Allow --notags from remote.$1.tagopt +case "$tags$no_tags" in +'') + case "$(git-config --get "remote.$1.tagopt")" in + --no-tags) + no_tags=t ;; + esac +esac + # If --tags (and later --heads or --all) is specified, then we are # not talking about defaults stored in Pull: line of remotes or # branches file, and just fetch those and refspecs explicitly given. 
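[Editor's note] The req_ci rewrite above drops the hand-rolled .lock file and instead records the branch head before the commit starts, then hands that old value to "git update-ref" so the ref only advances if nobody else moved it in the meantime. The following is a minimal standalone Perl sketch of that compare-and-swap pattern, not part of the patch itself; the ref name and commit message are placeholders and the index/work-tree setup is deliberately elided.

#!/usr/bin/perl
# Sketch of the lockless commit pattern the req_ci hunk switches to:
# remember the ref's old value up front, build the commit, then let
# "git update-ref <ref> <new> <old>" act as the compare-and-swap so a
# concurrent writer makes update-ref fail instead of clobbering the ref.
use strict;
use warnings;

my $ref = 'refs/heads/master';    # placeholder branch/module name

chomp(my $parent = `git show-ref -s $ref`);
die "cannot find current head of $ref\n" unless $parent =~ /^[0-9a-f]{40}$/;

# ... populate a temporary index here (read-tree $parent, update-index, ...) ...

chomp(my $tree = `git write-tree`);
chomp(my $commit = `echo "sketch commit" | git commit-tree $tree -p $parent`);
die "commit-tree failed\n" unless $commit =~ /^[0-9a-f]{40}$/;

# Only move the ref if nobody else did in the meantime.
if (system(qw(git update-ref -m), 'sketch: lockless ci',
           $ref, $commit, $parent)) {
    die "ref $ref moved underneath us -- update first\n";
}
print "advanced $ref to $commit\n";

If another writer advances the ref between show-ref and update-ref, the final call fails and the caller can fetch and retry, which is the same "error 1 Cannot commit -- update first" behaviour the hunk above reports back to the CVS client.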
diff --git a/git-remote.perl b/git-remote.perl index 670bafb6d0..bd70bf1ddd 100755 --- a/git-remote.perl +++ b/git-remote.perl @@ -274,6 +274,31 @@ sub add_remote { } } +sub update_remote { + my ($name) = @_; + + my $conf = $git->config("remotes." . $name); + if (defined($conf)) { + @remotes = split(' ', $conf); + } elsif ($name eq 'default') { + undef @remotes; + for (sort keys %$remote) { + my $do_fetch = $git->config_boolean("remote." . $_ . + ".skipDefaultUpdate"); + if (!defined($do_fetch) || $do_fetch ne "true") { + push @remotes, $_; + } + } + } else { + print STDERR "Remote group $name does not exists.\n"; + exit(1); + } + for (@remotes) { + print "Updating $_\n"; + $git->command('fetch', "$_"); + } +} + sub add_usage { print STDERR "Usage: git remote add [-f] [-t track]* [-m master] <name> <url>\n"; exit(1); @@ -303,6 +328,15 @@ elsif ($ARGV[0] eq 'show') { show_remote($ARGV[$i], $ls_remote); } } +elsif ($ARGV[0] eq 'update') { + if (@ARGV <= 1) { + update_remote("default"); + exit(1); + } + for ($i = 1; $i < @ARGV; $i++) { + update_remote($ARGV[$i]); + } +} elsif ($ARGV[0] eq 'prune') { my $ls_remote = 1; my $i; @@ -360,5 +394,6 @@ else { print STDERR " git remote add <name> <url>\n"; print STDERR " git remote show <name>\n"; print STDERR " git remote prune <name>\n"; + print STDERR " git remote update [group]\n"; exit(1); } diff --git a/git-send-email.perl b/git-send-email.perl index 6a285bfd21..a71a192e4d 100755 --- a/git-send-email.perl +++ b/git-send-email.perl @@ -34,6 +34,53 @@ sub readline { } package main; + +sub usage { + print <<EOT; +git-send-email [options] <file | directory>... +Options: + --from Specify the "From:" line of the email to be sent. + + --to Specify the primary "To:" line of the email. + + --cc Specify an initial "Cc:" list for the entire series + of emails. + + --bcc Specify a list of email addresses that should be Bcc: + on all the emails. + + --compose Use \$EDITOR to edit an introductory message for the + patch series. + + --subject Specify the initial "Subject:" line. + Only necessary if --compose is also set. If --compose + is not set, this will be prompted for. + + --in-reply-to Specify the first "In-Reply-To:" header line. + Only used if --compose is also set. If --compose is not + set, this will be prompted for. + + --chain-reply-to If set, the replies will all be to the previous + email sent, rather than to the first email sent. + Defaults to on. + + --no-signed-off-cc Suppress the automatic addition of email addresses + that appear in a Signed-off-by: line, to the cc: list. + Note: Using this option is not recommended. + + --smtp-server If set, specifies the outgoing SMTP server to use. + Defaults to localhost. + + --suppress-from Suppress sending emails to yourself if your address + appears in a From: line. + + --quiet Make git-send-email less verbose. One line per email + should be all that is output. + +EOT + exit(1); +} + # most mail servers generate the Date: header, but not all... sub format_2822_time { my ($time) = @_; @@ -120,6 +167,10 @@ my $rc = GetOptions("from=s" => \$from, "dry-run" => \$dry_run, ); +unless ($rc) { + usage(); +} + # Verify the user input foreach my $entry (@to) { @@ -311,50 +362,8 @@ if (@files) { print $_,"\n" for (@files); } } else { - print <<EOT; -git-send-email [options] <file | directory> [... file | directory ] -Options: - --from Specify the "From:" line of the email to be sent. - - --to Specify the primary "To:" line of the email. - - --cc Specify an initial "Cc:" list for the entire series - of emails. 
- - --bcc Specify a list of email addresses that should be Bcc: - on all the emails. - - --compose Use \$EDITOR to edit an introductory message for the - patch series. - - --subject Specify the initial "Subject:" line. - Only necessary if --compose is also set. If --compose - is not set, this will be prompted for. - - --in-reply-to Specify the first "In-Reply-To:" header line. - Only used if --compose is also set. If --compose is not - set, this will be prompted for. - - --chain-reply-to If set, the replies will all be to the previous - email sent, rather than to the first email sent. - Defaults to on. - - --no-signed-off-cc Suppress the automatic addition of email addresses - that appear in a Signed-off-by: line, to the cc: list. - Note: Using this option is not recommended. - - --smtp-server If set, specifies the outgoing SMTP server to use. - Defaults to localhost. - - --suppress-from Suppress sending emails to yourself if your address - appears in a From: line. - - --quiet Make git-send-email less verbose. One line per email should be - all that is output. - -Error: Please specify a file or a directory on the command line. -EOT - exit(1); + print STDERR "\nNo patch files specified!\n\n"; + usage(); } # Variables we set as part of the loop over files diff --git a/git-svn.perl b/git-svn.perl index d792a62d7c..41961b59f6 100755 --- a/git-svn.perl +++ b/git-svn.perl @@ -4,33 +4,21 @@ use warnings; use strict; use vars qw/ $AUTHOR $VERSION - $SVN_URL $SVN_INFO $SVN_WC $SVN_UUID - $GIT_SVN_INDEX $GIT_SVN - $GIT_DIR $GIT_SVN_DIR $REVDB/; + $sha1 $sha1_short $_revision + $_q $_authors %users/; $AUTHOR = 'Eric Wong <normalperson@yhbt.net>'; $VERSION = '@@GIT_VERSION@@'; -use Cwd qw/abs_path/; -$GIT_DIR = abs_path($ENV{GIT_DIR} || '.git'); -$ENV{GIT_DIR} = $GIT_DIR; +my $git_dir_user_set = 1 if defined $ENV{GIT_DIR}; +$ENV{GIT_DIR} ||= '.git'; +$Git::SVN::default_repo_id = 'svn'; +$Git::SVN::default_ref_id = $ENV{GIT_SVN_ID} || 'git-svn'; +$Git::SVN::Ra::_log_window_size = 100; -my $LC_ALL = $ENV{LC_ALL}; -my $TZ = $ENV{TZ}; -# make sure the svn binary gives consistent output between locales and TZs: +$Git::SVN::Log::TZ = $ENV{TZ}; $ENV{TZ} = 'UTC'; -$ENV{LC_ALL} = 'C'; $| = 1; # unbuffer STDOUT -# properties that we do not log: -my %SKIP = ( 'svn:wc:ra_dav:version-url' => 1, - 'svn:special' => 1, - 'svn:executable' => 1, - 'svn:entry:committed-rev' => 1, - 'svn:entry:last-author' => 1, - 'svn:entry:uuid' => 1, - 'svn:entry:committed-date' => 1, -); - sub fatal (@) { print STDERR @_; exit 1 } require SVN::Core; # use()-ing this causes segfaults for me... 
*shrug* require SVN::Ra; @@ -38,120 +26,129 @@ require SVN::Delta; if ($SVN::Core::VERSION lt '1.1.0') { fatal "Need SVN::Core 1.1.0 or better (got $SVN::Core::VERSION)\n"; } +push @Git::SVN::Ra::ISA, 'SVN::Ra'; push @SVN::Git::Editor::ISA, 'SVN::Delta::Editor'; push @SVN::Git::Fetcher::ISA, 'SVN::Delta::Editor'; -*SVN::Git::Fetcher::process_rm = *process_rm; use Carp qw/croak/; use IO::File qw//; use File::Basename qw/dirname basename/; use File::Path qw/mkpath/; use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev pass_through/; -use POSIX qw/strftime/; use IPC::Open3; -use Memoize; -use Git qw/command command_oneline command_noisy - command_output_pipe command_input_pipe command_close_pipe/; -memoize('revisions_eq'); -memoize('cmt_metadata'); -memoize('get_commit_time'); +use Git; + +BEGIN { + my $s; + foreach (qw/command command_oneline command_noisy command_output_pipe + command_input_pipe command_close_pipe/) { + $s .= "*SVN::Git::Editor::$_ = *SVN::Git::Fetcher::$_ = ". + "*Git::SVN::Migration::$_ = ". + "*Git::SVN::Log::$_ = *Git::SVN::$_ = *$_ = *Git::$_; "; + } + eval $s; +} my ($SVN); -my $_optimize_commits = 1 unless $ENV{GIT_SVN_NO_OPTIMIZE_COMMITS}; -my $sha1 = qr/[a-f\d]{40}/; -my $sha1_short = qr/[a-f\d]{4,40}/; -my $_esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/; -my ($_revision,$_stdin,$_no_ignore_ext,$_no_stop_copy,$_help,$_rmdir,$_edit, - $_find_copies_harder, $_l, $_cp_similarity, $_cp_remote, - $_repack, $_repack_nr, $_repack_flags, $_q, - $_message, $_file, $_follow_parent, $_no_metadata, - $_template, $_shared, $_no_default_regex, $_no_graft_copy, - $_limit, $_verbose, $_incremental, $_oneline, $_l_fmt, $_show_commit, - $_version, $_upgrade, $_authors, $_branch_all_refs, @_opt_m, - $_merge, $_strategy, $_dry_run, $_ignore_nodate, $_non_recursive, - $_username, $_config_dir, $_no_auth_cache, - $_pager, $_color, $_prefix); -my (@_branch_from, %tree_map, %users, %rusers, %equiv); -my ($_svn_can_do_switch); -my @repo_path_split_cache; - -my %fc_opts = ( 'no-ignore-externals' => \$_no_ignore_ext, - 'branch|b=s' => \@_branch_from, - 'follow-parent|follow' => \$_follow_parent, - 'branch-all-refs|B' => \$_branch_all_refs, +$sha1 = qr/[a-f\d]{40}/; +$sha1_short = qr/[a-f\d]{4,40}/; +my ($_stdin, $_help, $_edit, + $_message, $_file, + $_template, $_shared, + $_version, $_fetch_all, + $_merge, $_strategy, $_dry_run, + $_prefix, $_no_checkout, $_verbose); +$Git::SVN::_follow_parent = 1; +my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username, + 'config-dir=s' => \$Git::SVN::Ra::config_dir, + 'no-auth-cache' => \$Git::SVN::Prompt::_no_auth_cache ); +my %fc_opts = ( 'follow-parent|follow!' 
=> \$Git::SVN::_follow_parent, 'authors-file|A=s' => \$_authors, - 'repack:i' => \$_repack, - 'no-metadata' => \$_no_metadata, + 'repack:i' => \$Git::SVN::_repack, + 'noMetadata' => \$Git::SVN::_no_metadata, + 'useSvmProps' => \$Git::SVN::_use_svm_props, + 'useSvnsyncProps' => \$Git::SVN::_use_svnsync_props, + 'log-window-size=i' => \$Git::SVN::Ra::_log_window_size, + 'no-checkout' => \$_no_checkout, 'quiet|q' => \$_q, - 'username=s' => \$_username, - 'config-dir=s' => \$_config_dir, - 'no-auth-cache' => \$_no_auth_cache, - 'ignore-nodate' => \$_ignore_nodate, - 'repack-flags|repack-args|repack-opts=s' => \$_repack_flags); + 'repack-flags|repack-args|repack-opts=s' => + \$Git::SVN::_repack_flags, + %remote_opts ); my ($_trunk, $_tags, $_branches); -my %multi_opts = ( 'trunk|T=s' => \$_trunk, - 'tags|t=s' => \$_tags, - 'branches|b=s' => \$_branches ); -my %init_opts = ( 'template=s' => \$_template, 'shared' => \$_shared ); +my %icv; +my %init_opts = ( 'template=s' => \$_template, 'shared:s' => \$_shared, + 'trunk|T=s' => \$_trunk, 'tags|t=s' => \$_tags, + 'branches|b=s' => \$_branches, 'prefix=s' => \$_prefix, + 'no-metadata' => sub { $icv{noMetadata} = 1 }, + 'use-svm-props' => sub { $icv{useSvmProps} = 1 }, + 'use-svnsync-props' => sub { $icv{useSvnsyncProps} = 1 }, + 'rewrite-root=s' => sub { $icv{rewriteRoot} = $_[1] }, + %remote_opts ); my %cmt_opts = ( 'edit|e' => \$_edit, - 'rmdir' => \$_rmdir, - 'find-copies-harder' => \$_find_copies_harder, - 'l=i' => \$_l, - 'copy-similarity|C=i'=> \$_cp_similarity + 'rmdir' => \$SVN::Git::Editor::_rmdir, + 'find-copies-harder' => \$SVN::Git::Editor::_find_copies_harder, + 'l=i' => \$SVN::Git::Editor::_rename_limit, + 'copy-similarity|C=i'=> \$SVN::Git::Editor::_cp_similarity ); my %cmd = ( fetch => [ \&cmd_fetch, "Download new revisions from SVN", - { 'revision|r=s' => \$_revision, %fc_opts } ], - init => [ \&init, "Initialize a repo for tracking" . + { 'revision|r=s' => \$_revision, + 'fetch-all|all' => \$_fetch_all, + %fc_opts } ], + clone => [ \&cmd_clone, "Initialize and fetch revisions", + { 'revision|r=s' => \$_revision, + %fc_opts, %init_opts } ], + init => [ \&cmd_init, "Initialize a repo for tracking" . " (requires URL argument)", \%init_opts ], - dcommit => [ \&dcommit, 'Commit several diffs to merge with upstream', + 'multi-init' => [ \&cmd_multi_init, + "Deprecated alias for ". 
+ "'$0 init -T<trunk> -b<branches> -t<tags>'", + \%init_opts ], + dcommit => [ \&cmd_dcommit, + 'Commit several diffs to merge with upstream', { 'merge|m|M' => \$_merge, 'strategy|s=s' => \$_strategy, + 'verbose|v' => \$_verbose, 'dry-run|n' => \$_dry_run, + 'fetch-all|all' => \$_fetch_all, %cmt_opts, %fc_opts } ], - 'set-tree' => [ \&commit, "Set an SVN repository to a git tree-ish", - { 'stdin|' => \$_stdin, %cmt_opts, %fc_opts, } ], - 'show-ignore' => [ \&show_ignore, "Show svn:ignore listings", + 'set-tree' => [ \&cmd_set_tree, + "Set an SVN repository to a git tree-ish", + { 'stdin|' => \$_stdin, %cmt_opts, %fc_opts, } ], + 'show-ignore' => [ \&cmd_show_ignore, "Show svn:ignore listings", { 'revision|r=i' => \$_revision } ], - rebuild => [ \&rebuild, "Rebuild git-svn metadata (after git clone)", - { 'no-ignore-externals' => \$_no_ignore_ext, - 'copy-remote|remote=s' => \$_cp_remote, - 'upgrade' => \$_upgrade } ], - 'graft-branches' => [ \&graft_branches, - 'Detect merges/branches from already imported history', - { 'merge-rx|m' => \@_opt_m, - 'branch|b=s' => \@_branch_from, - 'branch-all-refs|B' => \$_branch_all_refs, - 'no-default-regex' => \$_no_default_regex, - 'no-graft-copy' => \$_no_graft_copy } ], - 'multi-init' => [ \&multi_init, - 'Initialize multiple trees (like git-svnimport)', - { %multi_opts, %init_opts, - 'revision|r=i' => \$_revision, - 'username=s' => \$_username, - 'config-dir=s' => \$_config_dir, - 'no-auth-cache' => \$_no_auth_cache, - 'prefix=s' => \$_prefix, - } ], - 'multi-fetch' => [ \&multi_fetch, - 'Fetch multiple trees (like git-svnimport)', - \%fc_opts ], - 'log' => [ \&show_log, 'Show commit logs', - { 'limit=i' => \$_limit, + 'multi-fetch' => [ \&cmd_multi_fetch, + "Deprecated alias for $0 fetch --all", + { 'revision|r=s' => \$_revision, %fc_opts } ], + 'migrate' => [ sub { }, + # no-op, we automatically run this anyways, + 'Migrate configuration/metadata/layout from + previous versions of git-svn', + { 'minimize' => \$Git::SVN::Migration::_minimize, + %remote_opts } ], + 'log' => [ \&Git::SVN::Log::cmd_show_log, 'Show commit logs', + { 'limit=i' => \$Git::SVN::Log::limit, 'revision|r=s' => \$_revision, - 'verbose|v' => \$_verbose, - 'incremental' => \$_incremental, - 'oneline' => \$_oneline, - 'show-commit' => \$_show_commit, - 'non-recursive' => \$_non_recursive, + 'verbose|v' => \$Git::SVN::Log::verbose, + 'incremental' => \$Git::SVN::Log::incremental, + 'oneline' => \$Git::SVN::Log::oneline, + 'show-commit' => \$Git::SVN::Log::show_commit, + 'non-recursive' => \$Git::SVN::Log::non_recursive, 'authors-file|A=s' => \$_authors, - 'color' => \$_color, - 'pager=s' => \$_pager, + 'color' => \$Git::SVN::Log::color, + 'pager=s' => \$Git::SVN::Log::pager, } ], - 'commit-diff' => [ \&commit_diff, 'Commit a diff between two trees', + 'rebase' => [ \&cmd_rebase, "Fetch and rebase your working directory", + { 'merge|m|M' => \$_merge, + 'verbose|v' => \$_verbose, + 'strategy|s=s' => \$_strategy, + 'fetch-all|all' => \$_fetch_all, + %fc_opts } ], + 'commit-diff' => [ \&cmd_commit_diff, + 'Commit a diff between two trees', { 'message|m=s' => \$_message, 'file|F=s' => \$_file, 'revision|r=s' => \$_revision, @@ -170,20 +167,50 @@ for (my $i = 0; $i < @ARGV; $i++) { my %opts = %{$cmd{$cmd}->[2]} if (defined $cmd); read_repo_config(\%opts); -my $rv = GetOptions(%opts, 'help|H|h' => \$_help, - 'version|V' => \$_version, - 'id|i=s' => \$GIT_SVN); +my $rv = GetOptions(%opts, 'help|H|h' => \$_help, 'version|V' => \$_version, + 'minimize-connections' => 
\$Git::SVN::Migration::_minimize, + 'id|i=s' => \$Git::SVN::default_ref_id, + 'svn-remote|remote|R=s' => sub { + $Git::SVN::no_reuse_existing = 1; + $Git::SVN::default_repo_id = $_[1] }); exit 1 if (!$rv && $cmd ne 'log'); -set_default_vals(); usage(0) if $_help; version() if $_version; usage(1) unless defined $cmd; -init_vars(); load_authors() if $_authors; -load_all_refs() if $_branch_all_refs; -migration_check() unless $cmd =~ /^(?:init|rebuild|multi-init|commit-diff)$/; -$cmd{$cmd}->[0]->(@ARGV); + +# make sure we're always running +unless ($cmd =~ /(?:clone|init|multi-init)$/) { + unless (-d $ENV{GIT_DIR}) { + if ($git_dir_user_set) { + die "GIT_DIR=$ENV{GIT_DIR} explicitly set, ", + "but it is not a directory\n"; + } + my $git_dir = delete $ENV{GIT_DIR}; + chomp(my $cdup = command_oneline(qw/rev-parse --show-cdup/)); + unless (length $cdup) { + die "Already at toplevel, but $git_dir ", + "not found '$cdup'\n"; + } + chdir $cdup or die "Unable to chdir up to '$cdup'\n"; + unless (-d $git_dir) { + die "$git_dir still not found after going to ", + "'$cdup'\n"; + } + $ENV{GIT_DIR} = $git_dir; + } +} +unless ($cmd =~ /^(?:clone|init|multi-init|commit-diff)$/) { + Git::SVN::Migration::migration_check(); +} +Git::SVN::init_vars(); +eval { + Git::SVN::verify_remotes_sanity(); + $cmd{$cmd}->[0]->(@ARGV); +}; +fatal $@ if $@; +post_fetch_checkout(); exit 0; ####################### primary functions ###################### @@ -198,6 +225,7 @@ Usage: $0 <command> [options] [arguments]\n foreach (sort keys %cmd) { next if $cmd && $cmd ne $_; + next if /^multi-/; # don't show deprecated commands print $fd ' ',pack('A17',$_),$cmd{$_}->[1],"\n"; foreach (keys %{$cmd{$_}->[2]}) { # prints out arguments as they should be passed: @@ -221,173 +249,79 @@ sub version { exit 0; } -sub rebuild { - if (!verify_ref("refs/remotes/$GIT_SVN^0")) { - copy_remote_ref(); - } - $SVN_URL = shift or undef; - my $newest_rev = 0; - if ($_upgrade) { - command_noisy('update-ref',"refs/remotes/$GIT_SVN"," - $GIT_SVN-HEAD"); - } else { - check_upgrade_needed(); - } - - my ($rev_list, $ctx) = command_output_pipe("rev-list", - "refs/remotes/$GIT_SVN"); - my $latest; - while (<$rev_list>) { - chomp; - my $c = $_; - croak "Non-SHA1: $c\n" unless $c =~ /^$sha1$/o; - my @commit = grep(/^git-svn-id: /, - command(qw/cat-file commit/, $c)); - next if (!@commit); # skip merges - my ($url, $rev, $uuid) = extract_metadata($commit[$#commit]); - if (!defined $rev || !$uuid) { - croak "Unable to extract revision or UUID from ", - "$c, $commit[$#commit]\n"; - } - - # if we merged or otherwise started elsewhere, this is - # how we break out of it - next if (defined $SVN_UUID && ($uuid ne $SVN_UUID)); - next if (defined $SVN_URL && defined $url && ($url ne $SVN_URL)); - - unless (defined $latest) { - if (!$SVN_URL && !$url) { - croak "SVN repository location required: $url\n"; - } - $SVN_URL ||= $url; - $SVN_UUID ||= $uuid; - setup_git_svn(); - $latest = $rev; - } - revdb_set($REVDB, $rev, $c); - print "r$rev = $c\n"; - $newest_rev = $rev if ($rev > $newest_rev); - } - command_close_pipe($rev_list, $ctx); -} - -sub init { - my $url = shift or die "SVN repository location required " . - "as a command-line argument\n"; - $url =~ s!/+$!!; # strip trailing slash - - if (my $repo_path = shift) { - unless (-d $repo_path) { - mkpath([$repo_path]); - } - $GIT_DIR = $ENV{GIT_DIR} = $repo_path . 
"/.git"; - init_vars(); - } - - $SVN_URL = $url; - unless (-d $GIT_DIR) { +sub do_git_init_db { + unless (-d $ENV{GIT_DIR}) { my @init_db = ('init'); push @init_db, "--template=$_template" if defined $_template; - push @init_db, "--shared" if defined $_shared; + if (defined $_shared) { + if ($_shared =~ /[a-z]/) { + push @init_db, "--shared=$_shared"; + } else { + push @init_db, "--shared"; + } + } command_noisy(@init_db); } - setup_git_svn(); + my $set; + my $pfx = "svn-remote.$Git::SVN::default_repo_id"; + foreach my $i (keys %icv) { + die "'$set' and '$i' cannot both be set\n" if $set; + next unless defined $icv{$i}; + command_noisy('config', "$pfx.$i", $icv{$i}); + $set = $i; + } } -sub cmd_fetch { - fetch_child_id($GIT_SVN, @_); +sub init_subdir { + my $repo_path = shift or return; + mkpath([$repo_path]) unless -d $repo_path; + chdir $repo_path or die "Couldn't chdir to $repo_path: $!\n"; + $ENV{GIT_DIR} = '.git'; } -sub fetch { - check_upgrade_needed(); - $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url"); - my $ret = fetch_lib(@_); - if ($ret->{commit} && !verify_ref('refs/heads/master^0')) { - command_noisy(qw(update-ref refs/heads/master),$ret->{commit}); +sub cmd_clone { + my ($url, $path) = @_; + if (!defined $path && + (defined $_trunk || defined $_branches || defined $_tags) && + $url !~ m#^[a-z\+]+://#) { + $path = $url; } - return $ret; + $path = basename($url) if !defined $path || !length $path; + cmd_init($url, $path); + Git::SVN::fetch_all($Git::SVN::default_repo_id); } -sub fetch_lib { - my (@parents) = @_; - $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url"); - $SVN ||= libsvn_connect($SVN_URL); - my ($last_rev, $last_commit) = svn_grab_base_rev(); - my ($base, $head) = libsvn_parse_revision($last_rev); - if ($base > $head) { - return { revision => $last_rev, commit => $last_commit } +sub cmd_init { + if (defined $_trunk || defined $_branches || defined $_tags) { + return cmd_multi_init(@_); } - my $index = set_index($GIT_SVN_INDEX); + my $url = shift or die "SVN repository location required ", + "as a command-line argument\n"; + init_subdir(@_); + do_git_init_db(); - # limit ourselves and also fork() since get_log won't release memory - # after processing a revision and SVN stuff seems to leak - my $inc = 1000; - my ($min, $max) = ($base, $head < $base+$inc ? $head : $base+$inc); - read_uuid(); - if (defined $last_commit) { - unless (-e $GIT_SVN_INDEX) { - command_noisy('read-tree', $last_commit); - } - my $x = command_oneline('write-tree'); - my ($y) = (command(qw/cat-file commit/, $last_commit) - =~ /^tree ($sha1)/m); - if ($y ne $x) { - unlink $GIT_SVN_INDEX or croak $!; - command_noisy('read-tree', $last_commit); - } - $x = command_oneline('write-tree'); - if ($y ne $x) { - print STDERR "trees ($last_commit) $y != $x\n", - "Something is seriously wrong...\n"; - } + Git::SVN->init($url); +} + +sub cmd_fetch { + if (grep /^\d+=./, @_) { + die "'<rev>=<commit>' fetch arguments are ", + "no longer supported.\n"; } - while (1) { - # fork, because using SVN::Pool with get_log() still doesn't - # seem to help enough to keep memory usage down. - defined(my $pid = fork) or croak $!; - if (!$pid) { - $SVN::Error::handler = \&libsvn_skip_unknown_revs; - - # Yes I'm perfectly aware that the fourth argument - # below is the limit revisions number. Unfortunately - # performance sucks with it enabled, so it's much - # faster to fetch revision ranges instead of relying - # on the limiter. 
- libsvn_get_log(libsvn_dup_ra($SVN), [''], - $min, $max, 0, 1, 1, - sub { - my $log_msg; - if ($last_commit) { - $log_msg = libsvn_fetch( - $last_commit, @_); - $last_commit = git_commit( - $log_msg, - $last_commit, - @parents); - } else { - $log_msg = libsvn_new_tree(@_); - $last_commit = git_commit( - $log_msg, @parents); - } - }); - exit 0; - } - waitpid $pid, 0; - croak $? if $?; - ($last_rev, $last_commit) = svn_grab_base_rev(); - last if ($max >= $head); - $min = $max + 1; - $max += $inc; - $max = $head if ($max > $head); - $SVN = libsvn_connect($SVN_URL); + my ($remote) = @_; + if (@_ > 1) { + die "Usage: $0 fetch [--all] [svn-remote]\n"; + } + $remote ||= $Git::SVN::default_repo_id; + if ($_fetch_all) { + cmd_multi_fetch(); + } else { + Git::SVN::fetch_all($remote, Git::SVN::read_all_remotes()); } - restore_index($index); - return { revision => $last_rev, commit => $last_commit }; } -sub commit { +sub cmd_set_tree { my (@commits) = @_; - check_upgrade_needed(); if ($_stdin || !@commits) { print "Reading from stdin...\n"; @commits = (); @@ -405,702 +339,282 @@ sub commit { } elsif (scalar @tmp > 1) { push @revs, reverse(command('rev-list',@tmp)); } else { - die "Failed to rev-parse $c\n"; + fatal "Failed to rev-parse $c\n"; } } - commit_lib(@revs); + my $gs = Git::SVN->new; + my ($r_last, $cmt_last) = $gs->last_rev_commit; + $gs->fetch; + if (defined $gs->{last_rev} && $r_last != $gs->{last_rev}) { + fatal "There are new revisions that were fetched ", + "and need to be merged (or acknowledged) ", + "before committing.\nlast rev: $r_last\n", + " current: $gs->{last_rev}\n"; + } + $gs->set_tree($_) foreach @revs; print "Done committing ",scalar @revs," revisions to SVN\n"; } -sub commit_lib { - my (@revs) = @_; - my ($r_last, $cmt_last) = svn_grab_base_rev(); - defined $r_last or die "Must have an existing revision to commit\n"; - my $fetched = fetch(); - if ($r_last != $fetched->{revision}) { - print STDERR "There are new revisions that were fetched ", - "and need to be merged (or acknowledged) ", - "before committing.\n", - "last rev: $r_last\n", - " current: $fetched->{revision}\n"; - exit 1; +sub cmd_dcommit { + my $head = shift; + $head ||= 'HEAD'; + my @refs; + my ($url, $rev, $uuid) = working_head_info($head, \@refs); + my $c = $refs[-1]; + unless (defined $url && defined $rev && defined $uuid) { + die "Unable to determine upstream SVN information from ", + "$head history\n"; } - read_uuid(); - my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : (); - my $commit_msg = "$GIT_SVN_DIR/.svn-commit.tmp.$$"; - - my $repo; - set_svn_commit_env(); - foreach my $c (@revs) { - my $log_msg = get_commit_message($c, $commit_msg); - - # fork for each commit because there's a memory leak I - # can't track down... (it's probably in the SVN code) - defined(my $pid = open my $fh, '-|') or croak $!; - if (!$pid) { - my $ed = SVN::Git::Editor->new( - { r => $r_last, - ra => libsvn_dup_ra($SVN), - c => $c, - svn_path => $SVN->{svn_path}, - }, - $SVN->get_commit_editor( - $log_msg->{msg}, - sub { - libsvn_commit_cb( - @_, $c, - $log_msg->{msg}, - $r_last, - $cmt_last) - }, - @lock) - ); - my $mods = libsvn_checkout_tree($cmt_last, $c, $ed); - if (@$mods == 0) { - print "No changes\nr$r_last = $cmt_last\n"; - $ed->abort_edit; - } else { - $ed->close_edit; - } - exit 0; - } - my ($r_new, $cmt_new, $no); - while (<$fh>) { - print $_; - chomp; - if (/^r(\d+) = ($sha1)$/o) { - ($r_new, $cmt_new) = ($1, $2); - } elsif ($_ eq 'No changes') { - $no = 1; - } - } - close $fh or exit 1; - if (! 
defined $r_new && ! defined $cmt_new) { - unless ($no) { - die "Failed to parse revision information\n"; - } - } else { - ($r_last, $cmt_last) = ($r_new, $cmt_new); - } - } - $ENV{LC_ALL} = 'C'; - unlink $commit_msg; -} - -sub dcommit { - my $head = shift || 'HEAD'; - my $gs = "refs/remotes/$GIT_SVN"; - my @refs = command(qw/rev-list --no-merges/, "$gs..$head"); + my $gs = Git::SVN->find_by_url($url); my $last_rev; - foreach my $d (reverse @refs) { + foreach my $d (@refs) { if (!verify_ref("$d~1")) { - die "Commit $d\n", - "has no parent commit, and therefore ", - "nothing to diff against.\n", - "You should be working from a repository ", - "originally created by git-svn\n"; + fatal "Commit $d\n", + "has no parent commit, and therefore ", + "nothing to diff against.\n", + "You should be working from a repository ", + "originally created by git-svn\n"; } unless (defined $last_rev) { (undef, $last_rev, undef) = cmt_metadata("$d~1"); unless (defined $last_rev) { - die "Unable to extract revision information ", - "from commit $d~1\n"; + fatal "Unable to extract revision information ", + "from commit $d~1\n"; } } if ($_dry_run) { print "diff-tree $d~1 $d\n"; } else { - if (my $r = commit_diff("$d~1", $d, undef, $last_rev)) { - $last_rev = $r; - } # else: no changes, same $last_rev + my %ed_opts = ( r => $last_rev, + log => get_commit_entry($d)->{log}, + ra => Git::SVN::Ra->new($url), + tree_a => "$d~1", + tree_b => $d, + editor_cb => sub { + print "Committed r$_[0]\n"; + $last_rev = $_[0]; }, + svn_path => ''); + if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) { + print "No changes\n$d~1 == $d\n"; + } } } return if $_dry_run; - fetch(); - my @diff = command('diff-tree', 'HEAD', $gs, '--'); + unless ($gs) { + warn "Could not determine fetch information for $url\n", + "Will not attempt to fetch and rebase commits.\n", + "This probably means you have useSvmProps and should\n", + "now resync your SVN::Mirror repository.\n"; + return; + } + $_fetch_all ? $gs->fetch_all : $gs->fetch; + # we always want to rebase against the current HEAD, not any + # head that was passed to us + my @diff = command('diff-tree', 'HEAD', $gs->refname, '--'); my @finish; if (@diff) { - @finish = qw/rebase/; - push @finish, qw/--merge/ if $_merge; - push @finish, "--strategy=$_strategy" if $_strategy; - print STDERR "W: HEAD and $gs differ, using @finish:\n", @diff; + @finish = rebase_cmd(); + print STDERR "W: HEAD and ", $gs->refname, " differ, ", + "using @finish:\n", "@diff"; } else { - print "No changes between current HEAD and $gs\n", - "Resetting to the latest $gs\n"; + print "No changes between current HEAD and ", + $gs->refname, "\nResetting to the latest ", + $gs->refname, "\n"; @finish = qw/reset --mixed/; } - command_noisy(@finish, $gs); -} - -sub show_ignore { - $SVN_URL ||= file_to_s("$GIT_SVN_DIR/info/url"); - my $repo; - $SVN ||= libsvn_connect($SVN_URL); - my $r = defined $_revision ? 
$_revision : $SVN->get_latest_revnum; - libsvn_traverse_ignore(\*STDOUT, '', $r); + command_noisy(@finish, $gs->refname); } -sub graft_branches { - my $gr_file = "$GIT_DIR/info/grafts"; - my ($grafts, $comments) = read_grafts($gr_file); - my $gr_sha1; - - if (%$grafts) { - # temporarily disable our grafts file to make this idempotent - chomp($gr_sha1 = command(qw/hash-object -w/,$gr_file)); - rename $gr_file, "$gr_file~$gr_sha1" or croak $!; +sub cmd_rebase { + command_noisy(qw/update-index --refresh/); + my $url = (working_head_info('HEAD'))[0]; + if (!defined $url) { + die "Unable to determine upstream SVN information from ", + "working tree history\n"; } - my $l_map = read_url_paths(); - my @re = map { qr/$_/is } @_opt_m if @_opt_m; - unless ($_no_default_regex) { - push @re, (qr/\b(?:merge|merging|merged)\s+with\s+([\w\.\-]+)/i, - qr/\b(?:merge|merging|merged)\s+([\w\.\-]+)/i, - qr/\b(?:from|of)\s+([\w\.\-]+)/i ); - } - foreach my $u (keys %$l_map) { - if (@re) { - foreach my $p (keys %{$l_map->{$u}}) { - graft_merge_msg($grafts,$l_map,$u,$p,@re); - } - } - unless ($_no_graft_copy) { - graft_file_copy_lib($grafts,$l_map,$u); - } + my $gs = Git::SVN->find_by_url($url); + if (command(qw/diff-index HEAD --/)) { + print STDERR "Cannot rebase with uncommited changes:\n"; + command_noisy('status'); + exit 1; } - graft_tree_joins($grafts); + $_fetch_all ? $gs->fetch_all : $gs->fetch; + command_noisy(rebase_cmd(), $gs->refname); +} - write_grafts($grafts, $comments, $gr_file); - unlink "$gr_file~$gr_sha1" if $gr_sha1; +sub cmd_show_ignore { + my $url = (::working_head_info('HEAD'))[0]; + my $gs = Git::SVN->find_by_url($url) || Git::SVN->new; + my $r = (defined $_revision ? $_revision : $gs->ra->get_latest_revnum); + $gs->traverse_ignore(\*STDOUT, '', $r); } -sub multi_init { +sub cmd_multi_init { my $url = shift; unless (defined $_trunk || defined $_branches || defined $_tags) { usage(1); } + $_prefix = '' unless defined $_prefix; + if (defined $url) { + $url =~ s#/+$##; + init_subdir(@_); + } + do_git_init_db(); if (defined $_trunk) { - my $trunk_url = complete_svn_url($url, $_trunk); - my $ch_id; - if ($GIT_SVN eq 'git-svn') { - $ch_id = 1; - $GIT_SVN = $ENV{GIT_SVN_ID} = 'trunk'; - } - init_vars(); - unless (-d $GIT_SVN_DIR) { - if ($ch_id) { - print "GIT_SVN_ID set to 'trunk' for ", - "$trunk_url ($_trunk)\n"; - } - init($trunk_url); - command_noisy('config', 'svn.trunk', $trunk_url); + my $trunk_ref = $_prefix . 'trunk'; + # try both old-style and new-style lookups: + my $gs_trunk = eval { Git::SVN->new($trunk_ref) }; + unless ($gs_trunk) { + my ($trunk_url, $trunk_path) = + complete_svn_url($url, $_trunk); + $gs_trunk = Git::SVN->init($trunk_url, $trunk_path, + undef, $trunk_ref); } } - $_prefix = '' unless defined $_prefix; - complete_url_ls_init($url, $_branches, '--branches/-b', $_prefix); - complete_url_ls_init($url, $_tags, '--tags/-t', $_prefix . 'tags/'); -} - -sub multi_fetch { - # try to do trunk first, since branches/tags - # may be descended from it. - if (-e "$GIT_DIR/svn/trunk/info/url") { - fetch_child_id('trunk', @_); - } - rec_fetch('', "$GIT_DIR/svn", @_); + return unless defined $_branches || defined $_tags; + my $ra = $url ? Git::SVN::Ra->new($url) : undef; + complete_url_ls_init($ra, $_branches, '--branches/-b', $_prefix); + complete_url_ls_init($ra, $_tags, '--tags/-t', $_prefix . 
'tags/'); } -sub show_log { - my (@args) = @_; - my ($r_min, $r_max); - my $r_last = -1; # prevent dupes - rload_authors() if $_authors; - if (defined $TZ) { - $ENV{TZ} = $TZ; - } else { - delete $ENV{TZ}; - } - if (defined $_revision) { - if ($_revision =~ /^(\d+):(\d+)$/) { - ($r_min, $r_max) = ($1, $2); - } elsif ($_revision =~ /^\d+$/) { - $r_min = $r_max = $_revision; - } else { - print STDERR "-r$_revision is not supported, use ", - "standard \'git log\' arguments instead\n"; - exit 1; +sub cmd_multi_fetch { + my $remotes = Git::SVN::read_all_remotes(); + foreach my $repo_id (sort keys %$remotes) { + if ($remotes->{$repo_id}->{url}) { + Git::SVN::fetch_all($repo_id, $remotes); } } - - config_pager(); - @args = (git_svn_log_cmd($r_min, $r_max), @args); - my $log = command_output_pipe(@args); - run_pager(); - my (@k, $c, $d); - - while (<$log>) { - if (/^${_esc_color}commit ($sha1_short)/o) { - my $cmt = $1; - if ($c && cmt_showable($c) && $c->{r} != $r_last) { - $r_last = $c->{r}; - process_commit($c, $r_min, $r_max, \@k) or - goto out; - } - $d = undef; - $c = { c => $cmt }; - } elsif (/^${_esc_color}author (.+) (\d+) ([\-\+]?\d+)$/) { - get_author_info($c, $1, $2, $3); - } elsif (/^${_esc_color}(?:tree|parent|committer) /) { - # ignore - } elsif (/^${_esc_color}:\d{6} \d{6} $sha1_short/o) { - push @{$c->{raw}}, $_; - } elsif (/^${_esc_color}[ACRMDT]\t/) { - # we could add $SVN->{svn_path} here, but that requires - # remote access at the moment (repo_path_split)... - s#^(${_esc_color})([ACRMDT])\t#$1 $2 #; - push @{$c->{changed}}, $_; - } elsif (/^${_esc_color}diff /) { - $d = 1; - push @{$c->{diff}}, $_; - } elsif ($d) { - push @{$c->{diff}}, $_; - } elsif (/^${_esc_color} (git-svn-id:.+)$/) { - ($c->{url}, $c->{r}, undef) = extract_metadata($1); - } elsif (s/^${_esc_color} //) { - push @{$c->{l}}, $_; - } - } - if ($c && defined $c->{r} && $c->{r} != $r_last) { - $r_last = $c->{r}; - process_commit($c, $r_min, $r_max, \@k); - } - if (@k) { - my $swap = $r_max; - $r_max = $r_min; - $r_min = $swap; - process_commit($_, $r_min, $r_max) foreach reverse @k; - } -out: - close $log; - print '-' x72,"\n" unless $_incremental || $_oneline; -} - -sub commit_diff_usage { - print STDERR "Usage: $0 commit-diff <tree-ish> <tree-ish> [<URL>]\n"; - exit 1 } -sub commit_diff { - my $ta = shift or commit_diff_usage(); - my $tb = shift or commit_diff_usage(); - if (!eval { $SVN_URL = shift || file_to_s("$GIT_SVN_DIR/info/url") }) { - print STDERR "Needed URL or usable git-svn id command-line\n"; - commit_diff_usage(); - } - my $r = shift; - unless (defined $r) { - if (defined $_revision) { - $r = $_revision - } else { - die "-r|--revision is a required argument\n"; +# this command is special because it requires no metadata +sub cmd_commit_diff { + my ($ta, $tb, $url) = @_; + my $usage = "Usage: $0 commit-diff -r<revision> ". 
+ "<tree-ish> <tree-ish> [<URL>]\n"; + fatal($usage) if (!defined $ta || !defined $tb); + my $svn_path; + if (!defined $url) { + my $gs = eval { Git::SVN->new }; + if (!$gs) { + fatal("Needed URL or usable git-svn --id in ", + "the command-line\n", $usage); } + $url = $gs->{url}; + $svn_path = $gs->{path}; + } + unless (defined $_revision) { + fatal("-r|--revision is a required argument\n", $usage); } if (defined $_message && defined $_file) { - print STDERR "Both --message/-m and --file/-F specified ", - "for the commit message.\n", - "I have no idea what you mean\n"; - exit 1; + fatal("Both --message/-m and --file/-F specified ", + "for the commit message.\n", + "I have no idea what you mean\n"); } if (defined $_file) { $_message = file_to_s($_file); } else { - $_message ||= get_commit_message($tb, - "$GIT_DIR/.svn-commit.tmp.$$")->{msg}; + $_message ||= get_commit_entry($tb)->{log}; } - $SVN ||= libsvn_connect($SVN_URL); + my $ra ||= Git::SVN::Ra->new($url); + $svn_path ||= $ra->{svn_path}; + my $r = $_revision; if ($r eq 'HEAD') { - $r = $SVN->get_latest_revnum; + $r = $ra->get_latest_revnum; } elsif ($r !~ /^\d+$/) { die "revision argument: $r not understood by git-svn\n"; } - my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : (); - my $rev_committed; - my $ed = SVN::Git::Editor->new({ r => $r, - ra => libsvn_dup_ra($SVN), - c => $tb, - svn_path => $SVN->{svn_path} - }, - $SVN->get_commit_editor($_message, - sub { - $rev_committed = $_[0]; - print "Committed $_[0]\n"; - }, @lock) - ); - eval { - my $mods = libsvn_checkout_tree($ta, $tb, $ed); - if (@$mods == 0) { - print "No changes\n$ta == $tb\n"; - $ed->abort_edit; - } else { - $ed->close_edit; - } - }; - fatal "$@\n" if $@; - $_message = $_file = undef; - return $rev_committed; + my %ed_opts = ( r => $r, + log => $_message, + ra => $ra, + tree_a => $ta, + tree_b => $tb, + editor_cb => sub { print "Committed r$_[0]\n" }, + svn_path => $svn_path ); + if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) { + print "No changes\n$ta == $tb\n"; + } } ########################### utility functions ######################### -sub cmt_showable { - my ($c) = @_; - return 1 if defined $c->{r}; - if ($c->{l} && $c->{l}->[-1] eq "...\n" && - $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) { - my @msg = command(qw/cat-file commit/, $c->{c}); - shift @msg while ($msg[0] ne "\n"); - shift @msg; - @{$c->{l}} = grep !/^git-svn-id: /, @msg; - - (undef, $c->{r}, undef) = extract_metadata( - (grep(/^git-svn-id: /, @msg))[-1]); - } - return defined $c->{r}; +sub rebase_cmd { + my @cmd = qw/rebase/; + push @cmd, '-v' if $_verbose; + push @cmd, qw/--merge/ if $_merge; + push @cmd, "--strategy=$_strategy" if $_strategy; + @cmd; } -sub log_use_color { - return 1 if $_color; - my ($dc, $dcvar); - $dcvar = 'color.diff'; - $dc = `git-config --get $dcvar`; - if ($dc eq '') { - # nothing at all; fallback to "diff.color" - $dcvar = 'diff.color'; - $dc = `git-config --get $dcvar`; - } - chomp($dc); - if ($dc eq 'auto') { - my $pc; - $pc = `git-config --get color.pager`; - if ($pc eq '') { - # does not have it -- fallback to pager.color - $pc = `git-config --bool --get pager.color`; - } - else { - $pc = `git-config --bool --get color.pager`; - if ($?) 
{ - $pc = 'false'; - } - } - chomp($pc); - if (-t *STDOUT || (defined $_pager && $pc eq 'true')) { - return ($ENV{TERM} && $ENV{TERM} ne 'dumb'); - } - return 0; - } - return 0 if $dc eq 'never'; - return 1 if $dc eq 'always'; - chomp($dc = `git-config --bool --get $dcvar`); - return ($dc eq 'true'); -} +sub post_fetch_checkout { + return if $_no_checkout; + my $gs = $Git::SVN::_head or return; + return if verify_ref('refs/heads/master^0'); -sub git_svn_log_cmd { - my ($r_min, $r_max) = @_; - my @cmd = (qw/log --abbrev-commit --pretty=raw - --default/, "refs/remotes/$GIT_SVN"); - push @cmd, '-r' unless $_non_recursive; - push @cmd, qw/--raw --name-status/ if $_verbose; - push @cmd, '--color' if log_use_color(); - return @cmd unless defined $r_max; - if ($r_max == $r_min) { - push @cmd, '--max-count=1'; - if (my $c = revdb_get($REVDB, $r_max)) { - push @cmd, $c; - } - } else { - my ($c_min, $c_max); - $c_max = revdb_get($REVDB, $r_max); - $c_min = revdb_get($REVDB, $r_min); - if (defined $c_min && defined $c_max) { - if ($r_max > $r_max) { - push @cmd, "$c_min..$c_max"; - } else { - push @cmd, "$c_max..$c_min"; - } - } elsif ($r_max > $r_min) { - push @cmd, $c_max; - } else { - push @cmd, $c_min; - } - } - return @cmd; -} + my $valid_head = verify_ref('HEAD^0'); + command_noisy(qw(update-ref refs/heads/master), $gs->refname); + return if ($valid_head || !verify_ref('HEAD^0')); -sub fetch_child_id { - my $id = shift; - print "Fetching $id\n"; - my $ref = "$GIT_DIR/refs/remotes/$id"; - defined(my $pid = open my $fh, '-|') or croak $!; - if (!$pid) { - $GIT_SVN = $ENV{GIT_SVN_ID} = $id; - init_vars(); - fetch(@_); - exit 0; - } - while (<$fh>) { - print $_; - check_repack() if (/^r\d+ = $sha1/o); - } - close $fh or croak $?; -} + return if $ENV{GIT_DIR} !~ m#^(?:.*/)?\.git$#; + my $index = $ENV{GIT_INDEX_FILE} || "$ENV{GIT_DIR}/index"; + return if -f $index; -sub rec_fetch { - my ($pfx, $p, @args) = @_; - my @dir; - foreach (sort <$p/*>) { - if (-r "$_/info/url") { - $pfx .= '/' if $pfx && $pfx !~ m!/$!; - my $id = $pfx . basename $_; - next if $id eq 'trunk'; - fetch_child_id($id, @args); - } elsif (-d $_) { - push @dir, $_; - } - } - foreach (@dir) { - my $x = $_; - $x =~ s!^\Q$GIT_DIR\E/svn/!!; - rec_fetch($x, $_); - } + chomp(my $bare = `git config --bool --get core.bare`); + return if $bare eq 'true'; + return if command_oneline(qw/rev-parse --is-inside-git-dir/) eq 'true'; + command_noisy(qw/read-tree -m -u -v HEAD HEAD/); + print STDERR "Checked out HEAD:\n ", + $gs->full_url, " r", $gs->last_rev, "\n"; } sub complete_svn_url { my ($url, $path) = @_; $path =~ s#/+$##; - $url =~ s#/+$## if $url; if ($path !~ m#^[a-z\+]+://#) { - $path = '/' . $path if ($path !~ m#^/#); if (!defined $url || $url !~ m#^[a-z\+]+://#) { fatal("E: '$path' is not a complete URL ", "and a separate URL is not specified\n"); } - $path = $url . $path; + return ($url, $path); } - return $path; + return ($path, ''); } sub complete_url_ls_init { - my ($url, $path, $switch, $pfx) = @_; - unless ($path) { + my ($ra, $repo_path, $switch, $pfx) = @_; + unless ($repo_path) { print STDERR "W: $switch not specified\n"; return; } - my $full_url = complete_svn_url($url, $path); - my @ls = libsvn_ls_fullurl($full_url); - defined(my $pid = fork) or croak $!; - if (!$pid) { - foreach my $u (map { "$full_url/$_" } (grep m!/$!, @ls)) { - $u =~ s#/+$##; - if ($u !~ m!\Q$full_url\E/(.+)$!) 
{ - print STDERR "W: Unrecognized URL: $u\n"; - die "This should never happen\n"; - } - # don't try to init already existing refs - my $id = $pfx.$1; - $GIT_SVN = $ENV{GIT_SVN_ID} = $id; - init_vars(); - unless (-d $GIT_SVN_DIR) { - print "init $u => $id\n"; - init($u); - } - } - exit 0; - } - waitpid $pid, 0; - croak $? if $?; - my ($n) = ($switch =~ /^--(\w+)/); - command_noisy('config', "svn.$n", $full_url); -} - -sub common_prefix { - my $paths = shift; - my %common; - foreach (@$paths) { - my @tmp = split m#/#, $_; - my $p = ''; - while (my $x = shift @tmp) { - $p .= "/$x"; - $common{$p} ||= 0; - $common{$p}++; - } - } - foreach (sort {length $b <=> length $a} keys %common) { - if ($common{$_} == @$paths) { - return $_; - } - } - return ''; -} - -# grafts set here are 'stronger' in that they're based on actual tree -# matches, and won't be deleted from merge-base checking in write_grafts() -sub graft_tree_joins { - my $grafts = shift; - map_tree_joins() if (@_branch_from && !%tree_map); - return unless %tree_map; - - git_svn_each(sub { - my $i = shift; - my @args = (qw/rev-list --pretty=raw/, "refs/remotes/$i"); - my ($fh, $ctx) = command_output_pipe(@args); - while (<$fh>) { - next unless /^commit ($sha1)$/o; - my $c = $1; - my ($t) = (<$fh> =~ /^tree ($sha1)$/o); - next unless $tree_map{$t}; - - my $l; - do { - $l = readline $fh; - } until ($l =~ /^committer (?:.+) (\d+) ([\-\+]?\d+)$/); - - my ($s, $tz) = ($1, $2); - if ($tz =~ s/^\+//) { - $s += tz_to_s_offset($tz); - } elsif ($tz =~ s/^\-//) { - $s -= tz_to_s_offset($tz); - } - - my ($url_a, $r_a, $uuid_a) = cmt_metadata($c); - - foreach my $p (@{$tree_map{$t}}) { - next if $p eq $c; - my $mb = eval { command('merge-base', $c, $p) }; - next unless ($@ || $?); - if (defined $r_a) { - # see if SVN says it's a relative - my ($url_b, $r_b, $uuid_b) = - cmt_metadata($p); - next if (defined $url_b && - defined $url_a && - ($url_a eq $url_b) && - ($uuid_a eq $uuid_b)); - if ($uuid_a eq $uuid_b) { - if ($r_b < $r_a) { - $grafts->{$c}->{$p} = 2; - next; - } elsif ($r_b > $r_a) { - $grafts->{$p}->{$c} = 2; - next; - } - } - } - my $ct = get_commit_time($p); - if ($ct < $s) { - $grafts->{$c}->{$p} = 2; - } elsif ($ct > $s) { - $grafts->{$p}->{$c} = 2; - } - # what should we do when $ct == $s ? - } - } - command_close_pipe($fh, $ctx); - }); -} - -sub graft_file_copy_lib { - my ($grafts, $l_map, $u) = @_; - my $tree_paths = $l_map->{$u}; - my $pfx = common_prefix([keys %$tree_paths]); - my ($repo, $path) = repo_path_split($u.$pfx); - $SVN = libsvn_connect($repo); - - my ($base, $head) = libsvn_parse_revision(); - my $inc = 1000; - my ($min, $max) = ($base, $head < $base+$inc ? $head : $base+$inc); - my $eh = $SVN::Error::handler; - $SVN::Error::handler = \&libsvn_skip_unknown_revs; - while (1) { - my $pool = SVN::Pool->new; - libsvn_get_log(libsvn_dup_ra($SVN), [$path], - $min, $max, 0, 2, 1, - sub { - libsvn_graft_file_copies($grafts, $tree_paths, - $path, @_); - }, $pool); - $pool->clear; - last if ($max >= $head); - $min = $max + 1; - $max += $inc; - $max = $head if ($max > $head); - } - $SVN::Error::handler = $eh; -} - -sub process_merge_msg_matches { - my ($grafts, $l_map, $u, $p, $c, @matches) = @_; - my (@strong, @weak); - foreach (@matches) { - # merging with ourselves is not interesting - next if $_ eq $p; - if ($l_map->{$u}->{$_}) { - push @strong, $_; - } else { - push @weak, $_; - } - } - foreach my $w (@weak) { - last if @strong; - # no exact match, use branch name as regexp. 
- my $re = qr/\Q$w\E/i; - foreach (keys %{$l_map->{$u}}) { - if (/$re/) { - push @strong, $l_map->{$u}->{$_}; - last; - } - } - last if @strong; - $w = basename($w); - $re = qr/\Q$w\E/i; - foreach (keys %{$l_map->{$u}}) { - if (/$re/) { - push @strong, $l_map->{$u}->{$_}; - last; - } + $repo_path =~ s#/+$##; + if ($repo_path =~ m#^[a-z\+]+://#) { + $ra = Git::SVN::Ra->new($repo_path); + $repo_path = ''; + } else { + $repo_path =~ s#^/+##; + unless ($ra) { + fatal("E: '$repo_path' is not a complete URL ", + "and a separate URL is not specified\n"); } } - my ($rev) = ($c->{m} =~ /^git-svn-id:\s(?:\S+?)\@(\d+) - \s(?:[a-f\d\-]+)$/xsm); - unless (defined $rev) { - ($rev) = ($c->{m} =~/^git-svn-id:\s(\d+) - \@(?:[a-f\d\-]+)/xsm); - return unless defined $rev; - } - foreach my $m (@strong) { - my ($r0, $s0) = find_rev_before($rev, $m, 1); - $grafts->{$c->{c}}->{$s0} = 1 if defined $s0; + my $url = $ra->{url}; + my $gs = Git::SVN->init($url, undef, undef, undef, 1); + my $k = "svn-remote.$gs->{repo_id}.url"; + my $orig_url = eval { command_oneline(qw/config --get/, $k) }; + if ($orig_url && ($orig_url ne $gs->{url})) { + die "$k already set: $orig_url\n", + "wanted to set to: $gs->{url}\n"; } -} - -sub graft_merge_msg { - my ($grafts, $l_map, $u, $p, @re) = @_; - - my $x = $l_map->{$u}->{$p}; - my $rl = rev_list_raw("refs/remotes/$x"); - while (my $c = next_rev_list_entry($rl)) { - foreach my $re (@re) { - my (@br) = ($c->{m} =~ /$re/g); - next unless @br; - process_merge_msg_matches($grafts,$l_map,$u,$p,$c,@br); - } + command_oneline('config', $k, $gs->{url}) unless $orig_url; + my $remote_path = "$ra->{svn_path}/$repo_path/*"; + $remote_path =~ s#/+#/#g; + $remote_path =~ s#^/##g; + my ($n) = ($switch =~ /^--(\w+)/); + if (length $pfx && $pfx !~ m#/$#) { + die "--prefix='$pfx' must have a trailing slash '/'\n"; } -} - -sub read_uuid { - return if $SVN_UUID; - my $pool = SVN::Pool->new; - $SVN_UUID = $SVN->get_uuid($pool); - $pool->clear; + command_noisy('config', "svn-remote.$gs->{repo_id}.$n", + "$remote_path:refs/remotes/$pfx*"); } sub verify_ref { @@ -1109,37 +623,9 @@ sub verify_ref { { STDERR => 0 }); }; } -sub repo_path_split { - my $full_url = shift; - $full_url =~ s#/+$##; - - foreach (@repo_path_split_cache) { - if ($full_url =~ s#$_##) { - my $u = $1; - $full_url =~ s#^/+##; - return ($u, $full_url); - } - } - my $tmp = libsvn_connect($full_url); - return ($tmp->{repos_root}, $tmp->{svn_path}); -} - -sub setup_git_svn { - defined $SVN_URL or croak "SVN repository location required\n"; - unless (-d $GIT_DIR) { - croak "GIT_DIR=$GIT_DIR does not exist!\n"; - } - mkpath([$GIT_SVN_DIR]); - mkpath(["$GIT_SVN_DIR/info"]); - open my $fh, '>>',$REVDB or croak $!; - close $fh; - s_to_file($SVN_URL,"$GIT_SVN_DIR/info/url"); - -} - sub get_tree_from_treeish { my ($treeish) = @_; - croak "Not a sha1: $treeish\n" unless $treeish =~ /^$sha1$/o; + # $treeish can be a symbolic ref, too: my $type = command_oneline(qw/cat-file -t/, $treeish); my $expected; while ($type eq 'tag') { @@ -1148,7 +634,7 @@ sub get_tree_from_treeish { if ($type eq 'commit') { $expected = (grep /^tree /, command(qw/cat-file commit/, $treeish))[0]; - ($expected) = ($expected =~ /^tree ($sha1)$/); + ($expected) = ($expected =~ /^tree ($sha1)$/o); die "Unable to get tree from $treeish\n" unless $expected; } elsif ($type eq 'tree') { $expected = $treeish; @@ -1158,146 +644,44 @@ sub get_tree_from_treeish { return $expected; } -sub get_diff { - my ($from, $treeish) = @_; - print "diff-tree $from $treeish\n"; - my @diff_tree = 
qw(diff-tree -z -r); - if ($_cp_similarity) { - push @diff_tree, "-C$_cp_similarity"; - } else { - push @diff_tree, '-C'; - } - push @diff_tree, '--find-copies-harder' if $_find_copies_harder; - push @diff_tree, "-l$_l" if defined $_l; - push @diff_tree, $from, $treeish; - my ($diff_fh, $ctx) = command_output_pipe(@diff_tree); - local $/ = "\0"; - my $state = 'meta'; - my @mods; - while (<$diff_fh>) { - chomp $_; # this gets rid of the trailing "\0" - if ($state eq 'meta' && /^:(\d{6})\s(\d{6})\s - $sha1\s($sha1)\s([MTCRAD])\d*$/xo) { - push @mods, { mode_a => $1, mode_b => $2, - sha1_b => $3, chg => $4 }; - if ($4 =~ /^(?:C|R)$/) { - $state = 'file_a'; - } else { - $state = 'file_b'; - } - } elsif ($state eq 'file_a') { - my $x = $mods[$#mods] or croak "Empty array\n"; - if ($x->{chg} !~ /^(?:C|R)$/) { - croak "Error parsing $_, $x->{chg}\n"; - } - $x->{file_a} = $_; - $state = 'file_b'; - } elsif ($state eq 'file_b') { - my $x = $mods[$#mods] or croak "Empty array\n"; - if (exists $x->{file_a} && $x->{chg} !~ /^(?:C|R)$/) { - croak "Error parsing $_, $x->{chg}\n"; - } - if (!exists $x->{file_a} && $x->{chg} =~ /^(?:C|R)$/) { - croak "Error parsing $_, $x->{chg}\n"; - } - $x->{file_b} = $_; - $state = 'meta'; - } else { - croak "Error parsing $_\n"; - } - } - command_close_pipe($diff_fh, $ctx); - return \@mods; -} +sub get_commit_entry { + my ($treeish) = shift; + my %log_entry = ( log => '', tree => get_tree_from_treeish($treeish) ); + my $commit_editmsg = "$ENV{GIT_DIR}/COMMIT_EDITMSG"; + my $commit_msg = "$ENV{GIT_DIR}/COMMIT_MSG"; + open my $log_fh, '>', $commit_editmsg or croak $!; -sub libsvn_checkout_tree { - my ($from, $treeish, $ed) = @_; - my $mods = get_diff($from, $treeish); - return $mods unless (scalar @$mods); - my %o = ( D => 1, R => 0, C => -1, A => 3, M => 3, T => 3 ); - foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) { - my $f = $m->{chg}; - if (defined $o{$f}) { - $ed->$f($m, $_q); - } else { - croak "Invalid change type: $f\n"; - } - } - $ed->rmdirs($_q) if $_rmdir; - return $mods; -} - -sub get_commit_message { - my ($commit, $commit_msg) = (@_); - my %log_msg = ( msg => '' ); - open my $msg, '>', $commit_msg or croak $!; - - my $type = command_oneline(qw/cat-file -t/, $commit); + my $type = command_oneline(qw/cat-file -t/, $treeish); if ($type eq 'commit' || $type eq 'tag') { my ($msg_fh, $ctx) = command_output_pipe('cat-file', - $type, $commit); + $type, $treeish); my $in_msg = 0; while (<$msg_fh>) { if (!$in_msg) { $in_msg = 1 if (/^\s*$/); } elsif (/^git-svn-id: /) { - # skip this, we regenerate the correct one - # on re-fetch anyways + # skip this for now, we regenerate the + # correct one on re-fetch anyways + # TODO: set *:merge properties or like... 
} else { - print $msg $_ or croak $!; + print $log_fh $_ or croak $!; } } command_close_pipe($msg_fh, $ctx); } - close $msg or croak $!; + close $log_fh or croak $!; if ($_edit || ($type eq 'tree')) { my $editor = $ENV{VISUAL} || $ENV{EDITOR} || 'vi'; - system($editor, $commit_msg); - } - - # file_to_s removes all trailing newlines, so just use chomp() here: - open $msg, '<', $commit_msg or croak $!; - { local $/; chomp($log_msg{msg} = <$msg>); } - close $msg or croak $!; - - return \%log_msg; -} - -sub set_svn_commit_env { - if (defined $LC_ALL) { - $ENV{LC_ALL} = $LC_ALL; - } else { - delete $ENV{LC_ALL}; - } -} - -sub rev_list_raw { - my ($fh, $c) = command_output_pipe(qw/rev-list --pretty=raw/, @_); - return { fh => $fh, ctx => $c, t => { } }; -} - -sub next_rev_list_entry { - my $rl = shift; - my $fh = $rl->{fh}; - my $x = $rl->{t}; - while (<$fh>) { - if (/^commit ($sha1)$/o) { - if ($x->{c}) { - $rl->{t} = { c => $1 }; - return $x; - } else { - $x->{c} = $1; - } - } elsif (/^parent ($sha1)$/o) { - $x->{p}->{$1} = 1; - } elsif (s/^ //) { - $x->{m} ||= ''; - $x->{m} .= $_; - } + # TODO: strip out spaces, comments, like git-commit.sh + system($editor, $commit_editmsg); } - command_close_pipe($fh, $rl->{ctx}); - return ($x != $rl->{t}) ? $x : undef; + rename $commit_editmsg, $commit_msg or croak $!; + open $log_fh, '<', $commit_msg or croak $!; + { local $/; chomp($log_entry{log} = <$log_fh>); } + close $log_fh or croak $!; + unlink $commit_msg; + \%log_entry; } sub s_to_file { @@ -1318,601 +702,1409 @@ sub file_to_s { return $ret; } -sub assert_revision_unknown { - my $r = shift; - if (my $c = revdb_get($REVDB, $r)) { - croak "$r = $c already exists! Why are we refetching it?"; +# '<svn username> = real-name <email address>' mapping based on git-svnimport: +sub load_authors { + open my $authors, '<', $_authors or die "Can't open $_authors $!\n"; + my $log = $cmd eq 'log'; + while (<$authors>) { + chomp; + next unless /^(\S+?|\(no author\))\s*=\s*(.+?)\s*<(.+)>\s*$/; + my ($user, $name, $email) = ($1, $2, $3); + if ($log) { + $Git::SVN::Log::rusers{"$name <$email>"} = $user; + } else { + $users{$user} = [$name, $email]; + } } + close $authors or croak $!; } -sub git_commit { - my ($log_msg, @parents) = @_; - assert_revision_unknown($log_msg->{revision}); - map_tree_joins() if (@_branch_from && !%tree_map); - - my (@tmp_parents, @exec_parents, %seen_parent); - if (my $lparents = $log_msg->{parents}) { - @tmp_parents = @$lparents - } - # commit parents can be conditionally bound to a particular - # svn revision via: "svn_revno=commit_sha1", filter them out here: - foreach my $p (@parents) { - next unless defined $p; - if ($p =~ /^(\d+)=($sha1_short)$/o) { - if ($1 == $log_msg->{revision}) { - push @tmp_parents, $2; - } +# convert GetOpt::Long specs for use by git-config +sub read_repo_config { + return unless -d $ENV{GIT_DIR}; + my $opts = shift; + my @config_only; + foreach my $o (keys %$opts) { + # if we have mixedCase and a long option-only, then + # it's a config-only variable that we don't need for + # the command-line. 
+ push @config_only, $o if ($o =~ /[A-Z]/ && $o =~ /^[a-z]+$/i); + my $v = $opts->{$o}; + my ($key) = ($o =~ /^([a-zA-Z\-]+)/); + $key =~ s/-//g; + my $arg = 'git-config'; + $arg .= ' --int' if ($o =~ /[:=]i$/); + $arg .= ' --bool' if ($o !~ /[:=][sfi]$/); + if (ref $v eq 'ARRAY') { + chomp(my @tmp = `$arg --get-all svn.$key`); + @$v = @tmp if @tmp; } else { - push @tmp_parents, $p if $p =~ /$sha1_short/o; - } - } - my $tree = $log_msg->{tree}; - if (!defined $tree) { - my $index = set_index($GIT_SVN_INDEX); - $tree = command_oneline('write-tree'); - croak $? if $?; - restore_index($index); - } - # just in case we clobber the existing ref, we still want that ref - # as our parent: - if (my $cur = verify_ref("refs/remotes/$GIT_SVN^0")) { - chomp $cur; - push @tmp_parents, $cur; - } - - if (exists $tree_map{$tree}) { - foreach my $p (@{$tree_map{$tree}}) { - my $skip; - foreach (@tmp_parents) { - # see if a common parent is found - my $mb = eval { command('merge-base', $_, $p) }; - next if ($@ || $?); - $skip = 1; - last; + chomp(my $tmp = `$arg --get svn.$key`); + if ($tmp && !($arg =~ / --bool/ && $tmp eq 'false')) { + $$v = $tmp; } - next if $skip; - my ($url_p, $r_p, $uuid_p) = cmt_metadata($p); - next if (($SVN_UUID eq $uuid_p) && - ($log_msg->{revision} > $r_p)); - next if (defined $url_p && defined $SVN_URL && - ($SVN_UUID eq $uuid_p) && - ($url_p eq $SVN_URL)); - push @tmp_parents, $p; - } - } - foreach (@tmp_parents) { - next if $seen_parent{$_}; - $seen_parent{$_} = 1; - push @exec_parents, $_; - # MAXPARENT is defined to 16 in commit-tree.c: - last if @exec_parents > 16; + } } + delete @$opts{@config_only} if @config_only; +} - set_commit_env($log_msg); - my @exec = ('git-commit-tree', $tree); - push @exec, '-p', $_ foreach @exec_parents; - defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec)) - or croak $!; - print $msg_fh $log_msg->{msg} or croak $!; - unless ($_no_metadata) { - print $msg_fh "\ngit-svn-id: $SVN_URL\@$log_msg->{revision}", - " $SVN_UUID\n" or croak $!; +sub extract_metadata { + my $id = shift or return (undef, undef, undef); + my ($url, $rev, $uuid) = ($id =~ /^git-svn-id:\s(\S+?)\@(\d+) + \s([a-f\d\-]+)$/x); + if (!defined $rev || !$uuid || !$url) { + # some of the original repositories I made had + # identifiers like this: + ($rev, $uuid) = ($id =~/^git-svn-id:\s(\d+)\@([a-f\d\-]+)/); } - $msg_fh->flush == 0 or croak $!; - close $msg_fh or croak $!; - chomp(my $commit = do { local $/; <$out_fh> }); - close $out_fh or croak $!; - waitpid $pid, 0; - croak $? 
if $?; - if ($commit !~ /^$sha1$/o) { - die "Failed to commit, invalid sha1: $commit\n"; + return ($url, $rev, $uuid); +} + +sub cmt_metadata { + return extract_metadata((grep(/^git-svn-id: /, + command(qw/cat-file commit/, shift)))[-1]); +} + +sub working_head_info { + my ($head, $refs) = @_; + my ($url, $rev, $uuid); + my ($fh, $ctx) = command_output_pipe('rev-list', $head); + while (<$fh>) { + chomp; + ($url, $rev, $uuid) = cmt_metadata($_); + last if (defined $url && defined $rev && defined $uuid); + unshift @$refs, $_ if $refs; } - command_noisy('update-ref',"refs/remotes/$GIT_SVN",$commit); - revdb_set($REVDB, $log_msg->{revision}, $commit); + close $fh; # break the pipe + ($url, $rev, $uuid); +} - # this output is read via pipe, do not change: - print "r$log_msg->{revision} = $commit\n"; - return $commit; +package Git::SVN; +use strict; +use warnings; +use vars qw/$default_repo_id $default_ref_id $_no_metadata $_follow_parent + $_repack $_repack_flags $_use_svm_props $_head + $_use_svnsync_props $no_reuse_existing/; +use Carp qw/croak/; +use File::Path qw/mkpath/; +use File::Copy qw/copy/; +use IPC::Open3; + +my $_repack_nr; +# properties that we do not log: +my %SKIP_PROP; +BEGIN { + %SKIP_PROP = map { $_ => 1 } qw/svn:wc:ra_dav:version-url + svn:special svn:executable + svn:entry:committed-rev + svn:entry:last-author + svn:entry:uuid + svn:entry:committed-date/; + + # some options are read globally, but can be overridden locally + # per [svn-remote "..."] section. Command-line options will *NOT* + # override options set in an [svn-remote "..."] section + my $e; + foreach (qw/follow_parent no_metadata use_svm_props + use_svnsync_props/) { + my $key = $_; + $key =~ tr/_//d; + $e .= "sub $_ { + my (\$self) = \@_; + return \$self->{-$_} if exists \$self->{-$_}; + my \$k = \"svn-remote.\$self->{repo_id}\.$key\"; + eval { command_oneline(qw/config --get/, \$k) }; + if (\$@) { + \$self->{-$_} = \$Git::SVN::_$_; + } else { + my \$v = command_oneline(qw/config --bool/,\$k); + \$self->{-$_} = \$v eq 'false' ? 0 : 1; + } + return \$self->{-$_} }\n"; + } + $e .= "1;\n"; + eval $e or die $@; +} + +my %LOCKFILES; +END { unlink keys %LOCKFILES if %LOCKFILES } + +sub resolve_local_globs { + my ($url, $fetch, $glob_spec) = @_; + return unless defined $glob_spec; + my $ref = $glob_spec->{ref}; + my $path = $glob_spec->{path}; + foreach (command(qw#for-each-ref --format=%(refname) refs/remotes#)) { + next unless m#^refs/remotes/$ref->{regex}$#; + my $p = $1; + my $pathname = $path->full_path($p); + my $refname = $ref->full_path($p); + if (my $existing = $fetch->{$pathname}) { + if ($existing ne $refname) { + die "Refspec conflict:\n", + "existing: refs/remotes/$existing\n", + " globbed: refs/remotes/$refname\n"; + } + my $u = (::cmt_metadata("refs/remotes/$refname"))[0]; + $u =~ s!^\Q$url\E(/|$)!! 
or die + "refs/remotes/$refname: '$url' not found in '$u'\n"; + if ($pathname ne $u) { + warn "W: Refspec glob conflict ", + "(ref: refs/remotes/$refname):\n", + "expected path: $pathname\n", + " real path: $u\n", + "Continuing ahead with $u\n"; + next; + } + } else { + $fetch->{$pathname} = $refname; + } + } +} + +sub parse_revision_argument { + my ($base, $head) = @_; + if (!defined $::_revision || $::_revision eq 'BASE:HEAD') { + return ($base, $head); + } + return ($1, $2) if ($::_revision =~ /^(\d+):(\d+)$/); + return ($::_revision, $::_revision) if ($::_revision =~ /^\d+$/); + return ($head, $head) if ($::_revision eq 'HEAD'); + return ($base, $1) if ($::_revision =~ /^BASE:(\d+)$/); + return ($1, $head) if ($::_revision =~ /^(\d+):HEAD$/); + die "revision argument: $::_revision not understood by git-svn\n"; +} + +sub fetch_all { + my ($repo_id, $remotes) = @_; + if (ref $repo_id) { + my $gs = $repo_id; + $repo_id = undef; + $repo_id = $gs->{repo_id}; + } + $remotes ||= read_all_remotes(); + my $remote = $remotes->{$repo_id} or + die "[svn-remote \"$repo_id\"] unknown\n"; + my $fetch = $remote->{fetch}; + my $url = $remote->{url} or die "svn-remote.$repo_id.url not defined\n"; + my (@gs, @globs); + my $ra = Git::SVN::Ra->new($url); + my $uuid = $ra->get_uuid; + my $head = $ra->get_latest_revnum; + my $base = defined $fetch ? $head : 0; + + # read the max revs for wildcard expansion (branches/*, tags/*) + foreach my $t (qw/branches tags/) { + defined $remote->{$t} or next; + push @globs, $remote->{$t}; + my $max_rev = eval { tmp_config(qw/--int --get/, + "svn-remote.$repo_id.${t}-maxRev") }; + if (defined $max_rev && ($max_rev < $base)) { + $base = $max_rev; + } elsif (!defined $max_rev) { + $base = 0; + } + } + + if ($fetch) { + foreach my $p (sort keys %$fetch) { + my $gs = Git::SVN->new($fetch->{$p}, $repo_id, $p); + my $lr = $gs->rev_db_max; + if (defined $lr) { + $base = $lr if ($lr < $base); + } + push @gs, $gs; + } + } + + ($base, $head) = parse_revision_argument($base, $head); + $ra->gs_fetch_loop_common($base, $head, \@gs, \@globs); +} + +sub read_all_remotes { + my $r = {}; + foreach (grep { s/^svn-remote\.// } command(qw/config -l/)) { + if (m!^(.+)\.fetch=\s*(.*)\s*:\s*refs/remotes/(.+)\s*$!) { + $r->{$1}->{fetch}->{$2} = $3; + } elsif (m!^(.+)\.url=\s*(.*)\s*$!) { + $r->{$1}->{url} = $2; + } elsif (m!^(.+)\.(branches|tags)= + (.*):refs/remotes/(.+)\s*$/!x) { + my ($p, $g) = ($3, $4); + my $rs = $r->{$1}->{$2} = { + t => $2, + remote => $1, + path => Git::SVN::GlobSpec->new($p), + ref => Git::SVN::GlobSpec->new($g) }; + if (length($rs->{ref}->{right}) != 0) { + die "The '*' glob character must be the last ", + "character of '$g'\n"; + } + } + } + $r; } -sub check_repack { - if ($_repack && (--$_repack_nr == 0)) { +sub init_vars { + if (defined $_repack) { + $_repack = 1000 if ($_repack <= 0); $_repack_nr = $_repack; - # repack doesn't use any arguments with spaces in them, does it? - command_noisy('repack', split(/\s+/, $_repack_flags)); + $_repack_flags ||= '-d'; } } -sub set_commit_env { - my ($log_msg) = @_; - my $author = $log_msg->{author}; - if (!defined $author || length $author == 0) { - $author = '(no author)'; +sub verify_remotes_sanity { + return unless -d $ENV{GIT_DIR}; + my %seen; + foreach (command(qw/config -l/)) { + if (m!^svn-remote\.(?:.+)\.fetch=.*:refs/remotes/(\S+)\s*$!) 
{ + if ($seen{$1}) { + die "Remote ref refs/remote/$1 is tracked by", + "\n \"$_\"\nand\n \"$seen{$1}\"\n", + "Please resolve this ambiguity in ", + "your git configuration file before ", + "continuing\n"; + } + $seen{$1} = $_; + } } - my ($name,$email) = defined $users{$author} ? @{$users{$author}} - : ($author,"$author\@$SVN_UUID"); - $ENV{GIT_AUTHOR_NAME} = $ENV{GIT_COMMITTER_NAME} = $name; - $ENV{GIT_AUTHOR_EMAIL} = $ENV{GIT_COMMITTER_EMAIL} = $email; - $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_msg->{date}; } -sub check_upgrade_needed { - if (!-r $REVDB) { - -d $GIT_SVN_DIR or mkpath([$GIT_SVN_DIR]); - open my $fh, '>>',$REVDB or croak $!; - close $fh; - } - return unless eval { - command([qw/rev-parse --verify/,"$GIT_SVN-HEAD^0"], - {STDERR => 0}); - }; - my $head = eval { command('rev-parse',"refs/remotes/$GIT_SVN") }; - if ($@ || !$head) { - print STDERR "Please run: $0 rebuild --upgrade\n"; - exit 1; +# we allow more chars than remotes2config.sh... +sub sanitize_remote_name { + my ($name) = @_; + $name =~ tr{A-Za-z0-9:,/+-}{.}c; + $name; +} + +sub find_existing_remote { + my ($url, $remotes) = @_; + return undef if $no_reuse_existing; + my $existing; + foreach my $repo_id (keys %$remotes) { + my $u = $remotes->{$repo_id}->{url} or next; + next if $u ne $url; + $existing = $repo_id; + last; } + $existing; } -# fills %tree_map with a reverse mapping of trees to commits. Useful -# for finding parents to commit on. -sub map_tree_joins { - my %seen; - foreach my $br (@_branch_from) { - my $pipe = command_output_pipe(qw/rev-list - --topo-order --pretty=raw/, $br); - while (<$pipe>) { - if (/^commit ($sha1)$/o) { - my $commit = $1; - - # if we've seen a commit, - # we've seen its parents - last if $seen{$commit}; - my ($tree) = (<$pipe> =~ /^tree ($sha1)$/o); - unless (defined $tree) { - die "Failed to parse commit $commit\n"; - } - push @{$tree_map{$tree}}, $commit; - $seen{$commit} = 1; +sub init_remote_config { + my ($self, $url, $no_write) = @_; + $url =~ s!/+$!!; # strip trailing slash + my $r = read_all_remotes(); + my $existing = find_existing_remote($url, $r); + if ($existing) { + unless ($no_write) { + print STDERR "Using existing ", + "[svn-remote \"$existing\"]\n"; + } + $self->{repo_id} = $existing; + } else { + my $min_url = Git::SVN::Ra->new($url)->minimize_url; + $existing = find_existing_remote($min_url, $r); + if ($existing) { + unless ($no_write) { + print STDERR "Using existing ", + "[svn-remote \"$existing\"]\n"; + } + $self->{repo_id} = $existing; + } + if ($min_url ne $url) { + unless ($no_write) { + print STDERR "Using higher level of URL: ", + "$url => $min_url\n"; + } + my $old_path = $self->{path}; + $self->{path} = $url; + $self->{path} =~ s!^\Q$min_url\E(/|$)!!; + if (length $old_path) { + $self->{path} .= "/$old_path"; } + $url = $min_url; + } + } + my $orig_url; + if (!$existing) { + # verify that we aren't overwriting anything: + $orig_url = eval { + command_oneline('config', '--get', + "svn-remote.$self->{repo_id}.url") + }; + if ($orig_url && ($orig_url ne $url)) { + die "svn-remote.$self->{repo_id}.url already set: ", + "$orig_url\nwanted to set to: $url\n"; } - close $pipe; } + my ($xrepo_id, $xpath) = find_ref($self->refname); + if (defined $xpath) { + die "svn-remote.$xrepo_id.fetch already set to track ", + "$xpath:refs/remotes/", $self->refname, "\n"; + } + unless ($no_write) { + command_noisy('config', + "svn-remote.$self->{repo_id}.url", $url); + command_noisy('config', '--add', + "svn-remote.$self->{repo_id}.fetch", + 
"$self->{path}:".$self->refname); + } + $self->{url} = $url; } -sub load_all_refs { - if (@_branch_from) { - print STDERR '--branch|-b parameters are ignored when ', - "--branch-all-refs|-B is passed\n"; +sub find_by_url { # repos_root and, path are optional + my ($class, $full_url, $repos_root, $path) = @_; + return undef unless defined $full_url; + my $remotes = read_all_remotes(); + if (defined $full_url && defined $repos_root && !defined $path) { + $path = $full_url; + $path =~ s#^\Q$repos_root\E(?:/|$)##; } + foreach my $repo_id (keys %$remotes) { + my $u = $remotes->{$repo_id}->{url} or next; + next if defined $repos_root && $repos_root ne $u; - # don't worry about rev-list on non-commit objects/tags, - # it shouldn't blow up if a ref is a blob or tree... - @_branch_from = command(qw/rev-parse --symbolic --all/); + my $fetch = $remotes->{$repo_id}->{fetch} || {}; + foreach (qw/branches tags/) { + resolve_local_globs($u, $fetch, + $remotes->{$repo_id}->{$_}); + } + my $p = $path; + unless (defined $p) { + $p = $full_url; + $p =~ s#^\Q$u\E(?:/|$)## or next; + } + foreach my $f (keys %$fetch) { + next if $f ne $p; + return Git::SVN->new($fetch->{$f}, $repo_id, $f); + } + } + undef; } -# '<svn username> = real-name <email address>' mapping based on git-svnimport: -sub load_authors { - open my $authors, '<', $_authors or die "Can't open $_authors $!\n"; - while (<$authors>) { - chomp; - next unless /^(\S+?|\(no author\))\s*=\s*(.+?)\s*<(.+)>\s*$/; - my ($user, $name, $email) = ($1, $2, $3); - $users{$user} = [$name, $email]; +sub init { + my ($class, $url, $path, $repo_id, $ref_id, $no_write) = @_; + my $self = _new($class, $repo_id, $ref_id, $path); + if (defined $url) { + $self->init_remote_config($url, $no_write); } - close $authors or croak $!; + $self; } -sub rload_authors { - open my $authors, '<', $_authors or die "Can't open $_authors $!\n"; - while (<$authors>) { - chomp; - next unless /^(\S+?)\s*=\s*(.+?)\s*<(.+)>\s*$/; - my ($user, $name, $email) = ($1, $2, $3); - $rusers{"$name <$email>"} = $user; +sub find_ref { + my ($ref_id) = @_; + foreach (command(qw/config -l/)) { + next unless m!^svn-remote\.(.+)\.fetch= + \s*(.*)\s*:\s*refs/remotes/(.+)\s*$!x; + my ($repo_id, $path, $ref) = ($1, $2, $3); + if ($ref eq $ref_id) { + $path = '' if ($path =~ m#^\./?#); + return ($repo_id, $path); + } } - close $authors or croak $!; + (undef, undef, undef); } -sub git_svn_each { - my $sub = shift; - foreach (command(qw/rev-parse --symbolic --all/)) { - next unless s#^refs/remotes/##; - chomp $_; - next unless -f "$GIT_DIR/svn/$_/info/url"; - &$sub($_); +sub new { + my ($class, $ref_id, $repo_id, $path) = @_; + if (defined $ref_id && !defined $repo_id && !defined $path) { + ($repo_id, $path) = find_ref($ref_id); + if (!defined $repo_id) { + die "Could not find a \"svn-remote.*.fetch\" key ", + "in the repository configuration matching: ", + "refs/remotes/$ref_id\n"; + } + } + my $self = _new($class, $repo_id, $ref_id, $path); + if (!defined $self->{path} || !length $self->{path}) { + my $fetch = command_oneline('config', '--get', + "svn-remote.$repo_id.fetch", + ":refs/remotes/$ref_id\$") or + die "Failed to read \"svn-remote.$repo_id.fetch\" ", + "\":refs/remotes/$ref_id\$\" in config\n"; + ($self->{path}, undef) = split(/\s*:\s*/, $fetch); + } + $self->{url} = command_oneline('config', '--get', + "svn-remote.$repo_id.url") or + die "Failed to read \"svn-remote.$repo_id.url\" in config\n"; + $self->rebuild; + $self; +} + +sub refname { "refs/remotes/$_[0]->{ref_id}" } + +sub svm_uuid { + my 
($self) = @_; + return $self->{svm}->{uuid} if $self->svm; + $self->ra; + unless ($self->{svm}) { + die "SVM UUID not cached, and reading remotely failed\n"; } + $self->{svm}->{uuid}; } -sub migrate_revdb { - git_svn_each(sub { - my $id = shift; - defined(my $pid = fork) or croak $!; - if (!$pid) { - $GIT_SVN = $ENV{GIT_SVN_ID} = $id; - init_vars(); - exit 0 if -r $REVDB; - print "Upgrading svn => git mapping...\n"; - -d $GIT_SVN_DIR or mkpath([$GIT_SVN_DIR]); - open my $fh, '>>',$REVDB or croak $!; - close $fh; - rebuild(); - print "Done upgrading. You may now delete the ", - "deprecated $GIT_SVN_DIR/revs directory\n"; - exit 0; +sub svm { + my ($self) = @_; + return $self->{svm} if $self->{svm}; + my $svm; + # see if we have it in our config, first: + eval { + my $section = "svn-remote.$self->{repo_id}"; + $svm = { + source => tmp_config('--get', "$section.svm-source"), + uuid => tmp_config('--get', "$section.svm-uuid"), + replace => tmp_config('--get', "$section.svm-replace"), } - waitpid $pid, 0; - croak $? if $?; - }); -} + }; + if ($svm && $svm->{source} && $svm->{uuid} && $svm->{replace}) { + $self->{svm} = $svm; + } + $self->{svm}; +} + +sub _set_svm_vars { + my ($self, $ra) = @_; + return $ra if $self->svm; + + my @err = ( "useSvmProps set, but failed to read SVM properties\n", + "(svm:source, svm:uuid) ", + "from the following URLs:\n" ); + sub read_svm_props { + my ($self, $ra, $path, $r) = @_; + my $props = ($ra->get_dir($path, $r))[2]; + my $src = $props->{'svm:source'}; + my $uuid = $props->{'svm:uuid'}; + return undef if (!$src || !$uuid); + + chomp($src, $uuid); + + $uuid =~ m{^[0-9a-f\-]{30,}$} + or die "doesn't look right - svm:uuid is '$uuid'\n"; + + # the '!' is used to mark the repos_root!/relative/path + $src =~ s{/?!/?}{/}; + $src =~ s{/+$}{}; # no trailing slashes please + # username is of no interest + $src =~ s{(^[a-z\+]*://)[^/@]*@}{$1}; + + my $replace = $ra->{url}; + $replace .= "/$path" if length $path; + + my $section = "svn-remote.$self->{repo_id}"; + tmp_config("$section.svm-source", $src); + tmp_config("$section.svm-replace", $replace); + tmp_config("$section.svm-uuid", $uuid); + $self->{svm} = { + source => $src, + uuid => $uuid, + replace => $replace + }; + } -sub migration_check { - migrate_revdb() unless (-e $REVDB); - return if (-d "$GIT_DIR/svn" || !-d $GIT_DIR); - print "Upgrading repository...\n"; - unless (-d "$GIT_DIR/svn") { - mkdir "$GIT_DIR/svn" or croak $!; + my $r = $ra->get_latest_revnum; + my $path = $self->{path}; + my %tried; + while (length $path) { + unless ($tried{"$self->{url}/$path"}) { + return $ra if $self->read_svm_props($ra, $path, $r); + $tried{"$self->{url}/$path"} = 1; + } + $path =~ s#/?[^/]+$##; } - print "Data from a previous version of git-svn exists, but\n\t", - "$GIT_SVN_DIR\n\t(required for this version ", - "($VERSION) of git-svn) does not.\n"; + die "Path: '$path' should be ''\n" if $path ne ''; + return $ra if $self->read_svm_props($ra, $path, $r); + $tried{"$self->{url}/$path"} = 1; - foreach my $x (command(qw/rev-parse --symbolic --all/)) { - next unless $x =~ s#^refs/remotes/##; - chomp $x; - next unless -f "$GIT_DIR/$x/info/url"; - my $u = eval { file_to_s("$GIT_DIR/$x/info/url") }; - next unless $u; - my $dn = dirname("$GIT_DIR/svn/$x"); - mkpath([$dn]) unless -d $dn; - rename "$GIT_DIR/$x", "$GIT_DIR/svn/$x" or croak "$!: $x"; + if ($ra->{repos_root} eq $self->{url}) { + die @err, (map { " $_\n" } keys %tried), "\n"; } - migrate_revdb() if (-d $GIT_SVN_DIR && !-w $REVDB); - print "Done upgrading.\n"; -} -sub 
find_rev_before { - my ($r, $id, $eq_ok) = @_; - my $f = "$GIT_DIR/svn/$id/.rev_db"; - return (undef,undef) unless -r $f; - --$r unless $eq_ok; - while ($r > 0) { - if (my $c = revdb_get($f, $r)) { - return ($r, $c); + # nope, make sure we're connected to the repository root: + my $ok; + my @tried_b; + $path = $ra->{svn_path}; + $ra = Git::SVN::Ra->new($ra->{repos_root}); + while (length $path) { + unless ($tried{"$ra->{url}/$path"}) { + $ok = $self->read_svm_props($ra, $path, $r); + last if $ok; + $tried{"$ra->{url}/$path"} = 1; } - --$r; + $path =~ s#/?[^/]+$##; } - return (undef, undef); + die "Path: '$path' should be ''\n" if $path ne ''; + $ok ||= $self->read_svm_props($ra, $path, $r); + $tried{"$ra->{url}/$path"} = 1; + if (!$ok) { + die @err, (map { " $_\n" } keys %tried), "\n"; + } + Git::SVN::Ra->new($self->{url}); } -sub init_vars { - $GIT_SVN ||= $ENV{GIT_SVN_ID} || 'git-svn'; - $GIT_SVN_DIR = "$GIT_DIR/svn/$GIT_SVN"; - $REVDB = "$GIT_SVN_DIR/.rev_db"; - $GIT_SVN_INDEX = "$GIT_SVN_DIR/index"; - $SVN_URL = undef; - $SVN_WC = "$GIT_SVN_DIR/tree"; - %tree_map = (); -} +sub svnsync { + my ($self) = @_; + return $self->{svnsync} if $self->{svnsync}; -# convert GetOpt::Long specs for use by git-config -sub read_repo_config { - return unless -d $GIT_DIR; - my $opts = shift; - foreach my $o (keys %$opts) { - my $v = $opts->{$o}; - my ($key) = ($o =~ /^([a-z\-]+)/); - $key =~ s/-//g; - my $arg = 'git-config'; - $arg .= ' --int' if ($o =~ /[:=]i$/); - $arg .= ' --bool' if ($o !~ /[:=][sfi]$/); - if (ref $v eq 'ARRAY') { - chomp(my @tmp = `$arg --get-all svn.$key`); - @$v = @tmp if @tmp; - } else { - chomp(my $tmp = `$arg --get svn.$key`); - if ($tmp && !($arg =~ / --bool/ && $tmp eq 'false')) { - $$v = $tmp; - } + if ($self->no_metadata) { + die "Can't have both 'noMetadata' and ", + "'useSvnsyncProps' options set!\n"; + } + if ($self->rewrite_root) { + die "Can't have both 'useSvnsyncProps' and 'rewriteRoot' ", + "options set!\n"; + } + + my $svnsync; + # see if we have it in our config, first: + eval { + my $section = "svn-remote.$self->{repo_id}"; + $svnsync = { + url => tmp_config('--get', "$section.svnsync-url"), + uuid => tmp_config('--get', "$section.svnsync-uuid"), } + }; + if ($svnsync && $svnsync->{url} && $svnsync->{uuid}) { + return $self->{svnsync} = $svnsync; } + + my $err = "useSvnsyncProps set, but failed to read " . + "svnsync property: svn:sync-from-"; + my $rp = $self->ra->rev_proplist(0); + + my $url = $rp->{'svn:sync-from-url'} or die $err . "url\n"; + $url =~ m{^[a-z\+]+://} or + die "doesn't look right - svn:sync-from-url is '$url'\n"; + + my $uuid = $rp->{'svn:sync-from-uuid'} or die $err . "uuid\n"; + $uuid =~ m{^[0-9a-f\-]{30,}$} or + die "doesn't look right - svn:sync-from-uuid is '$uuid'\n"; + + my $section = "svn-remote.$self->{repo_id}"; + tmp_config('--add', "$section.svnsync-uuid", $uuid); + tmp_config('--add', "$section.svnsync-url", $url); + return $self->{svnsync} = { url => $url, uuid => $uuid }; } -sub set_default_vals { - if (defined $_repack) { - $_repack = 1000 if ($_repack <= 0); - $_repack_nr = $_repack; - $_repack_flags ||= '-d'; +# this allows us to memoize our SVN::Ra UUID locally and avoid a +# remote lookup (useful for 'git svn log'). 
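(The comment above belongs to ra_uuid(), which follows: the repository UUID is fetched once over the network and then cached under svn-remote.<repo_id>.uuid, so later invocations such as `git svn log` can read it locally. A minimal cache-or-fetch sketch of that idea; uuid_for() and remote_uuid() are hypothetical stand-ins, and the UUID value is fake.)

    #!/usr/bin/perl
    use strict;
    use warnings;

    my %cache;    # stands in for the git-config-backed store git-svn uses
    my $lookups = 0;

    # Hypothetical remote lookup; returns a fixed fake UUID and counts calls.
    sub remote_uuid { $lookups++; return 'deadbeef-1234-5678-9abc-def012345678' }

    # Cache-or-fetch: only the first call per repo_id goes "remote".
    sub uuid_for {
        my ($repo_id) = @_;
        return $cache{$repo_id} ||= remote_uuid();
    }

    uuid_for('svn') for 1 .. 3;
    print "remote lookups: $lookups\n";   # prints 1
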
+sub ra_uuid { + my ($self) = @_; + unless ($self->{ra_uuid}) { + my $key = "svn-remote.$self->{repo_id}.uuid"; + my $uuid = eval { tmp_config('--get', $key) }; + if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/) { + $self->{ra_uuid} = $uuid; + } else { + die "ra_uuid called without URL\n" unless $self->{url}; + $self->{ra_uuid} = $self->ra->get_uuid; + tmp_config('--add', $key, $self->{ra_uuid}); + } } + $self->{ra_uuid}; } -sub read_grafts { - my $gr_file = shift; - my ($grafts, $comments) = ({}, {}); - if (open my $fh, '<', $gr_file) { - my @tmp; - while (<$fh>) { - if (/^($sha1)\s+/) { - my $c = $1; - if (@tmp) { - @{$comments->{$c}} = @tmp; - @tmp = (); - } - foreach my $p (split /\s+/, $_) { - $grafts->{$c}->{$p} = 1; - } - } else { - push @tmp, $_; - } +sub ra { + my ($self) = shift; + my $ra = Git::SVN::Ra->new($self->{url}); + if ($self->use_svm_props && !$self->{svm}) { + if ($self->no_metadata) { + die "Can't have both 'noMetadata' and ", + "'useSvmProps' options set!\n"; + } elsif ($self->use_svnsync_props) { + die "Can't have both 'useSvnsyncProps' and ", + "'useSvmProps' options set!\n"; } - close $fh or croak $!; - @{$comments->{'END'}} = @tmp if @tmp; + $ra = $self->_set_svm_vars($ra); + $self->{-want_revprops} = 1; } - return ($grafts, $comments); + $ra; } -sub write_grafts { - my ($grafts, $comments, $gr_file) = @_; +sub rel_path { + my ($self) = @_; + my $repos_root = $self->ra->{repos_root}; + return $self->{path} if ($self->{url} eq $repos_root); + die "BUG: rel_path failed! repos_root: $repos_root, Ra URL: ", + $self->ra->{url}, " path: $self->{path}, URL: $self->{url}\n"; +} - open my $fh, '>', $gr_file or croak $!; - foreach my $c (sort keys %$grafts) { - if ($comments->{$c}) { - print $fh $_ foreach @{$comments->{$c}}; - } - my $p = $grafts->{$c}; - my %x; # real parents - delete $p->{$c}; # commits are not self-reproducing... - my $ch = command_output_pipe(qw/cat-file commit/, $c); - while (<$ch>) { - if (/^parent ($sha1)/) { - $x{$1} = $p->{$1} = 1; - } else { - last unless /^\S/; - } - } - close $ch; # breaking the pipe - - # if real parents are the only ones in the grafts, drop it - next if join(' ',sort keys %$p) eq join(' ',sort keys %x); - - my (@ip, @jp, $mb); - my %del = %x; - @ip = @jp = keys %$p; - foreach my $i (@ip) { - next if $del{$i} || $p->{$i} == 2; - foreach my $j (@jp) { - next if $i eq $j || $del{$j} || $p->{$j} == 2; - $mb = eval { command('merge-base', $i, $j) }; - next unless $mb; - chomp $mb; - next if $x{$mb}; - if ($mb eq $j) { - delete $p->{$i}; - $del{$i} = 1; - } elsif ($mb eq $i) { - delete $p->{$j}; - $del{$j} = 1; - } - } +sub traverse_ignore { + my ($self, $fh, $path, $r) = @_; + $path =~ s#^/+##g; + my $ra = $self->ra; + my ($dirent, undef, $props) = $ra->get_dir($path, $r); + my $p = $path; + $p =~ s#^\Q$ra->{svn_path}\E/##; + print $fh length $p ? 
"\n# $p\n" : "\n# /\n"; + if (my $s = $props->{'svn:ignore'}) { + $s =~ s/[\r\n]+/\n/g; + chomp $s; + if (length $p == 0) { + $s =~ s#\n#\n/$p#g; + print $fh "/$s\n"; + } else { + $s =~ s#\n#\n/$p/#g; + print $fh "/$p/$s\n"; } + } + foreach (sort keys %$dirent) { + next if $dirent->{$_}->kind != $SVN::Node::dir; + $self->traverse_ignore($fh, "$path/$_", $r); + } +} - # if real parents are the only ones in the grafts, drop it - next if join(' ',sort keys %$p) eq join(' ',sort keys %x); +sub last_rev { ($_[0]->last_rev_commit)[0] } +sub last_commit { ($_[0]->last_rev_commit)[1] } - print $fh $c, ' ', join(' ', sort keys %$p),"\n"; +# returns the newest SVN revision number and newest commit SHA1 +sub last_rev_commit { + my ($self) = @_; + if (defined $self->{last_rev} && defined $self->{last_commit}) { + return ($self->{last_rev}, $self->{last_commit}); + } + my $c = ::verify_ref($self->refname.'^0'); + if ($c && !$self->use_svm_props && !$self->no_metadata) { + my $rev = (::cmt_metadata($c))[1]; + if (defined $rev) { + ($self->{last_rev}, $self->{last_commit}) = ($rev, $c); + return ($rev, $c); + } + } + my $db_path = $self->db_path; + unless (-e $db_path) { + ($self->{last_rev}, $self->{last_commit}) = (undef, undef); + return (undef, undef); + } + my $offset = -41; # from tail + my $rl; + open my $fh, '<', $db_path or croak "$db_path not readable: $!\n"; + sysseek($fh, $offset, 2); # don't care for errors + sysread($fh, $rl, 41) == 41 or return (undef, undef); + chomp $rl; + while (('0' x40) eq $rl && sysseek($fh, 0, 1) != 0) { + $offset -= 41; + sysseek($fh, $offset, 2); # don't care for errors + sysread($fh, $rl, 41) == 41 or return (undef, undef); + chomp $rl; } - if ($comments->{'END'}) { - print $fh $_ foreach @{$comments->{'END'}}; + if ($c && $c ne $rl) { + die "$db_path and ", $self->refname, + " inconsistent!:\n$c != $rl\n"; } + my $rev = sysseek($fh, 0, 1) or croak $!; + $rev = ($rev - 41) / 41; close $fh or croak $!; + ($self->{last_rev}, $self->{last_commit}) = ($rev, $c); + return ($rev, $c); } -sub read_url_paths_all { - my ($l_map, $pfx, $p) = @_; - my @dir; - foreach (<$p/*>) { - if (-r "$_/info/url") { - $pfx .= '/' if $pfx && $pfx !~ m!/$!; - my $id = $pfx . basename $_; - my $url = file_to_s("$_/info/url"); - my ($u, $p) = repo_path_split($url); - $l_map->{$u}->{$p} = $id; - } elsif (-d $_) { - push @dir, $_; - } - } - foreach (@dir) { - my $x = $_; - $x =~ s!^\Q$GIT_DIR\E/svn/!!o; - read_url_paths_all($l_map, $x, $_); - } +sub get_fetch_range { + my ($self, $min, $max) = @_; + $max ||= $self->ra->get_latest_revnum; + $min ||= $self->rev_db_max; + (++$min, $max); } -# this one only gets ids that have been imported, not new ones -sub read_url_paths { - my $l_map = {}; - git_svn_each(sub { my $x = shift; - my $url = file_to_s("$GIT_DIR/svn/$x/info/url"); - my ($u, $p) = repo_path_split($url); - $l_map->{$u}->{$p} = $x; - }); - return $l_map; +sub tmp_config { + my (@args) = @_; + my $old_def_config = "$ENV{GIT_DIR}/svn/config"; + my $config = "$ENV{GIT_DIR}/svn/.metadata"; + if (-e $old_def_config && ! 
-e $config) { + rename $old_def_config, $config or + die "Failed rename $old_def_config => $config: $!\n"; + } + my $old_config = $ENV{GIT_CONFIG}; + $ENV{GIT_CONFIG} = $config; + $@ = undef; + my @ret = eval { + unless (-f $config) { + mkfile($config); + open my $fh, '>', $config or + die "Can't open $config: $!\n"; + print $fh "; This file is used internally by ", + "git-svn\n" or die + "Couldn't write to $config: $!\n"; + print $fh "; You should not have to edit it\n" or + die "Couldn't write to $config: $!\n"; + close $fh or die "Couldn't close $config: $!\n"; + } + command('config', @args); + }; + my $err = $@; + if (defined $old_config) { + $ENV{GIT_CONFIG} = $old_config; + } else { + delete $ENV{GIT_CONFIG}; + } + die $err if $err; + wantarray ? @ret : $ret[0]; } -sub extract_metadata { - my $id = shift or return (undef, undef, undef); - my ($url, $rev, $uuid) = ($id =~ /^git-svn-id:\s(\S+?)\@(\d+) - \s([a-f\d\-]+)$/x); - if (!defined $rev || !$uuid || !$url) { - # some of the original repositories I made had - # identifiers like this: - ($rev, $uuid) = ($id =~/^git-svn-id:\s(\d+)\@([a-f\d\-]+)/); +sub tmp_index_do { + my ($self, $sub) = @_; + my $old_index = $ENV{GIT_INDEX_FILE}; + $ENV{GIT_INDEX_FILE} = $self->{index}; + $@ = undef; + my @ret = eval { + my ($dir, $base) = ($self->{index} =~ m#^(.*?)/?([^/]+)$#); + mkpath([$dir]) unless -d $dir; + &$sub; + }; + my $err = $@; + if (defined $old_index) { + $ENV{GIT_INDEX_FILE} = $old_index; + } else { + delete $ENV{GIT_INDEX_FILE}; } - return ($url, $rev, $uuid); + die $err if $err; + wantarray ? @ret : $ret[0]; } -sub cmt_metadata { - return extract_metadata((grep(/^git-svn-id: /, - command(qw/cat-file commit/, shift)))[-1]); +sub assert_index_clean { + my ($self, $treeish) = @_; + + $self->tmp_index_do(sub { + command_noisy('read-tree', $treeish) unless -e $self->{index}; + my $x = command_oneline('write-tree'); + my ($y) = (command(qw/cat-file commit/, $treeish) =~ + /^tree ($::sha1)/mo); + return if $y eq $x; + + warn "Index mismatch: $y != $x\nrereading $treeish\n"; + unlink $self->{index} or die "unlink $self->{index}: $!\n"; + command_noisy('read-tree', $treeish); + $x = command_oneline('write-tree'); + if ($y ne $x) { + ::fatal "trees ($treeish) $y != $x\n", + "Something is seriously wrong...\n"; + } + }); } -sub get_commit_time { - my $cmt = shift; - my $fh = command_output_pipe(qw/rev-list --pretty=raw -n1/, $cmt); - while (<$fh>) { - /^committer\s(?:.+) (\d+) ([\-\+]?\d+)$/ or next; - my ($s, $tz) = ($1, $2); - if ($tz =~ s/^\+//) { - $s += tz_to_s_offset($tz); - } elsif ($tz =~ s/^\-//) { - $s -= tz_to_s_offset($tz); +sub get_commit_parents { + my ($self, $log_entry) = @_; + my (%seen, @ret, @tmp); + # legacy support for 'set-tree'; this is only used by set_tree_cb: + if (my $ip = $self->{inject_parents}) { + if (my $commit = delete $ip->{$log_entry->{revision}}) { + push @tmp, $commit; } - close $fh; - return $s; } - die "Can't get commit time for commit: $cmt\n"; + if (my $cur = ::verify_ref($self->refname.'^0')) { + push @tmp, $cur; + } + push @tmp, $_ foreach (@{$log_entry->{parents}}, @tmp); + while (my $p = shift @tmp) { + next if $seen{$p}; + $seen{$p} = 1; + push @ret, $p; + # MAXPARENT is defined to 16 in commit-tree.c: + last if @ret >= 16; + } + if (@tmp) { + die "r$log_entry->{revision}: No room for parents:\n\t", + join("\n\t", @tmp), "\n"; + } + @ret; } -sub tz_to_s_offset { - my ($tz) = @_; - $tz =~ s/(\d\d)$//; - return ($1 * 60) + ($tz * 3600); +sub rewrite_root { + my ($self) = @_; + return 
$self->{-rewrite_root} if exists $self->{-rewrite_root}; + my $k = "svn-remote.$self->{repo_id}.rewriteRoot"; + my $rwr = eval { command_oneline(qw/config --get/, $k) }; + if ($rwr) { + $rwr =~ s#/+$##; + if ($rwr !~ m#^[a-z\+]+://#) { + die "$rwr is not a valid URL (key: $k)\n"; + } + } + $self->{-rewrite_root} = $rwr; } -# adapted from pager.c -sub config_pager { - $_pager ||= $ENV{GIT_PAGER} || $ENV{PAGER}; - if (!defined $_pager) { - $_pager = 'less'; - } elsif (length $_pager == 0 || $_pager eq 'cat') { - $_pager = undef; - } +sub metadata_url { + my ($self) = @_; + ($self->rewrite_root || $self->{url}) . + (length $self->{path} ? '/' . $self->{path} : ''); } -sub run_pager { - return unless -t *STDOUT; - pipe my $rfd, my $wfd or return; - defined(my $pid = fork) or croak $!; - if (!$pid) { - open STDOUT, '>&', $wfd or croak $!; - return; +sub full_url { + my ($self) = @_; + $self->{url} . (length $self->{path} ? '/' . $self->{path} : ''); +} + +sub do_git_commit { + my ($self, $log_entry) = @_; + my $lr = $self->last_rev; + if (defined $lr && $lr >= $log_entry->{revision}) { + die "Last fetched revision of ", $self->refname, + " was r$lr, but we are about to fetch: ", + "r$log_entry->{revision}!\n"; } - open STDIN, '<&', $rfd or croak $!; - $ENV{LESS} ||= 'FRSX'; - exec $_pager or croak "Can't run pager: $! ($_pager)\n"; + if (my $c = $self->rev_db_get($log_entry->{revision})) { + croak "$log_entry->{revision} = $c already exists! ", + "Why are we refetching it?\n"; + } + $ENV{GIT_AUTHOR_NAME} = $ENV{GIT_COMMITTER_NAME} = $log_entry->{name}; + $ENV{GIT_AUTHOR_EMAIL} = $ENV{GIT_COMMITTER_EMAIL} = + $log_entry->{email}; + $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_entry->{date}; + + my $tree = $log_entry->{tree}; + if (!defined $tree) { + $tree = $self->tmp_index_do(sub { + command_oneline('write-tree') }); + } + die "Tree is not a valid sha1: $tree\n" if $tree !~ /^$::sha1$/o; + + my @exec = ('git-commit-tree', $tree); + foreach ($self->get_commit_parents($log_entry)) { + push @exec, '-p', $_; + } + defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec)) + or croak $!; + print $msg_fh $log_entry->{log} or croak $!; + unless ($self->no_metadata) { + print $msg_fh "\ngit-svn-id: $log_entry->{metadata}\n" + or croak $!; + } + $msg_fh->flush == 0 or croak $!; + close $msg_fh or croak $!; + chomp(my $commit = do { local $/; <$out_fh> }); + close $out_fh or croak $!; + waitpid $pid, 0; + croak $? if $?; + if ($commit !~ /^$::sha1$/o) { + die "Failed to commit, invalid sha1: $commit\n"; + } + + $self->rev_db_set($log_entry->{revision}, $commit, 1); + + $self->{last_rev} = $log_entry->{revision}; + $self->{last_commit} = $commit; + print "r$log_entry->{revision}"; + if (defined $log_entry->{svm_revision}) { + print " (\@$log_entry->{svm_revision})"; + $self->rev_db_set($log_entry->{svm_revision}, $commit, + 0, $self->svm_uuid); + } + print " = $commit ($self->{ref_id})\n"; + if (defined $_repack && (--$_repack_nr == 0)) { + $_repack_nr = $_repack; + # repack doesn't use any arguments with spaces in them, does it? 
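(The repack block this comment sits in fires every $_repack commits by counting $_repack_nr down and resetting it, and the flags string is split on whitespace because command_noisy() takes a list, which is also why a single flag containing spaces would not survive. A small standalone sketch of that countdown; the counter and flag values below are invented.)

    #!/usr/bin/perl
    use strict;
    use warnings;

    my $repack       = 3;     # repack every 3 commits; git-svn defaults to 1000
    my $repack_nr    = $repack;
    my $repack_flags = '-d';  # illustrative; the real value comes from svn.repackflags

    foreach my $commit (1 .. 7) {
        # ... one imported commit would be created here ...
        if (--$repack_nr == 0) {
            $repack_nr = $repack;
            my @argv = ('git', 'repack', split /\s+/, $repack_flags);
            print "after commit $commit: @argv\n";   # fires at commits 3 and 6
        }
    }
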
+ print "Running git repack $_repack_flags ...\n"; + command_noisy('repack', split(/\s+/, $_repack_flags)); + print "Done repacking\n"; + } + return $commit; } -sub get_author_info { - my ($dest, $author, $t, $tz) = @_; - $author =~ s/(?:^\s*|\s*$)//g; - $dest->{a_raw} = $author; - my $_a; - if ($_authors) { - $_a = $rusers{$author} || undef; +sub match_paths { + my ($self, $paths, $r) = @_; + return 1 if $self->{path} eq ''; + if (my $path = $paths->{"/$self->{path}"}) { + return ($path->{action} eq 'D') ? 0 : 1; } - if (!$_a) { - ($_a) = ($author =~ /<([^>]+)\@[^>]+>$/); + $self->{path_regex} ||= qr/^\/\Q$self->{path}\E\//; + if (grep /$self->{path_regex}/, keys %$paths) { + return 1; } - $dest->{t} = $t; - $dest->{tz} = $tz; - $dest->{a} = $_a; - # Date::Parse isn't in the standard Perl distro :( - if ($tz =~ s/^\+//) { - $t += tz_to_s_offset($tz); - } elsif ($tz =~ s/^\-//) { - $t -= tz_to_s_offset($tz); + my $c = ''; + foreach (split m#/#, $self->{path}) { + $c .= "/$_"; + next unless ($paths->{$c} && + ($paths->{$c}->{action} =~ /^[AR]$/)); + if ($self->ra->check_path($self->{path}, $r) == + $SVN::Node::dir) { + return 1; + } } - $dest->{t_utc} = $t; + return 0; } -sub process_commit { - my ($c, $r_min, $r_max, $defer) = @_; - if (defined $r_min && defined $r_max) { - if ($r_min == $c->{r} && $r_min == $r_max) { - show_commit($c); - return 0; +sub find_parent_branch { + my ($self, $paths, $rev) = @_; + return undef unless $self->follow_parent; + unless (defined $paths) { + my $err_handler = $SVN::Error::handler; + $SVN::Error::handler = \&Git::SVN::Ra::skip_unknown_revs; + $self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1, sub { + $paths = + Git::SVN::Ra::dup_changed_paths($_[0]) }); + $SVN::Error::handler = $err_handler; + } + return undef unless defined $paths; + + # look for a parent from another branch: + my @b_path_components = split m#/#, $self->rel_path; + my @a_path_components; + my $i; + while (@b_path_components) { + $i = $paths->{'/'.join('/', @b_path_components)}; + last if $i && defined $i->{copyfrom_path}; + unshift(@a_path_components, pop(@b_path_components)); + } + return undef unless defined $i && defined $i->{copyfrom_path}; + my $branch_from = $i->{copyfrom_path}; + if (@a_path_components) { + print STDERR "branch_from: $branch_from => "; + $branch_from .= '/'.join('/', @a_path_components); + print STDERR $branch_from, "\n"; + } + my $r = $i->{copyfrom_rev}; + my $repos_root = $self->ra->{repos_root}; + my $url = $self->ra->{url}; + my $new_url = $repos_root . 
$branch_from; + print STDERR "Found possible branch point: ", + "$new_url => ", $self->full_url, ", $r\n"; + $branch_from =~ s#^/##; + my $gs = Git::SVN->find_by_url($new_url, $repos_root, $branch_from); + unless ($gs) { + my $ref_id = $self->{ref_id}; + $ref_id =~ s/\@\d+$//; + $ref_id .= "\@$r"; + # just grow a tail if we're not unique enough :x + $ref_id .= '-' while find_ref($ref_id); + print STDERR "Initializing parent: $ref_id\n"; + $gs = Git::SVN->init($new_url, '', $ref_id, $ref_id, 1); + } + my ($r0, $parent) = $gs->find_rev_before($r, 1); + if (!defined $r0 || !defined $parent) { + $gs->fetch(0, $r); + ($r0, $parent) = $gs->last_rev_commit; + } + if (defined $r0 && defined $parent) { + print STDERR "Found branch parent: ($self->{ref_id}) $parent\n"; + my $ed; + if ($self->ra->can_do_switch) { + $self->assert_index_clean($parent); + print STDERR "Following parent with do_switch\n"; + # do_switch works with svn/trunk >= r22312, but that + # is not included with SVN 1.4.3 (the latest version + # at the moment), so we can't rely on it + $self->{last_commit} = $parent; + $ed = SVN::Git::Fetcher->new($self); + $gs->ra->gs_do_switch($r0, $rev, $gs, + $self->full_url, $ed) + or die "SVN connection failed somewhere...\n"; + } else { + print STDERR "Following parent with do_update\n"; + $ed = SVN::Git::Fetcher->new($self); + $self->ra->gs_do_update($rev, $rev, $self, $ed) + or die "SVN connection failed somewhere...\n"; } - return 1 if $r_min == $r_max; - if ($r_min < $r_max) { - # we need to reverse the print order - return 0 if (defined $_limit && --$_limit < 0); - push @$defer, $c; - return 1; + print STDERR "Successfully followed parent\n"; + return $self->make_log_entry($rev, [$parent], $ed); + } + return undef; +} + +sub do_fetch { + my ($self, $paths, $rev) = @_; + my $ed; + my ($last_rev, @parents); + if (my $lc = $self->last_commit) { + # we can have a branch that was deleted, then re-added + # under the same name but copied from another path, in + # which case we'll have multiple parents (we don't + # want to break the original ref, nor lose copypath info): + if (my $log_entry = $self->find_parent_branch($paths, $rev)) { + push @{$log_entry->{parents}}, $lc; + return $log_entry; + } + $ed = SVN::Git::Fetcher->new($self); + $last_rev = $self->{last_rev}; + $ed->{c} = $lc; + @parents = ($lc); + } else { + $last_rev = $rev; + if (my $log_entry = $self->find_parent_branch($paths, $rev)) { + return $log_entry; } - if ($r_min != $r_max) { - return 1 if ($r_min < $c->{r}); - return 1 if ($r_max > $c->{r}); + $ed = SVN::Git::Fetcher->new($self); + } + unless ($self->ra->gs_do_update($last_rev, $rev, $self, $ed)) { + die "SVN connection failed somewhere...\n"; + } + $self->make_log_entry($rev, \@parents, $ed); +} + +sub get_untracked { + my ($self, $ed) = @_; + my @out; + my $h = $ed->{empty}; + foreach (sort keys %$h) { + my $act = $h->{$_} ? '+empty_dir' : '-empty_dir'; + push @out, " $act: " . uri_encode($_); + warn "W: $act: $_\n"; + } + foreach my $t (qw/dir_prop file_prop/) { + $h = $ed->{$t} or next; + foreach my $path (sort keys %$h) { + my $ppath = $path eq '' ? '.' : $path; + foreach my $prop (sort keys %{$h->{$path}}) { + next if $SKIP_PROP{$prop}; + my $v = $h->{$path}->{$prop}; + my $t_ppath_prop = "$t: " . + uri_encode($ppath) . ' ' . + uri_encode($prop); + if (defined $v) { + push @out, " +$t_ppath_prop " . 
+ uri_encode($v); + } else { + push @out, " -$t_ppath_prop"; + } + } } } - return 0 if (defined $_limit && --$_limit < 0); - show_commit($c); - return 1; + foreach my $t (qw/absent_file absent_directory/) { + $h = $ed->{$t} or next; + foreach my $parent (sort keys %$h) { + foreach my $path (sort @{$h->{$parent}}) { + push @out, " $t: " . + uri_encode("$parent/$path"); + warn "W: $t: $parent/$path ", + "Insufficient permissions?\n"; + } + } + } + \@out; } -sub show_commit { - my $c = shift; - if ($_oneline) { - my $x = "\n"; - if (my $l = $c->{l}) { - while ($l->[0] =~ /^\s*$/) { shift @$l } - $x = $l->[0]; +sub parse_svn_date { + my $date = shift || return '+0000 1970-01-01 00:00:00'; + my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T + (\d\d)\:(\d\d)\:(\d\d).\d+Z$/x) or + croak "Unable to parse date: $date\n"; + "+0000 $Y-$m-$d $H:$M:$S"; +} + +sub check_author { + my ($author) = @_; + if (!defined $author || length $author == 0) { + $author = '(no author)'; + } + if (defined $::_authors && ! defined $::users{$author}) { + die "Author: $author not defined in $::_authors file\n"; + } + $author; +} + +sub make_log_entry { + my ($self, $rev, $parents, $ed) = @_; + my $untracked = $self->get_untracked($ed); + + open my $un, '>>', "$self->{dir}/unhandled.log" or croak $!; + print $un "r$rev\n" or croak $!; + print $un $_, "\n" foreach @$untracked; + my %log_entry = ( parents => $parents || [], revision => $rev, + log => ''); + + my $headrev; + my $logged = delete $self->{logged_rev_props}; + if (!$logged || $self->{-want_revprops}) { + my $rp = $self->ra->rev_proplist($rev); + foreach (sort keys %$rp) { + my $v = $rp->{$_}; + if (/^svn:(author|date|log)$/) { + $log_entry{$1} = $v; + } elsif ($_ eq 'svm:headrev') { + $headrev = $v; + } else { + print $un " rev_prop: ", uri_encode($_), ' ', + uri_encode($v), "\n"; + } } - $_l_fmt ||= 'A' . length($c->{r}); - print 'r',pack($_l_fmt, $c->{r}),' | '; - print "$c->{c} | " if $_show_commit; - print $x; } else { - show_commit_normal($c); + map { $log_entry{$_} = $logged->{$_} } keys %$logged; + } + close $un or croak $!; + + $log_entry{date} = parse_svn_date($log_entry{date}); + $log_entry{log} .= "\n"; + my $author = $log_entry{author} = check_author($log_entry{author}); + my ($name, $email) = defined $::users{$author} ? @{$::users{$author}} + : ($author, undef); + if (defined $headrev && $self->use_svm_props) { + if ($self->rewrite_root) { + die "Can't have both 'useSvmProps' and 'rewriteRoot' ", + "options set!\n"; + } + my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$}; + # we don't want "SVM: initializing mirror for junk" ... + return undef if $r == 0; + my $svm = $self->svm; + if ($uuid ne $svm->{uuid}) { + die "UUID mismatch on SVM path:\n", + "expected: $svm->{uuid}\n", + " got: $uuid\n"; + } + my $full_url = $self->full_url; + $full_url =~ s#^\Q$svm->{replace}\E(/|$)#$svm->{source}$1# or + die "Failed to replace '$svm->{replace}' with ", + "'$svm->{source}' in $full_url\n"; + # throw away username for storing in records + remove_username($full_url); + $log_entry{metadata} = "$full_url\@$r $uuid"; + $log_entry{svm_revision} = $r; + $email ||= "$author\@$uuid" + } elsif ($self->use_svnsync_props) { + my $full_url = $self->svnsync->{url}; + $full_url .= "/$self->{path}" if length $self->{path}; + my $uuid = $self->svnsync->{uuid}; + $log_entry{metadata} = "$full_url\@$rev $uuid"; + $email ||= "$author\@$uuid" + } else { + $log_entry{metadata} = $self->metadata_url. "\@$rev " . + $self->ra->get_uuid; + $email ||= "$author\@" . 
$self->ra->get_uuid; } + $log_entry{name} = $name; + $log_entry{email} = $email; + \%log_entry; } -sub show_commit_changed_paths { - my ($c) = @_; - return unless $c->{changed}; - print "Changed paths:\n", @{$c->{changed}}; +sub fetch { + my ($self, $min_rev, $max_rev, @parents) = @_; + my ($last_rev, $last_commit) = $self->last_rev_commit; + my ($base, $head) = $self->get_fetch_range($min_rev, $max_rev); + $self->ra->gs_fetch_loop_common($base, $head, [$self]); } -sub show_commit_normal { - my ($c) = @_; - print '-' x72, "\nr$c->{r} | "; - print "$c->{c} | " if $_show_commit; - print "$c->{a} | ", strftime("%Y-%m-%d %H:%M:%S %z (%a, %d %b %Y)", - localtime($c->{t_utc})), ' | '; - my $nr_line = 0; +sub set_tree_cb { + my ($self, $log_entry, $tree, $rev, $date, $author) = @_; + $self->{inject_parents} = { $rev => $tree }; + $self->fetch(undef, undef); +} - if (my $l = $c->{l}) { - while ($l->[$#$l] eq "\n" && $#$l > 0 - && $l->[($#$l - 1)] eq "\n") { - pop @$l; +sub set_tree { + my ($self, $tree) = (shift, shift); + my $log_entry = ::get_commit_entry($tree); + unless ($self->{last_rev}) { + fatal("Must have an existing revision to commit\n"); + } + my %ed_opts = ( r => $self->{last_rev}, + log => $log_entry->{log}, + ra => $self->ra, + tree_a => $self->{last_commit}, + tree_b => $tree, + editor_cb => sub { + $self->set_tree_cb($log_entry, $tree, @_) }, + svn_path => $self->{path} ); + if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) { + print "No changes\nr$self->{last_rev} = $tree\n"; + } +} + +sub rebuild { + my ($self) = @_; + my $db_path = $self->db_path; + return if (-e $db_path && ! -z $db_path); + return unless ::verify_ref($self->refname.'^0'); + if (-f $self->{db_root}) { + rename $self->{db_root}, $db_path or die + "rename $self->{db_root} => $db_path failed: $!\n"; + my ($dir, $base) = ($db_path =~ m#^(.*?)/?([^/]+)$#); + symlink $base, $self->{db_root} or die + "symlink $base => $self->{db_root} failed: $!\n"; + return; + } + print "Rebuilding $db_path ...\n"; + my ($rev_list, $ctx) = command_output_pipe("rev-list", $self->refname); + my $latest; + my $full_url = $self->full_url; + remove_username($full_url); + my $svn_uuid; + while (<$rev_list>) { + chomp; + my $c = $_; + die "Non-SHA1: $c\n" unless $c =~ /^$::sha1$/o; + my ($url, $rev, $uuid) = ::cmt_metadata($c); + remove_username($url); + + # ignore merges (from set-tree) + next if (!defined $rev || !$uuid); + + # if we merged or otherwise started elsewhere, this is + # how we break out of it + if ((defined $svn_uuid && ($uuid ne $svn_uuid)) || + ($full_url && $url && ($url ne $full_url))) { + next; } - $nr_line = scalar @$l; - if (!$nr_line) { - print "1 line\n\n\n"; - } else { - if ($nr_line == 1) { - $nr_line = '1 line'; - } else { - $nr_line .= ' lines'; - } - print $nr_line, "\n"; - show_commit_changed_paths($c); - print "\n"; - print $_ foreach @$l; + $latest ||= $rev; + $svn_uuid ||= $uuid; + + $self->rev_db_set($rev, $c); + print "r$rev = $c\n"; + } + command_close_pipe($rev_list, $ctx); + print "Done rebuilding $db_path\n"; +} + +# rev_db: +# Tie::File seems to be prone to offset errors if revisions get sparse, +# it's not that fast, either. Tie::File is also not in Perl 5.6. So +# one of my favorite modules is out :< Next up would be one of the DBM +# modules, but I'm not sure which is most portable... So I'll just +# go with something that's plain-text, but still capable of +# being randomly accessed. So here's my ultra-simple fixed-width +# database. 
All records are 40 characters + "\n", so it's easy to seek +# to a revision: (41 * rev) is the byte offset. +# A record of 40 0s denotes an empty revision. +# And yes, it's still pretty fast (faster than Tie::File). +# These files are disposable unless noMetadata or useSvmProps is set + +sub _rev_db_set { + my ($fh, $rev, $commit) = @_; + my $offset = $rev * 41; + # assume that append is the common case: + seek $fh, 0, 2 or croak $!; + my $pos = tell $fh; + if ($pos < $offset) { + for (1 .. (($offset - $pos) / 41)) { + print $fh (('0' x 40),"\n") or croak $!; } + } + seek $fh, $offset, 0 or croak $!; + print $fh $commit,"\n" or croak $!; +} + +sub mkfile { + my ($path) = @_; + unless (-e $path) { + my ($dir, $base) = ($path =~ m#^(.*?)/?([^/]+)$#); + mkpath([$dir]) unless -d $dir; + open my $fh, '>>', $path or die "Couldn't create $path: $!\n"; + close $fh or die "Couldn't close (create) $path: $!\n"; + } +} + +sub rev_db_set { + my ($self, $rev, $commit, $update_ref, $uuid) = @_; + length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n"; + my $db = $self->db_path($uuid); + my $db_lock = "$db.lock"; + my $sig; + if ($update_ref) { + $SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} = + $SIG{USR1} = $SIG{USR2} = sub { $sig = $_[0] }; + } + mkfile($db); + + $LOCKFILES{$db_lock} = 1; + my $sync; + # both of these options make our .rev_db file very, very important + # and we can't afford to lose it because rebuild() won't work + if ($self->use_svm_props || $self->no_metadata) { + $sync = 1; + copy($db, $db_lock) or die "rev_db_set(@_): ", + "Failed to copy: ", + "$db => $db_lock ($!)\n"; } else { - print "1 line\n"; - show_commit_changed_paths($c); - print "\n"; + rename $db, $db_lock or die "rev_db_set(@_): ", + "Failed to rename: ", + "$db => $db_lock ($!)\n"; + } + open my $fh, '+<', $db_lock or die "Couldn't open $db_lock: $!\n"; + _rev_db_set($fh, $rev, $commit); + if ($sync) { + $fh->flush or die "Couldn't flush $db_lock: $!\n"; + $fh->sync or die "Couldn't sync $db_lock: $!\n"; + } + close $fh or croak $!; + if ($update_ref) { + $_head = $self; + command_noisy('update-ref', '-m', "r$rev", + $self->refname, $commit); + } + rename $db_lock, $db or die "rev_db_set(@_): ", "Failed to rename: ", + "$db_lock => $db ($!)\n"; + delete $LOCKFILES{$db_lock}; + if ($update_ref) { + $SIG{INT} = $SIG{HUP} = $SIG{TERM} = $SIG{ALRM} = $SIG{PIPE} = + $SIG{USR1} = $SIG{USR2} = 'DEFAULT'; + kill $sig, $$ if defined $sig; + } +} +sub rev_db_max { + my ($self) = @_; + $self->rebuild; + my $db_path = $self->db_path; + my @stat = stat $db_path or return 0; + ($stat[7] % 41) == 0 or die "$db_path inconsistent size: $stat[7]\n"; + my $max = $stat[7] / 41; + (($max > 0) ? 
$max - 1 : 0); +} + +sub rev_db_get { + my ($self, $rev, $uuid) = @_; + my $ret; + my $offset = $rev * 41; + my $db_path = $self->db_path($uuid); + return undef unless -e $db_path; + open my $fh, '<', $db_path or croak $!; + if (sysseek($fh, $offset, 0) == $offset) { + my $read = sysread($fh, $ret, 40); + $ret = undef if ($read != 40 || $ret eq ('0'x40)); } - foreach my $x (qw/raw diff/) { - if ($c->{$x}) { - print "\n"; - print $_ foreach @{$c->{$x}} + close $fh or croak $!; + $ret; +} + +sub find_rev_before { + my ($self, $rev, $eq_ok) = @_; + --$rev unless $eq_ok; + while ($rev > 0) { + if (my $c = $self->rev_db_get($rev)) { + return ($rev, $c); } + --$rev; + } + return (undef, undef); +} + +sub _new { + my ($class, $repo_id, $ref_id, $path) = @_; + unless (defined $repo_id && length $repo_id) { + $repo_id = $Git::SVN::default_repo_id; } + unless (defined $ref_id && length $ref_id) { + $_[2] = $ref_id = $Git::SVN::default_ref_id; + } + $_[1] = $repo_id = sanitize_remote_name($repo_id); + my $dir = "$ENV{GIT_DIR}/svn/$ref_id"; + $_[3] = $path = '' unless (defined $path); + mkpath(["$ENV{GIT_DIR}/svn"]); + bless { + ref_id => $ref_id, dir => $dir, index => "$dir/index", + path => $path, config => "$ENV{GIT_DIR}/svn/config", + db_root => "$dir/.rev_db", repo_id => $repo_id }, $class; +} + +sub db_path { + my ($self, $uuid) = @_; + $uuid ||= $self->ra_uuid; + "$self->{db_root}.$uuid"; +} + +sub uri_encode { + my ($f) = @_; + $f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg; + $f +} + +sub remove_username { + $_[0] =~ s{^([^:]*://)[^@]+@}{$1}; } -sub _simple_prompt { +package Git::SVN::Prompt; +use strict; +use warnings; +require SVN::Core; +use vars qw/$_no_auth_cache $_username/; + +sub simple { my ($cred, $realm, $default_username, $may_save, $pool) = @_; $may_save = undef if $_no_auth_cache; $default_username = $_username if defined $_username; @@ -1923,7 +2115,7 @@ sub _simple_prompt { } $cred->username($default_username); } else { - _username_prompt($cred, $realm, $may_save, $pool); + username($cred, $realm, $may_save, $pool); } $cred->password(_read_password("Password for '" . $cred->username . 
"': ", $realm)); @@ -1931,7 +2123,7 @@ sub _simple_prompt { $SVN::_Core::SVN_NO_ERROR; } -sub _ssl_server_trust_prompt { +sub ssl_server_trust { my ($cred, $realm, $failures, $cert_info, $may_save, $pool) = @_; $may_save = undef if $_no_auth_cache; print STDERR "Error validating server certificate for '$realm':\n"; @@ -1980,7 +2172,7 @@ prompt: $SVN::_Core::SVN_NO_ERROR; } -sub _ssl_client_cert_prompt { +sub ssl_client_cert { my ($cred, $realm, $may_save, $pool) = @_; $may_save = undef if $_no_auth_cache; print STDERR "Client certificate filename: "; @@ -1991,7 +2183,7 @@ sub _ssl_client_cert_prompt { $SVN::_Core::SVN_NO_ERROR; } -sub _ssl_client_cert_pw_prompt { +sub ssl_client_cert_pw { my ($cred, $realm, $may_save, $pool) = @_; $may_save = undef if $_no_auth_cache; $cred->password(_read_password("Password: ", $realm)); @@ -1999,7 +2191,7 @@ sub _ssl_client_cert_pw_prompt { $SVN::_Core::SVN_NO_ERROR; } -sub _username_prompt { +sub username { my ($cred, $realm, $may_save, $pool) = @_; $may_save = undef if $_no_auth_cache; if (defined $realm && length $realm) { @@ -2035,546 +2227,7 @@ sub _read_password { $password; } -sub libsvn_connect { - my ($url) = @_; - SVN::_Core::svn_config_ensure($_config_dir, undef); - my ($baton, $callbacks) = SVN::Core::auth_open_helper([ - SVN::Client::get_simple_provider(), - SVN::Client::get_ssl_server_trust_file_provider(), - SVN::Client::get_simple_prompt_provider( - \&_simple_prompt, 2), - SVN::Client::get_ssl_client_cert_prompt_provider( - \&_ssl_client_cert_prompt, 2), - SVN::Client::get_ssl_client_cert_pw_prompt_provider( - \&_ssl_client_cert_pw_prompt, 2), - SVN::Client::get_username_provider(), - SVN::Client::get_ssl_server_trust_prompt_provider( - \&_ssl_server_trust_prompt), - SVN::Client::get_username_prompt_provider( - \&_username_prompt, 2), - ]); - my $config = SVN::Core::config_get_config($_config_dir); - my $ra = SVN::Ra->new(url => $url, auth => $baton, - config => $config, - pool => SVN::Pool->new, - auth_provider_callbacks => $callbacks); - $ra->{svn_path} = $url; - $ra->{repos_root} = $ra->get_repos_root; - $ra->{svn_path} =~ s#^\Q$ra->{repos_root}\E/*##; - push @repo_path_split_cache, qr/^(\Q$ra->{repos_root}\E)/; - return $ra; -} - -sub libsvn_can_do_switch { - unless (defined $_svn_can_do_switch) { - my $pool = SVN::Pool->new; - my $rep = eval { - $SVN->do_switch(1, '', 0, $SVN->{url}, - SVN::Delta::Editor->new, $pool); - }; - if ($@) { - $_svn_can_do_switch = 0; - } else { - $rep->abort_report($pool); - $_svn_can_do_switch = 1; - } - $pool->clear; - } - $_svn_can_do_switch; -} - -sub libsvn_dup_ra { - my ($ra) = @_; - SVN::Ra->new(map { $_ => $ra->{$_} } qw/config url - auth auth_provider_callbacks repos_root svn_path/); -} - -sub uri_encode { - my ($f) = @_; - $f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg; - $f -} - -sub uri_decode { - my ($f) = @_; - $f =~ tr/+/ /; - $f =~ s/%([A-F0-9]{2})/chr hex($1)/ge; - $f -} - -sub libsvn_log_entry { - my ($rev, $author, $date, $msg, $parents, $untracked) = @_; - my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T - (\d\d)\:(\d\d)\:(\d\d).\d+Z$/x) - or die "Unable to parse date: $date\n"; - if (defined $author && length $author > 0 && - defined $_authors && ! 
defined $users{$author}) { - die "Author: $author not defined in $_authors file\n"; - } - $msg = '' if ($rev == 0 && !defined $msg); - - open my $un, '>>', "$GIT_SVN_DIR/unhandled.log" or croak $!; - my $h; - print $un "r$rev\n" or croak $!; - $h = $untracked->{empty}; - foreach (sort keys %$h) { - my $act = $h->{$_} ? '+empty_dir' : '-empty_dir'; - print $un " $act: ", uri_encode($_), "\n" or croak $!; - warn "W: $act: $_\n"; - } - foreach my $t (qw/dir_prop file_prop/) { - $h = $untracked->{$t} or next; - foreach my $path (sort keys %$h) { - my $ppath = $path eq '' ? '.' : $path; - foreach my $prop (sort keys %{$h->{$path}}) { - next if $SKIP{$prop}; - my $v = $h->{$path}->{$prop}; - if (defined $v) { - print $un " +$t: ", - uri_encode($ppath), ' ', - uri_encode($prop), ' ', - uri_encode($v), "\n" - or croak $!; - } else { - print $un " -$t: ", - uri_encode($ppath), ' ', - uri_encode($prop), "\n" - or croak $!; - } - } - } - } - foreach my $t (qw/absent_file absent_directory/) { - $h = $untracked->{$t} or next; - foreach my $parent (sort keys %$h) { - foreach my $path (sort @{$h->{$parent}}) { - print $un " $t: ", - uri_encode("$parent/$path"), "\n" - or croak $!; - warn "W: $t: $parent/$path ", - "Insufficient permissions?\n"; - } - } - } - - # revprops (make this optional? it's an extra network trip...) - my $pool = SVN::Pool->new; - my $rp = $SVN->rev_proplist($rev, $pool); - foreach (sort keys %$rp) { - next if /^svn:(?:author|date|log)$/; - print $un " rev_prop: ", uri_encode($_), ' ', - uri_encode($rp->{$_}), "\n"; - } - $pool->clear; - close $un or croak $!; - - { revision => $rev, date => "+0000 $Y-$m-$d $H:$M:$S", - author => $author, msg => $msg."\n", parents => $parents || [], - revprops => $rp } -} - -sub process_rm { - my ($gui, $last_commit, $f, $q) = @_; - # remove entire directories. - if (command('ls-tree',$last_commit,'--',$f) =~ /^040000 tree/) { - my ($ls, $ctx) = command_output_pipe(qw/ls-tree - -r --name-only -z/, - $last_commit,'--',$f); - local $/ = "\0"; - while (<$ls>) { - print $gui '0 ',0 x 40,"\t",$_ or croak $!; - print "\tD\t$_\n" unless $q; - } - print "\tD\t$f/\n" unless $q; - command_close_pipe($ls, $ctx); - return $SVN::Node::dir; - } else { - print $gui '0 ',0 x 40,"\t",$f,"\0" or croak $!; - print "\tD\t$f\n" unless $q; - return $SVN::Node::file; - } -} - -sub libsvn_fetch { - my ($last_commit, $paths, $rev, $author, $date, $msg) = @_; - my $pool = SVN::Pool->new; - my $ed = SVN::Git::Fetcher->new({ c => $last_commit, q => $_q }); - my $reporter = $SVN->do_update($rev, '', 1, $ed, $pool); - my @lock = $SVN::Core::VERSION ge '1.2.0' ? 
(undef) : (); - my (undef, $last_rev, undef) = cmt_metadata($last_commit); - $reporter->set_path('', $last_rev, 0, @lock, $pool); - $reporter->finish_report($pool); - $pool->clear; - unless ($ed->{git_commit_ok}) { - die "SVN connection failed somewhere...\n"; - } - libsvn_log_entry($rev, $author, $date, $msg, [$last_commit], $ed); -} - -sub svn_grab_base_rev { - my $c = eval { command_oneline([qw/rev-parse --verify/, - "refs/remotes/$GIT_SVN^0"], - { STDERR => 0 }) }; - if (defined $c && length $c) { - my ($url, $rev, $uuid) = cmt_metadata($c); - return ($rev, $c) if defined $rev; - } - if ($_no_metadata) { - my $offset = -41; # from tail - my $rl; - open my $fh, '<', $REVDB or - die "--no-metadata specified and $REVDB not readable\n"; - seek $fh, $offset, 2; - $rl = readline $fh; - defined $rl or return (undef, undef); - chomp $rl; - while ($c ne $rl && tell $fh != 0) { - $offset -= 41; - seek $fh, $offset, 2; - $rl = readline $fh; - defined $rl or return (undef, undef); - chomp $rl; - } - my $rev = tell $fh; - croak $! if ($rev < -1); - $rev = ($rev - 41) / 41; - close $fh or croak $!; - return ($rev, $c); - } - return (undef, undef); -} - -sub libsvn_parse_revision { - my $base = shift; - my $head = $SVN->get_latest_revnum(); - if (!defined $_revision || $_revision eq 'BASE:HEAD') { - return ($base + 1, $head) if (defined $base); - return (0, $head); - } - return ($1, $2) if ($_revision =~ /^(\d+):(\d+)$/); - return ($_revision, $_revision) if ($_revision =~ /^\d+$/); - if ($_revision =~ /^BASE:(\d+)$/) { - return ($base + 1, $1) if (defined $base); - return (0, $head); - } - return ($1, $head) if ($_revision =~ /^(\d+):HEAD$/); - die "revision argument: $_revision not understood by git-svn\n", - "Try using the command-line svn client instead\n"; -} - -sub libsvn_traverse_ignore { - my ($fh, $path, $r) = @_; - $path =~ s#^/+##g; - my $pool = SVN::Pool->new; - my ($dirent, undef, $props) = $SVN->get_dir($path, $r, $pool); - my $p = $path; - $p =~ s#^\Q$SVN->{svn_path}\E/##; - print $fh length $p ? 
"\n# $p\n" : "\n# /\n"; - if (my $s = $props->{'svn:ignore'}) { - $s =~ s/[\r\n]+/\n/g; - chomp $s; - if (length $p == 0) { - $s =~ s#\n#\n/$p#g; - print $fh "/$s\n"; - } else { - $s =~ s#\n#\n/$p/#g; - print $fh "/$p/$s\n"; - } - } - foreach (sort keys %$dirent) { - next if $dirent->{$_}->kind != $SVN::Node::dir; - libsvn_traverse_ignore($fh, "$path/$_", $r); - } - $pool->clear; -} - -sub revisions_eq { - my ($path, $r0, $r1) = @_; - return 1 if $r0 == $r1; - my $nr = 0; - # should be OK to use Pool here (r1 - r0) should be small - my $pool = SVN::Pool->new; - libsvn_get_log($SVN, [$path], $r0, $r1, - 0, 0, 1, sub {$nr++}, $pool); - $pool->clear; - return 0 if ($nr > 1); - return 1; -} - -sub libsvn_find_parent_branch { - my ($paths, $rev, $author, $date, $msg) = @_; - my $svn_path = '/'.$SVN->{svn_path}; - - # look for a parent from another branch: - my $i = $paths->{$svn_path} or return; - my $branch_from = $i->copyfrom_path or return; - my $r = $i->copyfrom_rev; - print STDERR "Found possible branch point: ", - "$branch_from => $svn_path, $r\n"; - $branch_from =~ s#^/##; - my $l_map = {}; - read_url_paths_all($l_map, '', "$GIT_DIR/svn"); - my $url = $SVN->{repos_root}; - defined $l_map->{$url} or return; - my $id = $l_map->{$url}->{$branch_from}; - if (!defined $id && $_follow_parent) { - print STDERR "Following parent: $branch_from\@$r\n"; - # auto create a new branch and follow it - $id = basename($branch_from); - $id .= '@'.$r if -r "$GIT_DIR/svn/$id"; - while (-r "$GIT_DIR/svn/$id") { - # just grow a tail if we're not unique enough :x - $id .= '-'; - } - } - return unless defined $id; - - my ($r0, $parent) = find_rev_before($r,$id,1); - if ($_follow_parent && (!defined $r0 || !defined $parent)) { - defined(my $pid = fork) or croak $!; - if (!$pid) { - $GIT_SVN = $ENV{GIT_SVN_ID} = $id; - init_vars(); - $SVN_URL = "$url/$branch_from"; - $SVN = undef; - setup_git_svn(); - # we can't assume SVN_URL exists at r+1: - $_revision = "0:$r"; - fetch_lib(); - exit 0; - } - waitpid $pid, 0; - croak $? if $?; - ($r0, $parent) = find_rev_before($r,$id,1); - } - return unless (defined $r0 && defined $parent); - if (revisions_eq($branch_from, $r0, $r)) { - unlink $GIT_SVN_INDEX; - print STDERR "Found branch parent: ($GIT_SVN) $parent\n"; - command_noisy('read-tree', $parent); - unless (libsvn_can_do_switch()) { - return _libsvn_new_tree($paths, $rev, $author, $date, - $msg, [$parent]); - } - # do_switch works with svn/trunk >= r22312, but that is not - # included with SVN 1.4.2 (the latest version at the moment), - # so we can't rely on it. - my $ra = libsvn_connect("$url/$branch_from"); - my $ed = SVN::Git::Fetcher->new({c => $parent, q => $_q }); - my $pool = SVN::Pool->new; - my $reporter = $ra->do_switch($rev, '', 1, $SVN->{url}, - $ed, $pool); - my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : (); - $reporter->set_path('', $r0, 0, @lock, $pool); - $reporter->finish_report($pool); - $pool->clear; - unless ($ed->{git_commit_ok}) { - die "SVN connection failed somewhere...\n"; - } - return libsvn_log_entry($rev, $author, $date, $msg, [$parent]); - } - print STDERR "Nope, branch point not imported or unknown\n"; - return undef; -} - -sub libsvn_get_log { - my ($ra, @args) = @_; - $args[4]-- if $args[4] && ! 
$_follow_parent; - if ($SVN::Core::VERSION le '1.2.0') { - splice(@args, 3, 1); - } - $ra->get_log(@args); -} - -sub libsvn_new_tree { - if (my $log_entry = libsvn_find_parent_branch(@_)) { - return $log_entry; - } - my ($paths, $rev, $author, $date, $msg) = @_; # $pool is last - _libsvn_new_tree($paths, $rev, $author, $date, $msg, []); -} - -sub _libsvn_new_tree { - my ($paths, $rev, $author, $date, $msg, $parents) = @_; - my $pool = SVN::Pool->new; - my $ed = SVN::Git::Fetcher->new({q => $_q}); - my $reporter = $SVN->do_update($rev, '', 1, $ed, $pool); - my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : (); - $reporter->set_path('', $rev, 1, @lock, $pool); - $reporter->finish_report($pool); - $pool->clear; - unless ($ed->{git_commit_ok}) { - die "SVN connection failed somewhere...\n"; - } - libsvn_log_entry($rev, $author, $date, $msg, $parents, $ed); -} - -sub find_graft_path_commit { - my ($tree_paths, $p1, $r1) = @_; - foreach my $x (keys %$tree_paths) { - next unless ($p1 =~ /^\Q$x\E/); - my $i = $tree_paths->{$x}; - my ($r0, $parent) = find_rev_before($r1,$i,1); - return $parent if (defined $r0 && $r0 == $r1); - print STDERR "r$r1 of $i not imported\n"; - next; - } - return undef; -} - -sub find_graft_path_parents { - my ($grafts, $tree_paths, $c, $p0, $r0) = @_; - foreach my $x (keys %$tree_paths) { - next unless ($p0 =~ /^\Q$x\E/); - my $i = $tree_paths->{$x}; - my ($r, $parent) = find_rev_before($r0, $i, 1); - if (defined $r && defined $parent && revisions_eq($x,$r,$r0)) { - my ($url_b, undef, $uuid_b) = cmt_metadata($c); - my ($url_a, undef, $uuid_a) = cmt_metadata($parent); - next if ($url_a && $url_b && $url_a eq $url_b && - $uuid_b eq $uuid_a); - $grafts->{$c}->{$parent} = 1; - } - } -} - -sub libsvn_graft_file_copies { - my ($grafts, $tree_paths, $path, $paths, $rev) = @_; - foreach (keys %$paths) { - my $i = $paths->{$_}; - my ($m, $p0, $r0) = ($i->action, $i->copyfrom_path, - $i->copyfrom_rev); - next unless (defined $p0 && defined $r0); - - my $p1 = $_; - $p1 =~ s#^/##; - $p0 =~ s#^/##; - my $c = find_graft_path_commit($tree_paths, $p1, $rev); - next unless $c; - find_graft_path_parents($grafts, $tree_paths, $c, $p0, $r0); - } -} - -sub set_index { - my $old = $ENV{GIT_INDEX_FILE}; - $ENV{GIT_INDEX_FILE} = shift; - return $old; -} - -sub restore_index { - my ($old) = @_; - if (defined $old) { - $ENV{GIT_INDEX_FILE} = $old; - } else { - delete $ENV{GIT_INDEX_FILE}; - } -} - -sub libsvn_commit_cb { - my ($rev, $date, $committer, $c, $msg, $r_last, $cmt_last) = @_; - if ($_optimize_commits && $rev == ($r_last + 1)) { - my $log = libsvn_log_entry($rev,$committer,$date,$msg); - $log->{tree} = get_tree_from_treeish($c); - my $cmt = git_commit($log, $cmt_last, $c); - my @diff = command('diff-tree', $cmt, $c); - if (@diff) { - print STDERR "Trees differ: $cmt $c\n", - join('',@diff),"\n"; - exit 1; - } - } else { - fetch("$rev=$c"); - } -} - -sub libsvn_ls_fullurl { - my $fullurl = shift; - my $ra = libsvn_connect($fullurl); - my @ret; - my $pool = SVN::Pool->new; - my $r = defined $_revision ? 
$_revision : $ra->get_latest_revnum; - my ($dirent, undef, undef) = $ra->get_dir('', $r, $pool); - foreach my $d (sort keys %$dirent) { - if ($dirent->{$d}->kind == $SVN::Node::dir) { - push @ret, "$d/"; # add '/' for compat with cli svn - } - } - $pool->clear; - return @ret; -} - - -sub libsvn_skip_unknown_revs { - my $err = shift; - my $errno = $err->apr_err(); - # Maybe the branch we're tracking didn't - # exist when the repo started, so it's - # not an error if it doesn't, just continue - # - # Wonderfully consistent library, eh? - # 160013 - svn:// and file:// - # 175002 - http(s):// - # 175007 - http(s):// (this repo required authorization, too...) - # More codes may be discovered later... - if ($errno == 175007 || $errno == 175002 || $errno == 160013) { - return; - } - croak "Error from SVN, ($errno): ", $err->expanded_message,"\n"; -}; - -# Tie::File seems to be prone to offset errors if revisions get sparse, -# it's not that fast, either. Tie::File is also not in Perl 5.6. So -# one of my favorite modules is out :< Next up would be one of the DBM -# modules, but I'm not sure which is most portable... So I'll just -# go with something that's plain-text, but still capable of -# being randomly accessed. So here's my ultra-simple fixed-width -# database. All records are 40 characters + "\n", so it's easy to seek -# to a revision: (41 * rev) is the byte offset. -# A record of 40 0s denotes an empty revision. -# And yes, it's still pretty fast (faster than Tie::File). -sub revdb_set { - my ($file, $rev, $commit) = @_; - length $commit == 40 or croak "arg3 must be a full SHA1 hexsum\n"; - open my $fh, '+<', $file or croak $!; - my $offset = $rev * 41; - # assume that append is the common case: - seek $fh, 0, 2 or croak $!; - my $pos = tell $fh; - if ($pos < $offset) { - print $fh (('0' x 40),"\n") x (($offset - $pos) / 41); - } - seek $fh, $offset, 0 or croak $!; - print $fh $commit,"\n"; - close $fh or croak $!; -} - -sub revdb_get { - my ($file, $rev) = @_; - my $ret; - my $offset = $rev * 41; - open my $fh, '<', $file or croak $!; - seek $fh, $offset, 0; - if (tell $fh == $offset) { - $ret = readline $fh; - if (defined $ret) { - chomp $ret; - $ret = undef if ($ret =~ /^0{40}$/); - } - } - close $fh or croak $!; - return $ret; -} - -sub copy_remote_ref { - my $origin = $_cp_remote ? $_cp_remote : 'origin'; - my $ref = "refs/remotes/$GIT_SVN"; - if (command('ls-remote', $origin, $ref)) { - command_noisy('fetch', $origin, "$ref:$ref"); - } elsif ($_cp_remote && !$_upgrade) { - die "Unable to find remote reference: ", - "refs/remotes/$GIT_SVN on $origin\n"; - } -} +package main; { my $kill_stupid_warnings = $SVN::Node::none.$SVN::Node::file. 
@@ -2594,27 +2247,28 @@ use strict; use warnings; use Carp qw/croak/; use IO::File qw//; -use Git qw/command command_oneline command_noisy - command_output_pipe command_input_pipe command_close_pipe/; +use Digest::MD5; # file baton members: path, mode_a, mode_b, pool, fh, blob, base sub new { my ($class, $git_svn) = @_; my $self = SVN::Delta::Editor->new; bless $self, $class; - $self->{c} = $git_svn->{c} if exists $git_svn->{c}; - $self->{q} = $git_svn->{q}; + $self->{c} = $git_svn->{last_commit} if exists $git_svn->{last_commit}; $self->{empty} = {}; $self->{dir_prop} = {}; $self->{file_prop} = {}; $self->{absent_dir} = {}; $self->{absent_file} = {}; - ($self->{gui}, $self->{ctx}) = command_input_pipe( - qw/update-index -z --index-info/); - require Digest::MD5; + $self->{gii} = $git_svn->tmp_index_do(sub { Git::IndexInfo->new }); $self; } +sub set_path_strip { + my ($self, $path) = @_; + $self->{path_strip} = qr/^\Q$path\E(\/|$)/ if length $path; +} + sub open_root { { path => '' }; } @@ -2624,16 +2278,46 @@ sub open_directory { { path => $path }; } +sub git_path { + my ($self, $path) = @_; + if ($self->{path_strip}) { + $path =~ s!$self->{path_strip}!! or + die "Failed to strip path '$path' ($self->{path_strip})\n"; + } + $path; +} + sub delete_entry { my ($self, $path, $rev, $pb) = @_; - my $t = process_rm($self->{gui}, $self->{c}, $path, $self->{q}); - $self->{empty}->{$path} = 0 if $t == $SVN::Node::dir; + + my $gpath = $self->git_path($path); + return undef if ($gpath eq ''); + + # remove entire directories. + if (command('ls-tree', $self->{c}, '--', $gpath) =~ /^040000 tree/) { + my ($ls, $ctx) = command_output_pipe(qw/ls-tree + -r --name-only -z/, + $self->{c}, '--', $gpath); + local $/ = "\0"; + while (<$ls>) { + chomp; + $self->{gii}->remove($_); + print "\tD\t$_\n" unless $::_q; + } + print "\tD\t$gpath/\n" unless $::_q; + command_close_pipe($ls, $ctx); + $self->{empty}->{$path} = 0 + } else { + $self->{gii}->remove($gpath); + print "\tD\t$gpath\n" unless $::_q; + } undef; } sub open_file { my ($self, $path, $pb, $rev) = @_; - my ($mode, $blob) = (command('ls-tree', $self->{c}, '--',$path) + my $gpath = $self->git_path($path); + my ($mode, $blob) = (command('ls-tree', $self->{c}, '--', $gpath) =~ /^(\d{6}) blob ([a-f\d]{40})\t/); unless (defined $mode && defined $blob) { die "$path was not found in commit $self->{c} (r$rev)\n"; @@ -2732,7 +2416,7 @@ sub apply_textdelta { sub close_file { my ($self, $fb, $exp) = @_; my $hash; - my $path = $fb->{path}; + my $path = $self->git_path($fb->{path}); if (my $fh = $fb->{fh}) { seek($fh, 0, 0) or croak $!; my $md5 = Digest::MD5->new; @@ -2760,65 +2444,163 @@ sub close_file { $hash = $fb->{blob} or die "no blob information\n"; } $fb->{pool}->clear; - my $gui = $self->{gui}; - print $gui "$fb->{mode_b} $hash\t$path\0" or croak $!; - print "\t$fb->{action}\t$path\n" if $fb->{action} && ! $self->{q}; + $self->{gii}->update($fb->{mode_b}, $hash, $path) or croak $!; + print "\t$fb->{action}\t$path\n" if $fb->{action} && ! 
$::_q; undef; } sub abort_edit { my $self = shift; - eval { command_close_pipe($self->{gui}, $self->{ctx}) }; + $self->{nr} = $self->{gii}->{nr}; + delete $self->{gii}; $self->SUPER::abort_edit(@_); } sub close_edit { my $self = shift; - command_close_pipe($self->{gui}, $self->{ctx}); $self->{git_commit_ok} = 1; + $self->{nr} = $self->{gii}->{nr}; + delete $self->{gii}; $self->SUPER::close_edit(@_); } package SVN::Git::Editor; -use vars qw/@ISA/; +use vars qw/@ISA $_rmdir $_cp_similarity $_find_copies_harder $_rename_limit/; use strict; use warnings; use Carp qw/croak/; use IO::File; -use Git qw/command command_oneline command_noisy - command_output_pipe command_input_pipe command_close_pipe/; +use Digest::MD5; sub new { - my $class = shift; - my $git_svn = shift; - my $self = SVN::Delta::Editor->new(@_); + my ($class, $opts) = @_; + foreach (qw/svn_path r ra tree_a tree_b log editor_cb/) { + die "$_ required!\n" unless (defined $opts->{$_}); + } + + my $pool = SVN::Pool->new; + my $mods = generate_diff($opts->{tree_a}, $opts->{tree_b}); + my $types = check_diff_paths($opts->{ra}, $opts->{svn_path}, + $opts->{r}, $mods); + + # $opts->{ra} functions should not be used after this: + my @ce = $opts->{ra}->get_commit_editor($opts->{log}, + $opts->{editor_cb}, $pool); + my $self = SVN::Delta::Editor->new(@ce, $pool); bless $self, $class; - foreach (qw/svn_path c r ra /) { - die "$_ required!\n" unless (defined $git_svn->{$_}); - $self->{$_} = $git_svn->{$_}; + foreach (qw/svn_path r tree_a tree_b/) { + $self->{$_} = $opts->{$_}; } - $self->{pool} = SVN::Pool->new; + $self->{url} = $opts->{ra}->{url}; + $self->{mods} = $mods; + $self->{types} = $types; + $self->{pool} = $pool; $self->{bat} = { '' => $self->open_root($self->{r}, $self->{pool}) }; $self->{rm} = { }; - require Digest::MD5; + $self->{path_prefix} = length $self->{svn_path} ? 
+ "$self->{svn_path}/" : ''; return $self; } +sub generate_diff { + my ($tree_a, $tree_b) = @_; + my @diff_tree = qw(diff-tree -z -r); + if ($_cp_similarity) { + push @diff_tree, "-C$_cp_similarity"; + } else { + push @diff_tree, '-C'; + } + push @diff_tree, '--find-copies-harder' if $_find_copies_harder; + push @diff_tree, "-l$_rename_limit" if defined $_rename_limit; + push @diff_tree, $tree_a, $tree_b; + my ($diff_fh, $ctx) = command_output_pipe(@diff_tree); + local $/ = "\0"; + my $state = 'meta'; + my @mods; + while (<$diff_fh>) { + chomp $_; # this gets rid of the trailing "\0" + if ($state eq 'meta' && /^:(\d{6})\s(\d{6})\s + $::sha1\s($::sha1)\s + ([MTCRAD])\d*$/xo) { + push @mods, { mode_a => $1, mode_b => $2, + sha1_b => $3, chg => $4 }; + if ($4 =~ /^(?:C|R)$/) { + $state = 'file_a'; + } else { + $state = 'file_b'; + } + } elsif ($state eq 'file_a') { + my $x = $mods[$#mods] or croak "Empty array\n"; + if ($x->{chg} !~ /^(?:C|R)$/) { + croak "Error parsing $_, $x->{chg}\n"; + } + $x->{file_a} = $_; + $state = 'file_b'; + } elsif ($state eq 'file_b') { + my $x = $mods[$#mods] or croak "Empty array\n"; + if (exists $x->{file_a} && $x->{chg} !~ /^(?:C|R)$/) { + croak "Error parsing $_, $x->{chg}\n"; + } + if (!exists $x->{file_a} && $x->{chg} =~ /^(?:C|R)$/) { + croak "Error parsing $_, $x->{chg}\n"; + } + $x->{file_b} = $_; + $state = 'meta'; + } else { + croak "Error parsing $_\n"; + } + } + command_close_pipe($diff_fh, $ctx); + \@mods; +} + +sub check_diff_paths { + my ($ra, $pfx, $rev, $mods) = @_; + my %types; + $pfx .= '/' if length $pfx; + + sub type_diff_paths { + my ($ra, $types, $path, $rev) = @_; + my @p = split m#/+#, $path; + my $c = shift @p; + unless (defined $types->{$c}) { + $types->{$c} = $ra->check_path($c, $rev); + } + while (@p) { + $c .= '/' . shift @p; + next if defined $types->{$c}; + $types->{$c} = $ra->check_path($c, $rev); + } + } + + foreach my $m (@$mods) { + foreach my $f (qw/file_a file_b/) { + next unless defined $m->{$f}; + my ($dir) = ($m->{$f} =~ m#^(.*?)/?(?:[^/]+)$#); + if (length $pfx.$dir && ! defined $types{$dir}) { + type_diff_paths($ra, \%types, $pfx.$dir, $rev); + } + } + } + \%types; +} + sub split_path { return ($_[0] =~ m#^(.*?)/?([^/]+)$#); } sub repo_path { - (defined $_[1] && length $_[1]) ? $_[1] : '' + my ($self, $path) = @_; + $self->{path_prefix}.(defined $path ? $path : ''); } sub url_path { my ($self, $path) = @_; - $self->{ra}->{url} . '/' . $self->repo_path($path); + $self->{url} . '/' . 
$self->repo_path($path); } sub rmdirs { - my ($self, $q) = @_; + my ($self) = @_; my $rm = $self->{rm}; delete $rm->{''}; # we never delete the url we're tracking return unless %$rm; @@ -2836,8 +2618,8 @@ sub rmdirs { delete $rm->{''}; # we never delete the url we're tracking return unless %$rm; - my ($fh, $ctx) = command_output_pipe( - qw/ls-tree --name-only -r -z/, $self->{c}); + my ($fh, $ctx) = command_output_pipe(qw/ls-tree --name-only -r -z/, + $self->{tree_b}); local $/ = "\0"; while (<$fh>) { chomp; @@ -2856,7 +2638,7 @@ sub rmdirs { foreach my $d (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$rm) { $self->close_directory($bat->{$d}, $p); my ($dn) = ($d =~ m#^(.*?)/?(?:[^/]+)$#); - print "\tD+\t$d/\n" unless $q; + print "\tD+\t$d/\n" unless $::_q; $self->SUPER::delete_entry($d, $r, $bat->{$dn}, $p); delete $bat->{$d}; } @@ -2864,9 +2646,10 @@ sub rmdirs { sub open_or_add_dir { my ($self, $full_path, $baton) = @_; - my $p = SVN::Pool->new; - my $t = $self->{ra}->check_path($full_path, $self->{r}, $p); - $p->clear; + my $t = $self->{types}->{$full_path}; + if (!defined $t) { + die "$full_path not known in r$self->{r} or we have a bug!\n"; + } if ($t == $SVN::Node::none) { return $self->add_directory($full_path, $baton, undef, -1, $self->{pool}); @@ -2883,9 +2666,9 @@ sub open_or_add_dir { sub ensure_path { my ($self, $path) = @_; my $bat = $self->{bat}; - $path = $self->repo_path($path); - return $bat->{''} unless (length $path); - my @p = split m#/+#, $path; + my $repo_path = $self->repo_path($path); + return $bat->{''} unless (length $repo_path); + my @p = split m#/+#, $repo_path; my $c = shift @p; $bat->{$c} ||= $self->open_or_add_dir($c, $bat->{''}); while (@p) { @@ -2897,23 +2680,23 @@ sub ensure_path { } sub A { - my ($self, $m, $q) = @_; + my ($self, $m) = @_; my ($dir, $file) = split_path($m->{file_b}); my $pbat = $self->ensure_path($dir); my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat, undef, -1); - print "\tA\t$m->{file_b}\n" unless $q; + print "\tA\t$m->{file_b}\n" unless $::_q; $self->chg_file($fbat, $m); $self->close_file($fbat,undef,$self->{pool}); } sub C { - my ($self, $m, $q) = @_; + my ($self, $m) = @_; my ($dir, $file) = split_path($m->{file_b}); my $pbat = $self->ensure_path($dir); my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat, $self->url_path($m->{file_a}), $self->{r}); - print "\tC\t$m->{file_a} => $m->{file_b}\n" unless $q; + print "\tC\t$m->{file_a} => $m->{file_b}\n" unless $::_q; $self->chg_file($fbat, $m); $self->close_file($fbat,undef,$self->{pool}); } @@ -2927,12 +2710,12 @@ sub delete_entry { } sub R { - my ($self, $m, $q) = @_; + my ($self, $m) = @_; my ($dir, $file) = split_path($m->{file_b}); my $pbat = $self->ensure_path($dir); my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat, $self->url_path($m->{file_a}), $self->{r}); - print "\tR\t$m->{file_a} => $m->{file_b}\n" unless $q; + print "\tR\t$m->{file_a} => $m->{file_b}\n" unless $::_q; $self->chg_file($fbat, $m); $self->close_file($fbat,undef,$self->{pool}); @@ -2942,12 +2725,12 @@ sub R { } sub M { - my ($self, $m, $q) = @_; + my ($self, $m) = @_; my ($dir, $file) = split_path($m->{file_b}); my $pbat = $self->ensure_path($dir); my $fbat = $self->open_file($self->repo_path($m->{file_b}), $pbat,$self->{r},$self->{pool}); - print "\t$m->{chg}\t$m->{file_b}\n" unless $q; + print "\t$m->{chg}\t$m->{file_b}\n" unless $::_q; $self->chg_file($fbat, $m); $self->close_file($fbat,undef,$self->{pool}); } @@ -2998,10 +2781,10 @@ sub chg_file { } sub D { - my 
($self, $m, $q) = @_; + my ($self, $m) = @_; my ($dir, $file) = split_path($m->{file_b}); my $pbat = $self->ensure_path($dir); - print "\tD\t$m->{file_b}\n" unless $q; + print "\tD\t$m->{file_b}\n" unless $::_q; $self->delete_entry($m->{file_b}, $pbat); } @@ -3018,22 +2801,1109 @@ sub close_edit { sub abort_edit { my ($self) = @_; $self->SUPER::abort_edit($self->{pool}); +} + +sub DESTROY { + my $self = shift; + $self->SUPER::DESTROY(@_); $self->{pool}->clear; } +# this drives the editor +sub apply_diff { + my ($self) = @_; + my $mods = $self->{mods}; + my %o = ( D => 1, R => 0, C => -1, A => 3, M => 3, T => 3 ); + foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) { + my $f = $m->{chg}; + if (defined $o{$f}) { + $self->$f($m); + } else { + fatal("Invalid change type: $f\n"); + } + } + $self->rmdirs if $_rmdir; + if (@$mods == 0) { + $self->abort_edit; + } else { + $self->close_edit; + } + return scalar @$mods; +} + +package Git::SVN::Ra; +use vars qw/@ISA $config_dir $_log_window_size/; +use strict; +use warnings; +my ($can_do_switch); +my $RA; + +BEGIN { + # enforce temporary pool usage for some simple functions + my $e; + foreach (qw/get_latest_revnum get_uuid get_repos_root/) { + $e .= "sub $_ { + my \$self = shift; + my \$pool = SVN::Pool->new; + my \@ret = \$self->SUPER::$_(\@_,\$pool); + \$pool->clear; + wantarray ? \@ret : \$ret[0]; }\n"; + } + + # get_dir needs $pool held in cache for dirents to work, + # check_path is cacheable and rev_proplist is close enough + # for our purposes. + foreach (qw/check_path get_dir rev_proplist/) { + $e .= "my \%${_}_cache; my \$${_}_rev = 0; sub $_ { + my \$self = shift; + my \$r = pop; + my \$k = join(\"\\0\", \@_); + if (my \$x = \$${_}_cache{\$r}->{\$k}) { + return wantarray ? \@\$x : \$x->[0]; + } + my \$pool = SVN::Pool->new; + my \@ret = \$self->SUPER::$_(\@_, \$r, \$pool); + if (\$r != \$${_}_rev) { + \%${_}_cache = ( pool => [] ); + \$${_}_rev = \$r; + } + \$${_}_cache{\$r}->{\$k} = \\\@ret; + push \@{\$${_}_cache{pool}}, \$pool; + wantarray ? 
\@ret : \$ret[0]; }\n"; + } + $e .= "\n1;"; + eval $e or die $@; +} + +sub new { + my ($class, $url) = @_; + $url =~ s!/+$!!; + return $RA if ($RA && $RA->{url} eq $url); + $RA->{pool}->clear if $RA; + + SVN::_Core::svn_config_ensure($config_dir, undef); + my ($baton, $callbacks) = SVN::Core::auth_open_helper([ + SVN::Client::get_simple_provider(), + SVN::Client::get_ssl_server_trust_file_provider(), + SVN::Client::get_simple_prompt_provider( + \&Git::SVN::Prompt::simple, 2), + SVN::Client::get_ssl_client_cert_prompt_provider( + \&Git::SVN::Prompt::ssl_client_cert, 2), + SVN::Client::get_ssl_client_cert_pw_prompt_provider( + \&Git::SVN::Prompt::ssl_client_cert_pw, 2), + SVN::Client::get_username_provider(), + SVN::Client::get_ssl_server_trust_prompt_provider( + \&Git::SVN::Prompt::ssl_server_trust), + SVN::Client::get_username_prompt_provider( + \&Git::SVN::Prompt::username, 2), + ]); + my $config = SVN::Core::config_get_config($config_dir); + my $self = SVN::Ra->new(url => $url, auth => $baton, + config => $config, + pool => SVN::Pool->new, + auth_provider_callbacks => $callbacks); + $self->{svn_path} = $url; + $self->{repos_root} = $self->get_repos_root; + $self->{svn_path} =~ s#^\Q$self->{repos_root}\E(/|$)##; + $RA = bless $self, $class; +} + +sub DESTROY { + # do not call the real DESTROY since we store ourselves in $RA +} + +sub get_log { + my ($self, @args) = @_; + my $pool = SVN::Pool->new; + splice(@args, 3, 1) if ($SVN::Core::VERSION le '1.2.0'); + my $ret = $self->SUPER::get_log(@args, $pool); + $pool->clear; + $ret; +} + +sub get_commit_editor { + my ($self, $log, $cb, $pool) = @_; + my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : (); + $self->SUPER::get_commit_editor($log, $cb, @lock, $pool); +} + +sub gs_do_update { + my ($self, $rev_a, $rev_b, $gs, $editor) = @_; + my $new = ($rev_a == $rev_b); + my $path = $gs->{path}; + + if ($new && -e $gs->{index}) { + unlink $gs->{index} or die + "Couldn't unlink index: $gs->{index}: $!\n"; + } + my $pool = SVN::Pool->new; + $editor->set_path_strip($path); + my (@pc) = split m#/#, $path; + my $reporter = $self->do_update($rev_b, (@pc ? shift @pc : ''), + 1, $editor, $pool); + my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : (); + + # Since we can't rely on svn_ra_reparent being available, we'll + # just have to do some magic with set_path to make it so + # we only want a partial path. + my $sp = ''; + my $final = join('/', @pc); + while (@pc) { + $reporter->set_path($sp, $rev_b, 0, @lock, $pool); + $sp .= '/' if length $sp; + $sp .= shift @pc; + } + die "BUG: '$sp' != '$final'\n" if ($sp ne $final); + + $reporter->set_path($sp, $rev_a, $new, @lock, $pool); + + $reporter->finish_report($pool); + $pool->clear; + $editor->{git_commit_ok}; +} + +# this requires SVN 1.4.3 or later (do_switch didn't work before 1.4.3, and +# svn_ra_reparent didn't work before 1.4) +sub gs_do_switch { + my ($self, $rev_a, $rev_b, $gs, $url_b, $editor) = @_; + my $path = $gs->{path}; + my $pool = SVN::Pool->new; + + my $full_url = $self->{url}; + my $old_url = $full_url; + $full_url .= "/$path" if length $path; + my ($ra, $reparented); + if ($old_url ne $full_url) { + if ($old_url !~ m#^svn(\+ssh)?://#) { + SVN::_Ra::svn_ra_reparent($self->{session}, $full_url, + $pool); + $self->{url} = $full_url; + $reparented = 1; + } else { + $ra = Git::SVN::Ra->new($full_url); + } + } + $ra ||= $self; + my $reporter = $ra->do_switch($rev_b, '', 1, $url_b, $editor, $pool); + my @lock = $SVN::Core::VERSION ge '1.2.0' ? 
(undef) : (); + $reporter->set_path('', $rev_a, 0, @lock, $pool); + $reporter->finish_report($pool); + + if ($reparented) { + SVN::_Ra::svn_ra_reparent($self->{session}, $old_url, $pool); + $self->{url} = $old_url; + } + + $pool->clear; + $editor->{git_commit_ok}; +} + +sub gs_fetch_loop_common { + my ($self, $base, $head, $gsv, $globs) = @_; + return if ($base > $head); + my $inc = $_log_window_size; + my ($min, $max) = ($base, $head < $base + $inc ? $head : $base + $inc); + my %common; + my $common_max = scalar @$gsv; + + foreach my $gs (@$gsv) { + my @tmp = split m#/#, $gs->{path}; + my $p = ''; + foreach (@tmp) { + $p .= length($p) ? "/$_" : $_; + $common{$p} ||= 0; + $common{$p}++; + } + } + $globs ||= []; + $common_max += scalar @$globs; + foreach my $glob (@$globs) { + my @tmp = split m#/#, $glob->{path}->{left}; + my $p = ''; + foreach (@tmp) { + $p .= length($p) ? "/$_" : $_; + $common{$p} ||= 0; + $common{$p}++; + } + } + + my $longest_path = ''; + foreach (sort {length $b <=> length $a} keys %common) { + if ($common{$_} == $common_max) { + $longest_path = $_; + last; + } + } + while (1) { + my %revs; + my $err; + my $err_handler = $SVN::Error::handler; + $SVN::Error::handler = sub { + ($err) = @_; + skip_unknown_revs($err); + }; + sub _cb { + my ($paths, $r, $author, $date, $log) = @_; + [ dup_changed_paths($paths), + { author => $author, date => $date, log => $log } ]; + } + $self->get_log([$longest_path], $min, $max, 0, 1, 1, + sub { $revs{$_[1]} = _cb(@_) }); + if ($err && $max >= $head) { + print STDERR "Path '$longest_path' ", + "was probably deleted:\n", + $err->expanded_message, + "\nWill attempt to follow ", + "revisions r$min .. r$max ", + "committed before the deletion\n"; + my $hi = $max; + while (--$hi >= $min) { + my $ok; + $self->get_log([$longest_path], $min, $hi, + 0, 1, 1, sub { + $ok ||= $_[1]; + $revs{$_[1]} = _cb(@_) }); + if ($ok) { + print STDERR "r$min .. r$ok OK\n"; + last; + } + } + } + $SVN::Error::handler = $err_handler; + + my %exists = map { $_->{path} => $_ } @$gsv; + foreach my $r (sort {$a <=> $b} keys %revs) { + my ($paths, $logged) = @{$revs{$r}}; + + foreach my $gs ($self->match_globs(\%exists, $paths, + $globs, $r)) { + if ($gs->rev_db_max >= $r) { + next; + } + next unless $gs->match_paths($paths, $r); + $gs->{logged_rev_props} = $logged; + if (my $last_commit = $gs->last_commit) { + $gs->assert_index_clean($last_commit); + } + my $log_entry = $gs->do_fetch($paths, $r); + if ($log_entry) { + $gs->do_git_commit($log_entry); + } + } + foreach my $g (@$globs) { + my $k = "svn-remote.$g->{remote}." . 
+ "$g->{t}-maxRev"; + Git::SVN::tmp_config($k, $r); + } + } + # pre-fill the .rev_db since it'll eventually get filled in + # with '0' x40 if something new gets committed + foreach my $gs (@$gsv) { + next if defined $gs->rev_db_get($max); + $gs->rev_db_set($max, 0 x40); + } + foreach my $g (@$globs) { + my $k = "svn-remote.$g->{remote}.$g->{t}-maxRev"; + Git::SVN::tmp_config($k, $max); + } + last if $max >= $head; + $min = $max + 1; + $max += $inc; + $max = $head if ($max > $head); + } +} + +sub match_globs { + my ($self, $exists, $paths, $globs, $r) = @_; + + sub get_dir_check { + my ($self, $exists, $g, $r) = @_; + my @x = eval { $self->get_dir($g->{path}->{left}, $r) }; + return unless scalar @x == 3; + my $dirents = $x[0]; + foreach my $de (keys %$dirents) { + next if $dirents->{$de}->kind != $SVN::Node::dir; + my $p = $g->{path}->full_path($de); + next if $exists->{$p}; + next if (length $g->{path}->{right} && + ($self->check_path($p, $r) != + $SVN::Node::dir)); + $exists->{$p} = Git::SVN->init($self->{url}, $p, undef, + $g->{ref}->full_path($de), 1); + } + } + foreach my $g (@$globs) { + if (my $path = $paths->{"/$g->{path}->{left}"}) { + if ($path->{action} =~ /^[AR]$/) { + get_dir_check($self, $exists, $g, $r); + } + } + foreach (keys %$paths) { + if (/$g->{path}->{left_regex}/ && + !/$g->{path}->{regex}/) { + next if $paths->{$_}->{action} !~ /^[AR]$/; + get_dir_check($self, $exists, $g, $r); + } + next unless /$g->{path}->{regex}/; + my $p = $1; + my $pathname = $g->{path}->full_path($p); + next if $exists->{$pathname}; + $exists->{$pathname} = Git::SVN->init( + $self->{url}, $pathname, undef, + $g->{ref}->full_path($p), 1); + } + my $c = ''; + foreach (split m#/#, $g->{path}->{left}) { + $c .= "/$_"; + next unless ($paths->{$c} && + ($paths->{$c}->{action} =~ /^[AR]$/)); + get_dir_check($self, $exists, $g, $r); + } + } + values %$exists; +} + +sub minimize_url { + my ($self) = @_; + return $self->{url} if ($self->{url} eq $self->{repos_root}); + my $url = $self->{repos_root}; + my @components = split(m!/!, $self->{svn_path}); + my $c = ''; + do { + $url .= "/$c" if length $c; + eval { (ref $self)->new($url)->get_latest_revnum }; + } while ($@ && ($c = shift @components)); + $url; +} + +sub can_do_switch { + my $self = shift; + unless (defined $can_do_switch) { + my $pool = SVN::Pool->new; + my $rep = eval { + $self->do_switch(1, '', 0, $self->{url}, + SVN::Delta::Editor->new, $pool); + }; + if ($@) { + $can_do_switch = 0; + } else { + $rep->abort_report($pool); + $can_do_switch = 1; + } + $pool->clear; + } + $can_do_switch; +} + +sub skip_unknown_revs { + my ($err) = @_; + my $errno = $err->apr_err(); + # Maybe the branch we're tracking didn't + # exist when the repo started, so it's + # not an error if it doesn't, just continue + # + # Wonderfully consistent library, eh? + # 160013 - svn:// and file:// + # 175002 - http(s):// + # 175007 - http(s):// (this repo required authorization, too...) + # More codes may be discovered later... + if ($errno == 175007 || $errno == 175002 || $errno == 160013) { + warn "W: Ignoring error from SVN, path probably ", + "does not exist: ($errno): ", + $err->expanded_message,"\n"; + return; + } + die "Error from SVN, ($errno): ", $err->expanded_message,"\n"; +} + +# svn_log_changed_path_t objects passed to get_log are likely to be +# overwritten even if only the refs are copied to an external variable, +# so we should dup the structures in their entirety. 
Using an externally +# passed pool (instead of our temporary and quickly cleared pool in +# Git::SVN::Ra) does not help matters at all... +sub dup_changed_paths { + my ($paths) = @_; + return undef unless $paths; + my %ret; + foreach my $p (keys %$paths) { + my $i = $paths->{$p}; + my %s = map { $_ => $i->$_ } + qw/copyfrom_path copyfrom_rev action/; + $ret{$p} = \%s; + } + \%ret; +} + +package Git::SVN::Log; +use strict; +use warnings; +use POSIX qw/strftime/; +use vars qw/$TZ $limit $color $pager $non_recursive $verbose $oneline + %rusers $show_commit $incremental/; +my $l_fmt; + +sub cmt_showable { + my ($c) = @_; + return 1 if defined $c->{r}; + if ($c->{l} && $c->{l}->[-1] eq "...\n" && + $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) { + my @log = command(qw/cat-file commit/, $c->{c}); + shift @log while ($log[0] ne "\n"); + shift @log; + @{$c->{l}} = grep !/^git-svn-id: /, @log; + + (undef, $c->{r}, undef) = ::extract_metadata( + (grep(/^git-svn-id: /, @log))[-1]); + } + return defined $c->{r}; +} + +sub log_use_color { + return 1 if $color; + my ($dc, $dcvar); + $dcvar = 'color.diff'; + $dc = `git-config --get $dcvar`; + if ($dc eq '') { + # nothing at all; fallback to "diff.color" + $dcvar = 'diff.color'; + $dc = `git-config --get $dcvar`; + } + chomp($dc); + if ($dc eq 'auto') { + my $pc; + $pc = `git-config --get color.pager`; + if ($pc eq '') { + # does not have it -- fallback to pager.color + $pc = `git-config --bool --get pager.color`; + } + else { + $pc = `git-config --bool --get color.pager`; + if ($?) { + $pc = 'false'; + } + } + chomp($pc); + if (-t *STDOUT || (defined $pager && $pc eq 'true')) { + return ($ENV{TERM} && $ENV{TERM} ne 'dumb'); + } + return 0; + } + return 0 if $dc eq 'never'; + return 1 if $dc eq 'always'; + chomp($dc = `git-config --bool --get $dcvar`); + return ($dc eq 'true'); +} + +sub git_svn_log_cmd { + my ($r_min, $r_max, @args) = @_; + my $head = 'HEAD'; + foreach my $x (@args) { + last if $x eq '--'; + next unless ::verify_ref("$x^0"); + $head = $x; + last; + } + + my $url = (::working_head_info($head))[0]; + my $gs = Git::SVN->find_by_url($url) || Git::SVN->_new; + my @cmd = (qw/log --abbrev-commit --pretty=raw --default/, + $gs->refname); + push @cmd, '-r' unless $non_recursive; + push @cmd, qw/--raw --name-status/ if $verbose; + push @cmd, '--color' if log_use_color(); + return @cmd unless defined $r_max; + if ($r_max == $r_min) { + push @cmd, '--max-count=1'; + if (my $c = $gs->rev_db_get($r_max)) { + push @cmd, $c; + } + } else { + my ($c_min, $c_max); + $c_max = $gs->rev_db_get($r_max); + $c_min = $gs->rev_db_get($r_min); + if (defined $c_min && defined $c_max) { + if ($r_max > $r_max) { + push @cmd, "$c_min..$c_max"; + } else { + push @cmd, "$c_max..$c_min"; + } + } elsif ($r_max > $r_min) { + push @cmd, $c_max; + } else { + push @cmd, $c_min; + } + } + return @cmd; +} + +# adapted from pager.c +sub config_pager { + $pager ||= $ENV{GIT_PAGER} || $ENV{PAGER}; + if (!defined $pager) { + $pager = 'less'; + } elsif (length $pager == 0 || $pager eq 'cat') { + $pager = undef; + } +} + +sub run_pager { + return unless -t *STDOUT; + pipe my $rfd, my $wfd or return; + defined(my $pid = fork) or ::fatal "Can't fork: $!\n"; + if (!$pid) { + open STDOUT, '>&', $wfd or + ::fatal "Can't redirect to stdout: $!\n"; + return; + } + open STDIN, '<&', $rfd or ::fatal "Can't redirect stdin: $!\n"; + $ENV{LESS} ||= 'FRSX'; + exec $pager or ::fatal "Can't run pager: $! 
($pager)\n"; +} + +sub tz_to_s_offset { + my ($tz) = @_; + $tz =~ s/(\d\d)$//; + return ($1 * 60) + ($tz * 3600); +} + +sub get_author_info { + my ($dest, $author, $t, $tz) = @_; + $author =~ s/(?:^\s*|\s*$)//g; + $dest->{a_raw} = $author; + my $au; + if ($::_authors) { + $au = $rusers{$author} || undef; + } + if (!$au) { + ($au) = ($author =~ /<([^>]+)\@[^>]+>$/); + } + $dest->{t} = $t; + $dest->{tz} = $tz; + $dest->{a} = $au; + # Date::Parse isn't in the standard Perl distro :( + if ($tz =~ s/^\+//) { + $t += tz_to_s_offset($tz); + } elsif ($tz =~ s/^\-//) { + $t -= tz_to_s_offset($tz); + } + $dest->{t_utc} = $t; +} + +sub process_commit { + my ($c, $r_min, $r_max, $defer) = @_; + if (defined $r_min && defined $r_max) { + if ($r_min == $c->{r} && $r_min == $r_max) { + show_commit($c); + return 0; + } + return 1 if $r_min == $r_max; + if ($r_min < $r_max) { + # we need to reverse the print order + return 0 if (defined $limit && --$limit < 0); + push @$defer, $c; + return 1; + } + if ($r_min != $r_max) { + return 1 if ($r_min < $c->{r}); + return 1 if ($r_max > $c->{r}); + } + } + return 0 if (defined $limit && --$limit < 0); + show_commit($c); + return 1; +} + +sub show_commit { + my $c = shift; + if ($oneline) { + my $x = "\n"; + if (my $l = $c->{l}) { + while ($l->[0] =~ /^\s*$/) { shift @$l } + $x = $l->[0]; + } + $l_fmt ||= 'A' . length($c->{r}); + print 'r',pack($l_fmt, $c->{r}),' | '; + print "$c->{c} | " if $show_commit; + print $x; + } else { + show_commit_normal($c); + } +} + +sub show_commit_changed_paths { + my ($c) = @_; + return unless $c->{changed}; + print "Changed paths:\n", @{$c->{changed}}; +} + +sub show_commit_normal { + my ($c) = @_; + print '-' x72, "\nr$c->{r} | "; + print "$c->{c} | " if $show_commit; + print "$c->{a} | ", strftime("%Y-%m-%d %H:%M:%S %z (%a, %d %b %Y)", + localtime($c->{t_utc})), ' | '; + my $nr_line = 0; + + if (my $l = $c->{l}) { + while ($l->[$#$l] eq "\n" && $#$l > 0 + && $l->[($#$l - 1)] eq "\n") { + pop @$l; + } + $nr_line = scalar @$l; + if (!$nr_line) { + print "1 line\n\n\n"; + } else { + if ($nr_line == 1) { + $nr_line = '1 line'; + } else { + $nr_line .= ' lines'; + } + print $nr_line, "\n"; + show_commit_changed_paths($c); + print "\n"; + print $_ foreach @$l; + } + } else { + print "1 line\n"; + show_commit_changed_paths($c); + print "\n"; + + } + foreach my $x (qw/raw stat diff/) { + if ($c->{$x}) { + print "\n"; + print $_ foreach @{$c->{$x}} + } + } +} + +sub cmd_show_log { + my (@args) = @_; + my ($r_min, $r_max); + my $r_last = -1; # prevent dupes + if (defined $TZ) { + $ENV{TZ} = $TZ; + } else { + delete $ENV{TZ}; + } + if (defined $::_revision) { + if ($::_revision =~ /^(\d+):(\d+)$/) { + ($r_min, $r_max) = ($1, $2); + } elsif ($::_revision =~ /^\d+$/) { + $r_min = $r_max = $::_revision; + } else { + ::fatal "-r$::_revision is not supported, use ", + "standard \'git log\' arguments instead\n"; + } + } + + config_pager(); + @args = (git_svn_log_cmd($r_min, $r_max, @args), @args); + my $log = command_output_pipe(@args); + run_pager(); + my (@k, $c, $d, $stat); + my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/; + while (<$log>) { + if (/^${esc_color}commit ($::sha1_short)/o) { + my $cmt = $1; + if ($c && cmt_showable($c) && $c->{r} != $r_last) { + $r_last = $c->{r}; + process_commit($c, $r_min, $r_max, \@k) or + goto out; + } + $d = undef; + $c = { c => $cmt }; + } elsif (/^${esc_color}author (.+) (\d+) ([\-\+]?\d+)$/o) { + get_author_info($c, $1, $2, $3); + } elsif (/^${esc_color}(?:tree|parent|committer) /o) { + # ignore + } 
elsif (/^${esc_color}:\d{6} \d{6} $::sha1_short/o) { + push @{$c->{raw}}, $_; + } elsif (/^${esc_color}[ACRMDT]\t/) { + # we could add $SVN->{svn_path} here, but that requires + # remote access at the moment (repo_path_split)... + s#^(${esc_color})([ACRMDT])\t#$1 $2 #o; + push @{$c->{changed}}, $_; + } elsif (/^${esc_color}diff /o) { + $d = 1; + push @{$c->{diff}}, $_; + } elsif ($d) { + push @{$c->{diff}}, $_; + } elsif (/^\ .+\ \|\s*\d+\ $esc_color[\+\-]* + $esc_color*[\+\-]*$esc_color$/x) { + $stat = 1; + push @{$c->{stat}}, $_; + } elsif ($stat && /^ \d+ files changed, \d+ insertions/) { + push @{$c->{stat}}, $_; + $stat = undef; + } elsif (/^${esc_color} (git-svn-id:.+)$/o) { + ($c->{url}, $c->{r}, undef) = ::extract_metadata($1); + } elsif (s/^${esc_color} //o) { + push @{$c->{l}}, $_; + } + } + if ($c && defined $c->{r} && $c->{r} != $r_last) { + $r_last = $c->{r}; + process_commit($c, $r_min, $r_max, \@k); + } + if (@k) { + my $swap = $r_max; + $r_max = $r_min; + $r_min = $swap; + process_commit($_, $r_min, $r_max) foreach reverse @k; + } +out: + close $log; + print '-' x72,"\n" unless $incremental || $oneline; +} + +package Git::SVN::Migration; +# these version numbers do NOT correspond to actual version numbers +# of git nor git-svn. They are just relative. +# +# v0 layout: .git/$id/info/url, refs/heads/$id-HEAD +# +# v1 layout: .git/$id/info/url, refs/remotes/$id +# +# v2 layout: .git/svn/$id/info/url, refs/remotes/$id +# +# v3 layout: .git/svn/$id, refs/remotes/$id +# - info/url may remain for backwards compatibility +# - this is what we migrate up to this layout automatically, +# - this will be used by git svn init on single branches +# v3.1 layout (auto migrated): +# - .rev_db => .rev_db.$UUID, .rev_db will remain as a symlink +# for backwards compatibility +# +# v4 layout: .git/svn/$repo_id/$id, refs/remotes/$repo_id/$id +# - this is only created for newly multi-init-ed +# repositories. Similar in spirit to the +# --use-separate-remotes option in git-clone (now default) +# - we do not automatically migrate to this (following +# the example set by core git) +use strict; +use warnings; +use Carp qw/croak/; +use File::Path qw/mkpath/; +use File::Basename qw/dirname basename/; +use vars qw/$_minimize/; + +sub migrate_from_v0 { + my $git_dir = $ENV{GIT_DIR}; + return undef unless -d $git_dir; + my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/); + my $migrated = 0; + while (<$fh>) { + chomp; + my ($id, $orig_ref) = ($_, $_); + next unless $id =~ s#^refs/heads/(.+)-HEAD$#$1#; + next unless -f "$git_dir/$id/info/url"; + my $new_ref = "refs/remotes/$id"; + if (::verify_ref("$new_ref^0")) { + print STDERR "W: $orig_ref is probably an old ", + "branch used by an ancient version of ", + "git-svn.\n", + "However, $new_ref also exists.\n", + "We will not be able ", + "to use this branch until this ", + "ambiguity is resolved.\n"; + next; + } + print STDERR "Migrating from v0 layout...\n" if !$migrated; + print STDERR "Renaming ref: $orig_ref => $new_ref\n"; + command_noisy('update-ref', $new_ref, $orig_ref); + command_noisy('update-ref', '-d', $orig_ref, $orig_ref); + $migrated++; + } + command_close_pipe($fh, $ctx); + print STDERR "Done migrating from v0 layout...\n" if $migrated; + $migrated; +} + +sub migrate_from_v1 { + my $git_dir = $ENV{GIT_DIR}; + my $migrated = 0; + return $migrated unless -d $git_dir; + my $svn_dir = "$git_dir/svn"; + + # just in case somebody used 'svn' as their $id at some point... + return $migrated if -d $svn_dir && ! 
-f "$svn_dir/info/url"; + + print STDERR "Migrating from a git-svn v1 layout...\n"; + mkpath([$svn_dir]); + print STDERR "Data from a previous version of git-svn exists, but\n\t", + "$svn_dir\n\t(required for this version ", + "($::VERSION) of git-svn) does not. exist\n"; + my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/); + while (<$fh>) { + my $x = $_; + next unless $x =~ s#^refs/remotes/##; + chomp $x; + next unless -f "$git_dir/$x/info/url"; + my $u = eval { ::file_to_s("$git_dir/$x/info/url") }; + next unless $u; + my $dn = dirname("$git_dir/svn/$x"); + mkpath([$dn]) unless -d $dn; + if ($x eq 'svn') { # they used 'svn' as GIT_SVN_ID: + mkpath(["$git_dir/svn/svn"]); + print STDERR " - $git_dir/$x/info => ", + "$git_dir/svn/$x/info\n"; + rename "$git_dir/$x/info", "$git_dir/svn/$x/info" or + croak "$!: $x"; + # don't worry too much about these, they probably + # don't exist with repos this old (save for index, + # and we can easily regenerate that) + foreach my $f (qw/unhandled.log index .rev_db/) { + rename "$git_dir/$x/$f", "$git_dir/svn/$x/$f"; + } + } else { + print STDERR " - $git_dir/$x => $git_dir/svn/$x\n"; + rename "$git_dir/$x", "$git_dir/svn/$x" or + croak "$!: $x"; + } + $migrated++; + } + command_close_pipe($fh, $ctx); + print STDERR "Done migrating from a git-svn v1 layout\n"; + $migrated; +} + +sub read_old_urls { + my ($l_map, $pfx, $path) = @_; + my @dir; + foreach (<$path/*>) { + if (-r "$_/info/url") { + $pfx .= '/' if $pfx && $pfx !~ m!/$!; + my $ref_id = $pfx . basename $_; + my $url = ::file_to_s("$_/info/url"); + $l_map->{$ref_id} = $url; + } elsif (-d $_) { + push @dir, $_; + } + } + foreach (@dir) { + my $x = $_; + $x =~ s!^\Q$ENV{GIT_DIR}\E/svn/!!o; + read_old_urls($l_map, $x, $_); + } +} + +sub migrate_from_v2 { + my @cfg = command(qw/config -l/); + return if grep /^svn-remote\..+\.url=/, @cfg; + my %l_map; + read_old_urls(\%l_map, '', "$ENV{GIT_DIR}/svn"); + my $migrated = 0; + + foreach my $ref_id (sort keys %l_map) { + eval { Git::SVN->init($l_map{$ref_id}, '', undef, $ref_id) }; + if ($@) { + Git::SVN->init($l_map{$ref_id}, '', $ref_id, $ref_id); + } + $migrated++; + } + $migrated; +} + +sub minimize_connections { + my $r = Git::SVN::read_all_remotes(); + my $new_urls = {}; + my $root_repos = {}; + foreach my $repo_id (keys %$r) { + my $url = $r->{$repo_id}->{url} or next; + my $fetch = $r->{$repo_id}->{fetch} or next; + my $ra = Git::SVN::Ra->new($url); + + # skip existing cases where we already connect to the root + if (($ra->{url} eq $ra->{repos_root}) || + (Git::SVN::sanitize_remote_name($ra->{repos_root}) eq + $repo_id)) { + $root_repos->{$ra->{url}} = $repo_id; + next; + } + + my $root_ra = Git::SVN::Ra->new($ra->{repos_root}); + my $root_path = $ra->{url}; + $root_path =~ s#^\Q$ra->{repos_root}\E(/|$)##; + foreach my $path (keys %$fetch) { + my $ref_id = $fetch->{$path}; + my $gs = Git::SVN->new($ref_id, $repo_id, $path); + + # make sure we can read when connecting to + # a higher level of a repository + my ($last_rev, undef) = $gs->last_rev_commit; + if (!defined $last_rev) { + $last_rev = eval { + $root_ra->get_latest_revnum; + }; + next if $@; + } + my $new = $root_path; + $new .= length $path ? 
"/$path" : ''; + eval { + $root_ra->get_log([$new], $last_rev, $last_rev, + 0, 0, 1, sub { }); + }; + next if $@; + $new_urls->{$ra->{repos_root}}->{$new} = + { ref_id => $ref_id, + old_repo_id => $repo_id, + old_path => $path }; + } + } + + my @emptied; + foreach my $url (keys %$new_urls) { + # see if we can re-use an existing [svn-remote "repo_id"] + # instead of creating a(n ugly) new section: + my $repo_id = $root_repos->{$url} || + Git::SVN::sanitize_remote_name($url); + + my $fetch = $new_urls->{$url}; + foreach my $path (keys %$fetch) { + my $x = $fetch->{$path}; + Git::SVN->init($url, $path, $repo_id, $x->{ref_id}); + my $pfx = "svn-remote.$x->{old_repo_id}"; + + my $old_fetch = quotemeta("$x->{old_path}:". + "refs/remotes/$x->{ref_id}"); + command_noisy(qw/config --unset/, + "$pfx.fetch", '^'. $old_fetch . '$'); + delete $r->{$x->{old_repo_id}}-> + {fetch}->{$x->{old_path}}; + if (!keys %{$r->{$x->{old_repo_id}}->{fetch}}) { + command_noisy(qw/config --unset/, + "$pfx.url"); + push @emptied, $x->{old_repo_id} + } + } + } + if (@emptied) { + my $file = $ENV{GIT_CONFIG} || $ENV{GIT_CONFIG_LOCAL} || + "$ENV{GIT_DIR}/config"; + print STDERR <<EOF; +The following [svn-remote] sections in your config file ($file) are empty +and can be safely removed: +EOF + print STDERR "[svn-remote \"$_\"]\n" foreach @emptied; + } +} + +sub migration_check { + migrate_from_v0(); + migrate_from_v1(); + migrate_from_v2(); + minimize_connections() if $_minimize; +} + +package Git::IndexInfo; +use strict; +use warnings; +use Git qw/command_input_pipe command_close_pipe/; + +sub new { + my ($class) = @_; + my ($gui, $ctx) = command_input_pipe(qw/update-index -z --index-info/); + bless { gui => $gui, ctx => $ctx, nr => 0}, $class; +} + +sub remove { + my ($self, $path) = @_; + if (print { $self->{gui} } '0 ', 0 x 40, "\t", $path, "\0") { + return ++$self->{nr}; + } + undef; +} + +sub update { + my ($self, $mode, $hash, $path) = @_; + if (print { $self->{gui} } $mode, ' ', $hash, "\t", $path, "\0") { + return ++$self->{nr}; + } + undef; +} + +sub DESTROY { + my ($self) = @_; + command_close_pipe($self->{gui}, $self->{ctx}); +} + +package Git::SVN::GlobSpec; +use strict; +use warnings; + +sub new { + my ($class, $glob) = @_; + my $re = $glob; + $re =~ s!/+$!!g; # no need for trailing slashes + my $nr = ($re =~ s!^(.*)\*(.*)$!\(\[^/\]+\)!g); + my ($left, $right) = ($1, $2); + if ($nr > 1) { + die "Only one '*' wildcard expansion ", + "is supported (got $nr): '$glob'\n"; + } elsif ($nr == 0) { + die "One '*' is needed for glob: '$glob'\n"; + } + $re = quotemeta($left) . $re . quotemeta($right); + if (length $left && !($left =~ s!/+$!!g)) { + die "Missing trailing '/' on left side of: '$glob' ($left)\n"; + } + if (length $right && !($right =~ s!^/+!!g)) { + die "Missing leading '/' on right side of: '$glob' ($right)\n"; + } + my $left_re = qr/^\/\Q$left\E(\/|$)/; + bless { left => $left, right => $right, left_regex => $left_re, + regex => qr/$re/, glob => $glob }, $class; +} + +sub full_path { + my ($self, $path) = @_; + return (length $self->{left} ? "$self->{left}/" : '') . + $path . (length $self->{right} ? 
"/$self->{right}" : ''); +} + __END__ Data structures: -$log_msg hashref as returned by libsvn_log_entry() + +$remotes = { # returned by read_all_remotes() + 'svn' => { + # svn-remote.svn.url=https://svn.musicpd.org + url => 'https://svn.musicpd.org', + # svn-remote.svn.fetch=mpd/trunk:trunk + fetch => { + 'mpd/trunk' => 'trunk', + }, + # svn-remote.svn.tags=mpd/tags/*:tags/* + tags => { + path => { + left => 'mpd/tags', + right => '', + regex => qr!mpd/tags/([^/]+)$!, + glob => 'tags/*', + }, + ref => { + left => 'tags', + right => '', + regex => qr!tags/([^/]+)$!, + glob => 'tags/*', + }, + } + } +}; + +$log_entry hashref as returned by libsvn_log_entry() { - msg => 'whitespace-formatted log entry + log => 'whitespace-formatted log entry ', # trailing newline is preserved revision => '8', # integer date => '2004-02-24T17:01:44.108345Z', # commit date author => 'committer name' }; + +# this is generated by generate_diff(); @mods = array of diff-index line hashes, each element represents one line of diff-index output @@ -48,7 +48,7 @@ static int handle_options(const char*** argv, int* argc) /* * Check remaining flags. */ - if (!strncmp(cmd, "--exec-path", 11)) { + if (!prefixcmp(cmd, "--exec-path")) { cmd += 11; if (*cmd == '=') git_set_exec_path(cmd + 1); @@ -66,7 +66,7 @@ static int handle_options(const char*** argv, int* argc) setenv(GIT_DIR_ENVIRONMENT, (*argv)[1], 1); (*argv)++; (*argc)--; - } else if (!strncmp(cmd, "--git-dir=", 10)) { + } else if (!prefixcmp(cmd, "--git-dir=")) { setenv(GIT_DIR_ENVIRONMENT, cmd + 10, 1); } else if (!strcmp(cmd, "--bare")) { static char git_dir[PATH_MAX+1]; @@ -88,7 +88,7 @@ static char *alias_string; static int git_alias_config(const char *var, const char *value) { - if (!strncmp(var, "alias.", 6) && !strcmp(var + 6, alias_command)) { + if (!prefixcmp(var, "alias.") && !strcmp(var + 6, alias_command)) { alias_string = xstrdup(value); } return 0; @@ -237,10 +237,9 @@ static void handle_internal_command(int argc, const char **argv, char **envp) { "config", cmd_config }, { "count-objects", cmd_count_objects, RUN_SETUP }, { "describe", cmd_describe, RUN_SETUP }, - { "diff", cmd_diff, RUN_SETUP | USE_PAGER }, - { "diff-files", cmd_diff_files, RUN_SETUP }, + { "diff", cmd_diff, USE_PAGER }, + { "diff-files", cmd_diff_files }, { "diff-index", cmd_diff_index, RUN_SETUP }, - { "diff-stages", cmd_diff_stages, RUN_SETUP }, { "diff-tree", cmd_diff_tree, RUN_SETUP }, { "fmt-merge-msg", cmd_fmt_merge_msg, RUN_SETUP }, { "for-each-ref", cmd_for_each_ref, RUN_SETUP }, @@ -248,7 +247,7 @@ static void handle_internal_command(int argc, const char **argv, char **envp) { "fsck", cmd_fsck, RUN_SETUP }, { "fsck-objects", cmd_fsck, RUN_SETUP }, { "get-tar-commit-id", cmd_get_tar_commit_id }, - { "grep", cmd_grep, RUN_SETUP }, + { "grep", cmd_grep, RUN_SETUP | USE_PAGER }, { "help", cmd_help }, { "init", cmd_init_db }, { "init-db", cmd_init_db }, @@ -257,6 +256,7 @@ static void handle_internal_command(int argc, const char **argv, char **envp) { "ls-tree", cmd_ls_tree, RUN_SETUP }, { "mailinfo", cmd_mailinfo }, { "mailsplit", cmd_mailsplit }, + { "merge-base", cmd_merge_base, RUN_SETUP }, { "merge-file", cmd_merge_file }, { "mv", cmd_mv, RUN_SETUP | NOT_BARE }, { "name-rev", cmd_name_rev, RUN_SETUP }, @@ -348,7 +348,7 @@ int main(int argc, const char **argv, char **envp) * So we just directly call the internal command handler, and * die if that one cannot handle it. 
*/ - if (!strncmp(cmd, "git-", 4)) { + if (!prefixcmp(cmd, "git-")) { cmd += 4; argv[0] = cmd; handle_internal_command(argc, argv, envp); @@ -360,7 +360,7 @@ int main(int argc, const char **argv, char **envp) argc--; handle_options(&argv, &argc); if (argc > 0) { - if (!strncmp(argv[0], "--", 2)) + if (!prefixcmp(argv[0], "--")) argv[0] += 2; } else { /* Default command: "help" */ diff --git a/hash-object.c b/hash-object.c index 5f89e64c13..18f5017f51 100644 --- a/hash-object.c +++ b/hash-object.c @@ -7,7 +7,7 @@ #include "cache.h" #include "blob.h" -static void hash_object(const char *path, const char *type, int write_object) +static void hash_object(const char *path, enum object_type type, int write_object) { int fd; struct stat st; @@ -15,7 +15,7 @@ static void hash_object(const char *path, const char *type, int write_object) fd = open(path, O_RDONLY); if (fd < 0 || fstat(fd, &st) < 0 || - index_fd(sha1, fd, &st, write_object, type)) + index_fd(sha1, fd, &st, write_object, type, path)) die(write_object ? "Unable to add %s to database" : "Unable to hash %s", path); @@ -73,7 +73,7 @@ int main(int argc, char **argv) if (0 <= prefix_length) arg = prefix_filename(prefix, prefix_length, arg); - hash_object(arg, type, write_object); + hash_object(arg, type_from_string(type), write_object); no_more_flags = 1; } } @@ -130,7 +130,7 @@ static void list_commands(const char *exec_path, const char *pattern) struct stat st; int entlen; - if (strncmp(de->d_name, "git-", 4)) + if (prefixcmp(de->d_name, "git-")) continue; strcpy(path+dirlen, de->d_name); if (stat(path, &st) || /* stat, not lstat */ @@ -179,7 +179,7 @@ static void show_man_page(const char *git_cmd) { const char *page; - if (!strncmp(git_cmd, "git", 3)) + if (!prefixcmp(git_cmd, "git")) page = git_cmd; else { int page_len = strlen(git_cmd) + 4; diff --git a/http-fetch.c b/http-fetch.c index 9f790a08e5..e6cd11db73 100644 --- a/http-fetch.c +++ b/http-fetch.c @@ -717,8 +717,8 @@ static int fetch_indices(struct alt_base *repo) case 'P': i++; if (i + 52 <= buffer.posn && - !strncmp(data + i, " pack-", 6) && - !strncmp(data + i + 46, ".pack\n", 6)) { + !prefixcmp(data + i, " pack-") && + !prefixcmp(data + i + 46, ".pack\n")) { get_sha1_hex(data + i + 6, sha1); setup_index(repo, sha1); i += 51; diff --git a/http-push.c b/http-push.c index cec7bf7fad..68b78b538a 100644 --- a/http-push.c +++ b/http-push.c @@ -479,7 +479,7 @@ static void start_put(struct transfer_request *request) char *hex = sha1_to_hex(request->obj->sha1); struct active_request_slot *slot; char *posn; - char type[20]; + enum object_type type; char hdr[50]; void *unpacked; unsigned long len; @@ -487,8 +487,8 @@ static void start_put(struct transfer_request *request) ssize_t size; z_stream stream; - unpacked = read_sha1_file(request->obj->sha1, type, &len); - hdrlen = sprintf(hdr, "%s %lu", type, len) + 1; + unpacked = read_sha1_file(request->obj->sha1, &type, &len); + hdrlen = sprintf(hdr, "%s %lu", typename(type), len) + 1; /* Set it up */ memset(&stream, 0, sizeof(stream)); @@ -1060,8 +1060,8 @@ static int fetch_indices(void) case 'P': i++; if (i + 52 < buffer.posn && - !strncmp(data + i, " pack-", 6) && - !strncmp(data + i + 46, ".pack\n", 6)) { + !prefixcmp(data + i, " pack-") && + !prefixcmp(data + i + 46, ".pack\n")) { get_sha1_hex(data + i + 6, sha1); setup_index(sha1); i += 51; @@ -1206,11 +1206,11 @@ static void handle_new_lock_ctx(struct xml_ctx *ctx, int tag_closed) lock->owner = xmalloc(strlen(ctx->cdata) + 1); strcpy(lock->owner, ctx->cdata); } else if 
(!strcmp(ctx->name, DAV_ACTIVELOCK_TIMEOUT)) { - if (!strncmp(ctx->cdata, "Second-", 7)) + if (!prefixcmp(ctx->cdata, "Second-")) lock->timeout = strtol(ctx->cdata + 7, NULL, 10); } else if (!strcmp(ctx->name, DAV_ACTIVELOCK_TOKEN)) { - if (!strncmp(ctx->cdata, "opaquelocktoken:", 16)) { + if (!prefixcmp(ctx->cdata, "opaquelocktoken:")) { lock->token = xmalloc(strlen(ctx->cdata) - 15); strcpy(lock->token, ctx->cdata + 16); } @@ -2168,7 +2168,7 @@ static void fetch_symref(const char *path, char **symref, unsigned char *sha1) return; /* If it's a symref, set the refname; otherwise try for a sha1 */ - if (!strncmp((char *)buffer.buffer, "ref: ", 5)) { + if (!prefixcmp((char *)buffer.buffer, "ref: ")) { *symref = xmalloc(buffer.posn - 5); strlcpy(*symref, (char *)buffer.buffer + 5, buffer.posn - 5); } else { diff --git a/imap-send.c b/imap-send.c index 3eaf025720..84df2fabb7 100644 --- a/imap-send.c +++ b/imap-send.c @@ -1192,7 +1192,7 @@ count_messages( msg_data_t *msg ) char *p = msg->data; while (1) { - if (!strncmp( "From ", p, 5 )) { + if (!prefixcmp(p, "From ")) { count++; p += 5; } @@ -1216,7 +1216,7 @@ split_msg( msg_data_t *all_msgs, msg_data_t *msg, int *ofs ) data = &all_msgs->data[ *ofs ]; msg->len = all_msgs->len - *ofs; - if (msg->len < 5 || strncmp( data, "From ", 5 )) + if (msg->len < 5 || prefixcmp(data, "From ")) return 0; p = strchr( data, '\n' ); @@ -1267,12 +1267,12 @@ git_imap_config(const char *key, const char *val) imap_folder = xstrdup( val ); } else if (!strcmp( "host", key )) { { - if (!strncmp( "imap:", val, 5 )) + if (!prefixcmp(val, "imap:")) val += 5; if (!server.port) server.port = 143; } - if (!strncmp( "//", val, 2 )) + if (!prefixcmp(val, "//")) val += 2; server.host = xstrdup( val ); } diff --git a/index-pack.c b/index-pack.c index f9177442af..9f8f0cad20 100644 --- a/index-pack.c +++ b/index-pack.c @@ -601,30 +601,23 @@ static void fix_unresolved_deltas(int nr_unresolved) struct delta_entry *d = sorted_by_pos[i]; void *data; unsigned long size; - char type[10]; - enum object_type obj_type; + enum object_type type; int j, first, last; if (objects[d->obj_no].real_type != OBJ_REF_DELTA) continue; - data = read_sha1_file(d->base.sha1, type, &size); + data = read_sha1_file(d->base.sha1, &type, &size); if (!data) continue; - if (!strcmp(type, blob_type)) obj_type = OBJ_BLOB; - else if (!strcmp(type, tree_type)) obj_type = OBJ_TREE; - else if (!strcmp(type, commit_type)) obj_type = OBJ_COMMIT; - else if (!strcmp(type, tag_type)) obj_type = OBJ_TAG; - else die("base object %s is of type '%s'", - sha1_to_hex(d->base.sha1), type); find_delta_children(&d->base, &first, &last); for (j = first; j <= last; j++) { struct object_entry *child = objects + deltas[j].obj_no; if (child->real_type == OBJ_REF_DELTA) - resolve_delta(child, data, size, obj_type); + resolve_delta(child, data, size, type); } - append_obj_to_pack(data, size, obj_type); + append_obj_to_pack(data, size, type); free(data); if (verbose) percent = display_progress(nr_resolved_deltas, @@ -855,9 +848,9 @@ int main(int argc, char **argv) fix_thin_pack = 1; } else if (!strcmp(arg, "--keep")) { keep_msg = ""; - } else if (!strncmp(arg, "--keep=", 7)) { + } else if (!prefixcmp(arg, "--keep=")) { keep_msg = arg + 7; - } else if (!strncmp(arg, "--pack_header=", 14)) { + } else if (!prefixcmp(arg, "--pack_header=")) { struct pack_header *hdr; char *c; diff --git a/merge-file.c b/merge-file.c index 69dc1ebbf7..748d15c0e0 100644 --- a/merge-file.c +++ b/merge-file.c @@ -7,12 +7,12 @@ static int 
fill_mmfile_blob(mmfile_t *f, struct blob *obj) { void *buf; unsigned long size; - char type[20]; + enum object_type type; - buf = read_sha1_file(obj->object.sha1, type, &size); + buf = read_sha1_file(obj->object.sha1, &type, &size); if (!buf) return -1; - if (strcmp(type, blob_type)) + if (type != OBJ_BLOB) return -1; f->ptr = buf; f->size = size; @@ -86,12 +86,12 @@ void *merge_file(struct blob *base, struct blob *our, struct blob *their, unsign * modified in the other branch! */ if (!our || !their) { - char type[20]; + enum object_type type; if (base) return NULL; if (!our) our = their; - return read_sha1_file(our->object.sha1, type, size); + return read_sha1_file(our->object.sha1, &type, size); } if (fill_mmfile_blob(&f1, our) < 0) diff --git a/merge-recursive.c b/merge-recursive.c index 397a7ad85b..87cd8709bb 100644 --- a/merge-recursive.c +++ b/merge-recursive.c @@ -560,14 +560,14 @@ static void update_file_flags(const unsigned char *sha, update_wd = 0; if (update_wd) { - char type[20]; + enum object_type type; void *buf; unsigned long size; - buf = read_sha1_file(sha, type, &size); + buf = read_sha1_file(sha, &type, &size); if (!buf) die("cannot read object %s '%s'", sha1_to_hex(sha), path); - if (strcmp(type, blob_type) != 0) + if (type != OBJ_BLOB) die("blob expected for %s '%s'", sha1_to_hex(sha), path); if (S_ISREG(mode)) { @@ -620,7 +620,7 @@ struct merge_file_info static void fill_mm(const unsigned char *sha1, mmfile_t *mm) { unsigned long size; - char type[20]; + enum object_type type; if (!hashcmp(sha1, null_sha1)) { mm->ptr = xstrdup(""); @@ -628,8 +628,8 @@ static void fill_mm(const unsigned char *sha1, mmfile_t *mm) return; } - mm->ptr = read_sha1_file(sha1, type, &size); - if (!mm->ptr || strcmp(type, blob_type)) + mm->ptr = read_sha1_file(sha1, &type, &size); + if (!mm->ptr || type != OBJ_BLOB) die("unable to read blob object %s", sha1_to_hex(sha1)); mm->size = size; } @@ -1213,7 +1213,7 @@ static int merge(struct commit *h1, tree->object.parsed = 1; tree->object.type = OBJ_TREE; - pretend_sha1_file(NULL, 0, tree_type, tree->object.sha1); + pretend_sha1_file(NULL, 0, OBJ_TREE, tree->object.sha1); merged_common_ancestors = make_virtual_commit(tree, "ancestor"); } diff --git a/merge-tree.c b/merge-tree.c index 692ede0e3d..b2867ba722 100644 --- a/merge-tree.c +++ b/merge-tree.c @@ -57,11 +57,11 @@ extern void *merge_file(struct blob *, struct blob *, struct blob *, unsigned lo static void *result(struct merge_list *entry, unsigned long *size) { - char type[20]; + enum object_type type; struct blob *base, *our, *their; if (!entry->stage) - return read_sha1_file(entry->blob->object.sha1, type, size); + return read_sha1_file(entry->blob->object.sha1, &type, size); base = NULL; if (entry->stage == 1) { base = entry->blob; @@ -80,10 +80,10 @@ static void *result(struct merge_list *entry, unsigned long *size) static void *origin(struct merge_list *entry, unsigned long *size) { - char type[20]; + enum object_type type; while (entry) { if (entry->stage == 2) - return read_sha1_file(entry->blob->object.sha1, type, size); + return read_sha1_file(entry->blob->object.sha1, &type, size); entry = entry->link; } return NULL; @@ -27,13 +27,13 @@ static int verify_object(unsigned char *sha1, const char *expected_type) { int ret = -1; - char type[100]; + enum object_type type; unsigned long size; - void *buffer = read_sha1_file(sha1, type, &size); + void *buffer = read_sha1_file(sha1, &type, &size); if (buffer) { - if (!strcmp(type, expected_type)) - ret = check_sha1_signature(sha1, buffer, 
size, type); + if (type == type_from_string(expected_type)) + ret = check_sha1_signature(sha1, buffer, size, expected_type); free(buffer); } return ret; @@ -95,7 +95,7 @@ int main(int ac, char **av) int len; char *ptr, *ntr; unsigned mode; - char type[20]; + enum object_type type; char *path; read_line(&sb, stdin, line_termination); @@ -115,11 +115,12 @@ int main(int ac, char **av) ntr[41] != '\t' || get_sha1_hex(ntr + 1, sha1)) die("input format error: %s", sb.buf); - if (sha1_object_info(sha1, type, NULL)) + type = sha1_object_info(sha1, NULL); + if (type < 0) die("object %s unavailable", sha1_to_hex(sha1)); *ntr++ = 0; /* now at the beginning of SHA1 */ - if (strcmp(ptr, type)) - die("object type %s mismatch (%s)", ptr, type); + if (type != type_from_string(ptr)) + die("object type %s mismatch (%s)", ptr, typename(type)); ntr += 41; /* at the beginning of name */ if (line_termination && ntr[0] == '"') path = unquote_c_style(ntr, NULL); @@ -18,11 +18,31 @@ struct object *get_indexed_object(unsigned int idx) return obj_hash[idx]; } -const char *type_names[] = { - "none", "commit", "tree", "blob", "tag", - "bad type 5", "bad type 6", "delta", "bad", +static const char *object_type_strings[] = { + NULL, /* OBJ_NONE = 0 */ + "commit", /* OBJ_COMMIT = 1 */ + "tree", /* OBJ_TREE = 2 */ + "blob", /* OBJ_BLOB = 3 */ + "tag", /* OBJ_TAG = 4 */ }; +const char *typename(unsigned int type) +{ + if (type >= ARRAY_SIZE(object_type_strings)) + return NULL; + return object_type_strings[type]; +} + +int type_from_string(const char *str) +{ + int i; + + for (i = 1; i < ARRAY_SIZE(object_type_strings); i++) + if (!strcmp(str, object_type_strings[i])) + return i; + die("invalid object type \"%s\"", str); +} + static unsigned int hash_obj(struct object *obj, unsigned int n) { unsigned int hash = *(unsigned int *)obj->sha1; @@ -100,24 +120,6 @@ void created_object(const unsigned char *sha1, struct object *obj) nr_objs++; } -struct object *lookup_object_type(const unsigned char *sha1, const char *type) -{ - if (!type) { - return lookup_unknown_object(sha1); - } else if (!strcmp(type, blob_type)) { - return &lookup_blob(sha1)->object; - } else if (!strcmp(type, tree_type)) { - return &lookup_tree(sha1)->object; - } else if (!strcmp(type, commit_type)) { - return &lookup_commit(sha1)->object; - } else if (!strcmp(type, tag_type)) { - return &lookup_tag(sha1)->object; - } else { - error("Unknown type %s", type); - return NULL; - } -} - union any_object { struct object object; struct commit commit; @@ -138,23 +140,23 @@ struct object *lookup_unknown_object(const unsigned char *sha1) return obj; } -struct object *parse_object_buffer(const unsigned char *sha1, const char *type, unsigned long size, void *buffer, int *eaten_p) +struct object *parse_object_buffer(const unsigned char *sha1, enum object_type type, unsigned long size, void *buffer, int *eaten_p) { struct object *obj; int eaten = 0; - if (!strcmp(type, blob_type)) { + if (type == OBJ_BLOB) { struct blob *blob = lookup_blob(sha1); parse_blob_buffer(blob, buffer, size); obj = &blob->object; - } else if (!strcmp(type, tree_type)) { + } else if (type == OBJ_TREE) { struct tree *tree = lookup_tree(sha1); obj = &tree->object; if (!tree->object.parsed) { parse_tree_buffer(tree, buffer, size); eaten = 1; } - } else if (!strcmp(type, commit_type)) { + } else if (type == OBJ_COMMIT) { struct commit *commit = lookup_commit(sha1); parse_commit_buffer(commit, buffer, size); if (!commit->buffer) { @@ -162,7 +164,7 @@ struct object *parse_object_buffer(const unsigned 
char *sha1, const char *type, eaten = 1; } obj = &commit->object; - } else if (!strcmp(type, tag_type)) { + } else if (type == OBJ_TAG) { struct tag *tag = lookup_tag(sha1); parse_tag_buffer(tag, buffer, size); obj = &tag->object; @@ -176,13 +178,13 @@ struct object *parse_object_buffer(const unsigned char *sha1, const char *type, struct object *parse_object(const unsigned char *sha1) { unsigned long size; - char type[20]; + enum object_type type; int eaten; - void *buffer = read_sha1_file(sha1, type, &size); + void *buffer = read_sha1_file(sha1, &type, &size); if (buffer) { struct object *obj; - if (check_sha1_signature(sha1, buffer, size, type) < 0) + if (check_sha1_signature(sha1, buffer, size, typename(type)) < 0) printf("sha1 mismatch %s\n", sha1_to_hex(sha1)); obj = parse_object_buffer(sha1, type, size, buffer, &eaten); @@ -36,24 +36,17 @@ struct object { }; extern int track_object_refs; -extern const char *type_names[9]; + +extern const char *typename(unsigned int type); +extern int type_from_string(const char *str); extern unsigned int get_max_object_index(void); extern struct object *get_indexed_object(unsigned int); - -static inline const char *typename(unsigned int type) -{ - return type_names[type > OBJ_BAD ? OBJ_BAD : type]; -} - extern struct object_refs *lookup_object_refs(struct object *); /** Internal only **/ struct object *lookup_object(const unsigned char *sha1); -/** Returns the object, having looked it up as being the given type. **/ -struct object *lookup_object_type(const unsigned char *sha1, const char *type); - void created_object(const unsigned char *sha1, struct object *obj); /** Returns the object, having parsed it to find out what it is. **/ @@ -63,7 +56,7 @@ struct object *parse_object(const unsigned char *sha1); * parsing it. eaten_p indicates if the object has a borrowed copy * of buffer and the caller should not free() it. */ -struct object *parse_object_buffer(const unsigned char *sha1, const char *type, unsigned long size, void *buffer, int *eaten_p); +struct object *parse_object_buffer(const unsigned char *sha1, enum object_type type, unsigned long size, void *buffer, int *eaten_p); /** Returns the object, with potentially excess memory allocated. 
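
This is the core of the change that ripples through the rest of the patch: object types now travel as enum object_type values rather than strings, with typename() and type_from_string() converting at the edges. A minimal sketch of what a caller of the reworked read_sha1_file() looks like after this series (hypothetical helper; object.h per the declarations shown above, cache.h assumed for the remaining identifiers):

    #include "cache.h"    /* read_sha1_file(), die(), sha1_to_hex(): assumed */
    #include "object.h"   /* typename(), per the declarations added above */

    static void *read_blob_or_die(const unsigned char *sha1, unsigned long *size)
    {
            enum object_type type;
            void *buf = read_sha1_file(sha1, &type, size);

            if (!buf)
                    die("unable to read object %s", sha1_to_hex(sha1));
            if (type != OBJ_BLOB)
                    die("blob expected for %s, got %s",
                        sha1_to_hex(sha1), typename(type));
            return buf;
    }

The practical gain is that a mistyped type string becomes an impossible enum value caught by the compiler instead of a silent strcmp() mismatch at run time.
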
**/ struct object *lookup_unknown_object(const unsigned char *sha1); diff --git a/pack-check.c b/pack-check.c index 08a9fd8dc0..f248ac8c7a 100644 --- a/pack-check.c +++ b/pack-check.c @@ -43,7 +43,7 @@ static int verify_packfile(struct packed_git *p, for (i = err = 0; i < nr_objects; i++) { unsigned char sha1[20]; void *data; - char type[20]; + enum object_type type; unsigned long size, offset; if (nth_packed_object_sha1(p, i, sha1)) @@ -51,13 +51,13 @@ static int verify_packfile(struct packed_git *p, offset = find_pack_entry_one(sha1, p); if (!offset) die("internal error pack-check find-pack-entry-one"); - data = unpack_entry(p, offset, type, &size); + data = unpack_entry(p, offset, &type, &size); if (!data) { err = error("cannot unpack %s from %s", sha1_to_hex(sha1), p->pack_name); continue; } - if (check_sha1_signature(sha1, data, size, type)) { + if (check_sha1_signature(sha1, data, size, typename(type))) { err = error("packed %s from %s is corrupt", sha1_to_hex(sha1), p->pack_name); free(data); @@ -82,7 +82,7 @@ static void show_pack_info(struct packed_git *p) for (i = 0; i < nr_objects; i++) { unsigned char sha1[20], base_sha1[20]; - char type[20]; + const char *type; unsigned long size; unsigned long store_size; unsigned long offset; @@ -94,9 +94,9 @@ static void show_pack_info(struct packed_git *p) if (!offset) die("internal error pack-check find-pack-entry-one"); - packed_object_info_detail(p, offset, type, &size, &store_size, - &delta_chain_length, - base_sha1); + type = packed_object_info_detail(p, offset, &size, &store_size, + &delta_chain_length, + base_sha1); printf("%s ", sha1_to_hex(sha1)); if (!delta_chain_length) printf("%-6s %lu %lu\n", type, size, offset); diff --git a/peek-remote.c b/peek-remote.c index ef3c76ce52..96bfac498b 100644 --- a/peek-remote.c +++ b/peek-remote.c @@ -35,11 +35,11 @@ int main(int argc, char **argv) char *arg = argv[i]; if (*arg == '-') { - if (!strncmp("--upload-pack=", arg, 14)) { + if (!prefixcmp(arg, "--upload-pack=")) { uploadpack = arg + 14; continue; } - if (!strncmp("--exec=", arg, 7)) { + if (!prefixcmp(arg, "--exec=")) { uploadpack = arg + 7; continue; } diff --git a/perl/Git.pm b/perl/Git.pm index f2c156cde9..b5b1cf5edc 100644 --- a/perl/Git.pm +++ b/perl/Git.pm @@ -516,6 +516,36 @@ sub config { } +=item config_boolean ( VARIABLE ) + +Retrieve the boolean configuration C<VARIABLE>. + +Must be called on a repository instance. + +This currently wraps command('config') so it is not so fast. + +=cut + +sub config_boolean { + my ($self, $var) = @_; + $self->repo_path() + or throw Error::Simple("not a repository"); + + try { + return $self->command_oneline('config', '--bool', '--get', + $var); + } catch Git::Error::Command with { + my $E = shift; + if ($E->value() == 1) { + # Key not found. 
+ return undef; + } else { + throw $E; + } + }; +} + + =item ident ( TYPE | IDENTSTR ) =item ident_person ( TYPE | IDENTSTR | IDENTARRAY ) diff --git a/read-cache.c b/read-cache.c index 605b352396..7a104e3512 100644 --- a/read-cache.c +++ b/read-cache.c @@ -59,7 +59,7 @@ static int ce_compare_data(struct cache_entry *ce, struct stat *st) if (fd >= 0) { unsigned char sha1[20]; - if (!index_fd(sha1, fd, st, 0, NULL)) + if (!index_fd(sha1, fd, st, 0, OBJ_BLOB, ce->name)) match = hashcmp(sha1, ce->sha1); /* index_fd() closed the file descriptor already */ } @@ -72,7 +72,7 @@ static int ce_compare_link(struct cache_entry *ce, unsigned long expected_size) char *target; void *buffer; unsigned long size; - char type[10]; + enum object_type type; int len; target = xmalloc(expected_size); @@ -81,7 +81,7 @@ static int ce_compare_link(struct cache_entry *ce, unsigned long expected_size) free(target); return -1; } - buffer = read_sha1_file(ce->sha1, type, &size); + buffer = read_sha1_file(ce->sha1, &type, &size); if (!buffer) { free(target); return -1; diff --git a/receive-pack.c b/receive-pack.c index 7311c822dd..7f1dcc045c 100644 --- a/receive-pack.c +++ b/receive-pack.c @@ -109,7 +109,7 @@ static int update(struct command *cmd) struct ref_lock *lock; cmd->error_string = NULL; - if (!strncmp(name, "refs/", 5) && check_ref_format(name + 5)) { + if (!prefixcmp(name, "refs/") && check_ref_format(name + 5)) { cmd->error_string = "funny refname"; return error("refusing to create funny ref '%s' locally", name); @@ -125,7 +125,7 @@ static int update(struct command *cmd) } if (deny_non_fast_forwards && !is_null_sha1(new_sha1) && !is_null_sha1(old_sha1) && - !strncmp(name, "refs/heads/", 11)) { + !prefixcmp(name, "refs/heads/")) { struct commit *old_commit, *new_commit; struct commit_list *bases, *ent; @@ -828,8 +828,8 @@ int rename_ref(const char *oldref, const char *newref, const char *logmsg) goto rollback; } - if (!strncmp(oldref, "refs/heads/", 11) && - !strncmp(newref, "refs/heads/", 11)) { + if (!prefixcmp(oldref, "refs/heads/") && + !prefixcmp(newref, "refs/heads/")) { char oldsection[1024], newsection[1024]; snprintf(oldsection, 1024, "branch.%s", oldref + 11); @@ -894,8 +894,8 @@ static int log_ref_write(const char *ref_name, const unsigned char *old_sha1, log_file = git_path("logs/%s", ref_name); if (log_all_ref_updates && - (!strncmp(ref_name, "refs/heads/", 11) || - !strncmp(ref_name, "refs/remotes/", 13) || + (!prefixcmp(ref_name, "refs/heads/") || + !prefixcmp(ref_name, "refs/remotes/") || !strcmp(ref_name, "HEAD"))) { if (safe_create_leading_directories(log_file) < 0) return error("unable to create directory for %s", diff --git a/revision.c b/revision.c index 76499dcf38..b84c066cba 100644 --- a/revision.c +++ b/revision.c @@ -815,11 +815,11 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch const char *arg = argv[i]; if (*arg == '-') { int opts; - if (!strncmp(arg, "--max-count=", 12)) { + if (!prefixcmp(arg, "--max-count=")) { revs->max_count = atoi(arg + 12); continue; } - if (!strncmp(arg, "--skip=", 7)) { + if (!prefixcmp(arg, "--skip=")) { revs->skip_count = atoi(arg + 7); continue; } @@ -834,31 +834,31 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch revs->max_count = atoi(argv[++i]); continue; } - if (!strncmp(arg,"-n",2)) { + if (!prefixcmp(arg, "-n")) { revs->max_count = atoi(arg + 2); continue; } - if (!strncmp(arg, "--max-age=", 10)) { + if (!prefixcmp(arg, "--max-age=")) { revs->max_age = atoi(arg + 10); continue; 
} - if (!strncmp(arg, "--since=", 8)) { + if (!prefixcmp(arg, "--since=")) { revs->max_age = approxidate(arg + 8); continue; } - if (!strncmp(arg, "--after=", 8)) { + if (!prefixcmp(arg, "--after=")) { revs->max_age = approxidate(arg + 8); continue; } - if (!strncmp(arg, "--min-age=", 10)) { + if (!prefixcmp(arg, "--min-age=")) { revs->min_age = atoi(arg + 10); continue; } - if (!strncmp(arg, "--before=", 9)) { + if (!prefixcmp(arg, "--before=")) { revs->min_age = approxidate(arg + 9); continue; } - if (!strncmp(arg, "--until=", 8)) { + if (!prefixcmp(arg, "--until=")) { revs->min_age = approxidate(arg + 8); continue; } @@ -946,7 +946,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch revs->num_ignore_packed = 0; continue; } - if (!strncmp(arg, "--unpacked=", 11)) { + if (!prefixcmp(arg, "--unpacked=")) { revs->unpacked = 1; add_ignore_packed(revs, arg+11); continue; @@ -982,7 +982,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch revs->verbose_header = 1; continue; } - if (!strncmp(arg, "--pretty", 8)) { + if (!prefixcmp(arg, "--pretty")) { revs->verbose_header = 1; revs->commit_format = get_commit_format(arg+8); continue; @@ -1007,7 +1007,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch revs->abbrev = DEFAULT_ABBREV; continue; } - if (!strncmp(arg, "--abbrev=", 9)) { + if (!prefixcmp(arg, "--abbrev=")) { revs->abbrev = strtoul(arg + 9, NULL, 10); if (revs->abbrev < MINIMUM_ABBREV) revs->abbrev = MINIMUM_ABBREV; @@ -1036,15 +1036,15 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch /* * Grepping the commit log */ - if (!strncmp(arg, "--author=", 9)) { + if (!prefixcmp(arg, "--author=")) { add_header_grep(revs, "author", arg+9); continue; } - if (!strncmp(arg, "--committer=", 12)) { + if (!prefixcmp(arg, "--committer=")) { add_header_grep(revs, "committer", arg+12); continue; } - if (!strncmp(arg, "--grep=", 7)) { + if (!prefixcmp(arg, "--grep=")) { add_message_grep(revs, arg+7); continue; } @@ -1052,7 +1052,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch all_match = 1; continue; } - if (!strncmp(arg, "--encoding=", 11)) { + if (!prefixcmp(arg, "--encoding=")) { arg += 11; if (strcmp(arg, "none")) git_log_output_encoding = strdup(arg); @@ -1060,6 +1060,10 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch git_log_output_encoding = ""; continue; } + if (!strcmp(arg, "--reverse")) { + revs->reverse ^= 1; + continue; + } opts = diff_opt_parse(&revs->diffopt, argv+i, argc-i); if (opts > 0) { @@ -1231,9 +1235,15 @@ static struct commit *get_revision_1(struct rev_info *revs) */ if (!revs->limited) { if (revs->max_age != -1 && - (commit->date < revs->max_age)) - continue; - add_parents_to_list(revs, commit, &revs->commits); + (commit->date < revs->max_age)) { + if (revs->boundary) + commit->object.flags |= + BOUNDARY_SHOW | BOUNDARY; + else + continue; + } else + add_parents_to_list(revs, commit, + &revs->commits); } if (commit->object.flags & SHOWN) continue; @@ -1288,6 +1298,40 @@ struct commit *get_revision(struct rev_info *revs) { struct commit *c = NULL; + if (revs->reverse) { + struct commit_list *list; + + /* + * rev_info.reverse is used to note the fact that we + * want to output the list of revisions in reverse + * order. 
To accomplish this goal, reverse can have + * different values: + * + * 0 do nothing + * 1 reverse the list + * 2 internal use: we have already obtained and + * reversed the list, now we only need to yield + * its items. + */ + + if (revs->reverse == 1) { + revs->reverse = 0; + list = NULL; + while ((c = get_revision(revs))) + commit_list_insert(c, &list); + revs->commits = list; + revs->reverse = 2; + } + + if (!revs->commits) + return NULL; + c = revs->commits->item; + list = revs->commits->next; + free(revs->commits); + revs->commits = list; + return c; + } + if (0 < revs->skip_count) { while ((c = get_revision_1(revs)) != NULL) { if (revs->skip_count-- <= 0) @@ -1300,7 +1344,18 @@ struct commit *get_revision(struct rev_info *revs) case -1: break; case 0: - return NULL; + if (revs->boundary) { + struct commit_list *list = revs->commits; + while (list) { + list->item->object.flags |= + BOUNDARY_SHOW | BOUNDARY; + list = list->next; + } + /* all remaining commits are boundary commits */ + revs->max_count = -1; + revs->limited = 1; + } else + return NULL; default: revs->max_count--; } diff --git a/revision.h b/revision.h index d93481f68f..5fec1846f3 100644 --- a/revision.h +++ b/revision.h @@ -42,7 +42,8 @@ struct rev_info { unpacked:1, /* see also ignore_packed below */ boundary:1, left_right:1, - parents:1; + parents:1, + reverse:2; /* Diff flags */ unsigned int diff:1, diff --git a/send-pack.c b/send-pack.c index 33e69dbe18..512b660e99 100644 --- a/send-pack.c +++ b/send-pack.c @@ -379,11 +379,11 @@ int main(int argc, char **argv) char *arg = *argv; if (*arg == '-') { - if (!strncmp(arg, "--receive-pack=", 15)) { + if (!prefixcmp(arg, "--receive-pack=")) { receivepack = arg + 15; continue; } - if (!strncmp(arg, "--exec=", 7)) { + if (!prefixcmp(arg, "--exec=")) { receivepack = arg + 7; continue; } @@ -251,7 +251,7 @@ const char *setup_git_directory_gently(int *nongit_ok) offset++; cwd[len++] = '/'; cwd[len] = 0; - inside_git_dir = !strncmp(cwd + offset, ".git/", 5); + inside_git_dir = !prefixcmp(cwd + offset, ".git/"); return cwd + offset; } diff --git a/sha1_file.c b/sha1_file.c index 2c870314d5..6d0a72ed09 100644 --- a/sha1_file.c +++ b/sha1_file.c @@ -952,7 +952,7 @@ static int unpack_sha1_header(z_stream *stream, unsigned char *map, unsigned lon /* And generate the fake traditional header */ stream->total_out = 1 + snprintf(buffer, bufsiz, "%s %lu", - type_names[type], size); + typename(type), size); return 0; } @@ -983,26 +983,27 @@ static void *unpack_sha1_rest(z_stream *stream, void *buffer, unsigned long size * too permissive for what we want to check. So do an anal * object header parse by hand. */ -static int parse_sha1_header(char *hdr, char *type, unsigned long *sizep) +static int parse_sha1_header(const char *hdr, unsigned long *sizep) { + char type[10]; int i; unsigned long size; /* * The type can be at most ten bytes (including the * terminating '\0' that we add), and is followed by - * a space. + * a space. */ - i = 10; + i = 0; for (;;) { char c = *hdr++; if (c == ' ') break; - if (!--i) + type[i++] = c; + if (i >= sizeof(type)) return -1; - *type++ = c; } - *type = 0; + type[i] = 0; /* * The length must follow immediately, and be in canonical @@ -1025,17 +1026,17 @@ static int parse_sha1_header(char *hdr, char *type, unsigned long *sizep) /* * The length must be followed by a zero byte */ - return *hdr ? -1 : 0; + return *hdr ? 
-1 : type_from_string(type); } -void * unpack_sha1_file(void *map, unsigned long mapsize, char *type, unsigned long *size) +void * unpack_sha1_file(void *map, unsigned long mapsize, enum object_type *type, unsigned long *size) { int ret; z_stream stream; char hdr[8192]; ret = unpack_sha1_header(&stream, map, mapsize, hdr, sizeof(hdr)); - if (ret < Z_OK || parse_sha1_header(hdr, type, size) < 0) + if (ret < Z_OK || (*type = parse_sha1_header(hdr, size)) < 0) return NULL; return unpack_sha1_rest(&stream, hdr, *size); @@ -1043,12 +1044,11 @@ void * unpack_sha1_file(void *map, unsigned long mapsize, char *type, unsigned l static unsigned long get_delta_base(struct packed_git *p, struct pack_window **w_curs, - unsigned long offset, - enum object_type kind, - unsigned long delta_obj_offset, - unsigned long *base_obj_offset) + unsigned long *curpos, + enum object_type type, + unsigned long delta_obj_offset) { - unsigned char *base_info = use_pack(p, w_curs, offset, NULL); + unsigned char *base_info = use_pack(p, w_curs, *curpos, NULL); unsigned long base_offset; /* use_pack() assured us we have [base_info, base_info + 20) @@ -1057,7 +1057,7 @@ static unsigned long get_delta_base(struct packed_git *p, * that is assured. An OFS_DELTA longer than the hash size * is stupid, as then a REF_DELTA would be smaller to store. */ - if (kind == OBJ_OFS_DELTA) { + if (type == OBJ_OFS_DELTA) { unsigned used = 0; unsigned char c = base_info[used++]; base_offset = c & 127; @@ -1071,49 +1071,43 @@ static unsigned long get_delta_base(struct packed_git *p, base_offset = delta_obj_offset - base_offset; if (base_offset >= delta_obj_offset) die("delta base offset out of bound"); - offset += used; - } else if (kind == OBJ_REF_DELTA) { + *curpos += used; + } else if (type == OBJ_REF_DELTA) { /* The base entry _must_ be in the same pack */ base_offset = find_pack_entry_one(base_info, p); if (!base_offset) die("failed to find delta-pack base object %s", sha1_to_hex(base_info)); - offset += 20; + *curpos += 20; } else die("I am totally screwed"); - *base_obj_offset = base_offset; - return offset; + return base_offset; } /* forward declaration for a mutually recursive function */ static int packed_object_info(struct packed_git *p, unsigned long offset, - char *type, unsigned long *sizep); + unsigned long *sizep); static int packed_delta_info(struct packed_git *p, struct pack_window **w_curs, - unsigned long offset, - enum object_type kind, + unsigned long curpos, + enum object_type type, unsigned long obj_offset, - char *type, unsigned long *sizep) { unsigned long base_offset; - offset = get_delta_base(p, w_curs, offset, kind, - obj_offset, &base_offset); + base_offset = get_delta_base(p, w_curs, &curpos, type, obj_offset); + type = packed_object_info(p, base_offset, NULL); /* We choose to only get the type of the base object and * ignore potentially corrupt pack file that expects the delta * based on a base with a wrong size. This saves tons of * inflate() calls. 
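
packed_delta_info(), unpack_object_header(), packed_object_info() and friends all switch to the same shape here: instead of filling a caller-supplied type buffer and returning a status, they return the enum object_type itself, with sizes handed back through pointers, and the public sha1_object_info() gets the same treatment further down. A schematic before/after for a hypothetical caller (the two fragments obviously target different versions of the API):

    /* before this patch: status return, type written into a string buffer */
    static void show_type_old(const unsigned char *sha1)
    {
            char type[20];
            unsigned long size;
            if (sha1_object_info(sha1, type, &size))
                    die("object %s unavailable", sha1_to_hex(sha1));
            printf("%s %lu\n", type, size);
    }

    /* after this patch: the type is the return value, negative on error */
    static void show_type_new(const unsigned char *sha1)
    {
            unsigned long size;
            enum object_type type = sha1_object_info(sha1, &size);
            if (type < 0)
                    die("object %s unavailable", sha1_to_hex(sha1));
            printf("%s %lu\n", typename(type), size);
    }
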
*/ - if (packed_object_info(p, base_offset, type, NULL)) - die("cannot get info for delta-pack base"); - if (sizep) { const unsigned char *data; unsigned char delta_head[20], *in; - unsigned long result_size; z_stream stream; int st; @@ -1123,10 +1117,10 @@ static int packed_delta_info(struct packed_git *p, inflateInit(&stream); do { - in = use_pack(p, w_curs, offset, &stream.avail_in); + in = use_pack(p, w_curs, curpos, &stream.avail_in); stream.next_in = in; st = inflate(&stream, Z_FINISH); - offset += stream.next_in - in; + curpos += stream.next_in - in; } while ((st == Z_OK || st == Z_BUF_ERROR) && stream.total_out < sizeof(delta_head)); inflateEnd(&stream); @@ -1143,21 +1137,21 @@ static int packed_delta_info(struct packed_git *p, get_delta_hdr_size(&data, delta_head+sizeof(delta_head)); /* Read the result size */ - result_size = get_delta_hdr_size(&data, delta_head+sizeof(delta_head)); - *sizep = result_size; + *sizep = get_delta_hdr_size(&data, delta_head+sizeof(delta_head)); } - return 0; + + return type; } -static unsigned long unpack_object_header(struct packed_git *p, - struct pack_window **w_curs, - unsigned long offset, - enum object_type *type, - unsigned long *sizep) +static int unpack_object_header(struct packed_git *p, + struct pack_window **w_curs, + unsigned long *curpos, + unsigned long *sizep) { unsigned char *base; unsigned int left; unsigned long used; + enum object_type type; /* use_pack() assures us we have [base, base + 20) available * as a range that we can look at at. (Its actually the hash @@ -1165,100 +1159,95 @@ static unsigned long unpack_object_header(struct packed_git *p, * the maximum deflated object size is 2^137, which is just * insane, so we know won't exceed what we have been given. */ - base = use_pack(p, w_curs, offset, &left); - used = unpack_object_header_gently(base, left, type, sizep); + base = use_pack(p, w_curs, *curpos, &left); + used = unpack_object_header_gently(base, left, &type, sizep); if (!used) die("object offset outside of pack file"); + *curpos += used; - return offset + used; + return type; } -void packed_object_info_detail(struct packed_git *p, - unsigned long offset, - char *type, - unsigned long *size, - unsigned long *store_size, - unsigned int *delta_chain_length, - unsigned char *base_sha1) +const char *packed_object_info_detail(struct packed_git *p, + unsigned long obj_offset, + unsigned long *size, + unsigned long *store_size, + unsigned int *delta_chain_length, + unsigned char *base_sha1) { struct pack_window *w_curs = NULL; - unsigned long obj_offset, val; + unsigned long curpos, dummy; unsigned char *next_sha1; - enum object_type kind; + enum object_type type; *delta_chain_length = 0; - obj_offset = offset; - offset = unpack_object_header(p, &w_curs, offset, &kind, size); + curpos = obj_offset; + type = unpack_object_header(p, &w_curs, &curpos, size); for (;;) { - switch (kind) { + switch (type) { default: die("pack %s contains unknown object type %d", - p->pack_name, kind); + p->pack_name, type); case OBJ_COMMIT: case OBJ_TREE: case OBJ_BLOB: case OBJ_TAG: - strcpy(type, type_names[kind]); *store_size = 0; /* notyet */ unuse_pack(&w_curs); - return; + return typename(type); case OBJ_OFS_DELTA: - get_delta_base(p, &w_curs, offset, kind, - obj_offset, &offset); + obj_offset = get_delta_base(p, &w_curs, &curpos, type, obj_offset); if (*delta_chain_length == 0) { - /* TODO: find base_sha1 as pointed by offset */ + /* TODO: find base_sha1 as pointed by curpos */ } break; case OBJ_REF_DELTA: - next_sha1 = use_pack(p, &w_curs, 
offset, NULL); + next_sha1 = use_pack(p, &w_curs, curpos, NULL); if (*delta_chain_length == 0) hashcpy(base_sha1, next_sha1); - offset = find_pack_entry_one(next_sha1, p); + obj_offset = find_pack_entry_one(next_sha1, p); break; } - obj_offset = offset; - offset = unpack_object_header(p, &w_curs, offset, &kind, &val); (*delta_chain_length)++; + curpos = obj_offset; + type = unpack_object_header(p, &w_curs, &curpos, &dummy); } } -static int packed_object_info(struct packed_git *p, unsigned long offset, - char *type, unsigned long *sizep) +static int packed_object_info(struct packed_git *p, unsigned long obj_offset, + unsigned long *sizep) { struct pack_window *w_curs = NULL; - unsigned long size, obj_offset = offset; - enum object_type kind; - int r; + unsigned long size, curpos = obj_offset; + enum object_type type; - offset = unpack_object_header(p, &w_curs, offset, &kind, &size); + type = unpack_object_header(p, &w_curs, &curpos, &size); - switch (kind) { + switch (type) { case OBJ_OFS_DELTA: case OBJ_REF_DELTA: - r = packed_delta_info(p, &w_curs, offset, kind, - obj_offset, type, sizep); - unuse_pack(&w_curs); - return r; + type = packed_delta_info(p, &w_curs, curpos, + type, obj_offset, sizep); + break; case OBJ_COMMIT: case OBJ_TREE: case OBJ_BLOB: case OBJ_TAG: - strcpy(type, type_names[kind]); - unuse_pack(&w_curs); + if (sizep) + *sizep = size; break; default: die("pack %s contains unknown object type %d", - p->pack_name, kind); + p->pack_name, type); } - if (sizep) - *sizep = size; - return 0; + unuse_pack(&w_curs); + return type; } static void *unpack_compressed_entry(struct packed_git *p, struct pack_window **w_curs, - unsigned long offset, + unsigned long curpos, unsigned long size) { int st; @@ -1273,10 +1262,10 @@ static void *unpack_compressed_entry(struct packed_git *p, inflateInit(&stream); do { - in = use_pack(p, w_curs, offset, &stream.avail_in); + in = use_pack(p, w_curs, curpos, &stream.avail_in); stream.next_in = in; st = inflate(&stream, Z_FINISH); - offset += stream.next_in - in; + curpos += stream.next_in - in; } while (st == Z_OK || st == Z_BUF_ERROR); inflateEnd(&stream); if ((st != Z_STREAM_END) || stream.total_out != size) { @@ -1289,63 +1278,57 @@ static void *unpack_compressed_entry(struct packed_git *p, static void *unpack_delta_entry(struct packed_git *p, struct pack_window **w_curs, - unsigned long offset, + unsigned long curpos, unsigned long delta_size, - enum object_type kind, unsigned long obj_offset, - char *type, + enum object_type *type, unsigned long *sizep) { void *delta_data, *result, *base; - unsigned long result_size, base_size, base_offset; + unsigned long base_size, base_offset; - offset = get_delta_base(p, w_curs, offset, kind, - obj_offset, &base_offset); + base_offset = get_delta_base(p, w_curs, &curpos, *type, obj_offset); base = unpack_entry(p, base_offset, type, &base_size); if (!base) die("failed to read delta base object at %lu from %s", base_offset, p->pack_name); - delta_data = unpack_compressed_entry(p, w_curs, offset, delta_size); + delta_data = unpack_compressed_entry(p, w_curs, curpos, delta_size); result = patch_delta(base, base_size, delta_data, delta_size, - &result_size); + sizep); if (!result) die("failed to apply delta"); free(delta_data); free(base); - *sizep = result_size; return result; } -void *unpack_entry(struct packed_git *p, unsigned long offset, - char *type, unsigned long *sizep) +void *unpack_entry(struct packed_git *p, unsigned long obj_offset, + enum object_type *type, unsigned long *sizep) { struct 
pack_window *w_curs = NULL; - unsigned long size, obj_offset = offset; - enum object_type kind; - void *retval; + unsigned long curpos = obj_offset; + void *data; - offset = unpack_object_header(p, &w_curs, offset, &kind, &size); - switch (kind) { + *type = unpack_object_header(p, &w_curs, &curpos, sizep); + switch (*type) { case OBJ_OFS_DELTA: case OBJ_REF_DELTA: - retval = unpack_delta_entry(p, &w_curs, offset, size, - kind, obj_offset, type, sizep); + data = unpack_delta_entry(p, &w_curs, curpos, *sizep, + obj_offset, type, sizep); break; case OBJ_COMMIT: case OBJ_TREE: case OBJ_BLOB: case OBJ_TAG: - strcpy(type, type_names[kind]); - *sizep = size; - retval = unpack_compressed_entry(p, &w_curs, offset, size); + data = unpack_compressed_entry(p, &w_curs, curpos, *sizep); break; default: - die("unknown object type %i in %s", kind, p->pack_name); + die("unknown object type %i in %s", *type, p->pack_name); } unuse_pack(&w_curs); - return retval; + return data; } int num_packed_objects(const struct packed_git *p) @@ -1452,16 +1435,16 @@ struct packed_git *find_sha1_pack(const unsigned char *sha1, return p; } return NULL; - + } -static int sha1_loose_object_info(const unsigned char *sha1, char *type, unsigned long *sizep) +static int sha1_loose_object_info(const unsigned char *sha1, unsigned long *sizep) { int status; unsigned long mapsize, size; void *map; z_stream stream; - char hdr[128]; + char hdr[32]; map = map_sha1_file(sha1, &mapsize); if (!map) @@ -1469,31 +1452,29 @@ static int sha1_loose_object_info(const unsigned char *sha1, char *type, unsigne if (unpack_sha1_header(&stream, map, mapsize, hdr, sizeof(hdr)) < 0) status = error("unable to unpack %s header", sha1_to_hex(sha1)); - if (parse_sha1_header(hdr, type, &size) < 0) + else if ((status = parse_sha1_header(hdr, &size)) < 0) status = error("unable to parse %s header", sha1_to_hex(sha1)); - else { - status = 0; - if (sizep) - *sizep = size; - } + else if (sizep) + *sizep = size; inflateEnd(&stream); munmap(map, mapsize); return status; } -int sha1_object_info(const unsigned char *sha1, char *type, unsigned long *sizep) +int sha1_object_info(const unsigned char *sha1, unsigned long *sizep) { struct pack_entry e; if (!find_pack_entry(sha1, &e, NULL)) { reprepare_packed_git(); if (!find_pack_entry(sha1, &e, NULL)) - return sha1_loose_object_info(sha1, type, sizep); + return sha1_loose_object_info(sha1, sizep); } - return packed_object_info(e.p, e.offset, type, sizep); + return packed_object_info(e.p, e.offset, sizep); } -static void *read_packed_sha1(const unsigned char *sha1, char *type, unsigned long *size) +static void *read_packed_sha1(const unsigned char *sha1, + enum object_type *type, unsigned long *size) { struct pack_entry e; @@ -1511,7 +1492,7 @@ static void *read_packed_sha1(const unsigned char *sha1, char *type, unsigned lo */ static struct cached_object { unsigned char sha1[20]; - const char *type; + enum object_type type; void *buf; unsigned long size; } *cached_objects; @@ -1529,11 +1510,12 @@ static struct cached_object *find_cached_object(const unsigned char *sha1) return NULL; } -int pretend_sha1_file(void *buf, unsigned long len, const char *type, unsigned char *sha1) +int pretend_sha1_file(void *buf, unsigned long len, enum object_type type, + unsigned char *sha1) { struct cached_object *co; - hash_sha1_file(buf, len, type, sha1); + hash_sha1_file(buf, len, typename(type), sha1); if (has_sha1_file(sha1) || find_cached_object(sha1)) return 0; if (cached_object_alloc <= cached_object_nr) { @@ -1544,14 +1526,15 @@ 
int pretend_sha1_file(void *buf, unsigned long len, const char *type, unsigned c } co = &cached_objects[cached_object_nr++]; co->size = len; - co->type = strdup(type); + co->type = type; co->buf = xmalloc(len); memcpy(co->buf, buf, len); hashcpy(co->sha1, sha1); return 0; } -void *read_sha1_file(const unsigned char *sha1, char *type, unsigned long *size) +void *read_sha1_file(const unsigned char *sha1, enum object_type *type, + unsigned long *size) { unsigned long mapsize; void *map, *buf; @@ -1562,7 +1545,7 @@ void *read_sha1_file(const unsigned char *sha1, char *type, unsigned long *size) buf = xmalloc(co->size + 1); memcpy(buf, co->buf, co->size); ((char*)buf)[co->size] = 0; - strcpy(type, co->type); + *type = co->type; *size = co->size; return buf; } @@ -1581,33 +1564,34 @@ void *read_sha1_file(const unsigned char *sha1, char *type, unsigned long *size) } void *read_object_with_reference(const unsigned char *sha1, - const char *required_type, + const char *required_type_name, unsigned long *size, unsigned char *actual_sha1_return) { - char type[20]; + enum object_type type, required_type; void *buffer; unsigned long isize; unsigned char actual_sha1[20]; + required_type = type_from_string(required_type_name); hashcpy(actual_sha1, sha1); while (1) { int ref_length = -1; const char *ref_type = NULL; - buffer = read_sha1_file(actual_sha1, type, &isize); + buffer = read_sha1_file(actual_sha1, &type, &isize); if (!buffer) return NULL; - if (!strcmp(type, required_type)) { + if (type == required_type) { *size = isize; if (actual_sha1_return) hashcpy(actual_sha1_return, actual_sha1); return buffer; } /* Handle references */ - else if (!strcmp(type, commit_type)) + else if (type == OBJ_COMMIT) ref_type = "tree "; - else if (!strcmp(type, tag_type)) + else if (type == OBJ_TAG) ref_type = "object "; else { free(buffer); @@ -1628,12 +1612,12 @@ void *read_object_with_reference(const unsigned char *sha1, static void write_sha1_file_prepare(void *buf, unsigned long len, const char *type, unsigned char *sha1, - unsigned char *hdr, int *hdrlen) + char *hdr, int *hdrlen) { SHA_CTX c; /* Generate the header */ - *hdrlen = sprintf((char *)hdr, "%s %lu", type, len)+1; + *hdrlen = sprintf(hdr, "%s %lu", type, len)+1; /* Sha1.. 
*/ SHA1_Init(&c); @@ -1740,33 +1724,24 @@ static int write_binary_header(unsigned char *hdr, enum object_type type, unsign static void setup_object_header(z_stream *stream, const char *type, unsigned long len) { - int obj_type, hdr; + int obj_type, hdrlen; if (use_legacy_headers) { while (deflate(stream, 0) == Z_OK) /* nothing */; return; } - if (!strcmp(type, blob_type)) - obj_type = OBJ_BLOB; - else if (!strcmp(type, tree_type)) - obj_type = OBJ_TREE; - else if (!strcmp(type, commit_type)) - obj_type = OBJ_COMMIT; - else if (!strcmp(type, tag_type)) - obj_type = OBJ_TAG; - else - die("trying to generate bogus object of type '%s'", type); - hdr = write_binary_header(stream->next_out, obj_type, len); - stream->total_out = hdr; - stream->next_out += hdr; - stream->avail_out -= hdr; + obj_type = type_from_string(type); + hdrlen = write_binary_header(stream->next_out, obj_type, len); + stream->total_out = hdrlen; + stream->next_out += hdrlen; + stream->avail_out -= hdrlen; } int hash_sha1_file(void *buf, unsigned long len, const char *type, unsigned char *sha1) { - unsigned char hdr[50]; + char hdr[32]; int hdrlen; write_sha1_file_prepare(buf, len, type, sha1, hdr, &hdrlen); return 0; @@ -1780,7 +1755,7 @@ int write_sha1_file(void *buf, unsigned long len, const char *type, unsigned cha unsigned char sha1[20]; char *filename; static char tmpfile[PATH_MAX]; - unsigned char hdr[50]; + char hdr[32]; int fd, hdrlen; /* Normally if we have it in the pack then we do not bother writing @@ -1827,7 +1802,7 @@ int write_sha1_file(void *buf, unsigned long len, const char *type, unsigned cha stream.avail_out = size; /* First header.. */ - stream.next_in = hdr; + stream.next_in = (unsigned char *)hdr; stream.avail_in = hdrlen; setup_object_header(&stream, type, len); @@ -1858,17 +1833,17 @@ static void *repack_object(const unsigned char *sha1, unsigned long *objsize) z_stream stream; unsigned char *unpacked; unsigned long len; - char type[20]; - char hdr[50]; + enum object_type type; + char hdr[32]; int hdrlen; void *buf; /* need to unpack and recompress it by itself */ - unpacked = read_packed_sha1(sha1, type, &len); + unpacked = read_packed_sha1(sha1, &type, &len); if (!unpacked) error("cannot read sha1_file for %s", sha1_to_hex(sha1)); - hdrlen = sprintf(hdr, "%s %lu", type, len) + 1; + hdrlen = sprintf(hdr, "%s %lu", typename(type), len) + 1; /* Set it up */ memset(&stream, 0, sizeof(stream)); @@ -2078,11 +2053,12 @@ int index_pipe(unsigned char *sha1, int fd, const char *type, int write_object) return ret; } -int index_fd(unsigned char *sha1, int fd, struct stat *st, int write_object, const char *type) +int index_fd(unsigned char *sha1, int fd, struct stat *st, int write_object, + enum object_type type, const char *path) { unsigned long size = st->st_size; void *buf; - int ret; + int ret, re_allocated = 0; buf = ""; if (size) @@ -2090,12 +2066,31 @@ int index_fd(unsigned char *sha1, int fd, struct stat *st, int write_object, con close(fd); if (!type) - type = blob_type; - /* FIXME: CRLF -> LF conversion here for blobs! We'll need the path! 
*/ + type = OBJ_BLOB; + + /* + * Convert blobs to git internal format + */ + if ((type == OBJ_BLOB) && S_ISREG(st->st_mode)) { + unsigned long nsize = size; + char *nbuf = buf; + if (convert_to_git(path, &nbuf, &nsize)) { + if (size) + munmap(buf, size); + size = nsize; + buf = nbuf; + re_allocated = 1; + } + } + if (write_object) - ret = write_sha1_file(buf, size, type, sha1); + ret = write_sha1_file(buf, size, typename(type), sha1); else - ret = hash_sha1_file(buf, size, type, sha1); + ret = hash_sha1_file(buf, size, typename(type), sha1); + if (re_allocated) { + free(buf); + return ret; + } if (size) munmap(buf, size); return ret; @@ -2112,7 +2107,7 @@ int index_path(unsigned char *sha1, const char *path, struct stat *st, int write if (fd < 0) return error("open(\"%s\"): %s", path, strerror(errno)); - if (index_fd(sha1, fd, st, write_object, NULL) < 0) + if (index_fd(sha1, fd, st, write_object, OBJ_BLOB, path) < 0) return error("%s: failed to insert into database", path); break; @@ -8,7 +8,7 @@ static int do_generic_cmd(const char *me, char *arg) if (!arg || !(arg = sq_dequote(arg))) die("bad argument"); - if (strncmp(me, "git-", 4)) + if (prefixcmp(me, "git-")) die("bad command"); my_argv[0] = me + 4; diff --git a/t/lib-git-svn.sh b/t/lib-git-svn.sh index 67d08cf740..f6fe78cd27 100644 --- a/t/lib-git-svn.sh +++ b/t/lib-git-svn.sh @@ -42,9 +42,9 @@ then exit fi +rawsvnrepo="$svnrepo" svnrepo="file://$svnrepo" - poke() { - perl -e '@x = stat($ARGV[0]); utime($x[8], $x[9] + 1, $ARGV[0])' "$1" + test-chmtime +1 "$1" } diff --git a/t/t0020-crlf.sh b/t/t0020-crlf.sh new file mode 100755 index 0000000000..723b29ad17 --- /dev/null +++ b/t/t0020-crlf.sh @@ -0,0 +1,217 @@ +#!/bin/sh + +test_description='CRLF conversion' + +. ./test-lib.sh + +append_cr () { + sed -e 's/$/Q/' | tr Q '\015' +} + +remove_cr () { + tr '\015' Q <"$1" | grep Q >/dev/null && + tr '\015' Q <"$1" | sed -ne 's/Q$//p' +} + +test_expect_success setup ' + + git repo-config core.autocrlf false && + + for w in Hello world how are you; do echo $w; done >one && + mkdir dir && + for w in I am very very fine thank you; do echo $w; done >dir/two && + git add . && + + git commit -m initial && + + one=`git rev-parse HEAD:one` && + dir=`git rev-parse HEAD:dir` && + two=`git rev-parse HEAD:dir/two` && + + for w in Some extra lines here; do echo $w; done >>one && + git diff >patch.file && + patched=`git hash-object --stdin <one` && + git read-tree --reset -u HEAD && + + echo happy. 
+' + +test_expect_success 'update with autocrlf=input' ' + + rm -f tmp one dir/two && + git read-tree --reset -u HEAD && + git repo-config core.autocrlf input && + + for f in one dir/two + do + append_cr <$f >tmp && mv -f tmp $f && + git update-index -- $f || { + echo Oops + false + break + } + done && + + differs=`git diff-index --cached HEAD` && + test -z "$differs" || { + echo Oops "$differs" + false + } + +' + +test_expect_success 'update with autocrlf=true' ' + + rm -f tmp one dir/two && + git read-tree --reset -u HEAD && + git repo-config core.autocrlf true && + + for f in one dir/two + do + append_cr <$f >tmp && mv -f tmp $f && + git update-index -- $f || { + echo "Oops $f" + false + break + } + done && + + differs=`git diff-index --cached HEAD` && + test -z "$differs" || { + echo Oops "$differs" + false + } + +' + +test_expect_success 'checkout with autocrlf=true' ' + + rm -f tmp one dir/two && + git repo-config core.autocrlf true && + git read-tree --reset -u HEAD && + + for f in one dir/two + do + remove_cr "$f" >tmp && mv -f tmp $f && + git update-index -- $f || { + echo "Eh? $f" + false + break + } + done && + test "$one" = `git hash-object --stdin <one` && + test "$two" = `git hash-object --stdin <dir/two` && + differs=`git diff-index --cached HEAD` && + test -z "$differs" || { + echo Oops "$differs" + false + } +' + +test_expect_success 'checkout with autocrlf=input' ' + + rm -f tmp one dir/two && + git repo-config core.autocrlf input && + git read-tree --reset -u HEAD && + + for f in one dir/two + do + if remove_cr "$f" >/dev/null + then + echo "Eh? $f" + false + break + else + git update-index -- $f + fi + done && + test "$one" = `git hash-object --stdin <one` && + test "$two" = `git hash-object --stdin <dir/two` && + differs=`git diff-index --cached HEAD` && + test -z "$differs" || { + echo Oops "$differs" + false + } +' + +test_expect_success 'apply patch (autocrlf=input)' ' + + rm -f tmp one dir/two && + git repo-config core.autocrlf input && + git read-tree --reset -u HEAD && + + git apply patch.file && + test "$patched" = "`git hash-object --stdin <one`" || { + echo "Eh? apply without index" + false + } +' + +test_expect_success 'apply patch --cached (autocrlf=input)' ' + + rm -f tmp one dir/two && + git repo-config core.autocrlf input && + git read-tree --reset -u HEAD && + + git apply --cached patch.file && + test "$patched" = `git rev-parse :one` || { + echo "Eh? apply with --cached" + false + } +' + +test_expect_success 'apply patch --index (autocrlf=input)' ' + + rm -f tmp one dir/two && + git repo-config core.autocrlf input && + git read-tree --reset -u HEAD && + + git apply --index patch.file && + test "$patched" = `git rev-parse :one` && + test "$patched" = `git hash-object --stdin <one` || { + echo "Eh? apply with --index" + false + } +' + +test_expect_success 'apply patch (autocrlf=true)' ' + + rm -f tmp one dir/two && + git repo-config core.autocrlf true && + git read-tree --reset -u HEAD && + + git apply patch.file && + test "$patched" = "`remove_cr one | git hash-object --stdin`" || { + echo "Eh? apply without index" + false + } +' + +test_expect_success 'apply patch --cached (autocrlf=true)' ' + + rm -f tmp one dir/two && + git repo-config core.autocrlf true && + git read-tree --reset -u HEAD && + + git apply --cached patch.file && + test "$patched" = `git rev-parse :one` || { + echo "Eh? 
apply without index" + false + } +' + +test_expect_success 'apply patch --index (autocrlf=true)' ' + + rm -f tmp one dir/two && + git repo-config core.autocrlf true && + git read-tree --reset -u HEAD && + + git apply --index patch.file && + test "$patched" = `git rev-parse :one` && + test "$patched" = "`remove_cr one | git hash-object --stdin`" || { + echo "Eh? apply with --index" + false + } +' + +test_done diff --git a/t/t1200-tutorial.sh b/t/t1200-tutorial.sh index eebe643bda..ca2c30f7af 100755 --- a/t/t1200-tutorial.sh +++ b/t/t1200-tutorial.sh @@ -101,7 +101,9 @@ echo "Play, play, play" >>hello echo "Lots of fun" >>example git commit -m 'Some fun.' -i hello example -test_expect_failure 'git resolve now fails' 'git resolve HEAD mybranch "Merge work in mybranch"' +test_expect_failure 'git resolve now fails' ' + git merge -m "Merge work in mybranch" mybranch +' cat > hello << EOF Hello World @@ -134,8 +136,8 @@ Updating from VARIABLE to VARIABLE 2 files changed, 2 insertions(+), 0 deletions(-) EOF -git resolve HEAD master "Merge upstream changes." | \ - sed -e "1s/[0-9a-f]\{40\}/VARIABLE/g" > resolve.output +git merge -s "Merge upstream changes." master | \ + sed -e "1s/[0-9a-f]\{40\}/VARIABLE/g" >resolve.output test_expect_success 'git resolve' 'cmp resolve.expect resolve.output' cat > show-branch2.expect << EOF diff --git a/t/t4016-diff-quote.sh b/t/t4016-diff-quote.sh index edde8f5568..2e7cd5f255 100755 --- a/t/t4016-diff-quote.sh +++ b/t/t4016-diff-quote.sh @@ -13,6 +13,10 @@ P1='pathname with HT' P2='pathname with SP' P3='pathname with LF' +: >"$P1" 2>&1 && test -f "$P1" && rm -f "$P1" || { + echo >&2 'Filesystem does not support tabs in names' + test_done +} test_expect_success setup ' echo P0.0 >"$P0.0" && diff --git a/t/t4119-apply-config.sh b/t/t4119-apply-config.sh new file mode 100755 index 0000000000..620a9207bf --- /dev/null +++ b/t/t4119-apply-config.sh @@ -0,0 +1,162 @@ +#!/bin/sh +# +# Copyright (c) 2007 Junio C Hamano +# + +test_description='git-apply --whitespace=strip and configuration file. + +' + +. ./test-lib.sh + +test_expect_success setup ' + mkdir sub && + echo A >sub/file1 && + cp sub/file1 saved && + git add sub/file1 && + echo "B " >sub/file1 && + git diff >patch.file +' + +# Also handcraft GNU diff output; note this has trailing whitespace. +cat >gpatch.file <<\EOF && +--- file1 2007-02-21 01:04:24.000000000 -0800 ++++ file1+ 2007-02-21 01:07:44.000000000 -0800 +@@ -1 +1 @@ +-A ++B +EOF + +sed -e 's|file1|sub/&|' gpatch.file >gpatch-sub.file && +sed -e ' + /^--- /s|file1|a/sub/&| + /^+++ /s|file1|b/sub/&| +' gpatch.file >gpatch-ab-sub.file && + +check_result () { + if grep " " "$1" + then + echo "Eh?" + false + elif grep B "$1" + then + echo Happy + else + echo "Huh?" 
+ false + fi +} + +test_expect_success 'apply --whitespace=strip' ' + + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + git apply --whitespace=strip patch.file && + check_result sub/file1 +' + +test_expect_success 'apply --whitespace=strip from config' ' + + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + git config apply.whitespace strip && + git apply patch.file && + check_result sub/file1 +' + +D=`pwd` + +test_expect_success 'apply --whitespace=strip in subdir' ' + + cd "$D" && + git config --unset-all apply.whitespace + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + cd sub && + git apply --whitespace=strip ../patch.file && + check_result file1 +' + +test_expect_success 'apply --whitespace=strip from config in subdir' ' + + cd "$D" && + git config apply.whitespace strip && + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + cd sub && + git apply ../patch.file && + check_result file1 +' + +test_expect_success 'same in subdir but with traditional patch input' ' + + cd "$D" && + git config apply.whitespace strip && + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + cd sub && + git apply ../gpatch.file && + check_result file1 +' + +test_expect_success 'same but with traditional patch input of depth 1' ' + + cd "$D" && + git config apply.whitespace strip && + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + cd sub && + git apply ../gpatch-sub.file && + check_result file1 +' + +test_expect_success 'same but with traditional patch input of depth 2' ' + + cd "$D" && + git config apply.whitespace strip && + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + cd sub && + git apply ../gpatch-ab-sub.file && + check_result file1 +' + +test_expect_success 'same but with traditional patch input of depth 1' ' + + cd "$D" && + git config apply.whitespace strip && + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + git apply -p0 gpatch-sub.file && + check_result sub/file1 +' + +test_expect_success 'same but with traditional patch input of depth 2' ' + + cd "$D" && + git config apply.whitespace strip && + rm -f sub/file1 && + cp saved sub/file1 && + git update-index --refresh && + + git apply gpatch-ab-sub.file && + check_result sub/file1 +' + +test_done diff --git a/t/t4200-rerere.sh b/t/t4200-rerere.sh index c571a1bd74..639d45fcec 100755 --- a/t/t4200-rerere.sh +++ b/t/t4200-rerere.sh @@ -112,39 +112,26 @@ rr2=.git/rr-cache/$sha2 mkdir $rr2 echo Hello > $rr2/preimage -case "$(date -d @11111111 +%s 2>/dev/null)" in -11111111) - # 'date' must be able to take arbitrary input with @11111111 notation. - # for this test to succeed. We should fix this part using more - # portable script someday. 
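
The date/touch gymnastics removed in the lines that follow are replaced by calls to the new test-chmtime helper (also used to reimplement poke() in lib-git-svn.sh earlier in this patch). Its source is not part of this excerpt; judging purely from these call sites, "+N" nudges a file's mtime forward by N seconds and "=N" sets it relative to the current time (N may be negative). A hypothetical, stripped-down helper with those semantics might look like this; it is not the actual test-chmtime.c, and the real argument handling may differ:

    /* hypothetical sketch, not the actual test-chmtime.c */
    #include <stdio.h>
    #include <stdlib.h>
    #include <time.h>
    #include <utime.h>
    #include <sys/stat.h>

    int main(int argc, char **argv)
    {
            struct stat st;
            struct utimbuf ut;
            long off;
            int set;

            if (argc != 3) {
                    fprintf(stderr, "usage: %s (+|-|=)<seconds> <file>\n", argv[0]);
                    return 1;
            }
            set = (argv[1][0] == '=');              /* "=N": N seconds from now */
            off = strtol(argv[1] + set, NULL, 10);  /* "+N"/"-N": adjust current mtime */
            if (stat(argv[2], &st))
                    return 1;
            ut.actime = st.st_atime;                /* leave atime alone, as poke() did */
            ut.modtime = set ? time(NULL) + off : st.st_mtime + off;
            return utime(argv[2], &ut) ? 1 : 0;
    }
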
- - now=$(date +%s) - almost_15_days_ago=$(($now+60-15*86400)) - just_over_15_days_ago=$(($now-1-15*86400)) - almost_60_days_ago=$(($now+60-60*86400)) - just_over_60_days_ago=$(($now-1-60*86400)) - predate1="$(date -d "@$almost_60_days_ago" +%Y%m%d%H%M.%S)" - predate2="$(date -d "@$almost_15_days_ago" +%Y%m%d%H%M.%S)" - postdate1="$(date -d "@$just_over_60_days_ago" +%Y%m%d%H%M.%S)" - postdate2="$(date -d "@$just_over_15_days_ago" +%Y%m%d%H%M.%S)" - - touch -m -t "$predate1" $rr/preimage - touch -m -t "$predate2" $rr2/preimage - - test_expect_success 'garbage collection (part1)' 'git rerere gc' - - test_expect_success 'young records still live' \ - "test -f $rr/preimage -a -f $rr2/preimage" - - touch -m -t "$postdate1" $rr/preimage - touch -m -t "$postdate2" $rr2/preimage - - test_expect_success 'garbage collection (part2)' 'git rerere gc' - - test_expect_success 'old records rest in peace' \ - "test ! -f $rr/preimage -a ! -f $rr2/preimage" - ;; -esac +almost_15_days_ago=$((60-15*86400)) +just_over_15_days_ago=$((-1-15*86400)) +almost_60_days_ago=$((60-60*86400)) +just_over_60_days_ago=$((-1-60*86400)) + +test-chmtime =$almost_60_days_ago $rr/preimage +test-chmtime =$almost_15_days_ago $rr2/preimage + +test_expect_success 'garbage collection (part1)' 'git rerere gc' + +test_expect_success 'young records still live' \ + "test -f $rr/preimage && test -f $rr2/preimage" + +test-chmtime =$just_over_60_days_ago $rr/preimage +test-chmtime =$just_over_15_days_ago $rr2/preimage + +test_expect_success 'garbage collection (part2)' 'git rerere gc' + +test_expect_success 'old records rest in peace' \ + "test ! -f $rr/preimage && test ! -f $rr2/preimage" test_done diff --git a/t/t9100-git-svn-basic.sh b/t/t9100-git-svn-basic.sh index 040da92756..7dcfc7e7db 100755 --- a/t/t9100-git-svn-basic.sh +++ b/t/t9100-git-svn-basic.sh @@ -211,8 +211,58 @@ tree d667270a1f7b109f5eb3aaea21ede14b56bfdd6e tree 8f51f74cf0163afc9ad68a4b1537288c4558b5a4 EOF -echo tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904 >> expected - test_expect_success "$name" "diff -u a expected" +test_expect_failure 'exit if remote refs are ambigious' " + git-config --add svn-remote.svn.fetch \ + bar:refs/remotes/git-svn && + git-svn migrate + " + +test_expect_failure 'exit if init-ing a would clobber a URL' " + svnadmin create ${PWD}/svnrepo2 && + svn mkdir -m 'mkdir bar' ${svnrepo}2/bar && + git-config --unset svn-remote.svn.fetch \ + '^bar:refs/remotes/git-svn$' && + git-svn init ${svnrepo}2/bar + " + +test_expect_success \ + 'init allows us to connect to another directory in the same repo' " + git-svn init -i bar $svnrepo/bar && + git config --get svn-remote.svn.fetch \ + '^bar:refs/remotes/bar$' && + git config --get svn-remote.svn.fetch \ + '^:refs/remotes/git-svn$' + " + +test_expect_success 'able to dcommit to a subdirectory' " + git-svn fetch -i bar && + git checkout -b my-bar refs/remotes/bar && + echo abc > d && + git update-index --add d && + git commit -m '/bar/d should be in the log' && + git-svn dcommit -i bar && + test -z \"\`git diff refs/heads/my-bar refs/remotes/bar\`\" && + mkdir newdir && + echo new > newdir/dir && + git update-index --add newdir/dir && + git commit -m 'add a new directory' && + git-svn dcommit -i bar && + test -z \"\`git diff refs/heads/my-bar refs/remotes/bar\`\" && + echo foo >> newdir/dir && + git update-index newdir/dir && + git commit -m 'modify a file in new directory' && + git-svn dcommit -i bar && + test -z \"\`git diff refs/heads/my-bar refs/remotes/bar\`\" + " + +test_expect_success 'able to 
set-tree to a subdirectory' " + echo cba > d && + git update-index d && + git commit -m 'update /bar/d' && + git-svn set-tree -i bar HEAD && + test -z \"\`git diff refs/heads/my-bar refs/remotes/bar\`\" + " + test_done diff --git a/t/t9101-git-svn-props.sh b/t/t9101-git-svn-props.sh index e8133d81cb..622ea1c0df 100755 --- a/t/t9101-git-svn-props.sh +++ b/t/t9101-git-svn-props.sh @@ -121,4 +121,30 @@ b_ne_cr="`git-hash-object ne_cr`" test_expect_success 'CRLF + $Id$' "test '$a_cr' = '$b_cr'" test_expect_success 'CRLF + $Id$ (no newline)' "test '$a_ne_cr' = '$b_ne_cr'" +cat > show-ignore.expect <<\EOF + +# / +/no-such-file* + +# deeply +/deeply/no-such-file* + +# deeply/nested +/deeply/nested/no-such-file* + +# deeply/nested/directory +/deeply/nested/directory/no-such-file* +EOF + +test_expect_success 'test show-ignore' " + cd test_wc && + mkdir -p deeply/nested/directory && + svn add deeply && + svn propset -R svn:ignore 'no-such-file*' . + svn commit -m 'propset svn:ignore' + cd .. && + git-svn show-ignore > show-ignore.got && + cmp show-ignore.expect show-ignore.got + " + test_done diff --git a/t/t9103-git-svn-graft-branches.sh b/t/t9103-git-svn-graft-branches.sh deleted file mode 100755 index 183ae3b1c2..0000000000 --- a/t/t9103-git-svn-graft-branches.sh +++ /dev/null @@ -1,61 +0,0 @@ -#!/bin/sh -test_description='git-svn graft-branches' -. ./lib-git-svn.sh - -svnrepo="$svnrepo/test-git-svn" - -test_expect_success 'initialize repo' " - mkdir import && - cd import && - mkdir -p trunk branches tags && - echo hello > trunk/readme && - svn import -m 'import for git-svn' . $svnrepo && - cd .. && - svn cp -m 'tag a' $svnrepo/trunk $svnrepo/tags/a && - svn cp -m 'branch a' $svnrepo/trunk $svnrepo/branches/a && - svn co $svnrepo wc && - cd wc && - echo feedme >> branches/a/readme && - poke branches/a/readme && - svn commit -m hungry && - cd trunk && - svn merge -r3:4 $svnrepo/branches/a && - svn commit -m 'merge with a' && - cd ../.. && - git-svn multi-init $svnrepo -T trunk -b branches -t tags && - git-svn multi-fetch - " - -r1=`git-rev-list remotes/trunk | tail -n1` -r2=`git-rev-list remotes/tags/a | tail -n1` -r3=`git-rev-list remotes/a | tail -n1` -r4=`git-rev-parse remotes/a` -r5=`git-rev-parse remotes/trunk` - -test_expect_success 'test graft-branches regexes and copies' " - test -n "$r1" && - test -n "$r2" && - test -n "$r3" && - test -n "$r4" && - test -n "$r5" && - git-svn graft-branches && - grep '^$r2 $r1' $GIT_DIR/info/grafts && - grep '^$r3 $r1' $GIT_DIR/info/grafts && - grep '^$r5 ' $GIT_DIR/info/grafts | grep '$r4' | grep '$r1' - " - -test_debug 'gitk --all & sleep 1' - -test_expect_success 'test graft-branches with tree-joins' " - rm $GIT_DIR/info/grafts && - git-svn graft-branches --no-default-regex --no-graft-copy -B && - grep '^$r3 ' $GIT_DIR/info/grafts | grep '$r1' | grep '$r2' && - grep '^$r2 $r1' $GIT_DIR/info/grafts && - grep '^$r5 ' $GIT_DIR/info/grafts | grep '$r1' | grep '$r4' - " - -# the result of this is kinda funky, we have a strange history and -# this is just a test :) -test_debug 'gitk --all &' - -test_done diff --git a/t/t9104-git-svn-follow-parent.sh b/t/t9104-git-svn-follow-parent.sh index 405b555368..bd4f366e86 100755 --- a/t/t9104-git-svn-follow-parent.sh +++ b/t/t9104-git-svn-follow-parent.sh @@ -3,7 +3,7 @@ # Copyright (c) 2006 Eric Wong # -test_description='git-svn --follow-parent fetching' +test_description='git-svn fetching' . ./lib-git-svn.sh test_expect_success 'initialize repo' " @@ -27,11 +27,141 @@ test_expect_success 'initialize repo' " cd .. 
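# Sketch for context: parent-following no longer needs an explicit flag,
# so fetching a directory that was moved within the SVN repository is
# just an init plus a plain fetch, mirroring the rewritten test below
# ("thunk" is the moved directory used by this script):
#
#   git-svn init -i thunk "$svnrepo"/thunk
#   git-svn fetch -i thunk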
" -test_expect_success 'init and fetch --follow-parent a moved directory' " +test_expect_success 'init and fetch a moved directory' " git-svn init -i thunk $svnrepo/thunk && - git-svn fetch --follow-parent -i thunk && - git-rev-parse --verify refs/remotes/trunk && - test '$?' -eq '0' + git-svn fetch -i thunk && + test \"\`git-rev-parse --verify refs/remotes/thunk@2\`\" \ + = \"\`git-rev-parse --verify refs/remotes/thunk~1\`\" && + test \"\`git-cat-file blob refs/remotes/thunk:readme |\ + sed -n -e '3p'\`\" = goodbye && + test -z \"\`git-config --get svn-remote.svn.fetch \ + '^trunk:refs/remotes/thunk@2$'\`\" + " + +test_expect_success 'init and fetch from one svn-remote' " + git-config svn-remote.svn.url $svnrepo && + git-config --add svn-remote.svn.fetch \ + trunk:refs/remotes/svn/trunk && + git-config --add svn-remote.svn.fetch \ + thunk:refs/remotes/svn/thunk && + git-svn fetch -i svn/thunk && + test \"\`git-rev-parse --verify refs/remotes/svn/trunk\`\" \ + = \"\`git-rev-parse --verify refs/remotes/svn/thunk~1\`\" && + test \"\`git-cat-file blob refs/remotes/svn/thunk:readme |\ + sed -n -e '3p'\`\" = goodbye + " + +test_expect_success 'follow deleted parent' " + svn cp -m 'resurrecting trunk as junk' \ + -r2 $svnrepo/trunk $svnrepo/junk && + git-config --add svn-remote.svn.fetch \ + junk:refs/remotes/svn/junk && + git-svn fetch -i svn/thunk && + git-svn fetch -i svn/junk && + test -z \"\`git diff svn/junk svn/trunk\`\" && + test \"\`git merge-base svn/junk svn/trunk\`\" \ + = \"\`git rev-parse svn/trunk\`\" + " + +test_expect_success 'follow larger parent' " + mkdir -p import/trunk/thunk/bump/thud && + echo hi > import/trunk/thunk/bump/thud/file && + svn import -m 'import a larger parent' import $svnrepo/larger-parent && + svn cp -m 'hi' $svnrepo/larger-parent $svnrepo/another-larger && + git-svn init -i larger $svnrepo/another-larger/trunk/thunk/bump/thud && + git-svn fetch -i larger && + git-rev-parse --verify refs/remotes/larger && + git-rev-parse --verify \ + refs/remotes/larger-parent/trunk/thunk/bump/thud && + test \"\`git-merge-base \ + refs/remotes/larger-parent/trunk/thunk/bump/thud \ + refs/remotes/larger\`\" = \ + \"\`git-rev-parse refs/remotes/larger\`\" + true + " + +test_expect_success 'follow higher-level parent' " + svn mkdir -m 'follow higher-level parent' $svnrepo/blob && + svn co $svnrepo/blob blob && + cd blob && + echo hi > hi && + svn add hi && + svn commit -m 'hihi' && + cd .. + svn mkdir -m 'new glob at top level' $svnrepo/glob && + svn mv -m 'move blob down a level' $svnrepo/blob $svnrepo/glob/blob && + git-svn init -i blob $svnrepo/glob/blob && + git-svn fetch -i blob + " + +test_expect_success 'follow deleted directory' " + svn mv -m 'bye!' 
$svnrepo/glob/blob/hi $svnrepo/glob/blob/bye && + svn rm -m 'remove glob' $svnrepo/glob && + git-svn init -i glob $svnrepo/glob && + git-svn fetch -i glob && + test \"\`git cat-file blob refs/remotes/glob:blob/bye\`\" = hi && + test \"\`git ls-tree refs/remotes/glob | wc -l \`\" -eq 1 + " + +# ref: r9270 of the Subversion repository: (http://svn.collab.net/repos/svn) +# in trunk/subversion/bindings/swig/perl +test_expect_success 'follow-parent avoids deleting relevant info' " + mkdir -p import/trunk/subversion/bindings/swig/perl/t && + for i in a b c ; do \ + echo \$i > import/trunk/subversion/bindings/swig/perl/\$i.pm && + echo _\$i > import/trunk/subversion/bindings/swig/perl/t/\$i.t; \ + done && + echo 'bad delete test' > \ + import/trunk/subversion/bindings/swig/perl/t/larger-parent && + echo 'bad delete test 2' > \ + import/trunk/subversion/bindings/swig/perl/another-larger && + cd import && + svn import -m 'r9270 test' . $svnrepo/r9270 && + cd .. && + svn co $svnrepo/r9270/trunk/subversion/bindings/swig/perl r9270 && + cd r9270 && + svn mkdir native && + svn mv t native/t && + for i in a b c; do svn mv \$i.pm native/\$i.pm; done && + echo z >> native/t/c.t && + poke native/t/c.t && + svn commit -m 'reorg test' && + cd .. && + git-svn init -i r9270-t \ + $svnrepo/r9270/trunk/subversion/bindings/swig/perl/native/t && + git-svn fetch -i r9270-t && + test \`git rev-list r9270-t | wc -l\` -eq 2 && + test \"\`git ls-tree --name-only r9270-t~1\`\" = \ + \"\`git ls-tree --name-only r9270-t\`\" + " + +test_expect_success "track initial change if it was only made to parent" " + svn cp -m 'wheee!' $svnrepo/r9270/trunk $svnrepo/r9270/drunk && + git-svn init -i r9270-d \ + $svnrepo/r9270/drunk/subversion/bindings/swig/perl/native/t && + git-svn fetch -i r9270-d && + test \`git rev-list r9270-d | wc -l\` -eq 3 && + test \"\`git ls-tree --name-only r9270-t\`\" = \ + \"\`git ls-tree --name-only r9270-d\`\" && + test \"\`git rev-parse r9270-t\`\" = \ + \"\`git rev-parse r9270-d~1\`\" + " + +test_expect_success "track multi-parent paths" " + svn cp -m 'resurrect /glob' $svnrepo/r9270 $svnrepo/glob && + git-svn multi-fetch && + test \`git cat-file commit refs/remotes/glob | \ + grep '^parent ' | wc -l\` -eq 2 + " + +test_expect_success "multi-fetch continues to work" " + git-svn multi-fetch + " + +test_expect_success "multi-fetch works off a 'clean' repository" " + rm -r $GIT_DIR/svn $GIT_DIR/refs/remotes $GIT_DIR/logs && + mkdir $GIT_DIR/svn && + git-svn multi-fetch " test_debug 'gitk --all &' diff --git a/t/t9105-git-svn-commit-diff.sh b/t/t9105-git-svn-commit-diff.sh index 6323c7e3ac..c668dd1270 100755 --- a/t/t9105-git-svn-commit-diff.sh +++ b/t/t9105-git-svn-commit-diff.sh @@ -31,4 +31,13 @@ test_expect_success 'test the commit-diff command' " cmp readme wc/readme " +test_expect_success 'commit-diff to a sub-directory (with git-svn config)' " + svn import -m 'sub-directory' import $svnrepo/subdir && + git-svn init $svnrepo/subdir && + git-svn fetch && + git-svn commit-diff -r3 '$prev' '$head' && + svn cat $svnrepo/subdir/readme > readme.2 && + cmp readme readme.2 + " + test_done diff --git a/t/t9107-git-svn-migrate.sh b/t/t9107-git-svn-migrate.sh new file mode 100755 index 0000000000..dc2afdaa45 --- /dev/null +++ b/t/t9107-git-svn-migrate.sh @@ -0,0 +1,112 @@ +#!/bin/sh +# Copyright (c) 2006 Eric Wong +test_description='git-svn metadata migrations from previous versions' +. 
./lib-git-svn.sh + +test_expect_success 'setup old-looking metadata' " + cp $GIT_DIR/config $GIT_DIR/config-old-git-svn && + mkdir import && + cd import && + for i in trunk branches/a branches/b \ + tags/0.1 tags/0.2 tags/0.3; do + mkdir -p \$i && \ + echo hello >> \$i/README || exit 1 + done && \ + svn import -m test . $svnrepo + cd .. && + git-svn init $svnrepo && + git-svn fetch && + mv $GIT_DIR/svn/* $GIT_DIR/ && + mv $GIT_DIR/svn/.metadata $GIT_DIR/ && + rmdir $GIT_DIR/svn && + git-update-ref refs/heads/git-svn-HEAD refs/remotes/git-svn && + git-update-ref refs/heads/svn-HEAD refs/remotes/git-svn && + git-update-ref -d refs/remotes/git-svn refs/remotes/git-svn + " + +head=`git rev-parse --verify refs/heads/git-svn-HEAD^0` +test_expect_success 'git-svn-HEAD is a real HEAD' "test -n '$head'" + +test_expect_success 'initialize old-style (v0) git-svn layout' " + mkdir -p $GIT_DIR/git-svn/info $GIT_DIR/svn/info && + echo $svnrepo > $GIT_DIR/git-svn/info/url && + echo $svnrepo > $GIT_DIR/svn/info/url && + git-svn migrate && + ! test -d $GIT_DIR/git-svn && + git-rev-parse --verify refs/remotes/git-svn^0 && + git-rev-parse --verify refs/remotes/svn^0 && + test \`git config --get svn-remote.svn.url\` = '$svnrepo' && + test \`git config --get svn-remote.svn.fetch\` = \ + ':refs/remotes/git-svn' + " + +test_expect_success 'initialize a multi-repository repo' " + git-svn init $svnrepo -T trunk -t tags -b branches && + git-config --get-all svn-remote.svn.fetch > fetch.out && + grep '^trunk:refs/remotes/trunk$' fetch.out && + test -n \"\`git-config --get svn-remote.svn.branches \ + '^branches/\*:refs/remotes/\*$'\`\" && + test -n \"\`git-config --get svn-remote.svn.tags \ + '^tags/\*:refs/remotes/tags/\*$'\`\" && + git config --unset svn-remote.svn.branches \ + '^branches/\*:refs/remotes/\*$' && + git config --unset svn-remote.svn.tags \ + '^tags/\*:refs/remotes/tags/\*$' && + git-config --add svn-remote.svn.fetch 'branches/a:refs/remotes/a' && + git-config --add svn-remote.svn.fetch 'branches/b:refs/remotes/b' && + for i in tags/0.1 tags/0.2 tags/0.3; do + git-config --add svn-remote.svn.fetch \ + \$i:refs/remotes/\$i || exit 1; done + " + +# refs should all be different, but the trees should all be the same: +test_expect_success 'multi-fetch works on partial urls + paths' " + git-svn multi-fetch && + for i in trunk a b tags/0.1 tags/0.2 tags/0.3; do + git rev-parse --verify refs/remotes/\$i^0 >> refs.out || exit 1; + done && + test -z \"\`sort < refs.out | uniq -d\`\" && + for i in trunk a b tags/0.1 tags/0.2 tags/0.3; do + for j in trunk a b tags/0.1 tags/0.2 tags/0.3; do + if test \$j != \$i; then continue; fi + test -z \"\`git diff refs/remotes/\$i \ + refs/remotes/\$j\`\" ||exit 1; done; done + " + +test_expect_success 'migrate --minimize on old inited layout' " + git config --unset-all svn-remote.svn.fetch && + git config --unset-all svn-remote.svn.url && + rm -rf $GIT_DIR/svn && + for i in \`cat fetch.out\`; do + path=\`expr \$i : '\\([^:]*\\):.*$'\` + ref=\`expr \$i : '[^:]*:refs/remotes/\\(.*\\)$'\` + if test -z \"\$ref\"; then continue; fi + if test -n \"\$path\"; then path=\"/\$path\"; fi + ( mkdir -p $GIT_DIR/svn/\$ref/info/ && + echo $svnrepo\$path > $GIT_DIR/svn/\$ref/info/url ) || exit 1; + done && + git-svn migrate --minimize && + test -z \"\`git-config -l |grep -v '^svn-remote\.git-svn\.'\`\" && + git-config --get-all svn-remote.svn.fetch > fetch.out && + grep '^trunk:refs/remotes/trunk$' fetch.out && + grep '^branches/a:refs/remotes/a$' fetch.out && + grep 
'^branches/b:refs/remotes/b$' fetch.out && + grep '^tags/0\.1:refs/remotes/tags/0\.1$' fetch.out && + grep '^tags/0\.2:refs/remotes/tags/0\.2$' fetch.out && + grep '^tags/0\.3:refs/remotes/tags/0\.3$' fetch.out + grep '^:refs/remotes/git-svn' fetch.out + " + +test_expect_success ".rev_db auto-converted to .rev_db.UUID" " + git-svn fetch -i trunk && + expect=$GIT_DIR/svn/trunk/.rev_db.* && + test -n \"\$expect\" && + mv \$expect $GIT_DIR/svn/trunk/.rev_db && + git-svn fetch -i trunk && + test -L $GIT_DIR/svn/trunk/.rev_db && + test -f \$expect && + cmp \$expect $GIT_DIR/svn/trunk/.rev_db + " + +test_done + diff --git a/t/t9108-git-svn-glob.sh b/t/t9108-git-svn-glob.sh new file mode 100755 index 0000000000..db4344cc84 --- /dev/null +++ b/t/t9108-git-svn-glob.sh @@ -0,0 +1,86 @@ +#!/bin/sh +# Copyright (c) 2007 Eric Wong +test_description='git-svn globbing refspecs' +. ./lib-git-svn.sh + +cat > expect.end <<EOF +the end +hi +start a new branch +initial +EOF + +test_expect_success 'test refspec globbing' " + mkdir -p trunk/src/a trunk/src/b trunk/doc && + echo 'hello world' > trunk/src/a/readme && + echo 'goodbye world' > trunk/src/b/readme && + svn import -m 'initial' trunk $svnrepo/trunk && + svn co $svnrepo tmp && + cd tmp && + mkdir branches tags && + svn add branches tags && + svn cp trunk branches/start && + svn commit -m 'start a new branch' && + svn up && + echo 'hi' >> branches/start/src/b/readme && + poke branches/start/src/b/readme && + echo 'hey' >> branches/start/src/a/readme && + poke branches/start/src/a/readme && + svn commit -m 'hi' && + svn up && + svn cp branches/start tags/end && + echo 'bye' >> tags/end/src/b/readme && + poke tags/end/src/b/readme && + echo 'aye' >> tags/end/src/a/readme && + poke tags/end/src/a/readme && + svn commit -m 'the end' && + echo 'byebye' >> tags/end/src/b/readme && + poke tags/end/src/b/readme && + svn commit -m 'nothing to see here' + cd .. && + git config --add svn-remote.svn.url $svnrepo && + git config --add svn-remote.svn.fetch \ + 'trunk/src/a:refs/remotes/trunk' && + git config --add svn-remote.svn.branches \ + 'branches/*/src/a:refs/remotes/branches/*' && + git config --add svn-remote.svn.tags\ + 'tags/*/src/a:refs/remotes/tags/*' && + git-svn multi-fetch && + git log --pretty=oneline refs/remotes/tags/end | \ + sed -e 's/^.\{41\}//' > output.end && + cmp expect.end output.end && + test \"\`git rev-parse refs/remotes/tags/end~1\`\" = \ + \"\`git rev-parse refs/remotes/branches/start\`\" && + test \"\`git rev-parse refs/remotes/branches/start~2\`\" = \ + \"\`git rev-parse refs/remotes/trunk\`\" + " + +echo try to try > expect.two +echo nothing to see here >> expect.two +cat expect.end >> expect.two + +test_expect_success 'test left-hand-side only globbing' " + git config --add svn-remote.two.url $svnrepo && + git config --add svn-remote.two.fetch trunk:refs/remotes/two/trunk && + git config --add svn-remote.two.branches \ + 'branches/*:refs/remotes/two/branches/*' && + git config --add svn-remote.two.tags \ + 'tags/*:refs/remotes/two/tags/*' && + cd tmp && + echo 'try try' >> tags/end/src/b/readme && + poke tags/end/src/b/readme && + svn commit -m 'try to try' + cd .. 
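# For reference, the "git config --add svn-remote.two.*" calls at the top
# of this test produce a .git/config stanza roughly like the following
# ("<svnrepo url>" stands for the test repository's URL):
#
#   [svn-remote "two"]
#       url = <svnrepo url>
#       fetch = trunk:refs/remotes/two/trunk
#       branches = branches/*:refs/remotes/two/branches/*
#       tags = tags/*:refs/remotes/two/tags/*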
&& + git-svn fetch two && + test \`git rev-list refs/remotes/two/tags/end | wc -l\` -eq 6 && + test \`git rev-list refs/remotes/two/branches/start | wc -l\` -eq 3 && + test \`git rev-parse refs/remotes/two/branches/start~2\` = \ + \`git rev-parse refs/remotes/two/trunk\` && + test \`git rev-parse refs/remotes/two/tags/end~3\` = \ + \`git rev-parse refs/remotes/two/branches/start\` && + git log --pretty=oneline refs/remotes/two/tags/end | \ + sed -e 's/^.\{41\}//' > output.two && + cmp expect.two output.two + " + +test_done diff --git a/t/t9110-git-svn-use-svm-props.sh b/t/t9110-git-svn-use-svm-props.sh new file mode 100755 index 0000000000..9db0d8fd8d --- /dev/null +++ b/t/t9110-git-svn-use-svm-props.sh @@ -0,0 +1,51 @@ +#!/bin/sh +# +# Copyright (c) 2007 Eric Wong +# + +test_description='git-svn useSvmProps test' + +. ./lib-git-svn.sh + +test_expect_success 'load svm repo' " + svnadmin load -q $rawsvnrepo < ../t9110/svm.dump && + git-svn init -R arr -i bar $svnrepo/mirror/arr && + git-svn init -R argh -i dir $svnrepo/mirror/argh && + git-svn init -R argh -i e $svnrepo/mirror/argh/a/b/c/d/e && + git-config svn.useSvmProps true && + git-svn fetch --all + " + +uuid=161ce429-a9dd-4828-af4a-52023f968c89 + +bar_url=http://mayonaise/svnrepo/bar +test_expect_success 'verify metadata for /bar' " + git-cat-file commit refs/remotes/bar | \ + grep '^git-svn-id: $bar_url@12 $uuid$' && + git-cat-file commit refs/remotes/bar~1 | \ + grep '^git-svn-id: $bar_url@11 $uuid$' && + git-cat-file commit refs/remotes/bar~2 | \ + grep '^git-svn-id: $bar_url@10 $uuid$' && + git-cat-file commit refs/remotes/bar~3 | \ + grep '^git-svn-id: $bar_url@9 $uuid$' && + git-cat-file commit refs/remotes/bar~4 | \ + grep '^git-svn-id: $bar_url@6 $uuid$' && + git-cat-file commit refs/remotes/bar~5 | \ + grep '^git-svn-id: $bar_url@1 $uuid$' + " + +e_url=http://mayonaise/svnrepo/dir/a/b/c/d/e +test_expect_success 'verify metadata for /dir/a/b/c/d/e' " + git-cat-file commit refs/remotes/e | \ + grep '^git-svn-id: $e_url@1 $uuid$' + " + +dir_url=http://mayonaise/svnrepo/dir +test_expect_success 'verify metadata for /dir' " + git-cat-file commit refs/remotes/dir | \ + grep '^git-svn-id: $dir_url@2 $uuid$' && + git-cat-file commit refs/remotes/dir~1 | \ + grep '^git-svn-id: $dir_url@1 $uuid$' + " + +test_done diff --git a/t/t9110/svm.dump b/t/t9110/svm.dump new file mode 100644 index 0000000000..cc799c238d --- /dev/null +++ b/t/t9110/svm.dump @@ -0,0 +1,511 @@ +SVN-fs-dump-format-version: 2 + +UUID: de5973c6-545d-41da-aded-c265f9039e74 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2007-02-17T06:54:59.793104Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 200 +Content-length: 200 + +K 7 +svn:log +V 40 +SVM: initializing mirror for /mirror/arr +K 10 +svn:author +V 3 +svm +K 11 +svm:headrev +V 39 +161ce429-a9dd-4828-af4a-52023f968c89:0 + +K 8 +svn:date +V 27 +2007-02-17T06:55:00.121647Z +PROPS-END + +Node-path: +Node-kind: dir +Node-action: change +Prop-content-length: 44 +Content-length: 44 + +K 10 +svm:mirror +V 12 +/mirror/arr + +PROPS-END + + +Node-path: mirror +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: mirror/arr +Node-kind: dir +Node-action: add +Prop-content-length: 116 +Content-length: 116 + +K 10 +svm:source +V 29 +http://mayonaise/svnrepo!/bar +K 8 +svm:uuid +V 36 +161ce429-a9dd-4828-af4a-52023f968c89 +PROPS-END + + +Revision-number: 2 +Prop-content-length: 182 +Content-length: 182 + +K 7 +svn:log +V 18 
+import for git-svn +K 10 +svn:author +V 7 +svnsync +K 11 +svm:headrev +V 39 +161ce429-a9dd-4828-af4a-52023f968c89:1 + +K 8 +svn:date +V 27 +2007-02-17T05:10:52.108847Z +PROPS-END + +Node-path: mirror/arr +Node-kind: dir +Node-action: change +Prop-content-length: 116 +Content-length: 116 + +K 10 +svm:source +V 29 +http://mayonaise/svnrepo!/bar +K 8 +svm:uuid +V 36 +161ce429-a9dd-4828-af4a-52023f968c89 +PROPS-END + + +Node-path: mirror/arr/zzz +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: 33b02bc15ce9557d2dd8484d58f95ac4 +Content-length: 14 + +PROPS-END +zzz + + +Revision-number: 3 +Prop-content-length: 230 +Content-length: 230 + +K 7 +svn:log +V 66 +new symlink is added to a file that was also just made executable + +K 10 +svn:author +V 7 +svnsync +K 11 +svm:headrev +V 39 +161ce429-a9dd-4828-af4a-52023f968c89:6 + +K 8 +svn:date +V 27 +2007-02-17T05:11:01.686891Z +PROPS-END + +Node-path: mirror/arr/zzz +Node-kind: file +Node-action: change +Prop-content-length: 36 +Text-content-length: 4 +Text-content-md5: 33b02bc15ce9557d2dd8484d58f95ac4 +Content-length: 40 + +K 14 +svn:executable +V 1 +* +PROPS-END +zzz + + +Revision-number: 4 +Prop-content-length: 192 +Content-length: 192 + +K 7 +svn:log +V 28 +/bar/d should be in the log + +K 10 +svn:author +V 7 +svnsync +K 11 +svm:headrev +V 39 +161ce429-a9dd-4828-af4a-52023f968c89:9 + +K 8 +svn:date +V 27 +2007-02-17T05:11:07.686552Z +PROPS-END + +Node-path: mirror/arr/d +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: 0bee89b07a248e27c83fc3d5951213c1 +Content-length: 14 + +PROPS-END +abc + + +Revision-number: 5 +Prop-content-length: 185 +Content-length: 185 + +K 7 +svn:log +V 20 +add a new directory + +K 10 +svn:author +V 7 +svnsync +K 11 +svm:headrev +V 40 +161ce429-a9dd-4828-af4a-52023f968c89:10 + +K 8 +svn:date +V 27 +2007-02-17T05:11:08.405953Z +PROPS-END + +Node-path: mirror/arr/newdir +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: mirror/arr/newdir/dir +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: 9cd599a3523898e6a12e13ec787da50a +Content-length: 14 + +PROPS-END +new + + +Revision-number: 6 +Prop-content-length: 196 +Content-length: 196 + +K 7 +svn:log +V 31 +modify a file in new directory + +K 10 +svn:author +V 7 +svnsync +K 11 +svm:headrev +V 40 +161ce429-a9dd-4828-af4a-52023f968c89:11 + +K 8 +svn:date +V 27 +2007-02-17T05:11:09.126645Z +PROPS-END + +Node-path: mirror/arr/newdir/dir +Node-kind: file +Node-action: change +Text-content-length: 8 +Text-content-md5: a950e20332358e523a5e9d571e47fa64 +Content-length: 8 + +new +foo + + +Revision-number: 7 +Prop-content-length: 179 +Content-length: 179 + +K 7 +svn:log +V 14 +update /bar/d + +K 10 +svn:author +V 7 +svnsync +K 11 +svm:headrev +V 40 +161ce429-a9dd-4828-af4a-52023f968c89:12 + +K 8 +svn:date +V 27 +2007-02-17T05:11:09.846221Z +PROPS-END + +Node-path: mirror/arr/d +Node-kind: file +Node-action: change +Text-content-length: 4 +Text-content-md5: 7abb78de7f2756ca8b511cbc879fd5e7 +Content-length: 4 + +cba + + +Revision-number: 8 +Prop-content-length: 201 +Content-length: 201 + +K 7 +svn:log +V 41 +SVM: initializing mirror for /mirror/argh +K 10 +svn:author +V 3 +svm +K 11 +svm:headrev +V 39 +161ce429-a9dd-4828-af4a-52023f968c89:0 + +K 8 +svn:date +V 27 +2007-02-17T06:56:03.703677Z +PROPS-END + +Node-path: +Node-kind: dir +Node-action: change +Prop-content-length: 57 
+Content-length: 57 + +K 10 +svm:mirror +V 25 +/mirror/argh +/mirror/arr + +PROPS-END + + +Node-path: mirror/argh +Node-kind: dir +Node-action: add +Prop-content-length: 116 +Content-length: 116 + +K 10 +svm:source +V 29 +http://mayonaise/svnrepo!/dir +K 8 +svm:uuid +V 36 +161ce429-a9dd-4828-af4a-52023f968c89 +PROPS-END + + +Revision-number: 9 +Prop-content-length: 182 +Content-length: 182 + +K 7 +svn:log +V 18 +import for git-svn +K 10 +svn:author +V 7 +svnsync +K 11 +svm:headrev +V 39 +161ce429-a9dd-4828-af4a-52023f968c89:1 + +K 8 +svn:date +V 27 +2007-02-17T05:10:52.108847Z +PROPS-END + +Node-path: mirror/argh +Node-kind: dir +Node-action: change +Prop-content-length: 116 +Content-length: 116 + +K 10 +svm:source +V 29 +http://mayonaise/svnrepo!/dir +K 8 +svm:uuid +V 36 +161ce429-a9dd-4828-af4a-52023f968c89 +PROPS-END + + +Node-path: mirror/argh/a +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: mirror/argh/a/b +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: mirror/argh/a/b/c +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: mirror/argh/a/b/c/d +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: mirror/argh/a/b/c/d/e +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: mirror/argh/a/b/c/d/e/file +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 9 +Text-content-md5: 3fd46fe46fcdcf062c802ca60dc826d5 +Content-length: 19 + +PROPS-END +deep dir + + +Revision-number: 10 +Prop-content-length: 197 +Content-length: 197 + +K 7 +svn:log +V 33 +try a deep --rmdir with a commit + +K 10 +svn:author +V 7 +svnsync +K 11 +svm:headrev +V 39 +161ce429-a9dd-4828-af4a-52023f968c89:2 + +K 8 +svn:date +V 27 +2007-02-17T05:10:54.847015Z +PROPS-END + +Node-path: mirror/argh/file +Node-kind: file +Node-action: add +Node-copyfrom-rev: 9 +Node-copyfrom-path: mirror/argh/a/b/c/d/e/file +Text-content-length: 9 +Text-content-md5: 3fd46fe46fcdcf062c802ca60dc826d5 +Content-length: 9 + +deep dir + + +Node-path: mirror/argh/a +Node-action: delete + + diff --git a/t/t9111-git-svn-use-svnsync-props.sh b/t/t9111-git-svn-use-svnsync-props.sh new file mode 100755 index 0000000000..483d7f8159 --- /dev/null +++ b/t/t9111-git-svn-use-svnsync-props.sh @@ -0,0 +1,51 @@ +#!/bin/sh +# +# Copyright (c) 2007 Eric Wong +# + +test_description='git-svn useSvnsyncProps test' + +. 
./lib-git-svn.sh + +test_expect_success 'load svnsync repo' " + svnadmin load -q $rawsvnrepo < ../t9111/svnsync.dump && + git-svn init -R arr -i bar $svnrepo/bar && + git-svn init -R argh -i dir $svnrepo/dir && + git-svn init -R argh -i e $svnrepo/dir/a/b/c/d/e && + git-config svn.useSvnsyncProps true && + git-svn fetch --all + " + +uuid=161ce429-a9dd-4828-af4a-52023f968c89 + +bar_url=http://mayonaise/svnrepo/bar +test_expect_success 'verify metadata for /bar' " + git-cat-file commit refs/remotes/bar | \ + grep '^git-svn-id: $bar_url@12 $uuid$' && + git-cat-file commit refs/remotes/bar~1 | \ + grep '^git-svn-id: $bar_url@11 $uuid$' && + git-cat-file commit refs/remotes/bar~2 | \ + grep '^git-svn-id: $bar_url@10 $uuid$' && + git-cat-file commit refs/remotes/bar~3 | \ + grep '^git-svn-id: $bar_url@9 $uuid$' && + git-cat-file commit refs/remotes/bar~4 | \ + grep '^git-svn-id: $bar_url@6 $uuid$' && + git-cat-file commit refs/remotes/bar~5 | \ + grep '^git-svn-id: $bar_url@1 $uuid$' + " + +e_url=http://mayonaise/svnrepo/dir/a/b/c/d/e +test_expect_success 'verify metadata for /dir/a/b/c/d/e' " + git-cat-file commit refs/remotes/e | \ + grep '^git-svn-id: $e_url@1 $uuid$' + " + +dir_url=http://mayonaise/svnrepo/dir +test_expect_success 'verify metadata for /dir' " + git-cat-file commit refs/remotes/dir | \ + grep '^git-svn-id: $dir_url@2 $uuid$' && + git-cat-file commit refs/remotes/dir~1 | \ + grep '^git-svn-id: $dir_url@1 $uuid$' + " + +test_done diff --git a/t/t9111/svnsync.dump b/t/t9111/svnsync.dump new file mode 100644 index 0000000000..a9a46eeb29 --- /dev/null +++ b/t/t9111/svnsync.dump @@ -0,0 +1,562 @@ +SVN-fs-dump-format-version: 2 + +UUID: b4bfe35e-f256-4096-874c-08c5639ecad7 + +Revision-number: 0 +Prop-content-length: 240 +Content-length: 240 + +K 18 +svn:sync-from-uuid +V 36 +161ce429-a9dd-4828-af4a-52023f968c89 +K 10 +svn:author +V 7 +svnsync +K 24 +svn:sync-last-merged-rev +V 2 +12 +K 8 +svn:date +V 27 +2007-02-17T05:10:52.017552Z +K 17 +svn:sync-from-url +V 24 +http://mayonaise/svnrepo +PROPS-END + +Revision-number: 1 +Prop-content-length: 120 +Content-length: 120 + +K 7 +svn:log +V 18 +import for git-svn +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:10:52.108847Z +PROPS-END + +Node-path: bar +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: bar/zzz +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: 33b02bc15ce9557d2dd8484d58f95ac4 +Content-length: 14 + +PROPS-END +zzz + + +Node-path: dir +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: dir/a +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: dir/a/b +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: dir/a/b/c +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: dir/a/b/c/d +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: dir/a/b/c/d/e +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: dir/a/b/c/d/e/file +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 9 +Text-content-md5: 3fd46fe46fcdcf062c802ca60dc826d5 +Content-length: 19 + +PROPS-END +deep dir + + +Node-path: exec.sh +Node-kind: file +Node-action: add +Prop-content-length: 35 
+Text-content-length: 10 +Text-content-md5: 3e2b31c72181b87149ff995e7202c0e3 +Content-length: 45 + +K 14 +svn:executable +V 0 + +PROPS-END +#!/bin/sh + + +Node-path: foo +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: d3b07384d113edec49eaa6238ad5ff00 +Content-length: 14 + +PROPS-END +foo + + +Node-path: foo.link +Node-kind: file +Node-action: add +Prop-content-length: 33 +Text-content-length: 8 +Text-content-md5: 1043146e49ef02cab12eef865cb34ff3 +Content-length: 41 + +K 11 +svn:special +V 1 +* +PROPS-END +link foo + +Revision-number: 2 +Prop-content-length: 135 +Content-length: 135 + +K 7 +svn:log +V 33 +try a deep --rmdir with a commit + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:10:54.847015Z +PROPS-END + +Node-path: dir/file +Node-kind: file +Node-action: add +Node-copyfrom-rev: 1 +Node-copyfrom-path: dir/a/b/c/d/e/file +Text-content-length: 9 +Text-content-md5: 3fd46fe46fcdcf062c802ca60dc826d5 +Content-length: 9 + +deep dir + + +Node-path: dir/a +Node-action: delete + + +Node-path: file +Node-kind: file +Node-action: add +Node-copyfrom-rev: 1 +Node-copyfrom-path: dir/a/b/c/d/e/file +Text-content-length: 9 +Text-content-md5: 3fd46fe46fcdcf062c802ca60dc826d5 +Content-length: 9 + +deep dir + + +Revision-number: 3 +Prop-content-length: 136 +Content-length: 136 + +K 7 +svn:log +V 34 +remove executable bit from a file + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:10:58.232691Z +PROPS-END + +Node-path: exec.sh +Node-kind: file +Node-action: change +Prop-content-length: 10 +Text-content-length: 10 +Text-content-md5: 3e2b31c72181b87149ff995e7202c0e3 +Content-length: 20 + +PROPS-END +#!/bin/sh + + +Revision-number: 4 +Prop-content-length: 131 +Content-length: 131 + +K 7 +svn:log +V 29 +add executable bit back file + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:10:59.666560Z +PROPS-END + +Node-path: exec.sh +Node-kind: file +Node-action: change +Prop-content-length: 36 +Text-content-length: 10 +Text-content-md5: 3e2b31c72181b87149ff995e7202c0e3 +Content-length: 46 + +K 14 +svn:executable +V 1 +* +PROPS-END +#!/bin/sh + + +Revision-number: 5 +Prop-content-length: 154 +Content-length: 154 + +K 7 +svn:log +V 52 +executable file becomes a symlink to bar/zzz (file) + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:11:00.676495Z +PROPS-END + +Node-path: exec.sh +Node-kind: file +Node-action: change +Prop-content-length: 33 +Text-content-length: 12 +Text-content-md5: f138693371665cc117742508761d684d +Content-length: 45 + +K 11 +svn:special +V 1 +* +PROPS-END +link bar/zzz + +Revision-number: 6 +Prop-content-length: 168 +Content-length: 168 + +K 7 +svn:log +V 66 +new symlink is added to a file that was also just made executable + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:11:01.686891Z +PROPS-END + +Node-path: bar/zzz +Node-kind: file +Node-action: change +Prop-content-length: 36 +Text-content-length: 4 +Text-content-md5: 33b02bc15ce9557d2dd8484d58f95ac4 +Content-length: 40 + +K 14 +svn:executable +V 1 +* +PROPS-END +zzz + + +Node-path: exec-2.sh +Node-kind: file +Node-action: add +Node-copyfrom-rev: 5 +Node-copyfrom-path: exec.sh +Text-content-length: 12 +Text-content-md5: f138693371665cc117742508761d684d +Content-length: 12 + +link bar/zzz + +Revision-number: 7 +Prop-content-length: 136 +Content-length: 136 + +K 7 +svn:log +V 34 +modify a symlink to become a file + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 
+2007-02-17T05:11:02.677035Z +PROPS-END + +Node-path: exec-2.sh +Node-kind: file +Node-action: change +Prop-content-length: 10 +Text-content-length: 9 +Text-content-md5: 8e92eff9e911886cede27d420f89c735 +Content-length: 19 + +PROPS-END +git help + + +Revision-number: 8 +Prop-content-length: 109 +Content-length: 109 + +K 7 +svn:log +V 8 +éï∏ + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:11:03.676862Z +PROPS-END + +Node-path: exec-2.sh +Node-kind: file +Node-action: change +Text-content-length: 17 +Text-content-md5: 49881954063cf26ca48c212396a957ca +Content-length: 17 + +git help +# hello + + +Revision-number: 9 +Prop-content-length: 130 +Content-length: 130 + +K 7 +svn:log +V 28 +/bar/d should be in the log + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:11:07.686552Z +PROPS-END + +Node-path: bar/d +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: 0bee89b07a248e27c83fc3d5951213c1 +Content-length: 14 + +PROPS-END +abc + + +Revision-number: 10 +Prop-content-length: 122 +Content-length: 122 + +K 7 +svn:log +V 20 +add a new directory + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:11:08.405953Z +PROPS-END + +Node-path: bar/newdir +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: bar/newdir/dir +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: 9cd599a3523898e6a12e13ec787da50a +Content-length: 14 + +PROPS-END +new + + +Revision-number: 11 +Prop-content-length: 133 +Content-length: 133 + +K 7 +svn:log +V 31 +modify a file in new directory + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:11:09.126645Z +PROPS-END + +Node-path: bar/newdir/dir +Node-kind: file +Node-action: change +Text-content-length: 8 +Text-content-md5: a950e20332358e523a5e9d571e47fa64 +Content-length: 8 + +new +foo + + +Revision-number: 12 +Prop-content-length: 116 +Content-length: 116 + +K 7 +svn:log +V 14 +update /bar/d + +K 10 +svn:author +V 7 +svnsync +K 8 +svn:date +V 27 +2007-02-17T05:11:09.846221Z +PROPS-END + +Node-path: bar/d +Node-kind: file +Node-action: change +Text-content-length: 4 +Text-content-md5: 7abb78de7f2756ca8b511cbc879fd5e7 +Content-length: 4 + +cba + + diff --git a/t/test-lib.sh b/t/test-lib.sh index 37822fc13d..c0754747fb 100755 --- a/t/test-lib.sh +++ b/t/test-lib.sh @@ -255,8 +255,8 @@ test_done () { PATH=$(pwd)/..:$PATH GIT_EXEC_PATH=$(pwd)/.. GIT_TEMPLATE_DIR=$(pwd)/../templates/blt -HOME=$(pwd)/trash -export PATH GIT_EXEC_PATH GIT_TEMPLATE_DIR HOME +GIT_CONFIG=.git/config +export PATH GIT_EXEC_PATH GIT_TEMPLATE_DIR GIT_CONFIG GITPERLLIB=$(pwd)/../perl/blib/lib:$(pwd)/../perl/blib/arch/auto/Git export GITPERLLIB @@ -264,6 +264,12 @@ test -d ../templates/blt || { error "You haven't built things yet, have you?" } +if ! 
test -x ../test-chmtime; then + echo >&2 'You need to build test-chmtime:' + echo >&2 'Run "make test-chmtime" in the source (toplevel) directory' + exit 1 +fi + # Test repository test=trash rm -fr "$test" @@ -1,5 +1,8 @@ #include "cache.h" #include "tag.h" +#include "commit.h" +#include "tree.h" +#include "blob.h" const char *tag_type = "tag"; @@ -37,7 +40,7 @@ struct tag *lookup_tag(const unsigned char *sha1) int parse_tag_buffer(struct tag *item, void *data, unsigned long size) { int typelen, taglen; - unsigned char object[20]; + unsigned char sha1[20]; const char *type_line, *tag_line, *sig_line; char type[20]; @@ -47,7 +50,7 @@ int parse_tag_buffer(struct tag *item, void *data, unsigned long size) if (size < 64) return -1; - if (memcmp("object ", data, 7) || get_sha1_hex((char *) data + 7, object)) + if (memcmp("object ", data, 7) || get_sha1_hex((char *) data + 7, sha1)) return -1; type_line = (char *) data + 48; @@ -73,7 +76,19 @@ int parse_tag_buffer(struct tag *item, void *data, unsigned long size) memcpy(item->tag, tag_line + 4, taglen); item->tag[taglen] = '\0'; - item->tagged = lookup_object_type(object, type); + if (!strcmp(type, blob_type)) { + item->tagged = &lookup_blob(sha1)->object; + } else if (!strcmp(type, tree_type)) { + item->tagged = &lookup_tree(sha1)->object; + } else if (!strcmp(type, commit_type)) { + item->tagged = &lookup_commit(sha1)->object; + } else if (!strcmp(type, tag_type)) { + item->tagged = &lookup_tag(sha1)->object; + } else { + error("Unknown type %s", type); + item->tagged = NULL; + } + if (item->tagged && track_object_refs) { struct object_refs *refs = alloc_object_refs(1); refs->ref[0] = item->tagged; @@ -85,18 +100,18 @@ int parse_tag_buffer(struct tag *item, void *data, unsigned long size) int parse_tag(struct tag *item) { - char type[20]; + enum object_type type; void *data; unsigned long size; int ret; if (item->object.parsed) return 0; - data = read_sha1_file(item->object.sha1, type, &size); + data = read_sha1_file(item->object.sha1, &type, &size); if (!data) return error("Could not read %s", sha1_to_hex(item->object.sha1)); - if (strcmp(type, tag_type)) { + if (type != OBJ_TAG) { free(data); return error("Object %s not a tag", sha1_to_hex(item->object.sha1)); diff --git a/templates/hooks--update b/templates/hooks--update index e8c536fb61..fd1f73d6aa 100644 --- a/templates/hooks--update +++ b/templates/hooks--update @@ -57,7 +57,7 @@ announcerecipients=$(git-repo-config hooks.announcelist) allowunannotated=$(git-repo-config --bool hooks.allowunannotated) # --- Check types -newrev_type=$(git-cat-file -t "$newrev") +newrev_type=$(git-cat-file -t $newrev) case "$refname","$newrev_type" in refs/tags/*,commit) @@ -165,7 +165,7 @@ case "$refname_type" in baserev=$(git-merge-base $oldrev $newrev) # Commit with a parent - for rev in $(git-rev-list $newrev ^$baserev) + for rev in $(git-rev-parse --not --all | git-rev-list --stdin $newrev ^$baserev) do revtype=$(git-cat-file -t "$rev") echo " via $rev ($revtype)" @@ -190,7 +190,8 @@ case "$refname_type" in fi echo "" echo $LOGBEGIN - git-rev-list --pretty $newrev ^$baserev + git-rev-parse --not --all | + git-rev-list --stdin --pretty $newrev ^$baserev echo $LOGEND echo "" echo "Diffstat:" diff --git a/test-chmtime.c b/test-chmtime.c new file mode 100644 index 0000000000..90da448ebe --- /dev/null +++ b/test-chmtime.c @@ -0,0 +1,61 @@ +#include "git-compat-util.h" +#include <utime.h> + +static const char usage_str[] = "(+|=|=+|=-|-)<seconds> <file>..."; + +int main(int argc, const char *argv[]) +{ + int 
i; + int set_eq; + long int set_time; + char *test; + const char *timespec; + + if (argc < 3) + goto usage; + + timespec = argv[1]; + set_eq = (*timespec == '=') ? 1 : 0; + if (set_eq) { + timespec++; + if (*timespec == '+') { + set_eq = 2; /* relative "in the future" */ + timespec++; + } + } + set_time = strtol(timespec, &test, 10); + if (*test) { + fprintf(stderr, "Not a base-10 integer: %s\n", argv[1] + 1); + goto usage; + } + if ((set_eq && set_time < 0) || set_eq == 2) { + time_t now = time(NULL); + set_time += now; + } + + for (i = 2; i < argc; i++) { + struct stat sb; + struct utimbuf utb; + + if (stat(argv[i], &sb) < 0) { + fprintf(stderr, "Failed to stat %s: %s\n", + argv[i], strerror(errno)); + return -1; + } + + utb.actime = sb.st_atime; + utb.modtime = set_eq ? set_time : sb.st_mtime + set_time; + + if (utime(argv[i], &utb) < 0) { + fprintf(stderr, "Failed to modify time on %s: %s\n", + argv[i], strerror(errno)); + return -1; + } + } + + return 0; + +usage: + fprintf(stderr, "Usage: %s %s\n", argv[0], usage_str); + return -1; +} diff --git a/tree-diff.c b/tree-diff.c index 37d235e06e..c8275823d0 100644 --- a/tree-diff.c +++ b/tree-diff.c @@ -139,13 +139,13 @@ static void show_entry(struct diff_options *opt, const char *prefix, struct tree const unsigned char *sha1 = tree_entry_extract(desc, &path, &mode); if (opt->recursive && S_ISDIR(mode)) { - char type[20]; + enum object_type type; char *newbase = malloc_base(base, path, strlen(path)); struct tree_desc inner; void *tree; - tree = read_sha1_file(sha1, type, &inner.size); - if (!tree || strcmp(type, tree_type)) + tree = read_sha1_file(sha1, &type, &inner.size); + if (!tree || type != OBJ_TREE) die("corrupt tree sha %s", sha1_to_hex(sha1)); inner.buf = tree; @@ -190,17 +190,17 @@ int parse_tree_buffer(struct tree *item, void *buffer, unsigned long size) int parse_tree(struct tree *item) { - char type[20]; + enum object_type type; void *buffer; unsigned long size; if (item->object.parsed) return 0; - buffer = read_sha1_file(item->object.sha1, type, &size); + buffer = read_sha1_file(item->object.sha1, &type, &size); if (!buffer) return error("Could not read %s", sha1_to_hex(item->object.sha1)); - if (strcmp(type, tree_type)) { + if (type != OBJ_TREE) { free(buffer); return error("Object %s not a tree", sha1_to_hex(item->object.sha1)); diff --git a/unpack-file.c b/unpack-file.c index d24acc2a67..25c56b374a 100644 --- a/unpack-file.c +++ b/unpack-file.c @@ -5,12 +5,12 @@ static char *create_temp_file(unsigned char *sha1) { static char path[50]; void *buf; - char type[100]; + enum object_type type; unsigned long size; int fd; - buf = read_sha1_file(sha1, type, &size); - if (!buf || strcmp(type, blob_type)) + buf = read_sha1_file(sha1, &type, &size); + if (!buf || type != OBJ_BLOB) die("unable to read blob object %s", sha1_to_hex(sha1)); strcpy(path, ".merge_file_XXXXXX"); diff --git a/upload-pack.c b/upload-pack.c index 3648aae1a7..804bbb6c9e 100644 --- a/upload-pack.c +++ b/upload-pack.c @@ -455,7 +455,7 @@ static int get_common_commits(void) continue; } len = strip(line, len); - if (!strncmp(line, "have ", 5)) { + if (!prefixcmp(line, "have ")) { switch (got_sha1(line+5, sha1)) { case -1: /* they have what we do not */ if (multi_ack && ok_to_give_up()) @@ -502,7 +502,7 @@ static void receive_needs(void) if (!len) break; - if (!strncmp("shallow ", line, 8)) { + if (!prefixcmp(line, "shallow ")) { unsigned char sha1[20]; struct object *object; use_thin_pack = 0; @@ -515,7 +515,7 @@ static void receive_needs(void) 
add_object_array(object, NULL, &shallows); continue; } - if (!strncmp("deepen ", line, 7)) { + if (!prefixcmp(line, "deepen ")) { char *end; use_thin_pack = 0; depth = strtol(line + 7, &end, 0); @@ -523,7 +523,7 @@ static void receive_needs(void) die("Invalid deepen: %s", line); continue; } - if (strncmp("want ", line, 5) || + if (prefixcmp(line, "want ") || get_sha1_hex(line+5, sha1_buf)) die("git-upload-pack: protocol error, " "expected to get sha, not '%s'", line); @@ -656,7 +656,7 @@ int main(int argc, char **argv) strict = 1; continue; } - if (!strncmp(arg, "--timeout=", 10)) { + if (!prefixcmp(arg, "--timeout=")) { timeout = atoi(arg+10); continue; } diff --git a/wt-status.c b/wt-status.c index 2879c3d5ec..a25632bc87 100644 --- a/wt-status.c +++ b/wt-status.c @@ -191,12 +191,18 @@ static void wt_status_print_changed_cb(struct diff_queue_struct *q, wt_status_print_trailer(); } +static void wt_read_cache(struct wt_status *s) +{ + discard_cache(); + read_cache(); +} + void wt_status_print_initial(struct wt_status *s) { int i; char buf[PATH_MAX]; - read_cache(); + wt_read_cache(s); if (active_nr) { s->commitable = 1; wt_status_print_cached_header(NULL); @@ -220,6 +226,7 @@ static void wt_status_print_updated(struct wt_status *s) rev.diffopt.format_callback = wt_status_print_updated_cb; rev.diffopt.format_callback_data = s; rev.diffopt.detect_rename = 1; + wt_read_cache(s); run_diff_index(&rev, 1); } @@ -231,6 +238,7 @@ static void wt_status_print_changed(struct wt_status *s) rev.diffopt.output_format |= DIFF_FORMAT_CALLBACK; rev.diffopt.format_callback = wt_status_print_changed_cb; rev.diffopt.format_callback_data = s; + wt_read_cache(s); run_diff_files(&rev, 0); } @@ -287,6 +295,7 @@ static void wt_status_print_verbose(struct wt_status *s) setup_revisions(0, NULL, &rev, s->reference); rev.diffopt.output_format |= DIFF_FORMAT_PATCH; rev.diffopt.detect_rename = 1; + wt_read_cache(s); run_diff_index(&rev, 1); } @@ -298,7 +307,7 @@ void wt_status_print(struct wt_status *s) if (s->branch) { const char *on_what = "On branch "; const char *branch_name = s->branch; - if (!strncmp(branch_name, "refs/heads/", 11)) + if (!prefixcmp(branch_name, "refs/heads/")) branch_name += 11; else if (!strcmp(branch_name, "HEAD")) { branch_name = ""; @@ -316,7 +325,6 @@ void wt_status_print(struct wt_status *s) } else { wt_status_print_updated(s); - discard_cache(); } wt_status_print_changed(s); @@ -344,7 +352,7 @@ int git_status_config(const char *k, const char *v) wt_status_use_color = git_config_colorbool(k, v); return 0; } - if (!strncmp(k, "status.color.", 13) || !strncmp(k, "color.status.", 13)) { + if (!prefixcmp(k, "status.color.") || !prefixcmp(k, "color.status.")) { int slot = parse_status_slot(k, 13); color_parse(v, k, wt_status_colors[slot]); } diff --git a/xdiff/xdiff.h b/xdiff/xdiff.h index fa409d5234..e874a7c46a 100644 --- a/xdiff/xdiff.h +++ b/xdiff/xdiff.h @@ -31,7 +31,8 @@ extern "C" { #define XDF_NEED_MINIMAL (1 << 1) #define XDF_IGNORE_WHITESPACE (1 << 2) #define XDF_IGNORE_WHITESPACE_CHANGE (1 << 3) -#define XDF_WHITESPACE_FLAGS (XDF_IGNORE_WHITESPACE | XDF_IGNORE_WHITESPACE_CHANGE) +#define XDF_IGNORE_WHITESPACE_AT_EOL (1 << 4) +#define XDF_WHITESPACE_FLAGS (XDF_IGNORE_WHITESPACE | XDF_IGNORE_WHITESPACE_CHANGE | XDF_IGNORE_WHITESPACE_AT_EOL) #define XDL_PATCH_NORMAL '-' #define XDL_PATCH_REVERSE '+' diff --git a/xdiff/xutils.c b/xdiff/xutils.c index 1b899f32c4..3653864e4b 100644 --- a/xdiff/xutils.c +++ b/xdiff/xutils.c @@ -215,6 +215,21 @@ int xdl_recmatch(const char *l1, long s1, const 
char *l2, long s2, long flags) return 0; } return (i1 >= s1 && i2 >= s2); + } else if (flags & XDF_IGNORE_WHITESPACE_AT_EOL) { + for (i1 = i2 = 0; i1 < s1 && i2 < s2; ) { + if (l1[i1] != l2[i2]) { + while (i1 < s1 && isspace(l1[i1])) + i1++; + while (i2 < s2 && isspace(l2[i2])) + i2++; + if (i1 < s1 || i2 < s2) + return 0; + return 1; + } + i1++; + i2++; + } + return i1 >= s1 && i2 >= s2; } else return s1 == s2 && !memcmp(l1, l2, s1); @@ -227,6 +242,7 @@ unsigned long xdl_hash_record(char const **data, char const *top, long flags) { for (; ptr < top && *ptr != '\n'; ptr++) { if (isspace(*ptr) && (flags & XDF_WHITESPACE_FLAGS)) { + const char *ptr2 = ptr; while (ptr + 1 < top && isspace(ptr[1]) && ptr[1] != '\n') ptr++; @@ -235,6 +251,14 @@ unsigned long xdl_hash_record(char const **data, char const *top, long flags) { ha += (ha << 5); ha ^= (unsigned long) ' '; } + if (flags & XDF_IGNORE_WHITESPACE_AT_EOL + && ptr[1] != '\n') { + while (ptr2 != ptr + 1) { + ha += (ha << 5); + ha ^= (unsigned long) *ptr2; + ptr2++; + } + } continue; } ha += (ha << 5); |