git-merge
git-merge-base
git-merge-index
+git-merge-file
git-merge-tree
git-merge-octopus
git-merge-one-file
install: man
$(INSTALL) -d -m755 $(DESTDIR)$(man1dir) $(DESTDIR)$(man7dir)
- $(INSTALL) $(DOC_MAN1) $(DESTDIR)$(man1dir)
- $(INSTALL) $(DOC_MAN7) $(DESTDIR)$(man7dir)
+ $(INSTALL) -m644 $(DOC_MAN1) $(DESTDIR)$(man1dir)
+ $(INSTALL) -m644 $(DOC_MAN7) $(DESTDIR)$(man7dir)
#
<xsl:apply-templates/>
<xsl:text>.br </xsl:text>
</xsl:template>
+
+<!-- sorry, this is not about callouts, but an attempt to work around
+     the spurious .sp at the tail of the line that the docbook
+     stylesheets seem to add -->
+<xsl:template match="simpara">
+ <xsl:variable name="content">
+ <xsl:apply-templates/>
+ </xsl:variable>
+ <xsl:value-of select="normalize-space($content)"/>
+ <xsl:if test="not(ancestor::authorblurb) and
+ not(ancestor::personblurb)">
+ <xsl:text> </xsl:text>
+ </xsl:if>
+</xsl:template>
+
</xsl:stylesheet>
branch.<name>.remote::
When in branch <name>, it tells `git fetch` which remote to fetch.
+ If this option is not given, `git fetch` defaults to remote "origin".
branch.<name>.merge::
- When in branch <name>, it tells `git fetch` the default remote branch
- to be merged.
-
-pager.color::
+ When in branch <name>, it tells `git fetch` the default refspec to
+	be marked for merging in FETCH_HEAD. The value must exactly match
+	the remote part of one of the refspecs which are fetched from the
+	remote given by "branch.<name>.remote".
+	The merge information is used by `git pull` (which first calls
+	`git fetch`) to look up the default branch for merging. Without
+	this option, `git pull` defaults to merging the first refspec fetched.
+ Specify multiple values to get an octopus merge.
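++
+For example, a hypothetical setup where `git pull` on branch "next"
+merges the "next" branch fetched from "origin" could be configured with:
++
+	$ git repo-config branch.next.remote origin
+	$ git repo-config branch.next.merge refs/heads/next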
+
+color.pager::
A boolean to enable/disable colored output when the pager is in
use (default is true).
-diff.color::
+color.diff::
When true (or `always`), always use colors in patch.
When false (or `never`), never. When set to `auto`, use
colors only when the output is to the terminal.
-diff.color.<slot>::
+color.diff.<slot>::
Use customized color for diff colorization. `<slot>`
specifies which part of the patch to use the specified
color, and is one of `plain` (context text), `meta`
The default set of branches for gitlink:git-show-branch[1].
See gitlink:git-show-branch[1].
-status.color::
+color.status::
A boolean to enable/disable color in the output of
gitlink:git-status[1]. May be set to `true` (or `always`),
`false` (or `never`) or `auto`, in which case colors are used
only when the output is to a terminal. Defaults to false.
-status.color.<slot>::
+color.status.<slot>::
Use customized color for status colorization. `<slot>` is
one of `header` (the header text of the status message),
`updated` (files which are updated but not committed),
`changed` (files which are changed but not updated in the index),
or `untracked` (files which are not tracked by git). The values of
- these variables may be specified as in diff.color.<slot>.
+ these variables may be specified as in color.diff.<slot>.
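++
+For example (purely as an illustration), to show untracked files in red:
++
+	$ git repo-config color.status.untracked red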
tar.umask::
By default, gitlink:git-tar-tree[1] sets file and directory modes
git for CVS users
=================
-So you're a CVS user. That's OK, it's a treatable condition. The job of
-this document is to put you on the road to recovery, by helping you
-convert an existing cvs repository to git, and by showing you how to use a
-git repository in a cvs-like fashion.
+Git differs from CVS in that every working tree contains a repository with
+a full copy of the project history, and no repository is inherently more
+important than any other. However, you can emulate the CVS model by
+designating a single shared repository which people can synchronize with;
+this document explains how to do that.
Some basic familiarity with git is required. This
link:tutorial.html[tutorial introduction to git] should be sufficient.
-First, note some ways that git differs from CVS:
+Developing against a shared repository
+--------------------------------------
- * Commits are atomic and project-wide, not per-file as in CVS.
-
- * Offline work is supported: you can make multiple commits locally,
- then submit them when you're ready.
-
- * Branching is fast and easy.
-
- * Every working tree contains a repository with a full copy of the
- project history, and no repository is inherently more important than
- any other. However, you can emulate the CVS model by designating a
- single shared repository which people can synchronize with; see below
- for details.
-
- * Since every working tree contains a repository, a commit in your
- private repository will not publish your changes; it will only create
- a revision. You have to "push" your changes to a public repository to
- make them visible to others.
-
-Importing a CVS archive
------------------------
-
-First, install version 2.1 or higher of cvsps from
-link:http://www.cobite.com/cvsps/[http://www.cobite.com/cvsps/] and make
-sure it is in your path. The magic command line is then
-
--------------------------------------------
-$ git cvsimport -v -d <cvsroot> -C <destination> <module>
--------------------------------------------
-
-This puts a git archive of the named CVS module in the directory
-<destination>, which will be created if necessary. The -v option makes
-the conversion script very chatty.
-
-The import checks out from CVS every revision of every file. Reportedly
-cvsimport can average some twenty revisions per second, so for a
-medium-sized project this should not take more than a couple of minutes.
-Larger projects or remote repositories may take longer.
-
-The main trunk is stored in the git branch named `origin`, and additional
-CVS branches are stored in git branches with the same names. The most
-recent version of the main trunk is also left checked out on the `master`
-branch, so you can start adding your own changes right away.
-
-The import is incremental, so if you call it again next month it will
-fetch any CVS updates that have been made in the meantime. For this to
-work, you must not modify the imported branches; instead, create new
-branches for your own changes, and merge in the imported branches as
-necessary.
-
-Development Models
-------------------
-
-CVS users are accustomed to giving a group of developers commit access to
-a common repository. In the next section we'll explain how to do this
-with git. However, the distributed nature of git allows other development
-models, and you may want to first consider whether one of them might be a
-better fit for your project.
-
-For example, you can choose a single person to maintain the project's
-primary public repository. Other developers then clone this repository
-and each work in their own clone. When they have a series of changes that
-they're happy with, they ask the maintainer to pull from the branch
-containing the changes. The maintainer reviews their changes and pulls
-them into the primary repository, which other developers pull from as
-necessary to stay coordinated. The Linux kernel and other projects use
-variants of this model.
-
-With a small group, developers may just pull changes from each other's
-repositories without the need for a central maintainer.
-
-Creating a Shared Repository
-----------------------------
-
-Start with an ordinary git working directory containing the project, and
-remove the checked-out files, keeping just the bare .git directory:
-
-------------------------------------------------
-$ mv project/.git /pub/repo.git
-$ rm -r project/
-------------------------------------------------
-
-Next, give every team member read/write access to this repository. One
-easy way to do this is to give all the team members ssh access to the
-machine where the repository is hosted. If you don't want to give them a
-full shell on the machine, there is a restricted shell which only allows
-users to do git pushes and pulls; see gitlink:git-shell[1].
-
-Put all the committers in the same group, and make the repository
-writable by that group:
-
-------------------------------------------------
-$ chgrp -R $group repo.git
-$ find repo.git -mindepth 1 -type d |xargs chmod ug+rwx,g+s
-$ GIT_DIR=repo.git git repo-config core.sharedrepository true
-------------------------------------------------
-
-Make sure committers have a umask of at most 027, so that the directories
-they create are writable and searchable by other group members.
-
-Performing Development on a Shared Repository
----------------------------------------------
-
-Suppose a repository is now set up in /pub/repo.git on the host
+Suppose a shared repository is set up in /pub/repo.git on the host
foo.com. Then as an individual committer you can clone the shared
-repository:
+repository over ssh with:
------------------------------------------------
$ git clone foo.com:/pub/repo.git/ my-project
------------------------------------------------
which merges in any work that others might have done since the clone
-operation.
+operation. If there are uncommitted changes in your working tree, commit
+them first before running `git pull`.
[NOTE]
================================
`my-project/.git/remotes/origin` file, and that's why the previous step
and the next step both work.
------------
-URL: foo.com:/pub/project.git/ my-project
-Pull: master:origin
+URL: foo.com:/pub/project.git/
+Pull: refs/heads/master:refs/remotes/origin/master
------------
================================
-You can update the shared repository with your changes by first commiting
-your changes, and then using:
+You can update the shared repository with your changes by first committing
+your changes, and then using the gitlink:git-push[1] command:
------------------------------------------------
$ git push origin master
------------
$ git push origin
-$ git push repo.shared.xz:/pub/scm/project.git/
+$ git push foo.com:/pub/project.git/
------------
as long as the shared repository does not have any branches
other than `master`.
-[NOTE]
-============
-Because of this behavior, if the shared repository and the developer's
-repository both have branches named `origin`, then a push like the above
-attempts to update the `origin` branch in the shared repository from the
-developer's `origin` branch. The results may be unexpected, so it's
-usually best to remove any branch named `origin` from the shared
-repository.
-============
+Setting Up a Shared Repository
+------------------------------
+
+We assume you have already created a git repository for your project,
+possibly from scratch or from a tarball (see the
+link:tutorial.html[tutorial]), or imported from an existing CVS
+repository (see the next section).
+
+Assume your existing repo is at /home/alice/myproject. Create a new "bare"
+repository (a repository without a working tree) and fetch your project into
+it:
+
+------------------------------------------------
+$ mkdir /pub/my-repo.git
+$ cd /pub/my-repo.git
+$ git --bare init-db --shared
+$ git --bare fetch /home/alice/myproject master:master
+------------------------------------------------
+
+Next, give every team member read/write access to this repository. One
+easy way to do this is to give all the team members ssh access to the
+machine where the repository is hosted. If you don't want to give them a
+full shell on the machine, there is a restricted shell which only allows
+users to do git pushes and pulls; see gitlink:git-shell[1].
+
+Put all the committers in the same group, and make the repository
+writable by that group:
+
+------------------------------------------------
+$ chgrp -R $group /pub/my-repo.git
+------------------------------------------------
+
+Make sure committers have a umask of at most 027, so that the directories
+they create are writable and searchable by other group members.
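+
+For example, each committer can check and adjust this from the shell
+before working with the repository (027 is simply the value suggested
+above):
+
+------------------------------------------------
+$ umask 027
+------------------------------------------------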
+
+Importing a CVS archive
+-----------------------
+
+First, install version 2.1 or higher of cvsps from
+link:http://www.cobite.com/cvsps/[http://www.cobite.com/cvsps/] and make
+sure it is in your path. Then cd to a checked-out CVS working directory
+of the project you are interested in and run gitlink:git-cvsimport[1]:
+
+-------------------------------------------
+$ git cvsimport -C <destination>
+-------------------------------------------
+
+This puts a git archive of the CVS module in the directory
+<destination>, which will be created if necessary.
+
+The import checks out from CVS every revision of every file. Reportedly
+cvsimport can average some twenty revisions per second, so for a
+medium-sized project this should not take more than a couple of minutes.
+Larger projects or remote repositories may take longer.
+
+The main trunk is stored in the git branch named `origin`, and additional
+CVS branches are stored in git branches with the same names. The most
+recent version of the main trunk is also left checked out on the `master`
+branch, so you can start adding your own changes right away.
+
+The import is incremental, so if you call it again next month it will
+fetch any CVS updates that have been made in the meantime. For this to
+work, you must not modify the imported branches; instead, create new
+branches for your own changes, and merge in the imported branches as
+necessary.
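+
+A hypothetical sequence following this advice, assuming the import was
+made into <destination> as above, might look like:
+
+------------------------------------------------
+$ cd <destination>
+$ git checkout -b my-changes origin   # branch off the imported trunk
+  (edit, git commit, repeat ...)
+$ git pull . origin                   # merge in newly imported changes
+------------------------------------------------
+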
Advanced Shared Repository Management
-------------------------------------
link:howto/update-hook-example.txt[Controlling access to branches using
update hooks].
-CVS annotate
-------------
+Providing CVS Access to a git Repository
+----------------------------------------
+
+It is also possible to provide true CVS access to a git repository, so
+that developers can still use CVS; see gitlink:git-cvsserver[1] for
+details.
+
+Alternative Development Models
+------------------------------
+
+CVS users are accustomed to giving a group of developers commit access to
+a common repository. As we've seen, this is also possible with git.
+However, the distributed nature of git allows other development models,
+and you may want to first consider whether one of them might be a better
+fit for your project.
+
+For example, you can choose a single person to maintain the project's
+primary public repository. Other developers then clone this repository
+and each work in their own clone. When they have a series of changes that
+they're happy with, they ask the maintainer to pull from the branch
+containing the changes. The maintainer reviews their changes and pulls
+them into the primary repository, which other developers pull from as
+necessary to stay coordinated. The Linux kernel and other projects use
+variants of this model.
-So, something has gone wrong, and you don't know whom to blame, and
-you're an ex-CVS user and used to do "cvs annotate" to see who caused
-the breakage. You're looking for the "git annotate", and it's just
-claiming not to find such a script. You're annoyed.
-
-Yes, that's right. Core git doesn't do "annotate", although it's
-technically possible, and there are at least two specialized scripts out
-there that can be used to get equivalent information (see the git
-mailing list archives for details).
-
-git has a couple of alternatives, though, that you may find sufficient
-or even superior depending on your use. One is called "git-whatchanged"
-(for obvious reasons) and the other one is called "pickaxe" ("a tool for
-the software archaeologist").
-
-The "git-whatchanged" script is a truly trivial script that can give you
-a good overview of what has changed in a file or a directory (or an
-arbitrary list of files or directories). The "pickaxe" support is an
-additional layer that can be used to further specify exactly what you're
-looking for, if you already know the specific area that changed.
-
-Let's step back a bit and think about the reason why you would
-want to do "cvs annotate a-file.c" to begin with.
-
-You would use "cvs annotate" on a file when you have trouble
-with a function (or even a single "if" statement in a function)
-that happens to be defined in the file, which does not do what
-you want it to do. And you would want to find out why it was
-written that way, because you are about to modify it to suit
-your needs, and at the same time you do not want to break its
-current callers. For that, you are trying to find out why the
-original author did things that way in the original context.
-
-Many times, it may be enough to see the commit log messages of
-commits that touch the file in question, possibly along with the
-patches themselves, like this:
-
- $ git-whatchanged -p a-file.c
-
-This will show log messages and patches for each commit that
-touches a-file.
-
-This, however, may not be very useful when this file has many
-modifications that are not related to the piece of code you are
-interested in. You would see many log messages and patches that
-do not have anything to do with the piece of code you are
-interested in. As an example, assuming that you have this piece
-of code that you are interested in in the HEAD version:
-
- if (frotz) {
- nitfol();
- }
-
-you would use git-rev-list and git-diff-tree like this:
-
- $ git-rev-list HEAD |
- git-diff-tree --stdin -v -p -S'if (frotz) {
- nitfol();
- }'
-
-We have already talked about the "\--stdin" form of git-diff-tree
-command that reads the list of commits and compares each commit
-with its parents (otherwise you should go back and read the tutorial).
-The git-whatchanged command internally runs
-the equivalent of the above command, and can be used like this:
-
- $ git-whatchanged -p -S'if (frotz) {
- nitfol();
- }'
-
-When the -S option is used, git-diff-tree command outputs
-differences between two commits only if one tree has the
-specified string in a file and the corresponding file in the
-other tree does not. The above example looks for a commit that
-has the "if" statement in it in a file, but its parent commit
-does not have it in the same shape in the corresponding file (or
-the other way around, where the parent has it and the commit
-does not), and the differences between them are shown, along
-with the commit message (thanks to the -v flag). It does not
-show anything for commits that do not touch this "if" statement.
-
-Also, in the original context, the same statement might have
-appeared at first in a different file and later the file was
-renamed to "a-file.c". CVS annotate would not help you to go
-back across such a rename, but git would still help you in such
-a situation. For that, you can give the -C flag to
-git-diff-tree, like this:
-
- $ git-whatchanged -p -C -S'if (frotz) {
- nitfol();
- }'
-
-When the -C flag is used, file renames and copies are followed.
-So if the "if" statement in question happens to be in "a-file.c"
-in the current HEAD commit, even if the file was originally
-called "o-file.c" and then renamed in an earlier commit, or if
-the file was created by copying an existing "o-file.c" in an
-earlier commit, you will not lose track. If the "if" statement
-did not change across such a rename or copy, then the commit that
-does rename or copy would not show in the output, and if the
-"if" statement was modified while the file was still called
-"o-file.c", it would find the commit that changed the statement
-when it was in "o-file.c".
-
-NOTE: The current version of "git-diff-tree -C" is not eager
- enough to find copies, and it will miss the fact that a-file.c
- was created by copying o-file.c unless o-file.c was somehow
- changed in the same commit.
-
-You can use the --pickaxe-all flag in addition to the -S flag.
-This causes the differences from all the files contained in
-those two commits, not just the differences between the files
-that contain this changed "if" statement:
-
- $ git-whatchanged -p -C -S'if (frotz) {
- nitfol();
- }' --pickaxe-all
-
-NOTE: This option is called "--pickaxe-all" because -S
- option is internally called "pickaxe", a tool for software
- archaeologists.
+With a small group, developers may just pull changes from each other's
+repositories without the need for a central maintainer.
DESCRIPTION
-----------
-Clones a repository into a newly created directory. All remote
-branch heads are copied under `$GIT_DIR/refs/heads/`, except
-that the remote `master` is also copied to `origin` branch.
-In addition, `$GIT_DIR/remotes/origin` file is set up to have
-this line:
+Clones a repository into a newly created directory, creates
+remote-tracking branches for each branch in the cloned repository
+(visible using `git branch -r`), and creates and checks out a master
+branch equal to the cloned repository's master branch.
- Pull: master:origin
-
-This is to help the typical workflow of working off of the
-remote `master` branch. Every time `git pull` without argument
-is run, the progress on the remote `master` branch is tracked by
-copying it into the local `origin` branch, and merged into the
-branch you are currently working on. Remote branches other than
-`master` are also added there to be tracked.
+After the clone, a plain `git fetch` without arguments will update
+all the remote-tracking branches, and a `git pull` without
+arguments will in addition merge the remote master branch into the
+current branch.
+This default configuration is achieved by creating references to
+the remote branch heads under `$GIT_DIR/refs/remotes/origin` and
+by initializing `remote.origin.url` and `remote.origin.fetch`
+configuration variables.
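+
+For illustration, one way to inspect this setup after cloning is:
+
+	$ git branch -r
+	$ git repo-config remote.origin.url
+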
OPTIONS
-------
processes them in turn only stopping if merge returns a non-zero exit
code.
-Typically this is run with the a script calling the merge command from
-the RCS package.
+Typically this is run with a script calling git's imitation of
+the merge command from the RCS package.
A sample script called "git-merge-one-file" is included in the
distribution.
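+
+A typical invocation uses that script to resolve every unmerged path in
+the index (shown here only as an illustration):
+
+	git-merge-index git-merge-one-file -a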
refs that exist both on the local side and on the remote
side are updated.
+
-Some short-cut notations are also supported.
+`tag <tag>` means the same as `refs/tags/<tag>:refs/tags/<tag>`.
+
-* `tag <tag>` means the same as `refs/tags/<tag>:refs/tags/<tag>`.
-* A parameter <ref> without a colon is equivalent to
- <ref>`:`<ref>, hence updates <ref> in the destination from <ref>
- in the source.
+A parameter <ref> without a colon is equivalent to
+<ref>`:`<ref>, hence updates <ref> in the destination from <ref>
+in the source.
++
+Pushing an empty <src> allows you to delete the <dst> ref from
+the remote repository.
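++
+For example, `git push origin :refs/heads/experimental` (using a
+made-up branch name) would delete the `experimental` branch from the
+remote repository.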
\--all::
Instead of naming each ref to push, specifies that all
Author
------
-Written by Junio C Hamano <junkio@cox.net>
+Written by Junio C Hamano <junkio@cox.net>, later rewritten in C
+by Linus Torvalds <torvalds@osdl.org>
Documentation
--------------
manually joining branches on commit.
'dcommit'::
- Commit all diffs from the current HEAD directly to the SVN
+ Commit all diffs from a specified head directly to the SVN
repository, and then rebase or reset (depending on whether or
- not there is a diff between SVN and HEAD). It is recommended
+ not there is a diff between SVN and head). It is recommended
that you run git-svn fetch and rebase (not pull) your commits
against the latest changes in the SVN repository.
+ An optional command-line argument may be specified as an
+ alternative to HEAD.
This is advantageous over 'commit' (below) because it produces
cleaner, more linear history.
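+	For instance, the optional argument mentioned above lets you commit
+	from another head: `git-svn dcommit my-branch` (where `my-branch` is
+	a hypothetical local branch) uses `my-branch` instead of HEAD.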
do that even if it wasn't for git. There's no point in living
in the dark ages any more.
- - "merge", the standard UNIX three-way merge program. It usually
- comes with the "rcs" package on most Linux distributions, so if
- you have a developer install you probably have it already, but a
- "graphical user desktop" install might have left it out.
-
- You'll only need the merge program if you do development using
- git, and if you only use git to track other peoples work you'll
- never notice the lack of it.
-
- "wish", the Tcl/Tk windowing shell is used in gitk to show the
history graphically
builtin-ls-tree.o \
builtin-mailinfo.o \
builtin-mailsplit.o \
+ builtin-merge-file.o \
builtin-mv.o \
builtin-name-rev.o \
builtin-pack-objects.o \
NEEDS_SSL_WITH_CRYPTO = YesPlease
NEEDS_LIBICONV = YesPlease
NO_STRLCPY = YesPlease
- ifndef NO_FINK
- ifeq ($(shell test -d /sw/lib && echo y),y)
- BASIC_CFLAGS += -I/sw/include
- BASIC_LDFLAGS += -L/sw/lib
- endif
- endif
- ifndef NO_DARWIN_PORTS
- ifeq ($(shell test -d /opt/local/lib && echo y),y)
- BASIC_CFLAGS += -I/opt/local/include
- BASIC_LDFLAGS += -L/opt/local/lib
- endif
- endif
endif
ifeq ($(uname_S),SunOS)
NEEDS_SOCKET = YesPlease
-include config.mak.autogen
-include config.mak
+ifeq ($(uname_S),Darwin)
+ ifndef NO_FINK
+ ifeq ($(shell test -d /sw/lib && echo y),y)
+ BASIC_CFLAGS += -I/sw/include
+ BASIC_LDFLAGS += -L/sw/lib
+ endif
+ endif
+ ifndef NO_DARWIN_PORTS
+ ifeq ($(shell test -d /opt/local/lib && echo y),y)
+ BASIC_CFLAGS += -I/opt/local/include
+ BASIC_LDFLAGS += -L/opt/local/lib
+ endif
+ endif
+endif
+
ifndef NO_CURL
ifdef CURLDIR
# This is still problematic -- gcc does not always want -R.
$(LIB_FILE): $(LIB_OBJS)
rm -f $@ && $(AR) rcs $@ $(LIB_OBJS)
-XDIFF_OBJS=xdiff/xdiffi.o xdiff/xprepare.o xdiff/xutils.o xdiff/xemit.o
+XDIFF_OBJS=xdiff/xdiffi.o xdiff/xprepare.o xdiff/xutils.o xdiff/xemit.o \
+ xdiff/xmerge.o
$(XDIFF_OBJS): xdiff/xinclude.h xdiff/xmacros.h xdiff/xdiff.h xdiff/xtypes.h \
xdiff/xutils.h xdiff/xprepare.h xdiff/xdiffi.h xdiff/xemit.h
* Based on git-branch.sh by Junio C Hamano.
*/
+#include "color.h"
#include "cache.h"
#include "refs.h"
#include "commit.h"
static const char *head;
static unsigned char head_sha1[20];
+static int branch_use_color;
+static char branch_colors[][COLOR_MAXLEN] = {
+ "\033[m", /* reset */
+ "", /* PLAIN (normal) */
+ "\033[31m", /* REMOTE (red) */
+ "", /* LOCAL (normal) */
+ "\033[32m", /* CURRENT (green) */
+};
+enum color_branch {
+ COLOR_BRANCH_RESET = 0,
+ COLOR_BRANCH_PLAIN = 1,
+ COLOR_BRANCH_REMOTE = 2,
+ COLOR_BRANCH_LOCAL = 3,
+ COLOR_BRANCH_CURRENT = 4,
+};
+
+static int parse_branch_color_slot(const char *var, int ofs)
+{
+ if (!strcasecmp(var+ofs, "plain"))
+ return COLOR_BRANCH_PLAIN;
+ if (!strcasecmp(var+ofs, "reset"))
+ return COLOR_BRANCH_RESET;
+ if (!strcasecmp(var+ofs, "remote"))
+ return COLOR_BRANCH_REMOTE;
+ if (!strcasecmp(var+ofs, "local"))
+ return COLOR_BRANCH_LOCAL;
+ if (!strcasecmp(var+ofs, "current"))
+ return COLOR_BRANCH_CURRENT;
+ die("bad config variable '%s'", var);
+}
+
+int git_branch_config(const char *var, const char *value)
+{
+ if (!strcmp(var, "color.branch")) {
+ branch_use_color = git_config_colorbool(var, value);
+ return 0;
+ }
+ if (!strncmp(var, "color.branch.", 13)) {
+ int slot = parse_branch_color_slot(var, 13);
+ color_parse(value, var, branch_colors[slot]);
+ return 0;
+ }
+ return git_default_config(var, value);
+}
+
+const char *branch_get_color(enum color_branch ix)
+{
+ if (branch_use_color)
+ return branch_colors[ix];
+ return "";
+}
+
static int in_merge_bases(const unsigned char *sha1,
struct commit *rev1,
struct commit *rev2)
int i;
char c;
struct ref_list ref_list;
+ int color;
memset(&ref_list, 0, sizeof(ref_list));
ref_list.kinds = kinds;
qsort(ref_list.list, ref_list.index, sizeof(struct ref_item), ref_cmp);
for (i = 0; i < ref_list.index; i++) {
+ switch( ref_list.list[i].kind ) {
+ case REF_LOCAL_BRANCH:
+ color = COLOR_BRANCH_LOCAL;
+ break;
+ case REF_REMOTE_BRANCH:
+ color = COLOR_BRANCH_REMOTE;
+ break;
+ default:
+ color = COLOR_BRANCH_PLAIN;
+ break;
+ }
+
c = ' ';
if (ref_list.list[i].kind == REF_LOCAL_BRANCH &&
- !strcmp(ref_list.list[i].name, head))
+ !strcmp(ref_list.list[i].name, head)) {
c = '*';
+ color = COLOR_BRANCH_CURRENT;
+ }
if (verbose) {
- printf("%c %-*s", c, ref_list.maxwidth,
- ref_list.list[i].name);
+ printf("%c %s%-*s%s", c,
+ branch_get_color(color),
+ ref_list.maxwidth,
+ ref_list.list[i].name,
+ branch_get_color(COLOR_BRANCH_RESET));
print_ref_info(ref_list.list[i].sha1, abbrev);
}
else
- printf("%c %s\n", c, ref_list.list[i].name);
+ printf("%c %s%s%s\n", c,
+ branch_get_color(color),
+ ref_list.list[i].name,
+ branch_get_color(COLOR_BRANCH_RESET));
}
free_ref_list(&ref_list);
int kinds = REF_LOCAL_BRANCH;
int i;
- git_config(git_default_config);
+ git_config(git_branch_config);
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
verbose = 1;
continue;
}
+ if (!strcmp(arg, "--color")) {
+ branch_use_color = 1;
+ continue;
+ }
+ if (!strcmp(arg, "--no-color")) {
+ branch_use_color = 0;
+ continue;
+ }
usage(builtin_branch_usage);
}
strcat(extra_headers, value);
return 0;
}
- if (!strcmp(var, "diff.color")) {
+ if (!strcmp(var, "diff.color") || !strcmp(var, "color.diff")) {
return 0;
}
return git_log_config(var, value);
--- /dev/null
+#include "cache.h"
+#include "xdiff/xdiff.h"
+
+static const char merge_file_usage[] =
+"git merge-file [-p | --stdout] [-q | --quiet] [-L name1 [-L orig [-L name2]]] file1 orig_file file2";
+
+static int read_file(mmfile_t *ptr, const char *filename)
+{
+ struct stat st;
+ FILE *f;
+
+ if (stat(filename, &st))
+ return error("Could not stat %s", filename);
+ if ((f = fopen(filename, "rb")) == NULL)
+ return error("Could not open %s", filename);
+	ptr->ptr = xmalloc(st.st_size);
+	if (st.st_size && fread(ptr->ptr, st.st_size, 1, f) != 1)
+		return error("Could not read %s", filename);
+ fclose(f);
+ ptr->size = st.st_size;
+ return 0;
+}
+
+int cmd_merge_file(int argc, char **argv, char **envp)
+{
+ char *names[3];
+ mmfile_t mmfs[3];
+ mmbuffer_t result = {NULL, 0};
+ xpparam_t xpp = {XDF_NEED_MINIMAL};
+ int ret = 0, i = 0, to_stdout = 0;
+
+ while (argc > 4) {
+ if (!strcmp(argv[1], "-L") && i < 3) {
+ names[i++] = argv[2];
+ argc--;
+ argv++;
+ } else if (!strcmp(argv[1], "-p") ||
+ !strcmp(argv[1], "--stdout"))
+ to_stdout = 1;
+ else if (!strcmp(argv[1], "-q") ||
+ !strcmp(argv[1], "--quiet"))
+ freopen("/dev/null", "w", stderr);
+ else
+ usage(merge_file_usage);
+ argc--;
+ argv++;
+ }
+
+ if (argc != 4)
+ usage(merge_file_usage);
+
+ for (; i < 3; i++)
+ names[i] = argv[i + 1];
+
+ for (i = 0; i < 3; i++)
+ if (read_file(mmfs + i, argv[i + 1]))
+ return -1;
+
+ ret = xdl_merge(mmfs + 1, mmfs + 0, names[0], mmfs + 2, names[2],
+ &xpp, XDL_MERGE_ZEALOUS, &result);
+
+ for (i = 0; i < 3; i++)
+ free(mmfs[i].ptr);
+
+ if (ret >= 0) {
+ char *filename = argv[1];
+ FILE *f = to_stdout ? stdout : fopen(filename, "wb");
+
+ if (!f)
+ ret = error("Could not open %s for writing", filename);
+ else if (fwrite(result.ptr, result.size, 1, f) != 1)
+ ret = error("Could not write to %s", filename);
+ else if (fclose(f))
+ ret = error("Could not close %s", filename);
+ free(result.ptr);
+ }
+
+ return ret;
+}
static void set_refspecs(const char **refs, int nr)
{
if (nr) {
- size_t bytes = nr * sizeof(char *);
-
- refspec = xrealloc(refspec, bytes);
- memcpy(refspec, refs, bytes);
- refspec_nr = nr;
+ int pass;
+ for (pass = 0; pass < 2; pass++) {
+ /* pass 0 counts and allocates, pass 1 fills */
+ int i, cnt;
+ for (i = cnt = 0; i < nr; i++) {
+ if (!strcmp("tag", refs[i])) {
+ int len;
+ char *tag;
+ if (nr <= ++i)
+ die("tag <tag> shorthand without <tag>");
+ if (pass) {
+ len = strlen(refs[i]) + 11;
+ tag = xmalloc(len);
+ strcpy(tag, "refs/tags/");
+ strcat(tag, refs[i]);
+ refspec[cnt] = tag;
+ }
+ cnt++;
+ continue;
+ }
+ if (pass)
+ refspec[cnt] = refs[i];
+ cnt++;
+ }
+ if (!pass) {
+ size_t bytes = cnt * sizeof(char *);
+ refspec_nr = cnt;
+ refspec = xrealloc(refspec, bytes);
+ }
+ }
}
expand_refspecs();
}
bob = buffer + strlen(buffer);
else {
offset = 8;
- while (isspace(bob[-1]))
+ while (buffer + offset < bob &&
+ isspace(bob[-1]))
bob--;
}
while (fgets(buffer2, sizeof(buffer2), stdin) &&
buffer2[0] != '\n')
; /* chomp input */
- if (fgets(buffer2, sizeof(buffer2), stdin))
+ if (fgets(buffer2, sizeof(buffer2), stdin)) {
+ int l2 = strlen(buffer2);
+ int i;
+ for (i = 0; i < l2; i++)
+ if (!isspace(buffer2[i]))
+ break;
insert_author_oneline(list,
buffer + offset,
bob - buffer - offset,
- buffer2, strlen(buffer2));
+ buffer2 + i, l2 - i);
+ }
}
}
}
author = scratch;
authorlen = strlen(scratch);
} else {
- while (bracket[-1] == ' ')
+ if (bracket[-1] == ' ')
bracket--;
author = buffer + 7;
extern int cmd_ls_tree(int argc, const char **argv, const char *prefix);
extern int cmd_mailinfo(int argc, const char **argv, const char *prefix);
extern int cmd_mailsplit(int argc, const char **argv, const char *prefix);
+extern int cmd_merge_file(int argc, const char **argv, const char *prefix);
extern int cmd_mv(int argc, const char **argv, const char *prefix);
extern int cmd_name_rev(int argc, const char **argv, const char *prefix);
extern int cmd_pack_objects(int argc, const char **argv, const char *prefix);
return 0;
}
- if (!strcmp(var, "pager.color")) {
+ if (!strcmp(var, "pager.color") || !strcmp(var, "color.pager")) {
pager_use_color = git_config_bool(var,value);
return 0;
}
core.legacyHeaders
i18n.commitEncoding
diff.color
+ color.diff
diff.renameLimit
diff.renames
pager.color
+ color.pager
status.color
+ color.status
log.showroot
show.difftree
showbranch.default
diff_rename_limit_default = git_config_int(var, value);
return 0;
}
- if (!strcmp(var, "diff.color")) {
+ if (!strcmp(var, "diff.color") || !strcmp(var, "color.diff")) {
diff_use_color_default = git_config_colorbool(var, value);
return 0;
}
diff_detect_rename_default = DIFF_DETECT_RENAME;
return 0;
}
- if (!strncmp(var, "diff.color.", 11)) {
+ if (!strncmp(var, "diff.color.", 11) || !strncmp(var, "color.diff.", 11)) {
int slot = parse_diff_color_slot(var, 11);
color_parse(value, var, diff_colors[slot]);
return 0;
for (i = 0; i < data->nr; i++) {
struct diffstat_file *file = data->files[i];
- printf("%d\t%d\t", file->added, file->deleted);
+ if (file->is_binary)
+ printf("-\t-\t");
+ else
+ printf("%d\t%d\t", file->added, file->deleted);
if (options->line_termination &&
quote_c_style(file->name, NULL, NULL, 0))
quote_c_style(file->name, NULL, stdout, 0);
fprintf(stderr, fmt, hex);
}
-static void report_missing(const char *what, const unsigned char *missing)
+static void report_missing(const struct object *obj)
{
char missing_hex[41];
-
- strcpy(missing_hex, sha1_to_hex(missing));;
- fprintf(stderr,
- "Cannot obtain needed %s %s\nwhile processing commit %s.\n",
- what, missing_hex, sha1_to_hex(current_commit_sha1));
+	strcpy(missing_hex, sha1_to_hex(obj->sha1));
+ fprintf(stderr, "Cannot obtain needed %s %s\n",
+ obj->type ? typename(obj->type): "object", missing_hex);
+ if (!is_null_sha1(current_commit_sha1))
+ fprintf(stderr, "while processing commit %s.\n",
+ sha1_to_hex(current_commit_sha1));
}
static int process(struct object *obj);
*/
if (! (obj->flags & TO_SCAN)) {
if (fetch(obj->sha1)) {
- report_missing(typename(obj->type), obj->sha1);
+ report_missing(obj);
return -1;
}
}
# Known limitations:
# - does not propagate permissions
-# - tells "ready for commit" even when things could not be completed
-# (not sure this is true anymore, more testing is needed)
-# - does not handle whitespace in pathnames at all.
+# - error handling has not been extensively tested
+#
use strict;
use Getopt::Std;
}
close MSG;
-my (@afiles, @dfiles, @mfiles, @dirs);
-my %amodes;
-my @files = safe_pipe_capture('git-diff-tree', '-r', $parent, $commit);
-#print @files;
-$? && die "Error in git-diff-tree";
-foreach my $f (@files) {
- chomp $f;
- my @fields = split(m!\s+!, $f);
- if ($fields[4] eq 'A') {
- my $path = $fields[5];
- $amodes{$path} = $fields[1];
- push @afiles, $path;
- # add any needed parent directories
- $path = dirname $path;
- while (!-d $path and ! grep { $_ eq $path } @dirs) {
- unshift @dirs, $path;
- $path = dirname $path;
- }
- }
- if ($fields[4] eq 'M') {
- push @mfiles, $fields[5];
- }
- if ($fields[4] eq 'D') {
- push @dfiles, $fields[5];
- }
+`git-diff-tree --binary -p $parent $commit >.cvsexportcommit.diff`;# || die "Cannot diff";
+
+## apply non-binary changes
+my $fuzz = $opt_p ? 0 : 2;
+
+print "Checking if patch will apply\n";
+
+my @stat;
+open APPLY, "GIT_DIR= git-apply -C$fuzz --binary --summary --numstat<.cvsexportcommit.diff|" or die "cannot patch";
+@stat=<APPLY>;
+close APPLY || die "Cannot patch";
+my (@bfiles,@files,@afiles,@dfiles);
+chomp @stat;
+foreach (@stat) {
+ push (@bfiles,$1) if m/^-\t-\t(.*)$/;
+ push (@files, $1) if m/^-\t-\t(.*)$/;
+ push (@files, $1) if m/^\d+\t\d+\t(.*)$/;
+ push (@afiles,$1) if m/^ create mode [0-7]+ (.*)$/;
+ push (@dfiles,$1) if m/^ delete mode [0-7]+ (.*)$/;
}
-my (@binfiles, @abfiles, @dbfiles, @bfiles, @mbfiles);
-@binfiles = grep m/^Binary files/, safe_pipe_capture('git-diff-tree', '-p', $parent, $commit);
-map { chomp } @binfiles;
-@abfiles = grep s/^Binary files \/dev\/null and b\/(.*) differ$/$1/, @binfiles;
-@dbfiles = grep s/^Binary files a\/(.*) and \/dev\/null differ$/$1/, @binfiles;
-@mbfiles = grep s/^Binary files a\/(.*) and b\/(.*) differ$/$1/, @binfiles;
-push @bfiles, @abfiles;
-push @bfiles, @dbfiles;
-push @bfiles, @mbfiles;
-push @mfiles, @mbfiles;
-
-$opt_v && print "The commit affects:\n ";
-$opt_v && print join ("\n ", @afiles,@mfiles,@dfiles) . "\n\n";
-undef @files; # don't need it anymore
+map { s/^"(.*)"$/$1/g } @bfiles,@files;
+map { s/\\([0-7]{3})/sprintf('%c',oct $1)/eg } @bfiles,@files;
# check that the files are clean and up to date according to cvs
my $dirty;
+my @dirs;
+foreach my $p (@afiles) {
+ my $path = dirname $p;
+ while (!-d $path and ! grep { $_ eq $path } @dirs) {
+ unshift @dirs, $path;
+ $path = dirname $path;
+ }
+}
+
foreach my $d (@dirs) {
if (-e $d) {
$dirty = 1;
}
}
-foreach my $f (@mfiles, @dfiles) {
+foreach my $f (@files) {
+ next if grep { $_ eq $f } @afiles;
# TODO:we need to handle removed in cvs
my @status = grep(m/^File/, safe_pipe_capture('cvs', '-q', 'status' ,$f));
if (@status > 1) { warn 'Strange! cvs status returned more than one line?'};
}
}
-###
-### NOTE: if you are planning to die() past this point
-### you MUST call cleanupcvs(@files) before die()
-###
+print "Applying\n";
+`GIT_DIR= git-apply -C$fuzz --binary --summary --numstat --apply <.cvsexportcommit.diff` || die "cannot patch";
-
-print "Creating new directories\n";
+print "Patch applied successfully. Adding new files and directories to CVS\n";
+my $dirtypatch = 0;
foreach my $d (@dirs) {
- unless (mkdir $d) {
- warn "Could not mkdir $d: $!";
- $dirty = 1;
- }
- `cvs add $d`;
- if ($?) {
- $dirty = 1;
+ if (system('cvs','add',$d)) {
+ $dirtypatch = 1;
warn "Failed to cvs add directory $d -- you may need to do it manually";
}
}
-print "'Patching' binary files\n";
-
-foreach my $f (@bfiles) {
- # check that the file in cvs matches the "old" file
- # extract the file to $tmpdir and compare with cmp
- if (not(grep { $_ eq $f } @afiles)) {
- my $tree = safe_pipe_capture('git-rev-parse', "$parent^{tree}");
- chomp $tree;
- my $blob = `git-ls-tree $tree "$f" | cut -f 1 | cut -d ' ' -f 3`;
- chomp $blob;
- `git-cat-file blob $blob > $tmpdir/blob`;
- if (system('cmp', '-s', $f, "$tmpdir/blob")) {
- warn "Binary file $f in CVS does not match parent.\n";
- if (not $opt_f) {
- $dirty = 1;
- next;
- }
- }
- }
- if (not(grep { $_ eq $f } @dfiles)) {
- my $tree = safe_pipe_capture('git-rev-parse', "$commit^{tree}");
- chomp $tree;
- my $blob = `git-ls-tree $tree "$f" | cut -f 1 | cut -d ' ' -f 3`;
- chomp $blob;
- # replace with the new file
- `git-cat-file blob $blob > $f`;
- }
-
- # TODO: something smart with file modes
-
-}
-if ($dirty) {
- cleanupcvs(@files);
- die "Exiting: Binary files in CVS do not match parent";
-}
-
-## apply non-binary changes
-my $fuzz = $opt_p ? 0 : 2;
-
-print "Patching non-binary files\n";
-
-if (scalar(@afiles)+scalar(@dfiles)+scalar(@mfiles) != scalar(@bfiles)) {
- print `(git-diff-tree -p $parent -p $commit | patch -p1 -F $fuzz ) 2>&1`;
-}
-
-my $dirtypatch = 0;
-if (($? >> 8) == 2) {
- cleanupcvs(@files);
- die "Exiting: Patch reported serious trouble -- you will have to apply this patch manually";
-} elsif (($? >> 8) == 1) { # some hunks failed to apply
- $dirtypatch = 1;
-}
-
foreach my $f (@afiles) {
- set_new_file_permissions($f, $amodes{$f});
if (grep { $_ eq $f } @bfiles) {
system('cvs', 'add','-kb',$f);
} else {
system('cvs', 'add', $f);
}
if ($?) {
- $dirty = 1;
+ $dirtypatch = 1;
warn "Failed to cvs add $f -- you may need to do it manually";
}
}
foreach my $f (@dfiles) {
system('cvs', 'rm', '-f', $f);
if ($?) {
- $dirty = 1;
+ $dirtypatch = 1;
warn "Failed to cvs rm -f $f -- you may need to do it manually";
}
}
print "Commit to CVS\n";
-print "Patch: $title\n";
-my $commitfiles = join(' ', @afiles, @mfiles, @dfiles);
-my $cmd = "cvs commit -F .msg $commitfiles";
+print "Patch title (first comment line): $title\n";
+my @commitfiles = map { unless (m/\s/) { '\''.$_.'\''; } else { $_; }; } (@files);
+my $cmd = "cvs commit -F .msg @commitfiles";
if ($dirtypatch) {
print "NOTE: One or more hunks failed to apply cleanly.\n";
- print "Resolve the conflicts and then commit using:\n";
+ print "You'll need to apply the patch in .cvsexportcommit.diff manually\n";
+ print "using a patch program. After applying the patch and resolving the\n";
+ print "problems you may commit using:";
print "\n $cmd\n\n";
exit(1);
}
-
if ($opt_c) {
print "Autocommit\n $cmd\n";
- print safe_pipe_capture('cvs', 'commit', '-F', '.msg', @afiles, @mfiles, @dfiles);
+ print safe_pipe_capture('cvs', 'commit', '-F', '.msg', @files);
if ($?) {
- cleanupcvs(@files);
die "Exiting: The commit did not succeed";
}
print "Committed successfully to CVS\n";
} else {
print "Ready for you to commit, just run:\n\n $cmd\n";
}
+
+# clean up
+unlink(".cvsexportcommit.diff");
+unlink(".msg");
+
sub usage {
print STDERR <<END;
Usage: GIT_DIR=/path/to/.git ${\basename $0} [-h] [-p] [-v] [-c] [-f] [-m msgprefix] [ parent ] commit
exit(1);
}
-# ensure cvs is clean before we die
-sub cleanupcvs {
- my @files = @_;
- foreach my $f (@files) {
- system('cvs', '-q', 'update', '-C', $f);
- if ($?) {
- warn "Warning! Failed to cleanup state of $f\n";
- }
- }
-}
-
# An alternative to `command` that allows input to be passed as an array
# to work around shell problems with weird characters in arguments
# if the exec returns non-zero we die
return wantarray ? @output : join('',@output);
}
-# For any file we want to add to cvs, we must first set its permissions
-# properly, *before* the "cvs add ..." command. Otherwise, it is impossible
-# to change the permission of the file in the CVS repository using only cvs
-# commands. This should be fixed in cvs-1.12.14.
-sub set_new_file_permissions {
- my ($file, $perm) = @_;
- chmod oct($perm), $file
- or die "failed to set permissions of \"$file\": $!\n";
+sub safe_pipe_capture_blob {
+ my $output;
+ if (my $pid = open my $child, '-|') {
+ local $/;
+ undef $/;
+ $output = (<$child>);
+ close $child or die join(' ',@_).": $! $?";
+ } else {
+		exec(@_) or die "$! $?"; # exec() can fail if the executable can't be found
+ }
+ return $output;
}
use strict;
use warnings;
+use bytes;
use Fcntl;
use File::Temp qw/tempdir tempfile/;
$log->debug("Temporary directory for merge is $dir");
- my $return = system("merge", $file_local, $file_old, $file_new);
+    my $return = system('git', 'merge-file', $file_local, $file_old, $file_new);
$return >>= 8;
if ( $return == 0 )
case "$best_strategy" in
'')
restorestate
- echo >&2 "No merge strategy handled the merge."
+ case "$use_strategies" in
+ ?*' '?*)
+ echo >&2 "No merge strategy handled the merge."
+ ;;
+ *)
+ echo >&2 "Merge with strategy $use_strategies failed."
+ ;;
+ esac
exit 2
;;
"$wt_strategy")
# or the first one otherwise; add prefix . to the rest
# to prevent the secondary branches to be merged by default.
merge_branches=
+ found_mergeref=
+ curr_branch=
if test "$1" = "-d"
then
shift ; remote="$1" ; shift
dot_prefix= && break
done
fi
+	if test -z "$dot_prefix"
+ then
+ found_mergeref=true
+ fi
case "$remote" in
'') remote=HEAD ;;
refs/heads/* | refs/tags/* | refs/remotes/*) ;;
fi
echo "${dot_prefix}${force}${remote}:${local}"
done
+ if test -z "$found_mergeref" -a "$curr_branch"
+ then
+ echo >&2 "Warning: No merge candidate found because value of config option
+ \"branch.${curr_branch}.merge\" does not match any remote branch fetched."
+ fi
}
# Returns list of src: (no store), or src:dst (store)
sub merge {
my ($name, $path) = @_;
record_preimage($path, "$rr_dir/$name/thisimage");
- unless (system('merge', map { "$rr_dir/$name/${_}image" }
+	unless (system('git', 'merge-file', map { "$rr_dir/$name/${_}image" }
qw(this pre post))) {
my $in;
open $in, "<$rr_dir/$name/thisimage" or
$ENV{LC_ALL} = 'C';
$| = 1; # unbuffer STDOUT
-sub fatal (@) { print STDERR $@; exit 1 }
+# properties that we do not log:
+my %SKIP = ( 'svn:wc:ra_dav:version-url' => 1,
+ 'svn:special' => 1,
+ 'svn:executable' => 1,
+ 'svn:entry:committed-rev' => 1,
+ 'svn:entry:last-author' => 1,
+ 'svn:entry:uuid' => 1,
+ 'svn:entry:committed-date' => 1,
+);
+
+sub fatal (@) { print STDERR @_; exit 1 }
# If SVN:: library support is added, please make the dependencies
# optional and preserve the capability to use the command-line client.
# use eval { require SVN::... } to make it lazy load
$_username, $_config_dir, $_no_auth_cache, $_xfer_delta,
$_pager, $_color);
my (@_branch_from, %tree_map, %users, %rusers, %equiv);
-my ($_svn_co_url_revs, $_svn_pg_peg_revs);
+my ($_svn_co_url_revs, $_svn_pg_peg_revs, $_svn_can_do_switch);
my @repo_path_split_cache;
my %fc_opts = ( 'no-ignore-externals' => \$_no_ignore_ext,
}
sub dcommit {
+ my $head = shift || 'HEAD';
my $gs = "refs/remotes/$GIT_SVN";
- chomp(my @refs = safe_qx(qw/git-rev-list --no-merges/, "$gs..HEAD"));
+ chomp(my @refs = safe_qx(qw/git-rev-list --no-merges/, "$gs..$head"));
my $last_rev;
foreach my $d (reverse @refs) {
if (quiet_run('git-rev-parse','--verify',"$d~1") != 0) {
}
return if $_dry_run;
fetch();
- my @diff = safe_qx(qw/git-diff-tree HEAD/, $gs);
+ my @diff = safe_qx('git-diff-tree', $head, $gs);
my @finish;
if (@diff) {
@finish = qw/rebase/;
push @finish, qw/--merge/ if $_merge;
push @finish, "--strategy=$_strategy" if $_strategy;
- print STDERR "W: HEAD and $gs differ, using @finish:\n", @diff;
+ print STDERR "W: $head and $gs differ, using @finish:\n", @diff;
} else {
- print "No changes between current HEAD and $gs\n",
- "Hard resetting to the latest $gs\n";
+ print "No changes between current $head and $gs\n",
+ "Resetting to the latest $gs\n";
@finish = qw/reset --mixed/;
}
sys('git', @finish, $gs);
# just in case we clobber the existing ref, we still want that ref
# as our parent:
- if (my $cur = eval { file_to_s("$GIT_DIR/refs/remotes/$GIT_SVN") }) {
+ open my $null, '>', '/dev/null' or croak $!;
+ open my $stderr, '>&', \*STDERR or croak $!;
+ open STDERR, '>&', $null or croak $!;
+ if (my $cur = eval { safe_qx('git-rev-parse',
+ "refs/remotes/$GIT_SVN^0") }) {
+ chomp $cur;
push @tmp_parents, $cur;
}
+ open STDERR, '>&', $stderr or croak $!;
+ close $stderr or croak $!;
+ close $null or croak $!;
if (exists $tree_map{$tree}) {
foreach my $p (@{$tree_map{$tree}}) {
return $ra;
}
+sub libsvn_can_do_switch {
+ unless (defined $_svn_can_do_switch) {
+ my $pool = SVN::Pool->new;
+ my $rep = eval {
+ $SVN->do_switch(1, '', 0, $SVN->{url},
+ SVN::Delta::Editor->new, $pool);
+ };
+ if ($@) {
+ $_svn_can_do_switch = 0;
+ } else {
+ $rep->abort_report($pool);
+ $_svn_can_do_switch = 1;
+ }
+ $pool->clear;
+ }
+ $_svn_can_do_switch;
+}
+
sub libsvn_dup_ra {
my ($ra) = @_;
SVN::Ra->new(map { $_ => $ra->{$_} } qw/config url
}
sub libsvn_get_file {
- my ($gui, $f, $rev, $chg) = @_;
+ my ($gui, $f, $rev, $chg, $untracked) = @_;
$f =~ s#^/##;
print "\t$chg\t$f\n" unless $_q;
waitpid $pid, 0;
$hash =~ /^$sha1$/o or die "not a sha1: $hash\n";
}
+ %{$untracked->{file_prop}->{$f}} = %$props;
print $gui $mode,' ',$hash,"\t",$f,"\0" or croak $!;
}
+sub uri_encode {
+ my ($f) = @_;
+ $f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg;
+ $f
+}
+
+sub uri_decode {
+ my ($f) = @_;
+ $f =~ tr/+/ /;
+ $f =~ s/%([A-F0-9]{2})/chr hex($1)/ge;
+ $f
+}
+
sub libsvn_log_entry {
- my ($rev, $author, $date, $msg, $parents) = @_;
+ my ($rev, $author, $date, $msg, $parents, $untracked) = @_;
my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T
(\d\d)\:(\d\d)\:(\d\d).\d+Z$/x)
or die "Unable to parse date: $date\n";
die "Author: $author not defined in $_authors file\n";
}
$msg = '' if ($rev == 0 && !defined $msg);
- return { revision => $rev, date => "+0000 $Y-$m-$d $H:$M:$S",
- author => $author, msg => $msg."\n", parents => $parents || [] }
+
+ open my $un, '>>', "$GIT_SVN_DIR/unhandled.log" or croak $!;
+ my $h;
+ print $un "r$rev\n" or croak $!;
+ $h = $untracked->{empty};
+ foreach (sort keys %$h) {
+ my $act = $h->{$_} ? '+empty_dir' : '-empty_dir';
+ print $un " $act: ", uri_encode($_), "\n" or croak $!;
+ warn "W: $act: $_\n";
+ }
+ foreach my $t (qw/dir_prop file_prop/) {
+ $h = $untracked->{$t} or next;
+ foreach my $path (sort keys %$h) {
+ my $ppath = $path eq '' ? '.' : $path;
+ foreach my $prop (sort keys %{$h->{$path}}) {
+ next if $SKIP{$prop};
+ my $v = $h->{$path}->{$prop};
+ if (defined $v) {
+ print $un " +$t: ",
+ uri_encode($ppath), ' ',
+ uri_encode($prop), ' ',
+ uri_encode($v), "\n"
+ or croak $!;
+ } else {
+ print $un " -$t: ",
+ uri_encode($ppath), ' ',
+ uri_encode($prop), "\n"
+ or croak $!;
+ }
+ }
+ }
+ }
+	foreach my $t (qw/absent_file absent_dir/) {
+ $h = $untracked->{$t} or next;
+ foreach my $parent (sort keys %$h) {
+ foreach my $path (sort @{$h->{$parent}}) {
+ print $un " $t: ",
+ uri_encode("$parent/$path"), "\n"
+ or croak $!;
+ warn "W: $t: $parent/$path ",
+ "Insufficient permissions?\n";
+ }
+ }
+ }
+
+ # revprops (make this optional? it's an extra network trip...)
+ my $pool = SVN::Pool->new;
+ my $rp = $SVN->rev_proplist($rev, $pool);
+ foreach (sort keys %$rp) {
+ next if /^svn:(?:author|date|log)$/;
+ print $un " rev_prop: ", uri_encode($_), ' ',
+ uri_encode($rp->{$_}), "\n";
+ }
+ $pool->clear;
+ close $un or croak $!;
+
+ { revision => $rev, date => "+0000 $Y-$m-$d $H:$M:$S",
+ author => $author, msg => $msg."\n", parents => $parents || [],
+ revprops => $rp }
}
sub process_rm {
}
print "\tD\t$f/\n" unless $q;
close $ls or croak $?;
+ return $SVN::Node::dir;
} else {
print $gui '0 ',0 x 40,"\t",$f,"\0" or croak $!;
print "\tD\t$f\n" unless $q;
+ return $SVN::Node::file;
}
}
unless ($ed->{git_commit_ok}) {
die "SVN connection failed somewhere...\n";
}
- libsvn_log_entry($rev, $author, $date, $msg, [$last_commit]);
+ libsvn_log_entry($rev, $author, $date, $msg, [$last_commit], $ed);
}
sub libsvn_fetch_full {
my ($last_commit, $paths, $rev, $author, $date, $msg) = @_;
open my $gui, '| git-update-index -z --index-info' or croak $!;
my %amr;
+ my $ut = { empty => {}, dir_prop => {}, file_prop => {} };
my $p = $SVN->{svn_path};
foreach my $f (keys %$paths) {
my $m = $paths->{$f}->action();
$f =~ s#^/##;
}
if ($m =~ /^[DR]$/) {
- process_rm($gui, $last_commit, $f, $_q);
- next if $m eq 'D';
+ my $t = process_rm($gui, $last_commit, $f, $_q);
+ if ($m eq 'D') {
+ $ut->{empty}->{$f} = 0 if $t == $SVN::Node::dir;
+ next;
+ }
# 'R' can be file replacements, too, right?
}
my $pool = SVN::Pool->new;
}
} elsif ($t == $SVN::Node::dir && $m =~ /^[AR]$/) {
my @traversed = ();
- libsvn_traverse($gui, '', $f, $rev, \@traversed);
- foreach (@traversed) {
- $amr{$_} = $m;
+ libsvn_traverse($gui, '', $f, $rev, \@traversed, $ut);
+ if (@traversed) {
+ foreach (@traversed) {
+ $amr{$_} = $m;
+ }
+ } else {
+ my ($dir, $file) = ($f =~ m#^(.*?)/?([^/]+)$#);
+ delete $ut->{empty}->{$dir};
+ $ut->{empty}->{$f} = 1;
}
}
$pool->clear;
}
foreach (keys %amr) {
- libsvn_get_file($gui, $_, $rev, $amr{$_});
+ libsvn_get_file($gui, $_, $rev, $amr{$_}, $ut);
+ my ($d) = ($_ =~ m#^(.*?)/?(?:[^/]+)$#);
+ delete $ut->{empty}->{$d};
+ }
+ unless (exists $ut->{dir_prop}->{''}) {
+ my $pool = SVN::Pool->new;
+ my (undef, undef, $props) = $SVN->get_dir('', $rev, $pool);
+ %{$ut->{dir_prop}->{''}} = %$props;
+ $pool->clear;
}
close $gui or croak $?;
- return libsvn_log_entry($rev, $author, $date, $msg, [$last_commit]);
+ libsvn_log_entry($rev, $author, $date, $msg, [$last_commit], $ut);
}
sub svn_grab_base_rev {
}
sub libsvn_traverse {
- my ($gui, $pfx, $path, $rev, $files) = @_;
+ my ($gui, $pfx, $path, $rev, $files, $untracked) = @_;
my $cwd = length $pfx ? "$pfx/$path" : $path;
my $pool = SVN::Pool->new;
$cwd =~ s#^\Q$SVN->{svn_path}\E##;
+ my $nr = 0;
my ($dirent, $r, $props) = $SVN->get_dir($cwd, $rev, $pool);
+ %{$untracked->{dir_prop}->{$cwd}} = %$props;
foreach my $d (keys %$dirent) {
my $t = $dirent->{$d}->kind;
if ($t == $SVN::Node::dir) {
- libsvn_traverse($gui, $cwd, $d, $rev, $files);
+ my $i = libsvn_traverse($gui, $cwd, $d, $rev,
+ $files, $untracked);
+ if ($i) {
+ $nr += $i;
+ } else {
+ $untracked->{empty}->{"$cwd/$d"} = 1;
+ }
} elsif ($t == $SVN::Node::file) {
+ $nr++;
my $file = "$cwd/$d";
if (defined $files) {
push @$files, $file;
} else {
- libsvn_get_file($gui, $file, $rev, 'A');
+ libsvn_get_file($gui, $file, $rev, 'A',
+ $untracked);
+ my ($dir) = ($file =~ m#^(.*?)/?(?:[^/]+)$#);
+ delete $untracked->{empty}->{$dir};
}
}
}
$pool->clear;
+ $nr;
}
sub libsvn_traverse_ignore {
unlink $GIT_SVN_INDEX;
print STDERR "Found branch parent: ($GIT_SVN) $parent\n";
sys(qw/git-read-tree/, $parent);
- # I can't seem to get do_switch() to work correctly with
- # the SWIG interface (TypeError when passing switch_url...),
- # so we'll unconditionally bypass the delta interface here
- # for now
- return libsvn_fetch_full($parent, $paths, $rev,
- $author, $date, $msg);
+ unless (libsvn_can_do_switch()) {
+ return libsvn_fetch_full($parent, $paths, $rev,
+ $author, $date, $msg);
+ }
+ # do_switch works with svn/trunk >= r22312, but that is not
+ # included with SVN 1.4.2 (the latest version at the moment),
+ # so we can't rely on it.
+ my $ra = libsvn_connect("$url/$branch_from");
+ my $ed = SVN::Git::Fetcher->new({c => $parent, q => $_q});
+ my $pool = SVN::Pool->new;
+ my $reporter = $ra->do_switch($rev, '', 1, $SVN->{url},
+ $ed, $pool);
+ my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : ();
+ $reporter->set_path('', $r0, 0, @lock, $pool);
+ $reporter->finish_report($pool);
+ $pool->clear;
+ unless ($ed->{git_commit_ok}) {
+ die "SVN connection failed somewhere...\n";
+ }
+ return libsvn_log_entry($rev, $author, $date, $msg, [$parent]);
}
print STDERR "Nope, branch point not imported or unknown\n";
return undef;
return $log_entry;
}
my ($paths, $rev, $author, $date, $msg) = @_;
+ my $ut;
if ($_xfer_delta) {
my $pool = SVN::Pool->new;
my $ed = SVN::Git::Fetcher->new({q => $_q});
unless ($ed->{git_commit_ok}) {
die "SVN connection failed somewhere...\n";
}
+ $ut = $ed;
} else {
+ $ut = { empty => {}, dir_prop => {}, file_prop => {} };
open my $gui, '| git-update-index -z --index-info' or croak $!;
- libsvn_traverse($gui, '', $SVN->{svn_path}, $rev);
+ libsvn_traverse($gui, '', $SVN->{svn_path}, $rev, undef, $ut);
close $gui or croak $?;
}
- return libsvn_log_entry($rev, $author, $date, $msg);
+ libsvn_log_entry($rev, $author, $date, $msg, [], $ut);
}
sub find_graft_path_commit {
$self->{gui} = $gui;
$self->{c} = $git_svn->{c} if exists $git_svn->{c};
$self->{q} = $git_svn->{q};
+ $self->{empty} = {};
+ $self->{dir_prop} = {};
+ $self->{file_prop} = {};
+ $self->{absent_dir} = {};
+ $self->{absent_file} = {};
require Digest::MD5;
$self;
}
+sub open_root {
+ { path => '' };
+}
+
+sub open_directory {
+ my ($self, $path, $pb, $rev) = @_;
+ { path => $path };
+}
+
sub delete_entry {
my ($self, $path, $rev, $pb) = @_;
- process_rm($self->{gui}, $self->{c}, $path, $self->{q});
+ my $t = process_rm($self->{gui}, $self->{c}, $path, $self->{q});
+ $self->{empty}->{$path} = 0 if $t == $SVN::Node::dir;
undef;
}
my ($self, $path, $pb, $rev) = @_;
my ($mode, $blob) = (safe_qx('git-ls-tree',$self->{c},'--',$path)
=~ /^(\d{6}) blob ([a-f\d]{40})\t/);
+ unless (defined $mode && defined $blob) {
+ die "$path was not found in commit $self->{c} (r$rev)\n";
+ }
{ path => $path, mode_a => $mode, mode_b => $mode, blob => $blob,
pool => SVN::Pool->new, action => 'M' };
}
sub add_file {
my ($self, $path, $pb, $cp_path, $cp_rev) = @_;
+ my ($dir, $file) = ($path =~ m#^(.*?)/?([^/]+)$#);
+ delete $self->{empty}->{$dir};
{ path => $path, mode_a => 100644, mode_b => 100644,
pool => SVN::Pool->new, action => 'A' };
}
+sub add_directory {
+ my ($self, $path, $cp_path, $cp_rev) = @_;
+ my ($dir, $file) = ($path =~ m#^(.*?)/?([^/]+)$#);
+ delete $self->{empty}->{$dir};
+ $self->{empty}->{$path} = 1;
+ { path => $path };
+}
+
+sub change_dir_prop {
+ my ($self, $db, $prop, $value) = @_;
+ $self->{dir_prop}->{$db->{path}} ||= {};
+ $self->{dir_prop}->{$db->{path}}->{$prop} = $value;
+ undef;
+}
+
+sub absent_directory {
+ my ($self, $path, $pb) = @_;
+ $self->{absent_dir}->{$pb->{path}} ||= [];
+ push @{$self->{absent_dir}->{$pb->{path}}}, $path;
+ undef;
+}
+
+sub absent_file {
+ my ($self, $path, $pb) = @_;
+ $self->{absent_file}->{$pb->{path}} ||= [];
+ push @{$self->{absent_file}->{$pb->{path}}}, $path;
+ undef;
+}
+
sub change_file_prop {
my ($self, $fb, $prop, $value) = @_;
if ($prop eq 'svn:executable') {
}
} elsif ($prop eq 'svn:special') {
$fb->{mode_b} = defined $value ? 120000 : 100644;
+ } else {
+ $self->{file_prop}->{$fb->{path}} ||= {};
+ $self->{file_prop}->{$fb->{path}}->{$prop} = $value;
}
undef;
}
{ "ls-tree", cmd_ls_tree, RUN_SETUP },
{ "mailinfo", cmd_mailinfo },
{ "mailsplit", cmd_mailsplit },
+ { "merge-file", cmd_merge_file },
{ "mv", cmd_mv, RUN_SETUP },
{ "name-rev", cmd_name_rev, RUN_SETUP },
{ "pack-objects", cmd_pack_objects, RUN_SETUP },
%package core
Summary: Core git tools
Group: Development/Tools
-Requires: zlib >= 1.2, rsync, rcs, curl, less, openssh-clients, expat
+Requires: zlib >= 1.2, rsync, curl, less, openssh-clients, expat
%description core
This is a stupid (but extremely fast) directory content manager. It
doesn't do a whole lot, but what it _does_ do is track directory
# To disable system wide have in $GITWEB_CONFIG
# $feature{'snapshot'}{'default'} = [undef];
# To have project specific config enable override in $GITWEB_CONFIG
- # $feature{'blame'}{'override'} = 1;
+ # $feature{'snapshot'}{'override'} = 1;
# and in project config gitweb.snapshot = none|gzip|bzip2;
'snapshot' => {
'sub' => \&feature_snapshot,
if (output_fd >= 0)
write_or_die(output_fd, input_buffer, input_offset);
SHA1_Update(&input_ctx, input_buffer, input_offset);
- memcpy(input_buffer, input_buffer + input_offset, input_len);
+ memmove(input_buffer, input_buffer + input_offset, input_len);
input_offset = 0;
}
}
#include "xdiff-interface.h"
#include "blob.h"
-static void rm_temp_file(const char *filename)
-{
- unlink(filename);
- free((void *)filename);
-}
-
-static const char *write_temp_file(mmfile_t *f)
-{
- int fd;
- const char *tmp = getenv("TMPDIR");
- char *filename;
-
- if (!tmp)
- tmp = "/tmp";
- filename = mkpath("%s/%s", tmp, "git-tmp-XXXXXX");
- fd = mkstemp(filename);
- if (fd < 0)
- return NULL;
- filename = xstrdup(filename);
- if (f->size != xwrite(fd, f->ptr, f->size)) {
- rm_temp_file(filename);
- return NULL;
- }
- close(fd);
- return filename;
-}
-
-static void *read_temp_file(const char *filename, unsigned long *size)
-{
- struct stat st;
- char *buf = NULL;
- int fd = open(filename, O_RDONLY);
- if (fd < 0)
- return NULL;
- if (!fstat(fd, &st)) {
- *size = st.st_size;
- buf = xmalloc(st.st_size);
- if (st.st_size != xread(fd, buf, st.st_size)) {
- free(buf);
- buf = NULL;
- }
- }
- close(fd);
- return buf;
-}
-
static int fill_mmfile_blob(mmfile_t *f, struct blob *obj)
{
void *buf;
static void *three_way_filemerge(mmfile_t *base, mmfile_t *our, mmfile_t *their, unsigned long *size)
{
- void *res;
- const char *t1, *t2, *t3;
-
- t1 = write_temp_file(base);
- t2 = write_temp_file(our);
- t3 = write_temp_file(their);
- res = NULL;
- if (t1 && t2 && t3) {
- int code = run_command("merge", t2, t1, t3, NULL);
- if (!code || code == -1)
- res = read_temp_file(t2, size);
- }
- rm_temp_file(t1);
- rm_temp_file(t2);
- rm_temp_file(t3);
- return res;
+ mmbuffer_t res;
+ xpparam_t xpp;
+ int merge_status;
+
+ memset(&xpp, 0, sizeof(xpp));
+ merge_status = xdl_merge(base, our, ".our", their, ".their",
+ &xpp, XDL_MERGE_ZEALOUS, &res);
+
+ if (merge_status < 0)
+ return NULL;
+
+ *size = res.size;
+ return res.ptr;
}
static int common_outf(void *priv_, mmbuffer_t *mb, int nbuf)
#include "tag.h"
#include "unpack-trees.h"
#include "path-list.h"
+#include "xdiff-interface.h"
/*
* A virtual commit has
merge:1;
};
-static char *git_unpack_file(const unsigned char *sha1, char *path)
+static void fill_mm(const unsigned char *sha1, mmfile_t *mm)
{
- void *buf;
- char type[20];
unsigned long size;
- int fd;
+ char type[20];
- buf = read_sha1_file(sha1, type, &size);
- if (!buf || strcmp(type, blob_type))
- die("unable to read blob object %s", sha1_to_hex(sha1));
+ if (!hashcmp(sha1, null_sha1)) {
+ mm->ptr = xstrdup("");
+ mm->size = 0;
+ return;
+ }
- strcpy(path, ".merge_file_XXXXXX");
- fd = mkstemp(path);
- if (fd < 0)
- die("unable to create temp-file");
- flush_buffer(fd, buf, size);
- close(fd);
- return path;
+ mm->ptr = read_sha1_file(sha1, type, &size);
+ if (!mm->ptr || strcmp(type, blob_type))
+ die("unable to read blob object %s", sha1_to_hex(sha1));
+ mm->size = size;
}
static struct merge_file_info merge_file(struct diff_filespec *o,
else if (sha_eq(b->sha1, o->sha1))
hashcpy(result.sha, a->sha1);
else if (S_ISREG(a->mode)) {
- int code = 1, fd;
- struct stat st;
- char orig[PATH_MAX];
- char src1[PATH_MAX];
- char src2[PATH_MAX];
- const char *argv[] = {
- "merge", "-L", NULL, "-L", NULL, "-L", NULL,
- NULL, NULL, NULL,
- NULL
- };
- char *la, *lb, *lo;
-
- git_unpack_file(o->sha1, orig);
- git_unpack_file(a->sha1, src1);
- git_unpack_file(b->sha1, src2);
-
- argv[2] = la = xstrdup(mkpath("%s/%s", branch1, a->path));
- argv[6] = lb = xstrdup(mkpath("%s/%s", branch2, b->path));
- argv[4] = lo = xstrdup(mkpath("orig/%s", o->path));
- argv[7] = src1;
- argv[8] = orig;
- argv[9] = src2,
-
- code = run_command_v(10, argv);
-
- free(la);
- free(lb);
- free(lo);
- if (code && code < -256) {
- die("Failed to execute 'merge'. merge(1) is used as the "
- "file-level merge tool. Is 'merge' in your path?");
- }
- fd = open(src1, O_RDONLY);
- if (fd < 0 || fstat(fd, &st) < 0 ||
- index_fd(result.sha, fd, &st, 1,
- "blob"))
- die("Unable to add %s to database", src1);
-
- unlink(orig);
- unlink(src1);
- unlink(src2);
-
- result.clean = WEXITSTATUS(code) == 0;
+ mmfile_t orig, src1, src2;
+ mmbuffer_t result_buf;
+ xpparam_t xpp;
+ char *name1, *name2;
+ int merge_status;
+
+ name1 = xstrdup(mkpath("%s/%s", branch1, a->path));
+ name2 = xstrdup(mkpath("%s/%s", branch2, b->path));
+
+ fill_mm(o->sha1, &orig);
+ fill_mm(a->sha1, &src1);
+ fill_mm(b->sha1, &src2);
+
+ memset(&xpp, 0, sizeof(xpp));
+ merge_status = xdl_merge(&orig,
+ &src1, name1,
+ &src2, name2,
+ &xpp, XDL_MERGE_ZEALOUS,
+ &result_buf);
+ free(name1);
+ free(name2);
+ free(orig.ptr);
+ free(src1.ptr);
+ free(src2.ptr);
+
+ if ((merge_status < 0) || !result_buf.ptr)
+ die("Failed to execute internal merge");
+
+ if (write_sha1_file(result_buf.ptr, result_buf.size,
+ blob_type, result.sha))
+ die("Unable to add %s to database",
+ a->path);
+
+ free(result_buf.ptr);
+ result.clean = (merge_status == 0);
} else {
if (!(S_ISLNK(a->mode) || S_ISLNK(b->mode)))
die("cannot merge modes?");
struct diff_filespec src_other, dst_other;
int try_merge, stage = a_renames == renames1 ? 3: 2;
- remove_file(1, ren1_src, 1);
+ remove_file(1, ren1_src, index_only);
hashcpy(src_other.sha1, ren1->src_entry->stages[stage].sha);
src_other.mode = ren1->src_entry->stages[stage].mode;
output("Adding %s", path);
update_file(1, sha, mode, path);
}
- } else if (!o_sha && a_sha && b_sha) {
- /* Case C: Added in both (check for same permissions). */
- if (sha_eq(a_sha, b_sha)) {
- if (a_mode != b_mode) {
- clean_merge = 0;
- output("CONFLICT: File %s added identically in both branches, "
- "but permissions conflict %06o->%06o",
- path, a_mode, b_mode);
- output("CONFLICT: adding with permission: %06o", a_mode);
- update_file(0, a_sha, a_mode, path);
- } else {
- /* This case is handled by git-read-tree */
- assert(0 && "This case must be handled by git-read-tree");
- }
- } else {
- const char *new_path1, *new_path2;
- clean_merge = 0;
- new_path1 = unique_path(path, branch1);
- new_path2 = unique_path(path, branch2);
- output("CONFLICT (add/add): File %s added non-identically "
- "in both branches. Adding as %s and %s instead.",
- path, new_path1, new_path2);
- remove_file(0, path, 0);
- update_file(0, a_sha, a_mode, new_path1);
- update_file(0, b_sha, b_mode, new_path2);
- }
-
- } else if (o_sha && a_sha && b_sha) {
+ } else if (a_sha && b_sha) {
+ /* Case C: Added in both (check for same permissions) and */
/* case D: Modified in both, but differently. */
+ const char *reason = "content";
struct merge_file_info mfi;
struct diff_filespec o, a, b;
+ if (!o_sha) {
+ reason = "add/add";
+ o_sha = (unsigned char *)null_sha1;
+ }
output("Auto-merging %s", path);
o.path = a.path = b.path = (char *)path;
hashcpy(o.sha1, o_sha);
update_file(1, mfi.sha, mfi.mode, path);
else {
clean_merge = 0;
- output("CONFLICT (content): Merge conflict in %s", path);
+ output("CONFLICT (%s): Merge conflict in %s",
+ reason, path);
if (index_only)
update_file(0, mfi.sha, mfi.mode, path);
tree->object.parsed = 1;
tree->object.type = OBJ_TREE;
- hash_sha1_file(NULL, 0, tree_type, tree->object.sha1);
+ write_sha1_file(NULL, 0, tree_type, tree->object.sha1);
merged_common_ancestors = make_virtual_commit(tree, "ancestor");
}
echo ' echo $(instdir_SQ)' >> $@
else
$(makfile): Makefile.PL ../GIT-CFLAGS
- '$(PERL_PATH_SQ)' $< FIRST_MAKEFILE='$@' PREFIX='$(prefix_SQ)'
+ '$(PERL_PATH_SQ)' $< PREFIX='$(prefix_SQ)'
endif
# this is just added comfort for calling make directly in perl dir
NAME => 'Git',
VERSION_FROM => 'Git.pm',
PM => \%pm,
+ MAKEFILE => 'perl.mak',
%extra
);
static const char receive_pack_usage[] = "git-receive-pack <git-dir>";
static int deny_non_fast_forwards = 0;
-static int unpack_limit = 5000;
+static int unpack_limit = 100;
static int report_status;
static char capabilities[] = " report-status delete-refs ";
level++;
if (!ch) {
if (level < 2)
- return -1; /* at least of form "heads/blah" */
+ return -2; /* at least of form "heads/blah" */
return 0;
}
}
return ret;
}
+static void verify_remote_names(int nr_heads, char **heads)
+{
+ int i;
+
+ for (i = 0; i < nr_heads; i++) {
+ const char *remote = strchr(heads[i], ':');
+
+ remote = remote ? (remote + 1) : heads[i];
+ switch (check_ref_format(remote)) {
+ case 0: /* ok */
+ case -2: /* ok but a single level -- that is fine for
+ * a match pattern.
+ */
+ continue;
+ }
+ die("remote part of refspec is not a valid name in %s",
+ heads[i]);
+ }
+}
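+
To illustrate the effect (hypothetical refspecs, not taken from the patch): `master` and `master:refs/heads/topic` both pass the check -- a lone `master` leaves `master` as the remote side, a single-level name that the new -2 return value explicitly tolerates as a match pattern -- while a refspec whose remote part is not a valid ref name, say `master:bad..ref`, now makes git-send-pack die with the message above before any connection is opened.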
int main(int argc, char **argv)
{
usage(send_pack_usage);
if (heads && send_all)
usage(send_pack_usage);
+ verify_remote_names(nr_heads, heads);
+
pid = git_connect(fd, dest, exec);
if (pid < 0)
return 1;
'
################################################################
-# It appears that people are getting bitten by not installing
-# 'merge' (usually part of RCS package in binary distributions).
-# Check this and error out before running any tests. Also catch
-# the bogosity of trying to run tests without building while we
-# are at it.
+# It appears that people try to run tests without building...
../git >/dev/null
if test $? != 1
exit 1
fi
-merge >/dev/null 2>/dev/null
-if test $? = 127
-then
- echo >&2 'You do not seem to have "merge" installed.
-Please check INSTALL document.'
- exit 1
-fi
-
. ./test-lib.sh
################################################################
--- /dev/null
+#!/bin/sh
+
+test_description='RCS merge replacement: merge-file'
+. ./test-lib.sh
+
+cat > orig.txt << EOF
+Dominus regit me,
+et nihil mihi deerit.
+In loco pascuae ibi me collocavit,
+super aquam refectionis educavit me;
+animam meam convertit,
+deduxit me super semitas jusitiae,
+propter nomen suum.
+EOF
+
+cat > new1.txt << EOF
+Dominus regit me,
+et nihil mihi deerit.
+In loco pascuae ibi me collocavit,
+super aquam refectionis educavit me;
+animam meam convertit,
+deduxit me super semitas jusitiae,
+propter nomen suum.
+Nam et si ambulavero in medio umbrae mortis,
+non timebo mala, quoniam tu mecum es:
+virga tua et baculus tuus ipsa me consolata sunt.
+EOF
+
+cat > new2.txt << EOF
+Dominus regit me, et nihil mihi deerit.
+In loco pascuae ibi me collocavit,
+super aquam refectionis educavit me;
+animam meam convertit,
+deduxit me super semitas jusitiae,
+propter nomen suum.
+EOF
+
+cat > new3.txt << EOF
+DOMINUS regit me,
+et nihil mihi deerit.
+In loco pascuae ibi me collocavit,
+super aquam refectionis educavit me;
+animam meam convertit,
+deduxit me super semitas jusitiae,
+propter nomen suum.
+EOF
+
+cat > new4.txt << EOF
+Dominus regit me, et nihil mihi deerit.
+In loco pascuae ibi me collocavit,
+super aquam refectionis educavit me;
+animam meam convertit,
+deduxit me super semitas jusitiae,
+EOF
+echo -n "propter nomen suum." >> new4.txt
+
+cp new1.txt test.txt
+test_expect_success "merge without conflict" \
+ "git-merge-file test.txt orig.txt new2.txt"
+
+cp new1.txt test2.txt
+test_expect_success "merge without conflict (missing LF at EOF)" \
+ "git-merge-file test2.txt orig.txt new2.txt"
+
+test_expect_success "merge result added missing LF" \
+ "diff -u test.txt test2.txt"
+
+cp test.txt backup.txt
+test_expect_failure "merge with conflicts" \
+ "git-merge-file test.txt orig.txt new3.txt"
+
+cat > expect.txt << EOF
+<<<<<<< test.txt
+Dominus regit me, et nihil mihi deerit.
+=======
+DOMINUS regit me,
+et nihil mihi deerit.
+>>>>>>> new3.txt
+In loco pascuae ibi me collocavit,
+super aquam refectionis educavit me;
+animam meam convertit,
+deduxit me super semitas jusitiae,
+propter nomen suum.
+Nam et si ambulavero in medio umbrae mortis,
+non timebo mala, quoniam tu mecum es:
+virga tua et baculus tuus ipsa me consolata sunt.
+EOF
+
+test_expect_success "expected conflict markers" "diff -u test.txt expect.txt"
+
+cp backup.txt test.txt
+test_expect_failure "merge with conflicts, using -L" \
+ "git-merge-file -L 1 -L 2 test.txt orig.txt new3.txt"
+
+cat > expect.txt << EOF
+<<<<<<< 1
+Dominus regit me, et nihil mihi deerit.
+=======
+DOMINUS regit me,
+et nihil mihi deerit.
+>>>>>>> new3.txt
+In loco pascuae ibi me collocavit,
+super aquam refectionis educavit me;
+animam meam convertit,
+deduxit me super semitas jusitiae,
+propter nomen suum.
+Nam et si ambulavero in medio umbrae mortis,
+non timebo mala, quoniam tu mecum es:
+virga tua et baculus tuus ipsa me consolata sunt.
+EOF
+
+test_expect_success "expected conflict markers, with -L" \
+ "diff -u test.txt expect.txt"
+
+test_done
+
--- /dev/null
+#!/bin/sh
+
+test_description='Merge-recursive merging renames'
+. ./test-lib.sh
+
+test_expect_success setup \
+'
+cat >A <<\EOF &&
+a aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+b bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
+c cccccccccccccccccccccccccccccccccccccccccccccccc
+d dddddddddddddddddddddddddddddddddddddddddddddddd
+e eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+f ffffffffffffffffffffffffffffffffffffffffffffffff
+g gggggggggggggggggggggggggggggggggggggggggggggggg
+h hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh
+i iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii
+j jjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjj
+k kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk
+l llllllllllllllllllllllllllllllllllllllllllllllll
+m mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm
+n nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
+o oooooooooooooooooooooooooooooooooooooooooooooooo
+EOF
+
+cat >M <<\EOF &&
+A AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+B BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+C CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
+D DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD
+E EEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEE
+F FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
+G GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG
+H HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH
+I IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
+J JJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJ
+K KKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK
+L LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL
+M MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM
+N NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+O OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO
+EOF
+
+git add A M &&
+git commit -m "initial has A and M" &&
+git branch white &&
+git branch red &&
+
+git checkout white &&
+sed -e "/^g /s/.*/g : white changes a line/" <A >B &&
+sed -e "/^G /s/.*/G : colored branch changes a line/" <M >N &&
+rm -f A M &&
+git update-index --add --remove A B M N &&
+git commit -m "white renames A->B, M->N" &&
+
+git checkout red &&
+echo created by red >R &&
+git update-index --add R &&
+git commit -m "red creates R" &&
+
+git checkout master'
+
+# This test broke in 65ac6e9c3f47807cb603af07a6a9e1a43bc119ae
+test_expect_success 'merge white into red (A->B,M->N)' \
+'
+ git checkout -b red-white red &&
+ git merge white &&
+ git write-tree >/dev/null || {
+ echo "BAD: merge did not complete"
+ return 1
+ }
+
+ test -f B || {
+ echo "BAD: B does not exist in working directory"
+ return 1
+ }
+ test -f N || {
+ echo "BAD: N does not exist in working directory"
+ return 1
+ }
+ test -f R || {
+ echo "BAD: R does not exist in working directory"
+ return 1
+ }
+
+ test -f A && {
+ echo "BAD: A still exists in working directory"
+ return 1
+ }
+ test -f M && {
+ echo "BAD: M still exists in working directory"
+ return 1
+ }
+ return 0
+'
+
+test_done
--- /dev/null
+#!/bin/sh
+
+test_description='Test merge without common ancestors'
+. ./test-lib.sh
+
+# This scenario is based on a real-world repository of Shawn Pearce.
+
+# 1 - A - D - F
+# \ X /
+# B X
+# X \
+# 2 - C - E - G
+
+export GIT_COMMITTER_DATE="2006-12-12 23:28:00 +0100"
+echo 1 > a1
+git add a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:00" git commit -m 1 a1
+
+git checkout -b A master
+echo A > a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:01" git commit -m A a1
+
+git checkout -b B master
+echo B > a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:02" git commit -m B a1
+
+git checkout -b D A
+git-rev-parse B > .git/MERGE_HEAD
+echo D > a1
+git update-index a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:03" git commit -m D
+
+git symbolic-ref HEAD refs/heads/other
+echo 2 > a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:04" git commit -m 2 a1
+
+git checkout -b C
+echo C > a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:05" git commit -m C a1
+
+git checkout -b E C
+git-rev-parse B > .git/MERGE_HEAD
+echo E > a1
+git update-index a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:06" git commit -m E
+
+git checkout -b G E
+git-rev-parse A > .git/MERGE_HEAD
+echo G > a1
+git update-index a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:07" git commit -m G
+
+git checkout -b F D
+git-rev-parse C > .git/MERGE_HEAD
+echo F > a1
+git update-index a1
+GIT_AUTHOR_DATE="2006-12-12 23:00:08" git commit -m F
+
+test_expect_failure "combined merge conflicts" "git merge -m final G"
+
+cat > expect << EOF
+<<<<<<< HEAD/a1
+F
+=======
+G
+>>>>>>> 26f86b677eb03d4d956dbe108b29cb77061c1e73/a1
+EOF
+
+test_expect_success "result contains a conflict" "diff -u expect a1"
+
+git ls-files --stage > out
+cat > expect << EOF
+100644 f16f906ab60483c100d1241dfc39868de9ec9fcb 1 a1
+100644 cf84443e49e1b366fac938711ddf4be2d4d1d9e9 2 a1
+100644 fd7923529855d0b274795ae3349c5e0438333979 3 a1
+EOF
+
+test_expect_success "virtual trees were processed" "diff -u expect out"
+
+test_done
! git cvsexportcommit -c $id
)'
-# Should fail, but only on the git-cvsexportcommit stage
-test_expect_success \
- 'Fail to remove binary file more than one generation old' \
- 'git reset --hard HEAD^ &&
- cat F/newfile6.png >>D/newfile4.png &&
- git commit -a -m "generation 2 (again)" &&
- rm -f D/newfile4.png &&
- git commit -a -m "generation 3" &&
- id=$(git rev-list --max-count=1 HEAD) &&
- (cd "$CVSWORK" &&
- ! git cvsexportcommit -c $id
- )'
+#test_expect_success \
+# 'Fail to remove binary file more than one generation old' \
+# 'git reset --hard HEAD^ &&
+# cat F/newfile6.png >>D/newfile4.png &&
+# git commit -a -m "generation 2 (again)" &&
+# rm -f D/newfile4.png &&
+# git commit -a -m "generation 3" &&
+# id=$(git rev-list --max-count=1 HEAD) &&
+# (cd "$CVSWORK" &&
+# ! git cvsexportcommit -c $id
+# )'
# We reuse the state from two tests back here
# fail with gnu patch, so cvsexportcommit must handle that.
test_expect_success \
'Remove only binary files' \
- 'git reset --hard HEAD^^^ &&
+ 'git reset --hard HEAD^^ &&
rm -f D/newfile4.png &&
git commit -a -m "test: remove only a binary file" &&
id=$(git rev-list --max-count=1 HEAD) &&
diff F/newfile6.png ../F/newfile6.png
)'
-test_expect_success 'Retain execute bit' '
- mkdir G &&
- echo executeon >G/on &&
- chmod +x G/on &&
- echo executeoff >G/off &&
- git add G/on &&
- git add G/off &&
- git commit -a -m "Execute test" &&
- (
- cd "$CVSWORK" &&
- git-cvsexportcommit -c HEAD
- test -x G/on &&
- ! test -x G/off
- )
-'
+test_expect_success \
+ 'New file with spaces in file name' \
+ 'mkdir "G g" &&
+ echo ok then >"G g/with spaces.txt" &&
+ git add "G g/with spaces.txt" && \
+ cp ../test9200a.png "G g/with spaces.png" && \
+ git add "G g/with spaces.png" &&
+ git commit -a -m "With spaces" &&
+ id=$(git rev-list --max-count=1 HEAD) &&
+ (cd "$CVSWORK" &&
+ git-cvsexportcommit -c $id &&
+ test "$(echo $(sort "G g/CVS/Entries"|cut -d/ -f2,3,5))" = "with spaces.png/1.1/-kb with spaces.txt/1.1/"
+ )'
+
+test_expect_success \
+ 'Update file with spaces in file name' \
+ 'echo Ok then >>"G g/with spaces.txt" &&
+ cat ../test9200a.png >>"G g/with spaces.png" && \
+ git add "G g/with spaces.png" &&
+ git commit -a -m "Update with spaces" &&
+ id=$(git rev-list --max-count=1 HEAD) &&
+ (cd "$CVSWORK" &&
+ git-cvsexportcommit -c $id
+ test "$(echo $(sort "G g/CVS/Entries"|cut -d/ -f2,3,5))" = "with spaces.png/1.2/-kb with spaces.txt/1.2/"
+ )'
+
+# This test contains ISO-8859-1 characters
+test_expect_success \
+ 'File with non-ascii file name' \
+ 'mkdir -p Å/goo/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z/å/ä/ö &&
+ echo Foo >Å/goo/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z/å/ä/ö/gårdetsågårdet.txt &&
+ git add Å/goo/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z/å/ä/ö/gårdetsågårdet.txt &&
+ cp ../test9200a.png Å/goo/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z/å/ä/ö/gårdetsågårdet.png &&
+ git add Å/goo/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z/å/ä/ö/gårdetsågårdet.png &&
+ git commit -a -m "Går det så går det" && \
+ id=$(git rev-list --max-count=1 HEAD) &&
+ (cd "$CVSWORK" &&
+ git-cvsexportcommit -v -c $id &&
+ test "$(echo $(sort Å/goo/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z/å/ä/ö/CVS/Entries|cut -d/ -f2,3,5))" = "gårdetsågårdet.png/1.1/-kb gårdetsågårdet.txt/1.1/"
+ )'
+
+test_expect_success \
+ 'Mismatching patch should fail' \
+ 'date >>"E/newfile5.txt" &&
+ git add "E/newfile5.txt" &&
+ git commit -a -m "Update one" &&
+ date >>"E/newfile5.txt" &&
+ git add "E/newfile5.txt" &&
+ git commit -a -m "Update two" &&
+ id=$(git rev-list --max-count=1 HEAD) &&
+ (cd "$CVSWORK" &&
+ ! git-cvsexportcommit -c $id
+ )'
+
+test_expect_success \
+ 'Retain execute bit' \
+ 'mkdir G &&
+ echo executeon >G/on &&
+ chmod +x G/on &&
+ echo executeoff >G/off &&
+ git add G/on &&
+ git add G/off &&
+ git commit -a -m "Execute test" &&
+ (cd "$CVSWORK" &&
+ git-cvsexportcommit -c HEAD
+ test -x G/on &&
+ ! test -x G/off
+ )'
test_done
int git_status_config(const char *k, const char *v)
{
- if (!strcmp(k, "status.color")) {
+ if (!strcmp(k, "status.color") || !strcmp(k, "color.status")) {
wt_status_use_color = git_config_colorbool(k, v);
return 0;
}
- if (!strncmp(k, "status.color.", 13)) {
+ if (!strncmp(k, "status.color.", 13) || !strncmp(k, "color.status", 13)) {
int slot = parse_status_slot(k, 13);
color_parse(v, k, wt_status_colors[slot]);
}
#define XDL_BDOP_CPY 2
#define XDL_BDOP_INSB 3
+#define XDL_MERGE_MINIMAL 0
+#define XDL_MERGE_EAGER 1
+#define XDL_MERGE_ZEALOUS 2
typedef struct s_mmfile {
char *ptr;
int xdl_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
xdemitconf_t const *xecfg, xdemitcb_t *ecb);
+int xdl_merge(mmfile_t *orig, mmfile_t *mf1, const char *name1,
+ mmfile_t *mf2, const char *name2,
+ xpparam_t const *xpp, int level, mmbuffer_t *result);
+
#ifdef __cplusplus
}
#endif /* #ifdef __cplusplus */
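As a rough illustration of the new entry point (a minimal sketch, not part of the patch; it assumes being compiled inside the git tree and linked against git's libxdiff, and the file contents, labels and merge level are made up):

/*
 * Sketch: merge three in-memory versions of a file with xdl_merge().
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "xdiff/xdiff.h"

int main(void)
{
	char b[] = "line one\nline two\nline three\n";	/* common ancestor */
	char o[] = "line 1\nline two\nline three\n";	/* "ours": changed line 1 */
	char t[] = "line one\nline two\nline 3\n";	/* "theirs": changed line 3 */
	mmfile_t base, ours, theirs;
	mmbuffer_t res;
	xpparam_t xpp;
	int conflicts;

	base.ptr = b;     base.size = sizeof(b) - 1;
	ours.ptr = o;     ours.size = sizeof(o) - 1;
	theirs.ptr = t;   theirs.size = sizeof(t) - 1;
	memset(&xpp, 0, sizeof(xpp));

	conflicts = xdl_merge(&base, &ours, "ours", &theirs, "theirs",
			      &xpp, XDL_MERGE_ZEALOUS, &res);
	if (conflicts < 0)
		return 1;	/* error */

	/* res.ptr is not NUL-terminated; write exactly res.size bytes */
	fwrite(res.ptr, 1, res.size, stdout);
	fprintf(stderr, "%d conflict(s)\n", conflicts);
	free(res.ptr);
	return 0;
}

With non-overlapping changes like these the call should return 0 and res holds the combined text. As for the level argument: XDL_MERGE_MINIMAL flags every overlapping change as a conflict, XDL_MERGE_EAGER silently takes changes that are identical on both sides, and XDL_MERGE_ZEALOUS additionally re-diffs non-identical overlaps to narrow the conflicted region (see xmerge.c below).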
long *kvdf, long *kvdb, int need_min, xdpsplit_t *spl,
xdalgoenv_t *xenv);
static xdchange_t *xdl_add_change(xdchange_t *xscr, long i1, long i2, long chg1, long chg2);
-static int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags);
}
-static int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags) {
+int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags) {
long ix, ixo, ixs, ixref, grpsiz, nrec = xdf->nrec;
char *rchg = xdf->rchg, *rchgo = xdfo->rchg;
xrecord_t **recs = xdf->recs;
long *kvdf, long *kvdb, int need_min, xdalgoenv_t *xenv);
int xdl_do_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
xdfenv_t *xe);
+int xdl_change_compact(xdfile_t *xdf, xdfile_t *xdfo, long flags);
int xdl_build_script(xdfenv_t *xe, xdchange_t **xscr);
void xdl_free_script(xdchange_t *xscr);
int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
--- /dev/null
+/*
+ * LibXDiff by Davide Libenzi ( File Differential Library )
+ * Copyright (C) 2003-2006 Davide Libenzi, Johannes E. Schindelin
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ *
+ * Davide Libenzi <davidel@xmailserver.org>
+ *
+ */
+
+#include "xinclude.h"
+
+typedef struct s_xdmerge {
+ struct s_xdmerge *next;
+ /*
+ * 0 = conflict,
+ * 1 = no conflict, take first,
+ * 2 = no conflict, take second.
+ */
+ int mode;
+ long i1, i2;
+ long chg1, chg2;
+} xdmerge_t;
+
+static int xdl_append_merge(xdmerge_t **merge, int mode,
+ long i1, long chg1, long i2, long chg2)
+{
+ xdmerge_t *m = *merge;
+ if (m && (i1 <= m->i1 + m->chg1 || i2 <= m->i2 + m->chg2)) {
+ if (mode != m->mode)
+ m->mode = 0;
+ m->chg1 = i1 + chg1 - m->i1;
+ m->chg2 = i2 + chg2 - m->i2;
+ } else {
+ m = xdl_malloc(sizeof(xdmerge_t));
+ if (!m)
+ return -1;
+ m->next = NULL;
+ m->mode = mode;
+ m->i1 = i1;
+ m->chg1 = chg1;
+ m->i2 = i2;
+ m->chg2 = chg2;
+ if (*merge)
+ (*merge)->next = m;
+ *merge = m;
+ }
+ return 0;
+}
+
+static int xdl_cleanup_merge(xdmerge_t *c)
+{
+ int count = 0;
+ xdmerge_t *next_c;
+
+ /* were there conflicts? */
+ for (; c; c = next_c) {
+ if (c->mode == 0)
+ count++;
+ next_c = c->next;
+ free(c);
+ }
+ return count;
+}
+
+static int xdl_merge_cmp_lines(xdfenv_t *xe1, int i1, xdfenv_t *xe2, int i2,
+ int line_count, long flags)
+{
+ int i;
+ xrecord_t **rec1 = xe1->xdf2.recs + i1;
+ xrecord_t **rec2 = xe2->xdf2.recs + i2;
+
+ for (i = 0; i < line_count; i++) {
+ int result = xdl_recmatch(rec1[i]->ptr, rec1[i]->size,
+ rec2[i]->ptr, rec2[i]->size, flags);
+ if (!result)
+ return -1;
+ }
+ return 0;
+}
+
+static int xdl_recs_copy(xdfenv_t *xe, int i, int count, int add_nl, char *dest)
+{
+ xrecord_t **recs = xe->xdf2.recs + i;
+ int size = 0;
+
+ if (count < 1)
+ return 0;
+
+ for (i = 0; i < count; size += recs[i++]->size)
+ if (dest)
+ memcpy(dest + size, recs[i]->ptr, recs[i]->size);
+ if (add_nl) {
+ i = recs[count - 1]->size;
+ if (i == 0 || recs[count - 1]->ptr[i - 1] != '\n') {
+ if (dest)
+ dest[size] = '\n';
+ size++;
+ }
+ }
+ return size;
+}
+
+static int xdl_fill_merge_buffer(xdfenv_t *xe1, const char *name1,
+ xdfenv_t *xe2, const char *name2, xdmerge_t *m, char *dest)
+{
+ const int marker_size = 7;
+ int marker1_size = (name1 ? strlen(name1) + 1 : 0);
+ int marker2_size = (name2 ? strlen(name2) + 1 : 0);
+ int conflict_marker_size = 3 * (marker_size + 1)
+ + marker1_size + marker2_size;
+ int size, i1, j;
+
+ for (size = i1 = 0; m; m = m->next) {
+ if (m->mode == 0) {
+ size += xdl_recs_copy(xe1, i1, m->i1 - i1, 0,
+ dest ? dest + size : NULL);
+ if (dest) {
+ for (j = 0; j < marker_size; j++)
+ dest[size++] = '<';
+ if (marker1_size) {
+ dest[size] = ' ';
+ memcpy(dest + size + 1, name1,
+ marker1_size - 1);
+ size += marker1_size;
+ }
+ dest[size++] = '\n';
+ } else
+ size += conflict_marker_size;
+ size += xdl_recs_copy(xe1, m->i1, m->chg1, 1,
+ dest ? dest + size : NULL);
+ if (dest) {
+ for (j = 0; j < marker_size; j++)
+ dest[size++] = '=';
+ dest[size++] = '\n';
+ }
+ size += xdl_recs_copy(xe2, m->i2, m->chg2, 1,
+ dest ? dest + size : NULL);
+ if (dest) {
+ for (j = 0; j < marker_size; j++)
+ dest[size++] = '>';
+ if (marker2_size) {
+ dest[size] = ' ';
+ memcpy(dest + size + 1, name2,
+ marker2_size - 1);
+ size += marker2_size;
+ }
+ dest[size++] = '\n';
+ }
+ } else if (m->mode == 1)
+ size += xdl_recs_copy(xe1, i1, m->i1 + m->chg1 - i1, 0,
+ dest ? dest + size : NULL);
+ else if (m->mode == 2)
+ size += xdl_recs_copy(xe2, m->i2 - m->i1 + i1,
+ m->i1 + m->chg2 - i1, 0,
+ dest ? dest + size : NULL);
+ i1 = m->i1 + m->chg1;
+ }
+ size += xdl_recs_copy(xe1, i1, xe1->xdf2.nrec - i1, 0,
+ dest ? dest + size : NULL);
+ return size;
+}
+
+/*
+ * Sometimes, changes are not quite identical, but differ in only a few
+ * lines. Try hard to show only these few lines as conflicting.
+ */
+static int xdl_refine_conflicts(xdfenv_t *xe1, xdfenv_t *xe2, xdmerge_t *m,
+ xpparam_t const *xpp)
+{
+ for (; m; m = m->next) {
+ mmfile_t t1, t2;
+ xdfenv_t xe;
+ xdchange_t *xscr, *x;
+ int i1 = m->i1, i2 = m->i2;
+
+ /* let's handle just the conflicts */
+ if (m->mode)
+ continue;
+
+ /*
+ * This probably does not work outside git, since
+ * we have a very simple mmfile structure.
+ */
+ t1.ptr = (char *)xe1->xdf2.recs[m->i1]->ptr;
+ t1.size = xe1->xdf2.recs[m->i1 + m->chg1 - 1]->ptr
+ + xe1->xdf2.recs[m->i1 + m->chg1 - 1]->size - t1.ptr;
+ t2.ptr = (char *)xe2->xdf2.recs[m->i2]->ptr;
+ t2.size = xe2->xdf2.recs[m->i2 + m->chg2 - 1]->ptr
+ + xe2->xdf2.recs[m->i2 + m->chg2 - 1]->size - t2.ptr;
+ if (xdl_do_diff(&t1, &t2, xpp, &xe) < 0)
+ return -1;
+ if (xdl_change_compact(&xe.xdf1, &xe.xdf2, xpp->flags) < 0 ||
+ xdl_change_compact(&xe.xdf2, &xe.xdf1, xpp->flags) < 0 ||
+ xdl_build_script(&xe, &xscr) < 0) {
+ xdl_free_env(&xe);
+ return -1;
+ }
+ if (!xscr) {
+ /* If this happens, it's a bug. */
+ xdl_free_env(&xe);
+ return -2;
+ }
+ x = xscr;
+ m->i1 = xscr->i1 + i1;
+ m->chg1 = xscr->chg1;
+ m->i2 = xscr->i2 + i2;
+ m->chg2 = xscr->chg2;
+ while (xscr->next) {
+ xdmerge_t *m2 = xdl_malloc(sizeof(xdmerge_t));
+ if (!m2) {
+ xdl_free_env(&xe);
+ xdl_free_script(x);
+ return -1;
+ }
+ xscr = xscr->next;
+ m2->next = m->next;
+ m->next = m2;
+ m = m2;
+ m->mode = 0;
+ m->i1 = xscr->i1 + i1;
+ m->chg1 = xscr->chg1;
+ m->i2 = xscr->i2 + i2;
+ m->chg2 = xscr->chg2;
+ }
+ xdl_free_env(&xe);
+ xdl_free_script(x);
+ }
+ return 0;
+}
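+
To make the refinement concrete (a made-up example, not from the patch, with the two sides labelled "ours" and "theirs"): suppose both sides rewrite the same two-line region of the original, but agree on the first of the rewritten lines:

	original        ours            theirs
	--------        ----            ------
	one             one             one
	two             TWO             TWO
	three           three!          three?

At XDL_MERGE_MINIMAL or XDL_MERGE_EAGER the whole rewritten region comes out as a single conflict. With the refinement above (XDL_MERGE_ZEALOUS), the two regions are diffed against each other, the line both sides agree on is taken as-is, and only the line that really differs stays between the markers:

	one
	TWO
	<<<<<<< ours
	three!
	=======
	three?
	>>>>>>> theirs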
+
+/*
+ * level == 0: mark all overlapping changes as conflict
+ * level == 1: mark overlapping changes as conflict only if not identical
+ * level == 2: analyze non-identical changes for minimal conflict set
+ *
+ * returns < 0 on error, == 0 for no conflicts, else number of conflicts
+ */
+static int xdl_do_merge(xdfenv_t *xe1, xdchange_t *xscr1, const char *name1,
+ xdfenv_t *xe2, xdchange_t *xscr2, const char *name2,
+ int level, xpparam_t const *xpp, mmbuffer_t *result) {
+ xdmerge_t *changes, *c;
+ int i1, i2, chg1, chg2;
+
+ c = changes = NULL;
+
+ while (xscr1 && xscr2) {
+ if (!changes)
+ changes = c;
+ if (xscr1->i1 + xscr1->chg1 < xscr2->i1) {
+ i1 = xscr1->i2;
+ i2 = xscr2->i2 - xscr2->i1 + xscr1->i1;
+ chg1 = xscr1->chg2;
+ chg2 = xscr1->chg1;
+ if (xdl_append_merge(&c, 1, i1, chg1, i2, chg2)) {
+ xdl_cleanup_merge(changes);
+ return -1;
+ }
+ xscr1 = xscr1->next;
+ continue;
+ }
+ if (xscr2->i1 + xscr2->chg1 < xscr1->i1) {
+ i1 = xscr1->i2 - xscr1->i1 + xscr2->i1;
+ i2 = xscr2->i2;
+ chg1 = xscr2->chg1;
+ chg2 = xscr2->chg2;
+ if (xdl_append_merge(&c, 2, i1, chg1, i2, chg2)) {
+ xdl_cleanup_merge(changes);
+ return -1;
+ }
+ xscr2 = xscr2->next;
+ continue;
+ }
+ if (level < 1 || xscr1->i1 != xscr2->i1 ||
+ xscr1->chg1 != xscr2->chg1 ||
+ xscr1->chg2 != xscr2->chg2 ||
+ xdl_merge_cmp_lines(xe1, xscr1->i2,
+ xe2, xscr2->i2,
+ xscr1->chg2, xpp->flags)) {
+ /* conflict */
+ int off = xscr1->i1 - xscr2->i1;
+ int ffo = off + xscr1->chg1 - xscr2->chg1;
+
+ i1 = xscr1->i2;
+ i2 = xscr2->i2;
+ if (off > 0)
+ i1 -= off;
+ else
+ i2 += off;
+ chg1 = xscr1->i2 + xscr1->chg2 - i1;
+ chg2 = xscr2->i2 + xscr2->chg2 - i2;
+ if (ffo > 0)
+ chg2 += ffo;
+ else
+ chg1 -= ffo;
+ if (xdl_append_merge(&c, 0, i1, chg1, i2, chg2)) {
+ xdl_cleanup_merge(changes);
+ return -1;
+ }
+ }
+
+ i1 = xscr1->i1 + xscr1->chg1;
+ i2 = xscr2->i1 + xscr2->chg1;
+
+ if (i1 >= i2)
+ xscr2 = xscr2->next;
+ if (i2 >= i1)
+ xscr1 = xscr1->next;
+ }
+ while (xscr1) {
+ if (!changes)
+ changes = c;
+ i1 = xscr1->i2;
+ i2 = xscr1->i1 + xe2->xdf2.nrec - xe2->xdf1.nrec;
+ chg1 = xscr1->chg2;
+ chg2 = xscr1->chg1;
+ if (xdl_append_merge(&c, 1, i1, chg1, i2, chg2)) {
+ xdl_cleanup_merge(changes);
+ return -1;
+ }
+ xscr1 = xscr1->next;
+ }
+ while (xscr2) {
+ if (!changes)
+ changes = c;
+ i1 = xscr2->i1 + xe1->xdf2.nrec - xe1->xdf1.nrec;
+ i2 = xscr2->i2;
+ chg1 = xscr2->chg1;
+ chg2 = xscr2->chg2;
+ if (xdl_append_merge(&c, 2, i1, chg1, i2, chg2)) {
+ xdl_cleanup_merge(changes);
+ return -1;
+ }
+ xscr2 = xscr2->next;
+ }
+ if (!changes)
+ changes = c;
+ /* refine conflicts */
+ if (level > 1 && xdl_refine_conflicts(xe1, xe2, changes, xpp) < 0) {
+ xdl_cleanup_merge(changes);
+ return -1;
+ }
+ /* output */
+ if (result) {
+ int size = xdl_fill_merge_buffer(xe1, name1, xe2, name2,
+ changes, NULL);
+ result->ptr = xdl_malloc(size);
+ if (!result->ptr) {
+ xdl_cleanup_merge(changes);
+ return -1;
+ }
+ result->size = size;
+ xdl_fill_merge_buffer(xe1, name1, xe2, name2, changes,
+ result->ptr);
+ }
+ return xdl_cleanup_merge(changes);
+}
+
+int xdl_merge(mmfile_t *orig, mmfile_t *mf1, const char *name1,
+ mmfile_t *mf2, const char *name2,
+ xpparam_t const *xpp, int level, mmbuffer_t *result) {
+ xdchange_t *xscr1, *xscr2;
+ xdfenv_t xe1, xe2;
+ int status;
+
+ result->ptr = NULL;
+ result->size = 0;
+
+ if (xdl_do_diff(orig, mf1, xpp, &xe1) < 0 ||
+ xdl_do_diff(orig, mf2, xpp, &xe2) < 0) {
+ return -1;
+ }
+ if (xdl_change_compact(&xe1.xdf1, &xe1.xdf2, xpp->flags) < 0 ||
+ xdl_change_compact(&xe1.xdf2, &xe1.xdf1, xpp->flags) < 0 ||
+ xdl_build_script(&xe1, &xscr1) < 0) {
+ xdl_free_env(&xe1);
+ return -1;
+ }
+ if (xdl_change_compact(&xe2.xdf1, &xe2.xdf2, xpp->flags) < 0 ||
+ xdl_change_compact(&xe2.xdf2, &xe2.xdf1, xpp->flags) < 0 ||
+ xdl_build_script(&xe2, &xscr2) < 0) {
+ xdl_free_env(&xe2);
+ return -1;
+ }
+ status = 0;
+ if (xscr1 || xscr2) {
+ if (!xscr1) {
+ result->ptr = xdl_malloc(mf2->size);
+ memcpy(result->ptr, mf2->ptr, mf2->size);
+ result->size = mf2->size;
+ } else if (!xscr2) {
+ result->ptr = xdl_malloc(mf1->size);
+ memcpy(result->ptr, mf1->ptr, mf1->size);
+ result->size = mf1->size;
+ } else {
+ status = xdl_do_merge(&xe1, xscr1, name1,
+ &xe2, xscr2, name2,
+ level, xpp, result);
+ }
+ xdl_free_script(xscr1);
+ xdl_free_script(xscr2);
+ }
+ xdl_free_env(&xe1);
+ xdl_free_env(&xe2);
+
+ return status;
+}