ASCIIDOC=asciidoc
ASCIIDOC_EXTRA =
-MANPAGE_XSL = callouts.xsl
+MANPAGE_XSL = manpage-normal.xsl
+XMLTO_EXTRA =
INSTALL?=install
RM ?= rm -f
DOC_REF = origin/man
-include ../config.mak.autogen
-include ../config.mak
+#
+# For asciidoc ...
+# -7.1.2, no extra settings are needed.
+# 8.0-, set ASCIIDOC8.
+#
+
+#
+# For docbook-xsl ...
+# -1.68.1, set ASCIIDOC_NO_ROFF? (based on changelog from 1.73.0)
+# 1.69.0, no extra settings are needed?
+# 1.69.1-1.71.0, set DOCBOOK_SUPPRESS_SP?
+# 1.71.1, no extra settings are needed?
+# 1.72.0, set DOCBOOK_XSL_172.
+# 1.73.0-, set ASCIIDOC_NO_ROFF
+#
+
+#
+# If you had been using DOCBOOK_XSL_172 in an attempt to get rid
+# of 'the ".ft C" problem' in your generated manpages, and you
+# instead ended up with weird characters around callouts, try
+# using ASCIIDOC_NO_ROFF instead (it works fine with ASCIIDOC8).
+#
+
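+#
+# As an illustration only (the right combination depends on the
+# toolchain you actually have installed), a setup with asciidoc 8.x
+# and docbook-xsl 1.73 or later might build the manual pages with:
+#
+#	make ASCIIDOC8=YesPlease ASCIIDOC_NO_ROFF=YesPlease man
+#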
ifdef ASCIIDOC8
ASCIIDOC_EXTRA += -a asciidoc7compatible
endif
ifdef DOCBOOK_XSL_172
-ASCIIDOC_EXTRA += -a docbook-xsl-172
+ASCIIDOC_EXTRA += -a git-asciidoc-no-roff
MANPAGE_XSL = manpage-1.72.xsl
+else
+ ifdef ASCIIDOC_NO_ROFF
+ # docbook-xsl after 1.72 needs the regular XSL, but will not
+ # pass-thru raw roff codes from asciidoc.conf, so turn them off.
+ ASCIIDOC_EXTRA += -a git-asciidoc-no-roff
+ endif
+endif
+ifdef MAN_BOLD_LITERAL
+XMLTO_EXTRA += -m manpage-bold-literal.xsl
+endif
+ifdef DOCBOOK_SUPPRESS_SP
+XMLTO_EXTRA += -m manpage-suppress-sp.xsl
endif
#
# yourself - yes, all 6 characters of it!
#
+QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir
+QUIET_SUBDIR1 =
+
+ifneq ($(findstring $(MAKEFLAGS),w),w)
+PRINT_DIR = --no-print-directory
+else # "make -w"
+NO_SUBDIR = :
+endif
+
+ifneq ($(findstring $(MAKEFLAGS),s),s)
+ifndef V
+ QUIET_ASCIIDOC = @echo ' ' ASCIIDOC $@;
+ QUIET_XMLTO = @echo ' ' XMLTO $@;
+ QUIET_DB2TEXI = @echo ' ' DB2TEXI $@;
+ QUIET_MAKEINFO = @echo ' ' MAKEINFO $@;
+ QUIET_DBLATEX = @echo ' ' DBLATEX $@;
+ QUIET_XSLTPROC = @echo ' ' XSLTPROC $@;
+ QUIET_GEN = @echo ' ' GEN $@;
+ QUIET_STDERR = 2> /dev/null
+ QUIET_SUBDIR0 = +@subdir=
+ QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \
+ $(MAKE) $(PRINT_DIR) -C $$subdir
+ export V
+endif
+endif
+
all: html man
html: $(DOC_HTML)
sh ./install-webdoc.sh $(DESTDIR)$(htmldir)
../GIT-VERSION-FILE: .FORCE-GIT-VERSION-FILE
- $(MAKE) -C ../ GIT-VERSION-FILE
+ $(QUIET_SUBDIR0)../ $(QUIET_SUBDIR1) GIT-VERSION-FILE
-include ../GIT-VERSION-FILE
# Determine "include::" file references in asciidoc files.
#
doc.dep : $(wildcard *.txt) build-docdep.perl
- $(RM) $@+ $@
- $(PERL_PATH) ./build-docdep.perl >$@+
+ $(QUIET_GEN)$(RM) $@+ $@ && \
+ $(PERL_PATH) ./build-docdep.perl >$@+ $(QUIET_STDERR) && \
mv $@+ $@
-include doc.dep
$(cmds_txt): cmd-list.made
cmd-list.made: cmd-list.perl ../command-list.txt $(MAN1_TXT)
- $(RM) $@
- $(PERL_PATH) ./cmd-list.perl ../command-list.txt
+ $(QUIET_GEN)$(RM) $@ && \
+ $(PERL_PATH) ./cmd-list.perl ../command-list.txt $(QUIET_STDERR) && \
date >$@
clean:
$(RM) *.xml *.xml+ *.html *.html+ *.1 *.5 *.7
- $(RM) *.texi *.texi+ git.info gitman.info
+ $(RM) *.texi *.texi+ *.texi++ git.info gitman.info
$(RM) howto-index.txt howto/*.html doc.dep
$(RM) technical/api-*.html technical/api-index.txt
$(RM) $(cmds_txt) *.made
$(MAN_HTML): %.html : %.txt
- $(RM) $@+ $@
+ $(QUIET_ASCIIDOC)$(RM) $@+ $@ && \
$(ASCIIDOC) -b xhtml11 -d manpage -f asciidoc.conf \
- $(ASCIIDOC_EXTRA) -agit_version=$(GIT_VERSION) -o $@+ $<
+ $(ASCIIDOC_EXTRA) -agit_version=$(GIT_VERSION) -o $@+ $< && \
mv $@+ $@
%.1 %.5 %.7 : %.xml
- $(RM) $@
- xmlto -m $(MANPAGE_XSL) man $<
+ $(QUIET_XMLTO)$(RM) $@ && \
+ xmlto -m $(MANPAGE_XSL) $(XMLTO_EXTRA) man $<
%.xml : %.txt
- $(RM) $@+ $@
+ $(QUIET_ASCIIDOC)$(RM) $@+ $@ && \
$(ASCIIDOC) -b docbook -d manpage -f asciidoc.conf \
- $(ASCIIDOC_EXTRA) -agit_version=$(GIT_VERSION) -o $@+ $<
+ $(ASCIIDOC_EXTRA) -agit_version=$(GIT_VERSION) -o $@+ $< && \
mv $@+ $@
user-manual.xml: user-manual.txt user-manual.conf
- $(ASCIIDOC) -b docbook -d book $<
+ $(QUIET_ASCIIDOC)$(ASCIIDOC) -b docbook -d book $<
technical/api-index.txt: technical/api-index-skel.txt \
technical/api-index.sh $(patsubst %,%.txt,$(API_DOCS))
- cd technical && sh ./api-index.sh
+ $(QUIET_GEN)cd technical && sh ./api-index.sh
$(patsubst %,%.html,$(API_DOCS) technical/api-index): %.html : %.txt
- $(ASCIIDOC) -b xhtml11 -f asciidoc.conf \
+ $(QUIET_ASCIIDOC)$(ASCIIDOC) -b xhtml11 -f asciidoc.conf \
$(ASCIIDOC_EXTRA) -agit_version=$(GIT_VERSION) $*.txt
XSLT = docbook.xsl
XSLTOPTS = --xinclude --stringparam html.stylesheet docbook-xsl.css
user-manual.html: user-manual.xml
- xsltproc $(XSLTOPTS) -o $@ $(XSLT) $<
+ $(QUIET_XSLTPROC)xsltproc $(XSLTOPTS) -o $@ $(XSLT) $<
git.info: user-manual.texi
- $(MAKEINFO) --no-split -o $@ user-manual.texi
+ $(QUIET_MAKEINFO)$(MAKEINFO) --no-split -o $@ user-manual.texi
user-manual.texi: user-manual.xml
- $(RM) $@+ $@
- $(DOCBOOK2X_TEXI) user-manual.xml --encoding=UTF-8 --to-stdout | \
- $(PERL_PATH) fix-texi.perl >$@+
+ $(QUIET_DB2TEXI)$(RM) $@+ $@ && \
+ $(DOCBOOK2X_TEXI) user-manual.xml --encoding=UTF-8 --to-stdout >$@++ && \
+ $(PERL_PATH) fix-texi.perl <$@++ >$@+ && \
+ rm $@++ && \
mv $@+ $@
user-manual.pdf: user-manual.xml
- $(RM) $@+ $@
- $(DBLATEX) -o $@+ -p /etc/asciidoc/dblatex/asciidoc-dblatex.xsl -s /etc/asciidoc/dblatex/asciidoc-dblatex.sty $<
+ $(QUIET_DBLATEX)$(RM) $@+ $@ && \
+ $(DBLATEX) -o $@+ -p /etc/asciidoc/dblatex/asciidoc-dblatex.xsl -s /etc/asciidoc/dblatex/asciidoc-dblatex.sty $< && \
mv $@+ $@
gitman.texi: $(MAN_XML) cat-texi.perl
- $(RM) $@+ $@
+ $(QUIET_DB2TEXI)$(RM) $@+ $@ && \
($(foreach xml,$(MAN_XML),$(DOCBOOK2X_TEXI) --encoding=UTF-8 \
- --to-stdout $(xml);)) | $(PERL_PATH) cat-texi.perl $@ >$@+
+ --to-stdout $(xml) &&) true) > $@++ && \
+ $(PERL_PATH) cat-texi.perl $@ <$@++ >$@+ && \
+ rm $@++ && \
mv $@+ $@
gitman.info: gitman.texi
- $(MAKEINFO) --no-split --no-validate $*.texi
+ $(QUIET_MAKEINFO)$(MAKEINFO) --no-split --no-validate $*.texi
$(patsubst %.txt,%.texi,$(MAN_TXT)): %.texi : %.xml
- $(RM) $@+ $@
- $(DOCBOOK2X_TEXI) --to-stdout $*.xml >$@+
+ $(QUIET_DB2TEXI)$(RM) $@+ $@ && \
+ $(DOCBOOK2X_TEXI) --to-stdout $*.xml >$@+ && \
mv $@+ $@
howto-index.txt: howto-index.sh $(wildcard howto/*.txt)
- $(RM) $@+ $@
- sh ./howto-index.sh $(wildcard howto/*.txt) >$@+
+ $(QUIET_GEN)$(RM) $@+ $@ && \
+ sh ./howto-index.sh $(wildcard howto/*.txt) >$@+ && \
mv $@+ $@
$(patsubst %,%.html,$(ARTICLES)) : %.html : %.txt
- $(ASCIIDOC) -b xhtml11 $*.txt
+ $(QUIET_ASCIIDOC)$(ASCIIDOC) -b xhtml11 $*.txt
WEBDOC_DEST = /pub/software/scm/git/docs
$(patsubst %.txt,%.html,$(wildcard howto/*.txt)): %.html : %.txt
- $(RM) $@+ $@
- sed -e '1,/^$$/d' $< | $(ASCIIDOC) -b xhtml11 - >$@+
+ $(QUIET_ASCIIDOC)$(RM) $@+ $@ && \
+ sed -e '1,/^$$/d' $< | $(ASCIIDOC) -b xhtml11 - >$@+ && \
mv $@+ $@
install-webdoc : html
* 'git-submodule add' did not tolerate extra slashes and ./ in the path it
accepted from the command line; it now is more lenient.
+* git-svn misbehaved when the project contained a path that began with
+ two dashes.
+
+* import-zips script (in contrib) did not compute the common directory
+ prefix correctly.
+
+Many small documentation updates are included as well.
---
exec >/var/tmp/1
-O=v1.6.2.1-23-g67c176f
+O=v1.6.2.1-46-gb19293d
echo O=$(git describe maint)
git shortlog --no-merges $O..maint
should happen upon such a push by setting the configuration variable
receive.denyDeleteCurrent in the receiving repository.
+When the user does not tell "git push" what to push, it has always
+pushed matching refs. For some people this is unexpected, and a new
+configuration variable push.default has been introduced to allow
+choosing a different default behaviour. To advertise the new feature,
+a big warning is issued if this is not configured and a git push without
+arguments is attempted.
+
Updates since v1.6.2
--------------------
repositories. It may not be useful in practice; meant primarily for
testing.
+* http transport learned to prompt for and use a password when fetching
+  from or pushing to an http://user@host.xz/ URL.
+
* (msysgit) progress output that is sent over the sideband protocol can
be handled appropriately in Windows console.
* Test scripts can be run under valgrind.
+* Test scripts can be run with installed git.
+
* Makefile learned 'coverage' option to run the test suites with
coverage tracking enabled.
+* Building the manpages with docbook-xsl between 1.69.1 and 1.71.1 now
+ requires setting DOCBOOK_SUPPRESS_SP to work around a docbook-xsl bug.
+ This workaround used to be enabled by default, but causes problems
+ with newer versions of docbook-xsl.
+
Fixes since v1.6.2
------------------
Here are fixes that this release has, but have not been backported to
v1.6.2.X series.
+* "git-blame -S" did not quite work near the commits that were given
+ on the command line correctly (jc/maint-1.6.0-blame-s).
+
+* The initial checkout did not read the attributes from the .gitattributes
+ file that is being checked out.
+
+* git-diff feeds files in work-tree representation to external diff and
+ textconv (js/maint-diff-temp-smudge).
+
* git-gc spent an excessive amount of time to decide if an object appears
in a locally existing pack (if needed, backport by merging 69e020a).
---
exec >/var/tmp/1
-O=v1.6.2.1-213-g7d4e3a7
+O=v1.6.2.1-399-gaa72a14
echo O=$(git describe master)
git shortlog --no-merges $O..master ^maint
endif::backend-docbook[]
ifdef::backend-docbook[]
-ifndef::docbook-xsl-172[]
+ifndef::git-asciidoc-no-roff[]
# "unbreak" docbook-xsl v1.68 for manpages. v1.69 works with or without this.
# v1.72 breaks with this because it replaces dots not in roff requests.
[listingblock]
endif::doctype-manpage[]
</literallayout>
{title#}</example>
-endif::docbook-xsl-172[]
+endif::git-asciidoc-no-roff[]
-ifdef::docbook-xsl-172[]
+ifdef::git-asciidoc-no-roff[]
ifdef::doctype-manpage[]
# The following two small workarounds insert a simple paragraph after screen
[listingblock]
<example><title>{title}</title>
-<screen>
+<literallayout>
|
-</screen><simpara></simpara>
+</literallayout><simpara></simpara>
{title#}</example>
[verseblock]
{title%}<literallayout{id? id="{id}"}>
{title#}<literallayout>
|
-</literallayout><simpara></simpara>
+</literallayout>
{title#}</para></formalpara>
+{title%}<simpara></simpara>
endif::doctype-manpage[]
-endif::docbook-xsl-172[]
+endif::git-asciidoc-no-roff[]
endif::backend-docbook[]
ifdef::doctype-manpage[]
+++ /dev/null
-<!-- callout.xsl: converts asciidoc callouts to man page format -->
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
-<xsl:template match="co">
- <xsl:value-of select="concat('\fB(',substring-after(@id,'-'),')\fR')"/>
-</xsl:template>
-<xsl:template match="calloutlist">
- <xsl:text>.sp </xsl:text>
- <xsl:apply-templates/>
- <xsl:text> </xsl:text>
-</xsl:template>
-<xsl:template match="callout">
- <xsl:value-of select="concat('\fB',substring-after(@arearefs,'-'),'. \fR')"/>
- <xsl:apply-templates/>
- <xsl:text>.br </xsl:text>
-</xsl:template>
-
-<!-- sorry, this is not about callouts, but attempts to work around
- spurious .sp at the tail of the line docbook stylesheets seem to add -->
-<xsl:template match="simpara">
- <xsl:variable name="content">
- <xsl:apply-templates/>
- </xsl:variable>
- <xsl:value-of select="normalize-space($content)"/>
- <xsl:if test="not(ancestor::authorblurb) and
- not(ancestor::personblurb)">
- <xsl:text> </xsl:text>
- </xsl:if>
-</xsl:template>
-
-</xsl:stylesheet>
This option defaults to never.
branch.<name>.remote::
- When in branch <name>, it tells 'git-fetch' which remote to fetch.
- If this option is not given, 'git-fetch' defaults to remote "origin".
+ When in branch <name>, it tells 'git-fetch' and 'git-push' which
+ remote to fetch from/push to. It defaults to `origin` if no remote is
+ configured. `origin` is also used if you are not on any branch.
branch.<name>.merge::
+ Defines, together with branch.<name>.remote, the upstream branch
+ for the given branch. It tells 'git-fetch'/'git-pull' which
+ branch to merge from.
When in branch <name>, it tells 'git-fetch' the default
refspec to be marked for merging in FETCH_HEAD. The value is
handled like the remote part of a refspec, and must match a
pull.twohead::
The default merge strategy to use when pulling a single branch.
+push.default::
+ Defines the action git push should take if no refspec is given
+ on the command line, no refspec is configured in the remote, and
+ no refspec is implied by any of the options given on the command
+ line. Possible values are:
++
+* `nothing` do not push anything.
+* `matching` push all matching branches.
+  All branches having the same name on both ends are considered to be
+ matching. This is the default.
+* `tracking` push the current branch to the branch it is tracking.
+* `current` push the current branch to a branch of the same name.
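++
+For example, to make a plain `git push` update only the branch you are
+currently on, you could run:
++
+------------
+$ git config push.default tracking
+------------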
+
rebase.stat::
Whether to show a diffstat of what changed upstream since the last
rebase. False by default.
Bisect reset
~~~~~~~~~~~~
-To return to the original head after a bisect session, you issue the
+To return to the original head after a bisect session, issue the
following command:
------------------------------------------------
Bisect visualize
~~~~~~~~~~~~~~~~
-To see the currently remaining suspects in 'gitk', the following command
-is issued during the bisection process:
+To see the currently remaining suspects in 'gitk', issue the following
+command during the bisection process:
------------
$ git bisect visualize
Bisect log and bisect replay
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-After having marked revisions as good or bad, you issue the following
+After having marked revisions as good or bad, issue the following
command to show what has been done so far:
------------
Avoiding testing a commit
~~~~~~~~~~~~~~~~~~~~~~~~~
-If in the middle of a bisect session, you know that the next suggested
+If, in the middle of a bisect session, you know that the next suggested
revision is not a good one to test (e.g. the change the commit
introduces is known not to work in your environment and you know it
does not have anything to do with the bug you are chasing), you may
# was suggested
------------
-Then compile and test the chosen revision. Afterwards the revision
-is marked as good or bad in the usual manner.
+Then compile and test the chosen revision, and afterwards mark
+the revision as good or bad in the usual manner.
Bisect skip
~~~~~~~~~~~~
$ git bisect skip v2.5..v2.6
------------
-The effect of this would be that no commit between `v2.5` excluded and
-`v2.6` included could be tested.
+This tells the bisect process that no commit after `v2.5`, up to and
+including `v2.6`, should be tested.
Note that if you also want to skip the first commit of the range you
would issue the command:
$ git bisect skip v2.5 v2.5..v2.6
------------
-This would cause the commits between `v2.5` included and `v2.6` included
-to be skipped.
+This tells the bisect process that the commits between `v2.5` included
+and `v2.6` included should be skipped.
Cutting down bisection by giving more parameters to bisect start
Some workflows require that one or more branches of development on one
machine be replicated on another machine, but the two machines cannot
-be directly connected so the interactive git protocols (git, ssh,
-rsync, http) cannot be used. This command provides support for
+be directly connected, and therefore the interactive git protocols (git,
+ssh, rsync, http) cannot be used. This command provides support for
'git-fetch' and 'git-pull' to operate by packaging objects and references
in an archive at the originating machine, then importing those into
another repository using 'git-fetch' and 'git-pull'
after moving the archive by some means (i.e., by sneakernet). As no
-direct connection between repositories exists, the user must specify a
+direct connection between the repositories exists, the user must specify a
basis for the bundle that is held by the destination repository: the
bundle assumes that all objects in the basis are already in the
destination repository.
bundle format itself as well as checking that the prerequisite
commits exist and are fully linked in the current repository.
'git-bundle' prints a list of missing commits, if any, and exits
- with non-zero status.
+ with a non-zero status.
list-heads <file>::
Lists the references defined in the bundle. If followed by a
unbundle <file>::
Passes the objects in the bundle to 'git-index-pack'
for storage in the repository, then prints the names of all
- defined references. If a reflist is given, only references
- matching those in the given list are printed. This command is
+ defined references. If a list of references is given, only
+ references matching those in the list are printed. This command is
really plumbing, intended to be called only by 'git-fetch'.
[git-rev-list-args...]::
A list of arguments, acceptable to 'git-rev-parse' and
- 'git-rev-list', that specify the specific objects and references
- to transport. For example, "master~10..master" causes the
+ 'git-rev-list', that specifies the specific objects and references
+ to transport. For example, `master\~10..master` causes the
current master reference to be packaged along with all objects
added since its 10th ancestor commit. There is no explicit
limit to the number of references and objects that may be
A list of references used to limit the references reported as
available. This is principally of use to 'git-fetch', which
expects to receive only those references asked for and not
- necessarily everything in the pack (in this case, 'git-bundle' is
- acting like 'git-fetch-pack').
+ necessarily everything in the pack (in this case, 'git-bundle' acts
+ like 'git-fetch-pack').
SPECIFYING REFERENCES
---------------------
'git-bundle' will only package references that are shown by
'git-show-ref': this includes heads, tags, and remote heads. References
-such as master~1 cannot be packaged, but are perfectly suitable for
+such as `master\~1` cannot be packaged, but are perfectly suitable for
defining the basis. More than one reference may be packaged, and more
than one basis can be specified. The objects packaged are those not
contained in the union of the given bases. Each basis can be
-specified explicitly (e.g., ^master~10), or implicitly (e.g.,
-master~10..master, --since=10.days.ago master).
+specified explicitly (e.g. `^master\~10`), or implicitly (e.g.
+`master\~10..master`, `--since=10.days.ago master`).
It is very important that the basis used be held by the destination.
-It is okay to err on the side of conservatism, causing the bundle file
-to contain objects already in the destination as these are ignored
+It is okay to err on the side of caution, causing the bundle file
+to contain objects already in the destination, as these are ignored
when unpacking at the destination.
EXAMPLE
Assume you want to transfer the history from a repository R1 on machine A
to another repository R2 on machine B.
For whatever reason, direct connection between A and B is not allowed,
-but we can move data from A to B via some mechanism (CD, email, etc).
-We want to update R2 with developments made on branch master in R1.
+but we can move data from A to B via some mechanism (CD, email, etc.).
+We want to update R2 with development made on the branch master in R1.
-To bootstrap the process, you can first create a bundle that doesn't have
-any basis. You can use a tag to remember up to what commit you sent out
-in order to make it easy to later update the other repository with
-incremental bundle,
+To bootstrap the process, you can first create a bundle that does not have
+any basis. You can use a tag to remember up to what commit you last
+processed, in order to make it easy to later update the other repository
+with an incremental bundle:
----------------
machineA$ cd R1
machineA$ git tag -f lastR2bundle master
----------------
-Then you sneakernet file.bundle to the target machine B. Because you don't
-have to have any object to extract objects from such a bundle, not only
-you can fetch/pull from a bundle, you can clone from it as if it was a
-remote repository.
+Then you transfer file.bundle to the target machine B. If you are creating
+the repository on machine B, then you can clone from the bundle as if it
+were a remote repository instead of creating an empty repository and then
+pulling or fetching objects from the bundle:
----------------
machineB$ git clone /home/me/tmp/file.bundle R2
----------------
This will define a remote called "origin" in the resulting repository that
-lets you fetch and pull from the bundle. $GIT_DIR/config file in R2 may
+lets you fetch and pull from the bundle. The $GIT_DIR/config file in R2 will
have an entry like this:
------------------------
fetch = refs/heads/*:refs/remotes/origin/*
------------------------
-You can fetch/pull to update the resulting mine.git repository after
-replacing the bundle you store at /home/me/tmp/file.bundle with incremental
-updates from here on.
+To update the resulting R2 repository, you can fetch or pull after
+replacing the bundle stored at /home/me/tmp/file.bundle with incremental
+updates.
-After working more in the original repository, you can create an
-incremental bundle to update the other:
+After working some more in the original repository, you can create an
+incremental bundle to update the other repository:
----------------
machineA$ cd R1
machineA$ git tag -f lastR2bundle master
----------------
-and sneakernet it to the other machine to replace /home/me/tmp/file.bundle,
-and pull from it.
+You then transfer the bundle to the other machine to replace
+/home/me/tmp/file.bundle, and pull from it.
----------------
machineB$ cd R2
----------------
If you know up to what commit the intended recipient repository should
-have the necessary objects for, you can use that knowledge to specify the
+have the necessary objects, you can use that knowledge to specify the
basis, giving a cut-off point to limit the revisions and objects that go
in the resulting bundle. The previous example used lastR2bundle tag
-for this purpose, but you can use other options you would give to
+for this purpose, but you can use any other options that you would give to
the linkgit:git-log[1] command. Here are more examples:
-You can use a tag that is present in both.
+You can use a tag that is present in both:
----------------
$ git bundle create mybundle v1.0.0..master
----------------
-You can use a basis based on time.
+You can use a basis based on time:
----------------
$ git bundle create mybundle --since=10.days master
----------------
-Or you can use the number of commits.
+You can use the number of commits:
----------------
$ git bundle create mybundle -10 master
----------------
You can run `git-bundle verify` to see if you can extract from a bundle
-that was created with a basis.
+that was created with a basis:
----------------
$ git bundle verify mybundle
----------------
This will list what commits you must have in order to extract from the
-bundle and will error out if you don't have them.
+bundle and will error out if you do not have them.
A bundle from a recipient repository's point of view is just like a
-regular repository it fetches/pulls from. You can for example map
-refs, like this example, when fetching:
+regular repository which it fetches or pulls from. You can, for example, map
+references when fetching:
----------------
$ git fetch mybundle master:localRef
----------------
-Or see what refs it offers.
+You can also see what references it offers:
----------------
$ git ls-remote mybundle
NAME
----
-git-cat-file - Provide content or type/size information for repository objects
+git-cat-file - Provide content or type and size information for repository objects
SYNOPSIS
DESCRIPTION
-----------
-In the first form, provides content or type of objects in the repository. The
-type is required unless '-t' or '-p' is used to find the object type, or '-s'
-is used to find the object size.
+In its first form, the command provides the content or the type of an object in
+the repository. The type is required unless '-t' or '-p' is used to find the
+object type, or '-s' is used to find the object size.
-In the second form, a list of object (separated by LFs) is provided on stdin,
-and the SHA1, type, and size of each object is printed on stdout.
+In the second form, a list of objects (separated by linefeeds) is provided on
+stdin, and the SHA1, type, and size of each object is printed on stdout.
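+
+As an illustration (`HEAD` stands in for any valid object name), the
+second form could be used like this:
+
+------------
+$ echo HEAD | git cat-file --batch-check
+------------
+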
OPTIONS
-------
<object>::
The name of the object to show.
For a more complete list of ways to spell object names, see
- "SPECIFYING REVISIONS" section in linkgit:git-rev-parse[1].
+ the "SPECIFYING REVISIONS" section in linkgit:git-rev-parse[1].
-t::
Instead of the content, show the object type identified by
stdin. May not be combined with any other options or arguments.
--batch-check::
- Print the SHA1, type, and size of each object provided on stdin. May not be
- combined with any other options or arguments.
+ Print the SHA1, type, and size of each object provided on stdin. May not
+ be combined with any other options or arguments.
OUTPUT
------
DESCRIPTION
-----------
-For every pathname, this command will list if each attr is 'unspecified',
+For every pathname, this command will list whether each attribute is 'unspecified',
'set', or 'unset' as a gitattribute on that pathname.
OPTIONS
Read file names from stdin instead of from the command-line.
-z::
- Only meaningful with `--stdin`; paths are separated with
- NUL character instead of LF.
+ Only meaningful with `--stdin`; paths are separated with a
+ NUL character instead of a linefeed character.
\--::
- Interpret all preceding arguments as attributes, and all following
+ Interpret all preceding arguments as attributes and all following
arguments as path names. If not supplied, only the first argument will
be treated as an attribute.
The output is of the form:
<path> COLON SP <attribute> COLON SP <info> LF
-Where <path> is the path of a file being queried, <attribute> is an attribute
+<path> is the path of a file being queried, <attribute> is an attribute
being queried and <info> can be either:
'unspecified';; when the attribute is not defined for the path.
-'unset';; when the attribute is defined to false.
-'set';; when the attribute is defined to true.
+'unset';; when the attribute is defined as false.
+'set';; when the attribute is defined as true.
<value>;; when a value has been assigned to the attribute.
EXAMPLES
org/example/MyClass.java: myAttr: set
---------------
-* Listing attribute for multiple files:
+* Listing an attribute for multiple files:
---------------
$ git check-attr myAttr -- org/example/MyClass.java org/example/NoMyAttr.java
org/example/MyClass.java: myAttr: set
NAME
----
-git-check-ref-format - Make sure ref name is well formed
+git-check-ref-format - Ensures that a reference name is well formed
SYNOPSIS
--------
DESCRIPTION
-----------
-Checks if a given 'refname' is acceptable, and exits non-zero if
-it is not.
+Checks if a given 'refname' is acceptable, and exits with a non-zero
+status if it is not.
A reference is used in git to specify branches and tags. A
-branch head is stored under `$GIT_DIR/refs/heads` directory, and
-a tag is stored under `$GIT_DIR/refs/tags` directory. git
-imposes the following rules on how refs are named:
+branch head is stored under the `$GIT_DIR/refs/heads` directory, and
+a tag is stored under the `$GIT_DIR/refs/tags` directory. git
+imposes the following rules on how references are named:
-. It can include slash `/` for hierarchical (directory)
+. They can include slash `/` for hierarchical (directory)
grouping, but no slash-separated component can begin with a
- dot `.`;
+ dot `.`.
-. It cannot have two consecutive dots `..` anywhere;
+. They cannot have two consecutive dots `..` anywhere.
-. It cannot have ASCII control character (i.e. bytes whose
+. They cannot have ASCII control characters (i.e. bytes whose
values are lower than \040, or \177 `DEL`), space, tilde `~`,
caret `{caret}`, colon `:`, question-mark `?`, asterisk `*`,
- or open bracket `[` anywhere;
+ or open bracket `[` anywhere.
-. It cannot end with a slash `/`.
+. They cannot end with a slash `/`.
-These rules makes it easy for shell script based tools to parse
-refnames, pathname expansion by the shell when a refname is used
+These rules make it easy for shell script based tools to parse
+reference names, pathname expansion by the shell when a reference name is used
unquoted (by mistake), and also avoids ambiguities in certain
-refname expressions (see linkgit:git-rev-parse[1]). Namely:
+reference name expressions (see linkgit:git-rev-parse[1]):
-. double-dot `..` are often used as in `ref1..ref2`, and in some
- context this notation means `{caret}ref1 ref2` (i.e. not in
- ref1 and in ref2).
+. A double-dot `..` is often used as in `ref1..ref2`, and in some
+ contexts this notation means `{caret}ref1 ref2` (i.e. not in
+ `ref1` and in `ref2`).
-. tilde `~` and caret `{caret}` are used to introduce postfix
+. A tilde `~` and caret `{caret}` are used to introduce the postfix
'nth parent' and 'peel onion' operation.
-. colon `:` is used as in `srcref:dstref` to mean "use srcref\'s
+. A colon `:` is used as in `srcref:dstref` to mean "use srcref\'s
value and store it in dstref" in fetch and push operations.
It may also be used to select a specific object such as with
'git-cat-file': "git cat-file blob v1.3.3:refs.c".
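+
+As a small illustration (the branch name here is made up), a script
+could validate a proposed name before creating a branch with it:
+
+------------
+$ git check-ref-format "refs/heads/my-topic" && echo "name is OK"
+------------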
--origin <name>::
-o <name>::
Instead of using the remote name 'origin' to keep track
- of the upstream repository, use <name> instead.
+ of the upstream repository, use <name>.
--upload-pack <upload-pack>::
-u <upload-pack>::
Splitting the CVS log into patch sets is done by 'cvsps'.
At least version 2.1 is required.
+*WARNING:* for certain situations the import leads to incorrect results.
+Please see the section <<issues,ISSUES>> for further reference.
+
You should *never* do any work of your own on the branches that are
created by 'git-cvsimport'. By default initial import will create and populate a
"master" branch from the CVS repository's main branch which you're free
-r <remote>::
The git remote to import this CVS repository into.
Moves all CVS branches into remotes/<remote>/<branch>
- akin to the 'git-clone' "--use-separate-remote" option.
+ akin to the way 'git-clone' uses 'origin' by default.
-o <branch-for-HEAD>::
When no remote is specified (via -r) the 'HEAD' branch
Otherwise, success is indicated the Unix way, i.e. by simply exiting with
a zero exit status.
+[[issues]]
+ISSUES
+------
+Problems related to timestamps:
+
+ * If timestamps of commits in the cvs repository are not stable enough
+   to be used for ordering commits, changes may show up in the wrong
+   order.
+ * If any files were ever "cvs import"ed more than once (e.g., import of
+   more than one vendor release), the HEAD contains the wrong content.
+ * If the timestamp order of different files crosses the revision order
+   within the commit matching time window, the order of commits may be
+   wrong.
+
+Problems related to branches:
+
+ * Branches on which no commits have been made are not imported.
+ * All files from the branching point are added to a branch even if
+ never added in cvs.
+ * This applies to files added to the source branch *after* a daughter
+ branch was created: if previously no commit was made on the daughter
+   branch, they will erroneously be added to the daughter branch in git.
+
+Problems related to tags:
+
+* Multiple tags on the same revision are not imported.
+
+If you suspect that any of these issues may apply to the repository you
+want to import, consider using these alternative tools, which have proved
+to be more stable in practice:
+
+* cvs2git (part of cvs2svn), `http://cvs2svn.tigris.org`
+* parsecvs, `http://cgit.freedesktop.org/~keithp/parsecvs`
Author
------
REVISIONS" section in linkgit:git-rev-parse[1]) means the
commits in the specified range.
-A single commit, when interpreted as a <revision range>
-expression, means "everything that leads to that commit", but
-if you write 'git format-patch <commit>', the previous rule
-applies to that command line and you do not get "everything
-since the beginning of the time". If you want to format
-everything since project inception to one commit, say "git
-format-patch \--root <commit>" to make it clear that it is the
-latter case. If you want to format a single commit, you can do
-this with "git format-patch -1 <commit>".
+The first rule takes precedence in the case of a single <commit>. To
+apply the second rule, i.e., format everything since the beginning of
+history up until <commit>, use the '\--root' option: "git format-patch
+\--root <commit>". If you want to format only <commit> itself, you
+can do this with "git format-patch -1 <commit>".
By default, each output file is numbered sequentially from 1, and uses the
first line of the commit message (massaged for pathname safety) as
Add a "Cc:" header to the email headers. This is in addition
to any configured headers, and may be used multiple times.
+--add-header=<header>::
+ Add an arbitrary header to the email headers. This is in addition
+ to any configured headers, and may be used multiple times.
+ For example, --add-header="Organization: git-foo"
+
--cover-letter::
In addition to the patches, generate a cover letter file
containing the shortlog and the overall diffstat. You can
applied. By default the contents of changes in those files are
encoded in the patch.
+--root::
+ Treat the revision argument as a <revision range>, even if it
+ is just a single commit (that would normally be treated as a
+ <since>). Note that root commits included in the specified
+ range are always formatted as creation patches, independently
+ of this flag.
+
CONFIGURATION
-------------
You can specify extra mail header lines to be added to each message
include::merge-strategies.txt[]
-If you tried a merge which resulted in a complex conflicts and
-would want to start over, you can recover with 'git-reset'.
+If you tried a merge which resulted in complex conflicts and
+want to start over, you can recover with 'git-reset'.
CONFIGURATION
-------------
When dealing with 'git-diff-tree' output, it takes advantage of
the fact that the patch is prefixed with the object name of the
-commit, and outputs two 40-byte hexadecimal string. The first
+commit, and outputs two 40-byte hexadecimal strings. The first
string is the patch ID, and the second string is the commit ID.
This can be used to make a mapping from patch ID to commit ID.
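+
+For example (`HEAD` is used here only as an illustration):
+
+------------
+$ git diff-tree -p HEAD | git patch-id
+------------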
-q::
--quiet::
- Make 'git-svn' less verbose.
+ Make 'git-svn' less verbose. Specify a second time to make it
+ even less verbose.
--repack[=<n>]::
--repack-flags=<flags>::
------------------------------------------------------------------------
[svn-remote "project-a"]
url = http://server.org/svn
+ fetch = trunk/project-a:refs/remotes/project-a/trunk
branches = branches/*/project-a:refs/remotes/project-a/branches/*
tags = tags/*/project-a:refs/remotes/project-a/tags/*
- trunk = trunk/project-a:refs/remotes/project-a/trunk
------------------------------------------------------------------------
Keep in mind that the '*' (asterisk) wildcard of the local ref
are printed when using -l.
The default is not to print any annotation lines.
If no number is given to `-n`, only the first line is printed.
+ If the tag is not annotated, the commit message is displayed instead.
-l <pattern>::
List tags with names that match the given pattern (or all if no pattern is given).
flag=1) or a file checkout (retrieving a file from the index, flag=0).
This hook cannot affect the outcome of 'git-checkout'.
+It is also run after 'git-clone', unless the --no-checkout (-n) option is
+used. The first parameter given to the hook is the null-ref, the second the
+ref of the new HEAD, and the flag is always 1.
+
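+A minimal hook along these lines (purely illustrative) could be:
+
+------------
+#!/bin/sh
+echo "post-checkout: $1 -> $2 (branch checkout: $3)" >&2
+------------
+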
This hook can be used to perform repository validity checks, auto-display
differences from the previous HEAD if different, or set working dir metadata
properties.
-<!-- Based on callouts.xsl. Fixes man page callouts for DocBook 1.72 XSL -->
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+<!-- manpage-1.72.xsl:
+ special settings for manpages rendered from asciidoc+docbook
+ handles peculiarities in docbook-xsl 1.72.0 -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
-<xsl:param name="man.output.quietly" select="1"/>
-<xsl:param name="refentry.meta.get.quietly" select="1"/>
+<xsl:import href="manpage-base.xsl"/>
-<xsl:template match="co">
- <xsl:value-of select="concat('▓fB(',substring-after(@id,'-'),')▓fR')"/>
-</xsl:template>
-<xsl:template match="calloutlist">
- <xsl:text>⌂sp </xsl:text>
- <xsl:apply-templates/>
- <xsl:text> </xsl:text>
-</xsl:template>
-<xsl:template match="callout">
- <xsl:value-of select="concat('▓fB',substring-after(@arearefs,'-'),'. ▓fR')"/>
- <xsl:apply-templates/>
- <xsl:text>⌂br </xsl:text>
-</xsl:template>
+<!-- these are the special values for the roff control characters
+ needed for docbook-xsl 1.72.0 -->
+<xsl:param name="git.docbook.backslash">▓</xsl:param>
+<xsl:param name="git.docbook.dot" >⌂</xsl:param>
</xsl:stylesheet>
--- /dev/null
+<!-- manpage-base.xsl:
+ special formatting for manpages rendered from asciidoc+docbook -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+
+<!-- these params silence some output from xmlto -->
+<xsl:param name="man.output.quietly" select="1"/>
+<xsl:param name="refentry.meta.get.quietly" select="1"/>
+
+<!-- convert asciidoc callouts to man page format;
+ git.docbook.backslash and git.docbook.dot params
+ must be supplied by another XSL file or other means -->
+<xsl:template match="co">
+ <xsl:value-of select="concat(
+ $git.docbook.backslash,'fB(',
+ substring-after(@id,'-'),')',
+ $git.docbook.backslash,'fR')"/>
+</xsl:template>
+<xsl:template match="calloutlist">
+ <xsl:value-of select="$git.docbook.dot"/>
+ <xsl:text>sp </xsl:text>
+ <xsl:apply-templates/>
+ <xsl:text> </xsl:text>
+</xsl:template>
+<xsl:template match="callout">
+ <xsl:value-of select="concat(
+ $git.docbook.backslash,'fB',
+ substring-after(@arearefs,'-'),
+ '. ',$git.docbook.backslash,'fR')"/>
+ <xsl:apply-templates/>
+ <xsl:value-of select="$git.docbook.dot"/>
+ <xsl:text>br </xsl:text>
+</xsl:template>
+
+</xsl:stylesheet>
--- /dev/null
+<!-- manpage-bold-literal.xsl:
+ special formatting for manpages rendered from asciidoc+docbook -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+
+<!-- render literal text as bold (instead of plain or monospace);
+ this makes literal text easier to distinguish in manpages
+ viewed on a tty -->
+<xsl:template match="literal">
+ <xsl:value-of select="$git.docbook.backslash"/>
+ <xsl:text>fB</xsl:text>
+ <xsl:apply-templates/>
+ <xsl:value-of select="$git.docbook.backslash"/>
+ <xsl:text>fR</xsl:text>
+</xsl:template>
+
+</xsl:stylesheet>
--- /dev/null
+<!-- manpage-normal.xsl:
+ special settings for manpages rendered from asciidoc+docbook
+ handles anything we want to keep away from docbook-xsl 1.72.0 -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+
+<xsl:import href="manpage-base.xsl"/>
+
+<!-- these are the normal values for the roff control characters -->
+<xsl:param name="git.docbook.backslash">\</xsl:param>
+<xsl:param name="git.docbook.dot" >.</xsl:param>
+
+</xsl:stylesheet>
--- /dev/null
+<!-- manpage-suppress-sp.xsl:
+ special settings for manpages rendered from asciidoc+docbook
+ handles erroneous, inline .sp in manpage output of some
+ versions of docbook-xsl -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+
+<!-- attempt to work around spurious .sp at the tail of the line
+ that some versions of docbook stylesheets seem to add -->
+<xsl:template match="simpara">
+ <xsl:variable name="content">
+ <xsl:apply-templates/>
+ </xsl:variable>
+ <xsl:value-of select="normalize-space($content)"/>
+ <xsl:if test="not(ancestor::authorblurb) and
+ not(ancestor::personblurb)">
+ <xsl:text> </xsl:text>
+ </xsl:if>
+</xsl:template>
+
+</xsl:stylesheet>
resolve::
This can only resolve two heads (i.e. the current branch
- and another branch you pulled from) using 3-way merge
+ and another branch you pulled from) using a 3-way merge
algorithm. It tries to carefully detect criss-cross
merge ambiguities and is considered generally safe and
fast.
recursive::
- This can only resolve two heads using 3-way merge
- algorithm. When there are more than one common
- ancestors that can be used for 3-way merge, it creates a
+ This can only resolve two heads using a 3-way merge
+ algorithm. When there is more than one common
+ ancestor that can be used for 3-way merge, it creates a
merged tree of the common ancestors and uses that as
the reference tree for the 3-way merge. This has been
reported to result in fewer merge conflicts without
pulling or merging one branch.
octopus::
- This resolves more than two-head case, but refuses to do
- complex merge that needs manual resolution. It is
+ This resolves cases with more than two heads, but refuses to do
+ a complex merge that needs manual resolution. It is
primarily meant to be used for bundling topic branch
heads together. This is the default merge strategy when
- pulling or merging more than one branches.
+ pulling or merging more than one branch.
ours::
This resolves any number of heads, but the result of the
------------
*
*
-M
+*
|\
* |
| | *
| \ \
| \ \
-M-. \ \
+*-. \ \
|\ \ \ \
| | * | |
| | | | | *
| | | | | *
-| | | | | M
+| | | | | *
| | | | | |\
| | | | | | *
| * | | | | |
-| | | | | M \
+| | | | | * \
| | | | | |\ |
| | | | * | | |
| | | | * | | |
BASIC_CFLAGS =
BASIC_LDFLAGS =
+# Guard against environment variables
+BUILTIN_OBJS =
+BUILT_INS =
+COMPAT_CFLAGS =
+COMPAT_OBJS =
+LIB_H =
+LIB_OBJS =
+PROGRAMS =
+SCRIPT_PERL =
+SCRIPT_SH =
+TEST_PROGRAMS =
+
SCRIPT_SH += git-am.sh
SCRIPT_SH += git-bisect.sh
SCRIPT_SH += git-filter-branch.sh
BASIC_CFLAGS += -I/usr/local/include
BASIC_LDFLAGS += -L/usr/local/lib
DIR_HAS_BSD_GROUP_SEMANTICS = YesPlease
+ USE_ST_TIMESPEC = YesPlease
THREADED_DELTA_SEARCH = YesPlease
ifeq ($(shell expr "$(uname_R)" : '4\.'),2)
PTHREAD_LIBS = -pthread
+#define NO_THE_INDEX_COMPATIBILITY_MACROS
#include "cache.h"
#include "attr.h"
return res;
}
+static enum git_attr_direction direction;
+static struct index_state *use_index;
+
static struct attr_stack *read_attr_from_file(const char *path, int macro_ok)
{
FILE *fp = fopen(path, "r");
unsigned long sz;
enum object_type type;
void *data;
+ struct index_state *istate = use_index ? use_index : &the_index;
len = strlen(path);
- pos = cache_name_pos(path, len);
+ pos = index_name_pos(istate, path, len);
if (pos < 0) {
/*
* We might be in the middle of a merge, in which
*/
int i;
for (i = -pos - 1;
- (pos < 0 && i < active_nr &&
- !strcmp(active_cache[i]->name, path));
+ (pos < 0 && i < istate->cache_nr &&
+ !strcmp(istate->cache[i]->name, path));
i++)
- if (ce_stage(active_cache[i]) == 2)
+ if (ce_stage(istate->cache[i]) == 2)
pos = i;
}
if (pos < 0)
return NULL;
- data = read_sha1_file(active_cache[pos]->sha1, &type, &sz);
+ data = read_sha1_file(istate->cache[pos]->sha1, &type, &sz);
if (!data || type != OBJ_BLOB) {
free(data);
return NULL;
return data;
}
-static struct attr_stack *read_attr(const char *path, int macro_ok)
+static struct attr_stack *read_attr_from_index(const char *path, int macro_ok)
{
struct attr_stack *res;
char *buf, *sp;
int lineno = 0;
- res = read_attr_from_file(path, macro_ok);
- if (res)
- return res;
-
- res = xcalloc(1, sizeof(*res));
-
- /*
- * There is no checked out .gitattributes file there, but
- * we might have it in the index. We allow operation in a
- * sparsely checked out work tree, so read from it.
- */
buf = read_index_data(path);
if (!buf)
- return res;
+ return NULL;
+ res = xcalloc(1, sizeof(*res));
for (sp = buf; *sp; ) {
char *ep;
int more;
return res;
}
+static struct attr_stack *read_attr(const char *path, int macro_ok)
+{
+ struct attr_stack *res;
+
+ if (direction == GIT_ATTR_CHECKOUT) {
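+		/*
+		 * During checkout, prefer the .gitattributes stored in the
+		 * index being checked out; the copy in the work tree may be
+		 * missing or about to be overwritten.
+		 */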
+ res = read_attr_from_index(path, macro_ok);
+ if (!res)
+ res = read_attr_from_file(path, macro_ok);
+ }
+ else {
+ res = read_attr_from_file(path, macro_ok);
+ if (!res)
+ /*
+ * There is no checked out .gitattributes file there, but
+ * we might have it in the index. We allow operation in a
+ * sparsely checked out work tree, so read from it.
+ */
+ res = read_attr_from_index(path, macro_ok);
+ }
+ if (!res)
+ res = xcalloc(1, sizeof(*res));
+ return res;
+}
+
#if DEBUG_ATTR
static void debug_info(const char *what, struct attr_stack *elem)
{
#define debug_set(a,b,c,d) do { ; } while (0)
#endif
+static void drop_attr_stack(void)
+{
+ while (attr_stack) {
+ struct attr_stack *elem = attr_stack;
+ attr_stack = elem->prev;
+ free_attr_elem(elem);
+ }
+}
+
static void bootstrap_attr_stack(void)
{
if (!attr_stack) {
return 0;
}
+
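+/*
+ * Choose where .gitattributes is read from: GIT_ATTR_CHECKIN prefers
+ * the work tree, GIT_ATTR_CHECKOUT prefers the given index.  Switching
+ * direction drops the cached attribute stack so it is rebuilt from the
+ * right source.
+ */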
+void git_attr_set_direction(enum git_attr_direction new, struct index_state *istate)
+{
+ enum git_attr_direction old = direction;
+ direction = new;
+ if (new != old)
+ drop_attr_stack();
+ use_index = istate;
+}
int git_checkattr(const char *path, int, struct git_attr_check *);
+enum git_attr_direction {
+ GIT_ATTR_CHECKIN,
+ GIT_ATTR_CHECKOUT
+};
+void git_attr_set_direction(enum git_attr_direction, struct index_state *);
+
#endif /* ATTR_H */
if ((st_mode ^ patch->old_mode) & S_IFMT)
return error("%s: wrong type", old_name);
if (st_mode != patch->old_mode)
- fprintf(stderr, "warning: %s has type %o, expected %o\n",
+ warning("%s has type %o, expected %o",
old_name, st_mode, patch->old_mode);
if (!patch->new_mode && !patch->is_delete)
patch->new_mode = st_mode;
cnt = strlen(patch->new_name);
if (ARRAY_SIZE(namebuf) <= cnt + 5) {
cnt = ARRAY_SIZE(namebuf) - 5;
- fprintf(stderr,
- "warning: truncating .rej filename to %.*s.rej",
+ warning("truncating .rej filename to %.*s.rej",
cnt - 1, patch->new_name);
}
memcpy(namebuf, patch->new_name, cnt);
squelch_whitespace_errors < whitespace_error) {
int squelched =
whitespace_error - squelch_whitespace_errors;
- fprintf(stderr, "warning: squelched %d "
- "whitespace error%s\n",
+ warning("squelched %d "
+ "whitespace error%s",
squelched,
squelched == 1 ? "" : "s");
}
whitespace_error == 1 ? "" : "s",
whitespace_error == 1 ? "s" : "");
if (applied_after_fixing_ws && apply)
- fprintf(stderr, "warning: %d line%s applied after"
- " fixing whitespace errors.\n",
+ warning("%d line%s applied after"
+ " fixing whitespace errors.",
applied_after_fixing_ws,
applied_after_fixing_ws == 1 ? "" : "s");
else if (whitespace_error)
- fprintf(stderr, "warning: %d line%s add%s whitespace errors.\n",
+ warning("%d line%s add%s whitespace errors.",
whitespace_error,
whitespace_error == 1 ? "" : "s",
whitespace_error == 1 ? "s" : "");
parse_done:
argc = parse_options_end(&ctx);
+ if (revs_file && read_ancestry(revs_file))
+ die("reading graft file %s failed: %s",
+ revs_file, strerror(errno));
+
if (cmd_is_annotate) {
output_option |= OUTPUT_ANNOTATE_COMPAT;
blame_date_mode = DATE_ISO8601;
sb.ent = ent;
sb.path = path;
- if (revs_file && read_ancestry(revs_file))
- die("reading graft file %s failed: %s",
- revs_file, strerror(errno));
-
read_mailmap(&mailmap, NULL);
if (!incremental)
ret = 1;
} else {
struct strbuf buf = STRBUF_INIT;
- printf("Deleted %sbranch %s (%s).\n", remote,
+ printf("Deleted %sbranch %s (was %s).\n", remote,
bname.buf,
find_unique_abbrev(sha1, DEFAULT_ABBREV));
strbuf_addf(&buf, "branch.%s", bname.buf);
if (!old.commit && !opts->force) {
if (!opts->quiet) {
- fprintf(stderr, "warning: You appear to be on a branch yet to be born.\n");
- fprintf(stderr, "warning: Forcing checkout of %s.\n", new->name);
+ warning("You appear to be on a branch yet to be born.");
+ warning("Forcing checkout of %s.", new->name);
}
opts->force = 1;
}
static const char *junk_work_tree;
static const char *junk_git_dir;
-pid_t junk_pid;
+static pid_t junk_pid;
static void remove_junk(void)
{
atexit(remove_junk);
sigchain_push_common(remove_junk_on_signal);
- setenv(CONFIG_ENVIRONMENT, xstrdup(mkpath("%s/config", git_dir)), 1);
+ setenv(CONFIG_ENVIRONMENT, mkpath("%s/config", git_dir), 1);
if (safe_create_leading_directories_const(git_dir) < 0)
die("could not create leading directories of '%s'", git_dir);
if (message)
message += 2;
- if (commit->parents) {
+ if (commit->parents &&
+ get_object_mark(&commit->parents->item->object) != 0) {
parse_commit(commit->parents->item);
diff_tree_sha1(commit->parents->item->tree->object.sha1,
commit->tree->object.sha1, "", &rev->diffopt);
/* When cloning, it is not unusual to have
* no common commit.
*/
- fprintf(stderr, "warning: no common commits\n");
+ warning("no common commits");
if (get_pack(fd, pack_lockfile))
die("git fetch-pack: fetch failed.");
struct commit *current = NULL, *updated;
enum object_type type;
struct branch *current_branch = branch_get(NULL);
- const char *pretty_ref = ref->name + (
- !prefixcmp(ref->name, "refs/heads/") ? 11 :
- !prefixcmp(ref->name, "refs/tags/") ? 10 :
- !prefixcmp(ref->name, "refs/remotes/") ? 13 :
- 0);
+ const char *pretty_ref = prettify_ref(ref);
*display = 0;
type = sha1_object_info(ref->new_sha1, NULL);
}
dir = opendir(template_path);
if (!dir) {
- fprintf(stderr, "warning: templates not found %s\n",
- template_dir);
+ warning("templates not found %s", template_dir);
return;
}
if (repository_format_version &&
repository_format_version != GIT_REPO_VERSION) {
- fprintf(stderr, "warning: not copying templates of "
- "a wrong format version %d from '%s'\n",
+ warning("not copying templates of "
+ "a wrong format version %d from '%s'",
repository_format_version,
template_dir);
closedir(dir);
cover_letter = 1;
else if (!strcmp(argv[i], "--no-binary"))
no_binary_diff = 1;
+ else if (!prefixcmp(argv[i], "--add-header="))
+ add_header(argv[i] + 13);
else
argv[j++] = argv[i];
}
max_size = trg_entry->delta_size;
ref_depth = trg->depth;
}
- max_size = max_size * (max_depth - src->depth) /
+ max_size = (uint64_t)max_size * (max_depth - src->depth) /
(max_depth - ref_depth + 1);
if (max_size == 0)
return 0;
const unsigned char *sha1;
struct object *o;
- if (p->pack_keep)
+ if (!p->pack_local || p->pack_keep)
continue;
if (open_pack_index(p))
die("cannot open pack index");
free(in_pack.array);
}
+static int has_sha1_pack_kept_or_nonlocal(const unsigned char *sha1)
+{
+ static struct packed_git *last_found = (void *)1;
+ struct packed_git *p;
+
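+	/*
+	 * Resume the search at the pack that satisfied the previous
+	 * query, if any; consecutive lookups tend to hit the same pack.
+	 */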
+ p = (last_found != (void *)1) ? last_found : packed_git;
+
+ while (p) {
+ if ((!p->pack_local || p->pack_keep) &&
+ find_pack_entry_one(sha1, p)) {
+ last_found = p;
+ return 1;
+ }
+ if (p == last_found)
+ p = packed_git;
+ else
+ p = p->next;
+ if (p == last_found)
+ p = p->next;
+ }
+ return 0;
+}
+
static void loosen_unused_packed_objects(struct rev_info *revs)
{
struct packed_git *p;
const unsigned char *sha1;
for (p = packed_git; p; p = p->next) {
- if (p->pack_keep)
+ if (!p->pack_local || p->pack_keep)
continue;
if (open_pack_index(p))
for (i = 0; i < p->num_objects; i++) {
sha1 = nth_packed_object_sha1(p, i);
- if (!locate_object_entry(sha1))
+ if (!locate_object_entry(sha1) &&
+ !has_sha1_pack_kept_or_nonlocal(sha1))
if (force_object_loose(sha1, p->mtime))
die("unable to force loose object");
}
continue;
}
if (!strcmp("--unpacked", arg) ||
- !strcmp("--kept-pack-only", arg) ||
!strcmp("--reflog", arg) ||
!strcmp("--all", arg)) {
use_internal_rev_list = 1;
}
}
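+/*
+ * push.default = tracking: push the current branch to the branch it
+ * merges with upstream, as configured in branch.<name>.merge.
+ */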
+static void setup_push_tracking(void)
+{
+ struct strbuf refspec = STRBUF_INIT;
+ struct branch *branch = branch_get(NULL);
+ if (!branch)
+ die("You are not currently on a branch.");
+ if (!branch->merge_nr)
+ die("The current branch %s is not tracking anything.",
+ branch->name);
+ if (branch->merge_nr != 1)
+ die("The current branch %s is tracking multiple branches, "
+ "refusing to push.", branch->name);
+ strbuf_addf(&refspec, "%s:%s", branch->name, branch->merge[0]->src);
+ add_refspec(refspec.buf);
+}
+
+static const char *warn_unconfigured_push_msg[] = {
+ "You did not specify any refspecs to push, and the current remote",
+ "has not configured any push refspecs. The default action in this",
+ "case is to push all matching refspecs, that is, all branches",
+ "that exist both locally and remotely will be updated. This may",
+ "not necessarily be what you want to happen.",
+ "",
+ "You can specify what action you want to take in this case, and",
+ "avoid seeing this message again, by configuring 'push.default' to:",
+ " 'nothing' : Do not push anything",
+ " 'matching' : Push all matching branches (default)",
+ " 'tracking' : Push the current branch to whatever it is tracking",
+ " 'current' : Push the current branch"
+};
+
+static void warn_unconfigured_push(void)
+{
+ int i;
+ for (i = 0; i < ARRAY_SIZE(warn_unconfigured_push_msg); i++)
+ warning("%s", warn_unconfigured_push_msg[i]);
+}
+
+static void setup_default_push_refspecs(void)
+{
+ git_config(git_default_config, NULL);
+ switch (push_default) {
+ case PUSH_DEFAULT_UNSPECIFIED:
+ warn_unconfigured_push();
+ /* fallthrough */
+
+ case PUSH_DEFAULT_MATCHING:
+ add_refspec(":");
+ break;
+
+ case PUSH_DEFAULT_TRACKING:
+ setup_push_tracking();
+ break;
+
+ case PUSH_DEFAULT_CURRENT:
+ add_refspec("HEAD");
+ break;
+
+ case PUSH_DEFAULT_NOTHING:
+ die("You didn't specify any refspecs to push, and "
+ "push.default is \"nothing\".");
+ break;
+ }
+}
+
static int do_push(const char *repo, int flags)
{
int i, errs;
return error("--all and --mirror are incompatible");
}
- if (!refspec
- && !(flags & TRANSPORT_PUSH_ALL)
- && remote->push_refspec_nr) {
- refspec = remote->push_refspec;
- refspec_nr = remote->push_refspec_nr;
+ if (!refspec && !(flags & TRANSPORT_PUSH_ALL)) {
+ if (remote->push_refspec_nr) {
+ refspec = remote->push_refspec;
+ refspec_nr = remote->push_refspec_nr;
+ } else if (!(flags & TRANSPORT_PUSH_MIRROR))
+ setup_default_push_refspecs();
}
errs = 0;
for (i = 0; i < remote->url_nr; i++) {
return 0;
}
+/*
+ * Sorting comparison for a string list that has push_info
+ * structs in its util field
+ */
+static int cmp_string_with_push(const void *va, const void *vb)
+{
+ const struct string_list_item *a = va;
+ const struct string_list_item *b = vb;
+ const struct push_info *a_push = a->util;
+ const struct push_info *b_push = b->util;
+ int cmp = strcmp(a->string, b->string);
+ return cmp ? cmp : strcmp(a_push->dest, b_push->dest);
+}
+
int show_push_info_item(struct string_list_item *item, void *cb_data)
{
struct show_info *show_info = cb_data;
info.width = info.width2 = 0;
for_each_string_list(add_push_to_show_info, &states.push, &info);
- sort_string_list(info.list);
+ qsort(info.list->items, info.list->nr,
+ sizeof(*info.list->items), cmp_string_with_push);
if (info.list->nr)
printf(" Local ref%s configured for 'git push'%s:\n",
info.list->nr > 1 ? "s" : "",
if (lstat(ce->name, &st) < 0) {
if (errno != ENOENT)
- fprintf(stderr, "warning: '%s': %s",
- ce->name, strerror(errno));
+ warning("'%s': %s", ce->name, strerror(errno));
/* It already vanished from the working tree */
continue;
}
"git send-pack [--all | --mirror] [--dry-run] [--force] [--receive-pack=<git-receive-pack>] [--verbose] [--thin] [<host>:]<directory> [<ref>...]\n"
" --all and explicit <ref> specification are mutually exclusive.";
-static struct send_pack_args args = {
- /* .receivepack = */ "git-receive-pack",
-};
+static struct send_pack_args args;
static int feed_object(const unsigned char *sha1, int fd, int negative)
{
/*
* Make a pack stream and spit it out into file descriptor fd
*/
-static int pack_objects(int fd, struct ref *refs, struct extra_have_objects *extra)
+static int pack_objects(int fd, struct ref *refs, struct extra_have_objects *extra, struct send_pack_args *args)
{
/*
* The child becomes pack-objects --revs; we feed
struct child_process po;
int i;
- if (args.use_thin_pack)
+ if (args->use_thin_pack)
argv[4] = "--thin";
memset(&po, 0, sizeof(po));
po.argv = argv;
return 0;
}
-static struct ref *remote_refs, **remote_tail;
-
static int receive_status(int in, struct ref *refs)
{
struct ref *hint;
}
}
-static const char *prettify_ref(const struct ref *ref)
-{
- const char *name = ref->name;
- return name + (
- !prefixcmp(name, "refs/heads/") ? 11 :
- !prefixcmp(name, "refs/tags/") ? 10 :
- !prefixcmp(name, "refs/remotes/") ? 13 :
- 0);
-}
-
#define SUMMARY_WIDTH (2 * DEFAULT_ABBREV + 3)
static void print_ref_status(char flag, const char *summary, struct ref *to, struct ref *from, const char *msg)
return 0;
}
-static int do_send_pack(int in, int out, struct remote *remote, const char *dest, int nr_refspec, const char **refspec)
+int send_pack(struct send_pack_args *args,
+ int fd[], struct child_process *conn,
+ struct ref *remote_refs,
+ struct extra_have_objects *extra_have)
{
- struct ref *ref, *local_refs;
+ int in = fd[0];
+ int out = fd[1];
+ struct ref *ref;
int new_refs;
int ask_for_status_report = 0;
int allow_deleting_refs = 0;
int expect_status_report = 0;
- int flags = MATCH_REFS_NONE;
int ret;
- struct extra_have_objects extra_have;
-
- memset(&extra_have, 0, sizeof(extra_have));
- if (args.send_all)
- flags |= MATCH_REFS_ALL;
- if (args.send_mirror)
- flags |= MATCH_REFS_MIRROR;
-
- /* No funny business with the matcher */
- remote_tail = get_remote_heads(in, &remote_refs, 0, NULL, REF_NORMAL,
- &extra_have);
- local_refs = get_local_heads();
/* Does the other end support the reporting? */
if (server_supports("report-status"))
if (server_supports("delete-refs"))
allow_deleting_refs = 1;
- /* match them up */
- if (!remote_tail)
- remote_tail = &remote_refs;
- if (match_refs(local_refs, remote_refs, &remote_tail,
- nr_refspec, refspec, flags)) {
- close(out);
- return -1;
- }
-
if (!remote_refs) {
fprintf(stderr, "No refs in common and none specified; doing nothing.\n"
"Perhaps you should specify a branch such as 'master'.\n");
- close(out);
return 0;
}
if (ref->peer_ref)
hashcpy(ref->new_sha1, ref->peer_ref->new_sha1);
- else if (!args.send_mirror)
+ else if (!args->send_mirror)
continue;
ref->deletion = is_null_sha1(ref->new_sha1);
(!has_sha1_file(ref->old_sha1)
|| !ref_newer(ref->new_sha1, ref->old_sha1));
- if (ref->nonfastforward && !ref->force && !args.force_update) {
+ if (ref->nonfastforward && !ref->force && !args->force_update) {
ref->status = REF_STATUS_REJECT_NONFASTFORWARD;
continue;
}
if (!ref->deletion)
new_refs++;
- if (!args.dry_run) {
+ if (!args->dry_run) {
char *old_hex = sha1_to_hex(ref->old_sha1);
char *new_hex = sha1_to_hex(ref->new_sha1);
}
packet_flush(out);
- if (new_refs && !args.dry_run) {
- if (pack_objects(out, remote_refs, &extra_have) < 0)
+ if (new_refs && !args->dry_run) {
+ if (pack_objects(out, remote_refs, extra_have, args) < 0) {
+ for (ref = remote_refs; ref; ref = ref->next)
+ ref->status = REF_STATUS_NONE;
return -1;
+ }
}
- else
- close(out);
if (expect_status_report)
ret = receive_status(in, remote_refs);
else
ret = 0;
- print_push_status(dest, remote_refs);
-
- if (!args.dry_run && remote) {
- for (ref = remote_refs; ref; ref = ref->next)
- update_tracking_ref(remote, ref);
- }
-
- if (!refs_pushed(remote_refs))
- fprintf(stderr, "Everything up-to-date\n");
if (ret < 0)
return ret;
for (ref = remote_refs; ref; ref = ref->next) {
int cmd_send_pack(int argc, const char **argv, const char *prefix)
{
- int i, nr_heads = 0;
- const char **heads = NULL;
+ int i, nr_refspecs = 0;
+ const char **refspecs = NULL;
const char *remote_name = NULL;
struct remote *remote = NULL;
const char *dest = NULL;
+ int fd[2];
+ struct child_process *conn;
+ struct extra_have_objects extra_have;
+ struct ref *remote_refs, **remote_tail, *local_refs;
+ int ret;
+ int send_all = 0;
+ const char *receivepack = "git-receive-pack";
+ int flags;
argv++;
for (i = 1; i < argc; i++, argv++) {
if (*arg == '-') {
if (!prefixcmp(arg, "--receive-pack=")) {
- args.receivepack = arg + 15;
+ receivepack = arg + 15;
continue;
}
if (!prefixcmp(arg, "--exec=")) {
- args.receivepack = arg + 7;
+ receivepack = arg + 7;
continue;
}
if (!prefixcmp(arg, "--remote=")) {
continue;
}
if (!strcmp(arg, "--all")) {
- args.send_all = 1;
+ send_all = 1;
continue;
}
if (!strcmp(arg, "--dry-run")) {
dest = arg;
continue;
}
- heads = (const char **) argv;
- nr_heads = argc - i;
+ refspecs = (const char **) argv;
+ nr_refspecs = argc - i;
break;
}
if (!dest)
* --all and --mirror are incompatible; neither makes sense
* with any refspecs.
*/
- if ((heads && (args.send_all || args.send_mirror)) ||
- (args.send_all && args.send_mirror))
+ if ((refspecs && (send_all || args.send_mirror)) ||
+ (send_all && args.send_mirror))
usage(send_pack_usage);
if (remote_name) {
}
}
- return send_pack(&args, dest, remote, nr_heads, heads);
-}
+ conn = git_connect(fd, dest, receivepack, args.verbose ? CONNECT_VERBOSE : 0);
-int send_pack(struct send_pack_args *my_args,
- const char *dest, struct remote *remote,
- int nr_heads, const char **heads)
-{
- int fd[2], ret;
- struct child_process *conn;
+ memset(&extra_have, 0, sizeof(extra_have));
+
+ get_remote_heads(fd[0], &remote_refs, 0, NULL, REF_NORMAL,
+ &extra_have);
- memcpy(&args, my_args, sizeof(args));
+ verify_remote_names(nr_refspecs, refspecs);
+
+ local_refs = get_local_heads();
+
+ flags = MATCH_REFS_NONE;
+
+ if (send_all)
+ flags |= MATCH_REFS_ALL;
+ if (args.send_mirror)
+ flags |= MATCH_REFS_MIRROR;
+
+ /* match them up */
+ remote_tail = &remote_refs;
+ while (*remote_tail)
+ remote_tail = &((*remote_tail)->next);
+ if (match_refs(local_refs, remote_refs, &remote_tail,
+ nr_refspecs, refspecs, flags)) {
+ return -1;
+ }
- verify_remote_names(nr_heads, heads);
+ ret = send_pack(&args, fd, conn, remote_refs, &extra_have);
- conn = git_connect(fd, dest, args.receivepack, args.verbose ? CONNECT_VERBOSE : 0);
- ret = do_send_pack(fd[0], fd[1], remote, dest, nr_heads, heads);
+ close(fd[1]);
close(fd[0]);
- /* do_send_pack always closes fd[1] */
+
ret |= finish_connect(conn);
- return !!ret;
+
+ print_push_status(dest, remote_refs);
+
+ if (!args.dry_run && remote) {
+ struct ref *ref;
+ for (ref = remote_refs; ref; ref = ref->next)
+ update_tracking_ref(remote, ref);
+ }
+
+ if (!ret && !refs_pushed(remote_refs))
+ fprintf(stderr, "Everything up-to-date\n");
+
+ return ret;
}
return 0;
}
if (MAX_REVS <= ref_name_cnt) {
- fprintf(stderr, "warning: ignoring %s; "
- "cannot handle more than %d refs\n",
+ warning("ignoring %s; cannot handle more than %d refs",
refname, MAX_REVS);
return 0;
}
continue;
}
if (check_ref_format(ref)) {
- fprintf(stderr, "warning: ref '%s' ignored\n", ref);
+ warning("ref '%s' ignored", ref);
continue;
}
if (!string_list_has_string(&existing_refs, ref)) {
AUTOREBASE_ALWAYS,
};
+enum push_default_type {
+ PUSH_DEFAULT_UNSPECIFIED = -1,
+ PUSH_DEFAULT_NOTHING = 0,
+ PUSH_DEFAULT_MATCHING,
+ PUSH_DEFAULT_TRACKING,
+ PUSH_DEFAULT_CURRENT,
+};
+
extern enum branch_track git_branch_track;
extern enum rebase_setup_type autorebase;
+extern enum push_default_type push_default;
#define GIT_REPO_VERSION 0
extern int repository_format_version;
extern int move_temp_to_file(const char *tmpfile, const char *filename);
extern int has_sha1_pack(const unsigned char *sha1);
-extern int has_sha1_kept_pack(const unsigned char *sha1);
extern int has_sha1_file(const unsigned char *sha1);
extern int has_loose_object_nonlocal(const unsigned char *sha1);
const char *p = arg;
if (!*p) force_quotes = 1;
while (*p) {
- if (isspace(*p) || *p == '*' || *p == '?' || *p == '{')
+ if (isspace(*p) || *p == '*' || *p == '?' || *p == '{' || *p == '\'')
force_quotes = 1;
else if (*p == '"')
n++;
return 0;
}
+static int git_default_push_config(const char *var, const char *value)
+{
+ if (!strcmp(var, "push.default")) {
+ if (!value)
+ return config_error_nonbool(var);
+ else if (!strcmp(value, "nothing"))
+ push_default = PUSH_DEFAULT_NOTHING;
+ else if (!strcmp(value, "matching"))
+ push_default = PUSH_DEFAULT_MATCHING;
+ else if (!strcmp(value, "tracking"))
+ push_default = PUSH_DEFAULT_TRACKING;
+ else if (!strcmp(value, "current"))
+ push_default = PUSH_DEFAULT_CURRENT;
+ else {
+ error("Malformed value for %s: %s", var, value);
+ return error("Must be one of nothing, matching, "
+ "tracking or current.");
+ }
+ return 0;
+ }
+
+ /* Add other config variables here and to Documentation/config.txt. */
+ return 0;
+}
+
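For reference, the parser above corresponds to a configuration entry like the
following illustrative snippet (the variable itself is documented in
Documentation/config.txt); any value other than the four listed falls through
to the error path above:

	[push]
		default = tracking
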
static int git_default_mailmap_config(const char *var, const char *value)
{
if (!strcmp(var, "mailmap.file"))
if (!prefixcmp(var, "branch."))
return git_default_branch_config(var, value);
+ if (!prefixcmp(var, "push."))
+ return git_default_push_config(var, value);
+
if (!prefixcmp(var, "mailmap."))
return git_default_mailmap_config(var, value);
local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
- __gitcomp "--cached --pickaxe-all --pickaxe-regex
+ __gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
--base --ours --theirs
$__git_diff_common_options
"
{
local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
+ --thread=*)
+ __gitcomp "
+ deep shallow
+ " "" "${cur##--thread=}"
+ return
+ ;;
--*)
__gitcomp "
- --stdout --attach --thread
+ --stdout --attach --no-attach --thread --thread=
--output-directory
--numbered --start-number
--numbered-files
--keep-subject
--signoff
- --in-reply-to=
+ --in-reply-to= --cc=
--full-index --binary
--not --all
--cover-letter
__git_complete_revlist
}
+_git_fsck ()
+{
+ local cur="${COMP_WORDS[COMP_CWORD]}"
+ case "$cur" in
+ --*)
+ __gitcomp "
+ --tags --root --unreachable --cache --no-reflogs --full
+ --strict --verbose --lost-found
+ "
+ return
+ ;;
+ esac
+ COMPREPLY=()
+}
+
_git_gc ()
{
local cur="${COMP_WORDS[COMP_CWORD]}"
local cur="${COMP_WORDS[COMP_CWORD]}"
local g="$(git rev-parse --git-dir 2>/dev/null)"
local merge=""
- if [ -f $g/MERGE_HEAD ]; then
+ if [ -f "$g/MERGE_HEAD" ]; then
merge="--merge"
fi
case "$cur" in
local cur="${COMP_WORDS[COMP_CWORD]}"
case "$cur" in
--*)
- __gitcomp "--bcc --cc --cc-cmd --chain-reply-to --compose
- --dry-run --envelope-sender --from --identity
+ __gitcomp "--annotate --bcc --cc --cc-cmd --chain-reply-to
+ --compose --dry-run --envelope-sender --from --identity
--in-reply-to --no-chain-reply-to --no-signed-off-by-cc
--no-suppress-from --no-thread --quiet
--signed-off-by-cc --smtp-pass --smtp-server
diff) _git_diff ;;
fetch) _git_fetch ;;
format-patch) _git_format_patch ;;
+ fsck) _git_fsck ;;
gc) _git_gc ;;
grep) _git_grep ;;
help) _git_help ;;
local cur="${COMP_WORDS[COMP_CWORD]}"
local g="$(__gitdir)"
local merge=""
- if [ -f $g/MERGE_HEAD ]; then
+ if [ -f "$g/MERGE_HEAD" ]; then
merge="--merge"
fi
case "$cur" in
common_prefix = name[:name.rfind('/') + 1]
else:
while not name.startswith(common_prefix):
- common_prefix = name[:name.rfind('/') + 1]
+ last_slash = common_prefix[:-1].rfind('/') + 1
+ common_prefix = common_prefix[:last_slash]
mark[name] = ':' + str(next_mark)
next_mark += 1
no_index ? "--no-index" : "[--no-index]");
diff_setup(&revs->diffopt);
- if (!revs->diffopt.output_format)
- revs->diffopt.output_format = DIFF_FORMAT_PATCH;
for (i = 1; i < argc - 2; ) {
int j;
if (!strcmp(argv[i], "--no-index"))
revs->diffopt.paths = argv + argc - 2;
revs->diffopt.nr_paths = 2;
revs->diffopt.skip_stat_unmatch = 1;
+ if (!revs->diffopt.output_format)
+ revs->diffopt.output_format = DIFF_FORMAT_PATCH;
DIFF_OPT_SET(&revs->diffopt, EXIT_WITH_STATUS);
DIFF_OPT_SET(&revs->diffopt, NO_INDEX);
struct stat st;
int pos, len;
- /* We do not read the cache ourselves here, because the
+ /*
+ * We do not read the cache ourselves here, because the
* benchmark with my previous version that always reads cache
* shows that it makes things worse for diff-tree comparing
* two linux-2.6 kernel trees in an already checked out work
if (hashcmp(sha1, ce->sha1) || !S_ISREG(ce->ce_mode))
return 0;
+ /*
+ * If ce is marked as "assume unchanged", there is no
+ * guarantee that work tree matches what we are looking for.
+ */
+ if (ce->ce_flags & CE_VALID)
+ return 0;
+
/*
* If ce matches the file in the work tree, we can reuse it.
*/
s->cnt_data = NULL;
}
-static void prep_temp_blob(struct diff_tempfile *temp,
+static void prep_temp_blob(const char *path, struct diff_tempfile *temp,
void *blob,
unsigned long size,
const unsigned char *sha1,
int mode)
{
int fd;
+ struct strbuf buf = STRBUF_INIT;
fd = git_mkstemp(temp->tmp_path, PATH_MAX, ".diff_XXXXXX");
if (fd < 0)
die("unable to create temp-file: %s", strerror(errno));
+ if (convert_to_working_tree(path,
+ (const char *)blob, (size_t)size, &buf)) {
+ blob = buf.buf;
+ size = buf.len;
+ }
if (write_in_full(fd, blob, size) != size)
die("unable to write temp-file");
close(fd);
strcpy(temp->hex, sha1_to_hex(sha1));
temp->hex[40] = 0;
sprintf(temp->mode, "%06o", mode);
+ strbuf_release(&buf);
}
static struct diff_tempfile *prepare_temp_file(const char *name,
die("readlink(%s)", name);
if (ret == sizeof(buf))
die("symlink too long: %s", name);
- prep_temp_blob(temp, buf, ret,
+ prep_temp_blob(name, temp, buf, ret,
(one->sha1_valid ?
one->sha1 : null_sha1),
(one->sha1_valid ?
else {
if (diff_populate_filespec(one, 0))
die("cannot read data blob for %s", one->path);
- prep_temp_blob(temp, one->data, one->size,
+ prep_temp_blob(name, temp, one->data, one->size,
one->sha1, one->mode);
}
return temp;
unsigned whitespace_rule_cfg = WS_DEFAULT_RULE;
enum branch_track git_branch_track = BRANCH_TRACK_REMOTE;
enum rebase_setup_type autorebase = AUTOREBASE_NEVER;
+enum push_default_type push_default = PUSH_DEFAULT_UNSPECIFIED;
/* Parallel index stat data preload? */
int core_preload_index = 0;
void git_set_argv_exec_path(const char *exec_path)
{
argv_exec_path = exec_path;
+ /*
+ * Propagate this setting to external programs.
+ */
+ setenv(EXEC_PATH_ENVIRONMENT, exec_path, 1);
}
/*
+(See Documentation/git-fast-import.txt for maintained documentation.)
Format of STDIN stream:
stream ::= cmd*;
new_commit ::= 'commit' sp ref_str lf
mark?
- ('author' sp name '<' email '>' when lf)?
- 'committer' sp name '<' email '>' when lf
+ ('author' sp name sp '<' email '>' sp when lf)?
+ 'committer' sp name sp '<' email '>' sp when lf
commit_msg
('from' sp (ref_str | hexsha1 | sha1exp_str | idnum) lf)?
('merge' sp (ref_str | hexsha1 | sha1exp_str | idnum) lf)*
new_tag ::= 'tag' sp tag_str lf
'from' sp (ref_str | hexsha1 | sha1exp_str | idnum) lf
- ('tagger' sp name '<' email '>' when lf)?
+ ('tagger' sp name sp '<' email '>' sp when lf)?
tag_msg;
tag_msg ::= data;
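To illustrate the tightened grammar above (note the explicit sp between name
and '<'), a minimal commit command in the stream could look like the
following; the ref name, identity and timestamp are example data only:

	commit refs/heads/master
	committer C O Mitter <committer@example.com> 1243000000 +0000
	data 14
	initial import
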
# Rewrite the commits
-i=0
+git_filter_branch__commit_count=0
while read commit parents; do
- i=$(($i+1))
- printf "\rRewrite $commit ($i/$commits)"
+ git_filter_branch__commit_count=$(($git_filter_branch__commit_count+1))
+ printf "\rRewrite $commit ($git_filter_branch__commit_count/$commits)"
case "$filter_subdir" in
"")
--root)
rebase_root=t
;;
- -f|--f|--fo|--for|--forc|force|--force-r|--force-re|--force-reb|--force-reba|--force_rebas|--force-rebase)
+ -f|--f|--fo|--for|--forc|force|--force-r|--force-re|--force-reb|--force-reba|--force-rebas|--force-rebase)
force_rebase=t
;;
-*)
existing="$existing $e"
fi
done
- if test -n "$existing"
- then
- args="--kept-pack-only"
- fi
- if test -n "$args" -a -n "$unpack_unreachable" -a \
+ if test -n "$existing" -a -n "$unpack_unreachable" -a \
-n "$remove_redundant"
then
args="$args $unpack_unreachable"
do_edit(@files);
}
+sub ask {
+ my ($prompt, %arg) = @_;
+ my $valid_re = $arg{valid_re} || ""; # "" matches anything
+ my $default = $arg{default};
+ my $resp;
+ my $i = 0;
+ return defined $default ? $default : undef
+ unless defined $term->IN and defined fileno($term->IN) and
+ defined $term->OUT and defined fileno($term->OUT);
+ while ($i++ < 10) {
+ $resp = $term->readline($prompt);
+ if (!defined $resp) { # EOF
+ print "\n";
+ return defined $default ? $default : undef;
+ }
+ if ($resp eq '' and defined $default) {
+ return $default;
+ }
+ if ($resp =~ /$valid_re/) {
+ return $resp;
+ }
+ }
+ return undef;
+}
+
my $prompting = 0;
if (!defined $sender) {
$sender = $repoauthor || $repocommitter || '';
-
- while (1) {
- $_ = $term->readline("Who should the emails appear to be from? [$sender] ");
- last if defined $_;
- print "\n";
- }
-
- $sender = $_ if ($_);
+ $sender = ask("Who should the emails appear to be from? [$sender] ",
+ default => $sender);
print "Emails will be sent from: ", $sender, "\n";
$prompting++;
}
if (!@to) {
-
-
- while (1) {
- $_ = $term->readline("Who should the emails be sent to? ", "");
- last if defined $_;
- print "\n";
- }
-
- my $to = $_;
- push @to, parse_address_line($to);
+ my $to = ask("Who should the emails be sent to? ");
+ push @to, parse_address_line($to) if defined $to; # sanitized/validated later
$prompting++;
}
@bcclist = expand_aliases(@bcclist);
if ($thread && !defined $initial_reply_to && $prompting) {
- while (1) {
- $_= $term->readline("Message-ID to be used as In-Reply-To for the first email? ", $initial_reply_to);
- last if defined $_;
- print "\n";
- }
-
- $initial_reply_to = $_;
+ $initial_reply_to = ask(
+ "Message-ID to be used as In-Reply-To for the first email? ");
}
if (defined $initial_reply_to) {
$initial_reply_to =~ s/^\s*<?//;
# Variables we set as part of the loop over files
our ($message_id, %mail, $subject, $reply_to, $references, $message,
- $needs_confirm, $message_num);
+ $needs_confirm, $message_num, $ask_default);
sub extract_valid_address {
my $address = shift;
print "\n$header\n";
if ($needs_confirm eq "inform") {
$confirm_unconfigured = 0; # squelch this message for the rest of this run
+ $ask_default = "y"; # assume yes on EOF since user hasn't explicitly asked for confirmation
print " The Cc list above has been expanded by additional\n";
print " addresses found in the patch commit message. By default\n";
print " send-email prompts before sending whenever this occurs.\n";
print " To retain the current behavior, but squelch this message,\n";
print " run 'git config --global sendemail.confirm auto'.\n\n";
}
- while (1) {
- chomp ($_ = $term->readline(
- "Send this email? ([y]es|[n]o|[q]uit|[a]ll): "
- ));
- last if /^(?:yes|y|no|n|quit|q|all|a)/i;
- print "\n";
- }
+ $_ = ask("Send this email? ([y]es|[n]o|[q]uit|[a]ll): ",
+ valid_re => qr/^(?:yes|y|no|n|quit|q|all|a)/i,
+ default => $ask_default);
+ die "Send this email reply required" unless defined $_;
if (/^n/i) {
return;
} elsif (/^q/i) {
$_prefix, $_no_checkout, $_url, $_verbose,
$_git_format, $_commit_url, $_tag);
$Git::SVN::_follow_parent = 1;
+$_q ||= 0;
my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username,
'config-dir=s' => \$Git::SVN::Ra::config_dir,
'no-auth-cache' => \$Git::SVN::Prompt::_no_auth_cache,
'useSvnsyncProps' => \$Git::SVN::_use_svnsync_props,
'log-window-size=i' => \$Git::SVN::Ra::_log_window_size,
'no-checkout' => \$_no_checkout,
- 'quiet|q' => \$_q,
+ 'quiet|q+' => \$_q,
'repack-flags|repack-args|repack-opts=s' =>
\$Git::SVN::_repack_flags,
'use-log-author' => \$Git::SVN::_use_log_author,
$self->{last_rev} = $log_entry->{revision};
$self->{last_commit} = $commit;
- print "r$log_entry->{revision}";
+ print "r$log_entry->{revision}" unless $::_q > 1;
if (defined $log_entry->{svm_revision}) {
- print " (\@$log_entry->{svm_revision})";
+ print " (\@$log_entry->{svm_revision})" unless $::_q > 1;
$self->rev_map_set($log_entry->{svm_revision}, $commit,
0, $self->svm_uuid);
}
- print " = $commit ($self->{ref_id})\n";
+ print " = $commit ($self->{ref_id})\n" unless $::_q > 1;
if (--$_gc_nr == 0) {
$_gc_nr = $_gc_period;
gc();
return undef if ($gpath eq '');
# remove entire directories.
- if (command('ls-tree', $self->{c}, '--', $gpath) =~ /^040000 tree/) {
+ my ($tree) = (command('ls-tree', '-z', $self->{c}, "./$gpath")
+ =~ /\A040000 tree ([a-f\d]{40})\t\Q$gpath\E\0/);
+ if ($tree) {
my ($ls, $ctx) = command_output_pipe(qw/ls-tree
-r --name-only -z/,
- $self->{c}, '--', $gpath);
+ $tree);
local $/ = "\0";
while (<$ls>) {
chomp;
- $self->{gii}->remove($_);
- print "\tD\t$_\n" unless $::_q;
+ my $rmpath = "$gpath/$_";
+ $self->{gii}->remove($rmpath);
+ print "\tD\t$rmpath\n" unless $::_q;
}
print "\tD\t$gpath/\n" unless $::_q;
command_close_pipe($ls, $ctx);
goto out if is_path_ignored($path);
my $gpath = $self->git_path($path);
- ($mode, $blob) = (command('ls-tree', $self->{c}, '--', $gpath)
- =~ /^(\d{6}) blob ([a-f\d]{40})\t/);
+ ($mode, $blob) = (command('ls-tree', '-z', $self->{c}, "./$gpath")
+ =~ /\A(\d{6}) blob ([a-f\d]{40})\t\Q$gpath\E\0/);
unless (defined $mode && defined $blob) {
die "$path was not found in commit $self->{c} (r$rev)\n";
}
struct remote_lock *locks;
};
-static struct repo *remote;
+static struct repo *repo;
enum transfer_state {
NEED_FETCH,
git_SHA1_Init(&request->c);
- url = get_remote_object_url(remote->url, hex, 0);
+ url = get_remote_object_url(repo->url, hex, 0);
request->url = xstrdup(url);
/* If a previous temp file is present, process what was already
request->state = RUN_FETCH_LOOSE;
if (!start_active_slot(slot)) {
fprintf(stderr, "Unable to start GET request\n");
- remote->can_update_info_refs = 0;
+ repo->can_update_info_refs = 0;
release_request(request);
}
}
char *hex = sha1_to_hex(request->obj->sha1);
struct active_request_slot *slot;
- request->url = get_remote_object_url(remote->url, hex, 1);
+ request->url = get_remote_object_url(repo->url, hex, 1);
slot = get_active_slot();
slot->callback_func = process_response;
struct transfer_request *check_request = request_queue_head;
struct active_request_slot *slot;
- target = find_sha1_pack(request->obj->sha1, remote->packs);
+ target = find_sha1_pack(request->obj->sha1, repo->packs);
if (!target) {
fprintf(stderr, "Unable to fetch %s, will not be able to update server info refs\n", sha1_to_hex(request->obj->sha1));
- remote->can_update_info_refs = 0;
+ repo->can_update_info_refs = 0;
release_request(request);
return;
}
snprintf(request->tmpfile, sizeof(request->tmpfile),
"%s.temp", filename);
- url = xmalloc(strlen(remote->url) + 64);
+ url = xmalloc(strlen(repo->url) + 64);
sprintf(url, "%sobjects/pack/pack-%s.pack",
- remote->url, sha1_to_hex(target->sha1));
+ repo->url, sha1_to_hex(target->sha1));
/* Make sure there isn't another open request for this pack */
while (check_request) {
if (!packfile) {
fprintf(stderr, "Unable to open local file %s for pack",
request->tmpfile);
- remote->can_update_info_refs = 0;
+ repo->can_update_info_refs = 0;
free(url);
return;
}
request->state = RUN_FETCH_PACKED;
if (!start_active_slot(slot)) {
fprintf(stderr, "Unable to start GET request\n");
- remote->can_update_info_refs = 0;
+ repo->can_update_info_refs = 0;
release_request(request);
}
}
request->buffer.buf.len = stream.total_out;
strbuf_addstr(&buf, "Destination: ");
- append_remote_object_url(&buf, remote->url, hex, 0);
+ append_remote_object_url(&buf, repo->url, hex, 0);
request->dest = strbuf_detach(&buf, NULL);
- append_remote_object_url(&buf, remote->url, hex, 0);
+ append_remote_object_url(&buf, repo->url, hex, 0);
strbuf_add(&buf, request->lock->tmpfile_suffix, 41);
request->url = strbuf_detach(&buf, NULL);
static void check_locks(void)
{
- struct remote_lock *lock = remote->locks;
+ struct remote_lock *lock = repo->locks;
time_t current_time = time(NULL);
int time_remaining;
}
} else {
if (request->http_code == 416)
- fprintf(stderr, "Warning: requested range invalid; we may already have all the data.\n");
+ warning("requested range invalid; we may already have all the data.");
git_inflate_end(&request->stream);
git_SHA1_Final(request->real_sha1, &request->c);
if (request->curl_result != CURLE_OK) {
fprintf(stderr, "Unable to get pack file %s\n%s",
request->url, curl_errorstr);
- remote->can_update_info_refs = 0;
+ repo->can_update_info_refs = 0;
} else {
off_t pack_size = ftell(request->local_stream);
request->filename)) {
target = (struct packed_git *)request->userData;
target->pack_size = pack_size;
- lst = &remote->packs;
+ lst = &repo->packs;
while (*lst != target)
lst = &((*lst)->next);
*lst = (*lst)->next;
if (!verify_pack(target))
install_packed_git(target);
else
- remote->can_update_info_refs = 0;
+ repo->can_update_info_refs = 0;
}
}
release_request(request);
get_remote_object_list(obj->sha1[0]);
if (obj->flags & (REMOTE | PUSHING))
return 0;
- target = find_sha1_pack(obj->sha1, remote->packs);
+ target = find_sha1_pack(obj->sha1, repo->packs);
if (target) {
obj->flags |= REMOTE;
return 0;
struct slot_results results;
/* Don't use the index if the pack isn't there */
- url = xmalloc(strlen(remote->url) + 64);
- sprintf(url, "%sobjects/pack/pack-%s.pack", remote->url, hex);
+ url = xmalloc(strlen(repo->url) + 64);
+ sprintf(url, "%sobjects/pack/pack-%s.pack", repo->url, hex);
slot = get_active_slot();
slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_URL, url);
if (push_verbosely)
fprintf(stderr, "Getting index for pack %s\n", hex);
- sprintf(url, "%sobjects/pack/pack-%s.idx", remote->url, hex);
+ sprintf(url, "%sobjects/pack/pack-%s.idx", repo->url, hex);
filename = sha1_pack_index_name(sha1);
snprintf(tmpfile, sizeof(tmpfile), "%s.temp", filename);
return -1;
new_pack = parse_pack_index(sha1);
- new_pack->next = remote->packs;
- remote->packs = new_pack;
+ new_pack->next = repo->packs;
+ repo->packs = new_pack;
return 0;
}
if (push_verbosely)
fprintf(stderr, "Getting pack list\n");
- url = xmalloc(strlen(remote->url) + 20);
- sprintf(url, "%sobjects/info/packs", remote->url);
+ url = xmalloc(strlen(repo->url) + 20);
+ sprintf(url, "%sobjects/info/packs", repo->url);
slot = get_active_slot();
slot->results = &results;
struct curl_slist *dav_headers = NULL;
struct xml_ctx ctx;
- url = xmalloc(strlen(remote->url) + strlen(path) + 1);
- sprintf(url, "%s%s", remote->url, path);
+ url = xmalloc(strlen(repo->url) + strlen(path) + 1);
+ sprintf(url, "%s%s", repo->url, path);
/* Make sure leading directories exist for the remote ref */
- ep = strchr(url + strlen(remote->url) + 1, '/');
+ ep = strchr(url + strlen(repo->url) + 1, '/');
while (ep) {
char saved_character = ep[1];
ep[1] = '\0';
} else {
lock->url = url;
lock->start_time = time(NULL);
- lock->next = remote->locks;
- remote->locks = lock;
+ lock->next = repo->locks;
+ repo->locks = lock;
}
return lock;
{
struct active_request_slot *slot;
struct slot_results results;
- struct remote_lock *prev = remote->locks;
+ struct remote_lock *prev = repo->locks;
struct curl_slist *dav_headers;
int rc = 0;
curl_slist_free_all(dav_headers);
- if (remote->locks == lock) {
- remote->locks = lock->next;
+ if (repo->locks == lock) {
+ repo->locks = lock->next;
} else {
while (prev && prev->next != lock)
prev = prev->next;
static void remove_locks(void)
{
- struct remote_lock *lock = remote->locks;
+ struct remote_lock *lock = repo->locks;
fprintf(stderr, "Removing remote locks...\n");
while (lock) {
}
}
if (path) {
- path += remote->path_len;
+ path += repo->path_len;
ls->dentry_name = xstrdup(path);
}
} else if (!strcmp(ctx->name, DAV_PROPFIND_COLLECTION)) {
void (*userFunc)(struct remote_ls_ctx *ls),
void *userData)
{
- char *url = xmalloc(strlen(remote->url) + strlen(path) + 1);
+ char *url = xmalloc(strlen(repo->url) + strlen(path) + 1);
struct active_request_slot *slot;
struct slot_results results;
struct strbuf in_buffer = STRBUF_INIT;
ls.userData = userData;
ls.userFunc = userFunc;
- sprintf(url, "%s%s", remote->url, path);
+ sprintf(url, "%s%s", repo->url, path);
strbuf_addf(&out_buffer.buf, PROPFIND_ALL_REQUEST);
struct xml_ctx ctx;
int lock_flags = 0;
- strbuf_addf(&out_buffer.buf, PROPFIND_SUPPORTEDLOCK_REQUEST, remote->url);
+ strbuf_addf(&out_buffer.buf, PROPFIND_SUPPORTEDLOCK_REQUEST, repo->url);
dav_headers = curl_slist_append(dav_headers, "Depth: 0");
dav_headers = curl_slist_append(dav_headers, "Content-Type: text/xml");
curl_easy_setopt(slot->curl, CURLOPT_READFUNCTION, fread_buffer);
curl_easy_setopt(slot->curl, CURLOPT_FILE, &in_buffer);
curl_easy_setopt(slot->curl, CURLOPT_WRITEFUNCTION, fwrite_buffer);
- curl_easy_setopt(slot->curl, CURLOPT_URL, remote->url);
+ curl_easy_setopt(slot->curl, CURLOPT_URL, repo->url);
curl_easy_setopt(slot->curl, CURLOPT_UPLOAD, 1);
curl_easy_setopt(slot->curl, CURLOPT_CUSTOMREQUEST, DAV_PROPFIND);
curl_easy_setopt(slot->curl, CURLOPT_HTTPHEADER, dav_headers);
}
XML_ParserFree(parser);
if (!lock_flags)
- error("Error: no DAV locking support on %s",
- remote->url);
+ error("no DAV locking support on %s",
+ repo->url);
} else {
error("Cannot access URL %s, return code %d",
- remote->url, results.curl_result);
+ repo->url, results.curl_result);
lock_flags = 0;
}
} else {
- error("Unable to start PROPFIND request on %s", remote->url);
+ error("Unable to start PROPFIND request on %s", repo->url);
}
strbuf_release(&out_buffer.buf);
ref = alloc_ref(refname);
- if (http_fetch_ref(remote->url, ref) != 0) {
+ if (http_fetch_ref(repo->url, ref) != 0) {
fprintf(stderr,
"Unable to fetch ref %s from %s\n",
- refname, remote->url);
+ refname, repo->url);
free(ref);
return;
}
* Fetch a copy of the object if it doesn't exist locally - it
* may be required for updating server info later.
*/
- if (remote->can_update_info_refs && !has_sha1_file(ref->old_sha1)) {
+ if (repo->can_update_info_refs && !has_sha1_file(ref->old_sha1)) {
obj = lookup_unknown_object(ref->old_sha1);
if (obj) {
fprintf(stderr, " fetch %s for %s\n",
ref = alloc_ref(ls->dentry_name);
- if (http_fetch_ref(remote->url, ref) != 0) {
+ if (http_fetch_ref(repo->url, ref) != 0) {
fprintf(stderr,
"Unable to fetch ref %s from %s\n",
- ls->dentry_name, remote->url);
+ ls->dentry_name, repo->url);
aborted = 1;
free(ref);
return;
static int remote_exists(const char *path)
{
- char *url = xmalloc(strlen(remote->url) + strlen(path) + 1);
+ char *url = xmalloc(strlen(repo->url) + strlen(path) + 1);
struct active_request_slot *slot;
struct slot_results results;
int ret = -1;
- sprintf(url, "%s%s", remote->url, path);
+ sprintf(url, "%s%s", repo->url, path);
slot = get_active_slot();
slot->results = &results;
struct active_request_slot *slot;
struct slot_results results;
- url = xmalloc(strlen(remote->url) + strlen(path) + 1);
- sprintf(url, "%s%s", remote->url, path);
+ url = xmalloc(strlen(repo->url) + strlen(path) + 1);
+ sprintf(url, "%s%s", repo->url, path);
slot = get_active_slot();
slot->results = &results;
"of your current HEAD.\n"
"If you are sure you want to delete it,"
" run:\n\t'git http-push -D %s %s'",
- remote_ref->name, remote->url, pattern);
+ remote_ref->name, repo->url, pattern);
}
}
fprintf(stderr, "Removing remote branch '%s'\n", remote_ref->name);
if (dry_run)
return 0;
- url = xmalloc(strlen(remote->url) + strlen(remote_ref->name) + 1);
- sprintf(url, "%s%s", remote->url, remote_ref->name);
+ url = xmalloc(strlen(repo->url) + strlen(remote_ref->name) + 1);
+ sprintf(url, "%s%s", repo->url, remote_ref->name);
slot = get_active_slot();
slot->results = &results;
curl_easy_setopt(slot->curl, CURLOPT_HTTPGET, 1);
int i;
int new_refs;
struct ref *ref, *local_refs;
+ struct remote *remote;
char *rewritten_url = NULL;
git_extract_argv0_path(argv[0]);
setup_git_directory();
- remote = xcalloc(sizeof(*remote), 1);
+ repo = xcalloc(sizeof(*repo), 1);
argv++;
for (i = 1; i < argc; i++, argv++) {
continue;
}
}
- if (!remote->url) {
+ if (!repo->url) {
char *path = strstr(arg, "//");
- remote->url = arg;
- remote->path_len = strlen(arg);
+ repo->url = arg;
+ repo->path_len = strlen(arg);
if (path) {
- remote->path = strchr(path+2, '/');
- if (remote->path)
- remote->path_len = strlen(remote->path);
+ repo->path = strchr(path+2, '/');
+ if (repo->path)
+ repo->path_len = strlen(repo->path);
}
continue;
}
die("git-push is not available for http/https repository when not compiled with USE_CURL_MULTI");
#endif
- if (!remote->url)
+ if (!repo->url)
usage(http_push_usage);
if (delete_branch && nr_refspec != 1)
memset(remote_dir_exists, -1, 256);
- http_init(NULL);
+ /*
+	 * Create a minimal remote by hand to give to http_init(),
+ * primarily to allow it to look at the URL.
+ */
+ remote = xcalloc(sizeof(*remote), 1);
+ ALLOC_GROW(remote->url, remote->url_nr + 1, remote->url_alloc);
+ remote->url[remote->url_nr++] = repo->url;
+ http_init(remote);
no_pragma_header = curl_slist_append(no_pragma_header, "Pragma:");
- if (remote->url && remote->url[strlen(remote->url)-1] != '/') {
- rewritten_url = xmalloc(strlen(remote->url)+2);
- strcpy(rewritten_url, remote->url);
+ if (repo->url && repo->url[strlen(repo->url)-1] != '/') {
+ rewritten_url = xmalloc(strlen(repo->url)+2);
+ strcpy(rewritten_url, repo->url);
strcat(rewritten_url, "/");
- remote->path = rewritten_url + (remote->path - remote->url);
- remote->path_len++;
- remote->url = rewritten_url;
+ repo->path = rewritten_url + (repo->path - repo->url);
+ repo->path_len++;
+ repo->url = rewritten_url;
}
/* Verify DAV compliance/lock support */
sigchain_push_common(remove_locks_on_signal);
/* Check whether the remote has server info files */
- remote->can_update_info_refs = 0;
- remote->has_info_refs = remote_exists("info/refs");
- remote->has_info_packs = remote_exists("objects/info/packs");
- if (remote->has_info_refs) {
+ repo->can_update_info_refs = 0;
+ repo->has_info_refs = remote_exists("info/refs");
+ repo->has_info_packs = remote_exists("objects/info/packs");
+ if (repo->has_info_refs) {
info_ref_lock = lock_remote("info/refs", LOCK_TIME);
if (info_ref_lock)
- remote->can_update_info_refs = 1;
+ repo->can_update_info_refs = 1;
else {
- fprintf(stderr, "Error: cannot lock existing info/refs\n");
+ error("cannot lock existing info/refs");
rc = 1;
goto cleanup;
}
}
- if (remote->has_info_packs)
+ if (repo->has_info_packs)
fetch_indices();
/* Get a list of all local and remote heads to validate refspecs */
}
/* Update remote server info if appropriate */
- if (remote->has_info_refs && new_refs) {
- if (info_ref_lock && remote->can_update_info_refs) {
+ if (repo->has_info_refs && new_refs) {
+ if (info_ref_lock && repo->can_update_info_refs) {
fprintf(stderr, "Updating remote server info\n");
if (!dry_run)
update_remote_info_refs(info_ref_lock);
free(rewritten_url);
if (info_ref_lock)
unlock_remote(info_ref_lock);
- free(remote);
+ free(repo);
curl_slist_free_all(no_pragma_header);
#include "http.h"
int data_received;
-int active_requests = 0;
+int active_requests;
#ifdef USE_CURL_MULTI
static int max_requests = -1;
char curl_errorstr[CURL_ERROR_SIZE];
static int curl_ssl_verify = -1;
-static const char *ssl_cert = NULL;
+static const char *ssl_cert;
#if LIBCURL_VERSION_NUM >= 0x070902
-static const char *ssl_key = NULL;
+static const char *ssl_key;
#endif
#if LIBCURL_VERSION_NUM >= 0x070908
-static const char *ssl_capath = NULL;
+static const char *ssl_capath;
#endif
-static const char *ssl_cainfo = NULL;
+static const char *ssl_cainfo;
static long curl_low_speed_limit = -1;
static long curl_low_speed_time = -1;
-static int curl_ftp_no_epsv = 0;
-static const char *curl_http_proxy = NULL;
+static int curl_ftp_no_epsv;
+static const char *curl_http_proxy;
+static char *user_name, *user_pass;
static struct curl_slist *pragma_header;
-static struct active_request_slot *active_queue_head = NULL;
+static struct active_request_slot *active_queue_head;
size_t fread_buffer(void *ptr, size_t eltsize, size_t nmemb, void *buffer_)
{
static int http_options(const char *var, const char *value, void *cb)
{
if (!strcmp("http.sslverify", var)) {
- if (curl_ssl_verify == -1) {
- curl_ssl_verify = git_config_bool(var, value);
- }
- return 0;
- }
-
- if (!strcmp("http.sslcert", var)) {
- if (ssl_cert == NULL)
- return git_config_string(&ssl_cert, var, value);
+ curl_ssl_verify = git_config_bool(var, value);
return 0;
}
+ if (!strcmp("http.sslcert", var))
+ return git_config_string(&ssl_cert, var, value);
#if LIBCURL_VERSION_NUM >= 0x070902
- if (!strcmp("http.sslkey", var)) {
- if (ssl_key == NULL)
- return git_config_string(&ssl_key, var, value);
- return 0;
- }
+ if (!strcmp("http.sslkey", var))
+ return git_config_string(&ssl_key, var, value);
#endif
#if LIBCURL_VERSION_NUM >= 0x070908
- if (!strcmp("http.sslcapath", var)) {
- if (ssl_capath == NULL)
- return git_config_string(&ssl_capath, var, value);
- return 0;
- }
+ if (!strcmp("http.sslcapath", var))
+ return git_config_string(&ssl_capath, var, value);
#endif
- if (!strcmp("http.sslcainfo", var)) {
- if (ssl_cainfo == NULL)
- return git_config_string(&ssl_cainfo, var, value);
- return 0;
- }
-
+ if (!strcmp("http.sslcainfo", var))
+ return git_config_string(&ssl_cainfo, var, value);
#ifdef USE_CURL_MULTI
if (!strcmp("http.maxrequests", var)) {
- if (max_requests == -1)
- max_requests = git_config_int(var, value);
+ max_requests = git_config_int(var, value);
return 0;
}
#endif
-
if (!strcmp("http.lowspeedlimit", var)) {
- if (curl_low_speed_limit == -1)
- curl_low_speed_limit = (long)git_config_int(var, value);
+ curl_low_speed_limit = (long)git_config_int(var, value);
return 0;
}
if (!strcmp("http.lowspeedtime", var)) {
- if (curl_low_speed_time == -1)
- curl_low_speed_time = (long)git_config_int(var, value);
+ curl_low_speed_time = (long)git_config_int(var, value);
return 0;
}
curl_ftp_no_epsv = git_config_bool(var, value);
return 0;
}
- if (!strcmp("http.proxy", var)) {
- if (curl_http_proxy == NULL)
- return git_config_string(&curl_http_proxy, var, value);
- return 0;
- }
+ if (!strcmp("http.proxy", var))
+ return git_config_string(&curl_http_proxy, var, value);
/* Fall back on the default ones */
return git_default_config(var, value, cb);
}
-static CURL* get_curl_handle(void)
+static void init_curl_http_auth(CURL *result)
+{
+ if (user_name) {
+ struct strbuf up = STRBUF_INIT;
+ if (!user_pass)
+ user_pass = xstrdup(getpass("Password: "));
+ strbuf_addf(&up, "%s:%s", user_name, user_pass);
+ curl_easy_setopt(result, CURLOPT_USERPWD,
+ strbuf_detach(&up, NULL));
+ }
+}
+
+static CURL *get_curl_handle(void)
{
- CURL* result = curl_easy_init();
+ CURL *result = curl_easy_init();
if (!curl_ssl_verify) {
curl_easy_setopt(result, CURLOPT_SSL_VERIFYPEER, 0);
curl_easy_setopt(result, CURLOPT_NETRC, CURL_NETRC_OPTIONAL);
#endif
+ init_curl_http_auth(result);
+
if (ssl_cert != NULL)
curl_easy_setopt(result, CURLOPT_SSLCERT, ssl_cert);
#if LIBCURL_VERSION_NUM >= 0x070902
return result;
}
+static void http_auth_init(const char *url)
+{
+ char *at, *colon, *cp, *slash;
+ int len;
+
+ cp = strstr(url, "://");
+ if (!cp)
+ return;
+
+ /*
+ * Ok, the URL looks like "proto://something". Which one?
+ * "proto://<user>:<pass>@<host>/...",
+ * "proto://<user>@<host>/...", or just
+ * "proto://<host>/..."?
+ */
+ cp += 3;
+ at = strchr(cp, '@');
+ colon = strchr(cp, ':');
+ slash = strchrnul(cp, '/');
+ if (!at || slash <= at)
+ return; /* No credentials */
+ if (!colon || at <= colon) {
+ /* Only username */
+ len = at - cp;
+ user_name = xmalloc(len + 1);
+ memcpy(user_name, cp, len);
+ user_name[len] = '\0';
+ user_pass = NULL;
+ } else {
+ len = colon - cp;
+ user_name = xmalloc(len + 1);
+ memcpy(user_name, cp, len);
+ user_name[len] = '\0';
+ len = at - (colon + 1);
+ user_pass = xmalloc(len + 1);
+ memcpy(user_pass, colon + 1, len);
+ user_pass[len] = '\0';
+ }
+}
+
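To make the cases above concrete (illustrative URLs only):
"http://user:pass@host/repo.git" fills in both user_name and user_pass;
"http://user@host/repo.git" sets only user_name and leaves user_pass NULL, so
init_curl_http_auth() will later prompt for the password via getpass(); and a
URL with no '@' before the first '/' carries no credentials, so neither field
is set.
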
+static void set_from_env(const char **var, const char *envname)
+{
+ const char *val = getenv(envname);
+ if (val)
+ *var = val;
+}
+
void http_init(struct remote *remote)
{
char *low_speed_limit;
char *low_speed_time;
+ git_config(http_options, NULL);
+
curl_global_init(CURL_GLOBAL_ALL);
if (remote && remote->http_proxy)
if (getenv("GIT_SSL_NO_VERIFY"))
curl_ssl_verify = 0;
- ssl_cert = getenv("GIT_SSL_CERT");
+ set_from_env(&ssl_cert, "GIT_SSL_CERT");
#if LIBCURL_VERSION_NUM >= 0x070902
- ssl_key = getenv("GIT_SSL_KEY");
+ set_from_env(&ssl_key, "GIT_SSL_KEY");
#endif
#if LIBCURL_VERSION_NUM >= 0x070908
- ssl_capath = getenv("GIT_SSL_CAPATH");
+ set_from_env(&ssl_capath, "GIT_SSL_CAPATH");
#endif
- ssl_cainfo = getenv("GIT_SSL_CAINFO");
+ set_from_env(&ssl_cainfo, "GIT_SSL_CAINFO");
low_speed_limit = getenv("GIT_HTTP_LOW_SPEED_LIMIT");
if (low_speed_limit != NULL)
if (low_speed_time != NULL)
curl_low_speed_time = strtol(low_speed_time, NULL, 10);
- git_config(http_options, NULL);
-
if (curl_ssl_verify == -1)
curl_ssl_verify = 1;
if (getenv("GIT_CURL_FTP_NO_EPSV"))
curl_ftp_no_epsv = 1;
+ if (remote && remote->url && remote->url[0])
+ http_auth_init(remote->url[0]);
+
#ifndef NO_CURL_EASY_DUPHANDLE
curl_default = get_curl_handle();
#endif
/* Wait for a slot to open up if the queue is full */
while (active_requests >= max_requests) {
curl_multi_perform(curlm, &num_transfers);
- if (num_transfers < active_requests) {
+ if (num_transfers < active_requests)
process_curl_messages();
- }
}
#endif
- while (slot != NULL && slot->in_use) {
+ while (slot != NULL && slot->in_use)
slot = slot->next;
- }
+
if (slot == NULL) {
newslot = xmalloc(sizeof(*newslot));
newslot->curl = NULL;
if (slot == NULL) {
active_queue_head = newslot;
} else {
- while (slot->next != NULL) {
+ while (slot->next != NULL)
slot = slot->next;
- }
slot->next = newslot;
}
slot = newslot;
struct fill_chain *next;
};
-static struct fill_chain *fill_cfg = NULL;
+static struct fill_chain *fill_cfg;
void add_fill_function(void *data, int (*fill)(void *))
{
}
/* Run callback if appropriate */
- if (slot->callback_func != NULL) {
+ if (slot->callback_func != NULL)
slot->callback_func(slot->callback_data);
- }
}
void finish_all_active_slots(void)
static inline int hex(int v)
{
- if (v < 10) return '0' + v;
- else return 'A' + v - 10;
+ if (v < 10)
+ return '0' + v;
+ else
+ return 'A' + v - 10;
}
static char *quote_ref_url(const char *base, const char *ref)
{
struct mailmap_entry *me;
int index;
+ char *p;
+
+ if (old_email)
+ for (p = old_email; *p; p++)
+ *p = tolower(*p);
+ if (new_email)
+ for (p = new_email; *p; p++)
+ *p = tolower(*p);
+
if (old_email == NULL) {
old_email = new_email;
new_email = NULL;
old_name, old_email, new_name, new_email);
}
-static char *parse_name_and_email(char *buffer, char **name, char **email)
+static char *parse_name_and_email(char *buffer, char **name,
+ char **email, int allow_empty_email)
{
char *left, *right, *nstart, *nend;
*name = *email = 0;
return NULL;
if ((right = strchr(left+1, '>')) == NULL)
return NULL;
- if (left+1 == right)
+ if (!allow_empty_email && (left+1 == right))
return NULL;
/* remove whitespace from beginning and end of name */
}
continue;
}
- if ((name2 = parse_name_and_email(buffer, &name1, &email1)) != NULL)
- parse_name_and_email(name2, &name2, &email2);
+ if ((name2 = parse_name_and_email(buffer, &name1, &email1, 0)) != NULL)
+ parse_name_and_email(name2, &name2, &email2, 1);
if (email1)
add_mapping(map, name1, email1, name2, email2);
}
void show_reflog_message(struct reflog_walk_info* info, int oneline,
- int relative_date)
+ enum date_mode dmode)
{
if (info && info->last_commit_reflog) {
struct commit_reflog *commit_reflog = info->last_commit_reflog;
info = &commit_reflog->reflogs->items[commit_reflog->recno+1];
if (oneline) {
printf("%s@{", commit_reflog->reflogs->ref);
- if (commit_reflog->flag || relative_date)
- printf("%s", show_date(info->timestamp, 0, 1));
+ if (commit_reflog->flag || dmode)
+ printf("%s", show_date(info->timestamp,
+ info->tz,
+ dmode));
else
printf("%d", commit_reflog->reflogs->nr
- 2 - commit_reflog->recno);
}
else {
printf("Reflog: %s@{", commit_reflog->reflogs->ref);
- if (commit_reflog->flag || relative_date)
+ if (commit_reflog->flag || dmode)
printf("%s", show_date(info->timestamp,
info->tz,
- relative_date));
+ dmode));
else
printf("%d", commit_reflog->reflogs->nr
- 2 - commit_reflog->recno);
#ifndef REFLOG_WALK_H
#define REFLOG_WALK_H
+#include "cache.h"
+
extern void init_reflog_walk(struct reflog_walk_info** info);
extern int add_reflog_for_walk(struct reflog_walk_info *info,
struct commit *commit, const char *name);
extern void fake_reflog_parent(struct reflog_walk_info *info,
struct commit *commit);
-extern void show_reflog_message(struct reflog_walk_info *info, int, int);
+extern void show_reflog_message(struct reflog_walk_info *info, int,
+ enum date_mode);
#endif
}
}
+const char *prettify_ref(const struct ref *ref)
+{
+ const char *name = ref->name;
+ return name + (
+ !prefixcmp(name, "refs/heads/") ? 11 :
+ !prefixcmp(name, "refs/tags/") ? 10 :
+ !prefixcmp(name, "refs/remotes/") ? 13 :
+ 0);
+}
+
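The offsets above are simply the lengths of the stripped prefixes
(strlen("refs/heads/") is 11, and so on), so "refs/heads/master" is shown as
"master", while a name matching none of the prefixes is returned unchanged.
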
const char *ref_rev_parse_rules[] = {
"%.*s",
"refs/%.*s",
err = unlink(git_path("logs/%s", lock->ref_name));
if (err && errno != ENOENT)
- fprintf(stderr, "warning: unlink(%s) failed: %s",
+ warning("unlink(%s) failed: %s",
git_path("logs/%s", lock->ref_name), strerror(errno));
invalidate_cached_refs();
unlock_ref(lock);
if (get_sha1_hex(rec + 41, sha1))
die("Log %s is corrupt.", logfile);
if (hashcmp(logged_sha1, sha1)) {
- fprintf(stderr,
- "warning: Log %s has gap after %s.\n",
+ warning("Log %s has gap after %s.",
logfile, show_date(date, tz, DATE_RFC2822));
}
}
if (get_sha1_hex(rec + 41, logged_sha1))
die("Log %s is corrupt.", logfile);
if (hashcmp(logged_sha1, sha1)) {
- fprintf(stderr,
- "warning: Log %s unexpectedly ended on %s.\n",
+ warning("Log %s unexpectedly ended on %s.",
logfile, show_date(date, tz, DATE_RFC2822));
}
}
#define CHECK_REF_FORMAT_WILDCARD (-3)
extern int check_ref_format(const char *target);
+extern const char *prettify_ref(const struct ref *ref);
+
/** rename ref, return 0 on success **/
extern int rename_ref(const char *oldref, const char *newref, const char *logmsg);
if (theirs == ours)
return 0;
- /* Run "rev-list --left-right ours...theirs" internally... */
+ /* Run "rev-list --no-merges --left-right ours...theirs" internally... */
rev_argc = 0;
rev_argv[rev_argc++] = NULL;
+ rev_argv[rev_argc++] = "--no-merges";
rev_argv[rev_argc++] = "--left-right";
rev_argv[rev_argc++] = symmetric;
rev_argv[rev_argc++] = "--";
revs->edge_hint = 1;
} else if (!strcmp(arg, "--unpacked")) {
revs->unpacked = 1;
- revs->kept_pack_only = 0;
- } else if (!strcmp(arg, "--kept-pack-only")) {
- revs->unpacked = 1;
- revs->kept_pack_only = 1;
} else if (!prefixcmp(arg, "--unpacked=")) {
die("--unpacked=<packfile> no longer supported.");
} else if (!strcmp(arg, "-r")) {
{
if (commit->object.flags & SHOWN)
return commit_ignore;
- if (revs->unpacked &&
- (revs->kept_pack_only
- ? has_sha1_kept_pack(commit->object.sha1)
- : has_sha1_pack(commit->object.sha1)))
+ if (revs->unpacked && has_sha1_pack(commit->object.sha1))
return commit_ignore;
if (revs->show_all)
return commit_show;
edge_hint:1,
limited:1,
unpacked:1,
- kept_pack_only:1,
boundary:2,
left_right:1,
rewrite_parents:1,
ERR_RUN_COMMAND_WAITPID_SIGNAL,
ERR_RUN_COMMAND_WAITPID_NOEXIT,
};
-#define IS_RUN_COMMAND_ERR(x) ((x) <= -ERR_RUN_COMMAND_FORK)
+#define IS_RUN_COMMAND_ERR(x) (-(x) >= ERR_RUN_COMMAND_FORK)
struct child_process {
const char **argv;
#define SEND_PACK_H
struct send_pack_args {
- const char *receivepack;
unsigned verbose:1,
- send_all:1,
send_mirror:1,
force_update:1,
use_thin_pack:1,
};
int send_pack(struct send_pack_args *args,
- const char *dest, struct remote *remote,
- int nr_heads, const char **heads);
+ int fd[], struct child_process *conn,
+ struct ref *remote_refs, struct extra_have_objects *extra_have);
#endif
return 0;
}
-static int find_pack_ent(const unsigned char *sha1, struct pack_entry *e,
- int kept_pack_only)
+static int find_pack_entry(const unsigned char *sha1, struct pack_entry *e)
{
static struct packed_git *last_found = (void *)1;
struct packed_git *p;
p = (last_found == (void *)1) ? packed_git : last_found;
do {
- if (kept_pack_only && !p->pack_keep)
- goto next;
if (p->num_bad_objects) {
unsigned i;
for (i = 0; i < p->num_bad_objects; i++)
return 0;
}
-static int find_pack_entry(const unsigned char *sha1, struct pack_entry *e)
-{
- return find_pack_ent(sha1, e, 0);
-}
-
-static int find_kept_pack_entry(const unsigned char *sha1, struct pack_entry *e)
-{
- return find_pack_ent(sha1, e, 1);
-}
-
struct packed_git *find_sha1_pack(const unsigned char *sha1,
struct packed_git *packs)
{
fsync_or_die(fd, "sha1 file");
fchmod(fd, 0444);
if (close(fd) != 0)
- die("unable to write sha1 file");
+ die("error when closing sha1 file (%s)", strerror(errno));
}
/* Size of directory component, including the ending '/' */
return find_pack_entry(sha1, &e);
}
-int has_sha1_kept_pack(const unsigned char *sha1)
-{
- struct pack_entry e;
- return find_kept_pack_entry(sha1, &e);
-}
-
int has_sha1_file(const unsigned char *sha1)
{
struct pack_entry e;
$(RM) -r test-results
clean:
- $(RM) -r 'trash directory' test-results
+ $(RM) -r 'trash directory'.* test-results
aggregate-results-and-cleanup: $(T)
$(MAKE) aggregate-results
test_expect_failure 'pretend we have fixed a known breakage' '
:
'
+test_set_prereq HAVEIT
+haveit=no
+test_expect_success HAVEIT 'test runs if prerequisite is satisfied' '
+ test_have_prereq HAVEIT &&
+ haveit=yes
+'
+donthaveit=yes
+test_expect_success DONTHAVEIT 'unmet prerequisite causes test to be skipped' '
+ donthaveit=no
+'
+if test $haveit$donthaveit != yesyes
+then
+ say "bug in test framework: prerequisite tags do not work reliably"
+ exit 1
+fi
################################################################
# Basics of the basics
'test "$tree" = 4b825dc642cb6eb9a060e54bf8d69288fbee4904'
# Various types of objects
+# Some filesystems do not support symbolic links; on such systems
+# some expected values are different
mkdir path2 path3 path3/subp3
-for p in path0 path2/file2 path3/file3 path3/subp3/file3
+paths='path0 path2/file2 path3/file3 path3/subp3/file3'
+for p in $paths
do
echo "hello $p" >$p
- ln -s "hello $p" ${p}sym
done
+if test_have_prereq SYMLINKS
+then
+ for p in $paths
+ do
+ ln -s "hello $p" ${p}sym
+ done
+ expectfilter=cat
+ expectedtree=087704a96baf1c2d1c869a8b084481e121c88b5b
+ expectedptree1=21ae8269cacbe57ae09138dcc3a2887f904d02b3
+ expectedptree2=3c5e5399f3a333eddecce7a9b9465b63f65f51e2
+else
+ expectfilter='grep -v sym'
+ expectedtree=8e18edf7d7edcf4371a3ac6ae5f07c2641db7c46
+ expectedptree1=cfb8591b2f65de8b8cc1020cd7d9e67e7793b325
+ expectedptree2=ce580448f0148b985a513b693fdf7d802cacb44f
+fi
+
test_expect_success \
'adding various types of objects with git update-index --add.' \
'find path* ! -type d -print | xargs git update-index --add'
'showing stage with git ls-files --stage' \
'git ls-files --stage >current'
-cat >expected <<\EOF
+$expectfilter >expected <<\EOF
100644 f87290f8eb2cbbea7857214459a0739927eab154 0 path0
120000 15a98433ae33114b085f3eb3bb03b832b3180a01 0 path0sym
100644 3feff949ed00a62d9f7af97c15cd8a30595e7ac7 0 path2/file2
'tree=$(git write-tree)'
test_expect_success \
'validate object ID for a known tree.' \
- 'test "$tree" = 087704a96baf1c2d1c869a8b084481e121c88b5b'
+ 'test "$tree" = "$expectedtree"'
test_expect_success \
'showing tree with git ls-tree' \
040000 tree 58a09c23e2ca152193f2786e06986b7b6712bdbe path2
040000 tree 21ae8269cacbe57ae09138dcc3a2887f904d02b3 path3
EOF
-test_expect_success \
+test_expect_success SYMLINKS \
'git ls-tree output for a known tree.' \
'test_cmp expected current'
test_expect_success \
'showing tree with git ls-tree -r' \
'git ls-tree -r $tree >current'
-cat >expected <<\EOF
+$expectfilter >expected <<\EOF
100644 blob f87290f8eb2cbbea7857214459a0739927eab154 path0
120000 blob 15a98433ae33114b085f3eb3bb03b832b3180a01 path0sym
100644 blob 3feff949ed00a62d9f7af97c15cd8a30595e7ac7 path2/file2
100644 blob 00fb5908cb97c2564a9783c0c64087333b3b464f path3/subp3/file3
120000 blob 6649a1ebe9e9f1c553b66f5a6e74136a07ccc57c path3/subp3/file3sym
EOF
-test_expect_success \
+test_expect_success SYMLINKS \
'git ls-tree -r output for a known tree.' \
'test_cmp expected current'
'ptree=$(git write-tree --prefix=path3)'
test_expect_success \
'validate object ID for a known tree.' \
- 'test "$ptree" = 21ae8269cacbe57ae09138dcc3a2887f904d02b3'
+ 'test "$ptree" = "$expectedptree1"'
test_expect_success \
'writing partial tree out with git write-tree --prefix.' \
'ptree=$(git write-tree --prefix=path3/subp3)'
test_expect_success \
'validate object ID for a known tree.' \
- 'test "$ptree" = 3c5e5399f3a333eddecce7a9b9465b63f65f51e2'
+ 'test "$ptree" = "$expectedptree2"'
cat >badobjects <<EOF
100644 blob 1000000000000000000000000000000000000000 dir/file1
newtree=$(git write-tree) &&
test "$newtree" = "$tree"'
-cat >expected <<\EOF
+$expectfilter >expected <<\EOF
:100644 100644 f87290f8eb2cbbea7857214459a0739927eab154 0000000000000000000000000000000000000000 M path0
:120000 120000 15a98433ae33114b085f3eb3bb03b832b3180a01 0000000000000000000000000000000000000000 M path0sym
:100644 100644 3feff949ed00a62d9f7af97c15cd8a30595e7ac7 0000000000000000000000000000000000000000 M path2/file2
'git diff-files >current && cmp -s current /dev/null'
################################################################
-P=087704a96baf1c2d1c869a8b084481e121c88b5b
+P=$expectedtree
test_expect_success \
'git commit-tree records the correct tree in a commit.' \
'commit0=$(echo NO | git commit-tree $P) &&
test $numpath0 = 1
'
-test_expect_success 'absolute path works as expected' '
+test_expect_success SYMLINKS 'absolute path works as expected' '
mkdir first &&
ln -s ../.git first/.git &&
mkdir second &&
'
-test_expect_success 'write-tree should notice unwritable repository' '
+test_expect_success POSIXPERM 'write-tree should notice unwritable repository' '
(
chmod a-w .git/objects .git/objects/?? &&
'
-test_expect_success 'commit should notice unwritable repository' '
+test_expect_success POSIXPERM 'commit should notice unwritable repository' '
(
chmod a-w .git/objects .git/objects/?? &&
'
-test_expect_success 'update-index should notice unwritable repository' '
+test_expect_success POSIXPERM 'update-index should notice unwritable repository' '
(
echo 6O >file &&
'
-test_expect_success 'add should notice unwritable repository' '
+test_expect_success POSIXPERM 'add should notice unwritable repository' '
(
echo b >file &&
}
'
+test_expect_success 'checkout with existing .gitattributes' '
+
+ git config core.autocrlf true &&
+ git config --unset core.safecrlf &&
+ echo ".file2 -crlfQ" | q_to_cr >> .gitattributes &&
+ git add .gitattributes &&
+ git commit -m initial &&
+ echo ".file -crlfQ" | q_to_cr >> .gitattributes &&
+ echo "contents" > .file &&
+ git add .gitattributes .file &&
+ git commit -m second &&
+
+ git checkout master~1 &&
+ git checkout master &&
+ test "$(git diff-files --raw)" = ""
+
+'
+
+test_expect_success 'checkout when deleting .gitattributes' '
+
+ git rm .gitattributes &&
+ echo "contentsQ" | q_to_cr > .file2 &&
+ git add .file2 &&
+	git commit -m third &&
+
+ git checkout master~1 &&
+ git checkout master &&
+ remove_cr .file2 >/dev/null
+
+'
+
test_expect_success 'invalid .gitattributes (must not crash)' '
echo "three +crlf" >>.gitattributes &&
"$UNZIP" -v >/dev/null 2>&1
if [ $? -eq 127 ]; then
say "Skipping ZIP test, because unzip was not found"
- test_done
- exit
+else
+ test_set_prereq UNZIP
fi
-test_expect_success 'zip archive' '
+test_expect_success UNZIP 'zip archive' '
git archive --format=zip HEAD >test.zip &&
case_insensitive=
unibad=
-test_expect_success 'see if we expect ' '
+no_symlinks=
+test_expect_success 'see what we expect' '
test_case=test_expect_success
test_unicode=test_expect_success
;;
*) ;;
esac &&
- rm -fr junk
+ rm -fr junk &&
+ {
+ ln -s x y 2> /dev/null &&
+ test -h y 2> /dev/null ||
+ no_symlinks=1
+ rm -f y
+ }
'
test "$case_insensitive" &&
say "will test on a case insensitive filesystem"
test "$unibad" &&
say "will test on a unicode corrupting filesystem"
+test "$no_symlinks" &&
+ say "will test on a filesystem lacking symbolic links"
if test "$case_insensitive"
then
'
fi
+if test "$no_symlinks"
+then
+test_expect_success "detection of filesystem w/o symlink support during repo init" '
+
+ v=$(git config --bool core.symlinks) &&
+ test "$v" = false
+'
+else
+test_expect_success "detection of filesystem w/o symlink support during repo init" '
+
+ test_must_fail git config --bool core.symlinks ||
+ test "$(git config --bool core.symlinks)" = true
+'
+fi
+
test_expect_success "setup case tests" '
git config core.ignorecase true &&
. ./test-lib.sh
-test_expect_success setup '
+test_expect_success SYMLINKS setup '
>a &&
mkdir b &&
ln -s b c &&
git update-index --add a b/d
'
-test_expect_success 'update-index --add beyond symlinks' '
+test_expect_success SYMLINKS 'update-index --add beyond symlinks' '
test_must_fail git update-index --add c/d &&
! ( git ls-files | grep c/d )
'
-test_expect_success 'add beyond symlinks' '
+test_expect_success SYMLINKS 'add beyond symlinks' '
test_must_fail git add c/d &&
! ( git ls-files | grep c/d )
'
. ./test-lib.sh
-norm_abs() {
- test_expect_success "normalize absolute: $1 => $2" \
+norm_path() {
+ test_expect_success $3 "normalize path: $1 => $2" \
"test \"\$(test-path-utils normalize_path_copy '$1')\" = '$2'"
}
+# On Windows, we are using MSYS's bash, which mangles the paths.
+# Absolute paths are anchored at the MSYS installation directory,
+# which means that the path / accounts for this many characters:
+rootoff=$(test-path-utils normalize_path_copy / | wc -c)
+# Account for the trailing LF:
+if test $rootoff = 2; then
+ rootoff= # we are on Unix
+else
+ rootoff=$(($rootoff-1))
+fi
+
ancestor() {
- test_expect_success "longest ancestor: $1 $2 => $3" \
- "test \"\$(test-path-utils longest_ancestor_length '$1' '$2')\" = '$3'"
+ # We do some math with the expected ancestor length.
+ expected=$3
+ if test -n "$rootoff" && test "x$expected" != x-1; then
+ expected=$(($expected+$rootoff))
+ fi
+ test_expect_success "longest ancestor: $1 $2 => $expected" \
+ "actual=\$(test-path-utils longest_ancestor_length '$1' '$2') &&
+ test \"\$actual\" = '$expected'"
}
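As a worked example under a hypothetical MSYS installation where
normalize_path_copy / prints "/msys/1.0/": wc -c counts 11 bytes including the
trailing LF, rootoff becomes 10, and an expected ancestor length of 4 is
adjusted to 14 before the comparison. On Unix the output is just "/" plus the
LF, wc -c reports 2, rootoff is emptied, and the expected values are used
as-is; the special value -1 (no ancestor) is never adjusted.
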
-norm_abs "" ""
-norm_abs / /
-norm_abs // /
-norm_abs /// /
-norm_abs /. /
-norm_abs /./ /
-norm_abs /./.. ++failed++
-norm_abs /../. ++failed++
-norm_abs /./../.// ++failed++
-norm_abs /dir/.. /
-norm_abs /dir/sub/../.. /
-norm_abs /dir/sub/../../.. ++failed++
-norm_abs /dir /dir
-norm_abs /dir// /dir/
-norm_abs /./dir /dir
-norm_abs /dir/. /dir/
-norm_abs /dir///./ /dir/
-norm_abs /dir//sub/.. /dir/
-norm_abs /dir/sub/../ /dir/
-norm_abs //dir/sub/../. /dir/
-norm_abs /dir/s1/../s2/ /dir/s2/
-norm_abs /d1/s1///s2/..//../s3/ /d1/s3/
-norm_abs /d1/s1//../s2/../../d2 /d2
-norm_abs /d1/.../d2 /d1/.../d2
-norm_abs /d1/..././../d2 /d1/d2
+# Absolute path tests must be skipped on Windows because, due to path
+# mangling, the test program never sees a POSIX-style absolute path
+case $(uname -s) in
+*MINGW*)
+ ;;
+*)
+ test_set_prereq POSIX
+ ;;
+esac
+
+norm_path "" ""
+norm_path . ""
+norm_path ./ ""
+norm_path ./. ""
+norm_path ./.. ++failed++
+norm_path ../. ++failed++
+norm_path ./../.// ++failed++
+norm_path dir/.. ""
+norm_path dir/sub/../.. ""
+norm_path dir/sub/../../.. ++failed++
+norm_path dir dir
+norm_path dir// dir/
+norm_path ./dir dir
+norm_path dir/. dir/
+norm_path dir///./ dir/
+norm_path dir//sub/.. dir/
+norm_path dir/sub/../ dir/
+norm_path dir/sub/../. dir/
+norm_path dir/s1/../s2/ dir/s2/
+norm_path d1/s1///s2/..//../s3/ d1/s3/
+norm_path d1/s1//../s2/../../d2 d2
+norm_path d1/.../d2 d1/.../d2
+norm_path d1/..././../d2 d1/d2
+
+norm_path / / POSIX
+norm_path // / POSIX
+norm_path /// / POSIX
+norm_path /. / POSIX
+norm_path /./ / POSIX
+norm_path /./.. ++failed++ POSIX
+norm_path /../. ++failed++ POSIX
+norm_path /./../.// ++failed++ POSIX
+norm_path /dir/.. / POSIX
+norm_path /dir/sub/../.. / POSIX
+norm_path /dir/sub/../../.. ++failed++ POSIX
+norm_path /dir /dir POSIX
+norm_path /dir// /dir/ POSIX
+norm_path /./dir /dir POSIX
+norm_path /dir/. /dir/ POSIX
+norm_path /dir///./ /dir/ POSIX
+norm_path /dir//sub/.. /dir/ POSIX
+norm_path /dir/sub/../ /dir/ POSIX
+norm_path //dir/sub/../. /dir/ POSIX
+norm_path /dir/s1/../s2/ /dir/s2/ POSIX
+norm_path /d1/s1///s2/..//../s3/ /d1/s3/ POSIX
+norm_path /d1/s1//../s2/../../d2 /d2 POSIX
+norm_path /d1/.../d2 /d1/.../d2 POSIX
+norm_path /d1/..././../d2 /d1/d2 POSIX
ancestor / "" -1
ancestor / / -1
ancestor /foo/bar /foo:/:/bar/ 4
ancestor /foo/bar /:/bar/:/fo 0
ancestor /foo/bar /:/bar/ 0
-ancestor /foo/bar :://foo/. 4
-ancestor /foo/bar :://foo/.:: 4
-ancestor /foo/bar //foo/./::/bar 4
-ancestor /foo/bar ::/bar -1
+ancestor /foo/bar .:/foo/. 4
+ancestor /foo/bar .:/foo/.:.: 4
+ancestor /foo/bar /foo/./:.:/bar 4
+ancestor /foo/bar .:/bar -1
test_expect_success 'strip_path_suffix' '
test c:/msysgit = $(test-path-utils strip_path_suffix \
'
-test_expect_success 'funny symlink in work tree' '
+test_expect_success SYMLINKS 'funny symlink in work tree' '
git reset --hard &&
git checkout -b sym-b side-b &&
'
-test_expect_success 'funny symlink in work tree, un-unlink-able' '
+test_expect_success SYMLINKS 'funny symlink in work tree, un-unlink-able' '
rm -fr a b &&
git reset --hard &&
'
# clean-up from the above test
-chmod a+w a
+chmod a+w a 2>/dev/null
rm -fr a b
test_expect_success 'D/F setup' '
cd foo.git && git show -s HEAD
'
-test_expect_success 'detection should not be fooled by a symlink' '
+test_expect_success SYMLINKS 'detection should not be fooled by a symlink' '
cd "$HERE" &&
rm -fr foo.git &&
git clone -s .git another &&
test_expect_success '--null --get-regexp' 'cmp result expect'
-test_expect_success 'symlinked configuration' '
+test_expect_success SYMLINKS 'symlinked configuration' '
ln -s notyet myconfig &&
GIT_CONFIG=myconfig git config test.frotz nitfol &&
for u in 002 022
do
- test_expect_success "shared=1 does not clear bits preset by umask $u" '
+ test_expect_success POSIXPERM "shared=1 does not clear bits preset by umask $u" '
mkdir sub && (
cd sub &&
umask $u &&
test 2 = $(git config core.sharedrepository)
'
-test_expect_success 'update-server-info honors core.sharedRepository' '
+test_expect_success POSIXPERM 'update-server-info honors core.sharedRepository' '
: > a1 &&
git add a1 &&
test_tick &&
git config core.sharedrepository "$u" &&
umask 0277 &&
- test_expect_success "shared = $u ($y) ro" '
+ test_expect_success POSIXPERM "shared = $u ($y) ro" '
rm -f .git/info/refs &&
git update-server-info &&
'
umask 077 &&
- test_expect_success "shared = $u ($x) rw" '
+ test_expect_success POSIXPERM "shared = $u ($x) rw" '
rm -f .git/info/refs &&
git update-server-info &&
done
-test_expect_success 'git reflog expire honors core.sharedRepository' '
+test_expect_success POSIXPERM 'git reflog expire honors core.sharedRepository' '
git config core.sharedRepository group &&
git reflog expire --all &&
actual="$(ls -l .git/logs/refs/heads/master)" &&
--- /dev/null
+#!/bin/sh
+
+test_description='Test reflog display routines'
+. ./test-lib.sh
+
+test_expect_success 'setup' '
+ echo content >file &&
+ git add file &&
+ test_tick &&
+ git commit -m one
+'
+
+cat >expect <<'EOF'
+Reflog: HEAD@{0} (C O Mitter <committer@example.com>)
+Reflog message: commit (initial): one
+EOF
+test_expect_success 'log -g shows reflog headers' '
+ git log -g -1 >tmp &&
+ grep ^Reflog <tmp >actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<'EOF'
+e46513e HEAD@{0}: commit (initial): one
+EOF
+test_expect_success 'oneline reflog format' '
+ git log -g -1 --oneline >actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<'EOF'
+Reflog: HEAD@{Thu Apr 7 15:13:13 2005 -0700} (C O Mitter <committer@example.com>)
+Reflog message: commit (initial): one
+EOF
+test_expect_success 'using @{now} syntax shows reflog date (multiline)' '
+ git log -g -1 HEAD@{now} >tmp &&
+ grep ^Reflog <tmp >actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<'EOF'
+e46513e HEAD@{Thu Apr 7 15:13:13 2005 -0700}: commit (initial): one
+EOF
+test_expect_success 'using @{now} syntax shows reflog date (oneline)' '
+ git log -g -1 --oneline HEAD@{now} >actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<'EOF'
+Reflog: HEAD@{1112911993 -0700} (C O Mitter <committer@example.com>)
+Reflog message: commit (initial): one
+EOF
+test_expect_success 'using --date= shows reflog date (multiline)' '
+ git log -g -1 --date=raw >tmp &&
+ grep ^Reflog <tmp >actual &&
+ test_cmp expect actual
+'
+
+cat >expect <<'EOF'
+e46513e HEAD@{1112911993 -0700}: commit (initial): one
+EOF
+test_expect_success 'using --date= shows reflog date (oneline)' '
+ git log -g -1 --oneline --date=raw >actual &&
+ test_cmp expect actual
+'
+
+test_done
"git rev-parse --show-prefix"
}
-TRASH_ROOT="$(pwd)"
+TRASH_ROOT="$PWD"
ROOT_PARENT=$(dirname "$TRASH_ROOT")
'git read-tree -m $tree1 && git checkout-index -f -a'
test_debug 'show_files $tree1'
-ln -s path0 path1
-test_expect_success \
+test_expect_success SYMLINKS \
'git update-index --add a symlink.' \
- 'git update-index --add path1'
+ 'ln -s path0 path1 &&
+ git update-index --add path1'
test_expect_success \
'writing tree out with git write-tree' \
'tree3=$(git write-tree)'
echo rezrov >path1/file1 &&
git update-index --add path0 path1/file1'
-test_expect_success \
+test_expect_success SYMLINKS \
'have symlink in place where dir is expected.' \
'rm -fr path0 path1 &&
mkdir path2 &&
test ! -f path1/file1'
# Linus fix #1
-test_expect_success \
+test_expect_success SYMLINKS \
'use --prefix=tmp/orary/ where tmp is a symlink' \
'rm -fr path0 path1 path2 tmp* &&
mkdir tmp1 tmp1/orary &&
test -h tmp'
# Linus fix #2
-test_expect_success \
+test_expect_success SYMLINKS \
'use --prefix=tmp/orary- where tmp is a symlink' \
'rm -fr path0 path1 path2 tmp* &&
mkdir tmp1 &&
test -h tmp'
# Linus fix #3
-test_expect_success \
+test_expect_success SYMLINKS \
'use --prefix=tmp- where tmp-path1 is a symlink' \
'rm -fr path0 path1 path2 tmp* &&
mkdir tmp1 &&
test $(cat ../$s1) = tree1asubdir/path5)
)'
-test_expect_success \
+test_expect_success SYMLINKS \
'checkout --temp symlink' '
rm -f path* .merge_* out .git/index &&
ln -s b a &&
. ./test-lib.sh
+if ! test_have_prereq SYMLINKS
+then
+ say "symbolic links not supported - skipping tests"
+ test_done
+fi
+
test_expect_success setup '
mkdir frotz &&
mkdir path2 path3
date >path0
-ln -s xyzzy path1
+if test_have_prereq SYMLINKS
+then
+ ln -s xyzzy path1
+else
+ date > path1
+fi
date >path2/file2
date >path3/file3
mkdir path0 path1
date >path2
-ln -s frotz path3
+if test_have_prereq SYMLINKS
+then
+ ln -s frotz path3
+else
+ date > path3
+fi
date >path0/file0
date >path1/file1
'
-test_expect_success 'replace a file with a symlink' '
+test_expect_success SYMLINKS 'replace a file with a symlink' '
rm foo &&
ln -s top foo &&
_empty=$(git hash-object --stdin <xyzzy) &&
>yomin &&
>caskly &&
- ln -s frotz nitfol &&
+ if test_have_prereq SYMLINKS; then
+ ln -s frotz nitfol &&
+ T_letter=T
+ else
+ printf %s frotz > nitfol &&
+ T_letter=M
+ fi &&
mkdir rezrov &&
>rezrov/bozbar &&
git add caskly xyzzy yomin nitfol rezrov/bozbar &&
>nitfol &&
# rezrov/bozbar disappears
rm -fr rezrov &&
- ln -s xyzzy rezrov &&
+ if test_have_prereq SYMLINKS; then
+ ln -s xyzzy rezrov
+ else
+ printf %s xyzzy > rezrov
+ fi &&
# xyzzy disappears (not a submodule)
mkdir xyzzy &&
echo gnusto >xyzzy/bozbar &&
s/blob/000000/
}
/ nitfol/{
- s/ nitfol/ $_z40 T&/
+ s/ nitfol/ $_z40 $T_letter&/
s/blob/100644/
}
/ rezrov.bozbar/{
. ./test-lib.sh
test_cd_to_toplevel () {
- test_expect_success "$2" '
+ test_expect_success $3 "$2" '
(
cd '"'$1'"' &&
. git-sh-setup &&
test_cd_to_toplevel repo/sub/dir 'at physical subdir'
-ln -s repo symrepo
-test_cd_to_toplevel symrepo 'at symbolic root'
+ln -s repo symrepo 2>/dev/null
+test_cd_to_toplevel symrepo 'at symbolic root' SYMLINKS
-ln -s repo/sub/dir subdir-link
-test_cd_to_toplevel subdir-link 'at symbolic subdir'
+ln -s repo/sub/dir subdir-link 2>/dev/null
+test_cd_to_toplevel subdir-link 'at symbolic subdir' SYMLINKS
cd repo
-ln -s sub/dir internal-link
-test_cd_to_toplevel internal-link 'at internal symbolic subdir'
+ln -s sub/dir internal-link 2>/dev/null
+test_cd_to_toplevel internal-link 'at internal symbolic subdir' SYMLINKS
test_done
. ./test-lib.sh
date >path0
-ln -s xyzzy path1
+if test_have_prereq SYMLINKS
+then
+ ln -s xyzzy path1
+else
+ date > path1
+fi
mkdir path2 path3 path4
date >path2/file2
date >path2-junk
. ./test-lib.sh
date >path0
-ln -s xyzzy path1
+if test_have_prereq SYMLINKS
+then
+ ln -s xyzzy path1
+else
+ date > path1
+fi
mkdir path2 path3
date >path2/file2
date >path3/file3
rm -fr path? ;# leave path10 alone
date >path2
-ln -s frotz path3
-ln -s nitfol path5
+if test_have_prereq SYMLINKS
+then
+ ln -s frotz path3
+ ln -s nitfol path5
+else
+ date > path3
+ date > path5
+fi
mkdir path0 path1 path6
date >path0/file0
date >path1/file1
'setup' \
'mkdir path2 path2/baz &&
echo Hi >path0 &&
- ln -s path0 path1 &&
+ if test_have_prereq SYMLINKS
+ then
+ ln -s path0 path1 &&
+ ln -s ../path1 path2/bazbo
+ make_expected () {
+ cat >expected
+ }
+ else
+ printf path0 > path1 &&
+ printf ../path1 > path2/bazbo
+ make_expected () {
+ sed -e "s/120000 /100644 /" >expected
+ }
+ fi &&
echo Lo >path2/foo &&
- ln -s ../path1 path2/bazbo &&
echo Mi >path2/baz/b &&
find path? \( -type f -o -type l \) -print |
xargs git update-index --add &&
test_expect_success \
'ls-tree plain' \
'git ls-tree $tree >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
100644 blob X path0
120000 blob X path1
040000 tree X path2
test_expect_success \
'ls-tree recursive' \
'git ls-tree -r $tree >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
100644 blob X path0
120000 blob X path1
100644 blob X path2/baz/b
test_expect_success \
'ls-tree recursive with -t' \
'git ls-tree -r -t $tree >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
100644 blob X path0
120000 blob X path1
040000 tree X path2
test_expect_success \
'ls-tree recursive with -d' \
'git ls-tree -r -d $tree >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
040000 tree X path2
040000 tree X path2/baz
EOF
test_expect_success \
'ls-tree filtered with path' \
'git ls-tree $tree path >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
EOF
test_output'
test_expect_success \
'ls-tree filtered with path1 path0' \
'git ls-tree $tree path1 path0 >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
100644 blob X path0
120000 blob X path1
EOF
test_expect_success \
'ls-tree filtered with path0/' \
'git ls-tree $tree path0/ >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
EOF
test_output'
test_expect_success \
'ls-tree filtered with path2' \
'git ls-tree $tree path2 >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
040000 tree X path2
EOF
test_output'
test_expect_success \
'ls-tree filtered with path2/' \
'git ls-tree $tree path2/ >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
040000 tree X path2/baz
120000 blob X path2/bazbo
100644 blob X path2/foo
test_expect_success \
'ls-tree filtered with path2/baz' \
'git ls-tree $tree path2/baz >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
040000 tree X path2/baz
EOF
test_output'
test_expect_success \
'ls-tree filtered with path2/bak' \
'git ls-tree $tree path2/bak >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
EOF
test_output'
test_expect_success \
'ls-tree -t filtered with path2/bak' \
'git ls-tree -t $tree path2/bak >current &&
- cat >expected <<\EOF &&
+ make_expected <<\EOF &&
040000 tree X path2
EOF
test_output'
! test -f .git/refs/heads/master3
'
-test_expect_success \
+test_expect_success SYMLINKS \
'git branch -m u v should fail when the reflog for u is a symlink' '
git branch -l u &&
mv .git/logs/refs/heads/u real-u &&
test_expect_success 'test deleting branch without config' \
'git branch my7 s &&
sha1=$(git rev-parse my7 | cut -c 1-7) &&
- test "$(git branch -d my7 2>&1)" = "Deleted branch my7 ($sha1)."'
+ test "$(git branch -d my7 2>&1)" = "Deleted branch my7 (was $sha1)."'
test_expect_success 'test --track without .fetch entries' \
'git branch --track my8 &&
'Initialize test directory' \
"touch -- foo bar baz 'space embedded' -q &&
git add -- foo bar baz 'space embedded' -q &&
- git commit -m 'add normal files' &&
- test_tabs=y &&
- if touch -- 'tab embedded' 'newline
-embedded'
- then
+ git commit -m 'add normal files'"
+
+if touch -- 'tab embedded' 'newline
+embedded' 2>/dev/null
+then
+ test_set_prereq FUNNYNAMES
+else
+ say 'Your filesystem does not allow tabs in filenames.'
+fi
+
+test_expect_success FUNNYNAMES 'add files with funny names' "
git add -- 'tab embedded' 'newline
embedded' &&
git commit -m 'add files with tabs and newlines'
- else
- test_tabs=n
- fi"
-
-test "$test_tabs" = n && say 'Your filesystem does not allow tabs in filenames.'
+"
+# Determine rm behavior
# Later we will try removing an unremovable path to make sure
# git rm barfs, but if the test is run as root that cannot be
# arranged.
-test_expect_success \
- 'Determine rm behavior' \
- ': >test-file
- chmod a-w .
- rm -f test-file
- test -f test-file && test_failed_remove=y
- chmod 775 .
- rm -f test-file'
+: >test-file
+chmod a-w .
+rm -f test-file 2>/dev/null
+if test -f test-file
+then
+ test_set_prereq RO_DIR
+else
+ say 'skipping removal failure test (perhaps running as root?)'
+fi
+chmod 775 .
+rm -f test-file
test_expect_success \
'Pre-check that foo exists and is in index before git rm foo' \
'Test that "git rm -- -q" succeeds (remove a file that looks like an option)' \
'git rm -- -q'
-test "$test_tabs" = y && test_expect_success \
+test_expect_success FUNNYNAMES \
"Test that \"git rm -f\" succeeds with embedded space, tab, or newline characters." \
"git rm -f 'space embedded' 'tab embedded' 'newline
embedded'"
-if test "$test_failed_remove" = y; then
-chmod a-w .
-test_expect_success \
- 'Test that "git rm -f" fails if its rm fails' \
- 'test_must_fail git rm -f baz'
-chmod 775 .
-else
- say 'skipping removal failure test (perhaps running as root?)'
-fi
+test_expect_success RO_DIR 'Test that "git rm -f" fails if its rm fails' '
+ chmod a-w . &&
+ test_must_fail git rm -f baz &&
+ chmod 775 .
+'
test_expect_success \
'When the rm in "git rm -f" fails, it should not remove the file from the index' \
*) echo fail; git ls-files --stage xfoo1; (exit 1);;
esac'
-test_expect_success 'git add: filemode=0 should not get confused by symlink' '
+test_expect_success SYMLINKS 'git add: filemode=0 should not get confused by symlink' '
rm -f xfoo1 &&
ln -s foo xfoo1 &&
git add xfoo1 &&
*) echo fail; git ls-files --stage xfoo2; (exit 1);;
esac'
-test_expect_success 'git add: filemode=0 should not get confused by symlink' '
+test_expect_success SYMLINKS 'git add: filemode=0 should not get confused by symlink' '
rm -f xfoo2 &&
ln -s foo xfoo2 &&
git update-index --add xfoo2 &&
esac
'
-test_expect_success \
+test_expect_success SYMLINKS \
'git update-index --add: Test that executable bit is not used...' \
'git config core.filemode 0 &&
ln -s xfoo2 xfoo3 &&
test -z "`git diff-index HEAD -- foo`"
'
-test_expect_success 'git add should fail atomically upon an unreadable file' '
+test_expect_success POSIXPERM 'git add should fail atomically upon an unreadable file' '
git reset --hard &&
date >foo1 &&
date >foo2 &&
rm -f foo2
-test_expect_success 'git add --ignore-errors' '
+test_expect_success POSIXPERM 'git add --ignore-errors' '
git reset --hard &&
date >foo1 &&
date >foo2 &&
rm -f foo2
-test_expect_success 'git add (add.ignore-errors)' '
+test_expect_success POSIXPERM 'git add (add.ignore-errors)' '
git config add.ignore-errors 1 &&
git reset --hard &&
date >foo1 &&
'
rm -f foo2
-test_expect_success 'git add (add.ignore-errors = false)' '
+test_expect_success POSIXPERM 'git add (add.ignore-errors = false)' '
git config add.ignore-errors 0 &&
git reset --hard &&
date >foo1 &&
! ( git ls-files foo1 | grep foo1 )
'
-test_expect_success 'git add '\''fo\[ou\]bar'\'' ignores foobar' '
+test_expect_success BSLASHPSPEC "git add 'fo\\[ou\\]bar' ignores foobar" '
git reset --hard &&
touch fo\[ou\]bar foobar &&
git add '\''fo\[ou\]bar'\'' &&
if test "$(git config --bool core.filemode)" = false
then
- say 'skipping filemode tests (filesystem does not properly support modes)'
+ say 'skipping filemode tests (filesystem does not properly support modes)'
else
+ test_set_prereq FILEMODE
+fi
-test_expect_success 'patch does not affect mode' '
+test_expect_success FILEMODE 'patch does not affect mode' '
git reset --hard &&
echo content >>file &&
chmod +x file &&
git diff file | grep "new mode"
'
-test_expect_success 'stage mode but not hunk' '
+test_expect_success FILEMODE 'stage mode but not hunk' '
git reset --hard &&
echo content >>file &&
chmod +x file &&
git diff file | grep "+content"
'
-fi
# end of tests disabled when filemode is not usable
test_done
. ./test-lib.sh
. "$TEST_DIRECTORY"/diff-lib.sh
+if ! test_have_prereq SYMLINKS
+then
+ say 'Symbolic links not supported, skipping tests.'
+ test_done
+ exit
+fi
+
test_expect_success \
'prepare reference tree' \
'echo xyzzy | tr -d '\\\\'012 >yomin &&
'validate result of -B -M (#4)' \
'compare_diff_raw expected current'
-test_expect_success \
+test_expect_success SYMLINKS \
'make file0 into something completely different' \
'rm -f file0 &&
ln -s frotz file0 &&
:100644 100644 6ff87c4664981e4397625791c8ea3bbb5f2279a3 f5deac7be59e7eeab8657fd9ae706fd6a57daed2 M100 file1
EOF
-test_expect_success \
+test_expect_success SYMLINKS \
'validate result of -B (#5)' \
'compare_diff_raw expected current'
:100644 100644 6ff87c4664981e4397625791c8ea3bbb5f2279a3 f5deac7be59e7eeab8657fd9ae706fd6a57daed2 R file0 file1
EOF
-test_expect_success \
+test_expect_success SYMLINKS \
'validate result of -B -M (#6)' \
'compare_diff_raw expected current'
:100644 100644 6ff87c4664981e4397625791c8ea3bbb5f2279a3 f5deac7be59e7eeab8657fd9ae706fd6a57daed2 M file1
EOF
-test_expect_success \
+test_expect_success SYMLINKS \
'validate result of -M (#7)' \
'compare_diff_raw expected current'
. ./test-lib.sh
. "$TEST_DIRECTORY"/diff-lib.sh
+if ! test_have_prereq SYMLINKS
+then
+ say 'Symbolic links not supported, skipping tests.'
+ test_done
+ exit
+fi
+
cat > expected << EOF
diff --git a/frotz b/frotz
new file mode 120000
grep "^ *S. E. Cipient <scipient@example.com>$" patch5
'
+test_expect_success 'command line headers' '
+
+ git config --unset-all format.headers &&
+ git format-patch --add-header="Cc: R. E. Cipient <rcipient@example.com>" --stdout master..side | sed -e "/^$/q" >patch6 &&
+ grep "^Cc: R. E. Cipient <rcipient@example.com>$" patch6
+'
+
+test_expect_success 'configuration headers and command line headers' '
+
+ git config --replace-all format.headers "Cc: R. E. Cipient <rcipient@example.com>" &&
+ git format-patch --add-header="Cc: S. E. Cipient <scipient@example.com>" --stdout master..side | sed -e "/^$/q" >patch7 &&
+ grep "^Cc: R. E. Cipient <rcipient@example.com>,$" patch7 &&
+ grep "^ *S. E. Cipient <scipient@example.com>$" patch7
+'
+
test_expect_success 'multiple files' '
rm -rf patches/ &&
GIT_EXTERNAL_DIFF=echo git diff
'
+echo "#!$SHELL_PATH" >fake-diff.sh
+cat >> fake-diff.sh <<\EOF
+cat $2 >> crlfed.txt
+EOF
+chmod a+x fake-diff.sh
+
+keep_only_cr () {
+ tr -dc '\015'
+}
+
+test_expect_success 'external diff with autocrlf = true' '
+ git config core.autocrlf true &&
+ GIT_EXTERNAL_DIFF=./fake-diff.sh git diff &&
+ test $(wc -l < crlfed.txt) = $(cat crlfed.txt | keep_only_cr | wc -c)
+'
+
+test_expect_success 'diff --cached' '
+ git add file &&
+ git update-index --assume-unchanged file &&
+ echo second >file &&
+ git diff --cached >actual &&
+ test_cmp ../t4020/diff.NUL actual
+'
+
test_done
. ./test-lib.sh
+if ! test_have_prereq SYMLINKS
+then
+ say 'Symbolic links not supported, skipping tests.'
+ test_done
+ exit
+fi
+
test_expect_success setup '
rm -f foo bar &&
test_expect_success apply \
'git apply --index --stat --summary --apply test-patch'
-if [ "$(git config --get core.filemode)" = false ]
+if test "$(git config --bool core.filemode)" = false
then
say 'filemode disabled on the filesystem'
else
- test_expect_success validate \
- 'test -f bar && ls -l bar | grep "^-..x......"'
+ test_set_prereq FILEMODE
fi
+test_expect_success FILEMODE validate \
+ 'test -f bar && ls -l bar | grep "^-..x......"'
+
test_expect_success 'apply reverse' \
'git apply -R --index --stat --summary --apply test-patch &&
test "$(cat foo)" = "This is foo"'
. ./test-lib.sh
+if ! test_have_prereq SYMLINKS
+then
+ say 'Symbolic links not supported, skipping tests.'
+ test_done
+ exit
+fi
+
test_expect_success 'setup repository and commits' '
echo "hello world" > foo &&
echo "hi planet" > bar &&
. ./test-lib.sh
+if ! test_have_prereq SYMLINKS
+then
+ say 'Symbolic links not supported, skipping tests.'
+ test_done
+ exit
+fi
+
test_expect_success setup '
ln -s path1/path2/path3/path4/path5 link1 &&
test_description='apply to deeper directory without getting fooled with symlink'
. ./test-lib.sh
+if ! test_have_prereq SYMLINKS
+then
+ say 'Symbolic links not supported, skipping tests.'
+ test_done
+ exit
+fi
+
lecho () {
for l_
do
. ./test-lib.sh
+if test "$(git config --bool core.filemode)" = false
+then
+ say 'filemode disabled on the filesystem'
+else
+ test_set_prereq FILEMODE
+fi
+
test_expect_success setup '
echo original >file &&
git add file &&
git diff --stat -p >patch-1.txt
'
-test_expect_success 'same mode (no index)' '
+test_expect_success FILEMODE 'same mode (no index)' '
git reset --hard &&
chmod +x file &&
git apply patch-0.txt &&
test -x file
'
-test_expect_success 'same mode (with index)' '
+test_expect_success FILEMODE 'same mode (with index)' '
git reset --hard &&
chmod +x file &&
git add file &&
git diff --exit-code
'
-test_expect_success 'same mode (index only)' '
+test_expect_success FILEMODE 'same mode (index only)' '
git reset --hard &&
chmod +x file &&
git add file &&
git ls-files -s file | grep "^100755"
'
-test_expect_success 'mode update (no index)' '
+test_expect_success FILEMODE 'mode update (no index)' '
git reset --hard &&
git apply patch-1.txt &&
test -x file
'
-test_expect_success 'mode update (with index)' '
+test_expect_success FILEMODE 'mode update (with index)' '
git reset --hard &&
git apply --index patch-1.txt &&
test -x file &&
git diff --exit-code
'
-test_expect_success 'mode update (index only)' '
+test_expect_success FILEMODE 'mode update (index only)' '
git reset --hard &&
git apply --cached patch-1.txt &&
git ls-files -s file | grep "^100755"
cp /bin/sh a/bin &&
printf "A\$Format:%s\$O" "$SUBSTFORMAT" >a/substfile1 &&
printf "A not substituted O" >a/substfile2 &&
- ln -s a a/l1 &&
+ if test_have_prereq SYMLINKS; then
+ ln -s a a/l1
+ else
+ printf %s a > a/l1
+ fi &&
(p=long_path_to_a_file && cd a &&
for depth in 1 2 3 4 5; do mkdir $p && cd $p; done &&
echo text >file_with_long_path) &&
$UNZIP -v >/dev/null 2>&1
if [ $? -eq 127 ]; then
say "Skipping ZIP tests, because unzip was not found"
- test_done
- exit
+else
+ test_set_prereq UNZIP
fi
-test_expect_success \
+test_expect_success UNZIP \
'extract ZIP archive' \
'(mkdir d && cd d && $UNZIP ../d.zip)'
-test_expect_success \
+test_expect_success UNZIP \
'validate filenames' \
'(cd d/a && find .) | sort >d.lst &&
test_cmp a.lst d.lst'
-test_expect_success \
+test_expect_success UNZIP \
'validate file contents' \
'diff -r a d/a'
'git archive --format=zip with prefix' \
'git archive --format=zip --prefix=prefix/ HEAD >e.zip'
-test_expect_success \
+test_expect_success UNZIP \
'extract ZIP archive with prefix' \
'(mkdir e && cd e && $UNZIP ../e.zip)'
-test_expect_success \
+test_expect_success UNZIP \
'validate filenames with prefix' \
'(cd e/prefix/a && find .) | sort >e.lst &&
test_cmp a.lst e.lst'
-test_expect_success \
+test_expect_success UNZIP \
'validate file contents with prefix' \
'diff -r a e/prefix/a'
'index v2: force some 64-bit offsets with pack-objects' \
'pack3=$(git pack-objects --index-version=2,0x40000 test-3 <obj-list)'
-have_64bits=
if msg=$(git verify-pack -v "test-3-${pack3}.pack" 2>&1) ||
! (echo "$msg" | grep "pack too large .* off_t")
then
- have_64bits=t
+ test_set_prereq OFF64_T
else
say "skipping tests concerning 64-bit offsets"
fi
-test "$have_64bits" &&
-test_expect_success \
+test_expect_success OFF64_T \
'index v2: verify a pack with some 64-bit offsets' \
'git verify-pack -v "test-3-${pack3}.pack"'
-test "$have_64bits" &&
-test_expect_success \
+test_expect_success OFF64_T \
'64-bit offsets: should be different from previous index v2 results' \
'! cmp "test-2-${pack2}.idx" "test-3-${pack3}.idx"'
-test "$have_64bits" &&
-test_expect_success \
+test_expect_success OFF64_T \
'index v2: force some 64-bit offsets with index-pack' \
'git index-pack --index-version=2,0x40000 -o 3.idx "test-1-${pack1}.pack"'
-test "$have_64bits" &&
-test_expect_success \
+test_expect_success OFF64_T \
'64-bit offsets: index-pack result should match pack-objects one' \
'cmp "test-3-${pack3}.idx" "3.idx"'
. ./test-lib.sh
+case $(uname -s) in
+*MINGW*)
+ say "GIT_DEBUG_SEND_PACK not supported - skipping tests"
+ test_done
+ exit
+esac
+
# End state of the repository:
#
# T - tag1 S - tag2
. ./test-lib.sh
+if ! test_have_prereq SYMLINKS
+then
+ say 'Symbolic links not supported, skipping tests.'
+ test_done
+ exit
+fi
+
# The scenario we are building:
#
# trash\ directory/
test_description='merge-recursive: handle file mode'
. ./test-lib.sh
+if ! test "$(git config --bool core.filemode)" = false
+then
+ test_set_prereq FILEMODE
+fi
+
test_expect_success 'mode change in one branch: keep changed version' '
: >file1 &&
git add file1 &&
git commit -m b1 &&
git checkout a1 &&
git merge-recursive master -- a1 b1 &&
+ git ls-files -s file1 | grep ^100755
+'
+
+test_expect_success FILEMODE 'verify executable bit on file' '
test -x file1
'
echo "100644 $H 3 file2"
) >expect &&
test_cmp actual expect &&
+ git ls-files -s file2 | grep ^100755
+'
+
+test_expect_success FILEMODE 'verify executable bit on file' '
test -x file2
'
'
cat >expected <<EOF
-Merge branch 'left' of $TEST_DIRECTORY/$test
+Merge branch 'left' of $(pwd)
EOF
test_expect_success 'merge-msg test #2' '
git checkout master &&
- git fetch "$TEST_DIRECTORY/$test" left &&
+ git fetch "$(pwd)" left &&
git fmt-merge-msg <.git/FETCH_HEAD >actual &&
test_cmp expected actual
rm -f dirty dirty2
-test_expect_success 'git mv should overwrite symlink to a file' '
+test_expect_success SYMLINKS 'git mv should overwrite symlink to a file' '
rm -fr .git &&
git init &&
rm -f moved symlink
-test_expect_success 'git mv should overwrite file with a symlink' '
+test_expect_success SYMLINKS 'git mv should overwrite file with a symlink' '
rm -fr .git &&
git init &&
'
# subsequent tests require gpg; check if it is available
-gpg --version >/dev/null
+gpg --version >/dev/null 2>/dev/null
if [ $? -eq 127 ]; then
say "gpg not found - skipping tag signing and verification tests"
- test_done
- exit
+else
+ # As said here: http://www.gnupg.org/documentation/faqs.html#q6.19
+ # the gpg version 1.0.6 didn't parse trust packets correctly, so for
+ # that version, creation of signed tags using the generated key fails.
+ case "$(gpg --version)" in
+ 'gpg (GnuPG) 1.0.6'*)
+		say "Skipping signed tag tests because of a bug in GnuPG 1.0.6"
+ ;;
+ *)
+ test_set_prereq GPG
+ ;;
+ esac
fi
# trying to verify annotated non-signed tags:
-test_expect_success \
+test_expect_success GPG \
'trying to verify an annotated non-signed tag should fail' '
tag_exists annotated-tag &&
test_must_fail git tag -v annotated-tag
'
-test_expect_success \
+test_expect_success GPG \
'trying to verify a file-annotated non-signed tag should fail' '
tag_exists file-annotated-tag &&
test_must_fail git tag -v file-annotated-tag
'
-test_expect_success \
+test_expect_success GPG \
'trying to verify two annotated non-signed tags should fail' '
tag_exists annotated-tag file-annotated-tag &&
test_must_fail git tag -v annotated-tag file-annotated-tag
# creating and verifying signed tags:
-# As said here: http://www.gnupg.org/documentation/faqs.html#q6.19
-# the gpg version 1.0.6 didn't parse trust packets correctly, so for
-# that version, creation of signed tags using the generated key fails.
-case "$(gpg --version)" in
-'gpg (GnuPG) 1.0.6'*)
- say "Skipping signed tag tests, because a bug in 1.0.6 version"
- test_done
- exit
- ;;
-esac
-
# key generation info: gpg --homedir t/t7004 --gen-key
# Type DSA and Elgamal, size 2048 bits, no expiration date.
# Name and email: C O Mitter <committer@example.com>
get_tag_header signed-tag $commit commit $time >expect
echo 'A signed tag message' >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success 'creating a signed tag with -m message should succeed' '
+test_expect_success GPG 'creating a signed tag with -m message should succeed' '
git tag -s -m "A signed tag message" signed-tag &&
get_tag_msg signed-tag >actual &&
test_cmp expect actual
get_tag_header u-signed-tag $commit commit $time >expect
echo 'Another message' >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success 'sign with a given key id' '
+test_expect_success GPG 'sign with a given key id' '
git tag -u committer@example.com -m "Another message" u-signed-tag &&
get_tag_msg u-signed-tag >actual &&
'
-test_expect_success 'sign with an unknown id (1)' '
+test_expect_success GPG 'sign with an unknown id (1)' '
test_must_fail git tag -u author@example.com \
-m "Another message" o-signed-tag
'
-test_expect_success 'sign with an unknown id (2)' '
+test_expect_success GPG 'sign with an unknown id (2)' '
test_must_fail git tag -u DEADBEEF -m "Another message" o-signed-tag
get_tag_header implied-sign $commit commit $time >expect
./fakeeditor >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success '-u implies signed tag' '
+test_expect_success GPG '-u implies signed tag' '
GIT_EDITOR=./fakeeditor git tag -u CDDE430D implied-sign &&
get_tag_msg implied-sign >actual &&
test_cmp expect actual
get_tag_header file-signed-tag $commit commit $time >expect
cat sigmsgfile >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with -F messagefile should succeed' '
git tag -s -F sigmsgfile file-signed-tag &&
get_tag_msg file-signed-tag >actual &&
get_tag_header stdin-signed-tag $commit commit $time >expect
cat siginputmsg >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success 'creating a signed tag with -F - should succeed' '
+test_expect_success GPG 'creating a signed tag with -F - should succeed' '
git tag -s -F - stdin-signed-tag <siginputmsg &&
get_tag_msg stdin-signed-tag >actual &&
test_cmp expect actual
get_tag_header implied-annotate $commit commit $time >expect
./fakeeditor >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success '-s implies annotated tag' '
+test_expect_success GPG '-s implies annotated tag' '
GIT_EDITOR=./fakeeditor git tag -s implied-annotate &&
get_tag_msg implied-annotate >actual &&
test_cmp expect actual
'
-test_expect_success \
+test_expect_success GPG \
'trying to create a signed tag with non-existing -F file should fail' '
! test -f nonexistingfile &&
! tag_exists nosigtag &&
! tag_exists nosigtag
'
-test_expect_success 'verifying a signed tag should succeed' \
+test_expect_success GPG 'verifying a signed tag should succeed' \
'git tag -v signed-tag'
-test_expect_success 'verifying two signed tags in one command should succeed' \
+test_expect_success GPG 'verifying two signed tags in one command should succeed' \
'git tag -v signed-tag file-signed-tag'
-test_expect_success \
+test_expect_success GPG \
'verifying many signed and non-signed tags should fail' '
test_must_fail git tag -v signed-tag annotated-tag &&
test_must_fail git tag -v file-annotated-tag file-signed-tag &&
test_must_fail git tag -v signed-tag annotated-tag file-signed-tag
'
-test_expect_success 'verifying a forged tag should fail' '
+test_expect_success GPG 'verifying a forged tag should fail' '
forged=$(git cat-file tag signed-tag |
sed -e "s/signed-tag/forged-tag/" |
git mktag) &&
get_tag_header empty-signed-tag $commit commit $time >expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with an empty -m message should succeed' '
git tag -s -m "" empty-signed-tag &&
get_tag_msg empty-signed-tag >actual &&
>sigemptyfile
get_tag_header emptyfile-signed-tag $commit commit $time >expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with an empty -F messagefile should succeed' '
git tag -s -F sigemptyfile emptyfile-signed-tag &&
get_tag_msg emptyfile-signed-tag >actual &&
Trailing blank lines
EOF
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'extra blanks in the message for a signed tag should be removed' '
git tag -s -F sigblanksfile blanks-signed-tag &&
get_tag_msg blanks-signed-tag >actual &&
get_tag_header blank-signed-tag $commit commit $time >expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with a blank -m message should succeed' '
git tag -s -m " " blank-signed-tag &&
get_tag_msg blank-signed-tag >actual &&
echo ' ' >>sigblankfile
get_tag_header blankfile-signed-tag $commit commit $time >expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with blank -F file with spaces should succeed' '
git tag -s -F sigblankfile blankfile-signed-tag &&
get_tag_msg blankfile-signed-tag >actual &&
printf ' ' >sigblanknonlfile
get_tag_header blanknonlfile-signed-tag $commit commit $time >expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with spaces and no newline should succeed' '
git tag -s -F sigblanknonlfile blanknonlfile-signed-tag &&
get_tag_msg blanknonlfile-signed-tag >actual &&
Last line.
EOF
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with a -F file with #comments should succeed' '
git tag -s -F sigcommentsfile comments-signed-tag &&
get_tag_msg comments-signed-tag >actual &&
get_tag_header comment-signed-tag $commit commit $time >expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with #commented -m message should succeed' '
git tag -s -m "#comment" comment-signed-tag &&
get_tag_msg comment-signed-tag >actual &&
echo '####' >>sigcommentfile
get_tag_header commentfile-signed-tag $commit commit $time >expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with #commented -F messagefile should succeed' '
git tag -s -F sigcommentfile commentfile-signed-tag &&
get_tag_msg commentfile-signed-tag >actual &&
printf '#comment' >sigcommentnonlfile
get_tag_header commentnonlfile-signed-tag $commit commit $time >expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag with a #comment and no newline should succeed' '
git tag -s -F sigcommentnonlfile commentnonlfile-signed-tag &&
get_tag_msg commentnonlfile-signed-tag >actual &&
# listing messages for signed tags:
-test_expect_success \
+test_expect_success GPG \
'listing the one-line message of a signed tag should succeed' '
git tag -s -m "A message line signed" stag-one-line &&
test_cmp expect actual
'
-test_expect_success \
+test_expect_success GPG \
'listing the zero-lines message of a signed tag should succeed' '
git tag -s -m "" stag-zero-lines &&
echo 'stag line one' >sigtagmsg
echo 'stag line two' >>sigtagmsg
echo 'stag line three' >>sigtagmsg
-test_expect_success \
+test_expect_success GPG \
'listing many message lines of a signed tag should succeed' '
git tag -s -F sigtagmsg stag-lines &&
tree=$(git rev-parse HEAD^{tree})
blob=$(git rev-parse HEAD:foo)
-tag=$(git rev-parse signed-tag)
+tag=$(git rev-parse signed-tag 2>/dev/null)
get_tag_header tree-signed-tag $tree tree $time >expect
echo "A message for a tree" >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag pointing to a tree should succeed' '
git tag -s -m "A message for a tree" tree-signed-tag HEAD^{tree} &&
get_tag_msg tree-signed-tag >actual &&
get_tag_header blob-signed-tag $blob blob $time >expect
echo "A message for a blob" >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag pointing to a blob should succeed' '
git tag -s -m "A message for a blob" blob-signed-tag HEAD:foo &&
get_tag_msg blob-signed-tag >actual &&
get_tag_header tag-signed-tag $tag tag $time >expect
echo "A message for another tag" >>expect
echo '-----BEGIN PGP SIGNATURE-----' >>expect
-test_expect_success \
+test_expect_success GPG \
'creating a signed tag pointing to another tag should succeed' '
git tag -s -m "A message for another tag" tag-signed-tag signed-tag &&
get_tag_msg tag-signed-tag >actual &&
# try to sign with bad user.signingkey
git config user.signingkey BobTheMouse
-test_expect_success \
+test_expect_success GPG \
'git tag -s fails if gpg is misconfigured' \
'test_must_fail git tag -s -m tail tag-gpg-failure'
git config --unset user.signingkey
# try to verify without gpg:
rm -rf gpghome
-test_expect_success \
+test_expect_success GPG \
'verify signed tag fails when public key is not present' \
'test_must_fail git tag -v signed-tag'
for i in GIT_EDITOR core_editor EDITOR VISUAL vi
do
cat >e-$i.sh <<-EOF
+ #!$SHELL_PATH
echo "Edited by $i" >"\$1"
EOF
chmod +x e-$i.sh
# kill -TERM command added below.
EOF
-test_expect_success 'a SIGTERM should break locks' '
+test_expect_success EXECKEEPSPID 'a SIGTERM should break locks' '
echo >>negative &&
! "$SHELL_PATH" -c '\''
echo kill -TERM $$ >> .git/FAKE_EDITOR
'
chmod -x "$HOOK"
-test_expect_success 'with non-executable hook' '
+test_expect_success POSIXPERM 'with non-executable hook' '
echo "content" >> file &&
git add file &&
'
-test_expect_success '--no-verify with non-executable hook' '
+test_expect_success POSIXPERM '--no-verify with non-executable hook' '
echo "more content" >> file &&
git add file &&
'
chmod -x "$HOOK"
-test_expect_success 'with non-executable hook' '
+test_expect_success POSIXPERM 'with non-executable hook' '
echo "content" >> file &&
git add file &&
'
-test_expect_success 'with non-executable hook (editor)' '
+test_expect_success POSIXPERM 'with non-executable hook (editor)' '
echo "content again" >> file &&
git add file &&
'
-test_expect_success '--no-verify with non-executable hook' '
+test_expect_success POSIXPERM '--no-verify with non-executable hook' '
echo "more content" >> file &&
git add file &&
'
-test_expect_success '--no-verify with non-executable hook (editor)' '
+test_expect_success POSIXPERM '--no-verify with non-executable hook (editor)' '
echo "even more content" >> file &&
git add file &&
done
'
+test_expect_success 'packed obs in alternate ODB kept pack are repacked' '
+ # swap the .keep so the commit object is in the pack with .keep
+ for p in alt_objects/pack/*.pack
+ do
+ base_name=$(basename $p .pack)
+ if test -f alt_objects/pack/$base_name.keep
+ then
+ rm alt_objects/pack/$base_name.keep
+ else
+ touch alt_objects/pack/$base_name.keep
+ fi
+ done
+ git repack -a -d &&
+ myidx=$(ls -1 .git/objects/pack/*.idx) &&
+ test -f "$myidx" &&
+ for p in alt_objects/pack/*.idx; do
+ git verify-pack -v $p | sed -n -e "/^[0-9a-f]\{40\}/p"
+ done | while read sha1 rest; do
+ if ! ( git verify-pack -v $myidx | grep "^$sha1" ); then
+ echo "Missing object in local pack: $sha1"
+ return 1
+ fi
+ done
+'
+
+test_expect_success 'packed unreachable obs in alternate ODB are not loosened' '
+ rm -f alt_objects/pack/*.keep &&
+ mv .git/objects/pack/* alt_objects/pack/ &&
+ csha1=$(git rev-parse HEAD^{commit}) &&
+ git reset --hard HEAD^ &&
+ sleep 1 &&
+ git reflog expire --expire=now --expire-unreachable=now --all &&
+ # The pack-objects call on the next line is equivalent to
+ # git repack -A -d without the call to prune-packed
+ git pack-objects --honor-pack-keep --non-empty --all --reflog \
+ --unpack-unreachable </dev/null pack &&
+ rm -f .git/objects/pack/* &&
+ mv pack-* .git/objects/pack/ &&
+ test 0 = $(git verify-pack -v -- .git/objects/pack/*.idx |
+ egrep "^$csha1 " | sort | uniq | wc -l) &&
+ echo > .git/objects/info/alternates &&
+ test_must_fail git show $csha1
+'
+
+test_expect_success 'local packed unreachable obs that exist in alternate ODB are not loosened' '
+ echo `pwd`/alt_objects > .git/objects/info/alternates &&
+ echo "$csha1" | git pack-objects --non-empty --all --reflog pack &&
+ rm -f .git/objects/pack/* &&
+ mv pack-* .git/objects/pack/ &&
+ # The pack-objects call on the next line is equivalent to
+ # git repack -A -d without the call to prune-packed
+ git pack-objects --honor-pack-keep --non-empty --all --reflog \
+ --unpack-unreachable </dev/null pack &&
+ rm -f .git/objects/pack/* &&
+ mv pack-* .git/objects/pack/ &&
+ test 0 = $(git verify-pack -v -- .git/objects/pack/*.idx |
+ egrep "^$csha1 " | sort | uniq | wc -l) &&
+ echo > .git/objects/info/alternates &&
+ test_must_fail git show $csha1
+'
+
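As the comments in the two tests above note, the pack-objects invocation stands in for what "git repack -A -d" would run, minus the pruning; a rough sketch of that claimed equivalence, using only the flags quoted in the tests (not verified against git-repack itself):

	# Approximate expansion of "git repack -A -d" per the comments above;
	# the tests above intentionally stop before the prune-packed step.
	git pack-objects --honor-pack-keep --non-empty --all --reflog \
		--unpack-unreachable </dev/null pack &&
	rm -f .git/objects/pack/* &&
	mv pack-* .git/objects/pack/ &&
	git prune-packed
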
test_done
--from="Example <nobody@example.com>" \
--to=nobody@example.com \
--smtp-server="$(pwd)/fake.sendmail" \
- $@ \
- $patches | grep "Send this email"
+ $@ $patches > stdout &&
+ grep "Send this email" stdout
}
test_expect_success '--confirm=always' '
test_expect_success 'confirm by default (due to cc)' '
CONFIRM=$(git config --get sendemail.confirm) &&
git config --unset sendemail.confirm &&
- test_confirm &&
- git config sendemail.confirm $CONFIRM
+ test_confirm
+ ret="$?"
+ git config sendemail.confirm ${CONFIRM:-never}
+ test $ret = "0"
'
test_expect_success 'confirm by default (due to --compose)' '
test $ret = "0"
'
+test_expect_success 'confirm detects EOF (inform assumes y)' '
+ CONFIRM=$(git config --get sendemail.confirm) &&
+ git config --unset sendemail.confirm &&
+ rm -fr outdir &&
+ git format-patch -2 -o outdir &&
+ GIT_SEND_EMAIL_NOTTY=1 \
+ git send-email \
+ --from="Example <nobody@example.com>" \
+ --to=nobody@example.com \
+ --smtp-server="$(pwd)/fake.sendmail" \
+ outdir/*.patch < /dev/null
+ ret="$?"
+ git config sendemail.confirm ${CONFIRM:-never}
+ test $ret = "0"
+'
+
+test_expect_success 'confirm detects EOF (auto causes failure)' '
+ CONFIRM=$(git config --get sendemail.confirm) &&
+ git config sendemail.confirm auto &&
+ GIT_SEND_EMAIL_NOTTY=1 \
+ test_must_fail git send-email \
+ --from="Example <nobody@example.com>" \
+ --to=nobody@example.com \
+ --smtp-server="$(pwd)/fake.sendmail" \
+ $patches < /dev/null
+ ret="$?"
+ git config sendemail.confirm ${CONFIRM:-never}
+ test $ret = "0"
+'
+
+test_expect_success 'confirm doesnt loop forever' '
+ CONFIRM=$(git config --get sendemail.confirm) &&
+ git config sendemail.confirm auto &&
+ yes "bogus" | GIT_SEND_EMAIL_NOTTY=1 \
+ test_must_fail git send-email \
+ --from="Example <nobody@example.com>" \
+ --to=nobody@example.com \
+ --smtp-server="$(pwd)/fake.sendmail" \
+ $patches
+ ret="$?"
+ git config sendemail.confirm ${CONFIRM:-never}
+ test $ret = "0"
+'
+
test_expect_success '--compose adds MIME for utf8 body' '
clean_fake_sendmail &&
(echo "#!$SHELL_PATH" &&
test_description='git svn basic tests'
GIT_SVN_LC_ALL=${LC_ALL:-$LANG}
+. ./lib-git-svn.sh
+
+say 'define NO_SVN_TESTS to skip git svn tests'
+
case "$GIT_SVN_LC_ALL" in
*.UTF-8)
- have_utf8=t
+ test_set_prereq UTF8
;;
*)
- have_utf8=
+ say "UTF-8 locale not set, some tests skipped ($GIT_SVN_LC_ALL)"
;;
esac
-. ./lib-git-svn.sh
-
-say 'define NO_SVN_TESTS to skip git svn tests'
-
test_expect_success \
'initialize git svn' '
mkdir import &&
test ! -L "$SVN_TREE"/exec-2.sh &&
test_cmp help "$SVN_TREE"/exec-2.sh'
-if test "$have_utf8" = t
-then
- name="commit with UTF-8 message: locale: $GIT_SVN_LC_ALL"
- LC_ALL="$GIT_SVN_LC_ALL"
- export LC_ALL
- test_expect_success "$name" "
- echo '# hello' >> exec-2.sh &&
- git update-index exec-2.sh &&
- git commit -m 'éï∏' &&
- git svn set-tree HEAD"
- unset LC_ALL
-else
- say "UTF-8 locale not set, test skipped ($GIT_SVN_LC_ALL)"
-fi
+name="commit with UTF-8 message: locale: $GIT_SVN_LC_ALL"
+LC_ALL="$GIT_SVN_LC_ALL"
+export LC_ALL
+test_expect_success UTF8 "$name" "
+ echo '# hello' >> exec-2.sh &&
+ git update-index exec-2.sh &&
+ git commit -m 'éï∏' &&
+ git svn set-tree HEAD"
+unset LC_ALL
name='test fetch functionality (svn => git) with alternate GIT_SVN_ID'
GIT_SVN_ID=alt
name='check imported tree checksums expected tree checksums'
rm -f expected
-if test "$have_utf8" = t
+if test_have_prereq UTF8
then
echo tree bf522353586b1b883488f2bc73dab0d9f774b9a9 > expected
fi
done
if locale -a |grep -q en_US.utf8; then
- test_expect_success 'ISO-8859-1 should match UTF-8 in svn' '
+ test_set_prereq UTF8
+else
+ say "UTF-8 locale not available, test skipped"
+fi
+
+test_expect_success UTF8 'ISO-8859-1 should match UTF-8 in svn' '
(
cd ISO-8859-1 &&
compare_svn_head_with "$TEST_DIRECTORY"/t3900/1-UTF-8.txt
)
- '
+'
- for H in EUCJP ISO-2022-JP
- do
- test_expect_success '$H should match UTF-8 in svn' '
+for H in EUCJP ISO-2022-JP
+do
+ test_expect_success UTF8 "$H should match UTF-8 in svn" '
(
cd $H &&
compare_svn_head_with "$TEST_DIRECTORY"/t3900/2-UTF-8.txt
)
- '
- done
-else
- say "UTF-8 locale not available, test skipped"
-fi
+ '
+done
test_done
test_expect_success '"bar" is an empty file' 'test -f x/bar && ! test -s x/bar'
test_expect_success 'get "bar" => symlink fix from svn' \
'(cd x && git svn rebase)'
-test_expect_success '"bar" becomes a symlink' 'test -L x/bar'
+test_expect_success SYMLINKS '"bar" becomes a symlink' 'test -L x/bar'
test_expect_success 'clone using git svn' 'git svn clone -r1 "$svnrepo" y'
test_expect_success 'clone using git svn' 'git svn clone -r1 "$svnrepo" x'
-test_expect_success '"bar" is a symlink that points to "asdf"' '
+test_expect_success SYMLINKS '"bar" is a symlink that points to "asdf"' '
test -L x/bar &&
(cd x && test xasdf = x"`git cat-file blob HEAD:bar`")
'
(cd x && git svn rebase)
'
-test_expect_success '"bar" remains a proper symlink' '
+test_expect_success SYMLINKS '"bar" remains a proper symlink' '
test -L x/bar &&
(cd x && test xdoink = x"`git cat-file blob HEAD:bar`")
'
test_must_fail git cvsexportcommit -c $id
)'
-case "$(git config --bool core.filemode)" in
-false)
- ;;
-*)
-test_expect_success \
+if ! test "$(git config --bool core.filemode)" = false
+then
+ test_set_prereq FILEMODE
+fi
+
+test_expect_success FILEMODE \
'Retain execute bit' \
'mkdir G &&
echo executeon >G/on &&
test -x G/on &&
! test -x G/off
)'
- ;;
-esac
test_expect_success '-w option should work with relative GIT_DIR' '
mkdir W &&
test_expect_success 'setup' '
+ echo break it > file0 &&
+ git add file0 &&
+ test_tick &&
echo Wohlauf > file &&
git add file &&
test_tick &&
(cd new &&
git fast-import &&
test $MASTER != $(git rev-parse --verify refs/heads/partial) &&
- git diff master..partial &&
- git diff master^..partial^ &&
+ git diff --exit-code master partial &&
+ git diff --exit-code master^ partial^ &&
test_must_fail git rev-parse partial~2)
'
gitweb_run "p=.git;a=commitdiff"'
test_debug 'cat gitweb.log'
-test_expect_success \
+test_expect_success SYMLINKS \
'commitdiff(0): file to symlink' \
'rm renamed_file &&
ln -s file renamed_file &&
# ----------------------------------------------------------------------
# commitdiff testing (taken from t4114-apply-typechange.sh)
-test_expect_success 'setup typechange commits' '
+test_expect_success SYMLINKS 'setup typechange commits' '
echo "hello world" > foo &&
echo "hi planet" > bar &&
git update-index --add foo bar &&
git mv 04-rename-from 04-rename-to &&
echo "Changed" >> 04-rename-to &&
test_chmod +x 05-mode-change &&
- rm -f 06-file-or-symlink && ln -s 01-change 06-file-or-symlink &&
+ rm -f 06-file-or-symlink &&
+ if test_have_prereq SYMLINKS; then
+ ln -s 01-change 06-file-or-symlink
+ else
+ printf %s 01-change > 06-file-or-symlink
+ fi &&
echo "Changed and have mode changed" > 07-change-mode-change &&
test_chmod +x 07-change-mode-change &&
git commit -a -m "Large commit" &&
git update-index --add "--chmod=$@"
}
+# Use test_set_prereq to tell that a particular prerequisite is available.
+# The prerequisite can later be checked for in two ways:
+#
+# - Explicitly using test_have_prereq.
+#
+# - Implicitly by specifying the prerequisite tag in the calls to
+# test_expect_{success,failure,code}.
+#
+# The single parameter is the prerequisite tag (a simple word, in all
+# capital letters by convention).
+
+test_set_prereq () {
+ satisfied="$satisfied$1 "
+}
+satisfied=" "
+
+test_have_prereq () {
+ case $satisfied in
+ *" $1 "*)
+ : yes, have it ;;
+ *)
+ ! : nope ;;
+ esac
+}
+
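A minimal usage sketch of the two checking styles described in the comment above; the FROTZ tag and the probe command are invented for illustration (real callers in this series use tags such as SYMLINKS, POSIXPERM or UNZIP):

	# Probe once, near the top of a test script, and record the result.
	frotz --version >/dev/null 2>&1 && test_set_prereq FROTZ

	# Implicit check: the test is silently skipped unless FROTZ was set.
	test_expect_success FROTZ 'frotz reports its version' '
		frotz --version
	'

	# Explicit check: announce that a group of tests will not run.
	if ! test_have_prereq FROTZ
	then
		say "frotz not found - skipping frotz-specific tests"
	fi
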
# You are not expected to call test_ok_ and test_failure_ directly, use
# the test_expect_* functions instead.
to_skip=t
esac
done
+ if test -z "$to_skip" && test -n "$prereq" &&
+ ! test_have_prereq "$prereq"
+ then
+ to_skip=t
+ fi
case "$to_skip" in
t)
say_color skip >&3 "skipping test: $@"
}
test_expect_failure () {
+ test "$#" = 3 && { prereq=$1; shift; } || prereq=
test "$#" = 2 ||
- error "bug in the test script: not 2 parameters to test-expect-failure"
+ error "bug in the test script: not 2 or 3 parameters to test-expect-failure"
if ! test_skip "$@"
then
say >&3 "checking known breakage: $2"
}
test_expect_success () {
+ test "$#" = 3 && { prereq=$1; shift; } || prereq=
test "$#" = 2 ||
- error "bug in the test script: not 2 parameters to test-expect-success"
+ error "bug in the test script: not 2 or 3 parameters to test-expect-success"
if ! test_skip "$@"
then
say >&3 "expecting success: $2"
}
test_expect_code () {
+ test "$#" = 4 && { prereq=$1; shift; } || prereq=
test "$#" = 3 ||
- error "bug in the test script: not 3 parameters to test-expect-code"
+ error "bug in the test script: not 3 or 4 parameters to test-expect-code"
if ! test_skip "$@"
then
say >&3 "expecting exit code $1: $3"
# Usage: test_external description command arguments...
# Example: test_external 'Perl API' perl ../path/to/test.pl
test_external () {
- test "$#" -eq 3 ||
- error >&5 "bug in the test script: not 3 parameters to test_external"
+ test "$#" = 4 && { prereq=$1; shift; } || prereq=
+ test "$#" = 3 ||
+ error >&5 "bug in the test script: not 3 or 4 parameters to test_external"
descr="$1"
shift
if ! test_skip "$descr" "$@"
fi
case "$test_failure" in
0)
- # We could:
- # cd .. && rm -fr 'trash directory'
- # but that means we forbid any tests that use their own
- # subdirectory from calling test_done without coming back
- # to where they started from.
- # The Makefile provided will clean this test area so
- # we will leave things as they are.
-
say_color pass "passed all $msg"
test -d "$remove_trash" &&
test_done
esac
done
+
+# Fix some commands on Windows
+case $(uname -s) in
+*MINGW*)
+ # Windows has its own (incompatible) sort and find
+ sort () {
+ /usr/bin/sort "$@"
+ }
+ find () {
+ /usr/bin/find "$@"
+ }
+ sum () {
+ md5sum "$@"
+ }
+ # git sees Windows-style pwd
+ pwd () {
+ builtin pwd -W
+ }
+ # no POSIX permissions
+ # backslashes in pathspec are converted to '/'
+ # exec does not inherit the PID
+ ;;
+*)
+ test_set_prereq POSIXPERM
+ test_set_prereq BSLASHPSPEC
+ test_set_prereq EXECKEEPSPID
+ ;;
+esac
+
+# test whether the filesystem supports symbolic links
+ln -s x y 2>/dev/null && test -h y 2>/dev/null && test_set_prereq SYMLINKS
+rm -f y
return prefixcmp(url, "rsync://") ? skip_prefix(url, "rsync:") : url;
}
-static struct ref *get_refs_via_rsync(struct transport *transport)
+static struct ref *get_refs_via_rsync(struct transport *transport, int for_push)
{
struct strbuf buf = STRBUF_INIT, temp_dir = STRBUF_INIT;
struct ref dummy, *tail = &dummy;
const char *args[5];
int temp_dir_len;
+ if (for_push)
+ return NULL;
+
/* copy the refs to the temporary directory */
strbuf_addstr(&temp_dir, git_path("rsync-refs-XXXXXX"));
return !!err;
}
-static struct ref *get_refs_via_curl(struct transport *transport)
+static struct ref *get_refs_via_curl(struct transport *transport, int for_push)
{
struct strbuf buffer = STRBUF_INIT;
char *data, *start, *mid;
struct walker *walker;
+ if (for_push)
+ return NULL;
+
if (!transport->data)
transport->data = get_http_walker(transport->url,
transport->remote);
struct bundle_header header;
};
-static struct ref *get_refs_from_bundle(struct transport *transport)
+static struct ref *get_refs_from_bundle(struct transport *transport, int for_push)
{
struct bundle_transport_data *data = transport->data;
struct ref *result = NULL;
int i;
+ if (for_push)
+ return NULL;
+
if (data->fd > 0)
close(data->fd);
data->fd = read_bundle_header(transport->url, &data->header);
int fd[2];
const char *uploadpack;
const char *receivepack;
+ struct extra_have_objects extra_have;
};
static int set_git_option(struct transport *connection,
return 1;
}
-static int connect_setup(struct transport *transport)
+static int connect_setup(struct transport *transport, int for_push, int verbose)
{
struct git_transport_data *data = transport->data;
- data->conn = git_connect(data->fd, transport->url, data->uploadpack, 0);
+ data->conn = git_connect(data->fd, transport->url,
+ for_push ? data->receivepack : data->uploadpack,
+ verbose ? CONNECT_VERBOSE : 0);
return 0;
}
-static struct ref *get_refs_via_connect(struct transport *transport)
+static struct ref *get_refs_via_connect(struct transport *transport, int for_push)
{
struct git_transport_data *data = transport->data;
struct ref *refs;
- connect_setup(transport);
- get_remote_heads(data->fd[0], &refs, 0, NULL, 0, NULL);
+ connect_setup(transport, for_push, 0);
+ get_remote_heads(data->fd[0], &refs, 0, NULL,
+ for_push ? REF_NORMAL : 0, &data->extra_have);
return refs;
}
origh[i] = heads[i] = xstrdup(to_fetch[i]->name);
if (!data->conn) {
- connect_setup(transport);
+ connect_setup(transport, 0, 0);
get_remote_heads(data->fd[0], &refs_tmp, 0, NULL, 0, NULL);
}
return (refs ? 0 : -1);
}
-static int git_transport_push(struct transport *transport, int refspec_nr, const char **refspec, int flags)
+static int refs_pushed(struct ref *ref)
+{
+ for (; ref; ref = ref->next) {
+ switch(ref->status) {
+ case REF_STATUS_NONE:
+ case REF_STATUS_UPTODATE:
+ break;
+ default:
+ return 1;
+ }
+ }
+ return 0;
+}
+
+static void update_tracking_ref(struct remote *remote, struct ref *ref, int verbose)
+{
+ struct refspec rs;
+
+ if (ref->status != REF_STATUS_OK && ref->status != REF_STATUS_UPTODATE)
+ return;
+
+ rs.src = ref->name;
+ rs.dst = NULL;
+
+ if (!remote_find_tracking(remote, &rs)) {
+ if (verbose)
+ fprintf(stderr, "updating local tracking ref '%s'\n", rs.dst);
+ if (ref->deletion) {
+ delete_ref(rs.dst, NULL, 0);
+ } else
+ update_ref("update by push", rs.dst,
+ ref->new_sha1, NULL, 0, 0);
+ free(rs.dst);
+ }
+}
+
+#define SUMMARY_WIDTH (2 * DEFAULT_ABBREV + 3)
+
+static void print_ref_status(char flag, const char *summary, struct ref *to, struct ref *from, const char *msg)
+{
+ fprintf(stderr, " %c %-*s ", flag, SUMMARY_WIDTH, summary);
+ if (from)
+ fprintf(stderr, "%s -> %s", prettify_ref(from), prettify_ref(to));
+ else
+ fputs(prettify_ref(to), stderr);
+ if (msg) {
+ fputs(" (", stderr);
+ fputs(msg, stderr);
+ fputc(')', stderr);
+ }
+ fputc('\n', stderr);
+}
+
+static const char *status_abbrev(unsigned char sha1[20])
+{
+ return find_unique_abbrev(sha1, DEFAULT_ABBREV);
+}
+
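+/* Summarize a ref the remote accepted: deletion, new ref, fast-forward or forced update. */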
+static void print_ok_ref_status(struct ref *ref)
+{
+ if (ref->deletion)
+ print_ref_status('-', "[deleted]", ref, NULL, NULL);
+ else if (is_null_sha1(ref->old_sha1))
+ print_ref_status('*',
+ (!prefixcmp(ref->name, "refs/tags/") ? "[new tag]" :
+ "[new branch]"),
+ ref, ref->peer_ref, NULL);
+ else {
+ char quickref[84];
+ char type;
+ const char *msg;
+
+ strcpy(quickref, status_abbrev(ref->old_sha1));
+ if (ref->nonfastforward) {
+ strcat(quickref, "...");
+ type = '+';
+ msg = "forced update";
+ } else {
+ strcat(quickref, "..");
+ type = ' ';
+ msg = NULL;
+ }
+ strcat(quickref, status_abbrev(ref->new_sha1));
+
+ print_ref_status(type, quickref, ref, ref->peer_ref, msg);
+ }
+}
+
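+/*
+ * Print the status of a single ref; the "To <dest>" header is written
+ * only before the first line.  Always returns 1 so callers can count
+ * the lines printed so far.
+ */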
+static int print_one_push_status(struct ref *ref, const char *dest, int count)
+{
+ if (!count)
+ fprintf(stderr, "To %s\n", dest);
+
+ switch (ref->status) {
+ case REF_STATUS_NONE:
+ print_ref_status('X', "[no match]", ref, NULL, NULL);
+ break;
+ case REF_STATUS_REJECT_NODELETE:
+ print_ref_status('!', "[rejected]", ref, NULL,
+ "remote does not support deleting refs");
+ break;
+ case REF_STATUS_UPTODATE:
+ print_ref_status('=', "[up to date]", ref,
+ ref->peer_ref, NULL);
+ break;
+ case REF_STATUS_REJECT_NONFASTFORWARD:
+ print_ref_status('!', "[rejected]", ref, ref->peer_ref,
+ "non-fast-forward");
+ break;
+ case REF_STATUS_REMOTE_REJECT:
+ print_ref_status('!', "[remote rejected]", ref,
+ ref->deletion ? NULL : ref->peer_ref,
+ ref->remote_status);
+ break;
+ case REF_STATUS_EXPECTING_REPORT:
+ print_ref_status('!', "[remote failure]", ref,
+ ref->deletion ? NULL : ref->peer_ref,
+ "remote failed to report status");
+ break;
+ case REF_STATUS_OK:
+ print_ok_ref_status(ref);
+ break;
+ }
+
+ return 1;
+}
+
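+/*
+ * Report the refs in a fixed order: up-to-date ones (only when verbose),
+ * then successful updates, then everything that was rejected or failed.
+ */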
+static void print_push_status(const char *dest, struct ref *refs, int verbose)
+{
+ struct ref *ref;
+ int n = 0;
+
+ if (verbose) {
+ for (ref = refs; ref; ref = ref->next)
+ if (ref->status == REF_STATUS_UPTODATE)
+ n += print_one_push_status(ref, dest, n);
+ }
+
+ for (ref = refs; ref; ref = ref->next)
+ if (ref->status == REF_STATUS_OK)
+ n += print_one_push_status(ref, dest, n);
+
+ for (ref = refs; ref; ref = ref->next) {
+ if (ref->status != REF_STATUS_NONE &&
+ ref->status != REF_STATUS_UPTODATE &&
+ ref->status != REF_STATUS_OK)
+ n += print_one_push_status(ref, dest, n);
+ }
+}
+
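+/*
+ * Check that the remote side of each refspec is a well-formed ref name
+ * (or a match pattern) before we talk to the other end; die otherwise.
+ */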
+static void verify_remote_names(int nr_heads, const char **heads)
+{
+ int i;
+
+ for (i = 0; i < nr_heads; i++) {
+ const char *local = heads[i];
+ const char *remote = strrchr(heads[i], ':');
+
+ if (*local == '+')
+ local++;
+
+ /* A matching refspec is okay. */
+ if (remote == local && remote[1] == '\0')
+ continue;
+
+ remote = remote ? (remote + 1) : local;
+ switch (check_ref_format(remote)) {
+ case 0: /* ok */
+ case CHECK_REF_FORMAT_ONELEVEL:
+ /* ok but a single level -- that is fine for
+ * a match pattern.
+ */
+ case CHECK_REF_FORMAT_WILDCARD:
+ /* ok but ends with a pattern-match character */
+ continue;
+ }
+ die("remote part of refspec is not a valid name in %s",
+ heads[i]);
+ }
+}
+
+static int git_transport_push(struct transport *transport, struct ref *remote_refs, int flags)
{
struct git_transport_data *data = transport->data;
struct send_pack_args args;
+ int ret;
+
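+ /*
+  * Make sure we have an open connection and have read the remote's
+  * ref advertisement before sending the pack.
+  */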
+ if (!data->conn) {
+ struct ref *tmp_refs;
+ connect_setup(transport, 1, 0);
+
+ get_remote_heads(data->fd[0], &tmp_refs, 0, NULL, REF_NORMAL,
+ NULL);
+ }
- args.receivepack = data->receivepack;
- args.send_all = !!(flags & TRANSPORT_PUSH_ALL);
args.send_mirror = !!(flags & TRANSPORT_PUSH_MIRROR);
args.force_update = !!(flags & TRANSPORT_PUSH_FORCE);
args.use_thin_pack = data->thin;
args.verbose = !!(flags & TRANSPORT_PUSH_VERBOSE);
args.dry_run = !!(flags & TRANSPORT_PUSH_DRY_RUN);
- return send_pack(&args, transport->url, transport->remote, refspec_nr, refspec);
+ ret = send_pack(&args, data->fd, data->conn, remote_refs,
+ &data->extra_have);
+
+ close(data->fd[1]);
+ close(data->fd[0]);
+ ret |= finish_connect(data->conn);
+ data->conn = NULL;
+
+ return ret;
}
static int disconnect_git(struct transport *transport)
ret->set_option = set_git_option;
ret->get_refs_list = get_refs_via_connect;
ret->fetch = fetch_refs_via_pack;
- ret->push = git_transport_push;
+ ret->push_refs = git_transport_push;
ret->disconnect = disconnect_git;
data->thin = 1;
int transport_push(struct transport *transport,
int refspec_nr, const char **refspec, int flags)
{
- if (!transport->push)
- return 1;
- return transport->push(transport, refspec_nr, refspec, flags);
+ verify_remote_names(refspec_nr, refspec);
+
+ if (transport->push)
+ return transport->push(transport, refspec_nr, refspec, flags);
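+ /*
+  * Transports that implement push_refs() get the refspec matching and
+  * status reporting done here; only the matched ref list is handed down.
+  */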
+ if (transport->push_refs) {
+ struct ref *remote_refs =
+ transport->get_refs_list(transport, 1);
+ struct ref **remote_tail;
+ struct ref *local_refs = get_local_heads();
+ int match_flags = MATCH_REFS_NONE;
+ int verbose = flags & TRANSPORT_PUSH_VERBOSE;
+ int ret;
+
+ if (flags & TRANSPORT_PUSH_ALL)
+ match_flags |= MATCH_REFS_ALL;
+ if (flags & TRANSPORT_PUSH_MIRROR)
+ match_flags |= MATCH_REFS_MIRROR;
+
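+ /*
+  * match_refs() appends refs that are to be created on the remote at
+  * *remote_tail, so point it at the end of the advertised list first.
+  */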
+ remote_tail = &remote_refs;
+ while (*remote_tail)
+ remote_tail = &((*remote_tail)->next);
+ if (match_refs(local_refs, remote_refs, &remote_tail,
+ refspec_nr, refspec, match_flags)) {
+ return -1;
+ }
+
+ ret = transport->push_refs(transport, remote_refs, flags);
+
+ print_push_status(transport->url, remote_refs, verbose);
+
+ if (!(flags & TRANSPORT_PUSH_DRY_RUN)) {
+ struct ref *ref;
+ for (ref = remote_refs; ref; ref = ref->next)
+ update_tracking_ref(transport->remote, ref, verbose);
+ }
+
+ if (!ret && !refs_pushed(remote_refs))
+ fprintf(stderr, "Everything up-to-date\n");
+ return ret;
+ }
+ return 1;
}
const struct ref *transport_get_remote_refs(struct transport *transport)
{
if (!transport->remote_refs)
- transport->remote_refs = transport->get_refs_list(transport);
+ transport->remote_refs = transport->get_refs_list(transport, 0);
return transport->remote_refs;
}
int (*set_option)(struct transport *connection, const char *name,
const char *value);
- struct ref *(*get_refs_list)(struct transport *transport);
+ struct ref *(*get_refs_list)(struct transport *transport, int for_push);
int (*fetch)(struct transport *transport, int refs_nr, const struct ref **refs);
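+ /*
+  * push_refs() takes a list of refs already matched against the remote;
+  * the older push() callback below still works on raw refspecs.
+  */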
+ int (*push_refs)(struct transport *transport, struct ref *refs, int flags);
int (*push)(struct transport *connection, int refspec_nr, const char **refspec, int flags);
int (*disconnect)(struct transport *connection);
#include "unpack-trees.h"
#include "progress.h"
#include "refs.h"
+#include "attr.h"
/*
* Error messages expected by scripts out of plumbing commands such as
cnt = 0;
}
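+ /*
+  * Let the attribute code read .gitattributes from the index being
+  * checked out rather than from the working tree.
+  */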
+ git_attr_set_direction(GIT_ATTR_CHECKOUT, &o->result);
for (i = 0; i < index->cache_nr; i++) {
struct cache_entry *ce = index->cache[i];
}
}
stop_progress(&progress);
+ git_attr_set_direction(GIT_ATTR_CHECKIN, NULL);
return errs != 0;
}
static void report(const char *prefix, const char *err, va_list params)
{
- char msg[256];
+ char msg[1024];
vsnprintf(msg, sizeof(msg), err, params);
fprintf(stderr, "%s%s\n", prefix, msg);
}