/config.mak.autogen
/config.mak.append
/configure
-/unicode
/tags
/TAGS
/cscope*
# The Linux build installs the defined dependency versions below.
# The OS X build installs the latest available versions. Keep that
# in mind when you encounter a broken OS X build!
- - LINUX_P4_VERSION="16.1"
- - LINUX_GIT_LFS_VERSION="1.2.0"
+ - LINUX_P4_VERSION="16.2"
+ - LINUX_GIT_LFS_VERSION="1.5.2"
- DEFAULT_TEST_TARGET=prove
- GIT_PROVE_OPTS="--timer --jobs 3 --state=failed,slow,save"
- GIT_TEST_OPTS="--verbose-log"
--- /dev/null
+Git v2.10.3 Release Notes
+=========================
+
+Fixes since v2.10.2
+-------------------
+
+ * Extract a small helper out of the function that reads the authors
+ script file "git am" internally uses.
+ This by itself is not useful until a second caller appears in the
+   future for the "rebase -i" helper.
+
+ * The command-line completion script (in contrib/) learned to
+   complete the negative end of a reference, e.g. "git cmd ^mas<HT>"
+   to "git cmd ^master".
+
+ * "git send-email" attempts to pick up valid e-mails from the
+ trailers, but people in real world write non-addresses there, like
+ "Cc: Stable <add@re.ss> # 4.8+", which broke the output depending
+ on the availability and vintage of Mail::Address perl module.
+
+ * The code that we have used for the past 10+ years to cycle
+ 4-element ring buffers turns out to be not quite portable in
+   a theoretical world.
+
+ * "git daemon" used fixed-length buffers to turn URL to the
+ repository the client asked for into the server side directory
+ path, using snprintf() to avoid overflowing these buffers, but
+ allowed possibly truncated paths to the directory. This has been
+ tightened to reject such a request that causes overlong path to be
+ required to serve.
+
+ * Recent update to git-sh-setup (a library of shell functions that
+ are used by our in-tree scripted Porcelain commands) included
+ another shell library git-sh-i18n without specifying where it is,
+ relying on the $PATH. This has been fixed to be more explicit by
+   prefixing with $(git --exec-path) output.
+
+ * Fix for a racy false-positive test failure.
+
+ * Portability update and workaround for builds on recent Mac OS X.
+
+ * Update to the test framework made in 2.9 timeframe broke running
+ the tests under valgrind, which has been fixed.
+
+ * Improve the rule to convert "unsigned char [20]" into "struct
+   object_id *" in contrib/coccinelle/.
+
+Also contains minor documentation updates and code clean-ups.
* Even though "git hash-object", which is a tool to take an
on-filesystem data stream and put it into the Git object store,
- allowed to perform the "outside-world-to-Git" conversions (e.g.
+ can perform "outside-world-to-Git" conversions (e.g.
end-of-line conversions and application of the clean-filter), and
- it had the feature on by default from very early days, its reverse
+ it has had this feature on by default from very early days, its reverse
operation "git cat-file", which takes an object from the Git object
- store and externalize for the consumption by the outside world,
+ store and externalizes it for consumption by the outside world,
lacked an equivalent mechanism to run the "Git-to-outside-world"
conversion. The command learned the "--filters" option to do so.
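
   A minimal illustration (assuming a tracked path "README" that has
   end-of-line conversion or a smudge filter configured):

       git cat-file --filters HEAD:README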
- * Output from "git diff" can be made easier to read by selecting
+ * Output from "git diff" can be made easier to read by intelligently selecting
which lines are common and which lines are added/deleted
- intelligently when the lines before and after the changed section
- are the same. A command line option is added to help with the
- experiment to find a good heuristics.
+ when the lines before and after the changed section
+ are the same. A command line option (--indent-heuristic) and a
+ configuration variable (diff.indentHeuristic) are added to help with the
+ experiment to find good heuristics.
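+
+   A minimal illustration (the option and the configuration variable
+   mentioned above are equivalent ways to opt in):
+
+       git diff --indent-heuristic
+       git config diff.indentHeuristic true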
* In some projects, it is common to use "[RFC PATCH]" as the subject
prefix for a patch meant for discussion rather than application. A
- new option "--rfc" is a short-hand for "--subject-prefix=RFC PATCH"
+   new format-patch option "--rfc" is a short-hand for
+   "--subject-prefix=RFC PATCH"
to help the participants of such projects.
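
   For example, to prepare the most recent commit as a discussion
   patch (illustrative):

       git format-patch --rfc -1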
- * "git add --chmod=+x <pathspec>" added recently only toggled the
+ * "git add --chmod={+,-}x <pathspec>" only changed the
executable bit for paths that are either new or modified. This has
- been corrected to flip the executable bit for all paths that match
+ been corrected to change the executable bit for all paths that match
the given pathspec.
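
   For example ("build.sh" is a placeholder path):

       git add --chmod=+x build.sh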
* When "git format-patch --stdout" output is placed as an in-body
- header and it uses the RFC2822 header folding, "git am" failed to
+ header and it uses RFC2822 header folding, "git am" fails to
put the header line back into a single logical line. The
underlying "git mailinfo" was taught to handle this properly.
* "gitweb" can spawn "highlight" to show blob contents with
(programming) language-specific syntax highlighting, but only
when the language is known. "highlight" can however be told
- to make the guess itself by giving it "--force" option, which
+ to guess the language itself by giving it "--force" option, which
has been enabled.
* "git gui" l10n to Portuguese.
history leading to nth parent was looking the other way.
* In recent versions of cURL, GSSAPI credential delegation is
- disabled by default due to CVE-2011-2192; introduce a configuration
- to selectively allow enabling this.
+ disabled by default due to CVE-2011-2192; introduce a http.delegation
+ configuration variable to selectively allow enabling this.
(merge 26a7b23429 ps/http-gssapi-cred-delegation later to maint).
* "git mergetool" learned to honor "-O<orderfile>" to control the
order of paths to present to the end user.
* "git diff/log --ws-error-highlight=<kind>" lacked the corresponding
- configuration variable to set it by default.
+ configuration variable (diff.wsErrorHighlight) to set it by default.
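+
+   For example, to enable it persistently (illustrative value):
+
+       git config diff.wsErrorHighlight all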
- * "git ls-files" learned "--recurse-submodules" option that can be
- used to get a listing of tracked files across submodules (i.e. this
- only works with "--cached" option, not for listing untracked or
+ * "git ls-files" learned the "--recurse-submodules" option
+ to get a listing of tracked files across submodules (i.e. this
+ only works with the "--cached" option, not for listing untracked or
ignored files). This would be a useful tool to sit on the upstream
side of a pipe that is read with xargs to work on all working tree
files from the top-level superproject.
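
   A minimal illustration (assumes the submodules are checked out; the
   grep pattern is a placeholder):

       git ls-files --recurse-submodules -z | xargs -0 grep -l "FIXME"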
implementations of XDG Secret Service API has been added to
contrib/credential/.
- * The GPG verification status shown in "%G?" pretty format specifier
+ * The GPG verification status shown by the "%G?" pretty format specifier
was not rich enough to differentiate a signature made by an expired
key, a signature made by a revoked key, etc. New output letters
have been assigned to express them.
learned to turn "git describe" output (e.g. v2.9.3-599-g2376d31787)
into clickable links in its output.
- * When new paths were added by "git add -N" to the index, it was
- enough to circumvent the check by "git commit" to refrain from
- making an empty commit without "--allow-empty". The same logic
- prevented "git status" to show such a path as "new file" in the
+ * "git commit" created an empty commit when invoked with an index
+ consisting solely of intend-to-add paths (added with "git add -N").
+ It now requires the "--allow-empty" option to create such a commit.
+   The same logic prevented "git status" from showing such paths as
+   "new files" in the "Changes not staged for commit" section.
- * The smudge/clean filter API expect an external process is spawned
- to filter the contents for each path that has a filter defined. A
+ * The smudge/clean filter API spawns an external process
+ to filter the contents of each path that has a filter defined. A
new type of "process" filter API has been added to allow the first
request to run the filter for a path to spawn a single process, and
- all filtering need is served by this single process for multiple
+ all filtering is served by this single process for multiple
paths, reducing the process creation overhead.
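
   A minimal illustration ("myfilter" and "my-filter-daemon" are
   hypothetical names; the daemon has to speak the long-running
   filter protocol):

       git config filter.myfilter.process my-filter-daemon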
* The user always has to say "stash@{$N}" when naming a single
script file "git am" internally uses.
(merge a77598e jc/am-read-author-file later to maint).
- * Lifts calls to exit(2) and die() higher in the callchain in
+ * Lift calls to exit(2) and die() higher in the callchain in
sequencer.c files so that more helper functions in it can be used
by callers that want to handle error conditions themselves.
does not advertise any refs, but "git fetch" was not prepared to
see such an advertisement. When the other side disconnects without
giving any ref advertisement, we used to say "there may not be a
- repository at that URL", but we may have seen other advertisement
+ repository at that URL", but we may have seen other advertisements
like "shallow" and ".have" in which case we definitely know that a
repository is there. The code to detect this case has also been
updated.
* Some codepaths in "git pack-objects" were not ready to use an
- existing pack bitmap; now they are and as the result they have
+ existing pack bitmap; now they are and as a result they have
become faster.
* The codepath in "git fsck" to detect malformed tree objects has
packfile first.
(merge c9af708b1a jk/pack-objects-optim-mru later to maint).
- * Codepaths involved in interacting alternate object store have
+ * Codepaths involved in interacting with alternate object stores have
been cleaned up.
* In order for the receiving end of "git push" to inspect the
from the sending end need to be made available to the hook and
the mechanism for the connectivity check, and this was done
traditionally by storing the objects in the receiving repository
- and letting "git gc" to expire it. Instead, store the newly
+ and letting "git gc" expire them. Instead, store the newly
received objects in a temporary area, and make them available by
reusing the alternate object store mechanism to expose them only while we
decide if we accept the check, and once we decide, either migrate
replaced with a priority queue.
* "git diff --no-index" codepath has been updated not to try to peek
- into .git/ directory that happens to be under the current
+ into a .git/ directory that happens to be under the current
directory, when we know we are operating outside any repository.
* Update of the sequencer codebase to make it reusable to reimplement
holding onto them. Use O_CLOEXEC flag to open files in various
codepaths.
- * Update "interpret-trailers" machinery and teaches it that people in
- real world write all sorts of crufts in the "trailer" that was
+ * Update "interpret-trailers" machinery and teach it that people in
+ the real world write all sorts of cruft in the "trailer" that was
originally designed to have the neat-o "Mail-Header: like thing"
and nothing else.
has been removed.
* Having a submodule whose ".git" repository is somehow corrupt
- caused a few commands that recurse into submodules loop forever.
+ caused a few commands that recurse into submodules to loop forever.
* "git symbolic-ref -d HEAD" happily removes the symbolic ref, but
the resulting repository becomes an invalid one. Teach the command
forgot to update "git gui" to look at the configuration to match
this change.
- * "git add --chmod=+x" added recently lacked documentation, which has
+ * "git add --chmod={+,-}x" added recently lacked documentation, which has
been corrected.
* "git log --cherry-pick" used to include merge commits as candidates
to be matched up with other commits, resulting in a lot of wasted time.
- The patch-id generation logic has been updated to ignore merges to
+ The patch-id generation logic has been updated to ignore merges and
avoid the wastage.
* The http transport (with curl-multi option, which is the default
line of the next one. This process may have to merge two adjacent
hunks, but the code forgot to do so in some cases.
- * Performance tests done via "t/perf" did not use the same set of
+ * Performance tests done via "t/perf" did not use the right
build configuration if the user relied on autoconf generated
configuration.
* "git format-patch --base=..." feature that was recently added
- showed the base commit information after "-- " e-mail signature
+ showed the base commit information after the "-- " e-mail signature
line, which turned out to be inconvenient. The base information
has been moved above the signature line.
* More i18n.
* Even when "git pull --rebase=preserve" (and the underlying "git
- rebase --preserve") can complete without creating any new commit
- (i.e. fast-forwards), it still insisted on having a usable ident
+ rebase --preserve") can complete without creating any new commits
+ (i.e. fast-forwards), it still insisted on having usable ident
information (read: user.email is set correctly), which was less
than nice. As the underlying commands used inside "git rebase"
would fail with a more meaningful error message and advice text
* Documentation around tools to import from CVS was fairly outdated.
* "git clone --recurse-submodules" lost the progress eye-candy in
- recent update, which has been corrected.
+ a recent update, which has been corrected.
* A low-level function verify_packfile() was meant to show errors
that were detected without dying itself, but under some conditions
to a design bug, which has been fixed.
* In the codepath that comes up with the hostname to be used in an
- e-mail when the user didn't tell us, we looked at ai_canonname
+ e-mail when the user didn't tell us, we looked at the ai_canonname
field in struct addrinfo without making sure it is not NULL first.
* "git worktree", even though it used the default_abbrev setting that
- ought to be affected by core.abbrev configuration variable, ignored
+ ought to be affected by the core.abbrev configuration variable, ignored
the variable setting. The command has been taught to read the
default set of configuration variables to correct this.
* "git init" tried to record core.worktree in the repository's
- 'config' file when GIT_WORK_TREE environment variable was set and
+ 'config' file when the GIT_WORK_TREE environment variable was set and
it was different from where GIT_DIR appears as ".git" at its top,
but the logic was faulty when .git is a "gitdir:" file that points
at the real place, causing trouble in working trees that are
managed by "git worktree". This has been corrected.
* Codepaths that read from an on-disk loose object were too loose in
- validating what they are reading is a proper object file and
+ validating that they are reading a proper object file and
sometimes read past the data they read from the disk, which has
been corrected. H/t to Gustavo Grieco for reporting.
time, and "git gui" was the last in-tree user of the syntax. This
is finally fixed, so that we can move forward with the deprecation.
- * An author name, that spelled a backslash-quoted double quote in the
- human readable part "My \"double quoted\" name", was not unquoted
+ * An author name that has a backslash-quoted double quote in the
+   human readable part ("My \"double quoted\" name") was not unquoted
correctly while applying a patch from a piece of e-mail.
* Doc update to clarify what "log -3 --reverse" does.
has been fixed; this did not affect any existing code as nobody
tried to write anything after the padding on such a line, though.
- * The code that parses the format parameter of for-each-ref command
+ * The code that parses the format parameter of the for-each-ref command
has seen a micro-optimization.
- * When we started cURL to talk to imap server when a new enough
- version of cURL library is available, we forgot to explicitly add
+ * When we started to use cURL to talk to an imap server, we forgot
+   to explicitly add
imap(s):// before the destination. To some folks, that didn't work
and the library tried to make HTTP(s) requests instead.
"Give me only the history since that version".
(merge cccf74e2da nd/shallow-deepen later to maint).
- * It is a common mistake to say "git blame --reverse OLD path",
- expecting that the command line is dwimmed as if asking how lines
+ * "git blame --reverse OLD path" is now DWIMmed to show how lines
in path in an old revision OLD have survived up to the current
commit.
(merge e1d09701a4 jc/blame-reverse later to maint).
- * http.emptyauth configuration is a way to allow an empty username to
+ * The http.emptyauth configuration variable is a way to allow an
+   empty username to
pass when attempting to authenticate using mechanisms like
Kerberos. We took an unspecified (NULL) username and sent ":"
(i.e. no username, no password) to CURLOPT_USERPWD, but did not do
-p <paths>" adds to the current contents of the index to come up
with what to commit.
- * A stray symbolic link in $GIT_DIR/refs/ directory could make name
+ * A stray symbolic link in the $GIT_DIR/refs/ directory could make name
resolution loop forever, which has been corrected.
* The "submodule.<name>.path" stored in .gitmodules is never copied
to .git/config and such a key in .git/config has no meaning, but
- the documentation described it and submodule.<name>.url next to
- each other as if both belong to .git/config. This has been fixed.
+ the documentation described it next to submodule.<name>.url
+ as if both belong to .git/config. This has been fixed.
- * In a worktree connected to a repository elsewhere, created via "git
+ * In a worktree created via "git
worktree", "git checkout" attempts to protect users from confusion
by refusing to check out a branch that is already checked out in
another worktree. However, this also prevented checking out a
- branch, which is designated as the primary branch of a bare
- reopsitory, in a worktree that is connected to the bare
+ branch which is designated as the primary branch of a bare
+ repository, in a worktree that is connected to the bare
repository. The check has been corrected to allow it.
* "git rebase" immediately after "git clone" failed to find the fork
* When fetching from a remote that has many tags that are irrelevant
to branches we are following, we used to waste way too many cycles
- when checking if the object pointed at by a tag (that we are not
+ checking if the object pointed at by a tag (that we are not
going to fetch!) exists in our repository too carefully.
* Protect our code from over-eager compilers.
"." instead of the branch name; the documentation has been updated
to describe it.
- * A hot-fix for a test added by a recent topic that went to both
- 'master' and 'maint' already.
-
* "git send-email" attempts to pick up valid e-mails from the
- trailers, but people in real world write non-addresses there, like
+ trailers, but people in the real world write non-addresses there, like
"Cc: Stable <add@re.ss> # 4.8+", which broke the output depending
- on the availability and vintage of Mail::Address perl module.
+ on the availability and vintage of the Mail::Address perl module.
(merge dcfafc5214 mm/send-email-cc-cruft-after-address later to maint).
- * The Travis CI configuration we ship ran the tests with --verbose
+ * The Travis CI configuration we ship ran the tests with the --verbose
option but this risks non-TAP output that happens to be "ok" to be
misinterpreted as TAP signalling a test that passed. This resulted
- in unnecessary failure. This has been corrected by introducing a
+ in unnecessary failures. This has been corrected by introducing a
new mode to run our tests in the test harness to send the verbose
output separately to the log file.
- * Some AsciiDoc formatter mishandles a displayed illustration with
+ * Some AsciiDoc formatters mishandle a displayed illustration with
tabs in it. Adjust a few of them in merge-base documentation to
work around them.
- * A minor regression fix for "git submodule" that was introduced
+ * Fixed a minor regression in "git submodule" that was introduced
when more helper functions were reimplemented in C.
(merge 77b63ac31e sb/submodule-ignore-trailing-slash later to maint).
a theoretical world.
(merge bb84735c80 rs/ring-buffer-wraparound later to maint).
- * "git daemon" used fixed-length buffers to turn URL to the
+ * "git daemon" used fixed-length buffers to turn URLs to the
repository the client asked for into the server side directory
- path, using snprintf() to avoid overflowing these buffers, but
+ paths, using snprintf() to avoid overflowing these buffers, but
allowed possibly truncated paths to the directory. This has been
- tightened to reject such a request that causes overlong path to be
- required to serve.
+ tightened to reject such a request that causes an overlong path to be
+ served.
(merge 6bdb0083be jk/daemon-path-ok-check-truncation later to maint).
* Recent update to git-sh-setup (a library of shell functions that
are used by our in-tree scripted Porcelain commands) included
another shell library git-sh-i18n without specifying where it is,
relying on the $PATH. This has been fixed to be more explicit by
- prefixing $(git --exec-path) output in front.
+ prefixing with $(git --exec-path) output.
(merge 1073094f30 ak/sh-setup-dot-source-i18n-fix later to maint).
* Fix for a racy false-positive test failure.
caused the command to segfault when on an unborn branch.
(merge 84679d470d jc/for-each-ref-head-segfault-fix later to maint).
- * "git rebase -i" did not work well with core.commentchar
+ * "git rebase -i" did not work well with the core.commentchar
configuration variable for two reasons, both of which have been
fixed.
(merge 882cd23777 js/rebase-i-commentchar-fix later to maint).
--- /dev/null
+Git 2.12 Release Notes
+======================
+
+Backward compatibility notes.
+
+ * Using an empty string to mean 'everything matches' still only
+   triggers a warning, and Git asks users to use a more explicit '.'
+ instead. The hope is that existing users will not mind this
+ change, and eventually the warning can be turned into a hard error,
+ upgrading the deprecation into removal of this (mis)feature. That
+ is not scheduled to happen in the upcoming release (yet).
+
+ * The historical argument order "git merge <msg> HEAD <commit>..."
+ has been deprecated for quite some time, and will be removed in the
+ upcoming release.
+
+
+Updates since v2.11
+-------------------
+
+UI, Workflows & Features
+
+ * Various updates to "git p4".
+
+ * "git p4" didn't interact with the internal of .git directory
+ correctly in the modern "git-worktree"-enabled world.
+
+ * "git branch --list" and friends learned "--ignore-case" option to
+ optionally sort branches and tags case insensitively.
+
+ * In addition to %(subject), %(body), "log --pretty=format:..."
+ learned a new placeholder %(trailers).
+
+ * "git rebase" learned "--quit" option, which allows a user to
+ remove the metadata left by an earlier "git rebase" that was
+ manually aborted without using "git rebase --abort".
+
+ * "git clone --reference $there --recurse-submodules $super" has been
+ taught to guess repositories usable as references for submodules of
+ $super that are embedded in $there while making a clone of the
+ superproject borrow objects from $there; extend the mechanism to
+ also allow submodules of these submodules to borrow repositories
+ embedded in these clones of the submodules embedded in the clone of
+ the superproject.
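+
+   A minimal illustration (the reference path and URL are
+   placeholders):
+
+       git clone --reference /srv/mirror-of-super.git \
+           --recurse-submodules https://example.com/super.git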
+
+
+Performance, Internal Implementation, Development Support etc.
+
+ * Commands that operate on a log message and add lines to the trailer
+ blocks, such as "format-patch -s", "cherry-pick (-x|-s)", and
+ "commit -s", have been taught to use the logic of and share the
+   code with "git interpret-trailers".
+
+ * The default Travis-CI configuration specifies newer P4 and GitLFS.
+ (merge 5f703e8f02 ls/travis-update-p4-and-lfs later to maint).
+
+ * The "fast hash" that had disastrous performance issues in some
+ corner cases has been retired from the internal diff.
+
+ * The character width table has been updated to match Unicode 9.0.
+ (merge 9e6e9aefdf bb/unicode-9.0 later to maint).
+
+ * Update the procedure to generate "tags" for developer support.
+ (merge 046e4c1c09 jk/make-tags-find-sources-tweak later to maint).
+
+
+
+Also contains various documentation updates and code clean-ups.
+
+Fixes since v2.10
+-----------------
+
+Unless otherwise noted, all the fixes since v2.9 in the maintenance
+track are contained in this release (see the maintenance releases'
+notes for details).
+
+ * We often decide if a session is interactive by checking if the
+ standard I/O streams are connected to a TTY, but isatty() that
+   comes with Windows incorrectly returned true when used on NUL
+   (i.e. the equivalent of /dev/null). This has been fixed.
+ (merge cbb3f3c9b1 js/mingw-isatty later to maint).
+
+ * "git svn" did not work well with path components that are "0", and
+   some configuration variables it uses were not documented.
+ (merge ea9a93dcc2 ew/svn-fixes later to maint).
+
+ * "git rev-parse --symbolic" failed with a more recent notation like
+ "HEAD^-1" and "HEAD^!".
+ (merge a2e7b04c44 jk/rev-parse-symbolic-parents-fix later to maint).
+
+ * An empty directory in a working tree that can simply be nuked used
+ to interfere while merging or cherry-picking a change to create a
+   submodule directory there, which has been fixed.
+ (merge 5423d2e700 dt/empty-submodule-in-merge later to maint).
+
+ * The code in "git push" to compute if any commit being pushed in the
+ superproject binds a commit in a submodule that hasn't been pushed
+ out was overly inefficient, making it unusable even for a small
+   project that does not have any submodule but has a reasonable
+ number of refs.
+ (merge 250ab24ab3 hv/submodule-not-yet-pushed-fix later to maint).
+
+ * "git push --dry-run --recurse-submodule=on-demand" wasn't
+ "--dry-run" in the submodules.
+ (merge 0301c821c5 bw/push-dry-run later to maint).
+
+ * The output from "git worktree list" was made in readdir() order,
+ and was unstable.
+ (merge 4df1d4d466 nd/worktree-list-fixup later to maint).
+
+ * The mergetool.<tool>.trustExitCode configuration variable did not
+   apply to built-in tools, but now it does.
+ (merge 2967284456 da/mergetool-trust-exit-code later to maint).
+
+ * "git p4" LFS support was broken when LFS stores an empty blob.
+ (merge d5eb3cf5e7 ls/p4-empty-file-on-lfs later to maint).
+
+ * A corner-case regression in merge-recursive that crept in during
+   the 2.10 development cycle has been fixed.
+ (merge 1c25d2d8ed jc/renormalize-merge-kill-safer-crlf later to maint).
+
+ * Transport with dumb http can be fooled into following foreign URLs
+   that the end user does not intend to visit, especially with the server
+ side redirects and http-alternates mechanism, which can lead to
+ security issues. Tighten the redirection and make it more obvious
+ to the end user when it happens.
+ (merge cb4d2d35c4 jk/http-walker-limit-redirect-2.9 later to maint).
+
+ * Update the error messages from the dumb-http client when it fails
+   to obtain loose objects; we used to give a sensible error message
+   only upon 404, but we now forbid unexpected redirects, which need to
+ be reported with something sensible.
+ (merge 3680f16f9d jk/http-walker-limit-redirect later to maint).
+
+ * When the diff.renames configuration is on (and with Git 2.9 and
+   later, it is enabled by default, which made it worse), "git stash"
+   misbehaved if a file is removed and another file with very
+   similar content is added.
+ (merge 9d4e28ead5 jk/stash-disable-renames-internally later to maint).
+
+ * "git diff --no-index" did not take "--no-abbrev" option.
+ (merge 43d1948b7b jb/diff-no-index-no-abbrev later to maint).
+
+ * "git difftool --dir-diff" had a minor regression when started from
+ a subdirectory, which has been fixed.
+ (merge 853e10c197 da/difftool-dir-diff-fix later to maint).
+
+ * "git commit --allow-empty --only" (no pathspec) with dirty index
+ ought to be an acceptable way to create a new commit that does not
+ change any paths, but it was forbidden, perhaps because nobody
+ needed it so far.
+ (merge beb635ca9c ak/commit-only-allow-empty later to maint).
+
+ * Git 2.11 had a minor regression in "merge --ff-only" when it competed
+   with another process that simultaneously attempted to update the
+ index. We used to explain what went wrong with an error message,
+ but the new code silently failed. The error message has been
+ resurrected.
+
+ * A pathname that begins with "//" or "\\" on Windows is special but
+   the path normalization logic was unaware of it.
+ (merge 7814fbe3f1 js/normalize-path-copy-ceil later to maint).
+
+ * "git pull --rebase", when there is no new commits on our side since
+ we forked from the upstream, should be able to fast-forward without
+ invoking "git rebase", but it didn't.
+ (merge 33b842a1e9 jc/pull-rebase-ff later to maint).
+
+ * The way to specify hotkeys to "xxdiff" that is used by "git
+ mergetool" has been modernized to match recent versions of xxdiff.
+ (merge 6cf5f6cef7 da/mergetool-xxdiff-hotkey later to maint).
+
+ * Unlike "git am --abort", "git cherry-pick --abort" moved HEAD back
+ to where cherry-pick started while picking multiple changes, when
+ the cherry-pick stopped to ask for help from the user, and the user
+ did "git reset --hard" to a different commit in order to re-attempt
+ the operation.
+ (merge ce73bb22d8 sb/sequencer-abort-safety later to maint).
+
+ * Code cleanup in shallow boundary computation.
+ (merge 649b0c316a nd/shallow-fixup later to maint).
+
+ * A recent update to receive-pack to make it easier to drop garbage
+ objects made it clear that GIT_ALTERNATE_OBJECT_DIRECTORIES cannot
+ have a pathname with a colon in it (no surprise!), and this in turn
+ made it impossible to push into a repository at such a path. This
+ has been fixed by introducing a quoting mechanism used when
+ appending such a path to the colon-separated list.
+ (merge 5e74824fac jk/quote-env-path-list-component later to maint).
+
+ * The function usage_msg_opt() has been updated to say "fatal:"
+ before the custom message programs give, when they want to die
+ with a message about wrong command line options followed by the
+ standard usage string.
+ (merge 87433261a4 jk/parseopt-usage-msg-opt later to maint).
+
+ * "git index-pack --stdin" needs an access to an existing repository,
+ but "git index-pack file.pack" to generate an .idx file that
+ corresponds to a packfile does not.
+ (merge 29401e1575 jk/index-pack-wo-repo-from-stdin later to maint).
+
+ * Fix for NDEBUG builds.
+ (merge 08414938a2 jt/mailinfo-fold-in-body-headers later to maint).
+
+ * Other minor doc, test and build updates and code cleanups.
+ (merge fa6ca11105 nd/qsort-in-merge-recursive later to maint).
+ (merge fa3142c919 ak/lazy-prereq-mktemp later to maint).
+ (merge 9c48b4fb23 ls/t0021-fixup later to maint).
+ (merge 584f99c87b sb/unpack-trees-grammofix later to maint).
+ (merge 54471fdcc3 jk/readme-gmane-is-no-more later to maint).
+ (merge 9e189f1a5c sb/t3600-cleanup later to maint).
+ (merge e2c20be57c lr/doc-fix-cet later to maint).
+ (merge 47437fd3bd kh/tutorial-grammofix later to maint).
of common USER_AGENT strings (but not including those like git/1.7.1).
Can be overridden by the `GIT_HTTP_USER_AGENT` environment variable.
+http.followRedirects::
+ Whether git should follow HTTP redirects. If set to `true`, git
+ will transparently follow any redirect issued by a server it
+ encounters. If set to `false`, git will treat all redirects as
+ errors. If set to `initial`, git will follow redirects only for
+ the initial request to a remote, but not for subsequent
+ follow-up HTTP requests. Since git uses the redirected URL as
+ the base for the follow-up requests, this is generally
+ sufficient. The default is `initial`.
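++
+For example (the URL is a placeholder), the per-URL form described
+below can keep the default while refusing redirects from one mirror:
++
+------------
+git config http."https://mirror.example.com/".followRedirects false
+------------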
+
http.<url>.*::
Any of the http.* options above can be applied selectively to some URLs.
For a config key to match a URL, each element of the config key is
It is `<unix timestamp> <time zone offset>`, where `<unix
timestamp>` is the number of seconds since the UNIX epoch.
`<time zone offset>` is a positive or negative offset from UTC.
- For example CET (which is 2 hours ahead UTC) is `+0200`.
+ For example CET (which is 1 hour ahead of UTC) is `+0100`.
RFC 2822::
The standard email format as described by RFC 2822, for example
default to color output.
Same as `--color=never`.
+-i::
+--ignore-case::
+ Sorting and filtering branches are case insensitive.
+
--column[=<options>]::
--no-column::
Display branch listing in columns. See configuration variable
If this option is specified together with `--amend`, then
no paths need to be specified, which can be used to amend
the last commit without committing changes that have
- already been staged.
+	already been staged. If used together with `--allow-empty`,
+ paths are also not required, and an empty commit will be created.
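++
+A minimal illustration (the message is a placeholder):
++
+------------
+git commit --only --allow-empty -m "start the next release cycle"
+------------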
-u[<mode>]::
--untracked-files[=<mode>]::
Only list refs which contain the specified commit (HEAD if not
specified).
+--ignore-case::
+ Sorting and filtering refs are case insensitive.
+
FIELD NAMES
-----------
line is 'contents:body', where body is all of the lines after the first
blank line. The optional GPG signature is `contents:signature`. The
first `N` lines of the message are obtained using `contents:lines=N`.
+Additionally, the trailers as interpreted by linkgit:git-interpret-trailers[1]
+are obtained as 'contents:trailers'.
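+
+For example, to list each tag together with its trailers (illustrative):
+
+------------
+git for-each-ref --format='%(refname:short): %(contents:trailers)' refs/tags
+------------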
For sorting purposes, fields with numeric values sort in numeric order
(`objectsize`, `authordate`, `committerdate`, `creatordate`, `taggerdate`).
submit manually or revert. This option always stops after the
first (oldest) commit. Git tags are not exported to p4.
+--shelve::
+	Instead of submitting, create a series of shelved changelists.
+ After creating each shelve, the relevant files are reverted/deleted.
+	If you have multiple commits pending, multiple shelves will be created.
+
+--update-shelve CHANGELIST::
+ Update an existing shelved changelist with this commit. Implies
+ --shelve.
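++
+For example (the changelist number is a placeholder):
++
+------------
+git p4 submit --shelve
+git p4 submit --update-shelve 12345
+------------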
+
--conflict=(ask|skip|quit)::
Conflicts can occur when applying a commit to p4. When this
happens, the default behavior ("ask") is to prompt whether to
Client specified as an option to all p4 commands, with
'-c <client>', including the client spec.
+git-p4.retries::
+ Specifies the number of times to retry a p4 command (notably,
+ 'p4 sync') if the network times out. The default value is 3.
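++
+For example, to allow more retries over a flaky connection:
++
+------------
+git config git-p4.retries 10
+------------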
+
Clone and sync variables
~~~~~~~~~~~~~~~~~~~~~~~~
git-p4.syncFromOrigin::
[<upstream> [<branch>]]
'git rebase' [-i | --interactive] [options] [--exec <cmd>] [--onto <newbase>]
--root [<branch>]
-'git rebase' --continue | --skip | --abort | --edit-todo
+'git rebase' --continue | --skip | --abort | --quit | --edit-todo
DESCRIPTION
-----------
will be reset to where it was when the rebase operation was
started.
+--quit::
+ Abort the rebase operation but HEAD is not reset back to the
+ original branch. The index and working tree are also left
+ unchanged as a result.
+
--keep-empty::
Keep the commits that do not change anything from their
parents in the result.
When retrieving svn commits into Git (as part of 'fetch', 'rebase', or
'dcommit' operations), look for the first `From:` or `Signed-off-by:` line
in the log message and use that as the author string.
++
+[verse]
+config key: svn.useLogAuthor
+
--add-author-from::
When committing to svn from Git (as part of 'commit-diff', 'set-tree' or 'dcommit'
operations), if the existing log message doesn't already have a
`From:` or `Signed-off-by:` line, append a `From:` line based on the
Git commit's author string. If you use this, then `--use-log-author`
will retrieve a valid author string for all commits.
-
++
+[verse]
+config key: svn.addAuthorFrom
ADVANCED OPTIONS
----------------
variable if it exists, or lexicographic order otherwise. See
linkgit:git-config[1].
+-i::
+--ignore-case::
+ Sorting and filtering tags are case insensitive.
+
--column[=<options>]::
--no-column::
Display tag listing in columns. See configuration variable
branch of the `git.git` repository.
Documentation for older releases are available here:
+* link:v2.11.0/git.html[documentation for release 2.11]
+
+* release notes for
+ link:RelNotes/2.11.0.txt[2.11].
+
* link:v2.10.2/git.html[documentation for release 2.10.2]
* release notes for
specifies a ":" separated (on Windows ";" separated) list
of Git object directories which can be used to search for Git
objects. New objects will not be written to these directories.
++
+ Entries that begin with `"` (double-quote) will be interpreted
+ as C-style quoted paths, removing leading and trailing
+ double-quotes and respecting backslash escapes. E.g., the value
+ `"path-with-\"-and-:-in-it":vanilla-path` has two paths:
+ `path-with-"-and-:-in-it` and `vanilla-path`.
`GIT_DIR`::
If the `GIT_DIR` environment variable is set then it
The core Git is often called "plumbing", with the prettier user
interfaces on top of it called "porcelain". You may not want to use the
plumbing directly very often, but it can be good to know what the
-plumbing does for when the porcelain isn't flushing.
+plumbing does when the porcelain isn't flushing.
Back when this document was originally written, many porcelain
commands were shell scripts. For simplicity, it still uses them as
will do it for you. If you followed the tutorial examples, you
would have accumulated about 17 objects in `.git/objects/??/`
directories by now. 'git repack' tells you how many objects it
-packed, and stores the packed file in `.git/objects/pack`
+packed, and stores the packed file in the `.git/objects/pack`
directory.
[NOTE]
A recommended work cycle for a "subsystem maintainer" who works
on that project and has their own "public repository" goes like this:
-1. Prepare your work repository, by 'git clone' the public
+1. Prepare your work repository, by running 'git clone' on the public
repository of the "project lead". The URL used for the
initial cloning is stored in the remote.origin.url
configuration variable.
Working with Others, Shared Repository Style
--------------------------------------------
-If you are coming from CVS background, the style of cooperation
+If you are coming from a CVS background, the style of cooperation
suggested in the previous section may be new to you. You do not
-have to worry. Git supports "shared public repository" style of
+have to worry. Git supports the "shared public repository" style of
cooperation you are probably more familiar with as well.
See linkgit:gitcvs-migration[7] for the details.
++* [master~2] Pretty-print messages.
------------
-Note that you should not do Octopus because you can. An octopus
+Note that you should not do Octopus just because you can. An octopus
is a valid thing to do and often makes it easier to view the
commit history if you are merging more than two independent
changes at the same time. However, if you have merge conflicts
than given and there are spaces on its left, use those spaces
- '%><(<N>)', '%><|(<N>)': similar to '% <(<N>)', '%<|(<N>)'
respectively, but padding both sides (i.e. the text is centered)
+-%(trailers): display the trailers of the body as interpreted by
+ linkgit:git-interpret-trailers[1]
NOTE: Some placeholders may depend on other options given to the
revision traversal engine. For example, the `%g*` reflog options will
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v2.11.0-rc3
+DEF_VER=v2.11.GIT
LF='
'
then
VN=$(cat version) || VN="$DEF_VER"
elif test -d ${GIT_DIR:-.git} -o -f .git &&
- VN=$(git describe --match "v[0-9]*" --abbrev=7 HEAD 2>/dev/null) &&
+ VN=$(git describe --match "v[0-9]*" HEAD 2>/dev/null) &&
case "$VN" in
*$LF*) (exit 1) ;;
v[0-9]*)
#
# Define NATIVE_CRLF if your platform uses CRLF for line endings.
#
-# Define XDL_FAST_HASH to use an alternative line-hashing method in
-# the diff algorithm. It gives a nice speedup if your processor has
-# fast unaligned word loads. Does NOT work on big-endian systems!
-# Enabled by default on x86_64.
-#
# Define GIT_USER_AGENT if you want to change how git identifies itself during
# network interactions. The default is "git/$(GIT_VERSION)".
#
MSGFMT += --check --statistics
endif
-ifneq (,$(XDL_FAST_HASH))
- BASIC_CFLAGS += -DXDL_FAST_HASH
-endif
-
ifdef GMTIME_UNRELIABLE_ERRORS
COMPAT_OBJS += compat/gmtime.o
BASIC_CFLAGS += -DGMTIME_UNRELIABLE_ERRORS
po/build/locale/%/LC_MESSAGES/git.mo: po/%.po
$(QUIET_MSGFMT)mkdir -p $(dir $@) && $(MSGFMT) -o $@ $<
-FIND_SOURCE_FILES = ( git ls-files '*.[hcS]' 2>/dev/null || \
- $(FIND) . \( -name .git -type d -prune \) \
- -o \( -name '*.[hcS]' -type f -print \) )
+FIND_SOURCE_FILES = ( \
+ git ls-files \
+ '*.[hcS]' \
+ '*.sh' \
+ ':!*[tp][0-9][0-9][0-9][0-9]*' \
+ ':!contrib' \
+ 2>/dev/null || \
+ $(FIND) . \
+ \( -name .git -type d -prune \) \
+ -o \( -name '[tp][0-9][0-9][0-9][0-9]*' -prune \) \
+ -o \( -name contrib -type d -prune \) \
+ -o \( -name build -type d -prune \) \
+ -o \( -name 'trash*' -type d -prune \) \
+ -o \( -name '*.[hcS]' -type f -print \) \
+ -o \( -name '*.sh' -type f -print \) \
+ )
$(ETAGS_TARGET): FORCE
$(RM) $(ETAGS_TARGET)
[Documentation/SubmittingPatches][] for instructions on patch submission).
To subscribe to the list, send an email with just "subscribe git" in
the body to majordomo@vger.kernel.org. The mailing list archives are
-available at http://news.gmane.org/gmane.comp.version-control.git/,
+available at https://public-inbox.org/git,
http://marc.info/?l=git and other archival sites.
The maintainer frequently sends the "What's cooking" reports that
-Documentation/RelNotes/2.11.0.txt
\ No newline at end of file
+Documentation/RelNotes/2.12.0.txt
\ No newline at end of file
state->index_file,
LOCK_DIE_ON_ERROR);
else
- state->newfd = hold_locked_index(state->lock_file, 1);
+ state->newfd = hold_locked_index(state->lock_file, LOCK_DIE_ON_ERROR);
}
if (state->check_index && read_apply_cache(state) < 0) {
exit(3);
}
- fprintf(stderr, _("Some %s revs are not ancestor of the %s rev.\n"
+ fprintf(stderr, _("Some %s revs are not ancestors of the %s rev.\n"
"git bisect cannot work properly in this case.\n"
"Maybe you mistook %s and %s revs?\n"),
term_good, term_bad, term_good, term_bad);
int replace_each_worktree_head_symref(const char *oldref, const char *newref)
{
int ret = 0;
- struct worktree **worktrees = get_worktrees();
+ struct worktree **worktrees = get_worktrees(0);
int i;
for (i = 0; worktrees[i]; i++) {
add_new_files = !take_worktree_changes && !refresh_only;
require_pathspec = !(take_worktree_changes || (0 < addremove_explicit));
- hold_locked_index(&lock_file, 1);
+ hold_locked_index(&lock_file, LOCK_DIE_ON_ERROR);
flags = ((verbose ? ADD_CACHE_VERBOSE : 0) |
(show_only ? ADD_CACHE_PRETEND : 0) |
{
struct lock_file *lock_file = xcalloc(1, sizeof(struct lock_file));
- hold_locked_index(lock_file, 1);
+ hold_locked_index(lock_file, LOCK_DIE_ON_ERROR);
refresh_cache(REFRESH_QUIET);
if (write_locked_index(&the_index, lock_file, COMMIT_LOCK))
die(_("unable to write index file"));
return -1;
lock_file = xcalloc(1, sizeof(struct lock_file));
- hold_locked_index(lock_file, 1);
+ hold_locked_index(lock_file, LOCK_DIE_ON_ERROR);
refresh_cache(REFRESH_QUIET);
return -1;
lock_file = xcalloc(1, sizeof(struct lock_file));
- hold_locked_index(lock_file, 1);
+ hold_locked_index(lock_file, LOCK_DIE_ON_ERROR);
memset(&opts, 0, sizeof(opts));
opts.head_idx = 1;
if (read_state_file(&sb, state, "abort-safety", 1) > 0) {
if (get_oid_hex(sb.buf, &abort_safety))
- die(_("could not parse %s"), am_path(state, "abort_safety"));
+ die(_("could not parse %s"), am_path(state, "abort-safety"));
} else
oidclr(&abort_safety);
if (!oidcmp(&head, &abort_safety))
return 1;
- error(_("You seem to have moved HEAD since the last 'am' failure.\n"
+ warning(_("You seem to have moved HEAD since the last 'am' failure.\n"
"Not rewinding to ORIG_HEAD"));
return 0;
if (filter->verbose)
maxwidth = calc_maxwidth(&array, strlen(remote_prefix));
- /*
- * If no sorting parameter is given then we default to sorting
- * by 'refname'. This would give us an alphabetically sorted
- * array with the 'HEAD' ref at the beginning followed by
- * local branches 'refs/heads/...' and finally remote-tacking
- * branches 'refs/remotes/...'.
- */
- if (!sorting)
- sorting = ref_default_sorting();
ref_array_sort(sorting, &array);
for (i = 0; i < array.nr; i++)
static void reject_rebase_or_bisect_branch(const char *target)
{
- struct worktree **worktrees = get_worktrees();
+ struct worktree **worktrees = get_worktrees(0);
int i;
for (i = 0; worktrees[i]; i++) {
const char *new_upstream = NULL;
enum branch_track track;
struct ref_filter filter;
+ int icase = 0;
static struct ref_sorting *sorting = NULL, **sorting_tail = &sorting;
struct option options[] = {
OPTION_CALLBACK, 0, "points-at", &filter.points_at, N_("object"),
N_("print only branches of the object"), 0, parse_opt_object_name
},
+ OPT_BOOL('i', "ignore-case", &icase, N_("sorting and filtering are case insensitive")),
OPT_END(),
};
if (filter.abbrev == -1)
filter.abbrev = DEFAULT_ABBREV;
+ filter.ignore_case = icase;
+
finalize_colopts(&colopts, -1);
if (filter.verbose) {
if (explicitly_enable_column(colopts))
if ((filter.kind & FILTER_REFS_BRANCHES) && filter.detached)
filter.kind |= FILTER_REFS_DETACHED_HEAD;
filter.name_patterns = argv;
+ /*
+ * If no sorting parameter is given then we default to sorting
+ * by 'refname'. This would give us an alphabetically sorted
+ * array with the 'HEAD' ref at the beginning followed by
+	 * local branches 'refs/heads/...' and finally remote-tracking
+ * branches 'refs/remotes/...'.
+ */
+ if (!sorting)
+ sorting = ref_default_sorting();
+ sorting->ignore_case = icase;
print_ref_list(&filter, sorting);
print_columns(&output, colopts, NULL);
string_list_clear(&output, 0);
if (index_opt && !state.base_dir_len && !to_tempfile) {
state.refresh_cache = 1;
state.istate = &the_index;
- newfd = hold_locked_index(&lock_file, 1);
+ newfd = hold_locked_index(&lock_file, LOCK_DIE_ON_ERROR);
}
/* Check out named files first */
lock_file = xcalloc(1, sizeof(struct lock_file));
- hold_locked_index(lock_file, 1);
+ hold_locked_index(lock_file, LOCK_DIE_ON_ERROR);
if (read_cache_preload(&opts->pathspec) < 0)
return error(_("index file corrupt"));
int ret;
struct lock_file *lock_file = xcalloc(1, sizeof(struct lock_file));
- hold_locked_index(lock_file, 1);
+ hold_locked_index(lock_file, LOCK_DIE_ON_ERROR);
if (read_cache_preload(NULL) < 0)
return error(_("index file corrupt"));
OPT_STRING(0, "shallow-since", &option_since, N_("time"),
N_("create a shallow clone since a specific time")),
OPT_STRING_LIST(0, "shallow-exclude", &option_not, N_("revision"),
- N_("deepen history of shallow clone by excluding rev")),
+ N_("deepen history of shallow clone, excluding rev")),
OPT_BOOL(0, "single-branch", &option_single_branch,
N_("clone only one branch, HEAD or --branch")),
OPT_BOOL(0, "shallow-submodules", &option_shallow_submodules,
setup_work_tree();
lock_file = xcalloc(1, sizeof(struct lock_file));
- hold_locked_index(lock_file, 1);
+ hold_locked_index(lock_file, LOCK_DIE_ON_ERROR);
memset(&opts, 0, sizeof opts);
opts.update = 1;
if (interactive) {
char *old_index_env = NULL;
- hold_locked_index(&index_lock, 1);
+ hold_locked_index(&index_lock, LOCK_DIE_ON_ERROR);
refresh_cache_or_die(refresh_flags);
* (B) on failure, rollback the real index.
*/
if (all || (also && pathspec.nr)) {
- hold_locked_index(&index_lock, 1);
+ hold_locked_index(&index_lock, LOCK_DIE_ON_ERROR);
add_files_to_cache(also ? prefix : NULL, &pathspec, 0);
refresh_cache_or_die(refresh_flags);
update_main_cache_tree(WRITE_TREE_SILENT);
* We still need to refresh the index here.
*/
if (!only && !pathspec.nr) {
- hold_locked_index(&index_lock, 1);
+ hold_locked_index(&index_lock, LOCK_DIE_ON_ERROR);
refresh_cache_or_die(refresh_flags);
if (active_cache_changed
|| !cache_tree_fully_valid(active_cache_tree))
if (read_cache() < 0)
die(_("cannot read the index"));
- hold_locked_index(&index_lock, 1);
+ hold_locked_index(&index_lock, LOCK_DIE_ON_ERROR);
add_remove_files(&partial);
refresh_cache(REFRESH_QUIET);
update_main_cache_tree(WRITE_TREE_SILENT);
strbuf_stripspace(&sb, 0);
if (signoff)
- append_signoff(&sb, ignore_non_trailer(&sb), 0);
+ append_signoff(&sb, ignore_non_trailer(sb.buf, sb.len), 0);
if (fwrite(sb.buf, 1, sb.len, s->fp) < sb.len)
die_errno(_("could not write commit template"));
if (also + only + all + interactive > 1)
die(_("Only one of --include/--only/--all/--interactive/--patch can be used."));
- if (argc == 0 && (also || (only && !amend)))
+ if (argc == 0 && (also || (only && !amend && !allow_empty)))
die(_("No paths with --include/--only does not make sense."));
- if (argc == 0 && only && amend)
- only_include_assumed = _("Clever... amending the last one with dirty index.");
if (argc > 0 && !also && !only)
only_include_assumed = _("Explicit paths specified without -i or -o; assuming --only paths...");
if (!cleanup_arg || !strcmp(cleanup_arg, "default"))
OPT_STRING(0, "shallow-since", &deepen_since, N_("time"),
N_("deepen history of shallow repository based on time")),
OPT_STRING_LIST(0, "shallow-exclude", &deepen_not, N_("revision"),
- N_("deepen history of shallow clone by excluding rev")),
+ N_("deepen history of shallow clone, excluding rev")),
OPT_INTEGER(0, "deepen", &deepen_relative,
N_("deepen history of shallow clone")),
{ OPTION_SET_INT, 0, "unshallow", &unshallow, NULL,
int i;
const char *format = "%(objectname) %(objecttype)\t%(refname)";
struct ref_sorting *sorting = NULL, **sorting_tail = &sorting;
- int maxcount = 0, quote_style = 0;
+ int maxcount = 0, quote_style = 0, icase = 0;
struct ref_array array;
struct ref_filter filter;
OPT_MERGED(&filter, N_("print only refs that are merged")),
OPT_NO_MERGED(&filter, N_("print only refs that are not merged")),
OPT_CONTAINS(&filter.with_commit, N_("print only refs which contain the commit")),
+ OPT_BOOL(0, "ignore-case", &icase, N_("sorting and filtering are case insensitive")),
OPT_END(),
};
if (!sorting)
sorting = ref_default_sorting();
+ sorting->ignore_case = icase;
+ filter.ignore_case = icase;
/* for warn_ambiguous_refs */
git_config(git_default_config, NULL);
const unsigned char *sha1)
{
void *new_data = NULL;
- int collision_test_needed;
+ int collision_test_needed = 0;
assert(data || obj_entry);
- read_lock();
- collision_test_needed = has_sha1_file_with_flags(sha1, HAS_SHA1_QUICK);
- read_unlock();
+ if (startup_info->have_repository) {
+ read_lock();
+ collision_test_needed = has_sha1_file_with_flags(sha1, HAS_SHA1_QUICK);
+ read_unlock();
+ }
if (collision_test_needed && !data) {
read_lock();
usage(index_pack_usage);
if (fix_thin_pack && !from_stdin)
die(_("--fix-thin cannot be used without --stdin"));
+ if (from_stdin && !startup_info->have_repository)
+ die(_("--stdin requires a git repository"));
if (!index_name && pack_name)
index_name = derive_filename(pack_name, ".idx", &index_name_buf);
if (keep_msg && !keep_name && pack_name)
{
static struct lock_file lock;
- hold_locked_index(&lock, 1);
+ hold_locked_index(&lock, LOCK_DIE_ON_ERROR);
refresh_cache(REFRESH_QUIET);
if (active_cache_changed &&
write_locked_index(&the_index, &lock, COMMIT_LOCK))
for (j = common; j; j = j->next)
commit_list_insert(j->item, &reversed);
- hold_locked_index(&lock, 1);
+ hold_locked_index(&lock, LOCK_DIE_ON_ERROR);
clean = merge_recursive(&o, head,
remoteheads->item, reversed, &result);
if (clean < 0)
struct commit_list *parents, **pptr = &parents;
static struct lock_file lock;
- hold_locked_index(&lock, 1);
+ hold_locked_index(&lock, LOCK_DIE_ON_ERROR);
refresh_cache(REFRESH_QUIET);
if (active_cache_changed &&
write_locked_index(&the_index, &lock, COMMIT_LOCK))
if (--argc < 1)
usage_with_options(builtin_mv_usage, builtin_mv_options);
- hold_locked_index(&lock_file, 1);
+ hold_locked_index(&lock_file, LOCK_DIE_ON_ERROR);
if (read_cache() < 0)
die(_("index file corrupt"));
if (merge_heads.nr > 1)
die(_("Cannot merge multiple branches into empty head."));
return pull_into_void(*merge_heads.sha1, curr_head);
- } else if (opt_rebase) {
- if (merge_heads.nr > 1)
- die(_("Cannot rebase onto multiple branches."));
+ }
+ if (opt_rebase && merge_heads.nr > 1)
+ die(_("Cannot rebase onto multiple branches."));
+
+ if (opt_rebase) {
+ struct commit_list *list = NULL;
+ struct commit *merge_head, *head;
+
+ head = lookup_commit_reference(orig_head);
+ commit_list_insert(head, &list);
+ merge_head = lookup_commit_reference(merge_heads.sha1[0]);
+ if (is_descendant_of(merge_head, list)) {
+ /* we can fast-forward this without invoking rebase */
+ opt_ff = "--ff-only";
+ return run_merge();
+ }
return run_rebase(curr_head, *merge_heads.sha1, rebase_fork_point);
- } else
+ } else {
return run_merge();
+ }
}
argc = parse_options(argc, argv, unused_prefix, read_tree_options,
read_tree_usage, 0);
- hold_locked_index(&lock_file, 1);
+ hold_locked_index(&lock_file, LOCK_DIE_ON_ERROR);
prefix_set = opts.prefix ? 1 : 0;
if (1 < opts.merge + opts.reset + prefix_set)
"with what you pushed, and will require 'git reset --hard' to match\n"
"the work tree to HEAD.\n"
"\n"
- "You can set 'receive.denyCurrentBranch' configuration variable to\n"
- "'ignore' or 'warn' in the remote repository to allow pushing into\n"
+ "You can set the 'receive.denyCurrentBranch' configuration variable\n"
+ "to 'ignore' or 'warn' in the remote repository to allow pushing into\n"
"its current branch; however, this is not recommended unless you\n"
"arranged to update its work tree to match what you pushed in some\n"
"other way.\n"
if (reset_type != SOFT) {
struct lock_file *lock = xcalloc(1, sizeof(*lock));
- hold_locked_index(lock, 1);
+ hold_locked_index(lock, LOCK_DIE_ON_ERROR);
if (reset_type == MIXED) {
int flags = quiet ? REFRESH_QUIET : REFRESH_IN_PORCELAIN;
if (read_from_tree(&pathspec, &oid, intent_to_add))
for (parents = commit->parents, parent_number = 1;
parents;
parents = parents->next, parent_number++) {
+ char *name = NULL;
+
if (exclude_parent && parent_number != exclude_parent)
continue;
+ if (symbolic)
+ name = xstrfmt("%s^%d", arg, parent_number);
show_rev(include_parents ? NORMAL : REVERSED,
- parents->item->object.oid.hash, arg);
+ parents->item->object.oid.hash, name);
+ free(name);
}
*dotdot = '^';
if (!index_only)
setup_work_tree();
- hold_locked_index(&lock_file, 1);
+ hold_locked_index(&lock_file, LOCK_DIE_ON_ERROR);
if (read_cache() < 0)
die(_("index file corrupt"));
/*
* If the alternate object store is another repository, try the
- * standard layout with .git/modules/<name>/objects
+ * standard layout with .git/(modules/<name>)+/objects
*/
- if (ends_with(alt->path, ".git/objects")) {
+ if (ends_with(alt->path, "/objects")) {
char *sm_alternate;
struct strbuf sb = STRBUF_INIT;
struct strbuf err = STRBUF_INIT;
struct strbuf rel_path = STRBUF_INIT;
struct strbuf sb = STRBUF_INIT;
struct string_list reference = STRING_LIST_INIT_NODUP;
+ char *sm_alternate = NULL, *error_strategy = NULL;
struct option module_clone_options[] = {
OPT_STRING(0, "prefix", &prefix,
die(_("could not get submodule directory for '%s'"), path);
git_config_set_in_file(p, "core.worktree",
relative_path(path, sm_gitdir, &rel_path));
+
+ /* setup alternateLocation and alternateErrorStrategy in the cloned submodule if needed */
+ git_config_get_string("submodule.alternateLocation", &sm_alternate);
+ if (sm_alternate)
+ git_config_set_in_file(p, "submodule.alternateLocation",
+ sm_alternate);
+ git_config_get_string("submodule.alternateErrorStrategy", &error_strategy);
+ if (error_strategy)
+ git_config_set_in_file(p, "submodule.alternateErrorStrategy",
+ error_strategy);
+
+ free(sm_alternate);
+ free(error_strategy);
+
strbuf_release(&sb);
strbuf_release(&rel_path);
free(sm_gitdir);
struct ref_filter filter;
static struct ref_sorting *sorting = NULL, **sorting_tail = &sorting;
const char *format = NULL;
+ int icase = 0;
struct option options[] = {
OPT_CMDMODE('l', "list", &cmdmode, N_("list tag names"), 'l'),
{ OPTION_INTEGER, 'n', NULL, &filter.lines, N_("n"),
N_("print only tags of the object"), 0, parse_opt_object_name
},
OPT_STRING( 0 , "format", &format, N_("format"), N_("format to use for the output")),
+ OPT_BOOL('i', "ignore-case", &icase, N_("sorting and filtering are case insensitive")),
OPT_END()
};
}
if (!sorting)
sorting = ref_default_sorting();
+ sorting->ignore_case = icase;
+ filter.ignore_case = icase;
if (cmdmode == 'l') {
int ret;
if (column_active(colopts)) {
/* We can't free this memory, it becomes part of a linked list parsed atexit() */
lock_file = xcalloc(1, sizeof(struct lock_file));
+ /* we will diagnose later if it turns out that we need to update it */
newfd = hold_locked_index(lock_file, 0);
if (newfd < 0)
lock_error = errno;
printf("HEAD %s\n", sha1_to_hex(wt->head_sha1));
if (wt->is_detached)
printf("detached\n");
- else
+ else if (wt->head_ref)
printf("branch %s\n", wt->head_ref);
}
printf("\n");
else {
strbuf_addf(&sb, "%-*s ", abbrev_len,
find_unique_abbrev(wt->head_sha1, DEFAULT_ABBREV));
- if (!wt->is_detached)
+ if (wt->is_detached)
+ strbuf_addstr(&sb, "(detached HEAD)");
+ else if (wt->head_ref)
strbuf_addf(&sb, "[%s]", shorten_unambiguous_ref(wt->head_ref, 0));
else
- strbuf_addstr(&sb, "(detached HEAD)");
+ strbuf_addstr(&sb, "(error)");
}
printf("%s\n", sb.buf);
if (ac)
usage_with_options(worktree_usage, options);
else {
- struct worktree **worktrees = get_worktrees();
+ struct worktree **worktrees = get_worktrees(GWT_SORT_LINKED);
int path_maxlen = 0, abbrev = DEFAULT_ABBREV, i;
if (!porcelain)
if (ac != 1)
usage_with_options(worktree_usage, options);
- worktrees = get_worktrees();
+ worktrees = get_worktrees(0);
wt = find_worktree(worktrees, prefix, av[0]);
if (!wt)
die(_("'%s' is not a working tree"), av[0]);
if (ac != 1)
usage_with_options(worktree_usage, options);
- worktrees = get_worktrees();
+ worktrees = get_worktrees(0);
wt = find_worktree(worktrees, prefix, av[0]);
if (!wt)
die(_("'%s' is not a working tree"), av[0]);
}
/*
- * Inspect sb and determine the true "end" of the log message, in
+ * Inspect the given string and determine the true "end" of the log message, in
* order to find where to put a new Signed-off-by: line. Ignored are
* trailing comment lines and blank lines, and also the traditional
* "Conflicts:" block that is not commented out, so that we can use
* Returns the number of bytes from the tail to ignore, to be fed as
* the second parameter to append_signoff().
*/
-int ignore_non_trailer(struct strbuf *sb)
+int ignore_non_trailer(const char *buf, size_t len)
{
int boc = 0;
int bol = 0;
int in_old_conflicts_block = 0;
- while (bol < sb->len) {
- char *next_line;
+ while (bol < len) {
+ const char *next_line = memchr(buf + bol, '\n', len - bol);
- if (!(next_line = memchr(sb->buf + bol, '\n', sb->len - bol)))
- next_line = sb->buf + sb->len;
+ if (!next_line)
+ next_line = buf + len;
else
next_line++;
- if (sb->buf[bol] == comment_line_char || sb->buf[bol] == '\n') {
+ if (buf[bol] == comment_line_char || buf[bol] == '\n') {
/* is this the first of the run of comments? */
if (!boc)
boc = bol;
/* otherwise, it is just continuing */
- } else if (starts_with(sb->buf + bol, "Conflicts:\n")) {
+ } else if (starts_with(buf + bol, "Conflicts:\n")) {
in_old_conflicts_block = 1;
if (!boc)
boc = bol;
- } else if (in_old_conflicts_block && sb->buf[bol] == '\t') {
+ } else if (in_old_conflicts_block && buf[bol] == '\t') {
; /* a pathname in the conflicts block */
} else if (boc) {
/* the previous was not trailing comment */
boc = 0;
in_old_conflicts_block = 0;
}
- bol = next_line - sb->buf;
+ bol = next_line - buf;
}
- return boc ? sb->len - boc : 0;
+ return boc ? len - boc : 0;
}
size_t *out_len);
/* Find the end of the log message, the right place for a new trailer. */
-extern int ignore_non_trailer(struct strbuf *sb);
+extern int ignore_non_trailer(const char *buf, size_t len);
typedef void (*each_mergetag_fn)(struct commit *commit, struct commit_extra_header *extra,
void *cb_data);
git_setup_gettext();
- argv[0] = git_extract_argv0_path(argv[0]);
+ git_extract_argv0_path(argv[0]);
restore_sigpipe_to_default();
* ANSI emulation wrappers
*/
+int winansi_isatty(int fd);
+#define isatty winansi_isatty
+
void winansi_init(void);
HANDLE winansi_get_osfhandle(int fd);
#include <wingdi.h>
#include <winreg.h>
+/* In this file, we actually want to use Windows' own isatty(). */
+#undef isatty
+
/*
ANSI codes used by git: m, K
#endif
+int winansi_isatty(int fd)
+{
+ int res = isatty(fd);
+
+ if (res) {
+ /*
+ * Make sure that /dev/null is not fooling Git into believing
+ * that we are connected to a terminal, as "_isatty() returns a
+ * nonzero value if the descriptor is associated with a
+ * character device."; for more information, see
+ *
+ * https://msdn.microsoft.com/en-us/library/f4s0ddew.aspx
+ */
+ HANDLE handle = (HANDLE)_get_osfhandle(fd);
+ if (fd == STDIN_FILENO) {
+ DWORD dummy;
+
+ if (!GetConsoleMode(handle, &dummy))
+ res = 0;
+ } else if (fd == STDOUT_FILENO || fd == STDERR_FILENO) {
+ CONSOLE_SCREEN_BUFFER_INFO dummy;
+
+ if (!GetConsoleScreenBufferInfo(handle, &dummy))
+ res = 0;
+ }
+ }
+
+ return res;
+}
+
void winansi_init(void)
{
int con1, con2;
# because maintaining the nesting to match is a pain. If
# we had "elif" things would have been much nicer...
-ifeq ($(uname_M),x86_64)
- XDL_FAST_HASH = YesPlease
-endif
ifeq ($(uname_S),OSF1)
# Need this for u_short definitions et al
BASIC_CFLAGS += -D_OSF_SOURCE
{
local dir="$(__gitdir)"
if [ -f "$dir"/rebase-merge/interactive ]; then
- __gitcomp "--continue --skip --abort --edit-todo"
+ __gitcomp "--continue --skip --abort --quit --edit-todo"
return
elif [ -d "$dir"/rebase-apply ] || [ -d "$dir"/rebase-merge ]; then
- __gitcomp "--continue --skip --abort"
+ __gitcomp "--continue --skip --abort --quit"
return
fi
__git_complete_strategy && return
--- /dev/null
+uniset/
+UnicodeData.txt
+EastAsianWidth.txt
--- /dev/null
+TL;DR: Run update_unicode.sh after the publication of a new Unicode
+standard and commit the resulting unicode_width.h file.
+
+The long version
+================
+
+The Git source code ships the file unicode_width.h, which contains
+tables of zero-width and double-width Unicode code points, respectively.
+These tables are generated using update_unicode.sh in this directory.
+update_unicode.sh itself uses a third-party tool, uniset, to query two
+Unicode data files for the interesting code points.
+
+On first run, update_unicode.sh clones uniset from GitHub and builds it.
+This requires a reasonably recent version of autoconf (2.69 works as of
+December 2016).
+
+On each run, update_unicode.sh checks whether more recent Unicode data
+files are available from the Unicode Consortium, and rebuilds the header
+unicode_width.h with the new data. The new header can then be
+committed.
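A minimal usage sketch, assuming the script lives in a subdirectory of the Git working tree (so the regenerated header lands at the top level, as the script below arranges), with a placeholder Unicode version in the commit message:

    ./update_unicode.sh
    git commit -m 'unicode_width.h: update to Unicode <version>' \
        -- "$(git rev-parse --show-toplevel)/unicode_width.h"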
--- /dev/null
+#!/bin/sh
+#See http://www.unicode.org/reports/tr44/
+#
+#Me Enclosing_Mark an enclosing combining mark
+#Mn Nonspacing_Mark a nonspacing combining mark (zero advance width)
+#Cf Format a format control character
+#
+cd "$(dirname "$0")"
+UNICODEWIDTH_H=$(git rev-parse --show-toplevel)/unicode_width.h
+
+wget -N http://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt \
+ http://www.unicode.org/Public/UCD/latest/ucd/EastAsianWidth.txt &&
+if ! test -d uniset; then
+ git clone https://github.com/depp/uniset.git &&
+ ( cd uniset && git checkout 4b186196dd )
+fi &&
+(
+ cd uniset &&
+ if ! test -x uniset; then
+ autoreconf -i &&
+ ./configure --enable-warnings=-Werror CFLAGS='-O0 -ggdb'
+ fi &&
+ make
+) &&
+UNICODE_DIR=. && export UNICODE_DIR &&
+cat >$UNICODEWIDTH_H <<-EOF
+static const struct interval zero_width[] = {
+ $(uniset/uniset --32 cat:Me,Mn,Cf + U+1160..U+11FF - U+00AD)
+};
+static const struct interval double_width[] = {
+ $(uniset/uniset --32 eaw:F,W)
+};
+EOF
if (convert_is_binary(len, &stats))
return 0;
/*
- * If the file in the index has any CR in it, do not convert.
- * This is the new safer autocrlf handling.
+ * If the file in the index has any CR in it, do not
+ * convert. This is the new safer autocrlf handling,
+ * unless we want to renormalize in a merge or
+ * cherry-pick.
*/
- if (checksafe == SAFE_CRLF_RENORMALIZE)
- checksafe = SAFE_CRLF_FALSE;
- else if (has_cr_in_index(path))
+ if ((checksafe != SAFE_CRLF_RENORMALIZE) && has_cr_in_index(path))
convert_crlf_into_lf = 0;
}
- if (checksafe && len) {
+ if ((checksafe == SAFE_CRLF_WARN ||
+ (checksafe == SAFE_CRLF_FAIL)) && len) {
struct text_stat new_stats;
memcpy(&new_stats, &stats, sizeof(new_stats));
/* simulate "git add" */
abbrev = FALLBACK_DEFAULT_ABBREV;
if (abbrev > GIT_SHA1_HEXSZ)
die("BUG: oid abbreviation out of range: %d", abbrev);
- hex[abbrev] = '\0';
+ if (abbrev)
+ hex[abbrev] = '\0';
return hex;
}
}
options->file = stdout;
+ options->abbrev = DEFAULT_ABBREV;
options->line_termination = '\n';
options->break_opt = -1;
options->rename_limit = -1;
offending, optarg);
return argcount;
}
+ else if (!strcmp(arg, "--no-abbrev"))
+ options->abbrev = 0;
else if (!strcmp(arg, "--abbrev"))
options->abbrev = DEFAULT_ABBREV;
else if (skip_prefix(arg, "--abbrev=", &arg)) {
return strbuf_detach(&d, NULL);
}
-const char *git_extract_argv0_path(const char *argv0)
+void git_extract_argv0_path(const char *argv0)
{
const char *slash;
if (!argv0 || !*argv0)
- return NULL;
+ return;
slash = find_last_dir_sep(argv0);
- if (slash) {
+ if (slash)
argv0_path = xstrndup(argv0, slash - argv0);
- return slash + 1;
- }
-
- return argv0;
}
void git_set_argv_exec_path(const char *exec_path)
struct argv_array;
extern void git_set_argv_exec_path(const char *exec_path);
-extern const char *git_extract_argv0_path(const char *path);
+extern void git_extract_argv0_path(const char *path);
extern const char *git_exec_path(void);
extern void setup_path(void);
extern const char **prepare_git_cmd(struct argv_array *out, const char **argv);
}
}
+ # Go to the root of the worktree so that the left index files
+ # are properly set up -- the index is toplevel-relative.
+ chdir($workdir);
+
# Setup temp directories
my $tmpdir = tempdir('git-difftool.XXXXX', CLEANUP => 0, TMPDIR => 1);
my $ldir = "$tmpdir/left";
}
merge_cmd () {
- trust_exit_code=$(git config --bool \
- "mergetool.$1.trustExitCode" || echo false)
- if test "$trust_exit_code" = "false"
- then
- touch "$BACKUP"
- ( eval $merge_tool_cmd )
- check_unchanged
- else
- ( eval $merge_tool_cmd )
- fi
+ ( eval $merge_tool_cmd )
}
}
echo "$1"
}
+ # Most tools' exit codes cannot be trusted, so by default we ignore
+ # their exit code and check the merged file's modification time in
+ # check_unchanged() to determine whether or not the merge was
+ # successful. The return value from run_merge_cmd, by default, is
+ # determined by check_unchanged().
+ #
+ # When a tool's exit code can be trusted then the return value from
+ # run_merge_cmd is simply the tool's exit code, and check_unchanged()
+ # is not called.
+ #
+ # The return value of exit_code_trustable() tells us whether or not we
+ # can trust the tool's exit code.
+ #
+ # User-defined and built-in tools default to false.
+ # Built-in tools advertise that their exit code is trustable by
+ # redefining exit_code_trustable() to true.
+
+ exit_code_trustable () {
+ false
+ }
+
+
if ! test -f "$MERGE_TOOLS_DIR/$tool"
then
setup_user_tool
fi
}
+trust_exit_code () {
+ if git config --bool "mergetool.$1.trustExitCode"
+ then
+ :; # OK
+ elif exit_code_trustable
+ then
+ echo true
+ else
+ echo false
+ fi
+}
+
+
# Entry point for running tools
run_merge_tool () {
# If GIT_PREFIX is empty then we cannot use it in tools
# Run either a configured or built-in merge tool
run_merge_cmd () {
- merge_cmd "$1"
+ mergetool_trust_exit_code=$(trust_exit_code "$1")
+ if test "$mergetool_trust_exit_code" = "true"
+ then
+ merge_cmd "$1"
+ else
+ touch "$BACKUP"
+ merge_cmd "$1"
+ check_unchanged
+ fi
}
list_merge_tool_candidates () {
if len(client) > 0:
real_cmd += ["-c", client]
+ retries = gitConfigInt("git-p4.retries")
+ if retries is None:
+ # Perform 3 retries by default
+ retries = 3
+ real_cmd += ["-r", str(retries)]
if isinstance(cmd,basestring):
real_cmd = ' '.join(real_cmd) + ' ' + cmd
real_cmd += cmd
return real_cmd
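The retry count above is read from the git-p4.retries configuration and passed through as p4's global -r option; when unset, three retries are requested. A sketch of raising it for a flaky connection (assuming a p4 client that understands -r):

    git config git-p4.retries 5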
+def git_dir(path):
+ """ Return TRUE if the given path is a git directory (/path/to/dir/.git).
+ This won't automatically add ".git" to a directory.
+ """
+ d = read_pipe(["git", "--git-dir", path, "rev-parse", "--git-dir"], True).strip()
+ if not d or len(d) == 0:
+ return None
+ else:
+ return d
+
def chdir(path, is_client_path=False):
"""Do chdir to the given path, and set the PWD environment
variable for use by P4. It does not look at getcwd() output.
def p4_reopen(type, f):
p4_system(["reopen", "-t", type, wildcard_encode(f)])
+def p4_reopen_in_change(changelist, files):
+ cmd = ["reopen", "-c", str(changelist)] + files
+ p4_system(cmd)
+
def p4_move(src, dest):
p4_system(["move", "-k", wildcard_encode(src), wildcard_encode(dest)])
return read_pipe(["git", "name-rev", "HEAD"]).split(" ")[1].strip()
def isValidGitDir(path):
- if (os.path.exists(path + "/HEAD")
- and os.path.exists(path + "/refs") and os.path.exists(path + "/objects")):
- return True;
- return False
+ return git_dir(path) != None
def parseRevision(ref):
return read_pipe("git rev-parse %s" % ref).strip()
steps."""
if self.exceedsLargeFileThreshold(relPath, contents) or self.hasLargeFileExtension(relPath):
contentTempFile = self.generateTempFile(contents)
- (git_mode, contents, localLargeFile) = self.generatePointer(contentTempFile)
-
- # Move temp file to final location in large file system
- largeFileDir = os.path.dirname(localLargeFile)
- if not os.path.isdir(largeFileDir):
- os.makedirs(largeFileDir)
- shutil.move(contentTempFile, localLargeFile)
- self.addLargeFile(relPath)
- if gitConfigBool('git-p4.largeFilePush'):
- self.pushFile(localLargeFile)
- if verbose:
- sys.stderr.write("%s moved to large file system (%s)\n" % (relPath, localLargeFile))
+ (pointer_git_mode, contents, localLargeFile) = self.generatePointer(contentTempFile)
+ if pointer_git_mode:
+ git_mode = pointer_git_mode
+ if localLargeFile:
+ # Move temp file to final location in large file system
+ largeFileDir = os.path.dirname(localLargeFile)
+ if not os.path.isdir(largeFileDir):
+ os.makedirs(largeFileDir)
+ shutil.move(contentTempFile, localLargeFile)
+ self.addLargeFile(relPath)
+ if gitConfigBool('git-p4.largeFilePush'):
+ self.pushFile(localLargeFile)
+ if verbose:
+ sys.stderr.write("%s moved to large file system (%s)\n" % (relPath, localLargeFile))
return (git_mode, contents)
class MockLFS(LargeFileSystem):
the actual content. Return also the new location of the actual
content.
"""
+ if os.path.getsize(contentFile) == 0:
+ return (None, '', None)
+
pointerProcess = subprocess.Popen(
['git', 'lfs', 'pointer', '--file=' + contentFile],
stdout=subprocess.PIPE
optparse.make_option("--conflict", dest="conflict_behavior",
choices=self.conflict_behavior_choices),
optparse.make_option("--branch", dest="branch"),
+ optparse.make_option("--shelve", dest="shelve", action="store_true",
+ help="Shelve instead of submit. Shelved files are reverted, "
+ "restoring the workspace to the state before the shelve"),
+ optparse.make_option("--update-shelve", dest="update_shelve", action="store", type="int",
+ metavar="CHANGELIST",
+ help="update an existing shelved changelist, implies --shelve")
]
self.description = "Submit changes from git to the perforce depot."
self.usage += " [name of git branch to submit into perforce depot]"
self.detectRenames = False
self.preserveUser = gitConfigBool("git-p4.preserveUser")
self.dry_run = False
+ self.shelve = False
+ self.update_shelve = None
self.prepare_p4_only = False
self.conflict_behavior = None
self.isWindows = (platform.system() == "Windows")
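The new --shelve and --update-shelve options above turn "git p4 submit" into a shelving operation instead of a real submit. An illustrative invocation, using a made-up changelist number for the update case:

    git p4 submit --shelve
    git p4 submit --update-shelve 12345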
return 1
return 0
- def prepareSubmitTemplate(self):
+ def prepareSubmitTemplate(self, changelist=None):
"""Run "p4 change -o" to grab a change specification template.
This does not use "p4 -G", as it is nice to keep the submission
template in original order, since a human might edit it.
template = ""
inFilesSection = False
- for line in p4_read_pipe_lines(['change', '-o']):
+ args = ['change', '-o']
+ if changelist:
+ args.append(str(changelist))
+
+ for line in p4_read_pipe_lines(args):
if line.endswith("\r\n"):
line = line[:-2] + "\n"
if inFilesSection:
editedFiles = set()
pureRenameCopy = set()
filesToChangeExecBit = {}
+ all_files = list()
for line in diff:
diff = parseDiffTreeEntry(line)
modifier = diff['status']
path = diff['src']
+ all_files.append(path)
+
if modifier == "M":
p4_edit(path)
if isModeExecChanged(diff['src_mode'], diff['dst_mode']):
mode = filesToChangeExecBit[f]
setP4ExecBit(f, mode)
+ if self.update_shelve:
+ print("all_files = %s" % str(all_files))
+ p4_reopen_in_change(self.update_shelve, all_files)
+
#
# Build p4 change description, starting with the contents
# of the git commit message.
logMessage = logMessage.strip()
(logMessage, jobs) = self.separate_jobs_from_description(logMessage)
- template = self.prepareSubmitTemplate()
+ template = self.prepareSubmitTemplate(self.update_shelve)
submitTemplate = self.prepareLogMessage(template, logMessage, jobs)
if self.preserveUser:
if self.isWindows:
message = message.replace("\r\n", "\n")
submitTemplate = message[:message.index(separatorLine)]
- p4_write_pipe(['submit', '-i'], submitTemplate)
+
+ if self.update_shelve:
+ p4_write_pipe(['shelve', '-r', '-i'], submitTemplate)
+ elif self.shelve:
+ p4_write_pipe(['shelve', '-i'], submitTemplate)
+ else:
+ p4_write_pipe(['submit', '-i'], submitTemplate)
+ # The rename/copy happened by applying a patch that created a
+ # new file. This leaves it writable, which confuses p4.
+ for f in pureRenameCopy:
+ p4_sync(f, "-f")
if self.preserveUser:
if p4User:
changelist = self.lastP4Changelist()
self.modifyChangelistUser(changelist, p4User)
- # The rename/copy happened by applying a patch that created a
- # new file. This leaves it writable, which confuses p4.
- for f in pureRenameCopy:
- p4_sync(f, "-f")
submitted = True
finally:
# skip this patch
- if not submitted:
- print "Submission cancelled, undoing p4 changes."
- for f in editedFiles:
+ if not submitted or self.shelve:
+ if self.shelve:
+ print ("Reverting shelved files.")
+ else:
+ print ("Submission cancelled, undoing p4 changes.")
+ for f in editedFiles | filesToDelete:
p4_revert(f)
for f in filesToAdd:
p4_revert(f)
os.remove(f)
- for f in filesToDelete:
- p4_revert(f)
os.remove(fileName)
return submitted
if len(self.origin) == 0:
self.origin = upstream
+ if self.update_shelve:
+ self.shelve = True
+
if self.preserveUser:
if not self.canChangeChangelists():
die("Cannot preserve user names without p4 super-user or admin permissions")
break
chdir(self.oldWorkingDirectory)
-
+ shelved_applied = "shelved" if self.shelve else "applied"
if self.dry_run:
pass
elif self.prepare_p4_only:
pass
elif len(commits) == len(applied):
- print "All commits applied!"
+ print ("All commits {0}!".format(shelved_applied))
sync = P4Sync()
if self.branch:
else:
if len(applied) == 0:
- print "No commits applied."
+ print ("No commits {0}.".format(shelved_applied))
else:
- print "Applied only the commits marked with '*':"
+ print ("{0} only the commits marked with '*':".format(shelved_applied.capitalize()))
for c in commits:
if c in applied:
star = "*"
if cmd.gitdir == None:
cmd.gitdir = os.path.abspath(".git")
if not isValidGitDir(cmd.gitdir):
+ # "rev-parse --git-dir" without arguments will try $PWD/.git
cmd.gitdir = read_pipe("git rev-parse --git-dir").strip()
if os.path.exists(cmd.gitdir):
cdup = read_pipe("git rev-parse --show-cdup").strip()
else:
die("fatal: cannot locate git repository at %s" % cmd.gitdir)
+ # so git commands invoked from the P4 workspace will succeed
os.environ["GIT_DIR"] = cmd.gitdir
if not cmd.run(args):
abort! abort and check out the original branch
skip! skip current patch and continue
edit-todo! edit the todo list during an interactive rebase
+quit! abort but keep HEAD where it is
"
. git-sh-setup
set_reflog_action rebase
--verify)
ok_to_skip_pre_rebase=
;;
- --continue|--skip|--abort|--edit-todo)
+ --continue|--skip|--abort|--quit|--edit-todo)
test $total_argc -eq 2 || usage
action=${1##--}
;;
finish_rebase
exit
;;
+quit)
+ exec rm -rf "$state_dir"
+ ;;
edit-todo)
run_specific_rebase
;;
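The new quit action simply removes the rebase state directory and, unlike --abort, leaves HEAD and the working tree where they are:

    git rebase --quit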
git read-tree --index-output="$TMPindex" -m $i_tree &&
GIT_INDEX_FILE="$TMPindex" &&
export GIT_INDEX_FILE &&
- git diff --name-only -z HEAD -- >"$TMP-stagenames" &&
+ git diff-index --name-only -z HEAD -- >"$TMP-stagenames" &&
git update-index -z --add --remove --stdin <"$TMP-stagenames" &&
git write-tree &&
rm -f "$TMPindex"
cmd = argv[0];
if (!cmd)
cmd = "git-help";
+ else {
+ const char *slash = find_last_dir_sep(cmd);
+ if (slash)
+ cmd = slash + 1;
+ }
trace_command_performance(argv);
struct strbuf target = STRBUF_INIT;
strbuf_add(&target, base, serverlen);
strbuf_add(&target, data + i, posn - i - 7);
- if (walker->get_verbosely)
- fprintf(stderr, "Also look at %s\n",
- target.buf);
+ warning("adding alternate object store: %s",
+ target.buf);
newalt = xmalloc(sizeof(*newalt));
newalt->next = NULL;
newalt->base = strbuf_detach(&target, NULL);
struct alternates_request alt_req;
struct walker_data *cdata = walker->data;
+ if (http_follow_config != HTTP_FOLLOW_ALWAYS)
+ return;
+
/*
* If another request has already started fetching alternates,
* wait for them to arrive and return to processing this request's
* we turned off CURLOPT_FAILONERROR to avoid losing a
* persistent connection and got CURLE_OK.
*/
- if (req->http_code == 404 && req->curl_result == CURLE_OK &&
+ if (req->http_code >= 300 && req->curl_result == CURLE_OK &&
(starts_with(req->url, "http://") ||
- starts_with(req->url, "https://")))
+ starts_with(req->url, "https://"))) {
req->curl_result = CURLE_HTTP_RETURNED_ERROR;
+ xsnprintf(req->errorstr, sizeof(req->errorstr),
+ "HTTP request failed");
+ }
if (obj_req->state == ABORTED) {
ret = error("Request for %s aborted", hex);
static const char *user_agent;
static int curl_empty_auth;
+enum http_follow_config http_follow_config = HTTP_FOLLOW_INITIAL;
+
#if LIBCURL_VERSION_NUM >= 0x071700
/* Use CURLOPT_KEYPASSWD as is */
#elif LIBCURL_VERSION_NUM >= 0x070903
return 0;
}
+ if (!strcmp("http.followredirects", var)) {
+ if (value && !strcmp(value, "initial"))
+ http_follow_config = HTTP_FOLLOW_INITIAL;
+ else if (git_config_bool(var, value))
+ http_follow_config = HTTP_FOLLOW_ALWAYS;
+ else
+ http_follow_config = HTTP_FOLLOW_NONE;
+ return 0;
+ }
+
/* Fall back on the default ones */
return git_default_config(var, value, cb);
}
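For illustration, the new setting can be driven from the command line; per the parsing above, "initial" limits redirect following to the first request, a true boolean value always follows, and false never does:

    git config http.followRedirects initial
    git config http.followRedirects false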
curl_low_speed_time);
}
- curl_easy_setopt(result, CURLOPT_FOLLOWLOCATION, 1);
curl_easy_setopt(result, CURLOPT_MAXREDIRS, 20);
#if LIBCURL_VERSION_NUM >= 0x071301
curl_easy_setopt(result, CURLOPT_POSTREDIR, CURL_REDIR_POST_ALL);
if (is_transport_allowed("ftps"))
allowed_protocols |= CURLPROTO_FTPS;
curl_easy_setopt(result, CURLOPT_REDIR_PROTOCOLS, allowed_protocols);
+ curl_easy_setopt(result, CURLOPT_PROTOCOLS, allowed_protocols);
#else
if (transport_restrict_protocols())
warning("protocol restrictions not applied to curl redirects because\n"
curl_easy_setopt(slot->curl, CURLOPT_FAILONERROR, 1);
curl_easy_setopt(slot->curl, CURLOPT_RANGE, NULL);
+ /*
+ * Default following to off unless "ALWAYS" is configured; this gives
+ * callers a sane starting point, and they can tweak for individual
+ * HTTP_FOLLOW_* cases themselves.
+ */
+ if (http_follow_config == HTTP_FOLLOW_ALWAYS)
+ curl_easy_setopt(slot->curl, CURLOPT_FOLLOWLOCATION, 1);
+ else
+ curl_easy_setopt(slot->curl, CURLOPT_FOLLOWLOCATION, 0);
+
#if LIBCURL_VERSION_NUM >= 0x070a08
curl_easy_setopt(slot->curl, CURLOPT_IPRESOLVE, git_curl_ipresolve);
#endif
* If we see a failing http code with CURLE_OK, we have turned off
* FAILONERROR (to keep the server's custom error response), and should
* translate the code into failure here.
+ *
+ * Likewise, if we see a redirect (30x code), that means we turned off
+ * redirect-following, and we should treat the result as an error.
*/
if (results->curl_result == CURLE_OK &&
- results->http_code >= 400) {
+ results->http_code >= 300) {
results->curl_result = CURLE_HTTP_RETURNED_ERROR;
/*
* Normally curl will already have put the "reason phrase"
strbuf_addstr(&buf, " no-cache");
if (options && options->keep_error)
curl_easy_setopt(slot->curl, CURLOPT_FAILONERROR, 0);
+ if (options && options->initial_request &&
+ http_follow_config == HTTP_FOLLOW_INITIAL)
+ curl_easy_setopt(slot->curl, CURLOPT_FOLLOWLOCATION, 1);
headers = curl_slist_append(headers, buf.buf);
*
* Note that this assumes a sane redirect scheme. It's entirely possible
* in the example above to end up at a URL that does not even end in
- * "info/refs". In such a case we simply punt, as there is not much we can
- * do (and such a scheme is unlikely to represent a real git repository,
- * which means we are likely about to abort anyway).
+ * "info/refs". In such a case we die. There's not much we can do, such a
+ * scheme is unlikely to represent a real git repository, and failing to
+ * rewrite the base opens options for malicious redirects to do funny things.
*/
static int update_url_from_redirect(struct strbuf *base,
const char *asked,
const struct strbuf *got)
{
const char *tail;
- size_t tail_len;
+ size_t new_len;
if (!strcmp(asked, got->buf))
return 0;
die("BUG: update_url_from_redirect: %s is not a superset of %s",
asked, base->buf);
- tail_len = strlen(tail);
-
- if (got->len < tail_len ||
- strcmp(tail, got->buf + got->len - tail_len))
- return 0; /* insane redirect scheme */
+ new_len = got->len;
+ if (!strip_suffix_mem(got->buf, &new_len, tail))
+ die(_("unable to update url base from redirection:\n"
+ " asked for: %s\n"
+ " redirect: %s"),
+ asked, got->buf);
strbuf_reset(base);
- strbuf_add(base, got->buf, got->len - tail_len);
+ strbuf_add(base, got->buf, new_len);
+
return 1;
}
if (c != CURLE_OK)
die("BUG: curl_easy_getinfo for HTTP code failed: %s",
curl_easy_strerror(c));
- if (slot->http_code >= 400)
+ if (slot->http_code >= 300)
return size;
}
extern char curl_errorstr[CURL_ERROR_SIZE];
+enum http_follow_config {
+ HTTP_FOLLOW_NONE,
+ HTTP_FOLLOW_ALWAYS,
+ HTTP_FOLLOW_INITIAL
+};
+extern enum http_follow_config http_follow_config;
+
static inline int missing__target(int code, int result)
{
return /* file:// URL -- do we ever use one??? */
/* Options for http_get_*() */
struct http_get_options {
unsigned no_cache:1,
- keep_error:1;
+ keep_error:1,
+ initial_request:1;
/* If non-NULL, returns the content-type of the response. */
struct strbuf *content_type;
int flags, long timeout_ms)
{
int fd = lock_file_timeout(lk, path, flags, timeout_ms);
- if (fd < 0 && (flags & LOCK_DIE_ON_ERROR))
- unable_to_lock_die(path, errno);
+ if (fd < 0) {
+ if (flags & LOCK_DIE_ON_ERROR)
+ unable_to_lock_die(path, errno);
+ if (flags & LOCK_REPORT_ON_ERROR) {
+ struct strbuf buf = STRBUF_INIT;
+ unable_to_lock_message(path, errno, &buf);
+ error("%s", buf.buf);
+ strbuf_release(&buf);
+ }
+ }
return fd;
}
/*
* If a lock is already taken for the file, `die()` with an error
* message. If this flag is not specified, trying to lock a file that
- * is already locked returns -1 to the caller.
+ * is already locked silently returns -1 to the caller, or ...
*/
#define LOCK_DIE_ON_ERROR 1
+/*
+ * ... this flag can be passed instead to return -1 and give the usual
+ * error message upon an error.
+ */
+#define LOCK_REPORT_ON_ERROR 2
+
/*
* Usually symbolic links in the destination path are resolved. This
* means that (1) the lockfile is created by adding ".lock" to the
{
if (!mi->inbody_header_accum.len)
return;
- assert(check_header(mi, &mi->inbody_header_accum, mi->s_hdr_data, 0));
+ if (!check_header(mi, &mi->inbody_header_accum, mi->s_hdr_data, 0))
+ die("BUG: inbody_header_accum, if not empty, must always contain a valid in-body header");
strbuf_reset(&mi->inbody_header_accum);
}
struct cache_entry *nce;
nce = refresh_cache_entry(ce, CE_MATCH_REFRESH | CE_MATCH_IGNORE_MISSING);
+ if (!nce)
+ return err(o, _("addinfo_cache failed for path '%s'"), path);
if (nce != ce)
ret = add_cache_entry(nce, options);
}
return unmerged;
}
-static int string_list_df_name_compare(const void *a, const void *b)
+static int string_list_df_name_compare(const char *one, const char *two)
{
- const struct string_list_item *one = a;
- const struct string_list_item *two = b;
- int onelen = strlen(one->string);
- int twolen = strlen(two->string);
+ int onelen = strlen(one);
+ int twolen = strlen(two);
/*
* Here we only care that entries for D/F conflicts are
* adjacent, in particular with the file of the D/F conflict
* since in other cases any changes in their order due to
* sorting cause no problems for us.
*/
- int cmp = df_name_compare(one->string, onelen, S_IFDIR,
- two->string, twolen, S_IFDIR);
+ int cmp = df_name_compare(one, onelen, S_IFDIR,
+ two, twolen, S_IFDIR);
/*
* Now that 'foo' and 'foo/bar' compare equal, we have to make sure
* that 'foo' comes before 'foo/bar'.
string_list_append(&df_sorted_entries, next->string)->util =
next->util;
}
- qsort(df_sorted_entries.items, entries->nr, sizeof(*entries->items),
- string_list_df_name_compare);
+ df_sorted_entries.cmp = string_list_df_name_compare;
+ string_list_sort(&df_sorted_entries);
string_list_clear(&o->df_conflict_file_set, 1);
for (i = 0; i < df_sorted_entries.nr; i++) {
return strbuf_detach(&newpath, NULL);
}
-static int dir_in_way(const char *path, int check_working_copy)
+/**
+ * Check whether a directory in the index is in the way of an incoming
+ * file. Return 1 if so. If check_working_copy is non-zero, also
+ * check the working directory. If empty_ok is non-zero, also return
+ * 0 in the case where the working-tree dir exists but is empty.
+ */
+static int dir_in_way(const char *path, int check_working_copy, int empty_ok)
{
int pos;
struct strbuf dirpath = STRBUF_INIT;
}
strbuf_release(&dirpath);
- return check_working_copy && !lstat(path, &st) && S_ISDIR(st.st_mode);
+ return check_working_copy && !lstat(path, &st) && S_ISDIR(st.st_mode) &&
+ !(empty_ok && is_empty_dir(path));
}
static int was_tracked(const char *path)
{
char *renamed = NULL;
int ret = 0;
- if (dir_in_way(path, !o->call_depth)) {
+ if (dir_in_way(path, !o->call_depth, 0)) {
renamed = unique_path(o, path, a_oid ? o->branch1 : o->branch2);
}
remove_file(o, 0, rename->path, 0);
dst_name = unique_path(o, rename->path, cur_branch);
} else {
- if (dir_in_way(rename->path, !o->call_depth)) {
+ if (dir_in_way(rename->path, !o->call_depth, 0)) {
dst_name = unique_path(o, rename->path, cur_branch);
output(o, 1, _("%s is a directory in %s adding as %s instead"),
rename->path, other_branch, dst_name);
o->branch2 == rename_conflict_info->branch1) ?
pair1->two->path : pair1->one->path;
- if (dir_in_way(path, !o->call_depth))
+ if (dir_in_way(path, !o->call_depth,
+ S_ISGITLINK(pair1->two->mode)))
df_conflict_remains = 1;
}
if (merge_file_special_markers(o, &one, &a, &b,
oid = b_oid;
conf = _("directory/file");
}
- if (dir_in_way(path, !o->call_depth)) {
+ if (dir_in_way(path, !o->call_depth,
+ S_ISGITLINK(a_mode))) {
char *new_path = unique_path(o, path, add_branch);
clean_merge = 0;
output(o, 1, _("CONFLICT (%s): There is a directory with name %s in %s. "
}
}
- hold_locked_index(lock, 1);
+ hold_locked_index(lock, LOCK_DIE_ON_ERROR);
clean = merge_recursive(o, head_commit, next_commit, ca,
result);
if (clean < 0)
refresh_cache(REFRESH_QUIET);
- if (hold_locked_index(lock_file, 0) < 0)
+ if (hold_locked_index(lock_file, LOCK_REPORT_ON_ERROR) < 0)
return -1;
memset(&trees, 0, sizeof(trees));
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" -wait -merge -3 -a1 \
"$merge_tool_path" -wait -2 \
"$LOCAL" "$REMOTE" "$MERGED" >/dev/null 2>&1
fi
- check_unchanged
}
translate_merge_tool_path() {
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" "$LOCAL" "$REMOTE" "$BASE" \
"$merge_tool_path" "$LOCAL" "$REMOTE" \
-mergeoutput="$MERGED"
fi
- check_unchanged
}
translate_merge_tool_path() {
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" -MF="$LOCAL" -TF="$REMOTE" -BF="$BASE" \
"$merge_tool_path" -MF="$LOCAL" -TF="$REMOTE" \
-RF="$MERGED"
fi
- check_unchanged
}
translate_merge_tool_path() {
fi >/dev/null 2>&1
}
-translate_merge_tool_path() {
+translate_merge_tool_path () {
echo DeltaWalker
}
+
+exit_code_trustable () {
+ true
+}
--result="$MERGED" "$LOCAL" "$REMOTE"
fi
}
+
+exit_code_trustable () {
+ true
+}
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" \
"$merge_tool_path" \
"$LOCAL" "$MERGED" "$REMOTE" | cat
fi
- check_unchanged
}
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" "$BASE" "$LOCAL" "$REMOTE" \
"$merge_tool_path" "$LOCAL" "$REMOTE" \
--default --mode=merge2 --to="$MERGED"
fi
- check_unchanged
}
translate_merge_tool_path() {
echo emacs
}
+
+exit_code_trustable () {
+ true
+}
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" -merge "$LOCAL" "$BASE" "$REMOTE" -o:"$MERGED" -nh
else
"$merge_tool_path" -merge "$LOCAL" "$REMOTE" -o:"$MERGED" -nh
fi
- check_unchanged
}
translate_merge_tool_path() {
>/dev/null 2>&1
fi
}
+
+exit_code_trustable () {
+ true
+}
diff_cmd () {
"$merge_tool_path" "$LOCAL" "$REMOTE"
}
+
+exit_code_trustable () {
+ true
+}
then
check_meld_for_output_version
fi
- touch "$BACKUP"
+
if test "$meld_has_output_option" = true
then
"$merge_tool_path" --output "$MERGED" \
else
"$merge_tool_path" "$LOCAL" "$MERGED" "$REMOTE"
fi
- check_unchanged
}
# Check whether we should use 'meld --output <file>'
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" "$LOCAL" "$REMOTE" \
"$merge_tool_path" "$LOCAL" "$REMOTE" \
-merge "$MERGED" | cat
fi
- check_unchanged
}
}
merge_cmd () {
- touch "$BACKUP"
if ! $base_present
then
cp -- "$LOCAL" "$BASE"
create_virtual_base "$BASE" "$REMOTE"
fi
"$merge_tool_path" "$BASE" "$REMOTE" "$LOCAL" "$MERGED"
- check_unchanged
}
create_empty_file () {
"$merge_tool_path" -o "$MERGED" "$LOCAL" "$REMOTE"
fi
}
+
+exit_code_trustable () {
+ true
+}
merge_cmd () {
if $base_present
then
- touch "$BACKUP"
basename="$(basename "$merge_tool_path" .exe)"
if test "$basename" = "tortoisegitmerge"
then
-base:"$BASE" -mine:"$LOCAL" \
-theirs:"$REMOTE" -merged:"$MERGED"
fi
- check_unchanged
else
echo "$merge_tool_path cannot be used without a base" 1>&2
return 1
}
merge_cmd () {
- touch "$BACKUP"
case "$1" in
gvimdiff|vimdiff)
if $base_present
fi
;;
esac
- check_unchanged
}
translate_merge_tool_path() {
;;
esac
}
+
+exit_code_trustable () {
+ true
+}
merge_cmd () {
# mergetool.winmerge.trustExitCode is implicitly false.
# touch $BACKUP so that we can check_unchanged.
- touch "$BACKUP"
"$merge_tool_path" -u -e -dl Local -dr Remote \
"$LOCAL" "$REMOTE" "$MERGED"
- check_unchanged
}
translate_merge_tool_path() {
diff_cmd () {
"$merge_tool_path" \
-R 'Accel.Search: "Ctrl+F"' \
- -R 'Accel.SearchForward: "Ctrl-G"' \
+ -R 'Accel.SearchForward: "Ctrl+G"' \
"$LOCAL" "$REMOTE"
}
merge_cmd () {
- touch "$BACKUP"
if $base_present
then
"$merge_tool_path" -X --show-merged-pane \
- -R 'Accel.SaveAsMerged: "Ctrl-S"' \
+ -R 'Accel.SaveAsMerged: "Ctrl+S"' \
-R 'Accel.Search: "Ctrl+F"' \
- -R 'Accel.SearchForward: "Ctrl-G"' \
+ -R 'Accel.SearchForward: "Ctrl+G"' \
--merged-file "$MERGED" "$LOCAL" "$BASE" "$REMOTE"
else
"$merge_tool_path" -X $extra \
- -R 'Accel.SaveAsMerged: "Ctrl-S"' \
+ -R 'Accel.SaveAsMerged: "Ctrl+S"' \
-R 'Accel.Search: "Ctrl+F"' \
- -R 'Accel.SearchForward: "Ctrl-G"' \
+ -R 'Accel.SearchForward: "Ctrl+G"' \
--merged-file "$MERGED" "$LOCAL" "$REMOTE"
fi
- check_unchanged
}
const char * const *usagestr,
const struct option *options)
{
- fprintf(stderr, "%s\n\n", msg);
+ fprintf(stderr, "fatal: %s\n\n", msg);
usage_with_options(usagestr, options);
}
*
* Performs the following normalizations on src, storing the result in dst:
* - Ensures that components are separated by '/' (Windows only)
- * - Squashes sequences of '/'.
+ * - Squashes sequences of '/' except "//server/share" on Windows
* - Removes "." components.
* - Removes ".." components, and the components the precede them.
* Returns failure (non-zero) if a ".." component appears as first path
int normalize_path_copy_len(char *dst, const char *src, int *prefix_len)
{
char *dst0;
- int i;
+ const char *end;
- for (i = has_dos_drive_prefix(src); i > 0; i--)
- *dst++ = *src++;
+ /*
+ * Copy initial part of absolute path: "/", "C:/", "//server/share/".
+ */
+ end = src + offset_1st_component(src);
+ while (src < end) {
+ char c = *src++;
+ if (is_dir_sep(c))
+ c = '/';
+ *dst++ = c;
+ }
dst0 = dst;
- if (is_dir_sep(*src)) {
- *dst++ = '/';
- while (is_dir_sep(*src))
- src++;
- }
+ while (is_dir_sep(*src))
+ src++;
for (;;) {
char c = *src;
my $latest = $ra->get_latest_revnum;
$ra->get_log("", $latest, 0, 1, 0, 1, sub {});
};
- } while ($@ && ($c = shift @components));
+ } while ($@ && defined($c = shift @components));
return canonicalize_url($url);
}
"Project-Id-Version: Git\n"
"Report-Msgid-Bugs-To: Git Mailing List <git@vger.kernel.org>\n"
"POT-Creation-Date: 2016-11-25 22:50+0800\n"
-"PO-Revision-Date: 2016-11-25 22:54+0800\n"
+"PO-Revision-Date: 2017-01-02 20:11+0800\n"
"Last-Translator: Jiang Xin <worldhello.net@gmail.com>\n"
"Language-Team: GitHub <https://github.com/jiangxin/git/>\n"
"Language: zh_CN\n"
#: apply.c:3098
#, c-format
msgid "cannot reverse-apply a binary patch without the reverse hunk to '%s'"
-msgstr "不能反向应用一个没有至 '%s' 的反向数据块的二进制补丁"
+msgstr "不能反向应用一个缺少到 '%s' 的反向数据块的二进制补丁"
#: apply.c:3144
#, c-format
#, c-format
msgid ""
"the patch applies to '%s' (%s), which does not match the current contents."
-msgstr "è¡¥ä¸\81å¼\95ç\94¨å\88° '%s' (%s),但是和当前内容不匹配。"
+msgstr "è¡¥ä¸\81åº\94ç\94¨å\88° '%s'ï¼\88%sï¼\89,但是和当前内容不匹配。"
#: apply.c:3162
#, c-format
msgid "the patch applies to an empty '%s' but it is not empty"
-msgstr "补丁应用到空文件 '%s',但是它并不空"
+msgstr "补丁应用到空文件 '%s',但其并非空文件"
#: apply.c:3180
#, c-format
#: apply.c:4082
#, c-format
msgid "mode change for %s, which is not in current HEAD"
-msgstr "%s 的属性改变,但它不再当前 HEAD 中"
+msgstr "%s 的模式被改变,但它不在当前 HEAD 中"
#: apply.c:4085
#, c-format
"You may want to amend it after fixing the message, or set the config\n"
"variable i18n.commitencoding to the encoding your project uses.\n"
msgstr ""
-"警告:提交说明不符合 UTF-8 字符集。\n"
+"警告:提交说明不符合 UTF-8 字符编码。\n"
"您可以通过修补提交来改正提交说明,或者将配置变量 i18n.commitencoding\n"
-"设置为您项目所用的字符集。\n"
+"设置为您项目所用的字符编码。\n"
#: compat/obstack.c:406 compat/obstack.c:408
msgid "memory exhausted"
#: connect.c:49
msgid "The remote end hung up upon initial contact"
-msgstr "远端在初始连接时即挂断"
+msgstr "远端在连接发起时即挂断"
#: connect.c:51
msgid ""
#, c-format
msgid ""
"you may want to set your %s variable to at least %d and retry the command."
-msgstr "您可能想要将您的 %s 变量至少设置为 %d 并重复此命令。"
+msgstr "您可能想要将变量 %s 设置为至少 %d 并再次执行此命令。"
#: dir.c:1866
msgid "failed to get kernel name and information"
#: fetch-pack.c:697
#, c-format
msgid "already have %s (%s)"
-msgstr "已经有 %s (%s)"
+msgstr "已经有 %s(%s)"
#: fetch-pack.c:735
msgid "fetch-pack: unable to fork off sideband demultiplexer"
"Please, use 'git notes merge --commit' or 'git notes merge --abort' to "
"commit/abort the previous merge before you start a new notes merge."
msgstr ""
-"您尚未结束您前一次注释合并(存在 %s)。\n"
-"请您在开始一个新的注释合并之前,使用 'git notes merge --commit' 或者 'git "
-"notes merge --abort' 来提交/终止前一次合并。"
+"您的前一次注释合并尚未结束(存在 %s)。\n"
+"在开始一个新的注释合并之前,请使用 'git notes merge --commit' 或者 "
+"'git notes merge --abort' 来提交/终止前一次合并。"
#: notes-merge.c:280
#, c-format
"empty strings as pathspecs will be made invalid in upcoming releases. please "
"use . instead if you meant to match all paths"
msgstr ""
-"在即将到来的版本,不能再使用空字符串作为路径表达式。如果要匹配所有路径,\n"
+"在下一个版本中,使用空字符串作为路径规格将被视作非法。如果要匹配所有路径,\n"
"请代之以 ."
#: pathspec.c:440
#: sequencer.c:438
msgid "unable to update cache tree\n"
-msgstr "不能更新缓存\n"
+msgstr "不能更新缓存树\n"
#: sequencer.c:483
#, c-format
"\n"
" git rebase --continue\n"
msgstr ""
-"您已暂存了工作区的修改。如果这些修改要压缩到前一个提交,执行:\n"
+"您的工作区中存在已暂存的修改\n如果这些修改需要被并入前一个提交,执行:\n"
"\n"
" git commit --amend %s\n"
"\n"
-"如果这些变更要形成一个新提交,执行:\n"
+"如果这些修改要形成一个新提交,执行:\n"
"\n"
" git commit %s\n"
"\n"
#: sequencer.c:972
#, c-format
msgid "unusable instruction sheet: '%s'"
-msgstr "无用的指令清单:'%s'"
+msgstr "不可用的指令清单:'%s'"
#: sequencer.c:983
msgid "cannot cherry-pick during a revert."
#: sha1_name.c:407
#, c-format
msgid "short SHA1 %s is ambiguous"
-msgstr "歧义的短 SHA1 %s"
+msgstr "短 SHA1 %s 存在歧义"
#: sha1_name.c:418
msgid "The candidates are:"
#: tree-walk.c:37
msgid "malformed mode in tree entry"
-msgstr "树对象中的条目属性错误"
+msgstr "树对象中的条目模式错误"
#: tree-walk.c:41
msgid "empty filename in tree entry"
#: wt-status.c:2276
msgid "additionally, your index contains uncommitted changes."
-msgstr "而且您的索引中包含未提交的变更。"
+msgstr "另外,您的索引中包含未提交的变更。"
#: wt-status.c:2278
#, c-format
#: builtin/blame.c:2854
#, c-format
msgid "cannot read blob %s for path %s"
-msgstr "不能为路径 %2$s 读取对象 %1$s"
+msgstr "不能为路径 %2$s 读取数据对象 %1$s"
#: builtin/blame.c:2873
#, c-format
msgid ""
"gc is already running on machine '%s' pid %<PRIuMAX> (use --force if not)"
msgstr ""
-"已经有一个 gc 正运行在机器 '%s' pid %<PRIuMAX> (如果不是,使用 --force)"
+"已经有一个 gc 正运行在机器 '%s' pid %<PRIuMAX>(如果不是,使用 --force)"
#: builtin/gc.c:441
msgid ""
#: builtin/merge.c:652
msgid "Not handling anything other than two heads merge."
-msgstr "不能处理两个头合并之外的任何操作。"
+msgstr "未处理两个头合并之外的任何操作。"
#: builtin/merge.c:666
#, c-format
"To squelch this message, you can set it to 'refuse'."
msgstr ""
"默认禁止删除当前分支,因为下一次 'git clone' 将不会检出任何文件,\n"
-"导致混淆。\n"
+"导致困惑。\n"
"\n"
"您可以在远程仓库中设置 'receive.denyDeleteCurrent' 配置变量为\n"
-"'warn' 或 'ignore' 以允许删除当前分支,显示或者不显示警告。\n"
+"'warn'(显示警告信息)或 'ignore'(忽略警告信息)以允许删除当前分支。\n"
"\n"
"若要屏蔽此信息,您可以设置它为 'refuse'。"
#: builtin/show-branch.c:836
#, c-format
msgid "cannot find commit %s (%s)"
-msgstr "不能找到提交 %s (%s)"
+msgstr "不能找到提交 %s(%s)"
#: builtin/show-ref.c:10
msgid ""
#: builtin/submodule--helper.c:608 builtin/submodule--helper.c:964
msgid "force cloning progress"
-msgstr "显示克隆进度"
+msgstr "强制显示克隆进度"
#: builtin/submodule--helper.c:613
msgid ""
#: builtin/worktree.c:272
#, c-format
msgid "Preparing %s (identifier %s)"
-msgstr "准备 %s (标识符 %s)"
+msgstr "准备 %s(标识符 %s)"
#: builtin/worktree.c:323
msgid "checkout <branch> even if already checked out in other worktree"
"\n"
"\tchmod 0700 %s"
msgstr ""
-"您 socket 目录权限过于放松,其他用户可能会读取您缓存的认证信息。考虑执行:\n"
+"您的 socket 目录权限过于宽松,其他用户可能会读取您缓存的认证信息。考虑执行:\n"
"\n"
"\tchmod 0700 %s"
"You can fix this with 'git rebase --edit-todo' and then run 'git rebase --"
"continue'."
msgstr ""
-"您可以用命令 'git rebase --edit-todo' 修正然后执行命令 'git rebase --"
-"continue'。"
+"您可以用 'git rebase --edit-todo' 修正问题然后执行 'git rebase --continue'。"
#: git-rebase--interactive.sh:1045
msgid "Or you can abort the rebase with 'git rebase --abort'."
#include "color.h"
#include "reflog-walk.h"
#include "gpg-interface.h"
+#include "trailer.h"
static char *user_format;
static struct cmt_fmt_map {
return msg;
}
+static void format_trailers(struct strbuf *sb, const char *msg)
+{
+ struct trailer_info info;
+
+ trailer_info_get(&info, msg);
+ strbuf_add(sb, info.trailer_start,
+ info.trailer_end - info.trailer_start);
+ trailer_info_release(&info);
+}
+
static void parse_commit_message(struct format_commit_context *c)
{
const char *msg = c->message + c->message_off;
strbuf_addstr(sb, msg + c->body_off);
return 1;
}
+
+ if (starts_with(placeholder, "(trailers)")) {
+ format_trailers(sb, msg + c->subject_off);
+ return strlen("(trailers)");
+ }
+
return 0; /* unknown placeholder */
}
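With the %(trailers) placeholder wired into the pretty-format code above, the trailer block of a commit (for example its Signed-off-by: lines) can be printed on its own. A small example against the most recent commit:

    git log -1 --format='%(trailers)'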
return 0;
}
-int hold_locked_index(struct lock_file *lk, int die_on_error)
+int hold_locked_index(struct lock_file *lk, int lock_flags)
{
- return hold_lock_file_for_update(lk, get_index_file(),
- die_on_error
- ? LOCK_DIE_ON_ERROR
- : 0);
+ return hold_lock_file_for_update(lk, get_index_file(), lock_flags);
}
int read_index(struct index_state *istate)
#include "utf8.h"
#include "git-compat-util.h"
#include "version.h"
+#include "trailer.h"
typedef enum { FIELD_STR, FIELD_ULONG, FIELD_TIME } cmp_type;
enum { RR_NORMAL, RR_SHORTEN, RR_TRACK, RR_TRACKSHORT }
remote_ref;
struct {
- enum { C_BARE, C_BODY, C_BODY_DEP, C_LINES, C_SIG, C_SUB } option;
+ enum { C_BARE, C_BODY, C_BODY_DEP, C_LINES, C_SIG, C_SUB, C_TRAILERS } option;
unsigned int nlines;
} contents;
enum { O_FULL, O_SHORT } objectname;
atom->u.contents.option = C_SUB;
}
+static void trailers_atom_parser(struct used_atom *atom, const char *arg)
+{
+ if (arg)
+ die(_("%%(trailers) does not take arguments"));
+ atom->u.contents.option = C_TRAILERS;
+}
+
static void contents_atom_parser(struct used_atom *atom, const char *arg)
{
if (!arg)
atom->u.contents.option = C_SIG;
else if (!strcmp(arg, "subject"))
atom->u.contents.option = C_SUB;
+ else if (!strcmp(arg, "trailers"))
+ atom->u.contents.option = C_TRAILERS;
else if (skip_prefix(arg, "lines=", &arg)) {
atom->u.contents.option = C_LINES;
if (strtoul_ui(arg, 10, &atom->u.contents.nlines))
{ "creatordate", FIELD_TIME },
{ "subject", FIELD_STR, subject_atom_parser },
{ "body", FIELD_STR, body_atom_parser },
+ { "trailers", FIELD_STR, trailers_atom_parser },
{ "contents", FIELD_STR, contents_atom_parser },
{ "upstream", FIELD_STR, remote_ref_atom_parser },
{ "push", FIELD_STR, remote_ref_atom_parser },
name++;
if (strcmp(name, "subject") &&
strcmp(name, "body") &&
+ strcmp(name, "trailers") &&
!starts_with(name, "contents"))
continue;
if (!subpos)
/* Size is the length of the message after removing the signature */
append_lines(&s, subpos, contents_end - subpos, atom->u.contents.nlines);
v->s = strbuf_detach(&s, NULL);
+ } else if (atom->u.contents.option == C_TRAILERS) {
+ struct trailer_info info;
+
+ /* Search for trailer info */
+ trailer_info_get(&info, subpos);
+ v->s = xmemdupz(info.trailer_start,
+ info.trailer_end - info.trailer_start);
+ trailer_info_release(&info);
} else if (atom->u.contents.option == C_BARE)
v->s = xstrdup(subpos);
}
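The corresponding ref-filter atom makes the same information available to for-each-ref style commands, both as a standalone %(trailers) atom and via %(contents:trailers). A sketch, assuming annotated tags whose messages end with trailers:

    git for-each-ref --format='%(trailers)' refs/tags
    git tag -l --format='%(contents:trailers)'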
* matches a pattern "refs/heads/mas") or a wildcard (e.g. the same ref
* matches "refs/heads/mas*", too).
*/
-static int match_pattern(const char **patterns, const char *refname)
+static int match_pattern(const struct ref_filter *filter, const char *refname)
{
+ const char **patterns = filter->name_patterns;
+ unsigned flags = 0;
+
+ if (filter->ignore_case)
+ flags |= WM_CASEFOLD;
+
/*
* When no '--format' option is given we need to skip the prefix
* for matching refs of tags and branches.
skip_prefix(refname, "refs/", &refname));
for (; *patterns; patterns++) {
- if (!wildmatch(*patterns, refname, 0, NULL))
+ if (!wildmatch(*patterns, refname, flags, NULL))
return 1;
}
return 0;
* matches a pattern "refs/heads/" but not "refs/heads/m") or a
* wildcard (e.g. the same ref matches "refs/heads/m*", too).
*/
-static int match_name_as_path(const char **pattern, const char *refname)
+static int match_name_as_path(const struct ref_filter *filter, const char *refname)
{
+ const char **pattern = filter->name_patterns;
int namelen = strlen(refname);
+ unsigned flags = WM_PATHNAME;
+
+ if (filter->ignore_case)
+ flags |= WM_CASEFOLD;
+
for (; *pattern; pattern++) {
const char *p = *pattern;
int plen = strlen(p);
if (!*filter->name_patterns)
return 1; /* No pattern always matches */
if (filter->match_as_path)
- return match_name_as_path(filter->name_patterns, refname);
- return match_pattern(filter->name_patterns, refname);
+ return match_name_as_path(filter, refname);
+ return match_pattern(filter, refname);
}
/*
struct atom_value *va, *vb;
int cmp;
cmp_type cmp_type = used_atom[s->atom].type;
+ int (*cmp_fn)(const char *, const char *);
get_ref_atom_value(a, s->atom, &va);
get_ref_atom_value(b, s->atom, &vb);
+ cmp_fn = s->ignore_case ? strcasecmp : strcmp;
if (s->version)
cmp = versioncmp(va->s, vb->s);
else if (cmp_type == FIELD_STR)
- cmp = strcmp(va->s, vb->s);
+ cmp = cmp_fn(va->s, vb->s);
else {
if (va->ul < vb->ul)
cmp = -1;
else if (va->ul == vb->ul)
- cmp = strcmp(a->refname, b->refname);
+ cmp = cmp_fn(a->refname, b->refname);
else
cmp = 1;
}
struct ref_sorting *next;
int atom; /* index into used_atom array (internal) */
unsigned reverse : 1,
+ ignore_case : 1,
version : 1;
};
unsigned int with_commit_tag_algo : 1,
match_as_path : 1,
+ ignore_case : 1,
detached : 1;
unsigned int kind,
lines;
struct strbuf effective_url = STRBUF_INIT;
struct discovery *last = last_discovery;
int http_ret, maybe_smart = 0;
- struct http_get_options options;
+ struct http_get_options http_options;
if (last && !strcmp(service, last->service))
return last;
strbuf_addf(&refs_url, "service=%s", service);
}
- memset(&options, 0, sizeof(options));
- options.content_type = &type;
- options.charset = &charset;
- options.effective_url = &effective_url;
- options.base_url = &url;
- options.no_cache = 1;
- options.keep_error = 1;
+ memset(&http_options, 0, sizeof(http_options));
+ http_options.content_type = &type;
+ http_options.charset = &charset;
+ http_options.effective_url = &effective_url;
+ http_options.base_url = &url;
+ http_options.initial_request = 1;
+ http_options.no_cache = 1;
+ http_options.keep_error = 1;
- http_ret = http_get_strbuf(refs_url.buf, &buffer, &options);
+ http_ret = http_get_strbuf(refs_url.buf, &buffer, &http_options);
switch (http_ret) {
case HTTP_OK:
break;
die("unable to access '%s': %s", url.buf, curl_errorstr);
}
+ if (options.verbosity && !starts_with(refs_url.buf, url.buf))
+ warning(_("redirecting to %s"), url.buf);
+
last= xcalloc(1, sizeof(*last_discovery));
last->service = service;
last->buf_alloc = strbuf_detach(&buffer, &last->len);
{
int i;
- hold_locked_index(&index_lock, 1);
+ hold_locked_index(&index_lock, LOCK_DIE_ON_ERROR);
for (i = 0; i < update->nr; i++) {
struct string_list_item *item = &update->items[i];
#include "refs.h"
#include "argv-array.h"
#include "quote.h"
+#include "trailer.h"
#define GIT_REFLOG_ACTION "GIT_REFLOG_ACTION"
static GIT_PATH_FUNC(git_path_todo_file, "sequencer/todo")
static GIT_PATH_FUNC(git_path_opts_file, "sequencer/opts")
static GIT_PATH_FUNC(git_path_head_file, "sequencer/head")
+static GIT_PATH_FUNC(git_path_abort_safety_file, "sequencer/abort-safety")
/*
* A script to set the GIT_AUTHOR_NAME, GIT_AUTHOR_EMAIL, and
return git_path_todo_file();
}
-static int is_rfc2822_line(const char *buf, int len)
-{
- int i;
-
- for (i = 0; i < len; i++) {
- int ch = buf[i];
- if (ch == ':')
- return 1;
- if (!isalnum(ch) && ch != '-')
- break;
- }
-
- return 0;
-}
-
-static int is_cherry_picked_from_line(const char *buf, int len)
-{
- /*
- * We only care that it looks roughly like (cherry picked from ...)
- */
- return len > strlen(cherry_picked_prefix) + 1 &&
- starts_with(buf, cherry_picked_prefix) && buf[len - 1] == ')';
-}
-
/*
* Returns 0 for non-conforming footer
* Returns 1 for conforming footer
static int has_conforming_footer(struct strbuf *sb, struct strbuf *sob,
int ignore_footer)
{
- char prev;
- int i, k;
- int len = sb->len - ignore_footer;
- const char *buf = sb->buf;
- int found_sob = 0;
-
- /* footer must end with newline */
- if (!len || buf[len - 1] != '\n')
- return 0;
+ struct trailer_info info;
+ int i;
+ int found_sob = 0, found_sob_last = 0;
- prev = '\0';
- for (i = len - 1; i > 0; i--) {
- char ch = buf[i];
- if (prev == '\n' && ch == '\n') /* paragraph break */
- break;
- prev = ch;
- }
+ trailer_info_get(&info, sb->buf);
- /* require at least one blank line */
- if (prev != '\n' || buf[i] != '\n')
+ if (info.trailer_start == info.trailer_end)
return 0;
- /* advance to start of last paragraph */
- while (i < len - 1 && buf[i] == '\n')
- i++;
-
- for (; i < len; i = k) {
- int found_rfc2822;
-
- for (k = i; k < len && buf[k] != '\n'; k++)
- ; /* do nothing */
- k++;
+ for (i = 0; i < info.trailer_nr; i++)
+ if (sob && !strncmp(info.trailers[i], sob->buf, sob->len)) {
+ found_sob = 1;
+ if (i == info.trailer_nr - 1)
+ found_sob_last = 1;
+ }
- found_rfc2822 = is_rfc2822_line(buf + i, k - i - 1);
- if (found_rfc2822 && sob &&
- !strncmp(buf + i, sob->buf, sob->len))
- found_sob = k;
+ trailer_info_release(&info);
- if (!(found_rfc2822 ||
- is_cherry_picked_from_line(buf + i, k - i - 1)))
- return 0;
- }
- if (found_sob == i)
+ if (found_sob_last)
return 3;
if (found_sob)
return 2;
return -1;
}
+static void update_abort_safety_file(void)
+{
+ struct object_id head;
+
+ /* Do nothing on a single-pick */
+ if (!file_exists(git_path_seq_dir()))
+ return;
+
+ if (!get_oid("HEAD", &head))
+ write_file(git_path_abort_safety_file(), "%s", oid_to_hex(&head));
+ else
+ write_file(git_path_abort_safety_file(), "%s", "");
+}
+
static int fast_forward_to(const unsigned char *to, const unsigned char *from,
int unborn, struct replay_opts *opts)
{
strbuf_release(&sb);
strbuf_release(&err);
ref_transaction_free(transaction);
+ update_abort_safety_file();
return 0;
}
char **xopt;
static struct lock_file index_lock;
- hold_locked_index(&index_lock, 1);
+ hold_locked_index(&index_lock, LOCK_DIE_ON_ERROR);
read_cache();
leave:
free_message(commit, &msg);
+ update_abort_safety_file();
return res;
}
return 0;
}
+static int rollback_is_safe(void)
+{
+ struct strbuf sb = STRBUF_INIT;
+ struct object_id expected_head, actual_head;
+
+ if (strbuf_read_file(&sb, git_path_abort_safety_file(), 0) >= 0) {
+ strbuf_trim(&sb);
+ if (get_oid_hex(sb.buf, &expected_head)) {
+ strbuf_release(&sb);
+ die(_("could not parse %s"), git_path_abort_safety_file());
+ }
+ strbuf_release(&sb);
+ }
+ else if (errno == ENOENT)
+ oidclr(&expected_head);
+ else
+ die_errno(_("could not read '%s'"), git_path_abort_safety_file());
+
+ if (get_oid("HEAD", &actual_head))
+ oidclr(&actual_head);
+
+ return !oidcmp(&actual_head, &expected_head);
+}
+
static int reset_for_rollback(const unsigned char *sha1)
{
const char *argv[4]; /* reset --merge <arg> + NULL */
+
argv[0] = "reset";
argv[1] = "--merge";
argv[2] = sha1_to_hex(sha1);
error(_("cannot abort from a branch yet to be born"));
goto fail;
}
+
+ if (!rollback_is_safe()) {
+ /* Do not error, just do not rollback */
+ warning(_("You seem to have moved HEAD. "
+ "Not rewinding, check your HEAD!"));
+ } else
if (reset_for_rollback(sha1))
goto fail;
strbuf_release(&buf);
return -1;
if (save_opts(opts))
return -1;
+ update_abort_safety_file();
res = pick_commits(&todo_list, opts);
todo_list_release(&todo_list);
return res;
#include "mru.h"
#include "list.h"
#include "mergesort.h"
+#include "quote.h"
#ifndef O_NOATIME
#if defined(__linux__) && (defined(__i386__) || defined(__PPC__))
return 0;
}
+static const char *parse_alt_odb_entry(const char *string,
+ int sep,
+ struct strbuf *out)
+{
+ const char *end;
+
+ strbuf_reset(out);
+
+ if (*string == '#') {
+ /* comment; consume up to next separator */
+ end = strchrnul(string, sep);
+ } else if (*string == '"' && !unquote_c_style(out, string, &end)) {
+ /*
+ * quoted path; unquote_c_style has copied the
+ * data for us and set "end". Broken quoting (e.g.,
+ * an entry that doesn't end with a quote) falls
+ * back to the unquoted case below.
+ */
+ } else {
+ /* normal, unquoted path */
+ end = strchrnul(string, sep);
+ strbuf_add(out, string, end - string);
+ }
+
+ if (*end)
+ end++;
+ return end;
+}
+
static void link_alt_odb_entries(const char *alt, int len, int sep,
const char *relative_base, int depth)
{
- struct string_list entries = STRING_LIST_INIT_NODUP;
- char *alt_copy;
- int i;
struct strbuf objdirbuf = STRBUF_INIT;
+ struct strbuf entry = STRBUF_INIT;
if (depth > 5) {
error("%s: ignoring alternate object stores, nesting too deep.",
die("unable to normalize object directory: %s",
objdirbuf.buf);
- alt_copy = xmemdupz(alt, len);
- string_list_split_in_place(&entries, alt_copy, sep, -1);
- for (i = 0; i < entries.nr; i++) {
- const char *entry = entries.items[i].string;
- if (entry[0] == '\0' || entry[0] == '#')
+ while (*alt) {
+ alt = parse_alt_odb_entry(alt, sep, &entry);
+ if (!entry.len)
continue;
- link_alt_odb_entry(entry, relative_base, depth, objdirbuf.buf);
+ link_alt_odb_entry(entry.buf, relative_base, depth, objdirbuf.buf);
}
- string_list_clear(&entries, 0);
- free(alt_copy);
+ strbuf_release(&entry);
strbuf_release(&objdirbuf);
}
define_commit_slab(ref_bitmap, uint32_t *);
+#define POOL_SIZE (512 * 1024)
+
struct paint_info {
struct ref_bitmap ref_bitmap;
unsigned nr_bits;
- char **slab;
+ char **pools;
char *free, *end;
- unsigned slab_count;
+ unsigned pool_count;
};
static uint32_t *paint_alloc(struct paint_info *info)
unsigned nr = (info->nr_bits + 31) / 32;
unsigned size = nr * sizeof(uint32_t);
void *p;
- if (!info->slab_count || info->free + size > info->end) {
- info->slab_count++;
- REALLOC_ARRAY(info->slab, info->slab_count);
- info->free = xmalloc(COMMIT_SLAB_SIZE);
- info->slab[info->slab_count - 1] = info->free;
- info->end = info->free + COMMIT_SLAB_SIZE;
+ if (!info->pool_count || size > info->end - info->free) {
+ if (size > POOL_SIZE)
+ die("BUG: pool size too small for %d in paint_alloc()",
+ size);
+ info->pool_count++;
+ REALLOC_ARRAY(info->pools, info->pool_count);
+ info->free = xmalloc(POOL_SIZE);
+ info->pools[info->pool_count - 1] = info->free;
+ info->end = info->free + POOL_SIZE;
}
p = info->free;
info->free += size;
* all walked commits.
*/
static void paint_down(struct paint_info *info, const unsigned char *sha1,
- int id)
+ unsigned int id)
{
unsigned int i, nr;
struct commit_list *head = NULL;
if (!c)
return;
memset(bitmap, 0, bitmap_size);
- bitmap[id / 32] |= (1 << (id % 32));
+ bitmap[id / 32] |= (1U << (id % 32));
commit_list_insert(c, &head);
while (head) {
struct commit_list *p;
oid_to_hex(&c->object.oid));
for (p = c->parents; p; p = p->next) {
- uint32_t **p_refs = ref_bitmap_at(&info->ref_bitmap,
- p->item);
if (p->item->object.flags & SEEN)
continue;
- if (*p_refs == NULL || *p_refs == *refs)
- *p_refs = *refs;
commit_list_insert(p->item, &head);
}
}
post_assign_shallow(info, &pi.ref_bitmap, ref_status);
clear_ref_bitmap(&pi.ref_bitmap);
- for (i = 0; i < pi.slab_count; i++)
- free(pi.slab[i]);
- free(pi.slab);
+ for (i = 0; i < pi.pool_count; i++)
+ free(pi.pools[i]);
+ free(pi.pools);
free(shallow);
}
static void update_refstatus(int *ref_status, int nr, uint32_t *bitmap)
{
- int i;
+ unsigned int i;
if (!ref_status)
return;
for (i = 0; i < nr; i++)
- if (bitmap[i / 32] & (1 << (i % 32)))
+ if (bitmap[i / 32] & (1U << (i % 32)))
ref_status[i]++;
}
return 1;
}
-static int submodule_needs_pushing(const char *path, const unsigned char sha1[20])
+static int append_sha1_to_argv(const unsigned char sha1[20], void *data)
{
- if (add_submodule_odb(path) || !lookup_commit_reference(sha1))
+ struct argv_array *argv = data;
+ argv_array_push(argv, sha1_to_hex(sha1));
+ return 0;
+}
+
+static int check_has_commit(const unsigned char sha1[20], void *data)
+{
+ int *has_commit = data;
+
+ if (!lookup_commit_reference(sha1))
+ *has_commit = 0;
+
+ return 0;
+}
+
+static int submodule_has_commits(const char *path, struct sha1_array *commits)
+{
+ int has_commit = 1;
+
+ if (add_submodule_odb(path))
+ return 0;
+
+ sha1_array_for_each_unique(commits, check_has_commit, &has_commit);
+ return has_commit;
+}
+
+static int submodule_needs_pushing(const char *path, struct sha1_array *commits)
+{
+ if (!submodule_has_commits(path, commits))
+ /*
+ * NOTE: We do consider it safe to return "no" here. The
+ * correct answer would be "We do not know" instead of
+ * "No push needed", but it is quite hard to change
+ * the submodule pointer without having the submodule
+ * around. If a user did however change the submodules
+ * without having the submodule around, this indicates
+ * an expert who knows what they are doing or a
+ * maintainer integrating work from other people. In
+ * both cases it should be safe to skip this check.
+ */
return 0;
if (for_each_remote_ref_submodule(path, has_remote, NULL) > 0) {
struct child_process cp = CHILD_PROCESS_INIT;
- const char *argv[] = {"rev-list", NULL, "--not", "--remotes", "-n", "1" , NULL};
struct strbuf buf = STRBUF_INIT;
int needs_pushing = 0;
- argv[1] = sha1_to_hex(sha1);
- cp.argv = argv;
+ argv_array_push(&cp.args, "rev-list");
+ sha1_array_for_each_unique(commits, append_sha1_to_argv, &cp.args);
+ argv_array_pushl(&cp.args, "--not", "--remotes", "-n", "1" , NULL);
+
prepare_submodule_repo_env(&cp.env_array);
cp.git_cmd = 1;
cp.no_stdin = 1;
cp.out = -1;
cp.dir = path;
if (start_command(&cp))
- die("Could not run 'git rev-list %s --not --remotes -n 1' command in submodule %s",
- sha1_to_hex(sha1), path);
+ die("Could not run 'git rev-list <commits> --not --remotes -n 1' command in submodule %s",
+ path);
if (strbuf_read(&buf, cp.out, 41))
needs_pushing = 1;
finish_command(&cp);
return 0;
}
+static struct sha1_array *submodule_commits(struct string_list *submodules,
+ const char *path)
+{
+ struct string_list_item *item;
+
+ item = string_list_insert(submodules, path);
+ if (item->util)
+ return (struct sha1_array *) item->util;
+
+ /* NEEDSWORK: should we have sha1_array_init()? */
+ item->util = xcalloc(1, sizeof(struct sha1_array));
+ return (struct sha1_array *) item->util;
+}
+
static void collect_submodules_from_diff(struct diff_queue_struct *q,
struct diff_options *options,
void *data)
{
int i;
- struct string_list *needs_pushing = data;
+ struct string_list *submodules = data;
for (i = 0; i < q->nr; i++) {
struct diff_filepair *p = q->queue[i];
+ struct sha1_array *commits;
if (!S_ISGITLINK(p->two->mode))
continue;
- if (submodule_needs_pushing(p->two->path, p->two->oid.hash))
- string_list_insert(needs_pushing, p->two->path);
+ commits = submodule_commits(submodules, p->two->path);
+ sha1_array_append(commits, p->two->oid.hash);
}
}
diff_tree_combined_merge(commit, 1, &rev);
}
-int find_unpushed_submodules(unsigned char new_sha1[20],
+static void free_submodules_sha1s(struct string_list *submodules)
+{
+ struct string_list_item *item;
+ for_each_string_list_item(item, submodules)
+ sha1_array_clear((struct sha1_array *) item->util);
+ string_list_clear(submodules, 1);
+}
+
+int find_unpushed_submodules(struct sha1_array *commits,
const char *remotes_name, struct string_list *needs_pushing)
{
struct rev_info rev;
struct commit *commit;
- const char *argv[] = {NULL, NULL, "--not", "NULL", NULL};
- int argc = ARRAY_SIZE(argv) - 1;
- char *sha1_copy;
-
- struct strbuf remotes_arg = STRBUF_INIT;
+ struct string_list submodules = STRING_LIST_INIT_DUP;
+ struct string_list_item *submodule;
+ struct argv_array argv = ARGV_ARRAY_INIT;
- strbuf_addf(&remotes_arg, "--remotes=%s", remotes_name);
init_revisions(&rev, NULL);
- sha1_copy = xstrdup(sha1_to_hex(new_sha1));
- argv[1] = sha1_copy;
- argv[3] = remotes_arg.buf;
- setup_revisions(argc, argv, &rev, NULL);
+
+ /* argv.argv[0] will be ignored by setup_revisions */
+ argv_array_push(&argv, "find_unpushed_submodules");
+ sha1_array_for_each_unique(commits, append_sha1_to_argv, &argv);
+ argv_array_push(&argv, "--not");
+ argv_array_pushf(&argv, "--remotes=%s", remotes_name);
+
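+	/*
+	 * For illustration, with remotes_name "origin" and two commits to
+	 * check, this builds something like
+	 *
+	 *   find_unpushed_submodules <sha1> <sha1> --not --remotes=origin
+	 *
+	 * where the first word is only the ignored argv[0] placeholder.
+	 */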
+ setup_revisions(argv.argc, argv.argv, &rev, NULL);
if (prepare_revision_walk(&rev))
die("revision walk setup failed");
while ((commit = get_revision(&rev)) != NULL)
- find_unpushed_submodule_commits(commit, needs_pushing);
+ find_unpushed_submodule_commits(commit, &submodules);
reset_revision_walk();
- free(sha1_copy);
- strbuf_release(&remotes_arg);
+ argv_array_clear(&argv);
+
+ for_each_string_list_item(submodule, &submodules) {
+ struct sha1_array *commits = (struct sha1_array *) submodule->util;
+
+ if (submodule_needs_pushing(submodule->string, commits))
+ string_list_insert(needs_pushing, submodule->string);
+ }
+ free_submodules_sha1s(&submodules);
return needs_pushing->nr;
}
-static int push_submodule(const char *path)
+static int push_submodule(const char *path, int dry_run)
{
if (add_submodule_odb(path))
return 1;
if (for_each_remote_ref_submodule(path, has_remote, NULL) > 0) {
struct child_process cp = CHILD_PROCESS_INIT;
- const char *argv[] = {"push", NULL};
+ argv_array_push(&cp.args, "push");
+ if (dry_run)
+ argv_array_push(&cp.args, "--dry-run");
- cp.argv = argv;
prepare_submodule_repo_env(&cp.env_array);
cp.git_cmd = 1;
cp.no_stdin = 1;
return 1;
}
-int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name)
+int push_unpushed_submodules(struct sha1_array *commits,
+ const char *remotes_name,
+ int dry_run)
{
int i, ret = 1;
struct string_list needs_pushing = STRING_LIST_INIT_DUP;
- if (!find_unpushed_submodules(new_sha1, remotes_name, &needs_pushing))
+ if (!find_unpushed_submodules(commits, remotes_name, &needs_pushing))
return 1;
for (i = 0; i < needs_pushing.nr; i++) {
const char *path = needs_pushing.items[i].string;
fprintf(stderr, "Pushing submodule '%s'\n", path);
- if (!push_submodule(path)) {
+ if (!push_submodule(path, dry_run)) {
fprintf(stderr, "Unable to push submodule '%s'\n", path);
ret = 0;
}
struct diff_options;
struct argv_array;
+struct sha1_array;
enum {
RECURSE_SUBMODULES_CHECK = -4,
int ok_to_remove_submodule(const char *path);
int merge_submodule(unsigned char result[20], const char *path, const unsigned char base[20],
const unsigned char a[20], const unsigned char b[20], int search);
-int find_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name,
+int find_unpushed_submodules(struct sha1_array *commits, const char *remotes_name,
struct string_list *needs_pushing);
-int push_unpushed_submodules(unsigned char new_sha1[20], const char *remotes_name);
+extern int push_unpushed_submodules(struct sha1_array *commits,
+ const char *remotes_name,
+ int dry_run);
void connect_work_tree_and_git_dir(const char *work_tree, const char *git_dir);
int parallel_submodules(void);
int cmd_main(int ac, const char **av)
{
setup_git_directory();
- hold_locked_index(&index_lock, 1);
+ hold_locked_index(&index_lock, LOCK_DIE_ON_ERROR);
if (read_cache() < 0)
die("unable to read index file");
active_cache_tree = NULL;
</Files>
RewriteEngine on
+RewriteRule ^/dumb-redir/(.*)$ /dumb/$1 [R=301]
RewriteRule ^/smart-redir-perm/(.*)$ /smart/$1 [R=301]
RewriteRule ^/smart-redir-temp/(.*)$ /smart/$1 [R=302]
RewriteRule ^/smart-redir-auth/(.*)$ /auth/smart/$1 [R=301]
RewriteRule ^/loop-redir/x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-(.*) /$1 [R=302]
RewriteRule ^/loop-redir/(.*)$ /loop-redir/x-$1 [R=302]
+# The first rule issues a client-side redirect to something
+# that _doesn't_ look like a git repo. The second rule is a
+# server-side rewrite, so that it turns out the odd-looking
+# thing _is_ a git repo. The "[PT]" tells Apache to match
+# the usual ScriptAlias rules for /smart.
+RewriteRule ^/insane-redir/(.*)$ /intern-redir/$1/foo [R=301]
+RewriteRule ^/intern-redir/(.*)/foo$ /smart/$1 [PT]
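+# For illustration: a request for /insane-redir/repo.git/info/refs is
+# answered with a 301 to /intern-redir/repo.git/info/refs/foo, and when
+# the client follows it, the second rule passes the request through to
+# /smart/repo.git/info/refs without a further redirect.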
+
+# Serve info/refs internally without redirecting, but
+# issue a redirect for any object requests.
+RewriteRule ^/redir-objects/(.*/info/refs)$ /dumb/$1 [PT]
+RewriteRule ^/redir-objects/(.*/objects/.*)$ /dumb/$1 [R=301]
+
# Apache 2.2 does not understand <RequireAll>, so we use RewriteCond.
# And as RewriteCond does not allow testing for non-matches, we match
# the desired case first (one has abra, two has cadabra), and let it
cd repo &&
git init &&
- echo "git-stderr.log" >.gitignore &&
echo "*.r filter=protocol" >.gitattributes &&
git add . &&
- git commit . -m "test commit 1" &&
+ git commit -m "test commit 1" &&
git branch empty-branch &&
cp "$TEST_ROOT/test.o" test.r &&
EOF
test_cmp_count expected.log rot13-filter.log &&
- filter_git commit . -m "test commit 2" &&
+ filter_git commit -m "test commit 2" &&
cat >expected.log <<-EOF &&
START
init handshake complete
'
test_expect_success 'error on modifying repo config without repo' '
- mkdir no-repo &&
- (
- GIT_CEILING_DIRECTORIES=$(pwd) &&
- export GIT_CEILING_DIRECTORIES &&
- cd no-repo &&
- test_must_fail git config a.b c 2>err &&
- grep "not in a git directory" err
- )
+ nongit test_must_fail git config a.b c 2>err &&
+ grep "not in a git directory" err
'
cmdline_config="'foo.bar=from-cmdline'"
rm -rf bare1
'
+test_expect_success 'broken main worktree still at the top' '
+ git init broken-main &&
+ (
+ cd broken-main &&
+ test_commit new &&
+ git worktree add linked &&
+ cat >expected <<-EOF &&
+ worktree $(pwd)
+ HEAD $_z40
+
+ EOF
+ cd linked &&
+ echo "worktree $(pwd)" >expected &&
+ echo "ref: .broken" >../.git/HEAD &&
+ git worktree list --porcelain | head -n 3 >actual &&
+ test_cmp ../expected actual &&
+ git worktree list | head -n 1 >actual.2 &&
+ grep -F "(error)" actual.2
+ )
+'
+
+test_expect_success 'linked worktrees are sorted' '
+ mkdir sorted &&
+ git init sorted/main &&
+ (
+ cd sorted/main &&
+ test_tick &&
+ test_commit new &&
+ git worktree add ../first &&
+ git worktree add ../second &&
+ git worktree list --porcelain | grep ^worktree >actual
+ ) &&
+ cat >expected <<-EOF &&
+ worktree $(pwd)/sorted/main
+ worktree $(pwd)/sorted/first
+ worktree $(pwd)/sorted/second
+ EOF
+ test_cmp expected sorted/main/actual
+'
+
test_done
test_must_fail test -d d
'
-test_expect_failure 'merge-recursive simple w/submodule' '
+test_expect_success 'merge-recursive simple w/submodule' '
git checkout submod &&
git merge remove
'
-test_expect_failure 'merge-recursive simple w/submodule result' '
+test_expect_success 'merge-recursive simple w/submodule result' '
git ls-files -s >actual &&
(
awk "{print \$NF}" <tmp >actual &&
test_cmp expect actual
'
+test_expect_success 'git branch --ignore-case --list -v pattern shows branch summaries' '
+ git branch --list --ignore-case -v BRANCH* >tmp &&
+ awk "{print \$NF}" <tmp >actual &&
+ test_cmp expect actual
+'
test_expect_success 'git branch -v pattern does not show branch summaries' '
test_must_fail git branch -v branch*
test_cmp expect actual
'
+test_expect_success 'sort branches, ignore case' '
+ (
+ git init sort-icase &&
+ cd sort-icase &&
+ test_commit initial &&
+ git branch branch-one &&
+ git branch BRANCH-two &&
+ git branch --list | awk "{print \$NF}" >actual &&
+ cat >expected <<-\EOF &&
+ BRANCH-two
+ branch-one
+ master
+ EOF
+ test_cmp expected actual &&
+ git branch --list -i | awk "{print \$NF}" >actual &&
+ cat >expected <<-\EOF &&
+ branch-one
+ BRANCH-two
+ master
+ EOF
+ test_cmp expected actual
+ )
+'
+
test_done
testrebase "" .git/rebase-apply
testrebase " --merge" .git/rebase-merge
+test_expect_success 'rebase --quit' '
+ cd "$work_dir" &&
+ # Clean up the state from the previous one
+ git reset --hard pre-rebase &&
+ test_must_fail git rebase master &&
+ test_path_is_dir .git/rebase-apply &&
+ head_before=$(git rev-parse HEAD) &&
+ git rebase --quit &&
+ test $(git rev-parse HEAD) = $head_before &&
+ test ! -d .git/rebase-apply
+'
+
+test_expect_success 'rebase --merge --quit' '
+ cd "$work_dir" &&
+ # Clean up the state from the previous one
+ git reset --hard pre-rebase &&
+ test_must_fail git rebase --merge master &&
+ test_path_is_dir .git/rebase-merge &&
+ head_before=$(git rev-parse HEAD) &&
+ git rebase --quit &&
+ test $(git rev-parse HEAD) = $head_before &&
+ test ! -d .git/rebase-merge
+'
+
test_done
git rebase -i "$1"
}
-KNOWN_FAILURE_NOFF_MERGE_DOESNT_CREATE_EMPTY_SUBMODULE_DIR=1
-# The real reason "replace directory with submodule" fails is because a
-# directory "sub1" exists, but we reuse the suppression added for merge here
test_submodule_switch "git_rebase_interactive"
test_done
test_cmp expect actual
'
+test_expect_success 'cherry-pick works with dirty renamed file' '
+ test_commit to-rename &&
+ git checkout -b unrelated &&
+ test_commit unrelated &&
+ git checkout @{-1} &&
+ git mv to-rename.t renamed &&
+ test_tick &&
+ git commit -m renamed &&
+ echo modified >renamed &&
+ git cherry-pick refs/heads/unrelated
+'
+
test_done
git diff-index --exit-code HEAD
'
+test_expect_success '--abort does not unsafely change HEAD' '
+ pristine_detach initial &&
+ test_must_fail git cherry-pick picked anotherpick &&
+ git reset --hard base &&
+ test_must_fail git cherry-pick picked anotherpick &&
+ git cherry-pick --abort 2>actual &&
+ test_i18ngrep "You seem to have moved HEAD" actual &&
+ test_cmp_rev base HEAD
+'
+
test_expect_success 'cherry-pick --abort to cancel multiple revert' '
pristine_detach anotherpick &&
test_expect_code 1 git revert base..picked &&
mesg_broken_footer="$mesg_no_footer
-The signed-off-by string should begin with the words Signed-off-by followed
-by a colon and space, and then the signers name and email address. e.g.
-Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
+This is not recognized as a footer because Myfooter is not a recognized token.
+Myfooter: A.U. Thor <author@example.com>"
mesg_with_footer_sob="$mesg_with_footer
Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
test_cmp expect actual
'
+test_expect_success 'cherry-pick -s recognizes trailer config' '
+ pristine_detach initial &&
+ git -c "trailer.Myfooter.ifexists=add" cherry-pick -s mesg-broken-footer &&
+ cat <<-EOF >expect &&
+ $mesg_broken_footer
+ Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>
+ EOF
+ git log -1 --pretty=format:%B >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'cherry-pick -x inserts blank line when conforming footer not found' '
pristine_detach initial &&
sha1=$(git rev-parse mesg-no-footer^0) &&
'
test_expect_success '"rm" command printed' '
- echo frotz > test-file &&
+ echo frotz >test-file &&
git add test-file &&
git commit -m "add file for rm test" &&
- git rm test-file > rm-output &&
+ git rm test-file >rm-output &&
test $(grep "^rm " rm-output | wc -l) = 1 &&
rm -f test-file rm-output &&
git commit -m "remove file from rm test"
'
test_expect_success '"rm" command suppressed with --quiet' '
- echo frotz > test-file &&
+ echo frotz >test-file &&
git add test-file &&
git commit -m "add file for rm --quiet test" &&
- git rm --quiet test-file > rm-output &&
- test $(wc -l < rm-output) = 0 &&
+ git rm --quiet test-file >rm-output &&
+ test_must_be_empty rm-output &&
rm -f test-file rm-output &&
git commit -m "remove file from rm --quiet test"
'
mkdir repo &&
(cd repo &&
git init &&
- echo something > somefile &&
+ echo something >somefile &&
git add somefile &&
git commit -m "add a file" &&
(cd .. &&
git commit -m "add submodule" &&
git rm submod &&
test ! -e submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual &&
test_must_fail git config -f .gitmodules submodule.sub.url &&
test_must_fail git config -f .gitmodules submodule.sub.path
git submodule update &&
rm -rf submod &&
git rm submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual &&
test_must_fail git config -f .gitmodules submodule.sub.url &&
test_must_fail git config -f .gitmodules submodule.sub.path
git submodule update &&
git rm submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual &&
test_must_fail git config -f .gitmodules submodule.sub.url &&
test_must_fail git config -f .gitmodules submodule.sub.path
git submodule update &&
git rm submod/ &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
test_expect_success 'rm of a populated submodule with different HEAD fails unless forced' '
git reset --hard &&
git submodule update &&
- (cd submod &&
- git checkout HEAD^
- ) &&
+ git -C submod checkout HEAD^ &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.modified actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual &&
test_must_fail git config -f .gitmodules submodule.sub.url &&
test_must_fail git config -f .gitmodules submodule.sub.path
test_expect_success 'rm of a populated submodule with modifications fails unless forced' '
git reset --hard &&
git submodule update &&
- (cd submod &&
- echo X >empty
- ) &&
+ echo X >submod/empty &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.modified actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
test_expect_success 'rm of a populated submodule with untracked files fails unless forced' '
git reset --hard &&
git submodule update &&
- (cd submod &&
- echo X >untracked
- ) &&
+ echo X >submod/untracked &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.modified actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
git add nitfol &&
git commit -m "added nitfol 2" &&
git checkout -b conflict1 master &&
- (cd submod &&
- git fetch &&
- git checkout branch1
- ) &&
+ git -C submod fetch &&
+ git -C submod checkout branch1 &&
git add submod &&
git commit -m "submod 1" &&
git checkout -b conflict2 master &&
- (cd submod &&
- git checkout branch2
- ) &&
+ git -C submod checkout branch2 &&
git add submod &&
git commit -m "submod 2"
'
test_must_fail git merge conflict2 &&
git rm submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
git checkout conflict1 &&
git reset --hard &&
git submodule update &&
- (cd submod &&
- git checkout HEAD^
- ) &&
+ git -C submod checkout HEAD^ &&
test_must_fail git merge conflict2 &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.conflict actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual &&
test_must_fail git config -f .gitmodules submodule.sub.url &&
test_must_fail git config -f .gitmodules submodule.sub.path
git checkout conflict1 &&
git reset --hard &&
git submodule update &&
- (cd submod &&
- echo X >empty
- ) &&
+ echo X >submod/empty &&
test_must_fail git merge conflict2 &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.conflict actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual &&
test_must_fail git config -f .gitmodules submodule.sub.url &&
test_must_fail git config -f .gitmodules submodule.sub.path
git checkout conflict1 &&
git reset --hard &&
git submodule update &&
- (cd submod &&
- echo X >untracked
- ) &&
+ echo X >submod/untracked &&
test_must_fail git merge conflict2 &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.conflict actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
test_must_fail git rm submod &&
test -d submod &&
test -d submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.conflict actual &&
test_must_fail git rm -f submod &&
test -d submod &&
test -d submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.conflict actual &&
git merge --abort &&
rm -rf submod
test_must_fail git merge conflict2 &&
git rm submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
test_must_fail git rm submod &&
test -d submod &&
test -d submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
! test -s actual &&
test_must_fail git rm -f submod &&
test -d submod &&
test -d submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
! test -s actual &&
rm -rf submod
'
test_expect_success 'rm recursively removes work tree of unmodified submodules' '
git rm submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
test_expect_success 'rm of a populated nested submodule with different nested HEAD fails unless forced' '
git reset --hard &&
git submodule update --recursive &&
- (cd submod/subsubmod &&
- git checkout HEAD^
- ) &&
+ git -C submod/subsubmod checkout HEAD^ &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.modified actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
test_expect_success 'rm of a populated nested submodule with nested modifications fails unless forced' '
git reset --hard &&
git submodule update --recursive &&
- (cd submod/subsubmod &&
- echo X >empty
- ) &&
+ echo X >submod/subsubmod/empty &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.modified actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
test_expect_success 'rm of a populated nested submodule with nested untracked files fails unless forced' '
git reset --hard &&
git submodule update --recursive &&
- (cd submod/subsubmod &&
- echo X >untracked
- ) &&
+ echo X >submod/subsubmod/untracked &&
test_must_fail git rm submod &&
test -d submod &&
test -f submod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect.modified actual &&
git rm -f submod &&
test ! -d submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
test_cmp expect actual
'
test_must_fail git rm submod &&
test -d submod &&
test -d submod/subsubmod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
! test -s actual &&
test_must_fail git rm -f submod &&
test -d submod &&
test -d submod/subsubmod/.git &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
! test -s actual &&
rm -rf submod
'
git commit -m "submodule removal" submod &&
git checkout HEAD^ &&
git submodule update &&
- git checkout -q HEAD^ 2>actual &&
+ git checkout -q HEAD^ &&
git checkout -q master 2>actual &&
test_i18ngrep "^warning: unable to rmdir submod:" actual &&
git status -s submod >actual &&
echo "?? submod/" >expected &&
test_cmp expected actual &&
rm -rf submod &&
- git status -s -uno --ignore-submodules=none > actual &&
+ git status -s -uno --ignore-submodules=none >actual &&
! test -s actual
'
test_cmp expect actual
'
+test_expect_success 'stash is not confused by partial renames' '
+ mv file renamed &&
+ git add renamed &&
+ git stash &&
+ git stash apply &&
+ test_path_is_file renamed &&
+ test_path_is_missing file
+'
+
test_done
diff --dirstat master~1 master~2
diff --dirstat initial rearrange
diff --dirstat-by-file initial rearrange
+# No-index --abbrev and --no-abbrev
+diff --raw initial
+diff --raw --abbrev=4 initial
+diff --raw --no-abbrev initial
+diff --no-index --raw dir2 dir
+diff --no-index --raw --abbrev=4 dir2 dir
+diff --no-index --raw --no-abbrev dir2 dir
EOF
test_expect_success 'log -S requires an argument' '
--- /dev/null
+$ git diff --no-index --raw --abbrev=4 dir2 dir
+:000000 100644 0000... 0000... A dir/sub
+$
--- /dev/null
+$ git diff --no-index --raw --no-abbrev dir2 dir
+:000000 100644 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 A dir/sub
+$
--- /dev/null
+$ git diff --no-index --raw dir2 dir
+:000000 100644 0000000... 0000000... A dir/sub
+$
--- /dev/null
+$ git diff --raw --abbrev=4 initial
+:100644 100644 35d2... 9929... M dir/sub
+:100644 100644 01e7... 10a8... M file0
+:000000 100644 0000... b1e6... A file1
+:100644 000000 01e7... 0000... D file2
+$
--- /dev/null
+$ git diff --raw --no-abbrev initial
+:100644 100644 35d242ba79ae89ac695e26b3d4c27a8e6f028f9e 992913c5aa0a5476d10c49ed0f21fc0c6d1aedf3 M dir/sub
+:100644 100644 01e79c32a8c99c557f0757da7cb6d65b3414466d 10a8a9f3657f91a156b9f0184ed79a20adef9f7f M file0
+:000000 100644 0000000000000000000000000000000000000000 b1e67221afe8461efd244b487afca22d46b95eb8 A file1
+:100644 000000 01e79c32a8c99c557f0757da7cb6d65b3414466d 0000000000000000000000000000000000000000 D file2
+$
--- /dev/null
+$ git diff --raw initial
+:100644 100644 35d242b... 992913c... M dir/sub
+:100644 100644 01e79c3... 10a8a9f... M file0
+:000000 100644 0000000... b1e6722... A file1
+:100644 000000 01e79c3... 0000000... D file2
+$
4:Subject: [PATCH] subject
8:
10:Signed-off-by: example happens to be wrapped here.
-11:
-12:Signed-off-by: C O Mitter <committer@example.com>
+11:Signed-off-by: C O Mitter <committer@example.com>
EOF
test_cmp expected actual
'
test_cmp expected actual
'
-test_expect_success 'signoff: detect garbage in non-conforming footer' '
+test_expect_success 'signoff: tolerate garbage in conforming footer' '
append_signoff <<\EOF >actual &&
subject
8:
10:
13:Signed-off-by: C O Mitter <committer@example.com>
-14:
-15:Signed-off-by: C O Mitter <committer@example.com>
+EOF
+ test_cmp expected actual
+'
+
+test_expect_success 'signoff: respect trailer config' '
+ append_signoff <<\EOF >actual &&
+subject
+
+Myfooter: x
+Some Trash
+EOF
+ cat >expected <<\EOF &&
+4:Subject: [PATCH] subject
+8:
+11:
+12:Signed-off-by: C O Mitter <committer@example.com>
+EOF
+ test_cmp expected actual &&
+
+ test_config trailer.Myfooter.ifexists add &&
+ append_signoff <<\EOF >actual &&
+subject
+
+Myfooter: x
+Some Trash
+EOF
+ cat >expected <<\EOF &&
+4:Subject: [PATCH] subject
+8:
+11:Signed-off-by: C O Mitter <committer@example.com>
EOF
test_cmp expected actual
'
test_cmp expected actual1
'
+cat >trailers <<EOF
+Signed-off-by: A U Thor <author@example.com>
+Acked-by: A U Thor <author@example.com>
+[ v2 updated patch description ]
+Signed-off-by: A U Thor <author@example.com>
+EOF
+
+test_expect_success 'pretty format %(trailers) shows trailers' '
+ echo "Some contents" >trailerfile &&
+ git add trailerfile &&
+ git commit -F - <<-EOF &&
+ trailers: this commit message has trailers
+
+ This commit is a test commit with trailers at the end. We parse this
+ message and display the trailers using %bT
+
+ $(cat trailers)
+ EOF
+ git log --no-walk --pretty="%(trailers)" >actual &&
+ cat >expect <<-EOF &&
+ $(cat trailers)
+
+ EOF
+ test_cmp expect actual
+'
+
test_done
'
}
-# run "$@" inside a non-git directory
-nongit () {
- test -d non-repo ||
- mkdir non-repo ||
- return 1
-
- (
- GIT_CEILING_DIRECTORIES=$(pwd) &&
- export GIT_CEILING_DIRECTORIES &&
- cd non-repo &&
- "$@"
- )
-}
-
test_expect_success \
'populate workdir' \
'mkdir a &&
git verify-pack test-11-*.pack
'
+test_expect_success 'set up pack for non-repo tests' '
+ # make sure we have a pack with no matching index file
+ cp test-1-*.pack foo.pack
+'
+
+test_expect_success 'index-pack --stdin complains of non-repo' '
+ nongit test_must_fail git index-pack --stdin <foo.pack &&
+ test_path_is_missing non-repo/.git
+'
+
+test_expect_success 'index-pack <pack> works in non-repo' '
+ nongit git index-pack ../foo.pack &&
+ test_path_is_file foo.idx
+'
+
#
# WARNING!
#
test new = "$(git show HEAD:file2)"
'
+test_expect_success '--rebase fast forward' '
+ git reset --hard before-rebase &&
+ git checkout -b ff &&
+ echo another modification >file &&
+ git commit -m third file &&
+
+ git checkout to-rebase &&
+ git pull --rebase . ff &&
+ test "$(git rev-parse HEAD)" = "$(git rev-parse ff)" &&
+
+ # The above only validates the result. Did we actually bypass rebase?
+ git reflog -1 >reflog.actual &&
+ sed "s/^[0-9a-f][0-9a-f]*/OBJID/" reflog.actual >reflog.fuzzy &&
+ echo "OBJID HEAD@{0}: pull --rebase . ff: Fast-forward" >reflog.expected &&
+ test_cmp reflog.expected reflog.fuzzy
+'
+
test_expect_success '--rebase with conflicts shows advice' '
test_when_finished "git rebase --abort; git checkout -f to-rebase" &&
git checkout -b seq &&
cd submodule.git &&
git rev-parse master >../actual
) &&
+ test_when_finished git -C work reset --hard master^ &&
test_cmp expected actual
'
+test_expect_success 'push --dry-run does not recursively update submodules' '
+ (
+ cd work/gar/bage &&
+ git checkout master &&
+ git rev-parse master >../../../expected_submodule &&
+ > junk9 &&
+ git add junk9 &&
+ git commit -m "Ninth junk" &&
+
+ # Go up to 'work' directory
+ cd ../.. &&
+ git checkout master &&
+ git rev-parse master >../expected_pub &&
+ git add gar/bage &&
+ git commit -m "Ninth commit for gar/bage" &&
+ git push --dry-run --recurse-submodules=on-demand ../pub.git master
+ ) &&
+ git -C submodule.git rev-parse master >actual_submodule &&
+ git -C pub.git rev-parse master >actual_pub &&
+ test_cmp expected_pub actual_pub &&
+ test_cmp expected_submodule actual_submodule
+'
+
test_done
test_cmp expect actual
'
+test_expect_success 'push to repo path with path separator (colon)' '
+ # The interesting failure case here is when the
+ # receiving end cannot access its original object directory,
+ # so make it likely for us to generate a delta by having
+ # a non-trivial file with multiple versions.
+
+ test-genrandom foo 4096 >file.bin &&
+ git add file.bin &&
+ git commit -m bin &&
+
+ if test_have_prereq MINGW
+ then
+ pathsep=";"
+ else
+ pathsep=":"
+ fi &&
+ git clone --bare . "xxx${pathsep}yyy.git" &&
+
+ echo change >>file.bin &&
+ git commit -am change &&
+ # Note that we have to use the full path here, or it gets confused
+ # with the ssh host:path syntax.
+ git push "$(pwd)/xxx${pathsep}yyy.git" HEAD
+'
+
test_done
test_must_fail git remote-http http::/example.com/repo.git
'
+test_expect_success 'redirects can be forbidden/allowed' '
+ test_must_fail git -c http.followRedirects=false \
+ clone $HTTPD_URL/dumb-redir/repo.git dumb-redir &&
+ git -c http.followRedirects=true \
+ clone $HTTPD_URL/dumb-redir/repo.git dumb-redir 2>stderr
+'
+
+test_expect_success 'redirects are reported to stderr' '
+ # just look for a snippet of the redirected-to URL
+ test_i18ngrep /dumb/ stderr
+'
+
+test_expect_success 'non-initial redirects can be forbidden' '
+ test_must_fail git -c http.followRedirects=initial \
+ clone $HTTPD_URL/redir-objects/repo.git redir-objects &&
+ git -c http.followRedirects=true \
+ clone $HTTPD_URL/redir-objects/repo.git redir-objects
+'
+
+test_expect_success 'http.followRedirects defaults to "initial"' '
+ test_must_fail git clone $HTTPD_URL/redir-objects/repo.git default
+'
+
+# The goal is for a clone of the "evil" repository, which has no objects
+# itself, to cause the client to fetch objects from the "victim" repository.
+test_expect_success 'set up evil alternates scheme' '
+ victim=$HTTPD_DOCUMENT_ROOT_PATH/victim.git &&
+ git init --bare "$victim" &&
+ git -C "$victim" --work-tree=. commit --allow-empty -m secret &&
+ git -C "$victim" repack -ad &&
+ git -C "$victim" update-server-info &&
+ sha1=$(git -C "$victim" rev-parse HEAD) &&
+
+ evil=$HTTPD_DOCUMENT_ROOT_PATH/evil.git &&
+ git init --bare "$evil" &&
+ # do this by hand to avoid object existence check
+ printf "%s\\t%s\\n" $sha1 refs/heads/master >"$evil/info/refs"
+'
+
+# Here we'll just redirect via HTTP. In a real-world attack these would be on
+# different servers, but we should reject it either way.
+test_expect_success 'http-alternates is a non-initial redirect' '
+ echo "$HTTPD_URL/dumb/victim.git/objects" \
+ >"$evil/objects/info/http-alternates" &&
+ test_must_fail git -c http.followRedirects=initial \
+ clone $HTTPD_URL/dumb/evil.git evil-initial &&
+ git -c http.followRedirects=true \
+ clone $HTTPD_URL/dumb/evil.git evil-initial
+'
+
+# Curl supports a lot of protocols that we'd prefer not to allow
+# http-alternates to use, but it's hard to test whether curl has
+# accessed, say, the SMTP protocol, because we are not running an SMTP server.
+# But we can check that it does not allow access to file://, which would
+# otherwise allow this clone to complete.
+test_expect_success 'http-alternates cannot point at funny protocols' '
+ echo "file://$victim/objects" >"$evil/objects/info/http-alternates" &&
+ test_must_fail git -c http.followRedirects=true \
+ clone "$HTTPD_URL/dumb/evil.git" evil-file
+'
+
stop_httpd
test_done
git clone $HTTPD_URL/smart-redir-limited/repo.git repo-redir-limited
'
+test_expect_success 're-rooting dies on insane schemes' '
+ test_must_fail git clone $HTTPD_URL/insane-redir/repo.git insane
+'
+
test_expect_success 'clone from password-protected repository' '
echo two >expect &&
set_askpass user@host pass@host &&
EOF
'
+# set variables outside test to avoid quote insanity; the \057 is '/',
+# which doesn't need quoting, but just confirms that de-quoting
+# is working.
+quoted='"one.git\057objects"'
+unquoted='two.git/objects'
+test_expect_success 'mix of quoted and unquoted alternates' '
+ check_obj "$quoted:$unquoted" <<-EOF
+ $one blob
+ $two blob
+	EOF
+'
+
+test_expect_success !MINGW 'broken quoting falls back to interpreting raw' '
+ mv one.git \"one.git &&
+ check_obj \"one.git/objects <<-EOF
+ $one blob
+ EOF
+'
+
test_done
test_expect_success 'curl redirects respect whitelist' '
test_must_fail env GIT_ALLOW_PROTOCOL=http:https \
+ GIT_SMART_HTTP=0 \
git clone "$HTTPD_URL/ftp-redir/repo.git" 2>stderr &&
{
test_i18ngrep "ftp.*disabled" stderr ||
test_cmp expect actual
'
+test_expect_success 'symbolic final^1^@ = final^1^1 final^1^2' '
+ git rev-parse --symbolic final^1^1 final^1^2 >expect &&
+ git rev-parse --symbolic final^1^@ >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'final^1^! = final^1 ^final^1^1 ^final^1^2' '
git rev-parse final^1 ^final^1^1 ^final^1^2 >expect &&
git rev-parse final^1^! >actual &&
test_cmp expect actual
'
+test_expect_success 'symbolic final^1^! = final^1 ^final^1^1 ^final^1^2' '
+ git rev-parse --symbolic final^1 ^final^1^1 ^final^1^2 >expect &&
+ git rev-parse --symbolic final^1^! >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'large graft octopus' '
test_cmp_rev_output b31 "git rev-parse --verify b1^30"
'
test_cmp expect actual
'
+test_expect_success 'symbolic merge^-1 = merge^1..merge' '
+ git rev-parse --symbolic merge^1..merge >expect &&
+ git rev-parse --symbolic merge^-1 >actual &&
+ test_cmp expect actual
+'
+
test_expect_success 'rev-parse merge^-0 (invalid parent)' '
test_must_fail git rev-parse merge^-0
'
test_cmp expect actual
'
+cat >trailers <<EOF
+Reviewed-by: A U Thor <author@example.com>
+Signed-off-by: A U Thor <author@example.com>
+EOF
+
+test_expect_success 'basic atom: head contents:trailers' '
+ echo "Some contents" > two &&
+ git add two &&
+ git commit -F - <<-EOF &&
+ trailers: this commit message has trailers
+
+ Some message contents
+
+ $(cat trailers)
+ EOF
+ git for-each-ref --format="%(contents:trailers)" refs/heads/master >actual &&
+ sanitize_pgp <actual >actual.clean &&
+ # git for-each-ref ends with a blank line
+ cat >expect <<-EOF &&
+ $(cat trailers)
+
+ EOF
+ test_cmp expect actual.clean
+'
+
test_done
test $(git tag | wc -l) -eq 0
'
+test_expect_success 'sort tags, ignore case' '
+ (
+ git init sort &&
+ cd sort &&
+ test_commit initial &&
+ git tag tag-one &&
+ git tag TAG-two &&
+ git tag -l >actual &&
+ cat >expected <<-\EOF &&
+ TAG-two
+ initial
+ tag-one
+ EOF
+ test_cmp expected actual &&
+ git tag -l -i >actual &&
+ cat >expected <<-\EOF &&
+ initial
+ tag-one
+ TAG-two
+ EOF
+ test_cmp expected actual
+ )
+'
+
test_expect_success 'looking for a tag in an empty tree should fail' \
'! (tag_exists mytag)'
test_expect_success 'listing a tag using a matching pattern should succeed' \
'git tag -l mytag'
+test_expect_success 'listing a tag with --ignore-case' \
+ 'test $(git tag -l --ignore-case MYTAG) = mytag'
+
test_expect_success \
'listing a tag using a matching pattern should output that tag' \
'test $(git tag -l mytag) = mytag'
)
'
+test_expect_success 'preparing second superproject with a nested submodule plus partial clone' '
+ test_create_repo supersuper &&
+ (
+ cd supersuper &&
+ echo "I am super super." >file &&
+ git add file &&
+		git commit -m B-super-super-initial &&
+ git submodule add "file://$base_dir/super" subwithsub &&
+ git commit -m B-super-super-added &&
+ git submodule update --init --recursive &&
+ git repack -ad
+ ) &&
+ git clone supersuper supersuper2 &&
+ (
+ cd supersuper2 &&
+ git submodule update --init
+ )
+'
+
+# At this point there are four root-level repositories: A, B, super and super2
+
+test_expect_success 'nested submodule alternate in works and is actually used' '
+ test_when_finished "rm -rf supersuper-clone" &&
+ git clone --recursive --reference supersuper supersuper supersuper-clone &&
+ (
+ cd supersuper-clone &&
+ # test superproject has alternates setup correctly
+ test_alternate_is_used .git/objects/info/alternates . &&
+ # immediate submodule has alternate:
+ test_alternate_is_used .git/modules/subwithsub/objects/info/alternates subwithsub &&
+ # nested submodule also has alternate:
+ test_alternate_is_used .git/modules/subwithsub/modules/sub/objects/info/alternates subwithsub/sub
+ )
+'
+
+check_that_two_of_three_alternates_are_used() {
+ test_alternate_is_used .git/objects/info/alternates . &&
+ # immediate submodule has alternate:
+ test_alternate_is_used .git/modules/subwithsub/objects/info/alternates subwithsub &&
+ # but nested submodule has no alternate:
+ test_must_fail test_alternate_is_used .git/modules/subwithsub/modules/sub/objects/info/alternates subwithsub/sub
+}
+
+
+test_expect_success 'missing nested submodule alternate fails clone and submodule update' '
+ test_when_finished "rm -rf supersuper-clone" &&
+ test_must_fail git clone --recursive --reference supersuper2 supersuper2 supersuper-clone &&
+ (
+ cd supersuper-clone &&
+ check_that_two_of_three_alternates_are_used &&
+ # update of the submodule fails
+ test_must_fail git submodule update --init --recursive
+ )
+'
+
+test_expect_success 'missing nested submodule alternate in --reference-if-able mode' '
+ test_when_finished "rm -rf supersuper-clone" &&
+ git clone --recursive --reference-if-able supersuper2 supersuper2 supersuper-clone &&
+ (
+ cd supersuper-clone &&
+ check_that_two_of_three_alternates_are_used &&
+ # update of the submodule succeeds
+ git submodule update --init --recursive
+ )
+'
+
test_done
git diff --exit-code
'
+test_expect_success 'allow-empty --only ignores staged contents' '
+ echo changed-again >file &&
+ git add file &&
+ git commit --allow-empty --only -m "empty" &&
+ git cat-file blob HEAD:file >file.actual &&
+ test_cmp file.expect file.actual &&
+ git diff --exit-code
+'
+
test_expect_success 'set up editor' '
cat >editor <<-\EOF &&
#!/bin/sh
test_cmp expected actual
'
+test_expect_success 'signoff respects trailer config' '
+
+ echo 5 >positive &&
+ git add positive &&
+ git commit -s -m "subject
+
+non-trailer line
+Myfooter: x" &&
+ git cat-file commit HEAD | sed -e "1,/^\$/d" > actual &&
+ (
+ echo subject
+ echo
+ echo non-trailer line
+ echo Myfooter: x
+ echo
+ echo "Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
+ ) >expected &&
+ test_cmp expected actual &&
+
+ echo 6 >positive &&
+ git add positive &&
+ git -c "trailer.Myfooter.ifexists=add" commit -s -m "subject
+
+non-trailer line
+Myfooter: x" &&
+ git cat-file commit HEAD | sed -e "1,/^\$/d" > actual &&
+ (
+ echo subject
+ echo
+ echo non-trailer line
+ echo Myfooter: x
+ echo "Signed-off-by: $GIT_COMMITTER_NAME <$GIT_COMMITTER_EMAIL>"
+ ) >expected &&
+ test_cmp expected actual
+'
+
test_expect_success 'multiple -m' '
>negative &&
'
cat >expect <<\EOF
-error: Updating the following directories would lose untracked files in it:
+error: Updating the following directories would lose untracked files in them:
rep
rep2
test_lazy_prereq MKTEMP '
tempdir=$(mktemp -d -t foo.XXXXXX) &&
- test -d "$tempdir"
+ test -d "$tempdir" &&
+ rmdir "$tempdir"
'
test_expect_success MKTEMP 'temporary filenames are used with mergetool.writeToTemp' '
echo master >sub/sub &&
git add sub/sub &&
git commit -m "added sub/sub" &&
+ git tag v1 &&
echo test >>file &&
echo test >>sub/sub &&
git add file sub/sub &&
grep file output
'
-run_dir_diff_test 'difftool --dir-diff from subdirectory' '
+run_dir_diff_test 'difftool --dir-diff branch from subdirectory' '
(
cd sub &&
git difftool --dir-diff $symlinks --extcmd ls branch >output &&
- grep sub output &&
- grep file output
+ # "sub" must only exist in "right"
+ # "file" and "file2" must be listed in both "left" and "right"
+ test "1" = $(grep sub output | wc -l) &&
+ test "2" = $(grep file"$" output | wc -l) &&
+ test "2" = $(grep file2 output | wc -l)
+ )
+'
+
+run_dir_diff_test 'difftool --dir-diff v1 from subdirectory' '
+ (
+ cd sub &&
+ git difftool --dir-diff $symlinks --extcmd ls v1 >output &&
+ # "sub" and "file" exist in both v1 and HEAD.
+ # "file2" is unchanged.
+ test "2" = $(grep sub output | wc -l) &&
+ test "2" = $(grep file output | wc -l) &&
+ test "0" = $(grep file2 output | wc -l)
+ )
+'
+
+run_dir_diff_test 'difftool --dir-diff branch from subdirectory w/ pathspec' '
+ (
+ cd sub &&
+ git difftool --dir-diff $symlinks --extcmd ls branch -- .>output &&
+ # "sub" only exists in "right"
+ # "file" and "file2" must not be listed
+ test "1" = $(grep sub output | wc -l) &&
+ test "0" = $(grep file output | wc -l)
+ )
+'
+
+run_dir_diff_test 'difftool --dir-diff v1 from subdirectory w/ pathspec' '
+ (
+ cd sub &&
+ git difftool --dir-diff $symlinks --extcmd ls v1 -- .>output &&
+ # "sub" exists in v1 and HEAD
+ # "file" is filtered out by the pathspec
+ test "2" = $(grep sub output | wc -l) &&
+ test "0" = $(grep file output | wc -l)
)
'
;;
esac
-deepdir=nothing-above
-ceiling=$PWD
-
test_expect_success 'git svn --version works anywhere' '
- mkdir -p "$deepdir" && (
- GIT_CEILING_DIRECTORIES="$ceiling" &&
- export GIT_CEILING_DIRECTORIES &&
- cd "$deepdir" &&
- git svn --version
- )
+ nongit git svn --version
'
test_expect_success 'git svn help works anywhere' '
- mkdir -p "$deepdir" && (
- GIT_CEILING_DIRECTORIES="$ceiling" &&
- export GIT_CEILING_DIRECTORIES &&
- cd "$deepdir" &&
- git svn help
- )
+ nongit git svn help
'
test_expect_success \
)
'
+test_expect_success 'submit from worktree' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$git" &&
+ git worktree add ../worktree-test
+ ) &&
+ (
+ cd "$git/../worktree-test" &&
+ test_commit "worktree-commit" &&
+ git config git-p4.skipSubmitEdit true &&
+ git p4 submit
+ ) &&
+ (
+ cd "$cli" &&
+ p4 sync &&
+ test_path_is_file worktree-commit.t
+ )
+'
+
test_expect_success 'kill p4d' '
kill_p4d
'
)
'
+test_expect_success 'use --git-dir option and GIT_DIR' '
+ test_when_finished cleanup_git &&
+ git p4 clone //depot --destination="$git" &&
+ (
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ test_commit first-change &&
+ git p4 submit --git-dir "$git"
+ ) &&
+ (
+ cd "$cli" &&
+ p4 sync &&
+ test_path_is_file first-change.t &&
+ echo "cli_file" >cli_file.t &&
+ p4 add cli_file.t &&
+ p4 submit -d "cli change"
+ ) &&
+ (git --git-dir "$git" p4 sync) &&
+ (cd "$git" && git checkout -q p4/master) &&
+ test_path_is_file "$git"/cli_file.t &&
+ (
+ cd "$cli" &&
+ echo "cli_file2" >cli_file2.t &&
+ p4 add cli_file2.t &&
+ p4 submit -d "cli change2"
+ ) &&
+ (GIT_DIR="$git" git p4 sync) &&
+ (cd "$git" && git checkout -q p4/master) &&
+ test_path_is_file "$git"/cli_file2.t
+'
+
+
test_expect_success 'kill p4d' '
kill_p4d
'
)
'
+test_expect_success 'submit --shelve' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$cli" &&
+ p4 revert ... &&
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ test_commit "shelveme1" &&
+ git p4 submit --origin=HEAD^ &&
+
+ echo 654321 >shelveme2.t &&
+ echo 123456 >>shelveme1.t &&
+ git add shelveme* &&
+ git commit -m"shelvetest" &&
+ git p4 submit --shelve --origin=HEAD^ &&
+
+ test_path_is_file shelveme1.t &&
+ test_path_is_file shelveme2.t
+ ) &&
+ (
+ cd "$cli" &&
+ change=$(p4 -G changes -s shelved -m 1 //depot/... | \
+ marshal_dump change) &&
+ p4 describe -S $change | grep shelveme2 &&
+ p4 describe -S $change | grep 123456 &&
+ test_path_is_file shelveme1.t &&
+ test_path_is_missing shelveme2.t
+ )
+'
+
+# Update an existing shelved changelist
+
+test_expect_success 'submit --update-shelve' '
+ test_when_finished cleanup_git &&
+ git p4 clone --dest="$git" //depot &&
+ (
+ cd "$cli" &&
+ p4 revert ... &&
+ cd "$git" &&
+ git config git-p4.skipSubmitEdit true &&
+ test_commit "test-update-shelved-change" &&
+ git p4 submit --origin=HEAD^ --shelve &&
+
+ shelf_cl=$(p4 -G changes -s shelved -m 1 |\
+ marshal_dump change) &&
+ test -n $shelf_cl &&
+ echo "updating shelved change list $shelf_cl" &&
+
+ echo "updated-line" >>shelf.t &&
+ echo added-file.t >added-file.t &&
+ git add shelf.t added-file.t &&
+ git rm -f test-update-shelved-change.t &&
+ git commit --amend -C HEAD &&
+ git show --stat HEAD &&
+ git p4 submit -v --origin HEAD^ --update-shelve $shelf_cl &&
+ echo "done git p4 submit"
+ ) &&
+ (
+ cd "$cli" &&
+ change=$(p4 -G changes -s shelved -m 1 //depot/... | \
+ marshal_dump change) &&
+ p4 unshelve -c $change -s $change &&
+ grep -q updated-line shelf.t &&
+ p4 describe -S $change | grep added-file.t &&
+ test_path_is_missing test-update-shelved-change.t
+ )
+'
+
test_expect_success 'kill p4d' '
kill_p4d
'
(
cd "$cli" &&
+ >file0.dat &&
+ p4 add file0.dat &&
echo "content 1 txt 23 bytes" >file1.txt &&
p4 add file1.txt &&
echo "content 2-3 bin 25 bytes" >file2.dat &&
'
test_expect_success '__gitdir - not a git repository' '
- (
- cd subdir/subsubdir &&
- GIT_CEILING_DIRECTORIES="$TRASH_DIRECTORY" &&
- export GIT_CEILING_DIRECTORIES &&
- test_must_fail __gitdir
- )
+ nongit test_must_fail __gitdir
'
test_expect_success '__gitcomp - trailing space - options' '
}
' - "$1"
}
+
+# run "$@" inside a non-git directory
+nongit () {
+ test -d non-repo ||
+ mkdir non-repo ||
+ return 1
+
+ (
+ GIT_CEILING_DIRECTORIES=$(pwd) &&
+ export GIT_CEILING_DIRECTORIES &&
+ cd non-repo &&
+ "$@"
+ )
+}
#include "string-list.h"
#include "strbuf.h"
#include "argv-array.h"
+#include "quote.h"
struct tmp_objdir {
struct strbuf path;
*/
static void env_append(struct argv_array *env, const char *key, const char *val)
{
- const char *old = getenv(key);
+ struct strbuf quoted = STRBUF_INIT;
+ const char *old;
+ /*
+ * Avoid quoting if it's not necessary, for maximum compatibility
+ * with older parsers which don't understand the quoting.
+ */
+ if (*val == '"' || strchr(val, PATH_SEP)) {
+		strbuf_addch(&quoted, '"');
+		quote_c_style(val, &quoted, NULL, 1);
+		strbuf_addch(&quoted, '"');
+ val = quoted.buf;
+ }
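+	/*
+	 * For illustration only: on a platform where PATH_SEP is ':', a
+	 * val such as one:two is appended as "one:two", double quotes
+	 * included, so a reader that splits the variable on PATH_SEP and
+	 * understands the quoting keeps the entry in one piece.
+	 */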
+
+ old = getenv(key);
if (!old)
argv_array_pushf(env, "%s=%s", key, val);
else
argv_array_pushf(env, "%s=%s%c%s", key, old, PATH_SEP, val);
+
+	strbuf_release(&quoted);
}
static void env_replace(struct argv_array *env, const char *key, const char *val)
static char *separators = ":";
+static int configured;
+
#define TRAILER_ARG_STRING "$ARG"
static const char *git_generated_prefixes[] = {
return same_token(a, b) && same_value(a, b);
}
-static inline int contains_only_spaces(const char *str)
+static inline int is_blank_line(const char *str)
{
const char *s = str;
- while (*s && isspace(*s))
+ while (*s && *s != '\n' && isspace(*s))
s++;
- return !*s;
+ return !*s || *s == '\n';
}
static inline void strbuf_replace(struct strbuf *sb, const char *a, const char *b)
return 0;
}
+static void ensure_configured(void)
+{
+ if (configured)
+ return;
+
+ /* Default config must be setup first */
+ git_config(git_trailer_default_config, NULL);
+ git_config(git_trailer_config, NULL);
+ configured = 1;
+}
+
static const char *token_from_item(struct arg_item *item, char *tok)
{
if (item->conf.key)
}
/*
- * Return the location of the first separator in line, or -1 if there is no
- * separator.
+ * If the given line is of the form
+ * "<token><optional whitespace><separator>..." or "<separator>...", return the
+ * location of the separator. Otherwise, return -1. The optional whitespace
+ * is allowed there primarily to allow things like "Bug #43" where <token> is
+ * "Bug" and <separator> is "#".
+ *
+ * The separator-starts-line case (in which this function returns 0) is
+ * distinguished from the non-well-formed-line case (in which this function
+ * returns -1) because some callers of this function need such a distinction.
*/
static int find_separator(const char *line, const char *separators)
{
- int loc = strcspn(line, separators);
- if (!line[loc])
- return -1;
- return loc;
+ int whitespace_found = 0;
+ const char *c;
+ for (c = line; *c; c++) {
+ if (strchr(separators, *c))
+ return c - line;
+ if (!whitespace_found && (isalnum(*c) || *c == '-'))
+ continue;
+ if (c != line && (*c == ' ' || *c == '\t')) {
+ whitespace_found = 1;
+ continue;
+ }
+ break;
+ }
+ return -1;
}
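/*
 * For illustration: with separators ":", "Signed-off-by: A U Thor" yields
 * 13 (the offset of the colon) and "update the code" yields -1, because
 * once whitespace is seen only more whitespace or a separator may follow;
 * with separators "#", "Bug #43" yields 4.
 */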
/*
free(cl_separators);
}
-static struct strbuf **read_input_file(const char *file)
+static void read_input_file(struct strbuf *sb, const char *file)
{
- struct strbuf **lines;
- struct strbuf sb = STRBUF_INIT;
-
if (file) {
- if (strbuf_read_file(&sb, file, 0) < 0)
+ if (strbuf_read_file(sb, file, 0) < 0)
die_errno(_("could not read input file '%s'"), file);
} else {
- if (strbuf_read(&sb, fileno(stdin), 0) < 0)
+ if (strbuf_read(sb, fileno(stdin), 0) < 0)
die_errno(_("could not read from stdin"));
}
+}
- lines = strbuf_split(&sb, '\n');
+static const char *next_line(const char *str)
+{
+ const char *nl = strchrnul(str, '\n');
+ return nl + !!*nl;
+}
- strbuf_release(&sb);
+/*
+ * Return the position of the start of the last line. If len is 0, return -1.
+ */
+static int last_line(const char *buf, size_t len)
+{
+ int i;
+ if (len == 0)
+ return -1;
+ if (len == 1)
+ return 0;
+ /*
+ * Skip the last character (in addition to the null terminator),
+ * because if the last character is a newline, it is considered as part
+ * of the last line anyway.
+ */
+ i = len - 2;
- return lines;
+ for (; i >= 0; i--) {
+ if (buf[i] == '\n')
+ return i + 1;
+ }
+ return 0;
}
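/*
 * For illustration: last_line("a\nb\n", 4) and last_line("a\nb", 3) both
 * return 2, the offset of "b"; a trailing newline counts as part of the
 * last line rather than starting a new, empty one.
 */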
/*
- * Return the (0 based) index of the start of the patch or the line
- * count if there is no patch in the message.
+ * Return the position of the start of the patch or the length of str if there
+ * is no patch in the message.
*/
-static int find_patch_start(struct strbuf **lines, int count)
+static int find_patch_start(const char *str)
{
- int i;
+ const char *s;
- /* Get the start of the patch part if any */
- for (i = 0; i < count; i++) {
- if (starts_with(lines[i]->buf, "---"))
- return i;
+ for (s = str; *s; s = next_line(s)) {
+ if (starts_with(s, "---"))
+ return s - str;
}
- return count;
+ return s - str;
}
/*
- * Return the (0 based) index of the first trailer line or count if
- * there are no trailers. Trailers are searched only in the lines from
- * index (count - 1) down to index 0.
+ * Return the position of the first trailer line or len if there are no
+ * trailers.
*/
-static int find_trailer_start(struct strbuf **lines, int count)
+static int find_trailer_start(const char *buf, size_t len)
{
- int start, end_of_title, only_spaces = 1;
+ const char *s;
+ int end_of_title, l, only_spaces = 1;
int recognized_prefix = 0, trailer_lines = 0, non_trailer_lines = 0;
/*
* Number of possible continuation lines encountered. This will be
int possible_continuation_lines = 0;
/* The first paragraph is the title and cannot be trailers */
- for (start = 0; start < count; start++) {
- if (lines[start]->buf[0] == comment_line_char)
+ for (s = buf; s < buf + len; s = next_line(s)) {
+ if (s[0] == comment_line_char)
continue;
- if (contains_only_spaces(lines[start]->buf))
+ if (is_blank_line(s))
break;
}
- end_of_title = start;
+ end_of_title = s - buf;
/*
* Get the start of the trailers by looking starting from the end for a
* trailers, or (ii) contains at least one Git-generated trailer and
* consists of at least 25% trailers.
*/
- for (start = count - 1; start >= end_of_title; start--) {
+ for (l = last_line(buf, len);
+ l >= end_of_title;
+ l = last_line(buf, l)) {
+ const char *bol = buf + l;
const char **p;
int separator_pos;
- if (lines[start]->buf[0] == comment_line_char) {
+ if (bol[0] == comment_line_char) {
non_trailer_lines += possible_continuation_lines;
possible_continuation_lines = 0;
continue;
}
- if (contains_only_spaces(lines[start]->buf)) {
+ if (is_blank_line(bol)) {
if (only_spaces)
continue;
non_trailer_lines += possible_continuation_lines;
if (recognized_prefix &&
trailer_lines * 3 >= non_trailer_lines)
- return start + 1;
- if (trailer_lines && !non_trailer_lines)
- return start + 1;
- return count;
+ return next_line(bol) - buf;
+ else if (trailer_lines && !non_trailer_lines)
+ return next_line(bol) - buf;
+ return len;
}
only_spaces = 0;
for (p = git_generated_prefixes; *p; p++) {
- if (starts_with(lines[start]->buf, *p)) {
+ if (starts_with(bol, *p)) {
trailer_lines++;
possible_continuation_lines = 0;
recognized_prefix = 1;
}
}
- separator_pos = find_separator(lines[start]->buf, separators);
- if (separator_pos >= 1 && !isspace(lines[start]->buf[0])) {
+ separator_pos = find_separator(bol, separators);
+ if (separator_pos >= 1 && !isspace(bol[0])) {
struct list_head *pos;
trailer_lines++;
list_for_each(pos, &conf_head) {
struct arg_item *item;
item = list_entry(pos, struct arg_item, list);
- if (token_matches_item(lines[start]->buf, item,
+ if (token_matches_item(bol, item,
separator_pos)) {
recognized_prefix = 1;
break;
}
}
- } else if (isspace(lines[start]->buf[0]))
+ } else if (isspace(bol[0]))
possible_continuation_lines++;
else {
non_trailer_lines++;
;
}
- return count;
-}
-
-/* Get the index of the end of the trailers */
-static int find_trailer_end(struct strbuf **lines, int patch_start)
-{
- struct strbuf sb = STRBUF_INIT;
- int i, ignore_bytes;
-
- for (i = 0; i < patch_start; i++)
- strbuf_addbuf(&sb, lines[i]);
- ignore_bytes = ignore_non_trailer(&sb);
- strbuf_release(&sb);
- for (i = patch_start - 1; i >= 0 && ignore_bytes > 0; i--)
- ignore_bytes -= lines[i]->len;
-
- return i + 1;
+ return len;
}
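
To make the acceptance rule above concrete, here is a hedged illustration; the message and the helper function below are invented for this write-up (they are not part of the patch) and assume the surrounding trailer.c context, where find_trailer_start() and the default ":" separator are visible.

/*
 * Invented example: the final paragraph of this made-up message should be
 * accepted as a trailer block, because it contains a Git-generated trailer
 * ("Signed-off-by:") and well over 25% of its lines parse as
 * "token: value" trailers.
 */
static void illustrate_trailer_block(void)
{
	static const char msg[] =
		"subject\n"
		"\n"
		"body paragraph\n"
		"\n"
		"Signed-off-by: A U Thor <author@example.com>\n"
		"[one free-form line is tolerated in the block]\n";

	/*
	 * Scanning backwards, the blank line before "Signed-off-by:" stops
	 * the search, so the returned offset should be that of the
	 * "Signed-off-by:" line.
	 */
	(void)find_trailer_start(msg, strlen(msg));
}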
-static int has_blank_line_before(struct strbuf **lines, int start)
+/* Return the position of the end of the trailers. */
+static int find_trailer_end(const char *buf, size_t len)
{
- for (;start >= 0; start--) {
- if (lines[start]->buf[0] == comment_line_char)
- continue;
- return contains_only_spaces(lines[start]->buf);
- }
- return 0;
+ return len - ignore_non_trailer(buf, len);
}
-static void print_lines(FILE *outfile, struct strbuf **lines, int start, int end)
+static int ends_with_blank_line(const char *buf, size_t len)
{
- int i;
- for (i = start; lines[i] && i < end; i++)
- fprintf(outfile, "%s", lines[i]->buf);
+ int ll = last_line(buf, len);
+ if (ll < 0)
+ return 0;
+ return is_blank_line(buf + ll);
}
static int process_input_file(FILE *outfile,
- struct strbuf **lines,
+ const char *str,
struct list_head *head)
{
- int count = 0;
- int patch_start, trailer_start, trailer_end, i;
+ struct trailer_info info;
struct strbuf tok = STRBUF_INIT;
struct strbuf val = STRBUF_INIT;
- struct trailer_item *last = NULL;
-
- /* Get the line count */
- while (lines[count])
- count++;
+ int i;
- patch_start = find_patch_start(lines, count);
- trailer_end = find_trailer_end(lines, patch_start);
- trailer_start = find_trailer_start(lines, trailer_end);
+ trailer_info_get(&info, str);
/* Print lines before the trailers as is */
- print_lines(outfile, lines, 0, trailer_start);
+ fwrite(str, 1, info.trailer_start - str, outfile);
- if (!has_blank_line_before(lines, trailer_start - 1))
+ if (!info.blank_line_before_trailer)
fprintf(outfile, "\n");
- /* Parse trailer lines */
- for (i = trailer_start; i < trailer_end; i++) {
+ for (i = 0; i < info.trailer_nr; i++) {
int separator_pos;
- if (lines[i]->buf[0] == comment_line_char)
+ char *trailer = info.trailers[i];
+ if (trailer[0] == comment_line_char)
continue;
- if (last && isspace(lines[i]->buf[0])) {
- struct strbuf sb = STRBUF_INIT;
- strbuf_addf(&sb, "%s\n%s", last->value, lines[i]->buf);
- strbuf_strip_suffix(&sb, "\n");
- free(last->value);
- last->value = strbuf_detach(&sb, NULL);
- continue;
- }
- separator_pos = find_separator(lines[i]->buf, separators);
+ separator_pos = find_separator(trailer, separators);
if (separator_pos >= 1) {
- parse_trailer(&tok, &val, NULL, lines[i]->buf,
+ parse_trailer(&tok, &val, NULL, trailer,
separator_pos);
- last = add_trailer_item(head,
- strbuf_detach(&tok, NULL),
- strbuf_detach(&val, NULL));
+ add_trailer_item(head,
+ strbuf_detach(&tok, NULL),
+ strbuf_detach(&val, NULL));
} else {
- strbuf_addbuf(&val, lines[i]);
+ strbuf_addstr(&val, trailer);
strbuf_strip_suffix(&val, "\n");
add_trailer_item(head,
NULL,
strbuf_detach(&val, NULL));
- last = NULL;
}
}
- return trailer_end;
+ trailer_info_release(&info);
+
+ return info.trailer_end - str;
}
static void free_all(struct list_head *head)
{
LIST_HEAD(head);
LIST_HEAD(arg_head);
- struct strbuf **lines;
+ struct strbuf sb = STRBUF_INIT;
int trailer_end;
FILE *outfile = stdout;
- /* Default config must be setup first */
- git_config(git_trailer_default_config, NULL);
- git_config(git_trailer_config, NULL);
+ ensure_configured();
- lines = read_input_file(file);
+ read_input_file(&sb, file);
if (in_place)
outfile = create_in_place_tempfile(file);
/* Print the lines before the trailers */
- trailer_end = process_input_file(outfile, lines, &head);
+ trailer_end = process_input_file(outfile, sb.buf, &head);
process_command_line_args(&arg_head, trailers);
free_all(&head);
/* Print the lines after the trailers as is */
- print_lines(outfile, lines, trailer_end, INT_MAX);
+ fwrite(sb.buf + trailer_end, 1, sb.len - trailer_end, outfile);
if (in_place)
if (rename_tempfile(&trailers_tempfile, file))
die_errno(_("could not rename temporary file to %s"), file);
- strbuf_list_free(lines);
+ strbuf_release(&sb);
+}
+
+void trailer_info_get(struct trailer_info *info, const char *str)
+{
+ int patch_start, trailer_end, trailer_start;
+ struct strbuf **trailer_lines, **ptr;
+ char **trailer_strings = NULL;
+ size_t nr = 0, alloc = 0;
+ char **last = NULL;
+
+ ensure_configured();
+
+ patch_start = find_patch_start(str);
+ trailer_end = find_trailer_end(str, patch_start);
+ trailer_start = find_trailer_start(str, trailer_end);
+
+ trailer_lines = strbuf_split_buf(str + trailer_start,
+ trailer_end - trailer_start,
+ '\n',
+ 0);
+ for (ptr = trailer_lines; *ptr; ptr++) {
+ if (last && isspace((*ptr)->buf[0])) {
+ struct strbuf sb = STRBUF_INIT;
+ strbuf_attach(&sb, *last, strlen(*last), strlen(*last));
+ strbuf_addbuf(&sb, *ptr);
+ *last = strbuf_detach(&sb, NULL);
+ continue;
+ }
+ ALLOC_GROW(trailer_strings, nr + 1, alloc);
+ trailer_strings[nr] = strbuf_detach(*ptr, NULL);
+ last = find_separator(trailer_strings[nr], separators) >= 1
+ ? &trailer_strings[nr]
+ : NULL;
+ nr++;
+ }
+ strbuf_list_free(trailer_lines);
+
+ info->blank_line_before_trailer = ends_with_blank_line(str,
+ trailer_start);
+ info->trailer_start = str + trailer_start;
+ info->trailer_end = str + trailer_end;
+ info->trailers = trailer_strings;
+ info->trailer_nr = nr;
+}
+
+void trailer_info_release(struct trailer_info *info)
+{
+ int i;
+ for (i = 0; i < info->trailer_nr; i++)
+ free(info->trailers[i]);
+ free(info->trailers);
}
#ifndef TRAILER_H
#define TRAILER_H
+struct trailer_info {
+ /*
+ * True if there is a blank line before the location pointed to by
+ * trailer_start.
+ */
+ int blank_line_before_trailer;
+
+ /*
+	 * Pointers to the start and end of the trailer block found. If no
+	 * trailer block is found, both pointers point to the end of the
+	 * input string.
+ */
+ const char *trailer_start, *trailer_end;
+
+ /*
+ * Array of trailers found.
+ */
+ char **trailers;
+ size_t trailer_nr;
+};
+
void process_trailers(const char *file, int in_place, int trim_empty,
struct string_list *trailers);
+void trailer_info_get(struct trailer_info *info, const char *str);
+
+void trailer_info_release(struct trailer_info *info);
+
#endif /* TRAILER_H */
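
As a rough usage sketch of the interface declared above (the caller below is invented and not part of the patch; it assumes the usual "cache.h"/git-compat-util environment for stdio):

#include "cache.h"
#include "trailer.h"

/*
 * Invented example: print the trailer block found in a commit message
 * buffer. The entries in info.trailers keep their line terminators when
 * the input lines end with a newline, so fputs() is enough here;
 * trailer_info_release() frees the array and its strings, not the
 * message itself.
 */
static void print_trailer_block(const char *message)
{
	struct trailer_info info;
	size_t i;

	trailer_info_get(&info, message);
	for (i = 0; i < info.trailer_nr; i++)
		fputs(info.trailers[i], stdout);
	trailer_info_release(&info);
}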
if ((flags & TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND) && !is_bare_repository()) {
struct ref *ref = remote_refs;
+ struct sha1_array commits = SHA1_ARRAY_INIT;
+
for (; ref; ref = ref->next)
- if (!is_null_oid(&ref->new_oid) &&
- !push_unpushed_submodules(ref->new_oid.hash,
- transport->remote->name))
- die ("Failed to push all needed submodules!");
+ if (!is_null_oid(&ref->new_oid))
+ sha1_array_append(&commits, ref->new_oid.hash);
+
+ if (!push_unpushed_submodules(&commits,
+ transport->remote->name,
+ pretend)) {
+ sha1_array_clear(&commits);
+ die("Failed to push all needed submodules!");
+ }
+ sha1_array_clear(&commits);
}
- if ((flags & (TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND |
- TRANSPORT_RECURSE_SUBMODULES_CHECK)) && !is_bare_repository()) {
+ if (((flags & TRANSPORT_RECURSE_SUBMODULES_CHECK) ||
+ ((flags & TRANSPORT_RECURSE_SUBMODULES_ON_DEMAND) &&
+ !pretend)) && !is_bare_repository()) {
struct ref *ref = remote_refs;
struct string_list needs_pushing = STRING_LIST_INIT_DUP;
+ struct sha1_array commits = SHA1_ARRAY_INIT;
for (; ref; ref = ref->next)
- if (!is_null_oid(&ref->new_oid) &&
- find_unpushed_submodules(ref->new_oid.hash,
- transport->remote->name, &needs_pushing))
- die_with_unpushed_submodules(&needs_pushing);
+ if (!is_null_oid(&ref->new_oid))
+ sha1_array_append(&commits, ref->new_oid.hash);
+
+ if (find_unpushed_submodules(&commits, transport->remote->name,
+ &needs_pushing)) {
+ sha1_array_clear(&commits);
+ die_with_unpushed_submodules(&needs_pushing);
+ }
+ string_list_clear(&needs_pushing, 0);
+ sha1_array_clear(&commits);
}
push_ret = transport->push_refs(transport, remote_refs, flags);
{ 0x0825, 0x0827 },
{ 0x0829, 0x082D },
{ 0x0859, 0x085B },
-{ 0x08E4, 0x0902 },
+{ 0x08D4, 0x0902 },
{ 0x093A, 0x093A },
{ 0x093C, 0x093C },
{ 0x0941, 0x0948 },
{ 0x17C9, 0x17D3 },
{ 0x17DD, 0x17DD },
{ 0x180B, 0x180E },
+{ 0x1885, 0x1886 },
{ 0x18A9, 0x18A9 },
{ 0x1920, 0x1922 },
{ 0x1927, 0x1928 },
{ 0x1CF4, 0x1CF4 },
{ 0x1CF8, 0x1CF9 },
{ 0x1DC0, 0x1DF5 },
-{ 0x1DFC, 0x1DFF },
+{ 0x1DFB, 0x1DFF },
{ 0x200B, 0x200F },
{ 0x202A, 0x202E },
{ 0x2060, 0x2064 },
{ 0x3099, 0x309A },
{ 0xA66F, 0xA672 },
{ 0xA674, 0xA67D },
-{ 0xA69F, 0xA69F },
+{ 0xA69E, 0xA69F },
{ 0xA6F0, 0xA6F1 },
{ 0xA802, 0xA802 },
{ 0xA806, 0xA806 },
{ 0xA80B, 0xA80B },
{ 0xA825, 0xA826 },
-{ 0xA8C4, 0xA8C4 },
+{ 0xA8C4, 0xA8C5 },
{ 0xA8E0, 0xA8F1 },
{ 0xA926, 0xA92D },
{ 0xA947, 0xA951 },
{ 0xABED, 0xABED },
{ 0xFB1E, 0xFB1E },
{ 0xFE00, 0xFE0F },
-{ 0xFE20, 0xFE2D },
+{ 0xFE20, 0xFE2F },
{ 0xFEFF, 0xFEFF },
{ 0xFFF9, 0xFFFB },
{ 0x101FD, 0x101FD },
{ 0x11173, 0x11173 },
{ 0x11180, 0x11181 },
{ 0x111B6, 0x111BE },
+{ 0x111CA, 0x111CC },
{ 0x1122F, 0x11231 },
{ 0x11234, 0x11234 },
{ 0x11236, 0x11237 },
+{ 0x1123E, 0x1123E },
{ 0x112DF, 0x112DF },
{ 0x112E3, 0x112EA },
-{ 0x11301, 0x11301 },
+{ 0x11300, 0x11301 },
{ 0x1133C, 0x1133C },
{ 0x11340, 0x11340 },
{ 0x11366, 0x1136C },
{ 0x11370, 0x11374 },
+{ 0x11438, 0x1143F },
+{ 0x11442, 0x11444 },
+{ 0x11446, 0x11446 },
{ 0x114B3, 0x114B8 },
{ 0x114BA, 0x114BA },
{ 0x114BF, 0x114C0 },
{ 0x115B2, 0x115B5 },
{ 0x115BC, 0x115BD },
{ 0x115BF, 0x115C0 },
+{ 0x115DC, 0x115DD },
{ 0x11633, 0x1163A },
{ 0x1163D, 0x1163D },
{ 0x1163F, 0x11640 },
{ 0x116AD, 0x116AD },
{ 0x116B0, 0x116B5 },
{ 0x116B7, 0x116B7 },
+{ 0x1171D, 0x1171F },
+{ 0x11722, 0x11725 },
+{ 0x11727, 0x1172B },
+{ 0x11C30, 0x11C36 },
+{ 0x11C38, 0x11C3D },
+{ 0x11C3F, 0x11C3F },
+{ 0x11C92, 0x11CA7 },
+{ 0x11CAA, 0x11CB0 },
+{ 0x11CB2, 0x11CB3 },
+{ 0x11CB5, 0x11CB6 },
{ 0x16AF0, 0x16AF4 },
{ 0x16B30, 0x16B36 },
{ 0x16F8F, 0x16F92 },
{ 0x1D185, 0x1D18B },
{ 0x1D1AA, 0x1D1AD },
{ 0x1D242, 0x1D244 },
+{ 0x1DA00, 0x1DA36 },
+{ 0x1DA3B, 0x1DA6C },
+{ 0x1DA75, 0x1DA75 },
+{ 0x1DA84, 0x1DA84 },
+{ 0x1DA9B, 0x1DA9F },
+{ 0x1DAA1, 0x1DAAF },
+{ 0x1E000, 0x1E006 },
+{ 0x1E008, 0x1E018 },
+{ 0x1E01B, 0x1E021 },
+{ 0x1E023, 0x1E024 },
+{ 0x1E026, 0x1E02A },
{ 0x1E8D0, 0x1E8D6 },
+{ 0x1E944, 0x1E94A },
{ 0xE0001, 0xE0001 },
{ 0xE0020, 0xE007F },
{ 0xE0100, 0xE01EF }
};
static const struct interval double_width[] = {
-{ /* plane */ 0x0, 0x1C },
-{ /* plane */ 0x1C, 0x21 },
-{ /* plane */ 0x21, 0x22 },
-{ /* plane */ 0x22, 0x23 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
-{ /* plane */ 0x0, 0x0 },
{ 0x1100, 0x115F },
+{ 0x231A, 0x231B },
{ 0x2329, 0x232A },
+{ 0x23E9, 0x23EC },
+{ 0x23F0, 0x23F0 },
+{ 0x23F3, 0x23F3 },
+{ 0x25FD, 0x25FE },
+{ 0x2614, 0x2615 },
+{ 0x2648, 0x2653 },
+{ 0x267F, 0x267F },
+{ 0x2693, 0x2693 },
+{ 0x26A1, 0x26A1 },
+{ 0x26AA, 0x26AB },
+{ 0x26BD, 0x26BE },
+{ 0x26C4, 0x26C5 },
+{ 0x26CE, 0x26CE },
+{ 0x26D4, 0x26D4 },
+{ 0x26EA, 0x26EA },
+{ 0x26F2, 0x26F3 },
+{ 0x26F5, 0x26F5 },
+{ 0x26FA, 0x26FA },
+{ 0x26FD, 0x26FD },
+{ 0x2705, 0x2705 },
+{ 0x270A, 0x270B },
+{ 0x2728, 0x2728 },
+{ 0x274C, 0x274C },
+{ 0x274E, 0x274E },
+{ 0x2753, 0x2755 },
+{ 0x2757, 0x2757 },
+{ 0x2795, 0x2797 },
+{ 0x27B0, 0x27B0 },
+{ 0x27BF, 0x27BF },
+{ 0x2B1B, 0x2B1C },
+{ 0x2B50, 0x2B50 },
+{ 0x2B55, 0x2B55 },
{ 0x2E80, 0x2E99 },
{ 0x2E9B, 0x2EF3 },
{ 0x2F00, 0x2FD5 },
{ 0xFE68, 0xFE6B },
{ 0xFF01, 0xFF60 },
{ 0xFFE0, 0xFFE6 },
+{ 0x16FE0, 0x16FE0 },
+{ 0x17000, 0x187EC },
+{ 0x18800, 0x18AF2 },
{ 0x1B000, 0x1B001 },
+{ 0x1F004, 0x1F004 },
+{ 0x1F0CF, 0x1F0CF },
+{ 0x1F18E, 0x1F18E },
+{ 0x1F191, 0x1F19A },
{ 0x1F200, 0x1F202 },
-{ 0x1F210, 0x1F23A },
+{ 0x1F210, 0x1F23B },
{ 0x1F240, 0x1F248 },
{ 0x1F250, 0x1F251 },
+{ 0x1F300, 0x1F320 },
+{ 0x1F32D, 0x1F335 },
+{ 0x1F337, 0x1F37C },
+{ 0x1F37E, 0x1F393 },
+{ 0x1F3A0, 0x1F3CA },
+{ 0x1F3CF, 0x1F3D3 },
+{ 0x1F3E0, 0x1F3F0 },
+{ 0x1F3F4, 0x1F3F4 },
+{ 0x1F3F8, 0x1F43E },
+{ 0x1F440, 0x1F440 },
+{ 0x1F442, 0x1F4FC },
+{ 0x1F4FF, 0x1F53D },
+{ 0x1F54B, 0x1F54E },
+{ 0x1F550, 0x1F567 },
+{ 0x1F57A, 0x1F57A },
+{ 0x1F595, 0x1F596 },
+{ 0x1F5A4, 0x1F5A4 },
+{ 0x1F5FB, 0x1F64F },
+{ 0x1F680, 0x1F6C5 },
+{ 0x1F6CC, 0x1F6CC },
+{ 0x1F6D0, 0x1F6D2 },
+{ 0x1F6EB, 0x1F6EC },
+{ 0x1F6F4, 0x1F6F6 },
+{ 0x1F910, 0x1F91E },
+{ 0x1F920, 0x1F927 },
+{ 0x1F930, 0x1F930 },
+{ 0x1F933, 0x1F93E },
+{ 0x1F940, 0x1F94B },
+{ 0x1F950, 0x1F95E },
+{ 0x1F980, 0x1F991 },
+{ 0x1F9C0, 0x1F9C0 },
{ 0x20000, 0x2FFFD },
{ 0x30000, 0x3FFFD }
};
xstrfmt(msg, cmd, cmd);
msgs[ERROR_NOT_UPTODATE_DIR] =
- _("Updating the following directories would lose untracked files in it:\n%s");
+ _("Updating the following directories would lose untracked files in them:\n%s");
if (!strcmp(cmd, "checkout"))
msg = advice_commit_before_merge
+++ /dev/null
-#!/bin/sh
-#See http://www.unicode.org/reports/tr44/
-#
-#Me Enclosing_Mark an enclosing combining mark
-#Mn Nonspacing_Mark a nonspacing combining mark (zero advance width)
-#Cf Format a format control character
-#
-UNICODEWIDTH_H=../unicode_width.h
-if ! test -d unicode; then
- mkdir unicode
-fi &&
-( cd unicode &&
- if ! test -f UnicodeData.txt; then
- wget http://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt
- fi &&
- if ! test -f EastAsianWidth.txt; then
- wget http://www.unicode.org/Public/UCD/latest/ucd/EastAsianWidth.txt
- fi &&
- if ! test -d uniset; then
- git clone https://github.com/depp/uniset.git
- fi &&
- (
- cd uniset &&
- if ! test -x uniset; then
- autoreconf -i &&
- ./configure --enable-warnings=-Werror CFLAGS='-O0 -ggdb'
- fi &&
- make
- ) &&
- UNICODE_DIR=. && export UNICODE_DIR &&
- cat >$UNICODEWIDTH_H <<-EOF
- static const struct interval zero_width[] = {
- $(uniset/uniset --32 cat:Me,Mn,Cf + U+1160..U+11FF - U+00AD |
- grep -v plane)
- };
- static const struct interval double_width[] = {
- $(uniset/uniset --32 eaw:F,W)
- };
- EOF
-)
strbuf_addf(&path, "%s/HEAD", get_git_common_dir());
- if (parse_ref(path.buf, &head_ref, &is_detached) < 0)
- goto done;
-
- worktree = xmalloc(sizeof(struct worktree));
+ worktree = xcalloc(1, sizeof(*worktree));
worktree->path = strbuf_detach(&worktree_path, NULL);
- worktree->id = NULL;
worktree->is_bare = is_bare;
- worktree->head_ref = NULL;
worktree->is_detached = is_detached;
- worktree->is_current = 0;
- add_head_info(&head_ref, worktree);
- worktree->lock_reason = NULL;
- worktree->lock_reason_valid = 0;
+ if (!parse_ref(path.buf, &head_ref, &is_detached))
+ add_head_info(&head_ref, worktree);
-done:
strbuf_release(&path);
strbuf_release(&worktree_path);
strbuf_release(&head_ref);
if (parse_ref(path.buf, &head_ref, &is_detached) < 0)
goto done;
- worktree = xmalloc(sizeof(struct worktree));
+ worktree = xcalloc(1, sizeof(*worktree));
worktree->path = strbuf_detach(&worktree_path, NULL);
worktree->id = xstrdup(id);
- worktree->is_bare = 0;
- worktree->head_ref = NULL;
worktree->is_detached = is_detached;
- worktree->is_current = 0;
add_head_info(&head_ref, worktree);
- worktree->lock_reason = NULL;
- worktree->lock_reason_valid = 0;
done:
strbuf_release(&path);
free(git_dir);
}
-struct worktree **get_worktrees(void)
+static int compare_worktree(const void *a_, const void *b_)
+{
+ const struct worktree *const *a = a_;
+ const struct worktree *const *b = b_;
+ return fspathcmp((*a)->path, (*b)->path);
+}
+
+struct worktree **get_worktrees(unsigned flags)
{
struct worktree **list = NULL;
struct strbuf path = STRBUF_INIT;
list = xmalloc(alloc * sizeof(struct worktree *));
- if ((list[counter] = get_main_worktree()))
- counter++;
+ list[counter++] = get_main_worktree();
strbuf_addf(&path, "%s/worktrees", get_git_common_dir());
dir = opendir(path.buf);
ALLOC_GROW(list, counter + 1, alloc);
list[counter] = NULL;
+ if (flags & GWT_SORT_LINKED)
+ /*
+ * don't sort the first item (main worktree), which will
+ * always be the first
+ */
+ QSORT(list + 1, counter - 1, compare_worktree);
+
mark_current_worktree(list);
return list;
}
if (worktrees)
free_worktrees(worktrees);
- worktrees = get_worktrees();
+ worktrees = get_worktrees(0);
for (i = 0; worktrees[i]; i++) {
struct worktree *wt = worktrees[i];
/* Functions for acting on the information about worktrees. */
+#define GWT_SORT_LINKED (1 << 0) /* keeps linked worktrees sorted */
+
/*
* Get the worktrees. The primary worktree will always be the first returned,
* and linked worktrees will be pointed to by 'next' in each subsequent
* The caller is responsible for freeing the memory from the returned
* worktree(s).
*/
-extern struct worktree **get_worktrees(void);
+extern struct worktree **get_worktrees(unsigned flags);
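
For illustration, a hedged sketch of a caller of this interface; the function below is invented, walks the NULL-terminated array that get_worktrees() returns, and releases it with the existing free_worktrees() helper.

/*
 * Invented example: print every worktree path, with the main worktree
 * first and the linked worktrees sorted by path.
 */
static void print_worktree_paths(void)
{
	struct worktree **worktrees = get_worktrees(GWT_SORT_LINKED);
	int i;

	for (i = 0; worktrees[i]; i++)
		printf("%s%s\n", worktrees[i]->path,
		       worktrees[i]->is_bare ? " (bare)" : "");
	free_worktrees(worktrees);
}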
/*
* Return git dir of the worktree. Note that the path may be relative.
int require_clean_work_tree(const char *action, const char *hint, int ignore_submodules, int gently)
{
struct lock_file *lock_file = xcalloc(1, sizeof(*lock_file));
- int err = 0;
+ int err = 0, fd;
- hold_locked_index(lock_file, 0);
+ fd = hold_locked_index(lock_file, 0);
refresh_cache(REFRESH_QUIET);
- update_index_if_able(&the_index, lock_file);
+ if (0 <= fd)
+ update_index_if_able(&the_index, lock_file);
rollback_lock_file(lock_file);
if (has_unstaged_changes(ignore_submodules)) {
return ha;
}
-#ifdef XDL_FAST_HASH
-
-#define REPEAT_BYTE(x) ((~0ul / 0xff) * (x))
-
-#define ONEBYTES REPEAT_BYTE(0x01)
-#define NEWLINEBYTES REPEAT_BYTE(0x0a)
-#define HIGHBITS REPEAT_BYTE(0x80)
-
-/* Return the high bit set in the first byte that is a zero */
-static inline unsigned long has_zero(unsigned long a)
-{
- return ((a - ONEBYTES) & ~a) & HIGHBITS;
-}
-
-static inline long count_masked_bytes(unsigned long mask)
-{
- if (sizeof(long) == 8) {
- /*
- * Jan Achrenius on G+: microoptimized version of
- * the simpler "(mask & ONEBYTES) * ONEBYTES >> 56"
- * that works for the bytemasks without having to
- * mask them first.
- */
- /*
- * return mask * 0x0001020304050608 >> 56;
- *
- * Doing it like this avoids warnings on 32-bit machines.
- */
- long a = (REPEAT_BYTE(0x01) / 0xff + 1);
- return mask * a >> (sizeof(long) * 7);
- } else {
- /* Carl Chatfield / Jan Achrenius G+ version for 32-bit */
- /* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
- long a = (0x0ff0001 + mask) >> 23;
- /* Fix the 1 for 00 case */
- return a & mask;
- }
-}
-
-unsigned long xdl_hash_record(char const **data, char const *top, long flags)
-{
- unsigned long hash = 5381;
- unsigned long a = 0, mask = 0;
- char const *ptr = *data;
- char const *end = top - sizeof(unsigned long) + 1;
-
- if (flags & XDF_WHITESPACE_FLAGS)
- return xdl_hash_record_with_whitespace(data, top, flags);
-
- ptr -= sizeof(unsigned long);
- do {
- hash += hash << 5;
- hash ^= a;
- ptr += sizeof(unsigned long);
- if (ptr >= end)
- break;
- a = *(unsigned long *)ptr;
- /* Do we have any '\n' bytes in this word? */
- mask = has_zero(a ^ NEWLINEBYTES);
- } while (!mask);
-
- if (ptr >= end) {
- /*
- * There is only a partial word left at the end of the
- * buffer. Because we may work with a memory mapping,
- * we have to grab the rest byte by byte instead of
- * blindly reading it.
- *
- * To avoid problems with masking in a signed value,
- * we use an unsigned char here.
- */
- const char *p;
- for (p = top - 1; p >= ptr; p--)
- a = (a << 8) + *((const unsigned char *)p);
- mask = has_zero(a ^ NEWLINEBYTES);
- if (!mask)
- /*
- * No '\n' found in the partial word. Make a
- * mask that matches what we read.
- */
- mask = 1UL << (8 * (top - ptr) + 7);
- }
-
- /* The mask *below* the first high bit set */
- mask = (mask - 1) & ~mask;
- mask >>= 7;
- hash += hash << 5;
- hash ^= a & mask;
-
- /* Advance past the last (possibly partial) word */
- ptr += count_masked_bytes(mask);
-
- if (ptr < top) {
- assert(*ptr == '\n');
- ptr++;
- }
-
- *data = ptr;
-
- return hash;
-}
-
-#else /* XDL_FAST_HASH */
-
unsigned long xdl_hash_record(char const **data, char const *top, long flags) {
unsigned long ha = 5381;
char const *ptr = *data;
return ha;
}
-#endif /* XDL_FAST_HASH */
-
unsigned int xdl_hashbits(unsigned int size) {
unsigned int val = 1, bits = 0;